import os
import gradio as gr
from openai import OpenAI
from typing import List, Tuple

# Define available models
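# Other Perplexity model IDs (e.g. "sonar", "sonar-reasoning") could be added here
# as display-name -> model-id pairs; only Sonar Pro is exposed by default.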
AVAILABLE_MODELS = {
    "Sonar Pro": "sonar-pro",
}

PX_ENDPOINT_URL = "https://api.perplexity.ai"
PX_API_KEY = os.getenv('PX_KEY')
PASSWORD = os.getenv("PASSWD")  # Store the password in an environment variable
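# Both PX_KEY and PASSWD must be set in the environment (e.g. as Space secrets)
# before launch; otherwise the API client and the login gate will not work.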

px_client = OpenAI(base_url=PX_ENDPOINT_URL, api_key=PX_API_KEY)
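# Perplexity exposes an OpenAI-compatible Chat Completions endpoint, so the
# standard OpenAI client can be pointed at it via base_url.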

def respond(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str,
    model_choice: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
):
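    """Stream a chat completion from the Perplexity API.

    Yields the partial response text as tokens arrive; the settings beyond
    (message, history) are supplied through ChatInterface's additional_inputs.
    """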
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    citations = []
    
    stream = px_client.chat.completions.create(
        model=AVAILABLE_MODELS[model_choice],
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,
    )
    
    for chunk in stream:
        # Append incremental tokens and stream the growing response as it arrives
        if chunk.choices and chunk.choices[0].delta.content:
            response += chunk.choices[0].delta.content
            yield response
        # Perplexity attaches a `citations` list of source URLs to response chunks;
        # the OpenAI client exposes such extra fields as attributes
        chunk_citations = getattr(chunk, "citations", None)
        if chunk_citations:
            citations = chunk_citations
    
    # Append citations as clickable links
    if citations:
        citation_text = "\n\nSources:\n" + "\n".join(
            [f"[{i+1}] [{url}]({url})" for i, url in enumerate(citations)]
        )
        response += citation_text
        yield response

def check_password(input_password):
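    """Compare the submitted password against PASSWD and toggle UI visibility."""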
    if input_password == PASSWORD:
        # Hide the password box, reveal the chat UI, clear any previous error
        return gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)
    else:
        # Wrong password: clear the box, keep the chat hidden, surface the error
        return gr.update(value="", interactive=True), gr.update(visible=False), gr.update(value="Incorrect password. Please try again.", visible=True)

with gr.Blocks() as demo:
    with gr.Column():
        password_input = gr.Textbox(
            type="password", label="Enter Password", interactive=True
        )
        submit_button = gr.Button("Submit")
        error_message = gr.Textbox(
            label="Error", visible=False, interactive=False
        )
    
    with gr.Column(visible=False) as chat_interface:
        system_prompt = gr.Textbox(
            value="You are a helpful assistant.", label="System message"
        )
        with gr.Column():
            model_choice = gr.Dropdown(
                choices=list(AVAILABLE_MODELS.keys()),
                value=list(AVAILABLE_MODELS.keys())[0],
                label="Select Model"
            )
            max_tokens = gr.Slider(
                minimum=1, maximum=30000, value=2048, step=100, label="Max new tokens"
            )
            temperature = gr.Slider(
                minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"
            )
            top_p = gr.Slider(
                minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"
            )

        chat = gr.ChatInterface(
            respond,
            # Already-rendered components are forwarded to respond() as extra
            # arguments after (message, history)
            additional_inputs=[system_prompt, model_choice, max_tokens, temperature, top_p],
            chatbot=gr.Chatbot(height=600),  # taller chat area for better visibility
        )
    
    submit_button.click(
        check_password,
        inputs=password_input,
        outputs=[password_input, chat_interface, error_message],
    )

if __name__ == "__main__":
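    # share=True requests a temporary public Gradio link in addition to the local
    # server; it can be dropped if the app is already hosted elsewhere.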
    demo.launch(share=True)