import gradio as gr
from openai import OpenAI
import os

ACCESS_TOKEN = os.getenv("HF_TOKEN")
print("Access token loaded.")

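# The OpenAI SDK is used here purely as an HTTP client: base_url points it at the
# Hugging Face Inference API's OpenAI-compatible endpoint, and the HF_TOKEN value
# loaded above is passed as the API key.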
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1/",
    api_key=ACCESS_TOKEN,
)
print("OpenAI client initialized.")

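# respond() is the callback wired into gr.ChatInterface below. It receives the new
# user message plus the running history as (user, assistant) tuples and yields the
# growing response string so the UI updates as tokens stream in.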
def respond(
    message,
    history: list[tuple[str, str]]
):
    print(f"Received message: {message}")
    print(f"History: {history}")

    # Hardcoded system message and other parameters
    system_message = "You are a cryptocurrency trading assistant and market analyst. Your role is to provide users with data-driven insights, technical analysis (RSI, MACD, Bollinger Bands, Moving Averages, Fibonacci retracements, volume analysis, and price action), and investment advice tailored to their risk tolerance. Focus on actionable information, such as market conditions, key indicators, and investment strategies. Avoid speculation and provide clear, concise, and unbiased recommendations based on current data."
    max_tokens = 512
    temperature = 0.7
    top_p = 0.95
    frequency_penalty = 0.0
    seed = -1  # Random seed

    # Convert seed to None if -1 (meaning random)
    if seed == -1:
        seed = None

    messages = [{"role": "system", "content": system_message}]
    print("Initial messages array constructed.")

    # Add conversation history to the context
    for val in history:
        user_part = val[0]
        assistant_part = val[1]
        if user_part:
            messages.append({"role": "user", "content": user_part})
            print(f"Added user message to context: {user_part}")
        if assistant_part:
            messages.append({"role": "assistant", "content": assistant_part})
            print(f"Added assistant message to context: {assistant_part}")

    # Append the latest user message
    messages.append({"role": "user", "content": message})
    print("Latest user message appended.")

    # Hardcoded model: Llama 3.3 70B Instruct, served through the Inference API
    model_to_use = "meta-llama/Llama-3.3-70B-Instruct"
    print(f"Model selected for inference: {model_to_use}")

    # Start with an empty string to build the response as tokens stream in
    response = ""
    print("Sending request to OpenAI API.")

    for message_chunk in client.chat.completions.create(
        model=model_to_use,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
        frequency_penalty=frequency_penalty,
        seed=seed,
        messages=messages,
    ):
        # Some streamed chunks (e.g. the final one) carry no content, so guard
        # against None before concatenating.
        token_text = message_chunk.choices[0].delta.content
        if token_text is None:
            continue
        print(f"Received token: {token_text}")
        response += token_text
        yield response

    print("Completed response generation.")

# GRADIO UI

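# A customized Chatbot component is built first so its height, copy button, and
# like buttons can be configured, then handed to gr.ChatInterface below.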
chatbot = gr.Chatbot(height=600, show_copy_button=True, placeholder="Start chatting!", likeable=True, layout="panel")
print("Chatbot interface created.")

demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[],  # No additional inputs needed since everything is embedded
    fill_height=True,
    chatbot=chatbot,
    theme="Nymbo/Nymbo_Theme",
)
print("ChatInterface object created.")

with demo:
    # Placeholder block: everything is hardcoded above, so no extra controls
    # (system message box, model picker, sliders) are added to the UI.
    pass

print("Gradio interface initialized.")

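# Start the Gradio server when this script is executed directly.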
if __name__ == "__main__":
    print("Launching the demo application.")
    demo.launch()