File size: 2,971 Bytes
f767699
 
 
3720552
f767699
 
 
 
 
 
 
 
 
3720552
f767699
 
 
 
 
 
 
3720552
f767699
 
 
3720552
f767699
 
 
3720552
f767699
 
 
3720552
f767699
 
3720552
f767699
 
 
3720552
f767699
3720552
 
 
 
 
 
 
 
 
 
f767699
 
3720552
 
 
 
 
 
f767699
3720552
f767699
3720552
 
 
 
f767699
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3720552
 
 
 
 
 
 
 
 
f767699
3720552
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
import os
import gradio as gr
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage, AIMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, MessagesState, StateGraph

def create_chat_app(api_key):
    """Build and compile a single-node LangGraph chat workflow.

    The graph has one node ("model") that forwards the accumulated
    messages to ``gpt-4o-mini`` and returns the model's reply, with an
    in-memory checkpointer so turns sharing a thread_id share state.

    Args:
        api_key: OpenAI API key used to construct the chat model.

    Returns:
        A compiled LangGraph application ready for ``invoke``.
    """
    model = ChatOpenAI(
        model="gpt-4o-mini",
        api_key=api_key,
        temperature=0,  # deterministic responses
    )

    def invoke_model(state: MessagesState):
        # Forward the full conversation so far; the returned message is
        # appended to the state's message list by MessagesState.
        return {"messages": model.invoke(state["messages"])}

    graph = StateGraph(state_schema=MessagesState)
    graph.add_node("model", invoke_model)
    graph.add_edge(START, "model")

    # NOTE(review): a fresh MemorySaver per call means checkpoints do not
    # survive across create_chat_app() invocations — callers re-send the
    # whole history, so this is currently harmless. Confirm if thread
    # persistence is ever expected.
    return graph.compile(checkpointer=MemorySaver())

def chat(message, history, api_key, thread_id):
    """Handle one chat turn for the Gradio UI.

    Args:
        message: The user's new message text.
        history: Chat history as a list of ``{"role", "content"}`` dicts
            (Gradio ``type="messages"`` format).
        api_key: OpenAI API key; if empty, the user is prompted for one.
        thread_id: Identifier passed to the LangGraph checkpointer config.

    Returns:
        Tuple of (textbox value, updated history) matching the
        ``outputs=[msg, chatbot]`` wiring.
    """
    if not api_key:
        # Preserve the existing conversation and keep the typed message in
        # the textbox. (Previously this replaced the whole chat with a
        # single warning and discarded the user's message.)
        return message, history + [
            {"role": "assistant", "content": "Please enter your OpenAI API key first."}
        ]

    try:
        # Build the graph app and scope state to this thread.
        app = create_chat_app(api_key)
        config = {"configurable": {"thread_id": thread_id}}

        # Convert Gradio message dicts to LangChain message objects.
        messages = []
        for msg in history:
            if msg["role"] == "user":
                messages.append(HumanMessage(content=msg["content"]))
            elif msg["role"] == "assistant":
                messages.append(AIMessage(content=msg["content"]))
        messages.append(HumanMessage(content=message))

        output = app.invoke({"messages": messages}, config)
        response = output["messages"][-1].content
    except Exception as e:
        # UI boundary: surface the failure in the chat rather than crashing
        # the Gradio handler.
        response = f"Error: {str(e)}"

    # Single append path for both success and error (was duplicated).
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": response})
    return "", history

# Gradio UI: credentials row, message-format chatbot, input box, clear button.
with gr.Blocks() as demo:
    gr.Markdown("# LangChain Chat with Message History")
    
    with gr.Row():
        # API key is entered per-session and never stored server-side.
        api_key = gr.Textbox(
            label="OpenAI API Key", 
            placeholder="Enter your OpenAI API key",
            type="password"
        )
        # thread_id scopes the LangGraph checkpointer config in chat().
        thread_id = gr.Textbox(
            label="Thread ID",
            value="default_thread",
            placeholder="Enter a unique thread ID"
        )
    
    # type="messages" means history is a list of {"role", "content"} dicts,
    # matching what chat() reads and appends.
    chatbot = gr.Chatbot(type="messages")
    msg = gr.Textbox(label="Message", placeholder="Type your message here...")
    clear = gr.ClearButton([msg, chatbot])

    # Enter in the textbox triggers one chat turn; outputs clear the box
    # and refresh the chatbot.
    msg.submit(
        chat,
        inputs=[msg, chatbot, api_key, thread_id],
        outputs=[msg, chatbot]
    )

if __name__ == "__main__":
    # Start the Gradio server (blocking) when run as a script.
    demo.launch()