Uzma-Khatun committed
Commit 795c3d9 · verified · 1 Parent(s): 6796f53

Upload 2 files

Files changed (2)
  1. app.py +152 -0
  2. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,152 @@
+ def main():
+     from dotenv import load_dotenv
+     load_dotenv()
+
+     # Step 1: Set up the UI with Streamlit (model provider, model, system prompt, web search query)
+     import requests
+     import streamlit as st
+     from streamlit_lottie import st_lottie
+
+     # ---------- Page Configuration ----------
+     st.set_page_config(
+         page_title="LangGraph AI Chatbot",
+         layout="centered",
+         initial_sidebar_state="collapsed"
+     )
+
+     # ----------------- CUSTOM CSS ------------------
+     st.markdown(
+         """
+         <style>
+         body {
+             background-color: #111827;
+             color: #d1d5db;
+             font-family: 'Segoe UI', sans-serif;
+         }
+         .sidebar-content {
+             background-color: #1f2937;
+             padding: 20px;
+         }
+         .chat-message {
+             padding: 10px;
+             border-radius: 10px;
+             margin-bottom: 10px;
+             font-size: 16px;
+         }
+         .user-message {
+             background-color: #374151;
+         }
+         .bot-message {
+             background-color: #4b5563;
+         }
+         .avatar {
+             width: 30px;
+             height: 30px;
+             border-radius: 50%;
+             margin-right: 10px;
+         }
+         button[kind="primary"] {
+             background-color: #10b981;
+             color: white;
+             border-radius: 10px;
+             border: none;
+         }
+         </style>
+         """,
+         unsafe_allow_html=True
+     )
+
+     # ---------- Lottie Animation ----------
+     def load_lottie_url(url: str):
+         r = requests.get(url, timeout=10)
+         if r.status_code != 200:
+             return None
+         return r.json()
+
+     lottie_ai = load_lottie_url("https://assets10.lottiefiles.com/packages/lf20_kkflmtur.json")
+     if lottie_ai:  # skip the animation if the download failed
+         st_lottie(lottie_ai, height=200)
+
+     # ---------- Avatar Config ----------
+     USER_AVATAR = "https://cdn-icons-png.flaticon.com/512/9131/9131529.png"
+     AGENT_AVATAR = "https://cdn-icons-png.flaticon.com/512/4712/4712100.png"
+
+
+     # ---------- Title and Prompt ----------
+     st.title("AI Chatbot")
+     st.caption("Ask your AI agent anything, powered by Groq and LangGraph!")
+
+     # ---------- Initialize Session State ----------
+     if "chat_history" not in st.session_state:
+         st.session_state.chat_history = []
+
+     # ---------- Inputs ----------
+     MODEL_NAMES_GROQ = ["llama-3.3-70b-versatile", "llama3-70b-8192"]
+     system_prompt = st.text_area("Define your AI Agent:", height=68, placeholder="Type your system prompt here...")
+     select_model = st.selectbox("Select Model (Groq Only):", MODEL_NAMES_GROQ)
+     allow_web_search = st.checkbox("Allow Web Search")
+     user_query = st.text_area("Enter your query:", height=150, placeholder="Ask Anything!")
+
+     API_URL = "https://ai-agent-backend-uzhn.onrender.com/chat"
+
+     # ---------- Chat History Display ----------
+     for entry in st.session_state.chat_history:
+         with st.chat_message("user", avatar=USER_AVATAR):
+             st.markdown(entry["user"])
+         with st.chat_message("assistant", avatar=AGENT_AVATAR):
+             st.markdown(entry["agent"])
+
+     # ---------- Submit Button ----------
+     if st.button("Ask Agent!"):
+         if user_query.strip():
+             # Step 2: Connect to the backend via its URL
+
+             payload = {
+                 "model_name": select_model,
+                 "system_prompt": system_prompt,
+                 "messages": [user_query],
+                 "allow_search": allow_web_search
+             }
+             with st.spinner("Thinking... 💭"):
+                 try:
+                     response = requests.post(API_URL, json=payload, timeout=60)
+
+                     if response.status_code == 200:
+                         try:
+                             response_data = response.json()
+                         except ValueError:
+                             st.error("⚠️ Unable to parse the server response.")
+                             response_data = {"response": response.text}
+
+                         if isinstance(response_data, dict) and "error" in response_data:
+                             st.error(response_data["error"])
+                         else:
+                             final_response = (
+                                 response_data if isinstance(response_data, str)
+                                 else response_data.get("response", str(response_data))
+                             )
+
+                             # Display new message
+                             with st.chat_message("user", avatar=USER_AVATAR):
+                                 st.markdown(user_query)
+                             with st.chat_message("assistant", avatar=AGENT_AVATAR):
+                                 st.markdown(final_response)
+                             st.session_state.chat_history.append({"user": user_query, "agent": final_response})  # persist the exchange for the history loop above
+                             # Feedback section
+                             feedback = st.radio("Was this helpful?", ["👍 Yes", "👎 No"], horizontal=True)
+                             # Export option
+                             st.download_button(
+                                 label="📄 Download Response",
+                                 data=final_response,
+                                 file_name="agent_response.txt",
+                                 mime="text/plain"
+                             )
+                     else:
+                         st.error(f"🚫 Server error: {response.status_code}")
+                 except Exception as e:
+                     st.error(f"❌ Backend connection error: {e}")
+
+
+ # End of main
+ if __name__ == "__main__":
+     main()
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ streamlit
+ requests
+ python-dotenv
+ streamlit-lottie
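
To try the backend without the Streamlit UI, the `/chat` endpoint at `API_URL` can be called directly with the same payload shape app.py builds. The sketch below is illustrative only: the prompt and query values are hypothetical, and it assumes (as the UI code does) that the endpoint returns either a plain JSON string or an object with a "response" key.

# poke_backend.py - minimal sketch for exercising the /chat endpoint directly.
# Assumes the same request/response contract that app.py relies on.
import requests

API_URL = "https://ai-agent-backend-uzhn.onrender.com/chat"

payload = {
    "model_name": "llama-3.3-70b-versatile",           # one of MODEL_NAMES_GROQ in app.py
    "system_prompt": "You are a concise assistant.",   # hypothetical system prompt
    "messages": ["What is LangGraph?"],                # hypothetical user query
    "allow_search": False,
}

resp = requests.post(API_URL, json=payload, timeout=60)
resp.raise_for_status()

try:
    data = resp.json()
except ValueError:
    data = resp.text

# Mirror app.py's handling: accept a bare string or pull the "response" field.
answer = data if isinstance(data, str) else data.get("response", str(data))
print(answer)

To run the UI itself: pip install -r requirements.txt, then streamlit run app.py, with any required environment variables placed in a local .env file (load_dotenv() is called at startup).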