ashok2216 committed on
Commit 4e6b935 · verified · 1 Parent(s): 3639373

Update app.py

Files changed (1)
  1. app.py +46 -89
app.py CHANGED
@@ -230,99 +230,56 @@
  # # st.snow()

  import streamlit as st
- import requests
- import os
- from huggingface_hub import login
+ from huggingface_hub import InferenceClient

- # Set up Hugging Face token
- hf_token = os.getenv("HF_TOKEN")
- if hf_token is None:
-     raise ValueError("Hugging Face token not found. Please set the HF_TOKEN environment variable.")
- login(hf_token)
-
- # Hugging Face Inference API URL for Deepseek model
- API_URL = "https://api-inference.huggingface.co/models/deepseek-ai/deepseek-r1"
- headers = {"Authorization": f"Bearer {hf_token}"}
-
- # Function to query the Deepseek model
- def query_deepseek(payload):
-     """
-     Sends a query to the Deepseek model using Hugging Face Inference API.
-     """
-     response = requests.post(API_URL, headers=headers, json=payload)
-     return response.json()

- # Streamlit app configuration
- st.set_page_config(page_title="Deepseek Chatbot", page_icon="🤖", layout="wide")
-
- # Custom CSS for chatbot UI
- st.markdown("""
- <style>
- .chat-message {
-     padding: 10px;
-     border-radius: 10px;
-     margin: 5px 0;
-     max-width: 70%;
- }
- .user-message {
-     background-color: #0078D4;
-     color: white;
-     margin-left: auto;
-     margin-right: 0;
- }
- .bot-message {
-     background-color: #f1f1f1;
-     color: black;
-     margin-left: 0;
-     margin-right: auto;
- }
- </style>
- """, unsafe_allow_html=True)
-
- # Initialize session state for chat history
- if "chat_history" not in st.session_state:
-     st.session_state.chat_history = []
+ hf_token = os.getenv("HF_TOKEN")
+ # Set up the Hugging Face Inference Client
+ client = InferenceClient(
+     provider="together",  # Replace with the correct provider if needed
+     api_key=hf_token      # Replace with your Hugging Face API key
+ )

  # Streamlit app title
- st.title("🤖 Deepseek Chatbot")
- st.write("Welcome to the Deepseek Chatbot! Ask me anything, and I'll do my best to help.")
+ st.title("🤖 Deepseek R1 Chatbot")
+ st.write("Chat with the Deepseek R1 model powered by Hugging Face Inference API.")

- # Chat input
- user_input = st.text_input("You:", placeholder="Type your message here...", key="user_input")
-
- # Send button
- if st.button("Send"):
-     if user_input.strip() != "":
-         # Add user message to chat history
-         st.session_state.chat_history.append({"role": "user", "content": user_input})
-
-         # Prepare payload for Deepseek model
-         payload = {
-             "inputs": {
-                 "question": user_input,
-                 "context": "",  # Add context if needed
-             }
-         }
-
-         # Query the Deepseek model
-         with st.spinner("Thinking..."):
-             try:
-                 response = query_deepseek(payload)
-                 bot_response = response.get("answer", "Sorry, I couldn't understand that.")
-             except Exception as e:
-                 bot_response = f"An error occurred: {str(e)}"
-
-         # Add bot response to chat history
-         st.session_state.chat_history.append({"role": "bot", "content": bot_response})
+ # Initialize session state to store chat history
+ if "messages" not in st.session_state:
+     st.session_state.messages = []

  # Display chat history
- for message in st.session_state.chat_history:
-     if message["role"] == "user":
-         st.markdown(f'<div class="chat-message user-message">{message["content"]}</div>', unsafe_allow_html=True)
-     elif message["role"] == "bot":
-         st.markdown(f'<div class="chat-message bot-message">{message["content"]}</div>', unsafe_allow_html=True)
-
- # Clear chat history button
- if st.button("Clear Chat"):
-     st.session_state.chat_history = []
-     st.experimental_rerun()
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+
+ # User input
+ if prompt := st.chat_input("What would you like to ask?"):
+     # Add user message to chat history
+     st.session_state.messages.append({"role": "user", "content": prompt})
+     with st.chat_message("user"):
+         st.markdown(prompt)
+
+     # Generate response from Deepseek R1 model
+     with st.spinner("Thinking..."):
+         try:
+             # Prepare the messages for the model
+             messages = [{"role": m["role"], "content": m["content"]} for m in st.session_state.messages]
+
+             # Call the Hugging Face Inference API
+             completion = client.chat.completions.create(
+                 model="deepseek-ai/DeepSeek-R1",  # Replace with the correct model name
+                 messages=messages,
+                 max_tokens=500
+             )
+
+             # Extract the model's response
+             response = completion.choices[0].message.content
+
+             # Add model's response to chat history
+             st.session_state.messages.append({"role": "assistant", "content": response})
+             with st.chat_message("assistant"):
+                 st.markdown(response)
+
+         except Exception as e:
+             st.error(f"An error occurred: {e}")
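For reference, the new inference path can be exercised outside Streamlit. The sketch below is a minimal reconstruction based only on what the diff shows: it assumes `HF_TOKEN` is set in the environment, a `huggingface_hub` release new enough to accept the `provider` argument, and the Together-routed `deepseek-ai/DeepSeek-R1` model named in the commit. One caveat: the hunk removes `import os` and `import requests` while the new code still calls `os.getenv("HF_TOKEN")`, so unless `os` is imported earlier in app.py (above the hunk), that line would raise a NameError; the sketch imports it explicitly.

  # Minimal standalone sketch of the new inference path, not the app itself.
  # Assumptions: HF_TOKEN is set, huggingface_hub supports the `provider` argument.
  import os

  from huggingface_hub import InferenceClient

  hf_token = os.getenv("HF_TOKEN")  # same env var the app reads
  if hf_token is None:
      raise ValueError("Set the HF_TOKEN environment variable first.")

  # Route the request through the Together provider, as in the commit.
  client = InferenceClient(provider="together", api_key=hf_token)

  completion = client.chat.completions.create(
      model="deepseek-ai/DeepSeek-R1",  # model name used in the diff
      messages=[{"role": "user", "content": "Say hello in one sentence."}],
      max_tokens=100,
  )

  print(completion.choices[0].message.content)

Compared with the removed code, which posted a question-answering style payload ({"inputs": {"question": ..., "context": ...}}) to the serverless Inference API and read an "answer" field, the chat.completions interface sends the full message history in the OpenAI-compatible format that chat models such as DeepSeek-R1 expect.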