Tanifh committed on
Commit
2f813e0
Β·
verified Β·
1 Parent(s): 8a095f7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -10
app.py CHANGED
@@ -3,7 +3,7 @@ import requests
3
  import streamlit as st
4
  from llama_cpp import Llama
5
 
6
- # βœ… Set Streamlit Page Config (Must be First)
7
  st.set_page_config(page_title="Phi-3 Mini Chatbot", layout="centered")
8
 
9
  # βœ… Define model path
@@ -25,8 +25,13 @@ if not os.path.exists(MODEL_PATH):
25
  st.stop()
26
 
27
  # βœ… Load model
28
- if "model" not in st.session_state:
29
- st.session_state["model"] = Llama(model_path=MODEL_PATH, n_ctx=4096)
 
 
 
 
 
30
 
31
  # Streamlit UI setup
32
  st.title("πŸ€– Phi-3 Mini Chatbot")
@@ -52,13 +57,15 @@ if st.button("Send") and user_input:
52
  st.chat_message("user").write(user_input)
53
 
54
  # Generate response
55
- response = st.session_state["model"].create_completion(
56
- prompt=user_input, max_tokens=1024, temperature=0.7, top_p=0.9
57
- )["choices"][0]["text"].strip()
58
-
59
- # Add model response to chat history
60
- st.session_state["messages"].append(("assistant", response))
61
- st.chat_message("assistant").write(response)
 
 
62
 
63
  # Run the app with: streamlit run app.py
64
 
 
3
  import streamlit as st
4
  from llama_cpp import Llama
5
 
6
+ # βœ… Streamlit Page Config (Must be first)
7
  st.set_page_config(page_title="Phi-3 Mini Chatbot", layout="centered")
8
 
9
  # βœ… Define model path
 
25
  st.stop()
26
 
27
  # βœ… Load model
28
+ try:
29
+ if "model" not in st.session_state:
30
+ st.session_state["model"] = Llama(model_path=MODEL_PATH, n_ctx=4096)
31
+ st.write("βœ… Model loaded successfully!")
32
+ except Exception as e:
33
+ st.error(f"🚨 Error loading model: {e}")
34
+ st.stop()
35
 
36
  # Streamlit UI setup
37
  st.title("πŸ€– Phi-3 Mini Chatbot")
 
57
  st.chat_message("user").write(user_input)
58
 
59
  # Generate response
60
+ try:
61
+ response = st.session_state["model"].create_completion(
62
+ prompt=user_input, max_tokens=1024, temperature=0.7, top_p=0.9
63
+ )
64
+ response_text = response["choices"][0]["text"].strip()
65
+ st.session_state["messages"].append(("assistant", response_text))
66
+ st.chat_message("assistant").write(response_text)
67
+ except Exception as e:
68
+ st.error(f"🚨 Error generating response: {e}")
69
 
70
  # Run the app with: streamlit run app.py
71