Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,6 +3,9 @@ import requests
|
|
3 |
import streamlit as st
|
4 |
from llama_cpp import Llama
|
5 |
|
|
|
|
|
|
|
6 |
# ✅ Define model path
|
7 |
MODEL_PATH = "./Phi-3-mini-4k-instruct-q4.gguf"
|
8 |
MODEL_URL = "https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-gguf/resolve/main/Phi-3-mini-4k-instruct-q4.gguf"
|
@@ -26,7 +29,6 @@ if "model" not in st.session_state:
|
|
26 |
st.session_state["model"] = Llama(model_path=MODEL_PATH, n_ctx=4096)
|
27 |
|
28 |
# Streamlit UI setup
|
29 |
-
st.set_page_config(page_title="Phi-3 Mini Chatbot", layout="centered")
|
30 |
st.title("🤖 Phi-3 Mini Chatbot")
|
31 |
st.markdown("Enter a message and get responses from Phi-3 Mini!")
|
32 |
|
|
|
3 |
import streamlit as st
|
4 |
from llama_cpp import Llama
|
5 |
|
6 |
+
# ✅ Set Streamlit Page Config (Must be First)
|
7 |
+
st.set_page_config(page_title="Phi-3 Mini Chatbot", layout="centered")
|
8 |
+
|
9 |
# ✅ Define model path
|
10 |
MODEL_PATH = "./Phi-3-mini-4k-instruct-q4.gguf"
|
11 |
MODEL_URL = "https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-gguf/resolve/main/Phi-3-mini-4k-instruct-q4.gguf"
|
|
|
29 |
st.session_state["model"] = Llama(model_path=MODEL_PATH, n_ctx=4096)
|
30 |
|
31 |
# Streamlit UI setup
|
|
|
32 |
st.title("🤖 Phi-3 Mini Chatbot")
|
33 |
st.markdown("Enter a message and get responses from Phi-3 Mini!")
|
34 |
|