Update app.py
Browse files
app.py
CHANGED
@@ -1,118 +1,36 @@
|
|
1 |
-
import os
|
2 |
-
import time
|
3 |
-
|
4 |
-
from langchain_together import TogetherEmbeddings
|
5 |
import streamlit as st
|
6 |
-
|
|
|
7 |
from langchain.document_loaders import PyPDFLoader
|
8 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
9 |
-
from langchain.
|
10 |
-
from langchain.
|
11 |
-
from
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
st.set_page_config(page_title="چت بات توانا", page_icon="🪖", layout="wide")
|
16 |
-
|
17 |
-
st.markdown("""
|
18 |
-
<style>
|
19 |
-
@import url('https://fonts.googleapis.com/css2?family=Vazirmatn:wght@400;700&display=swap');
|
20 |
-
html, body, [class*="css"] {
|
21 |
-
font-family: 'Vazirmatn', Tahoma, sans-serif;
|
22 |
-
direction: rtl;
|
23 |
-
text-align: right;
|
24 |
-
}
|
25 |
-
.stApp {
|
26 |
-
background: url("./military_bg.jpeg") no-repeat center center fixed;
|
27 |
-
background-size: cover;
|
28 |
-
backdrop-filter: blur(2px);
|
29 |
-
}
|
30 |
-
.stChatMessage {
|
31 |
-
background-color: rgba(255,255,255,0.8);
|
32 |
-
border: 1px solid #4e8a3e;
|
33 |
-
border-radius: 12px;
|
34 |
-
padding: 16px;
|
35 |
-
margin-bottom: 15px;
|
36 |
-
box-shadow: 0 4px 10px rgba(0,0,0,0.2);
|
37 |
-
animation: fadeIn 0.4s ease-in-out;
|
38 |
-
}
|
39 |
-
.stTextInput > div > input, .stTextArea textarea {
|
40 |
-
background-color: rgba(255,255,255,0.9) !important;
|
41 |
-
border-radius: 8px !important;
|
42 |
-
direction: rtl;
|
43 |
-
text-align: right;
|
44 |
-
font-family: 'Vazirmatn', Tahoma;
|
45 |
-
}
|
46 |
-
.stButton>button {
|
47 |
-
background-color: #4e8a3e !important;
|
48 |
-
color: white !important;
|
49 |
-
font-weight: bold;
|
50 |
-
border-radius: 10px;
|
51 |
-
padding: 8px 20px;
|
52 |
-
transition: 0.3s;
|
53 |
-
}
|
54 |
-
.stButton>button:hover {
|
55 |
-
background-color: #3c6d30 !important;
|
56 |
-
}
|
57 |
-
.header-text {
|
58 |
-
text-align: center;
|
59 |
-
margin-top: 20px;
|
60 |
-
margin-bottom: 40px;
|
61 |
-
background-color: rgba(255, 255, 255, 0.75);
|
62 |
-
padding: 20px;
|
63 |
-
border-radius: 20px;
|
64 |
-
box-shadow: 0 4px 12px rgba(0,0,0,0.2);
|
65 |
-
}
|
66 |
-
.header-text h1 {
|
67 |
-
font-size: 42px;
|
68 |
-
color: #2c3e50;
|
69 |
-
margin: 0;
|
70 |
-
font-weight: bold;
|
71 |
-
}
|
72 |
-
.subtitle {
|
73 |
-
font-size: 18px;
|
74 |
-
color: #34495e;
|
75 |
-
margin-top: 8px;
|
76 |
-
}
|
77 |
-
@keyframes fadeIn {
|
78 |
-
from { opacity: 0; transform: translateY(10px); }
|
79 |
-
to { opacity: 1; transform: translateY(0); }
|
80 |
-
}
|
81 |
-
</style>
|
82 |
-
""", unsafe_allow_html=True)
|
83 |
-
|
84 |
-
col1, col2, col3 = st.columns([1, 0.2, 1])
|
85 |
-
with col2:
|
86 |
-
st.image("army.png", width=240)
|
87 |
-
|
88 |
-
st.markdown("""
|
89 |
-
<div class="header-text">
|
90 |
-
<h1>چت بات توانا</h1>
|
91 |
-
<div class="subtitle">دستیار هوشمند</div>
|
92 |
-
</div>
|
93 |
-
""", unsafe_allow_html=True)
|
94 |
-
|
95 |
-
# ----------------- لود PDF و ساخت ایندکس -----------------
|
96 |
@st.cache_resource
|
97 |
-
def
|
98 |
-
with st.spinner('📄 در حال پردازش
|
99 |
-
|
100 |
-
|
|
|
|
|
|
|
101 |
|
102 |
-
|
103 |
-
embeddings = TogetherEmbeddings(
|
104 |
api_key="0291f33aee03412a47fa5d8e562e515182dcc5d9aac5a7fb5eefdd1759005979"
|
105 |
)
|
106 |
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
return index
|
113 |
|
114 |
-
|
115 |
-
|
|
|
116 |
|
117 |
llm = ChatOpenAI(
|
118 |
base_url="https://api.together.xyz/v1",
|
@@ -120,12 +38,39 @@ llm = ChatOpenAI(
|
|
120 |
model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free"
|
121 |
)
|
122 |
|
123 |
-
|
124 |
-
|
125 |
-
|
126 |
-
|
127 |
-
|
128 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
129 |
|
130 |
if 'messages' not in st.session_state:
|
131 |
st.session_state.messages = []
|
@@ -133,35 +78,36 @@ if 'messages' not in st.session_state:
|
|
133 |
if 'pending_prompt' not in st.session_state:
|
134 |
st.session_state.pending_prompt = None
|
135 |
|
136 |
-
# نمایش
|
137 |
for msg in st.session_state.messages:
|
138 |
with st.chat_message(msg['role']):
|
139 |
st.markdown(f"🗨️ {msg['content']}", unsafe_allow_html=True)
|
140 |
|
141 |
-
#
|
142 |
-
prompt = st.chat_input("
|
143 |
|
144 |
if prompt:
|
145 |
st.session_state.messages.append({'role': 'user', 'content': prompt})
|
146 |
st.session_state.pending_prompt = prompt
|
147 |
st.rerun()
|
148 |
|
|
|
149 |
if st.session_state.pending_prompt:
|
150 |
with st.chat_message('ai'):
|
151 |
thinking = st.empty()
|
152 |
-
thinking.markdown("🤖 در حال
|
153 |
|
154 |
-
#
|
155 |
-
response =
|
156 |
-
answer = response.
|
157 |
if not answer:
|
158 |
-
answer = "
|
159 |
|
160 |
thinking.empty()
|
161 |
full_response = ""
|
162 |
placeholder = st.empty()
|
163 |
|
164 |
-
#
|
165 |
for word in answer.split():
|
166 |
full_response += word + " "
|
167 |
placeholder.markdown(full_response + "▌")
|
|
|
|
|
|
|
|
|
|
|
1 |
import streamlit as st
|
2 |
+
import time
|
3 |
+
import numpy as np
|
4 |
from langchain.document_loaders import PyPDFLoader
|
5 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
6 |
+
from langchain.embeddings import TogetherEmbeddings
|
7 |
+
from langchain.chat_models import ChatOpenAI
|
8 |
+
from sklearn.metrics.pairwise import cosine_similarity
|
9 |
+
|
10 |
+
# ------------------ بارگذاری چانکها و امبدینگها ------------------
|
11 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
12 |
@st.cache_resource
def load_chunks_and_embeddings():
    """Load test1.pdf, split it into chunks, and embed every chunk.

    Cached with st.cache_resource so the PDF is parsed and embedded only
    once per Streamlit server process, not on every rerun.

    Returns:
        tuple: (chunk_texts, chunk_embeddings, embeddings_model) where
        chunk_texts is a list[str] of chunk contents, chunk_embeddings
        holds the embedding vector of each chunk (same order), and
        embeddings_model is the TogetherEmbeddings instance, returned so
        callers can embed queries with the exact same model.

    Raises:
        KeyError: if the TOGETHER_API_KEY environment variable is unset.
    """
    import os  # function-scope import: only needed for the key lookup

    with st.spinner('📄 در حال پردازش PDF و ساخت امبدینگها...'):
        loader = PyPDFLoader('test1.pdf')
        pages = loader.load()

        # 300-character chunks with no overlap; each chunk is embedded
        # independently and later matched against the user's question.
        splitter = RecursiveCharacterTextSplitter(chunk_size=300, chunk_overlap=0)
        chunks = splitter.split_documents(pages)

        # SECURITY: a Together API key used to be hardcoded here and was
        # committed to the repository — that key must be treated as leaked
        # and rotated. Read the (new) key from the environment instead.
        embeddings_model = TogetherEmbeddings(
            api_key=os.environ["TOGETHER_API_KEY"]
        )

        chunk_texts = [chunk.page_content for chunk in chunks]
        chunk_embeddings = embeddings_model.embed_documents(chunk_texts)

        st.success(f"✅ تعداد {len(chunk_texts)} چانک ساخته شد.")
        return chunk_texts, chunk_embeddings, embeddings_model
|
|
|
30 |
|
31 |
+
# Build the chunk texts, their embeddings, and the embedding model once at
# startup; st.cache_resource memoizes the result across Streamlit reruns.
chunk_texts, chunk_embeddings, embeddings_model = load_chunks_and_embeddings()

# ------------------ LLM model setup ------------------
|
34 |
|
35 |
llm = ChatOpenAI(
|
36 |
base_url="https://api.together.xyz/v1",
|
|
|
38 |
model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free"
|
39 |
)
|
40 |
|
41 |
+
# ------------------ Answer a question from the closest PDF chunk ------------------

def answer_from_pdf(question):
    """Answer *question* in Persian, grounded on the most similar PDF chunk.

    The question is embedded with the same model used for the chunks, the
    chunk with the highest cosine similarity is retrieved, and the LLM is
    prompted to answer using only that chunk as context.
    """
    # Embed the incoming question with the shared embedding model.
    query_vec = embeddings_model.embed_query(question)

    # Cosine similarity of the question against every chunk embedding.
    scores = cosine_similarity(
        [query_vec],
        chunk_embeddings
    )

    # Index of the single best-matching chunk.
    best = int(np.argmax(scores))
    context = chunk_texts[best]

    # Persian-only instruction prompt built around the retrieved chunk.
    prompt = f"""بر اساس متن زیر فقط به زبان فارسی پاسخ بده:

متن:
{context}

سوال:
{question}

پاسخ:"""

    return llm.invoke(prompt).content
|
70 |
+
|
71 |
+
# ------------------ Chat Streamlit UI ------------------

st.title('📚 چت با PDF')

# Conversation history and the question awaiting an answer live in
# st.session_state so they survive Streamlit's per-interaction reruns.
if 'messages' not in st.session_state:
    st.session_state.messages = []

if 'pending_prompt' not in st.session_state:
    st.session_state.pending_prompt = None

# Replay the full chat history on every rerun.
# NOTE(review): unsafe_allow_html=True renders user-supplied text as HTML —
# consider escaping message content to avoid script/markup injection.
for msg in st.session_state.messages:
    with st.chat_message(msg['role']):
        st.markdown(f"🗨️ {msg['content']}", unsafe_allow_html=True)

# Read the user's next question.
prompt = st.chat_input("سوال خود را وارد کنید...")

if prompt:
    # Record the question, mark it as pending, and rerun so the updated
    # history (including this new message) is rendered before answering.
    st.session_state.messages.append({'role': 'user', 'content': prompt})
    st.session_state.pending_prompt = prompt
    st.rerun()
|
93 |
|
94 |
+
# وقتی سوال جدید داری
|
95 |
if st.session_state.pending_prompt:
|
96 |
with st.chat_message('ai'):
|
97 |
thinking = st.empty()
|
98 |
+
thinking.markdown("🤖 در حال پردازش...")
|
99 |
|
100 |
+
# پاسخ بر اساس نزدیکترین چانک
|
101 |
+
response = answer_from_pdf(st.session_state.pending_prompt)
|
102 |
+
answer = response.strip()
|
103 |
if not answer:
|
104 |
+
answer = "متاسفم، اطلاعات دقیقی در این مورد ندارم."
|
105 |
|
106 |
thinking.empty()
|
107 |
full_response = ""
|
108 |
placeholder = st.empty()
|
109 |
|
110 |
+
# تدریجی نشون دادن پاسخ
|
111 |
for word in answer.split():
|
112 |
full_response += word + " "
|
113 |
placeholder.markdown(full_response + "▌")
|