app.py
CHANGED
@@ -1,85 +1,78 @@
 import time
 import streamlit as st
-
+from langchain.document_loaders import PyPDFLoader
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain.embeddings.base import Embeddings
+from langchain.vectorstores import FAISS
+from langchain.indexes import VectorstoreIndexCreator
+from langchain.chains import RetrievalQA
+from langchain.chat_models import ChatOpenAI
+from typing import List
+from together import Together
+
+st.set_page_config(page_title="چتبات ارتش", page_icon="🪖", layout="wide")
+
 st.markdown("""
 <style>
 .main {
-    background-color: #
+    background-color: #f9f9f9;
 }

 .stChatMessage {
-    background-color: #
+    background-color: #e0f7fa;
     border-radius: 12px;
-    padding:
-    margin-bottom:
+    padding: 12px;
+    margin-bottom: 12px;
     direction: rtl;
     text-align: right;
     font-family: 'Tahoma', sans-serif;
 }

-.stMarkdown, .stTextInput, .stTextArea {
+.stMarkdown, .stTextInput, .stTextArea, .stButton {
     direction: rtl !important;
     text-align: right !important;
     font-family: 'Tahoma', sans-serif;
 }

-.
+.header-container {
     display: flex;
     flex-direction: column;
     align-items: center;
     justify-content: center;
-    margin-top:
-    margin-bottom:
+    margin-top: 30px;
+    margin-bottom: 40px;
 }

-.
-    width:
-    height: auto;
+.header-container img {
+    width: 140px;
     border-radius: 15px;
-
-
+    margin-bottom: 12px;
+    box-shadow: 0 4px 15px rgba(0,0,0,0.2);
 }

-.
-    font-size:
-    font-weight: bold;
+.header-container h1 {
+    font-size: 36px;
     color: #2c3e50;
     font-family: 'Tahoma', sans-serif;
-    text-align: center;
     margin: 0;
-    letter-spacing: 1px;
 }
 </style>
 """, unsafe_allow_html=True)

-# حالا در جای مناسب در برنامه:
 st.markdown("""
-<div class="
-    <img src="
-    <h1
+<div class="header-container">
+    <img src="https://your-logo-url.com/logo.png" alt="لوگو">
+    <h1>هوش مصنوعی توانا</h1>
 </div>
 """, unsafe_allow_html=True)

-from langchain.document_loaders import PyPDFLoader
-from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain.embeddings.base import Embeddings
-from langchain.vectorstores import FAISS
-from langchain.indexes import VectorstoreIndexCreator
-from langchain.chains import RetrievalQA
-from langchain.chat_models import ChatOpenAI
-from typing import List
-from together import Together
-
 class TogetherEmbeddings(Embeddings):
     def __init__(self, model_name: str, api_key: str):
         self.model_name = model_name
         self.client = Together(api_key=api_key)

     def embed_documents(self, texts: List[str]) -> List[List[float]]:
-        response = self.client.embeddings.create(
-            model=self.model_name,
-            input=texts
-        )
+        response = self.client.embeddings.create(model=self.model_name, input=texts)
         return [item.embedding for item in response.data]

     def embed_query(self, text: str) -> List[float]:
@@ -87,8 +80,8 @@ class TogetherEmbeddings(Embeddings):

 @st.cache_resource
 def get_pdf_index():
-    with st.spinner('
-
+    with st.spinner('📄 در حال پردازش فایل PDF...'):
+        loader = [PyPDFLoader('test1.pdf')]
         embeddings = TogetherEmbeddings(
             model_name="togethercomputer/m2-bert-80M-8k-retrieval",
             api_key="0291f33aee03412a47fa5d8e562e515182dcc5d9aac5a7fb5eefdd1759005979"
@@ -96,14 +89,16 @@ def get_pdf_index():
         return VectorstoreIndexCreator(
             embedding=embeddings,
             text_splitter=RecursiveCharacterTextSplitter(chunk_size=300, chunk_overlap=0)
-        ).from_loaders(
+        ).from_loaders(loader)

 index = get_pdf_index()
+
 llm = ChatOpenAI(
     base_url="https://api.together.xyz/v1",
     api_key='0291f33aee03412a47fa5d8e562e515182dcc5d9aac5a7fb5eefdd1759005979',
     model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free"
 )
+
 chain = RetrievalQA.from_chain_type(
     llm=llm,
     chain_type='stuff',
@@ -111,25 +106,17 @@ chain = RetrievalQA.from_chain_type(
     input_key='question'
 )

-# --- UI زیباسازی ---
-
-col1, col2 = st.columns([1, 10])
-with col1:
-    st.image("army.png", width=70)
-with col2:
-    st.title('🤖 چتبات هوشمند ارتش')
-
 if 'messages' not in st.session_state:
     st.session_state.messages = []

 if 'pending_prompt' not in st.session_state:
     st.session_state.pending_prompt = None

-for
-    with st.chat_message(
-        st.markdown(f"🗨️ {
+for msg in st.session_state.messages:
+    with st.chat_message(msg['role']):
+        st.markdown(f"🗨️ {msg['content']}", unsafe_allow_html=True)

-prompt = st.chat_input(
+prompt = st.chat_input("چطور میتونم کمک کنم؟")

 if prompt:
     st.session_state.messages.append({'role': 'user', 'content': prompt})
@@ -138,19 +125,19 @@ if prompt:

 if st.session_state.pending_prompt:
     with st.chat_message('ai'):
-
-
+        thinking = st.empty()
+        thinking.markdown("🤖 در حال فکر کردن...")

         response = chain.run(f'پاسخ را فقط به زبان فارسی بده. سوال: {st.session_state.pending_prompt}')
-
-        if not
-
+        answer = response.split("Helpful Answer:")[-1].strip()
+        if not answer:
+            answer = "متأسفم، اطلاعات دقیقی در این مورد ندارم."

-
+        thinking.empty()
         full_response = ""
         placeholder = st.empty()
-        for
-        full_response +=
+        for word in answer.split():
+            full_response += word + " "
             placeholder.markdown(full_response + "▌")
             time.sleep(0.03)
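The hunk context above elides a few lines of the file, including the body of embed_query and whatever retriever argument sits between chain_type='stuff', and input_key='question'. As a point of reference, here is a minimal sketch of how such an Embeddings subclass is conventionally completed; the class name and the embed_query body are assumptions for illustration, not lines recovered from this diff.

# Sketch only -- the embed_query body is assumed, not copied from app.py.
from typing import List

from langchain.embeddings.base import Embeddings
from together import Together


class TogetherEmbeddingsSketch(Embeddings):
    """Same shape as the TogetherEmbeddings class shown in the diff."""

    def __init__(self, model_name: str, api_key: str):
        self.model_name = model_name
        self.client = Together(api_key=api_key)

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        # Batch call, exactly as in the diff: one embedding vector per input text.
        response = self.client.embeddings.create(model=self.model_name, input=texts)
        return [item.embedding for item in response.data]

    def embed_query(self, text: str) -> List[float]:
        # Assumed implementation: treat the single query as a one-element batch.
        return self.embed_documents([text])[0]

On the chain side, the usual way a VectorstoreIndexCreator result feeds RetrievalQA is retriever=index.vectorstore.as_retriever(); whether app.py does exactly that is not visible in the hunks shown.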