import os
import time
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.schema import Document
from langchain.chains import RetrievalQA
from langchain_core.prompts import PromptTemplate
from langchain.vectorstores import FAISS

# ----------------- Page configuration -----------------
st.set_page_config(page_title="چت‌ بات توانا", page_icon="🪖", layout="wide")

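# Inject global CSS: Vazirmatn font, RTL layout, and styling for chat bubbles, inputs, buttons, and the header.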
st.markdown("""
    <style>
    @import url('https://fonts.googleapis.com/css2?family=Vazirmatn:wght@400;700&display=swap');
    html, body, [class*="css"] {
        font-family: 'Vazirmatn', Tahoma, sans-serif;
        direction: rtl;
        text-align: right;
    }
    .stApp {
        background: url("./military_bg.jpeg") no-repeat center center fixed;
        background-size: cover;
        backdrop-filter: blur(2px);
    }
    .stChatMessage {
        background-color: rgba(255,255,255,0.8);
        border: 1px solid #4e8a3e;
        border-radius: 12px;
        padding: 16px;
        margin-bottom: 15px;
        box-shadow: 0 4px 10px rgba(0,0,0,0.2);
        animation: fadeIn 0.4s ease-in-out;
    }
    .stTextInput > div > input, .stTextArea textarea {
        background-color: rgba(255,255,255,0.9) !important;
        border-radius: 8px !important;
        direction: rtl;
        text-align: right;
        font-family: 'Vazirmatn', Tahoma;
    }
    .stButton>button {
        background-color: #4e8a3e !important;
        color: white !important;
        font-weight: bold;
        border-radius: 10px;
        padding: 8px 20px;
        transition: 0.3s;
    }
    .stButton>button:hover {
        background-color: #3c6d30 !important;
    }
    .header-text {
        text-align: center;
        margin-top: 20px;
        margin-bottom: 40px;
        background-color: rgba(255, 255, 255, 0.75);
        padding: 20px;
        border-radius: 20px;
        box-shadow: 0 4px 12px rgba(0,0,0,0.2);
    }
    .header-text h1 {
        font-size: 42px;
        color: #2c3e50;
        margin: 0;
        font-weight: bold;
    }
    .subtitle {
        font-size: 18px;
        color: #34495e;
        margin-top: 8px;
    }
    @keyframes fadeIn {
        from { opacity: 0; transform: translateY(10px); }
        to { opacity: 1; transform: translateY(0); }
    }
    </style>
""", unsafe_allow_html=True)

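# Center the army logo in the narrow middle column.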
col1, col2, col3 = st.columns([1, 0.2, 1])
with col2:
    st.image("army.png", width=240)

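# Render the app title and subtitle in a styled header card.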
st.markdown("""
    <div class="header-text">
        <h1>چت‌ بات توانا</h1>
        <div class="subtitle">دستیار هوشمند برای تصمیم‌گیری در میدان نبرد</div>
    </div>
""", unsafe_allow_html=True)

# ----------------- Load the PDF and build the index -----------------

@st.cache_resource
def get_pdf_index():
    """Load the PDF, split it into chunks, embed them, and build a FAISS vector store."""
    with st.spinner('📄 در حال پردازش فایل PDF...'):
        loader = PyPDFLoader('test1.pdf')
        documents = loader.load()

        # Split each page into overlapping chunks so long passages stay retrievable.
        splitter = RecursiveCharacterTextSplitter(chunk_size=2000, chunk_overlap=128)
        texts = []
        for doc in documents:
            texts.extend(splitter.split_text(doc.page_content))

        # Embed the chunks with a long-context retrieval model and index them with FAISS.
        embedding_function = HuggingFaceEmbeddings(
            model_name="togethercomputer/m2-bert-80M-8k-retrieval",
            model_kwargs={"trust_remote_code": True},
        )
        vectorstore = FAISS.from_documents(
            [Document(page_content=text) for text in texts],
            embedding_function,
        )

        return vectorstore

# ----------------- Load data -----------------
vectorstore = get_pdf_index()

# Expose the cached vector store as a retriever for the QA chain.
retriever = vectorstore.as_retriever()

# ----------------- Define the LLM -----------------
llm = ChatOpenAI(
    base_url="https://api.together.xyz/v1",
    api_key=os.environ.get("TOGETHER_API_KEY"),  # keep the key out of source control; read it from the environment
    model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free"
)

# ----------------- Build the chain -----------------
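# Prompt for the RetrievalQA "stuff" chain: retrieved chunks fill {context} and the user's
# question fills {question}. NOTE: the Persian wording below is an assumed placeholder;
# adjust it to the intended instructions.
custom_prompt = PromptTemplate(
    input_variables=["context", "question"],
    template=(
        "بر اساس متن زیر به سوال پاسخ بده. "
        "اگر پاسخ در متن موجود نیست، بگو اطلاعات دقیقی موجود نیست.\n\n"
        "متن:\n{context}\n\n"
        "سوال: {question}\n"
        "پاسخ:"
    ),
)
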
qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    retriever=retriever,
    chain_type="stuff",
    chain_type_kwargs={"prompt": custom_prompt}
)

# ----------------- Chat state -----------------
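# Keep the chat history and any not-yet-answered prompt in Streamlit's session state across reruns.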
if 'messages' not in st.session_state:
    st.session_state.messages = []

if 'pending_prompt' not in st.session_state:
    st.session_state.pending_prompt = None

# ----------------- Display previous messages -----------------
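# Re-render the full conversation on every Streamlit rerun.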
for msg in st.session_state.messages:
    with st.chat_message(msg['role']):
        st.markdown(f"🗨️ {msg['content']}", unsafe_allow_html=True)

# ----------------- User input -----------------
prompt = st.chat_input("سوالی در مورد فایل بپرس...")

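# Store the user's message and rerun so it shows in the history before the model responds.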
if prompt:
    st.session_state.messages.append({'role': 'user', 'content': prompt})
    st.session_state.pending_prompt = prompt
    st.rerun()

# ----------------- Model response -----------------
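# Answer the pending question: show a thinking indicator, run the chain, then stream the reply.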
if st.session_state.pending_prompt:
    with st.chat_message('ai'):
        thinking = st.empty()
        thinking.markdown("🤖 در حال فکر کردن...")

        try:
            # Run the QA chain; fall back to a default message when no precise answer is found.
            response = qa_chain.run(st.session_state.pending_prompt)
            if not response.strip():  # empty or unhelpful answer
                response = "متاسفانه اطلاعات دقیقی برای پاسخ به این سوال موجود نیست."
            else:
                response = response.strip()
        except Exception:
            response = "متاسفانه اطلاعات لازم برای پاسخ به این سوال موجود نیست."

        thinking.empty()

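        # Simulate streaming by revealing the answer word by word with a typing cursor.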
        full_response = ""
        placeholder = st.empty()
        for word in response.split():
            full_response += word + " "
            placeholder.markdown(full_response + "▌")
            time.sleep(0.03)

        placeholder.markdown(full_response)
        st.session_state.messages.append({'role': 'ai', 'content': full_response})
        st.session_state.pending_prompt = None