import time
from typing import List

import pandas as pd
import streamlit as st
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.docstore.document import Document
from langchain.embeddings.base import Embeddings
from langchain.indexes import VectorstoreIndexCreator
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import FAISS
from together import Together

st.set_page_config(page_title="رزم یار ارتش", page_icon="🪖", layout="wide")
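
# Global styles: RTL layout, the Vazirmatn font, and the app's green/grey colour scheme.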
st.markdown(""" |
|
<style> |
|
@import url('https://fonts.googleapis.com/css2?family=Vazirmatn:wght@400;700&display=swap'); |
|
|
|
html, body, [class*="css"] { |
|
font-family: 'Vazirmatn', Tahoma, sans-serif; |
|
direction: rtl; |
|
text-align: right; |
|
} |
|
.stApp { |
|
background: linear-gradient(to left, #f0f4f7, #d9e2ec); |
|
} |
|
.sidebar .sidebar-content { |
|
background-color: #ffffff; |
|
border-left: 2px solid #4e8a3e; |
|
padding-top: 10px; |
|
} |
|
.sidebar .sidebar-content div { |
|
margin-bottom: 10px; |
|
font-weight: bold; |
|
color: #2c3e50; |
|
font-size: 15px; |
|
} |
|
.stButton>button { |
|
background-color: #4e8a3e !important; |
|
color: white !important; |
|
font-weight: bold; |
|
border-radius: 8px; |
|
padding: 5px 16px; |
|
transition: 0.3s; |
|
font-size: 14px; |
|
} |
|
.stButton>button:hover { |
|
background-color: #3c6d30 !important; |
|
} |
|
.header-text { |
|
text-align: center; |
|
margin-top: 15px; |
|
margin-bottom: 25px; |
|
background-color: rgba(255, 255, 255, 0.85); |
|
padding: 16px; |
|
border-radius: 16px; |
|
box-shadow: 0 4px 10px rgba(0,0,0,0.1); |
|
} |
|
.header-text h1 { |
|
font-size: 36px; |
|
color: #2c3e50; |
|
margin: 0; |
|
font-weight: bold; |
|
} |
|
.subtitle { |
|
font-size: 16px; |
|
color: #34495e; |
|
margin-top: 5px; |
|
} |
|
.chat-message { |
|
background-color: rgba(255, 255, 255, 0.95); |
|
border: 1px solid #4e8a3e; |
|
border-radius: 12px; |
|
padding: 14px; |
|
margin-bottom: 10px; |
|
box-shadow: 0 4px 8px rgba(0,0,0,0.08); |
|
animation: fadeIn 0.5s ease; |
|
} |
|
.stTextInput>div>input, .stTextArea textarea { |
|
background-color: rgba(255,255,255,0.9) !important; |
|
border-radius: 8px !important; |
|
direction: rtl; |
|
text-align: right; |
|
font-family: 'Vazirmatn', Tahoma; |
|
} |
|
img.small-logo { |
|
width: 90px; |
|
margin-bottom: 15px; |
|
display: block; |
|
margin-right: auto; |
|
margin-left: auto; |
|
} |
|
.menu-item { |
|
display: flex; |
|
align-items: center; |
|
gap: 8px; |
|
padding: 6px 0; |
|
font-size: 15px; |
|
cursor: pointer; |
|
} |
|
.menu-item img { |
|
width: 20px; |
|
height: 20px; |
|
} |
|
</style> |
|
""", unsafe_allow_html=True) |
|
|
|
|
|
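
# Sidebar: logo and a static navigation menu rendered as HTML.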
with st.sidebar:
    st.image("log.png", width=90)

    st.markdown("""
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/3596/3596165.png" />
    گفتگوی جدید
</div>
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/709/709496.png" />
    تاریخچه
</div>
<hr/>
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/1828/1828932.png" />
    مدلهای هوش مصنوعی
</div>
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/681/681494.png" />
    تولید محتوا
</div>
<hr/>
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/3601/3601646.png" />
    دستیار ویژه
</div>
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/709/709612.png" />
    ابزار مالی
</div>
<hr/>
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/2099/2099058.png" />
    تنظیمات
</div>
<div class="menu-item">
    <img src="https://cdn-icons-png.flaticon.com/512/597/597177.png" />
    پشتیبانی
</div>
""", unsafe_allow_html=True)

st.markdown(""" |
|
<style> |
|
/* تنظیم سایز سایدبار */ |
|
[data-testid="stSidebar"] { |
|
width: 220px !important; |
|
flex-shrink: 0; |
|
} |
|
[data-testid="stSidebar"] > div { |
|
width: 220px !important; |
|
} |
|
</style> |
|
""", unsafe_allow_html=True) |
|
|
|
|
|
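
# Page header and an initial greeting bubble.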
st.markdown(""" |
|
<div class="header-text"> |
|
<h1>رزم یار ارتش</h1> |
|
<div class="subtitle">دستیار هوشمند ارتشی برای پشتیبانی و راهبری</div> |
|
</div> |
|
""", unsafe_allow_html=True) |
|
|
|
st.markdown('<div class="chat-message">👋 سلام! چطور میتونم کمکتون کنم؟</div>', unsafe_allow_html=True) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
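

# Adapter exposing Together's embedding endpoint through LangChain's Embeddings
# interface, so it can be used by VectorstoreIndexCreator / FAISS below.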
class TogetherEmbeddings(Embeddings):
    def __init__(self, model_name: str, api_key: str):
        self.model_name = model_name
        self.client = Together(api_key=api_key)

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        # Embed in batches to keep each request to the API reasonably small.
        batch_size = 100
        embeddings = []
        for i in range(0, len(texts), batch_size):
            batch = texts[i:i + batch_size]
            response = self.client.embeddings.create(model=self.model_name, input=batch)
            embeddings.extend([item.embedding for item in response.data])
        return embeddings

    def embed_query(self, text: str) -> List[float]:
        return self.embed_documents([text])[0]
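

# Build (and cache) a vector index over the first column of the CSV knowledge base.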
@st.cache_resource
def get_csv_index(csv_file):
    with st.spinner('📄 در حال پردازش فایل CSV...'):
        df = pd.read_csv(csv_file)

        # Use the first column as the text source and drop empty rows.
        texts = df.iloc[:, 0].astype(str).tolist()
        texts = [text for text in texts if text.strip()]

        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=300,
            chunk_overlap=50,
            length_function=len,
            separators=["\n\n", "\n", " ", ""]
        )

        split_texts = []
        for text in texts:
            split_texts.extend(text_splitter.split_text(text))

        embeddings = TogetherEmbeddings(
            model_name="togethercomputer/m2-bert-80M-8k-retrieval",
            # NOTE: avoid hard-coding API keys; prefer st.secrets or an environment variable.
            api_key="0291f33aee03412a47fa5d8e562e515182dcc5d9aac5a7fb5eefdd1759005979"
        )

        index_creator = VectorstoreIndexCreator(
            vectorstore_cls=FAISS,  # use the imported FAISS backend for the vector store
            embedding=embeddings,
            text_splitter=text_splitter
        )

        documents = [Document(page_content=text) for text in split_texts]
        return index_creator.from_documents(documents)

csv_file_path = 'output (1).csv'

try:
    csv_index = get_csv_index(csv_file_path)
    st.success("ایندکس فایل CSV با موفقیت ساخته شد!")
except Exception as e:
    st.error(f"خطا در ساخت ایندکس: {str(e)}")
    st.stop()  # without a valid index the QA chain below cannot be built
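
# Chat model served through Together's OpenAI-compatible endpoint, wired into a
# RetrievalQA chain that pulls context from the CSV index.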
llm = ChatOpenAI(
    base_url="https://api.together.xyz/v1",
    api_key='0291f33aee03412a47fa5d8e562e515182dcc5d9aac5a7fb5eefdd1759005979',
    model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free"
)

chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type='stuff',
    retriever=csv_index.vectorstore.as_retriever(),
    input_key='question'
)
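
# Chat history is kept in st.session_state so it survives Streamlit reruns;
# pending_prompt holds a question that has been submitted but not yet answered.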
if 'messages' not in st.session_state:
    st.session_state.messages = []

if 'pending_prompt' not in st.session_state:
    st.session_state.pending_prompt = None

for msg in st.session_state.messages:
    with st.chat_message(msg['role']):
        st.markdown(f"🗨️ {msg['content']}", unsafe_allow_html=True)

prompt = st.chat_input("چطور میتونم کمک کنم؟")
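
# Two-phase flow: store the new question and rerun so it appears in the history,
# then answer it on the next run, streaming the reply word by word.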
if prompt:
    st.session_state.messages.append({'role': 'user', 'content': prompt})
    st.session_state.pending_prompt = prompt
    st.rerun()

if st.session_state.pending_prompt:
    with st.chat_message('ai'):
        thinking = st.empty()
        thinking.markdown("🤖 در حال فکر کردن...")

        response = chain.run(f'پاسخ را فقط به زبان فارسی جواب بده به هیچ عنوان از زبان چینی در پاسخ استفاده نکن. سوال: {st.session_state.pending_prompt}')
        answer = response.split("Helpful Answer:")[-1].strip() if "Helpful Answer:" in response else response.strip()
        if not answer:
            answer = "متأسفم، اطلاعات دقیقی در این مورد ندارم."

        thinking.empty()
        full_response = ""
        placeholder = st.empty()
        for word in answer.split():
            full_response += word + " "
            placeholder.markdown(full_response + "▌")
            time.sleep(0.03)

        placeholder.markdown(full_response)
        st.session_state.messages.append({'role': 'ai', 'content': full_response})
        st.session_state.pending_prompt = None