# app.py
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the model and tokenizer.
# NOTE: model.generate() below needs a sequence-to-sequence model; the encoder-only
# checkpoint in the original code ("aubmindlab/bert-base-arabertv2", loaded with
# AutoModelForSequenceClassification) has no generate() method. "google/mt5-base"
# is used here as an assumed placeholder; replace it with a checkpoint fine-tuned
# for Arabic question answering.
model_name = "google/mt5-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Run on the GPU when available, otherwise fall back to the CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = model.to(device)
def generate_answer(question, context):
    # Build the prompt: "Question: {question} Context: {context}" in Arabic.
    input_text = f"سؤال: {question} سياق: {context}"
    inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True).to(device)
    with torch.no_grad():
        output = model.generate(**inputs, max_length=256)
    answer = tokenizer.decode(output[0], skip_special_tokens=True)
    return answer
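# A hypothetical direct call to the helper above (example strings, not from the
# original app); decoding flags such as num_beams can also be passed through
# model.generate() to tighten answers:
#
#   print(generate_answer("ما معنى الذكاء الاصطناعي؟",
#                         "الذكاء الاصطناعي هو قدرة الآلات على محاكاة الذكاء البشري."))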
def ask_question(question, context):
    # Guard against an empty question ("Please write a question." in Arabic).
    if not question.strip():
        return "❌ الرجاء كتابة سؤال."
    answer = generate_answer(question, context)
    # Prefix the generated text with "Answer:" in Arabic.
    return f"✅ الإجابة: {answer}"
# Gradio UI (title: "Questions and answers from books").
with gr.Blocks(title="سؤال وجواب من الكتب") as demo:
    gr.Markdown("""
    # 📚 اسأل كتبك!
    اطرح أي سؤال وسنبحث لك عن الجواب من محتوى الكتب بدقة وفهم!
    """)
    with gr.Row():
        question = gr.Textbox(label="✍️ اكتب سؤالك هنا:", placeholder="مثال: ما معنى الذكاء الاصطناعي؟")
    with gr.Row():
        context = gr.Textbox(label="📖 اكتب أو الصق نص من كتابك هنا:", placeholder="انسخ فقرة أو أكثر من الكتاب...")
    with gr.Row():
        ask_btn = gr.Button("🔍 احصل على الإجابة")
    output = gr.Textbox(label="💬 الإجابة:")

    # Wire the button to the question-answering callback.
    ask_btn.click(fn=ask_question, inputs=[question, context], outputs=output)

demo.launch()
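# Running locally, a minimal sketch assuming the packages below are the only
# dependencies (sentencepiece is typically required by T5-family tokenizers such
# as the mT5 placeholder above):
#
#   pip install gradio transformers torch sentencepiece
#   python app.py
#
# Gradio serves the interface on http://127.0.0.1:7860 by default.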