Update app.py
app.py CHANGED
@@ -6,7 +6,8 @@ import numpy as np
 from transformers import pipeline
 
 dataset = load_dataset("lex_glue", "scotus")
-
+corpus_data = dataset['train'].select(range(200))
+corpus = [doc['text'] for doc in corpus_data]
 
 embedder = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
 corpus_embeddings = embedder.encode(corpus, convert_to_numpy=True)
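Note: the FAISS index that rag_query() searches is built outside these hunks and does not appear in this diff. A minimal sketch of how it is presumably constructed from corpus_embeddings, assuming a flat L2 index (the actual setup in app.py may differ):

import faiss
import numpy as np

dim = corpus_embeddings.shape[1]                          # 384 for all-MiniLM-L6-v2
index = faiss.IndexFlatL2(dim)                            # exact (brute-force) L2 search
index.add(np.array(corpus_embeddings, dtype="float32"))   # index the 200 encoded documents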
@@ -20,7 +21,8 @@ gen_pipeline = pipeline("text2text-generation", model="facebook/bart-large-cnn")
 def rag_query(user_question):
     question_embedding = embedder.encode([user_question])
     _, indices = index.search(np.array(question_embedding), k=3)
-
+    valid_indices = [i for i in indices[0] if i < len(corpus)]
+    context = " ".join([corpus[i] for i in valid_indices])
     prompt = f"Question: {user_question}\nContext: {context}\nAnswer:"
     result = gen_pipeline(prompt, max_length=250, do_sample=False)[0]['generated_text']
     return result
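With the two added lines, rag_query() stitches the top-3 retrieved documents into the prompt before generation. Illustrative call (the question text is arbitrary):

answer = rag_query("What standard did the Court apply to search-and-seizure claims?")
print(answer)   # answer generated by facebook/bart-large-cnn from the retrieved context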
@@ -35,14 +37,14 @@ iface = gr.Interface(
     fn=chatbot_interface,
     inputs=[
         gr.Textbox(lines=2, placeholder="Enter your legal question here...", label="Your Question"),
-        gr.State([]) #
+        gr.State([]) # Session state to store history
     ],
     outputs=[
         gr.Textbox(label="Chat History", lines=20, interactive=False),
         gr.State()
     ],
     title="🧑⚖️ Legal Assistant Chatbot",
-    description="Ask legal questions based on case data (LexGLUE - SCOTUS subset). The bot
+    description="Ask legal questions based on case data (LexGLUE - SCOTUS subset). The bot retrieves context and generates an answer."
 )
 
 iface.launch()
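Note: chatbot_interface, which gr.Interface wires to the two inputs and two outputs above, is defined elsewhere in app.py and is not part of this diff. A hypothetical sketch consistent with that signature (question textbox plus gr.State in, chat-history textbox plus gr.State out); the real implementation may differ:

def chatbot_interface(user_question, history):
    history = history or []                       # gr.State([]) starts each session empty
    answer = rag_query(user_question)
    history.append((user_question, answer))
    transcript = "\n\n".join(f"You: {q}\nBot: {a}" for q, a in history)
    return transcript, history                    # textbox contents, updated session state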