# app.py — Climate claim-verification demo (Gradio + SentenceTransformers + FAISS).
# NOTE(review): the original upload contained Hugging Face file-viewer residue
# here ("raw / history blame", byte size, commit hash); replaced with this
# comment header so the file is valid Python.
import os
import zipfile
import pandas as pd
import gradio as gr
from sentence_transformers import SentenceTransformer
import faiss
import numpy as np
# ---- Dataset preparation -------------------------------------------------
# Unpack the uploaded archive once; a previous run's extraction is reused.
ARCHIVE_NAME = "climate.zip"
DATA_DIR = "climate_data"

if not os.path.exists(DATA_DIR):
    with zipfile.ZipFile(ARCHIVE_NAME, "r") as archive:
        archive.extractall(DATA_DIR)

# Load the headerless CSV: each row is (label, title, description).
df = pd.read_csv(
    os.path.join(DATA_DIR, "train.csv"),
    header=None,
    names=["label", "title", "description"],
)

# One searchable text field per row: "<title>. <description>".
df["content"] = df["title"].fillna("") + ". " + df["description"].fillna("")

# ---- Embedding + FAISS index ---------------------------------------------
# NOTE(review): the model is fetched at import time — assumes network access
# (or a warm local cache) on first run.
model = SentenceTransformer("all-MiniLM-L6-v2")
corpus_embeddings = model.encode(df["content"].tolist(), show_progress_bar=True)

# Exact (brute-force) L2 index over every row embedding.
index = faiss.IndexFlatL2(corpus_embeddings.shape[1])
index.add(corpus_embeddings)
def retrieve_and_respond(claim, k=5):
    """Retrieve the ``k`` nearest articles to *claim*, formatted as Markdown.

    Parameters
    ----------
    claim : str
        Free-text claim to embed and search the corpus with.
    k : int, optional
        Number of nearest neighbours to retrieve (default 5).

    Returns
    -------
    str
        Markdown blocks (title / description / label), one per retrieved
        article, separated by blank lines.
    """
    # FAISS requires a float32 matrix; SentenceTransformer normally returns
    # one, but cast defensively so the search never fails on dtype.
    query_embedding = np.asarray(model.encode([claim]), dtype="float32")
    _, neighbor_ids = index.search(query_embedding, k)

    results = []
    for idx in neighbor_ids[0]:
        # FAISS pads the id array with -1 when the index holds fewer than k
        # vectors; skip those instead of letting df.iloc[-1] silently return
        # the last row as a fake match.
        if idx < 0:
            continue
        row = df.iloc[idx]
        results.append(
            f"*Title:* {row['title']}\n*Description:* {row['description']}\n*Label:* {row['label']}\n"
        )
    return "\n\n".join(results)
# ---- Web UI --------------------------------------------------------------
# Single-textbox-in, markdown-out Gradio app wrapping the retriever above.
iface = gr.Interface(
    fn=retrieve_and_respond,
    inputs=gr.Textbox(
        lines=2,
        placeholder="Enter a news-related claim here...",
    ),
    outputs="markdown",
    title="Claim Verifier using RAG (AG News)",
    description=(
        "Enter a claim and retrieve the most relevant AG News articles "
        "to verify or refute it."
    ),
)

iface.launch()