import os
import gradio as gr
import torch
from transformers import pipeline
from utils import clean_text

# Load the AI-text-detection model, using the GPU when one is available.
classifier = pipeline(
    task="text-classification",
    model="fakespot-ai/roberta-base-ai-text-detection-v1",
    device="cuda" if torch.cuda.is_available() else "cpu",
    token=os.environ.get("ACCESS_TOKEN"),
)


def predict(text: str) -> dict[str, float]:
    # Clean the input text, run the classifier, and map each label to its score.
    cleaned_text = clean_text(text)
    predictions = classifier(cleaned_text, top_k=None)
    return {p["label"]: p["score"] for p in predictions}


demo = gr.Interface(
    predict,
    inputs=gr.Textbox(),
    outputs=gr.Label(num_top_classes=2),
    title="AI Text Detector",
)

demo.launch()