# Hugging Face Space: GoalZero AI-text detection demo (Gradio app).
# (Page-scrape metadata — Space status, file size, git blob hashes, and
# line-number gutter — removed; it was not part of the source file.)
import gradio as gr
from transformers import RobertaTokenizer, RobertaForSequenceClassification
import torch
# Define available models.
# Keys are the labels shown in the UI dropdown; values are the Hugging Face
# model ids passed to from_pretrained(). Here the two are identical.
model_options = {
    "GoalZero/aidetection-ada-v0.2": "GoalZero/aidetection-ada-v0.2",
    "GoalZero/aidetection-ada-v0.1": "GoalZero/aidetection-ada-v0.1",
    "GoalZero/babbage-mini-v0.1": "GoalZero/babbage-mini-v0.1"
}
# Initialize global variables for model and tokenizer.
# Both are lazily (re)populated by classify_text() when the user switches
# models in the dropdown.
model = None
tokenizer = None
def load_model(model_name):
    """Load a RoBERTa sequence-classification model and its tokenizer.

    Parameters
    ----------
    model_name : str
        Hugging Face model id (e.g. ``"GoalZero/aidetection-ada-v0.2"``).

    Returns
    -------
    tuple
        ``(model, tokenizer)`` — a ``RobertaForSequenceClassification``
        and a ``RobertaTokenizer``.

    Raises
    ------
    RuntimeError
        If either download/initialization fails. The original exception is
        chained via ``from`` so the root cause is preserved. (RuntimeError
        is a subclass of Exception, so existing ``except Exception``
        callers are unaffected.)
    """
    try:
        return (
            RobertaForSequenceClassification.from_pretrained(model_name),
            RobertaTokenizer.from_pretrained(model_name)
        )
    except Exception as e:
        # Raise a specific exception type and keep the causal chain instead
        # of a bare Exception that callers cannot distinguish.
        raise RuntimeError(f"Failed to load model {model_name}: {str(e)}") from e
# Load default model at import time so the first request is fast.
try:
    default_model = "GoalZero/aidetection-ada-v0.2"
    model, tokenizer = load_model(default_model)
except Exception as e:
    # Best-effort: if the default fails (e.g. no network), the app still
    # starts; classify_text() will retry loading on the first request.
    print(f"Error loading default model: {str(e)}")
def classify_text(text, model_choice):
    """Score *text* with the selected model and return the AI probability.

    Parameters
    ----------
    text : str
        Raw input text from the UI textbox.
    model_choice : str
        Hugging Face model id selected in the dropdown.

    Returns
    -------
    dict
        On success: ``{"AI Probability": <percent float>, "Model used": id}``.
        On failure: ``{"error": <message>, "Model used": id}`` — errors are
        returned rather than raised so the Gradio UI shows them as JSON.
    """
    global model, tokenizer
    try:
        # (Re)load when nothing is cached, when a previous load failed
        # partway (model set but tokenizer missing), or when the user picked
        # a different model. getattr guards against a model object without
        # a name_or_path attribute.
        if (
            model is None
            or tokenizer is None
            or model_choice != getattr(model, "name_or_path", None)
        ):
            model, tokenizer = load_model(model_choice)
        # Strip periods and fold newlines into spaces.
        # NOTE(review): presumably matches the preprocessing these models
        # were trained with — confirm against the training pipeline.
        cleaned_text = text.replace('.', '').replace('\n', ' ')
        # Tokenize the cleaned input text for PyTorch inference.
        inputs = tokenizer(
            cleaned_text,
            return_tensors='pt',
            padding=True,
            truncation=True,
            max_length=128
        )
        # Inference only — no gradients needed.
        with torch.no_grad():
            outputs = model(**inputs)
        # Softmax over logits -> class probabilities; index 1 is the
        # "AI-written" class.
        probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
        prob_1 = probabilities[0][1].item()
        return {
            "AI Probability": round(prob_1 * 100, 10),
            "Model used": model_choice
        }
    except Exception as e:
        # Surface the failure in the JSON output instead of crashing the UI.
        return {
            "error": f"An error occurred: {str(e)}",
            "Model used": model_choice
        }
# Create the Gradio interface: a textbox + model dropdown feeding
# classify_text(), with the result rendered as JSON.
iface = gr.Interface(
    fn=classify_text,
    inputs=[
        gr.Textbox(
            lines=2,
            placeholder="Enter text here...",
            label="Input Text"
        ),
        # Dropdown choices are the keys of model_options; the selected key
        # is passed directly to classify_text as the model id.
        gr.Dropdown(
            choices=list(model_options.keys()),
            value="GoalZero/aidetection-ada-v0.2",
            label="Select Model Version"
        )
    ],
    outputs=gr.JSON(label="Results"),
    title="GoalZero Ada AI Detection",
    description="Enter text to get the probability of it being AI-written. Select a model version to use.",
    # One pre-filled example per available model version.
    examples=[
        ["Waymo is an American autonomous driving technology company that originated as the Google Self-Driving Car Project in 2009. It is now a subsidiary of Alphabet Inc., headquartered in Mountain View, California. The name \"Waymo\" was adopted in December 2016 when the project was rebranded and spun out of Google to focus on developing fully autonomous vehicles aimed at improving transportation safety and convenience", "GoalZero/babbage-mini-v0.1"],
        ["WWII demonstrated the importance of alliances in global conflicts. The Axis and Allied powers were formed as countries sought to protect their interests and expand their influence. This lesson underscores the potential for future global conflicts to involve complex alliances, similar to the Cold War era’s NATO and Warsaw Pact alignments.", "GoalZero/aidetection-ada-v0.2"],
        ["Eustace was a thorough gentleman. There was candor in his quack, and affability in his waddle; and underneath his snowy down beat a pure and sympathetic heart. In short, he was a most exemplary duck.", "GoalZero/aidetection-ada-v0.1"]
    ]
)
# Launch the app when run as a script (stray trailing "|" scrape artifact
# removed — it was a syntax error).
if __name__ == "__main__":
    # share=True also exposes a temporary public Gradio URL in addition to
    # the local server.
    iface.launch(share=True)