# RuttoniAI / app.py
import os

# Dependencies are installed at runtime here; on Hugging Face Spaces these would
# normally be listed in requirements.txt instead. sentencepiece is included
# because the slow T5Tokenizer depends on it.
os.system("pip install gradio transformers torch sentencepiece")

from transformers import T5Tokenizer, T5ForConditionalGeneration
import gradio as gr

# Load the fine-tuned weights from the local ./Ruttoni_AI directory and reuse
# the base Flan-T5 tokenizer the model was trained with.
model = T5ForConditionalGeneration.from_pretrained("./Ruttoni_AI")
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
print("Model loaded!")
# Generate a summary for the given text using the fine-tuned model.
def generate_summary(input_text):
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    # generate() defaults to a very short output length (max_length=20),
    # so set an explicit cap to allow longer summaries.
    outputs = model.generate(input_ids, max_new_tokens=128)
    summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return summary
ai = gr.Interface(fn=generate_summary, inputs="text", outputs="text")

# launch() blocks while the server is running, so log before starting it.
print("Starting interface...")
ai.launch()