# Chat-GPT / app.py
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# DialoGPT is a causal language model, so load it with AutoModelForCausalLM
# (AutoModelWithLMHead is deprecated in recent transformers releases).
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
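# Note: the first run downloads and caches the DialoGPT-medium weights from the
# Hugging Face Hub, so startup can take a while on a fresh Space.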

def chatbot(input_text):
    # Append the EOS token so DialoGPT sees the prompt as a complete turn.
    input_ids = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors="pt")
    # Sample a single response; pad_token_id avoids the "no pad token set" warning.
    output = model.generate(input_ids, max_length=50, do_sample=True, top_k=50, top_p=0.95, pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens, not the echoed prompt.
    output_text = tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)
    return output_text

iface = gr.Interface(fn=chatbot, inputs="text", outputs="text", description="ChatGPT")
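# launch() starts a local web server (http://127.0.0.1:7860 by default) and prints
# the URL; passing share=True would also create a temporary public link.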
iface.launch()