# NOTE: the following lines are HuggingFace Hub web-UI residue (file header
# captured when the page was scraped); kept as comments so the file stays
# valid Python.
# arampacha's picture
# trying stuff
# 23a6073
# raw
# history blame
# 793 Bytes
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Checkpoint fine-tuned on the APPS code-generation dataset.
model_name = "flax-community/gpt-neo-125M-apps"
# define model and tokenizer
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# GPT-Neo ships without a pad token; reuse EOS so generate() can pad batches.
tokenizer.pad_token = tokenizer.eos_token
def generate_solution(prompt, **kwargs):
    """Generate a code continuation for *prompt* with the module-level model.

    Args:
        prompt: Problem statement to condition the model on.
        **kwargs: Extra keyword arguments forwarded to ``model.generate``
            (e.g. ``max_length``, ``do_sample``). Empty when called from the
            Gradio UI, so ``generate`` falls back to its defaults.

    Returns:
        The newly generated text only (the prompt tokens are sliced off),
        decoded without special tokens so pad/eos markers such as
        ``<|endoftext|>`` do not leak into the UI output.
    """
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    # Remember the prompt length so we can return only the continuation.
    start = len(input_ids[0])
    output = model.generate(input_ids, pad_token_id=tokenizer.pad_token_id, **kwargs)
    # Fix: skip special tokens — pad_token == eos_token, so without this the
    # decoded string ends with visible <|endoftext|> markers.
    return tokenizer.decode(output[0][start:], skip_special_tokens=True)
# Gradio UI: a single multiline textbox feeding the generator, text output.
inputs = [
    gr.inputs.Textbox(placeholder="Define a problem here ...", lines=5),
]

demo = gr.Interface(
    fn=generate_solution,
    inputs=inputs,
    outputs="text",
    title="Coding problem solver",
)
demo.launch()