cad-llm / app.py
evanthebouncy's picture
plz work
85f3da5
raw
history blame contribute delete
759 Bytes
import gradio as gr
from transformers import T5ForConditionalGeneration, AutoTokenizer, TrainingArguments
# Load the tokenizer and fine-tuned T5 checkpoint from the Hugging Face Hub.
# NOTE(review): this downloads weights at import time — first launch is slow.
tokenizer = AutoTokenizer.from_pretrained('evanthebouncy/cad-llm')
model = T5ForConditionalGeneration.from_pretrained('evanthebouncy/cad-llm')
# Sampling temperature forwarded to model.generate (1.0 = logits unscaled).
temp = 1.0
def generate_samples_with_temp(txt_n):
    """Draw several sampled generations for one prompt.

    Args:
        txt_n: A single string of the form ``"<prompt>|<n>"`` where ``<n>``
            is the number of samples to generate.

    Returns:
        The decoded generations joined by newlines, one sample per line.

    Raises:
        ValueError: If the trailing ``|<n>`` part is missing or ``<n>`` is
            not an integer.
    """
    # Split on the LAST '|' so a prompt that itself contains '|' still
    # parses (the previous split('|') raised an unpacking error there).
    txt, sep, n = txt_n.rpartition('|')
    if not sep:
        raise ValueError("expected input of the form '<prompt>|<n>'")
    n_samples = int(n)
    # Repeat the prompt n times so a single generate() call yields n samples.
    batch = tokenizer([txt] * n_samples, return_tensors='pt', padding=True)
    outputs = model.generate(
        batch.input_ids,
        do_sample=True,
        max_length=128,
        temperature=temp,
    )
    results = tokenizer.batch_decode(outputs, skip_special_tokens=True)
    return '\n'.join(results)
# Wire the sampler into a minimal text-in / text-out Gradio UI and serve it.
iface = gr.Interface(
    fn=generate_samples_with_temp,
    inputs="text",
    outputs="text",
)
iface.launch()