# Source: Hugging Face Space by beingpraveen — app.py (commit 05672c2, 1.15 kB)
import streamlit as st
import layer
from transformers import AutoModelWithLMHead, AutoTokenizer
# Load the English-to-SQL T5 checkpoint from the Hugging Face Hub.
# Both calls download weights on first run (network I/O at import time).
# NOTE(review): AutoModelWithLMHead is deprecated in recent transformers
# releases — AutoModelForSeq2SeqLM is the replacement for T5; confirm the
# pinned transformers version before switching.
tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-wikiSQL")
model = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-wikiSQL")
def get_sql(query):
    """Translate an English question into a SQL query with the fine-tuned T5 model.

    Args:
        query: Natural-language question, e.g. "How many users are there?".

    Returns:
        The generated SQL string, with special tokens (<pad>, </s>) stripped.
    """
    # T5 was fine-tuned with this task prefix; the explicit </s> EOS marker
    # matches the formatting used during fine-tuning.
    input_text = "translate English to SQL: %s </s>" % query
    features = tokenizer([input_text], return_tensors='pt')
    output = model.generate(input_ids=features['input_ids'],
                            attention_mask=features['attention_mask'])
    # skip_special_tokens=True: without it the decoded string is polluted
    # with "<pad>" and "</s>" sentinel tokens (bug in the original).
    return tokenizer.decode(output[0], skip_special_tokens=True)
# model = layer.get_model('layer/t5-fine-tuning-with-layer/models/t5-english-to-sql').get_train()
# tokenizer = layer.get_model('layer/t5-fine-tuning-with-layer/models/t5-tokenizer').get_train()
# def convert(query):
# inputs = tokenizer.encode(f"translate English to SQL: {query}", return_tensors="pt")
# outputs = model.generate(inputs, max_length=1024)
# sql = tokenizer.decode(outputs[0], skip_special_tokens=True)
# return sql
# Streamlit UI: free-text input box feeding the translator, output shown below.
query = st.text_input("Enter Text here", value="")
# Only run generation when the user has typed something — the original code
# invoked the model on the empty string at every first render, wasting a
# generate() call and displaying meaningless output.
output = get_sql(query) if query.strip() else ""
st.text_area(label="Output Sql Query:", value=output, height=100)