import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("hrshtsharma2012/NL2SQL-Picard-final")
model = AutoModelForSeq2SeqLM.from_pretrained("hrshtsharma2012/NL2SQL-Picard-final")

def generate_sql(query):
    input_text = "translate English to SQL: " + query
    inputs = tokenizer(input_text, return_tensors="pt", padding=True)
    outputs = model.generate(**inputs, max_length=512)
    sql_query = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Check if the output is a valid SQL query
    if not sql_query.lower().startswith("select"):
        return "The model did not generate a valid SQL query. Please try a different input or use a different model."
    return sql_query

# Create a Gradio interface
interface = gr.Interface(
    fn=generate_sql,
    inputs=gr.Textbox(lines=2, placeholder="Enter your natural language query here..."),
    outputs="text",
    title="NL to SQL with Picard",
    description="This model converts natural language queries into SQL. Enter your query!"
)

# Launch the app
if __name__ == "__main__":
    interface.launch()