Spaces:
Build error
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the instruction-tuned model and tokenizer. CodeLlama is a decoder-only
# model, so it needs AutoModelForCausalLM (AutoModelForSeq2SeqLM cannot load it),
# and the checkpoint lives on the Hub under the "codellama/" namespace.
model_id = "codellama/CodeLlama-7b-Instruct-hf"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
# Generate a response from the model for a given prompt.
def generate_response(input_text):
    inputs = tokenizer(input_text, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=256)
    # Decode only the newly generated tokens, skipping the echoed prompt
    # and any special tokens.
    response = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:],
                                skip_special_tokens=True)
    return response
# Create a Gradio interface. gr.Interface expects `inputs`/`outputs` rather than
# `input_type`/`output_type`, and `share`/`enable_chat` are not valid constructor
# arguments, which is what breaks the build.
interface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    title="CodeLlama-7b-Instruct Chatbot",
    description="A chatbot powered by the CodeLlama-7b-Instruct model.",
    article="This chatbot is fine-tuned on a dataset of instructional text and can be used to generate responses to natural language prompts.",
    theme="default",
)
# Launch the interface (share=True belongs here in launch(), not in gr.Interface).
interface.launch()
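
Besides the API mistakes fixed above, another common cause of a Spaces build error is a missing or incomplete requirements.txt in the Space repository. Below is a minimal sketch of what it might contain for this app; the exact package list (including sentencepiece for the Llama tokenizer) is an assumption, not something from the original post, and gradio itself is normally installed by the Gradio Space SDK based on the sdk_version in the README metadata:

# requirements.txt (hypothetical minimal dependency list for this Space)
transformers
torch
sentencepiece

Pinning known-good versions of these packages and checking the build logs on the Space page makes it easier to tell a dependency failure apart from a code error.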