# Translator-API / app.py
# (Hugging Face Spaces web-view residue preserved below as comments so the
#  file remains valid Python)
# Lenylvt's picture
# Update app.py
# f4c6da9 verified
# raw
# history blame
# 1.48 kB
from huggingface_hub import InferenceClient
import gradio as gr
# Initialize the inference client with the Mixtral model.
# This talks to the hosted Hugging Face Inference API over the network;
# NOTE(review): presumably an HF token is picked up from the environment for
# rate-limited/gated access — confirm deployment config.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
def translate_text(text, target_language):
    """Translate ``text`` into ``target_language`` with the Mixtral model.

    Parameters
    ----------
    text : str
        The source text to translate.
    target_language : str
        Human-readable name of the target language (e.g. "French").

    Returns
    -------
    str
        The model's translation, with any echoed prompt removed.
    """
    # Guard: nothing to translate — avoid a pointless API round-trip.
    if not text:
        return ""
    prompt = f"Translate the following text to {target_language}: {text}"
    # BUG FIX: InferenceClient.text_generation takes the prompt as its first
    # positional argument and generation settings as plain keyword arguments;
    # the previous inputs=/parameters=/options= dicts are the raw HTTP JSON
    # payload shape, not this client method's signature. The method returns
    # the generated text as a plain str, not [{"generated_text": ...}].
    generated = client.text_generation(prompt, max_new_tokens=100)
    # Some models echo the prompt before the answer; keep only what follows it.
    return generated.split(prompt)[-1].strip()
# Target languages offered in the UI dropdown, in display order.
# Append more names here to extend the translator.
languages = "French,Spanish,German,Italian,Portuguese".split(",")
# Build the Gradio UI: one text box + language dropdown in, translation out.
source_box = gr.Textbox(label="Text to Translate", placeholder="Enter text here...")
language_dropdown = gr.Dropdown(label="Target Language", choices=languages)
result_box = gr.Textbox(label="Translated Text")

iface = gr.Interface(
    fn=translate_text,
    inputs=[source_box, language_dropdown],
    outputs=result_box,
    title="Simple Translator with Mixtral",
    description="Translate text to various languages using the Mixtral model from Hugging Face.",
)
# Start the web server (blocks until shut down).
iface.launch()