# bs-demo2 / app.py
# Source: Hugging Face Space by Rioo26, commit 03b8235 (verified)
import gradio as gr
from transformers import pipeline
# Use a pipeline as a high-level helper.
# Load the chat model once at module import so every request reuses the same
# pipeline. (The previous code also ran a throwaway warm-up generation here
# with a hard-coded "Who are you?" prompt and discarded the result — dead
# work that only slowed startup, so it has been removed.)
pipe = pipeline("text-generation", model="X-D-Lab/MindChat-Qwen2-0_5B")
def respond(message):
    """Generate a chat reply for *message* with the MindChat pipeline.

    The model is chat-tuned, so the prompt is wrapped in the chat
    ``messages`` format (the same shape this file's original warm-up code
    used). Passing a raw string to a text-generation pipeline makes
    ``generated_text`` start with the prompt itself, so the user's own
    question would be echoed back in every answer.

    Parameters:
        message: the user's input text from the Gradio textbox.

    Returns:
        The assistant's reply text (capped at 100 new tokens).
    """
    chat = [{"role": "user", "content": message}]
    result = pipe(chat, max_new_tokens=100)
    # With chat-format input, generated_text is the full conversation as a
    # list of role/content dicts; the assistant's reply is the last entry.
    return result[0]["generated_text"][-1]["content"]
# Minimal Gradio UI: one text input, one text output, wired to respond().
demo = gr.Interface(fn=respond, inputs="text", outputs="text")
demo.launch()