File size: 1,888 Bytes
04d6bef a575a82 185b2ca a575a82 15164db 6ede0b3 2b85ecd 185b2ca ccbed2f 15164db 163b989 ebd9f09 a575a82 ebd9f09 a575a82 ebd9f09 a575a82 ebd9f09 15164db a575a82 15164db ebd9f09 15164db ebd9f09 15164db a575a82 5aaaa02 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 |
import gradio as gr
from huggingface_hub import InferenceClient
import os
import uuid
import modelscope_studio.components.antd as antd
import modelscope_studio.components.antdx as antdx
import modelscope_studio.components.base as ms
# Serverless Hugging Face inference client pinned to the zephyr-7b-beta chat model.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
# Absolute path to the logo image shipped alongside this script.
# NOTE(review): onehealth_logo is never referenced in the visible code — confirm it is used elsewhere (e.g. in a UI layout) or remove it.
onehealth_logo = os.path.join(os.path.dirname(__file__), "one-health.png")
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat reply for *message* from the zephyr-7b-beta model.

    Generator used by ``gr.ChatInterface``: yields the progressively
    accumulated assistant reply after each streamed token.

    Args:
        message: The user's newest chat message.
        history: Prior turns as ``(user, assistant)`` pairs; empty strings
            are skipped.
        system_message: Optional system prompt override; when falsy, the
            built-in "Dr. Kiki" persona below is used (previously this
            parameter was accepted but ignored).
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.

    Yields:
        str: The assistant reply accumulated so far.
    """
    persona = system_message or "Your name is Dr. Kiki. You are a caring and conversational Medical Health Consultant. You always start by asking the person’s name and how they are feeling. You engage like a real doctor, not an AI, making the conversation warm and human. Your goal is to understand their concerns by asking clarifying questions and offering thoughtful, reassuring guidance without causing unnecessary worry. You do not diagnose or prescribe medication, but you provide insights on symptoms, risk factors, and next steps while encouraging professional medical care when necessary."
    messages = [{"role": "system", "content": persona}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    # Distinct loop name: the original reused `message`, clobbering the
    # user-message parameter while iterating stream chunks.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # Final/role-only chunks can carry None content; the original
        # `response += token` raised TypeError on those.
        if token:
            response += token
            yield response
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
# Wire respond()'s extra parameters to UI controls. Without additional_inputs,
# ChatInterface calls respond(message, history) only, and every chat turn
# failed with "missing positional arguments" for system_message/max_tokens/
# temperature/top_p.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="", label="System message (blank = Dr. Kiki persona)"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()
|