Update app.py
app.py CHANGED
@@ -44,8 +44,8 @@ def has_no_history(chatbot, history):
     return not chatbot and not history
 
 
-header = "
-prompt_template = "### Human: {query}
+header = ""
+prompt_template = "### Human: {query}### Assistant: {response}"
 
 
 def generate(
     user_message,
@@ -143,7 +143,7 @@ def process_example(args):
     return [x, y]
 
 
-title = """<h1 align="center">
+title = """<h1 align="center">TinyLlama Chat Playground 💬</h1>"""
 custom_css = """
 #banner-image {
     display: block;
@@ -163,7 +163,7 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
     with gr.Column():
         gr.Markdown(
             """
-            💻 This demo showcases the
+            💻 This demo showcases the PY007/TinyLlama-1.1B-Chat-v0.1 model that is finetuned from TinyLlama 503B checkpoints on the openassitant-guanaco dataset.
             """
         )
 
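For context, the key change above is the Guanaco-style prompt_template ("### Human: {query}### Assistant: {response}"). Below is a minimal sketch of how such a template can be expanded into a multi-turn prompt and sent to PY007/TinyLlama-1.1B-Chat-v0.1 through the transformers text-generation pipeline; the build_prompt helper and the sampling settings are illustrative assumptions, not this Space's actual generate() code.

# Illustrative sketch only -- not the Space's generate() implementation.
from transformers import pipeline

prompt_template = "### Human: {query}### Assistant: {response}"

def build_prompt(history, user_message):
    # history: list of (query, response) pairs from earlier turns.
    prompt = "".join(
        prompt_template.format(query=q, response=r) for q, r in history
    )
    # Leave the response slot empty so the model continues as the Assistant.
    return prompt + prompt_template.format(query=user_message, response="")

generator = pipeline("text-generation", model="PY007/TinyLlama-1.1B-Chat-v0.1")
prompt = build_prompt([], "What is TinyLlama?")
out = generator(prompt, max_new_tokens=128, do_sample=True, temperature=0.7)
# The pipeline echoes the prompt; keep only the newly generated Assistant text.
print(out[0]["generated_text"][len(prompt):])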