Commit · 92158f0
Parent(s): 7962d2c
Use conversational
app.py CHANGED
@@ -4,7 +4,6 @@ import gradio
 history = []
 
 
-
 def get_history_messages():
     messages = []
     for user, assist in history:
@@ -14,8 +13,8 @@ def get_history_messages():
 
 
 def predict(prompt):
-    pipe = pipeline("
-    response = pipe
+    pipe = pipeline("conversational", model="cognitivecomputations/TinyDolphin-2.8-1.1b")
+    response = pipe(
         [
             *get_history_messages(),
             {"role": "user", "content": prompt}
@@ -24,7 +23,7 @@ def predict(prompt):
     history.append((prompt, ""))
     message = ""
     for chunk in response:
-        message += chunk[
+        message += chunk.messages[-1]["content"]
         history[-1] = (prompt, message)
         yield "", history
 
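
For context, the new predict() builds a list of role/content messages, feeds it to a transformers "conversational" pipeline backed by cognitivecomputations/TinyDolphin-2.8-1.1b, and accumulates the assistant text into history so the Gradio chat UI can stream it. A minimal non-streaming sketch of that pipeline pattern follows; the example prompt and the explicit Conversation wrapper are illustrative assumptions (the commit passes the message list to the pipeline directly), and it presumes a transformers release that still ships the conversational task:

# Minimal sketch of the conversational-pipeline pattern predict() now relies on.
# Assumes a transformers release that still provides the "conversational" task;
# the example prompt and the explicit Conversation wrapper are illustrative only.
from transformers import Conversation, pipeline

pipe = pipeline("conversational", model="cognitivecomputations/TinyDolphin-2.8-1.1b")

# Chat history in the same role/content format that get_history_messages() produces.
conversation = Conversation([{"role": "user", "content": "Hello, who are you?"}])

# The pipeline appends the assistant's reply to the conversation's message list,
# which is what the commit reads back via .messages[-1]["content"].
conversation = pipe(conversation)
print(conversation.messages[-1]["content"])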