adride00
committed on
Commit
·
66038ba
1
Parent(s):
45c4182
Refactor app.py to integrate the DialoGPT model for generating conversational responses. Update the answer function to utilize message history and improve response coherence. Adjust the Gradio interface description to reflect the new functionality.
Browse files
app.py
CHANGED
@@ -1,22 +1,28 @@
|
|
1 |
-
|
2 |
-
import gradio as gr
|
3 |
-
import random, datetime
|
4 |
|
5 |
-
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
""
|
11 |
-
|
12 |
-
|
13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
14 |
|
15 |
demo = gr.ChatInterface(
|
16 |
fn=answer,
|
17 |
title="IA de Cikode",
|
18 |
-
description="
|
19 |
-
type="messages"
|
20 |
-
)
|
21 |
|
22 |
-
demo.queue(default_concurrency_limit=1).launch()
|
|
|
1 |
+
import gradio as gr
import torch
import transformers

# Single source of truth for the checkpoint so the model and tokenizer
# arguments below can never drift apart.
MODEL_ID = "microsoft/DialoGPT-medium"  # free conversational model

# 1. Load the model (takes ~1-2 min the first time on CPU).
# device_map="auto" places the model on available GPUs when CUDA is
# present; on a CPU-only host we pass None so the pipeline stays on CPU.
# NOTE(review): the "conversational" pipeline task was removed in
# transformers >= 4.42 — confirm the pinned transformers version.
generator = transformers.pipeline(
    "conversational",
    model=MODEL_ID,
    tokenizer=MODEL_ID,
    device_map="auto" if torch.cuda.is_available() else None,
)
|
10 |
+
|
11 |
+
def answer(message, history):
    """Produce the assistant's reply to *message* given the chat so far.

    *history* arrives in Gradio "messages" format: a list of dicts, each
    carrying a "role" ("user" / "assistant") and a "content" string.
    """
    # NOTE(review): Conversation and the "conversational" pipeline were
    # removed in transformers >= 4.42 — verify the installed version.
    from transformers import Conversation

    # Split the Gradio history into the two parallel lists that the
    # Conversation constructor expects.
    user_turns = [turn["content"] for turn in history if turn["role"] == "user"]
    bot_turns = [turn["content"] for turn in history if turn["role"] == "assistant"]

    convo = Conversation(
        text=message,
        past_user_inputs=user_turns,
        generated_responses=bot_turns,
    )
    reply = generator(convo, max_new_tokens=60)
    # The pipeline appends its answer to the conversation; return the
    # newest generated response.
    return reply.generated_responses[-1]
|
21 |
|
22 |
# Wire the model into a chat UI. type="messages" makes Gradio pass the
# history as role/content dicts, which is the shape answer() consumes.
demo = gr.ChatInterface(
    fn=answer,
    title="IA de Cikode",
    description="Chat demo con DialoGPT (CPU).",
    type="messages",
)

# Serve one request at a time — generation is CPU-bound here, so a
# concurrency limit of 1 keeps requests from piling onto the model.
demo.queue(default_concurrency_limit=1).launch()
|