Spaces:
Running
Running
Upload 12 files
Browse files — session_state.py +4 -10
session_state.py
CHANGED
@@ -110,7 +110,7 @@ class SessionState:
|
|
110 |
self.model = genai.GenerativeModel(model_name)
|
111 |
|
112 |
def initialize_chat(self, history=None):
|
113 |
-
"""Inicializa el chat con el modelo
|
114 |
if history is None:
|
115 |
history = self.gemini_history
|
116 |
|
@@ -118,14 +118,8 @@ class SessionState:
|
|
118 |
if self.model is None:
|
119 |
self.initialize_model()
|
120 |
|
121 |
-
#
|
122 |
-
self.chat = self.model.start_chat(
|
123 |
-
history=history,
|
124 |
-
generation_config={
|
125 |
-
"temperature": 0.9,
|
126 |
-
"stream": True # Forzar streaming siempre
|
127 |
-
}
|
128 |
-
)
|
129 |
|
130 |
# Verificar que el chat se inicializó correctamente
|
131 |
if self.chat is None:
|
@@ -139,7 +133,7 @@ class SessionState:
|
|
139 |
|
140 |
return self.chat.send_message(
|
141 |
prompt,
|
142 |
-
stream=stream,
|
143 |
generation_config={
|
144 |
"temperature": 0.9
|
145 |
}
|
|
|
110 |
self.model = genai.GenerativeModel(model_name)
|
111 |
|
112 |
def initialize_chat(self, history=None):
|
113 |
+
"""Inicializa el chat con el modelo"""
|
114 |
if history is None:
|
115 |
history = self.gemini_history
|
116 |
|
|
|
118 |
if self.model is None:
|
119 |
self.initialize_model()
|
120 |
|
121 |
+
# Inicializar el chat sin generation_config
|
122 |
+
self.chat = self.model.start_chat(history=history)
|
|
|
|
|
|
|
|
|
|
|
|
|
123 |
|
124 |
# Verificar que el chat se inicializó correctamente
|
125 |
if self.chat is None:
|
|
|
133 |
|
134 |
return self.chat.send_message(
|
135 |
prompt,
|
136 |
+
stream=stream,
|
137 |
generation_config={
|
138 |
"temperature": 0.9
|
139 |
}
|