import os

import streamlit as st
import joblib
import google.generativeai as genai

class SessionState:
    """
    Manages the Streamlit session state in a centralized way.

    Encapsulates all operations related to st.session_state.
    """
    
    def __init__(self):
        # Initialize default values if they are not already set
        defaults = {
            'chat_id': None,
            'chat_title': None,
            'messages': [],
            'gemini_history': [],
            'model': None,
            'chat': None,
            'prompt': None,
        }
        for key, value in defaults.items():
            if key not in st.session_state:
                st.session_state[key] = value
    
    # Getters and setters for each property
    @property
    def chat_id(self):
        return st.session_state.chat_id
    
    @chat_id.setter
    def chat_id(self, value):
        st.session_state.chat_id = value
    
    @property
    def chat_title(self):
        return st.session_state.chat_title
    
    @chat_title.setter
    def chat_title(self, value):
        st.session_state.chat_title = value
    
    @property
    def messages(self):
        return st.session_state.messages
    
    @messages.setter
    def messages(self, value):
        st.session_state.messages = value
    
    @property
    def gemini_history(self):
        return st.session_state.gemini_history
    
    @gemini_history.setter
    def gemini_history(self, value):
        st.session_state.gemini_history = value
    
    @property
    def model(self):
        return st.session_state.model
    
    @model.setter
    def model(self, value):
        st.session_state.model = value
    
    @property
    def chat(self):
        return st.session_state.chat
    
    @chat.setter
    def chat(self, value):
        st.session_state.chat = value
    
    @property
    def prompt(self):
        return st.session_state.prompt
    
    @prompt.setter
    def prompt(self, value):
        st.session_state.prompt = value
    
    # Utility methods
    def add_message(self, role, content, avatar=None):
        """A帽ade un mensaje al historial"""
        message = {
            'role': role,
            'content': content,
        }
        if avatar:
            message['avatar'] = avatar
        self.messages.append(message)
    
    def clear_prompt(self):
        """Limpia el prompt del estado de la sesi贸n"""
        self.prompt = None
    
    def initialize_model(self, model_name='gemini-2.0-flash'):
        """Inicializa el modelo de IA"""
        self.model = genai.GenerativeModel(model_name)
    
    def initialize_chat(self, history=None):
        """Inicializa el chat con el modelo"""
        if history is None:
            history = self.gemini_history
        self.chat = self.model.start_chat(history=history)
    
    def generate_chat_title(self, prompt, model_name='gemini-2.0-flash'):
        """Generate a short title for the chat based on its first message"""
        try:
            title_generator = genai.GenerativeModel(model_name)
            # The prompt is kept in Spanish to match the app's target language.
            title_response = title_generator.generate_content(
                f"Genera un título corto (máximo 5 palabras) que describa de qué trata esta consulta, "
                f"sin usar comillas ni puntuación: '{prompt}'")
            return title_response.text.strip()
        except Exception as e:
            print(f"Error generating chat title: {e}")
            return None
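
    # Persistence: the chat history is stored with joblib as two files per
    # chat id, data/<chat_id>-st_messages (Streamlit display messages) and
    # data/<chat_id>-gemini_messages (raw Gemini history).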
    
    def save_chat_history(self, chat_id=None):
        """Persist the chat history to disk"""
        if chat_id is None:
            chat_id = self.chat_id
        
        # Make sure the data directory exists before writing
        os.makedirs('data', exist_ok=True)
        joblib.dump(self.messages, f'data/{chat_id}-st_messages')
        joblib.dump(self.gemini_history, f'data/{chat_id}-gemini_messages')
    
    def load_chat_history(self, chat_id=None):
        """Load the chat history from disk, returning True on success"""
        if chat_id is None:
            chat_id = self.chat_id
        
        try:
            self.messages = joblib.load(f'data/{chat_id}-st_messages')
            self.gemini_history = joblib.load(f'data/{chat_id}-gemini_messages')
            return True
        except Exception:
            # No saved history for this chat (or it could not be read)
            self.messages = []
            self.gemini_history = []
            return False
    
    def has_messages(self):
        """Verifica si hay mensajes en el historial"""
        return len(self.messages) > 0
    
    def has_prompt(self):
        """Verifica si hay un prompt en el estado de la sesi贸n"""
        return self.prompt is not None and self.prompt.strip() != ""
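
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original module). Assumes the
# Gemini API key has already been configured elsewhere, e.g. with
# genai.configure(api_key=...), and that this snippet lives in the Streamlit
# app script that imports SessionState; the chat-input placeholder, avatar,
# and 'nuevo-chat' id below are arbitrary example values.
#
#     state = SessionState()
#     if state.model is None:
#         state.initialize_model()
#         state.initialize_chat()
#
#     if prompt := st.chat_input('Escribe tu mensaje...'):
#         state.add_message('user', prompt)
#         response = state.chat.send_message(prompt)
#         state.add_message('ai', response.text, avatar='✨')
#         state.gemini_history = state.chat.history
#         if state.chat_id is None:
#             state.chat_id = 'nuevo-chat'
#             state.chat_title = state.generate_chat_title(prompt) or state.chat_id
#         state.save_chat_history()
# ---------------------------------------------------------------------------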