Shreeti Shrestha committed
Commit ac75410 · 1 Parent(s): 4fd3a9c

fix llm crash

Files changed (2)
  1. pages/llm_tutor.py +37 -113
  2. utils/questions_dataset.py +1 -1
pages/llm_tutor.py CHANGED
@@ -1,6 +1,4 @@
 import time
-import os
-import joblib
 import streamlit as st
 from utils.questions_dataset import system_instruction, get_model_tools
 from google.genai import types
@@ -8,7 +6,6 @@ from google import genai
 
 st.set_page_config(page_title="LSAT Group A", page_icon="📘")
 
-# GOOGLE_API_KEY=os.environ.get('GOOGLE_API_KEY')
 GEMINI_API_KEY = "AIzaSyAjpHA08BUwLhK-tIlORxcB18RAp3541-M"
 client = genai.Client(api_key=GEMINI_API_KEY)
 
@@ -16,61 +13,12 @@ new_chat_id = f'{time.time()}'
 MODEL_ROLE = 'ai'
 AI_AVATAR_ICON = '✨'
 
-# Create a data/ folder if it doesn't already exist
-try:
-    os.mkdir('data/')
-except:
-    # data/ folder already exists
-    pass
-
-# Load past chats (if available)
-try:
-    past_chats: dict = joblib.load('data/past_chats_list')
-except:
-    past_chats = {}
-
-# Sidebar allows a list of past chats
-with st.sidebar:
-    st.write('# Past Chats')
-    if st.session_state.get('chat_id') is None:
-        st.session_state.chat_id = st.selectbox(
-            label='Pick a past chat',
-            options=[new_chat_id] + list(past_chats.keys()),
-            format_func=lambda x: past_chats.get(x, 'New Chat'),
-            placeholder='_',
-        )
-    else:
-        # This will happen the first time AI response comes in
-        st.session_state.chat_id = st.selectbox(
-            label='Pick a past chat',
-            options=[new_chat_id, st.session_state.chat_id] + list(past_chats.keys()),
-            index=1,
-            format_func=lambda x: past_chats.get(x, 'New Chat' if x != st.session_state.chat_id else st.session_state.chat_title),
-            placeholder='_',
-        )
-
-    # Save new chats after a message has been sent to AI
-    st.session_state.chat_title = f'ChatSession-{st.session_state.chat_id}'
-
-
 st.title("📘Logical Reasoning: Group A")
 next_btn = st.button("Click here when finished")
 
 st.write("Use this AI Tutor to help you understand the concepts. You can ask it to explain the concepts, provide examples, or clarify any doubts you have.")
 st.write("Start by sending a hello message!")
 
-# Chat history (allows to ask multiple questions)
-try:
-    st.session_state.messages = joblib.load(
-        f'data/{st.session_state.chat_id}-st_messages'
-    )
-    st.session_state.gemini_history = joblib.load(
-        f'data/{st.session_state.chat_id}-gemini_messages'
-    )
-except:
-    st.session_state.messages = []
-    st.session_state.gemini_history = []
-
 sys_prompt = system_instruction % (
     st.session_state.prequiz_df['num_correct'][0],
     st.session_state.prequiz_df['num_questions'][0],
@@ -90,13 +38,25 @@ sys_prompt = system_instruction % (
     st.session_state.prequiz_df['num_questions'][7],
     st.session_state.prequiz_df['num_correct'][8],
     st.session_state.prequiz_df['num_questions'][8]
-)
-st.session_state.chat = client.chats.create(model='gemini-2.0-flash',
+) if st.session_state.prequiz_df is not None else ""
+
+st.session_state.chat_id = new_chat_id
+st.session_state.chat_title = f'ChatSession-{st.session_state.chat_id}'
+st.session_state.gemini_history = []
+
+# Initialize session state
+if "chat" not in st.session_state:
+    st.session_state.chat = None
+
+st.session_state.chat = client.chats.create(model='gemini-2.0-flash',
     config=types.GenerateContentConfig(
         tools=[get_model_tools()],
        system_instruction=sys_prompt),
-    history=st.session_state.gemini_history
-)
+    history=st.session_state.gemini_history
+)
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
 
 # Display chat messages from history on app rerun
 for message in st.session_state.messages:
@@ -104,71 +64,35 @@ for message in st.session_state.messages:
         name=message['role'],
         avatar=message.get('avatar'),
     ):
-        st.markdown(message['content'])
+        st.markdown(message['text'])
 
-# React to user input
-if prompt := st.chat_input('Your message here...'):
-    # Save this as a chat for later
-    if st.session_state.chat_id not in past_chats.keys():
-        past_chats[st.session_state.chat_id] = st.session_state.chat_title
-        joblib.dump(past_chats, 'data/past_chats_list')
-    # Display user message in chat message container
-    with st.chat_message('user'):
-        st.markdown(prompt)
-    # Add user message to chat history
-    st.session_state.messages.append(
-        dict(
-            role='user',
-            content=prompt,
-        )
-    )
-    ## Send message to AI
-    response = st.session_state.chat.send_message_stream(
-        prompt,
-    )
-    # Display assistant response in chat message container
+# Chat input
+user_input = st.chat_input("💬 Ask your tutor a question...")
+if user_input:
+    st.chat_message("user").markdown(user_input)
+    st.session_state.messages.append({"role": "user", "text": user_input})
+    full_response = ""
+
+    response = st.session_state.chat.send_message_stream(user_input)
+
+    full_reply = ""
    with st.chat_message(
         name=MODEL_ROLE,
         avatar=AI_AVATAR_ICON,
-    ):
-        message_placeholder = st.empty()
-        full_response = ''
-        assistant_response = response
-        # Streams in a chunk at a time
+    ):
+        response_box = st.empty()
         for chunk in response:
-            # Simulate stream of chunk
-            if chunk.text == None:
-                chunk.text = "Let's try that one more time so I understand. Please tell me one more time."
-
-            for ch in chunk.text.split(' '):
-                full_response += ch + ' '
+            chunk_text = chunk.text
+            if chunk_text:
+                full_reply += chunk_text
                 time.sleep(0.05)
-                # Rewrites with a cursor at end
-                message_placeholder.write(full_response + '▌')
-            # Write full message with placeholder
-            message_placeholder.write(full_response)
+                response_box.markdown(full_reply + "▌")
 
-        # Add assistant response to chat history
-        st.session_state.messages.append(
-            dict(
-                role=MODEL_ROLE,
-                content=full_response,
-                avatar=AI_AVATAR_ICON,
-            )
-        )
+        # Final display after stream ends
+        response_box.markdown(full_reply)
+        st.session_state.messages.append({"role": "assistant", "text": full_reply, "avatar": AI_AVATAR_ICON})
 
     st.session_state.gemini_history = st.session_state.chat.get_history()
-
-    # Save to file
-    joblib.dump(
-        st.session_state.messages,
-        f'data/{st.session_state.chat_id}-st_messages',
-    )
-    joblib.dump(
-        st.session_state.gemini_history,
-        f'data/{st.session_state.chat_id}-gemini_messages',
-    )
-
 
 if next_btn:
-    st.switch_page("pages/postquiz.py")
+    st.switch_page("pages/postquiz.py")
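Note on the fix: the crash this commit addresses appears to come from the old streaming loop, which assigned a fallback string to chunk.text whenever a chunk arrived without text; the new loop instead skips such chunks and only appends non-empty text. A minimal sketch of that guard pattern follows; the Chunk dataclass and accumulate helper are hypothetical stand-ins for illustration, not the google-genai stream itself.

# Minimal sketch of the guard used in the new loop (hypothetical Chunk stand-in,
# no live google-genai call): skip chunks whose .text is None instead of
# assigning a replacement string to them.
from dataclasses import dataclass
from typing import Iterable, Optional

@dataclass
class Chunk:
    text: Optional[str]  # streaming chunks may carry no text (e.g. tool-call chunks)

def accumulate(stream: Iterable[Chunk]) -> str:
    full_reply = ""
    for chunk in stream:
        chunk_text = chunk.text
        if chunk_text:              # guard: ignore None/empty chunks
            full_reply += chunk_text
    return full_reply

print(accumulate([Chunk("Hello, "), Chunk(None), Chunk("tutor!")]))  # -> "Hello, tutor!"

The same defensive idea shows up earlier in the new file: sys_prompt falls back to an empty string when st.session_state.prequiz_df is None, so building the chat no longer fails if the pre-quiz results are missing.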
utils/questions_dataset.py CHANGED
@@ -67,5 +67,5 @@ system_instruction = """You are an AI tutor that teaches users LSAT Logical Reas
 Question the user to ensure that they understand the material.
 Use practice questions from the tool to ensure they understand the material.
 If no practice questions are found in the tool for a certain subtopic, find general practice questions on that subtopic.
-Never give a one word answer. Always keep the conversation moving.
+Never give a one word answer. Always keep the conversation moving by prompting the user to either continue to another subtopic or keep practicing.
 Once the user has studied all the topics, prompt them to press the "Click here when finished" button. """