import os
import time

import joblib
import streamlit as st
from google import genai
from google.genai import types

from utils.questions_dataset import system_instruction, get_model_tools

st.set_page_config(page_title="LSAT Group A", page_icon="📘")

# Read the Gemini API key from the environment rather than hard-coding it in source.
GEMINI_API_KEY = os.environ.get('GOOGLE_API_KEY')
client = genai.Client(api_key=GEMINI_API_KEY)

new_chat_id = f'{time.time()}'
MODEL_ROLE = 'ai'
AI_AVATAR_ICON = '✨'

# Create a data/ folder if it doesn't already exist
try:
    os.mkdir('data/')
except FileExistsError:
    # data/ folder already exists
    pass

# Load past chats (if available)
try:
    past_chats: dict = joblib.load('data/past_chats_list')
except FileNotFoundError:
    past_chats = {}

# Sidebar lists the past chats
with st.sidebar:
    st.write('# Past Chats')
    if st.session_state.get('chat_id') is None:
        st.session_state.chat_id = st.selectbox(
            label='Pick a past chat',
            options=[new_chat_id] + list(past_chats.keys()),
            format_func=lambda x: past_chats.get(x, 'New Chat'),
            placeholder='_',
        )
    else:
        # This will happen the first time the AI response comes in
        st.session_state.chat_id = st.selectbox(
            label='Pick a past chat',
            options=[new_chat_id, st.session_state.chat_id] + list(past_chats.keys()),
            index=1,
            format_func=lambda x: past_chats.get(
                x,
                'New Chat' if x != st.session_state.chat_id else st.session_state.chat_title,
            ),
            placeholder='_',
        )
    # New chats are saved to past_chats after the first message has been sent to the AI
    st.session_state.chat_title = f'ChatSession-{st.session_state.chat_id}'

st.title("📘 Logical Reasoning: Group A")
next_btn = st.button("Click here when finished")
st.write(
    "Use this AI Tutor to help you understand the concepts. "
    "You can ask it to explain the concepts, provide examples, or clarify any doubts you have."
)
st.write("Start by sending a hello message!")

# Chat history (allows asking multiple questions across reruns)
try:
    st.session_state.messages = joblib.load(
        f'data/{st.session_state.chat_id}-st_messages'
    )
    st.session_state.gemini_history = joblib.load(
        f'data/{st.session_state.chat_id}-gemini_messages'
    )
except FileNotFoundError:
    st.session_state.messages = []
    st.session_state.gemini_history = []

# Fill the system-instruction template with the pre-quiz results:
# num_correct / num_questions pairs for each of the nine skill areas (rows 0-8).
prequiz_df = st.session_state.prequiz_df
sys_prompt = system_instruction % tuple(
    value
    for i in range(9)
    for value in (prequiz_df['num_correct'][i], prequiz_df['num_questions'][i])
)

st.session_state.chat = client.chats.create(
    model='gemini-2.0-flash',
    config=types.GenerateContentConfig(
        tools=[get_model_tools()],
        system_instruction=sys_prompt,
    ),
    history=st.session_state.gemini_history,
)

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(
        name=message['role'],
        avatar=message.get('avatar'),
    ):
        st.markdown(message['content'])

# React to user input
if prompt := st.chat_input('Your message here...'):
    # Save this as a chat for later
    if st.session_state.chat_id not in past_chats.keys():
        past_chats[st.session_state.chat_id] = st.session_state.chat_title
        joblib.dump(past_chats, 'data/past_chats_list')

    # Display user message in chat message container
    with st.chat_message('user'):
        st.markdown(prompt)

    # Add user message to chat history
    st.session_state.messages.append(
        dict(
            role='user',
            content=prompt,
        )
    )

    # Send message to AI
    response = st.session_state.chat.send_message_stream(prompt)

    # Display assistant response in chat message container
    with st.chat_message(
        name=MODEL_ROLE,
        avatar=AI_AVATAR_ICON,
    ):
        message_placeholder = st.empty()
        full_response = ''
        # Streams in a chunk at a time
        for chunk in response:
            chunk_text = chunk.text
            if chunk_text is None:
                chunk_text = (
                    "Let's try that one more time so I understand. "
                    "Please tell me one more time."
                )
            # Simulate a word-by-word stream of the chunk
            for ch in chunk_text.split(' '):
                full_response += ch + ' '
                time.sleep(0.05)
                # Rewrite with a cursor at the end
                message_placeholder.write(full_response + '▌')
        # Write the full message without the cursor placeholder
        message_placeholder.write(full_response)

    # Add assistant response to chat history
    st.session_state.messages.append(
        dict(
            role=MODEL_ROLE,
            content=full_response,
            avatar=AI_AVATAR_ICON,
        )
    )
    st.session_state.gemini_history = st.session_state.chat.get_history()

    # Save to file
    joblib.dump(
        st.session_state.messages,
        f'data/{st.session_state.chat_id}-st_messages',
    )
    joblib.dump(
        st.session_state.gemini_history,
        f'data/{st.session_state.chat_id}-gemini_messages',
    )

if next_btn:
    st.switch_page("pages/postquiz.py")