shreetishresthanp committed on
Commit
07b98cf
·
verified ·
1 Parent(s): 5960e06

Delete llm_tutor.py

Browse files
Files changed (1) hide show
  1. llm_tutor.py +0 -146
llm_tutor.py DELETED
@@ -1,146 +0,0 @@
import time
import os
import joblib
import streamlit as st
from utils.questions_dataset import system_instruction, get_model_tools
from google.genai import types
from google import genai

# SECURITY FIX: the Gemini API key was hard-coded in source (a leaked
# secret). Read it from the environment instead, accepting either the
# GEMINI_API_KEY or the GOOGLE_API_KEY variable, and fail fast with a
# clear message when neither is set.
GEMINI_API_KEY = os.environ.get('GEMINI_API_KEY') or os.environ.get('GOOGLE_API_KEY')
if not GEMINI_API_KEY:
    raise RuntimeError(
        'Missing API key: set the GEMINI_API_KEY (or GOOGLE_API_KEY) '
        'environment variable.'
    )
client = genai.Client(api_key=GEMINI_API_KEY)
# A fresh chat gets a timestamp-based id; it becomes a real saved chat once
# the first message is sent.
new_chat_id = f'{time.time()}'
MODEL_ROLE = 'ai'           # role label stored in the transcript for AI turns
AI_AVATAR_ICON = '✨'        # avatar shown next to AI messages

# Create the data/ cache folder if it doesn't already exist.
# os.makedirs(..., exist_ok=True) is idempotent, replacing the original
# bare try/except around os.mkdir which silently swallowed *every* error
# (including permission failures).
os.makedirs('data', exist_ok=True)
# Load the saved chat-id -> title index (if available). A missing or
# unreadable cache file just means there are no past chats yet; catch the
# specific failure modes instead of the original bare `except:`.
try:
    past_chats: dict = joblib.load('data/past_chats_list')
except (OSError, EOFError, KeyError):
    past_chats = {}
# Sidebar: lets the user resume any previously saved chat.
with st.sidebar:
    st.write('# Past Chats')
    saved_ids = list(past_chats.keys())
    if st.session_state.get('chat_id') is None:
        # First render of the session: nothing selected yet, so offer a
        # brand-new chat plus everything previously saved.
        st.session_state.chat_id = st.selectbox(
            label='Pick a past chat',
            options=[new_chat_id] + saved_ids,
            format_func=lambda x: past_chats.get(x, 'New Chat'),
            placeholder='_',
        )
    else:
        # Subsequent reruns (e.g. after the first AI reply): keep the
        # current chat pre-selected via index=1.
        st.session_state.chat_id = st.selectbox(
            label='Pick a past chat',
            options=[new_chat_id, st.session_state.chat_id] + saved_ids,
            index=1,
            format_func=lambda x: past_chats.get(
                x,
                'New Chat' if x != st.session_state.chat_id else st.session_state.chat_title,
            ),
            placeholder='_',
        )

    # Title under which this chat will be saved once a message is sent.
    st.session_state.chat_title = f'ChatSession-{st.session_state.chat_id}'
st.write('# Chat with LSAT Tutor')

# Restore this chat's transcript and Gemini-side history so the
# conversation survives Streamlit reruns. A missing cache file simply
# means this is a brand-new chat; catch the specific load failures
# instead of the original bare `except:`.
try:
    st.session_state.messages = joblib.load(
        f'data/{st.session_state.chat_id}-st_messages'
    )
    st.session_state.gemini_history = joblib.load(
        f'data/{st.session_state.chat_id}-gemini_messages'
    )
except (OSError, EOFError, KeyError):
    st.session_state.messages = []
    st.session_state.gemini_history = []
    print('new_cache made')
# (Re)create the chat session on every rerun, replaying the cached Gemini
# history so the model keeps its conversational context.
_chat_config = types.GenerateContentConfig(
    tools=[get_model_tools()],
    system_instruction=system_instruction,
)
st.session_state.chat = client.chats.create(
    model='gemini-2.0-flash',
    config=_chat_config,
    history=st.session_state.gemini_history,
)
# Replay the stored transcript so past messages reappear after each rerun.
for msg in st.session_state.messages:
    with st.chat_message(name=msg['role'], avatar=msg.get('avatar')):
        st.markdown(msg['content'])
# React to user input: send it to Gemini, stream the reply, persist both
# transcripts to disk.
if prompt := st.chat_input('Your message here...'):
    # First message of a new chat: register its id -> title mapping so the
    # sidebar lists it on future reruns.
    if st.session_state.chat_id not in past_chats:
        past_chats[st.session_state.chat_id] = st.session_state.chat_title
        joblib.dump(past_chats, 'data/past_chats_list')
    # Display user message in chat message container
    with st.chat_message('user'):
        st.markdown(prompt)
    # Add user message to chat history
    st.session_state.messages.append(
        dict(
            role='user',
            content=prompt,
        )
    )
    ## Send message to AI
    response = st.session_state.chat.send_message_stream(
        prompt,
    )
    # Display assistant response in chat message container
    with st.chat_message(
        name=MODEL_ROLE,
        avatar=AI_AVATAR_ICON,
    ):
        message_placeholder = st.empty()
        full_response = ''
        # Stream the reply a chunk at a time, rewriting the placeholder
        # with a trailing cursor to simulate typing.
        for chunk in response:
            # BUG FIX: the original tested `chunk.text == None` but then
            # fell through to `chunk.text.split(' ')`, raising
            # AttributeError on empty chunks. Skip such chunks instead
            # (also use the idiomatic `is None`).
            if chunk.text is None:
                full_response = "No response!! Report to admin!"
                continue
            for word in chunk.text.split(' '):
                full_response += word + ' '
                time.sleep(0.05)
                # Rewrite with a cursor at the end
                message_placeholder.write(full_response + '▌')
        # Write the final message without the cursor
        message_placeholder.write(full_response)

    # Add assistant response to chat history
    st.session_state.messages.append(
        dict(
            role=MODEL_ROLE,
            content=full_response,
            avatar=AI_AVATAR_ICON,
        )
    )

    st.session_state.gemini_history = st.session_state.chat.get_history()

    # Persist both transcript forms so the session survives reruns.
    joblib.dump(
        st.session_state.messages,
        f'data/{st.session_state.chat_id}-st_messages',
    )
    joblib.dump(
        st.session_state.gemini_history,
        f'data/{st.session_state.chat_id}-gemini_messages',
    )