Sambhavnoobcoder committed (verified)
Commit 8cb4906 · 1 Parent(s): 3877cfc

trying to use gr.chatInterface

Files changed (1)
  1. app.py +12 -19
app.py CHANGED
@@ -10,9 +10,6 @@ import gradio as gr
 GOOGLE_API_KEY = 'AIzaSyA0yLvySmj8xjMd0sedSgklg1fj0wBDyyw' # Replace with your API key
 genai.configure(api_key=GOOGLE_API_KEY)
 
-# Initialize conversation history
-conversation_history = []
-
 # Fetch lecture notes and model architectures
 def fetch_lecture_notes():
     lecture_urls = [
@@ -63,6 +60,8 @@ def initialize_faiss_index(embeddings):
     return index
 
 # Handle natural language queries
+conversation_history = []
+
 def handle_query(query, faiss_index, embeddings_texts, model):
     global conversation_history
 
@@ -129,34 +128,28 @@ def chatbot(message, history):
     response, sources = handle_query(message, faiss_index, all_texts, embedding_model)
     print("Query:", message)
     print("Response:", response)
-
-    # Format the response with conversation history
-    formatted_response = "Conversation History:\n\n"
-    for entry in conversation_history:
-        formatted_response += entry + "\n"
 
-    formatted_response += "\nCurrent Response:\n" + response
-
+    total_text = response if response else "No response generated."
+
     if sources:
         print("Sources:", sources)
-        formatted_response += "\n\nSources:\n" + "\n".join(sources)
+        relevant_source = "\n".join(sources)
+        total_text += f"\n\nSources:\n{relevant_source}"
     else:
         print("Sources: None of the provided sources were used.")
+
+    print("----")
 
     # Generate a concise and relevant summary using Gemini
     prompt = "Summarize the user queries so far"
-    user_queries_summary = " ".join([entry for entry in conversation_history if entry.startswith("User: ")])
+    user_queries_summary = " ".join([msg[0] for msg in history] + [message])
     concise_response = generate_concise_response(prompt, user_queries_summary)
     print("Concise Response:")
     print(concise_response)
-
-    formatted_response += "\n\nConcise Summary:\n" + concise_response
-
-    print("----")
-
-    return formatted_response
+
+    return total_text
 
-    print("dummy here")
+# Create the Gradio interface
 iface = gr.ChatInterface(
     chatbot,
     title="LLM Research Assistant",