Spaces:
Runtime error
Runtime error
Delete GenAI_1.ipynb
Browse files- GenAI_1.ipynb +0 -315
GenAI_1.ipynb
DELETED
@@ -1,315 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"cells": [
|
3 |
-
{
|
4 |
-
"cell_type": "code",
|
5 |
-
"execution_count": 2,
|
6 |
-
"id": "9633aea7-5c45-44f9-a78b-b5bc39984754",
|
7 |
-
"metadata": {},
|
8 |
-
"outputs": [],
|
9 |
-
"source": [
|
10 |
-
# Imports for the RAG chatbot: Gemini chat/embedding models, prompt and
# chain helpers, PDF loading, text splitting, FAISS vector store, and
# the Gradio UI layer.
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

import os

import google.generativeai as genai
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import FAISS
import gradio as gr

# SECURITY: a Google API key was previously hard-coded on this line and
# committed to version control.  That key must be treated as compromised
# and revoked in the Google Cloud console.  Read the key from the
# environment (or Colab secrets) instead of embedding it in source.
if "MY_SECRET_KEY" not in os.environ:
    raise EnvironmentError(
        "MY_SECRET_KEY is not set. Export your Google API key before running."
    )
|
25 |
-
]
|
26 |
-
},
|
27 |
-
{
|
28 |
-
"cell_type": "code",
|
29 |
-
"execution_count": 3,
|
30 |
-
"id": "41abde7b-366d-427e-8938-35ce7a4ed778",
|
31 |
-
"metadata": {},
|
32 |
-
"outputs": [],
|
33 |
-
"source": [
|
34 |
-
"#pip install pypdf\n",
|
35 |
-
"#!pip install faiss-cpu"
|
36 |
-
]
|
37 |
-
},
|
38 |
-
{
|
39 |
-
"cell_type": "code",
|
40 |
-
"execution_count": 4,
|
41 |
-
"id": "b7e3810f-c5fb-44d7-b4b7-a30ac507d78b",
|
42 |
-
"metadata": {},
|
43 |
-
"outputs": [],
|
44 |
-
"source": [
|
45 |
-
# Configure the Gemini SDK and construct the chat model.
#
# Bug fix: the original printed an error when the key was missing but then
# fell through and called genai.configure(api_key=None) and built the model
# anyway.  All client setup now happens only when the key is present.
google_api_key = os.environ["MY_SECRET_KEY"]

if google_api_key:
    # Make the key visible under the name the LangChain wrapper expects.
    os.environ["GOOGLE_API_KEY"] = google_api_key

    # LangChain chat wrapper around the Gemini model.
    llm = ChatGoogleGenerativeAI(
        model="gemini-pro",  # Specify the model name
        google_api_key=os.environ["GOOGLE_API_KEY"],
    )

    # Native google-generativeai client for direct (non-LangChain) calls.
    genai.configure(api_key=google_api_key)
    model = genai.GenerativeModel("gemini-pro")
else:
    print("Error: GOOGLE_API_KEY not found in Colab secrets. Please store your API key.")
|
63 |
-
]
|
64 |
-
},
|
65 |
-
{
|
66 |
-
"cell_type": "code",
|
67 |
-
"execution_count": 5,
|
68 |
-
"id": "ef330936-8c45-4aff-b2cf-fe9dfaaf2764",
|
69 |
-
"metadata": {},
|
70 |
-
"outputs": [],
|
71 |
-
"source": [
|
72 |
-
"work_dir=os.getcwd()"
|
73 |
-
]
|
74 |
-
},
|
75 |
-
{
|
76 |
-
"cell_type": "code",
|
77 |
-
"execution_count": 6,
|
78 |
-
"id": "a55af811-7758-4090-a5f8-748b6192971b",
|
79 |
-
"metadata": {},
|
80 |
-
"outputs": [
|
81 |
-
{
|
82 |
-
"name": "stdout",
|
83 |
-
"output_type": "stream",
|
84 |
-
"text": [
|
85 |
-
"Current Working Directory: /Users/saurabhverma/GENAI\n"
|
86 |
-
]
|
87 |
-
}
|
88 |
-
],
|
89 |
-
"source": [
|
90 |
-
"# Verify file existence\n",
|
91 |
-
"assert \"Team1.pdf\" in os.listdir(work_dir), \"Team1.pdf not found in the specified directory!\"\n",
|
92 |
-
"print(f\"Current Working Directory: {os.getcwd()}\")"
|
93 |
-
]
|
94 |
-
},
|
95 |
-
{
|
96 |
-
"cell_type": "code",
|
97 |
-
"execution_count": 7,
|
98 |
-
"id": "7a0a4457-2f9c-40db-9dd4-d57e3edf1fd0",
|
99 |
-
"metadata": {},
|
100 |
-
"outputs": [],
|
101 |
-
"source": [
|
102 |
-
"# Load PDF and split text\n",
|
103 |
-
"pdf_path = \"Team1.pdf\" # Ensure this file is uploaded to Colab\n",
|
104 |
-
"loader = PyPDFLoader(pdf_path)\n",
|
105 |
-
"documents = loader.load()\n",
|
106 |
-
"\n",
|
107 |
-
"# Split text into chunks\n",
|
108 |
-
"text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=10)\n",
|
109 |
-
"text_chunks = text_splitter.split_documents(documents)"
|
110 |
-
]
|
111 |
-
},
|
112 |
-
{
|
113 |
-
"cell_type": "code",
|
114 |
-
"execution_count": 8,
|
115 |
-
"id": "b5387499-a756-49de-86b0-96a5ce712ba7",
|
116 |
-
"metadata": {},
|
117 |
-
"outputs": [],
|
118 |
-
"source": [
|
119 |
-
"# Generate embeddings\n",
|
120 |
-
"embeddings = GoogleGenerativeAIEmbeddings(model=\"models/embedding-001\")\n",
|
121 |
-
"\n",
|
122 |
-
"# Store embeddings in FAISS index\n",
|
123 |
-
"vectorstore = FAISS.from_documents(text_chunks, embeddings)\n",
|
124 |
-
"retriever = vectorstore.as_retriever(search_kwargs={\"k\": 4})"
|
125 |
-
]
|
126 |
-
},
|
127 |
-
{
|
128 |
-
"cell_type": "code",
|
129 |
-
"execution_count": 9,
|
130 |
-
"id": "35554163-75cd-4f0b-a538-565a48700245",
|
131 |
-
"metadata": {},
|
132 |
-
"outputs": [],
|
133 |
-
"source": [
|
134 |
-
"# Set up Gemini model\n",
|
135 |
-
"llm = ChatGoogleGenerativeAI(model=\"gemini-2.0-flash-001\", temperature=0)\n",
|
136 |
-
"\n"
|
137 |
-
]
|
138 |
-
},
|
139 |
-
{
|
140 |
-
"cell_type": "code",
|
141 |
-
"execution_count": 10,
|
142 |
-
"id": "e95b424b-11c1-46f3-9b4e-9e2d42d1f05d",
|
143 |
-
"metadata": {},
|
144 |
-
"outputs": [],
|
145 |
-
"source": [
|
146 |
-
"import gradio as gr\n",
|
147 |
-
"from langchain.prompts import PromptTemplate\n",
|
148 |
-
"from langchain.chains import LLMChain\n",
|
149 |
-
"\n",
|
150 |
-
"def rag_query(query):\n",
|
151 |
-
" # Retrieve relevant documents\n",
|
152 |
-
" docs = retriever.get_relevant_documents(query)\n",
|
153 |
-
" \n",
|
154 |
-
" # Otherwise, use RAG\n",
|
155 |
-
" context = \"\\n\".join([doc.page_content for doc in docs])\n",
|
156 |
-
" prompt = f\"Context:\\n{context}\\n\\nQuestion: {query}\\nAnswer directly and concisely:\"\n",
|
157 |
-
"\n",
|
158 |
-
" try:\n",
|
159 |
-
" response = llm.invoke(prompt)\n",
|
160 |
-
" except Exception as e:\n",
|
161 |
-
" response = f\"Error in RAG processing: {str(e)}\"\n",
|
162 |
-
"\n",
|
163 |
-
" return response.content\n",
|
164 |
-
"\n",
|
165 |
-
"\n"
|
166 |
-
]
|
167 |
-
},
|
168 |
-
{
|
169 |
-
"cell_type": "code",
|
170 |
-
"execution_count": 11,
|
171 |
-
"id": "552ff2fa-3c70-4054-803e-633efc7601f4",
|
172 |
-
"metadata": {},
|
173 |
-
"outputs": [],
|
174 |
-
"source": [
|
175 |
-
import gradio as gr
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain_google_genai import ChatGoogleGenerativeAI

# Build the chat model once at module level instead of per call.
llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)

def general_query(query):
    """Answer a free-form question with the LLM (no document retrieval).

    Returns the model's answer as a plain string, or an error message
    string if anything in the chain fails.
    """
    try:
        # Template -> chain -> run; chain.run returns the response text
        # directly (no .content attribute access needed).
        template = PromptTemplate.from_template("Answer the following query: {query}")
        qa_chain = LLMChain(llm=llm, prompt=template)
        return qa_chain.run(query=query)
    except Exception as e:
        return f"Error: {str(e)}"
|
199 |
-
"\n"
|
200 |
-
]
|
201 |
-
},
|
202 |
-
{
|
203 |
-
"cell_type": "code",
|
204 |
-
"execution_count": 12,
|
205 |
-
"id": "ab63a509-e927-405a-985b-d07039e05e9f",
|
206 |
-
"metadata": {},
|
207 |
-
"outputs": [
|
208 |
-
{
|
209 |
-
"name": "stdout",
|
210 |
-
"output_type": "stream",
|
211 |
-
"text": [
|
212 |
-
"* Running on local URL: http://127.0.0.1:7860\n",
|
213 |
-
"* Running on public URL: https://efeff91c52754b11ed.gradio.live\n",
|
214 |
-
"\n",
|
215 |
-
"This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from the terminal in the working directory to deploy to Hugging Face Spaces (https://huggingface.co/spaces)\n"
|
216 |
-
]
|
217 |
-
},
|
218 |
-
{
|
219 |
-
"data": {
|
220 |
-
"text/html": [
|
221 |
-
"<div><iframe src=\"https://efeff91c52754b11ed.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
|
222 |
-
],
|
223 |
-
"text/plain": [
|
224 |
-
"<IPython.core.display.HTML object>"
|
225 |
-
]
|
226 |
-
},
|
227 |
-
"metadata": {},
|
228 |
-
"output_type": "display_data"
|
229 |
-
},
|
230 |
-
{
|
231 |
-
"data": {
|
232 |
-
"text/plain": []
|
233 |
-
},
|
234 |
-
"execution_count": 12,
|
235 |
-
"metadata": {},
|
236 |
-
"output_type": "execute_result"
|
237 |
-
}
|
238 |
-
],
|
239 |
-
"source": [
|
240 |
-
import gradio as gr


def query_router(query, method):
    """Dispatch a query to the RAG pipeline or the general LLM path
    based on the dropdown selection."""
    handlers = {
        "Team Query": rag_query,       # must match the dropdown labels exactly
        "General Query": general_query,
    }
    handler = handlers.get(method)
    if handler is None:
        return "Invalid selection!"
    return handler(query)


# Local logo asset shown at the top of the page.
logo_path = "equinix-sign.jpg"  # Ensure this file exists

# Page-level styling: light grey background, centred content, capped logo size.
custom_css = """
.gradio-container {
    background-color: #f0f0f0;
    text-align: center;
}
#logo img {
    display: block;
    margin: 0 auto;
    max-width: 200px; /* Adjust size */
}
"""

# Assemble the chat UI.
with gr.Blocks(css=custom_css) as ui:
    # Logo banner.
    gr.Image(logo_path, elem_id="logo", show_label=False, height=100, width=200)

    # Title & description.
    gr.Markdown("<h1 style='text-align: center; color: black;'>Equinix Chatbot for Automation Team</h1>")
    gr.Markdown("<p style='text-align: center; color: black;'>Ask me anything!</p>")

    # Query input alongside the query-type selector.
    with gr.Row():
        query_input = gr.Textbox(label="Enter your query")
        query_method = gr.Dropdown(["Team Query", "General Query"], label="Select Query Type")

    submit_button = gr.Button("Submit")
    output_box = gr.Textbox(label="Response", interactive=False)

    # Wire the button to the router.
    submit_button.click(query_router, inputs=[query_input, query_method], outputs=output_box)

# Launch with a temporary public share link.
ui.launch(share=True)
|
291 |
-
]
|
292 |
-
}
|
293 |
-
],
|
294 |
-
"metadata": {
|
295 |
-
"kernelspec": {
|
296 |
-
"display_name": "Python 3 (ipykernel)",
|
297 |
-
"language": "python",
|
298 |
-
"name": "python3"
|
299 |
-
},
|
300 |
-
"language_info": {
|
301 |
-
"codemirror_mode": {
|
302 |
-
"name": "ipython",
|
303 |
-
"version": 3
|
304 |
-
},
|
305 |
-
"file_extension": ".py",
|
306 |
-
"mimetype": "text/x-python",
|
307 |
-
"name": "python",
|
308 |
-
"nbconvert_exporter": "python",
|
309 |
-
"pygments_lexer": "ipython3",
|
310 |
-
"version": "3.12.4"
|
311 |
-
}
|
312 |
-
},
|
313 |
-
"nbformat": 4,
|
314 |
-
"nbformat_minor": 5
|
315 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|