attempting to add logging
app.py CHANGED
@@ -50,10 +50,12 @@ dataset = load_dataset('tosin2013/autogen', streaming=True)
 dataset = Dataset.from_list(list(dataset['train']))
 
 # Initialize embeddings
+print("[EMBEDDINGS] Loading sentence-transformers model...")
 embeddings = HuggingFaceEmbeddings(
     model_name="sentence-transformers/all-MiniLM-L6-v2",
     model_kwargs={"device": "cpu"}
 )
+print("[EMBEDDINGS] Sentence-transformers model loaded successfully")
 
 # Extract texts from the dataset
 texts = dataset['input']
@@ -61,7 +63,9 @@ texts = dataset['input']
 # Create and cache embeddings for the texts
 if not os.path.exists('embeddings.npy'):
     print("[LOG] Generating embeddings...")
+    print("[EMBEDDINGS] Generating document embeddings...")
     text_embeddings = embeddings.embed_documents(texts)
+    print(f"[EMBEDDINGS] Generated embeddings for {len(texts)} documents")
     np.save('embeddings.npy', text_embeddings)
 else:
     print("[LOG] Loading cached embeddings...")
@@ -88,7 +92,9 @@ def get_relevant_documents(query, k=5):
     import time
     start_time = time.time()
 
+    print("[EMBEDDINGS] Generating embedding for query...")
     query_embedding = embeddings.embed_query(query)
+    print("[EMBEDDINGS] Query embedding generated successfully")
     distances, indices = nn.kneighbors([query_embedding], n_neighbors=k)
     relevant_docs = [texts[i] for i in indices[0]]
 
@@ -290,6 +296,7 @@ Provide the AutoGen v0.4 agent code that fulfills the user's request. Utilize fe
 
 
 # Create Gradio interface
+print("[CHAT] Initializing chat interface...")
 with gr.Blocks() as demo:
     gr.Markdown(f"""
     ## AutoGen v0.4 Agent Code Generator QA Agent
@@ -327,12 +334,14 @@ with gr.Blocks() as demo:
         outputs=[chatbot],
         queue=True
     )
+    print("[CHAT] Submit button handler configured")
 
     clear_btn.click(
         lambda: (None, ""),
        inputs=[],
        outputs=[chatbot, question]
    )
+    print("[CHAT] Clear button handler configured")
 
 import socket
 
@@ -350,14 +359,7 @@ if __name__ == "__main__":
     try:
         port = find_available_port()
         print(f"[LOG] Launching application on port {port}")
+        print("[CHAT] Starting chat server...")
         demo.launch()
-        # Verify server is actually running
-        import time
-        time.sleep(2)  # Give server time to start
-        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
-            if s.connect_ex(('localhost', port)) == 0:
-                print(f"[SUCCESS] Server is running on port {port}")
-            else:
-                print(f"[ERROR] Failed to bind to port {port}")
     except Exception as e:
         print(f"[ERROR] Failed to start application: {str(e)}")
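
Note on the logging approach: this commit adds logging as bracketed print() statements ([LOG], [EMBEDDINGS], [CHAT]). Below is a minimal sketch of routing the same messages through Python's standard logging module instead; the logger name, format string, and LOG_LEVEL environment variable are illustrative assumptions and not part of the commit.

import logging
import os

# Illustrative configuration; level and format are assumptions, not from the commit.
logging.basicConfig(
    level=os.environ.get("LOG_LEVEL", "INFO").upper(),
    format="%(asctime)s %(levelname)s %(message)s",
)
log = logging.getLogger("app")

# Equivalents of the print() calls added in this commit:
log.info("[EMBEDDINGS] Loading sentence-transformers model...")
log.info("[EMBEDDINGS] Sentence-transformers model loaded successfully")
log.info("[CHAT] Initializing chat interface...")
log.info("[CHAT] Starting chat server...")

Compared with bare print(), this keeps timestamps and severity levels consistent and lets the Space raise or lower verbosity through an environment variable without editing the code.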