import gradio as gr
from rag_engine import RAGEngine
import torch
import os
import logging
import traceback

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


def safe_search(query, max_results):
    """Wrapper function to handle errors gracefully"""
    try:
        rag = RAGEngine()
        results = rag.search_and_process(query, max_results)
        if 'error' in results:
            return f"# ❌ Error\nSorry, an error occurred while processing your search:\n```\n{results['error']}\n```"
        return format_results(results)
    except Exception as e:
        error_msg = f"An error occurred: {str(e)}\n\nTraceback:\n{traceback.format_exc()}"
        logger.error(error_msg)
        return f"# ❌ Error\nSorry, an error occurred while processing your search:\n```\n{str(e)}\n```"


def format_results(results):
    """Format search results for display"""
    if not results or not results.get('results'):
        return "# ⚠️ No Results\nNo search results were found. Please try a different query."

    insights = results.get('insights', {})
    output = []

    # Main Summary
    output.append("📝 Executive Summary")
    output.append("-" * 50)
    output.append(insights.get('main_summary', ''))
    output.append("\n")

    # Key Findings
    output.append("🔑 Key Findings")
    output.append("-" * 50)
    for i, point in enumerate(insights.get('key_findings', []), 1):
        output.append(f"{i}. {point}")
    output.append("\n")

    # Sources
    output.append("📚 Sources")
    output.append("-" * 50)
    for source in insights.get('sources', []):
        output.append(f"• {source.get('title', '')}")
        output.append(f"  {source.get('url', '')}")
    output.append("\n")

    # Follow-up Questions
    output.append("❓ Suggested Questions")
    output.append("-" * 50)
    for question in results.get('follow_up_questions', []):
        output.append(f"• {question}")

    # Add main results
    if 'results' in results:
        output.append("\n")
        output.append("📄 Detailed Results")
        output.append("-" * 50)
        output.append("\n")
        for i, result in enumerate(results['results'], 1):
            if not isinstance(result, dict):
                continue
            output.append(f"### {i}. ")
            if 'url' in result:
                title = result.get('title', 'Untitled')
                output.append(f"[{title}]({result['url']})\n")
            if 'summary' in result:
                output.append(f"\n{result['summary']}\n\n")

    # Add similar chunks if available
    if 'similar_chunks' in results:
        output.append("\n")
        output.append("🔍 Related Content")
        output.append("-" * 50)
        output.append("\n")
        for i, chunk in enumerate(results['similar_chunks'], 1):
            if not isinstance(chunk, dict):
                continue
            output.append(f"### Related {i}\n")
            if 'metadata' in chunk:
                meta = chunk['metadata']
                if 'title' in meta and 'url' in meta:
                    output.append(f"From [{meta['title']}]({meta['url']})\n")
            if 'content' in chunk:
                output.append(f"\n{chunk['content'][:200]}...\n\n")

    return "\n".join(output)


def create_demo():
    """Create the Gradio interface"""
    with gr.Blocks(title="Web Search + RAG") as demo:
        gr.Markdown("# 🔍 Intelligent Web Search")
        gr.Markdown("Search the web with AI-powered insights and analysis.")

        with gr.Row():
            with gr.Column():
                query = gr.Textbox(
                    label="Search Query",
                    placeholder="Enter your search query...",
                    lines=2
                )
                max_results = gr.Slider(
                    minimum=1,
                    maximum=10,
                    value=5,
                    step=1,
                    label="Number of Results"
                )
                search_button = gr.Button("🔍 Search")

        output = gr.Textbox(
            label="Search Results",
            lines=20
        )

        search_button.click(
            fn=safe_search,
            inputs=[query, max_results],
            outputs=output
        )

        gr.Examples(
            examples=[
                ["What is RAG in AI?", 5],
                ["Latest developments in quantum computing", 3],
                ["How does BERT work?", 5]
            ],
            inputs=[query, max_results]
        )

    return demo


# Create the demo
demo = create_demo()

# Launch for Spaces
demo.launch()