import gradio as gr
from rag_engine import RAGEngine
import os
import logging
import traceback
import asyncio

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

def safe_search(query, max_results):
    """Wrapper function to handle errors gracefully"""
    try:
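        # RAGEngine comes from rag_engine.py and is assumed to expose an async
        # search_and_process(query, max_results) coroutine. A fresh engine is
        # created per request; if construction loads heavy models, a shared
        # module-level instance would avoid repeating that cost.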
        rag = RAGEngine()
        results = asyncio.run(rag.search_and_process(query, max_results))
        return format_results(results)
    except Exception as e:
        error_msg = f"An error occurred: {str(e)}\n\nTraceback:\n{traceback.format_exc()}"
        logger.error(error_msg)
        return f"# ❌ Error\nSorry, an error occurred while processing your search:\n```\n{str(e)}\n```"

def format_results(results):
    """Format search results for display"""
    if not results:
        return "# ⚠️ No Results\nNo search results were found. Please try a different query."
    
    formatted = f"# πŸ” Search Results\n\n"
    
    # Add insights section
    if 'insights' in results:
        formatted += f"## πŸ’‘ Key Insights\n{results['insights']}\n\n"
    
    # Add follow-up questions
    if 'follow_up_questions' in results:
        formatted += "## ❓ Follow-up Questions\n"
        for q in results['follow_up_questions']:
            if q and q.strip():
                formatted += f"- {q.strip()}\n"
        formatted += "\n"
    
    # Add main results
    if 'results' in results:
        formatted += "## πŸ“„ Detailed Results\n\n"
        for i, result in enumerate(results['results'], 1):
            formatted += f"### {i}. "
            if 'url' in result:
                formatted += f"[{result.get('title', 'Untitled')}]({result['url']})\n"
            else:
                formatted += f"{result.get('title', 'Untitled')}\n"
                
            if result.get('processed_content'):
                content = result['processed_content']
                if 'summary' in content:
                    formatted += f"**Summary:** {content['summary']}\n\n"
                if content.get('metadata', {}).get('description'):
                    formatted += f"**Description:** {content['metadata']['description']}\n\n"
                if content.get('content_type') == 'code':
                    formatted += f"**Code Analysis:** {content.get('explanation', '')}\n\n"
                else:
                    formatted += f"**Detailed Explanation:** {content.get('explanation', '')}\n\n"
            
            if 'snippet' in result:
                formatted += f"**Snippet:** {result['snippet']}\n\n"
            formatted += "---\n\n"
    
    # Add similar queries if available
    if results.get('similar_queries'):
        formatted += "## πŸ”„ Related Searches\n"
        for query in results['similar_queries']:
            if isinstance(query, dict) and 'query' in query:
                formatted += f"- {query['query']}\n"
            elif isinstance(query, str):
                formatted += f"- {query}\n"
    
    return formatted

def create_demo():
    """Create the Gradio interface"""
    
    # Create cache directory
    os.makedirs(".cache", exist_ok=True)
    
    demo = gr.Blocks(
        title="AI-Powered Search Engine",
        css="""
        .gradio-container {max-width: 1200px !important}
        .markdown-text {font-size: 16px !important}
        """
    )
    
    with demo:
        gr.Markdown("""
        # 🔍 Intelligent Web Search Engine
        
        This advanced search engine uses AI to provide deep understanding of search results:
        - 🧠 Multi-model AI analysis
        - 📊 Semantic search and caching
        - 💡 Automatic insights generation
        - ❓ Smart follow-up questions
        - 🔄 Related searches
        """)
        
        with gr.Row():
            with gr.Column():
                query = gr.Textbox(
                    label="Search Query",
                    placeholder="Enter your search query...",
                    lines=2
                )
                max_results = gr.Slider(
                    minimum=3,
                    maximum=10,
                    value=5,
                    step=1,
                    label="Maximum Results"
                )
                search_btn = gr.Button("πŸ” Search", variant="primary")
            
            with gr.Column():
                output = gr.Markdown(
                    label="Results",
                    show_label=False
                )
            
        search_btn.click(
            fn=safe_search,
            inputs=[query, max_results],
            outputs=output
        )
        
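        # cache_examples=True makes Gradio run safe_search on each example at
        # startup and cache the outputs; this assumes RAGEngine (and any network
        # access it needs) is available at build time. Set it to False otherwise.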
        gr.Examples(
            examples=[
                ["What are the latest developments in quantum computing?", 5],
                ["How does Python's asyncio work? Show code examples", 5],
                ["Explain the transformer architecture in deep learning", 5],
                ["What are the environmental impacts of renewable energy?", 5]
            ],
            inputs=[query, max_results],
            outputs=output,
            fn=safe_search,
            cache_examples=True
        )
    
    return demo

# Create the demo
demo = create_demo()

# Launch for Spaces
demo.launch()