import gradio as gr
from openai import OpenAI
import os
import json
from urllib.parse import quote
import html

# OpenAI-compatible client; both env vars are required and a missing one
# fails fast at startup with a KeyError.
client = OpenAI(
    api_key=os.environ["OPENAI_API_KEY"],
    base_url=os.environ["OPENAI_BASE_URL"],
)

# Pagination constants
RESULTS_PER_PAGE = 10
TOTAL_RESULTS = 30  # Generate 30 results to allow pagination


def fetch_search_results(query):
    """Ask the LLM to generate fabricated search results for *query*.

    Returns:
        tuple: ``(results, None)`` on success, where ``results`` is a list of
        dicts with ``'title'``, ``'snippet'`` and ``'url'`` keys, or
        ``(None, error_message)`` on any failure (empty query, bad JSON,
        API error).
    """
    if not query.strip():
        return None, "Please enter a search query."

    prompt = f"""
You are a search engine that provides informative and relevant results. For the given query '{query}', generate {TOTAL_RESULTS} search results. Each result should include:
- 'title': A concise, descriptive title of the result.
- 'snippet': A short summary (2-3 sentences) of the content.
- 'url': A plausible, clickable URL where the information might be found (e.g., a real or hypothetical website).
Format the response as a JSON array of objects, where each object has 'title', 'snippet', and 'url' fields. Ensure the results are diverse, relevant to the query, and the URLs are realistic (e.g., https://example.com/page).
"""
    try:
        response = client.chat.completions.create(
            model="gemini-2.0-flash-lite",
            messages=[
                {"role": "system", "content": "You are a helpful search engine."},
                {"role": "user", "content": prompt},
            ],
            # json_object mode nudges the model toward parseable output; the
            # prompt asks for an array, so both shapes are accepted below.
            response_format={"type": "json_object"},
        )
        content = response.choices[0].message.content
        results = json.loads(content)
        # The model may wrap the array in an object ({"results": [...]}) or
        # return the bare array; accept both, reject anything else.
        if isinstance(results, dict) and "results" in results:
            results = results["results"]
        elif isinstance(results, list):
            pass
        else:
            return None, "Error: Unexpected JSON structure."
        return results, None
    except json.JSONDecodeError:
        # Fixed: previously fell into the generic handler with a raw message.
        return None, "Error: Model returned invalid JSON."
    except Exception as e:
        # Map the most common HTTP failures to actionable hints; the SDK
        # embeds the status code in the exception text.
        error_msg = str(e)
        if "404" in error_msg:
            return None, (
                f"Error 404: Model or endpoint not found. Check OPENAI_BASE_URL "
                f"({os.environ['OPENAI_BASE_URL']}) and model name."
            )
        elif "401" in error_msg:
            return None, "Error 401: Invalid API key. Check OPENAI_API_KEY."
        else:
            return None, f"Error: {error_msg}"


def _page_shell(body_html):
    """Wrap *body_html* in a standalone search-page layout.

    NOTE(review): the original HTML markup was lost in extraction; this is a
    reconstruction of a plain Google-style results page from the surviving
    text/placeholders — confirm against the intended design.
    """
    return f"""<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>LLM Search Engine</title>
<style>
body {{ font-family: Arial, sans-serif; margin: 20px; }}
.header {{ font-size: 24px; font-weight: bold; margin-bottom: 16px; }}
.result {{ margin-bottom: 20px; }}
.result a.title {{ font-size: 18px; color: #1a0dab; text-decoration: none; }}
.result .url {{ color: #006621; font-size: 14px; }}
.result .snippet {{ color: #545454; font-size: 14px; margin: 4px 0 0 0; }}
.pagination {{ margin-top: 24px; }}
.pagination span {{ color: #999; margin-right: 12px; }}
.pagination a {{ margin-right: 12px; }}
</style>
</head>
<body>
<div class="header">LLM Search Engine</div>
{body_html}
</body>
</html>"""


def generate_search_page(query, page=1):
    """Generate a full HTML search results page.

    Args:
        query: The user's search query; blank queries get a prompt message.
        page: 1-based page number into the fetched result list.

    Returns:
        str: A complete standalone HTML document.
    """
    if not query.strip():
        return _page_shell("<p>Please enter a search query.</p>")

    results, error = fetch_search_results(query)
    if error:
        # Fixed: escape the error text — it can echo model/exception output
        # and must not be injected into the page as raw HTML.
        return _page_shell(f"<p>{html.escape(error)}</p>")

    # Pagination boundaries (total_pages is a ceiling division).
    start_idx = (page - 1) * RESULTS_PER_PAGE
    end_idx = start_idx + RESULTS_PER_PAGE
    total_pages = (len(results) + RESULTS_PER_PAGE - 1) // RESULTS_PER_PAGE

    if start_idx >= len(results):
        return _page_shell("<p>No more results to display.</p>")

    paginated_results = results[start_idx:end_idx]

    # Build the body as a list of fragments and join once (avoids the
    # quadratic `html_content += ...` pattern of the original).
    parts = [
        f"<p>Results for '{html.escape(query)}' (Page {page} of {total_pages})</p>"
    ]
    for result in paginated_results:
        title = html.escape(result.get("title", "No title"))
        snippet = html.escape(result.get("snippet", "No snippet"))
        url = html.escape(result.get("url", "#"))
        parts.append(
            f'<div class="result">'
            f'<a class="title" href="{url}">{title}</a><br>'
            f'<span class="url">{url}</span>'
            f'<p class="snippet">{snippet}</p>'
            f"</div>"
        )

    # Pagination links; the query must be URL-encoded for the href.
    encoded_query = quote(query)
    prev_link = (
        f'<a href="?query={encoded_query}&page={page - 1}">Previous</a>'
        if page > 1
        else "<span>Previous</span>"
    )
    next_link = (
        f'<a href="?query={encoded_query}&page={page + 1}">Next</a>'
        if page < total_pages
        else "<span>Next</span>"
    )
    parts.append(f'<div class="pagination">{prev_link} {next_link}</div>')

    return _page_shell("".join(parts))


# Define the Gradio app with Blocks
with gr.Blocks(title="LLM Search Engine") as app:
    output_html = gr.HTML()

    def update_page(query, page):
        """Coerce *page* from the URL to a positive int and render the page."""
        try:
            page = int(page)
        except (ValueError, TypeError):
            page = 1
        # Fixed: clamp to >= 1 — a crafted ?page=0 or negative value produced
        # a negative start index and a wrong slice in the original.
        page = max(page, 1)
        return generate_search_page(query, page)

    # Initial load pulls ?query= and ?page= from the browser URL via JS.
    # NOTE(review): `_js` is the Gradio 3.x name; Gradio 4+ renamed it to
    # `js` — confirm against the pinned gradio version.
    app.load(
        fn=update_page,
        inputs=None,
        outputs=output_html,
        _js="() => [new URLSearchParams(window.location.search).get('query') || '', new URLSearchParams(window.location.search).get('page') || '1']",
    )

app.launch()