import gradio as gr import os import json os.environ["OPENAI_API_KEY"] = os.getenv('api_key') from google import genai from google.genai import types import math import types import uuid from langchain.chat_models import init_chat_model from langchain.embeddings import init_embeddings from langgraph.store.memory import InMemoryStore from langgraph_bigtool import create_agent from langgraph_bigtool.utils import ( convert_positional_only_function_to_tool ) MODEL_ID = "gemini-2.0-flash-exp" from google import genai client = genai.Client(api_key=os.getenv('api_g_key')) def llm_response(text): response = client.models.generate_content( model=MODEL_ID, contents= text) return response.text # Collect functions from `math` built-in all_tools = [] for function_name in dir(math): function = getattr(math, function_name) if not isinstance( function, types.BuiltinFunctionType ): continue # This is an idiosyncrasy of the `math` library if tool := convert_positional_only_function_to_tool( function ): all_tools.append(tool) # Create registry of tools. This is a dict mapping # identifiers to tool instances. tool_registry = { str(uuid.uuid4()): tool for tool in all_tools } # Index tool names and descriptions in the LangGraph # Store. Here we use a simple in-memory store. embeddings = init_embeddings("openai:text-embedding-3-small") store = InMemoryStore( index={ "embed": embeddings, "dims": 1536, "fields": ["description"], } ) for tool_id, tool in tool_registry.items(): store.put( ("tools",), tool_id, { "description": f"{tool.name}: {tool.description}", }, ) # Initialize agent llm = init_chat_model("openai:gpt-4o-mini") builder = create_agent(llm, tool_registry) agent = builder.compile(store=store) from langchain_core.tools import Tool import sympy from sympy import symbols def make_sympy_tool(func, name, description): def _tool(expr: str) -> str: local_symbols = symbols("x y z a b c n") parsed_expr = sympy.sympify(expr, locals={s.name: s for s in local_symbols}) result = func(parsed_expr) return str(result) return Tool.from_function( name=name, description=description, func=_tool ) from sympy import simplify, expand, factor import textwrap sympy_tools = [ make_sympy_tool(simplify, "simplify", "Simplifies a symbolic expression"), make_sympy_tool(expand, "expand", "Expands a symbolic expression"), make_sympy_tool(factor, "factor", "Factors a symbolic expression"), ] for tool in sympy_tools: tool_id = str(uuid.uuid4()) tool_registry[tool_id] = tool store.put( ("tools",), tool_id, {"description": f"{tool.name}: {tool.description}"}, ) builder = create_agent(llm, tool_registry) agent = builder.compile(store=store) def pvsnp(problem): '''output = [] for step in agent.stream( {"messages": "Use tools to answer:"+problem}, stream_mode="updates", ): for _, update in step.items(): for message in update.get("messages", []): message.pretty_print() output.append(message.pretty_print()) print (output)''' output = agent.invoke({"messages": "Use tools to answer: "+problem}) answer = output['messages'] critic_answer = llm_response(f'''Given the problem: {problem} and the agent response: {answer}, come up with a user friendly explanation that highlights the answer along with the tools leveraged.''') final_answer = f""" Observer's Response: {critic_answer} Agent's Raw Response: {answer} """ return final_answer iface = gr.Interface( fn=pvsnp, inputs=gr.Textbox(label="Enter a math problem or expression (e.g., integrate x^2 * sin(x), solve x^3 + 2x = 5, or simplify (x + 1)^2 - x^2)"), outputs=gr.Textbox(label="PolyMath’s response"), # Output as HTML title="PolyMath", description="PolyMath is an AI-powered math agent designed to tackle both symbolic and numeric computations with precision. It's like having a digital mathematician by your side — whether you're exploring calculus, number theory, or algebraic puzzles.", theme = gr.themes.Ocean(), examples = ["Simplify x*2+2x+1", "Solve x^2 - 4x + 3 = 0", "Integrate x * e^x dx" ] ) # Launch the app iface.launch()