# PolyMath — Gradio Space: tool-using math agent (LangGraph + Gemini observer).
# (Non-Python page-scrape residue removed from the top of this file.)
import gradio as gr
import os
import json
# NOTE(review): copies the Space secret `api_key` into OPENAI_API_KEY.
# os.getenv returns None when the secret is missing, and assigning None into
# os.environ raises TypeError — confirm the secret is always configured.
os.environ["OPENAI_API_KEY"] = os.getenv('api_key')
from google import genai
from google.genai import types
import math
# NOTE(review): this stdlib `types` import shadows `google.genai.types`
# imported just above. The code below relies on the stdlib module
# (`types.BuiltinFunctionType`), so the google.genai.types import is
# effectively unused here.
import types
import uuid
from langchain.chat_models import init_chat_model
from langchain.embeddings import init_embeddings
from langgraph.store.memory import InMemoryStore
from langgraph_bigtool import create_agent
from langgraph_bigtool.utils import (
    convert_positional_only_function_to_tool
)
# Gemini model used by the "observer"/critic call in llm_response().
MODEL_ID = "gemini-2.0-flash-exp"
# Duplicate of the `from google import genai` import above — redundant but harmless.
from google import genai
# Gemini client; reads a separate Space secret from the OpenAI key above.
client = genai.Client(api_key=os.getenv('api_g_key'))
def llm_response(text):
    """Send *text* to the configured Gemini model and return its reply text."""
    reply = client.models.generate_content(model=MODEL_ID, contents=text)
    return reply.text
# Collect the built-in functions exposed by the `math` module and wrap each
# one as a LangChain tool.
all_tools = []
for attr_name in dir(math):
    candidate = getattr(math, attr_name)
    if isinstance(candidate, types.BuiltinFunctionType):
        # `math` functions are positional-only — an idiosyncrasy of the
        # library — so they need the converter; it can return a falsy
        # result for functions it cannot wrap.
        wrapped = convert_positional_only_function_to_tool(candidate)
        if wrapped:
            all_tools.append(wrapped)

# Registry of tools: maps a fresh UUID string to each tool instance.
tool_registry = {str(uuid.uuid4()): t for t in all_tools}
# Index tool names and descriptions in a LangGraph store (simple in-memory
# store here) so the agent can look tools up by semantic similarity.
embeddings = init_embeddings("openai:text-embedding-3-small")
store = InMemoryStore(
    index={"embed": embeddings, "dims": 1536, "fields": ["description"]}
)
for identifier, registered in tool_registry.items():
    store.put(
        ("tools",),
        identifier,
        {"description": f"{registered.name}: {registered.description}"},
    )

# Initialize the tool-retrieving agent over the registry.
llm = init_chat_model("openai:gpt-4o-mini")
builder = create_agent(llm, tool_registry)
agent = builder.compile(store=store)
from langchain_core.tools import Tool
import sympy
from sympy import symbols
def make_sympy_tool(func, name, description):
    """Wrap a sympy transformation *func* as a LangChain ``Tool``.

    The resulting tool takes an expression string, parses it against a fixed
    symbol set (x y z a b c n), applies *func*, and returns ``str(result)``.
    """
    def _tool(expr: str) -> str:
        known_symbols = symbols("x y z a b c n")
        parsed = sympy.sympify(expr, locals={s.name: s for s in known_symbols})
        return str(func(parsed))

    return Tool.from_function(name=name, description=description, func=_tool)
from sympy import simplify, expand, factor
import textwrap
# Add three symbolic-algebra tools alongside the numeric `math` tools.
sympy_tools = [
    make_sympy_tool(simplify, "simplify", "Simplifies a symbolic expression"),
    make_sympy_tool(expand, "expand", "Expands a symbolic expression"),
    make_sympy_tool(factor, "factor", "Factors a symbolic expression"),
]
for extra in sympy_tools:
    key = str(uuid.uuid4())
    tool_registry[key] = extra
    store.put(
        ("tools",),
        key,
        {"description": f"{extra.name}: {extra.description}"},
    )

# Rebuild the agent so it can retrieve the newly registered sympy tools.
builder = create_agent(llm, tool_registry)
agent = builder.compile(store=store)
def pvsnp(problem):
    """Answer a math *problem* with the tool-using agent, then explain it.

    The agent is invoked once; its full message list is handed to the Gemini
    "observer" (llm_response) to produce a user-friendly explanation that
    highlights the answer and the tools used. Returns a single string with
    both the observer's explanation and the agent's raw messages.
    """
    # (A dead, commented-out streaming variant of this call was removed here.)
    output = agent.invoke({"messages": "Use tools to answer: " + problem})
    # Full message history — includes tool calls, not just the final answer.
    answer = output['messages']
    critic_answer = llm_response(
        f'''Given the problem: {problem} and the agent response: {answer}, come up with a user friendly explanation that highlights the answer along
with the tools leveraged.'''
    )
    final_answer = f"""
Observer's Response:
{critic_answer}
Agent's Raw Response: {answer}
"""
    return final_answer
# Build the Gradio UI: one text input feeding pvsnp, one text output.
iface = gr.Interface(
    fn=pvsnp,
    inputs=gr.Textbox(label="Enter a math problem or expression (e.g., integrate x^2 * sin(x), solve x^3 + 2x = 5, or simplify (x + 1)^2 - x^2)"),
    outputs=gr.Textbox(label="PolyMath’s response"),  # plain-text box (not HTML)
    title="PolyMath",
    description="PolyMath is an AI-powered math agent designed to tackle both symbolic and numeric computations with precision. It's like having a digital mathematician by your side — whether you're exploring calculus, number theory, or algebraic puzzles.",
    theme = gr.themes.Ocean(),
    # Flat list of example strings is accepted because there is a single input.
    examples = ["Simplify x*2+2x+1", "Solve x^2 - 4x + 3 = 0", "Integrate x * e^x dx" ]
)
# Launch the app
iface.launch()