import os
import logging

import litellm

from graph import agent_graph

# Configure logging
logging.basicConfig(level=logging.INFO)  # Default to INFO level
logger = logging.getLogger(__name__)

# Enable LiteLLM debug logging only if the LITELLM_DEBUG environment variable is set
if os.getenv('LITELLM_DEBUG', 'false').lower() == 'true':
    litellm.set_verbose = True
    logger.setLevel(logging.DEBUG)
else:
    litellm.set_verbose = False
    logger.setLevel(logging.INFO)

class AgentRunner:
    def __init__(self):
        logger.debug("Initializing AgentRunner")
        logger.info("AgentRunner initialized.")

    def __call__(self, question: str) -> str:
        logger.debug(f"Processing question: {question[:50]}...")
        logger.info(f"Agent received question (first 50 chars): {question[:50]}...")
        try:
            # Run the graph with the question
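            # Initial state; the keys ("messages", "question", "answer") are assumed
            # to match the state schema that agent_graph in graph.py was built with.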
            result = agent_graph.invoke({
                "messages": [],
                "question": question,
                "answer": None
            })
            
            # Extract and return the answer
            answer = result["answer"]
            logger.debug(f"Successfully generated answer: {answer}")
            logger.info(f"Agent returning answer: {answer}")
            return answer
        except Exception as e:
            logger.error(f"Error in agent execution: {str(e)}", exc_info=True)
            raise
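

# Minimal usage sketch (illustrative, not part of the original module). It assumes
# agent_graph.invoke() returns a dict containing an "answer" key, as AgentRunner
# expects; the question string is only a placeholder.
if __name__ == "__main__":
    runner = AgentRunner()
    try:
        print(runner("What is the capital of France?"))
    except Exception:
        logger.exception("Agent run failed")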