File size: 2,372 Bytes
ce0ec3b
 
 
de58e79
ce0ec3b
 
 
 
 
013068f
775ea0b
8fca8f3
 
 
 
 
 
 
013068f
 
ce0ec3b
013068f
ce0ec3b
 
 
 
013068f
ce0ec3b
 
 
05a3ce6
c3b461c
 
f709e28
1841510
8fca8f3
ce0ec3b
d672483
92894aa
ce0ec3b
c3b461c
0e9bb01
 
 
 
 
013068f
ce0ec3b
7cc9db2
ce0ec3b
 
 
652f14d
 
 
775ea0b
033ead0
f709e28
 
 
 
 
 
 
 
 
 
033ead0
f709e28
652f14d
033ead0
775ea0b
ce0ec3b
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
#!/usr/bin/env python3
"""
Lyrics Analyzer Agent - Main Entry Point

This module serves as the entry point for the Lyrics Analyzer application, which
uses a system of specialized agents to search for and analyze song lyrics.  
"""
from loguru import logger
from smolagents import LiteLLMModel

from agents.single_agent import create_single_agent
from config import (
    get_model_id,
    get_ollama_api_base,
    load_api_keys,
    setup_logger,
)
from Gradio_UI import GradioUI


def main():
    """
    Initialize and run the Lyrics Analyzer Agent.

    Sets up logging, loads API keys from the environment, builds the
    LiteLLM model for the configured provider, creates the single agent,
    and launches the Gradio UI server.
    """
    # Set up logging and load API keys before anything else needs them.
    setup_logger()
    load_api_keys()

    # Feature flags: run against a local Ollama LLM and/or bind the UI to
    # localhost. Both are disabled for the current (hosted) configuration.
    use_local_llm = False
    use_localhost = False

    # Resolve the model identifier for the configured provider.
    model_id = get_model_id(provider='openrouter')
    logger.info(f"Initializing with model: {model_id}")

    if use_local_llm:
        # Ollama requires an explicit API base URL.
        api_base = get_ollama_api_base()
        logger.info(f"Using Ollama API base: {api_base}")
        model = LiteLLMModel(model_id=model_id, api_base=api_base)
    else:
        model = LiteLLMModel(model_id=model_id)

    # Create the single agent which performs the search and analysis work.
    single_agent = create_single_agent(model, analysis_tool_model_id=model_id)

    # Launch the Gradio UI. When use_localhost is set, pin the server to
    # 127.0.0.1:3000 for local testing; otherwise let Gradio choose its
    # defaults (e.g. when running on HuggingFace Spaces).
    logger.info("Initializing Gradio UI and launching server")
    launch_kwargs = {"debug": True, "share": False}
    if use_localhost:
        launch_kwargs.update(server_name="127.0.0.1", server_port=3000)

    # Agent instructions are configured directly in GradioUI.py.
    GradioUI(single_agent).launch(**launch_kwargs)
    # NOTE(review): launch() with debug=True typically blocks while serving,
    # so this line only logs once the server stops — confirm intent.
    logger.success("Server started successfully")


# Script entry point: run main() only when executed directly, not on import.
if __name__ == "__main__":
    main()