# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
"""
This module provides integration with the Groq API platform for the OWL system.
It configures different agent roles with appropriate Groq models based on their requirements:
- Tool-intensive roles (assistant, web, planning, video, image) use GROQ_LLAMA_3_3_70B
- Document processing uses GROQ_MIXTRAL_8_7B
- Simple roles (user) use GROQ_LLAMA_3_1_8B
To use this module:
1. Set GROQ_API_KEY in your .env file
2. Set OPENAI_API_BASE_URL to "https://api.groq.com/openai/v1"
3. Run with: python -m examples.run_groq
"""
import sys
from dotenv import load_dotenv
from camel.models import ModelFactory
from camel.toolkits import (
    AudioAnalysisToolkit,
    CodeExecutionToolkit,
    ExcelToolkit,
    ImageAnalysisToolkit,
    SearchToolkit,
    VideoAnalysisToolkit,
    BrowserToolkit,
    FileWriteToolkit,
)
from camel.types import ModelPlatformType, ModelType
from camel.logger import set_log_level
from owl.utils import OwlRolePlaying, run_society, DocumentProcessingToolkit
load_dotenv()
set_log_level(level="DEBUG")


def construct_society(question: str) -> OwlRolePlaying:
r"""Construct a society of agents based on the given question.
Args:
question (str): The task or question to be addressed by the society.
Returns:
OwlRolePlaying: A configured society of agents ready to address the question.
"""
    # Create models for different components
    models = {
        "user": ModelFactory.create(
            model_platform=ModelPlatformType.GROQ,
            model_type=ModelType.GROQ_LLAMA_3_1_8B,  # Simple role, can use 8B model
            model_config_dict={"temperature": 0},
        ),
        "assistant": ModelFactory.create(
            model_platform=ModelPlatformType.GROQ,
            model_type=ModelType.GROQ_LLAMA_3_3_70B,  # Main assistant needs tool capability
            model_config_dict={"temperature": 0},
        ),
        "browsing": ModelFactory.create(
            model_platform=ModelPlatformType.GROQ,
            model_type=ModelType.GROQ_LLAMA_3_3_70B,  # Web browsing requires tool usage
            model_config_dict={"temperature": 0},
        ),
        "planning": ModelFactory.create(
            model_platform=ModelPlatformType.GROQ,
            model_type=ModelType.GROQ_LLAMA_3_3_70B,  # Planning requires complex reasoning
            model_config_dict={"temperature": 0},
        ),
        "video": ModelFactory.create(
            model_platform=ModelPlatformType.GROQ,
            model_type=ModelType.GROQ_LLAMA_3_3_70B,  # Video analysis is multimodal
            model_config_dict={"temperature": 0},
        ),
        "image": ModelFactory.create(
            model_platform=ModelPlatformType.GROQ,
            model_type=ModelType.GROQ_LLAMA_3_3_70B,  # Image analysis is multimodal
            model_config_dict={"temperature": 0},
        ),
        "document": ModelFactory.create(
            model_platform=ModelPlatformType.GROQ,
            model_type=ModelType.GROQ_MIXTRAL_8_7B,  # Document processing can use Mixtral
            model_config_dict={"temperature": 0},
        ),
    }
    # Configure toolkits
    tools = [
        *BrowserToolkit(
            headless=False,  # Set to True for headless mode (e.g., on remote servers)
            web_agent_model=models["browsing"],
            planning_agent_model=models["planning"],
        ).get_tools(),
        *VideoAnalysisToolkit(model=models["video"]).get_tools(),
        *AudioAnalysisToolkit().get_tools(),  # This requires an OpenAI API key
        *CodeExecutionToolkit(sandbox="subprocess", verbose=True).get_tools(),
        *ImageAnalysisToolkit(model=models["image"]).get_tools(),
        SearchToolkit().search_duckduckgo,
        SearchToolkit().search_google,  # Comment this out if you don't have Google Search
        SearchToolkit().search_wiki,
        *ExcelToolkit().get_tools(),
        *DocumentProcessingToolkit(model=models["document"]).get_tools(),
        *FileWriteToolkit(output_dir="./").get_tools(),
    ]
    # Configure agent roles and parameters
    user_agent_kwargs = {"model": models["user"]}
    assistant_agent_kwargs = {"model": models["assistant"], "tools": tools}

    # Configure task parameters
    task_kwargs = {
        "task_prompt": question,
        "with_task_specify": False,
    }

    # Create and return the society
    society = OwlRolePlaying(
        **task_kwargs,
        user_role_name="user",
        user_agent_kwargs=user_agent_kwargs,
        assistant_role_name="assistant",
        assistant_agent_kwargs=assistant_agent_kwargs,
    )

    return society

def main():
    r"""Main function to run the OWL system with an example question."""
    # Example research question
    default_task = (
        "Navigate to Amazon.com and identify one product that is attractive "
        "to coders. Please provide me with the product name and price. "
        "No need to verify your answer."
    )

    # Note: This configuration uses GROQ_LLAMA_3_3_70B for the tool-intensive
    # roles (assistant, web, planning, video, image) and GROQ_MIXTRAL_8_7B for
    # document processing. GROQ_LLAMA_3_1_8B is used only for the user role,
    # which doesn't require tool usage capabilities.

    # Override the default task if a command line argument is provided
    task = sys.argv[1] if len(sys.argv) > 1 else default_task

    # Construct and run the society
    society = construct_society(task)
    answer, chat_history, token_count = run_society(society)

    # Output the result
    print(f"\033[94mAnswer: {answer}\033[0m")


if __name__ == "__main__":
    main()