Wendong-Fan committed on
Commit
e395a4e
·
1 Parent(s): c8057c7

Revert "feat: enhance OpenAI-compatible model support with role-specific configurations (#356)"

Browse files

This reverts commit bd8f220416c1bbaa06ed81909dfcef083ed0be45, reversing
changes made to 106a169ee93a97e54cee672d6ee4ade5df409cdf.

.gitignore CHANGED
@@ -27,7 +27,6 @@ venv/
27
  env/
28
  ENV/
29
  .env
30
- .venv
31
 
32
  # IDE
33
  .idea/
@@ -59,4 +58,3 @@ coverage.xml
59
  owl/camel/types/__pycache__/
60
  owl/camel/__pycache__/
61
  owl/camel/utils/__pycache_/
62
- tmp/
 
27
  env/
28
  ENV/
29
  .env
 
30
 
31
  # IDE
32
  .idea/
 
58
  owl/camel/types/__pycache__/
59
  owl/camel/__pycache__/
60
  owl/camel/utils/__pycache_/
 
README.md CHANGED
@@ -364,10 +364,8 @@ python examples/run_qwen_zh.py
364
  # Run with Deepseek model
365
  python examples/run_deepseek_zh.py
366
 
367
- # Run with other OpenAI-compatible models, supporting different models for different roles
368
  python examples/run_openai_compatiable_model.py
369
- # Example with question
370
- python examples/run_openai_compatiable_model.py "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
371
 
372
  # Run with Azure OpenAI
373
  python examples/run_azure_openai.py
 
364
  # Run with Deepseek model
365
  python examples/run_deepseek_zh.py
366
 
367
+ # Run with other OpenAI-compatible models
368
  python examples/run_openai_compatiable_model.py
 
 
369
 
370
  # Run with Azure OpenAI
371
  python examples/run_azure_openai.py
README_zh.md CHANGED
@@ -363,10 +363,8 @@ python examples/run_qwen_zh.py
363
  # 使用 Deepseek 模型运行
364
  python examples/run_deepseek_zh.py
365
 
366
- # 使用其他 OpenAI 兼容模型运行,支持不同的 role 使用不同的模型
367
  python examples/run_openai_compatiable_model.py
368
- # 带问题的示例
369
- python examples/run_openai_compatiable_model.py "浏览京东并找出一款对程序员有吸引力的产品。请提供产品名称和价格。"
370
 
371
  # 使用 Azure OpenAI模型运行
372
  python examples/run_azure_openai.py
 
363
  # 使用 Deepseek 模型运行
364
  python examples/run_deepseek_zh.py
365
 
366
+ # 使用其他 OpenAI 兼容模型运行
367
  python examples/run_openai_compatiable_model.py
 
 
368
 
369
  # 使用 Azure OpenAI模型运行
370
  python examples/run_azure_openai.py
examples/run_openai_compatiable_model.py CHANGED
@@ -53,56 +53,38 @@ def construct_society(question: str) -> RolePlaying:
53
  models = {
54
  "user": ModelFactory.create(
55
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
56
-
57
- model_type=os.getenv("USER_ROLE_API_MODEL_TYPE", os.getenv("LLM_ROLE_API_MODEL_TYPE", "qwen-max")),
58
- api_key=os.getenv("USER_ROLE_API_KEY", os.getenv("LLM_ROLE_API_KEY", os.getenv("QWEN_API_KEY", "Your_Key"))),
59
- url=os.getenv("USER_ROLE_API_BASE_URL", os.getenv("LLM_ROLE_API_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1")),
60
- model_config_dict={
61
- "temperature": float(os.getenv("USER_ROLE_API_MODEL_TEMPERATURE", os.getenv("LLM_ROLE_API_MODEL_TEMPERATURE", "0.4"))),
62
- "max_tokens": int(os.getenv("USER_ROLE_API_MODEL_MAX_TOKENS", os.getenv("LLM_ROLE_API_MODEL_MAX_TOKENS", "4096")))
63
- },
64
  ),
65
  "assistant": ModelFactory.create(
66
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
67
- model_type=os.getenv("ASSISTANT_ROLE_API_MODEL_TYPE", os.getenv("LLM_ROLE_API_MODEL_TYPE", "qwen-max")),
68
- api_key=os.getenv("ASSISTANT_ROLE_API_KEY", os.getenv("LLM_ROLE_API_KEY", os.getenv("QWEN_API_KEY", "Your_Key"))),
69
- url=os.getenv("ASSISTANT_ROLE_API_BASE_URL", os.getenv("LLM_ROLE_API_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1")),
70
- model_config_dict={
71
- "temperature": float(os.getenv("ASSISTANT_ROLE_API_MODEL_TEMPERATURE", os.getenv("LLM_ROLE_API_MODEL_TEMPERATURE", "0.4"))),
72
- "max_tokens": int(os.getenv("ASSISTANT_ROLE_API_MODEL_MAX_TOKENS", os.getenv("LLM_ROLE_API_MODEL_MAX_TOKENS", "4096")))
73
- },
74
-
75
  ),
76
  "browsing": ModelFactory.create(
77
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
78
-
79
- model_type=os.getenv("WEB_ROLE_API_BASE_URL", os.getenv("VLLM_ROLE_API_MODEL_TYPE", "qwen-vl-max")),
80
- api_key=os.getenv("WEB_ROLE_API_KEY", os.getenv("VLLM_ROLE_API_KEY", os.getenv("QWEN_API_KEY", "Your_Key"))),
81
- url=os.getenv("USER_ROLE_API_BASE_URL", os.getenv("VLLM_ROLE_API_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1")),
82
- model_config_dict={
83
- "temperature": float(os.getenv("WEB_ROLE_API_MODEL_TEMPERATURE", os.getenv("VLLM_ROLE_API_MODEL_TEMPERATURE", "0.4"))),
84
- "max_tokens": int(os.getenv("WEB_ROLE_API_MODEL_MAX_TOKENS", os.getenv("VLLM_ROLE_API_MODEL_MAX_TOKENS", "4096")))
85
- },
86
  ),
87
  "planning": ModelFactory.create(
88
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
89
- model_type=os.getenv("PLANNING_ROLE_API_MODEL_TYPE", os.getenv("LLM_ROLE_API_MODEL_TYPE", "qwen-max")),
90
- api_key=os.getenv("PLANNING_ROLE_API_KEY", os.getenv("LLM_ROLE_API_KEY", os.getenv("QWEN_API_KEY", "Your_Key"))),
91
- url=os.getenv("PLANNING_ROLE_API_BASE_URL", os.getenv("LLM_ROLE_API_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1")),
92
- model_config_dict={
93
- "temperature": float(os.getenv("PLANNING_ROLE_API_MODEL_TEMPERATURE", os.getenv("LLM_ROLE_API_MODEL_TEMPERATURE", "0.4"))),
94
- "max_tokens": int(os.getenv("PLANNING_ROLE_API_MODEL_MAX_TOKENS", os.getenv("LLM_ROLE_API_MODEL_MAX_TOKENS", "4096")))
95
- },
96
  ),
97
  "image": ModelFactory.create(
98
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
99
- model_type=os.getenv("IMAGE_ROLE_API_MODEL_TYPE", os.getenv("VLLM_ROLE_API_MODEL_TYPE", "qwen-vl-max")),
100
- api_key=os.getenv("IMAGE_ROLE_API_KEY", os.getenv("VLLM_ROLE_API_KEY", os.getenv("QWEN_API_KEY", "Your_Key"))),
101
- url=os.getenv("IMAGE_ROLE_API_BASE_URL", os.getenv("VLLM_ROLE_API_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1")),
102
- model_config_dict={
103
- "temperature": float(os.getenv("IMAGE_ROLE_API_MODEL_TEMPERATURE", os.getenv("VLLM_ROLE_API_MODEL_TEMPERATURE", "0.4"))),
104
- "max_tokens": int(os.getenv("IMAGE_ROLE_API_MODEL_MAX_TOKENS", os.getenv("VLLM_ROLE_API_MODEL_MAX_TOKENS", "4096")))
105
- },
106
  ),
107
  }
108
 
@@ -144,16 +126,13 @@ def construct_society(question: str) -> RolePlaying:
144
  return society
145
 
146
 
 
 
 
 
147
 
148
- def main(question: str = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."):
149
- r"""Main function to run the OWL system with an example question.
150
- Args:
151
- question (str): The task or question to be addressed by the society.
152
- If not provided, a default question will be used.
153
- Defaults to "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
154
- Returns:
155
- None
156
- """
157
 
158
  # Construct and run the society
159
  society = construct_society(task)
@@ -162,9 +141,7 @@ def main(question: str = "Navigate to Amazon.com and identify one product that i
162
 
163
  # Output the result
164
  print(f"\033[94mAnswer: {answer}\033[0m")
165
- # Output the token count
166
- print(f"\033[94mToken count: {token_count}\033[0m")
167
 
168
 
169
  if __name__ == "__main__":
170
- main(sys.argv[1] if len(sys.argv) > 1 else "")
 
53
  models = {
54
  "user": ModelFactory.create(
55
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
56
+ model_type="qwen-max",
57
+ api_key=os.getenv("QWEN_API_KEY"),
58
+ url="https://dashscope.aliyuncs.com/compatible-mode/v1",
59
+ model_config_dict={"temperature": 0.4, "max_tokens": 128000},
 
 
 
 
60
  ),
61
  "assistant": ModelFactory.create(
62
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
63
+ model_type="qwen-max",
64
+ api_key=os.getenv("QWEN_API_KEY"),
65
+ url="https://dashscope.aliyuncs.com/compatible-mode/v1",
66
+ model_config_dict={"temperature": 0.4, "max_tokens": 128000},
 
 
 
 
67
  ),
68
  "browsing": ModelFactory.create(
69
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
70
+ model_type="qwen-vl-max",
71
+ api_key=os.getenv("QWEN_API_KEY"),
72
+ url="https://dashscope.aliyuncs.com/compatible-mode/v1",
73
+ model_config_dict={"temperature": 0.4, "max_tokens": 128000},
 
 
 
 
74
  ),
75
  "planning": ModelFactory.create(
76
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
77
+ model_type="qwen-max",
78
+ api_key=os.getenv("QWEN_API_KEY"),
79
+ url="https://dashscope.aliyuncs.com/compatible-mode/v1",
80
+ model_config_dict={"temperature": 0.4, "max_tokens": 128000},
 
 
 
81
  ),
82
  "image": ModelFactory.create(
83
  model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
84
+ model_type="qwen-vl-max",
85
+ api_key=os.getenv("QWEN_API_KEY"),
86
+ url="https://dashscope.aliyuncs.com/compatible-mode/v1",
87
+ model_config_dict={"temperature": 0.4, "max_tokens": 128000},
 
 
 
88
  ),
89
  }
90
 
 
126
  return society
127
 
128
 
129
+ def main():
130
+ r"""Main function to run the OWL system with an example question."""
131
+ # Example research question
132
+ default_task = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
133
 
134
+ # Override default task if command line argument is provided
135
+ task = sys.argv[1] if len(sys.argv) > 1 else default_task
 
 
 
 
 
 
 
136
 
137
  # Construct and run the society
138
  society = construct_society(task)
 
141
 
142
  # Output the result
143
  print(f"\033[94mAnswer: {answer}\033[0m")
 
 
144
 
145
 
146
  if __name__ == "__main__":
147
+ main()
owl/.env_template CHANGED
@@ -24,45 +24,6 @@ QWEN_API_KEY='Your_Key'
24
  # DeepSeek API (https://platform.deepseek.com/api_keys)
25
  DEEPSEEK_API_KEY='Your_Key'
26
 
27
- # Multi-platform LLM/VLLM API, default values for user assistant planning web image roles
28
- # LLM_ROLE_API_BASE_URL=''
29
- # LLM_ROLE_API_KEY='Your_Key'
30
- # LLM_ROLE_API_MODEL_TYPE=''
31
- # LLM_ROLE_API_MODEL_TEMPERATURE='0.0'
32
- # LLM_ROLE_API_MODEL_MAX_TOKENS='0'
33
- # VLLM_ROLE_API_BASE_URL=''
34
- # VLLM_ROLE_API_KEY='Your_Key'
35
- # VLLM_ROLE_API_MODEL_TYPE=''
36
- # VLLM_ROLE_API_MODEL_TEMPERATURE='0.0'
37
- # VLLM_ROLE_API_MODEL_MAX_TOKENS='0'
38
-
39
- # Multi-platform LLM/VLLM API for user assistant planning web image roles
40
- # USER_ROLE_API_BASE_URL=''
41
- # USER_ROLE_API_KEY='Your_Key'
42
- # USER_ROLE_API_MODEL_TYPE=''
43
- # USER_ROLE_API_MODEL_TEMPERATURE='0.8'
44
- # USER_ROLE_API_MODEL_MAX_TOKENS='4096'
45
- # ASSISTANT_ROLE_API_BASE_URL=''
46
- # ASSISTANT_ROLE_API_KEY='Your_Key'
47
- # ASSISTANT_ROLE_API_MODEL_TYPE=''
48
- # ASSISTANT_ROLE_API_MODEL_TEMPERATURE='0.2'
49
- # ASSISTANT_ROLE_API_MODEL_MAX_TOKENS='4096'
50
- # PLANNING_ROLE_API_BASE_URL=''
51
- # PLANNING_ROLE_API_KEY='Your_Key'
52
- # PLANNING_ROLE_API_MODEL_TYPE=''
53
- # PLANNING_ROLE_API_MODEL_TEMPERATURE='0.4'
54
- # PLANNING_ROLE_API_MODEL_MAX_TOKENS='8192'
55
- # WEB_ROLE_API_BASE_URL=''
56
- # WEB_ROLE_API_KEY='Your_Key'
57
- # WEB_ROLE_API_MODEL_TYPE=''
58
- # WEB_ROLE_API_MODEL_TEMPERATURE='0.0'
59
- # WEB_ROLE_API_MODEL_MAX_TOKENS='0'
60
- # IMAGE_ROLE_API_BASE_URL=''
61
- # IMAGE_ROLE_API_KEY='Your_Key'
62
- # IMAGE_ROLE_API_MODEL_TYPE=''
63
- # IMAGE_ROLE_API_MODEL_TEMPERATURE='0.0'
64
- # IMAGE_ROLE_API_MODEL_MAX_TOKENS='0'
65
-
66
  #===========================================
67
  # Tools & Services API
68
  #===========================================
 
24
  # DeepSeek API (https://platform.deepseek.com/api_keys)
25
  DEEPSEEK_API_KEY='Your_Key'
26
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  #===========================================
28
  # Tools & Services API
29
  #===========================================
owl/webapp.py CHANGED
@@ -245,7 +245,7 @@ MODULE_DESCRIPTIONS = {
245
  "run": "Default mode: Using OpenAI model's default agent collaboration mode, suitable for most tasks.",
246
  "run_mini": "Using OpenAI model with minimal configuration to process tasks",
247
  "run_deepseek_zh": "Using deepseek model to process Chinese tasks",
248
- "run_openai_compatiable_model": "Using multiple openai compatible model to process tasks",
249
  "run_ollama": "Using local ollama model to process tasks",
250
  "run_qwen_mini_zh": "Using qwen model with minimal configuration to process tasks",
251
  "run_qwen_zh": "Using qwen model to process tasks",
 
245
  "run": "Default mode: Using OpenAI model's default agent collaboration mode, suitable for most tasks.",
246
  "run_mini": "Using OpenAI model with minimal configuration to process tasks",
247
  "run_deepseek_zh": "Using deepseek model to process Chinese tasks",
248
+ "run_openai_compatiable_model": "Using openai compatible model to process tasks",
249
  "run_ollama": "Using local ollama model to process tasks",
250
  "run_qwen_mini_zh": "Using qwen model with minimal configuration to process tasks",
251
  "run_qwen_zh": "Using qwen model to process tasks",
owl/webapp_zh.py CHANGED
@@ -245,7 +245,7 @@ MODULE_DESCRIPTIONS = {
245
  "run": "默认模式:使用OpenAI模型的默认的智能体协作模式,适合大多数任务。",
246
  "run_mini": "使用使用OpenAI模型最小化配置处理任务",
247
  "run_deepseek_zh": "使用deepseek模型处理中文任务",
248
- "run_openai_compatiable_model": "使用多个openai兼容模型处理任务",
249
  "run_ollama": "使用本地ollama模型处理任务",
250
  "run_qwen_mini_zh": "使用qwen模型最小化配置处理任务",
251
  "run_qwen_zh": "使用qwen模型处理任务",
 
245
  "run": "默认模式:使用OpenAI模型的默认的智能体协作模式,适合大多数任务。",
246
  "run_mini": "使用使用OpenAI模型最小化配置处理任务",
247
  "run_deepseek_zh": "使用deepseek模型处理中文任务",
248
+ "run_openai_compatiable_model": "使用openai兼容模型处理任务",
249
  "run_ollama": "使用本地ollama模型处理任务",
250
  "run_qwen_mini_zh": "使用qwen模型最小化配置处理任务",
251
  "run_qwen_zh": "使用qwen模型处理任务",