UntilDot committed on
Commit
03202d5
·
verified ·
1 Parent(s): a50063c

Upload agents.py

Browse files
Files changed (1) hide show
  1. agents.py +62 -0
agents.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import asyncio
import httpx
import os

# Chat-completions endpoint of the OpenRouter API.
OPENROUTER_BASE = "https://openrouter.ai/api/v1/chat/completions"

# NOTE(review): the API key is read once at import time. If OPENROUTER_API_KEY
# is unset, the header silently becomes "Bearer None" and every request will
# fail with an auth error — confirm the env var is exported before import.
HEADERS = {
    "Authorization": f"Bearer {os.getenv('OPENROUTER_API_KEY')}",
    "Content-Type": "application/json",
}

# Whitelist of model identifiers this app is allowed to call. All entries
# carry the ":free" suffix (free-tier variants on OpenRouter); the agent
# layer normalizes user-supplied model names to match before checking here.
ALLOWED_MODELS = [
    "deepseek/deepseek-chat-v3-0324:free",
    "google/gemini-2.0-flash-exp:free",
    "meta-llama/llama-4-maverick:free",
    "microsoft/mai-ds-r1:free",
    "meta-llama/llama-4-scout:free",
    "google/gemma-3-27b-it:free",
    "qwen/qwq-32b:free",
    "qwen/qwen2.5-vl-72b-instruct:free",
    "qwen/qwen-2.5-72b-instruct:free",
    "google/gemini-2.5-pro-exp-03-25:free",
    "deepseek/deepseek-r1:free",
]
24
+
25
async def call_openrouter(model: str, prompt: str, timeout: float = 30) -> str:
    """Send a single-turn chat completion request to OpenRouter.

    Args:
        model: OpenRouter model identifier (e.g. "qwen/qwq-32b:free").
        prompt: User message sent as the only entry in the conversation.
        timeout: Request timeout in seconds. Defaults to 30 (the value that
            was previously hard-coded), so existing callers are unaffected.

    Returns:
        The assistant's reply text from the first choice in the response.

    Raises:
        httpx.HTTPStatusError: If the API responds with a 4xx/5xx status.
        httpx.TimeoutException: If the request exceeds *timeout* seconds.
        KeyError/IndexError: If the response JSON lacks the expected shape.
    """
    body = {
        "model": model,
        "messages": [{"role": "user", "content": prompt}],
        "temperature": 0.7,
    }
    # A fresh client per call keeps the function self-contained; for high
    # call volumes a shared AsyncClient would reuse connections.
    async with httpx.AsyncClient(timeout=timeout) as client:
        response = await client.post(OPENROUTER_BASE, headers=HEADERS, json=body)
        response.raise_for_status()
        return response.json()["choices"][0]["message"]["content"]
35
+
36
async def query_llm_agent(name: str, prompt: str, settings: dict) -> str:
    """Resolve the model configured for *name* and forward *prompt* to it.

    Looks up ``settings["models"][name]``, normalizes the model id, and
    delegates the actual request to ``call_openrouter``. Every outcome is
    returned as a string prefixed with ``[<name>]`` — including the error
    cases, so callers never see an exception from this function.
    """
    model = settings.get("models", {}).get(name)

    # Guard: nothing configured for this agent slot.
    if not model:
        return f"[{name}] No model selected."

    # Normalize — every allowed model carries the ":free" suffix.
    model = model if model.endswith(":free") else f"{model}:free"

    # Guard: refuse anything outside the whitelist.
    if model not in ALLOWED_MODELS:
        return f"[{name}] Model '{model}' is not supported."

    try:
        reply = await call_openrouter(model, prompt)
    except Exception as e:
        # Deliberate best-effort: report the failure in-band rather than raise.
        return f"[{name}] Error: {str(e)}"
    return f"[{name}] {reply}"
54
+
55
async def query_all_llms(prompt: str, settings: dict) -> list:
    """Fan *prompt* out to the three agent slots concurrently.

    Returns a list of reply strings in fixed slot order (LLM-A, LLM-B,
    LLM-C); ``asyncio.gather`` preserves argument order, so results line
    up with the agent names.
    """
    agent_names = ("LLM-A", "LLM-B", "LLM-C")
    return list(
        await asyncio.gather(
            *(query_llm_agent(agent, prompt, settings) for agent in agent_names)
        )
    )
60
+
61
def query_all_llms_sync(prompt: str, settings: dict) -> list:
    """Blocking convenience wrapper around :func:`query_all_llms`.

    For synchronous callers (e.g. a plain script or a non-async web
    handler). Must not be called from inside a running event loop —
    ``asyncio.run`` would raise a RuntimeError there.
    """
    pending = query_all_llms(prompt, settings)
    return asyncio.run(pending)