Chandima Prabhath committed on
Commit 8a6c511 · 1 Parent(s): 123c627

Refactor OpenAI client initialization to use hardcoded base URL and API key; simplify generate_llm function by removing model parameter.

Files changed (1)
  1. polLLM.py +3 -4
polLLM.py CHANGED
@@ -25,8 +25,8 @@ _CHAR = _config.get("char", "Eve")
 
 # --- OpenAI client init ---
 client = OpenAI(
-    base_url = os.getenv("OPENAI_BASE_URL", "https://text.pollinations.ai/openai"),
-    api_key = os.getenv("OPENAI_API_KEY", "")
+    base_url = "https://text.pollinations.ai/openai",
+    api_key = "OPENAI_API_KEY"
 )
 
 def _build_system_prompt() -> str:
@@ -37,13 +37,12 @@ def _build_system_prompt() -> str:
 
 def generate_llm(
     prompt: str,
-    model: str = None,
 ) -> str:
     """
     Send a chat-completion request to the LLM, with retries and backoff.
     Reads defaults from config.yaml, but can be overridden per-call.
     """
-    model = model or _DEFAULT_MODEL
+    model = _DEFAULT_MODEL
    system_prompt = _build_system_prompt()
     messages = [
         {"role": "system", "content": system_prompt},