Spaces:
Sleeping
Sleeping
apple muncy
committed on
Commit
·
10470c9
1
Parent(s):
7b40222
trying os.getenv()
Browse files
Signed-off-by: apple muncy <[email protected]>
app.py
CHANGED
@@ -17,7 +17,7 @@ from smolagents import GradioUI, LiteLLMModel
|
|
17 |
from retriever import load_guest_dataset
|
18 |
import yaml
|
19 |
from dotenv import load_dotenv
|
20 |
-
|
21 |
load_dotenv()
|
22 |
|
23 |
# Import our custom tools from their modules
|
@@ -30,10 +30,11 @@ with open("prompts.yaml", 'r') as stream:
|
|
30 |
# Get the system prompt from the YAML file
|
31 |
system_prompt = prompt_templates["system_prompt"]
|
32 |
|
|
|
33 |
# Initialize the chat model
|
34 |
llm = HuggingFaceEndpoint(
|
35 |
repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
|
36 |
-
huggingfacehub_api_token=
|
37 |
)
|
38 |
|
39 |
chat = ChatHuggingFace(llm=llm, verbose=True)
|
|
|
17 |
from retriever import load_guest_dataset
|
18 |
import yaml
|
19 |
from dotenv import load_dotenv
|
20 |
+
import os
|
21 |
load_dotenv()
|
22 |
|
23 |
# Import our custom tools from their modules
|
|
|
30 |
# Get the system prompt from the YAML file
|
31 |
system_prompt = prompt_templates["system_prompt"]
|
32 |
|
33 |
+
hf_api_key = os.getenv("HF_TOKEN")
|
34 |
# Initialize the chat model
|
35 |
llm = HuggingFaceEndpoint(
|
36 |
repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
|
37 |
+
huggingfacehub_api_token=hf_api_key,
|
38 |
)
|
39 |
|
40 |
chat = ChatHuggingFace(llm=llm, verbose=True)
|