Update app.py
app.py CHANGED
@@ -77,9 +77,9 @@ def lookup_crs_value(crs_key):
 ###########################################
 # ToDo move functions to utils and model specifications to config file!
 # Configuration for the dedicated model
-# https://
+# https://qu2d8m6dmsollhly.us-east-1.aws.endpoints.huggingface.cloud # 4k token
 DEDICATED_MODEL = "meta-llama/Llama-3.1-8B-Instruct"
-DEDICATED_ENDPOINT = "https://
+DEDICATED_ENDPOINT = "https://nwea79x4q1clc89l.eu-west-1.aws.endpoints.huggingface.cloud"
 # Write access token from the settings
 WRITE_ACCESS_TOKEN = st.secrets["Llama_3_1"]

@@ -87,7 +87,7 @@ def get_rag_answer(query, top_results):
     # Build context from each top result using title, objective, and description.
     context = "\n\n".join([build_context_for_result(res) for res in top_results])
     # Truncate context to 11500 tokens (approximation)
-    context = truncate_to_tokens(context,
+    context = truncate_to_tokens(context, 11500)
     # Improved prompt with role instruction and formatting instruction.
     prompt = (
         "You are a project portfolio adviser at the development cooperation GIZ. "
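The first hunk fills in the dedicated endpoint URL but not the code that calls it. As a hedged illustration only: huggingface_hub's InferenceClient accepts a dedicated Inference Endpoint URL in place of a model id, so the request could look roughly like the sketch below. Whether app.py actually uses InferenceClient, the placeholder prompt, and the max_tokens value are all assumptions, not taken from this diff.

from huggingface_hub import InferenceClient

# Values taken from the diff above; in app.py the token comes from st.secrets.
DEDICATED_ENDPOINT = "https://nwea79x4q1clc89l.eu-west-1.aws.endpoints.huggingface.cloud"
WRITE_ACCESS_TOKEN = "hf_..."  # placeholder, never hard-code a real token

# An Inference Endpoint URL can be passed wherever a model id is expected.
client = InferenceClient(model=DEDICATED_ENDPOINT, token=WRITE_ACCESS_TOKEN)

response = client.chat_completion(
    messages=[{"role": "user", "content": "<RAG prompt built in get_rag_answer>"}],
    max_tokens=512,  # illustrative value, not taken from the diff
)
print(response.choices[0].message.content)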
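The second hunk completes the call to truncate_to_tokens, a helper that is not included in this diff. A minimal sketch of what an approximate token-based truncation could look like, assuming a rough four-characters-per-token heuristic: the helper name and the 11500 limit come from the diff, everything else is an assumption.

def truncate_to_tokens(text: str, max_tokens: int) -> str:
    """Hypothetical sketch: approximate tokens as ~4 characters each.

    The real helper in app.py is not part of this diff and may count
    tokens differently (e.g. with a proper tokenizer).
    """
    max_chars = max_tokens * 4
    if len(text) <= max_chars:
        return text
    # Cut at the character limit, then back off to the last whitespace
    # so a word is not split in half.
    return text[:max_chars].rsplit(" ", 1)[0]

Under this heuristic, the 11500-token limit maps to roughly 46,000 characters of context before truncation kicks in.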