karths committed (verified)
Commit: dba755a
1 Parent(s): 087c4c8

Update app.py

Files changed (1): app.py (+3 -4)
app.py CHANGED
@@ -10,7 +10,7 @@ headers = {"Authorization": f"Bearer {HF_TOKEN}"}
 client = AsyncInferenceClient(api_url)
 
 
-system_message = """
+system_prompt = """
 ### Instruction:
 Refactor the provided Python code to improve its maintainability and efficiency and reduce complexity. Include the refactored code along with the comments on the changes made for improving the metrics.
 ### Input:
@@ -93,8 +93,7 @@ def analyze_sales_data(sales_records):
 # Stream text - stream tokens with InferenceClient from TGI
 async def predict(message, chatbot, temperature=0.1, max_new_tokens=4096, top_p=0.6, repetition_penalty=1.15,):
 
-    if system_prompt != "":
-        input_prompt = f"{system_prompt}"
+
 
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -102,7 +101,7 @@ async def predict(message, chatbot, temperature=0.1, max_new_tokens=4096, top_p
     top_p = float(top_p)
 
 
-    input_prompt = input_prompt + str(message) + " [/INST] "
+    input_prompt = system_prompt + str(message) + " [/INST] "
 
     partial_message = ""
     async for token in await client.text_generation(prompt=input_prompt,
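
For context, a minimal sketch of the streaming path as it stands after this commit: the prompt is now seeded unconditionally from the module-level system_prompt (previously named system_message, while the code referenced system_prompt), the user message and " [/INST] " are appended, and tokens are streamed from the TGI endpoint via AsyncInferenceClient. The endpoint URL, environment-variable names, the stream=True flag, the abbreviated prompt text, and the generator-style yielding are assumptions for illustration, since the diff cuts off mid-call.

# Sketch only -- API_URL is a placeholder; adjust to the app's actual configuration.
import os
from huggingface_hub import AsyncInferenceClient

api_url = os.environ.get("API_URL", "http://localhost:8080")  # assumed TGI endpoint
client = AsyncInferenceClient(api_url)

# Abbreviated; the full prompt in app.py continues with the ### Input: example code.
system_prompt = """
### Instruction:
Refactor the provided Python code to improve its maintainability and efficiency and reduce complexity. Include the refactored code along with the comments on the changes made for improving the metrics.
### Input:
"""

async def predict(message, chatbot=None, temperature=0.1, max_new_tokens=4096,
                  top_p=0.6, repetition_penalty=1.15):
    # TGI rejects a temperature of 0, so clamp to a small positive value.
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)

    # After this commit the prompt is built directly from system_prompt.
    input_prompt = system_prompt + str(message) + " [/INST] "

    partial_message = ""
    # With stream=True, text_generation returns an async iterator of tokens.
    async for token in await client.text_generation(
        prompt=input_prompt,
        stream=True,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
    ):
        partial_message += token
        yield partial_message  # assumed Gradio-style incremental output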