Update qa_summary.py
qa_summary.py (+3 -2)
@@ -16,8 +16,9 @@ def generate_answer(llm_name, texts, query, queries, mode='validate'):
         tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2", use_fast=True)
         llm_model = AutoModelForCausalLM.from_pretrained(
             "mistralai/Mistral-7B-Instruct-v0.2",
-            device_map="auto",
-
+            #device_map="auto",
+            device_map="cuda",
+            torch_dtype=torch.float16,
         )

     elif llm_name == 'phi3mini':
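For context, a minimal sketch of how the model-loading branch of generate_answer might look after this change. The if llm_name == 'mistral7b' branch name, the surrounding structure, and the import lines are assumptions; only the elif line and the from_pretrained call appear in the diff.

    import torch
    from transformers import AutoTokenizer, AutoModelForCausalLM

    def generate_answer(llm_name, texts, query, queries, mode='validate'):
        if llm_name == 'mistral7b':  # assumed branch name, not shown in the diff
            tokenizer = AutoTokenizer.from_pretrained(
                "mistralai/Mistral-7B-Instruct-v0.2", use_fast=True
            )
            llm_model = AutoModelForCausalLM.from_pretrained(
                "mistralai/Mistral-7B-Instruct-v0.2",
                #device_map="auto",
                device_map="cuda",           # place the whole model on the GPU
                torch_dtype=torch.float16,   # load weights in half precision
            )
        elif llm_name == 'phi3mini':
            ...

Pinning device_map to "cuda" keeps the entire model on a single GPU instead of letting device_map="auto" split or offload layers to CPU/disk when memory is tight, and torch_dtype=torch.float16 loads the 7B weights in half precision (roughly 14 GB instead of 28 GB). Passing torch_dtype requires that torch is imported in qa_summary.py.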