Update generate_answer.py
generate_answer.py CHANGED (+2 -0)
@@ -1,7 +1,9 @@
 # from transformers import AutoTokenizer, AutoModelForCausalLM
 from transformers import AutoModelForCausalLM, GemmaTokenizer
 from langchain.prompts import PromptTemplate
+import os
 
+os.environ["HF_TOKEN"] = os.getenv('HF_TOKEN')
 # model = AutoModelForCausalLM.from_pretrained("microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
 # tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
 
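The two added lines read the HF_TOKEN secret configured on the Space and re-export it into the process environment, presumably so that gated checkpoints (such as the Gemma models implied by GemmaTokenizer) can be downloaded with authentication. Below is a minimal sketch of how that token would typically be used when loading a gated model; the model id google/gemma-2b-it and the explicit token= argument are illustrative assumptions, not part of this commit.

import os
from transformers import AutoModelForCausalLM, GemmaTokenizer

# Assumed model id for illustration only; the diff does not show which
# gated checkpoint this Space actually loads.
MODEL_ID = "google/gemma-2b-it"

# HF_TOKEN is the secret set on the Space; gated repositories reject
# anonymous downloads, so the token must reach the hub client.
hf_token = os.getenv("HF_TOKEN")

# Passing token= explicitly is one way to authenticate; setting the
# HF_TOKEN environment variable (as the commit does) also works, since
# huggingface_hub reads it automatically.
tokenizer = GemmaTokenizer.from_pretrained(MODEL_ID, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, token=hf_token)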