Update app.py
app.py CHANGED
@@ -7,13 +7,13 @@ Original file is located at
 https://colab.research.google.com/drive/1zRuAxGm_11lNIeBxFlHVzc5tNKhyLef4
 """
 import gradio as gr
-
+import os
 # Load the LLaMA model
 from transformers import AutoTokenizer, AutoModelForCausalLM
 token = os.getenv("Git Access")
 
-tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf",use_auth_token=token)
-model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf",use_auth_token=token)
+tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf",use_auth_token=token)
+model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat-hf",use_auth_token=token)
 
 # Define the inference function
 def generate_text(prompt):
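The commit adds the missing import os (the existing os.getenv call on line 13 would otherwise raise a NameError) and swaps the base meta-llama/Llama-2-7b-hf checkpoint for the chat-tuned meta-llama/Llama-2-7b-chat-hf. The hunk ends at def generate_text(prompt):, so the body of the inference function and the Gradio wiring are not shown. Below is a minimal sketch of how the rest of app.py might look; the generation parameters, the gr.Interface configuration, and the function body are assumptions, not part of this commit.

# Hypothetical completion of app.py beyond the lines shown in the diff.
# The generation settings and Gradio interface here are assumed, not taken
# from the actual Space.
import os

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Read the access token from the Space secret, as in the diff
token = os.getenv("Git Access")

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf", use_auth_token=token)
model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat-hf", use_auth_token=token)

# Define the inference function
def generate_text(prompt):
    # Tokenize the prompt and generate a continuation (length is an assumed value)
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=200)
    # Decode the generated token ids back to text
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# Expose the function through a simple text-to-text Gradio interface
demo = gr.Interface(fn=generate_text, inputs="text", outputs="text")
demo.launch()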