metastable-void committed on
renamed
README.md CHANGED
@@ -6,7 +6,7 @@ colorTo: purple
 python_version: 3.11
 models:
 - llm-jp/llm-jp-3-1.8b-instruct
-- vericava/llm-jp-3-1.8b-instruct-lora-vericava7
+- vericava/llm-jp-3-1.8b-instruct-lora-vericava7-llama
 sdk: gradio
 sdk_version: 5.23.0
 app_file: app.py
app.py CHANGED
@@ -20,7 +20,7 @@ MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))
 
 
 if torch.cuda.is_available():
-    model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7"
+    model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7-llama"
     my_pipeline = pipeline(
         model=model_id,
     )
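
For context, a minimal sketch of how the renamed model id is consumed, mirroring the pipeline construction in app.py. It assumes the Space runs on a GPU, that peft is installed so the LoRA adapter repo can be resolved from the Hub, and that the repo is tagged for text generation; the prompt and generation parameters below are illustrative, not part of this commit.

    import torch
    from transformers import pipeline

    if torch.cuda.is_available():
        # Adapter repo name introduced by this rename.
        model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7-llama"
        # As in app.py: pipeline() infers the task from the Hub metadata.
        my_pipeline = pipeline(model=model_id)
        # Illustrative call only; the real prompt handling lives elsewhere in app.py.
        print(my_pipeline("こんにちは、", max_new_tokens=32))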