Update app.py
app.py
CHANGED
@@ -17,7 +17,7 @@ import os
 try:
     from transformers import AutoModelForCausalLM, AutoTokenizer
     import torch
-    TRANSFORMERS_AVAILABLE = True
+    TRANSFORMERS_AVAILABLE = False # TODO change back to true to use local llm
 except ImportError:
     TRANSFORMERS_AVAILABLE = False
 
@@ -54,7 +54,7 @@ class EventScraper:
         # Try local model first
         if TRANSFORMERS_AVAILABLE:
             try:
-                model_name = "meta-llama/Llama-3.2-3B-Instruct"
+                model_name = "meta-llama/Llama-3.2-1B-Instruct" # 3B is very slow on HF :(
                 self.tokenizer = AutoTokenizer.from_pretrained(model_name)
                 self.model = AutoModelForCausalLM.from_pretrained(
                     model_name,
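
For context, the diff cuts off right after the first argument of the from_pretrained call and shows none of the surrounding class, so here is a minimal sketch of how the touched lines plausibly fit together. The __init__ method, the except/fallback branch, and the torch_dtype/device_map keyword arguments are assumptions for illustration, not part of this commit.

# Sketch only: approximates the pattern the diff touches, not the actual app.py.
try:
    from transformers import AutoModelForCausalLM, AutoTokenizer
    import torch
    TRANSFORMERS_AVAILABLE = False  # TODO change back to true to use local llm
except ImportError:
    TRANSFORMERS_AVAILABLE = False

class EventScraper:
    def __init__(self):
        self.model = None
        self.tokenizer = None
        # Try local model first
        if TRANSFORMERS_AVAILABLE:
            try:
                model_name = "meta-llama/Llama-3.2-1B-Instruct"  # 3B is very slow on HF :(
                self.tokenizer = AutoTokenizer.from_pretrained(model_name)
                self.model = AutoModelForCausalLM.from_pretrained(
                    model_name,
                    torch_dtype=torch.float16,  # assumed kwargs; the real call is truncated in the diff
                    device_map="auto",
                )
            except Exception:
                # If the local model can't be loaded, leave both unset so callers can fall back.
                self.model = None
                self.tokenizer = None

With TRANSFORMERS_AVAILABLE hard-coded to False, the if-block is skipped entirely, so this commit disables the local-LLM path regardless of whether transformers imports cleanly.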