Chris4K commited on
Commit
d03a443
·
verified ·
1 Parent(s): 3a3dc4d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -17,7 +17,7 @@ import os
17
  try:
18
  from transformers import AutoModelForCausalLM, AutoTokenizer
19
  import torch
20
- TRANSFORMERS_AVAILABLE = True
21
  except ImportError:
22
  TRANSFORMERS_AVAILABLE = False
23
 
@@ -54,7 +54,7 @@ class EventScraper:
54
  # Try local model first
55
  if TRANSFORMERS_AVAILABLE:
56
  try:
57
- model_name = "meta-llama/Llama-3.2-3B-Instruct"
58
  self.tokenizer = AutoTokenizer.from_pretrained(model_name)
59
  self.model = AutoModelForCausalLM.from_pretrained(
60
  model_name,
 
17
  try:
18
  from transformers import AutoModelForCausalLM, AutoTokenizer
19
  import torch
20
+ TRANSFORMERS_AVAILABLE = False # TODO: change back to True to use the local LLM
21
  except ImportError:
22
  TRANSFORMERS_AVAILABLE = False
23
 
 
54
  # Try local model first
55
  if TRANSFORMERS_AVAILABLE:
56
  try:
57
+ model_name = "meta-llama/Llama-3.2-1B-Instruct" # the 3B model is very slow on HF Spaces
58
  self.tokenizer = AutoTokenizer.from_pretrained(model_name)
59
  self.model = AutoModelForCausalLM.from_pretrained(
60
  model_name,