metastable-void committed on
Commit
1a76180
·
unverified ·
1 Parent(s): f602cdc
Files changed (1) hide show
  1. app.py +2 -0
app.py CHANGED
@@ -22,6 +22,8 @@ MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))
22
  if torch.cuda.is_available():
23
  model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7-llama"
24
  base_model_id = "llm-jp/llm-jp-3-1.8b-instruct"
 
 
25
  my_pipeline=pipeline(
26
  model=model_id,
27
  tokenizer=base_model_id,
 
22
  if torch.cuda.is_available():
23
  model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7-llama"
24
  base_model_id = "llm-jp/llm-jp-3-1.8b-instruct"
25
+ tokenizer = AutoTokenizer.from_pretrained(base_model_id)
26
+ model = AutoModelForCausalLM.from_pretrained(model_id)
27
  my_pipeline=pipeline(
28
  model=model_id,
29
  tokenizer=base_model_id,