metastable-void committed
Commit c522c05 · unverified · 1 Parent(s): 97befb1

updated to use my model

Files changed (1):
  app.py +1 -1
app.py CHANGED
@@ -19,7 +19,7 @@ DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))
 
 if torch.cuda.is_available():
-    model_id = "cyberagent/calm2-7b-chat"
+    model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7"
     my_pipeline=pipeline(
         model=model_id,
     )
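
For context, a minimal sketch of how this hunk is used at runtime. Only the model_id line is what the commit changes; the imports and the surrounding boilerplate below are assumed from the rest of app.py and are not part of this diff, and any task, device, or dtype arguments the real file passes to pipeline() are omitted here.

    # Hedged sketch of the changed hunk in context (assumed surrounding code).
    import os

    import torch
    from transformers import pipeline

    DEFAULT_MAX_NEW_TOKENS = 1024
    MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))

    if torch.cuda.is_available():
        # Model id switched by this commit, replacing cyberagent/calm2-7b-chat.
        model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7"
        # With no explicit task string, transformers infers the task
        # (text-generation for a causal LM) from the model's config.
        my_pipeline = pipeline(
            model=model_id,
        )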