Akjava committed on
Commit adcc570 · 1 Parent(s): abed9dd
Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -11,7 +11,8 @@ if not huggingface_token:
     print("no HUGGINGFACE_TOKEN if you need set secret ")
     #raise ValueError("HUGGINGFACE_TOKEN environment variable is not set")
 
-model_id = "Qwen/Qwen1.5-0.5B-Chat"
+#model_id = "Qwen/Qwen1.5-0.5B-Chat"
+model_id = "Kendamarron/Tokara-0.5B-Chat-v0.1"
 
 device = "auto" # torch.device("cuda" if torch.cuda.is_available() else "cpu")
 dtype = torch.bfloat16
@@ -79,7 +80,7 @@ def generate_text(messages):
 
 
     text_generator = pipeline("text-generation", model=model, tokenizer=tokenizer,torch_dtype=dtype,device_map=device) #pipeline has not to(device)
-    result = text_generator(messages, max_new_tokens=256, do_sample=True, temperature=0.7)
+    result = text_generator(messages, max_new_tokens=256, do_sample=True, temperature=0.7,repetition_penalty=1.1,top_p=0.95,top_k=40)
 
     generated_output = result[0]["generated_text"]
     if isinstance(generated_output, list):
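
For reference, a minimal standalone sketch of the generation path after this commit. The diff only touches model_id and the text_generator call; the model/tokenizer loading step and the example messages below are assumptions about the surrounding app.py, which is not shown here.

# Sketch of app.py's generation path after this commit (loading code is assumed, not taken from the diff).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_id = "Kendamarron/Tokara-0.5B-Chat-v0.1"  # model swapped in by this commit
device = "auto"
dtype = torch.bfloat16

# Assumed loading step; the diff does not show how model and tokenizer are created.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=dtype, device_map=device)

text_generator = pipeline("text-generation", model=model, tokenizer=tokenizer,
                          torch_dtype=dtype, device_map=device)

# Hypothetical chat-style input (chat messages require a recent transformers release).
messages = [{"role": "user", "content": "Hello, who are you?"}]

# The commit adds repetition_penalty, top_p and top_k to the sampling call.
result = text_generator(messages, max_new_tokens=256, do_sample=True, temperature=0.7,
                        repetition_penalty=1.1, top_p=0.95, top_k=40)
print(result[0]["generated_text"])

Adding repetition_penalty with top_p/top_k narrows the sampling distribution, a common way to curb the repetitive output small chat models tend to produce.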