randomUser69696 committed on
Commit 8969ebe · 1 Parent(s): 12cbab2

fixed the input

Files changed (1)
  1. app.py +4 -4
app.py CHANGED

@@ -117,16 +117,16 @@ login(token=os.getenv("hf_token") )
 model_id = os.getenv("MODEL_ID", "google/gemma-3-12b-it")
 processor = AutoProcessor.from_pretrained(model_id, padding_side="left")
 model = Gemma3ForConditionalGeneration.from_pretrained(
-    model_id, device_map="auto", torch_dtype=torch.bfloat16, attn_implementation="eager"
+    model_id, device_map="auto", torch_dtype=torch.bfloat16, attn_implementation="eager",cache_dir = "F:\\huggingface_cache"
 )
 
 def run_fn(message):
-    messages = []
+    messages_list = []
 
-    messages.append({"role": "user", "content": (message)})
+    messages_list.append({"role": "user", "content": message})
 
     inputs = processor.apply_chat_template(
-        messages,
+        messages_list,
         add_generation_prompt=True,
         tokenize=True,
         return_dict=True,
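For context, here is a minimal sketch of how the revised run_fn could look end to end. The hunk above cuts off after return_dict=True, so everything past that point (the return_tensors="pt" argument, moving the batch to model.device, the model.generate call with max_new_tokens, and decoding only the newly generated tokens) is assumed for illustration and is not necessarily what app.py actually contains.

```python
import os

import torch
from huggingface_hub import login
from transformers import AutoProcessor, Gemma3ForConditionalGeneration

# Preamble as shown in the diff context above.
login(token=os.getenv("hf_token"))

model_id = os.getenv("MODEL_ID", "google/gemma-3-12b-it")
processor = AutoProcessor.from_pretrained(model_id, padding_side="left")
model = Gemma3ForConditionalGeneration.from_pretrained(
    model_id,
    device_map="auto",
    torch_dtype=torch.bfloat16,
    attn_implementation="eager",
    cache_dir="F:\\huggingface_cache",  # added by this commit
)


def run_fn(message):
    # The "fixed input": wrap the raw user string in a single chat message.
    messages_list = [{"role": "user", "content": message}]

    inputs = processor.apply_chat_template(
        messages_list,
        add_generation_prompt=True,
        tokenize=True,
        return_dict=True,
        return_tensors="pt",  # assumption: the diff is truncated before this point
    ).to(model.device)

    # Assumed continuation: generate a reply and decode only the newly generated tokens.
    with torch.inference_mode():
        output_ids = model.generate(**inputs, max_new_tokens=256)
    new_tokens = output_ids[:, inputs["input_ids"].shape[-1]:]
    return processor.batch_decode(new_tokens, skip_special_tokens=True)[0]
```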