eleftherias committed on
Commit
05947c2
·
1 Parent(s): b7378ca

Fix chat template validation

Browse files
backend/app/services/models.py CHANGED
@@ -545,17 +545,6 @@ class ModelService(HuggingFaceService):
545
  raise Exception(error)
546
  logger.info(LogFormatter.success("Model on hub validation passed"))
547
 
548
- # Validate that the model has a chat template
549
- has_chat_template = hasattr(model_info, "chat_template") and model_info.chat_template is not None
550
- if not has_chat_template:
551
- logger.error(
552
- LogFormatter.error(
553
- "Model does not have a chat template"
554
- )
555
- )
556
- raise Exception(
557
- "Model does not have a chat template. Chat templates are required to accurately evaluate responses")
558
-
559
  # Validate model card
560
  valid, error, model_card = await self.validator.check_model_card(
561
  model_data["model_id"]
@@ -585,17 +574,15 @@ class ModelService(HuggingFaceService):
585
  logger.error(LogFormatter.error("Size limit exceeded", error_msg))
586
  raise Exception(error_msg)
587
 
588
- # Chat template validation if requested
589
- if model_data["use_chat_template"]:
590
- valid, error = await self.validator.check_chat_template(
591
- model_data["model_id"], model_data["revision"]
 
 
592
  )
593
- if not valid:
594
- logger.error(
595
- LogFormatter.error("Chat template validation failed", error)
596
- )
597
- raise Exception(error)
598
- logger.info(LogFormatter.success("Chat template validation passed"))
599
 
600
  architectures = model_info.config.get("architectures", "")
601
  if architectures:
@@ -615,7 +602,6 @@ class ModelService(HuggingFaceService):
615
  # "model_type": model_data["model_type"],
616
  "job_id": -1,
617
  "job_start_time": None,
618
- # "use_chat_template": model_data["use_chat_template"],
619
  "sender": user_id,
620
  }
621
 
 
545
  raise Exception(error)
546
  logger.info(LogFormatter.success("Model on hub validation passed"))
547
 
 
 
 
 
 
 
 
 
 
 
 
548
  # Validate model card
549
  valid, error, model_card = await self.validator.check_model_card(
550
  model_data["model_id"]
 
574
  logger.error(LogFormatter.error("Size limit exceeded", error_msg))
575
  raise Exception(error_msg)
576
 
577
+ valid, error = await self.validator.check_chat_template(
578
+ model_data["model_id"], model_data["revision"]
579
+ )
580
+ if not valid:
581
+ logger.error(
582
+ LogFormatter.error("Chat template validation failed", error)
583
  )
584
+ raise Exception(error)
585
+ logger.info(LogFormatter.success("Chat template validation passed"))
 
 
 
 
586
 
587
  architectures = model_info.config.get("architectures", "")
588
  if architectures:
 
602
  # "model_type": model_data["model_type"],
603
  "job_id": -1,
604
  "job_start_time": None,
 
605
  "sender": user_id,
606
  }
607
 
backend/app/utils/model_validation.py CHANGED
@@ -218,7 +218,7 @@ class ModelValidator:
218
  tokenizer_config = json.load(f)
219
 
220
  if "chat_template" not in tokenizer_config:
221
- error_msg = f"The model {model_id} doesn't have a chat_template in its tokenizer_config.json. Please add a chat_template before submitting or submit without it."
222
  logger.error(LogFormatter.error(error_msg))
223
  return False, error_msg
224
 
 
218
  tokenizer_config = json.load(f)
219
 
220
  if "chat_template" not in tokenizer_config:
221
+ error_msg = f"The model {model_id} doesn't have a chat_template in its tokenizer_config.json. Chat templates are required to accurately evaluate responses."
222
  logger.error(LogFormatter.error(error_msg))
223
  return False, error_msg
224