Only enable fp16 on z image models that actually support it. (#12065)

This commit is contained in:
comfyanonymous
2026-01-24 19:32:28 -08:00
committed by GitHub
parent ed6002cb60
commit 635406e283
3 changed files with 6 additions and 1 deletions

View File

@@ -1093,7 +1093,7 @@ class ZImage(Lumina2):
     def __init__(self, unet_config):
         super().__init__(unet_config)
-        if comfy.model_management.extended_fp16_support():
+        if comfy.model_management.extended_fp16_support() and unet_config.get("allow_fp16", False):
             self.supported_inference_dtypes = self.supported_inference_dtypes.copy()
             self.supported_inference_dtypes.insert(1, torch.float16)