Add Flux fp16 support hack.
@@ -642,7 +642,7 @@ class Flux(supported_models_base.BASE):
 
     memory_usage_factor = 2.8
 
-    supported_inference_dtypes = [torch.bfloat16, torch.float32]
+    supported_inference_dtypes = [torch.bfloat16, torch.float16, torch.float32]
 
     vae_key_prefix = ["vae."]
     text_encoder_key_prefix = ["text_encoders."]
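For context, a minimal sketch of how a supported_inference_dtypes list like the one patched above is typically consumed: walk the list in priority order and take the first dtype the current device can actually run. pick_inference_dtype is a hypothetical helper written for illustration, not ComfyUI's actual loader code.

import torch

def pick_inference_dtype(supported_dtypes, device):
    """Hypothetical helper: return the first dtype in the priority
    list that is usable on `device`."""
    for dtype in supported_dtypes:
        if dtype == torch.bfloat16:
            # bf16 needs hardware support (e.g. Ampere-class GPUs).
            if device.type == "cuda" and torch.cuda.is_bf16_supported():
                return dtype
        elif dtype == torch.float16:
            # Assume fp16 works on any CUDA device for this sketch.
            if device.type == "cuda":
                return dtype
        else:
            # torch.float32 works everywhere.
            return dtype
    return torch.float32

# With torch.float16 added to the list, a GPU without bf16 support
# now lands on fp16 instead of falling all the way back to fp32.
device = torch.device("cuda", 0) if torch.cuda.is_available() else torch.device("cpu")
supported = [torch.bfloat16, torch.float16, torch.float32]
print(pick_inference_dtype(supported, device))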