Add a --bf16-unet flag to test running the unet in bf16.
@@ -449,6 +449,8 @@ def unet_inital_load_device(parameters, dtype):
     return cpu_dev

 def unet_dtype(device=None, model_params=0):
+    if args.bf16_unet:
+        return torch.bfloat16
     if should_use_fp16(device=device, model_params=model_params):
         return torch.float16
     return torch.float32
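For context, here is a minimal, self-contained sketch of how such a flag could be wired up and consumed. The argparse setup, the should_use_fp16 stub, and the load-time usage comment are illustrative assumptions, not part of this commit; only the unet_dtype logic mirrors the diff above.

    import argparse
    import torch

    # Hypothetical CLI wiring (assumption): the project parses --bf16-unet
    # into args.bf16_unet somewhere in its argument handling.
    parser = argparse.ArgumentParser()
    parser.add_argument("--bf16-unet", action="store_true",
                        help="Run the unet weights in bfloat16.")
    args = parser.parse_args([])  # empty list so the sketch runs without CLI input

    def should_use_fp16(device=None, model_params=0):
        # Placeholder heuristic (assumption): the real check would consider
        # the device's fp16 support and the model size.
        return torch.cuda.is_available()

    def unet_dtype(device=None, model_params=0):
        # Same precedence as the diff: --bf16-unet wins, then the fp16
        # heuristic, otherwise fall back to full precision.
        if args.bf16_unet:
            return torch.bfloat16
        if should_use_fp16(device=device, model_params=model_params):
            return torch.float16
        return torch.float32

    # Hypothetical usage: cast the unet to the selected dtype before inference.
    # unet = unet.to(dtype=unet_dtype(device=device, model_params=num_params))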