Fix alphas cumprod

Kohaku-Blueleaf 2023-10-25 12:54:28 +08:00
parent 4830b25136
commit bf5067f50c
2 changed files with 3 additions and 2 deletions

@@ -396,6 +396,8 @@ def load_model_weights(model, checkpoint_info: CheckpointInfo, state_dict, timer
         enable_fp8 = True
     elif model.is_sdxl and shared.cmd_opts.opt_unet_fp8_storage_xl:
         enable_fp8 = True
+    else:
+        enable_fp8 = False
 
     if enable_fp8:
         devices.fp8 = True
@@ -416,7 +418,6 @@ def load_model_weights(model, checkpoint_info: CheckpointInfo, state_dict, timer
                 module.to(torch.float8_e4m3fn)
         model.model.diffusion_model = model.model.diffusion_model.to(torch.float8_e4m3fn)
         timer.record("apply fp8 unet")
-        model.alphas_cumprod = model.alphas_cumprod.to(torch.float32)
 
     devices.unet_needs_upcast = shared.cmd_opts.upcast_sampling and devices.dtype == torch.float16 and devices.dtype_unet == torch.float16
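
Note (not part of the commit): the deleted line above was upcasting model.alphas_cumprod back to float32 after the fp8 pass; with the change in the second file below, the buffer is created as float32 in the first place, so the post-hoc cast becomes redundant, and the new else branch simply guarantees enable_fp8 is assigned on every path before it is checked. A minimal, self-contained sketch of the selective cast idea (toy module names, assumes PyTorch >= 2.1 with the float8_e4m3fn dtype; not webui code):

import torch

# Toy stand-in for the conditioner / cond_stage_model.
cond_stage = torch.nn.Sequential(
    torch.nn.Linear(8, 8),
    torch.nn.LayerNorm(8),
)

enable_fp8 = True  # would come from the opt_unet_fp8_storage* flags above

if enable_fp8:
    # Cast only Linear weights to fp8 storage; other modules keep their dtype.
    for module in cond_stage.modules():
        if isinstance(module, torch.nn.Linear):
            module.to(torch.float8_e4m3fn)

# Schedule buffers such as alphas_cumprod stay in float32.
alphas_cumprod = torch.cumprod(1.0 - torch.linspace(1e-4, 2e-2, 1000), dim=0)
print(cond_stage[0].weight.dtype, cond_stage[1].weight.dtype, alphas_cumprod.dtype)
# -> torch.float8_e4m3fn torch.float32 torch.float32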

@@ -93,7 +93,7 @@ def extend_sdxl(model)
     model.parameterization = "v" if isinstance(model.denoiser.scaling, sgm.modules.diffusionmodules.denoiser_scaling.VScaling) else "eps"
     discretization = sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization()
-    model.alphas_cumprod = torch.asarray(discretization.alphas_cumprod, device=devices.device, dtype=dtype)
+    model.alphas_cumprod = torch.asarray(discretization.alphas_cumprod, device=devices.device, dtype=torch.float32)
     model.conditioner.wrapped = torch.nn.Module()
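
For context on why alphas_cumprod is pinned to float32 rather than the checkpoint dtype: the cumulative product gets very small at late timesteps, and the sigmas derived from it lose precision if the buffer is round-tripped through float16. A rough standalone sketch (the scaled-linear beta constants are the common Stable Diffusion defaults, assumed here for illustration rather than taken from this diff):

import torch

# Assumed scaled-linear schedule constants (illustration only).
betas = torch.linspace(0.00085 ** 0.5, 0.0120 ** 0.5, 1000, dtype=torch.float64) ** 2
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)

ac32 = alphas_cumprod.to(torch.float32)
ac16 = alphas_cumprod.to(torch.float16)

# k-diffusion style sigmas: sqrt((1 - ac) / ac).
sigmas32 = ((1 - ac32) / ac32).sqrt()
sigmas16 = ((1 - ac16.float()) / ac16.float()).sqrt()

# The float16 round-trip shifts the largest sigmas by a small but measurable amount.
print(sigmas32[-1].item(), (sigmas32 - sigmas16).abs().max().item())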