2022-09-03 09:08:45 +00:00
import torch
2022-09-28 07:49:07 +00:00
import inspect
2022-09-03 09:08:45 +00:00
import k_diffusion . sampling
2023-08-10 14:04:59 +00:00
from modules import sd_samplers_common , sd_samplers_extra , sd_samplers_cfg_denoiser
2023-08-10 14:21:01 +00:00
from modules . sd_samplers_cfg_denoiser import CFGDenoiser # noqa: F401
2023-08-17 01:45:19 +00:00
from modules . script_callbacks import ExtraNoiseParams , extra_noise_callback
2022-09-03 09:08:45 +00:00
2023-08-08 16:20:11 +00:00
from modules . shared import opts
2022-09-03 09:08:45 +00:00
import modules . shared as shared
2022-09-03 14:21:15 +00:00
# Catalogue of k-diffusion samplers exposed in the UI.
# Each entry is (label, funcname, aliases, options):
#   label    - name shown in the UI
#   funcname - name of the function in k_diffusion.sampling, or a callable
#   aliases  - unique identifiers used for API/infotext lookup
#   options  - sampler configuration read via self.config.options elsewhere in
#              this file ('scheduler', 'discard_next_to_last_sigma',
#              'second_order', 'brownian_noise', 'solver_type', 'uses_ensd')
samplers_k_diffusion = [
    ('DPM++ 2M Karras', 'sample_dpmpp_2m', ['k_dpmpp_2m_ka'], {'scheduler': 'karras'}),
    ('DPM++ SDE Karras', 'sample_dpmpp_sde', ['k_dpmpp_sde_ka'], {'scheduler': 'karras', "second_order": True, "brownian_noise": True}),
    ('DPM++ 2M SDE Exponential', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_exp'], {'scheduler': 'exponential', "brownian_noise": True}),
    ('DPM++ 2M SDE Karras', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_ka'], {'scheduler': 'karras', "brownian_noise": True}),
    ('Euler a', 'sample_euler_ancestral', ['k_euler_a', 'k_euler_ancestral'], {"uses_ensd": True}),
    ('Euler', 'sample_euler', ['k_euler'], {}),
    ('LMS', 'sample_lms', ['k_lms'], {}),
    ('Heun', 'sample_heun', ['k_heun'], {"second_order": True}),
    ('DPM2', 'sample_dpm_2', ['k_dpm_2'], {'discard_next_to_last_sigma': True, "second_order": True}),
    ('DPM2 a', 'sample_dpm_2_ancestral', ['k_dpm_2_a'], {'discard_next_to_last_sigma': True, "uses_ensd": True, "second_order": True}),
    ('DPM++ 2S a', 'sample_dpmpp_2s_ancestral', ['k_dpmpp_2s_a'], {"uses_ensd": True, "second_order": True}),
    ('DPM++ 2M', 'sample_dpmpp_2m', ['k_dpmpp_2m'], {}),
    ('DPM++ SDE', 'sample_dpmpp_sde', ['k_dpmpp_sde'], {"second_order": True, "brownian_noise": True}),
    # FIX: this entry previously reused alias 'k_dpmpp_2m_sde_ka', which already
    # belongs to 'DPM++ 2M SDE Karras' above; duplicate aliases make alias-based
    # lookup ambiguous, so the plain (non-Karras) variant gets its own alias.
    ('DPM++ 2M SDE', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde'], {"brownian_noise": True}),
    ('DPM++ 2M SDE Heun', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_heun'], {"brownian_noise": True, "solver_type": "heun"}),
    ('DPM++ 2M SDE Heun Karras', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_heun_ka'], {'scheduler': 'karras', "brownian_noise": True, "solver_type": "heun"}),
    ('DPM++ 2M SDE Heun Exponential', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_heun_exp'], {'scheduler': 'exponential', "brownian_noise": True, "solver_type": "heun"}),
    ('DPM++ 3M SDE', 'sample_dpmpp_3m_sde', ['k_dpmpp_3m_sde'], {'discard_next_to_last_sigma': True, "brownian_noise": True}),
    ('DPM++ 3M SDE Karras', 'sample_dpmpp_3m_sde', ['k_dpmpp_3m_sde_ka'], {'scheduler': 'karras', 'discard_next_to_last_sigma': True, "brownian_noise": True}),
    ('DPM++ 3M SDE Exponential', 'sample_dpmpp_3m_sde', ['k_dpmpp_3m_sde_exp'], {'scheduler': 'exponential', 'discard_next_to_last_sigma': True, "brownian_noise": True}),
    ('DPM fast', 'sample_dpm_fast', ['k_dpm_fast'], {"uses_ensd": True}),
    ('DPM adaptive', 'sample_dpm_adaptive', ['k_dpm_ad'], {"uses_ensd": True}),
    ('LMS Karras', 'sample_lms', ['k_lms_ka'], {'scheduler': 'karras'}),
    ('DPM2 Karras', 'sample_dpm_2', ['k_dpm_2_ka'], {'scheduler': 'karras', 'discard_next_to_last_sigma': True, "uses_ensd": True, "second_order": True}),
    ('DPM2 a Karras', 'sample_dpm_2_ancestral', ['k_dpm_2_a_ka'], {'scheduler': 'karras', 'discard_next_to_last_sigma': True, "uses_ensd": True, "second_order": True}),
    ('DPM++ 2S a Karras', 'sample_dpmpp_2s_ancestral', ['k_dpmpp_2s_a_ka'], {'scheduler': 'karras', "uses_ensd": True, "second_order": True}),
    ('Restart', sd_samplers_extra.restart_sampler, ['restart'], {'scheduler': 'karras', "second_order": True}),
]
2023-07-18 04:32:01 +00:00
2022-09-03 14:21:15 +00:00
# Wrap each catalogue entry in a SamplerData record, skipping samplers that the
# installed k-diffusion version does not provide.
samplers_data_k_diffusion = []
for label, funcname, aliases, options in samplers_k_diffusion:
    if callable(funcname) or hasattr(k_diffusion.sampling, funcname):
        # funcname is bound as a default argument so each constructor captures
        # its own sampler rather than the last loop value (late-binding pitfall)
        constructor = lambda model, funcname=funcname: KDiffusionSampler(funcname, model)
        samplers_data_k_diffusion.append(sd_samplers_common.SamplerData(label, constructor, aliases, options))
2022-09-26 08:56:47 +00:00
sampler_extra_params = {
2022-09-28 07:49:07 +00:00
' sample_euler ' : [ ' s_churn ' , ' s_tmin ' , ' s_tmax ' , ' s_noise ' ] ,
' sample_heun ' : [ ' s_churn ' , ' s_tmin ' , ' s_tmax ' , ' s_noise ' ] ,
' sample_dpm_2 ' : [ ' s_churn ' , ' s_tmin ' , ' s_tmax ' , ' s_noise ' ] ,
2023-08-13 12:22:24 +00:00
' sample_dpm_fast ' : [ ' s_noise ' ] ,
' sample_dpm_2_ancestral ' : [ ' s_noise ' ] ,
' sample_dpmpp_2s_ancestral ' : [ ' s_noise ' ] ,
' sample_dpmpp_sde ' : [ ' s_noise ' ] ,
' sample_dpmpp_2m_sde ' : [ ' s_noise ' ] ,
' sample_dpmpp_3m_sde ' : [ ' s_noise ' ] ,
2022-09-26 08:56:47 +00:00
}
2022-09-03 09:08:45 +00:00
2023-05-22 15:26:28 +00:00
# Index the SamplerData records by their UI label for O(1) lookup.
k_diffusion_samplers_map = dict((sampler.name, sampler) for sampler in samplers_data_k_diffusion)
2023-05-22 15:02:05 +00:00
# Maps the opts.k_sched_type setting to the sigma-schedule function it selects;
# 'Automatic' means "no override" and is handled specially in get_sigmas.
k_diffusion_scheduler = dict(
    Automatic=None,
    karras=k_diffusion.sampling.get_sigmas_karras,
    exponential=k_diffusion.sampling.get_sigmas_exponential,
    polyexponential=k_diffusion.sampling.get_sigmas_polyexponential,
)
2022-10-22 17:48:13 +00:00
2023-08-08 19:09:40 +00:00
class CFGDenoiserKDiffusion(sd_samplers_cfg_denoiser.CFGDenoiser):
    """CFG denoiser backed by a k-diffusion CompVis wrapper."""

    @property
    def inner_model(self):
        """Lazily build and cache the k-diffusion denoiser around the loaded model.

        Picks CompVisVDenoiser for v-prediction models and CompVisDenoiser
        otherwise, then stores it in self.model_wrap so construction happens
        only once.
        """
        if self.model_wrap is not None:
            return self.model_wrap

        if shared.sd_model.parameterization == "v":
            denoiser_cls = k_diffusion.external.CompVisVDenoiser
        else:
            denoiser_cls = k_diffusion.external.CompVisDenoiser

        self.model_wrap = denoiser_cls(shared.sd_model, quantize=shared.opts.enable_quantization)
        return self.model_wrap
2023-08-08 16:20:11 +00:00
class KDiffusionSampler(sd_samplers_common.Sampler):
    """Sampler backed by a function from k_diffusion.sampling.

    Translates webui processing objects (p) into the keyword arguments each
    k-diffusion sampler function accepts, builds the sigma schedule, and runs
    sampling through the CFG denoiser wrapper.
    """

    def __init__(self, funcname, sd_model, options=None):
        # funcname: name of a function in k_diffusion.sampling, or a callable
        #   (e.g. sd_samplers_extra.restart_sampler) used directly.
        # sd_model: not referenced in this body; presumably kept so all sampler
        #   constructors share the same signature — TODO confirm against callers.
        # options: optional dict stored as self.options.
        super().__init__(funcname)

        # per-sampler opts forwarded to the sampler function ('s_churn', etc.)
        self.extra_params = sampler_extra_params.get(funcname, [])

        self.options = options or {}
        self.func = funcname if callable(funcname) else getattr(k_diffusion.sampling, self.funcname)

        self.model_wrap_cfg = CFGDenoiserKDiffusion(self)
        self.model_wrap = self.model_wrap_cfg.inner_model

    def get_sigmas(self, p, steps):
        """Build the noise (sigma) schedule for `steps` sampling steps.

        Priority order: p.sampler_noise_scheduler_override, then the global
        opts.k_sched_type override, then this sampler's configured scheduler
        ('karras'/'exponential'), else the model's default schedule.
        Records any overrides in p.extra_generation_params for infotext.
        """
        # config may ask to drop the next-to-last sigma; the global option can
        # force this on for all samplers (and notes it in generation params)
        discard_next_to_last_sigma = self.config is not None and self.config.options.get('discard_next_to_last_sigma', False)
        if opts.always_discard_next_to_last_sigma and not discard_next_to_last_sigma:
            discard_next_to_last_sigma = True
            p.extra_generation_params["Discard penultimate sigma"] = True

        # one extra step so that dropping the penultimate sigma below still
        # yields the requested number of steps
        steps += 1 if discard_next_to_last_sigma else 0

        if p.sampler_noise_scheduler_override:
            sigmas = p.sampler_noise_scheduler_override(steps)
        elif opts.k_sched_type != "Automatic":
            # global schedule-type override from settings
            m_sigma_min, m_sigma_max = (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
            # old-style fixed (0.1, 10) range kept for reproducibility of old seeds
            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (m_sigma_min, m_sigma_max)
            sigmas_kwargs = {
                'sigma_min': sigma_min,
                'sigma_max': sigma_max,
            }

            sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
            p.extra_generation_params["Schedule type"] = opts.k_sched_type

            # user-set min/max only apply when nonzero and actually different
            # from the model's own range (0 means "use default")
            if opts.sigma_min != m_sigma_min and opts.sigma_min != 0:
                sigmas_kwargs['sigma_min'] = opts.sigma_min
                p.extra_generation_params["Schedule min sigma"] = opts.sigma_min
            if opts.sigma_max != m_sigma_max and opts.sigma_max != 0:
                sigmas_kwargs['sigma_max'] = opts.sigma_max
                p.extra_generation_params["Schedule max sigma"] = opts.sigma_max

            # rho defaults differ per schedule; exponential takes no rho at all
            default_rho = 1. if opts.k_sched_type == "polyexponential" else 7.

            if opts.k_sched_type != 'exponential' and opts.rho != 0 and opts.rho != default_rho:
                sigmas_kwargs['rho'] = opts.rho
                p.extra_generation_params["Schedule rho"] = opts.rho

            sigmas = sigmas_func(n=steps, **sigmas_kwargs, device=shared.device)
        elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())

            sigmas = k_diffusion.sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, device=shared.device)
        elif self.config is not None and self.config.options.get('scheduler', None) == 'exponential':
            m_sigma_min, m_sigma_max = (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
            sigmas = k_diffusion.sampling.get_sigmas_exponential(n=steps, sigma_min=m_sigma_min, sigma_max=m_sigma_max, device=shared.device)
        else:
            sigmas = self.model_wrap.get_sigmas(steps)

        if discard_next_to_last_sigma:
            # drop the penultimate sigma, keeping the final (zero) one
            sigmas = torch.cat([sigmas[:-2], sigmas[-1:]])

        return sigmas

    def sample_img2img(self, p, x, noise, conditioning, unconditional_conditioning, steps=None, image_conditioning=None):
        """Run img2img sampling: noise the init latent x and denoise for t_enc steps.

        Returns the sampled latent batch from the k-diffusion sampler function.
        """
        steps, t_enc = sd_samplers_common.setup_img2img_steps(p, steps)

        sigmas = self.get_sigmas(p, steps)
        # only the tail of the schedule is used: start partway in, per denoising strength
        sigma_sched = sigmas[steps - t_enc - 1:]

        xi = x + noise * sigma_sched[0]

        if opts.img2img_extra_noise > 0:
            p.extra_generation_params["Extra noise"] = opts.img2img_extra_noise
            # let extensions replace/modify the extra noise before it is applied
            extra_noise_params = ExtraNoiseParams(noise, x, xi)
            extra_noise_callback(extra_noise_params)
            noise = extra_noise_params.noise
            xi += noise * opts.img2img_extra_noise

        extra_params_kwargs = self.initialize(p)
        # forward only the kwargs this particular sampler function accepts
        parameters = inspect.signature(self.func).parameters

        if 'sigma_min' in parameters:
            ## last sigma is zero which isn't allowed by DPM Fast & Adaptive so taking value before last
            extra_params_kwargs['sigma_min'] = sigma_sched[-2]
        if 'sigma_max' in parameters:
            extra_params_kwargs['sigma_max'] = sigma_sched[0]
        if 'n' in parameters:
            extra_params_kwargs['n'] = len(sigma_sched) - 1
        if 'sigma_sched' in parameters:
            extra_params_kwargs['sigma_sched'] = sigma_sched
        if 'sigmas' in parameters:
            extra_params_kwargs['sigmas'] = sigma_sched

        if self.config.options.get('brownian_noise', False):
            # SDE samplers want a (seeded) brownian-tree noise sampler
            noise_sampler = self.create_noise_sampler(x, sigmas, p)
            extra_params_kwargs['noise_sampler'] = noise_sampler

        if self.config.options.get('solver_type', None) == 'heun':
            extra_params_kwargs['solver_type'] = 'heun'

        self.model_wrap_cfg.init_latent = x
        self.last_latent = x
        # kept on self so interrupted/resumed sampling can reuse the same args
        self.sampler_extra_args = {
            'cond': conditioning,
            'image_cond': image_conditioning,
            'uncond': unconditional_conditioning,
            'cond_scale': p.cfg_scale,
            's_min_uncond': self.s_min_uncond
        }

        samples = self.launch_sampling(t_enc + 1, lambda: self.func(self.model_wrap_cfg, xi, extra_args=self.sampler_extra_args, disable=False, callback=self.callback_state, **extra_params_kwargs))

        if self.model_wrap_cfg.padded_cond_uncond:
            # record that cond/uncond were padded to equal length, for infotext
            p.extra_generation_params["Pad conds"] = True

        return samples

    def sample(self, p, x, conditioning, unconditional_conditioning, steps=None, image_conditioning=None):
        """Run txt2img sampling from pure noise x over the full sigma schedule.

        Returns the sampled latent batch from the k-diffusion sampler function.
        """
        steps = steps or p.steps

        sigmas = self.get_sigmas(p, steps)

        if opts.sgm_noise_multiplier:
            # SGM-style initial scaling (sqrt(1 + sigma^2)) instead of plain sigma
            p.extra_generation_params["SGM noise multiplier"] = True
            x = x * torch.sqrt(1.0 + sigmas[0] ** 2.0)
        else:
            x = x * sigmas[0]

        extra_params_kwargs = self.initialize(p)
        # forward only the kwargs this particular sampler function accepts
        parameters = inspect.signature(self.func).parameters

        if 'n' in parameters:
            extra_params_kwargs['n'] = steps

        if 'sigma_min' in parameters:
            extra_params_kwargs['sigma_min'] = self.model_wrap.sigmas[0].item()
            extra_params_kwargs['sigma_max'] = self.model_wrap.sigmas[-1].item()

        if 'sigmas' in parameters:
            extra_params_kwargs['sigmas'] = sigmas

        if self.config.options.get('brownian_noise', False):
            # SDE samplers want a (seeded) brownian-tree noise sampler
            noise_sampler = self.create_noise_sampler(x, sigmas, p)
            extra_params_kwargs['noise_sampler'] = noise_sampler

        if self.config.options.get('solver_type', None) == 'heun':
            extra_params_kwargs['solver_type'] = 'heun'

        self.last_latent = x
        # kept on self so interrupted/resumed sampling can reuse the same args
        self.sampler_extra_args = {
            'cond': conditioning,
            'image_cond': image_conditioning,
            'uncond': unconditional_conditioning,
            'cond_scale': p.cfg_scale,
            's_min_uncond': self.s_min_uncond
        }

        samples = self.launch_sampling(steps, lambda: self.func(self.model_wrap_cfg, x, extra_args=self.sampler_extra_args, disable=False, callback=self.callback_state, **extra_params_kwargs))

        if self.model_wrap_cfg.padded_cond_uncond:
            # record that cond/uncond were padded to equal length, for infotext
            p.extra_generation_params["Pad conds"] = True

        return samples
2022-09-03 09:08:45 +00:00
2023-08-08 16:20:11 +00:00