@@ -217,6 +217,8 @@ def __init__(
         rescale_betas_zero_snr: bool = False,
         use_dynamic_shifting: bool = False,
         time_shift_type: Literal["exponential"] = "exponential",
+        sigma_min: Optional[float] = None,
+        sigma_max: Optional[float] = None,
     ) -> None:
         if self.config.use_beta_sigmas and not is_scipy_available():
             raise ImportError("Make sure to install scipy if you want to use beta sigmas.")
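The two new config options land at the end of the `__init__` signature and default to `None`, so existing configs are unaffected. A hypothetical usage sketch follows; the scheduler class name is an assumption (the diff only shows the signature, which matches diffusers' multistep-solver family), and the fallback-to-trained-range semantics for `None` are inferred from the defaults:

```python
# Hypothetical usage of the new config knobs. The class name is an
# assumption; the diff only shows the __init__ signature.
from diffusers import DPMSolverMultistepScheduler

scheduler = DPMSolverMultistepScheduler(
    use_karras_sigmas=True,
    sigma_min=0.002,  # new: explicit lower bound for the sigma schedule
    sigma_max=80.0,   # new: explicit upper bound for the sigma schedule
)
```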
@@ -350,7 +352,11 @@ def set_timesteps(
             log_sigmas = np.log(sigmas)
             sigmas = np.flip(sigmas).copy()
             sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=num_inference_steps)
-            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas]).round()
+            if self.config.use_flow_sigmas:
+                timesteps = (sigmas * self.config.num_train_timesteps).copy()
+            else:
+                timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas]).round()
+
             if self.config.final_sigmas_type == "sigma_min":
                 sigma_last = sigmas[-1]
             elif self.config.final_sigmas_type == "zero":
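The branch exists because flow-matching schedules define `sigma(t) = t / num_train_timesteps`, so the timestep for a given sigma is recovered by a plain rescale; the log-sigma interpolation in `_sigma_to_t` only applies to the variance-preserving schedules. A minimal sketch of the flow branch, assuming that linear relation:

```python
# Minimal sketch of the flow branch, assuming sigma(t) = t / N for
# flow-matching schedules, so inverting sigma -> t is a plain rescale.
import numpy as np

num_train_timesteps = 1000  # N
flow_sigmas = np.array([0.98, 0.75, 0.50, 0.25, 0.02])

timesteps = flow_sigmas * num_train_timesteps
print(timesteps)  # [980. 750. 500. 250.  20.]
# The non-flow path instead interpolates log-sigmas via _sigma_to_t().
```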
@@ -572,6 +578,8 @@ def _convert_to_karras(self, in_sigmas: torch.Tensor, num_inference_steps: int)
         min_inv_rho = sigma_min ** (1 / rho)
         max_inv_rho = sigma_max ** (1 / rho)
         sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
+        if self.config.use_flow_sigmas:
+            sigmas = sigmas / (sigmas + 1)
         return sigmas

     # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._convert_to_exponential
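Together with the `set_timesteps` change above, this makes the Karras ramp usable for flow models: the unbounded Karras sigmas are mapped into the flow-matching range [0, 1) via `sigma / (sigma + 1)`. A self-contained sketch of the resulting schedule, assuming the default `rho = 7.0` from Karras et al. (2022); the standalone function and its parameter names are illustrative, not the scheduler's API:

```python
# Sketch of the Karras sigma schedule with the new flow-sigmas conversion.
# `sigma_min`/`sigma_max` stand in for the bounds the scheduler derives
# (or, with the new config options, receives explicitly).
import numpy as np

def karras_sigmas(sigma_min: float, sigma_max: float, num_inference_steps: int,
                  rho: float = 7.0, use_flow_sigmas: bool = False) -> np.ndarray:
    # Interpolate linearly in sigma^(1/rho) space, then raise back to rho.
    ramp = np.linspace(0, 1, num_inference_steps)
    min_inv_rho = sigma_min ** (1 / rho)
    max_inv_rho = sigma_max ** (1 / rho)
    sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
    if use_flow_sigmas:
        # Map unbounded sigmas into the flow-matching range [0, 1).
        sigmas = sigmas / (sigmas + 1)
    return sigmas

print(karras_sigmas(0.002, 80.0, 5, use_flow_sigmas=True))
```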