@@ -196,6 +196,7 @@ class VanillaStableDiffusionSampler:
         x1 = self.sampler.stochastic_encode(x, torch.tensor([t_enc] * int(x.shape[0])).to(shared.device), noise=noise)
 
         self.init_latent = x
+        self.last_latent = x
         self.step = 0
 
         samples = self.launch_sampling(steps, lambda: self.sampler.decode(x1, conditioning, t_enc, unconditional_guidance_scale=p.cfg_scale, unconditional_conditioning=unconditional_conditioning))
@@ -206,6 +207,7 @@ class VanillaStableDiffusionSampler:
         self.initialize(p)
 
         self.init_latent = None
+        self.last_latent = x
         self.step = 0
 
         steps = steps or p.steps
@@ -388,6 +390,7 @@ class KDiffusionSampler:
             extra_params_kwargs['sigmas'] = sigma_sched
 
         self.model_wrap_cfg.init_latent = x
+        self.last_latent = x
 
         samples = self.launch_sampling(steps, lambda: self.func(self.model_wrap_cfg, xi, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state, **extra_params_kwargs))
 
@@ -414,6 +417,7 @@ class KDiffusionSampler:
         else:
             extra_params_kwargs['sigmas'] = sigmas
 
+        self.last_latent = x
         samples = self.launch_sampling(steps, lambda: self.func(self.model_wrap_cfg, x, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state, **extra_params_kwargs))
 
         return samples
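
Note: each hunk seeds self.last_latent with the initial latent x right before launch_sampling is called. A rough sketch of the pattern this presumably supports follows, where the sampling wrapper can hand back the most recent latent if the run is cut short; the InterruptedException name and the internals of launch_sampling shown here are illustrative assumptions and are not part of this diff.

# Illustrative sketch only; not taken from the diff above. It shows why
# last_latent is initialized to x: if sampling is aborted partway through,
# the wrapper can still return the most recently stored latent instead of
# failing with no result at all.

class InterruptedException(BaseException):
    """Assumed signal raised from a step callback when the user interrupts sampling."""
    pass


class SamplerSketch:
    def __init__(self):
        self.last_latent = None

    def launch_sampling(self, steps, func):
        try:
            return func()
        except InterruptedException:
            # last_latent was seeded with the initial latent x before sampling
            # started, and would be refreshed by step callbacks as denoising runs.
            return self.last_latent

Without the seeding lines added in this diff, last_latent would presumably be unset, or stale from a previous run, if an interrupt arrived before the first step callback fired.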