import torch

from diffusers import DDPMScheduler

from .test_schedulers import SchedulerCommonTest


class DDPMSchedulerTest(SchedulerCommonTest):
    scheduler_classes = (DDPMScheduler,)

    def get_scheduler_config(self, **kwargs):
        config = {
            "num_train_timesteps": 1000,
            "beta_start": 0.0001,
            "beta_end": 0.02,
            "beta_schedule": "linear",
            "variance_type": "fixed_small",
            "clip_sample": True,
        }

        config.update(**kwargs)
        return config

    def test_timesteps(self):
        for timesteps in [1, 5, 100, 1000]:
            self.check_over_configs(num_train_timesteps=timesteps)

    def test_betas(self):
        for beta_start, beta_end in zip([0.0001, 0.001, 0.01, 0.1], [0.002, 0.02, 0.2, 2]):
            self.check_over_configs(beta_start=beta_start, beta_end=beta_end)

    def test_schedules(self):
        for schedule in ["linear", "squaredcos_cap_v2"]:
            self.check_over_configs(beta_schedule=schedule)

    def test_variance_type(self):
        for variance in ["fixed_small", "fixed_large", "other"]:
            self.check_over_configs(variance_type=variance)

    def test_clip_sample(self):
        for clip_sample in [True, False]:
            self.check_over_configs(clip_sample=clip_sample)

    def test_thresholding(self):
        self.check_over_configs(thresholding=False)
        for threshold in [0.5, 1.0, 2.0]:
            for prediction_type in ["epsilon", "sample", "v_prediction"]:
                self.check_over_configs(
                    thresholding=True,
                    prediction_type=prediction_type,
                    sample_max_value=threshold,
                )

    def test_prediction_type(self):
        for prediction_type in ["epsilon", "sample", "v_prediction"]:
            self.check_over_configs(prediction_type=prediction_type)

    def test_time_indices(self):
        for t in [0, 500, 999]:
            self.check_over_forward(time_step=t)

    def test_variance(self):
        scheduler_class = self.scheduler_classes[0]
        scheduler_config = self.get_scheduler_config()
        scheduler = scheduler_class(**scheduler_config)
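
        # expected variances for the default config (linear betas, "fixed_small" variance)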
        assert torch.sum(torch.abs(scheduler._get_variance(0) - 0.0)) < 1e-5
        assert torch.sum(torch.abs(scheduler._get_variance(487) - 0.00979)) < 1e-5
        assert torch.sum(torch.abs(scheduler._get_variance(999) - 0.02)) < 1e-5

    def test_full_loop_no_noise(self):
        scheduler_class = self.scheduler_classes[0]
        scheduler_config = self.get_scheduler_config()
        scheduler = scheduler_class(**scheduler_config)

        num_trained_timesteps = len(scheduler)

        model = self.dummy_model()
        sample = self.dummy_sample_deter
        generator = torch.manual_seed(0)

        for t in reversed(range(num_trained_timesteps)):
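            # 1. predict the noise residual for the current timestep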
            residual = model(sample, t)
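
            # 2. predict the previous sample x_{t-1} from the model output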
            pred_prev_sample = scheduler.step(residual, t, sample, generator=generator).prev_sample
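
            # the predicted previous sample becomes the input for the next step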
            sample = pred_prev_sample

        result_sum = torch.sum(torch.abs(sample))
        result_mean = torch.mean(torch.abs(sample))

        assert abs(result_sum.item() - 258.9606) < 1e-2
        assert abs(result_mean.item() - 0.3372) < 1e-3

    def test_full_loop_with_v_prediction(self):
        scheduler_class = self.scheduler_classes[0]
        scheduler_config = self.get_scheduler_config(prediction_type="v_prediction")
        scheduler = scheduler_class(**scheduler_config)

        num_trained_timesteps = len(scheduler)

        model = self.dummy_model()
        sample = self.dummy_sample_deter
        generator = torch.manual_seed(0)

        for t in reversed(range(num_trained_timesteps)):
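            # 1. predict the noise residual for the current timestep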
            residual = model(sample, t)
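
            # 2. predict the previous sample x_{t-1} from the model output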
            pred_prev_sample = scheduler.step(residual, t, sample, generator=generator).prev_sample
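
            # the predicted previous sample becomes the input for the next step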
            sample = pred_prev_sample

        result_sum = torch.sum(torch.abs(sample))
        result_mean = torch.mean(torch.abs(sample))

        assert abs(result_sum.item() - 202.0296) < 1e-2
        assert abs(result_mean.item() - 0.2631) < 1e-3