From f91f6bd1ef954eef1f8fadea995b42e3db1a39a3 Mon Sep 17 00:00:00 2001 From: Sayak Paul <spsayakpaul@gmail.com> Date: Tue, 4 Apr 2023 09:06:38 +0530 Subject: [PATCH 1/4] fix: norm group test for UNet3D. --- tests/models/test_models_unet_3d_condition.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/models/test_models_unet_3d_condition.py b/tests/models/test_models_unet_3d_condition.py index 729367a0c164..5a0d74a3ea5a 100644 --- a/tests/models/test_models_unet_3d_condition.py +++ b/tests/models/test_models_unet_3d_condition.py @@ -119,12 +119,11 @@ def test_xformers_enable_works(self): == "XFormersAttnProcessor" ), "xformers is not enabled" - # Overriding because `block_out_channels` needs to be different for this model. + # Overriding because `norm_num_groups` needs to be different for this model. def test_forward_with_norm_groups(self): init_dict, inputs_dict = self.prepare_init_args_and_inputs_for_common() init_dict["norm_num_groups"] = 32 - init_dict["block_out_channels"] = (32, 64, 64, 64) model = self.model_class(**init_dict) model.to(torch_device) From 72b52e2f81c7b902d6cf9263685aed6c13e20b90 Mon Sep 17 00:00:00 2001 From: Sayak Paul <spsayakpaul@gmail.com> Date: Wed, 12 Apr 2023 15:37:16 +0530 Subject: [PATCH 2/4] chore: speed up the panorama tests (fast). 
--- .../test_stable_diffusion_panorama.py | 18 +++++++++++++----- tests/test_pipelines_common.py | 13 +++++++------ 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/tests/pipelines/stable_diffusion/test_stable_diffusion_panorama.py b/tests/pipelines/stable_diffusion/test_stable_diffusion_panorama.py index de9e8a79fb34..752ed6e969c3 100644 --- a/tests/pipelines/stable_diffusion/test_stable_diffusion_panorama.py +++ b/tests/pipelines/stable_diffusion/test_stable_diffusion_panorama.py @@ -49,7 +49,7 @@ def get_dummy_components(self): torch.manual_seed(0) unet = UNet2DConditionModel( block_out_channels=(32, 64), - layers_per_block=2, + layers_per_block=1, sample_size=32, in_channels=4, out_channels=4, @@ -101,7 +101,7 @@ def get_dummy_inputs(self, device, seed=0): # Setting height and width to None to prevent OOMs on CPU. "height": None, "width": None, - "num_inference_steps": 2, + "num_inference_steps": 1, "guidance_scale": 6.0, "output_type": "numpy", } @@ -119,10 +119,18 @@ def test_stable_diffusion_panorama_default_case(self): image_slice = image[0, -3:, -3:, -1] assert image.shape == (1, 64, 64, 3) - expected_slice = np.array([0.4794, 0.5084, 0.4992, 0.3941, 0.3555, 0.4754, 0.5248, 0.5224, 0.4839]) + expected_slice = np.array([0.6186, 0.5374, 0.4915, 0.4135, 0.4114, 0.4563, 0.5128, 0.4977, 0.4757]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 + # override to speed the overall test timing up. + def test_inference_batch_consistent(self): + super().test_inference_batch_consistent(batch_sizes=[1, 2]) + + # override to speed the overall test timing up. 
+ def test_inference_batch_single_identical(self): + super().test_inference_batch_single_identical(batch_size=2) + def test_stable_diffusion_panorama_negative_prompt(self): device = "cpu" # ensure determinism for the device-dependent torch.Generator components = self.get_dummy_components() @@ -138,7 +146,7 @@ def test_stable_diffusion_panorama_negative_prompt(self): assert image.shape == (1, 64, 64, 3) - expected_slice = np.array([0.5029, 0.5075, 0.5002, 0.3965, 0.3584, 0.4746, 0.5271, 0.5273, 0.4877]) + expected_slice = np.array([0.6187, 0.5375, 0.4915, 0.4136, 0.4114, 0.4563, 0.5128, 0.4976, 0.4757]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 @@ -158,7 +166,7 @@ def test_stable_diffusion_panorama_euler(self): assert image.shape == (1, 64, 64, 3) - expected_slice = np.array([0.4934, 0.5455, 0.4847, 0.5022, 0.5572, 0.4833, 0.5207, 0.4952, 0.5051]) + expected_slice = np.array([0.4886, 0.5586, 0.4476, 0.5053, 0.6013, 0.4737, 0.5538, 0.5100, 0.4927]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 diff --git a/tests/test_pipelines_common.py b/tests/test_pipelines_common.py index 13fbe924c799..806a030d43e6 100644 --- a/tests/test_pipelines_common.py +++ b/tests/test_pipelines_common.py @@ -175,11 +175,11 @@ def test_pipeline_call_signature(self): f"Required optional parameters not present: {remaining_required_optional_parameters}", ) - def test_inference_batch_consistent(self): - self._test_inference_batch_consistent() + def test_inference_batch_consistent(self, batch_sizes=[2, 4, 13]): + self._test_inference_batch_consistent(batch_sizes=batch_sizes) def _test_inference_batch_consistent( - self, batch_sizes=[2, 4, 13], additional_params_copy_to_batched_inputs=["num_inference_steps"] + self, batch_sizes, additional_params_copy_to_batched_inputs=["num_inference_steps"] ): components = self.get_dummy_components() pipe = self.pipeline_class(**components) @@ -235,11 +235,12 @@ def _test_inference_batch_consistent( 
logger.setLevel(level=diffusers.logging.WARNING) - def test_inference_batch_single_identical(self): - self._test_inference_batch_single_identical() + def test_inference_batch_single_identical(self, batch_size=3): + self._test_inference_batch_single_identical(batch_size=batch_size) def _test_inference_batch_single_identical( self, + batch_size, test_max_difference=None, test_mean_pixel_difference=None, relax_max_difference=False, @@ -267,7 +268,7 @@ def _test_inference_batch_single_identical( # batchify inputs batched_inputs = {} - batch_size = 3 + batch_size = batch_size for name, value in inputs.items(): if name in self.batch_params: # prompt is string From c005927e02b896c6794c9876b96ea4d3e9b73206 Mon Sep 17 00:00:00 2001 From: Sayak Paul <spsayakpaul@gmail.com> Date: Wed, 12 Apr 2023 16:49:02 +0530 Subject: [PATCH 3/4] set default value of _test_inference_batch_single_identical. --- tests/test_pipelines_common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_pipelines_common.py b/tests/test_pipelines_common.py index 806a030d43e6..0cd159f455f1 100644 --- a/tests/test_pipelines_common.py +++ b/tests/test_pipelines_common.py @@ -240,7 +240,7 @@ def test_inference_batch_single_identical(self, batch_size=3): def _test_inference_batch_single_identical( self, - batch_size, + batch_size=3, test_max_difference=None, test_mean_pixel_difference=None, relax_max_difference=False, From 80e33b39179f3144e3f8e4bb07d4f93b99a552a7 Mon Sep 17 00:00:00 2001 From: Sayak Paul <spsayakpaul@gmail.com> Date: Wed, 12 Apr 2023 19:42:35 +0530 Subject: [PATCH 4/4] fix: batch_sizes default value. 
--- tests/test_pipelines_common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_pipelines_common.py b/tests/test_pipelines_common.py index 0cd159f455f1..981bc9061ef9 100644 --- a/tests/test_pipelines_common.py +++ b/tests/test_pipelines_common.py @@ -179,7 +179,7 @@ def test_inference_batch_consistent(self, batch_sizes=[2, 4, 13]): self._test_inference_batch_consistent(batch_sizes=batch_sizes) def _test_inference_batch_consistent( - self, batch_sizes, additional_params_copy_to_batched_inputs=["num_inference_steps"] + self, batch_sizes=[2, 4, 13], additional_params_copy_to_batched_inputs=["num_inference_steps"] ): components = self.get_dummy_components() pipe = self.pipeline_class(**components)