1 parent 723e8f6 commit 08cc36d
tests/test_pipelines_common.py
@@ -233,11 +233,11 @@ def test_inference_batch_single_identical(self):
 
         output = pipe(**inputs)
 
+        logger.setLevel(level=diffusers.logging.WARNING)
         if torch_device != "mps":
             # TODO(Pedro) - not sure why, but not at all reproducible at the moment it seems
             # make sure that batched and non-batched is identical
             assert np.abs(output_batch[0][0] - output[0][0]).max() < 1e-4
-        logger.setLevel(level=diffusers.logging.WARNING)
 
     def test_dict_tuple_outputs_equivalent(self):
         if torch_device == "mps" and self.pipeline_class in (
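
Note on the change (an inference; the commit carries no description): with the original placement, logger.setLevel ran only after the assertion, so a failing assert would skip the reset and leave the "diffusers" logger at whatever level the test had set earlier, polluting subsequent tests. Moving the call above the if block restores the WARNING level unconditionally. A minimal sketch of the pattern, using the standard-library logging module in place of diffusers.logging and a hypothetical helper name:

import logging

import numpy as np

# Assumption: diffusers.logging wraps a standard logger named "diffusers".
logger = logging.getLogger("diffusers")


def check_batched_vs_single(output_batch, output, device):
    """Hypothetical helper mirroring the tail of test_inference_batch_single_identical."""
    # Reset the level *before* the assertion: if the assert fails, nothing
    # after it runs, so a reset placed below it would be skipped.
    logger.setLevel(logging.WARNING)
    if device != "mps":
        # batched and non-batched outputs should agree to within 1e-4
        assert np.abs(output_batch[0][0] - output[0][0]).max() < 1e-4

Wrapping the level change in try/finally would give the same guarantee even if the reset line were reordered again later, at the cost of slightly more ceremony in the test body.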