diff --git a/tests/pipelines/test_pipelines_common.py b/tests/pipelines/test_pipelines_common.py
index db8209835be4..d5adc374e73c 100644
--- a/tests/pipelines/test_pipelines_common.py
+++ b/tests/pipelines/test_pipelines_common.py
@@ -2124,6 +2124,10 @@ def test_encode_prompt_works_in_isolation(self, extra_required_param_value_dict=
                         f"encode_prompt has no default in either encode_prompt or __call__."
                     )
 
+        if "_cut_context" in encode_prompt_param_names and "_cut_context" not in encode_prompt_inputs:
+            # As in full_pipeline, `_cut_context` is set to True.
+            encode_prompt_inputs["_cut_context"] = True
+
         # Compute `encode_prompt()`.
         with torch.no_grad():
             encoded_prompt_outputs = pipe_with_just_text_encoder.encode_prompt(**encode_prompt_inputs)
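
For context, here is a minimal standalone sketch of the pattern the added lines follow: inspecting `encode_prompt`'s signature and pre-filling the private `_cut_context` flag before calling it in isolation, so the isolated call matches the default used by the full pipeline. `DummyPipeline` and its `encode_prompt` below are hypothetical stand-ins for illustration, not the diffusers API.

```python
import inspect


class DummyPipeline:
    # Hypothetical stand-in for a pipeline whose encode_prompt accepts a
    # private `_cut_context` flag (assumed to default to True in the full
    # pipeline call).
    def encode_prompt(self, prompt, _cut_context=False):
        return {"prompt": prompt, "cut_context": _cut_context}


pipe = DummyPipeline()
encode_prompt_inputs = {"prompt": "a photo of a cat"}

# Mirror the added test logic: if the signature exposes `_cut_context`
# and the caller did not supply it, set it to True explicitly.
encode_prompt_param_names = inspect.signature(pipe.encode_prompt).parameters
if "_cut_context" in encode_prompt_param_names and "_cut_context" not in encode_prompt_inputs:
    encode_prompt_inputs["_cut_context"] = True

print(pipe.encode_prompt(**encode_prompt_inputs))
# -> {'prompt': 'a photo of a cat', 'cut_context': True}
```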