diff --git a/examples/models/flamingo/preprocess/export_preprocess_lib.py b/examples/models/flamingo/preprocess/export_preprocess_lib.py
index 358b1f2149a..366f5989222 100644
--- a/examples/models/flamingo/preprocess/export_preprocess_lib.py
+++ b/examples/models/flamingo/preprocess/export_preprocess_lib.py
@@ -14,7 +14,7 @@
 from executorch.extension.llm.custom_ops import preprocess_custom_ops  # noqa
 
 from torch.export import Dim, ExportedProgram
-from torchtune.models.clip.inference._transforms import _CLIPImageTransform
+from torchtune.models.clip.inference._transform import _CLIPImageTransform
 
 
 def get_example_inputs() -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
diff --git a/examples/models/flamingo/preprocess/test_preprocess.py b/examples/models/flamingo/preprocess/test_preprocess.py
index 34ad0ab8ed1..b990f44ca1b 100644
--- a/examples/models/flamingo/preprocess/test_preprocess.py
+++ b/examples/models/flamingo/preprocess/test_preprocess.py
@@ -22,7 +22,7 @@
 from parameterized import parameterized
 
 from PIL import Image
-from torchtune.models.clip.inference._transforms import (
+from torchtune.models.clip.inference._transform import (
     _CLIPImageTransform,
     CLIPImageTransform,
 )