Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix the Anomaly Task to work & Add integration test for Anomaly #3007

Merged
merged 14 commits into from
Mar 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/pre_merge.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ jobs:
- task: "instance_segmentation"
- task: "semantic_segmentation"
- task: "visual_prompting"
- task: "anomaly"
name: Integration-Test-${{ matrix.task }}-py310
# This is what will cancel the job concurrency
concurrency:
Expand Down
13 changes: 12 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,18 @@ mmlab = [
"oss2==2.17.0",
]
anomaly = [
"anomalib==1.0.0",
# [FIXME] @ashwinvaidya17: Install using a temporary hot-fix commit due to a torchmetrics version conflict.
"anomalib @ git+https://github.com/openvinotoolkit/anomalib.git@e78091883a620229c277a79674a904d9f785f8d5",
# This is a dependency to avoid conflicts with installing the anomalib[core] option.
"av>=10.0.0",
"einops>=0.3.2",
"freia>=0.2",
"imgaug==0.4.0",
"kornia>=0.6.6,<0.6.10",
"matplotlib>=3.4.3",
"opencv-python>=4.5.3.56",
"pandas>=1.1.0",
"open-clip-torch>=2.23.0",
]

[project.scripts]
Expand Down
8 changes: 4 additions & 4 deletions src/otx/algo/anomaly/padim.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,26 +15,26 @@
"""OTX Padim model.

Args:
input_size (tuple[int, int], optional): Input size. Defaults to (256, 256).
backbone (str, optional): Feature extractor backbone. Defaults to "resnet18".
layers (list[str], optional): Feature extractor layers. Defaults to ["layer1", "layer2", "layer3"].
pre_trained (bool, optional): Pretrained backbone. Defaults to True.
n_features (int | None, optional): Number of features. Defaults to None.
num_classes (int, optional): Anomaly models don't use num_classes,
but OTXModel always receives num_classes, so this is needed.
"""

def __init__(
self,
input_size: tuple[int, int] = (256, 256),
backbone: str = "resnet18",
layers: list[str] = ["layer1", "layer2", "layer3"], # noqa: B006
pre_trained: bool = True,
n_features: int | None = None,
num_classes: int = 2,
) -> None:
OTXAnomaly.__init__(self)
OTXModel.__init__(self, num_classes=2)
OTXModel.__init__(self, num_classes=num_classes)
AnomalibPadim.__init__(

Check warning on line 36 in src/otx/algo/anomaly/padim.py

View check run for this annotation

Codecov / codecov/patch

src/otx/algo/anomaly/padim.py#L35-L36

Added lines #L35 - L36 were not covered by tests
self,
input_size=input_size,
backbone=backbone,
layers=layers,
pre_trained=pre_trained,
Expand Down
8 changes: 4 additions & 4 deletions src/otx/algo/anomaly/stfpm.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,21 +21,21 @@ class Stfpm(OTXAnomaly, OTXModel, AnomalibStfpm):

Args:
layers (Sequence[str]): Feature extractor layers.
input_size (tuple[int, int]): Input size.
backbone (str, optional): Feature extractor backbone. Defaults to "resnet18".
num_classes (int, optional): Anomaly models don't use num_classes,
but OTXModel always receives num_classes, so this is needed.
"""

def __init__(
self,
layers: Sequence[str] = ["layer1", "layer2", "layer3"],
input_size: tuple[int, int] = (256, 256),
backbone: str = "resnet18",
num_classes: int = 2,
) -> None:
OTXAnomaly.__init__(self)
OTXModel.__init__(self, num_classes=2)
OTXModel.__init__(self, num_classes=num_classes)
harimkang marked this conversation as resolved.
Show resolved Hide resolved
AnomalibStfpm.__init__(
self,
input_size=input_size,
backbone=backbone,
layers=layers,
)
Expand Down
2 changes: 1 addition & 1 deletion src/otx/cli/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -413,7 +413,7 @@ def instantiate_model(self, model_config: Namespace) -> tuple:
# Update num_classes
if not self.get_config_value(self.config_init, "disable_infer_num_classes", False):
num_classes = self.datamodule.label_info.num_classes
if num_classes != model_config.init_args.num_classes:
if hasattr(model_config.init_args, "num_classes") and num_classes != model_config.init_args.num_classes:
warning_msg = (
f"The `num_classes` in dataset is {num_classes} "
f"but, the `num_classes` of model is {model_config.init_args.num_classes}. "
Expand Down
2 changes: 1 addition & 1 deletion src/otx/core/data/entity/anomaly/classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
) -> AnomalyClassificationDataBatch:
"""Collection function to collect `OTXDataEntity` into `OTXBatchDataEntity` in data loader."""
batch = super().collate_fn(entities)
images = tv_tensors.Image(data=torch.stack(batch.images, dim=0)) if stack_images else batch.images
images = tv_tensors.Image(data=torch.stack(tuple(batch.images), dim=0)) if stack_images else batch.images

Check warning on line 56 in src/otx/core/data/entity/anomaly/classification.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/data/entity/anomaly/classification.py#L56

Added line #L56 was not covered by tests
return AnomalyClassificationDataBatch(
batch_size=batch.batch_size,
images=images,
Expand Down
2 changes: 1 addition & 1 deletion src/otx/core/data/entity/anomaly/detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@
) -> AnomalyDetectionDataBatch:
"""Collection function to collect `OTXDataEntity` into `OTXBatchDataEntity` in data loader."""
batch = super().collate_fn(entities)
images = tv_tensors.Image(data=torch.stack(batch.images, dim=0)) if stack_images else batch.images
images = tv_tensors.Image(data=torch.stack(tuple(batch.images), dim=0)) if stack_images else batch.images

Check warning on line 59 in src/otx/core/data/entity/anomaly/detection.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/data/entity/anomaly/detection.py#L59

Added line #L59 was not covered by tests
return AnomalyDetectionDataBatch(
batch_size=batch.batch_size,
images=images,
Expand Down
2 changes: 1 addition & 1 deletion src/otx/core/data/entity/anomaly/segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
) -> AnomalySegmentationDataBatch:
"""Collection function to collect `OTXDataEntity` into `OTXBatchDataEntity` in data loader."""
batch = super().collate_fn(entities)
images = tv_tensors.Image(data=torch.stack(batch.images, dim=0)) if stack_images else batch.images
images = tv_tensors.Image(data=torch.stack(tuple(batch.images), dim=0)) if stack_images else batch.images

Check warning on line 57 in src/otx/core/data/entity/anomaly/segmentation.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/data/entity/anomaly/segmentation.py#L57

Added line #L57 was not covered by tests
return AnomalySegmentationDataBatch(
batch_size=batch.batch_size,
images=images,
Expand Down
22 changes: 18 additions & 4 deletions src/otx/core/model/module/anomaly/anomaly_lightning.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@
def __init__(self) -> None:
self.optimizer: list[OptimizerCallable] | OptimizerCallable = None
self.scheduler: list[LRSchedulerCallable] | LRSchedulerCallable = None
self.input_size: list[int] = [256, 256]
self._input_size: tuple[int, int] = (256, 256)

Check warning on line 137 in src/otx/core/model/module/anomaly/anomaly_lightning.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/model/module/anomaly/anomaly_lightning.py#L137

Added line #L137 was not covered by tests
self.mean_values: tuple[float, float, float] = (0.0, 0.0, 0.0)
self.scale_values: tuple[float, float, float] = (1.0, 1.0, 1.0)
self.trainer: Trainer
Expand All @@ -147,6 +147,19 @@
self.image_metrics: AnomalibMetricCollection
self.pixel_metrics: AnomalibMetricCollection

@property
def input_size(self) -> tuple[int, int]:

Check warning on line 151 in src/otx/core/model/module/anomaly/anomaly_lightning.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/model/module/anomaly/anomaly_lightning.py#L150-L151

Added lines #L150 - L151 were not covered by tests
"""Returns the input size of the model.

Returns:
tuple[int, int]: The input size of the model as a tuple of (height, width).
"""
return self._input_size

Check warning on line 157 in src/otx/core/model/module/anomaly/anomaly_lightning.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/model/module/anomaly/anomaly_lightning.py#L157

Added line #L157 was not covered by tests

@input_size.setter
def input_size(self, value: tuple[int, int]) -> None:
self._input_size = value

Check warning on line 161 in src/otx/core/model/module/anomaly/anomaly_lightning.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/model/module/anomaly/anomaly_lightning.py#L159-L161

Added lines #L159 - L161 were not covered by tests

@property
def task(self) -> AnomalibTaskType:
"""Return the task type of the model."""
Expand Down Expand Up @@ -342,13 +355,13 @@
"""
state_dict = super().state_dict() # type: ignore[misc]
# This is defined in OTXModel
state_dict["meta_info"] = self.meta_info # type: ignore[attr-defined]
state_dict["label_info"] = self.label_info # type: ignore[attr-defined]

Check warning on line 358 in src/otx/core/model/module/anomaly/anomaly_lightning.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/model/module/anomaly/anomaly_lightning.py#L358

Added line #L358 was not covered by tests
return state_dict

def load_state_dict(self, ckpt: OrderedDict[str, Any], *args, **kwargs) -> None:
"""Pass the checkpoint to the anomaly model."""
ckpt = ckpt.get("state_dict", ckpt)
ckpt.pop("meta_info", None) # [TODO](ashwinvaidya17): Revisit this method when OTXModel is the lightning model
ckpt.pop("label_info", None) # [TODO](ashwinvaidya17): Revisit this method when OTXModel is the lightning model

Check warning on line 364 in src/otx/core/model/module/anomaly/anomaly_lightning.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/model/module/anomaly/anomaly_lightning.py#L364

Added line #L364 was not covered by tests
return super().load_state_dict(ckpt, *args, **kwargs) # type: ignore[misc]

def forward(
Expand Down Expand Up @@ -441,8 +454,9 @@
"""
min_val = self.normalization_metrics.state_dict()["min"].cpu().numpy().tolist()
max_val = self.normalization_metrics.state_dict()["max"].cpu().numpy().tolist()
image_shape = (256, 256) if self.input_size is None else self.input_size

Check warning on line 457 in src/otx/core/model/module/anomaly/anomaly_lightning.py

View check run for this annotation

Codecov / codecov/patch

src/otx/core/model/module/anomaly/anomaly_lightning.py#L457

Added line #L457 was not covered by tests
exporter = _AnomalyModelExporter(
image_shape=(self.input_size[0], self.input_size[1]),
image_shape=image_shape,
image_threshold=self.image_threshold.value.cpu().numpy().tolist(),
pixel_threshold=self.pixel_threshold.value.cpu().numpy().tolist(),
task=self.task,
Expand Down
7 changes: 4 additions & 3 deletions src/otx/engine/utils/auto_configurator.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,9 @@
OTXTaskType.INSTANCE_SEGMENTATION: RECIPE_PATH / "instance_segmentation" / "maskrcnn_r50.yaml",
OTXTaskType.ACTION_CLASSIFICATION: RECIPE_PATH / "action" / "action_classification" / "x3d.yaml",
OTXTaskType.ACTION_DETECTION: RECIPE_PATH / "action" / "action_detection" / "x3d_fastrcnn.yaml",
OTXTaskType.ANOMALY_CLASSIFICATION: RECIPE_PATH / "anomaly" / "anomaly_classification" / "padim.yaml",
OTXTaskType.ANOMALY_SEGMENTATION: RECIPE_PATH / "anomaly" / "anomaly_segmentation" / "padim.yaml",
OTXTaskType.ANOMALY_DETECTION: RECIPE_PATH / "anomaly" / "anomaly_detection" / "padim.yaml",
OTXTaskType.ANOMALY_CLASSIFICATION: RECIPE_PATH / "anomaly_classification" / "padim.yaml",
OTXTaskType.ANOMALY_SEGMENTATION: RECIPE_PATH / "anomaly_segmentation" / "padim.yaml",
OTXTaskType.ANOMALY_DETECTION: RECIPE_PATH / "anomaly_detection" / "padim.yaml",
OTXTaskType.VISUAL_PROMPTING: RECIPE_PATH / "visual_prompting" / "sam_tiny_vit.yaml",
OTXTaskType.ZERO_SHOT_VISUAL_PROMPTING: RECIPE_PATH / "zero_shot_visual_prompting" / "sam_tiny_vit.yaml",
}
Expand All @@ -67,6 +67,7 @@
"common_semantic_segmentation_with_subset_dirs": [OTXTaskType.SEMANTIC_SEGMENTATION],
"kinetics": [OTXTaskType.ACTION_CLASSIFICATION],
"ava": [OTXTaskType.ACTION_DETECTION],
"mvtec": [OTXTaskType.ANOMALY_CLASSIFICATION, OTXTaskType.ANOMALY_DETECTION, OTXTaskType.ANOMALY_SEGMENTATION],
}

OVMODEL_PER_TASK = {
Expand Down
10 changes: 6 additions & 4 deletions src/otx/recipe/anomaly_classification/padim.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
model:
class_path: otx.algo.anomaly.padim.Padim
init_args:
input_size:
- 256
- 256
layers: ["layer1", "layer2", "layer3"]
backbone: "resnet18"
pre_trained: True
Expand All @@ -15,10 +12,15 @@ engine:

callback_monitor: step # this has no effect as Padim does not need to be trained

data: ../../_base_/data/torchvision_base.yaml
data: ../_base_/data/torchvision_base.yaml
overrides:
precision: 32
max_epochs: 1
limit_val_batches: 0 # this is set to 0 as the default dataloader does not have validation set. But this also means that the model will not give correct performance numbers
callbacks:
- class_path: otx.algo.callbacks.adaptive_train_scheduling.AdaptiveTrainScheduling
init_args:
max_interval: 1
data:
task: ANOMALY_CLASSIFICATION
config:
Expand Down
8 changes: 4 additions & 4 deletions src/otx/recipe/anomaly_classification/stfpm.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
model:
class_path: otx.algo.anomaly.stfpm.Stfpm
init_args:
input_size:
- 256
- 256
layers: ["layer1", "layer2", "layer3"]
backbone: "resnet18"

Expand All @@ -21,14 +18,17 @@ engine:

callback_monitor: train_loss_epoch # val loss is not available as there is no validation set from default dataloader

data: ../../_base_/data/torchvision_base.yaml
data: ../_base_/data/torchvision_base.yaml
overrides:
max_epochs: 100
limit_val_batches: 0 # this is set to 0 as the default dataloader does not have validation set. But this also means that the model will not give correct performance numbers
callbacks:
- class_path: lightning.pytorch.callbacks.EarlyStopping
init_args:
patience: 5
- class_path: otx.algo.callbacks.adaptive_train_scheduling.AdaptiveTrainScheduling
init_args:
max_interval: 1
data:
task: ANOMALY_CLASSIFICATION
config:
Expand Down
10 changes: 6 additions & 4 deletions src/otx/recipe/anomaly_detection/padim.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
model:
class_path: otx.algo.anomaly.padim.Padim
init_args:
input_size:
- 256
- 256
layers: ["layer1", "layer2", "layer3"]
backbone: "resnet18"
pre_trained: True
Expand All @@ -15,10 +12,15 @@ engine:

callback_monitor: step # this has no effect as Padim does not need to be trained

data: ../../_base_/data/torchvision_base.yaml
data: ../_base_/data/torchvision_base.yaml
overrides:
precision: 32
max_epochs: 1
limit_val_batches: 0 # this is set to 0 as the default dataloader does not have validation set. But this also means that the model will not give correct performance numbers
callbacks:
- class_path: otx.algo.callbacks.adaptive_train_scheduling.AdaptiveTrainScheduling
init_args:
max_interval: 1
data:
task: ANOMALY_DETECTION
config:
Expand Down
8 changes: 4 additions & 4 deletions src/otx/recipe/anomaly_detection/stfpm.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
model:
class_path: otx.algo.anomaly.stfpm.Stfpm
init_args:
input_size:
- 256
- 256
layers: ["layer1", "layer2", "layer3"]
backbone: "resnet18"

Expand All @@ -21,14 +18,17 @@ engine:

callback_monitor: train_loss_epoch # val loss is not available as there is no validation set from default dataloader

data: ../../_base_/data/torchvision_base.yaml
data: ../_base_/data/torchvision_base.yaml
overrides:
max_epochs: 100
limit_val_batches: 0 # this is set to 0 as the default dataloader does not have validation set. But this also means that the model will not give correct performance numbers
callbacks:
- class_path: lightning.pytorch.callbacks.EarlyStopping
init_args:
patience: 5
- class_path: otx.algo.callbacks.adaptive_train_scheduling.AdaptiveTrainScheduling
init_args:
max_interval: 1
data:
task: ANOMALY_DETECTION
config:
Expand Down
10 changes: 6 additions & 4 deletions src/otx/recipe/anomaly_segmentation/padim.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
model:
class_path: otx.algo.anomaly.padim.Padim
init_args:
input_size:
- 256
- 256
layers: ["layer1", "layer2", "layer3"]
backbone: "resnet18"
pre_trained: True
Expand All @@ -15,10 +12,15 @@ engine:

callback_monitor: step # this has no effect as Padim does not need to be trained

data: ../../_base_/data/torchvision_base.yaml
data: ../_base_/data/torchvision_base.yaml
overrides:
precision: 32
max_epochs: 1
limit_val_batches: 0 # this is set to 0 as the default dataloader does not have validation set. But this also means that the model will not give correct performance numbers
callbacks:
- class_path: otx.algo.callbacks.adaptive_train_scheduling.AdaptiveTrainScheduling
init_args:
max_interval: 1
data:
task: ANOMALY_SEGMENTATION
config:
Expand Down
8 changes: 4 additions & 4 deletions src/otx/recipe/anomaly_segmentation/stfpm.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
model:
class_path: otx.algo.anomaly.stfpm.Stfpm
init_args:
input_size:
- 256
- 256
layers: ["layer1", "layer2", "layer3"]
backbone: "resnet18"

Expand All @@ -21,14 +18,17 @@ engine:

callback_monitor: train_loss_epoch # val loss is not available as there is no validation set from default dataloader

data: ../../_base_/data/torchvision_base.yaml
data: ../_base_/data/torchvision_base.yaml
overrides:
max_epochs: 100
limit_val_batches: 0 # this is set to 0 as the default dataloader does not have validation set. But this also means that the model will not give correct performance numbers
callbacks:
- class_path: lightning.pytorch.callbacks.EarlyStopping
init_args:
patience: 5
- class_path: otx.algo.callbacks.adaptive_train_scheduling.AdaptiveTrainScheduling
init_args:
max_interval: 1
data:
task: ANOMALY_SEGMENTATION
config:
Expand Down
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 2 additions & 0 deletions tests/integration/api/test_auto_configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ def test_auto_configuration(
pytest.skip(
reason="H-labels require num_multiclass_head, num_multilabel_classes, which skip until we have the ability to automate this.",
)
if task.lower().startswith("anomaly"):
pytest.skip(reason="This will be added in a future pipeline behavior.")

tmp_path_train = tmp_path / f"auto_train_{task}"
data_root = fxt_target_dataset_per_task[task.lower()]
Expand Down
5 changes: 3 additions & 2 deletions tests/integration/api/test_engine_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,6 @@ def test_engine_from_config(
pytest.skip(
reason="H-labels require num_multiclass_head, num_multilabel_classes, which skip until we have the ability to automate this.",
)
if "anomaly" in task.lower():
pytest.skip(reason="There's no dataset for anomaly tasks.")

tmp_path_train = tmp_path / task
engine = Engine.from_config(
Expand Down Expand Up @@ -70,6 +68,9 @@ def test_engine_from_config(
OTXTaskType.ACTION_DETECTION,
OTXTaskType.H_LABEL_CLS,
OTXTaskType.ROTATED_DETECTION,
OTXTaskType.ANOMALY_CLASSIFICATION,
OTXTaskType.ANOMALY_DETECTION,
OTXTaskType.ANOMALY_SEGMENTATION,
]:
return

Expand Down
Loading
Loading