diff --git a/backends/arm/operator_support/ethos_u55_support.py b/backends/arm/operator_support/ethos_u55_support.py
index 2e9bd846045..225efeab01f 100644
--- a/backends/arm/operator_support/ethos_u55_support.py
+++ b/backends/arm/operator_support/ethos_u55_support.py
@@ -128,7 +128,7 @@ class EthosU55NotSupported(OperatorSupportBase):
         exir_ops.edge.aten.bitwise_and.Scalar,
         exir_ops.edge.aten.bitwise_or.Scalar,
         exir_ops.edge.aten.bitwise_xor.Scalar,
-        exir_ops.edge.aten.bitwise_not,
+        exir_ops.edge.aten.bitwise_not.default,
         exir_ops.edge.aten.logical_and.default,
         exir_ops.edge.aten.logical_or.default,
         exir_ops.edge.aten.logical_xor.default,
diff --git a/backends/arm/operator_support/tosa_profile_supported_op_lists.py b/backends/arm/operator_support/tosa_profile_supported_op_lists.py
index 9820fbd05d5..60d2c47ce72 100644
--- a/backends/arm/operator_support/tosa_profile_supported_op_lists.py
+++ b/backends/arm/operator_support/tosa_profile_supported_op_lists.py
@@ -136,6 +136,7 @@
     exir_ops.edge.aten.logit.default,
     exir_ops.edge.aten.acos.default,
     exir_ops.edge.aten.elu.default,
+    exir_ops.edge.aten.bitwise_not.default,
 }


diff --git a/backends/arm/operators/__init__.py b/backends/arm/operators/__init__.py
index f7a9638254e..e2bda4b7641 100644
--- a/backends/arm/operators/__init__.py
+++ b/backends/arm/operators/__init__.py
@@ -13,6 +13,7 @@
     op_amin,
     op_any,
     op_avg_pool2d,
+    op_bitwise_not,
    op_bmm,
     op_cat,
     op_ceil,
diff --git a/backends/arm/operators/op_bitwise_not.py b/backends/arm/operators/op_bitwise_not.py
new file mode 100644
index 00000000000..908cf68e9b2
--- /dev/null
+++ b/backends/arm/operators/op_bitwise_not.py
@@ -0,0 +1,59 @@
+# Copyright 2025 Arm Limited and/or its affiliates.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+from typing import Any, List
+
+from executorch.backends.arm.operators.node_visitor import (
+    NodeVisitor,
+    register_node_visitor,
+)
+from executorch.backends.arm.operators.operator_validation_utils import (
+    validate_num_inputs,
+    validate_same_dtype,
+    validate_valid_dtype,
+)
+from executorch.backends.arm.tosa.mapping import TosaArg
+from executorch.backends.arm.tosa.specification import TosaSpecification
+from torch.fx import Node
+
+
+@register_node_visitor
+class BitwiseNotVisitor(NodeVisitor):
+    target = "aten.bitwise_not.default"
+
+    # bitwise_not is not supported on the FP profile
+    tosa_specs = [
+        TosaSpecification.create_from_string("TOSA-1.0+INT"),
+    ]
+
+    def __init__(self, *args):
+        super().__init__(*args)
+
+    def define_node(
+        self,
+        node: Node,
+        tosa_graph: Any,
+        inputs: List[TosaArg],
+        output: TosaArg,
+    ) -> None:
+
+        import serializer.tosa_serializer as ts  # type: ignore
+
+        validate_num_inputs(self.target, inputs, 1)
+        validate_same_dtype(self.target, [*inputs, output], ts)
+        validate_valid_dtype(
+            self.target,
+            [*inputs, output],
+            [ts.DType.INT8, ts.DType.INT16, ts.DType.INT32],
+            output.tosa_spec,
+        )
+
+        self._serialize_operator(
+            node,
+            tosa_graph,
+            ts.TosaOp.Op().BITWISE_NOT,
+            [inputs[0].name],
+            [output.name],
+        )
diff --git a/backends/arm/test/ops/test_bitwise_not.py b/backends/arm/test/ops/test_bitwise_not.py
new file mode 100644
index 00000000000..4f48bc134ba
--- /dev/null
+++ b/backends/arm/test/ops/test_bitwise_not.py
@@ -0,0 +1,120 @@
+# Copyright 2025 Arm Limited and/or its affiliates.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+from typing import Tuple
+
+import torch
+
+from executorch.backends.arm.test import common
+from executorch.backends.arm.test.tester.test_pipeline import (
+    EthosU85PipelineINT,
+    OpNotSupportedPipeline,
+    TosaPipelineINT,
+    VgfPipeline,
+)
+
+aten_op = "torch.ops.aten.bitwise_not.default"
+exir_op = "executorch_exir_dialects_edge__ops_aten_bitwise_not_default"
+
+input_t1 = Tuple[torch.Tensor]
+
+test_data_suite = {
+    "zeros": torch.zeros(1, 10, 10, 10, dtype=torch.int32),
+    "ones": torch.ones(10, 2, 3, dtype=torch.int8),
+    "pattern1_int8": 0xAA * torch.ones(1, 2, 2, 2, dtype=torch.int8),
+    "pattern1_int16": 0xAAAA * torch.ones(1, 2, 2, 2, dtype=torch.int16),
+    "pattern1_int32": 0xAAAAAAAA * torch.ones(1, 2, 2, 2, dtype=torch.int32),
+    "pattern2_int8": 0xCC * torch.ones(1, 2, 2, 2, dtype=torch.int8),
+    "pattern2_int16": 0xCCCC * torch.ones(1, 2, 2, 2, dtype=torch.int16),
+    "pattern2_int32": 0xCCCCCCCC * torch.ones(1, 2, 2, 2, dtype=torch.int32),
+    "rand_rank2": torch.randint(-128, 127, (10, 10), dtype=torch.int8),
+    "rand_rank4": torch.randint(-128, 127, (1, 10, 10, 10), dtype=torch.int8),
+}
+
+
+class BitwiseNot(torch.nn.Module):
+
+    def forward(self, x: torch.Tensor):
+        return torch.bitwise_not(x)
+
+
+@common.parametrize("test_data", test_data_suite)
+def test_bitwise_not_tosa_FP(test_data: Tuple):
+    # We don't delegate bitwise_not since it is not supported on the FP profile.
+    pipeline = OpNotSupportedPipeline[input_t1](
+        BitwiseNot(),
+        (test_data,),
+        {exir_op: 1},
+        quantize=False,
+    )
+    pipeline.run()
+
+
+@common.parametrize("test_data", test_data_suite)
+def test_bitwise_not_tosa_INT(test_data: Tuple):
+    pipeline = TosaPipelineINT[input_t1](
+        BitwiseNot(),
+        (test_data,),
+        aten_op=aten_op,
+        exir_op=exir_op,
+    )
+    pipeline.pop_stage("quantize")
+    pipeline.pop_stage("check.quant_nodes")
+    pipeline.run()
+
+
+@common.parametrize("test_data", test_data_suite)
+def test_bitwise_not_u55_INT(test_data: Tuple):
+    # We don't delegate bitwise_not since it is not supported on U55.
+    pipeline = OpNotSupportedPipeline[input_t1](
+        BitwiseNot(),
+        (test_data,),
+        {exir_op: 1},
+        quantize=True,
+        u55_subset=True,
+    )
+    pipeline.run()
+
+
+@common.XfailIfNoCorstone320
+@common.parametrize("test_data", test_data_suite)
+def test_bitwise_not_u85_INT(test_data: Tuple):
+    pipeline = EthosU85PipelineINT[input_t1](
+        BitwiseNot(),
+        (test_data,),
+        aten_ops=aten_op,
+        exir_ops=exir_op,
+    )
+    pipeline.pop_stage("quantize")
+    pipeline.pop_stage("check.quant_nodes")
+    pipeline.run()
+
+
+@common.parametrize("test_data", test_data_suite)
+@common.SkipIfNoModelConverter
+def test_bitwise_not_vgf_FP(test_data: Tuple):
+    # We don't delegate bitwise_not since it is not supported on the FP profile.
+    pipeline = OpNotSupportedPipeline[input_t1](
+        BitwiseNot(),
+        (test_data,),
+        {exir_op: 1},
+        quantize=False,
+    )
+    pipeline.run()
+
+
+@common.parametrize("test_data", test_data_suite)
+@common.SkipIfNoModelConverter
+def test_bitwise_not_vgf_INT(test_data: Tuple):
+    pipeline = VgfPipeline[input_t1](
+        BitwiseNot(),
+        (test_data,),
+        aten_op,
+        exir_op,
+        tosa_version="TOSA-1.0+INT",
+    )
+    pipeline.pop_stage("quantize")
+    pipeline.pop_stage("check.quant_nodes")
+    pipeline.run()