diff --git a/coremltools/converters/mil/frontend/torch/ops.py b/coremltools/converters/mil/frontend/torch/ops.py
index e15adc3be..ce819418f 100644
--- a/coremltools/converters/mil/frontend/torch/ops.py
+++ b/coremltools/converters/mil/frontend/torch/ops.py
@@ -4925,7 +4925,10 @@ def reciprocal(context, node):
 @register_torch_op
 def log(context, node):
     inputs = _get_inputs(context, node, expected=1)
-    context.add(mb.log(x=inputs[0], name=node.name))
+    x = inputs[0]
+    if types.is_int(x.dtype):
+        x = mb.cast(x=x, dtype="fp32")
+    context.add(mb.log(x=x, name=node.name))
 
 
 @register_torch_op(torch_alias=["round"])
diff --git a/coremltools/converters/mil/frontend/torch/test/test_torch_ops.py b/coremltools/converters/mil/frontend/torch/test/test_torch_ops.py
index 1deb09a1b..56d9f0cf4 100644
--- a/coremltools/converters/mil/frontend/torch/test/test_torch_ops.py
+++ b/coremltools/converters/mil/frontend/torch/test/test_torch_ops.py
@@ -5438,6 +5438,33 @@ def test_elementwise_numerically_stable(
             rand_range=(20, 100),
         )
 
+    @pytest.mark.parametrize(
+        "compute_unit, backend, dtype",
+        itertools.product(
+            compute_units,
+            backends,
+            [np.int32, np.float32],
+        ),
+    )
+    def test_log_dtype(
+        self, compute_unit, backend, dtype
+    ):
+        SHAPE = (2, 3)
+
+        input_data = np.random.randint(1, 100, SHAPE).astype(dtype)
+        input_data = torch.from_numpy(input_data)
+        model = ModuleWrapper(torch.log)
+        converter_input_type = [TensorType(shape=SHAPE, dtype=dtype)]
+
+        self.run_compare_torch(
+            input_data,
+            model,
+            backend=backend,
+            compute_unit=compute_unit,
+            input_as_shape=False,
+            converter_input_type=converter_input_type
+        )
+
 
 class TestAtan2(TorchBaseTest):
     @pytest.mark.parametrize(