14 changes: 9 additions & 5 deletions torch_xla/csrc/aten_xla_type.cpp
@@ -688,11 +688,15 @@ at::Tensor XLANativeFunctions::atan2(const at::Tensor& self,
     return at::native::call_fallback_fn<&xla_cpu_fallback,
                                         ATEN_OP(atan2)>::call(self, other);
   }
-  return DoBinaryOp(self, other,
-                    [&](const XLATensorPtr& xself, const XLATensorPtr& xother,
-                        at::ScalarType dtype) {
-                      return XLATensor::atan2(xself, xother, dtype);
-                    });
+
+  auto common_device = torch_xla::bridge::GetXlaDevice(self, other);
+  XLA_CHECK(common_device);
+  torch::lazy::NodePtr node =
+      torch::lazy::MakeNode<Atan2>(bridge::GetXlaTensor(self)->GetIrValue(),
+                                   bridge::GetXlaTensor(other)->GetIrValue());
+
+  return torch_xla::bridge::AtenFromXlaTensor(
+      torch_xla::XLATensor::Create(std::move(node), *common_device));
 }

 at::Tensor XLANativeFunctions::avg_pool2d(
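This call-site change is the heart of the PR: instead of routing through DoBinaryOp and the hand-written XLATensor::atan2 tensor method, the ATen entry point now builds the codegen'd Atan2 IR node directly and wraps it back into an ATen tensor. For orientation, here is a trimmed sketch of the node class the IR codegen emits once atan2 is listed under ir_gen; the real class lives in the build's generated LazyIr.h, and the constructor shown is an approximation, not the exact generated code:

class Atan2 : public XlaNode {
 public:
  Atan2(const torch::lazy::Value& self, const torch::lazy::Value& other)
      // The shape function defined in ops_xla_shape_fn.cpp is wired in
      // here, so node creation and shape inference stay in sync.
      : XlaNode(torch::lazy::OpKind(at::aten::atan2), {self, other},
                [&]() { return Atan2OutputShape(self, other); },
                /*num_outputs=*/1) {}

  // Hand-written in ops_lower_fn.cpp (see the diff below).
  torch_xla::XlaOpVector Lower(LoweringContext* loctx) const override;
};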
1 change: 0 additions & 1 deletion torch_xla/csrc/ops/ops.cpp
@@ -77,7 +77,6 @@ PTXLA_UNARY_OP(Sqrt, at::aten::sqrt, xla::Sqrt);
 PTXLA_BINARY_OP(Min, at::aten::min, xla::Min);
 PTXLA_BINARY_OP(Pow, at::aten::pow, xla::Pow);
 PTXLA_BINARY_OP(Fmod, at::aten::fmod, xla::Rem);
-PTXLA_BINARY_OP(Atan2, at::aten::atan2, xla::Atan2);

 torch::lazy::NodePtr LogBase(const torch::lazy::Value& input,
                              torch::lazy::OpKind op, double base) {
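This deleted line was the old hand-rolled builder for the atan2 node. Roughly, the macro expanded to a free function returning a generic two-operand node whose lowering promotes both operands and calls the given XLA builder function. A condensed sketch of the expansion for Atan2, reconstructed from the macro's general pattern rather than copied verbatim:

torch::lazy::NodePtr Atan2(const torch::lazy::Value& input,
                           const torch::lazy::Value& other) {
  // Shape inference reuses the same promote-then-Atan2 logic as the lowering.
  auto shape_fn = [&]() {
    return InferOutputShape(
        {GetXlaShape(input), GetXlaShape(other)},
        [](absl::Span<const xla::XlaOp> operands) {
          auto promoted = XlaHelpers::Promote(operands[0], operands[1]);
          return xla::Atan2(promoted.first, promoted.second);
        });
  };
  auto lower_fn = [](const XlaNode& node,
                     LoweringContext* loctx) -> XlaOpVector {
    xla::XlaOp op0 = loctx->GetOutputOp(node.operand(0));
    xla::XlaOp op1 = loctx->GetOutputOp(node.operand(1));
    auto promoted = XlaHelpers::Promote(op0, op1);
    return node.ReturnOp(xla::Atan2(promoted.first, promoted.second), loctx);
  };
  return GenericOp(torch::lazy::OpKind(at::aten::atan2), {input, other},
                   shape_fn, std::move(lower_fn));
}

Comparing this sketch with the new Atan2::Lower and Atan2OutputShape below shows the migration is behavior-preserving: the promote-then-Atan2 logic is identical, only its home changes.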
3 changes: 0 additions & 3 deletions torch_xla/csrc/ops/ops.h
@@ -75,9 +75,6 @@ torch::lazy::NodePtr Sin(const torch::lazy::Value& input);

 torch::lazy::NodePtr Sinh(const torch::lazy::Value& input);

-torch::lazy::NodePtr Atan2(const torch::lazy::Value& input,
-                           const torch::lazy::Value& other);
-
 torch::lazy::NodePtr Tan(const torch::lazy::Value& input);

 torch::lazy::NodePtr Neg(const torch::lazy::Value& input);
7 changes: 7 additions & 0 deletions torch_xla/csrc/ops/ops_lower_fn.cpp
@@ -120,6 +120,13 @@ torch_xla::XlaOpVector Atan::Lower(LoweringContext* loctx) const {
   return ReturnOp(xla::Atan(xla_input), loctx);
 }

+torch_xla::XlaOpVector Atan2::Lower(LoweringContext* loctx) const {
+  xla::XlaOp xla_input = loctx->GetOutputOp(operand(0));
+  xla::XlaOp xla_other = loctx->GetOutputOp(operand(1));
+  auto promoted = XlaHelpers::Promote(xla_input, xla_other);
+  return ReturnOp(xla::Atan2(promoted.first, promoted.second), loctx);
+}
+
 torch_xla::XlaOpVector Atanh::Lower(LoweringContext* loctx) const {
   xla::XlaOp xla_input = loctx->GetOutputOp(operand(0));
   return ReturnOp(xla::Atanh(xla_input), loctx);
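The Promote call matters because xla::Atan2, like XLA binary ops generally, requires both operands to share an element type, whereas PyTorch freely mixes dtypes (say float32 with float64) and promotes. A sketch of the dtype half of what XlaHelpers::Promote is relied on to do here; this is assumed behavior for illustration, and the real helper in torch_xla/csrc/helpers.cpp also reconciles shapes and follows ATen's full promotion lattice:

std::pair<xla::XlaOp, xla::XlaOp> PromoteDtypesSketch(xla::XlaOp a,
                                                      xla::XlaOp b) {
  xla::XlaBuilder* builder = a.builder();
  xla::PrimitiveType ta = builder->GetShape(a).value().element_type();
  xla::PrimitiveType tb = builder->GetShape(b).value().element_type();
  if (ta == tb) return {a, b};
  // Pick the common element type per the promotion rules, then convert
  // both sides to it before handing them to the binary op.
  xla::PrimitiveType common = XlaHelpers::PromoteType(ta, tb);
  return {xla::ConvertElementType(a, common),
          xla::ConvertElementType(b, common)};
}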
11 changes: 11 additions & 0 deletions torch_xla/csrc/ops/ops_xla_shape_fn.cpp
@@ -193,6 +193,17 @@ xla::Shape AtanOutputShape(const torch::lazy::Value& input) {
   return GetXlaShape(input);
 }

+xla::Shape Atan2OutputShape(const torch::lazy::Value& input,
+                            const torch::lazy::Value& other) {
+  auto lower_for_shape_fn =
+      [&](absl::Span<const xla::XlaOp> operands) -> xla::XlaOp {
+    auto promoted = XlaHelpers::Promote(operands[0], operands[1]);
+    return xla::Atan2(promoted.first, promoted.second);
+  };
+  return InferOutputShape({GetXlaShape(input), GetXlaShape(other)},
+                          lower_for_shape_fn);
+}
+
 xla::Shape AtanhOutputShape(const torch::lazy::Value& input) {
   return GetXlaShape(input);
 }
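Because atan2 broadcasts and promotes, the output shape cannot simply be copied from one input the way the unary AtanOutputShape does. Instead, InferOutputShape runs the same promote-and-Atan2 lambda used in the lowering through a throwaway builder and reads the resulting shape back, so the shape function can never drift from the lowering. A minimal sketch of that mechanism, illustrative only since the real implementation lives elsewhere in torch_xla:

xla::Shape InferOutputShapeSketch(
    absl::Span<const xla::Shape> input_shapes,
    const std::function<xla::XlaOp(absl::Span<const xla::XlaOp>)>& core_fn) {
  // Build a scratch computation whose parameters mirror the input shapes...
  xla::XlaBuilder builder("infer_output_shape");
  std::vector<xla::XlaOp> params;
  for (size_t i = 0; i < input_shapes.size(); ++i) {
    params.push_back(
        xla::Parameter(&builder, i, input_shapes[i], absl::StrCat("p", i)));
  }
  // ...run the op-specific lambda, and read the inferred shape back.
  xla::XlaOp result = core_fn(params);
  return builder.GetShape(result).value();
}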
3 changes: 3 additions & 0 deletions torch_xla/csrc/ops/ops_xla_shape_fn.h
@@ -53,6 +53,9 @@ xla::Shape AsinhOutputShape(const torch::lazy::Value& input);

 xla::Shape AtanOutputShape(const torch::lazy::Value& input);

+xla::Shape Atan2OutputShape(const torch::lazy::Value& input,
+                            const torch::lazy::Value& other);
+
 xla::Shape AtanhOutputShape(const torch::lazy::Value& input);

 xla::Shape BinaryCrossEntropyOutputShape(
4 changes: 0 additions & 4 deletions torch_xla/csrc/tensor.h
@@ -428,10 +428,6 @@ class XLATensor : public c10::intrusive_ptr_target {
                                  std::vector<int64_t> stride,
                                  c10::optional<int64_t> storage_offset);

-  static XLATensorPtr atan2(
-      const XLATensorPtr& input, const XLATensorPtr& other,
-      c10::optional<at::ScalarType> logical_element_type = c10::nullopt);
-
   static XLATensorPtr avg_pool_nd(const XLATensorPtr& input,
                                   int64_t spatial_dim_count,
                                   std::vector<int64_t> kernel_size,
7 changes: 0 additions & 7 deletions torch_xla/csrc/tensor_methods.cpp
@@ -711,13 +711,6 @@ void XLATensor::as_strided_(XLATensorPtr& input, std::vector<int64_t> size,
   }
 }

-XLATensorPtr XLATensor::atan2(
-    const XLATensorPtr& input, const XLATensorPtr& other,
-    c10::optional<at::ScalarType> logical_element_type) {
-  return input->CreateFrom(Atan2(input->GetIrValue(), other->GetIrValue()),
-                           logical_element_type);
-}
-
 XLATensorPtr XLATensor::avg_pool_nd(const XLATensorPtr& input,
                                     int64_t spatial_dim_count,
                                     std::vector<int64_t> kernel_size,
1 change: 1 addition & 0 deletions xla_native_functions.yaml
@@ -87,6 +87,7 @@ ir_gen:
   - _adaptive_avg_pool2d_backward
   - _adaptive_avg_pool3d
   - _adaptive_avg_pool3d_backward
+  - atan2
   - bitwise_and.Tensor
   - bitwise_or.Tensor
   - bitwise_xor.Tensor
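Listing atan2 under ir_gen is what triggers the codegen to emit the Atan2 node class, leaving only the Lower and Atan2OutputShape functions above to be hand-written. A hypothetical end-to-end smoke test for the new path; the device string and comparison are illustrative and assume a torch build with torch_xla registered and an XLA device available:

#include <torch/torch.h>

#include <iostream>

int main() {
  torch::Device xla_device("xla:0");
  torch::Tensor a = torch::rand({2, 3}).to(xla_device);
  torch::Tensor b = torch::rand({2, 3}).to(xla_device);
  // Dispatches to XLANativeFunctions::atan2, which now records the
  // codegen'd Atan2 IR node instead of calling XLATensor::atan2.
  torch::Tensor c = torch::atan2(a, b);
  // Materialize the lazy result and compare against the eager CPU reference.
  std::cout << torch::allclose(c.cpu(), torch::atan2(a.cpu(), b.cpu()))
            << std::endl;
  return 0;
}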