Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 3 additions & 11 deletions torch_xla/csrc/init_python_bindings.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3377,17 +3377,9 @@ void InitXlaModuleBindings(py::module m) {
[](const std::vector<at::Tensor>& tensors) -> py::bytes {
absl::StatusOr<std::vector<absl_nonnull XLATensorPtr>>
xtensors_status = bridge::GetXlaTensors(tensors);
ABSL_CHECK(xtensors_status.ok())
<< "\n\n"
<< "Internal Error:\n"
<< " _get_graph_hash(): error retrieving the XLA tensors "
"from the given tensor arguments. "
<< "This is a bug! Please, open an issue in the PyTorch/XLA "
<< "GitHub repository: https://github.com/pytorch/xla"
<< "\n\n"
<< "Status Error:\n"
<< " " << BuildStatusErrorMessage(xtensors_status.status())
<< "\n";
XLA_CHECK_OK(xtensors_status,
"_get_graph_hash(): error retrieving the XLA tensors "
"from the given tensor arguments.");
std::vector<absl_nonnull XLATensorPtr> xtensors =
xtensors_status.value();
torch::lazy::hash_t hash =
Expand Down
1 change: 0 additions & 1 deletion torch_xla/csrc/runtime/debug_macros.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
// unnecessary or undesirable.
#define XLA_ERROR() TF_ERROR_STREAM()
#define XLA_CHECK(c) TF_CHECK(c)
#define XLA_CHECK_OK(c) TF_CHECK_OK(c)
#define XLA_CHECK_EQ(a, b) TF_CHECK_EQ(a, b)
#define XLA_CHECK_NE(a, b) TF_CHECK_NE(a, b)
#define XLA_CHECK_LE(a, b) TF_CHECK_LE(a, b)
Expand Down
26 changes: 26 additions & 0 deletions torch_xla/csrc/status.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -124,4 +124,30 @@ void MaybeThrow(const absl::Status& status) {

// `GetValueOrThrow` overload for a plain `absl::Status`: there is no value
// to unwrap, so a non-ok status simply results in a throw via `MaybeThrow`.
void GetValueOrThrow(const absl::Status& status) {
  MaybeThrow(status);
}

// Crash helper backing `XLA_CHECK_OK()`.
//
// If `status` is ok this is a no-op. Otherwise it builds a human-readable
// internal-error report — prefixing the optional caller-supplied `message`,
// a bug-report pointer, and the status error message annotated with the
// given source location (file / line / function) — and crashes through
// `ABSL_CHECK()`.
void OkOrDie(const absl::Status& status, const char* file, const int32_t line,
             const char* function, std::string_view message) {
  if (!status.ok()) {
    // Attach this call site to the status propagation trace before
    // rendering the error message.
    const absl::Status annotated =
        status_internal::MaybeWithNewMessage(status, file, line, function);

    std::ostringstream report;
    report << "\n\nInternal Error:\n";
    if (!message.empty()) {
      report << " " << message << "\n";
    }
    report << " This is a bug! Please, open an issue in the PyTorch/XLA "
           << "GitHub repository: https://github.com/pytorch/xla"
           << "\n\nStatus Error:\n"
           << " " << BuildStatusErrorMessage(annotated) << "\n";

    // Known-false condition here: this is the crash, carrying the report.
    ABSL_CHECK(status.ok()) << report.str();
  }
}

} // namespace torch_xla
26 changes: 26 additions & 0 deletions torch_xla/csrc/status.h
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
#ifndef XLA_TORCH_XLA_CSRC_STATUS_H_
#define XLA_TORCH_XLA_CSRC_STATUS_H_

#include <sstream>

#include "absl/status/statusor.h"

namespace torch_xla {
Expand Down Expand Up @@ -125,6 +127,22 @@ constexpr char kStatusPropagationTraceKey[] =
lhs = std::move(XLA_STATUS_VAR_).value(), \
##__VA_ARGS__)

// Crashes the process if `status` is not an ok status.
//
// `status` is passed through `status_internal::GetStatus`, so it can
// presumably be either a status or a status-or value — TODO confirm against
// the `GetStatus` overloads. The optional variadic argument supplies an
// extra context message for the crash report.
//
// Example:
//
//   XLA_CHECK_OK(
//       FnThatReturnsStatus(),
//       "New error message"
//   );
//
// If `FnThatReturnsStatus()` returns a non-ok status, this macro will
// call `ABSL_CHECK()` (via `OkOrDie`), which will crash with a report
// that includes the given message and this source location.
//
#define XLA_CHECK_OK(status, ...) \
::torch_xla::OkOrDie(::torch_xla::status_internal::GetStatus(status), \
__FILE__, __LINE__, __FUNCTION__, ##__VA_ARGS__)

namespace status_internal {

// Adds source location information to the status propagation trace if
Expand Down Expand Up @@ -211,6 +229,14 @@ T GetValueOrThrow(absl::StatusOr<T>&& status) {
// `GetValueOrThrow` overload for `Status`.
void GetValueOrThrow(const absl::Status& status);

// Checks that `status` is an ok status; crashes the process otherwise.
//
// On failure, it creates a new status instance annotated with the given
// source location information, and incorporates its message (alongside the
// status propagation trace) into the crash report. `message`, when
// non-empty, is prepended to that report as caller-supplied context.
void OkOrDie(const absl::Status& status, const char* file, const int32_t line,
const char* function, std::string_view message = "");

} // namespace torch_xla

#endif // XLA_TORCH_XLA_CSRC_STATUS_H_