diff --git a/imperative/python/megengine/xla/ir_utils.py b/imperative/python/megengine/xla/ir_utils.py
index 985a9797e..6a3654afb 100644
--- a/imperative/python/megengine/xla/ir_utils.py
+++ b/imperative/python/megengine/xla/ir_utils.py
@@ -229,11 +229,11 @@ def _str_shape(shp):
     def _str_eqn(self, eqn):
         inps = ", ".join(map(self._str_var, eqn.inputs))
         oups = ", ".join(map(self._str_var, eqn.outputs))
-        str_op = str(eqn.op)
+        str_op = str(eqn.type)
         if isinstance(eqn.op, mops.Reduce):
             assert str(eqn.op.mode).startswith("Reduce.Mode.")
             str_op = str_op + str(eqn.op.mode)[len("Reduce.Mode.") :]
-        ret = f"{oups} = {str_op}({inps})"
+        ret = f"{oups} = {str_op}({inps}) scope: {eqn.scope}"
         return ret
 
     def __str__(self) -> str:
diff --git a/imperative/python/src/tensor.cpp b/imperative/python/src/tensor.cpp
index baec4237e..a090f42b4 100644
--- a/imperative/python/src/tensor.cpp
+++ b/imperative/python/src/tensor.cpp
@@ -1649,6 +1649,14 @@ void init_tensor(py::module m) {
                         else
                             return py::cast(opkind2str.find(self.kind)->second);
                     })
+            .def_property_readonly(
+                    "scope",
+                    [](SeqItem& self) -> py::object {
+                        if (self.op && !self.op->scope().empty()) {
+                            return py::cast(self.op->scope());
+                        }
+                        return py::none();
+                    })
             .def_property_readonly(
                     "kind",
                     [opkind2str](SeqItem& self) {
diff --git a/imperative/src/impl/transformations/grad.cpp b/imperative/src/impl/transformations/grad.cpp
index e13c01865..be43100cb 100644
--- a/imperative/src/impl/transformations/grad.cpp
+++ b/imperative/src/impl/transformations/grad.cpp
@@ -208,6 +208,8 @@ void GradKey::backward() {
     auto& tape = m_frozen_tape;
     for (std::ptrdiff_t k = tape.size() - 1; k >= 0; --k) {
         auto& [grad_fn, op] = tape[k];
+        std::string scope_name = op ? op->make_name() + ".Backward" : "CustomBackward";
+        Transformation::push_scope(scope_name);
         auto grad_receiver = [&, grad_fn = grad_fn](size_t i, ValueRef grad) {
             auto& dest = grad_fn->m_dests[i];
             if (dest) {
@@ -233,13 +235,12 @@
                 for (auto&& slot : grad_fn->m_slots) {
                     *iter++ = slot.m_grad;
                 }
-                std::string name = op ? op->name() + "Backward" : "CustomBackward";
                 if (Profiler::is_profiling()) {
-                    imperative::apply(PushScope(name, ScopeType::BACKWARD), Span(nullptr, nullptr));
+                    imperative::apply(PushScope(scope_name, ScopeType::BACKWARD), Span(nullptr, nullptr));
                 }
                 backward(grads, grad_receiver);
                 if (Profiler::is_profiling()) {
-                    imperative::apply(PopScope(name, ScopeType::BACKWARD), Span(nullptr, nullptr));
+                    imperative::apply(PopScope(scope_name, ScopeType::BACKWARD), Span(nullptr, nullptr));
                 }
             }
         }, grad_fn->m_backward);
@@ -256,6 +257,7 @@
             }
         }
         grad_fn->clear();
+        Transformation::pop_scope(scope_name);
     }
     tape.clear();
 }
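
For reviewers, a minimal Python sketch of the naming and nesting protocol the grad.cpp hunks introduce: each tape entry pushes a scope named after its forward op (`make_name() + ".Backward"`, or `"CustomBackward"` when no op is recorded) before running the backward closure, and pops it after `grad_fn->clear()`. `Op` and `ScopeStack` below are hypothetical stand-ins for the C++ op object and `Transformation::push_scope`/`pop_scope`; they are not MegEngine APIs.

```python
class Op:
    """Hypothetical stand-in for the recorded forward op."""

    def __init__(self, name):
        self._name = name

    def make_name(self):
        return self._name


class ScopeStack:
    """Hypothetical stand-in for Transformation::push_scope/pop_scope."""

    def __init__(self):
        self._stack = []

    def push_scope(self, name):
        self._stack.append(name)

    def pop_scope(self, name):
        # Pops must pair with the most recent push, mirroring the push_scope
        # at the top of the tape loop and the pop_scope after grad_fn->clear().
        assert self._stack and self._stack[-1] == name
        self._stack.pop()

    def current(self):
        return "/".join(self._stack)


def backward_scope_name(op):
    # Mirrors: op ? op->make_name() + ".Backward" : "CustomBackward"
    return op.make_name() + ".Backward" if op is not None else "CustomBackward"


scopes = ScopeStack()
# The tape is walked in reverse; None models an entry with a custom backward.
for op in [Op("Elemwise"), Op("MatrixMul"), None]:
    name = backward_scope_name(op)
    scopes.push_scope(name)
    print("running backward in scope:", scopes.current())
    scopes.pop_scope(name)
```

Note that the same `scope_name` string is now also passed to the profiler's `PushScope`/`PopScope` events, so the profiler and the transformation scope stack agree on one name per tape entry.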