s/uniqueName/debugName/ (#22096)
Summary:
Pull Request resolved: #22096
ghimport-source-id: 8f1d994

Test Plan: Imported from OSS

Differential Revision: D15956004

Pulled By: jamesr66a

fbshipit-source-id: 319d2d20ef0863249a8a2bdd228b4f792d37bfab
James Reed authored and facebook-github-bot committed Jun 22, 2019
1 parent 7d637de commit f7b2778
Showing 24 changed files with 93 additions and 93 deletions.
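
The rename is a one-for-one substitution on the JIT IR's Value naming API: setUniqueName/uniqueName/hasUniqueName/uniqueNameBase become setDebugName/debugName/hasDebugName/debugNameBase, and Graph::uniqueNames() becomes debugNames(). A minimal sketch of the new spellings, illustrative only and not taken from the diff (header path as of this commit):

#include <torch/csrc/jit/ir.h>

using namespace torch::jit;

void debugNameExample() {
  auto graph = std::make_shared<Graph>();
  Value* v = graph->addInput();       // formerly named via setUniqueName()
  v->setDebugName("x");               // new spelling
  std::string name = v->debugName();  // "x"; unnamed values fall back to their numeric id
  (void)name;
}
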
2 changes: 1 addition & 1 deletion test/cpp/jit/test_alias_analysis.h
@@ -54,7 +54,7 @@ struct TopoMoveTestFixture {
inputs.push_back(nodes.at(name)->output());
}
auto node = graph->appendNode(graph->create(prim::AutogradZero, inputs));
- node->output()->setUniqueName(name);
+ node->output()->setDebugName(name);
nodes[name] = node;

if (blockInputNames.size() != 0) {
4 changes: 2 additions & 2 deletions test/cpp/jit/test_fuser.h
@@ -182,8 +182,8 @@ void testRegisterFusionCachesKernel(std::ostream& out = std::cout) {
auto b = SymbolicVariable::asNewInput(*graph, type);
auto c = a * b;
auto d = c * a;
- c.value()->setUniqueName(cname);
- d.value()->setUniqueName(dname);
+ c.value()->setDebugName(cname);
+ d.value()->setDebugName(dname);
graph->registerOutput(d.value());
torch::jit::overrideCanFuseOnCPU(true);
FuseGraph(graph);
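
The fuser test names the intermediate values c and d so it can find them again after FuseGraph() rewrites the graph. A generic lookup of that kind, shown purely for illustration (findByDebugName is a hypothetical helper, not an existing API):

torch::jit::Value* findByDebugName(torch::jit::Graph& g, const std::string& name) {
  for (torch::jit::Node* n : g.nodes()) {
    for (torch::jit::Value* o : n->outputs()) {
      if (o->debugName() == name) {
        return o;  // first value in the graph's primary block whose debug name matches
      }
    }
  }
  return nullptr;  // no value carries that name
}
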
2 changes: 1 addition & 1 deletion test/cpp/jit/test_misc.h
@@ -913,7 +913,7 @@ std::vector<std::string> values_to_value_ids(
const std::vector<Value*>& values) {
std::vector<std::string> result;
for (auto v : values) {
- result.push_back(v->uniqueName());
+ result.push_back(v->debugName());
}
return result;
};
16 changes: 8 additions & 8 deletions test/cpp/jit/test_netdef_converter.h
@@ -194,20 +194,20 @@ void testNetDefConverter() {
std::unordered_map<std::string, Value*> vmap;
convertNetDefToIR(net, &graph, &vmap);
AT_ASSERT(graph.inputs().size() == 3);
- AT_ASSERT(graph.inputs()[0]->uniqueName() == "a");
- AT_ASSERT(graph.inputs()[1]->uniqueName() == "b");
- AT_ASSERT(graph.inputs()[2]->uniqueName() == "c");
+ AT_ASSERT(graph.inputs()[0]->debugName() == "a");
+ AT_ASSERT(graph.inputs()[1]->debugName() == "b");
+ AT_ASSERT(graph.inputs()[2]->debugName() == "c");

AT_ASSERT(graph.outputs().size() == 1);
- AT_ASSERT(graph.outputs()[0]->uniqueName() == "x");
+ AT_ASSERT(graph.outputs()[0]->debugName() == "x");

Node* quux = graph.outputs()[0]->node();
Value* a0 = quux->inputs()[0];
Value* x0 = quux->inputs()[1];
Value* u = quux->inputs()[2];
- AT_ASSERT(a0->uniqueName() != "a" && a0->uniqueNameBase() == "a");
- AT_ASSERT(x0->uniqueName() != "x" && x0->uniqueNameBase() == "x");
- AT_ASSERT(u->uniqueName() == "u");
+ AT_ASSERT(a0->debugName() != "a" && a0->debugNameBase() == "a");
+ AT_ASSERT(x0->debugName() != "x" && x0->debugNameBase() == "x");
+ AT_ASSERT(u->debugName() == "u");

// Convert back to netdef and check if the names are preserved.
// We still expect them to be in SSA form, but we should preserve names for
@@ -231,7 +231,7 @@ void testNetDefConverter() {
std::unordered_map<std::string, Value*> vmap;
convertNetDefToIR(net, &graph, &vmap, "caffe2::");
// Sanity check that value map is returned and it works.
AT_ASSERT(vmap["a"]->uniqueName() == "a");
AT_ASSERT(vmap["a"]->debugName() == "a");

caffe2::NetDef net2;
convertIRToNetDef(&net2, graph, "caffe2::");
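
These assertions rely on the collision rule implemented in ir.cpp below: requesting a debug name that is already taken gives the name to the new value and renames the previous owner with a numeric suffix, which debugNameBase() strips off again. A short sketch of that behavior (assumed usage, not part of the diff):

void debugNameCollisionExample() {
  torch::jit::Graph graph;
  auto* a = graph.addInput();
  a->setDebugName("a");
  auto* b = graph.addInput();
  b->setDebugName("a");                  // "a" now refers to b...
  AT_ASSERT(b->debugName() == "a");
  AT_ASSERT(a->debugName() != "a");      // ...and a was renamed, e.g. to "a.1"
  AT_ASSERT(a->debugNameBase() == "a");  // the numeric suffix is stripped by debugNameBase()
}
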
2 changes: 1 addition & 1 deletion test/test_jit.py
@@ -6512,7 +6512,7 @@ def contained_blocks(node):
return len(node.findAllNodes("prim::If")) * 2 + len(node.findAllNodes("prim::Loop"))
for node in ifs + loops:
outs = list(node.outputs())
- out_name = list(map(lambda x: x.uniqueName(), outs))
+ out_name = list(map(lambda x: x.debugName(), outs))
if len(out_name) == 0:
continue
fc = FileCheck()
6 changes: 3 additions & 3 deletions torch/csrc/jit/export.cpp
@@ -214,7 +214,7 @@ void EncoderBase::EncodeValueInfo(
onnx::ValueInfoProto* v,
const Value* n,
const std::unordered_map<std::string, std::unordered_map<int64_t, std::string>>& dynamic_axes) {
- std::string name = n->uniqueName();
+ std::string name = n->debugName();
v->set_name(name);
if (CompleteTensorTypePtr node_type = n->type()->cast<CompleteTensorType>()) {
onnx::TypeProto* t = v->mutable_type();
@@ -285,11 +285,11 @@ void EncoderBase::EncodeBlock(
if (input->node()->mustBeNone() && !is_raw_export) {
p_n->add_input("");
} else {
- p_n->add_input(input->uniqueName());
+ p_n->add_input(input->debugName());
}
}
for (auto output : node->outputs()) {
- p_n->add_output(output->uniqueName());
+ p_n->add_output(output->debugName());
EncodeIntermediateValueInfo(graph_proto, output);
}
if (!node->kind().is_onnx()) {
4 changes: 2 additions & 2 deletions torch/csrc/jit/function.h
@@ -104,8 +104,8 @@ struct TORCH_API Function : public std::enable_shared_from_this<Function> {
size_t num_inputs = function.num_inputs();
for (size_t i = 0; i < num_inputs; ++i) {
const Value* v = g.inputs().at(i);
- std::string name = v->hasUniqueName() ? v->uniqueNameBase()
- : ("argument_" + std::to_string(i));
+ std::string name = v->hasDebugName() ? v->debugNameBase()
+ : ("argument_" + std::to_string(i));
args.emplace_back(std::move(name), unshapedType(g.inputs()[i]->type()));
}
for (size_t i = 0; i < g.outputs().size(); ++i) {
20 changes: 10 additions & 10 deletions torch/csrc/jit/ir.cpp
@@ -34,7 +34,7 @@ static constexpr topo_position_t kMidPoint = 0;
static constexpr topo_position_t kAppendInterval = 1099511627776ULL /* 2^40 */;

static void printValueRef(std::ostream& out, const Value* n) {
out << "%" << n->uniqueName();
out << "%" << n->debugName();
}

// NB: This overload will become ambiguous with the one Caffe2 provides in its
@@ -636,7 +636,7 @@ std::shared_ptr<Graph> Graph::copy() {
auto new_g = std::make_shared<Graph>();
auto env = [](Value* v) -> Value* {
AT_ERROR(
"Graph::copy() encountered a use of a value " + v->uniqueName() +
"Graph::copy() encountered a use of a value " + v->debugName() +
" not in scope. Run lint!");
};
new_g->block()->cloneFrom(this->block(), env);
@@ -688,8 +688,8 @@ bool Value::mustNotBeNone() const {
!type()->cast<OptionalType>();
}

- std::string Value::uniqueNameBase() const {
- std::string name = uniqueName();
+ std::string Value::debugNameBase() const {
+ std::string name = debugName();
std::string name_base = name;
auto last_dot_pos = name.find_last_of('.');
if (last_dot_pos != std::string::npos && last_dot_pos + 1 != name.size()) {
@@ -715,15 +715,15 @@ bool Value::isValidName(const std::string& name) {
return true;
}

- Value* Value::setUniqueName(const std::string& name) {
+ Value* Value::setDebugName(const std::string& name) {
if (!isValidName(name)) {
throw std::runtime_error("Invalid name: '" + name + "'");
}

auto& names = node()->owningGraph()->unique_names_;

// clear any old name from the map
- if (hasUniqueName()) {
+ if (hasDebugName()) {
names.erase(unique_name_);
unique_name_ = "";
}
@@ -752,7 +752,7 @@ Value* Value::setUniqueName(const std::string& name) {
ss << name_base << "." << suffix++;
replacement_name = ss.str();
} while (names.count(replacement_name) > 0);
- old_owner_of_name->second->setUniqueName(replacement_name);
+ old_owner_of_name->second->setDebugName(replacement_name);
}

names[name] = this;
@@ -762,8 +762,8 @@

Value* Value::copyMetadata(Value* from) {
setType(from->type());
- if (from->hasUniqueName()) {
- setUniqueName(from->uniqueName());
+ if (from->hasDebugName()) {
+ setDebugName(from->debugName());
}
return this;
}
@@ -1530,7 +1530,7 @@ void Graph::freeNode(Node* n) {
all_nodes.erase(it);
}
void Graph::freeValue(Value* v) {
v->setUniqueName("");
v->setDebugName("");
auto it = all_values.find(v);
AT_ASSERT(it != all_values.end());
delete *it;
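
Note that copyMetadata() forwards the debug name along with the type, so copying metadata from a named value hands the name over under the same collision rule. A tiny illustration (assumed behavior, inferred from the code above):

void copyMetadataExample(torch::jit::Graph& graph) {
  auto* src = graph.addInput();
  src->setDebugName("weight");
  auto* dst = graph.addInput();
  dst->copyMetadata(src);                 // copies the type and, since src is named, the debug name
  AT_ASSERT(dst->debugName() == "weight");
  AT_ASSERT(src->debugName() != "weight" && src->debugNameBase() == "weight");  // src was suffixed
}
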
16 changes: 8 additions & 8 deletions torch/csrc/jit/ir.h
@@ -175,18 +175,18 @@ struct Value {
size_t unique() const {
return unique_;
}
- bool hasUniqueName() const {
+ bool hasDebugName() const {
return !unique_name_.empty();
}
static bool isValidName(const std::string& name);
- TORCH_API Value* setUniqueName(const std::string& name);
- std::string uniqueName() const {
- if (hasUniqueName()) {
+ TORCH_API Value* setDebugName(const std::string& name);
+ std::string debugName() const {
+ if (hasDebugName()) {
return unique_name_;
}
return std::to_string(unique());
}
- TORCH_API std::string uniqueNameBase() const;
+ TORCH_API std::string debugNameBase() const;
Node* node() {
return node_;
}
@@ -884,12 +884,12 @@ struct Block {

Value* addInput(std::string name = "") {
Value* v = input_->addOutput();
- v->setUniqueName(std::move(name));
+ v->setDebugName(std::move(name));
return v;
}
Value* insertInput(size_t i, std::string name = "") {
Value* v = input_->insertOutput(i);
- v->setUniqueName(std::move(name));
+ v->setDebugName(std::move(name));
return v;
}
void eraseInput(size_t i) {
@@ -1016,7 +1016,7 @@ struct Graph {
const Node* return_node() const {
return block_->return_node();
}
- const std::unordered_map<std::string, Value*>& uniqueNames() const {
+ const std::unordered_map<std::string, Value*>& debugNames() const {
return unique_names_;
}

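
As declared above, debugName() falls back to the value's numeric unique id when no name has been assigned, and the graph-level name map is now exposed as debugNames(). An illustrative check (assumed usage):

void debugNameFallbackExample(torch::jit::Graph& graph) {
  auto* v = graph.addInput();                          // no name supplied
  AT_ASSERT(!v->hasDebugName());
  AT_ASSERT(v->debugName() == std::to_string(v->unique()));
  v->setDebugName("input0");
  AT_ASSERT(graph.debugNames().count("input0") == 1);  // registered in the graph's name map
}
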
14 changes: 7 additions & 7 deletions torch/csrc/jit/netdef_converter.cpp
@@ -130,22 +130,22 @@ void convertNetDefToIR(
AT_ASSERT(namesMap.count(v));
const std::string& name = namesMap.at(v);
if (Value::isValidName(name)) {
- v->setUniqueName(name);
+ v->setDebugName(name);
}
}
}
for (Value* v : g->inputs()) {
AT_ASSERT(namesMap.count(v));
const std::string& name = namesMap.at(v);
if (Value::isValidName(name)) {
- v->setUniqueName(name);
+ v->setDebugName(name);
}
}
for (Value* v : g->outputs()) {
AT_ASSERT(namesMap.count(v));
const std::string& name = namesMap.at(v);
if (Value::isValidName(name)) {
- v->setUniqueName(name);
+ v->setDebugName(name);
}
}
}
@@ -210,10 +210,10 @@ static void convertNodeToCaffe2Op(const Node* node, caffe2::NetDef* net,
caffe2::OperatorDef op;
op.set_type(removePrefixIfNeeded(node->kind().toQualString(), prefix));
for (const Value* input : node->inputs()) {
- op.add_input(input->uniqueName());
+ op.add_input(input->debugName());
}
for (const Value* output : node->outputs()) {
- op.add_output(output->uniqueName());
+ op.add_output(output->debugName());
}
std::vector<Symbol> names = node->attributeNames();
for (const Symbol& name : names) {
@@ -228,15 +228,15 @@ void convertIRToNetDef(caffe2::NetDef* net, const Graph& g,
net->mutable_op()->Clear();

for (const Value* value : g.inputs()) {
- net->add_external_input(value->uniqueName());
+ net->add_external_input(value->debugName());
}

for (const Node* node : g.nodes()) {
convertNodeToCaffe2Op(node, net, prefix);
}

for (const Value* value : g.outputs()) {
- net->add_external_output(value->uniqueName());
+ net->add_external_output(value->debugName());
}
}

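
convertNetDefToIR() surfaces valid Caffe2 blob names as debug names and convertIRToNetDef() writes them back out, which is what the round-trip test in test_netdef_converter.h exercises. A condensed sketch of that flow (buildSomeNet is a hypothetical helper; the prefix argument mirrors the test calls):

#include <iostream>  // assumed include for the printout

void netDefRoundTripSketch() {
  caffe2::NetDef net = buildSomeNet();                    // hypothetical: any populated NetDef works
  torch::jit::Graph graph;
  std::unordered_map<std::string, torch::jit::Value*> vmap;
  convertNetDefToIR(net, &graph, &vmap);
  for (const torch::jit::Value* input : graph.inputs()) {
    std::cout << input->debugName() << "\n";              // blob names resurface as debug names
  }
  caffe2::NetDef net2;
  convertIRToNetDef(&net2, graph, /*prefix=*/"");         // names are preserved in the emitted NetDef
}
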
4 changes: 2 additions & 2 deletions torch/csrc/jit/passes/alias_analysis.cpp
@@ -163,7 +163,7 @@ static std::string getElementName(const Element* e) {
if (e->value == nullptr) {
return "WILDCARD";
} else {
- return e->value->uniqueName();
+ return e->value->debugName();
}
}

@@ -197,7 +197,7 @@ void AliasDb::dump() const {
// std::cout << *node;
// std::cout << " ";
// for (const auto value : values) {
- // std::cout << value->uniqueName() << ", ";
+ // std::cout << value->debugName() << ", ";
// }
// std::cout << "\n";
// }
4 changes: 2 additions & 2 deletions torch/csrc/jit/passes/canonicalize.cpp
@@ -19,14 +19,14 @@ std::shared_ptr<Graph> Canonicalize(
auto* r_input = r->addInput();
r_input->copyMetadata(input);
if (!keep_unique_names)
r_input->setUniqueName("");
r_input->setDebugName("");
rn_env[input] = r_input;
}
for (auto* node : graph->nodes()) {
auto* r_node = r->createClone(node, rn_fn);
if (!keep_unique_names) {
for (auto* output : r_node->outputs()) {
output->setUniqueName("");
output->setDebugName("");
}
}
r->appendNode(r_node);
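
Canonicalize() keeps its option of dropping value names, now via setDebugName(""). The observable effect, sketched assuming the pass's existing (graph, keep_unique_names) signature:

void canonicalizeSketch(const std::shared_ptr<torch::jit::Graph>& graph) {
  auto stripped = Canonicalize(graph, /*keep_unique_names=*/false);
  for (torch::jit::Value* v : stripped->inputs()) {
    AT_ASSERT(!v->hasDebugName());  // names were cleared, so debugName() is the numeric id
  }
}
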
4 changes: 2 additions & 2 deletions torch/csrc/jit/passes/inliner.cpp
@@ -16,8 +16,8 @@ static void replace(
WithInsertPoint guard(to_replace);
auto new_output =
inlineCallTo(*to_replace->owningGraph(), *fn->graph(), inputs).at(0);
- if (to_replace->output()->hasUniqueName()) {
- new_output->setUniqueName(to_replace->output()->uniqueName());
+ if (to_replace->output()->hasDebugName()) {
+ new_output->setDebugName(to_replace->output()->debugName());
}
to_replace->output()->replaceAllUsesWith(new_output);
}
4 changes: 2 additions & 2 deletions torch/csrc/jit/passes/liveness.cpp
@@ -27,7 +27,7 @@ struct LivenessAnalyzer {
std::cout << "Liveness info:\n";
for (auto e : liveness_sets) {
if (e.first->outputs().size() > 0) {
- std::cout << e.first->outputs()[0]->uniqueName();
+ std::cout << e.first->outputs()[0]->debugName();
}

std::cout << " " << e.first->kind().toQualString();
@@ -48,7 +48,7 @@
} else {
std::cout << ", ";
}
- std::cout << el->uniqueName() << "(" << el->unique() << ")";
+ std::cout << el->debugName() << "(" << el->unique() << ")";
}
std::cout << "]";
}
4 changes: 2 additions & 2 deletions torch/csrc/jit/passes/onnx/constant_fold.cpp
@@ -38,7 +38,7 @@ ValueToParamPairMap buildValueToParamsMap(
const ParamMap& paramsDict) {
ValueToParamPairMap valsToParamsMap;
for (auto& input : b->inputs()) {
- auto it = paramsDict.find(input->uniqueName());
+ auto it = paramsDict.find(input->debugName());
if (it != paramsDict.end()) {
valsToParamsMap.emplace(input, *it);
}
@@ -252,7 +252,7 @@ void ConstantFoldONNX(Block* b, ParamMap& paramsDict) {
auto newSourceNodeOutput = b->addInput();
valsToParamsMap.insert(
{newSourceNodeOutput,
- std::make_pair(newSourceNodeOutput->uniqueName(), updatedVal)});
+ std::make_pair(newSourceNodeOutput->debugName(), updatedVal)});
newSourceNodeOutput->inferTypeFrom(updatedVal);
node->outputs().at(0)->replaceAllUsesWith(newSourceNodeOutput);

