This repository has been archived by the owner on May 22, 2023. It is now read-only.

Relay->Relax translator (ResNet example) #75

Merged (9 commits, Feb 14, 2022); the diff below shows changes from 7 commits.
53 changes: 53 additions & 0 deletions apps/relax_examples/resnet.py
@@ -0,0 +1,53 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example ResNet workload by translating the Relay program to Relax"""

import tvm
import tvm.testing
from tvm.relay import testing
from tvm import relax, relay
from tvm.relax.testing import relay_translator, nn
from tvm.runtime import vm as vm_rt
from tvm.script import relax as R
import numpy as np

if __name__ == "__main__":
relay_mod, _ = testing.resnet.get_workload(num_layers=50, batch_size=1, dtype="float32")

# translate the ResNet model from Relay to Relax
relax_mod = relay_translator.from_relay(relay_mod["main"])

# print the translated ResNet IRModule
print(R.parser.astext(relax_mod))
Collaborator:

might not need this print? and how about moving this file to tests/python/relax/

Collaborator (Author):

I'd prefer to put it into apps/relax_examples because it's a demo translating the Relay ResNet workload to Relax. I put the translator in python/tvm/relax/testing/ because the Relay->Relax translator is a temporary workaround that lets us construct real workloads quickly; we will need PyTorch/TF/ONNX importers once we have high-level ops in Relax.


# build the IRModule and create the Relax VM
target = tvm.target.Target("llvm", host="llvm")
ex, lib = relax.vm.build(relax_mod, target)
vm = relax.VirtualMachine(ex, tvm.cpu(), mod=lib)

# initialize weights and run the model on the Relax VM
shape = (1, 3, 224, 224)
data = tvm.nd.array(np.random.rand(*shape).astype(np.float32))
params = nn.init_params(relax_mod)
res = vm["main"](data, *params)

# check correctness by comparing with the Relay result
exe = relay.vm.compile(relay_mod, target)
relay_vm = vm_rt.VirtualMachine(exe, tvm.cpu())
inputs = [data] + params
expected_output = relay_vm.run(*inputs)
tvm.testing.assert_allclose(res.numpy(), expected_output.numpy(), rtol=1e-4, atol=1e-4)
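
The file above follows a four-step flow: translate the Relay module, build it for a target, run it on the Relax VM, and verify against the Relay VM. Below is a minimal, untested sketch of the same flow on a toy Relay function, reusing the entry points shown above (relay_translator.from_relay, relax.vm.build, relax.VirtualMachine); the toy function itself is a hypothetical stand-in for ResNet:

import numpy as np
import tvm
from tvm import relax, relay
from tvm.relax.testing import relay_translator

# build a toy Relay module: f(x, y) = x + y on 2x2 float32 tensors
x = relay.var("x", shape=(2, 2), dtype="float32")
y = relay.var("y", shape=(2, 2), dtype="float32")
relay_mod = tvm.IRModule.from_expr(relay.Function([x, y], relay.add(x, y)))

# translate to Relax, build, and run, mirroring the ResNet example above
relax_mod = relay_translator.from_relay(relay_mod["main"])
ex, lib = relax.vm.build(relax_mod, tvm.target.Target("llvm", host="llvm"))
vm = relax.VirtualMachine(ex, tvm.cpu(), mod=lib)

a = tvm.nd.array(np.ones((2, 2), dtype="float32"))
b = tvm.nd.array(np.ones((2, 2), dtype="float32"))
print(vm["main"](a, b))  # expect a 2x2 tensor of 2.0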
54 changes: 52 additions & 2 deletions include/tvm/relax/expr.h
@@ -22,6 +22,7 @@
#include <tvm/ir/expr.h>
#include <tvm/ir/span.h>
#include <tvm/node/node.h>
#include <tvm/relax/type.h>
#include <tvm/relay/expr.h>
#include <tvm/runtime/container/array.h>
#include <tvm/runtime/container/map.h>
@@ -35,8 +36,6 @@ using Expr = RelayExpr;
using ExprNode = RelayExprNode;
using relay::Call;
using relay::CallNode;
using relay::Constant;
using relay::ConstantNode;
using relay::Id;
using relay::If;
using relay::IfNode;
@@ -83,6 +82,57 @@ class ShapeExpr : public Expr {
TVM_DEFINE_OBJECT_REF_COW_METHOD(ShapeExprNode);
};

/*!
* \brief Constant tensor, backed by an NDArray on the cpu(0) device.
*
* \note Scalar constants are represented as rank-0 constant tensors.
* Constant folding is handled uniformly via Tensor types.
*/
class Constant;
/*!
* \brief Constant tensor type.
*/
class ConstantNode : public ExprNode {
public:
/*! \brief The data of the tensor */
runtime::NDArray data;
Contributor:
Can we use relay::ConstantNode?

Collaborator (Author):

The reason I added relax::ConstantNode was that when we meet a ConstantNode in Relay during translation, we need to fill its shape_ field so that it can be converted to a te tensor. The constructor of relax::ConstantNode fills the shape_ field here: https://github.com/tlc-pack/relax/blob/relax-resnet/src/relax/ir/expr.cc#L61.

Contributor:

Can we change the original constructor to make shape an optional field?

Collaborator (Author):

Now I reuse the relay::ConstantNode and fill its shape and checked_type during construction.


void VisitAttrs(tvm::AttrVisitor* v) {
v->Visit("data", &data);
v->Visit("span", &span);
v->Visit("checked_type_", &checked_type_);
v->Visit("shape_", &shape_);
}

bool SEqualReduce(const ConstantNode* other, SEqualReducer equal) const {
return equal(data, other->data);
}

void SHashReduce(SHashReducer hash_reduce) const {
hash_reduce(checked_type_);
hash_reduce(data);
hash_reduce(shape_);
}

static constexpr const char* _type_key = "relax.expr.Constant";
static constexpr const bool _type_has_method_sequal_reduce = true;
static constexpr const bool _type_has_method_shash_reduce = true;
TVM_DECLARE_FINAL_OBJECT_INFO(ConstantNode, ExprNode);
};

class Constant : public Expr {
public:
/*!
* \brief The constructor
* \param data The data of the constant tensor.
* \param span The source span of the expression.
*/
TVM_DLL explicit Constant(const runtime::NDArray& data, Span span = Span());

TVM_DEFINE_OBJECT_REF_METHODS(Constant, Expr, ConstantNode);
TVM_DEFINE_OBJECT_REF_COW_METHOD(ConstantNode);
};

/*! \brief The variable class for all Relax bindings. */
class VarNode : public ExprNode {
public:
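The thread above converges on reusing relay::ConstantNode with shape_ and checked_type_ populated at construction, so constants can be lowered to te tensors. A small hedged Python illustration of the observable behavior (hypothetical session; relay.const is the standard constant constructor):

import numpy as np
from tvm import relay

# a rank-0 constant represents a scalar, per the doc comment in the diff
scalar = relay.const(1.0, dtype="float32")
print(scalar.data.shape)  # () -- backed by a rank-0 NDArray

tensor = relay.const(np.zeros((2, 3), dtype="float32"))
print(tensor.data.shape)  # (2, 3)
# with this PR, shape_ and checked_type_ are also filled during
# construction, so lowering can convert the constant to a te.Tensor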
2 changes: 2 additions & 0 deletions include/tvm/relax/ir_functor.h
@@ -77,6 +77,7 @@ class IRFunctor<R(const ObjectRef& n, Args...)> {
virtual R VisitNode_(const relay::TupleGetItemNode* op, Args... args) IR_FUNCTOR_DEFAULT;

// IR nodes introduced by Relax
virtual R VisitNode_(const relax::ConstantNode* op, Args... args) IR_FUNCTOR_DEFAULT;
virtual R VisitNode_(const relax::VarNode* op, Args... args) IR_FUNCTOR_DEFAULT;
virtual R VisitNode_(const relax::DataflowVarNode* op, Args... args) IR_FUNCTOR_DEFAULT;
virtual R VisitNode_(const relax::ShapeExprNode* op, Args... args) IR_FUNCTOR_DEFAULT;
@@ -103,6 +104,7 @@ class IRFunctor<R(const ObjectRef& n, Args...)> {
RELAX_IR_FUNCTOR_DISPATCH(relay::IfNode);
RELAX_IR_FUNCTOR_DISPATCH(OpNode);
RELAX_IR_FUNCTOR_DISPATCH(relay::TupleGetItemNode);
RELAX_IR_FUNCTOR_DISPATCH(relax::ConstantNode);
RELAX_IR_FUNCTOR_DISPATCH(relax::VarNode);
RELAX_IR_FUNCTOR_DISPATCH(relax::DataflowVarNode);
RELAX_IR_FUNCTOR_DISPATCH(relax::ShapeExprNode);
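The RELAX_IR_FUNCTOR_DISPATCH macros above implement double dispatch: each IR node type gets its own VisitNode_ overload, and the functor routes on the node's runtime type. A language-agnostic Python illustration of the same dispatch idea (not TVM API, just the pattern):

class Constant: pass
class Var: pass

class IRFunctor:
    def visit(self, node):
        # route on the runtime type, falling back to a default handler
        handler = getattr(self, "visit_" + type(node).__name__, self.default)
        return handler(node)

    def visit_Constant(self, node):
        return "constant"

    def visit_Var(self, node):
        return "var"

    def default(self, node):
        raise NotImplementedError(type(node).__name__)

print(IRFunctor().visit(Constant()))  # constant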