Relay->Relax translator (ResNet example) #75
Changes from 7 commits
1151660
598782c
06f5f40
274fa28
9554410
c34048e
99a07c1
ffd2e06
b95a7e5
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,53 @@ | ||
# Licensed to the Apache Software Foundation (ASF) under one | ||
# or more contributor license agreements. See the NOTICE file | ||
# distributed with this work for additional information | ||
# regarding copyright ownership. The ASF licenses this file | ||
# to you under the Apache License, Version 2.0 (the | ||
# "License"); you may not use this file except in compliance | ||
# with the License. You may obtain a copy of the License at | ||
# | ||
# http://www.apache.org/licenses/LICENSE-2.0 | ||
# | ||
# Unless required by applicable law or agreed to in writing, | ||
# software distributed under the License is distributed on an | ||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | ||
# KIND, either express or implied. See the License for the | ||
# specific language governing permissions and limitations | ||
# under the License. | ||
"""Example ResNet workload by translating the Relay program to Relax""" | ||
|
||
import tvm | ||
import tvm.testing | ||
from tvm.relay import testing | ||
from tvm import relax, relay | ||
from tvm.relax.testing import relay_translator, nn | ||
from tvm.runtime import vm as vm_rt | ||
from tvm.script import relax as R | ||
import numpy as np | ||
|
||
def main():
    """Demo: translate a Relay ResNet-50 workload to Relax, run it on the
    Relax VM, and verify the result against the Relay VM."""
    # Obtain a Relay ResNet-50 module; the returned params are unused because
    # fresh weights are initialized for the Relax module below.
    relay_mod, _ = testing.resnet.get_workload(num_layers=50, batch_size=1, dtype="float32")

    # Translate the ResNet model from Relay to Relax.
    relax_mod = relay_translator.from_relay(relay_mod["main"])

    # Show the translated ResNet IRModule in Relax text format.
    print(R.parser.astext(relax_mod))

    # Build the IRModule and create a Relax VM.
    target = tvm.target.Target("llvm", host="llvm")
    ex, lib = relax.vm.build(relax_mod, target)
    vm = relax.VirtualMachine(ex, tvm.cpu(), mod=lib)

    # Initialize weights and run the model on the Relax VM.
    shape = (1, 3, 224, 224)
    data = tvm.nd.array(np.random.rand(*shape).astype(np.float32))
    params = nn.init_params(relax_mod)
    res = vm["main"](data, *params)

    # Check correctness by comparing with the Relay VM result on the
    # same input data and weights.
    exe = relay.vm.compile(relay_mod, target)
    relay_vm = vm_rt.VirtualMachine(exe, tvm.cpu())
    expected_output = relay_vm.run(data, *params)
    tvm.testing.assert_allclose(res.numpy(), expected_output.numpy(), rtol=1e-4, atol=1e-4)


if __name__ == "__main__":
    main()
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -22,6 +22,7 @@ | |
#include <tvm/ir/expr.h> | ||
#include <tvm/ir/span.h> | ||
#include <tvm/node/node.h> | ||
#include <tvm/relax/type.h> | ||
#include <tvm/relay/expr.h> | ||
#include <tvm/runtime/container/array.h> | ||
#include <tvm/runtime/container/map.h> | ||
|
@@ -35,8 +36,6 @@ using Expr = RelayExpr; | |
using ExprNode = RelayExprNode; | ||
using relay::Call; | ||
using relay::CallNode; | ||
using relay::Constant; | ||
using relay::ConstantNode; | ||
using relay::Id; | ||
using relay::If; | ||
using relay::IfNode; | ||
|
@@ -83,6 +82,57 @@ class ShapeExpr : public Expr { | |
TVM_DEFINE_OBJECT_REF_COW_METHOD(ShapeExprNode); | ||
}; | ||
|
||
/*! | ||
* \brief Constant tensor, backed by an NDArray on the cpu(0) device. | ||
* | ||
* \note Scalar constants are represented by rank-0 const tensor. | ||
* Constant folding are handled uniformly via Tensor types. | ||
*/ | ||
class Constant; | ||
/*! | ||
* \brief Constant tensor type. | ||
*/ | ||
class ConstantNode : public ExprNode { | ||
public: | ||
/*! \brief The data of the tensor */ | ||
runtime::NDArray data; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Can we use relay::ConstantNode? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. The reason I added relax::ConstantNode was that when we meet a There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Can we change the original constructor to make shape as an optional field? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Now I reuse the relay::ConstantNode and fill its shape and checked_type during construction. |
||
|
||
void VisitAttrs(tvm::AttrVisitor* v) { | ||
v->Visit("data", &data); | ||
v->Visit("span", &span); | ||
v->Visit("checked_type_", &checked_type_); | ||
v->Visit("shape_", &shape_); | ||
} | ||
|
||
bool SEqualReduce(const ConstantNode* other, SEqualReducer equal) const { | ||
return equal(data, other->data); | ||
} | ||
|
||
void SHashReduce(SHashReducer hash_reduce) const { | ||
hash_reduce(checked_type_); | ||
hash_reduce(data); | ||
hash_reduce(shape_); | ||
} | ||
|
||
static constexpr const char* _type_key = "relax.expr.Constant"; | ||
YuchenJin marked this conversation as resolved.
Show resolved
Hide resolved
|
||
static constexpr const bool _type_has_method_sequal_reduce = true; | ||
static constexpr const bool _type_has_method_shash_reduce = true; | ||
TVM_DECLARE_FINAL_OBJECT_INFO(ConstantNode, ExprNode); | ||
}; | ||
|
||
class Constant : public Expr { | ||
public: | ||
/*! | ||
* \brief The constructor | ||
* \param data The data of the constant tensor. | ||
* \param span The source span of the expression. | ||
*/ | ||
TVM_DLL explicit Constant(const runtime::NDArray& data, Span span = Span()); | ||
|
||
TVM_DEFINE_OBJECT_REF_METHODS(Constant, Expr, ConstantNode); | ||
TVM_DEFINE_OBJECT_REF_COW_METHOD(ConstantNode); | ||
}; | ||
|
||
/*! \brief The variable class for all Relax bindings. */ | ||
class VarNode : public ExprNode { | ||
public: | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
might not need this print? and how about moving this file to tests/python/relax/
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I'd prefer to put it into `apps/relax_examples`, because it's a demo that translates the Relay ResNet workload to Relax. And I put the translator in `python/tvm/relax/testing/`
because the Relay->Relax translator is a current workaround that allows us to construct real workloads quickly. We will need PyTorch/TF/ONNX importers once we have high-level ops in Relax.