Skip to content

Commit

Permalink
Implement explicit IR representation of memory allocation (#3560)
Browse files Browse the repository at this point in the history
  • Loading branch information
jroesch authored and tqchen committed Nov 1, 2019
1 parent 1916406 commit 2083513
Show file tree
Hide file tree
Showing 42 changed files with 1,645 additions and 322 deletions.
1 change: 1 addition & 0 deletions CMakeLists.txt
Expand Up @@ -272,6 +272,7 @@ add_library(tvm_runtime SHARED ${RUNTIME_SRCS})
if(USE_RELAY_DEBUG)
  message(STATUS "Building Relay in debug mode...")
  # NOTE: set_target_properties REPLACES the COMPILE_DEFINITIONS property on
  # each call, so two separate calls would leave only the last definition
  # active (dropping USE_RELAY_DEBUG). Set both in a single semicolon list.
  set_target_properties(tvm PROPERTIES COMPILE_DEFINITIONS "USE_RELAY_DEBUG;DMLC_LOG_DEBUG")
else()
  set_target_properties(tvm PROPERTIES COMPILE_DEFINITIONS "NDEBUG")
endif(USE_RELAY_DEBUG)
Expand Down
73 changes: 73 additions & 0 deletions include/tvm/relay/attrs/memory.h
@@ -0,0 +1,73 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/*!
* \file tvm/relay/attrs/memory.h
* \brief Attributes for memory operators.
*/
#ifndef TVM_RELAY_ATTRS_MEMORY_H_
#define TVM_RELAY_ATTRS_MEMORY_H_

#include <tvm/attrs.h>
#include <tvm/relay/expr.h>
#include <string>

namespace tvm {
namespace relay {

/*!
* \brief Options for allocating tensors.
*/
struct AllocTensorAttrs : public tvm::AttrsNode<AllocTensorAttrs> {
  // Constant shape value, used to aid type inference when the shape is
  // statically known (see describe() text below).
  Constant const_shape;
  // Shape the allocation's return type is cast/asserted to; carries the
  // result of further (shape) analysis.
  Array<IndexExpr> assert_shape;
  // Element type of the tensor to allocate; defaults to float32 below.
  DataType dtype;

  TVM_DECLARE_ATTRS(AllocTensorAttrs, "relay.attrs.AllocTensorAttrs") {
    TVM_ATTR_FIELD(dtype)
      .describe(
          "The dtype of the tensor to allocate.")
      .set_default(Float(32, 1));
    TVM_ATTR_FIELD(const_shape)
      .describe(
          "The shape of constant used to aid in type inference.");
    TVM_ATTR_FIELD(assert_shape)
      .describe(
          "The shape to cast the return type of the allocation to, "\
          "used to specify the shape obtained via further analysis.");
  }
};

/*!
* \brief Options for the shape function operator.
*/
/*
 * Per-argument flags telling the shape function whether each position
 * receives the input tensor itself or only its shape.
 *
 * Fix: the original describe() string was built from two adjacent literals
 * with no separating space ("...should"\ "expect..."), producing the word
 * "shouldexpect" in generated documentation. A space is added at the join.
 */
struct ShapeFuncAttrs : public tvm::AttrsNode<ShapeFuncAttrs> {
  // One flag per argument position; semantics per the describe() text below.
  Array<Integer> is_input;

  TVM_DECLARE_ATTRS(ShapeFuncAttrs, "relay.attrs.ShapeFuncAttrs") {
    TVM_ATTR_FIELD(is_input)
      .describe(
          "A bool indicating whether the shape function should "
          "expect shape or input in each position.");
  }
};

} // namespace relay
} // namespace tvm
#endif // TVM_RELAY_ATTRS_MEMORY_H_
6 changes: 6 additions & 0 deletions include/tvm/relay/base.h
Expand Up @@ -47,6 +47,12 @@ namespace relay {
(*fdebug)("RELAY_DEBUG", __FILE__, __LINE__, __VA_ARGS__); \
}

/*!
 * \brief Invoke the Python debug hook registered as "relay.debug_interp".
 *
 * Wrapped in do { } while (0) so the macro expands to exactly one statement
 * and composes safely with if/else: the original bare-brace form plus the
 * caller's trailing ';' left an extra empty statement, which breaks
 * `if (...) RELAY_DEBUG_INTERP(...); else ...`.
 *
 * NOTE(review): the tag passed to the hook is "RELAY_DEBUG", not
 * "RELAY_DEBUG_INTERP" -- it mirrors the sibling RELAY_DEBUG macro and the
 * Python side ignores it; left unchanged, confirm before altering.
 */
#define RELAY_DEBUG_INTERP(...)                                          \
  do {                                                                   \
    auto fdebug = runtime::Registry::Get("relay.debug_interp");          \
    CHECK(fdebug) << "Could not find Relay Python debugger function.";   \
    (*fdebug)("RELAY_DEBUG", __FILE__, __LINE__, __VA_ARGS__);           \
  } while (0)

/*!
* \brief We always used NodeRef for referencing nodes.
*
Expand Down
8 changes: 7 additions & 1 deletion include/tvm/relay/module.h
Expand Up @@ -76,7 +76,8 @@ class ModuleNode : public RelayNode {
}

TVM_DLL static Module make(tvm::Map<GlobalVar, Function> global_funcs,
tvm::Map<GlobalTypeVar, TypeData> global_type_defs);
tvm::Map<GlobalTypeVar, TypeData> global_type_defs,
std::unordered_set<std::string> imports = {});

/*!
* \brief Add a function to the global environment.
Expand Down Expand Up @@ -235,6 +236,11 @@ class ModuleNode : public RelayNode {
*/
TVM_DLL void ImportFromStd(const std::string& path);

/*!
* \brief The set of imported files.
*/
TVM_DLL std::unordered_set<std::string> Imports() const;

/*! \brief Construct a module from a standalone expression.
*
* Allows one to optionally pass a global function map and
Expand Down
12 changes: 12 additions & 0 deletions include/tvm/runtime/object.h
Expand Up @@ -283,6 +283,8 @@ class Object {
* \note The deleter will be called when ref_counter_ becomes zero.
*/
inline void DecRef();

private:
/*!
* \return The usage count of the cell.
* \note We use stl style naming to be consistent with known API in shared_ptr.
Expand Down Expand Up @@ -675,6 +677,16 @@ struct ObjectEqual {
operator bool() const { return data_ != nullptr; } \
using ContainerType = ObjectName;

// Mutable variant of TVM_DEFINE_OBJECT_REF_METHODS: identical boilerplate
// (default ctor, ObjectPtr ctor, bool conversion, ContainerType alias)
// except operator-> is NON-const and returns a mutable ObjectName*, so the
// underlying object can be modified through the reference.
#define TVM_DEFINE_OBJECT_REF_METHODS_MUT(TypeName, ParentType, ObjectName) \
  TypeName() {}                                                             \
  explicit TypeName(                                                        \
      ::tvm::runtime::ObjectPtr<::tvm::runtime::Object> n)                  \
      : ParentType(n) {}                                                    \
  ObjectName* operator->() {                                                \
    return static_cast<ObjectName*>(data_.get());                           \
  }                                                                         \
  operator bool() const { return data_ != nullptr; }                        \
  using ContainerType = ObjectName;

// Implementations details below
// Object reference counting.
Expand Down
33 changes: 30 additions & 3 deletions include/tvm/runtime/vm.h
Expand Up @@ -138,6 +138,7 @@ enum class Opcode {
GetTag = 13U,
LoadConsti = 14U,
Fatal = 15U,
AllocStorage = 16U,
};

/*! \brief A single virtual machine instruction.
Expand All @@ -158,6 +159,8 @@ struct Instruction {

union {
struct /* AllocTensor Operands */ {
/*! \brief The storage to allocate from. */
RegName storage;
/*! \brief The number of dimensions. */
uint32_t ndim;
/*! \brief The shape of tensor. */
Expand All @@ -166,6 +169,8 @@ struct Instruction {
DLDataType dtype;
} alloc_tensor;
struct /* AllocTensorReg Operands */ {
/*! \brief The storage to allocate from. */
RegName storage;
/*! \brief The register to read the shape out of. */
RegName shape_register;
/*! \brief The datatype of tensor to be allocated. */
Expand Down Expand Up @@ -253,6 +258,14 @@ struct Instruction {
/*! \brief The free variables as an array. */
RegName* free_vars;
};
struct /* AllocStorage Operands */ {
/*! \brief The size of the allocation. */
RegName allocation_size;
/*! \brief The alignment of the allocation. */
RegName alignment;
/*! \brief The hint of the dtype. */
DLDataType dtype_hint;
} alloc_storage;
};

/*! \brief Construct a return instruction.
Expand All @@ -274,19 +287,23 @@ struct Instruction {
static Instruction InvokePacked(Index packed_index, Index arity, Index output_size,
const std::vector<RegName>& args);
/*! \brief Construct an allocate tensor instruction with constant shape.
* \param storage The storage to allocate out of.
* \param shape The shape of the tensor.
* \param dtype The dtype of the tensor.
* \param dst The destination register.
* \return The allocate tensor instruction.
*/
static Instruction AllocTensor(std::vector<int64_t> shape, DLDataType dtype, RegName dst);
static Instruction AllocTensor(RegName storage,
const std::vector<int64_t>& shape, DLDataType dtype, RegName dst);
/*! \brief Construct an allocate tensor instruction with register.
* \param storage The storage to allocate out of.
* \param shape_register The register containing the shape.
* \param dtype The dtype of the tensor.
* \param dst The destination register.
* \return The allocate tensor instruction.
*/
static Instruction AllocTensorReg(RegName shape_register, DLDataType dtype, RegName dst);
static Instruction AllocTensorReg(RegName storage,
RegName shape_register, DLDataType dtype, RegName dst);
/*! \brief Construct an allocate datatype instruction.
* \param tag The datatype tag.
* \param num_fields The number of fields for the datatype.
Expand All @@ -295,7 +312,7 @@ struct Instruction {
* \return The allocate instruction tensor.
*/
static Instruction AllocADT(Index tag, Index num_fields, const std::vector<RegName>& fields,
RegName dst);
RegName dst);
/*! \brief Construct an allocate closure instruction.
* \param func_index The index of the function table.
* \param num_freevar The number of free variables.
Expand Down Expand Up @@ -364,6 +381,16 @@ struct Instruction {
*/
static Instruction Move(RegName src, RegName dst);

/*! \brief Allocate a storage block.
* \param size The size of the allocation.
* \param alignment The allocation's alignment.
* \param dtype_hint The data type hint for the allocator.
* \param dst The destination to place the storage.
* \return The alloc storage instruction.
*/
static Instruction AllocStorage(RegName size, RegName alignment,
DLDataType dtype_hint, RegName dst);

Instruction();
Instruction(const Instruction& instr);
Instruction& operator=(const Instruction& instr);
Expand Down
2 changes: 2 additions & 0 deletions python/tvm/relay/__init__.py
Expand Up @@ -59,6 +59,8 @@
from . import qnn

from .scope_builder import ScopeBuilder
# Load Memory pass
from . import memory_alloc

# Required to traverse large programs
setrecursionlimit(10000)
Expand Down
4 changes: 4 additions & 0 deletions python/tvm/relay/backend/compile_engine.py
Expand Up @@ -99,6 +99,10 @@ def lower(self, source_func, target=None):
msg += "--------------------------\n"
raise RuntimeError(msg)

def lower_shape_func(self, source_func, target=None):
    """Lower the shape function for source_func on the given target.

    Parameters
    ----------
    source_func : tvm.relay.Function
        The function whose shape function is lowered.
        # presumably a relay.Function, same as lower(); confirm against callers
    target : str, optional
        The build target; None uses the default.

    Returns
    -------
    The result of _backend._CompileEngineLowerShapeFunc for the cache key
    built from (source_func, target).
    """
    key = _get_cache_key(source_func, target)
    return _backend._CompileEngineLowerShapeFunc(self, key)

def jit(self, source_func, target=None):
"""JIT a source_func to a tvm.Function.
Expand Down
7 changes: 6 additions & 1 deletion python/tvm/relay/debug.py
Expand Up @@ -25,9 +25,14 @@ def _debugger_init(expr, stack):
import pdb
pdb.set_trace()

# pylint: disable=unused-argument
@register_func("relay.debug")
def _debug(*args):
    """Debug hook invoked from C++ via the "relay.debug" registry entry.

    Ignores all arguments and drops into pdb so the process can be
    inspected interactively at the point of the call.
    """
    import pdb
    pdb.set_trace()

# pylint: disable=unused-argument
@register_func("relay.debug_interp")
def _debug_interp(*args):
_, _, _, ist = args
print("Relay Debugger")
print(" You can manipulate the expression under evaluation with the name `expr`.")
Expand Down
3 changes: 3 additions & 0 deletions python/tvm/relay/expr.py
Expand Up @@ -317,6 +317,9 @@ def set_params(self, params):

return _expr.FunctionSetParams(self, params)

def set_attribute(self, name, ref):
    """Set attribute `name` to `ref` on this function.

    Delegates to _expr.FunctionSetAttr; presumably returns a new Function
    node rather than mutating in place (matching set_params above) --
    confirm against the C++ FunctionSetAttr implementation.
    """
    return _expr.FunctionSetAttr(self, name, ref)


@register_relay_node
class Call(Expr):
Expand Down

0 comments on commit 2083513

Please sign in to comment.