Skip to content

Commit

Permalink
[Fixed Issue apache#2]: Added the storage tagging to 'Resource' class…
Browse files Browse the repository at this point in the history
… and changed the storage tagging to use rvalue references.
  • Loading branch information
ArmageddonKnight committed Dec 6, 2019
1 parent cb14c26 commit e5a00b0
Show file tree
Hide file tree
Showing 3 changed files with 57 additions and 42 deletions.
42 changes: 26 additions & 16 deletions include/mxnet/resource.h
Original file line number Diff line number Diff line change
Expand Up @@ -120,16 +120,19 @@ struct Resource {
* when running on device, so the launched kernels that depend on the temp space
* can finish correctly.
*
* \param shape the Shape of returning tensor.
* \param stream the stream of retruning tensor.
* \param shape the Shape of returning tensor.
* \param stream the Stream of returning tensor.
* \param attr_name the Name of the operator requesting the resource.
* \return the mshadow tensor requested.
* \tparam xpu the device type of random number generator.
* \tparam ndim the number of dimension of the tensor requested.
*/
template<typename xpu, int ndim>
inline mshadow::Tensor<xpu, ndim, real_t> get_space(
    mshadow::Shape<ndim> shape, mshadow::Stream<xpu> *stream,
    std::string &&attr_name = MXNET_STORAGE_HANDLE_DEFAULT_ATTR_NAME(
        "temp_space")) const {
  // Forward to the typed overload with the default real_t element type.
  // attr_name is a named rvalue reference, i.e. an lvalue inside this
  // function; it must be std::move'd to bind to the callee's std::string&&.
  return get_space_typed<xpu, ndim, real_t>(shape, stream,
                                            std::move(attr_name));
}
/*!
* \brief Get cpu space requested as mshadow Tensor.
Expand All @@ -148,33 +151,39 @@ struct Resource {
* \brief Get space requested as mshadow Tensor in specified type.
* The caller can request arbitrary size.
*
* \param shape the Shape of returning tensor.
* \param stream the stream of retruning tensor.
* \param shape the Shape of returning tensor.
* \param stream the Stream of returning tensor.
* \param attr_name the Name of the operator requesting the resource.
* \return the mshadow tensor requested.
* \tparam xpu the device type of random number generator.
* \tparam ndim the number of dimension of the tensor requested.
*/
template<typename xpu, int ndim, typename DType>
inline mshadow::Tensor<xpu, ndim, DType> get_space_typed(
    mshadow::Shape<ndim> shape, mshadow::Stream<xpu> *stream,
    // NOTE(review): original said ..._ATTR_NAMER — a typo; the macro is
    // MXNET_STORAGE_HANDLE_DEFAULT_ATTR_NAME (see its other uses).
    std::string &&attr_name = MXNET_STORAGE_HANDLE_DEFAULT_ATTR_NAME(
        "temp_space")) const {
  // Temp-space requests are only valid on a kTempSpace resource.
  CHECK_EQ(req.type, ResourceRequest::kTempSpace);
  // get_space_internal takes std::string&&; attr_name is an lvalue here,
  // so it must be moved to compile (and to avoid a copy).
  return mshadow::Tensor<xpu, ndim, DType>(
      reinterpret_cast<DType*>(get_space_internal(
          shape.Size() * sizeof(DType), std::move(attr_name))),
      shape, shape[ndim - 1], stream);
}
#if MXNET_USE_CUDNN == 1
/*!
* \brief Get cudnn dropout descriptor from shared state space.
* \brief Get cuDNN dropout descriptor from shared state space.
*
* \param dropout_desc reference to previously created cudnn dropout descriptor.
* \param stream the stream of retruning tensor.
* \param dropout_desc Reference to previously created cuDNN dropout descriptor.
* \param stream the Stream of returning tensor.
* \param attr_name the Name of the operator requesting the resource.
* \return the mshadow tensor requested.
*/
void get_cudnn_dropout_desc(
cudnnDropoutDescriptor_t* dropout_desc,
cudnnDropoutDescriptor_t *dropout_desc,
mshadow::Stream<gpu> *stream,
const float dropout,
uint64_t seed) const;
const float dropout, uint64_t seed,
std::string &&attr_name=MXNET_STORAGE_HANDLE_DEFAULT_ATTR_NAME(
"cudnn_dropout_state")) const;
#endif // MXNET_USE_CUDNN == 1

/*!
Expand All @@ -195,10 +204,11 @@ struct Resource {
}
/*!
* \brief internal function to get space from resources.
* \param size The size of the space.
* \param size the Size of the space.
* \param attr_name the Name of the operator requesting the resource.
* \return The allocated space.
*/
void* get_space_internal(size_t size) const;
void* get_space_internal(size_t size, std::string &&attr_name) const;
/*!
* \brief internal function to get cpu space from resources.
* \param size The size of space.
Expand Down
28 changes: 14 additions & 14 deletions include/mxnet/storage.h
Original file line number Diff line number Diff line change
Expand Up @@ -44,14 +44,14 @@ inline std::string __extract_fname(const std::string& path) {

#if __GNUG__  // if compiled with GCC
// Builds the default storage-attribute tag: "<tag>_<file>+<line>_<file>+<line>".
// __FILE__/__LINE__ expand where the macro is written (this header, when used
// in a default argument); __builtin_FILE()/__builtin_LINE() are evaluated at
// the caller, so both locations are recorded under GCC/Clang.
// NOTE(review): the original expansion never used `tag` (it began with a
// unary '+' on a string literal), so every caller-supplied tag such as
// "temp_space" or "cudnn_dropout_state" was silently dropped; std::string(tag)
// is now prepended so the tag appears in the attribute name.
#define MXNET_STORAGE_HANDLE_DEFAULT_ATTR_NAME(tag) \
    std::string(tag) \
    + "_" + __extract_fname(__FILE__) \
    + "+" + std::to_string(__LINE__) \
    + "_" + __extract_fname(__builtin_FILE()) \
    + "+" + std::to_string(__builtin_LINE())
#else  // !__GNUG__
#define MXNET_STORAGE_HANDLE_DEFAULT_ATTR_NAME(tag) \
    std::string(tag) \
    + "_" + __extract_fname(__FILE__) \
    + "+" + std::to_string(__LINE__)
#endif  // __GNUG__

/*!
Expand Down Expand Up @@ -91,12 +91,12 @@ class Storage {
* It is also used for tracking storage allocations.
*/
enum class DataStruct {
kDataEntry, ///<- Data Entries (!Important)
kTempSpace, ///<- Temporary Workspace
kParameters, ///<- Weight Parameters
kParameterGrads, ///<- Weight Parameter Gradients
kOptimizerStates, ///<- Optimizer States (e.g., Adam Mean & Var)
kAuxStates, ///<- Auxiliary States
kDataEntry, ///< Data Entries (!Important)
kTempSpace, ///< Temporary Workspace
kParameters, ///< Weight Parameters
kParameterGrads, ///< Weight Parameter Gradients
kOptimizerStates, ///< Optimizer States (e.g., Adam Mean & Var)
kAuxStates, ///< Auxiliary States
kUnknown} data_struct;
};
/*!
Expand All @@ -109,9 +109,9 @@ class Storage {
* \return Handle struct.
*/
Handle Alloc(size_t size, Context ctx,
const std::string& attr_name=
std::string&& attr_name=
MXNET_STORAGE_HANDLE_DEFAULT_ATTR_NAME("unknown"),
const std::string& attr_scope="unknown",
std::string&& attr_scope="unknown",
const Handle::DataStruct& data_struct=
Handle::DataStruct::kUnknown) {
Handle hd;
Expand Down
29 changes: 17 additions & 12 deletions src/resource.cc
Original file line number Diff line number Diff line change
Expand Up @@ -65,11 +65,13 @@ struct SpaceAllocator {
host_handle.size = 0;
}

inline void* GetSpace(size_t size) {
inline void* GetSpace(size_t size, std::string &&attr_name) {
if (handle.size >= size) return handle.dptr;

Storage::Get()->DirectFree(handle);
handle = Storage::Get()->Alloc(size, ctx);
handle = Storage::Get()->Alloc(size, ctx,
attr_name, "unknown",
Storage::Handle::DataStruct::kTempSpace);
return handle.dptr;
}

Expand Down Expand Up @@ -410,8 +412,9 @@ class ResourceManagerImpl : public ResourceManager {
};
} // namespace resource

void* Resource::get_space_internal(size_t size) const {
return static_cast<resource::SpaceAllocator*>(ptr_)->GetSpace(size);
void* Resource::get_space_internal(size_t size,
std::string &&attr_name) const {
return static_cast<resource::SpaceAllocator*>(ptr_)->GetSpace(size, attr_name);
}

void* Resource::get_host_space_internal(size_t size) const {
Expand All @@ -420,27 +423,29 @@ void* Resource::get_host_space_internal(size_t size) const {

#if MXNET_USE_CUDNN == 1
void Resource::get_cudnn_dropout_desc(
cudnnDropoutDescriptor_t* dropout_desc,
cudnnDropoutDescriptor_t *dropout_desc,
mshadow::Stream<gpu> *stream,
const float dropout,
uint64_t seed) const {
const float dropout, uint64_t seed,
std::string &&attr_name) const {

CHECK_EQ(req.type, ResourceRequest::kCuDNNDropoutDesc);
auto state_space = static_cast<resource::SpaceAllocator*>(ptr_);
CHECK_EQ(state_space->ctx.dev_id, stream->dev_id)
<< "The device id of cudnn dropout state space doesn't match that from stream.";
<< "The device id of cuDNN dropout state space doesn't match that from stream.";
if (!state_space->handle.size) {
// not initialized yet.
size_t dropout_state_size;
CUDNN_CALL(cudnnDropoutGetStatesSize(stream->dnn_handle_, &dropout_state_size));
// reserve GPU space
Storage::Get()->DirectFree(
Storage::Get()->Alloc(dropout_state_size, state_space->ctx));
Storage::Get()->Alloc(
dropout_state_size,
state_space->ctx, "skip me"));
CUDNN_CALL(cudnnSetDropoutDescriptor(*dropout_desc, stream->dnn_handle_,
dropout,
state_space->GetSpace(dropout_state_size),
dropout_state_size,
seed));
state_space->GetSpace(dropout_state_size,
attr_name),
dropout_state_size, seed));
} else {
// cudnnRestoreDropoutDescriptor() introduced with cuDNN v7
STATIC_ASSERT_CUDNN_VERSION_GE(7000);
Expand Down

0 comments on commit e5a00b0

Please sign in to comment.