Merge pull request #2230 from antinucleon/master
[Resource] Allow getting temp host memory in any context
antinucleon committed May 24, 2016
2 parents 6523a7a + 3a99dff commit 37c2b28
Showing 3 changed files with 59 additions and 1 deletion.
2 changes: 1 addition & 1 deletion dmlc-core
35 changes: 35 additions & 0 deletions include/mxnet/resource.h
@@ -85,6 +85,19 @@ struct Resource {
mshadow::Shape<ndim> shape, mshadow::Stream<xpu> *stream) const {
return get_space_typed<xpu, ndim, real_t>(shape, stream);
}
/*!
* \brief Get CPU space requested as mshadow Tensor.
* The caller can request arbitrary size.
*
* \param shape the shape of the returned tensor.
* \return the mshadow tensor requested.
* \tparam ndim the number of dimensions of the tensor requested.
*/
template<int ndim>
inline mshadow::Tensor<cpu, ndim, real_t> get_host_space(
mshadow::Shape<ndim> shape) const {
return get_host_space_typed<cpu, ndim, real_t>(shape);
}
/*!
* \brief Get space requested as mshadow Tensor in specified type.
* The caller can request arbitrary size.
@@ -103,12 +116,34 @@ struct Resource {
reinterpret_cast<DType*>(get_space_internal(shape.Size() * sizeof(DType))),
shape, shape[ndim - 1], stream);
}
/*!
* \brief Get CPU space as mshadow Tensor in specified type.
* The caller can request arbitrary size.
*
* \param shape the shape of the returned tensor.
* \return the mshadow tensor requested.
* \tparam ndim the number of dimensions of the tensor requested.
* \tparam DType the requested data type.
*/
template<int ndim, typename DType>
inline mshadow::Tensor<cpu, ndim, DType> get_host_space_typed(
mshadow::Shape<ndim> shape) const {
return mshadow::Tensor<cpu, ndim, DType>(
reinterpret_cast<DType*>(get_host_space_internal(shape.Size() * sizeof(DType))),
shape, shape[ndim - 1], NULL);
}
/*!
* \brief internal function to get space from resources.
* \param size The size of the space.
* \return The allocated space.
*/
void* get_space_internal(size_t size) const;
/*!
* \brief internal function to get CPU space from resources.
* \param size The size of the space.
* \return The allocated space.
*/
void* get_host_space_internal(size_t size) const;
};

/*! \brief Global resource manager */
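
As a usage sketch, not part of this commit: an operator running in any device context can now request temporary CPU scratch memory through the API above. The function ExampleForward below and the way the Resource reaches it are illustrative assumptions; only get_host_space_typed itself comes from this change.

// Hypothetical usage sketch; `ExampleForward` is not from this commit.
#include <mxnet/resource.h>

void ExampleForward(const mxnet::Resource& res, int n) {
  // Request n real_t values of CPU scratch space; this works regardless of
  // the device context the resource itself was created for.
  mshadow::Tensor<mshadow::cpu, 1, mshadow::real_t> host_buf =
      res.get_host_space_typed<1, mshadow::real_t>(mshadow::Shape1(n));
  for (int i = 0; i < n; ++i) {
    host_buf[i] = 0.0f;  // host memory is cached and freed by the resource
  }
}
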
23 changes: 23 additions & 0 deletions src/resource.cc
@@ -22,10 +22,14 @@ struct SpaceAllocator {
Context ctx;
// internal handle
Storage::Handle handle;
// internal CPU handle
Storage::Handle host_handle;

SpaceAllocator() {
handle.dptr = nullptr;
handle.size = 0;
host_handle.dptr = nullptr;
host_handle.size = 0;
}

inline void Release() {
@@ -35,12 +39,26 @@ struct SpaceAllocator {
}
}

inline void ReleaseHost() {
if (host_handle.size != 0) {
Storage::Get()->Free(host_handle);
host_handle.size = 0;
}
}

inline void* GetSpace(size_t size) {
if (handle.size >= size) return handle.dptr;
this->Release();
handle = Storage::Get()->Alloc(size, ctx);
return handle.dptr;
}

inline void* GetHostSpace(size_t size) {
if (host_handle.size >= size) return host_handle.dptr;
this->ReleaseHost();
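// a default-constructed Context denotes CPU, so this block is host memory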
host_handle = Storage::Get()->Alloc(size, Context());
return host_handle.dptr;
}
};


@@ -186,6 +204,7 @@ class ResourceManagerImpl : public ResourceManager {
[r](RunContext rctx){
SpaceAllocator rcpy = r;
MSHADOW_CATCH_ERROR(rcpy.Release());
MSHADOW_CATCH_ERROR(rcpy.ReleaseHost());
}, ctx, resource[i].var);
}
}
@@ -228,6 +247,10 @@ void* Resource::get_space_internal(size_t size) const {
return static_cast<resource::SpaceAllocator*>(ptr_)->GetSpace(size);
}

void* Resource::get_host_space_internal(size_t size) const {
return static_cast<resource::SpaceAllocator*>(ptr_)->GetHostSpace(size);
}

ResourceManager* ResourceManager::Get() {
static resource::ResourceManagerImpl inst;
return &inst;
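
A standalone sketch (plain C++, not MXNet code) of the grow-only caching policy that GetSpace and GetHostSpace both follow: each allocator keeps a single block, reuses it whenever it is already large enough, and otherwise frees it and allocates a larger one.

// Minimal illustration of the grow-only scratch-space cache; names are
// invented for this sketch and do not appear in the commit.
#include <cstdlib>
#include <cstddef>

struct CachedBlock {
  void* dptr = nullptr;
  size_t size = 0;

  void* Get(size_t bytes) {
    if (size >= bytes) return dptr;  // cached block already fits: reuse it
    std::free(dptr);                 // too small: release, then grow
    dptr = std::malloc(bytes);
    size = bytes;
    return dptr;
  }
  ~CachedBlock() { std::free(dptr); }
};

The block only ever grows within one allocator's lifetime, trading peak memory for fewer repeated alloc/free calls.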
