[Windows] [memory_allocator.h] remove ET_TRY macros #8914

Merged
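
This PR removes the ET_TRY_ALLOCATE_* and ET_ALLOCATE_*_OR_RETURN_ERROR convenience macros from runtime/core/memory_allocator.h. The macros are built on GNU statement expressions, which compilers such as MSVC do not support (see the ET_HAVE_GNU_STATEMENT_EXPRESSIONS guard in the removed code), so every call site in this diff is rewritten to call the MemoryAllocator methods directly and check the result explicitly. The sketch below illustrates the call-site pattern applied throughout the diff; MyDelegate, init_example, and the include paths are illustrative assumptions, not code from this PR.

// Minimal sketch of the migration pattern, assuming the in-tree include
// layout. MyDelegate and init_example are placeholders.
#include <new>

#include <executorch/runtime/core/error.h>
#include <executorch/runtime/core/memory_allocator.h>

using executorch::runtime::Error;
using executorch::runtime::MemoryAllocator;

struct MyDelegate {};

Error init_example(MemoryAllocator* runtime_allocator) {
  // Before (removed): relied on a GNU statement expression.
  //   auto* delegate =
  //       ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, MyDelegate);
  //
  // After: call the allocator directly and handle failure explicitly.
  MyDelegate* delegate = runtime_allocator->allocateInstance<MyDelegate>();
  if (delegate == nullptr) {
    return Error::MemoryAllocationFailed;
  }
  // allocateInstance() returns uninitialized memory, so construct in place.
  new (delegate) MyDelegate();
  return Error::Ok;
}
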
7 changes: 5 additions & 2 deletions backends/apple/mps/runtime/MPSBackend.mm
@@ -43,8 +43,11 @@ bool is_available() const override {
       BackendInitContext& context,
       FreeableBuffer* processed,
       ArrayRef<CompileSpec> compile_specs) const override {
-    auto executor = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        context.get_runtime_allocator(), mps::delegate::MPSExecutor);
+    auto executor = context.get_runtime_allocator()->allocateInstance<mps::delegate::MPSExecutor>();
+    if (executor == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     // NOTE: Since we use placement new and since this type is not trivially
     // destructible, we must call the destructor manually in destroy().
     new (executor) mps::delegate::MPSExecutor;
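
The NOTE in the hunk above covers the teardown side of this pattern: because init() placement-news the delegate into memory owned by the runtime MemoryAllocator, the backend's destroy() hook has to run the destructor explicitly instead of deleting the pointer. A self-contained sketch of that pairing, under the assumption that the allocator only reclaims memory in bulk; Widget, create_widget, and destroy_widget are placeholders, not ExecuTorch code:

// Sketch of the placement-new / explicit-destructor pairing described in the
// NOTE above. Widget and the helper functions are placeholders.
#include <new>

#include <executorch/runtime/core/memory_allocator.h>

using executorch::runtime::MemoryAllocator;

struct Widget {
  ~Widget() { /* release non-memory resources here */ }
};

Widget* create_widget(MemoryAllocator* allocator) {
  Widget* w = allocator->allocateInstance<Widget>();
  if (w == nullptr) {
    return nullptr;  // caller maps this to Error::MemoryAllocationFailed
  }
  return new (w) Widget();  // construct in allocator-owned, uninitialized bytes
}

void destroy_widget(Widget* w) {
  if (w != nullptr) {
    // Only the destructor runs here; the underlying bytes are reclaimed when
    // the owning MemoryAllocator is reset, not per object.
    w->~Widget();
  }
}
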
7 changes: 5 additions & 2 deletions backends/arm/runtime/EthosUBackend.cpp
@@ -120,8 +120,11 @@ class EthosUBackend final : public ::executorch::runtime::BackendInterface {
     }
 
     MemoryAllocator* allocator = context.get_runtime_allocator();
-    ExecutionHandle* handle =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(allocator, ExecutionHandle);
+    ExecutionHandle* handle = allocator->allocateInstance<ExecutionHandle>();
+    if (handle == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     handle->processed = processed;
 
     // Return the same buffer we were passed - this data will be
8 changes: 6 additions & 2 deletions backends/mediatek/runtime/NeuronBackend.cpp
@@ -68,8 +68,12 @@ Result<DelegateHandle*> NeuronBackend::init(
       processed->size());
 
   MemoryAllocator* runtime_allocator = context.get_runtime_allocator();
-  NeuronExecuTorchDelegate* delegate = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-      runtime_allocator, NeuronExecuTorchDelegate);
+  NeuronExecuTorchDelegate* delegate =
+      runtime_allocator->allocateInstance<NeuronExecuTorchDelegate>();
+  if (delegate == nullptr) {
+    return Error::MemoryAllocationFailed;
+  }
+
   new (delegate) NeuronExecuTorchDelegate();
 
   if (delegate == nullptr) {
6 changes: 4 additions & 2 deletions backends/qualcomm/runtime/QnnExecuTorchBackend.cpp
@@ -66,8 +66,10 @@ Result<DelegateHandle*> QnnExecuTorchBackend::init(
 
   // Create QnnManager
   MemoryAllocator* runtime_allocator = context.get_runtime_allocator();
-  QnnManager* qnn_manager =
-      ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, QnnManager);
+  QnnManager* qnn_manager = runtime_allocator->allocateInstance<QnnManager>();
+  if (qnn_manager == nullptr) {
+    return Error::MemoryAllocationFailed;
+  }
 
   // NOTE: Since we use placement new and since this type is not trivially
   // destructible, we must call the destructor manually in destroy().
7 changes: 5 additions & 2 deletions backends/vulkan/runtime/VulkanBackend.cpp
@@ -510,8 +510,11 @@ class VulkanBackend final : public ::executorch::runtime::BackendInterface {
       BackendInitContext& context,
       FreeableBuffer* processed,
       ArrayRef<CompileSpec> compile_specs) const override {
-    ComputeGraph* compute_graph = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        context.get_runtime_allocator(), ComputeGraph);
+    ComputeGraph* compute_graph =
+        context.get_runtime_allocator()->allocateInstance<ComputeGraph>();
+    if (compute_graph == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
 
     new (compute_graph) ComputeGraph(get_graph_config(compile_specs));
 
7 changes: 5 additions & 2 deletions backends/xnnpack/runtime/XNNPACKBackend.cpp
@@ -73,8 +73,11 @@ class XnnpackBackend final : public ::executorch::runtime::BackendInterface {
       BackendInitContext& context,
       FreeableBuffer* processed,
       ArrayRef<CompileSpec> compile_specs) const override {
-    auto executor = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        context.get_runtime_allocator(), xnnpack::delegate::XNNExecutor);
+    auto executor = context.get_runtime_allocator()
+                        ->allocateInstance<xnnpack::delegate::XNNExecutor>();
+    if (executor == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
 
 #ifdef ENABLE_XNNPACK_SHARED_WORKSPACE
     // This is needed to serialize access to xnn_create_runtime which is not
64 changes: 47 additions & 17 deletions exir/backend/test/demos/rpc/ExecutorBackend.cpp
@@ -72,8 +72,11 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
     // `processed` contains an executorch program. Wrap it in a DataLoader that
     // will return the data directly without copying it.
     MemoryAllocator* runtime_allocator = context.get_runtime_allocator();
-    auto loader = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        runtime_allocator, BufferDataLoader);
+    auto loader = runtime_allocator->allocateInstance<BufferDataLoader>();
+    if (loader == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (loader) BufferDataLoader(processed->data(), processed->size());
     // Can't free `processed` because the program will point into that memory.
 
@@ -84,8 +87,11 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
     }
 
     // Move the Program off the stack.
-    auto client_program =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, Program);
+    auto client_program = runtime_allocator->allocateInstance<Program>();
+    if (client_program == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_program) Program(std::move(program_result.get()));
 
     Result<MethodMeta> method_meta = client_program->method_meta("forward");
@@ -97,35 +103,56 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
     // Building all different allocators for the client executor
     auto num_memory_planned_buffers = method_meta->num_memory_planned_buffers();
 
-    Span<uint8_t>* memory_planned_buffers = ET_ALLOCATE_LIST_OR_RETURN_ERROR(
-        runtime_allocator, Span<uint8_t>, num_memory_planned_buffers);
+    Span<uint8_t>* memory_planned_buffers =
+        runtime_allocator->allocateList<Span<uint8_t>>(
+            num_memory_planned_buffers);
+    if (memory_planned_buffers == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
 
     for (size_t id = 0; id < num_memory_planned_buffers; ++id) {
       size_t buffer_size = static_cast<size_t>(
           method_meta->memory_planned_buffer_size(id).get());
-      uint8_t* buffer_i = ET_ALLOCATE_LIST_OR_RETURN_ERROR(
-          runtime_allocator, uint8_t, buffer_size);
+      uint8_t* buffer_i = runtime_allocator->allocateList<uint8_t>(buffer_size);
+      if (buffer_i == nullptr) {
+        return Error::MemoryAllocationFailed;
+      }
+
       memory_planned_buffers[id] = {buffer_i, buffer_size};
     }
 
-    auto client_planned_memory = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        runtime_allocator, HierarchicalAllocator);
+    auto client_planned_memory =
+        runtime_allocator->allocateInstance<HierarchicalAllocator>();
+    if (client_planned_memory == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_planned_memory) HierarchicalAllocator(
         {memory_planned_buffers, num_memory_planned_buffers});
 
     // Allocate some memory from runtime allocator for the client executor, in
     // real case, like if it's an executor in dsp, it should allocate memory
     // dedicated to this specific hardware
-    auto client_method_allocator = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        runtime_allocator, MemoryAllocator);
+    auto client_method_allocator =
+        runtime_allocator->allocateInstance<MemoryAllocator>();
+    if (client_method_allocator == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     const size_t kClientRuntimeMemorySize = 4 * 1024U;
-    auto runtime_pool = ET_ALLOCATE_OR_RETURN_ERROR(
-        runtime_allocator, kClientRuntimeMemorySize);
+    auto runtime_pool = runtime_allocator->allocate(kClientRuntimeMemorySize);
+    if (runtime_pool == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
     new (client_method_allocator) MemoryAllocator(
         kClientRuntimeMemorySize, static_cast<uint8_t*>(runtime_pool));
 
     auto client_memory_manager =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, MemoryManager);
+        runtime_allocator->allocateInstance<MemoryManager>();
+    if (client_memory_manager == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_memory_manager)
         MemoryManager(client_method_allocator, client_planned_memory);
 
@@ -140,8 +167,11 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
       return method_res.error();
     }
 
-    auto client_method =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, Method);
+    auto client_method = runtime_allocator->allocateInstance<Method>();
+    if (client_method == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_method) Method(std::move(method_res.get()));
 
     // Return the client method so it will be passed to `execute()` as
170 changes: 0 additions & 170 deletions runtime/core/memory_allocator.h
@@ -198,176 +198,6 @@ class MemoryAllocator {
int32_t prof_id_ = -1;
};

#if ET_HAVE_GNU_STATEMENT_EXPRESSIONS
/**
* Tries allocating from the specified MemoryAllocator*.
*
* - On success, returns a pointer to the allocated buffer.
* - On failure, executes the provided code block, which must return or panic.
*
* Example:
* @code
* char* buf = ET_TRY_ALLOCATE_OR(
* memory_allocator, bufsize, {
* *out_err = Error::MemoryAllocationFailed;
* return nullopt;
* });
* @endcode
*/
#define ET_TRY_ALLOCATE_OR(memory_allocator__, nbytes__, ...) \
({ \
void* et_try_allocate_result = memory_allocator__->allocate(nbytes__); \
if (et_try_allocate_result == nullptr && nbytes__ > 0) { \
__VA_ARGS__ \
/* The args must return. */ \
ET_UNREACHABLE(); \
} \
et_try_allocate_result; \
})

/**
* Tries allocating an instance of type__ from the specified MemoryAllocator*.
*
* - On success, returns a pointer to the allocated buffer. Note that the memory
* will not be initialized.
* - On failure, executes the provided code block, which must return or panic.
*
* Example:
* @code
* char* buf = ET_TRY_ALLOCATE_INSTANCE_OR(
* memory_allocator,
* MyType,
* { *out_err = Error::MemoryAllocationFailed; return nullopt; });
* @endcode
*/
#define ET_TRY_ALLOCATE_INSTANCE_OR(memory_allocator__, type__, ...) \
({ \
type__* et_try_allocate_result = \
memory_allocator__->allocateInstance<type__>(); \
if (et_try_allocate_result == nullptr) { \
__VA_ARGS__ \
/* The args must return. */ \
ET_UNREACHABLE(); \
} \
et_try_allocate_result; \
})

/**
* Tries allocating multiple elements of a given type from the specified
* MemoryAllocator*.
*
* - On success, returns a pointer to the allocated buffer.
* - On failure, executes the provided code block, which must return or panic.
*
* Example:
* @code
* Tensor* tensor_list = ET_TRY_ALLOCATE_LIST_OR(
* memory_allocator, Tensor, num_tensors, {
* *out_err = Error::MemoryAllocationFailed;
* return nullopt;
* });
* @endcode
*/
#define ET_TRY_ALLOCATE_LIST_OR(memory_allocator__, type__, nelem__, ...) \
({ \
type__* et_try_allocate_result = \
memory_allocator__->allocateList<type__>(nelem__); \
if (et_try_allocate_result == nullptr && nelem__ > 0) { \
__VA_ARGS__ \
/* The args must return. */ \
ET_UNREACHABLE(); \
} \
et_try_allocate_result; \
})
#else // !ET_HAVE_GNU_STATEMENT_EXPRESSIONS
/**
* The recommended alternative for statement expression-incompatible compilers
* is to directly allocate the memory.
* e.g. memory_allocator__->allocate(nbytes__);
*/
#define ET_TRY_ALLOCATE_OR(memory_allocator__, nbytes__, ...) \
static_assert( \
false, \
"ET_TRY_ALLOCATE_OR uses statement expressions and \
thus is not available for use with this compiler.");

/**
* The recommended alternative for statement expression-incompatible compilers
* is to directly allocate the memory.
* e.g. memory_allocator__->allocateInstance<type__>();
*/
#define ET_TRY_ALLOCATE_INSTANCE_OR(memory_allocator__, type__, ...) \
static_assert( \
false, \
"ET_TRY_ALLOCATE_INSTANCE_OR uses statement \
expressions and thus is not available for use with this compiler.");

/**
* The recommended alternative for statement expression-incompatible compilers
* is to directly use allocate the memory.
* e.g. memory_allocator__->allocateList<type__>(nelem__);
*/
#define ET_TRY_ALLOCATE_LIST_OR(memory_allocator__, type__, nelem__, ...) \
static_assert( \
false, \
"ET_TRY_ALLOCATE_LIST_OR uses statement \
expressions and thus is not available for use with this compiler.");
#endif // !ET_HAVE_GNU_STATEMENT_EXPRESSIONS

/**
* Tries allocating from the specified MemoryAllocator*.
*
* - On success, returns a pointer to the allocated buffer.
* - On failure, returns `Error::MemoryAllocationFailed` from the calling
* function, which must be declared to return `executorch::runtime::Error`.
*
* Example:
* @code
* char* buf = ET_ALLOCATE_OR_RETURN_ERROR(memory_allocator, bufsize);
* @endcode
*/
#define ET_ALLOCATE_OR_RETURN_ERROR(memory_allocator__, nbytes__) \
ET_TRY_ALLOCATE_OR(memory_allocator__, nbytes__, { \
return ::executorch::runtime::Error::MemoryAllocationFailed; \
})

/**
* Tries allocating an instance of type__ from the specified MemoryAllocator*.
*
* - On success, returns a pointer to the allocated buffer. Note that the memory
* will not be initialized.
* - On failure, returns `Error::MemoryAllocationFailed` from the calling
* function, which must be declared to return `executorch::runtime::Error`.
*
* Example:
* @code
* char* buf = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(memory_allocator, MyType);
* @endcode
*/
#define ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(memory_allocator__, type__) \
ET_TRY_ALLOCATE_INSTANCE_OR(memory_allocator__, type__, { \
return ::executorch::runtime::Error::MemoryAllocationFailed; \
})

/**
* Tries allocating multiple elements of a given type from the specified
* MemoryAllocator*.
*
* - On success, returns a pointer to the allocated buffer.
* - On failure, returns `Error::MemoryAllocationFailed` from the calling
* function, which must be declared to return `executorch::runtime::Error`.
*
* Example:
* @code
* Tensor* tensor_list = ET_ALLOCATE_LIST_OR_RETURN_ERROR(
* memory_allocator, Tensor, num_tensors);
* @endcode
*/
#define ET_ALLOCATE_LIST_OR_RETURN_ERROR(memory_allocator__, type__, nelem__) \
ET_TRY_ALLOCATE_LIST_OR(memory_allocator__, type__, nelem__, { \
return ::executorch::runtime::Error::MemoryAllocationFailed; \
})

} // namespace runtime
} // namespace executorch
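
Migration note for the remaining removed macros (following the "recommended alternative" comments above): ET_ALLOCATE_LIST_OR_RETURN_ERROR and ET_ALLOCATE_OR_RETURN_ERROR map to allocateList<T>(n) and allocate(nbytes) plus an explicit nullptr check, mirroring the ExecutorBackend.cpp hunks earlier in this diff. A short sketch; the function name, buffer layout, and include paths are assumptions for illustration, not code from this PR.

// Sketch of replacing the list / raw-buffer macros with direct calls.
#include <cstddef>
#include <cstdint>

#include <executorch/runtime/core/error.h>
#include <executorch/runtime/core/memory_allocator.h>

using executorch::runtime::Error;
using executorch::runtime::MemoryAllocator;

Error plan_buffers_example(MemoryAllocator* allocator, size_t num_buffers) {
  // Was: ET_ALLOCATE_LIST_OR_RETURN_ERROR(allocator, uint8_t*, num_buffers);
  uint8_t** buffers = allocator->allocateList<uint8_t*>(num_buffers);
  if (buffers == nullptr) {
    return Error::MemoryAllocationFailed;
  }

  // Was: ET_ALLOCATE_OR_RETURN_ERROR(allocator, kPoolSize);
  const size_t kPoolSize = 4 * 1024U;
  void* pool = allocator->allocate(kPoolSize);
  if (pool == nullptr) {
    return Error::MemoryAllocationFailed;
  }

  (void)buffers;  // in real code these would back a HierarchicalAllocator
  (void)pool;
  return Error::Ok;
}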
