// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

//
// This file contains the post-run code for the ORT WebAssembly module. The code in this file will be injected into the
// final module using Emscripten's `--post-js` option.
//
// This file will only be used in builds with the flag `--use_webnn`.
| 10 | +/** |
| 11 | + * This function is called only once when initializing the WebNN backend. |
| 12 | + * |
| 13 | + * @param params WebNN initialization parameters. |
| 14 | + */ |
| 15 | +Module["webnnInit"] = (params) => { |
| 16 | + // Functions called from EM_ASM need to be assigned in a way that can be minified. |
| 17 | + // Functions called via emscripten::val::module_property need to be assigned by name so that the minifier doesn't |
| 18 | + // change the name. |
| 19 | + |
| 20 | + const backend = params[0]; |
| 21 | + [ |
| 22 | + Module.webnnReserveTensorId, |
| 23 | + Module.webnnReleaseTensorId, |
| 24 | + Module["webnnEnsureTensor"], |
| 25 | + Module.webnnUploadTensor, |
| 26 | + Module["webnnDownloadTensor"], |
| 27 | + Module["webnnEnableTraceEvent"], |
| 28 | + ] = params.slice(1); |
| 29 | + |
| 30 | + // This function is called from both JS and an EM_ASM block, it needs both a minifiable name and an explicit name. |
| 31 | + Module["webnnReleaseTensorId"] = Module.webnnReleaseTensorId; |
| 32 | + Module["webnnUploadTensor"] = Module.webnnUploadTensor; |
| 33 | + |
| 34 | + // Functions called from JS also need to have explicit names. |
| 35 | + Module["webnnOnRunStart"] = (sessionId) => { |
| 36 | + return backend["onRunStart"](sessionId); |
| 37 | + }; |
| 38 | + Module["webnnOnRunEnd"] = backend["onRunEnd"].bind(backend); |
| 39 | + Module["webnnRegisterMLContext"] = (sessionId, mlContext) => { |
| 40 | + backend["registerMLContext"](sessionId, mlContext); |
| 41 | + }; |
| 42 | + Module["webnnOnReleaseSession"] = (sessionId) => { |
| 43 | + backend["onReleaseSession"](sessionId); |
| 44 | + }; |
| 45 | + Module["webnnCreateMLTensorDownloader"] = (tensorId, type) => { |
| 46 | + return backend["createMLTensorDownloader"](tensorId, type); |
| 47 | + }; |
| 48 | + Module["webnnRegisterMLTensor"] = (sessionId, tensor, dataType, shape) => { |
| 49 | + return backend["registerMLTensor"](sessionId, tensor, dataType, shape); |
| 50 | + }; |
| 51 | + Module["webnnCreateMLContext"] = (optionsOrGpuDevice) => { |
| 52 | + return backend["createMLContext"](optionsOrGpuDevice); |
| 53 | + }; |
| 54 | + Module["webnnRegisterMLConstant"] = ( |
| 55 | + externalFilePath, |
| 56 | + dataOffset, |
| 57 | + dataLength, |
| 58 | + builder, |
| 59 | + desc, |
| 60 | + shouldConvertInt64ToInt32 |
| 61 | + ) => { |
| 62 | + return backend["registerMLConstant"]( |
| 63 | + externalFilePath, |
| 64 | + dataOffset, |
| 65 | + dataLength, |
| 66 | + builder, |
| 67 | + desc, |
| 68 | + Module.MountedFiles, |
| 69 | + shouldConvertInt64ToInt32 |
| 70 | + ); |
| 71 | + }; |
| 72 | + Module["webnnRegisterGraphInput"] = |
| 73 | + backend["registerGraphInput"].bind(backend); |
| 74 | + Module["webnnIsGraphInput"] = backend["isGraphInput"].bind(backend); |
| 75 | + Module["webnnRegisterGraphOutput"] = |
| 76 | + backend["registerGraphOutput"].bind(backend); |
| 77 | + Module["webnnIsGraphOutput"] = backend["isGraphOutput"].bind(backend); |
| 78 | + |
| 79 | + Module["webnnCreateTemporaryTensor"] = |
| 80 | + backend["createTemporaryTensor"].bind(backend); |
| 81 | + Module["webnnIsGraphInputOutputTypeSupported"] = |
| 82 | + backend["isGraphInputOutputTypeSupported"].bind(backend); |
| 83 | +}; |