Skip to content

Fix pthreads with closure compiler #9569

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 32 commits into incoming from pclos
Oct 16, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
d52fc09
wip [ci skip]
kripken Oct 1, 2019
ef25998
wip but test hangs [ci skip]
kripken Oct 2, 2019
2db2b06
Merge remote-tracking branch 'origin/incoming' into pclos
kripken Oct 2, 2019
b3edb10
Merge remote-tracking branch 'origin/incoming' into pclos
kripken Oct 2, 2019
5c4ec01
escaping [ci skip]
kripken Oct 2, 2019
70bf88c
wip [ci skip]
kripken Oct 2, 2019
a1ab547
cleanup
kripken Oct 2, 2019
21790b0
work [ci skip]
kripken Oct 2, 2019
82d64bd
more [ci skip]
kripken Oct 2, 2019
d997f87
fix [ci skip]
kripken Oct 2, 2019
db8bb49
more [ci skip]
kripken Oct 2, 2019
66e443e
fix [ci skip]
kripken Oct 2, 2019
79b982c
fix [ci skip]
kripken Oct 2, 2019
082844b
fix
kripken Oct 2, 2019
dacf03c
fix
kripken Oct 2, 2019
400860e
Merge remote-tracking branch 'origin/incoming' into pclos
kripken Oct 2, 2019
9cca502
even simpler
kripken Oct 2, 2019
3ba9b83
cleanup
kripken Oct 2, 2019
12e3ffc
fix
kripken Oct 3, 2019
c702690
TO_REVERT
kripken Oct 3, 2019
bbb3d0f
cleanup
kripken Oct 3, 2019
1a80c35
fix
kripken Oct 3, 2019
d359916
fixes
kripken Oct 3, 2019
942027e
Revert "TO_REVERT"
kripken Oct 3, 2019
b6427c4
fastcomp fix
kripken Oct 3, 2019
5ea6a10
Use chrome stable on CI for now as there is some breakage
kripken Oct 3, 2019
ae56b58
Merge branch 'beta' into pclos
kripken Oct 3, 2019
800376c
ifdef
kripken Oct 7, 2019
5502533
Merge remote-tracking branch 'origin/incoming' into pclos
kripken Oct 7, 2019
2100a64
missing dep
kripken Oct 7, 2019
7cc8ed0
Merge remote-tracking branch 'origin/incoming' into pclos
kripken Oct 16, 2019
e6c0d74
better comments
kripken Oct 16, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 7 additions & 1 deletion emcc.py
Original file line number Diff line number Diff line change
Expand Up @@ -1191,6 +1191,9 @@ def is_supported_link_flag(f):
# These runtime methods are called from worker.js
shared.Settings.EXPORTED_RUNTIME_METHODS += ['establishStackSpace', 'dynCall_ii']

if shared.Settings.STACK_OVERFLOW_CHECK:
shared.Settings.EXPORTED_RUNTIME_METHODS += ['writeStackCookie', 'checkStackCookie', 'abortStackOverflow']

if shared.Settings.MODULARIZE_INSTANCE:
shared.Settings.MODULARIZE = 1

Expand Down Expand Up @@ -1357,7 +1360,10 @@ def is_supported_link_flag(f):
newargs += ['-pthread']
# some pthreads code is in asm.js library functions, which are auto-exported; for the wasm backend, we must
# manually export them
shared.Settings.EXPORTED_FUNCTIONS += ['_emscripten_get_global_libc', '___pthread_tsd_run_dtors', '__register_pthread_ptr', '_pthread_self', '___emscripten_pthread_data_constructor']
shared.Settings.EXPORTED_FUNCTIONS += [
'_emscripten_get_global_libc', '___pthread_tsd_run_dtors',
'__register_pthread_ptr', '_pthread_self',
'___emscripten_pthread_data_constructor', '_emscripten_futex_wake']

# set location of worker.js
shared.Settings.PTHREAD_WORKER_FILE = unsuffixed(os.path.basename(target)) + '.worker.js'
Expand Down
12 changes: 12 additions & 0 deletions src/closure-externs.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,18 @@ Math.max = function() {};
Math.clz32 = function() {};
Math.trunc = function() {};

/**
* Atomics
*/

var Atomics = {};
Atomics.compareExchange = function() {};
Atomics.exchange = function() {};
Atomics.wait = function() {};
Atomics.notify = function() {};
Atomics.load = function() {};
Atomics.store = function() {};

/**
* SIMD.js support (not in upstream closure yet).
*/
Expand Down
14 changes: 7 additions & 7 deletions src/library.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@
LibraryManager.library = {
// keep this low in memory, because we flatten arrays with them in them
#if USE_PTHREADS
_impure_ptr: '; if (ENVIRONMENT_IS_PTHREAD) __impure_ptr = PthreadWorkerInit.__impure_ptr; else PthreadWorkerInit.__impure_ptr __impure_ptr = {{{ makeStaticAlloc(4) }}}',
__dso_handle: '; if (ENVIRONMENT_IS_PTHREAD) ___dso_handle = PthreadWorkerInit.___dso_handle; else PthreadWorkerInit.___dso_handle = ___dso_handle = {{{ makeStaticAlloc(4) }}}',
_impure_ptr: '; if (ENVIRONMENT_IS_PTHREAD) __impure_ptr = PthreadWorkerInit["__impure_ptr"]; else PthreadWorkerInit["__impure_ptr"] = {{{ makeStaticAlloc(4) }}}',
__dso_handle: '; if (ENVIRONMENT_IS_PTHREAD) ___dso_handle = PthreadWorkerInit["___dso_handle"]; else PthreadWorkerInit["___dso_handle"] = ___dso_handle = {{{ makeStaticAlloc(4) }}}',
#else
_impure_ptr: '{{{ makeStaticAlloc(1) }}}',
__dso_handle: '{{{ makeStaticAlloc(1) }}}',
Expand Down Expand Up @@ -1880,9 +1880,9 @@ LibraryManager.library = {

// Statically allocated time struct.
#if USE_PTHREADS
__tm_current: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_current = PthreadWorkerInit.___tm_current; else PthreadWorkerInit.___tm_current = ___tm_current = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
__tm_timezone: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_timezone = PthreadWorkerInit.___tm_timezone; else PthreadWorkerInit.___tm_timezone = ___tm_timezone = {{{ makeStaticString("GMT") }}}',
__tm_formatted: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_formatted = PthreadWorkerInit.___tm_formatted; else PthreadWorkerInit.___tm_formatted = ___tm_formatted = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
__tm_current: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_current = PthreadWorkerInit["___tm_current"]; else PthreadWorkerInit["___tm_current"] = ___tm_current = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
__tm_timezone: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_timezone = PthreadWorkerInit["___tm_timezone"]; else PthreadWorkerInit["___tm_timezone"] = ___tm_timezone = {{{ makeStaticString("GMT") }}}',
__tm_formatted: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_formatted = PthreadWorkerInit["___tm_formatted"]; else PthreadWorkerInit["___tm_formatted"] = ___tm_formatted = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
#else
__tm_current: '{{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
// Statically allocated copy of the string "GMT" for gmtime() to point to
Expand Down Expand Up @@ -3239,8 +3239,8 @@ LibraryManager.library = {
// ==========================================================================

#if USE_PTHREADS
in6addr_any: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_any = PthreadWorkerInit._in6addr_any; else PthreadWorkerInit._in6addr_any = _in6addr_any = {{{ makeStaticAlloc(16) }}}',
in6addr_loopback: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_loopback = PthreadWorkerInit._in6addr_loopback; else PthreadWorkerInit._in6addr_loopback = _in6addr_loopback = {{{ makeStaticAlloc(16) }}}',
in6addr_any: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_any = PthreadWorkerInit["_in6addr_any"]; else PthreadWorkerInit["_in6addr_any"] = _in6addr_any = {{{ makeStaticAlloc(16) }}}',
in6addr_loopback: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_loopback = PthreadWorkerInit["_in6addr_loopback"]; else PthreadWorkerInit["_in6addr_loopback"] = _in6addr_loopback = {{{ makeStaticAlloc(16) }}}',
#else
in6addr_any:
'{{{ makeStaticAlloc(16) }}}',
Expand Down
2 changes: 1 addition & 1 deletion src/library_fetch.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
var LibraryFetch = {
#if USE_PTHREADS
$Fetch__postset: 'if (!ENVIRONMENT_IS_PTHREAD) Fetch.staticInit();',
fetch_work_queue: '; if (ENVIRONMENT_IS_PTHREAD) _fetch_work_queue = PthreadWorkerInit._fetch_work_queue; else PthreadWorkerInit._fetch_work_queue = _fetch_work_queue = {{{ makeStaticAlloc(12) }}}',
fetch_work_queue: '; if (ENVIRONMENT_IS_PTHREAD) _fetch_work_queue = PthreadWorkerInit["_fetch_work_queue"]; else PthreadWorkerInit["_fetch_work_queue"] = _fetch_work_queue = {{{ makeStaticAlloc(12) }}}',
#else
$Fetch__postset: 'Fetch.staticInit();',
fetch_work_queue: '{{{ makeStaticAlloc(12) }}}',
Expand Down
130 changes: 72 additions & 58 deletions src/library_pthread.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,11 @@
// found in the LICENSE file.

var LibraryPThread = {
$PThread__postset: 'if (!ENVIRONMENT_IS_PTHREAD) PThread.initMainThreadBlock();',
$PThread__deps: ['$PROCINFO', '_register_pthread_ptr', 'emscripten_main_thread_process_queued_calls', '$ERRNO_CODES', 'emscripten_futex_wake'],
$PThread__postset: 'if (!ENVIRONMENT_IS_PTHREAD) PThread.initMainThreadBlock(); else PThread.initWorker();',
$PThread__deps: ['$PROCINFO', '_register_pthread_ptr',
'emscripten_main_thread_process_queued_calls',
'$ERRNO_CODES', 'emscripten_futex_wake', '_kill_thread',
'_cancel_thread', '_cleanup_thread'],
$PThread: {
MAIN_THREAD_ID: 1, // A special constant that identifies the main JS thread ID.
mainThreadInfo: {
Expand Down Expand Up @@ -65,6 +68,16 @@ var LibraryPThread = {
PThread.createProfilerBlock(PThread.mainThreadBlock);
PThread.setThreadName(PThread.mainThreadBlock, "Browser main thread");
PThread.setThreadStatus(PThread.mainThreadBlock, {{{ cDefine('EM_THREAD_STATUS_RUNNING') }}});
#endif
},
initWorker: function() {
#if USE_CLOSURE_COMPILER
// worker.js is not compiled together with us, and must access certain
// things.
PThread['receiveObjectTransfer'] = PThread.receiveObjectTransfer;
PThread['setThreadStatus'] = PThread.setThreadStatus;
PThread['threadCancel'] = PThread.threadCancel;
PThread['threadExit'] = PThread.threadExit;
#endif
},
// Maps pthread_t to pthread info objects
Expand Down Expand Up @@ -178,7 +191,7 @@ var LibraryPThread = {
if (ENVIRONMENT_IS_PTHREAD) {
// Note: in theory we would like to return any offscreen canvases back to the main thread,
// but if we ever fetched a rendering context for them that would not be valid, so we don't try.
postMessage({ cmd: 'exit' });
postMessage({ 'cmd': 'exit' });
}
}
},
Expand All @@ -190,7 +203,7 @@ var LibraryPThread = {
_emscripten_futex_wake(threadInfoStruct + {{{ C_STRUCTS.pthread.threadStatus }}}, {{{ cDefine('INT_MAX') }}}); // wake all threads
threadInfoStruct = selfThreadId = 0; // Not hosting a pthread anymore in this worker, reset the info structures to null.
__register_pthread_ptr(0, 0, 0); // Unregister the thread block also inside the asm.js scope.
postMessage({ cmd: 'cancelDone' });
postMessage({ 'cmd': 'cancelDone' });
},

terminateAllThreads: function() {
Expand Down Expand Up @@ -305,32 +318,32 @@ var LibraryPThread = {

// Ask the new worker to load up the Emscripten-compiled page. This is a heavy operation.
worker.postMessage({
cmd: 'load',
'cmd': 'load',
// If the application main .js file was loaded from a Blob, then it is not possible
// to access the URL of the current script that could be passed to a Web Worker so that
// it could load up the same file. In that case, developer must either deliver the Blob
// object in Module['mainScriptUrlOrBlob'], or a URL to it, so that pthread Workers can
// independently load up the same main application file.
urlOrBlob: Module['mainScriptUrlOrBlob'] || _scriptDir,
'urlOrBlob': Module['mainScriptUrlOrBlob'] || _scriptDir,
#if WASM
wasmMemory: wasmMemory,
wasmModule: wasmModule,
'wasmMemory': wasmMemory,
'wasmModule': wasmModule,
#if LOAD_SOURCE_MAP
wasmSourceMap: wasmSourceMap,
'wasmSourceMap': wasmSourceMap,
#endif
#if USE_OFFSET_CONVERTER
wasmOffsetConverter: wasmOffsetConverter,
'wasmOffsetConverter': wasmOffsetConverter,
#endif
#else
buffer: HEAPU8.buffer,
asmJsUrlOrBlob: Module["asmJsUrlOrBlob"],
'buffer': HEAPU8.buffer,
'asmJsUrlOrBlob': Module["asmJsUrlOrBlob"],
#endif
#if !WASM_BACKEND
tempDoublePtr: tempDoublePtr,
'tempDoublePtr': tempDoublePtr,
#endif
DYNAMIC_BASE: DYNAMIC_BASE,
DYNAMICTOP_PTR: DYNAMICTOP_PTR,
PthreadWorkerInit: PthreadWorkerInit
'DYNAMIC_BASE': DYNAMIC_BASE,
'DYNAMICTOP_PTR': DYNAMICTOP_PTR,
'PthreadWorkerInit': PthreadWorkerInit
});
PThread.unusedWorkers.push(worker);
}
Expand All @@ -346,34 +359,35 @@ var LibraryPThread = {
var worker = workers[i];
(function(worker) {
worker.onmessage = function(e) {
var d = e.data;
var d = e['data'];
var cmd = d['cmd'];
// Sometimes we need to backproxy events to the calling thread (e.g. HTML5 DOM events handlers such as emscripten_set_mousemove_callback()), so keep track in a globally accessible variable about the thread that initiated the proxying.
if (worker.pthread) PThread.currentProxiedOperationCallerThread = worker.pthread.threadInfoStruct;

// If this message is intended to a recipient that is not the main thread, forward it to the target thread.
if (d.targetThread && d.targetThread != _pthread_self()) {
if (d['targetThread'] && d['targetThread'] != _pthread_self()) {
var thread = PThread.pthreads[d.targetThread];
if (thread) {
thread.worker.postMessage(e.data, d.transferList);
thread.worker.postMessage(e.data, d['transferList']);
} else {
console.error('Internal error! Worker sent a message "' + d.cmd + '" to target pthread ' + d.targetThread + ', but that thread no longer exists!');
console.error('Internal error! Worker sent a message "' + cmd + '" to target pthread ' + d['targetThread'] + ', but that thread no longer exists!');
}
PThread.currentProxiedOperationCallerThread = undefined;
return;
}

if (d.cmd === 'processQueuedMainThreadWork') {
if (cmd === 'processQueuedMainThreadWork') {
// TODO: Must post message to main Emscripten thread in PROXY_TO_WORKER mode.
_emscripten_main_thread_process_queued_calls();
} else if (d.cmd === 'spawnThread') {
} else if (cmd === 'spawnThread') {
__spawn_thread(e.data);
} else if (d.cmd === 'cleanupThread') {
__cleanup_thread(d.thread);
} else if (d.cmd === 'killThread') {
__kill_thread(d.thread);
} else if (d.cmd === 'cancelThread') {
__cancel_thread(d.thread);
} else if (d.cmd === 'loaded') {
} else if (cmd === 'cleanupThread') {
__cleanup_thread(d['thread']);
} else if (cmd === 'killThread') {
__kill_thread(d['thread']);
} else if (cmd === 'cancelThread') {
__cancel_thread(d['thread']);
} else if (cmd === 'loaded') {
worker.loaded = true;
// If this Worker is already pending to start running a thread, launch the thread now
if (worker.runPthread) {
Expand All @@ -384,34 +398,34 @@ var LibraryPThread = {
if (numWorkersLoaded === numWorkers && onFinishedLoading) {
onFinishedLoading();
}
} else if (d.cmd === 'print') {
out('Thread ' + d.threadId + ': ' + d.text);
} else if (d.cmd === 'printErr') {
err('Thread ' + d.threadId + ': ' + d.text);
} else if (d.cmd === 'alert') {
alert('Thread ' + d.threadId + ': ' + d.text);
} else if (d.cmd === 'exit') {
} else if (cmd === 'print') {
out('Thread ' + d['threadId'] + ': ' + d['text']);
} else if (cmd === 'printErr') {
err('Thread ' + d['threadId'] + ': ' + d['text']);
} else if (cmd === 'alert') {
alert('Thread ' + d['threadId'] + ': ' + d['text']);
} else if (cmd === 'exit') {
var detached = worker.pthread && Atomics.load(HEAPU32, (worker.pthread.thread + {{{ C_STRUCTS.pthread.detached }}}) >> 2);
if (detached) {
PThread.returnWorkerToPool(worker);
}
} else if (d.cmd === 'exitProcess') {
} else if (cmd === 'exitProcess') {
// A pthread has requested to exit the whole application process (runtime).
noExitRuntime = false;
try {
exit(d.returnCode);
exit(d['returnCode']);
} catch (e) {
if (e instanceof ExitStatus) return;
throw e;
}
} else if (d.cmd === 'cancelDone') {
} else if (cmd === 'cancelDone') {
PThread.returnWorkerToPool(worker);
} else if (d.cmd === 'objectTransfer') {
} else if (cmd === 'objectTransfer') {
PThread.receiveObjectTransfer(e.data);
} else if (e.data.target === 'setimmediate') {
worker.postMessage(e.data); // Worker wants to postMessage() to itself to implement setImmediate() emulation.
} else {
err("worker sent an unknown command " + d.cmd);
err("worker sent an unknown command " + cmd);
}
PThread.currentProxiedOperationCallerThread = undefined;
};
Expand Down Expand Up @@ -483,7 +497,7 @@ var LibraryPThread = {
if (ENVIRONMENT_IS_PTHREAD) throw 'Internal Error! _cancel_thread() can only ever be called from main application thread!';
if (!pthread_ptr) throw 'Internal Error! Null pthread_ptr in _cancel_thread!';
var pthread = PThread.pthreads[pthread_ptr];
pthread.worker.postMessage({ cmd: 'cancel' });
pthread.worker.postMessage({ 'cmd': 'cancel' });
},

_spawn_thread: function(threadParams) {
Expand Down Expand Up @@ -537,17 +551,17 @@ var LibraryPThread = {

worker.pthread = pthread;
var msg = {
cmd: 'run',
start_routine: threadParams.startRoutine,
arg: threadParams.arg,
threadInfoStruct: threadParams.pthread_ptr,
selfThreadId: threadParams.pthread_ptr, // TODO: Remove this since thread ID is now the same as the thread address.
parentThreadId: threadParams.parent_pthread_ptr,
stackBase: threadParams.stackBase,
stackSize: threadParams.stackSize,
'cmd': 'run',
'start_routine': threadParams.startRoutine,
'arg': threadParams.arg,
'threadInfoStruct': threadParams.pthread_ptr,
'selfThreadId': threadParams.pthread_ptr, // TODO: Remove this since thread ID is now the same as the thread address.
'parentThreadId': threadParams.parent_pthread_ptr,
'stackBase': threadParams.stackBase,
'stackSize': threadParams.stackSize,
#if OFFSCREENCANVAS_SUPPORT
moduleCanvasId: threadParams.moduleCanvasId,
offscreenCanvases: threadParams.offscreenCanvases,
'moduleCanvasId': threadParams.moduleCanvasId,
'offscreenCanvases': threadParams.offscreenCanvases,
#endif
};
worker.runPthread = function() {
Expand All @@ -562,7 +576,7 @@ var LibraryPThread = {
},

_num_logical_cores__deps: ['emscripten_force_num_logical_cores'],
_num_logical_cores: '; if (ENVIRONMENT_IS_PTHREAD) __num_logical_cores = PthreadWorkerInit.__num_logical_cores; else { PthreadWorkerInit.__num_logical_cores = __num_logical_cores = {{{ makeStaticAlloc(4) }}}; HEAPU32[__num_logical_cores>>2] = navigator["hardwareConcurrency"] || ' + {{{ PTHREAD_HINT_NUM_CORES }}} + '; }',
_num_logical_cores: '; if (ENVIRONMENT_IS_PTHREAD) __num_logical_cores = PthreadWorkerInit["__num_logical_cores"]; else { PthreadWorkerInit["__num_logical_cores"] = __num_logical_cores = {{{ makeStaticAlloc(4) }}}; HEAPU32[__num_logical_cores>>2] = navigator["hardwareConcurrency"] || ' + {{{ PTHREAD_HINT_NUM_CORES }}} + '; }',

emscripten_has_threading_support: function() {
return typeof SharedArrayBuffer !== 'undefined';
Expand Down Expand Up @@ -822,7 +836,7 @@ var LibraryPThread = {
Atomics.store(HEAPU32, (thread + {{{ C_STRUCTS.pthread.detached }}} ) >> 2, 1); // Mark the thread as detached.

if (!ENVIRONMENT_IS_PTHREAD) __cleanup_thread(thread);
else postMessage({ cmd: 'cleanupThread', thread: thread });
else postMessage({ 'cmd': 'cleanupThread', 'thread': thread });
return 0;
}
// TODO HACK! Replace the _js variant with just _pthread_testcancel:
Expand Down Expand Up @@ -854,7 +868,7 @@ var LibraryPThread = {
}
if (signal != 0) {
if (!ENVIRONMENT_IS_PTHREAD) __kill_thread(thread);
else postMessage({ cmd: 'killThread', thread: thread});
else postMessage({ 'cmd': 'killThread', 'thread': thread});
}
return 0;
},
Expand All @@ -876,7 +890,7 @@ var LibraryPThread = {
}
Atomics.compareExchange(HEAPU32, (thread + {{{ C_STRUCTS.pthread.threadStatus }}} ) >> 2, 0, 2); // Signal the thread that it needs to cancel itself.
if (!ENVIRONMENT_IS_PTHREAD) __cancel_thread(thread);
else postMessage({ cmd: 'cancelThread', thread: thread});
else postMessage({ 'cmd': 'cancelThread', 'thread': thread});
return 0;
},

Expand Down Expand Up @@ -1068,7 +1082,7 @@ var LibraryPThread = {
},

// Stores the memory address that the main thread is waiting on, if any.
_main_thread_futex_wait_address: '; if (ENVIRONMENT_IS_PTHREAD) __main_thread_futex_wait_address = PthreadWorkerInit.__main_thread_futex_wait_address; else PthreadWorkerInit.__main_thread_futex_wait_address = __main_thread_futex_wait_address = {{{ makeStaticAlloc(4) }}}',
_main_thread_futex_wait_address: '; if (ENVIRONMENT_IS_PTHREAD) __main_thread_futex_wait_address = PthreadWorkerInit["__main_thread_futex_wait_address"]; else PthreadWorkerInit["__main_thread_futex_wait_address"] = __main_thread_futex_wait_address = {{{ makeStaticAlloc(4) }}}',

// Returns 0 on success, or one of the values -ETIMEDOUT, -EWOULDBLOCK or -EINVAL on error.
emscripten_futex_wait__deps: ['_main_thread_futex_wait_address', 'emscripten_main_thread_process_queued_calls'],
Expand Down Expand Up @@ -1159,7 +1173,7 @@ var LibraryPThread = {

__call_main: function(argc, argv) {
var returnCode = _main(argc, argv);
if (!noExitRuntime) postMessage({ cmd: 'exitProcess', returnCode: returnCode });
if (!noExitRuntime) postMessage({ 'cmd': 'exitProcess', 'returnCode': returnCode });
return returnCode;
},

Expand Down
Loading