
Commit 3c69dae

[libwebaudio.js] Convert to handle allocator. NFC
Also:
- Remove duplication in precondition checks.
- Use `dbg()` for debug messages.

The use of the HandleAllocator does add about 70 bytes to the JS payload, but that will be amortized over the different places where HandleAllocator is also used.
1 parent 87d9a75 commit 3c69dae
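For context on the helper this commit adopts: judging only from the call sites in the diff below, the $HandleAllocator maps small integer handles to JS objects via allocate()/get()/has()/free(). A minimal illustrative sketch of such an allocator follows; it is an assumption for readability, not the actual Emscripten implementation, which may differ in detail.

// Illustrative sketch (assumption): a handle allocator matching the
// allocate()/get()/has()/free() calls used in this commit. Slot 0 is kept
// unused so that a zero handle can mean "no handle".
class HandleAllocatorSketch {
  constructor() {
    this.allocated = [undefined]; // slot 0 reserved
    this.freelist = [];           // recycled IDs
  }
  allocate(obj) {
    var id = this.freelist.length ? this.freelist.pop() : this.allocated.length;
    this.allocated[id] = obj;
    return id;
  }
  has(id) { return this.allocated[id] !== undefined; }
  get(id) { return this.allocated[id]; }
  free(id) {
    this.allocated[id] = undefined;
    this.freelist.push(id); // make the ID reusable
  }
}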

File tree: 4 files changed (+168, −149 lines)

src/lib/libwebaudio.js

Lines changed: 85 additions & 82 deletions
@@ -9,38 +9,58 @@
 #endif

 var LibraryWebAudio = {
-  $EmAudio: {},
-  $EmAudioCounter: 0,
+  $emAudio__deps: ['$HandleAllocator'],
+  $emAudio: 'new HandleAllocator();',

   // Call this function from JavaScript to register a Wasm-side handle to an AudioContext that
   // you have already created manually without calling emscripten_create_audio_context().
   // Note: To let that AudioContext be garbage collected later, call the function
   // emscriptenDestroyAudioContext() to unbind it from Wasm.
-  $emscriptenRegisterAudioObject__deps: ['$EmAudio', '$EmAudioCounter'],
   $emscriptenRegisterAudioObject: (object) => {
 #if ASSERTIONS
     assert(object, 'Called emscriptenRegisterAudioObject() with a null object handle!');
 #endif
-    EmAudio[++EmAudioCounter] = object;
+    var id = emAudio.allocate(object);
 #if WEBAUDIO_DEBUG
-    console.log(`Registered new WebAudio object ${object} with ID ${EmAudioCounter}`);
+    dbg(`Registered new WebAudio object ${object} with ID ${id}`);
 #endif
-    return EmAudioCounter;
+    return id;
   },

+#if ASSERTIONS || WEBAUDIO_DEBUG
+  $emAudioCheckHandle__internal: true,
+  $emAudioCheckHandle(handle, methodName, isNode = false) {
+#if WEBAUDIO_DEBUG
+    dbg(`called ${methodName}() with ID ${handle}`);
+#endif
+#if ASSERTIONS
+    assert(emAudio.has(handle), `Called ${methodName}() on a nonexisting handle ${handle}`);
+    var obj = emAudio.get(handle);
+    if (isNode == 2) {
+      // Some methods accept either a node or an audio context
+      assert(obj instanceof window.AudioNode || obj instanceof (window.AudioContext || window.webkitAudioContext), `Called ${methodName}() on a context handle ${handle} that is not an AudioNode, but of type ${typeof obj}`);
+    } else if (isNode) {
+      assert(obj instanceof window.AudioNode, `Called ${methodName}() on a context handle ${handle} that is not an AudioNode, but of type ${typeof obj}`);
+    } else {
+      assert(obj instanceof (window.AudioContext || window.webkitAudioContext), `Called ${methodName}() on a context handle ${handle} that is not an AudioContext, but of type ${typeof obj}`);
+    }
+#endif
+  },
+#endif
+
   // Call this function from JavaScript to destroy a Wasm-side handle to an AudioContext.
   // After calling this function, it is no longer possible to reference this AudioContext
   // from Wasm code - and the GC can reclaim it after all references to it are cleared.
   $emscriptenDestroyAudioContext: 'emscripten_destroy_audio_context',

   // Call this function from JavaScript to get the Web Audio object corresponding to the given
   // Wasm handle ID.
-  $emscriptenGetAudioObject: (objectHandle) => EmAudio[objectHandle],
+  $emscriptenGetAudioObject: (objectHandle) => emAudio.get(objectHandle),

   // Performs the work of getting the AudioContext's render quantum size.
   $emscriptenGetContextQuantumSize: (contextHandle) => {
     // TODO: in a future release this will be something like:
-    // return EmAudio[contextHandle].renderQuantumSize || 128;
+    // return emAudio.get(contextHandle).renderQuantumSize || 128;
     // It comes with two caveats: it needs the hint when generating the context adding to
     // emscripten_create_audio_context(), and altering the quantum requires a secure
     // context and fallback implementing. Until then we simply use the 1.0 API value:
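The isNode argument of the new emAudioCheckHandle() helper is used in three ways by later hunks of this same diff; pulled together here for reference:

// Call patterns taken from later hunks of this commit:
emAudioCheckHandle(contextHandle, 'emscripten_audio_context_state');          // default: must be an AudioContext
emAudioCheckHandle(objectHandle, 'emscripten_destroy_web_audio_node', true);  // true: must be an AudioNode
emAudioCheckHandle(destination, 'emscripten_audio_node_connect', 2);          // 2: AudioNode or AudioContext accepted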
@@ -68,7 +88,7 @@ var LibraryWebAudio = {
     } : undefined;

 #if WEBAUDIO_DEBUG
-    console.log(`Creating new WebAudio context with parameters:`);
+    dbg(`Creating new WebAudio context with parameters:`);
     console.dir(opts);
 #endif

@@ -81,79 +101,65 @@ var LibraryWebAudio = {
   },

   emscripten_resume_audio_context_async: (contextHandle, callback, userData) => {
+    var audio = emAudio.get(contextHandle);
     function cb(state) {
 #if WEBAUDIO_DEBUG
-      console.log(`emscripten_resume_audio_context_async() callback: New audio state="${EmAudio[contextHandle].state}", ID=${state}`);
+      dbg(`emscripten_resume_audio_context_async() callback: New audio state="${audio.state}", ID=${state}`);
 #endif
       {{{ makeDynCall('viip', 'callback') }}}(contextHandle, state, userData);
     }
 #if WEBAUDIO_DEBUG
-    console.log(`emscripten_resume_audio_context_async() resuming...`);
+    dbg('emscripten_resume_audio_context_async() resuming...');
 #endif
-    EmAudio[contextHandle].resume().then(() => { cb(1/*running*/) }).catch(() => { cb(0/*suspended*/) });
+    audio.resume().then(() => { cb(1/*running*/) }).catch(() => { cb(0/*suspended*/) });
   },

   emscripten_resume_audio_context_sync: (contextHandle) => {
-#if ASSERTIONS
-    assert(EmAudio[contextHandle], `Called emscripten_resume_audio_context_sync() on a nonexisting context handle ${contextHandle}`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_resume_audio_context_sync() on a context handle ${contextHandle} that is not an AudioContext, but of type ${typeof EmAudio[contextHandle]}`);
-#endif
-#if WEBAUDIO_DEBUG
-    console.log(`AudioContext.resume() on WebAudio context with ID ${contextHandle}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_resume_audio_context_sync');
 #endif
-    EmAudio[contextHandle].resume();
+    emAudio.get(contextHandle).resume();
   },

   emscripten_audio_context_state: (contextHandle) => {
-#if ASSERTIONS
-    assert(EmAudio[contextHandle], `Called emscripten_audio_context_state() on a nonexisting context handle ${contextHandle}`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_audio_context_state() on a context handle ${contextHandle} that is not an AudioContext, but of type ${typeof EmAudio[contextHandle]}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_audio_context_state');
 #endif
-    return ['suspended', 'running', 'closed', 'interrupted'].indexOf(EmAudio[contextHandle].state);
+    return ['suspended', 'running', 'closed', 'interrupted'].indexOf(emAudio.get(contextHandle).state);
   },

   emscripten_destroy_audio_context: (contextHandle) => {
-#if ASSERTIONS
-    assert(EmAudio[contextHandle], `Called emscripten_destroy_audio_context() on an already freed context handle ${contextHandle}`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_destroy_audio_context() on a context handle ${contextHandle} that is not an AudioContext, but of type ${typeof EmAudio[contextHandle]}`);
-#endif
-#if WEBAUDIO_DEBUG
-    console.log(`Destroyed WebAudio context with ID ${contextHandle}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_destroy_audio_context');
 #endif
-    EmAudio[contextHandle].suspend();
-    delete EmAudio[contextHandle];
+    emAudio.get(contextHandle).suspend();
+    emAudio.free(contextHandle);
   },

   emscripten_destroy_web_audio_node: (objectHandle) => {
-#if ASSERTIONS
-    assert(EmAudio[objectHandle], `Called emscripten_destroy_web_audio_node() on a nonexisting/already freed object handle ${objectHandle}`);
-    assert(EmAudio[objectHandle].disconnect, `Called emscripten_destroy_web_audio_node() on a handle ${objectHandle} that is not an Web Audio Node, but of type ${typeof EmAudio[objectHandle]}`);
-#endif
-#if WEBAUDIO_DEBUG
-    console.log(`Destroyed Web Audio Node with ID ${objectHandle}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(objectHandle, 'emscripten_destroy_web_audio_node', true);
 #endif
     // Explicitly disconnect the node from Web Audio graph before letting it GC,
     // to work around browser bugs such as https://webkit.org/b/222098#c23
-    EmAudio[objectHandle].disconnect();
-    delete EmAudio[objectHandle];
+    emAudio.get(objectHandle).disconnect();
+    emAudio.free(objectHandle);
   },

 #if AUDIO_WORKLET
   // emscripten_start_wasm_audio_worklet_thread_async() doesn't use stackAlloc,
   // etc., but the created worklet does.
   emscripten_start_wasm_audio_worklet_thread_async__deps: [
     '$_wasmWorkersID',
-    '$_EmAudioDispatchProcessorCallback',
+    '$_emAudioDispatchProcessorCallback',
     '$stackAlloc', '$stackRestore', '$stackSave'],
   emscripten_start_wasm_audio_worklet_thread_async: (contextHandle, stackLowestAddress, stackSize, callback, userData) => {

-#if ASSERTIONS
-    assert(contextHandle, `Called emscripten_start_wasm_audio_worklet_thread_async() with a null Web Audio Context handle!`);
-    assert(EmAudio[contextHandle], `Called emscripten_start_wasm_audio_worklet_thread_async() with a nonexisting/already freed Web Audio Context handle ${contextHandle}!`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_start_wasm_audio_worklet_thread_async() on a context handle ${contextHandle} that is not an AudioContext, but of type ${typeof EmAudio[contextHandle]}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_start_wasm_audio_worklet_thread_async');
 #endif

-    var audioContext = EmAudio[contextHandle];
+    var audioContext = emAudio.get(contextHandle);
     var audioWorklet = audioContext.audioWorklet;

 #if ASSERTIONS
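As a usage illustration of the registration/destruction pair touched above, hand-written page JS is expected to drive it roughly like this; the Wasm export name below is invented for the example, and the exact way the handle reaches compiled code depends on the application:

// Hypothetical page-side flow (sketch): create an AudioContext manually,
// register it to obtain a Wasm-visible handle, pass the handle to compiled
// code, and unbind it when done. `_my_wasm_entry` is an invented export name.
var ctx = new AudioContext({ sampleRate: 48000 });
var handle = emscriptenRegisterAudioObject(ctx); // small integer ID from the allocator
Module._my_wasm_entry(handle);                   // compiled code refers to the context by ID
// ... later, when the context is no longer needed on the Wasm side:
emscriptenDestroyAudioContext(handle);           // suspends the context and frees the handle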
@@ -166,12 +172,12 @@ var LibraryWebAudio = {
 #endif

 #if WEBAUDIO_DEBUG
-    console.log(`emscripten_start_wasm_audio_worklet_thread_async() adding audioworklet.js...`);
+    dbg(`emscripten_start_wasm_audio_worklet_thread_async() adding audioworklet.js...`);
 #endif

     var audioWorkletCreationFailed = () => {
 #if ASSERTIONS || WEBAUDIO_DEBUG
-      console.error(`emscripten_start_wasm_audio_worklet_thread_async() addModule() failed!`);
+      dbg(`emscripten_start_wasm_audio_worklet_thread_async() addModule() failed!`);
 #endif
       {{{ makeDynCall('viip', 'callback') }}}(contextHandle, 0/*EM_FALSE*/, userData);
     };
@@ -190,7 +196,7 @@ var LibraryWebAudio = {

     audioWorklet.addModule({{{ wasmWorkerJs }}}).then(() => {
 #if WEBAUDIO_DEBUG
-      console.log(`emscripten_start_wasm_audio_worklet_thread_async() addModule() completed`);
+      dbg(`emscripten_start_wasm_audio_worklet_thread_async() addModule() completed`);
 #endif

 #if MIN_FIREFOX_VERSION < 138 || MIN_CHROME_VERSION != TARGET_NOT_SUPPORTED || MIN_SAFARI_VERSION != TARGET_NOT_SUPPORTED
@@ -234,13 +240,13 @@ var LibraryWebAudio = {
        stackLowestAddress, // sb = stack base
        stackSize, // sz = stack size
      });
-      audioWorklet['port'].onmessage = _EmAudioDispatchProcessorCallback;
+      audioWorklet['port'].onmessage = _emAudioDispatchProcessorCallback;
       {{{ makeDynCall('viip', 'callback') }}}(contextHandle, 1/*EM_TRUE*/, userData);
     }).catch(audioWorkletCreationFailed);
   },

-  $_EmAudioDispatchProcessorCallback__deps: ['$getWasmTableEntry'],
-  $_EmAudioDispatchProcessorCallback: (e) => {
+  $_emAudioDispatchProcessorCallback__deps: ['$getWasmTableEntry'],
+  $_emAudioDispatchProcessorCallback: (e) => {
     var data = e.data;
     // '_wsc' is short for 'wasm call', trying to use an identifier name that
     // will never conflict with user code. This is used to call both the 3-param
@@ -250,10 +256,8 @@ var LibraryWebAudio = {
   },

   emscripten_create_wasm_audio_worklet_processor_async: (contextHandle, options, callback, userData) => {
-#if ASSERTIONS
-    assert(contextHandle, `Called emscripten_create_wasm_audio_worklet_processor_async() with a null Web Audio Context handle!`);
-    assert(EmAudio[contextHandle], `Called emscripten_create_wasm_audio_worklet_processor_async() with a nonexisting/already freed Web Audio Context handle ${contextHandle}!`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_create_wasm_audio_worklet_processor_async() on a context handle ${contextHandle} that is not an AudioContext, but of type ${typeof EmAudio[contextHandle]}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_create_wasm_audio_worklet_processor_async');
 #endif

     var processorName = UTF8ToString({{{ makeGetValue('options', C_STRUCTS.WebAudioWorkletProcessorCreateOptions.name, '*') }}});
@@ -282,7 +286,7 @@ var LibraryWebAudio = {
     console.log(`emscripten_create_wasm_audio_worklet_processor_async() creating a new AudioWorklet processor with name ${processorName}`);
 #endif

-    EmAudio[contextHandle].audioWorklet['port'].postMessage({
+    emAudio.get(contextHandle).audioWorklet['port'].postMessage({
       // Deliberately mangled and short names used here ('_wpn', the 'Worklet
       // Processor Name' used as a 'key' to verify the message type so as to
       // not get accidentally mixed with user submitted messages, the remainder
@@ -299,10 +303,8 @@ var LibraryWebAudio = {

   emscripten_create_wasm_audio_worklet_node__deps: ['$emscriptenGetContextQuantumSize'],
   emscripten_create_wasm_audio_worklet_node: (contextHandle, name, options, callback, userData) => {
-#if ASSERTIONS
-    assert(contextHandle, `Called emscripten_create_wasm_audio_worklet_node() with a null Web Audio Context handle!`);
-    assert(EmAudio[contextHandle], `Called emscripten_create_wasm_audio_worklet_node() with a nonexisting/already freed Web Audio Context handle ${contextHandle}!`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_create_wasm_audio_worklet_node() on a context handle ${contextHandle} that is not an AudioContext, but of type ${typeof EmAudio[contextHandle]}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_create_wasm_audio_worklet_node');
 #endif

     function readChannelCountArray(heapIndex, numOutputs) {
@@ -329,53 +331,49 @@ var LibraryWebAudio = {
     } : undefined;

 #if WEBAUDIO_DEBUG
-    console.log(`Creating AudioWorkletNode "${UTF8ToString(name)}" on context=${contextHandle} with options:`);
+    dbg(`Creating AudioWorkletNode "${UTF8ToString(name)}" on context=${contextHandle} with options:`);
     console.dir(opts);
 #endif
-    return emscriptenRegisterAudioObject(new AudioWorkletNode(EmAudio[contextHandle], UTF8ToString(name), opts));
+    return emscriptenRegisterAudioObject(new AudioWorkletNode(emAudio.get(contextHandle), UTF8ToString(name), opts));
   },
 #endif // ~AUDIO_WORKLET

   emscripten_audio_context_quantum_size__deps: ['$emscriptenGetContextQuantumSize'],
   emscripten_audio_context_quantum_size: (contextHandle) => {
-#if ASSERTIONS
-    assert(EmAudio[contextHandle], `Called emscripten_audio_context_quantum_size() with an invalid Web Audio Context handle ${contextHandle}`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_audio_context_quantum_size() on handle ${contextHandle} that is not an AudioContext, but of type ${EmAudio[contextHandle]}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_audio_context_quantum_size')
 #endif
     return emscriptenGetContextQuantumSize(contextHandle);
   },

   emscripten_audio_context_sample_rate: (contextHandle) => {
-#if ASSERTIONS
-    assert(EmAudio[contextHandle], `Called emscripten_audio_context_sample_rate() with an invalid Web Audio Context handle ${contextHandle}`);
-    assert(EmAudio[contextHandle] instanceof (window.AudioContext || window.webkitAudioContext), `Called emscripten_audio_context_sample_rate() on handle ${contextHandle} that is not an AudioContext, but of type ${EmAudio[contextHandle]}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(contextHandle, 'emscripten_audio_context_sample_rate');
 #endif
-    return EmAudio[contextHandle]['sampleRate'];
+    return emAudio.get(contextHandle)['sampleRate'];
   },

   emscripten_audio_node_connect: (source, destination, outputIndex, inputIndex) => {
-    var srcNode = EmAudio[source];
-    var dstNode = EmAudio[destination];
-#if ASSERTIONS
-    assert(srcNode, `Called emscripten_audio_node_connect() with an invalid AudioNode handle ${source}`);
-    assert(srcNode instanceof window.AudioNode, `Called emscripten_audio_node_connect() on handle ${source} that is not an AudiotNode, but of type ${srcNode}`);
-    assert(dstNode, `Called emscripten_audio_node_connect() with an invalid AudioNode handle ${destination}!`);
-    assert(dstNode instanceof (window.AudioContext || window.webkitAudioContext) || dstNode instanceof window.AudioNode, `Called emscripten_audio_node_connect() on handle ${destination} that is not an AudioContext or AudioNode, but of type ${dstNode}`);
+#if ASSERTIONS || WEBAUDIO_DEBUG
+    emAudioCheckHandle(source, 'emscripten_audio_node_connect', 1);
+    emAudioCheckHandle(destination, 'emscripten_audio_node_connect', 2);
 #endif
+    var srcNode = emAudio.get(source);
+    var dstNode = emAudio.get(destination);
 #if WEBAUDIO_DEBUG
-    console.log(`Connecting audio node ID ${source} to audio node ID ${destination} (${srcNode} to ${dstNode})`);
+    dbg(`Connecting audio node ID ${source} to audio node ID ${destination} (${srcNode} to ${dstNode})`);
 #endif
     srcNode.connect(dstNode.destination || dstNode, outputIndex, inputIndex);
   },

   emscripten_current_thread_is_audio_worklet: () => ENVIRONMENT_IS_AUDIO_WORKLET,

   emscripten_audio_worklet_post_function_v: (audioContext, funcPtr) => {
-    (audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [] }); // "WaSm Call"
+    (audioContext ? emAudio.get(audioContext).audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [] }); // "WaSm Call"
   },

   $emscripten_audio_worklet_post_function_1: (audioContext, funcPtr, arg0) => {
-    (audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0] }); // "WaSm Call"
+    (audioContext ? emAudio.get(audioContext).audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0] }); // "WaSm Call"
   },

   emscripten_audio_worklet_post_function_vi__deps: ['$emscripten_audio_worklet_post_function_1'],
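For context on the '_wsc' ("wasm call") messages used by the post_function helpers above and by the renamed dispatch callback: the sender forwards a function pointer plus its arguments over the AudioWorklet port, and the receiver resolves the pointer through the Wasm function table. A simplified sketch of that round trip follows; `somePort` is a placeholder, and the real dispatch body is not shown in this diff and may differ in detail.

// Sender side, as in the helpers above: forward a 'wasm call' over the port.
somePort.postMessage({'_wsc': funcPtr, args: [arg0, arg1]});

// Receiver side (simplified sketch): look the pointer up in the Wasm function
// table and invoke it with the forwarded arguments.
somePort.onmessage = (e) => {
  var data = e.data;
  if (data['_wsc']) {
    getWasmTableEntry(data['_wsc'])(...data['args']);
  }
};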
@@ -389,7 +387,7 @@ var LibraryWebAudio = {
   },

   $emscripten_audio_worklet_post_function_2: (audioContext, funcPtr, arg0, arg1) => {
-    (audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0, arg1] }); // "WaSm Call"
+    (audioContext ? emAudio.get(audioContext).audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0, arg1] }); // "WaSm Call"
   },

   emscripten_audio_worklet_post_function_vii__deps: ['$emscripten_audio_worklet_post_function_2'],
@@ -403,7 +401,7 @@ var LibraryWebAudio = {
   },

   $emscripten_audio_worklet_post_function_3: (audioContext, funcPtr, arg0, arg1, arg2) => {
-    (audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0, arg1, arg2] }); // "WaSm Call"
+    (audioContext ? emAudio.get(audioContext).audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0, arg1, arg2] }); // "WaSm Call"
   },
   emscripten_audio_worklet_post_function_viii__deps: ['$emscripten_audio_worklet_post_function_3'],
   emscripten_audio_worklet_post_function_viii: (audioContext, funcPtr, arg0, arg1, arg2) => {
@@ -423,8 +421,13 @@ var LibraryWebAudio = {
     assert(UTF8ToString(sigPtr)[0] != 'v', 'Do NOT specify the return argument in the signature string for a call to emscripten_audio_worklet_post_function_sig(), just pass the function arguments.');
     assert(varargs);
 #endif
-    (audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: readEmAsmArgs(sigPtr, varargs) });
+    (audioContext ? emAudio.get(audioContext).audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: readEmAsmArgs(sigPtr, varargs) });
   }
 };

+autoAddDeps(LibraryWebAudio, '$emAudio');
+#if ASSERTIONS || WEBAUDIO_DEBUG
+autoAddDeps(LibraryWebAudio, '$emAudioCheckHandle');
+#endif
+
 addToLibrary(LibraryWebAudio);
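The new autoAddDeps() calls at the end make every entry of LibraryWebAudio depend on the shared $emAudio allocator (and, in assertion/debug builds, on $emAudioCheckHandle), so the allocator is pulled in wherever any of these functions is included. Roughly, the effect is equivalent to appending the dependency to each entry's __deps list; the following is a hedged sketch of that behaviour only, not the actual implementation:

// Rough sketch of the effect of autoAddDeps(library, dep): every library entry
// gains `dep` in its __deps list, so including any entry also includes `dep`.
function autoAddDepsSketch(library, dep) {
  for (var name in library) {
    if (name.includes('__')) continue;    // skip metadata entries like foo__deps
    var depsKey = name + '__deps';
    library[depsKey] = library[depsKey] || [];
    library[depsKey].push(dep);
  }
}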
