Skip to content
This repository was archived by the owner on May 13, 2024. It is now read-only.

Fix WebAudio init handling #289

Merged
merged 4 commits into from
Oct 11, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 1 addition & 10 deletions src/js/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,7 @@
* tree.
*/
'use strict';
/* exported addExplicitTest, addTest, audioContext */

// Global WebAudio context that can be shared by all tests.
// There is a very finite number of WebAudio contexts.
try {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext();
} catch (e) {
console.log('Failed to instantiate an audio context, error: ' + e);
}
/* exported addExplicitTest, addTest */

var enumeratedTestSuites = [];
var enumeratedTestFilters = [];
Expand Down
22 changes: 13 additions & 9 deletions src/js/mictest.js
Original file line number Diff line number Diff line change
Expand Up @@ -44,16 +44,20 @@ function MicTest(test) {
for (var i = 0; i < this.inputChannelCount; ++i) {
this.collectedAudio[i] = [];
}
var AudioContext = window.AudioContext || window.webkitAudioContext;
this.audioContext = new AudioContext();
}

MicTest.prototype = {
run: function() {
if (typeof audioContext === 'undefined') {
this.test.reportError('WebAudio is not supported, test cannot run.');
// Resuming as per new spec after user interaction.
this.audioContext.resume().then(function() {
doGetUserMedia(this.constraints, this.gotStream.bind(this))
}.bind(this))
.catch(function(error) {
this.test.reportError('WebAudio run failure: ' + error);
this.test.done();
} else {
doGetUserMedia(this.constraints, this.gotStream.bind(this));
}
}.bind(this));
},

gotStream: function(stream) {
Expand All @@ -77,11 +81,11 @@ MicTest.prototype = {
},

createAudioBuffer: function() {
this.audioSource = audioContext.createMediaStreamSource(this.stream);
this.scriptNode = audioContext.createScriptProcessor(this.bufferSize,
this.audioSource = this.audioContext.createMediaStreamSource(this.stream);
this.scriptNode = this.audioContext.createScriptProcessor(this.bufferSize,
this.inputChannelCount, this.outputChannelCount);
this.audioSource.connect(this.scriptNode);
this.scriptNode.connect(audioContext.destination);
this.scriptNode.connect(this.audioContext.destination);
this.scriptNode.onaudioprocess = this.collectAudio.bind(this);
this.stopCollectingAudio = setTimeoutWithProgressBar(
this.onStopCollectingAudio.bind(this), 5000);
Expand Down Expand Up @@ -124,7 +128,7 @@ MicTest.prototype = {
onStopCollectingAudio: function() {
this.stream.getAudioTracks()[0].stop();
this.audioSource.disconnect(this.scriptNode);
this.scriptNode.disconnect(audioContext.destination);
this.scriptNode.disconnect(this.audioContext.destination);
this.analyzeAudio(this.collectedAudio);
this.test.done();
},
Expand Down
2 changes: 1 addition & 1 deletion src/ui/testrtc-main.html
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ <h3>STUN</h3>
onFail.apply(this, arguments);
} else {
reportFatal('Failed to get access to local media due to ' +
'error: ' + error.name);
'error: ' + error);
}
});
} catch (e) {
Expand Down
5 changes: 5 additions & 0 deletions test/sanity-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,11 @@ test('Run TestRTC', function(t) {
t.pass('Page loaded');
})
.then(function() {
// Have to wait until the getUserMedia polymer element has been shown before clicking on the start button.
driver.wait(webdriver.until.elementIsVisible(driver.findElement(
webdriver.By.id('dialog'))))
driver.wait(webdriver.until.elementIsNotVisible(driver.findElement(
webdriver.By.id('dialog'))))
return driver.wait(webdriver.until.elementLocated(
webdriver.By.css('#startButton')), 10000,
'Failed to locate startButton');
Expand Down