Mirror of https://github.com/psychopy/psychojs.git

sound: enforce formatting rules

Sotiri Bakagiannis 2021-07-09 14:07:55 +01:00
parent 57a590c536
commit 5468898716
8 changed files with 325 additions and 409 deletions
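
The changes are purely stylistic: strings switch from single to double quotes, multi-line object and array literals gain trailing commas, destructuring patterns gain spaces inside the braces, imports and re-exports are sorted by module specifier, and stray blank lines between members are dropped. A minimal before/after sketch of those rules (illustrative only, not an excerpt from the diff):

// before
import {PsychObject} from '../util/PsychObject.js';
import {PsychoJS} from '../core/PsychoJS.js';
const response = {
	origin: 'AudioClip.transcribe',
	context: 'when transcribing an audio clip'
};

// after
import { PsychoJS } from "../core/PsychoJS.js";
import { PsychObject } from "../util/PsychObject.js";
const response = {
	origin: "AudioClip.transcribe",
	context: "when transcribing an audio clip",
};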

src/sound/AudioClip.js

@ -7,11 +7,10 @@
* @license Distributed under the terms of the MIT License * @license Distributed under the terms of the MIT License
*/ */
-import {PsychObject} from '../util/PsychObject.js';
-import {PsychoJS} from '../core/PsychoJS.js';
-import {ExperimentHandler} from '../data/ExperimentHandler.js';
-import * as util from '../util/Util.js';
+import { PsychoJS } from "../core/PsychoJS.js";
+import { ExperimentHandler } from "../data/ExperimentHandler.js";
+import { PsychObject } from "../util/PsychObject.js";
+import * as util from "../util/Util.js";
/** /**
* <p>AudioClip encapsulates an audio recording.</p> * <p>AudioClip encapsulates an audio recording.</p>
@ -28,20 +27,19 @@ import * as util from '../util/Util.js';
*/ */
export class AudioClip extends PsychObject export class AudioClip extends PsychObject
{ {
constructor({ psychoJS, name, sampleRateHz, format, data, autoLog } = {})
constructor({psychoJS, name, sampleRateHz, format, data, autoLog} = {})
{ {
super(psychoJS); super(psychoJS);
this._addAttribute('name', name, 'audioclip'); this._addAttribute("name", name, "audioclip");
this._addAttribute('format', format); this._addAttribute("format", format);
this._addAttribute('sampleRateHz', sampleRateHz); this._addAttribute("sampleRateHz", sampleRateHz);
this._addAttribute('data', data); this._addAttribute("data", data);
this._addAttribute('autoLog', false, autoLog); this._addAttribute("autoLog", false, autoLog);
this._addAttribute('status', AudioClip.Status.CREATED); this._addAttribute("status", AudioClip.Status.CREATED);
// add a volume attribute, for playback: // add a volume attribute, for playback:
this._addAttribute('volume', 1.0); this._addAttribute("volume", 1.0);
if (this._autoLog) if (this._autoLog)
{ {
@ -52,7 +50,6 @@ export class AudioClip extends PsychObject
this._decodeAudio(); this._decodeAudio();
} }
/** /**
* Set the volume of the playback. * Set the volume of the playback.
* *
@ -66,7 +63,6 @@ export class AudioClip extends PsychObject
this._volume = volume; this._volume = volume;
} }
/** /**
* Start playing the audio clip. * Start playing the audio clip.
* *
@ -76,7 +72,7 @@ export class AudioClip extends PsychObject
*/ */
async startPlayback() async startPlayback()
{ {
this._psychoJS.logger.debug('request to play the audio clip'); this._psychoJS.logger.debug("request to play the audio clip");
// wait for the decoding to complete: // wait for the decoding to complete:
await this._decodeAudio(); await this._decodeAudio();
@ -103,7 +99,6 @@ export class AudioClip extends PsychObject
this._source.start(); this._source.start();
} }
/** /**
* Stop playing the audio clip. * Stop playing the audio clip.
* *
@ -120,7 +115,6 @@ export class AudioClip extends PsychObject
this._source.stop(); this._source.stop();
} }
/** /**
* Get the duration of the audio clip, in seconds. * Get the duration of the audio clip, in seconds.
* *
@ -137,7 +131,6 @@ export class AudioClip extends PsychObject
return this._audioBuffer.duration; return this._audioBuffer.duration;
} }
/** /**
* Upload the audio clip to the pavlovia server. * Upload the audio clip to the pavlovia server.
* *
@ -147,17 +140,18 @@ export class AudioClip extends PsychObject
*/ */
upload() upload()
{ {
this._psychoJS.logger.debug('request to upload the audio clip to pavlovia.org'); this._psychoJS.logger.debug("request to upload the audio clip to pavlovia.org");
// add a format-dependent audio extension to the name: // add a format-dependent audio extension to the name:
const filename = this._name + util.extensionFromMimeType(this._format); const filename = this._name + util.extensionFromMimeType(this._format);
// if the audio recording cannot be uploaded, e.g. the experiment is running locally, or // if the audio recording cannot be uploaded, e.g. the experiment is running locally, or
// if it is piloting mode, then we offer the audio clip as a file for download: // if it is piloting mode, then we offer the audio clip as a file for download:
-if (this._psychoJS.getEnvironment() !== ExperimentHandler.Environment.SERVER ||
-this._psychoJS.config.experiment.status !== 'RUNNING' ||
-this._psychoJS._serverMsg.has('__pilotToken'))
+if (
+this._psychoJS.getEnvironment() !== ExperimentHandler.Environment.SERVER
+|| this._psychoJS.config.experiment.status !== "RUNNING"
+|| this._psychoJS._serverMsg.has("__pilotToken")
+)
{ {
return this.download(filename); return this.download(filename);
} }
@ -166,8 +160,6 @@ export class AudioClip extends PsychObject
return this._psychoJS.serverManager.uploadAudio(this._data, filename); return this._psychoJS.serverManager.uploadAudio(this._data, filename);
} }
/** /**
* Offer the audio clip to the participant as a sound file to download. * Offer the audio clip to the participant as a sound file to download.
* *
@ -175,9 +167,9 @@ export class AudioClip extends PsychObject
* @function * @function
* @public * @public
*/ */
download(filename = 'audio.webm') download(filename = "audio.webm")
{ {
const anchor = document.createElement('a'); const anchor = document.createElement("a");
anchor.href = window.URL.createObjectURL(this._data); anchor.href = window.URL.createObjectURL(this._data);
anchor.download = filename; anchor.download = filename;
document.body.appendChild(anchor); document.body.appendChild(anchor);
@ -185,7 +177,6 @@ export class AudioClip extends PsychObject
document.body.removeChild(anchor); document.body.removeChild(anchor);
} }
/** /**
* Transcribe the audio clip. * Transcribe the audio clip.
* *
@ -196,10 +187,10 @@ export class AudioClip extends PsychObject
* @return {Promise<>} a promise resolving to the transcript and associated * @return {Promise<>} a promise resolving to the transcript and associated
* transcription confidence * transcription confidence
*/ */
async transcribe({engine, languageCode} = {}) async transcribe({ engine, languageCode } = {})
{ {
const response = { const response = {
origin: 'AudioClip.transcribe', origin: "AudioClip.transcribe",
context: `when transcribing audio clip: ${this._name}`, context: `when transcribing audio clip: ${this._name}`,
}; };
@ -215,11 +206,11 @@ export class AudioClip extends PsychObject
transcriptionKey = key.value; transcriptionKey = key.value;
} }
} }
if (typeof transcriptionKey === 'undefined') if (typeof transcriptionKey === "undefined")
{ {
throw { throw {
...response, ...response,
error: `missing key for engine: ${fullEngineName}` error: `missing key for engine: ${fullEngineName}`,
}; };
} }
@ -235,13 +226,11 @@ export class AudioClip extends PsychObject
{ {
throw { throw {
...response, ...response,
error: `unsupported speech-to-text engine: ${engine}` error: `unsupported speech-to-text engine: ${engine}`,
}; };
} }
} }
/** /**
* Transcribe the audio clip using the Google Cloud Speech-To-Text Engine. * Transcribe the audio clip using the Google Cloud Speech-To-Text Engine.
* *
@ -272,31 +261,31 @@ export class AudioClip extends PsychObject
// query the Google speech-to-text service: // query the Google speech-to-text service:
const body = { const body = {
config: { config: {
encoding: 'LINEAR16', encoding: "LINEAR16",
sampleRateHertz: this._sampleRateHz, sampleRateHertz: this._sampleRateHz,
languageCode languageCode,
}, },
audio: { audio: {
content: base64Data content: base64Data,
}, },
}; };
const url = `https://speech.googleapis.com/v1/speech:recognize?key=${transcriptionKey}`; const url = `https://speech.googleapis.com/v1/speech:recognize?key=${transcriptionKey}`;
const response = await fetch(url, { const response = await fetch(url, {
method: 'POST', method: "POST",
headers: { headers: {
'Content-Type': 'application/json', "Content-Type": "application/json",
}, },
body: JSON.stringify(body) body: JSON.stringify(body),
}); });
// convert the response to json: // convert the response to json:
const decodedResponse = await response.json(); const decodedResponse = await response.json();
this._psychoJS.logger.debug('speech.googleapis.com response:', JSON.stringify(decodedResponse)); this._psychoJS.logger.debug("speech.googleapis.com response:", JSON.stringify(decodedResponse));
// TODO deal with more than one results and/or alternatives // TODO deal with more than one results and/or alternatives
if (('results' in decodedResponse) && (decodedResponse.results.length > 0)) if (("results" in decodedResponse) && (decodedResponse.results.length > 0))
{ {
resolve(decodedResponse.results[0].alternatives[0]); resolve(decodedResponse.results[0].alternatives[0]);
} }
@ -304,21 +293,20 @@ export class AudioClip extends PsychObject
{ {
// no transcription available: // no transcription available:
resolve({ resolve({
transcript: '', transcript: "",
confidence: -1 confidence: -1,
}); });
} }
}); });
} }
/** /**
* Decode the formatted audio data (e.g. webm) into a 32bit float PCM audio buffer. * Decode the formatted audio data (e.g. webm) into a 32bit float PCM audio buffer.
* *
*/ */
_decodeAudio() _decodeAudio()
{ {
this._psychoJS.logger.debug('request to decode the data of the audio clip'); this._psychoJS.logger.debug("request to decode the data of the audio clip");
// if the audio clip is ready, the PCM audio data is available in _audioData, a Float32Array: // if the audio clip is ready, the PCM audio data is available in _audioData, a Float32Array:
if (this._status === AudioClip.Status.READY) if (this._status === AudioClip.Status.READY)
@ -326,12 +314,11 @@ export class AudioClip extends PsychObject
return; return;
} }
// if we are already decoding, wait until the process completed: // if we are already decoding, wait until the process completed:
if (this._status === AudioClip.Status.DECODING) if (this._status === AudioClip.Status.DECODING)
{ {
const self = this; const self = this;
return new Promise(function (resolve, reject) return new Promise(function(resolve, reject)
{ {
self._decodingCallbacks.push(resolve); self._decodingCallbacks.push(resolve);
@ -339,7 +326,6 @@ export class AudioClip extends PsychObject
}.bind(this)); }.bind(this));
} }
// otherwise, start decoding the input formatted audio data: // otherwise, start decoding the input formatted audio data:
this._status = AudioClip.Status.DECODING; this._status = AudioClip.Status.DECODING;
this._audioData = null; this._audioData = null;
@ -348,7 +334,7 @@ export class AudioClip extends PsychObject
this._decodingCallbacks = []; this._decodingCallbacks = [];
this._audioContext = new (window.AudioContext || window.webkitAudioContext)({ this._audioContext = new (window.AudioContext || window.webkitAudioContext)({
sampleRate: this._sampleRateHz sampleRate: this._sampleRateHz,
}); });
const reader = new window.FileReader(); const reader = new window.FileReader();
@ -383,12 +369,11 @@ export class AudioClip extends PsychObject
reader.onerror = (error) => reader.onerror = (error) =>
{ {
// TODO // TODO
} };
reader.readAsArrayBuffer(this._data); reader.readAsArrayBuffer(this._data);
} }
/** /**
* Convert an array buffer to a base64 string. * Convert an array buffer to a base64 string.
* *
@ -403,63 +388,65 @@ export class AudioClip extends PsychObject
*/ */
_base64ArrayBuffer(arrayBuffer) _base64ArrayBuffer(arrayBuffer)
{ {
let base64 = ''; let base64 = "";
const encodings = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; const encodings = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
const bytes = new Uint8Array(arrayBuffer); const bytes = new Uint8Array(arrayBuffer);
const byteLength = bytes.byteLength; const byteLength = bytes.byteLength;
const byteRemainder = byteLength % 3; const byteRemainder = byteLength % 3;
const mainLength = byteLength - byteRemainder; const mainLength = byteLength - byteRemainder;
let a; let a;
let b; let b;
let c; let c;
let d; let d;
let chunk; let chunk;
// Main loop deals with bytes in chunks of 3
-for (let i = 0; i < mainLength; i += 3) {
+for (let i = 0; i < mainLength; i += 3)
+{
// Combine the three bytes into a single integer
chunk = (bytes[i] << 16) | (bytes[i + 1] << 8) | bytes[i + 2];
// Use bitmasks to extract 6-bit segments from the triplet
a = (chunk & 16515072) >> 18; // 16515072 = (2^6 - 1) << 18
b = (chunk & 258048) >> 12; // 258048 = (2^6 - 1) << 12
c = (chunk & 4032) >> 6; // 4032 = (2^6 - 1) << 6
d = chunk & 63; // 63 = 2^6 - 1
// Convert the raw binary segments to the appropriate ASCII encoding
base64 += encodings[a] + encodings[b] + encodings[c] + encodings[d];
}
// Deal with the remaining bytes and padding
-if (byteRemainder === 1) {
+if (byteRemainder === 1)
+{
chunk = bytes[mainLength];
a = (chunk & 252) >> 2; // 252 = (2^6 - 1) << 2
// Set the 4 least significant bits to zero
b = (chunk & 3) << 4; // 3 = 2^2 - 1
base64 += `${encodings[a]}${encodings[b]}==`;
-} else if (byteRemainder === 2) {
+}
+else if (byteRemainder === 2)
+{
chunk = (bytes[mainLength] << 8) | bytes[mainLength + 1];
a = (chunk & 64512) >> 10; // 64512 = (2^6 - 1) << 10
b = (chunk & 1008) >> 4; // 1008 = (2^6 - 1) << 4
// Set the 2 least significant bits to zero
c = (chunk & 15) << 2; // 15 = 2^4 - 1
base64 += `${encodings[a]}${encodings[b]}${encodings[c]}=`;
}
return base64;
}
}
/** /**
* Recognition engines. * Recognition engines.
* *
@ -472,10 +459,9 @@ AudioClip.Engine = {
/** /**
* Google Cloud Speech-to-Text. * Google Cloud Speech-to-Text.
*/ */
GOOGLE: Symbol.for('GOOGLE') GOOGLE: Symbol.for("GOOGLE"),
}; };
/** /**
* AudioClip status. * AudioClip status.
* *
@ -484,9 +470,9 @@ AudioClip.Engine = {
* @public * @public
*/ */
AudioClip.Status = { AudioClip.Status = {
CREATED: Symbol.for('CREATED'), CREATED: Symbol.for("CREATED"),
DECODING: Symbol.for('DECODING'), DECODING: Symbol.for("DECODING"),
READY: Symbol.for('READY') READY: Symbol.for("READY"),
}; };
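
The _base64ArrayBuffer helper above base64-encodes the recorded audio for the Google speech-to-text request. As a quick sanity check of its bitmask and padding logic (a standalone sketch, not part of this commit), the two-byte input "Ma" (0x4D 0x61) falls into the byteRemainder === 2 branch and should produce "TWE=", the same result as the built-in btoa("Ma"):

const encodings = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
const bytes = new Uint8Array([0x4d, 0x61]); // "Ma"
const chunk = (bytes[0] << 8) | bytes[1];
const a = (chunk & 64512) >> 10; // top 6 bits -> 19 -> "T"
const b = (chunk & 1008) >> 4; // next 6 bits -> 22 -> "W"
const c = (chunk & 15) << 2; // remaining 4 bits, left-shifted -> 4 -> "E"
console.log(`${encodings[a]}${encodings[b]}${encodings[c]}=`); // "TWE=", same as btoa("Ma")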

src/sound/AudioClipPlayer.js

@ -7,9 +7,8 @@
* @license Distributed under the terms of the MIT License * @license Distributed under the terms of the MIT License
*/ */
-import {SoundPlayer} from './SoundPlayer.js';
-import {AudioClip} from "./AudioClip.js";
+import { AudioClip } from "./AudioClip.js";
+import { SoundPlayer } from "./SoundPlayer.js";
/** /**
* <p>This class handles the playback of an audio clip, e.g. a microphone recording.</p> * <p>This class handles the playback of an audio clip, e.g. a microphone recording.</p>
@ -29,28 +28,27 @@ import {AudioClip} from "./AudioClip.js";
export class AudioClipPlayer extends SoundPlayer export class AudioClipPlayer extends SoundPlayer
{ {
constructor({ constructor({
psychoJS, psychoJS,
audioClip, audioClip,
startTime = 0, startTime = 0,
stopTime = -1, stopTime = -1,
stereo = true, stereo = true,
volume = 0, volume = 0,
loops = 0 loops = 0,
} = {}) } = {})
{ {
super(psychoJS); super(psychoJS);
this._addAttribute('audioClip', audioClip); this._addAttribute("audioClip", audioClip);
this._addAttribute('startTime', startTime); this._addAttribute("startTime", startTime);
this._addAttribute('stopTime', stopTime); this._addAttribute("stopTime", stopTime);
this._addAttribute('stereo', stereo); this._addAttribute("stereo", stereo);
this._addAttribute('loops', loops); this._addAttribute("loops", loops);
this._addAttribute('volume', volume); this._addAttribute("volume", volume);
this._currentLoopIndex = -1; this._currentLoopIndex = -1;
} }
/** /**
* Determine whether this player can play the given sound. * Determine whether this player can play the given sound.
* *
@ -73,7 +71,7 @@ export class AudioClipPlayer extends SoundPlayer
stopTime: sound.stopTime, stopTime: sound.stopTime,
stereo: sound.stereo, stereo: sound.stereo,
loops: sound.loops, loops: sound.loops,
volume: sound.volume volume: sound.volume,
}); });
return player; return player;
} }
@ -82,7 +80,6 @@ export class AudioClipPlayer extends SoundPlayer
return undefined; return undefined;
} }
/** /**
* Get the duration of the AudioClip, in seconds. * Get the duration of the AudioClip, in seconds.
* *
@ -96,7 +93,6 @@ export class AudioClipPlayer extends SoundPlayer
return this._audioClip.getDuration(); return this._audioClip.getDuration();
} }
/** /**
* Set the duration of the audio clip. * Set the duration of the audio clip.
* *
@ -110,13 +106,12 @@ export class AudioClipPlayer extends SoundPlayer
// TODO // TODO
throw { throw {
origin: 'AudioClipPlayer.setDuration', origin: "AudioClipPlayer.setDuration",
context: 'when setting the duration of the playback for audio clip player: ' + this._name, context: "when setting the duration of the playback for audio clip player: " + this._name,
error: 'not implemented yet' error: "not implemented yet",
}; };
} }
/** /**
* Set the volume of the playback. * Set the volume of the playback.
* *
@ -133,7 +128,6 @@ export class AudioClipPlayer extends SoundPlayer
this._audioClip.setVolume((mute) ? 0.0 : volume); this._audioClip.setVolume((mute) ? 0.0 : volume);
} }
/** /**
* Set the number of loops. * Set the number of loops.
* *
@ -150,7 +144,6 @@ export class AudioClipPlayer extends SoundPlayer
// TODO // TODO
} }
/** /**
* Start playing the sound. * Start playing the sound.
* *
@ -162,7 +155,7 @@ export class AudioClipPlayer extends SoundPlayer
*/ */
play(loops, fadeDuration = 17) play(loops, fadeDuration = 17)
{ {
if (typeof loops !== 'undefined') if (typeof loops !== "undefined")
{ {
this.setLoops(loops); this.setLoops(loops);
} }
@ -176,7 +169,6 @@ export class AudioClipPlayer extends SoundPlayer
this._audioClip.startPlayback(); this._audioClip.startPlayback();
} }
/** /**
* Stop playing the sound immediately. * Stop playing the sound immediately.
* *
@ -189,5 +181,4 @@ export class AudioClipPlayer extends SoundPlayer
{ {
this._audioClip.stopPlayback(fadeDuration); this._audioClip.stopPlayback(fadeDuration);
} }
} }

src/sound/Microphone.js

@ -7,12 +7,12 @@
* @license Distributed under the terms of the MIT License * @license Distributed under the terms of the MIT License
*/ */
-import {Clock} from "../util/Clock.js";
-import {PsychObject} from "../util/PsychObject.js";
-import {PsychoJS} from "../core/PsychoJS.js";
-import * as util from '../util/Util.js';
-import {ExperimentHandler} from "../data/ExperimentHandler.js";
-import {AudioClip} from "./AudioClip.js";
+import { PsychoJS } from "../core/PsychoJS.js";
+import { ExperimentHandler } from "../data/ExperimentHandler.js";
+import { Clock } from "../util/Clock.js";
+import { PsychObject } from "../util/PsychObject.js";
+import * as util from "../util/Util.js";
+import { AudioClip } from "./AudioClip.js";
/** /**
* <p>This manager handles the recording of audio signal.</p> * <p>This manager handles the recording of audio signal.</p>
@ -29,18 +29,17 @@ import {AudioClip} from "./AudioClip.js";
*/ */
export class Microphone extends PsychObject export class Microphone extends PsychObject
{ {
constructor({ win, name, format, sampleRateHz, clock, autoLog } = {})
constructor({win, name, format, sampleRateHz, clock, autoLog} = {})
{ {
super(win._psychoJS); super(win._psychoJS);
this._addAttribute('win', win, undefined); this._addAttribute("win", win, undefined);
this._addAttribute('name', name, 'microphone'); this._addAttribute("name", name, "microphone");
this._addAttribute('format', format, 'audio/webm;codecs=opus', this._onChange); this._addAttribute("format", format, "audio/webm;codecs=opus", this._onChange);
this._addAttribute('sampleRateHz', sampleRateHz, 48000, this._onChange); this._addAttribute("sampleRateHz", sampleRateHz, 48000, this._onChange);
this._addAttribute('clock', clock, new Clock()); this._addAttribute("clock", clock, new Clock());
this._addAttribute('autoLog', false, autoLog); this._addAttribute("autoLog", false, autoLog);
this._addAttribute('status', PsychoJS.Status.NOT_STARTED); this._addAttribute("status", PsychoJS.Status.NOT_STARTED);
// prepare the recording: // prepare the recording:
this._prepareRecording(); this._prepareRecording();
@ -51,7 +50,6 @@ export class Microphone extends PsychObject
} }
} }
/** /**
* Submit a request to start the recording. * Submit a request to start the recording.
* *
@ -68,19 +66,18 @@ export class Microphone extends PsychObject
// with a new recording: // with a new recording:
if (this._status === PsychoJS.Status.PAUSED) if (this._status === PsychoJS.Status.PAUSED)
{ {
return this.resume({clear: true}); return this.resume({ clear: true });
} }
if (this._status !== PsychoJS.Status.STARTED) if (this._status !== PsychoJS.Status.STARTED)
{ {
this._psychoJS.logger.debug('request to start audio recording'); this._psychoJS.logger.debug("request to start audio recording");
try try
{ {
if (!this._recorder) if (!this._recorder)
{ {
throw 'the recorder has not been created yet, possibly because the participant has not given the authorisation to record audio'; throw "the recorder has not been created yet, possibly because the participant has not given the authorisation to record audio";
} }
this._recorder.start(); this._recorder.start();
@ -96,21 +93,18 @@ export class Microphone extends PsychObject
} }
catch (error) catch (error)
{ {
this._psychoJS.logger.error('unable to start the audio recording: ' + JSON.stringify(error)); this._psychoJS.logger.error("unable to start the audio recording: " + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR; this._status = PsychoJS.Status.ERROR;
throw { throw {
origin: 'Microphone.start', origin: "Microphone.start",
context: 'when starting the audio recording for microphone: ' + this._name, context: "when starting the audio recording for microphone: " + this._name,
error error,
}; };
} }
} }
} }
/** /**
* Submit a request to stop the recording. * Submit a request to stop the recording.
* *
@ -122,14 +116,14 @@ export class Microphone extends PsychObject
* @return {Promise} promise fulfilled when the recording actually stopped, and the recorded * @return {Promise} promise fulfilled when the recording actually stopped, and the recorded
* data was made available * data was made available
*/ */
stop({filename} = {}) stop({ filename } = {})
{ {
if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED) if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED)
{ {
this._psychoJS.logger.debug('request to stop audio recording'); this._psychoJS.logger.debug("request to stop audio recording");
this._stopOptions = { this._stopOptions = {
filename filename,
}; };
// note: calling the stop method of the MediaRecorder will first raise a dataavailable event, // note: calling the stop method of the MediaRecorder will first raise a dataavailable event,
@ -148,7 +142,6 @@ export class Microphone extends PsychObject
} }
} }
/** /**
* Submit a request to pause the recording. * Submit a request to pause the recording.
* *
@ -160,13 +153,13 @@ export class Microphone extends PsychObject
{ {
if (this._status === PsychoJS.Status.STARTED) if (this._status === PsychoJS.Status.STARTED)
{ {
this._psychoJS.logger.debug('request to pause audio recording'); this._psychoJS.logger.debug("request to pause audio recording");
try try
{ {
if (!this._recorder) if (!this._recorder)
{ {
throw 'the recorder has not been created yet, possibly because the participant has not given the authorisation to record audio'; throw "the recorder has not been created yet, possibly because the participant has not given the authorisation to record audio";
} }
// note: calling the pause method of the MediaRecorder raises a pause event // note: calling the pause method of the MediaRecorder raises a pause event
@ -182,20 +175,18 @@ export class Microphone extends PsychObject
} }
catch (error) catch (error)
{ {
self._psychoJS.logger.error('unable to pause the audio recording: ' + JSON.stringify(error)); self._psychoJS.logger.error("unable to pause the audio recording: " + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR; this._status = PsychoJS.Status.ERROR;
throw { throw {
origin: 'Microphone.pause', origin: "Microphone.pause",
context: 'when pausing the audio recording for microphone: ' + this._name, context: "when pausing the audio recording for microphone: " + this._name,
error error,
}; };
} }
} }
} }
/** /**
* Submit a request to resume the recording. * Submit a request to resume the recording.
* *
@ -207,17 +198,17 @@ export class Microphone extends PsychObject
* resuming the recording * resuming the recording
* @return {Promise} promise fulfilled when the recording actually resumed * @return {Promise} promise fulfilled when the recording actually resumed
*/ */
resume({clear = false } = {}) resume({ clear = false } = {})
{ {
if (this._status === PsychoJS.Status.PAUSED) if (this._status === PsychoJS.Status.PAUSED)
{ {
this._psychoJS.logger.debug('request to resume audio recording'); this._psychoJS.logger.debug("request to resume audio recording");
try try
{ {
if (!this._recorder) if (!this._recorder)
{ {
throw 'the recorder has not been created yet, possibly because the participant has not given the authorisation to record audio'; throw "the recorder has not been created yet, possibly because the participant has not given the authorisation to record audio";
} }
// empty the audio buffer is needed: // empty the audio buffer is needed:
@ -239,20 +230,18 @@ export class Microphone extends PsychObject
} }
catch (error) catch (error)
{ {
self._psychoJS.logger.error('unable to resume the audio recording: ' + JSON.stringify(error)); self._psychoJS.logger.error("unable to resume the audio recording: " + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR; this._status = PsychoJS.Status.ERROR;
throw { throw {
origin: 'Microphone.resume', origin: "Microphone.resume",
context: 'when resuming the audio recording for microphone: ' + this._name, context: "when resuming the audio recording for microphone: " + this._name,
error error,
}; };
} }
} }
} }
/** /**
* Submit a request to flush the recording. * Submit a request to flush the recording.
* *
@ -264,7 +253,7 @@ export class Microphone extends PsychObject
{ {
if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED) if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED)
{ {
this._psychoJS.logger.debug('request to flush audio recording'); this._psychoJS.logger.debug("request to flush audio recording");
// note: calling the requestData method of the MediaRecorder will raise a // note: calling the requestData method of the MediaRecorder will raise a
// dataavailable event // dataavailable event
@ -281,7 +270,6 @@ export class Microphone extends PsychObject
} }
} }
/** /**
* Offer the audio recording to the participant as a sound file to download. * Offer the audio recording to the participant as a sound file to download.
* *
@ -290,11 +278,11 @@ export class Microphone extends PsychObject
* @public * @public
* @param {string} filename the filename * @param {string} filename the filename
*/ */
download(filename = 'audio.webm') download(filename = "audio.webm")
{ {
const audioBlob = new Blob(this._audioBuffer); const audioBlob = new Blob(this._audioBuffer);
const anchor = document.createElement('a'); const anchor = document.createElement("a");
anchor.href = window.URL.createObjectURL(audioBlob); anchor.href = window.URL.createObjectURL(audioBlob);
anchor.download = filename; anchor.download = filename;
document.body.appendChild(anchor); document.body.appendChild(anchor);
@ -302,7 +290,6 @@ export class Microphone extends PsychObject
document.body.removeChild(anchor); document.body.removeChild(anchor);
} }
/** /**
* Upload the audio recording to the pavlovia server. * Upload the audio recording to the pavlovia server.
* *
@ -311,10 +298,10 @@ export class Microphone extends PsychObject
* @public * @public
* @param {string} tag an optional tag for the audio file * @param {string} tag an optional tag for the audio file
*/ */
async upload({tag} = {}) async upload({ tag } = {})
{ {
// default tag: the name of this Microphone object // default tag: the name of this Microphone object
if (typeof tag === 'undefined') if (typeof tag === "undefined")
{ {
tag = this._name; tag = this._name;
} }
@ -322,12 +309,13 @@ export class Microphone extends PsychObject
// add a format-dependent audio extension to the tag: // add a format-dependent audio extension to the tag:
tag += util.extensionFromMimeType(this._format); tag += util.extensionFromMimeType(this._format);
// if the audio recording cannot be uploaded, e.g. the experiment is running locally, or // if the audio recording cannot be uploaded, e.g. the experiment is running locally, or
// if it is piloting mode, then we offer the audio recording as a file for download: // if it is piloting mode, then we offer the audio recording as a file for download:
-if (this._psychoJS.getEnvironment() !== ExperimentHandler.Environment.SERVER ||
-this._psychoJS.config.experiment.status !== 'RUNNING' ||
-this._psychoJS._serverMsg.has('__pilotToken'))
+if (
+this._psychoJS.getEnvironment() !== ExperimentHandler.Environment.SERVER
+|| this._psychoJS.config.experiment.status !== "RUNNING"
+|| this._psychoJS._serverMsg.has("__pilotToken")
+)
{ {
return this.download(tag); return this.download(tag);
} }
@ -337,7 +325,6 @@ export class Microphone extends PsychObject
return this._psychoJS.serverManager.uploadAudio(audioBlob, tag); return this._psychoJS.serverManager.uploadAudio(audioBlob, tag);
} }
/** /**
* Get the current audio recording as an AudioClip in the given format. * Get the current audio recording as an AudioClip in the given format.
* *
@ -347,27 +334,25 @@ export class Microphone extends PsychObject
* @param {string} tag an optional tag for the audio clip * @param {string} tag an optional tag for the audio clip
* @param {boolean} [flush=false] whether or not to first flush the recording * @param {boolean} [flush=false] whether or not to first flush the recording
*/ */
async getRecording({tag, flush = false} = {}) async getRecording({ tag, flush = false } = {})
{ {
// default tag: the name of this Microphone object // default tag: the name of this Microphone object
if (typeof tag === 'undefined') if (typeof tag === "undefined")
{ {
tag = this._name; tag = this._name;
} }
const audioClip = new AudioClip({ const audioClip = new AudioClip({
psychoJS: this._psychoJS, psychoJS: this._psychoJS,
name: tag, name: tag,
format: this._format, format: this._format,
sampleRateHz: this._sampleRateHz, sampleRateHz: this._sampleRateHz,
data: new Blob(this._audioBuffer) data: new Blob(this._audioBuffer),
}); });
return audioClip; return audioClip;
} }
/** /**
* Callback for changes to the recording settings. * Callback for changes to the recording settings.
* *
@ -389,7 +374,6 @@ export class Microphone extends PsychObject
this.start(); this.start();
} }
/** /**
* Prepare the recording. * Prepare the recording.
* *
@ -409,15 +393,15 @@ export class Microphone extends PsychObject
advanced: [ advanced: [
{ {
channelCount: 1, channelCount: 1,
sampleRate: this._sampleRateHz sampleRate: this._sampleRateHz,
} },
] ],
} },
}); });
// check that the specified format is supported, use default if it is not: // check that the specified format is supported, use default if it is not:
let options; let options;
if (typeof this._format === 'string' && MediaRecorder.isTypeSupported(this._format)) if (typeof this._format === "string" && MediaRecorder.isTypeSupported(this._format))
{ {
options = { type: this._format }; options = { type: this._format };
} }
@ -428,7 +412,6 @@ export class Microphone extends PsychObject
this._recorder = new MediaRecorder(stream, options); this._recorder = new MediaRecorder(stream, options);
// setup the callbacks: // setup the callbacks:
const self = this; const self = this;
@ -440,7 +423,7 @@ export class Microphone extends PsychObject
self._audioBuffer.length = 0; self._audioBuffer.length = 0;
self._clock.reset(); self._clock.reset();
self._status = PsychoJS.Status.STARTED; self._status = PsychoJS.Status.STARTED;
self._psychoJS.logger.debug('audio recording started'); self._psychoJS.logger.debug("audio recording started");
// resolve the Microphone.start promise: // resolve the Microphone.start promise:
if (self._startCallback) if (self._startCallback)
@ -453,7 +436,7 @@ export class Microphone extends PsychObject
this._recorder.onpause = () => this._recorder.onpause = () =>
{ {
self._status = PsychoJS.Status.PAUSED; self._status = PsychoJS.Status.PAUSED;
self._psychoJS.logger.debug('audio recording paused'); self._psychoJS.logger.debug("audio recording paused");
// resolve the Microphone.pause promise: // resolve the Microphone.pause promise:
if (self._pauseCallback) if (self._pauseCallback)
@ -466,7 +449,7 @@ export class Microphone extends PsychObject
this._recorder.onresume = () => this._recorder.onresume = () =>
{ {
self._status = PsychoJS.Status.STARTED; self._status = PsychoJS.Status.STARTED;
self._psychoJS.logger.debug('audio recording resumed'); self._psychoJS.logger.debug("audio recording resumed");
// resolve the Microphone.resume promise: // resolve the Microphone.resume promise:
if (self._resumeCallback) if (self._resumeCallback)
@ -482,7 +465,7 @@ export class Microphone extends PsychObject
// add data to the buffer: // add data to the buffer:
self._audioBuffer.push(data); self._audioBuffer.push(data);
self._psychoJS.logger.debug('audio data added to the buffer'); self._psychoJS.logger.debug("audio data added to the buffer");
// resolve the data available promise, if needed: // resolve the data available promise, if needed:
if (self._dataAvailableCallback) if (self._dataAvailableCallback)
@ -494,7 +477,7 @@ export class Microphone extends PsychObject
// called upon Microphone.stop(), after data has been made available: // called upon Microphone.stop(), after data has been made available:
this._recorder.onstop = () => this._recorder.onstop = () =>
{ {
self._psychoJS.logger.debug('audio recording stopped'); self._psychoJS.logger.debug("audio recording stopped");
self._status = PsychoJS.Status.NOT_STARTED; self._status = PsychoJS.Status.NOT_STARTED;
// resolve the Microphone.stop promise: // resolve the Microphone.stop promise:
@ -506,7 +489,7 @@ export class Microphone extends PsychObject
// treat stop options if there are any: // treat stop options if there are any:
// download to a file, immediately offered to the participant: // download to a file, immediately offered to the participant:
if (typeof self._stopOptions.filename === 'string') if (typeof self._stopOptions.filename === "string")
{ {
self.download(self._stopOptions.filename); self.download(self._stopOptions.filename);
} }
@ -516,12 +499,8 @@ export class Microphone extends PsychObject
this._recorder.onerror = (event) => this._recorder.onerror = (event) =>
{ {
// TODO // TODO
self._psychoJS.logger.error('audio recording error: ' + JSON.stringify(event)); self._psychoJS.logger.error("audio recording error: " + JSON.stringify(event));
self._status = PsychoJS.Status.ERROR; self._status = PsychoJS.Status.ERROR;
}; };
} }
} }
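
Taken together, Microphone records through the browser's MediaRecorder and hands the result back as an AudioClip. A hedged usage sketch (the psychoJS.window handle and the awaiting of the start/stop promises are assumptions based on the callback machinery above, not something this diff shows end to end):

const microphone = new Microphone({ win: psychoJS.window, name: "mic" });
await microphone.start(); // assumed to resolve once recording has actually started
// ... participant speaks ...
await microphone.stop(); // resolves once the recorded data has been made available
const audioClip = await microphone.getRecording({ tag: "trial_1" });
await audioClip.startPlayback(); // or: await audioClip.upload();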

src/sound/Sound.js

@ -8,12 +8,11 @@
* @license Distributed under the terms of the MIT License * @license Distributed under the terms of the MIT License
*/ */
-import {PsychoJS} from '../core/PsychoJS.js';
-import {PsychObject} from '../util/PsychObject.js';
-import {TonePlayer} from './TonePlayer.js';
-import {TrackPlayer} from './TrackPlayer.js';
-import {AudioClipPlayer} from './AudioClipPlayer.js';
+import { PsychoJS } from "../core/PsychoJS.js";
+import { PsychObject } from "../util/PsychObject.js";
+import { AudioClipPlayer } from "./AudioClipPlayer.js";
+import { TonePlayer } from "./TonePlayer.js";
+import { TrackPlayer } from "./TrackPlayer.js";
/** /**
* <p>This class handles sound playing (tones and tracks)</p> * <p>This class handles sound playing (tones and tracks)</p>
@ -54,35 +53,35 @@ import {AudioClipPlayer} from './AudioClipPlayer.js';
export class Sound extends PsychObject export class Sound extends PsychObject
{ {
constructor({ constructor({
name, name,
win, win,
value = 'C', value = "C",
octave = 4, octave = 4,
secs = 0.5, secs = 0.5,
startTime = 0, startTime = 0,
stopTime = -1, stopTime = -1,
stereo = true, stereo = true,
volume = 1.0, volume = 1.0,
loops = 0, loops = 0,
//hamming = true, // hamming = true,
autoLog = true autoLog = true,
} = {}) } = {})
{ {
super(win._psychoJS, name); super(win._psychoJS, name);
// the SoundPlayer, e.g. TonePlayer: // the SoundPlayer, e.g. TonePlayer:
this._player = undefined; this._player = undefined;
this._addAttribute('win', win); this._addAttribute("win", win);
this._addAttribute('value', value); this._addAttribute("value", value);
this._addAttribute('octave', octave); this._addAttribute("octave", octave);
this._addAttribute('secs', secs); this._addAttribute("secs", secs);
this._addAttribute('startTime', startTime); this._addAttribute("startTime", startTime);
this._addAttribute('stopTime', stopTime); this._addAttribute("stopTime", stopTime);
this._addAttribute('stereo', stereo); this._addAttribute("stereo", stereo);
this._addAttribute('volume', volume); this._addAttribute("volume", volume);
this._addAttribute('loops', loops); this._addAttribute("loops", loops);
this._addAttribute('autoLog', autoLog); this._addAttribute("autoLog", autoLog);
// identify an appropriate player: // identify an appropriate player:
this._getPlayer(); this._getPlayer();
@ -90,7 +89,6 @@ export class Sound extends PsychObject
this.status = PsychoJS.Status.NOT_STARTED; this.status = PsychoJS.Status.NOT_STARTED;
} }
/** /**
* Start playing the sound. * Start playing the sound.
* *
@ -107,7 +105,6 @@ export class Sound extends PsychObject
this._player.play(loops); this._player.play(loops);
} }
/** /**
* Stop playing the sound immediately. * Stop playing the sound immediately.
* *
@ -116,14 +113,13 @@ export class Sound extends PsychObject
* @param {boolean} [options.log= true] - whether or not to log * @param {boolean} [options.log= true] - whether or not to log
*/ */
stop({ stop({
log = true log = true,
} = {}) } = {})
{ {
this._player.stop(); this._player.stop();
this.status = PsychoJS.Status.STOPPED; this.status = PsychoJS.Status.STOPPED;
} }
/** /**
* Get the duration of the sound, in seconds. * Get the duration of the sound, in seconds.
* *
@ -135,7 +131,6 @@ export class Sound extends PsychObject
return this._player.getDuration(); return this._player.getDuration();
} }
/** /**
* Set the playing volume of the sound. * Set the playing volume of the sound.
* *
@ -146,15 +141,14 @@ export class Sound extends PsychObject
*/ */
setVolume(volume, mute = false, log = true) setVolume(volume, mute = false, log = true)
{ {
this._setAttribute('volume', volume, log); this._setAttribute("volume", volume, log);
if (typeof this._player !== 'undefined') if (typeof this._player !== "undefined")
{ {
this._player.setVolume(volume, mute); this._player.setVolume(volume, mute);
} }
} }
/** /**
* Set the sound value on demand past initialisation. * Set the sound value on demand past initialisation.
* *
@ -166,9 +160,9 @@ export class Sound extends PsychObject
{ {
if (sound instanceof Sound) if (sound instanceof Sound)
{ {
this._setAttribute('value', sound.value, log); this._setAttribute("value", sound.value, log);
if (typeof this._player !== 'undefined') if (typeof this._player !== "undefined")
{ {
this._player = this._player.constructor.accept(this); this._player = this._player.constructor.accept(this);
} }
@ -178,13 +172,12 @@ export class Sound extends PsychObject
} }
throw { throw {
origin: 'Sound.setSound', origin: "Sound.setSound",
context: 'when replacing the current sound', context: "when replacing the current sound",
error: 'invalid input, need an instance of the Sound class.' error: "invalid input, need an instance of the Sound class.",
}; };
} }
/** /**
* Set the number of loops. * Set the number of loops.
* *
@ -194,15 +187,14 @@ export class Sound extends PsychObject
*/ */
setLoops(loops = 0, log = true) setLoops(loops = 0, log = true)
{ {
this._setAttribute('loops', loops, log); this._setAttribute("loops", loops, log);
if (typeof this._player !== 'undefined') if (typeof this._player !== "undefined")
{ {
this._player.setLoops(loops); this._player.setLoops(loops);
} }
} }
/** /**
* Set the duration (in seconds) * Set the duration (in seconds)
* *
@ -212,15 +204,14 @@ export class Sound extends PsychObject
*/ */
setSecs(secs = 0.5, log = true) setSecs(secs = 0.5, log = true)
{ {
this._setAttribute('secs', secs, log); this._setAttribute("secs", secs, log);
if (typeof this._player !== 'undefined') if (typeof this._player !== "undefined")
{ {
this._player.setDuration(secs); this._player.setDuration(secs);
} }
} }
/** /**
* Identify the appropriate player for the sound. * Identify the appropriate player for the sound.
* *
@ -231,26 +222,24 @@ export class Sound extends PsychObject
_getPlayer() _getPlayer()
{ {
const acceptFns = [ const acceptFns = [
sound => TonePlayer.accept(sound), (sound) => TonePlayer.accept(sound),
sound => TrackPlayer.accept(sound), (sound) => TrackPlayer.accept(sound),
sound => AudioClipPlayer.accept(sound) (sound) => AudioClipPlayer.accept(sound),
]; ];
for (const acceptFn of acceptFns) for (const acceptFn of acceptFns)
{ {
this._player = acceptFn(this); this._player = acceptFn(this);
if (typeof this._player !== 'undefined') if (typeof this._player !== "undefined")
{ {
return this._player; return this._player;
} }
} }
throw { throw {
origin: 'SoundPlayer._getPlayer', origin: "SoundPlayer._getPlayer",
context: 'when finding a player for the sound', context: "when finding a player for the sound",
error: 'could not find an appropriate player.' error: "could not find an appropriate player.",
}; };
} }
} }
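
Sound delegates playback to whichever player accepts its value: TonePlayer for note names, TrackPlayer for sounds registered as server resources, and AudioClipPlayer for microphone recordings. A brief sketch of the note case (the psychoJS.window handle is assumed):

const beep = new Sound({ win: psychoJS.window, value: "A", octave: 4, secs: 0.25 });
beep.setVolume(0.8);
beep.play(); // TonePlayer.accept() matches the note "A" and handles the actual playback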

src/sound/SoundPlayer.js

@ -7,8 +7,7 @@
* @license Distributed under the terms of the MIT License * @license Distributed under the terms of the MIT License
*/ */
import {PsychObject} from '../util/PsychObject.js'; import { PsychObject } from "../util/PsychObject.js";
/** /**
* <p>SoundPlayer is an interface for the sound players, who are responsible for actually playing the sounds, i.e. the tracks or the tones.</p> * <p>SoundPlayer is an interface for the sound players, who are responsible for actually playing the sounds, i.e. the tracks or the tones.</p>
@ -25,7 +24,6 @@ export class SoundPlayer extends PsychObject
super(psychoJS); super(psychoJS);
} }
/** /**
* Determine whether this player can play the given sound. * Determine whether this player can play the given sound.
* *
@ -40,13 +38,12 @@ export class SoundPlayer extends PsychObject
static accept(sound) static accept(sound)
{ {
throw { throw {
origin: 'SoundPlayer.accept', origin: "SoundPlayer.accept",
context: 'when evaluating whether this player can play a given sound', context: "when evaluating whether this player can play a given sound",
error: 'this method is abstract and should not be called.' error: "this method is abstract and should not be called.",
}; };
} }
/** /**
* Start playing the sound. * Start playing the sound.
* *
@ -59,13 +56,12 @@ export class SoundPlayer extends PsychObject
play(loops) play(loops)
{ {
throw { throw {
origin: 'SoundPlayer.play', origin: "SoundPlayer.play",
context: 'when starting the playback of a sound', context: "when starting the playback of a sound",
error: 'this method is abstract and should not be called.' error: "this method is abstract and should not be called.",
}; };
} }
/** /**
* Stop playing the sound immediately. * Stop playing the sound immediately.
* *
@ -77,13 +73,12 @@ export class SoundPlayer extends PsychObject
stop() stop()
{ {
throw { throw {
origin: 'SoundPlayer.stop', origin: "SoundPlayer.stop",
context: 'when stopping the playback of a sound', context: "when stopping the playback of a sound",
error: 'this method is abstract and should not be called.' error: "this method is abstract and should not be called.",
}; };
} }
/** /**
* Get the duration of the sound, in seconds. * Get the duration of the sound, in seconds.
* *
@ -95,13 +90,12 @@ export class SoundPlayer extends PsychObject
getDuration() getDuration()
{ {
throw { throw {
origin: 'SoundPlayer.getDuration', origin: "SoundPlayer.getDuration",
context: 'when getting the duration of the sound', context: "when getting the duration of the sound",
error: 'this method is abstract and should not be called.' error: "this method is abstract and should not be called.",
}; };
} }
/** /**
* Set the duration of the sound, in seconds. * Set the duration of the sound, in seconds.
* *
@ -113,13 +107,12 @@ export class SoundPlayer extends PsychObject
setDuration(duration_s) setDuration(duration_s)
{ {
throw { throw {
origin: 'SoundPlayer.setDuration', origin: "SoundPlayer.setDuration",
context: 'when setting the duration of the sound', context: "when setting the duration of the sound",
error: 'this method is abstract and should not be called.' error: "this method is abstract and should not be called.",
}; };
} }
/** /**
* Set the number of loops. * Set the number of loops.
* *
@ -132,13 +125,12 @@ export class SoundPlayer extends PsychObject
setLoops(loops) setLoops(loops)
{ {
throw { throw {
origin: 'SoundPlayer.setLoops', origin: "SoundPlayer.setLoops",
context: 'when setting the number of loops', context: "when setting the number of loops",
error: 'this method is abstract and should not be called.' error: "this method is abstract and should not be called.",
}; };
} }
/** /**
* Set the volume of the tone. * Set the volume of the tone.
* *
@ -152,10 +144,9 @@ export class SoundPlayer extends PsychObject
setVolume(volume, mute = false) setVolume(volume, mute = false)
{ {
throw { throw {
origin: 'SoundPlayer.setVolume', origin: "SoundPlayer.setVolume",
context: 'when setting the volume of the sound', context: "when setting the volume of the sound",
error: 'this method is abstract and should not be called.' error: "this method is abstract and should not be called.",
}; };
} }
} }

src/sound/TonePlayer.js

@ -7,10 +7,9 @@
* @license Distributed under the terms of the MIT License * @license Distributed under the terms of the MIT License
*/ */
import * as Tone from 'tone'; import * as Tone from "tone";
import { isNumeric } from "../util/Util.js"; import { isNumeric } from "../util/Util.js";
import {SoundPlayer} from './SoundPlayer.js'; import { SoundPlayer } from "./SoundPlayer.js";
/** /**
* <p>This class handles the playing of tones.</p> * <p>This class handles the playing of tones.</p>
@ -28,23 +27,23 @@ import {SoundPlayer} from './SoundPlayer.js';
export class TonePlayer extends SoundPlayer export class TonePlayer extends SoundPlayer
{ {
constructor({ constructor({
psychoJS, psychoJS,
note = 'C4', note = "C4",
duration_s = 0.5, duration_s = 0.5,
volume = 1.0, volume = 1.0,
loops = 0, loops = 0,
soundLibrary = TonePlayer.SoundLibrary.TONE_JS, soundLibrary = TonePlayer.SoundLibrary.TONE_JS,
autoLog = true autoLog = true,
} = {}) } = {})
{ {
super(psychoJS); super(psychoJS);
this._addAttribute('note', note); this._addAttribute("note", note);
this._addAttribute('duration_s', duration_s); this._addAttribute("duration_s", duration_s);
this._addAttribute('volume', volume); this._addAttribute("volume", volume);
this._addAttribute('loops', loops); this._addAttribute("loops", loops);
this._addAttribute('soundLibrary', soundLibrary); this._addAttribute("soundLibrary", soundLibrary);
this._addAttribute('autoLog', autoLog); this._addAttribute("autoLog", autoLog);
// initialise the sound library: // initialise the sound library:
this._initSoundLibrary(); this._initSoundLibrary();
@ -58,7 +57,6 @@ export class TonePlayer extends SoundPlayer
} }
} }
/** /**
* Determine whether this player can play the given sound. * Determine whether this player can play the given sound.
* *
@ -82,32 +80,32 @@ export class TonePlayer extends SoundPlayer
note: sound.value, note: sound.value,
duration_s: sound.secs, duration_s: sound.secs,
volume: sound.volume, volume: sound.volume,
loops: sound.loops loops: sound.loops,
}); });
} }
// if the sound's value is a string, we check whether it is a note: // if the sound's value is a string, we check whether it is a note:
if (typeof sound.value === 'string') if (typeof sound.value === "string")
{ {
// mapping between the PsychoPY notes and the standard ones: // mapping between the PsychoPY notes and the standard ones:
let psychopyToToneMap = new Map(); let psychopyToToneMap = new Map();
for (const note of ['A', 'B', 'C', 'D', 'E', 'F', 'G']) for (const note of ["A", "B", "C", "D", "E", "F", "G"])
{ {
psychopyToToneMap.set(note, note); psychopyToToneMap.set(note, note);
psychopyToToneMap.set(note + 'fl', note + 'b'); psychopyToToneMap.set(note + "fl", note + "b");
psychopyToToneMap.set(note + 'sh', note + '#'); psychopyToToneMap.set(note + "sh", note + "#");
} }
// check whether the sound's value is a recognised note: // check whether the sound's value is a recognised note:
const note = psychopyToToneMap.get(sound.value); const note = psychopyToToneMap.get(sound.value);
if (typeof note !== 'undefined') if (typeof note !== "undefined")
{ {
return new TonePlayer({ return new TonePlayer({
psychoJS: sound.psychoJS, psychoJS: sound.psychoJS,
note: note + sound.octave, note: note + sound.octave,
duration_s: sound.secs, duration_s: sound.secs,
volume: sound.volume, volume: sound.volume,
loops: sound.loops loops: sound.loops,
}); });
} }
} }
@ -116,7 +114,6 @@ export class TonePlayer extends SoundPlayer
return undefined; return undefined;
} }
/** /**
* Get the duration of the sound. * Get the duration of the sound.
* *
@ -130,7 +127,6 @@ export class TonePlayer extends SoundPlayer
return this.duration_s; return this.duration_s;
} }
/** /**
* Set the duration of the tone. * Set the duration of the tone.
* *
@ -144,7 +140,6 @@ export class TonePlayer extends SoundPlayer
this.duration_s = duration_s; this.duration_s = duration_s;
} }
/** /**
* Set the number of loops. * Set the number of loops.
* *
@ -158,7 +153,6 @@ export class TonePlayer extends SoundPlayer
this._loops = loops; this._loops = loops;
} }
/** /**
* Set the volume of the tone. * Set the volume of the tone.
* *
@ -174,7 +168,7 @@ export class TonePlayer extends SoundPlayer
if (this._soundLibrary === TonePlayer.SoundLibrary.TONE_JS) if (this._soundLibrary === TonePlayer.SoundLibrary.TONE_JS)
{ {
if (typeof this._volumeNode !== 'undefined') if (typeof this._volumeNode !== "undefined")
{ {
this._volumeNode.mute = mute; this._volumeNode.mute = mute;
this._volumeNode.volume.value = -60 + volume * 66; this._volumeNode.volume.value = -60 + volume * 66;
@ -191,7 +185,6 @@ export class TonePlayer extends SoundPlayer
} }
} }
/** /**
* Start playing the sound. * Start playing the sound.
* *
@ -202,7 +195,7 @@ export class TonePlayer extends SoundPlayer
*/ */
play(loops) play(loops)
{ {
if (typeof loops !== 'undefined') if (typeof loops !== "undefined")
{ {
this._loops = loops; this._loops = loops;
} }
@ -223,7 +216,7 @@ export class TonePlayer extends SoundPlayer
playToneCallback = () => playToneCallback = () =>
{ {
self._webAudioOscillator = self._audioContext.createOscillator(); self._webAudioOscillator = self._audioContext.createOscillator();
self._webAudioOscillator.type = 'sine'; self._webAudioOscillator.type = "sine";
self._webAudioOscillator.frequency.value = 440; self._webAudioOscillator.frequency.value = 440;
self._webAudioOscillator.connect(self._audioContext.destination); self._webAudioOscillator.connect(self._audioContext.destination);
const contextCurrentTime = self._audioContext.currentTime; const contextCurrentTime = self._audioContext.currentTime;
@ -237,7 +230,6 @@ export class TonePlayer extends SoundPlayer
{ {
playToneCallback(); playToneCallback();
} }
// repeat forever: // repeat forever:
else if (this.loops === -1) else if (this.loops === -1)
{ {
@ -245,22 +237,21 @@ export class TonePlayer extends SoundPlayer
playToneCallback, playToneCallback,
this.duration_s, this.duration_s,
Tone.now(), Tone.now(),
Infinity Infinity,
); );
} }
else
// repeat this._loops times: // repeat this._loops times:
else
{ {
this._toneId = Tone.Transport.scheduleRepeat( this._toneId = Tone.Transport.scheduleRepeat(
playToneCallback, playToneCallback,
this.duration_s, this.duration_s,
Tone.now(), Tone.now(),
this.duration_s * (this._loops + 1) this.duration_s * (this._loops + 1),
); );
} }
} }
/** /**
* Stop playing the sound immediately. * Stop playing the sound immediately.
* *
@ -288,7 +279,6 @@ export class TonePlayer extends SoundPlayer
} }
} }
/** /**
* Initialise the sound library. * Initialise the sound library.
* *
@ -302,24 +292,24 @@ export class TonePlayer extends SoundPlayer
_initSoundLibrary() _initSoundLibrary()
{ {
const response = { const response = {
origin: 'TonePlayer._initSoundLibrary', origin: "TonePlayer._initSoundLibrary",
context: 'when initialising the sound library' context: "when initialising the sound library",
}; };
if (this._soundLibrary === TonePlayer.SoundLibrary.TONE_JS) if (this._soundLibrary === TonePlayer.SoundLibrary.TONE_JS)
{ {
// check that Tone.js is available: // check that Tone.js is available:
if (typeof Tone === 'undefined') if (typeof Tone === "undefined")
{ {
throw Object.assign(response, { throw Object.assign(response, {
error: "Tone.js is not available. A different sound library must be selected. Please contact the experiment designer." error: "Tone.js is not available. A different sound library must be selected. Please contact the experiment designer.",
}); });
} }
// start the Tone Transport if it has not started already: // start the Tone Transport if it has not started already:
if (typeof Tone !== 'undefined' && Tone.Transport.state !== 'started') if (typeof Tone !== "undefined" && Tone.Transport.state !== "started")
{ {
this.psychoJS.logger.info('[PsychoJS] start Tone Transport'); this.psychoJS.logger.info("[PsychoJS] start Tone Transport");
Tone.Transport.start(Tone.now()); Tone.Transport.start(Tone.now());
// this is necessary to prevent Tone from introducing a delay when triggering a note // this is necessary to prevent Tone from introducing a delay when triggering a note
@ -330,14 +320,14 @@ export class TonePlayer extends SoundPlayer
// create a synth: we use a triangular oscillator with hardly any envelope: // create a synth: we use a triangular oscillator with hardly any envelope:
this._synthOtions = { this._synthOtions = {
oscillator: { oscillator: {
type: 'square' //'triangle' type: "square", // 'triangle'
}, },
envelope: { envelope: {
attack: 0.001, // 1ms attack: 0.001, // 1ms
decay: 0.001, // 1ms decay: 0.001, // 1ms
sustain: 1, sustain: 1,
release: 0.001 // 1ms release: 0.001, // 1ms
} },
}; };
this._synth = new Tone.Synth(this._synthOtions); this._synth = new Tone.Synth(this._synthOtions);
@ -346,7 +336,7 @@ export class TonePlayer extends SoundPlayer
this._synth.connect(this._volumeNode); this._synth.connect(this._volumeNode);
// connect the volume node to the master output: // connect the volume node to the master output:
if (typeof this._volumeNode.toDestination === 'function') if (typeof this._volumeNode.toDestination === "function")
{ {
this._volumeNode.toDestination(); this._volumeNode.toDestination();
} }
@ -358,15 +348,15 @@ export class TonePlayer extends SoundPlayer
else else
{ {
// create an AudioContext: // create an AudioContext:
if (typeof this._audioContext === 'undefined') if (typeof this._audioContext === "undefined")
{ {
const AudioContext = window.AudioContext || window.webkitAudioContext; const AudioContext = window.AudioContext || window.webkitAudioContext;
// if AudioContext is not available (e.g. on IE), we throw an exception: // if AudioContext is not available (e.g. on IE), we throw an exception:
if (typeof AudioContext === 'undefined') if (typeof AudioContext === "undefined")
{ {
throw Object.assign(response, { throw Object.assign(response, {
error: `AudioContext is not available on your browser, ${this._psychoJS.browser}, please contact the experiment designer.` error: `AudioContext is not available on your browser, ${this._psychoJS.browser}, please contact the experiment designer.`,
}); });
} }
@ -374,15 +364,13 @@ export class TonePlayer extends SoundPlayer
} }
} }
} }
} }
/** /**
* *
* @type {{TONE_JS: *, AUDIO_CONTEXT: *}} * @type {{TONE_JS: *, AUDIO_CONTEXT: *}}
*/ */
TonePlayer.SoundLibrary = { TonePlayer.SoundLibrary = {
AUDIO_CONTEXT: Symbol.for('AUDIO_CONTEXT'), AUDIO_CONTEXT: Symbol.for("AUDIO_CONTEXT"),
TONE_JS: Symbol.for('TONE_JS') TONE_JS: Symbol.for("TONE_JS"),
}; };

src/sound/TrackPlayer.js

@ -7,8 +7,7 @@
* @license Distributed under the terms of the MIT License * @license Distributed under the terms of the MIT License
*/ */
import {SoundPlayer} from './SoundPlayer.js'; import { SoundPlayer } from "./SoundPlayer.js";
/** /**
* <p>This class handles the playback of sound tracks.</p> * <p>This class handles the playback of sound tracks.</p>
@ -30,28 +29,27 @@ import {SoundPlayer} from './SoundPlayer.js';
export class TrackPlayer extends SoundPlayer export class TrackPlayer extends SoundPlayer
{ {
constructor({ constructor({
psychoJS, psychoJS,
howl, howl,
startTime = 0, startTime = 0,
stopTime = -1, stopTime = -1,
stereo = true, stereo = true,
volume = 0, volume = 0,
loops = 0 loops = 0,
} = {}) } = {})
{ {
super(psychoJS); super(psychoJS);
this._addAttribute('howl', howl); this._addAttribute("howl", howl);
this._addAttribute('startTime', startTime); this._addAttribute("startTime", startTime);
this._addAttribute('stopTime', stopTime); this._addAttribute("stopTime", stopTime);
this._addAttribute('stereo', stereo); this._addAttribute("stereo", stereo);
this._addAttribute('loops', loops); this._addAttribute("loops", loops);
this._addAttribute('volume', volume); this._addAttribute("volume", volume);
this._currentLoopIndex = -1; this._currentLoopIndex = -1;
} }
/** /**
* Determine whether this player can play the given sound. * Determine whether this player can play the given sound.
* *
@ -66,10 +64,10 @@ export class TrackPlayer extends SoundPlayer
static accept(sound) static accept(sound)
{ {
// if the sound's value is a string, we check whether it is the name of a resource: // if the sound's value is a string, we check whether it is the name of a resource:
if (typeof sound.value === 'string') if (typeof sound.value === "string")
{ {
const howl = sound.psychoJS.serverManager.getResource(sound.value); const howl = sound.psychoJS.serverManager.getResource(sound.value);
if (typeof howl !== 'undefined') if (typeof howl !== "undefined")
{ {
// build the player: // build the player:
const player = new TrackPlayer({ const player = new TrackPlayer({
@ -79,7 +77,7 @@ export class TrackPlayer extends SoundPlayer
stopTime: sound.stopTime, stopTime: sound.stopTime,
stereo: sound.stereo, stereo: sound.stereo,
loops: sound.loops, loops: sound.loops,
volume: sound.volume volume: sound.volume,
}); });
return player; return player;
} }
@ -89,7 +87,6 @@ export class TrackPlayer extends SoundPlayer
return undefined; return undefined;
} }
/** /**
* Get the duration of the sound, in seconds. * Get the duration of the sound, in seconds.
* *
@ -103,7 +100,6 @@ export class TrackPlayer extends SoundPlayer
return this._howl.duration(); return this._howl.duration();
} }
/** /**
* Set the duration of the track. * Set the duration of the track.
* *
@ -114,14 +110,13 @@ export class TrackPlayer extends SoundPlayer
*/ */
setDuration(duration_s) setDuration(duration_s)
{ {
if (typeof this._howl !== 'undefined') if (typeof this._howl !== "undefined")
{ {
// Unfortunately Howler.js provides duration setting method // Unfortunately Howler.js provides duration setting method
this._howl._duration = duration_s; this._howl._duration = duration_s;
} }
} }
/** /**
* Set the volume of the tone. * Set the volume of the tone.
* *
@ -139,7 +134,6 @@ export class TrackPlayer extends SoundPlayer
this._howl.mute(mute); this._howl.mute(mute);
} }
/** /**
* Set the number of loops. * Set the number of loops.
* *
@ -163,7 +157,6 @@ export class TrackPlayer extends SoundPlayer
} }
} }
/** /**
* Start playing the sound. * Start playing the sound.
* *
@ -175,7 +168,7 @@ export class TrackPlayer extends SoundPlayer
*/ */
play(loops, fadeDuration = 17) play(loops, fadeDuration = 17)
{ {
if (typeof loops !== 'undefined') if (typeof loops !== "undefined")
{ {
this.setLoops(loops); this.setLoops(loops);
} }
@ -184,7 +177,7 @@ export class TrackPlayer extends SoundPlayer
if (loops > 0) if (loops > 0)
{ {
const self = this; const self = this;
this._howl.on('end', (event) => this._howl.on("end", (event) =>
{ {
++this._currentLoopIndex; ++this._currentLoopIndex;
if (self._currentLoopIndex > self._loops) if (self._currentLoopIndex > self._loops)
@ -205,7 +198,6 @@ export class TrackPlayer extends SoundPlayer
this._howl.fade(0, this._volume, fadeDuration, this._id); this._howl.fade(0, this._volume, fadeDuration, this._id);
} }
/** /**
* Stop playing the sound immediately. * Stop playing the sound immediately.
* *
@ -216,11 +208,11 @@ export class TrackPlayer extends SoundPlayer
*/ */
stop(fadeDuration = 17) stop(fadeDuration = 17)
{ {
this._howl.once('fade', (id) => { this._howl.once("fade", (id) =>
{
this._howl.stop(id); this._howl.stop(id);
this._howl.off('end'); this._howl.off("end");
}); });
this._howl.fade(this._howl.volume(), 0, fadeDuration, this._id); this._howl.fade(this._howl.volume(), 0, fadeDuration, this._id);
} }
} }

src/sound/index.js

@ -1,9 +1,9 @@
-export * from './Sound.js';
-export * from './SoundPlayer.js';
-export * from './TonePlayer.js';
-export * from './TrackPlayer.js';
-export * from './Microphone.js';
-export * from './AudioClip.js';
-export * from './AudioClipPlayer.js';
-//export * from './Transcriber.js';
+export * from "./Sound.js";
+export * from "./SoundPlayer.js";
+export * from "./TonePlayer.js";
+export * from "./TrackPlayer.js";
+export * from "./AudioClip.js";
+export * from "./AudioClipPlayer.js";
+export * from "./Microphone.js";
+// export * from './Transcriber.js';