Merge branch 'master' into feature-key-code-update

This commit is contained in:
Becky Gilbert 2021-02-19 10:24:21 -08:00 committed by GitHub
commit 0800497060
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 414 additions and 341 deletions

View File

@ -172,23 +172,45 @@ filepath | string | The path to the audio file that was preloaded.
### Return value
Returns buffered audio file for playback. If the browser supports it the buffer will be playable with the WebAudio API. Otherwise, the returned buffer will be an HTML5 Audio object.
Returns a Promise that resolves when the audio file loads. Success handler's parameter will be the audio buffer. If the experiment is running using the WebAudio API it will be an AudioBuffer object. Otherwise, it will be an HTML5 Audio object. The failure handler's parameter is the error generated by `preloadAudio`.
### Description
Gets an AudioBuffer that can be played with the WebAudio API or an Audio object that can be played with HTML5 Audio. The file must be preloaded with the `preload` plugin.
Gets an AudioBuffer that can be played with the WebAudio API or an Audio object that can be played with HTML5 Audio.
It is strongly recommended that you preload audio files before calling this method. This method will load the files if they are not preloaded, but this may result in delays during the experiment as audio is downloaded.
### Examples
#### HTML 5 Audio
```javascript
// the code below is used to play audio in the audio-keyboard-response plugin
var source = context.createBufferSource();
source.buffer = jsPsych.pluginAPI.getAudioBuffer(trial.stimulus);
source.connect(context.destination);
startTime = context.currentTime;
source.start(startTime);
jsPsych.pluginAPI.getAudioBuffer('my-sound.mp3')
.then(function(audio){
audio.play();
})
.catch(function(err){
console.error('Audio file failed to load');
})
```
#### WebAudio API
```javascript
var context = jsPsych.pluginAPI.audioContext();
jsPsych.pluginAPI.getAudioBuffer('my-sound.mp3')
.then(function(buffer){
var audio = context.createBufferSource();
audio.buffer = buffer;
audio.connect(context.destination);
audio.start(context.currentTime);
})
.catch(function(err){
console.error('Audio file failed to load');
})
```
See the `audio-keyboard-response` plugin for an example in a fuller context.
---
## jsPsych.pluginAPI.getAutoPreloadList

View File

@ -4,7 +4,7 @@ This plugin can be used to calibrate the [WebGazer extension](/extensions/jspsyc
## Parameters
In addition to the [parameters available in all plugins](overview.md#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
In addition to the [parameters available in all plugins](/overview/plugins#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
Parameter | Type | Default Value | Description
----------|------|---------------|------------
@ -17,7 +17,7 @@ time_per_point | numeric | 1000 | If `calibration_mode` is set to `view`, then t
## Data Generated
In addition to the [default data collected by all plugins](overview.md#data-collected-by-plugins), this plugin collects the following data for each trial.
In addition to the [default data collected by all plugins](/overview/plugins#data-collected-by-plugins), this plugin collects the following data for each trial.
Name | Type | Value
-----|------|------

View File

@ -4,7 +4,7 @@ This plugin initializes the camera and helps the participant center their face i
## Parameters
In addition to the [parameters available in all plugins](overview.md#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
In addition to the [parameters available in all plugins](/overview/plugins#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
Parameter | Type | Default Value | Description
----------|------|---------------|------------
@ -13,7 +13,7 @@ button_text | string | Continue | The text for the button that participants clic
## Data Generated
In addition to the [default data collected by all plugins](overview.md#data-collected-by-plugins), this plugin collects the following data for each trial.
In addition to the [default data collected by all plugins](/overview/plugins#data-collected-by-plugins), this plugin collects the following data for each trial.
Name | Type | Value
-----|------|------

View File

@ -4,7 +4,7 @@ This plugin can be used to measure the accuracy and precision of gaze prediction
## Parameters
In addition to the [parameters available in all plugins](overview.md#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
In addition to the [parameters available in all plugins](/overview/plugins#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
Parameter | Type | Default Value | Description
----------|------|---------------|------------
@ -20,7 +20,7 @@ show_validation_data | bool | false | If `true` then a visualization of the vali
## Data Generated
In addition to the [default data collected by all plugins](overview.md#data-collected-by-plugins), this plugin collects the following data for each trial.
In addition to the [default data collected by all plugins](/overview/plugins#data-collected-by-plugins), this plugin collects the following data for each trial.
Name | Type | Value
-----|------|------

View File

@ -1,5 +1,11 @@
# Video tutorials
## YouTube Channel
A variety of video tutorials are available on [Josh de Leeuw's YouTube channel](https://www.youtube.com/playlist?list=PLnfo1lBY1P2Mf_o6rV5wiqqn92Mw3UTGh). Some tutorials walk through creating a basic version of an entire experiment, like the tutorial on creating a [dichotic listening experiment](https://www.youtube.com/playlist?list=PLnfo1lBY1P2Mf_o6rV5wiqqn92Mw3UTGh) aimed at new users. Others focus on specific features of jsPsych, like how to use [functions as parameters](https://www.youtube.com/watch?v=8-j2aAZ_iOk&list=PLnfo1lBY1P2Mf_o6rV5wiqqn92Mw3UTGh&index=5) to create experiments that change in response to participant input or how to [create a new plugin](https://www.youtube.com/watch?v=XQcsFwAmbiw&list=PLnfo1lBY1P2Mf_o6rV5wiqqn92Mw3UTGh&index=4).
Recordings from a Summer 2020 workshop on conducting online research with jsPsych are available on the [workshop's YouTube channel](https://www.youtube.com/channel/UCBZ5F1UysHWlplUNDRwbsWA). [Session 1](https://www.youtube.com/watch?v=BuhfsIFRFe8) provides an overview of jsPsych suitable for brand new users. [Session 3](https://www.youtube.com/watch?v=LP7o0iAALik) covers some more advanced features of jsPsych.
## Workshops
**Moving Research Online (2020)**. Recordings from a [Summer 2020 workshop](https://www.movingresearchonline.info) on conducting online research are available on the [workshop's YouTube channel](https://www.youtube.com/channel/UCBZ5F1UysHWlplUNDRwbsWA). [Session 1](https://www.youtube.com/watch?v=BuhfsIFRFe8) provides an overview of jsPsych suitable for brand new users. [Session 3](https://www.youtube.com/watch?v=LP7o0iAALik) covers some more advanced features of jsPsych. This workshop was funded by the National Science Foundation.
**babySTEP (2021)**. The Centre for Comparative Psycholinguistics (CCP, University of Alberta Department of Linguistics) hosted a two-part jsPsych workshop in 2021 as part of their annual [STEP program](https://ccp.artsrn.ualberta.ca/portfolio/step/). [Day 1](https://drive.google.com/file/d/1_bd_Tz1IoyGaZzuPoR_Qb6Rtd5wg4t4D/view?usp=drive_web) covered the basics of creating a jsPsych experiment, with an emphasis on audio stimuli. [Day 2](https://drive.google.com/file/d/1dIw1xIVY1lCHwFKGRaUnWMguwHfdkbGK/view?usp=drive_web) was organized around pre-submitted questions. The video demonstrates how to create a more complex experiment involving reading a sentence and hearing different audio options for completing the sentences, and answers several questions about timing accuracy, recording participant generated audio, embedding jsPsych into course (or other) websites, and some (non-empirical) advice about attention checks.

View File

@ -2532,12 +2532,22 @@ jsPsych.pluginAPI = (function() {
module.getAudioBuffer = function(audioID) {
if (audio_buffers[audioID] === 'tmp') {
console.error('Audio file failed to load in the time allotted.')
return;
}
return audio_buffers[audioID];
return new Promise(function(resolve, reject){
// check whether audio file already preloaded
if(typeof audio_buffers[audioID] == 'undefined' || audio_buffers[audioID] == 'tmp'){
// if audio is not already loaded, try to load it
function complete(){
resolve(audio_buffers[audioID])
}
function error(e){
reject(e.error);
}
module.preloadAudio([audioID], complete, function(){}, error)
} else {
// audio is already loaded
resolve(audio_buffers[audioID]);
}
});
}

View File

@ -8,28 +8,28 @@
*
**/
jsPsych.plugins["audio-button-response"] = (function() {
var plugin = {};
jsPsych.plugins["audio-button-response"] = (function () {
var plugin = {};
jsPsych.pluginAPI.registerPreload('audio-button-response', 'stimulus', 'audio');
jsPsych.pluginAPI.registerPreload('audio-button-response', 'stimulus', 'audio');
plugin.info = {
name: 'audio-button-response',
description: '',
parameters: {
stimulus: {
type: jsPsych.plugins.parameterType.AUDIO,
plugin.info = {
name: 'audio-button-response',
description: '',
parameters: {
stimulus: {
type: jsPsych.plugins.parameterType.AUDIO,
pretty_name: 'Stimulus',
default: undefined,
description: 'The audio to be played.'
},
choices: {
type: jsPsych.plugins.parameterType.STRING,
default: undefined,
description: 'The audio to be played.'
},
choices: {
type: jsPsych.plugins.parameterType.STRING,
pretty_name: 'Choices',
default: undefined,
array: true,
description: 'The button labels.'
},
default: undefined,
array: true,
description: 'The button labels.'
},
button_html: {
type: jsPsych.plugins.parameterType.HTML_STRING,
pretty_name: 'Button HTML',
@ -77,76 +77,17 @@ jsPsych.plugins["audio-button-response"] = (function() {
type: jsPsych.plugins.parameterType.BOOL,
pretty_name: 'Response allowed while playing',
default: true,
description: 'If true, then responses are allowed while the audio is playing. '+
description: 'If true, then responses are allowed while the audio is playing. ' +
'If false, then the audio must finish playing before a response is accepted.'
}
}
}
plugin.trial = function(display_element, trial) {
plugin.trial = function (display_element, trial) {
// setup stimulus
var context = jsPsych.pluginAPI.audioContext();
if(context !== null){
var source = context.createBufferSource();
source.buffer = jsPsych.pluginAPI.getAudioBuffer(trial.stimulus);
source.connect(context.destination);
} else {
var audio = jsPsych.pluginAPI.getAudioBuffer(trial.stimulus);
audio.currentTime = 0;
}
// set up end event if trial needs it
if(trial.trial_ends_after_audio){
if(context !== null){
source.addEventListener('ended', end_trial);
} else {
audio.addEventListener('ended', end_trial);
}
}
// enable buttons after audio ends if necessary
if ((!trial.response_allowed_while_playing) & (!trial.trial_ends_after_audio)) {
if (context !== null) {
source.addEventListener('ended', enable_buttons);
} else {
audio.addEventListener('ended', enable_buttons);
}
}
//display buttons
var buttons = [];
if (Array.isArray(trial.button_html)) {
if (trial.button_html.length == trial.choices.length) {
buttons = trial.button_html;
} else {
console.error('Error in image-button-response plugin. The length of the button_html array does not equal the length of the choices array');
}
} else {
for (var i = 0; i < trial.choices.length; i++) {
buttons.push(trial.button_html);
}
}
var html = '<div id="jspsych-audio-button-response-btngroup">';
for (var i = 0; i < trial.choices.length; i++) {
var str = buttons[i].replace(/%choice%/g, trial.choices[i]);
html += '<div class="jspsych-audio-button-response-button" style="cursor: pointer; display: inline-block; margin:'+trial.margin_vertical+' '+trial.margin_horizontal+'" id="jspsych-audio-button-response-button-' + i +'" data-choice="'+i+'">'+str+'</div>';
}
html += '</div>';
//show prompt if there is one
if (trial.prompt !== null) {
html += trial.prompt;
}
display_element.innerHTML = html;
if(trial.response_allowed_while_playing){
enable_buttons();
} else {
disable_buttons();
}
var audio;
// store response
var response = {
@ -154,16 +95,103 @@ jsPsych.plugins["audio-button-response"] = (function() {
button: null
};
// record webaudio context start time
var startTime;
// load audio file
jsPsych.pluginAPI.getAudioBuffer(trial.stimulus)
.then(function (buffer) {
if (context !== null) {
audio = context.createBufferSource();
audio.buffer = buffer;
audio.connect(context.destination);
} else {
audio = buffer;
audio.currentTime = 0;
}
setupTrial();
})
.catch(function (err) {
console.error(`Failed to load audio file "${trial.stimulus}". Try checking the file path. We recommend using the preload plugin to load audio files.`)
console.error(err)
});
function setupTrial() {
// set up end event if trial needs it
if (trial.trial_ends_after_audio) {
audio.addEventListener('ended', end_trial);
}
// enable buttons after audio ends if necessary
if ((!trial.response_allowed_while_playing) & (!trial.trial_ends_after_audio)) {
audio.addEventListener('ended', enable_buttons);
}
//display buttons
var buttons = [];
if (Array.isArray(trial.button_html)) {
if (trial.button_html.length == trial.choices.length) {
buttons = trial.button_html;
} else {
console.error('Error in audio-button-response plugin. The length of the button_html array does not equal the length of the choices array');
}
} else {
for (var i = 0; i < trial.choices.length; i++) {
buttons.push(trial.button_html);
}
}
var html = '<div id="jspsych-audio-button-response-btngroup">';
for (var i = 0; i < trial.choices.length; i++) {
var str = buttons[i].replace(/%choice%/g, trial.choices[i]);
html += '<div class="jspsych-audio-button-response-button" style="cursor: pointer; display: inline-block; margin:' + trial.margin_vertical + ' ' + trial.margin_horizontal + '" id="jspsych-audio-button-response-button-' + i + '" data-choice="' + i + '">' + str + '</div>';
}
html += '</div>';
//show prompt if there is one
if (trial.prompt !== null) {
html += trial.prompt;
}
display_element.innerHTML = html;
if (trial.response_allowed_while_playing) {
enable_buttons();
} else {
disable_buttons();
}
// start time
startTime = performance.now();
// start audio
if (context !== null) {
startTime = context.currentTime;
audio.start(startTime);
} else {
audio.play();
}
// end trial if time limit is set
if (trial.trial_duration !== null) {
jsPsych.pluginAPI.setTimeout(function () {
end_trial();
}, trial.trial_duration);
}
}
// function to handle responses by the subject
function after_response(choice) {
// measure rt
var endTime = performance.now();
var rt = endTime - startTime;
if(context !== null){
endTime = context.currentTime;
rt = Math.round((endTime - startTime) * 1000);
}
if (context !== null) {
endTime = context.currentTime;
rt = Math.round((endTime - startTime) * 1000);
}
response.button = parseInt(choice);
response.rt = rt;
@ -181,17 +209,16 @@ jsPsych.plugins["audio-button-response"] = (function() {
// kill any remaining setTimeout handlers
jsPsych.pluginAPI.clearAllTimeouts();
// stop the audio file if it is playing
// remove end event listeners if they exist
if(context !== null){
source.stop();
source.removeEventListener('ended', end_trial);
source.removeEventListener('ended', enable_buttons);
} else {
audio.pause();
audio.removeEventListener('ended', end_trial);
audio.removeEventListener('ended', enable_buttons);
}
// stop the audio file if it is playing
// remove end event listeners if they exist
if (context !== null) {
audio.stop();
} else {
audio.pause();
}
audio.removeEventListener('ended', end_trial);
audio.removeEventListener('ended', enable_buttons);
// gather the data to store for the trial
var trial_data = {
@ -207,16 +234,16 @@ jsPsych.plugins["audio-button-response"] = (function() {
jsPsych.finishTrial(trial_data);
}
function button_response(e){
function button_response(e) {
var choice = e.currentTarget.getAttribute('data-choice'); // don't use dataset for jsdom compatibility
after_response(choice);
}
function disable_buttons() {
var btns = document.querySelectorAll('.jspsych-audio-button-response-button');
for (var i=0; i<btns.length; i++) {
for (var i = 0; i < btns.length; i++) {
var btn_el = btns[i].querySelector('button');
if(btn_el){
if (btn_el) {
btn_el.disabled = true;
}
btns[i].removeEventListener('click', button_response);
@ -225,32 +252,16 @@ jsPsych.plugins["audio-button-response"] = (function() {
function enable_buttons() {
var btns = document.querySelectorAll('.jspsych-audio-button-response-button');
for (var i=0; i<btns.length; i++) {
for (var i = 0; i < btns.length; i++) {
var btn_el = btns[i].querySelector('button');
if(btn_el){
if (btn_el) {
btn_el.disabled = false;
}
btns[i].addEventListener('click', button_response);
}
}
// start time
var startTime = performance.now();
// start audio
if(context !== null){
startTime = context.currentTime;
source.start(startTime);
} else {
audio.play();
}
// end trial if time limit is set
if (trial.trial_duration !== null) {
jsPsych.pluginAPI.setTimeout(function() {
end_trial();
}, trial.trial_duration);
}
};

View File

@ -8,7 +8,7 @@
*
**/
jsPsych.plugins["audio-keyboard-response"] = (function() {
jsPsych.plugins["audio-keyboard-response"] = (function () {
var plugin = {};
@ -59,38 +59,17 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
type: jsPsych.plugins.parameterType.BOOL,
pretty_name: 'Response allowed while playing',
default: true,
description: 'If true, then responses are allowed while the audio is playing. '+
description: 'If true, then responses are allowed while the audio is playing. ' +
'If false, then the audio must finish playing before a response is accepted.'
}
}
}
plugin.trial = function(display_element, trial) {
plugin.trial = function (display_element, trial) {
// setup stimulus
var context = jsPsych.pluginAPI.audioContext();
if(context !== null){
var source = context.createBufferSource();
source.buffer = jsPsych.pluginAPI.getAudioBuffer(trial.stimulus);
source.connect(context.destination);
} else {
var audio = jsPsych.pluginAPI.getAudioBuffer(trial.stimulus);
audio.currentTime = 0;
}
// set up end event if trial needs it
if(trial.trial_ends_after_audio){
if(context !== null){
source.addEventListener('ended', end_trial);
} else {
audio.addEventListener('ended', end_trial);
}
}
// show prompt if there is one
if (trial.prompt !== null) {
display_element.innerHTML = trial.prompt;
}
var audio;
// store response
var response = {
@ -98,6 +77,62 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
key: null
};
// record webaudio context start time
var startTime;
// load audio file
jsPsych.pluginAPI.getAudioBuffer(trial.stimulus)
.then(function (buffer) {
if (context !== null) {
audio = context.createBufferSource();
audio.buffer = buffer;
audio.connect(context.destination);
} else {
audio = buffer;
audio.currentTime = 0;
}
setupTrial();
})
.catch(function (err) {
console.error(`Failed to load audio file "${trial.stimulus}". Try checking the file path. We recommend using the preload plugin to load audio files.`)
console.error(err)
});
function setupTrial() {
// set up end event if trial needs it
if (trial.trial_ends_after_audio) {
audio.addEventListener('ended', end_trial);
}
// show prompt if there is one
if (trial.prompt !== null) {
display_element.innerHTML = trial.prompt;
}
// start audio
if (context !== null) {
startTime = context.currentTime;
audio.start(startTime);
} else {
audio.play();
}
// start keyboard listener when trial starts or sound ends
if (trial.response_allowed_while_playing) {
setup_keyboard_listener();
} else if (!trial.trial_ends_after_audio) {
audio.addEventListener('ended', setup_keyboard_listener);
}
// end trial if time limit is set
if (trial.trial_duration !== null) {
jsPsych.pluginAPI.setTimeout(function () {
end_trial();
}, trial.trial_duration);
}
}
// function to end trial when it is time
function end_trial() {
@ -106,21 +141,21 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
// stop the audio file if it is playing
// remove end event listeners if they exist
if(context !== null){
source.stop();
source.removeEventListener('ended', end_trial);
source.removeEventListener('ended', setup_keyboard_listener);
if (context !== null) {
audio.stop();
} else {
audio.pause();
audio.removeEventListener('ended', end_trial);
audio.removeEventListener('ended', setup_keyboard_listener);
}
audio.removeEventListener('ended', end_trial);
audio.removeEventListener('ended', setup_keyboard_listener);
// kill keyboard listeners
jsPsych.pluginAPI.cancelAllKeyboardResponses();
// gather the data to store for the trial
if(context !== null && response.rt !== null){
if (context !== null && response.rt !== null) {
response.rt = Math.round(response.rt * 1000);
}
var trial_data = {
@ -137,7 +172,7 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
}
// function to handle responses by the subject
var after_response = function(info) {
function after_response(info) {
// only record the first response
if (response.key == null) {
@ -151,8 +186,8 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
function setup_keyboard_listener() {
// start the response listener
if(context !== null) {
var keyboardListener = jsPsych.pluginAPI.getKeyboardResponse({
if (context !== null) {
jsPsych.pluginAPI.getKeyboardResponse({
callback_function: after_response,
valid_responses: trial.choices,
rt_method: 'audio',
@ -162,7 +197,7 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
audio_context_start_time: startTime
});
} else {
var keyboardListener = jsPsych.pluginAPI.getKeyboardResponse({
jsPsych.pluginAPI.getKeyboardResponse({
callback_function: after_response,
valid_responses: trial.choices,
rt_method: 'performance',
@ -171,33 +206,6 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
});
}
}
// start audio
if(context !== null){
var startTime = context.currentTime;
source.start(startTime);
} else {
audio.play();
}
// start keyboard listener when trial starts or sound ends
if (trial.response_allowed_while_playing) {
setup_keyboard_listener();
} else if (!trial.trial_ends_after_audio) {
if(context !== null){
source.addEventListener('ended', setup_keyboard_listener);
} else {
audio.addEventListener('ended', setup_keyboard_listener);
}
}
// end trial if time limit is set
if (trial.trial_duration !== null) {
jsPsych.pluginAPI.setTimeout(function() {
end_trial();
}, trial.trial_duration);
}
};
return plugin;

View File

@ -1,11 +1,11 @@
jsPsych.plugins['audio-slider-response'] = (function() {
var plugin = {};
jsPsych.plugins['audio-slider-response'] = (function () {
var plugin = {};
jsPsych.pluginAPI.registerPreload('audio-slider-response', 'stimulus', 'audio');
jsPsych.pluginAPI.registerPreload('audio-slider-response', 'stimulus', 'audio');
plugin.info = {
name: 'audio-slider-response',
description: '',
plugin.info = {
name: 'audio-slider-response',
description: '',
parameters: {
stimulus: {
type: jsPsych.plugins.parameterType.AUDIO,
@ -25,12 +25,12 @@ jsPsych.plugins['audio-slider-response'] = (function() {
default: 100,
description: 'Sets the maximum value of the slider',
},
slider_start: {
type: jsPsych.plugins.parameterType.INT,
pretty_name: 'Slider starting value',
default: 50,
description: 'Sets the starting value of the slider',
},
slider_start: {
type: jsPsych.plugins.parameterType.INT,
pretty_name: 'Slider starting value',
default: 50,
description: 'Sets the starting value of the slider',
},
step: {
type: jsPsych.plugins.parameterType.INT,
pretty_name: 'Step',
@ -39,14 +39,14 @@ jsPsych.plugins['audio-slider-response'] = (function() {
},
labels: {
type: jsPsych.plugins.parameterType.HTML_STRING,
pretty_name:'Labels',
pretty_name: 'Labels',
default: [],
array: true,
description: 'Labels of the slider.',
},
slider_width: {
type: jsPsych.plugins.parameterType.INT,
pretty_name:'Slider width',
pretty_name: 'Slider width',
default: null,
description: 'Width of the slider in pixels.'
},
@ -85,138 +85,180 @@ jsPsych.plugins['audio-slider-response'] = (function() {
type: jsPsych.plugins.parameterType.BOOL,
pretty_name: 'Response allowed while playing',
default: true,
description: 'If true, then responses are allowed while the audio is playing. '+
description: 'If true, then responses are allowed while the audio is playing. ' +
'If false, then the audio must finish playing before a response is accepted.'
}
}
}
plugin.trial = function(display_element, trial) {
plugin.trial = function (display_element, trial) {
// half of the thumb width value from jspsych.css, used to adjust the label positions
var half_thumb_width = 7.5;
var half_thumb_width = 7.5;
// setup stimulus
var context = jsPsych.pluginAPI.audioContext();
if(context !== null){
var source = context.createBufferSource();
source.buffer = jsPsych.pluginAPI.getAudioBuffer(trial.stimulus);
source.connect(context.destination);
} else {
var audio = jsPsych.pluginAPI.getAudioBuffer(trial.stimulus);
audio.currentTime = 0;
}
var audio;
// set up end event if trial needs it
if(trial.trial_ends_after_audio){
if(context !== null){
source.addEventListener('ended', end_trial);
} else {
audio.addEventListener('ended', end_trial);
}
}
// record webaudio context start time
var startTime;
// enable slider after audio ends if necessary
if ((!trial.response_allowed_while_playing) & (!trial.trial_ends_after_audio)) {
if (context !== null) {
source.addEventListener('ended', enable_slider);
} else {
audio.addEventListener('ended', enable_slider);
}
}
// for storing data related to response
var response;
var html = '<div id="jspsych-audio-slider-response-wrapper" style="margin: 100px 0px;">';
html += '<div class="jspsych-audio-slider-response-container" style="position:relative; margin: 0 auto 3em auto; width:';
if(trial.slider_width !== null){
html += trial.slider_width+'px;';
} else {
html += 'auto;';
}
html += '">';
html += '<input type="range" class="jspsych-slider" value="'+trial.slider_start+'" min="'+trial.min+'" max="'+trial.max+'" step="'+trial.step+'" id="jspsych-audio-slider-response-response"';
if (!trial.response_allowed_while_playing) {
html += ' disabled';
}
html += '></input><div>'
for(var j=0; j < trial.labels.length; j++){
var label_width_perc = 100/(trial.labels.length-1);
var percent_of_range = j * (100/(trial.labels.length - 1));
var percent_dist_from_center = ((percent_of_range-50)/50)*100;
var offset = (percent_dist_from_center * half_thumb_width)/100;
html += '<div style="border: 1px solid transparent; display: inline-block; position: absolute; '+
'left:calc('+percent_of_range+'% - ('+label_width_perc+'% / 2) - '+offset+'px); text-align: center; width: '+label_width_perc+'%;">';
html += '<span style="text-align: center; font-size: 80%;">'+trial.labels[j]+'</span>';
html += '</div>'
}
html += '</div>';
html += '</div>';
html += '</div>';
if (trial.prompt !== null){
html += trial.prompt;
}
// add submit button
var next_disabled_attribute = "";
if (trial.require_movement | !trial.response_allowed_while_playing) {
next_disabled_attribute = "disabled";
}
html += '<button id="jspsych-audio-slider-response-next" class="jspsych-btn" '+ next_disabled_attribute + '>'+trial.button_label+'</button>';
display_element.innerHTML = html;
var response = {
rt: null,
response: null
};
if (!trial.response_allowed_while_playing) {
display_element.querySelector('#jspsych-audio-slider-response-response').disabled = true;
display_element.querySelector('#jspsych-audio-slider-response-next').disabled = true;
}
if(trial.require_movement){
display_element.querySelector('#jspsych-audio-slider-response-response').addEventListener('click', function(){
display_element.querySelector('#jspsych-audio-slider-response-next').disabled = false;
// load audio file
jsPsych.pluginAPI.getAudioBuffer(trial.stimulus)
.then(function (buffer) {
if (context !== null) {
audio = context.createBufferSource();
audio.buffer = buffer;
audio.connect(context.destination);
} else {
audio = buffer;
audio.currentTime = 0;
}
setupTrial();
})
.catch(function (err) {
console.error(`Failed to load audio file "${trial.stimulus}". Try checking the file path. We recommend using the preload plugin to load audio files.`)
console.error(err)
});
}
display_element.querySelector('#jspsych-audio-slider-response-next').addEventListener('click', function() {
// measure response time
var endTime = performance.now();
var rt = endTime - startTime;
if(context !== null){
endTime = context.currentTime;
rt = Math.round((endTime - startTime) * 1000);
}
response.rt = rt;
response.response = display_element.querySelector('#jspsych-audio-slider-response-response').valueAsNumber;
function setupTrial() {
if(trial.response_ends_trial){
end_trial();
// set up end event if trial needs it
if (trial.trial_ends_after_audio) {
audio.addEventListener('ended', end_trial);
}
// enable slider after audio ends if necessary
if ((!trial.response_allowed_while_playing) & (!trial.trial_ends_after_audio)) {
audio.addEventListener('ended', enable_slider);
}
var html = '<div id="jspsych-audio-slider-response-wrapper" style="margin: 100px 0px;">';
html += '<div class="jspsych-audio-slider-response-container" style="position:relative; margin: 0 auto 3em auto; width:';
if (trial.slider_width !== null) {
html += trial.slider_width + 'px;';
} else {
html += 'auto;';
}
html += '">';
html += '<input type="range" class="jspsych-slider" value="' + trial.slider_start + '" min="' + trial.min + '" max="' + trial.max + '" step="' + trial.step + '" id="jspsych-audio-slider-response-response"';
if (!trial.response_allowed_while_playing) {
html += ' disabled';
}
html += '></input><div>'
for (var j = 0; j < trial.labels.length; j++) {
var label_width_perc = 100 / (trial.labels.length - 1);
var percent_of_range = j * (100 / (trial.labels.length - 1));
var percent_dist_from_center = ((percent_of_range - 50) / 50) * 100;
var offset = (percent_dist_from_center * half_thumb_width) / 100;
html += '<div style="border: 1px solid transparent; display: inline-block; position: absolute; ' +
'left:calc(' + percent_of_range + '% - (' + label_width_perc + '% / 2) - ' + offset + 'px); text-align: center; width: ' + label_width_perc + '%;">';
html += '<span style="text-align: center; font-size: 80%;">' + trial.labels[j] + '</span>';
html += '</div>'
}
html += '</div>';
html += '</div>';
html += '</div>';
if (trial.prompt !== null) {
html += trial.prompt;
}
// add submit button
var next_disabled_attribute = "";
if (trial.require_movement | !trial.response_allowed_while_playing) {
next_disabled_attribute = "disabled";
}
html += '<button id="jspsych-audio-slider-response-next" class="jspsych-btn" ' + next_disabled_attribute + '>' + trial.button_label + '</button>';
display_element.innerHTML = html;
response = {
rt: null,
response: null
};
if (!trial.response_allowed_while_playing) {
display_element.querySelector('#jspsych-audio-slider-response-response').disabled = true;
display_element.querySelector('#jspsych-audio-slider-response-next').disabled = true;
}
});
if (trial.require_movement) {
display_element.querySelector('#jspsych-audio-slider-response-response').addEventListener('click', function () {
display_element.querySelector('#jspsych-audio-slider-response-next').disabled = false;
});
}
function end_trial(){
display_element.querySelector('#jspsych-audio-slider-response-next').addEventListener('click', function () {
// measure response time
var endTime = performance.now();
var rt = endTime - startTime;
if (context !== null) {
endTime = context.currentTime;
rt = Math.round((endTime - startTime) * 1000);
}
response.rt = rt;
response.response = display_element.querySelector('#jspsych-audio-slider-response-response').valueAsNumber;
if (trial.response_ends_trial) {
end_trial();
} else {
display_element.querySelector('#jspsych-audio-slider-response-next').disabled = true;
}
});
startTime = performance.now();
// start audio
if (context !== null) {
startTime = context.currentTime;
audio.start(startTime);
} else {
audio.play();
}
// end trial if trial_duration is set
if (trial.trial_duration !== null) {
jsPsych.pluginAPI.setTimeout(function () {
end_trial();
}, trial.trial_duration);
}
}
// function to enable slider after audio ends
function enable_slider() {
document.querySelector('#jspsych-audio-slider-response-response').disabled = false;
if (!trial.require_movement) {
document.querySelector('#jspsych-audio-slider-response-next').disabled = false;
}
}
function end_trial() {
// kill any remaining setTimeout handlers
jsPsych.pluginAPI.clearAllTimeouts();
// stop the audio file if it is playing
// remove end event listeners if they exist
if(context !== null){
source.stop();
source.removeEventListener('ended', end_trial);
source.removeEventListener('ended', enable_slider);
// remove end event listeners if they exist
if (context !== null) {
audio.stop();
} else {
audio.pause();
audio.removeEventListener('ended', end_trial);
audio.removeEventListener('ended', enable_slider);
}
audio.removeEventListener('ended', end_trial);
audio.removeEventListener('ended', enable_slider);
// save data
var trialdata = {
rt: response.rt,
@ -230,32 +272,6 @@ jsPsych.plugins['audio-slider-response'] = (function() {
// next trial
jsPsych.finishTrial(trialdata);
}
// function to enable slider after audio ends
function enable_slider() {
document.querySelector('#jspsych-audio-slider-response-response').disabled = false;
if (!trial.require_movement) {
document.querySelector('#jspsych-audio-slider-response-next').disabled = false;
}
}
var startTime = performance.now();
// start audio
if(context !== null){
startTime = context.currentTime;
source.start(startTime);
} else {
audio.play();
}
// end trial if trial_duration is set
if (trial.trial_duration !== null) {
jsPsych.pluginAPI.setTimeout(function() {
end_trial();
}, trial.trial_duration);
}
};
return plugin;