Changed AudioPlayer to implement interface; fixed build error; rewrote plugins using AudioPlayer and added tests; changed test-utils' clickTarget to respect disabled forms

This commit is contained in:
Bankminer78 2024-07-15 13:47:23 -04:00
parent d296962c7e
commit e2a9e6b33e
7 changed files with 660 additions and 408 deletions

View File

@ -3,7 +3,15 @@ export interface AudioPlayerOptions {
audioContext?: AudioContext;
}
export class AudioPlayer {
export interface AudioPlayerInterface {
load(): Promise<void>;
play(): void;
stop(): void;
addEventListener(eventName: string, callback: EventListenerOrEventListenerObject): void;
removeEventListener(eventName: string, callback: EventListenerOrEventListenerObject): void;
}
export class AudioPlayer implements AudioPlayerInterface {
private audio: HTMLAudioElement | AudioBufferSourceNode;
private audioContext: AudioContext | null;
private useWebAudio: boolean;

View File

@ -1,13 +1,62 @@
import { clickTarget, simulateTimeline, startTimeline } from "@jspsych/test-utils";
jest.mock("../../jspsych/src/modules/plugin-api/AudioPlayer");
import { clickTarget, flushPromises, simulateTimeline, startTimeline } from "@jspsych/test-utils";
import { initJsPsych } from "jspsych";
//@ts-expect-error mock
import { mockStop } from "../../jspsych/src/modules/plugin-api/AudioPlayer";
import audioButtonResponse from ".";
jest.useFakeTimers();
beforeEach(() => {
jest.clearAllMocks();
});
// skip this until we figure out how to mock the audio loading
describe.skip("audio-button-response", () => {
describe("audio-button-response", () => {
it.only("works with all defaults", async () => {
const { expectFinished, expectRunning, displayElement, getHTML } = await startTimeline([
{
type: audioButtonResponse,
choices: ["choice1"],
stimulus: "foo.mp3",
},
]);
expectRunning();
console.log(getHTML());
clickTarget(displayElement.querySelector("button"));
expectFinished();
await flushPromises();
});
it("works with use_webaudio:false", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning, displayElement } = await startTimeline(
[
{
type: audioButtonResponse,
choices: ["choice1"],
stimulus: "foo.mp3",
},
],
jsPsych
);
await expectRunning();
clickTarget(displayElement.querySelector("button"));
await expectFinished();
});
test("on_load event triggered after page setup complete", async () => {
const onLoadCallback = jest.fn();
const timeline = [
{
type: audioButtonResponse,
@ -15,9 +64,7 @@ describe.skip("audio-button-response", () => {
prompt: "foo",
choices: ["choice1"],
on_load: () => {
expect(getHTML()).toContain("foo");
clickTarget(displayElement.querySelector("button"));
onLoadCallback();
},
},
];
@ -26,11 +73,107 @@ describe.skip("audio-button-response", () => {
use_webaudio: false,
});
const { getHTML, finished, displayElement } = await startTimeline(timeline, jsPsych);
await startTimeline(timeline, jsPsych);
expect(getHTML()).not.toContain("foo");
expect(onLoadCallback).toHaveBeenCalled();
});
it("trial ends when button is clicked", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
await finished;
const { expectFinished, expectRunning, displayElement } = await startTimeline(
[
{
type: audioButtonResponse,
stimulus: "foo.mp3",
prompt: "foo",
choices: ["choice1"],
},
],
jsPsych
);
await expectRunning();
clickTarget(displayElement.querySelector("button"));
await expectFinished();
});
it("ends when trial_ends_after_audio is true and audio finishes", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning } = await startTimeline(
[
{
type: audioButtonResponse,
stimulus: "foo.mp3",
choices: ["choice1"],
trial_duration: 30000,
trial_ends_after_audio: true,
},
],
jsPsych
);
await expectRunning();
jest.runAllTimers();
await expectFinished();
});
it("ends when trial_duration is shorter than the audio duration, stopping the audio", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning } = await startTimeline(
[
{
type: audioButtonResponse,
stimulus: "foo.mp3",
choices: ["choice1"],
trial_duration: 500,
},
],
jsPsych
);
await expectRunning();
expect(mockStop).not.toHaveBeenCalled();
jest.advanceTimersByTime(500);
expect(mockStop).toHaveBeenCalled();
await expectFinished();
});
it("prevents responses when response_allowed_while_playing is false", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning, displayElement, getHTML } = await startTimeline(
[
{
type: audioButtonResponse,
stimulus: "foo.mp3",
choices: ["choice1"],
response_allowed_while_playing: false,
},
],
jsPsych
);
await expectRunning();
clickTarget(displayElement.querySelector("button"));
await expectRunning();
jest.runAllTimers();
await expectRunning();
clickTarget(displayElement.querySelector("button"));
await expectFinished();
});
});

View File

@ -1,5 +1,8 @@
import autoBind from "auto-bind";
import { JsPsych, JsPsychPlugin, ParameterType, TrialType } from "jspsych";
import { AudioPlayerInterface } from "../../jspsych/src/modules/plugin-api/AudioPlayer";
const info = <const>{
name: "audio-button-response",
parameters: {
@ -98,196 +101,163 @@ type Info = typeof info;
*/
class AudioButtonResponsePlugin implements JsPsychPlugin<Info> {
static info = info;
private audio;
private audio: AudioPlayerInterface;
private params: TrialType<Info>;
private buttonElements: HTMLElement[] = [];
private display: HTMLElement;
private response: { rt: number; button: number } = { rt: null, button: null };
private context: AudioContext;
private startTime: number;
private trial_complete: (trial_data: { rt: number; stimulus: string; response: number }) => void;
constructor(private jsPsych: JsPsych) {}
constructor(private jsPsych: JsPsych) {
autoBind(this);
}
trial(display_element: HTMLElement, trial: TrialType<Info>, on_load: () => void) {
async trial(display_element: HTMLElement, trial: TrialType<Info>, on_load: () => void) {
// hold the .resolve() function from the Promise that ends the trial
let trial_complete;
this.trial_complete;
this.params = trial;
this.display = display_element;
// setup stimulus
var context = this.jsPsych.pluginAPI.audioContext();
// store response
var response = {
rt: null,
button: null,
};
// record webaudio context start time
var startTime;
this.context = this.jsPsych.pluginAPI.audioContext();
// load audio file
this.jsPsych.pluginAPI
.getAudioBuffer(trial.stimulus)
.then((buffer) => {
if (context !== null) {
this.audio = context.createBufferSource();
this.audio.buffer = buffer;
this.audio.connect(context.destination);
} else {
this.audio = buffer;
this.audio.currentTime = 0;
}
setupTrial();
})
.catch((err) => {
console.error(
`Failed to load audio file "${trial.stimulus}". Try checking the file path. We recommend using the preload plugin to load audio files.`
this.audio = await this.jsPsych.pluginAPI.getAudioPlayer(trial.stimulus);
// set up end event if trial needs it
if (trial.trial_ends_after_audio) {
this.audio.addEventListener("ended", this.end_trial);
}
// enable buttons after audio ends if necessary
if (!trial.response_allowed_while_playing && !trial.trial_ends_after_audio) {
this.audio.addEventListener("ended", this.enable_buttons);
}
// record start time
this.startTime = performance.now();
// Display buttons
const buttonGroupElement = document.createElement("div");
buttonGroupElement.id = "jspsych-audio-button-response-btngroup";
if (trial.button_layout === "grid") {
buttonGroupElement.classList.add("jspsych-btn-group-grid");
if (trial.grid_rows === null && trial.grid_columns === null) {
throw new Error(
"You cannot set `grid_rows` to `null` without providing a value for `grid_columns`."
);
console.error(err);
}
const n_cols =
trial.grid_columns === null
? Math.ceil(trial.choices.length / trial.grid_rows)
: trial.grid_columns;
const n_rows =
trial.grid_rows === null
? Math.ceil(trial.choices.length / trial.grid_columns)
: trial.grid_rows;
buttonGroupElement.style.gridTemplateColumns = `repeat(${n_cols}, 1fr)`;
buttonGroupElement.style.gridTemplateRows = `repeat(${n_rows}, 1fr)`;
} else if (trial.button_layout === "flex") {
buttonGroupElement.classList.add("jspsych-btn-group-flex");
}
for (const [choiceIndex, choice] of trial.choices.entries()) {
buttonGroupElement.insertAdjacentHTML("beforeend", trial.button_html(choice, choiceIndex));
const buttonElement = buttonGroupElement.lastChild as HTMLElement;
buttonElement.dataset.choice = choiceIndex.toString();
buttonElement.addEventListener("click", () => {
this.after_response(choiceIndex);
});
this.buttonElements.push(buttonElement);
}
const setupTrial = () => {
// set up end event if trial needs it
if (trial.trial_ends_after_audio) {
this.audio.addEventListener("ended", end_trial);
}
display_element.appendChild(buttonGroupElement);
// enable buttons after audio ends if necessary
if (!trial.response_allowed_while_playing && !trial.trial_ends_after_audio) {
this.audio.addEventListener("ended", enable_buttons);
}
// Show prompt if there is one
if (trial.prompt !== null) {
display_element.insertAdjacentHTML("beforeend", trial.prompt);
}
// Display buttons
const buttonGroupElement = document.createElement("div");
buttonGroupElement.id = "jspsych-audio-button-response-btngroup";
if (trial.button_layout === "grid") {
buttonGroupElement.classList.add("jspsych-btn-group-grid");
if (trial.grid_rows === null && trial.grid_columns === null) {
throw new Error(
"You cannot set `grid_rows` to `null` without providing a value for `grid_columns`."
);
}
const n_cols =
trial.grid_columns === null
? Math.ceil(trial.choices.length / trial.grid_rows)
: trial.grid_columns;
const n_rows =
trial.grid_rows === null
? Math.ceil(trial.choices.length / trial.grid_columns)
: trial.grid_rows;
buttonGroupElement.style.gridTemplateColumns = `repeat(${n_cols}, 1fr)`;
buttonGroupElement.style.gridTemplateRows = `repeat(${n_rows}, 1fr)`;
} else if (trial.button_layout === "flex") {
buttonGroupElement.classList.add("jspsych-btn-group-flex");
}
if (!trial.response_allowed_while_playing) {
this.disable_buttons();
}
for (const [choiceIndex, choice] of trial.choices.entries()) {
buttonGroupElement.insertAdjacentHTML("beforeend", trial.button_html(choice, choiceIndex));
const buttonElement = buttonGroupElement.lastChild as HTMLElement;
buttonElement.dataset.choice = choiceIndex.toString();
buttonElement.addEventListener("click", () => {
after_response(choiceIndex);
});
this.buttonElements.push(buttonElement);
}
// end trial if time limit is set
if (trial.trial_duration !== null) {
this.jsPsych.pluginAPI.setTimeout(() => {
this.end_trial();
}, trial.trial_duration);
}
display_element.appendChild(buttonGroupElement);
on_load();
// Show prompt if there is one
if (trial.prompt !== null) {
display_element.insertAdjacentHTML("beforeend", trial.prompt);
}
if (!trial.response_allowed_while_playing) {
disable_buttons();
}
// start time
startTime = performance.now();
// start audio
if (context !== null) {
startTime = context.currentTime;
this.audio.start(startTime);
} else {
this.audio.play();
}
// end trial if time limit is set
if (trial.trial_duration !== null) {
this.jsPsych.pluginAPI.setTimeout(() => {
end_trial();
}, trial.trial_duration);
}
on_load();
};
// function to handle responses by the subject
const after_response = (choice) => {
// measure rt
var endTime = performance.now();
var rt = Math.round(endTime - startTime);
if (context !== null) {
endTime = context.currentTime;
rt = Math.round((endTime - startTime) * 1000);
}
response.button = parseInt(choice);
response.rt = rt;
// disable all the buttons after a response
disable_buttons();
if (trial.response_ends_trial) {
end_trial();
}
};
// function to end trial when it is time
const end_trial = () => {
// kill any remaining setTimeout handlers
this.jsPsych.pluginAPI.clearAllTimeouts();
// stop the audio file if it is playing
// remove end event listeners if they exist
if (context !== null) {
this.audio.stop();
} else {
this.audio.pause();
}
this.audio.removeEventListener("ended", end_trial);
this.audio.removeEventListener("ended", enable_buttons);
// gather the data to store for the trial
var trial_data = {
rt: response.rt,
stimulus: trial.stimulus,
response: response.button,
};
// clear the display
display_element.innerHTML = "";
// move on to the next trial
this.jsPsych.finishTrial(trial_data);
trial_complete();
};
const disable_buttons = () => {
for (const button of this.buttonElements) {
button.setAttribute("disabled", "disabled");
}
};
const enable_buttons = () => {
for (const button of this.buttonElements) {
button.removeAttribute("disabled");
}
};
this.audio.play();
return new Promise((resolve) => {
trial_complete = resolve;
this.trial_complete = resolve;
});
}
simulate(
private disable_buttons = () => {
for (const button of this.buttonElements) {
button.setAttribute("disabled", "disabled");
}
};
private enable_buttons = () => {
for (const button of this.buttonElements) {
button.removeAttribute("disabled");
}
};
// function to handle responses by the subject
private after_response = (choice) => {
// measure rt
var endTime = performance.now();
var rt = Math.round(endTime - this.startTime);
if (this.context !== null) {
endTime = this.context.currentTime;
rt = Math.round((endTime - this.startTime) * 1000);
}
this.response.button = parseInt(choice);
this.response.rt = rt;
// disable all the buttons after a response
this.disable_buttons();
if (this.params.response_ends_trial) {
this.end_trial();
}
};
// method to end trial when it is time
private end_trial = () => {
// kill any remaining setTimeout handlers
this.jsPsych.pluginAPI.clearAllTimeouts();
// stop the audio file if it is playing
this.audio.stop();
// remove end event listeners if they exist
this.audio.removeEventListener("ended", this.end_trial);
this.audio.removeEventListener("ended", this.enable_buttons);
// gather the data to store for the trial
var trial_data = {
rt: this.response.rt,
stimulus: this.params.stimulus,
response: this.response.button,
};
// clear the display
this.display.innerHTML = "";
// move on to the next trial
this.trial_complete(trial_data);
};
async simulate(
trial: TrialType<Info>,
simulation_mode,
simulation_options: any,

View File

@ -1,7 +1,7 @@
import autoBind from "auto-bind";
import { JsPsych, JsPsychPlugin, ParameterType, TrialType } from "jspsych";
import { AudioPlayer } from "../../jspsych/src/modules/plugin-api/AudioPlayer";
import { AudioPlayerInterface } from "../../jspsych/src/modules/plugin-api/AudioPlayer";
const info = <const>{
name: "audio-keyboard-response",
@ -63,7 +63,7 @@ type Info = typeof info;
*/
class AudioKeyboardResponsePlugin implements JsPsychPlugin<Info> {
static info = info;
private audio: AudioPlayer;
private audio: AudioPlayerInterface;
private params: TrialType<Info>;
private display: HTMLElement;
private response: { rt: number; key: string } = { rt: null, key: null };
@ -79,7 +79,6 @@ class AudioKeyboardResponsePlugin implements JsPsychPlugin<Info> {
this.finish = resolve;
this.params = trial;
this.display = display_element;
// load audio file
this.audio = await this.jsPsych.pluginAPI.getAudioPlayer(trial.stimulus);

View File

@ -1,10 +1,140 @@
import { pressKey, simulateTimeline, startTimeline } from "@jspsych/test-utils";
jest.mock("../../jspsych/src/modules/plugin-api/AudioPlayer");
import {
clickTarget,
flushPromises,
pressKey,
simulateTimeline,
startTimeline,
} from "@jspsych/test-utils";
import { initJsPsych } from "jspsych";
//@ts-expect-error mock
import { mockStop } from "../../jspsych/src/modules/plugin-api/AudioPlayer";
import audioSliderResponse from ".";
jest.useFakeTimers();
beforeEach(() => {
jest.clearAllMocks();
});
describe("audio-slider-response", () => {
// this relies on AudioContext, which we haven't mocked yet
it.skip("works with all defaults", async () => {
const { expectFinished, expectRunning } = await startTimeline([
{
type: audioSliderResponse,
stimulus: "foo.mp3",
},
]);
expectRunning();
pressKey("a");
expectFinished();
await flushPromises();
});
it("works with use_webaudio:false", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning, displayElement, getHTML } = await startTimeline(
[
{
type: audioSliderResponse,
stimulus: "foo.mp3",
},
],
jsPsych
);
await expectRunning();
//jest.runAllTimers();
clickTarget(displayElement.querySelector("button"));
await expectFinished();
});
it("ends when trial_ends_after_audio is true and audio finishes", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning } = await startTimeline(
[
{
type: audioSliderResponse,
stimulus: "foo.mp3",
trial_ends_after_audio: true,
},
],
jsPsych
);
await expectRunning();
jest.runAllTimers();
await expectFinished();
});
it("prevents responses when response_allowed_while_playing is false", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning, displayElement } = await startTimeline(
[
{
type: audioSliderResponse,
stimulus: "foo.mp3",
response_allowed_while_playing: false,
},
],
jsPsych
);
await expectRunning();
clickTarget(displayElement.querySelector("button"));
await expectRunning();
jest.runAllTimers();
await expectRunning();
clickTarget(displayElement.querySelector("button"));
await expectFinished();
});
it("ends when trial_duration is shorter than the audio duration, stopping the audio", async () => {
const jsPsych = initJsPsych({ use_webaudio: false });
const { expectFinished, expectRunning } = await startTimeline(
[
{
type: audioSliderResponse,
stimulus: "foo.mp3",
trial_duration: 500,
},
],
jsPsych
);
await expectRunning();
expect(mockStop).not.toHaveBeenCalled();
jest.advanceTimersByTime(500);
expect(mockStop).toHaveBeenCalled();
await expectFinished();
});
});
describe("audio-slider-response simulation", () => {
test("data mode works", async () => {
const timeline = [

View File

@ -1,5 +1,8 @@
import autoBind from "auto-bind";
import { JsPsych, JsPsychPlugin, ParameterType, TrialType } from "jspsych";
import { AudioPlayerInterface } from "../../jspsych/src/modules/plugin-api/AudioPlayer";
const info = <const>{
name: "audio-slider-response",
parameters: {
@ -104,245 +107,237 @@ type Info = typeof info;
*/
class AudioSliderResponsePlugin implements JsPsychPlugin<Info> {
static info = info;
private audio;
private audio: AudioPlayerInterface;
private context: AudioContext;
private params: TrialType<Info>;
private display: HTMLElement;
private response: { rt: number; response: number } = { rt: null, response: null };
private startTime: number;
private half_thumb_width: number;
private trial_complete: (trial_data: {
rt: number;
slider_start: number;
response: number;
}) => void;
constructor(private jsPsych: JsPsych) {}
trial(display_element: HTMLElement, trial: TrialType<Info>, on_load: () => void) {
// hold the .resolve() function from the Promise that ends the trial
let trial_complete;
constructor(private jsPsych: JsPsych) {
autoBind(this);
}
async trial(display_element: HTMLElement, trial: TrialType<Info>, on_load: () => void) {
// record webaudio context start time
this.startTime;
this.params = trial;
this.display = display_element;
// for storing data related to response
this.response;
// half of the thumb width value from jspsych.css, used to adjust the label positions
var half_thumb_width = 7.5;
this.half_thumb_width = 7.5;
// hold the .resolve() function from the Promise that ends the trial
this.trial_complete;
// setup stimulus
var context = this.jsPsych.pluginAPI.audioContext();
// record webaudio context start time
var startTime;
// for storing data related to response
var response;
this.context = this.jsPsych.pluginAPI.audioContext();
// load audio file
this.jsPsych.pluginAPI
.getAudioBuffer(trial.stimulus)
.then((buffer) => {
if (context !== null) {
this.audio = context.createBufferSource();
this.audio.buffer = buffer;
this.audio.connect(context.destination);
} else {
this.audio = buffer;
this.audio.currentTime = 0;
}
setupTrial();
})
.catch((err) => {
console.error(
`Failed to load audio file "${trial.stimulus}". Try checking the file path. We recommend using the preload plugin to load audio files.`
);
console.error(err);
});
this.audio = await this.jsPsych.pluginAPI.getAudioPlayer(trial.stimulus);
const setupTrial = () => {
// set up end event if trial needs it
if (trial.trial_ends_after_audio) {
this.audio.addEventListener("ended", end_trial);
}
this.setupTrial();
// enable slider after audio ends if necessary
if (!trial.response_allowed_while_playing && !trial.trial_ends_after_audio) {
this.audio.addEventListener("ended", enable_slider);
}
var html = '<div id="jspsych-audio-slider-response-wrapper" style="margin: 100px 0px;">';
html +=
'<div class="jspsych-audio-slider-response-container" style="position:relative; margin: 0 auto 3em auto; width:';
if (trial.slider_width !== null) {
html += trial.slider_width + "px;";
} else {
html += "auto;";
}
html += '">';
html +=
'<input type="range" class="jspsych-slider" value="' +
trial.slider_start +
'" min="' +
trial.min +
'" max="' +
trial.max +
'" step="' +
trial.step +
'" id="jspsych-audio-slider-response-response"';
if (!trial.response_allowed_while_playing) {
html += " disabled";
}
html += "></input><div>";
for (var j = 0; j < trial.labels.length; j++) {
var label_width_perc = 100 / (trial.labels.length - 1);
var percent_of_range = j * (100 / (trial.labels.length - 1));
var percent_dist_from_center = ((percent_of_range - 50) / 50) * 100;
var offset = (percent_dist_from_center * half_thumb_width) / 100;
html +=
'<div style="border: 1px solid transparent; display: inline-block; position: absolute; ' +
"left:calc(" +
percent_of_range +
"% - (" +
label_width_perc +
"% / 2) - " +
offset +
"px); text-align: center; width: " +
label_width_perc +
'%;">';
html += '<span style="text-align: center; font-size: 80%;">' + trial.labels[j] + "</span>";
html += "</div>";
}
html += "</div>";
html += "</div>";
html += "</div>";
if (trial.prompt !== null) {
html += trial.prompt;
}
// add submit button
var next_disabled_attribute = "";
if (trial.require_movement || !trial.response_allowed_while_playing) {
next_disabled_attribute = "disabled";
}
html +=
'<button id="jspsych-audio-slider-response-next" class="jspsych-btn" ' +
next_disabled_attribute +
">" +
trial.button_label +
"</button>";
display_element.innerHTML = html;
response = {
rt: null,
response: null,
};
if (!trial.response_allowed_while_playing) {
display_element.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-response"
).disabled = true;
display_element.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-next"
).disabled = true;
}
if (trial.require_movement) {
const enable_button = () => {
display_element.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-next"
).disabled = false;
};
display_element
.querySelector("#jspsych-audio-slider-response-response")
.addEventListener("mousedown", enable_button);
display_element
.querySelector("#jspsych-audio-slider-response-response")
.addEventListener("touchstart", enable_button);
display_element
.querySelector("#jspsych-audio-slider-response-response")
.addEventListener("change", enable_button);
}
display_element
.querySelector("#jspsych-audio-slider-response-next")
.addEventListener("click", () => {
// measure response time
var endTime = performance.now();
var rt = Math.round(endTime - startTime);
if (context !== null) {
endTime = context.currentTime;
rt = Math.round((endTime - startTime) * 1000);
}
response.rt = rt;
response.response = display_element.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-response"
).valueAsNumber;
if (trial.response_ends_trial) {
end_trial();
} else {
display_element.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-next"
).disabled = true;
}
});
startTime = performance.now();
// start audio
if (context !== null) {
startTime = context.currentTime;
this.audio.start(startTime);
} else {
this.audio.play();
}
// end trial if trial_duration is set
if (trial.trial_duration !== null) {
this.jsPsych.pluginAPI.setTimeout(() => {
end_trial();
}, trial.trial_duration);
}
on_load();
};
// function to enable slider after audio ends
function enable_slider() {
document.querySelector<HTMLInputElement>("#jspsych-audio-slider-response-response").disabled =
false;
if (!trial.require_movement) {
document.querySelector<HTMLButtonElement>("#jspsych-audio-slider-response-next").disabled =
false;
}
}
const end_trial = () => {
// kill any remaining setTimeout handlers
this.jsPsych.pluginAPI.clearAllTimeouts();
// stop the audio file if it is playing
// remove end event listeners if they exist
if (context !== null) {
this.audio.stop();
} else {
this.audio.pause();
}
this.audio.removeEventListener("ended", end_trial);
this.audio.removeEventListener("ended", enable_slider);
// save data
var trialdata = {
rt: response.rt,
stimulus: trial.stimulus,
slider_start: trial.slider_start,
response: response.response,
};
display_element.innerHTML = "";
// next trial
this.jsPsych.finishTrial(trialdata);
trial_complete();
};
on_load();
return new Promise((resolve) => {
trial_complete = resolve;
this.trial_complete = resolve;
console.log("PROMISE");
});
}
// to enable slider after audio ends
private enable_slider() {
document.querySelector<HTMLInputElement>("#jspsych-audio-slider-response-response").disabled =
false;
if (!this.params.require_movement) {
document.querySelector<HTMLButtonElement>("#jspsych-audio-slider-response-next").disabled =
false;
}
}
private setupTrial = () => {
console.log("SETUP TRIAL");
// set up end event if trial needs it
if (this.params.trial_ends_after_audio) {
this.audio.addEventListener("ended", this.end_trial);
}
// enable slider after audio ends if necessary
if (!this.params.response_allowed_while_playing && !this.params.trial_ends_after_audio) {
this.audio.addEventListener("ended", this.enable_slider);
}
var html = '<div id="jspsych-audio-slider-response-wrapper" style="margin: 100px 0px;">';
html +=
'<div class="jspsych-audio-slider-response-container" style="position:relative; margin: 0 auto 3em auto; width:';
if (this.params.slider_width !== null) {
html += this.params.slider_width + "px;";
} else {
html += "auto;";
}
html += '">';
html +=
'<input type="range" class="jspsych-slider" value="' +
this.params.slider_start +
'" min="' +
this.params.min +
'" max="' +
this.params.max +
'" step="' +
this.params.step +
'" id="jspsych-audio-slider-response-response"';
if (!this.params.response_allowed_while_playing) {
html += " disabled";
}
html += "></input><div>";
for (var j = 0; j < this.params.labels.length; j++) {
var label_width_perc = 100 / (this.params.labels.length - 1);
var percent_of_range = j * (100 / (this.params.labels.length - 1));
var percent_dist_from_center = ((percent_of_range - 50) / 50) * 100;
var offset = (percent_dist_from_center * this.half_thumb_width) / 100;
html +=
'<div style="border: 1px solid transparent; display: inline-block; position: absolute; ' +
"left:calc(" +
percent_of_range +
"% - (" +
label_width_perc +
"% / 2) - " +
offset +
"px); text-align: center; width: " +
label_width_perc +
'%;">';
html +=
'<span style="text-align: center; font-size: 80%;">' + this.params.labels[j] + "</span>";
html += "</div>";
}
html += "</div>";
html += "</div>";
html += "</div>";
if (this.params.prompt !== null) {
html += this.params.prompt;
}
// add submit button
var next_disabled_attribute = "";
if (this.params.require_movement || !this.params.response_allowed_while_playing) {
next_disabled_attribute = "disabled";
}
html +=
'<button id="jspsych-audio-slider-response-next" class="jspsych-btn" ' +
next_disabled_attribute +
">" +
this.params.button_label +
"</button>";
this.display.innerHTML = html;
console.log("iinner", this.display.innerHTML);
this.response = {
rt: null,
response: null,
};
if (!this.params.response_allowed_while_playing) {
this.display.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-response"
).disabled = true;
this.display.querySelector<HTMLInputElement>("#jspsych-audio-slider-response-next").disabled =
true;
}
if (this.params.require_movement) {
const enable_button = () => {
this.display.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-next"
).disabled = false;
};
this.display
.querySelector("#jspsych-audio-slider-response-response")
.addEventListener("mousedown", enable_button);
this.display
.querySelector("#jspsych-audio-slider-response-response")
.addEventListener("touchstart", enable_button);
this.display
.querySelector("#jspsych-audio-slider-response-response")
.addEventListener("change", enable_button);
}
this.display
.querySelector("#jspsych-audio-slider-response-next")
.addEventListener("click", () => {
// measure response time
var endTime = performance.now();
var rt = Math.round(endTime - this.startTime);
if (this.context !== null) {
endTime = this.context.currentTime;
rt = Math.round((endTime - this.startTime) * 1000);
}
this.response.rt = rt;
this.response.response = this.display.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-response"
).valueAsNumber;
if (this.params.response_ends_trial) {
this.end_trial();
} else {
this.display.querySelector<HTMLInputElement>(
"#jspsych-audio-slider-response-next"
).disabled = true;
}
});
this.startTime = performance.now();
// start audio
this.audio.play();
// end trial if trial_duration is set
if (this.params.trial_duration !== null) {
this.jsPsych.pluginAPI.setTimeout(() => {
this.end_trial();
}, this.params.trial_duration);
}
console.log("END SETUP TRIAL");
};
private end_trial = () => {
// kill any remaining setTimeout handlers
this.jsPsych.pluginAPI.clearAllTimeouts();
// stop the audio file if it is playing
this.audio.stop();
// remove end event listeners if they exist
this.audio.removeEventListener("ended", this.end_trial);
this.audio.removeEventListener("ended", this.enable_slider);
// save data
var trialdata = {
rt: this.response.rt,
stimulus: this.params.stimulus,
slider_start: this.params.slider_start,
response: this.response.response,
};
this.display.innerHTML = "";
// next trial
this.trial_complete(trialdata);
};
simulate(
trial: TrialType<Info>,
simulation_mode,

View File

@ -33,6 +33,13 @@ export async function mouseDownMouseUpTarget(target: Element) {
}
export async function clickTarget(target: Element) {
// Check if the target is a form element and if it's disabled
if (target instanceof HTMLButtonElement || target instanceof HTMLInputElement) {
if (target.disabled) {
console.log("Target is disabled, not dispatching click event.");
return; // Exit the function if the target is disabled
}
}
await dispatchEvent(new MouseEvent("click", { bubbles: true }), target);
}