mirror of https://github.com/psychopy/psychojs.git synced 2025-05-10 10:40:54 +00:00

Merge pull request #443 from thewhodidthis/next

Catch up with main
Sotiri Bakagiannis 2021-07-26 06:58:57 +01:00 committed by GitHub
commit f2dbead84f
14 changed files with 1320 additions and 52 deletions

package-lock.json (generated, 4 changes)

@ -1,12 +1,12 @@
{
"name": "psychojs",
"version": "2021.2.0",
"version": "2021.2.x",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "psychojs",
"version": "2021.2.0",
"version": "2021.2.x",
"license": "MIT",
"dependencies": {
"howler": "^2.2.1",


@ -1,6 +1,6 @@
{
"name": "psychojs",
"version": "2021.2.0",
"version": "2021.2.1",
"private": true,
"description": "Helps run in-browser neuroscience, psychology, and psychophysics experiments",
"license": "MIT",


@ -142,9 +142,9 @@ export class PsychoJS
psychoJS: this,
});
// to be loading `configURL` files in `_configure` calls from
const hostsEvidently = new Set([...hosts, "https://pavlovia.org/run/", "https://run.pavlovia.org/"]);
this._hosts = Array.from(hostsEvidently);
// add the pavlovia server to the list of hosts:
const hostsWithPavlovia = new Set([...hosts, "https://pavlovia.org/run/", "https://run.pavlovia.org/"]);
this._hosts = Array.from(hostsWithPavlovia);
// GUI:
this._gui = new GUI(this);
@ -178,7 +178,7 @@ export class PsychoJS
this.logger.info("[PsychoJS] Initialised.");
this.logger.info("[PsychoJS] @version 2021.2.0");
// Hide #root::after
// hide the initialisation message:
jQuery("#root").addClass("is-ready");
}
@ -572,17 +572,17 @@ export class PsychoJS
{
this.status = PsychoJS.Status.CONFIGURING;
// if the experiment is running from the pavlovia.org server, we read the configuration file:
// if the experiment is running from an approved host, e.g. pavlovia.org,
// we read the configuration file:
const experimentUrl = window.location.href;
// go through each url in allow list
const isHost = this._hosts.some((url) => experimentUrl.indexOf(url) === 0);
const isHost = this._hosts.some(url => experimentUrl.indexOf(url) === 0);
if (isHost)
{
const serverResponse = await this._serverManager.getConfiguration(configURL);
this._config = serverResponse.config;
// legacy experiments had a psychoJsManager block instead of a pavlovia block,
// and the URL pointed to https://pavlovia.org/server
// update the configuration for legacy experiments, which had a psychoJsManager
// block instead of a pavlovia block, with URL pointing to https://pavlovia.org/server
if ("psychoJsManager" in this._config)
{
delete this._config.psychoJsManager;
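For context, a minimal sketch of how the merged host list could be exercised from an experiment script, assuming hosts is an option of the PsychoJS constructor (the constructor signature is not shown in this diff); the extra URL is illustrative:
// sketch under the above assumption: an experiment served from this origin would
// then fetch its configuration via getConfiguration(), exactly as on pavlovia.org
const psychoJS = new PsychoJS({
  hosts: ["https://experiments.example.org/run/"],  // hypothetical self-hosted origin
});
// pavlovia.org/run/ and run.pavlovia.org/ are always appended (see hostsWithPavlovia above)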


@ -429,15 +429,14 @@ export class ServerManager extends PsychObject
}
// whether all resources have been requested:
const allResources = (resources.length === 1 && resources[0] === ServerManager.ALL_RESOURCES);
const allResources = (resources.length === 1 &&
resources[0] === ServerManager.ALL_RESOURCES);
// if the experiment is hosted on the pavlovia.org server and
// resources is [ServerManager.ALL_RESOURCES], then we register all the resources
// in the "resources" sub-directory
if (
this._psychoJS.config.environment === ExperimentHandler.Environment.SERVER
&& allResources
)
if (this._psychoJS.config.environment === ExperimentHandler.Environment.SERVER &&
allResources)
{
// list the resources from the resources directory of the experiment on the server:
const serverResponse = await this._listResources();
@ -465,10 +464,8 @@ export class ServerManager extends PsychObject
{
// we cannot ask for all resources to be registered locally, since we cannot list
// them:
if (
this._psychoJS.config.environment === ExperimentHandler.Environment.LOCAL
&& allResources
)
if (this._psychoJS.config.environment === ExperimentHandler.Environment.LOCAL &&
allResources)
{
throw "resources must be manually specified when the experiment is running locally: ALL_RESOURCES cannot be used";
}
@ -506,8 +503,21 @@ export class ServerManager extends PsychObject
}
}
// download those registered resources for which download = true:
/*await*/ this._downloadResources(resourcesToDownload);
// download those registered resources for which download = true
// note: we return a Promise that will be resolved when all the resources are downloaded
return new Promise((resolve, reject) =>
{
const uuid = this.on(ServerManager.Event.RESOURCE, (signal) =>
{
if (signal.message === ServerManager.Event.DOWNLOAD_COMPLETED)
{
this.off(ServerManager.Event.RESOURCE, uuid);
resolve();
}
});
this._downloadResources(resourcesToDownload);
});
}
catch (error)
{
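Assuming the hunk above sits inside ServerManager.prepareResources (the enclosing method name is not shown in the hunk header, but that method is referenced elsewhere in this PR), the practical effect is that callers can now await the download of registered resources; a hedged sketch with illustrative resource names:
// sketch: wait for all registered resources before starting the first routine
await psychoJS.serverManager.prepareResources([
  { name: "stimuli.csv", path: "resources/stimuli.csv", download: true },
]);
// the returned Promise resolves only once a DOWNLOAD_COMPLETED message is emitted
// on the ServerManager.Event.RESOURCE channel (see the listener registered above)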
@ -746,14 +756,14 @@ export class ServerManager extends PsychObject
/**
* Asynchronously upload audio data to the pavlovia server.
*
* @name module:core.ServerManager#uploadAudio
* @name module:core.ServerManager#uploadAudioVideo
* @function
* @public
* @param {Blob} audioBlob - the audio blob to be uploaded
* @param {string} tag - additional tag
* @returns {Promise<ServerManager.UploadDataPromise>} the response
*/
async uploadAudio(audioBlob, tag)
async uploadAudioVideo(audioBlob, tag)
{
const response = {
origin: "ServerManager.uploadAudio",
@ -792,11 +802,11 @@ export class ServerManager extends PsychObject
// query the pavlovia server:
const response = await fetch(url, {
method: "POST",
mode: "cors", // no-cors, *cors, same-origin
cache: "no-cache", // *default, no-cache, reload, force-cache, only-if-cached
credentials: "same-origin", // include, *same-origin, omit
redirect: "follow", // manual, *follow, error
referrerPolicy: "no-referrer", // no-referrer, *no-referrer-when-downgrade, origin, origin-when-cross-origin, same-origin, strict-origin, strict-origin-when-cross-origin, unsafe-url
mode: "cors",
cache: "no-cache",
credentials: "same-origin",
redirect: "follow",
referrerPolicy: "no-referrer",
body: formData,
});
const jsonResponse = await response.json();
@ -898,7 +908,7 @@ export class ServerManager extends PsychObject
* @protected
* @param {Set} resources - a set of names of previously registered resources
*/
_downloadResources(resources)
async _downloadResources(resources)
{
const response = {
origin: "ServerManager._downloadResources",


@ -30,7 +30,7 @@ export class ExperimentHandler extends PsychObject
/**
* Getter for experimentEnded.
*
* @name module:core.Window#experimentEnded
* @name module:data.ExperimentHandler#experimentEnded
* @function
* @public
*/
@ -42,7 +42,7 @@ export class ExperimentHandler extends PsychObject
/**
* Setter for experimentEnded.
*
* @name module:core.Window#experimentEnded
* @name module:data.ExperimentHandler#experimentEnded
* @function
* @public
*/

src/data/QuestHandler.js (new file, 348 lines)

@ -0,0 +1,348 @@
/** @module data */
/**
* Quest Trial Handler
*
* @author Alain Pitiot & Thomas Pronk
* @version 2021.2.0
* @copyright (c) 2017-2020 Ilixa Ltd. (http://ilixa.com) (c) 2020-2021 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*/
import {TrialHandler} from "./TrialHandler.js";
/**
* <p>A Trial Handler that implements the Quest algorithm for quick measurement of
psychophysical thresholds. QuestHandler relies on the [jsQuest]{@link https://github.com/kurokida/jsQUEST} library, a port of Prof Dennis Pelli's QUEST algorithm by [Daiichiro Kuroki]{@link https://github.com/kurokida}.</p>
*
* @class module:data.QuestHandler
* @extends TrialHandler
* @param {Object} options
* @param {module:core.PsychoJS} options.psychoJS - the PsychoJS instance
* @param {string} options.varName - the name of the variable / intensity / contrast / threshold manipulated by QUEST
* @param {number} options.startVal - initial guess for the threshold
* @param {number} options.startValSd - standard deviation of the initial guess
* @param {number} options.minVal - minimum value for the threshold
* @param {number} options.maxVal - maximum value for the threshold
* @param {number} [options.pThreshold=0.82] - threshold criterion expressed as probability of getting a correct response
* @param {number} options.nTrials - maximum number of trials
* @param {number} options.stopInterval - minimum [5%, 95%] confidence interval required for the loop to stop
* @param {module:data.QuestHandler.Method} options.method - the QUEST method
* @param {number} [options.beta=3.5] - steepness of the QUEST psychometric function
* @param {number} [options.delta=0.01] - fraction of trials with blind responses
* @param {number} [options.gamma=0.5] - fraction of trials that would generate a correct response when the threshold is infinitely small
* @param {number} [options.grain=0.01] - quantization of the internal table
* @param {string} options.name - name of the handler
* @param {boolean} [options.autoLog= false] - whether or not to log
*/
export class QuestHandler extends TrialHandler
{
/**
* @constructor
* @public
*/
constructor({
psychoJS,
varName,
startVal,
startValSd,
minVal,
maxVal,
pThreshold,
nTrials,
stopInterval,
method,
beta,
delta,
gamma,
grain,
name,
autoLog
} = {})
{
super({
psychoJS,
name,
autoLog,
method: TrialHandler.Method.SEQUENTIAL,
trialList: Array(nTrials),
nReps: 1
});
this._addAttribute('varName', varName);
this._addAttribute('startVal', startVal);
this._addAttribute('minVal', minVal, Number.MIN_VALUE);
this._addAttribute('maxVal', maxVal, Number.MAX_VALUE);
this._addAttribute('startValSd', startValSd);
this._addAttribute('pThreshold', pThreshold, 0.82);
this._addAttribute('nTrials', nTrials);
this._addAttribute('stopInterval', stopInterval, Number.MIN_VALUE);
this._addAttribute('beta', beta, 3.5);
this._addAttribute('delta', delta, 0.01);
this._addAttribute('gamma', gamma, 0.5);
this._addAttribute('grain', grain, 0.01);
this._addAttribute('method', method, QuestHandler.Method.QUANTILE);
// setup jsQuest:
this._setupJsQuest();
}
/**
* Add a response and update the PDF.
*
* @name module:data.QuestHandler#addResponse
* @function
* @public
* @param{number} response - the response to the trial, must be either 0 (incorrect or
* non-detected) or 1 (correct or detected).
*/
addResponse(response)
{
// check that response is either 0 or 1:
if (response !== 0 && response !== 1)
{
throw {
origin: 'QuestHandler.addResponse',
context: 'when adding a trial response',
error: `the response must be either 0 or 1, got: ${JSON.stringify(response)}`
};
}
// update the QUEST pdf:
this._jsQuest = jsQUEST.QuestUpdate(this._jsQuest, this._questValue, response);
if (!this._finished)
{
// estimate the next value of the QUEST variable (and update the trial list and snapshots):
this._estimateQuestValue();
}
}
/**
* Simulate a response.
*
* @name module:data.QuestHandler#simulate
* @function
* @public
* @param{number} trueValue - the true, known value of the threshold / contrast / intensity
*/
simulate(trueValue)
{
const response = jsQUEST.QuestSimulate(this._jsQuest, this._questValue, trueValue);
// restrict to limits:
this._questValue = Math.max(this._minVal, Math.min(this._maxVal, this._questValue));
this._psychoJS.logger.debug(`simulated response: ${response}`);
return response;
}
/**
* Get the mean of the Quest posterior PDF.
*
* @name module:data.QuestHandler#mean
* @function
* @public
* @returns {number} the mean
*/
mean()
{
return jsQUEST.QuestMean(this._jsQuest);
}
/**
* Get the standard deviation of the Quest posterior PDF.
*
* @name module:data.QuestHandler#sd
* @function
* @public
* @returns {number} the standard deviation
*/
sd()
{
return jsQUEST.QuestSd(this._jsQuest);
}
/**
* Get the mode of the Quest posterior PDF.
*
* @name module:data.QuestHandler#mode
* @function
* @public
* @returns {number} the mode
*/
mode()
{
const [mode, pdf] = jsQUEST.QuestMode(this._jsQuest);
return mode;
}
/**
* Get the quantile of the Quest posterior PDF.
*
* @name module:data.QuestHandler#quantile
* @function
* @public
* @param{number} quantileOrder the quantile order
* @returns {number} the quantile
*/
quantile(quantileOrder)
{
return jsQUEST.QuestQuantile(this._jsQuest, quantileOrder);
}
/**
* Get an estimate of the 5%-95% confidence interval (CI).
*
* @name module:data.QuestHandler#confInterval
* @function
* @public
* @param{boolean} [getDifference=false] if true, return the width of the CI instead of the CI
*/
confInterval(getDifference = false)
{
const CI = [
jsQUEST.QuestQuantile(this._jsQuest, 0.05),
jsQUEST.QuestQuantile(this._jsQuest, 0.95)
];
if (getDifference)
{
return Math.abs(CI[0] - CI[1]);
}
else
{
return CI;
}
}
/**
* Setup the JS Quest object.
*
* @name module:data.QuestHandler#_setupJsQuest
* @function
* @protected
*/
_setupJsQuest()
{
this._jsQuest = jsQUEST.QuestCreate(
this._startVal,
this._startValSd,
this._pThreshold,
this._beta,
this._delta,
this._gamma,
this._grain);
this._estimateQuestValue();
}
/**
* Estimate the next value of the QUEST variable, based on the current value
* and on the selected QUEST method.
*
* @name module:data.QuestHandler#_estimateQuestValue
* @function
* @protected
*/
_estimateQuestValue()
{
// estimate the value based on the chosen QUEST method:
if (this._method === QuestHandler.Method.QUANTILE)
{
this._questValue = jsQUEST.QuestQuantile(this._jsQuest);
}
else if (this._method === QuestHandler.Method.MEAN)
{
this._questValue = jsQUEST.QuestMean(this._jsQuest);
}
else if (this._method === QuestHandler.Method.MODE)
{
const [mode, pdf] = jsQUEST.QuestMode(this._jsQuest);
this._questValue = mode;
}
else
{
throw {
origin: 'QuestHandler._estimateQuestValue',
context: 'when estimating the next value of the QUEST variable',
error: `unknown method: ${this._method}, please use: mean, mode, or quantile`
};
}
this._psychoJS.logger.debug(`estimated value for QUEST variable ${this._varName}: ${this._questValue}`);
// check whether we should finish the trial:
if (this.thisN > 0 &&
(this.nRemaining === 0 || this.confInterval(true) < this._stopInterval))
{
this._finished = true;
// update the snapshots associated with the current trial in the trial list:
for (let t = 0; t < this._trialList.length-1; ++t)
{
// the current trial is the last defined one:
if (typeof this._trialList[t+1] === 'undefined')
{
this._snapshots[t].finished = true;
break;
}
}
return;
}
// update the next undefined trial in the trial list, and the associated snapshot:
for (let t = 0; t < this._trialList.length; ++t)
{
if (typeof this._trialList[t] === 'undefined')
{
this._trialList[t] = { [this._varName]: this._questValue };
if (typeof this._snapshots[t] !== 'undefined')
{
this._snapshots[t][this._varName] = this._questValue;
this._snapshots[t].trialAttributes.push(this._varName);
}
break;
}
}
}
}
/**
* QuestHandler method
*
* @enum {Symbol}
* @readonly
* @public
*/
QuestHandler.Method = {
/**
* Quantile threshold estimate.
*/
QUANTILE: Symbol.for('QUANTILE'),
/**
* Mean threshold estimate.
*/
MEAN: Symbol.for('MEAN'),
/**
* Mode threshold estimate.
*/
MODE: Symbol.for('MODE')
};
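A hedged usage sketch of the new handler; the experiment wiring (the psychoJS instance, the stimulus presentation, and the participantWasCorrect flag) is illustrative and not part of this commit:
// sketch: adaptively estimate a contrast threshold with QUEST
const quest = new QuestHandler({
  psychoJS,
  varName: "contrast",
  startVal: 0.5,        // initial threshold guess
  startValSd: 0.25,     // sd of that guess
  pThreshold: 0.82,
  nTrials: 40,
  stopInterval: 0.05,   // stop early once the 5%-95% CI is narrower than this
  method: QuestHandler.Method.QUANTILE,
  name: "questLoop",
});
// each undefined entry of the trial list is filled with { contrast: <suggested value> };
// after presenting the stimulus at that contrast, feed the 0/1 response back:
quest.addResponse(participantWasCorrect ? 1 : 0);
// quest.mean(), quest.sd() and quest.quantile(0.5) summarise the posterior so far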


@ -1,3 +1,4 @@
export * from "./ExperimentHandler.js";
export * from "./TrialHandler.js";
// export * from './Shelf.js';
export * from './ExperimentHandler.js';
export * from './TrialHandler.js';
export * from './QuestHandler';
//export * from './Shelf.js';


@ -157,7 +157,7 @@ export class AudioClip extends PsychObject
}
// upload the data:
return this._psychoJS.serverManager.uploadAudio(this._data, filename);
return this._psychoJS.serverManager.uploadAudioVideo(this._data, filename);
}
/**


@ -38,7 +38,7 @@ export class Microphone extends PsychObject
this._addAttribute("format", format, "audio/webm;codecs=opus", this._onChange);
this._addAttribute("sampleRateHz", sampleRateHz, 48000, this._onChange);
this._addAttribute("clock", clock, new Clock());
this._addAttribute("autoLog", false, autoLog);
this._addAttribute("autoLog", autoLog, autoLog);
this._addAttribute("status", PsychoJS.Status.NOT_STARTED);
// prepare the recording:
@ -322,7 +322,7 @@ export class Microphone extends PsychObject
// upload the blob:
const audioBlob = new Blob(this._audioBuffer);
return this._psychoJS.serverManager.uploadAudio(audioBlob, tag);
return this._psychoJS.serverManager.uploadAudioVideo(audioBlob, tag);
}
/**


@ -1406,5 +1406,10 @@ export function extensionFromMimeType(mimeType)
return ".wav";
}
return ".dat";
if (mimeType.indexOf("video/webm") === 0)
{
return ".webm";
}
return '.dat';
}
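For example, with the new branch the webm format used by Camera now maps to a sensible extension (the calls below are illustrative, using the same util module Camera imports):
util.extensionFromMimeType("video/webm;codecs=vp9"); // ".webm" (new branch)
util.extensionFromMimeType("application/pdf");       // ".dat"  (fallback)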

src/visual/Camera.js (new file, 580 lines)

@ -0,0 +1,580 @@
/**
* Manager handling the recording of video signal.
*
* @author Alain Pitiot
* @version 2021.2.0
* @copyright (c) 2021 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*/
import {Clock} from "../util/Clock.js";
import {PsychObject} from "../util/PsychObject.js";
import {PsychoJS} from "../core/PsychoJS.js";
import * as util from "../util/Util.js";
import {ExperimentHandler} from "../data/ExperimentHandler.js";
// import {VideoClip} from "./VideoClip";
/**
* <p>This manager handles the recording of video signal.</p>
*
* @name module:visual.Camera
* @class
* @param {Object} options
* @param {module:core.Window} options.win - the associated Window
* @param {string} [options.format='video/webm;codecs=vp9'] - the video format
* @param {Clock} [options.clock= undefined] - an optional clock
* @param {boolean} [options.autoLog= false] - whether or not to log
*
* @todo add video constraints as parameter
*/
export class Camera extends PsychObject
{
/**
* @constructor
* @public
*/
constructor({win, name, format, clock, autoLog} = {})
{
super(win._psychoJS);
this._addAttribute("win", win, undefined);
this._addAttribute("name", name, "camera");
this._addAttribute("format", format, "video/webm;codecs=vp9", this._onChange);
this._addAttribute("clock", clock, new Clock());
this._addAttribute("autoLog", autoLog, false);
this._addAttribute("status", PsychoJS.Status.NOT_STARTED);
// prepare the recording:
this._prepareRecording();
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
}
}
/**
* Get the underlying video stream.
*
* @name module:visual.Camera#getStream
* @function
* @public
* @returns {MediaStream} the video stream
*/
getStream()
{
return this._stream;
}
/**
* Get a video element pointing to the Camera stream.
*
* @name module:visual.Camera#getVideo
* @function
* @public
* @returns {HTMLVideoElement} a video element
*/
getVideo()
{
// note: we need to return a new video each time, since the camera feed can be used by
// several stimuli and one of them might pause the feed
// create a video with the appropriate size:
const video = document.createElement("video");
this._videos.push(video);
video.width = this._streamSettings.width;
video.height = this._streamSettings.height;
video.autoplay = true;
// prevent clicking:
video.onclick = (mouseEvent) =>
{
mouseEvent.preventDefault();
return false;
};
// use the camera stream as source for the video:
video.srcObject = this._stream;
return video;
}
/**
* Submit a request to start the recording.
*
* @name module:visual.Camera#start
* @function
* @public
* @return {Promise} promise fulfilled when the recording actually started
*/
start()
{
// if the camera is currently paused, a call to start resumes it
// with a new recording:
if (this._status === PsychoJS.Status.PAUSED)
{
return this.resume({clear: true});
}
if (this._status !== PsychoJS.Status.STARTED)
{
this._psychoJS.logger.debug("request to start video recording");
try
{
if (!this._recorder)
{
throw "the recorder has not been created yet, possibly because the participant has not given the authorisation to record video";
}
this._recorder.start();
// return a promise, which will be satisfied when the recording actually starts, which
// is also when the reset of the clock and the change of status takes place
const self = this;
return new Promise((resolve, reject) =>
{
self._startCallback = resolve;
self._errorCallback = reject;
});
}
catch (error)
{
this._psychoJS.logger.error("unable to start the video recording: " + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR;
throw {
origin: "Camera.start",
context: "when starting the video recording for camera: " + this._name,
error
};
}
}
}
/**
* Submit a request to stop the recording.
*
* @name module:visual.Camera#stop
* @function
* @public
* @param {Object} options
* @param {string} [options.filename] the name of the file to which the video recording
* will be saved
* @return {Promise} promise fulfilled when the recording actually stopped, and the recorded
* data was made available
*/
stop({filename} = {})
{
if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED)
{
this._psychoJS.logger.debug("request to stop video recording");
// stop the videos:
for (const video of this._videos)
{
video.pause();
}
this._stopOptions = {
filename
};
// note: calling the stop method of the MediaRecorder will first raise
// a dataavailable event, and then a stop event
// ref: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/stop
this._recorder.stop();
// return a promise, which will be satisfied when the recording actually stops and the data
// has been made available:
const self = this;
return new Promise((resolve, reject) =>
{
self._stopCallback = resolve;
self._errorCallback = reject;
});
}
}
/**
* Submit a request to pause the recording.
*
* @name module:visual.Camera#pause
* @function
* @public
* @return {Promise} promise fulfilled when the recording actually paused
*/
pause()
{
if (this._status === PsychoJS.Status.STARTED)
{
this._psychoJS.logger.debug("request to pause video recording");
try
{
if (!this._recorder)
{
throw "the recorder has not been created yet, possibly because the participant has not given the authorisation to record video";
}
// note: calling the pause method of the MediaRecorder raises a pause event
this._recorder.pause();
// return a promise, which will be satisfied when the recording actually pauses:
const self = this;
return new Promise((resolve, reject) =>
{
self._pauseCallback = resolve;
self._errorCallback = reject;
});
}
catch (error)
{
self._psychoJS.logger.error("unable to pause the video recording: " + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR;
throw {
origin: "Camera.pause",
context: "when pausing the video recording for camera: " + this._name,
error
};
}
}
}
/**
* Submit a request to resume the recording.
*
* <p>resume has no effect if the recording was not previously paused.</p>
*
* @name module:visual.Camera#resume
* @function
* @param {Object} options
* @param {boolean} [options.clear= false] whether or not to empty the video buffer before
* resuming the recording
* @return {Promise} promise fulfilled when the recording actually resumed
*/
resume({clear = false } = {})
{
if (this._status === PsychoJS.Status.PAUSED)
{
this._psychoJS.logger.debug("request to resume video recording");
try
{
if (!this._recorder)
{
throw "the recorder has not been created yet, possibly because the participant has not given the authorisation to record video";
}
// empty the video buffer if needed:
if (clear)
{
this._audioBuffer = [];
this._videoBuffer.length = 0;
}
this._recorder.resume();
// return a promise, which will be satisfied when the recording actually resumes:
const self = this;
return new Promise((resolve, reject) =>
{
self._resumeCallback = resolve;
self._errorCallback = reject;
});
}
catch (error)
{
self._psychoJS.logger.error("unable to resume the video recording: " + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR;
throw {
origin: "Camera.resume",
context: "when resuming the video recording for camera: " + this._name,
error
};
}
}
}
/**
* Submit a request to flush the recording.
*
* @name module:visual.Camera#flush
* @function
* @public
* @return {Promise} promise fulfilled when the data has actually been made available
*/
flush()
{
if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED)
{
this._psychoJS.logger.debug("request to flush video recording");
// note: calling the requestData method of the MediaRecorder will raise a
// dataavailable event
// ref: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/requestData
this._recorder.requestData();
// return a promise, which will be satisfied when the data has been made available:
const self = this;
return new Promise((resolve, reject) =>
{
self._dataAvailableCallback = resolve;
self._errorCallback = reject;
});
}
}
/**
* Offer the video recording to the participant as a file to download.
*
* @name module:visual.Camera#download
* @function
* @public
* @param {string} filename - the filename of the video file
*/
download(filename = "video.webm")
{
const videoBlob = new Blob(this._videoBuffer);
const anchor = document.createElement("a");
anchor.href = window.URL.createObjectURL(videoBlob);
anchor.download = filename;
document.body.appendChild(anchor);
anchor.click();
document.body.removeChild(anchor);
}
/**
* Upload the video recording to the pavlovia server.
*
* @name module:visual.Camera#upload
* @function
* @public
* @param {string} tag an optional tag for the video file
*/
async upload({tag} = {})
{
// default tag: the name of this Camera object
if (typeof tag === "undefined")
{
tag = this._name;
}
// add a format-dependent video extension to the tag:
tag += util.extensionFromMimeType(this._format);
// if the video recording cannot be uploaded, e.g. the experiment is running locally, or
// if it is piloting mode, then we offer the video recording as a file for download:
if (this._psychoJS.getEnvironment() !== ExperimentHandler.Environment.SERVER ||
this._psychoJS.config.experiment.status !== "RUNNING" ||
this._psychoJS._serverMsg.has("__pilotToken"))
{
return this.download(tag);
}
// upload the blob:
const videoBlob = new Blob(this._videoBuffer);
return this._psychoJS.serverManager.uploadAudioVideo(videoBlob, tag);
}
/**
* Get the current video recording as a VideoClip in the given format.
*
* @name module:visual.Camera#getRecording
* @function
* @public
* @param {string} tag an optional tag for the video clip
* @param {boolean} [flush=false] whether or not to first flush the recording
*/
async getRecording({tag, flush = false} = {})
{
// default tag: the name of this Camera object
if (typeof tag === "undefined")
{
tag = this._name;
}
// TODO
}
/**
* Callback for changes to the recording settings.
*
* <p>Changes to the settings require the recording to stop and be re-started.</p>
*
* @name module:visual.Camera#_onChange
* @function
* @protected
*/
_onChange()
{
if (this._status === PsychoJS.Status.STARTED)
{
this.stop();
}
this._prepareRecording();
this.start();
}
/**
* Prepare the recording.
*
* @name module:visual.Camera#_prepareRecording
* @function
* @protected
*/
async _prepareRecording()
{
// empty the video buffer:
this._videoBuffer = [];
this._recorder = null;
this._videos = [];
// create a new stream with ideal dimensions:
// TODO use size constraints
this._stream = await navigator.mediaDevices.getUserMedia({
video: true
});
// check the actual width and height:
this._streamSettings = this._stream.getVideoTracks()[0].getSettings();
this._psychoJS.logger.debug(`camera stream settings: ${JSON.stringify(this._streamSettings)}`);
// check that the specified format is supported, use default if it is not:
let options;
if (typeof this._format === "string" && MediaRecorder.isTypeSupported(this._format))
{
options = { mimeType: this._format }; // MediaRecorder expects the mimeType option
}
else
{
this._psychoJS.logger.warn(`The specified video format, ${this._format}, is not supported by this browser, using the default format instead`);
}
// create a video recorder:
this._recorder = new MediaRecorder(this._stream, options);
// setup the callbacks:
const self = this;
// called upon Camera.start(), at which point the video data starts being gathered
// into a blob:
this._recorder.onstart = () =>
{
self._videoBuffer = [];
self._videoBuffer.length = 0;
self._clock.reset();
self._status = PsychoJS.Status.STARTED;
self._psychoJS.logger.debug("video recording started");
// resolve the Camera.start promise:
if (self._startCallback)
{
self._startCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called upon Camera.pause():
this._recorder.onpause = () =>
{
self._status = PsychoJS.Status.PAUSED;
self._psychoJS.logger.debug("video recording paused");
// resolve the Camera.pause promise:
if (self._pauseCallback)
{
self._pauseCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called upon Camera.resume():
this._recorder.onresume = () =>
{
self._status = PsychoJS.Status.STARTED;
self._psychoJS.logger.debug("video recording resumed");
// resolve the Camera.resume promise:
if (self._resumeCallback)
{
self._resumeCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called when video data is available, typically upon Camera.stop() or Camera.flush():
this._recorder.ondataavailable = (event) =>
{
const data = event.data;
// add data to the buffer:
self._videoBuffer.push(data);
self._psychoJS.logger.debug("video data added to the buffer");
// resolve the data available promise, if needed:
if (self._dataAvailableCallback)
{
self._dataAvailableCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called upon Camera.stop(), after data has been made available:
this._recorder.onstop = () =>
{
self._psychoJS.logger.debug("video recording stopped");
self._status = PsychoJS.Status.NOT_STARTED;
// resolve the Camera.stop promise:
if (self._stopCallback)
{
self._stopCallback(self._psychoJS.monotonicClock.getTime());
}
// treat stop options if there are any:
// download to a file, immediately offered to the participant:
if (typeof self._stopOptions.filename === "string")
{
self.download(self._stopOptions.filename);
}
};
// called upon recording errors:
this._recorder.onerror = (event) =>
{
// TODO
self._psychoJS.logger.error("video recording error: " + JSON.stringify(event));
self._status = PsychoJS.Status.ERROR;
};
}
}
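A hedged sketch of how an experiment might drive the new Camera end to end; the psychoJS/window setup and the tag are illustrative, not part of this commit:
// sketch: record a short clip and upload it (upload() falls back to download()
// when the experiment is not running on the server)
const camera = new Camera({ win: psychoJS.window, name: "webcam" });
// note: the MediaRecorder is created asynchronously in _prepareRecording(),
// after the participant grants camera access
await camera.start();      // resolves once recording has actually started
// ... run the trial ...
await camera.stop();       // resolves once the recorded data is available
await camera.upload({ tag: "trial_1" });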

src/visual/FaceDetector.js (new file, 307 lines)

@ -0,0 +1,307 @@
/**
* Manager handling the detecting of faces in video streams.
*
* @author Alain Pitiot
* @version 2021.2.0
* @copyright (c) 2021 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*/
import {PsychoJS} from "../core/PsychoJS.js";
import * as util from "../util/Util.js";
import { to_pixiPoint } from "../util/Pixi.js";
import {Color} from "../util/Color.js";
import {Camera} from "./Camera.js";
import {VisualStim} from "./VisualStim.js";
import * as PIXI from "pixi.js-legacy";
/**
* <p>This manager handles the detecting of faces in video streams. FaceDetector relies on the
* [Face-API library]{@link https://github.com/justadudewhohacks/face-api.js} developed by
* [Vincent Muehler]{@link https://github.com/justadudewhohacks}</p>
*
* @name module:visual.FaceDetector
* @class
* @param {Object} options
* @param {String} options.name - the name used when logging messages from the detector
* @param {module:core.Window} options.win - the associated Window
* @param {string | HTMLVideoElement | module:visual.Camera} options.input - the name of a
* movie resource, an HTMLVideoElement, or a Camera component
* @param {string} [options.faceApiUrl= 'face-api.js'] - the Url of the face-api library
* @param {string} [options.modelDir= 'models'] - the directory where to find the face detection models
* @param {string} [options.units= "norm"] - the units of the stimulus (e.g. for size, position, vertices)
* @param {Array.<number>} [options.pos= [0, 0]] - the position of the center of the stimulus
* @param {string} [options.units= 'norm'] - the units of the stimulus vertices, size and position
* @param {number} [options.ori= 0.0] - the orientation (in degrees)
* @param {number} [options.size] - the size of the rendered image (the size of the image will be used if size is not specified)
* @param {number} [options.opacity= 1.0] - the opacity
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
*/
export class FaceDetector extends VisualStim
{
/**
* @constructor
* @public
*/
constructor({name, win, input, modelDir, faceApiUrl, units, ori, opacity, pos, size, autoDraw, autoLog} = {})
{
super({name, win, units, ori, opacity, pos, size, autoDraw, autoLog});
// TODO deal with onChange (see MovieStim and Camera)
this._addAttribute("input", input, undefined);
this._addAttribute("faceApiUrl", faceApiUrl, "face-api.js");
this._addAttribute("modelDir", modelDir, "models");
this._addAttribute("autoLog", autoLog, false);
this._addAttribute("status", PsychoJS.Status.NOT_STARTED);
// init face-api:
this._initFaceApi();
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
}
}
/**
* Setter for the input attribute.
*
* @name module:visual.FaceDetector#setInput
* @function
* @public
* @param {string | HTMLVideoElement | module:visual.Camera} input - the name of a
* movie resource or a HTMLVideoElement or a Camera component
* @param {boolean} [log= false] - whether or not to log
*/
setInput(input, log = false)
{
const response = {
origin: "FaceDetector.setInput",
context: "when setting the video of FaceDetector: " + this._name
};
try
{
// input is undefined: that's fine, but we raise a warning in case this is
// a symptom of an actual problem
if (typeof input === "undefined")
{
this.psychoJS.logger.warn("setting the input of FaceDetector: " + this._name + " with argument: undefined.");
this.psychoJS.logger.debug("set the input of FaceDetector: " + this._name + " as: undefined");
}
else
{
// if input is a string, then it should be the name of a resource, which we get:
if (typeof input === "string")
{
// TODO create a movie with that resource, and use the movie as input
}
// if input is an instance of Camera, get a video element from it:
else if (input instanceof Camera)
{
const video = input.getVideo();
// TODO remove previous one if there is one
// document.body.appendChild(video);
input = video;
}
// check that video is now an HTMLVideoElement
if (!(input instanceof HTMLVideoElement))
{
throw input.toString() + " is not a video";
}
this.psychoJS.logger.debug(`set the video of FaceDetector: ${this._name} as: src= ${input.src}, size= ${input.videoWidth}x${input.videoHeight}, duration= ${input.duration}s`);
// ensure we have only one onended listener per HTMLVideoElement, since we can have several
// MovieStim with the same underlying HTMLVideoElement
// https://stackoverflow.com/questions/11455515
if (!input.onended)
{
input.onended = () =>
{
this.status = PsychoJS.Status.FINISHED;
};
}
}
this._setAttribute("input", input, log);
this._needUpdate = true;
this._needPixiUpdate = true;
}
catch (error)
{
throw Object.assign(response, {error});
}
}
/**
* Start detecting faces.
*
* @name module:visual.FaceDetector#start
* @function
* @public
* @param {number} period - the detection period, in ms (e.g. 100 ms for 10Hz)
* @param detectionCallback - the callback triggered when detection results are available
* @param {boolean} [log= false] - whether or not to log
*/
start(period, detectionCallback, log = false)
{
this.status = PsychoJS.Status.STARTED;
if (typeof this._detectionId !== "undefined")
{
clearInterval(this._detectionId);
this._detectionId = undefined;
}
this._detectionId = setInterval(
async () =>
{
this._detections = await faceapi.detectAllFaces(
this._input,
new faceapi.TinyFaceDetectorOptions()
)
.withFaceLandmarks()
.withFaceExpressions();
this._needUpdate = true;
this._needPixiUpdate = true;
detectionCallback(this._detections);
},
period);
}
/**
* Stop detecting faces.
*
* @name module:visual.FaceDetector#stop
* @function
* @public
* @param {boolean} [log= false] - whether or not to log
*/
stop(log = false)
{
this.status = PsychoJS.Status.NOT_STARTED;
if (typeof this._detectionId !== "undefined")
{
clearInterval(this._detectionId);
this._detectionId = undefined;
}
}
/**
* Init the Face-API library.
*
* @name module:visual.FaceDetector#_initFaceApi
* @function
* @protected
*/
async _initFaceApi()
{/*
// load the library:
await this._psychoJS.serverManager.prepareResources([
{
"name": "face-api.js",
"path": this.faceApiUrl,
"download": true
}
]);*/
// load the models:
await faceapi.nets.tinyFaceDetector.loadFromUri(this._modelDir);
await faceapi.nets.faceLandmark68Net.loadFromUri(this._modelDir);
await faceapi.nets.faceRecognitionNet.loadFromUri(this._modelDir);
await faceapi.nets.faceExpressionNet.loadFromUri(this._modelDir);
}
/**
* Update the visual representation of the detected faces, if necessary.
*
* @name module:visual.FaceDetector#_updateIfNeeded
* @function
* @protected
*/
_updateIfNeeded()
{
if (!this._needUpdate)
{
return;
}
this._needUpdate = false;
if (this._needPixiUpdate)
{
this._needPixiUpdate = false;
if (typeof this._pixi !== "undefined")
{
this._pixi.destroy(true);
}
this._pixi = new PIXI.Container();
this._pixi.interactive = true;
this._body = new PIXI.Graphics();
this._body.interactive = true;
this._pixi.addChild(this._body);
const size_px = util.to_px(this.size, this.units, this.win);
if (typeof this._detections !== "undefined")
{
for (const detection of this._detections)
{
const landmarks = detection.landmarks;
const imageWidth = detection.alignedRect.imageWidth;
const imageHeight = detection.alignedRect.imageHeight;
for (const position of landmarks.positions)
{
this._body.beginFill(new Color("red").int, this._opacity);
this._body.drawCircle(
position._x / imageWidth * size_px[0] - size_px[0] / 2,
position._y / imageHeight * size_px[1] - size_px[1] / 2,
2);
this._body.endFill();
}
}
}
}
this._pixi.scale.x = 1;
this._pixi.scale.y = -1;
this._pixi.rotation = this.ori * Math.PI / 180;
this._pixi.position = to_pixiPoint(this.pos, this.units, this.win);
this._pixi.alpha = this._opacity;
}
/**
* Estimate the bounding box.
*
* @name module:visual.FaceDetector#_estimateBoundingBox
* @function
* @override
* @protected
*/
_estimateBoundingBox()
{
// TODO
}
}
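A hedged sketch combining Camera and FaceDetector, assuming the face-api.js script and its models are already served alongside the experiment (names and sizes are illustrative):
// sketch: log face detections from the participant's webcam at roughly 10 Hz
const camera = new Camera({ win: psychoJS.window, name: "webcam" });
const detector = new FaceDetector({
  name: "faces",
  win: psychoJS.window,
  input: camera,       // setInput() calls camera.getVideo() under the hood
  modelDir: "models",
  size: [0.5, 0.5],
});
detector.start(100, (detections) => {
  // detections come from faceapi.detectAllFaces(...).withFaceLandmarks().withFaceExpressions()
  console.log(`detected ${detections.length} face(s)`);
});
// later, e.g. at the end of the routine: detector.stop();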


@ -14,6 +14,8 @@ import { ColorMixin } from "../util/ColorMixin.js";
import { to_pixiPoint } from "../util/Pixi.js";
import * as util from "../util/Util.js";
import { VisualStim } from "./VisualStim.js";
import {Camera} from "./Camera.js";
/**
* Movie Stimulus.
@ -24,7 +26,8 @@ import { VisualStim } from "./VisualStim.js";
* @param {Object} options
* @param {String} options.name - the name used when logging messages from this stimulus
* @param {module:core.Window} options.win - the associated Window
* @param {string | HTMLVideoElement} options.movie - the name of the movie resource or the HTMLVideoElement corresponding to the movie
* @param {string | HTMLVideoElement | module:visual.Camera} movie - the name of a
* movie resource or of a HTMLVideoElement or of a Camera component
* @param {string} [options.units= "norm"] - the units of the stimulus (e.g. for size, position, vertices)
* @param {Array.<number>} [options.pos= [0, 0]] - the position of the center of the stimulus
* @param {string} [options.units= 'norm'] - the units of the stimulus vertices, size and position
@ -134,8 +137,8 @@ export class MovieStim extends VisualStim
*
* @name module:visual.MovieStim#setMovie
* @public
* @param {string | HTMLVideoElement} movie - the name of the movie resource or a
* HTMLVideoElement
* @param {string | HTMLVideoElement | module:visual.Camera} movie - the name of a
* movie resource or of a HTMLVideoElement or of a Camera component
* @param {boolean} [log= false] - whether of not to log
*/
setMovie(movie, log = false)
@ -147,30 +150,42 @@ export class MovieStim extends VisualStim
try
{
// movie is undefined: that's fine but we raise a warning in case this is a symptom of an actual problem
if (typeof movie === "undefined")
// movie is undefined: that's fine but we raise a warning in case this is
// a symptom of an actual problem
if (typeof movie === 'undefined')
{
this.psychoJS.logger.warn("setting the movie of MovieStim: " + this._name + " with argument: undefined.");
this.psychoJS.logger.debug("set the movie of MovieStim: " + this._name + " as: undefined");
this.psychoJS.logger.warn(
`setting the movie of MovieStim: ${this._name} with argument: undefined.`);
this.psychoJS.logger.debug(`set the movie of MovieStim: ${this._name} as: undefined`);
}
else
{
// movie is a string: it should be the name of a resource, which we load
// if movie is a string, then it should be the name of a resource, which we get:
if (typeof movie === "string")
{
movie = this.psychoJS.serverManager.getResource(movie);
}
// movie should now be an actual HTMLVideoElement: we raise an error if it is not
// if movie is an instance of camera, get a video element from it:
else if (movie instanceof Camera)
{
const video = movie.getVideo();
// TODO remove previous one if there is one
// document.body.appendChild(video);
movie = video;
}
// check that movie is now an HTMLVideoElement
if (!(movie instanceof HTMLVideoElement))
{
throw "the argument: " + movie.toString() + ' is not a video" }';
throw movie.toString() + " is not a video";
}
this.psychoJS.logger.debug(`set the movie of MovieStim: ${this._name} as: src= ${movie.src}, size= ${movie.videoWidth}x${movie.videoHeight}, duration= ${movie.duration}s`);
// ensure we have only one onended listener per HTMLVideoElement (we can have several
// MovieStim with the same underlying HTMLVideoElement)
// ensure we have only one onended listener per HTMLVideoElement, since we can have several
// MovieStim with the same underlying HTMLVideoElement
// https://stackoverflow.com/questions/11455515
if (!movie.onended)
{

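With this change a Camera can be passed wherever a movie was previously expected; a hedged sketch, where the window setup and size are illustrative and autoDraw is assumed to be the usual stimulus option:
// sketch: show participants their own webcam feed through a MovieStim
const camera = new Camera({ win: psychoJS.window });
const selfView = new MovieStim({
  win: psychoJS.window,
  name: "selfView",
  movie: camera,       // setMovie() now calls camera.getVideo() for Camera inputs
  size: [0.5, 0.5],
  autoDraw: true,
});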

@ -10,3 +10,5 @@ export * from "./TextBox.js";
export * from "./TextInput.js";
export * from "./TextStim.js";
export * from "./VisualStim.js";
export * from "./Camera.js";
export * from "./FaceDetector.js";