
added FaceDetector with linking to MovieStim, small fixes here and there

Alain Pitiot 2021-07-12 08:54:13 +02:00
parent 7e63f88aa4
commit 86cbc73c94
8 changed files with 938 additions and 19 deletions

package.json

@ -1,6 +1,6 @@
{
"name": "psychojs",
"version": "2021.2.0",
"version": "2021.2.x",
"private": true,
"description": "Helps run in-browser neuroscience, psychology, and psychophysics experiments",
"license": "MIT",

src/core/PsychoJS.js

@ -179,7 +179,7 @@ export class PsychoJS
}
this.logger.info('[PsychoJS] Initialised.');
this.logger.info('[PsychoJS] @version 2021.2.0');
this.logger.info('[PsychoJS] @version 2021.2.x');
// Hide #root::after
jQuery('#root').addClass('is-ready');

src/core/ServerManager.js

@ -512,8 +512,21 @@ export class ServerManager extends PsychObject
}
}
// download those registered resources for which download = true:
/*await*/ this._downloadResources(resourcesToDownload);
// download those registered resources for which download = true
// note: we return a Promise that will be resolved when all the resources are downloaded
return new Promise((resolve, reject) =>
{
const uuid = this.on(ServerManager.Event.RESOURCE, (signal) =>
{
if (signal.message === ServerManager.Event.DOWNLOAD_COMPLETED)
{
this.off(ServerManager.Event.RESOURCE, uuid);
resolve();
}
});
this._downloadResources(resourcesToDownload);
});
}
catch (error)
{
@ -915,7 +928,7 @@ export class ServerManager extends PsychObject
* @protected
* @param {Set} resources - a set of names of previously registered resources
*/
_downloadResources(resources)
async _downloadResources(resources)
{
const response = {
origin: 'ServerManager._downloadResources',
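
A minimal usage sketch of the new prepareResources behaviour (not part of the diff), assuming an async calling context and an already configured psychoJS instance; the resource name and path below are hypothetical:

// prepareResources now returns a Promise that resolves once the
// ServerManager.Event.DOWNLOAD_COMPLETED signal has been received:
await psychoJS.serverManager.prepareResources([
	{'name': 'movie.mp4', 'path': 'resources/movie.mp4', 'download': true}
]);
// every resource registered with download = true is available at this point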

src/sound/Microphone.js

@ -39,7 +39,7 @@ export class Microphone extends PsychObject
this._addAttribute('format', format, 'audio/webm;codecs=opus', this._onChange);
this._addAttribute('sampleRateHz', sampleRateHz, 48000, this._onChange);
this._addAttribute('clock', clock, new Clock());
this._addAttribute('autoLog', false, autoLog);
this._addAttribute('autoLog', autoLog, false);
this._addAttribute('status', PsychoJS.Status.NOT_STARTED);
// prepare the recording:

src/visual/Camera.js (new file, 585 lines)

@ -0,0 +1,585 @@
/**
* Manager handling the recording of video signal.
*
* @author Alain Pitiot
* @version 2021.2.0
* @copyright (c) 2021 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*/
import {Clock} from "../util/Clock";
import {PsychObject} from "../util/PsychObject";
import {PsychoJS} from "../core/PsychoJS";
import * as util from '../util/Util';
import {ExperimentHandler} from "../data/ExperimentHandler";
// import {VideoClip} from "./VideoClip";
/**
* <p>This manager handles the recording of video signal.</p>
*
* @name module:visual.Camera
* @class
* @param {Object} options
* @param {module:core.Window} options.win - the associated Window
* @param {String} [options.name= 'camera'] - the name used when logging messages from the camera
* @param {string} [options.format='video/webm;codecs=vp9'] - the video format
* @param {Clock} [options.clock= undefined] - an optional clock
* @param {boolean} [options.autoLog= false] - whether or not to log
*/
export class Camera extends PsychObject
{
constructor({win, name, format, clock, autoLog} = {})
{
super(win._psychoJS);
this._addAttribute('win', win, undefined);
this._addAttribute('name', name, 'camera');
this._addAttribute('format', format, 'video/webm;codecs=vp9', this._onChange);
this._addAttribute('clock', clock, new Clock());
this._addAttribute('autoLog', autoLog, false);
this._addAttribute('status', PsychoJS.Status.NOT_STARTED);
// prepare the recording:
this._prepareRecording();
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
}
}
/**
* Get the underlying video stream.
*
* @name module:visual.Camera#getStream
* @public
* @returns {MediaStream} the video stream
*/
getStream()
{
return this._stream;
}
/**
* Get a video element pointing to the Camera stream.
*
* @name module:visual.Camera#getVideo
* @public
* @returns {HTMLVideoElement} a video element
*/
getVideo()
{
// note: we need to return a new video each time, since the camera feed can be used by
// several stimuli and one of them might pause the feed
// create a video with the appropriate size:
const video = document.createElement('video');
this._videos.push(video);
video.width = this._streamSettings.width;
video.height = this._streamSettings.height;
video.autoplay = true;
// prevent clicking:
video.onclick = (mouseEvent) =>
{
mouseEvent.preventDefault();
return false;
};
// use the camera stream as source for the video:
video.srcObject = this._stream;
return video;
}
/**
* Submit a request to start the recording.
*
* @name module:visual.Camera#start
* @public
* @return {Promise} promise fulfilled when the recording actually started
*/
start()
{
// if the camera is currently paused, a call to start resumes it
// with a new recording:
if (this._status === PsychoJS.Status.PAUSED)
{
return this.resume({clear: true});
}
if (this._status !== PsychoJS.Status.STARTED)
{
this._psychoJS.logger.debug('request to start video recording');
try
{
if (!this._recorder)
{
throw 'the recorder has not been created yet, possibly because the participant has not given the authorisation to record video';
}
this._recorder.start();
// return a promise, which will be satisfied when the recording actually starts,
// which is also when the clock is reset and the status changes
const self = this;
return new Promise((resolve, reject) =>
{
self._startCallback = resolve;
self._errorCallback = reject;
});
}
catch (error)
{
this._psychoJS.logger.error('unable to start the video recording: ' + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR;
throw {
origin: 'Camera.start',
context: 'when starting the video recording for camera: ' + this._name,
error
};
}
}
}
/**
* Submit a request to stop the recording.
*
* @name module:visual.Camera#stop
* @public
* @param {Object} options
* @param {string} [options.filename] the name of the file to which the video recording
* will be saved
* @return {Promise} promise fulfilled when the recording actually stopped, and the recorded
* data was made available
*/
stop({filename} = {})
{
if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED)
{
this._psychoJS.logger.debug('request to stop video recording');
// stop the videos:
for (const video of this._videos)
{
video.pause();
}
this._stopOptions = {
filename
};
// note: calling the stop method of the MediaRecorder will first raise
// a dataavailable event, and then a stop event
// ref: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/stop
this._recorder.stop();
// return a promise, which will be satisfied when the recording actually stops and the data
// has been made available:
const self = this;
return new Promise((resolve, reject) =>
{
self._stopCallback = resolve;
self._errorCallback = reject;
});
}
}
/**
* Submit a request to pause the recording.
*
* @name module:visual.Camera#pause
* @public
* @return {Promise} promise fulfilled when the recording actually paused
*/
pause()
{
if (this._status === PsychoJS.Status.STARTED)
{
this._psychoJS.logger.debug('request to pause video recording');
try
{
if (!this._recorder)
{
throw 'the recorder has not been created yet, possibly because the participant has not given the authorisation to record video';
}
// note: calling the pause method of the MediaRecorder raises a pause event
this._recorder.pause();
// return a promise, which will be satisfied when the recording actually pauses:
const self = this;
return new Promise((resolve, reject) =>
{
self._pauseCallback = resolve;
self._errorCallback = reject;
});
}
catch (error)
{
this._psychoJS.logger.error('unable to pause the video recording: ' + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR;
throw {
origin: 'Camera.pause',
context: 'when pausing the video recording for camera: ' + this._name,
error
};
}
}
}
/**
* Submit a request to resume the recording.
*
* <p>resume has no effect if the recording was not previously paused.</p>
*
* @name module:visual.Camera#resume
* @param {Object} options
* @param {boolean} [options.clear= false] whether or not to empty the video buffer before
* resuming the recording
* @return {Promise} promise fulfilled when the recording actually resumed
*/
resume({clear = false } = {})
{
if (this._status === PsychoJS.Status.PAUSED)
{
this._psychoJS.logger.debug('request to resume video recording');
try
{
if (!this._recorder)
{
throw 'the recorder has not been created yet, possibly because the participant has not given the authorisation to record video';
}
// empty the video buffer if needed:
if (clear)
{
this._videoBuffer.length = 0;
}
this._recorder.resume();
// return a promise, which will be satisfied when the recording actually resumes:
const self = this;
return new Promise((resolve, reject) =>
{
self._resumeCallback = resolve;
self._errorCallback = reject;
});
}
catch (error)
{
this._psychoJS.logger.error('unable to resume the video recording: ' + JSON.stringify(error));
this._status = PsychoJS.Status.ERROR;
throw {
origin: 'Camera.resume',
context: 'when resuming the video recording for camera: ' + this._name,
error
};
}
}
}
/**
* Submit a request to flush the recording.
*
* @name module:visual.Camera#flush
* @public
* @return {Promise} promise fulfilled when the data has actually been made available
*/
flush()
{
if (this._status === PsychoJS.Status.STARTED || this._status === PsychoJS.Status.PAUSED)
{
this._psychoJS.logger.debug('request to flush video recording');
// note: calling the requestData method of the MediaRecorder will raise a
// dataavailable event
// ref: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/requestData
this._recorder.requestData();
// return a promise, which will be satisfied when the data has been made available:
const self = this;
return new Promise((resolve, reject) =>
{
self._dataAvailableCallback = resolve;
self._errorCallback = reject;
});
}
}
/**
* Offer the video recording to the participant as a file to download.
*
* @name module:visual.Camera#download
* @function
* @public
* @param {string} filename the filename
*/
download(filename = 'video.webm')
{
const videoBlob = new Blob(this._videoBuffer);
const anchor = document.createElement('a');
anchor.href = window.URL.createObjectURL(videoBlob);
anchor.download = filename;
document.body.appendChild(anchor);
anchor.click();
document.body.removeChild(anchor);
}
/**
* Upload the video recording to the pavlovia server.
*
* @name module:visual.Camera#upload
* @function
* @public
* @param {string} tag - an optional tag for the video file
*/
async upload({tag} = {})
{
// default tag: the name of this Camera object
if (typeof tag === 'undefined')
{
tag = this._name;
}
// add a format-dependent video extension to the tag:
tag += util.extensionFromMimeType(this._format);
// if the video recording cannot be uploaded, e.g. the experiment is running locally, or
// if it is in piloting mode, then we offer the video recording as a file for download:
if (this._psychoJS.getEnvironment() !== ExperimentHandler.Environment.SERVER ||
this._psychoJS.config.experiment.status !== 'RUNNING' ||
this._psychoJS._serverMsg.has('__pilotToken'))
{
return this.download(tag);
}
// upload the blob:
// TODO uploadAudio -> uploadAudioVideo
const videoBlob = new Blob(this._videoBuffer);
return this._psychoJS.serverManager.uploadAudio(videoBlob, tag);
}
/**
* Get the current video recording as a VideoClip in the given format.
*
* @name module:visual.Camera#getRecording
* @function
* @public
* @param {string} tag an optional tag for the video clip
* @param {boolean} [flush=false] whether or not to first flush the recording
*/
async getRecording({tag, flush = false} = {})
{
// default tag: the name of this Camera object
if (typeof tag === 'undefined')
{
tag = this._name;
}
// TODO
/*
const videoClip = new VideoClip({
psychoJS: this._psychoJS,
name: tag,
format: this._format,
data: new Blob(this._videoBuffer)
});
return videoClip;
*/
}
/**
* Callback for changes to the recording settings.
*
* <p>Changes to the settings require the recording to stop and be re-started.</p>
*
* @name module:visual.Camera#_onChange
* @function
* @protected
*/
_onChange()
{
if (this._status === PsychoJS.Status.STARTED)
{
this.stop();
}
this._prepareRecording();
this.start();
}
/**
* Prepare the recording.
*
* @name module:visual.Camera#_prepareRecording
* @function
* @protected
*/
async _prepareRecording()
{
// empty the video buffer:
this._videoBuffer = [];
this._recorder = null;
this._videos = [];
// create a new stream with ideal dimensions:
this._stream = await navigator.mediaDevices.getUserMedia({
video: {
width: {
ideal: 1920
},
height: {
ideal: 1080
}
}
});
// check the actual width and height:
this._streamSettings = this._stream.getVideoTracks()[0].getSettings();
this._psychoJS.logger.debug(`camera stream settings: ${JSON.stringify(this._streamSettings)}`);
// check that the specified format is supported, use default if it is not:
let options;
if (typeof this._format === 'string' && MediaRecorder.isTypeSupported(this._format))
{
options = { mimeType: this._format };
}
else
{
this._psychoJS.logger.warn(`The specified video format, ${this._format}, is not supported by this browser, using the default format instead`);
}
// create a video recorder:
this._recorder = new MediaRecorder(this._stream, options);
// setup the callbacks:
const self = this;
// called upon Camera.start(), at which point the video data starts being gathered
// into a blob:
this._recorder.onstart = () =>
{
self._videoBuffer = [];
self._videoBuffer.length = 0;
self._clock.reset();
self._status = PsychoJS.Status.STARTED;
self._psychoJS.logger.debug('video recording started');
// resolve the Camera.start promise:
if (self._startCallback)
{
self._startCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called upon Camera.pause():
this._recorder.onpause = () =>
{
self._status = PsychoJS.Status.PAUSED;
self._psychoJS.logger.debug('video recording paused');
// resolve the Camera.pause promise:
if (self._pauseCallback)
{
self._pauseCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called upon Camera.resume():
this._recorder.onresume = () =>
{
self._status = PsychoJS.Status.STARTED;
self._psychoJS.logger.debug('video recording resumed');
// resolve the Camera.resume promise:
if (self._resumeCallback)
{
self._resumeCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called when video data is available, typically upon Camera.stop() or Camera.flush():
this._recorder.ondataavailable = (event) =>
{
const data = event.data;
// add data to the buffer:
self._videoBuffer.push(data);
self._psychoJS.logger.debug('video data added to the buffer');
// resolve the data available promise, if needed:
if (self._dataAvailableCallback)
{
self._dataAvailableCallback(self._psychoJS.monotonicClock.getTime());
}
};
// called upon Camera.stop(), after data has been made available:
this._recorder.onstop = () =>
{
self._psychoJS.logger.debug('video recording stopped');
self._status = PsychoJS.Status.NOT_STARTED;
// resolve the Camera.stop promise:
if (self._stopCallback)
{
self._stopCallback(self._psychoJS.monotonicClock.getTime());
}
// handle stop options if there are any:
// download to a file, immediately offered to the participant:
if (typeof self._stopOptions.filename === 'string')
{
self.download(self._stopOptions.filename);
}
};
// called upon recording errors:
this._recorder.onerror = (event) =>
{
// TODO
self._psychoJS.logger.error('video recording error: ' + JSON.stringify(event));
self._status = PsychoJS.Status.ERROR;
};
}
}
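
A minimal usage sketch for the new Camera component (not part of the commit), assuming an existing Window instance win, an async experiment routine, and hypothetical names and filenames:

// create the camera; _prepareRecording asks for camera access asynchronously,
// so start() is typically called once the participant has granted permission:
const camera = new Camera({win: win, name: 'webcam'});
// start() resolves when the MediaRecorder has actually started:
await camera.start();
// ... run the trial ...
// stop() resolves once the recorded data has been made available; passing a
// filename also offers the recording to the participant as a download:
await camera.stop({filename: 'trial_01.webm'});
// upload to the pavlovia server when running remotely, otherwise fall back to a local download:
await camera.upload({tag: 'trial_01'});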

src/visual/FaceDetector.js (new file, 306 lines)

@ -0,0 +1,306 @@
/**
* Manager handling the detecting of faces in video streams.
*
* @author Alain Pitiot
* @version 2021.2.0
* @copyright (c) 2021 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*/
import {PsychoJS} from "../core/PsychoJS";
import * as util from '../util/Util';
import {Color} from '../util/Color';
import {Camera} from "./Camera";
import {VisualStim} from "./VisualStim";
import * as PIXI from "pixi.js-legacy";
/**
* <p>This manager handles the detecting of faces in video streams.</p>
* <p>The detection is performed using the Face-API library: https://github.com/justadudewhohacks/face-api.js</p>
*
* @name module:visual.FaceDetector
* @class
* @param {Object} options
* @param {String} options.name - the name used when logging messages from the detector
* @param {module:core.Window} options.win - the associated Window
* @param {string | HTMLVideoElement | module:visual.Camera} options.input - the name of a
* movie resource, an HTMLVideoElement, or a Camera component
* @param {string} [options.faceApiUrl= 'face-api.js'] - the Url of the face-api library
* @param {string} [options.modelDir= 'models'] - the directory where to find the face detection models
* @param {string} [options.units= 'norm'] - the units of the stimulus (e.g. for size, position, vertices)
* @param {Array.<number>} [options.pos= [0, 0]] - the position of the center of the stimulus
* @param {number} [options.ori= 0.0] - the orientation (in degrees)
* @param {number} [options.size] - the size of the rendered image (the size of the image will be used if size is not specified)
* @param {number} [options.opacity= 1.0] - the opacity
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
*/
export class FaceDetector extends VisualStim
{
constructor({name, win, input, modelDir, faceApiUrl, units, ori, opacity, pos, size, autoDraw, autoLog} = {})
{
super({name, win, units, ori, opacity, pos, size, autoDraw, autoLog});
// TODO deal with onChange (see MovieStim and Camera)
this._addAttribute('input', input, undefined);
this._addAttribute('faceApiUrl', faceApiUrl, 'face-api.js');
this._addAttribute('modelDir', modelDir, 'models');
this._addAttribute('autoLog', autoLog, false);
this._addAttribute('status', PsychoJS.Status.NOT_STARTED);
// init face-api:
this._initFaceApi();
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
}
}
/**
* Setter for the input attribute.
*
* @name module:visual.FaceDetector#setInput
* @public
* @param {string | HTMLVideoElement | module:visual.Camera} input - the name of a
* movie resource or a HTMLVideoElement or a Camera component
* @param {boolean} [log= false] - whether or not to log
*/
setInput(input, log = false)
{
const response = {
origin: 'FaceDetector.setInput',
context: 'when setting the video of FaceDetector: ' + this._name
};
try
{
// input is undefined: that's fine but we raise a warning in case this is
// a symptom of an actual problem
if (typeof input === 'undefined')
{
this.psychoJS.logger.warn('setting the input of FaceDetector: ' + this._name + ' with argument: undefined.');
this.psychoJS.logger.debug('set the input of FaceDetector: ' + this._name + ' as: undefined');
}
else
{
// if input is a string, then it should be the name of a movie resource:
if (typeof input === 'string')
{
// TODO create a movie with that resource, and use the movie as input
}
// if input is an instance of Camera, get a video element from it:
else if (input instanceof Camera)
{
const video = input.getVideo();
// TODO remove previous one if there is one
// document.body.appendChild(video);
input = video;
}
// check that video is now an HTMLVideoElement
if (!(input instanceof HTMLVideoElement))
{
throw input.toString() + ' is not a video';
}
this.psychoJS.logger.debug(`set the video of FaceDetector: ${this._name} as: src= ${input.src}, size= ${input.videoWidth}x${input.videoHeight}, duration= ${input.duration}s`);
// ensure we have only one onended listener per HTMLVideoElement, since we can have several
// MovieStim with the same underlying HTMLVideoElement
// https://stackoverflow.com/questions/11455515
if (!input.onended)
{
input.onended = () =>
{
this.status = PsychoJS.Status.FINISHED;
};
}
}
this._setAttribute('input', input, log);
this._needUpdate = true;
this._needPixiUpdate = true;
}
catch (error)
{
throw Object.assign(response, {error});
}
}
/**
* Start detecting faces.
*
* @name module:visual.FaceDetector#start
* @public
* @param {number} period - the detection period, in ms (e.g. 100 ms for 10Hz)
* @param detectionCallback - the callback triggered when detection results are available
* @param {boolean} [log= false] - whether or not to log
*/
start(period, detectionCallback, log = false)
{
this.status = PsychoJS.Status.STARTED;
if (typeof this._detectionId !== 'undefined')
{
clearInterval(this._detectionId);
this._detectionId = undefined;
}
this._detectionId = setInterval(
async () =>
{
this._detections = await faceapi.detectAllFaces(
this._input,
new faceapi.TinyFaceDetectorOptions()
)
.withFaceLandmarks()
.withFaceExpressions();
this._needUpdate = true;
this._needPixiUpdate = true;
detectionCallback(this._detections);
},
period);
}
/**
* Stop detecting faces.
*
* @name module:visual.FaceDetector#stop
* @public
* @param {boolean} [log= false] - whether or not to log
*/
stop(log = false)
{
this.status = PsychoJS.Status.NOT_STARTED;
if (typeof this._detectionId !== 'undefined')
{
clearInterval(this._detectionId);
this._detectionId = undefined;
}
}
/**
* Init the Face-API library.
*
* @name module:visual.FaceDetector#_initFaceApi
* @private
*/
async _initFaceApi()
{/*
// load the library:
await this._psychoJS.serverManager.prepareResources([
{
'name': 'face-api.js',
'path': this.faceApiUrl,
'download': true
}
]);*/
// load the models:
await faceapi.nets.tinyFaceDetector.loadFromUri(this._modelDir);
await faceapi.nets.faceLandmark68Net.loadFromUri(this._modelDir);
await faceapi.nets.faceRecognitionNet.loadFromUri(this._modelDir);
await faceapi.nets.faceExpressionNet.loadFromUri(this._modelDir);
}
/**
* Update the visual representation of the detected faces, if necessary.
*
* @name module:visual.FaceDetector#_updateIfNeeded
* @private
*/
_updateIfNeeded()
{
if (!this._needUpdate)
{
return;
}
this._needUpdate = false;
if (this._needPixiUpdate)
{
this._needPixiUpdate = false;
if (typeof this._pixi !== 'undefined')
{
this._pixi.destroy(true);
}
this._pixi = new PIXI.Container();
this._pixi.interactive = true;
this._body = new PIXI.Graphics();
this._body.interactive = true;
this._pixi.addChild(this._body);
const size_px = util.to_px(this.size, this.units, this.win);
if (typeof this._detections !== 'undefined')
{
for (const detection of this._detections)
{
const landmarks = detection.landmarks;
const imageWidth = detection.alignedRect.imageWidth;
const imageHeight = detection.alignedRect.imageHeight;
for (const position of landmarks.positions)
{
this._body.beginFill(new Color('red').int, this._opacity);
this._body.drawCircle(
position._x / imageWidth * size_px[0] - size_px[0] / 2,
position._y / imageHeight * size_px[1] - size_px[1] / 2,
2);
this._body.endFill();
}
}
}
}
this._pixi.scale.x = 1;
this._pixi.scale.y = -1;
this._pixi.rotation = this.ori * Math.PI / 180;
this._pixi.position = util.to_pixiPoint(this.pos, this.units, this.win);
this._pixi.alpha = this._opacity;
}
/**
* Estimate the bounding box.
*
* @name module:visual.FaceDetector#_estimateBoundingBox
* @function
* @override
* @protected
*/
_estimateBoundingBox()
{
// TODO
/*this._boundingBox = new PIXI.Rectangle(
this._pos[0] + this._getLengthUnits(limits_px[0]),
this._pos[1] + this._getLengthUnits(limits_px[1]),
this._getLengthUnits(limits_px[2] - limits_px[0]),
this._getLengthUnits(limits_px[3] - limits_px[1])
);*/
// TODO take the orientation into account
}
}
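
A minimal usage sketch for FaceDetector (not part of the commit), assuming a Window instance win, the camera object from the sketch above, face-api models served from a 'models' directory, and illustrative option values:

const faceDetector = new FaceDetector({
	win: win,
	name: 'faceDetector',
	input: camera, // a Camera, an HTMLVideoElement, or the name of a movie resource
	modelDir: 'models',
	size: [1, 1],
	autoDraw: true
});
// run the detection every 100 ms (i.e. at 10 Hz) and inspect the face-api results:
faceDetector.start(100, (detections) =>
{
	console.log(`detected ${detections.length} face(s)`);
});
// ... later:
faceDetector.stop();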

src/visual/MovieStim.js

@ -11,9 +11,9 @@
import * as PIXI from 'pixi.js-legacy';
import {VisualStim} from './VisualStim';
import {Color} from '../util/Color';
import {ColorMixin} from '../util/ColorMixin';
import * as util from '../util/Util';
import {PsychoJS} from "../core/PsychoJS";
import {Camera} from "./Camera";
/**
@ -25,7 +25,8 @@ import {PsychoJS} from "../core/PsychoJS";
* @param {Object} options
* @param {String} options.name - the name used when logging messages from this stimulus
* @param {module:core.Window} options.win - the associated Window
* @param {string | HTMLVideoElement} options.movie - the name of the movie resource or the HTMLVideoElement corresponding to the movie
* @param {string | HTMLVideoElement | module:visual.Camera} movie - the name of a
* movie resource or of a HTMLVideoElement or of a Camera component
* @param {string} [options.units= "norm"] - the units of the stimulus (e.g. for size, position, vertices)
* @param {Array.<number>} [options.pos= [0, 0]] - the position of the center of the stimulus
* @param {string} [options.units= 'norm'] - the units of the stimulus vertices, size and position
@ -138,8 +139,8 @@ export class MovieStim extends VisualStim
*
* @name module:visual.MovieStim#setMovie
* @public
* @param {string | HTMLVideoElement} movie - the name of the movie resource or a
* HTMLVideoElement
* @param {string | HTMLVideoElement | module:visual.Camera} movie - the name of a
* movie resource or of a HTMLVideoElement or of a Camera component
* @param {boolean} [log= false] - whether or not to log
*/
setMovie(movie, log = false)
@ -151,30 +152,42 @@ export class MovieStim extends VisualStim
try
{
// movie is undefined: that's fine but we raise a warning in case this is a symptom of an actual problem
// movie is undefined: that's fine but we raise a warning in case this is
// a symptom of an actual problem
if (typeof movie === 'undefined')
{
this.psychoJS.logger.warn('setting the movie of MovieStim: ' + this._name + ' with argument: undefined.');
this.psychoJS.logger.debug('set the movie of MovieStim: ' + this._name + ' as: undefined');
this.psychoJS.logger.warn(
`setting the movie of MovieStim: ${this._name} with argument: undefined.`);
this.psychoJS.logger.debug(`set the movie of MovieStim: ${this._name} as: undefined`);
}
else
{
// movie is a string: it should be the name of a resource, which we load
// if movie is a string, then it should be the name of a resource, which we get:
if (typeof movie === 'string')
{
movie = this.psychoJS.serverManager.getResource(movie);
}
// movie should now be an actual HTMLVideoElement: we raise an error if it is not
// if movie is an instance of camera, get a video element from it:
else if (movie instanceof Camera)
{
const video = movie.getVideo();
// TODO remove previous one if there is one
// document.body.appendChild(video);
movie = video;
}
// check that movie is now an HTMLVideoElement
if (!(movie instanceof HTMLVideoElement))
{
throw 'the argument: ' + movie.toString() + ' is not a video" }';
throw movie.toString() + ' is not a video';
}
this.psychoJS.logger.debug(`set the movie of MovieStim: ${this._name} as: src= ${movie.src}, size= ${movie.videoWidth}x${movie.videoHeight}, duration= ${movie.duration}s`);
// ensure we have only one onended listener per HTMLVideoElement (we can have several
// MovieStim with the same underlying HTMLVideoElement)
// ensure we have only one onended listener per HTMLVideoElement, since we can have several
// MovieStim with the same underlying HTMLVideoElement
// https://stackoverflow.com/questions/11455515
if (!movie.onended)
{
@ -186,7 +199,6 @@ export class MovieStim extends VisualStim
}
this._setAttribute('movie', movie, log);
this._needUpdate = true;
this._needPixiUpdate = true;
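
A minimal sketch of the Camera-to-MovieStim linking enabled by this change (not part of the diff), again assuming a Window instance win and a Camera instance camera:

// a MovieStim can now take a Camera as its movie: setMovie() converts it to an
// HTMLVideoElement via camera.getVideo(), giving a live on-screen preview:
const preview = new MovieStim({
	win: win,
	name: 'cameraPreview',
	movie: camera,
	size: [0.5, 0.5],
	autoDraw: true
});
// the feed can also be swapped in after construction:
preview.setMovie(camera);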

src/visual/index.js

@ -10,3 +10,6 @@ export * from './TextBox.js';
export * from './TextInput.js';
export * from './TextStim.js';
export * from './VisualStim.js';
export * from './Camera.js';
export * from './FaceDetector.js';
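
With these exports added, experiment code can pick up the new components from the visual barrel file, e.g. (import paths are illustrative and relative to the library's src directory):

import {Camera, FaceDetector} from './visual/index.js';
// or directly from the new modules:
import {Camera} from './visual/Camera.js';
import {FaceDetector} from './visual/FaceDetector.js';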