mirror of https://github.com/psychopy/psychojs.git synced 2025-05-10 02:30:53 +00:00

merge with 2024.2.0

lightest 2024-04-03 21:28:36 +01:00
commit 328cb54d53
43 changed files with 5756 additions and 502 deletions

View File

@ -53,7 +53,7 @@ jobs:
- name: Setup node
uses: actions/setup-node@v2
with:
node-version: '14'
node-version: '15'
- name: Cache modules psychojs_testing
uses: actions/cache@v2
env:

View File

@ -45,7 +45,7 @@ jobs:
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: '12'
node-version: '15'
# START: install psychojs_testing
- name: Checkout psychojs_testing

25
.github/workflows/main.yml vendored Normal file
View File

@ -0,0 +1,25 @@
name: Build Branch
on: workflow_dispatch
jobs:
build_all:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
with:
path: app
- uses: actions/setup-node@master
with:
node-version: 19
- name: Install Node dependencies
run: |
cd app
npm install
- name: Build
run: |
cd app
echo "testing GITHUB_REF with details availability: ${GITHUB_REF#refs/heads/}"
npm run build:js && npm run build:css
echo "executing ls out on the directory:"
ls out

3
.gitignore vendored
View File

@ -1,3 +1,6 @@
.vscode/
dist
out
node_modules
src/test_experiment.js
src/test_resources

2226
package-lock.json generated

File diff suppressed because it is too large.

View File

@ -1,6 +1,6 @@
{
"name": "psychojs",
"version": "2022.3.1",
"version": "2024.2.0",
"private": true,
"description": "Helps run in-browser neuroscience, psychology, and psychophysics experiments",
"license": "MIT",
@ -15,6 +15,9 @@
},
"main": "./src/index.js",
"scripts": {
"dev": "vite",
"vitebuild": "vite build",
"preview": "vite preview",
"build": "npm run build:js && npm run build:css && npm run build:docs",
"build:css": "node ./scripts/build.css.cjs",
"build:docs": "jsdoc src -c jsdoc.json & cp jsdoc.css docs/styles/",
@ -31,15 +34,19 @@
"a11y-dialog": "^7.5.0",
"docdash": "^1.2.0",
"esbuild-plugin-glsl": "^1.0.5",
"gifuct-js": "^2.1.2",
"howler": "^2.2.1",
"log4javascript": "github:Ritzlgrmft/log4javascript",
"pako": "^1.0.10",
"pixi-filters": "^5.0.0",
"pixi.js-legacy": "^6.0.4",
"seedrandom": "^3.0.5",
"tone": "^14.7.77",
"xlsx": "^0.17.0"
"xlsx": "^0.18.5"
},
"devDependencies": {
"vite": "^5.1.6",
"vite-plugin-glsl": "^1.2.1",
"csslint": "^1.0.5",
"dprint": "^0.15.3",
"esbuild": "^0.12.1",

View File

@ -49,6 +49,12 @@ export class EventManager
// clock reset when mouse is moved:
moveClock: new Clock(),
};
// touches are stored in both a map and an array: the array for fast iteration, the map for fast lookup when the touch ID is known
this._touchInfo = {
touchesArray: [],
touchesMap: {}
};
}
/**
@ -140,6 +146,19 @@ export class EventManager
return this._mouseInfo;
}
/**
* Returns all the data gathered about touches.
*
* @name module:core.EventManager#getTouchInfo
* @function
* @public
* @return {object} the touch info.
*/
getTouchInfo ()
{
return this._touchInfo;
}
/**
* Clear all events from the event buffer.
*
@ -200,7 +219,6 @@ export class EventManager
self._mouseInfo.buttons.pressed[event.button] = 1;
self._mouseInfo.buttons.times[event.button] = self._psychoJS._monotonicClock.getTime() - self._mouseInfo.buttons.clocks[event.button].getLastResetTime();
self._mouseInfo.pos = [event.offsetX, event.offsetY];
this._psychoJS.experimentLogger.data("Mouse: " + event.button + " button down, pos=(" + self._mouseInfo.pos[0] + "," + self._mouseInfo.pos[1] + ")");
@ -212,10 +230,21 @@ export class EventManager
self._mouseInfo.buttons.pressed[0] = 1;
self._mouseInfo.buttons.times[0] = self._psychoJS._monotonicClock.getTime() - self._mouseInfo.buttons.clocks[0].getLastResetTime();
self._mouseInfo.pos = [event.changedTouches[0].pageX, event.changedTouches[0].pageY];
// we use the first touch, discarding all others:
const touches = event.changedTouches;
self._mouseInfo.pos = [touches[0].pageX, touches[0].pageY];
this._touchInfo.touchesArray = new Array(event.touches.length);
this._touchInfo.touchesMap = {};
let i;
for (i = 0; i < event.touches.length; i++)
{
this._touchInfo.touchesArray[i] = {
id: event.touches[i].identifier,
force: event.touches[i].force,
pos: [event.touches[i].pageX, event.touches[i].pageY],
busy: false
};
this._touchInfo.touchesMap[event.touches[i].identifier] = this._touchInfo.touchesArray[i];
}
this._psychoJS.experimentLogger.data("Mouse: " + event.button + " button down, pos=(" + self._mouseInfo.pos[0] + "," + self._mouseInfo.pos[1] + ")");
}, false);
@ -249,10 +278,20 @@ export class EventManager
self._mouseInfo.buttons.pressed[0] = 0;
self._mouseInfo.buttons.times[0] = self._psychoJS._monotonicClock.getTime() - self._mouseInfo.buttons.clocks[0].getLastResetTime();
self._mouseInfo.pos = [event.changedTouches[0].pageX, event.changedTouches[0].pageY];
// we use the first touch, discarding all others:
const touches = event.changedTouches;
self._mouseInfo.pos = [touches[0].pageX, touches[0].pageY];
this._touchInfo.touchesArray = new Array(event.touches.length);
this._touchInfo.touchesMap = {};
let i;
for (i = 0; i < event.touches.length; i++)
{
this._touchInfo.touchesArray[i] = {
id: event.touches[i].identifier,
force: event.touches[i].force,
pos: [event.touches[i].pageX, event.touches[i].pageY]
};
this._touchInfo.touchesMap[event.touches[i].identifier] = this._touchInfo.touchesArray[i];
}
this._psychoJS.experimentLogger.data("Mouse: " + event.button + " button up, pos=(" + self._mouseInfo.pos[0] + "," + self._mouseInfo.pos[1] + ")");
}, false);
@ -270,10 +309,20 @@ export class EventManager
event.preventDefault();
self._mouseInfo.moveClock.reset();
self._mouseInfo.pos = [event.changedTouches[0].pageX, event.changedTouches[0].pageY];
// we use the first touch, discarding all others:
const touches = event.changedTouches;
self._mouseInfo.pos = [touches[0].pageX, touches[0].pageY];
this._touchInfo.touchesArray = new Array(event.touches.length);
this._touchInfo.touchesMap = {};
let i;
for (i = 0; i < event.touches.length; i++)
{
this._touchInfo.touchesArray[i] = {
id: event.touches[i].identifier,
force: event.touches[i].force,
pos: [event.touches[i].pageX, event.touches[i].pageY]
};
this._touchInfo.touchesMap[event.touches[i].identifier] = this._touchInfo.touchesArray[i];
}
}, false);
// (*) wheel
@ -302,7 +351,13 @@ export class EventManager
{
const timestamp = MonotonicClock.getReferenceTime();
let code = event.code;
// Note: we are using event.key since we are interested in the input character rather than
// the physical key position on the keyboard, i.e. we need to take into account the keyboard
// layout
// See https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/code for a comment regarding
// event.code's lack of suitability
let code = EventManager._pygletMap[event.key];
// let code = event.code;
// take care of legacy Microsoft browsers (IE11 and pre-Chromium Edge):
if (typeof code === "undefined")
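The hunks above keep every active touch both in an array (for iteration) and in a map keyed by touch identifier (for direct lookup). A minimal usage sketch, assuming a running psychoJS instance; the polling function and log messages are illustrative only, not part of PsychoJS:

// illustrative: poll the touch data once per frame, e.g. from a stimulus update routine
function logActiveTouches(psychoJS)
{
    const touchInfo = psychoJS.eventManager.getTouchInfo();

    // fast iteration over all current touches:
    for (const touch of touchInfo.touchesArray)
    {
        psychoJS.logger.debug(`touch ${touch.id} at (${touch.pos[0]}, ${touch.pos[1]}), force=${touch.force}`);
    }

    // fast lookup when a specific touch identifier is already known:
    const first = touchInfo.touchesArray[0];
    if (first)
    {
        touchInfo.touchesMap[first.id].busy = true;  // e.g. claim that touch for a stimulus
    }
}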

View File

@ -50,6 +50,9 @@ export class GUI
{
this._psychoJS = psychoJS;
// info fields excluded from the GUI:
this._excludedInfo = {};
// gui listens to RESOURCE events from the server manager:
psychoJS.serverManager.on(ServerManager.Event.RESOURCE, (signal) =>
{
@ -87,9 +90,6 @@ export class GUI
requireParticipantClick = GUI.DEFAULT_SETTINGS.DlgFromDict.requireParticipantClick
})
{
// get info from URL:
const infoFromUrl = util.getUrlParameters();
this._progressBarMax = 0;
this._allResourcesDownloaded = false;
this._requiredKeys = [];
@ -113,6 +113,19 @@ export class GUI
self._dialogComponent.tStart = t;
self._dialogComponent.status = PsychoJS.Status.STARTED;
// prepare the info fields excluded from the GUI, including those from the URL:
const excludedInfo = {};
for (let key in self._excludedInfo)
{
excludedInfo[key.trim().toLowerCase()] = self._excludedInfo[key];
}
const infoFromUrl = util.getUrlParameters();
infoFromUrl.forEach((value, key) =>
{
excludedInfo[key.trim().toLowerCase()] = value;
});
// if the experiment is licensed, and running on the license rather than on credit,
// we use the license logo:
if (self._psychoJS.getEnvironment() === ExperimentHandler.Environment.SERVER
@ -130,7 +143,13 @@ export class GUI
markup += "<div class='dialog-content'>";
// alert title and close button:
markup += `<div id='experiment-dialog-title' class='dialog-title'><p>${title}</p><button id='dialogClose' class='dialog-close' data-a11y-dialog-hide aria-label='Cancel Experiment'>&times;</button></div>`;
markup += "<div id='experiment-dialog-title' class='dialog-title'>";
markup += `<p>${title}</p>`;
markup += "<button id='dialogClose' class='dialog-close' data-a11y-dialog-hide aria-label='Cancel Experiment'>&times;</button>";
markup += "</div>";
// everything above the buttons is in a scrollable container:
markup += "<div class='scrollable-container'>";
// logo, if need be:
if (typeof logoUrl === "string")
@ -139,14 +158,16 @@ export class GUI
}
// add a combobox or text areas for each entry in the dictionary:
let atLeastOneIncludedKey = false;
Object.keys(dictionary).forEach((key, keyIdx) =>
{
const value = dictionary[key];
const keyId = "form-input-" + keyIdx;
// only create an input if the key is not in the URL:
let inUrl = false;
const cleanedDictKey = key.trim().toLowerCase();
const isIncluded = !(cleanedDictKey in excludedInfo);
/*let inUrl = false;
infoFromUrl.forEach((urlValue, urlKey) =>
{
const cleanedUrlKey = urlKey.trim().toLowerCase();
@ -155,10 +176,13 @@ export class GUI
inUrl = true;
// break;
}
});
});*/
if (!inUrl)
if (isIncluded)
// if (!inUrl)
{
atLeastOneIncludedKey = true;
markup += `<label for='${keyId}'> ${key} </label>`;
// if the field is required:
@ -185,7 +209,7 @@ export class GUI
markup += "</select>";
}
// otherwise we use a single string input:
// otherwise we use a single string input:
//if (typeof value === 'string')
else
{
@ -199,17 +223,27 @@ export class GUI
markup += "<p class='validateTips'>Fields marked with an asterisk (*) are required.</p>";
}
markup += "</div>"; // scrollable-container
// separator, if need be:
if (atLeastOneIncludedKey)
{
markup += "<hr>";
}
// progress bar:
markup += `<hr><div id='progressMsg' class='progress-msg'>${self._progressMessage}</div>`;
markup += `<div id='progressMsg' class='progress-msg'>${self._progressMessage}</div>`;
markup += "<div class='progress-container'><div id='progressBar' class='progress-bar'></div></div>";
// buttons:
markup += "<hr>";
markup += "<div class='dialog-button-group'>";
markup += "<button id='dialogCancel' class='dialog-button' aria-label='Cancel Experiment'>Cancel</button>";
if (self._requireParticipantClick)
{
markup += "<button id='dialogOK' class='dialog-button disabled' aria-label='Start Experiment'>Ok</button>";
}
markup += "</div>"; // button-group
markup += "</div></div>";
@ -346,14 +380,18 @@ export class GUI
{
const error = this._userFriendlyError(errorCode);
markup += `<div id='experiment-dialog-title' class='dialog-title ${error.class}'><p>${error.title}</p></div>`;
markup += "<div class='scrollable-container'>";
markup += `<p>${error.text}</p>`;
markup += "</div>";
}
else
{
markup += `<div id='experiment-dialog-title' class='dialog-title dialog-error'><p>Error</p></div>`;
markup += "<div class='scrollable-container'>";
markup += `<p>Unfortunately we encountered the following error:</p>`;
markup += stackCode;
markup += "<p>Try to run the experiment again. If the error persists, contact the experiment designer.</p>";
markup += "</div>";
}
}
@ -361,27 +399,36 @@ export class GUI
else if (typeof warning !== "undefined")
{
markup += `<div id='experiment-dialog-title' class='dialog-title dialog-warning'><p>Warning</p></div>`;
markup += "<div class='scrollable-container'>";
markup += `<p>${warning}</p>`;
markup += "</div>";
}
// we are displaying a message:
else if (typeof message !== "undefined")
{
markup += `<div id='experiment-dialog-title' class='dialog-title'><p>Message</p></div>`;
markup += "<div id='experiment-dialog-title' class='dialog-title'><p>Message</p></div>";
markup += "<div class='scrollable-container'>";
markup += `<p>${message}</p>`;
markup += "</div>";
}
if (showOK || showCancel)
{
markup += "<hr>";
}
if (showCancel)
if (showCancel || showOK)
{
markup += "<button id='dialogCancel' class='dialog-button' aria-label='Close dialog'>Cancel</button>";
}
if (showOK)
{
markup += "<button id='dialogOK' class='dialog-button' aria-label='Close dialog'>Ok</button>";
markup += "<div class='button-group'>";
if (showCancel)
{
markup += "<button id='dialogCancel' class='dialog-button' aria-label='Close dialog'>Cancel</button>";
}
if (showOK)
{
markup += "<button id='dialogOK' class='dialog-button' aria-label='Close dialog'>Ok</button>";
}
markup += "</div>"; // button-group
}
markup += "</div></div>";

View File

@ -354,7 +354,13 @@ export class Keyboard extends PsychObject
*/
self._previousKeydownKey = event.key;
let code = event.code;
// Note: we are using event.key since we are interested in the input character rather than
// the physical key position on the keyboard, i.e. we need to take into account the keyboard
// layout
// See https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/code for a comment regarding
// event.code's lack of suitability
let code = EventManager._pygletMap[event.key];
// let code = event.code;
// take care of legacy Microsoft browsers (IE11 and pre-Chromium Edge):
if (typeof code === "undefined")
@ -394,7 +400,9 @@ export class Keyboard extends PsychObject
self._previousKeydownKey = undefined;
let code = event.code;
// Note: see above for explanation regarding the use of event.key in lieu of event.code
let code = EventManager._pygletMap[event.key];
// let code = event.code;
// take care of legacy Microsoft Edge:
if (typeof code === "undefined")
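The switch from event.code to the pyglet map indexed by event.key is about keyboard layout: event.code names the physical key position, while event.key reflects the character the active layout actually produces. A tiny standalone listener illustrating the difference (not part of PsychoJS):

// on an AZERTY layout, pressing the physical key at the "Q" position yields:
//   event.code === "KeyQ"   (position on the keyboard)
//   event.key  === "a"      (character produced by the layout)
window.addEventListener("keydown", (event) =>
{
    console.log(`code=${event.code}  key=${event.key}`);
});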

View File

@ -530,6 +530,7 @@ export class PsychoJS
const response = { origin: "PsychoJS.quit", context: "when terminating the experiment" };
this._experiment.experimentEnded = true;
this._experiment.isCompleted = isCompleted;
this.status = PsychoJS.Status.STOPPED;
const isServerEnv = (this.getEnvironment() === ExperimentHandler.Environment.SERVER);
@ -601,7 +602,7 @@ export class PsychoJS
if (showOK)
{
let text = "Thank you for your patience.<br/><br/>";
let text = "Thank you for your patience.";
text += (typeof message !== "undefined") ? message : "Goodbye!";
this._gui.dialog({
message: text,
@ -789,7 +790,7 @@ export class PsychoJS
const self = this;
window.onerror = function(message, source, lineno, colno, error)
{console.log('@@@', message)
{
// check for ResizeObserver loop limit exceeded error:
// ref: https://stackoverflow.com/questions/49384120/resizeobserver-loop-limit-exceeded
if (message === "ResizeObserver loop limit exceeded" ||
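A minimal sketch of the kind of global handler installed above: the benign ResizeObserver loop warnings emitted by some browsers are swallowed, anything else is surfaced (here simply logged; PsychoJS shows an error dialog instead). The second message string is another common variant of the same warning, included here as an assumption:

window.onerror = function (message, source, lineno, colno, error)
{
    // browser quirk, not an experiment error:
    if (message === "ResizeObserver loop limit exceeded"
        || message === "ResizeObserver loop completed with undelivered notifications.")
    {
        return true;  // swallow it
    }
    console.error(`unhandled error at ${source}:${lineno}:${colno}`, error ?? message);
    return false;
};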

View File

@ -315,6 +315,74 @@ export class ServerManager extends PsychObject
return pathStatusData.data;
}
/**
* Get full data of a resource.
*
* @name module:core.ServerManager#getFullResourceData
* @function
* @public
* @param {string} name - name of the requested resource
* @param {boolean} [errorIfNotDownloaded = false] whether or not to throw an exception if the
* resource status is not DOWNLOADED
* @return {Object} full available data for resource, or undefined if the resource has been registered
* but not downloaded yet.
* @throws {Object.<string, *>} exception if no resource with that name has previously been registered
*/
getFullResourceData (name, errorIfNotDownloaded = false)
{
const response = {
origin: "ServerManager.getResource",
context: "when getting the value of resource: " + name,
};
const pathStatusData = this._resources.get(name);
if (typeof pathStatusData === "undefined")
{
// throw { ...response, error: 'unknown resource' };
throw Object.assign(response, { error: "unknown resource" });
}
if (errorIfNotDownloaded && pathStatusData.status !== ServerManager.ResourceStatus.DOWNLOADED)
{
throw Object.assign(response, {
error: name + " is not available for use (yet), its current status is: "
+ util.toString(pathStatusData.status),
});
}
return pathStatusData;
}
/**
* Release a resource.
*
* @param {string} name - the name of the resource to release
* @return {boolean} true if a resource with the given name was previously registered with the manager,
* false otherwise.
*/
releaseResource(name)
{
const response = {
origin: "ServerManager.releaseResource",
context: "when releasing resource: " + name,
};
const pathStatusData = this._resources.get(name);
if (typeof pathStatusData === "undefined")
{
return false;
}
// TODO check the current status: prevent the release of a resources currently downloading
this._psychoJS.logger.debug(`releasing resource: ${name}`);
this._resources.delete(name);
return true;
}
/**
* Get the status of a single resource or the reduced status of an array of resources.
*
@ -507,18 +575,18 @@ export class ServerManager extends PsychObject
// pre-process the resources:
for (let r = 0; r < resources.length; ++r)
{
const resource = resources[r];
// convert those resources that are only a string to an object with name and path:
if (typeof resource === "string")
if (typeof resources[r] === "string")
{
resources[r] = {
name: resource,
path: resource,
name: resources[r],
path: resources[r],
download: true
};
}
const resource = resources[r];
// deal with survey models:
if ("surveyId" in resource)
{
@ -635,6 +703,19 @@ export class ServerManager extends PsychObject
}
}
cacheResourceData (name, dataToCache)
{
const pathStatusData = this._resources.get(name);
if (typeof pathStatusData === "undefined")
{
// throw { ...response, error: 'unknown resource' };
throw Object.assign(response, { error: "unknown resource" });
}
pathStatusData.cachedData = dataToCache;
}
/**
* Block the experiment until the specified resources have been downloaded.
*
@ -1256,7 +1337,7 @@ export class ServerManager extends PsychObject
}
// preload.js with forced binary:
if (["csv", "odp", "xls", "xlsx", "json"].indexOf(extension) > -1)
if (["csv", "odp", "xls", "xlsx", "json", "gif"].indexOf(extension) > -1)
{
preloadManifest.push(/*new createjs.LoadItem().set(*/ {
id: name,
@ -1284,7 +1365,7 @@ export class ServerManager extends PsychObject
}
// font files:
else if (["ttf", "otf", "woff", "woff2"].indexOf(extension) > -1)
else if (["ttf", "otf", "woff", "woff2", "eot"].indexOf(extension) > -1)
{
fontResources.push(name);
}
@ -1332,7 +1413,7 @@ export class ServerManager extends PsychObject
preloadManifest.push(/*new createjs.LoadItem().set(*/ {
id: name,
src: pathStatusData.path,
crossOrigin: "Anonymous",
crossOrigin: "Anonymous"
} /*)*/);
}
}
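A hedged usage sketch for the two new resource methods above, assuming a running psychoJS instance and a resource registered at start-up; the resource name is hypothetical:

// throws if the resource has not finished downloading yet:
const record = psychoJS.serverManager.getFullResourceData("conditions.xlsx", true);
psychoJS.logger.debug("conditions file ready, path:", record.path, "data:", record.data);

// once the resource is no longer needed, release it so it can be re-registered or garbage collected:
psychoJS.serverManager.releaseResource("conditions.xlsx");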

View File

@ -13,6 +13,7 @@ import { MonotonicClock } from "../util/Clock.js";
import { Color } from "../util/Color.js";
import { PsychObject } from "../util/PsychObject.js";
import { Logger } from "./Logger.js";
import { hasTouchScreen } from "../util/Util.js";
/**
* <p>Window displays the various stimuli of the experiment.</p>
@ -58,6 +59,8 @@ export class Window extends PsychObject
* @param {string} [options.name] the name of the window
* @param {boolean} [options.fullscr= false] whether or not to go fullscreen
* @param {Color} [options.color= Color('black')] the background color of the window
* @param {string | HTMLImageElement} [options.backgroundImage = ""] - background image of the window.
* @param {string} [options.backgroundFit = "cover"] - how to fit background image in the window.
* @param {number} [options.gamma= 1] sets the divisor for gamma correction. In other words gamma correction is calculated as pow(rgb, 1/gamma)
* @param {number} [options.contrast= 1] sets the contrast value
* @param {string} [options.units= 'pix'] the units of the window
@ -70,6 +73,8 @@ export class Window extends PsychObject
name,
fullscr = false,
color = new Color("black"),
backgroundImage = "",
backgroundFit = "cover",
gamma = 1,
contrast = 1,
units = "pix",
@ -92,11 +97,7 @@ export class Window extends PsychObject
this._drawList = [];
this._addAttribute("fullscr", fullscr);
this._addAttribute("color", color, new Color("black"), () => {
if (this._backgroundSprite) {
this._backgroundSprite.tint = this._color.int;
}
});
this._addAttribute("color", color, new Color("black"));
this._addAttribute("gamma", gamma, 1, () => {
this._adjustmentFilter.gamma = this._gamma;
});
@ -107,6 +108,8 @@ export class Window extends PsychObject
this._addAttribute("waitBlanking", waitBlanking);
this._addAttribute("autoLog", autoLog);
this._addAttribute("size", []);
this._addAttribute("backgroundImage", backgroundImage, "");
this._addAttribute("backgroundFit", backgroundFit, "cover");
// setup PIXI:
this._setupPixi();
@ -138,6 +141,12 @@ export class Window extends PsychObject
}
}
static BACKGROUND_FIT_ENUM = {
cover: 0,
contain: 1,
auto: 2
};
/**
* Close the window.
*
@ -151,7 +160,7 @@ export class Window extends PsychObject
}
this._rootContainer.destroy();
if (document.body.contains(this._renderer.view))
{
document.body.removeChild(this._renderer.view);
@ -165,7 +174,6 @@ export class Window extends PsychObject
}
this._renderer.destroy();
window.removeEventListener("resize", this._resizeCallback);
window.removeEventListener("orientationchange", this._resizeCallback);
@ -181,7 +189,7 @@ export class Window extends PsychObject
{
// gets updated frame by frame
const lastDelta = this.psychoJS.scheduler._lastDelta;
const fps = lastDelta === 0 ? 60.0 : 1000 / lastDelta;
const fps = (lastDelta === 0) ? 60.0 : (1000.0 / lastDelta);
return fps;
}
@ -315,7 +323,7 @@ export class Window extends PsychObject
*/
removePixiObject(pixiObject)
{
this._stimsContainer.removeChild(pixiObject);
this._stimsContainer.removeChild(pixiObject);
}
/**
@ -360,6 +368,134 @@ export class Window extends PsychObject
this._refresh();
}
/**
* Set background image of the window.
*
* @name module:core.Window#setBackgroundImage
* @function
* @public
* @param {string} backgroundImage - name of the image resource (should be one specified in resource manager)
* @param {boolean} log - whether or not to log
*/
setBackgroundImage (backgroundImage = "", log = false)
{
this._setAttribute("backgroundImage", backgroundImage, log);
if (this._backgroundSprite === undefined)
{
return;
}
let imgResource = backgroundImage;
if (this._backgroundSprite instanceof PIXI.Sprite && this._backgroundSprite.texture !== PIXI.Texture.WHITE)
{
this._backgroundSprite.texture.destroy(true);
}
if (typeof backgroundImage === "string" && backgroundImage.length > 0)
{
imgResource = this.psychoJS.serverManager.getResource(backgroundImage);
}
if (imgResource instanceof HTMLImageElement)
{
const texOpts =
{
scaleMode: PIXI.SCALE_MODES.LINEAR
};
this._backgroundSprite.texture = new PIXI.Texture(new PIXI.BaseTexture(imgResource, texOpts));
this._backgroundSprite.tint = 0xffffff;
this.backgroundFit = this._backgroundFit;
}
else
{
this._backgroundSprite.texture = PIXI.Texture.WHITE;
this._backgroundSprite.width = this._size[0];
this._backgroundSprite.height = this._size[1];
this._backgroundSprite.anchor.set(.5);
this.color = this._color;
}
}
/**
* Set fit mode for background image of the window.
*
* @name module:core.Window#setBackgroundFit
* @function
* @public
* @param {string} [backgroundFit = "cover"] - fit mode for background image ["cover", "contain", "scaledown", "none"].
* @param {boolean} log - whether or not to log
*/
setBackgroundFit (backgroundFit = "cover", log = false)
{
if (this._backgroundImage === "")
{
return;
}
const backgroundFitCode = Window.BACKGROUND_FIT_ENUM[backgroundFit.replace("-", "").toLowerCase()];
if (backgroundFitCode === undefined)
{
return;
}
this._setAttribute("backgroundFit", backgroundFit, log);
const backgroundAspectRatio = this._backgroundSprite.texture.width / this._backgroundSprite.texture.height;
const windowAspectRatio = this._size[0] / this._size[1];
if (backgroundFitCode === Window.BACKGROUND_FIT_ENUM.cover)
{
if (windowAspectRatio >= backgroundAspectRatio)
{
this._backgroundSprite.width = this._size[0];
this._backgroundSprite.height = this._size[0] / backgroundAspectRatio;
}
else
{
this._backgroundSprite.height = this._size[1];
this._backgroundSprite.width = this._size[1] * backgroundAspectRatio;
}
}
else if (backgroundFitCode === Window.BACKGROUND_FIT_ENUM.contain)
{
if (windowAspectRatio >= backgroundAspectRatio)
{
this._backgroundSprite.height = this._size[1];
this._backgroundSprite.width = this._size[1] * backgroundAspectRatio;
}
else
{
this._backgroundSprite.width = this._size[0];
this._backgroundSprite.height = this._size[0] / backgroundAspectRatio;
}
}
else if (backgroundFitCode === Window.BACKGROUND_FIT_ENUM.auto)
{
this._backgroundSprite.width = this._backgroundSprite.texture.width;
this._backgroundSprite.height = this._backgroundSprite.texture.height;
}
}
/**
* Set foreground color value for the window.
*
* @name module:visual.Window#setColor
* @public
* @param {Color} colorVal - color value, can be String like "red" or "#ff0000" or Number like 0xff0000.
* @param {boolean} [log= false] - whether of not to log
*/
setColor(colorVal = "white", log = false)
{
const colorObj = (colorVal instanceof Color) ? colorVal : new Color(colorVal, Color.COLOR_SPACE.RGB);
this._setAttribute("color", colorObj, log);
if (this._backgroundSprite && !this._backgroundImage)
{
this._backgroundSprite.tint = this._color.int;
}
}
/**
* Update this window, if need be.
*
@ -372,7 +508,7 @@ export class Window extends PsychObject
if (this._renderer)
{
this._renderer.backgroundColor = this._color.int;
this._backgroundSprite.tint = this._color.int;
this.color = this._color;
}
// we also change the background color of the body since
@ -466,6 +602,7 @@ export class Window extends PsychObject
// background sprite so that if we need to move all stims at once, the background sprite
// won't get affected.
this._backgroundSprite = new PIXI.Sprite(PIXI.Texture.WHITE);
this._backgroundSprite.scale.y = -1;
this._backgroundSprite.tint = this.color.int;
this._backgroundSprite.width = this._size[0];
this._backgroundSprite.height = this._size[1];
@ -476,11 +613,11 @@ export class Window extends PsychObject
// create a top-level PIXI container:
this._rootContainer = new PIXI.Container();
this._rootContainer.addChild(this._backgroundSprite, this._stimsContainer);
// sorts children according to their zIndex value. Higher zIndex means it will be moved towards the end of the array,
// and thus rendered on top of previous one.
this._rootContainer.sortableChildren = true;
this._rootContainer.interactive = true;
this._rootContainer.filters = [this._adjustmentFilter];
@ -489,17 +626,7 @@ export class Window extends PsychObject
// touch/mouse events are treated by PsychoJS' event manager:
this.psychoJS.eventManager.addMouseListeners(this._renderer);
// update the renderer size and the Window's stimuli whenever the browser's size or orientation change:
this._resizeCallback = (e) =>
{
Window._resizePixiRenderer(this, e);
this._backgroundSprite.width = this._size[0];
this._backgroundSprite.height = this._size[1];
this._fullRefresh();
};
window.addEventListener("resize", this._resizeCallback);
window.addEventListener("orientationchange", this._resizeCallback);
this._addEventListeners();
}
/**
@ -532,6 +659,87 @@ export class Window extends PsychObject
pjsWindow._rootContainer.scale.y = -1;
}
_handlePointerDown (e)
{
let i;
let pickedPixi;
let tmpPoint = new PIXI.Point();
const cursorPos = new PIXI.Point(e.pageX, e.pageY);
for (i = this._stimsContainer.children.length - 1; i >= 0; i--)
{
if (typeof this._stimsContainer.children[i].containsPoint === "function" &&
this._stimsContainer.children[i].containsPoint(cursorPos))
{
pickedPixi = this._stimsContainer.children[i];
break;
}
else if (this._stimsContainer.children[i].containsPoint === undefined &&
this._stimsContainer.children[i] instanceof PIXI.DisplayObject)
{
this._stimsContainer.children[i].worldTransform.applyInverse(cursorPos, tmpPoint);
if (this._stimsContainer.children[i].getLocalBounds().contains(tmpPoint.x, tmpPoint.y))
{
pickedPixi = this._stimsContainer.children[i];
break;
}
}
}
this.emit("pointerdown", {
pixi: pickedPixi,
originalEvent: e
});
}
_handlePointerUp (e)
{
this.emit("pointerup", {
originalEvent: e
});
}
_handlePointerMove (e)
{
this.emit("pointermove", {
originalEvent: e
});
}
_addEventListeners ()
{
this._renderer.view.addEventListener("pointerdown", this._handlePointerDown.bind(this));
this._renderer.view.addEventListener("pointerup", this._handlePointerUp.bind(this));
this._renderer.view.addEventListener("pointermove", this._handlePointerMove.bind(this));
// update the renderer size and the Window's stimuli whenever the browser's size or orientation change:
this._resizeCallback = (e) =>
{
// if the user device is a mobile phone or tablet (we use the presence of a touch screen as a
// proxy), we need to detect whether the change in size is due to the appearance of a virtual keyboard
// in which case we do not want to resize the canvas. This is rather tricky and so we resort to
// the below trick. It would be better to use the VirtualKeyboard API, but it is not widely
// available just yet, as of 2023-06.
const keyboardHeight = 300;
if (hasTouchScreen() && (window.screen.height - window.visualViewport.height) > keyboardHeight)
{
return;
}
Window._resizePixiRenderer(this, e);
if (this._backgroundImage === undefined)
{
this._backgroundSprite.width = this._size[0];
this._backgroundSprite.height = this._size[1];
}
else
{
this.backgroundFit = this._backgroundFit;
}
this._fullRefresh();
};
window.addEventListener("resize", this._resizeCallback);
window.addEventListener("orientationchange", this._resizeCallback);
}
/**
* Send all logged messages to the {@link Logger}.
*
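The cover/contain arithmetic in setBackgroundFit can be read in isolation: compare aspect ratios, then scale the sprite so it either fills the window (cover, possibly cropping) or fits entirely inside it (contain, possibly letterboxing). A standalone sketch of the same maths, with illustrative numbers:

// returns [width, height] for the background sprite; "auto" keeps the texture's native size
function fitBackground(texWidth, texHeight, winWidth, winHeight, mode = "cover")
{
    const texRatio = texWidth / texHeight;
    const winRatio = winWidth / winHeight;

    if (mode === "auto")
    {
        return [texWidth, texHeight];
    }
    // cover scales up until both dimensions are covered; contain scales down until both fit
    const fillWidth = (mode === "cover") ? (winRatio >= texRatio) : (winRatio < texRatio);
    return fillWidth
        ? [winWidth, winWidth / texRatio]
        : [winHeight * texRatio, winHeight];
}

fitBackground(1920, 1080, 800, 800, "cover");    // ≈ [1422, 800]: fills the window, cropped left/right
fitBackground(1920, 1080, 800, 800, "contain");  // [800, 450]: letterboxed top and bottom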

View File

@ -276,6 +276,7 @@ export class ExperimentHandler extends PsychObject
}
let data = this._trialsData;
// if the experiment data have to be cleared, we first make a copy of them:
if (clear)
{
@ -351,6 +352,19 @@ export class ExperimentHandler extends PsychObject
}
}
/**
* Get the results of the experiment as a .csv string, ready to be uploaded or stored.
*
* @return {string} a .csv representation of the experiment results.
*/
getResultAsCsv()
{
// note: we use the XLSX library as it automatically deals with header, takes care of quotes,
// newlines, etc.
const worksheet = XLSX.utils.json_to_sheet(this._trialsData);
return "\ufeff" + XLSX.utils.sheet_to_csv(worksheet);
}
/**
* Get the attribute names and values for the current trial of a given loop.
* <p> Only info relating to the trial execution are returned.</p>
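A hedged sketch of using the new getResultAsCsv() to let participants download their data locally, e.g. when running outside Pavlovia; psychoJS.experiment is assumed to be the running ExperimentHandler and the filename is arbitrary:

const csv = psychoJS.experiment.getResultAsCsv();   // BOM-prefixed so spreadsheet apps open it as UTF-8

// offer the results as a browser download:
const blob = new Blob([csv], { type: "text/csv;charset=utf-8" });
const link = document.createElement("a");
link.href = URL.createObjectURL(blob);
link.download = "results.csv";
link.click();
URL.revokeObjectURL(link.href);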

View File

@ -13,7 +13,7 @@ body {
/* Initialisation message (which will disappear behind the canvas) */
#root::after {
content: "initialising the experiment...";
content: "initialising...";
position: fixed;
top: 50%;
left: 50%;
@ -26,13 +26,12 @@ body {
/* Project and resource dialogs */
.dialog-container label,
.dialog-container input,
.dialog-container select {
box-sizing: border-box;
display: block;
padding-bottom: 0.5em;
box-sizing: border-box;
display: block;
padding-bottom: 0.5em;
}
.dialog-container input.text,
@ -40,6 +39,13 @@ body {
margin-bottom: 1em;
padding: 0.5em;
width: 100%;
height: 34px;
border: 1px solid #767676;
border-radius: 2px;
background: #ffffff;
color: #333;
font-size: 14px;
}
.dialog-container fieldset {
@ -71,12 +77,19 @@ body {
}
.dialog-content {
display: flex;
flex-direction: column;
row-gap: 0;
margin: auto;
z-index: 2;
position: relative;
width: 500px;
max-width: 88vw;
/*max-height: 90vh;*/
max-height: 93%;
padding: 0.5em;
border-radius: 2px;
@ -88,11 +101,24 @@ body {
box-shadow: 1px 1px 3px #555555;
}
.dialog-content .scrollable-container {
height: 100%;
padding: 0 0.5em;
overflow-x: hidden;
overflow-y: auto;
}
.dialog-content hr {
width: 100%;
}
.dialog-title {
padding: 0.5em;
margin-bottom: 1em;
background-color: #009900;
background-color: #00dd00;
/*background-color: #009900;*/
border-radius: 2px;
}
@ -111,6 +137,11 @@ body {
}
.dialog-close {
display: flex;
justify-content: center;
align-items: center;
line-height: 1.1em;
position: absolute;
top: 0.7em;
right: 0.7em;
@ -153,7 +184,7 @@ body {
.dialog-button {
padding: 0.5em 1em 0.5em 1em;
margin: 0.5em 0.5em 0.5em 0;
/*margin: 0.5em 0.5em 0.5em 0;*/
border: 1px solid #555555;
border-radius: 2px;
@ -176,6 +207,14 @@ body {
border: 1px solid #000000;
}
.dialog-button-group {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: flex-start;
column-gap: 0.5em;
}
.disabled {
border: 1px solid #AAAAAA;
color: #AAAAAA;
@ -186,10 +225,15 @@ body {
}
.logo {
display: block;
display: flex;
flex: 0 1 auto;
height: 100%;
width: auto;
/*display: block;
margin: 0 auto 1em;
max-height: 20vh;
max-width: 100%;
max-width: 100%;*/
}
a,
@ -204,3 +248,30 @@ a:hover {
color: #000;
}
.yt-iframe {
display: block;
position: absolute;
border: none;
}
.yt-player-wrapper {
display: flex;
justify-content: center;
align-items: center;
}
.yt-player-wrapper.hidden {
display: none;
}
.yt-player-wrapper.inprogress:after {
content: "loading youtube...";
display: flex;
position: absolute;
color: white;
background: black;
padding: 10px;
justify-content: center;
align-items: center;
}

14
src/index.html Normal file
View File

@ -0,0 +1,14 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Test Experiment</title>
<link rel="stylesheet" href="./index.css">
<script src="https://cdn.jsdelivr.net/npm/preloadjs@1.0.1/lib/preloadjs.min.js"></script>
<script type="module" src="test_experiment.js"></script>
</head>
<body>
<div id="root"></div>
</body>
</html>

BIN
src/test_resources/cool.gif Normal file

Binary file not shown. Added, 868 KiB.

Binary file not shown. Added, 5.0 MiB.

View File

@ -90,6 +90,7 @@ export class MonotonicClock
{
// yyyy-mm-dd, hh:mm:ss.sss
return MonotonicClock.getDate()
.replaceAll("/","-")
// yyyy-mm-dd_hh:mm:ss.sss
.replace(", ", "_")
// yyyy-mm-dd_hh[h]mm:ss.sss
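The added replaceAll keeps the date portion filename-safe in locales that format dates with slashes; the remaining replacements are elided in the hunk above. A one-line illustration with a hypothetical input string:

// "2024/04/03, 21:28:36.123" -> "2024-04-03_21:28:36.123" (before the further ":" substitutions)
const partlyCleaned = "2024/04/03, 21:28:36.123".replaceAll("/", "-").replace(", ", "_");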

278
src/util/GifParser.js Normal file
View File

@ -0,0 +1,278 @@
/**
* Tool for parsing GIF files and decoding their data into frames.
*
* @author "Matt Way" (https://github.com/matt-way), Nikita Agafonov (https://github.com/lightest)
* @copyright (c) 2015 Matt Way, (c) 2020-2022 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*
* @note Based on https://github.com/matt-way/gifuct-js
*
*/
import GIF from 'js-binary-schema-parser/lib/schemas/gif'
import { parse } from 'js-binary-schema-parser'
import { buildStream } from 'js-binary-schema-parser/lib/parsers/uint8'
/**
* Deinterlace function from https://github.com/shachaf/jsgif
*/
export const deinterlace = (pixels, width) => {
const newPixels = new Array(pixels.length)
const rows = pixels.length / width
const cpRow = function(toRow, fromRow) {
const fromPixels = pixels.slice(fromRow * width, (fromRow + 1) * width)
newPixels.splice.apply(newPixels, [toRow * width, width].concat(fromPixels))
}
// See appendix E.
const offsets = [0, 4, 2, 1]
const steps = [8, 8, 4, 2]
var fromRow = 0
for (var pass = 0; pass < 4; pass++) {
for (var toRow = offsets[pass]; toRow < rows; toRow += steps[pass]) {
cpRow(toRow, fromRow)
fromRow++
}
}
return newPixels
}
/**
* javascript port of java LZW decompression
* Original java author url: https://gist.github.com/devunwired/4479231
*/
export const lzw = (minCodeSize, data, pixelCount, memoryBuffer, bufferOffset) => {
const MAX_STACK_SIZE = 4096
const nullCode = -1
const npix = pixelCount
var available,
clear,
code_mask,
code_size,
end_of_information,
in_code,
old_code,
bits,
code,
i,
datum,
data_size,
first,
top,
bi,
pi
// const dstPixels = new Array(pixelCount)
// const prefix = new Array(MAX_STACK_SIZE)
// const suffix = new Array(MAX_STACK_SIZE)
// const pixelStack = new Array(MAX_STACK_SIZE + 1)
const dstPixels = new Uint8Array(memoryBuffer, bufferOffset, pixelCount)
const prefix = new Uint16Array(MAX_STACK_SIZE)
const suffix = new Uint16Array(MAX_STACK_SIZE)
const pixelStack = new Uint8Array(MAX_STACK_SIZE + 1)
// Initialize GIF data stream decoder.
data_size = minCodeSize
clear = 1 << data_size
end_of_information = clear + 1
available = clear + 2
old_code = nullCode
code_size = data_size + 1
code_mask = (1 << code_size) - 1
for (code = 0; code < clear; code++) {
// prefix[code] = 0
suffix[code] = code
}
// Decode GIF pixel stream.
var datum, bits, count, first, top, pi, bi
datum = bits = count = first = top = pi = bi = 0
for (i = 0; i < npix; ) {
if (top === 0) {
if (bits < code_size) {
// get the next byte
datum += data[bi] << bits
bits += 8
bi++
continue
}
// Get the next code.
code = datum & code_mask
datum >>= code_size
bits -= code_size
// Interpret the code
if (code > available || code == end_of_information) {
break
}
if (code == clear) {
// Reset decoder.
code_size = data_size + 1
code_mask = (1 << code_size) - 1
available = clear + 2
old_code = nullCode
continue
}
if (old_code == nullCode) {
pixelStack[top++] = suffix[code]
old_code = code
first = code
continue
}
in_code = code
if (code == available) {
pixelStack[top++] = first
code = old_code
}
while (code > clear) {
pixelStack[top++] = suffix[code]
code = prefix[code]
}
first = suffix[code] & 0xff
pixelStack[top++] = first
// add a new string to the table, but only if space is available
// if not, just continue with current table until a clear code is found
// (deferred clear code implementation as per GIF spec)
if (available < MAX_STACK_SIZE) {
prefix[available] = old_code
suffix[available] = first
available++
if ((available & code_mask) === 0 && available < MAX_STACK_SIZE) {
code_size++
code_mask += available
}
}
old_code = in_code
}
// Pop a pixel off the pixel stack.
top--
dstPixels[pi++] = pixelStack[top]
i++
}
// for (i = pi; i < npix; i++) {
// dstPixels[i] = 0 // clear missing pixels
// }
return dstPixels
}
export const parseGIF = arrayBuffer => {
const byteData = new Uint8Array(arrayBuffer)
return parse(buildStream(byteData), GIF)
}
const generatePatch = image => {
const totalPixels = image.pixels.length
const patchData = new Uint8ClampedArray(totalPixels * 4)
for (var i = 0; i < totalPixels; i++) {
const pos = i * 4
const colorIndex = image.pixels[i]
const color = image.colorTable[colorIndex] || [0, 0, 0]
patchData[pos] = color[0]
patchData[pos + 1] = color[1]
patchData[pos + 2] = color[2]
patchData[pos + 3] = colorIndex !== image.transparentIndex ? 255 : 0
}
return patchData
}
export const decompressFrame = (frame, gct, buildImagePatch, memoryBuffer, memoryOffset) => {
if (!frame.image) {
console.warn('gif frame does not have associated image.')
return
}
const { image } = frame
// get the number of pixels
const totalPixels = image.descriptor.width * image.descriptor.height
// do lzw decompression
var pixels = lzw(image.data.minCodeSize, image.data.blocks, totalPixels, memoryBuffer, memoryOffset)
// deal with interlacing if necessary
if (image.descriptor.lct.interlaced) {
pixels = deinterlace(pixels, image.descriptor.width)
}
const resultImage = {
pixels: pixels,
dims: {
top: frame.image.descriptor.top,
left: frame.image.descriptor.left,
width: frame.image.descriptor.width,
height: frame.image.descriptor.height
}
}
// color table
if (image.descriptor.lct && image.descriptor.lct.exists) {
resultImage.colorTable = image.lct
} else {
resultImage.colorTable = gct
}
// add per frame relevant gce information
if (frame.gce) {
resultImage.delay = (frame.gce.delay || 10) * 10 // convert to ms
resultImage.disposalType = frame.gce.extras.disposal
// transparency
if (frame.gce.extras.transparentColorGiven) {
resultImage.transparentIndex = frame.gce.transparentColorIndex
}
}
// create canvas usable imagedata if desired
if (buildImagePatch) {
resultImage.patch = generatePatch(resultImage)
}
return resultImage
}
export const decompressFrames = (parsedGif, buildImagePatches) => {
// return parsedGif.frames
// .filter(f => f.image)
// .map(f => decompressFrame(f, parsedGif.gct, buildImagePatches))
let totalPixels = 0;
let framesWithData = 0;
let out ;
let i, j = 0;
for (i = 0; i < parsedGif.frames.length; i++) {
if (parsedGif.frames[i].image)
{
totalPixels += parsedGif.frames[i].image.descriptor.width * parsedGif.frames[i].image.descriptor.height;
framesWithData++;
}
}
// const dstPixels = new Uint16Array(totalPixels);
// let frameStart = 0;
// let frameEnd = 0;
const buf = new ArrayBuffer(totalPixels);
let bufOffset = 0;
out = new Array(framesWithData);
for (i = 0; i < parsedGif.frames.length; i++) {
if (parsedGif.frames[i].image)
{
out[j] = decompressFrame(parsedGif.frames[i], parsedGif.gct, buildImagePatches, buf, bufOffset);
bufOffset += parsedGif.frames[i].image.descriptor.width * parsedGif.frames[i].image.descriptor.height;
// out[j] = decompressFrame(parsedGif.frames[i], parsedGif.gct, buildImagePatches, prefix, suffix, pixelStack, dstPixels, frameStart, frameEnd);
j++;
}
}
return out;
}
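A hedged usage sketch for the parser exported above: fetch a GIF, decompress it with image patches enabled, and paint the first frame onto a canvas. The URL, canvas, and import path are placeholders:

import { parseGIF, decompressFrames } from "./util/GifParser.js";

async function drawFirstGifFrame(url, canvas)
{
    const buffer = await (await fetch(url)).arrayBuffer();
    const parsed = parseGIF(buffer);
    // the second argument asks decompressFrame() to build canvas-ready RGBA patches:
    const frames = decompressFrames(parsed, true);

    const { patch, dims } = frames[0];
    const ctx = canvas.getContext("2d");
    ctx.putImageData(new ImageData(patch, dims.width, dims.height), dims.left, dims.top);
}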

View File

@ -117,9 +117,12 @@ export class Scheduler
* Start this scheduler.
*
* <p>Note: tasks are run after each animation frame.</p>
*
* @return {Promise<void>} a promise resolved when the scheduler stops, e.g. when the experiments finishes
*/
start()
{
let shedulerResolve;
const self = this;
const update = async (timestamp) =>
{
@ -127,6 +130,7 @@ export class Scheduler
if (self._stopAtNextUpdate)
{
self._status = Scheduler.Status.STOPPED;
shedulerResolve();
return;
}
@ -137,6 +141,7 @@ export class Scheduler
if (state === Scheduler.Event.QUIT)
{
self._status = Scheduler.Status.STOPPED;
shedulerResolve();
return;
}
@ -155,6 +160,12 @@ export class Scheduler
// start the animation:
requestAnimationFrame(update);
// return a promise resolved when the scheduler is stopped:
return new Promise((resolve, _) =>
{
shedulerResolve = resolve;
});
}
/**
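start() now returns a promise that is resolved from inside the requestAnimationFrame loop once the scheduler stops. The general pattern, sketched in isolation:

// a stripped-down sketch of the same pattern: an animation loop that can be awaited until it stops
function runUntilStopped(shouldStop)
{
    let resolveStopped;

    const update = (timestamp) =>
    {
        if (shouldStop(timestamp))
        {
            resolveStopped();
            return;
        }
        requestAnimationFrame(update);
    };
    requestAnimationFrame(update);

    // callers can `await runUntilStopped(...)` and run clean-up once the loop has ended:
    return new Promise((resolve) => { resolveStopped = resolve; });
}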

View File

@ -322,27 +322,64 @@ export function IsPointInsidePolygon(point, vertices)
}
/**
* Shuffle an array in place using the Fisher-Yastes's modern algorithm
* Shuffle an array, or a portion of that array, in place using the modern Fisher-Yates algorithm
* <p>See details here: https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#The_modern_algorithm</p>
*
* @param {Object[]} array - the input 1-D array
* @param {Function} [randomNumberGenerator = undefined] - A function used to generated random numbers in the interal [0, 1). Defaults to Math.random
* @param {Function} [randomNumberGenerator= undefined] - A function used to generate random numbers in the interval [0, 1). Defaults to Math.random
* @param [startIndex= undefined] - start index in the array
* @param [endIndex= undefined] - end index in the array
* @return {Object[]} the shuffled array
*/
export function shuffle(array, randomNumberGenerator = undefined)
export function shuffle(array, randomNumberGenerator = undefined, startIndex = undefined, endIndex = undefined)
{
if (randomNumberGenerator === undefined)
// if array is not an array, we return it untouched rather than throwing an exception:
if (!array || !Array.isArray(array))
{
return array;
}
if (typeof startIndex === "undefined")
{
startIndex = 0;
}
if (typeof endIndex === "undefined")
{
endIndex = array.length - 1;
}
if (typeof randomNumberGenerator === "undefined")
{
randomNumberGenerator = Math.random;
}
for (let i = array.length - 1; i > 0; i--)
for (let i = endIndex; i > startIndex; i--)
{
const j = Math.floor(randomNumberGenerator() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array;
}
/**
* linspace
*
* @name module:util.linspace
* @function
* @public
* @param {number} startValue - the first value of the array
* @param {number} stopValue - the last value of the array
* @param {number} cardinality - the number of values in the array
* @return {number[]} an array of cardinality evenly spaced values from startValue to stopValue
*/
export function linspace(startValue, stopValue, cardinality) {
var arr = [];
var step = (stopValue - startValue) / (cardinality - 1);
for (var i = 0; i < cardinality; i++) {
arr.push(startValue + (step * i));
}
return arr;
}
/**
* Pick a random value from an array, uses `util.shuffle` to shuffle the array and returns the last value.
*
@ -610,6 +647,11 @@ export function toString(object)
return object.toString();
}
if (typeof object === "function")
{
return `<function ${object.name}>`;
}
try
{
const symbolReplacer = (key, value) =>
@ -1436,6 +1478,47 @@ export function loadCss(cssId, cssPath)
}
}
/**
* Whether the user device has a touchscreen, e.g. it is a mobile phone or tablet.
*
* @return {boolean} true if the user device has a touchscreen.
* @note the code below is directly adapted from MDN
*/
export function hasTouchScreen()
{
let hasTouchScreen = false;
if ("maxTouchPoints" in navigator)
{
hasTouchScreen = navigator.maxTouchPoints > 0;
}
else if ("msMaxTouchPoints" in navigator)
{
hasTouchScreen = navigator.msMaxTouchPoints > 0;
}
else
{
const mQ = matchMedia?.("(pointer:coarse)");
if (mQ?.media === "(pointer:coarse)")
{
hasTouchScreen = !!mQ.matches;
}
else if ("orientation" in window)
{
hasTouchScreen = true;
}
else
{
const UA = navigator.userAgent;
hasTouchScreen =
/\b(BlackBerry|webOS|iPhone|IEMobile)\b/i.test(UA) ||
/\b(Android|Windows Phone|iPad|iPod)\b/i.test(UA);
}
}
return hasTouchScreen;
}
/**
* Enum that stores possible text directions.
* Note that Arabic is the same as RTL but added here to support PsychoPy's
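A short usage sketch for the utilities above: a reproducible shuffle driven by a seeded generator (seedrandom is already listed in package.json) and linspace for evenly spaced stimulus levels; import paths are illustrative:

import seedrandom from "seedrandom";
import { shuffle, linspace } from "./util/Util.js";

// reproducible trial order: the same seed always yields the same shuffle
const rng = seedrandom("participant-12");
const trials = shuffle(["a", "b", "c", "d", "e", "f", "g"], rng);

// seven evenly spaced contrast levels between 0 and 1 inclusive:
const contrasts = linspace(0, 1, 7);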

441
src/visual/AnimatedGIF.js Normal file
View File

@ -0,0 +1,441 @@
/**
* Animated gif sprite.
*
* @author Nikita Agafonov (https://github.com/lightest), Matt Karl (https://github.com/bigtimebuddy)
* @copyright (c) 2020-2022 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*
* @note Based on https://github.com/pixijs/gif and heavily modified.
*
*/
import * as PIXI from "pixi.js-legacy";
/**
* Runtime object to play animated GIFs. This object is similar to an AnimatedSprite.
* It supports playback (seek, play, stop) as well as animation speed and looping.
*/
class AnimatedGIF extends PIXI.Sprite
{
/**
* Default options for all AnimatedGIF objects.
* @property {PIXI.SCALE_MODES} [scaleMode=PIXI.SCALE_MODES.LINEAR] - Scale mode to use for the texture.
* @property {boolean} [loop=true] - To enable looping.
* @property {number} [animationSpeed=1] - Speed of the animation.
* @property {boolean} [autoUpdate=true] - Set to `false` to manage updates yourself.
* @property {boolean} [autoPlay=true] - To start playing right away.
* @property {Function} [onComplete=null] - The completed callback, optional.
* @property {Function} [onLoop=null] - The loop callback, optional.
* @property {Function} [onFrameChange=null] - The frame callback, optional.
* @property {number} [fps=PIXI.Ticker.shared.FPS] - Default FPS.
*/
static defaultOptions = {
scaleMode: PIXI.SCALE_MODES.LINEAR,
fps: PIXI.Ticker.shared.FPS,
loop: true,
animationSpeed: 1,
autoPlay: true,
autoUpdate: true,
onComplete: null,
onFrameChange: null,
onLoop: null
};
/**
* @param frames - Data of the GIF image.
* @param options - Options for the AnimatedGIF
*/
constructor(decompressedFrames, options)
{
// Get the options, apply defaults
const { scaleMode, width, height, ...rest } = Object.assign({},
AnimatedGIF.defaultOptions,
options
);
super(new PIXI.Texture(PIXI.BaseTexture.fromBuffer(new Uint8Array(width * height * 4), width, height, options)));
this._name = options.name;
this._useFullFrames = false;
this._decompressedFrameData = decompressedFrames;
this._origDims = { width, height };
let i, j, time = 0;
this._frameTimings = new Array(decompressedFrames.length);
for (i = 0; i < decompressedFrames.length; i++)
{
this._frameTimings[i] =
{
start: time,
end: time + decompressedFrames[i].delay
};
time += decompressedFrames[i].delay;
}
this.duration = this._frameTimings[decompressedFrames.length - 1].end;
this._fullPixelData = [];
if (options.fullFrames !== undefined && options.fullFrames.length > 0)
{
this._fullPixelData = options.fullFrames;
this._useFullFrames = true;
}
this._playing = false;
this._currentTime = 0;
this._isConnectedToTicker = false;
Object.assign(this, rest);
// Draw the first frame
this.currentFrame = 0;
this._prevRenderedFrameIdx = -1;
if (this.autoPlay)
{
this.play();
}
}
static updatePixelsForOneFrame (decompressedFrameData, pixelBuffer, gifWidth)
{
let i = 0;
let patchRow = 0, patchCol = 0;
let offset = 0;
let colorData;
if (decompressedFrameData.pixels.length === pixelBuffer.length / 4)
{
// Not all GIF files are perfectly optimized:
// instead of holding only the tiny patch of pixels that actually changed from the previous frame,
// some store a full next frame.
// Knowing that, we can go faster by skipping the math needed to determine where to put new pixels
// and just place them 1 to 1 over the existing frame (internal browser optimizations probably also kick in).
// For large numbers of GIFs running simultaneously this yields 58+ FPS vs 15-25 FPS for the "else" case.
for (i = 0; i < decompressedFrameData.pixels.length; i++) {
if (decompressedFrameData.pixels[i] !== decompressedFrameData.transparentIndex) {
colorData = decompressedFrameData.colorTable[decompressedFrameData.pixels[i]];
offset = i * 4;
pixelBuffer[offset] = colorData[0];
pixelBuffer[offset + 1] = colorData[1];
pixelBuffer[offset + 2] = colorData[2];
pixelBuffer[offset + 3] = 255;
}
}
}
else
{
for (i = 0; i < decompressedFrameData.pixels.length; i++) {
if (decompressedFrameData.pixels[i] !== decompressedFrameData.transparentIndex) {
colorData = decompressedFrameData.colorTable[decompressedFrameData.pixels[i]];
patchRow = (i / decompressedFrameData.dims.width) | 0;
patchCol = i % decompressedFrameData.dims.width;
offset = (gifWidth * (decompressedFrameData.dims.top + patchRow) + decompressedFrameData.dims.left + patchCol) * 4;
pixelBuffer[offset] = colorData[0];
pixelBuffer[offset + 1] = colorData[1];
pixelBuffer[offset + 2] = colorData[2];
pixelBuffer[offset + 3] = 255;
}
}
}
}
static computeFullFrames (decompressedFrames, gifWidth, gifHeight)
{
let t = performance.now();
let i, j;
let patchRow = 0, patchCol = 0;
let offset = 0;
let colorData;
let pixelData = new Uint8Array(gifWidth * gifHeight * 4);
let fullPixelData = new Uint8Array(gifWidth * gifHeight * 4 * decompressedFrames.length);
for (i = 0; i < decompressedFrames.length; i++)
{
AnimatedGIF.updatePixelsForOneFrame(decompressedFrames[i], pixelData, gifWidth);
fullPixelData.set(pixelData, pixelData.length * i);
}
console.log("full frames construction time", performance.now() - t);
return fullPixelData;
}
_constructNthFullFrame (desiredFrameIdx, prevRenderedFrameIdx, decompressedFrames, pixelBuffer)
{
let t = performance.now();
// saving to variable instead of referencing object in the loop wins up to 5ms!
// (at the moment of development observed on Win10, Chrome 103.0.5060.114 (Official Build) (64-bit))
const gifWidth = this._origDims.width;
let i;
for (i = prevRenderedFrameIdx + 1; i <= desiredFrameIdx; i++)
{
// this._updatePixelsForOneFrame(decompressedFrames[i], pixelBuffer);
AnimatedGIF.updatePixelsForOneFrame(decompressedFrames[i], pixelBuffer, gifWidth)
}
// console.log("constructed frames from", prevRenderedFrameIdx, "to", desiredFrameIdx, "(", desiredFrameIdx - prevRenderedFrameIdx, ")", performance.now() - t);
}
/** Stops the animation. */
stop()
{
if (!this._playing)
{
return;
}
this._playing = false;
if (this._autoUpdate && this._isConnectedToTicker)
{
PIXI.Ticker.shared.remove(this.update, this);
this._isConnectedToTicker = false;
}
}
/** Plays the animation. */
play()
{
if (this._playing)
{
return;
}
this._playing = true;
if (this._autoUpdate && !this._isConnectedToTicker)
{
PIXI.Ticker.shared.add(this.update, this, PIXI.UPDATE_PRIORITY.HIGH);
this._isConnectedToTicker = true;
}
// If we're on the last frame and stopped, play should resume from the beginning
if (!this.loop && this.currentFrame === this._decompressedFrameData.length - 1)
{
this._currentTime = 0;
}
}
/**
* Get the current progress of the animation from 0 to 1.
* @readonly
*/
get progress()
{
return this._currentTime / this.duration;
}
/** `true` if the current animation is playing */
get playing()
{
return this._playing;
}
/**
* Updates the object transform for rendering. You only need to call this
* if the `autoUpdate` property is set to `false`.
*
* @param deltaTime - Time since last tick.
*/
update(deltaTime)
{
if (!this._playing)
{
return;
}
const elapsed = this.animationSpeed * deltaTime / PIXI.settings.TARGET_FPMS;
const currentTime = this._currentTime + elapsed;
const localTime = currentTime % this.duration;
const localFrame = this._frameTimings.findIndex((ft) =>
ft.start <= localTime && ft.end > localTime);
if (this._prevRenderedFrameIdx > localFrame)
{
this._prevRenderedFrameIdx = -1;
}
if (currentTime >= this.duration)
{
if (this.loop)
{
this._currentTime = localTime;
this.updateFrameIndex(localFrame);
if (typeof this.onLoop === "function")
{
this.onLoop();
}
}
else
{
this._currentTime = this.duration;
this.updateFrameIndex(this._decompressedFrameData.length - 1);
if (typeof this.onComplete === "function")
{
this.onComplete();
}
this.stop();
}
}
else
{
this._currentTime = localTime;
this.updateFrameIndex(localFrame);
}
}
/**
* Redraw the current frame, if it has changed since the last render.
*/
updateFrame()
{
// if (!this.dirty)
// {
// return;
// }
if (this._prevRenderedFrameIdx === this._currentFrame)
{
return;
}
// Update the current frame
if (this._useFullFrames)
{
this.texture.baseTexture.resource.data = new Uint8Array
(
this._fullPixelData.buffer, this._currentFrame * this._origDims.width * this._origDims.height * 4,
this._origDims.width * this._origDims.height * 4
);
}
else
{
// this._updatePixelsForOneFrame(this._decompressedFrameData[this._currentFrame], this.texture.baseTexture.resource.data);
this._constructNthFullFrame(this._currentFrame, this._prevRenderedFrameIdx, this._decompressedFrameData, this.texture.baseTexture.resource.data);
}
this.texture.update();
// Mark as clean
// this.dirty = false;
this._prevRenderedFrameIdx = this._currentFrame;
}
/**
* Renders the object using the WebGL renderer
*
* @param {PIXI.Renderer} renderer - The renderer
* @private
*/
_render(renderer)
{
let t = performance.now();
this.updateFrame();
// console.log("t2", this._name, performance.now() - t);
super._render(renderer);
}
/**
* Renders the object using the WebGL renderer
*
* @param {PIXI.CanvasRenderer} renderer - The renderer
* @private
*/
_renderCanvas(renderer)
{
this.updateFrame();
super._renderCanvas(renderer);
}
/**
* Whether to use PIXI.Ticker.shared to auto update animation time.
* @default true
*/
get autoUpdate()
{
return this._autoUpdate;
}
set autoUpdate(value)
{
if (value !== this._autoUpdate)
{
this._autoUpdate = value;
if (!this._autoUpdate && this._isConnectedToTicker)
{
PIXI.Ticker.shared.remove(this.update, this);
this._isConnectedToTicker = false;
}
else if (this._autoUpdate && !this._isConnectedToTicker && this._playing)
{
PIXI.Ticker.shared.add(this.update, this);
this._isConnectedToTicker = true;
}
}
}
/** Set the current frame number */
get currentFrame()
{
return this._currentFrame;
}
set currentFrame(value)
{
this.updateFrameIndex(value);
this._currentTime = this._frameTimings[value].start;
}
/** Internally handle updating the frame index */
updateFrameIndex(value)
{
if (value < 0 || value >= this._decompressedFrameData.length)
{
throw new Error(`Frame index out of range, expecting 0 to ${this.totalFrames}, got ${value}`);
}
if (this._currentFrame !== value)
{
this._currentFrame = value;
// this.dirty = true;
if (typeof this.onFrameChange === "function")
{
this.onFrameChange(value);
}
}
}
/**
* Get the total number of frame in the GIF.
*/
get totalFrames()
{
return this._decompressedFrameData.length;
}
/** Destroy and don't use after this. */
destroy()
{
this.stop();
super.destroy(true);
this._decompressedFrameData = null;
this._fullPixelData = null;
this.onComplete = null;
this.onFrameChange = null;
this.onLoop = null;
}
/**
* Cloning the animation is a useful way to create a duplicate animation.
* This maintains all the properties of the original animation but allows
* you to control playback independent of the original animation.
* If you want to create a simple copy, and not control independently,
* then you can simply create a new Sprite, e.g. `const sprite = new Sprite(animation.texture)`.
*/
clone()
{
return new AnimatedGIF([...this._decompressedFrameData], {
autoUpdate: this._autoUpdate,
loop: this.loop,
autoPlay: this.autoPlay,
scaleMode: this.texture.baseTexture.scaleMode,
animationSpeed: this.animationSpeed,
width: this._origDims.width,
height: this._origDims.height,
onComplete: this.onComplete,
onFrameChange: this.onFrameChange,
onLoop: this.onLoop,
});
}
}
export { AnimatedGIF };
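A hedged sketch tying the parser and the sprite together: decompress the frames, derive the sprite size from the frame patches (a simplification; the GIF's logical screen size could be used instead), and add the sprite to an existing PIXI container. The container and option values are assumptions, not the GifStim internals:

import { parseGIF, decompressFrames } from "./util/GifParser.js";
import { AnimatedGIF } from "./visual/AnimatedGIF.js";

// container is an existing PIXI.Container, e.g. a stimuli layer of the window
function makeGifSprite(arrayBuffer, container)
{
    const frames = decompressFrames(parseGIF(arrayBuffer), false);

    // overall size = furthest extent of any frame patch:
    const width = Math.max(...frames.map((f) => f.dims.left + f.dims.width));
    const height = Math.max(...frames.map((f) => f.dims.top + f.dims.height));

    const gif = new AnimatedGIF(frames, {
        name: "demo-gif",
        width,
        height,
        loop: true,
        autoPlay: true,
        animationSpeed: 1
    });
    container.addChild(gif);
    return gif;
}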

View File

@ -9,6 +9,7 @@
import { Mouse } from "../core/Mouse.js";
import { TextBox } from "./TextBox.js";
import * as util from "../util/Util";
/**
* <p>ButtonStim visual stimulus.</p>
@ -32,11 +33,13 @@ export class ButtonStim extends TextBox
* @param {Color} [options.borderColor= Color("white")] the border color
* @param {Color} [options.borderWidth= 0] the border width
* @param {number} [options.opacity= 1.0] - the opacity
* @param {number} [options.depth= 0] - the depth (i.e. the z order)
* @param {number} [options.letterHeight= undefined] - the height of the text
* @param {boolean} [options.bold= true] - whether or not the text is bold
* @param {boolean} [options.italic= false] - whether or not the text is italic
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor(
{
@ -54,11 +57,15 @@ export class ButtonStim extends TextBox
borderColor,
borderWidth = 0,
opacity,
depth,
letterHeight,
bold = true,
italic,
autoDraw,
autoLog,
draggable,
boxFn,
multiline
} = {},
)
{
@ -77,12 +84,16 @@ export class ButtonStim extends TextBox
borderColor,
borderWidth,
opacity,
depth,
letterHeight,
multiline,
bold,
italic,
alignment: "center",
autoDraw,
autoLog,
draggable,
boxFn
});
this.psychoJS.logger.debug("create a new Button with name: ", name);
@ -112,7 +123,7 @@ export class ButtonStim extends TextBox
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${util.toString(this)}`);
}
}
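// Usage sketch (illustrative) for the options added above; `psychoJS.window` is
// assumed to be an initialised Window and the `text` option comes from the wider
// ButtonStim API rather than from this hunk.
import { ButtonStim } from "./src/visual/ButtonStim.js";

const continueButton = new ButtonStim({
	win: psychoJS.window,
	name: "continueButton",
	text: "Continue",
	units: "height",
	pos: [0, -0.35],
	size: [0.4, 0.12],
	letterHeight: 0.05,
	depth: -2,          // new: explicit z order
	bold: true,
	draggable: true,    // new: the button can be moved with mouse/touch
});
continueButton.setAutoDraw(true);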

View File

@ -42,10 +42,11 @@ export class FaceDetector extends VisualStim
* @param {number} [options.opacity= 1.0] - the opacity
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor({name, win, input, modelDir, faceApiUrl, units, ori, opacity, pos, size, autoDraw, autoLog} = {})
constructor({name, win, input, modelDir, faceApiUrl, units, ori, opacity, pos, size, autoDraw, autoLog, draggable} = {})
{
super({name, win, units, ori, opacity, pos, size, autoDraw, autoLog});
super({name, win, units, ori, opacity, pos, size, autoDraw, autoLog, draggable});
// TODO deal with onChange (see MovieStim and Camera)
this._addAttribute("input", input, undefined);

View File

@ -54,6 +54,7 @@ export class Form extends util.mix(VisualStim).with(ColorMixin)
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every
* frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor(
{
@ -82,10 +83,11 @@ export class Form extends util.mix(VisualStim).with(ColorMixin)
clipMask,
autoDraw,
autoLog,
draggable
} = {},
)
{
super({ name, win, units, opacity, depth, pos, size, clipMask, autoDraw, autoLog });
super({ name, win, units, opacity, depth, pos, size, clipMask, autoDraw, autoLog, draggable });
this._addAttribute(
"itemPadding",

515
src/visual/GifStim.js Normal file
View File

@ -0,0 +1,515 @@
/**
* Gif Stimulus.
*
* @author Nikita Agafonov
* @version 2022.2.0
* @copyright (c) 2020-2022 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*/
import * as PIXI from "pixi.js-legacy";
import { Color } from "../util/Color.js";
import { ColorMixin } from "../util/ColorMixin.js";
import { to_pixiPoint } from "../util/Pixi.js";
import * as util from "../util/Util.js";
import { VisualStim } from "./VisualStim.js";
import {Camera} from "../hardware";
// import { parseGIF, decompressFrames } from "gifuct-js";
import { AnimatedGIF } from "./AnimatedGIF.js";
import { parseGIF, decompressFrames } from "../util/GifParser.js";
/**
* Gif Stimulus.
*
* @name module:visual.GifStim
* @class
* @extends VisualStim
* @mixes ColorMixin
* @param {Object} options
* @param {String} options.name - the name used when logging messages from this stimulus
* @param {Window} options.win - the associated Window
* @param {boolean} options.precomputeFrames - compute full frames of the GIF and store them. Setting this to true takes load off the CPU,
* but the GIF will take longer to load and occupy more memory. If CPU performance is limited (e.g. due to a large number of GIFs
* playing simultaneously, or a heavy load elsewhere in the experiment) and memory usage is not a concern, use this flag to gain performance.
* @param {string | HTMLImageElement} options.image - the name of the image resource or the HTMLImageElement corresponding to the image
* @param {string | HTMLImageElement} options.mask - the name of the mask resource or HTMLImageElement corresponding to the mask
* @param {string} [options.units= "norm"] - the units of the stimulus (e.g. for size, position, vertices)
* @param {Array.<number>} [options.pos= [0, 0]] - the position of the center of the stimulus
* @param {number} [options.ori= 0.0] - the orientation (in degrees)
* @param {number} [options.size] - the size of the rendered image (the size of the image will be used if size is not specified)
* @param {Color} [options.color= 'white'] the background color
* @param {number} [options.opacity= 1.0] - the opacity
* @param {number} [options.contrast= 1.0] - the contrast
* @param {number} [options.depth= 0] - the depth (i.e. the z order)
* @param {number} [options.texRes= 128] - the resolution of the text
* @param {boolean} [options.loop= true] - whether or not to loop the animation
* @param {boolean} [options.autoPlay= true] - whether or not to autoPlay the animation
* @param {number} [options.animationSpeed= 1] - animation speed, works as a multiplier, e.g. 1 - normal speed, 0.5 - half speed, 2 - twice as fast, etc.
* @param {boolean} [options.interpolate= false] - whether or not the image is interpolated
* @param {boolean} [options.flipHoriz= false] - whether or not to flip horizontally
* @param {boolean} [options.flipVert= false] - whether or not to flip vertically
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
*/
export class GifStim extends util.mix(VisualStim).with(ColorMixin)
{
constructor({
name,
win,
image,
mask,
precomputeFrames,
pos,
units,
ori,
size,
color,
opacity,
contrast,
texRes,
depth,
interpolate,
loop,
autoPlay,
animationSpeed,
flipHoriz,
flipVert,
autoDraw,
autoLog
} = {})
{
super({ name, win, units, ori, opacity, depth, pos, size, autoDraw, autoLog });
this._resource = undefined;
this._addAttribute("precomputeFrames", precomputeFrames, false);
this._addAttribute("image", image);
this._addAttribute("mask", mask);
this._addAttribute("color", color, "white", this._onChange(true, false));
this._addAttribute("contrast", contrast, 1.0, this._onChange(true, false));
this._addAttribute("texRes", texRes, 128, this._onChange(true, false));
this._addAttribute("interpolate", interpolate, false);
this._addAttribute("flipHoriz", flipHoriz, false, this._onChange(false, false));
this._addAttribute("flipVert", flipVert, false, this._onChange(false, false));
this._addAttribute("loop", loop, true);
this._addAttribute("autoPlay", autoPlay, true);
this._addAttribute("animationSpeed", animationSpeed, 1);
// estimate the bounding box:
this._estimateBoundingBox();
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
}
}
/**
* Getter for the playing property.
*
* @name module:visual.GifStim#isPlaying
* @public
*/
get isPlaying ()
{
if (this._pixi)
{
return this._pixi.playing;
}
return false;
}
/**
* Getter for the duration property. Shows animation duration time in milliseconds.
*
* @name module:visual.GifStim#duration
* @public
*/
get duration ()
{
if (this._pixi)
{
return this._pixi.duration;
}
}
/**
* Starts GIF playback.
*
* @name module:visual.GifStim#play
* @public
*/
play ()
{
if (this._pixi)
{
this._pixi.play();
}
}
/**
* Pauses GIF playback.
*
* @name module:visual.GifStim#pause
* @public
*/
pause ()
{
if (this._pixi)
{
this._pixi.stop();
}
}
/**
* Set whether or not to loop the animation.
*
* @name module:visual.GifStim#setLoop
* @public
* @param {boolean} [loop=true] - flag value
* @param {boolean} [log=false] - whether or not to log.
*/
setLoop (loop, log = false)
{
this._setAttribute("loop", loop, log);
if (this._pixi)
{
this._pixi.loop = loop;
}
}
/**
* Set whether or not to autoplay the animation.
*
* @name module:visual.GifStim#setAutoPlay
* @public
* @param {boolean} [autoPlay=true] - flag value
* @param {boolean} [log=false] - whether or not to log.
*/
setAutoPlay (autoPlay, log = false)
{
this._setAttribute("autoPlay", autoPlay, log);
if (this._pixi)
{
this._pixi.autoPlay = autoPlay;
}
}
/**
* Set animation speed of the animation.
*
* @name module:visual.GifStim#setAnimationSpeed
* @public
* @param {number} [animationSpeed=1] - multiplier of the animation speed, e.g. 1 - normal, 0.5 - half speed, 2 - twice as fast.
* @param {boolean} [log=false] - whether or not to log.
*/
setAnimationSpeed (animationSpeed = 1, log = false)
{
this._setAttribute("animationSpeed", animationSpeed, log);
if (this._pixi)
{
this._pixi.animationSpeed = animationSpeed;
}
}
/**
* Setter for the image attribute.
*
* @name module:visual.GifStim#setImage
* @public
* @param {HTMLImageElement | string} image - the name of the image resource or HTMLImageElement corresponding to the image
* @param {boolean} [log= false] - whether or not to log
*/
setImage(image, log = false)
{
const response = {
origin: "GifStim.setImage",
context: "when setting the image of GifStim: " + this._name,
};
try
{
// image is undefined: that's fine but we raise a warning in case this is a symptom of an actual problem
if (typeof image === "undefined")
{
this.psychoJS.logger.warn("setting the image of GifStim: " + this._name + " with argument: undefined.");
this.psychoJS.logger.debug("set the image of GifStim: " + this._name + " as: undefined");
}
else if (typeof image === "string")
{
// image is a string: it should be the name of a resource, which we load
const fullRD = this.psychoJS.serverManager.getFullResourceData(image);
console.log("gif resource", fullRD);
if (fullRD.cachedData === undefined)
{
// How GIF works: http://www.matthewflickinger.com/lab/whatsinagif/animation_and_transparency.asp
let t0 = performance.now();
let parsedGif = parseGIF(fullRD.data);
let pt = performance.now() - t0;
let t2 = performance.now();
let decompressedFrames = decompressFrames(parsedGif, false);
let dect = performance.now() - t2;
let fullFrames;
if (this._precomputeFrames)
{
fullFrames = AnimatedGIF.computeFullFrames(decompressedFrames, parsedGif.lsd.width, parsedGif.lsd.height);
}
this._resource = { parsedGif, decompressedFrames, fullFrames };
this.psychoJS.serverManager.cacheResourceData(image, this._resource);
console.log(`animated gif "${this._name}",`, "parse=", pt, "decompress=", dect);
}
else
{
this._resource = fullRD.cachedData;
}
// this.psychoJS.logger.debug(`set resource of GifStim: ${this._name} as ArrayBuffer(${this._resource.length})`);
const hasChanged = this._setAttribute("image", image, log);
if (hasChanged)
{
this._onChange(true, true)();
}
}
}
catch (error)
{
throw Object.assign(response, { error });
}
}
/**
* Setter for the mask attribute.
*
* @name module:visual.GifStim#setMask
* @public
* @param {HTMLImageElement | string} mask - the name of the mask resource or HTMLImageElement corresponding to the mask
* @param {boolean} [log= false] - whether or not to log
*/
setMask(mask, log = false)
{
const response = {
origin: "GifStim.setMask",
context: "when setting the mask of GifStim: " + this._name,
};
try
{
// mask is undefined: that's fine but we raise a warning in case this is a symptom of an actual problem
if (typeof mask === "undefined")
{
this.psychoJS.logger.warn("setting the mask of GifStim: " + this._name + " with argument: undefined.");
this.psychoJS.logger.debug("set the mask of GifStim: " + this._name + " as: undefined");
}
else
{
// mask is a string: it should be the name of a resource, which we load
if (typeof mask === "string")
{
mask = this.psychoJS.serverManager.getResource(mask);
}
// mask should now be an actual HTMLImageElement: we raise an error if it is not
if (!(mask instanceof HTMLImageElement))
{
throw "the argument: " + mask.toString() + ' is not an image" }';
}
this.psychoJS.logger.debug("set the mask of GifStim: " + this._name + " as: src= " + mask.src + ", size= " + mask.width + "x" + mask.height);
}
this._setAttribute("mask", mask, log);
this._onChange(true, false)();
}
catch (error)
{
throw Object.assign(response, { error });
}
}
/**
* Whether to interpolate (linearly) the texture in the stimulus.
*
* @name module:visual.GifStim#setInterpolate
* @public
* @param {boolean} interpolate - interpolate or not.
* @param {boolean} [log=false] - whether or not to log
*/
setInterpolate (interpolate = false, log = false)
{
this._setAttribute("interpolate", interpolate, log);
if (this._pixi instanceof PIXI.Sprite) {
this._pixi.texture.baseTexture.scaleMode = interpolate ? PIXI.SCALE_MODES.LINEAR : PIXI.SCALE_MODES.NEAREST;
this._pixi.texture.baseTexture.update();
}
}
/**
* Setter for the size attribute.
*
* @param {undefined | null | number | number[]} size - the stimulus size
* @param {boolean} [log= false] - whether or not to log
*/
setSize(size, log = false)
{
// size is either undefined, null, or a tuple of numbers:
if (typeof size !== "undefined" && size !== null)
{
size = util.toNumerical(size);
if (!Array.isArray(size))
{
size = [size, size];
}
}
this._setAttribute("size", size, log);
if (this._pixi)
{
const size_px = util.to_px(size, this.units, this.win);
const scaleX = size_px[0] / this._pixi.texture.width;
const scaleY = size_px[1] / this._pixi.texture.height;
this._pixi.scale.x = this.flipHoriz ? -scaleX : scaleX;
this._pixi.scale.y = this.flipVert ? scaleY : -scaleY;
}
}
/**
* Estimate the bounding box.
*
* @name module:visual.GifStim#_estimateBoundingBox
* @function
* @override
* @protected
*/
_estimateBoundingBox()
{
const size = this._getDisplaySize();
if (typeof size !== "undefined")
{
this._boundingBox = new PIXI.Rectangle(
this._pos[0] - size[0] / 2,
this._pos[1] - size[1] / 2,
size[0],
size[1],
);
}
// TODO take the orientation into account
}
/**
* Update the stimulus, if necessary.
*
* @name module:visual.GifStim#_updateIfNeeded
* @private
*/
_updateIfNeeded()
{
if (!this._needUpdate)
{
return;
}
this._needUpdate = false;
// update the PIXI representation, if need be:
if (this._needPixiUpdate)
{
this._needPixiUpdate = false;
if (typeof this._pixi !== "undefined")
{
this._pixi.destroy(true);
}
this._pixi = undefined;
// no image to draw: return immediately
if (typeof this._resource === "undefined")
{
return;
}
const gifOpts =
{
name: this._name,
width: this._resource.parsedGif.lsd.width,
height: this._resource.parsedGif.lsd.height,
fullFrames: this._resource.fullFrames,
scaleMode: this._interpolate ? PIXI.SCALE_MODES.LINEAR : PIXI.SCALE_MODES.NEAREST,
loop: this._loop,
autoPlay: this._autoPlay,
animationSpeed: this._animationSpeed
};
let t = performance.now();
this._pixi = new AnimatedGIF(this._resource.decompressedFrames, gifOpts);
console.log(`animatedGif "${this._name}" instancing:`, performance.now() - t);
// add a mask if need be:
if (typeof this._mask !== "undefined")
{
// Building new PIXI.BaseTexture each time we create a mask, to avoid PIXI's caching and use a unique resource.
this._pixi.mask = PIXI.Sprite.from(new PIXI.Texture(new PIXI.BaseTexture(this._mask)));
// a 0.5, 0.5 anchor is required for the mask to be aligned with the image
this._pixi.mask.anchor.x = 0.5;
this._pixi.mask.anchor.y = 0.5;
this._pixi.addChild(this._pixi.mask);
}
// since _texture.width may not be immediately available but the rest of the code needs its value
// we arrange for repeated calls to _updateIfNeeded until we have a width:
if (this._pixi.texture.width === 0)
{
this._needUpdate = true;
this._needPixiUpdate = true;
return;
}
}
this._pixi.zIndex = -this._depth;
this._pixi.alpha = this.opacity;
// set the scale:
const displaySize = this._getDisplaySize();
const size_px = util.to_px(displaySize, this.units, this.win);
const scaleX = size_px[0] / this._pixi.texture.width;
const scaleY = size_px[1] / this._pixi.texture.height;
this._pixi.scale.x = this.flipHoriz ? -scaleX : scaleX;
this._pixi.scale.y = this.flipVert ? scaleY : -scaleY;
// set the position, rotation, and anchor (image centered on pos):
this._pixi.position = to_pixiPoint(this.pos, this.units, this.win);
this._pixi.rotation = -this.ori * Math.PI / 180;
this._pixi.anchor.x = 0.5;
this._pixi.anchor.y = 0.5;
// re-estimate the bounding box, as the texture's width may now be available:
this._estimateBoundingBox();
}
/**
* Get the size of the display image, which is either that of the GifStim or that of the image
* it contains.
*
* @name module:visual.GifStim#_getDisplaySize
* @private
* @return {number[]} the size of the displayed image
*/
_getDisplaySize()
{
let displaySize = this.size;
if (this._pixi && typeof displaySize === "undefined")
{
// use the size of the texture, if we have access to it:
if (typeof this._pixi.texture !== "undefined" && this._pixi.texture.width > 0)
{
const textureSize = [this._pixi.texture.width, this._pixi.texture.height];
displaySize = util.to_unit(textureSize, "pix", this.win, this.units);
}
}
return displaySize;
}
}
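// Usage sketch (illustrative) for the new GifStim. Assumptions: "dancer.gif" was
// downloaded through the ServerManager beforehand and `psychoJS.window` is an
// initialised Window; option and method names follow the class above.
import { GifStim } from "./src/visual/GifStim.js";

const dancer = new GifStim({
	win: psychoJS.window,
	name: "dancer",
	image: "dancer.gif",
	precomputeFrames: true,  // trade memory for less per-frame CPU work
	units: "height",
	pos: [0, 0],
	size: [0.5, 0.5],
	loop: true,
	autoPlay: true,
	animationSpeed: 1,
});
dancer.setAutoDraw(true);

// Playback can be adjusted at runtime:
dancer.setAnimationSpeed(0.5);  // half speed
if (!dancer.isPlaying)
{
	dancer.play();
}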

View File

@ -426,6 +426,7 @@ export class GratingStim extends VisualStim
* @param {String} [options.blendmode= "avg"] - blend mode of the stimulus, determines how the stimulus is blended with the background. Supported values: "avg", "add", "mul", "screen".
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor({
name,
@ -448,10 +449,11 @@ export class GratingStim extends VisualStim
blendmode,
autoDraw,
autoLog,
maskParams
maskParams,
draggable
} = {})
{
super({ name, win, units, ori, opacity, depth, pos, anchor, size, autoDraw, autoLog });
super({ name, win, units, ori, opacity, depth, pos, anchor, size, autoDraw, autoLog, draggable });
this._adjustmentFilter = new AdjustmentFilter({
contrast

View File

@ -46,10 +46,39 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
* @param {boolean} [options.flipVert= false] - whether or not to flip vertically
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
* @param {ImageStim.AspectRatioStrategy} [options.aspectRatio= ImageStim.AspectRatioStrategy.VARIABLE] - the aspect ratio handling strategy
* @param {number} [options.blurVal= 0] - the blur value. Ranges from 0 (no blur) to as high as you like.
*/
constructor({ name, win, image, mask, pos, anchor, units, ori, size, color, opacity, contrast, texRes, depth, interpolate, flipHoriz, flipVert, autoDraw, autoLog } = {})
constructor({
name,
win,
image,
mask,
pos,
anchor,
units,
ori,
size,
color,
opacity,
contrast,
texRes,
depth,
interpolate,
flipHoriz,
flipVert,
autoDraw,
autoLog,
aspectRatio,
draggable,
blurVal
} = {})
{
super({ name, win, units, ori, opacity, depth, pos, anchor, size, autoDraw, autoLog });
super({ name, win, units, ori, opacity, depth, pos, anchor, size, autoDraw, autoLog, draggable });
// Holds an instance of PIXI blur filter. Used if blur value is passed.
this._blurFilter = undefined;
this._addAttribute(
"image",
@ -94,6 +123,17 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
false,
this._onChange(false, false),
);
this._addAttribute(
"aspectRatio",
aspectRatio,
ImageStim.AspectRatioStrategy.VARIABLE,
this._onChange(true, true),
);
this._addAttribute(
"blurVal",
blurVal,
0
);
// estimate the bounding box:
this._estimateBoundingBox();
@ -108,7 +148,7 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
* Setter for the image attribute.
*
* @param {HTMLImageElement | string} image - the name of the image resource or HTMLImageElement corresponding to the image
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
setImage(image, log = false)
{
@ -176,7 +216,7 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
* Setter for the mask attribute.
*
* @param {HTMLImageElement | string} mask - the name of the mask resource or HTMLImageElement corresponding to the mask
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
setMask(mask, log = false)
{
@ -234,6 +274,129 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
}
}
/**
* Sets the amount of blur for image stimuli.
*
* @param {number} blurVal - the amount of blur. 0 is no blur, max is as high as you like.
* @param {boolean} [log=false] - whether or not to log.
*/
setBlurVal (blurVal = 0, log = false)
{
this._setAttribute("blurVal", blurVal, log);
if (this._pixi instanceof PIXI.Sprite)
{
if (this._blurFilter === undefined)
{
this._blurFilter = new PIXI.filters.BlurFilter();
this._blurFilter.blur = blurVal;
}
else
{
this._blurFilter.blur = blurVal;
}
// this._pixi might get destroyed and recreated again with no filters.
if (this._pixi.filters instanceof Array && this._pixi.filters.indexOf(this._blurFilter) === -1)
{
this._pixi.filters.push(this._blurFilter);
}
else
{
this._pixi.filters = [this._blurFilter];
}
}
}
/**
* Setter for the size attribute.
*
* @param {undefined | null | number | number[]} size - the stimulus size
* @param {boolean} [log= false] - whether or not to log
*/
setSize(size, log = false)
{
if (!Array.isArray(size))
{
size = [size, size];
}
if (Array.isArray(size) && size.length <= 1)
{
size = [size[0], size[0]];
}
for (let i = 0; i < size.length; i++)
{
try
{
size[i] = util.toNumerical(size[i]);
}
catch (err)
{
// Failed to convert to numeric. Set to NaN.
size[ i ] = NaN;
}
}
if (this._texture !== undefined)
{
size = this._ensureNaNSizeConversion(size, this._texture);
this._applySizeToPixi(size);
}
this._setAttribute("size", size, log);
}
/**
* Applies given size values to underlying pixi component of the stim.
*
* @param {Array} size
*/
_applySizeToPixi(size)
{
const size_px = util.to_px(size, this._units, this._win);
let scaleX = size_px[0] / this._texture.width;
let scaleY = size_px[1] / this._texture.height;
if (this.aspectRatio === ImageStim.AspectRatioStrategy.FIT_TO_WIDTH)
{
scaleY = scaleX;
}
else if (this.aspectRatio === ImageStim.AspectRatioStrategy.FIT_TO_HEIGHT)
{
scaleX = scaleY;
}
else if (this.aspectRatio === ImageStim.AspectRatioStrategy.HORIZONTAL_TILING)
{
scaleX = 1.0;
scaleY = 1.0;
}
this._pixi.scale.x = this.flipHoriz ? -scaleX : scaleX;
this._pixi.scale.y = this.flipVert ? scaleY : -scaleY;
}
/**
* Ensures to convert NaN in the size values to proper, numerical values using given texture dimensions.
*
* @param {Array} size
*/
_ensureNaNSizeConversion(size, pixiTex)
{
if (Number.isNaN(size[0]) && Number.isNaN(size[1]))
{
size = util.to_unit([pixiTex.width, pixiTex.height], "pix", this._win, this._units);
}
else if (Number.isNaN(size[0]))
{
size[0] = size[1] * (pixiTex.width / pixiTex.height);
}
else if (Number.isNaN(size[1]))
{
size[1] = size[0] / (pixiTex.width / pixiTex.height);
}
return size;
}
/**
* Estimate the bounding box.
*
@ -276,6 +439,7 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
if (typeof this._pixi !== "undefined")
{
this._pixi.filters = null;
this._pixi.destroy(true);
}
this._pixi = undefined;
@ -292,7 +456,7 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
// Not using PIXI.Texture.from() on purpose, as it caches both PIXI.Texture and PIXI.BaseTexture.
// As a result of that we can have multiple ImageStim instances using same PIXI.BaseTexture,
// thus changing texture related properties like interpolation, or calling _pixi.destroy(true)
// will affect all ImageStims who happen to share that BaseTexture.
// will affect all ImageStims which happen to share that BaseTexture.
const texOpts =
{
scaleMode: this._interpolate ? PIXI.SCALE_MODES.LINEAR : PIXI.SCALE_MODES.NEAREST
@ -309,7 +473,17 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
this._texture = new PIXI.Texture(new PIXI.BaseTexture(this._image, texOpts));
}
this._pixi = PIXI.Sprite.from(this._texture);
if (this.aspectRatio === ImageStim.AspectRatioStrategy.HORIZONTAL_TILING)
{
const [width_px, _] = util.to_px([this.size[0], 0], this.units, this.win);
this._pixi = PIXI.TilingSprite.from(this._texture, 1, 1);
this._pixi.width = width_px;
this._pixi.height = this._texture.height;
}
else
{
this._pixi = PIXI.Sprite.from(this._texture);
}
// add a mask if need be:
if (typeof this._mask !== "undefined")
@ -346,19 +520,24 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
this._pixi.zIndex = -this._depth;
this._pixi.alpha = this.opacity;
// set the scale:
const displaySize = this._getDisplaySize();
const size_px = util.to_px(displaySize, this.units, this.win);
const scaleX = size_px[0] / this._texture.width;
const scaleY = size_px[1] / this._texture.height;
// initial setSize might be called with incomplete values like [512, null].
// Before texture is loaded they are converted to [512, NaN].
// At this point the texture is loaded and we can convert NaN to proper values.
this.size = this._getDisplaySize();
// note: this calls VisualStim.setAnchor, which properly sets the PixiJS anchor
// from the PsychoPy text format
this.anchor = this._anchor;
this._pixi.scale.x = this.flipHoriz ? -scaleX : scaleX;
this._pixi.scale.y = this.flipVert ? scaleY : -scaleY;
// set the position, rotation, and anchor (image centered on pos):
this._pixi.position = to_pixiPoint(this.pos, this.units, this.win);
this._pixi.rotation = -this.ori * Math.PI / 180;
if (this._blurVal > 0)
{
this.setBlurVal(this._blurVal);
}
// re-estimate the bounding box, as the texture's width may now be available:
this._estimateBoundingBox();
}
@ -383,7 +562,47 @@ export class ImageStim extends util.mix(VisualStim).with(ColorMixin)
displaySize = util.to_unit(textureSize, "pix", this.win, this.units);
}
}
else
{
if (this.aspectRatio === ImageStim.AspectRatioStrategy.FIT_TO_WIDTH)
{
// use the size of the texture, if we have access to it:
if (typeof this._texture !== "undefined" && this._texture.width > 0)
{
displaySize = [displaySize[0], displaySize[0] * this._texture.height / this._texture.width];
}
}
else if (this.aspectRatio === ImageStim.AspectRatioStrategy.FIT_TO_HEIGHT)
{
// use the size of the texture, if we have access to it:
if (typeof this._texture !== "undefined" && this._texture.width > 0)
{
displaySize = [displaySize[1] * this._texture.width / this._texture.height, displaySize[1]];
}
}
else if (this.aspectRatio === ImageStim.AspectRatioStrategy.HORIZONTAL_TILING)
{
// use the size of the texture, if we have access to it:
if (typeof this._texture !== "undefined" && this._texture.width > 0)
{
displaySize = [displaySize[0], this._texture.height];
}
}
}
return displaySize;
}
}
/**
* ImageStim Aspect Ratio Strategy.
*
* @enum {Symbol}
* @readonly
*/
ImageStim.AspectRatioStrategy = {
FIT_TO_WIDTH: Symbol.for("FIT_TO_WIDTH"),
HORIZONTAL_TILING: Symbol.for("HORIZONTAL_TILING"),
FIT_TO_HEIGHT: Symbol.for("FIT_TO_HEIGHT"),
VARIABLE: Symbol.for("VARIABLE"),
};
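// Usage sketch (illustrative) for the new aspectRatio, blurVal and draggable options;
// "face.png" is a placeholder resource name and `psychoJS.window` is assumed to be an
// initialised Window.
import { ImageStim } from "./src/visual/ImageStim.js";

const face = new ImageStim({
	win: psychoJS.window,
	name: "face",
	image: "face.png",
	units: "height",
	size: [0.6, 0.6],
	// keep the image proportions by deriving the height from the requested width:
	aspectRatio: ImageStim.AspectRatioStrategy.FIT_TO_WIDTH,
	blurVal: 4,       // strength of the PIXI blur filter; 0 means no filter is applied
	draggable: true,
});
face.setAutoDraw(true);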

View File

@ -15,7 +15,7 @@ import { to_pixiPoint } from "../util/Pixi.js";
import * as util from "../util/Util.js";
import { VisualStim } from "./VisualStim.js";
import {Camera} from "../hardware/Camera.js";
import YoutubeIframeAPIHandler from "./YoutubeIframeAPI.js";
/**
* Movie Stimulus.
@ -32,6 +32,9 @@ export class MovieStim extends VisualStim
* @param {module:core.Window} options.win - the associated Window
* @param {string | HTMLVideoElement | module:visual.Camera} movie - the name of a
* movie resource or of a HTMLVideoElement or of a Camera component
* @param {string} [options.youtubeUrl] - link to a youtube video. If this parameter is present, the movie stim embeds a youtube video into the experiment.
* @param {boolean} [options.showYoutubeControls] - whether or not to show the youtube player controls.
* @param {boolean} [options.disableYoutubePlayerKeyboardControls=false] - when true, the youtube player does not respond to keyboard controls.
* @param {string} [options.units= "norm"] - the units of the stimulus (e.g. for size, position, vertices)
* @param {Array.<number>} [options.pos= [0, 0]] - the position of the center of the stimulus
* @param {string} [options.anchor = "center"] - sets the origin point of the stim
@ -50,20 +53,65 @@ export class MovieStim extends VisualStim
* @param {boolean} [options.autoPlay= true] - whether or not to autoplay the video
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor({ name, win, movie, pos, anchor, units, ori, size, color, opacity, contrast, interpolate, flipHoriz, flipVert, loop, volume, noAudio, autoPlay, autoDraw, autoLog } = {})
constructor({
name,
win,
movie,
youtubeUrl,
showYoutubeControls,
disableYoutubePlayerKeyboardControls,
pos,
anchor,
units,
ori,
size,
color,
opacity,
contrast,
interpolate,
flipHoriz,
flipVert,
loop,
volume,
noAudio,
autoPlay,
autoDraw,
autoLog,
draggable
} = {})
{
super({ name, win, units, ori, opacity, pos, anchor, size, autoDraw, autoLog });
super({ name, win, units, ori, opacity, pos, anchor, size, autoDraw, autoLog, draggable });
this.psychoJS.logger.debug("create a new MovieStim with name: ", name);
this._pixiTextureResource = undefined;
// Used in case when youtubeUrl parameter is set to a proper youtube url.
this._youtubePlayer = undefined;
this._ytPlayerIsReady = false;
// movie and movie control:
this._addAttribute(
"movie",
movie,
);
this._addAttribute(
"youtubeUrl",
youtubeUrl,
""
);
this._addAttribute(
"showYoutubeControls",
showYoutubeControls,
true
);
this._addAttribute(
"disableYoutubePlayerKeyboardControls",
disableYoutubePlayerKeyboardControls,
false
);
this._addAttribute(
"volume",
volume,
@ -140,7 +188,7 @@ export class MovieStim extends VisualStim
*
* @param {string | HTMLVideoElement | module:visual.Camera} movie - the name of a
* movie resource or of a HTMLVideoElement or of a Camera component
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
setMovie(movie, log = false)
{
@ -162,7 +210,6 @@ export class MovieStim extends VisualStim
`setting the movie of MovieStim: ${this._name} with argument: undefined.`);
this.psychoJS.logger.debug(`set the movie of MovieStim: ${this._name} as: undefined`);
}
else
{
let videoResource;
@ -182,13 +229,13 @@ export class MovieStim extends VisualStim
{
// old behaviour: feeding a Camera to MovieStim plays the live stream:
videoResource = movie.getVideo();
// TODO remove previous movie one if there is one
// TODO remove previous movie if there is one
/*
// new behaviour: feeding a Camera to MovieStim replays the video previously recorded by the Camera:
const video = movie.getRecording();
movie = video;
*/
*/
}
if (videoResource instanceof HTMLVideoElement)
@ -196,6 +243,16 @@ export class MovieStim extends VisualStim
htmlVideo = videoResource;
htmlVideo.playsInline = true;
this._pixiTextureResource = PIXI.Texture.from(htmlVideo, { resourceOptions: { autoPlay: false } });
// Not using PIXI.Texture.from() on purpose, as it caches both PIXI.Texture and PIXI.BaseTexture.
// As a result of that we can have multiple MovieStim instances using same PIXI.BaseTexture,
// thus changing texture related properties like interpolation, or calling _pixi.destroy(true)
// will affect all MovieStims which happen to share that BaseTexture.
this._pixiTextureResource = new PIXI.Texture(new PIXI.BaseTexture(
this._movie,
{
resourceOptions: { autoPlay: this.autoPlay }
}
));
}
else if (videoResource instanceof PIXI.Texture)
{
@ -220,6 +277,20 @@ export class MovieStim extends VisualStim
this.status = PsychoJS.Status.FINISHED;
};
}
// Resize the stim when video is loaded. Otherwise this._pixiTextureResource.width is 1.
const loadedDataCb = () =>
{
this.size = this._size;
movie.removeEventListener("loadeddata", loadedDataCb);
};
if (movie.readyState < movie.HAVE_FUTURE_DATA)
{
movie.addEventListener("loadeddata", loadedDataCb)
}
this.hideYoutubePlayer();
}
this._setAttribute("movie", htmlVideo, log);
@ -232,10 +303,268 @@ export class MovieStim extends VisualStim
}
}
/**
* Setter for the size attribute.
*
* @param {undefined | null | number | number[]} size - the stimulus size
* @param {boolean} [log= false] - whether or not to log
*/
setSize(size, log = false)
{
if (!Array.isArray(size))
{
size = [size, size];
}
if (Array.isArray(size) && size.length <= 1)
{
size = [size[0], size[0]];
}
for (let i = 0; i < size.length; i++)
{
try
{
size[i] = util.toNumerical(size[i]);
}
catch (err)
{
// Failed to convert to numeric. Set to NaN.
size[ i ] = NaN;
}
}
// If the html5Video is available and loaded enough, use information from it to convert NaN to proper values.
if (this._movie !== undefined && this._movie.readyState >= this._movie.HAVE_FUTURE_DATA)
{
size = this._ensureNaNSizeConversion(size, this._movie);
}
if (this._pixiTextureResource !== undefined)
{
this._applySizeToPixi(size);
}
if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
// Handling youtube iframe resize here, since _updateIfNeeded isn't going to be triggered in the absence of a _pixi component.
this._applySizeToYoutubeIframe(size);
// Youtube player handles NaN size automatically. Leveraging that to cover unset size.
// IMPORTANT! this._youtubePlayer.getSize() is not used intentionally, because it returns the initial values even after a different size was set.
const ytPlayerBCR = this._youtubePlayer.getIframe().getBoundingClientRect();
size = util.to_unit([ ytPlayerBCR.width, ytPlayerBCR.height ], "pix", this._win, this._units);
}
this._setAttribute("size", size, log);
}
/**
* Setter for the position attribute.
*
* @param {Array.<number>} pos - position of the center of the stimulus, in stimulus units
* @param {boolean} [log= false] - whether or not to log
*/
setPos(pos, log = false)
{
super.setPos(pos, log);
// if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
if (this._youtubePlayer !== undefined)
{
const pos_px = util.to_px(pos, this._units, this._win, false);
pos_px[1] *= this._win._rootContainer.scale.y;
this._youtubePlayer.getIframe().style.transform = `translate3d(${pos_px[0]}px, ${pos_px[1]}px, 0)`;
}
}
/**
* Setter for the volume attribute.
*
* @param {number} volume - desired volume of the movie in [0, 1].
* @param {boolean} [log= false] - whether or not to log
*/
setVolume(vol, log = false)
{
this._setAttribute("volume", vol, log);
if (this._movie !== undefined)
{
this._movie.volume = vol;
}
else if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
// Original movie takes volume in [0, 1], whereas youtube's player [0, 100].
this._youtubePlayer.setVolume(vol * 100);
}
}
/**
* Draw this stimulus on the next frame draw.
*/
draw()
{
super.draw();
if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this.showYoutubePlayer();
}
}
/**
* Hide this stimulus on the next frame draw.
*/
hide()
{
super.hide();
if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this.hideYoutubePlayer();
}
}
/**
* Handling youtube player being ready to work.
*
* @param {Object} e - the event fired by the YouTube IFrame API when the player is ready.
*/
_onYoutubePlayerReady (e)
{
this._ytPlayerIsReady = true;
if (Number.isNaN(this._size[ 0 ]) || Number.isNaN(this._size[ 1 ]))
{
// Youtube player handles NaN size automatically. Leveraging that to cover unset size.
// IMPORTANT! this._youtubePlayer.getSize() is not used intentionally, because it returns the initial values even after a different size was set.
const ytPlayerBCR = this._youtubePlayer.getIframe().getBoundingClientRect();
this._setAttribute("size", util.to_unit([ ytPlayerBCR.width, ytPlayerBCR.height ], "pix", this._win, this._units), true);
}
this.setVolume(this._volume, true);
}
/**
* Handling youtube player state change.
*
* @param {Object} e - the state-change event fired by the YouTube IFrame API; e.data holds the new YT.PlayerState value.
*/
_onYoutubePlayerStateChange (e)
{
if (e.data === YT.PlayerState.PLAYING)
{
// Just in case for potential future requirements.
}
else if (e.data === YT.PlayerState.PAUSED)
{
// Just in case for potential future requirements.
}
else if (e.data === YT.PlayerState.ENDED)
{
	// Just in case for potential future requirements.
}
}
/**
* Handling youtube player errors.
*
* @param {Object} err - the error event fired by the YouTube IFrame API.
*/
_onYoutubePlayerError (err)
{
// Just in case for potential future requirements.
console.error("youtube player error:", arguments);
}
hideYoutubePlayer ()
{
if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this._youtubePlayer.stopVideo();
this._youtubePlayer.getIframe().parentElement.classList.add("hidden");
}
}
showYoutubePlayer ()
{
if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this._youtubePlayer.getIframe().parentElement.classList.remove("hidden");
}
}
/**
* Setter for the youtubeUrl attribute.
*
* @param {string} [urlString=""] - link to a youtube video. If this parameter is present, the movie stim embeds a youtube video into the experiment.
* @param {boolean} [log= false] - whether or not to log.
*/
async setYoutubeUrl (urlString = "", log = false)
{
if (urlString.length === 0)
{
this.hideYoutubePlayer();
return;
}
// Handling the case when a regular movie is already set.
if (this._movie !== undefined)
{
this.stop();
this.setMovie(undefined);
// Removing stimuli from the drawing list.
this.hide();
}
const urlObj = new URL(urlString);
if (this._youtubePlayer === undefined)
{
const vidSizePx = util.to_unit(this._size, this.units, this.win, "pix");
await YoutubeIframeAPIHandler.init();
this._youtubePlayer = YoutubeIframeAPIHandler.createPlayer({
videoId: urlObj.searchParams.get("v"),
width: vidSizePx[0],
height: vidSizePx[ 1 ],
playerVars: {
"rel": 0,
"playsinline": 1,
"modestbranding": 1,
"disablekb": Number(this._disableYoutubePlayerKeyboardControls) || 0,
"autoplay": Number(this._autoPlay) || 0,
"controls": Number(this._showYoutubeControls) || 0,
"loop": Number(this._loop) || 0,
},
events: {
"onReady": this._onYoutubePlayerReady.bind(this),
"onStateChange": this._onYoutubePlayerStateChange.bind(this),
"onError": this._onYoutubePlayerError.bind(this),
// "onPlaybackQualityChange":
// "onPlaybackRateChange":
// "onApiChange":
}
});
// At this point youtube player is added to the page. Invoking position setter to ensure html element is placed as expected.
this.pos = this._pos;
}
else
{
this._youtubePlayer.loadVideoById(urlObj.searchParams.get("v"));
this.showYoutubePlayer();
}
}
/**
* Reset the stimulus.
*
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
reset(log = false)
{
@ -247,49 +576,70 @@ export class MovieStim extends VisualStim
/**
* Start playing the movie.
*
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
play(log = false)
{
this.status = PsychoJS.Status.STARTED;
// As found on https://goo.gl/LdLk22
const playPromise = this._movie.play();
if (playPromise !== undefined)
if (this._movie !== undefined)
{
playPromise.catch((error) =>
// As found on https://goo.gl/LdLk22
const playPromise = this._movie.play();
if (playPromise !== undefined)
{
throw {
origin: "MovieStim.play",
context: `when attempting to play MovieStim: ${this._name}`,
error,
};
});
playPromise.catch((error) =>
{
throw {
origin: "MovieStim.play",
context: `when attempting to play MovieStim: ${this._name}`,
error,
};
});
}
}
else if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this._youtubePlayer.playVideo();
}
}
/**
* Pause the movie.
*
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
pause(log = false)
{
this.status = PsychoJS.Status.STOPPED;
this._movie.pause();
if (this._movie !== undefined)
{
this._movie.pause();
}
else if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this._youtubePlayer.pauseVideo();
}
}
/**
* Stop the movie and reset to 0s.
*
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
stop(log = false)
{
this.status = PsychoJS.Status.STOPPED;
this._movie.pause();
this.seek(0, log);
if (this._movie !== undefined)
{
this._movie.pause();
this.seek(0, log);
}
else if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this._youtubePlayer.stopVideo();
}
}
/**
@ -298,45 +648,116 @@ export class MovieStim extends VisualStim
* <p>Note: seek is experimental and does not work on all browsers at the moment.</p>
*
* @param {number} timePoint - the timepoint to which to jump (in second)
* @param {boolean} [log= false] - whether of not to log
* @param {boolean} [log= false] - whether or not to log
*/
seek(timePoint, log = false)
{
if (timePoint < 0 || timePoint > this._movie.duration)
if (this._movie !== undefined)
{
throw {
origin: "MovieStim.seek",
context: `when seeking to timepoint: ${timePoint} of MovieStim: ${this._name}`,
error: `the timepoint does not belong to [0, ${this._movie.duration}`,
};
}
if (this._hasFastSeek)
{
this._movie.fastSeek(timePoint);
}
else
{
try
{
this._movie.currentTime = timePoint;
}
catch (error)
if (timePoint < 0 || timePoint > this._movie.duration)
{
throw {
origin: "MovieStim.seek",
context: `when seeking to timepoint: ${timePoint} of MovieStim: ${this._name}`,
error,
error: `the timepoint does not belong to [0, ${this._movie.duration}]`,
};
}
if (this._hasFastSeek)
{
this._movie.fastSeek(timePoint);
}
else
{
try
{
this._movie.currentTime = timePoint;
}
catch (error)
{
throw {
origin: "MovieStim.seek",
context: `when seeking to timepoint: ${timePoint} of MovieStim: ${this._name}`,
error,
};
}
}
}
else if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
this._youtubePlayer.seekTo(timePoint);
}
}
/**
* Estimate the bounding box.
* Get the elapsed time in seconds since the video started playing.
*
* @override
* @protected
* @return {number} playback time.
*/
getPlaybackTime ()
{
if (this._movie !== undefined)
{
return this._movie.currentTime;
}
else if (this._youtubePlayer !== undefined && this._ytPlayerIsReady)
{
return this._youtubePlayer.getCurrentTime();
}
return 0;
}
/**
* Applies given size values to underlying pixi component of the stim.
*
* @param {Array} size
*/
_applySizeToPixi(size)
{
const size_px = util.to_px(size, this._units, this._win);
const scaleX = size_px[0] / this._movie.videoWidth;
const scaleY = size_px[1] / this._movie.videoHeight;
this._pixi.scale.x = this.flipHoriz ? -scaleX : scaleX;
this._pixi.scale.y = this.flipVert ? scaleY : -scaleY;
}
/**
* Applies given size values to youtube iframe.
*
* @param {Array} size
*/
_applySizeToYoutubeIframe(size)
{
const size_px = util.to_px(size, this._units, this._win);
this._youtubePlayer.setSize(size_px[ 0 ], size_px[ 1 ]);
}
/**
* Ensures to convert NaN in the size values to proper, numerical values using given texture dimensions.
*
* @param {Array} size
*/
_ensureNaNSizeConversion(size, html5Video)
{
if (Number.isNaN(size[0]) && Number.isNaN(size[1]))
{
size = util.to_unit([html5Video.videoWidth, html5Video.videoHeight], "pix", this._win, this._units);
}
else if (Number.isNaN(size[0]))
{
size[0] = size[1] * (html5Video.videoWidth / html5Video.videoHeight);
}
else if (Number.isNaN(size[1]))
{
size[1] = size[0] / (html5Video.videoWidth / html5Video.videoHeight);
}
return size;
}
/**
* Estimate the bounding box.
*/
_estimateBoundingBox()
{
@ -425,16 +846,10 @@ export class MovieStim extends VisualStim
// opacity:
this._pixi.alpha = this.opacity;
// set the scale:
const displaySize = this._getDisplaySize();
const size_px = util.to_px(displaySize, this.units, this.win);
const scaleX = size_px[0] / this._pixiTextureResource.width;
const scaleY = size_px[1] / this._pixiTextureResource.height;
this._pixi.scale.x = this.flipHoriz ? -scaleX : scaleX;
this._pixi.scale.y = this.flipVert ? scaleY : -scaleY;
this._pixi.width = size_px[0];
this._pixi.height = size_px[1];
// initial setSize might be called with incomplete values like [512, null].
// Before texture is loaded they are converted to [512, NaN].
// At this point the texture is loaded and we can convert NaN to proper values.
this.size = this._size;
// set the position, rotation, and anchor (movie centered on pos):
this._pixi.position = to_pixiPoint(this.pos, this.units, this.win);
@ -446,9 +861,11 @@ export class MovieStim extends VisualStim
}
/**
* Get the size of the display image, which is either that of the ImageStim or that of the image
* Get the size of the display image, which is either that of the MovieStim or that of the image
* it contains.
*
* @name module:visual.MovieStim#_getDisplaySize
* @private
* @protected
* @return {number[]} the size of the displayed image
*/

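// Usage sketch (illustrative) for the new YouTube embedding path of MovieStim;
// the URL is a placeholder and `psychoJS.window` is assumed to be an initialised Window.
import { MovieStim } from "./src/visual/MovieStim.js";

const ytClip = new MovieStim({
	win: psychoJS.window,
	name: "ytClip",
	youtubeUrl: "https://www.youtube.com/watch?v=VIDEO_ID",
	showYoutubeControls: false,
	disableYoutubePlayerKeyboardControls: true,
	units: "pix",
	size: [1280, 720],
	volume: 0.5,
	autoPlay: true,
	loop: false,
});
ytClip.setAutoDraw(true);

// Once the player reports ready, the usual controls dispatch to the YouTube iframe:
// ytClip.pause(); ytClip.seek(10); console.log(ytClip.getPlaybackTime());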
View File

@ -0,0 +1,331 @@
/**
* Particle Emitter.
*
* @author Nikita Agafonov
* @version 2023.2.0
* @copyright (c) 2020-2023 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*/
import * as PIXI from "pixi.js-legacy";
const DEFAULT_POOL_SIZE = 1024;
const DEFAULT_PARTICLE_WIDTH = 10;
const DEFAULT_PARTICLE_HEIGHT = 10;
const DEFAULT_PARTICLE_LIFETIME = 3; // Seconds.
const DEFAULT_PARTICLE_COLOR = 0xffffff;
const DEFAULT_PARTICLES_PER_SEC = 60;
const DEFAULT_PARTICLE_V = 100;
class Particle
{
constructor (cfg)
{
this.x = 0;
this.y = 0;
this.ax = 0;
this.ay = 0;
this.vx = 0;
this.vy = 0;
this.lifeTime = 0;
this.widthChange = 0;
this.heightChange = 0;
this.sprite = undefined;
this.inUse = false;
if (cfg.particleImage !== undefined)
{
this.sprite = PIXI.Sprite.from(PIXI.Texture.from(cfg.particleImage));
}
else
{
this.sprite = new PIXI.Sprite(PIXI.Texture.WHITE);
this.sprite.tint = cfg.particleColor || DEFAULT_PARTICLE_COLOR;
}
// TODO: Should we instead incorporate that in position calculation?
// Consider: accurate spawn position of the particle confined by spawnArea.
this.sprite.anchor.set(0.5);
this.width = cfg.particleWidth || DEFAULT_PARTICLE_WIDTH;
this.height = cfg.particleHeight || DEFAULT_PARTICLE_HEIGHT;
}
set width (w)
{
this._width = w;
this.sprite.width = w;
}
get width ()
{
return this._width;
}
set height (h)
{
this._height = h;
this.sprite.height = h;
}
get height ()
{
return this._height;
}
update (dt)
{
const dt2 = dt * dt;
// Update velocity with current acceleration.
this.vx += this.ax * dt;
this.vy += this.ay * dt;
// Update position with current velocity and acceleration.
this.x = this.x + this.vx * dt + this.ax * dt2 * .5;
this.y = this.y + this.vy * dt + this.ay * dt2 * .5;
this.sprite.rotation = Math.atan2(this.vy, this.vx);
this.sprite.x = this.x;
this.sprite.y = this.y;
if (this.width > 0)
{
this.width = Math.max(0, this.width + this.widthChange);
}
if (this.height > 0)
{
this.height = Math.max(0, this.height + this.heightChange);
}
this.lifeTime -= dt;
if (this.width <= 0 && this.height <= 0)
{
this.lifeTime = 0;
}
if (this.lifeTime <= 0)
{
this.inUse = false;
}
}
}
export class ParticleEmitter
{
constructor (cfg = {})
{
this.x = 0;
this.y = 0;
this._cfg = cfg;
this._particlesPerSec = cfg.particlesPerSec || DEFAULT_PARTICLES_PER_SEC;
this._spawnCoolDown = 0;
this._parentObj = undefined;
this._particlePool = new Array(DEFAULT_POOL_SIZE);
this.setParentObject(cfg.parentObject);
this._fillParticlePool(cfg);
}
_fillParticlePool (cfg)
{
let i;
for (i = 0; i < this._particlePool.length; i++)
{
this._particlePool[i] = new Particle(cfg);
}
}
_setupParticle (p)
{
let spawnAreaWidth = this._cfg.spawnAreaWidth || 0;
let spawnAreaHeight = this._cfg.spawnAreaHeight || 0;
if (this._parentObj !== undefined && this._cfg.useParentSizeAsSpawnArea)
{
spawnAreaWidth = this._parentObj.width;
spawnAreaHeight = this._parentObj.height;
}
const spawnOffsetX = Math.random() * spawnAreaWidth - spawnAreaWidth * .5;
const spawnOffsetY = Math.random() * spawnAreaHeight - spawnAreaHeight * .5;
const x = this.x + spawnOffsetX;
const y = this.y + spawnOffsetY;
p.x = x;
p.y = y;
p.ax = 0;
p.ay = 0;
if (Number.isFinite(this._cfg.initialVx))
{
p.vx = this._cfg.initialVx;
}
else if (this._cfg.initialVx instanceof Array && this._cfg.initialVx.length >= 2)
{
p.vx = Math.random() * (this._cfg.initialVx[1] - this._cfg.initialVx[0]) + this._cfg.initialVx[0];
}
else
{
p.vx = Math.random() * DEFAULT_PARTICLE_V - DEFAULT_PARTICLE_V * .5;
}
if (Number.isFinite(this._cfg.initialVy))
{
p.vy = this._cfg.initialVy;
}
else if (this._cfg.initialVy instanceof Array && this._cfg.initialVy.length >= 2)
{
p.vy = Math.random() * (this._cfg.initialVy[1] - this._cfg.initialVy[0]) + this._cfg.initialVy[0];
}
else
{
p.vy = Math.random() * DEFAULT_PARTICLE_V - DEFAULT_PARTICLE_V * .5;
}
p.lifeTime = this._cfg.lifeTime || DEFAULT_PARTICLE_LIFETIME;
p.width = this._cfg.particleWidth || DEFAULT_PARTICLE_WIDTH;
p.height = this._cfg.particleHeight || DEFAULT_PARTICLE_HEIGHT;
p.widthChange = this._cfg.particleWidthChange || 0;
p.heightChange = this._cfg.particleHeightChange || 0;
// TODO: run proper checks here.
if (this._cfg.particleImage)
{
p.sprite.texture = PIXI.Texture.from(this._cfg.particleImage);
}
else
{
p.sprite.texture = PIXI.Texture.WHITE;
}
if (this._cfg.particleColor !== undefined)
{
p.sprite.tint = this._cfg.particleColor;
}
else
{
p.sprite.tint = 0xffffff;
}
}
_spawnParticles (n = 0)
{
let i;
for (i = 0; i < this._particlePool.length && n > 0; i++)
{
if (this._particlePool[i].inUse === false)
{
this._particlePool[i].inUse = true;
n--;
this._setupParticle(this._particlePool[i]);
this._cfg.container.addChild(this._particlePool[i].sprite);
}
}
}
_getResultingExternalForce ()
{
let externalForce = [0, 0];
if (this._cfg.externalForces instanceof Array)
{
let i;
for (i = 0; i < this._cfg.externalForces.length; i++)
{
externalForce[0] += this._cfg.externalForces[i][0];
externalForce[1] += this._cfg.externalForces[i][1];
}
}
return externalForce;
}
setParentObject (po)
{
this._parentObj = po;
}
/**
* @desc: Adds an external force which acts on each particle.
* @param: f - Array with two elements: the first is the x component, the second is the y component.
* It is a vector whose direction and length set the direction and the magnitude of the force.
* */
addExternalForce (f)
{
this._cfg.externalForces.push(f);
}
removeExternalForce (f)
{
const i = this._cfg.externalForces.indexOf(f);
if (i !== -1)
{
this._cfg.externalForces.splice(i, 1);
}
}
removeExternalForceByIdx (idx)
{
if (this._cfg.externalForces[idx] !== undefined)
{
this._cfg.externalForces.splice(idx, 1);
}
}
update (dt)
{
let externalForce;
// Sync with parent object if it exists.
if (this._parentObj !== undefined)
{
this.x = this._parentObj.x;
this.y = this._parentObj.y;
}
if (Number.isFinite(this._cfg.positionOffsetX))
{
this.x += this._cfg.positionOffsetX;
}
if (Number.isFinite(this._cfg.positionOffsetY))
{
this.y += this._cfg.positionOffsetY;
}
if (this._spawnCoolDown <= 0)
{
this._spawnCoolDown = 1 / this._particlesPerSec;
// Assuming that we have at least 60FPS.
const frameTime = Math.min(dt, 1 / 60);
const particlesPerFrame = Math.ceil(frameTime / this._spawnCoolDown);
this._spawnParticles(particlesPerFrame);
}
else
{
this._spawnCoolDown -= dt;
}
let i;
for (i = 0; i < this._particlePool.length; i++)
{
if (this._particlePool[i].inUse)
{
externalForce = this._getResultingExternalForce();
this._particlePool[i].ax = externalForce[0];
this._particlePool[i].ay = externalForce[1];
this._particlePool[i].update(dt);
}
// Check if particle should be removed.
if (this._particlePool[i].lifeTime <= 0 && this._particlePool[i].sprite.parent)
{
this._cfg.container.removeChild(this._particlePool[i].sprite);
}
}
}
}
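// Usage sketch (illustrative): drives the ParticleEmitter from a PIXI ticker.
// The config keys follow the fields read in _setupParticle() and update() above;
// the import path mirrors this repo's src/visual tree.
import * as PIXI from "pixi.js-legacy";
import { ParticleEmitter } from "./src/visual/ParticleEmitter.js";

const app = new PIXI.Application({ width: 800, height: 600 });
document.body.appendChild(app.view);

const gravity = [0, 250];  // px/s^2, pulls particles downwards
const emitter = new ParticleEmitter({
	container: app.stage,        // where the particle sprites are added
	particlesPerSec: 120,
	lifeTime: 2,                 // seconds
	particleWidth: 6,
	particleHeight: 6,
	particleColor: 0xffcc33,
	initialVx: [-80, 80],        // ranges: a random value is picked per particle
	initialVy: [-220, -120],
	spawnAreaWidth: 40,
	spawnAreaHeight: 10,
	externalForces: [gravity],
});
emitter.x = 400;
emitter.y = 300;

// update() expects a delta time in seconds:
app.ticker.add(() => emitter.update(app.ticker.deltaMS / 1000));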

View File

@ -39,8 +39,9 @@ export class Polygon extends ShapeStim
* @param {boolean} [options.interpolate= true] - whether or not the shape is interpolated
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor({ name, win, lineWidth, lineColor, fillColor, opacity, edges, radius, pos, size, ori, units, contrast, depth, interpolate, autoDraw, autoLog } = {})
constructor({ name, win, lineWidth, lineColor, fillColor, opacity, edges, radius, pos, size, ori, units, contrast, depth, interpolate, autoDraw, autoLog, draggable } = {})
{
super({
name,
@ -58,9 +59,11 @@ export class Polygon extends ShapeStim
interpolate,
autoDraw,
autoLog,
draggable
});
this._psychoJS.logger.debug("create a new Polygon with name: ", name);
this._psychoJS.logger.debug("create a new Polygon with name: ",
name);
this._addAttribute(
"edges",

162
src/visual/Progress.js Normal file
View File

@ -0,0 +1,162 @@
import * as PIXI from "pixi.js-legacy";
import * as util from "../util/Util.js";
import { Color } from "../util/Color.js";
import { to_pixiPoint } from "../util/Pixi.js";
import { VisualStim } from "./VisualStim.js";
export class Progress extends VisualStim
{
constructor (
{
name,
win,
units = "pix",
ori,
opacity,
depth,
pos,
anchor = "left",
size = [300, 30],
clipMask,
autoDraw,
autoLog,
progress = 1,
type,
fillColor,
fillTexture
})
{
super({
name,
win,
units,
ori,
opacity,
depth,
pos,
anchor,
size,
clipMask,
autoDraw,
autoLog
});
this._addAttribute("progress", progress, 0);
this._addAttribute("type", type, PROGRESS_TYPES.BAR);
this._addAttribute("fillColor", fillColor, "lightgreen");
this._addAttribute("fillTexture", fillTexture, PIXI.Texture.WHITE);
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
}
}
/**
* Setter for the progress attribute.
*/
setProgress (progress = 0, log = false)
{
this._setAttribute("progress", Math.min(1.0, Math.max(0.0, progress)), log);
if (this._pixi !== undefined)
{
this._pixi.clear();
const size_px = util.to_px(this._size, this._units, this._win);
const progressWidth = size_px[0] * this._progress;
if (this._fillTexture)
{
let t = PIXI.Texture.WHITE;
if (typeof this._fillTexture === "string")
{
t = PIXI.Texture.from(this._fillTexture);
t.baseTexture.scaleMode = PIXI.SCALE_MODES.NEAREST;
}
this._pixi.beginTextureFill({
texture: t
});
}
else
{
this._pixi.beginFill(new Color(this._fillColor).int, this._opacity);
}
if (this._type === PROGRESS_TYPES.BAR)
{
this._pixi.drawRect(0, 0, progressWidth, size_px[1]);
}
this._pixi.endFill();
// TODO: is there a better way to ensure anchor works?
this.anchor = this._anchor;
}
}
/**
* Estimate the bounding box.
*
* @override
* @protected
*/
_estimateBoundingBox()
{
let boundingBox = new PIXI.Rectangle(0, 0, 0, 0);
const anchorNum = this._anchorTextToNum(this._anchor);
const pos_px = util.to_px(this._pos, this._units, this._win);
const size_px = util.to_px(this._size, this._units, this._win);
boundingBox.x = pos_px[ 0 ] - anchorNum[ 0 ] * size_px[ 0 ];
boundingBox.y = pos_px[ 1 ] - anchorNum[ 1 ] * size_px[ 1 ];
boundingBox.width = size_px[ 0 ];
boundingBox.height = size_px[ 1 ];
this._boundingBox = boundingBox;
}
/**
* Update the stimulus, if necessary.
*
* @protected
*/
_updateIfNeeded()
{
// TODO: figure out what is the error with estimateBoundBox on resize?
if (!this._needUpdate)
{
return;
}
this._needUpdate = false;
// update the PIXI representation, if need be:
if (this._needPixiUpdate)
{
this._needPixiUpdate = false;
if (typeof this._pixi !== "undefined")
{
this._pixi.destroy(true);
}
this._pixi = new PIXI.Graphics();
// TODO: Should we do this?
// this._pixi.lineStyle(this._lineWidth, this._lineColor.int, this._opacity, 0.5);
// TODO: Should just .setProgress() be called?
this.setProgress(this._progress);
this._pixi.scale.y = -1;
this._pixi.zIndex = -this._depth;
this.anchor = this._anchor;
}
// set polygon position and rotation:
this._pixi.position = to_pixiPoint(this._pos, this._units, this._win);
this._pixi.rotation = -this.ori * Math.PI / 180.0;
this._estimateBoundingBox();
}
}
export const PROGRESS_TYPES =
{
BAR: 0,
CIRCLE: 1
}
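A minimal usage sketch for the new Progress stimulus, assuming an already-open psychoJS Window named win and that Progress and PROGRESS_TYPES are imported from this module (the import path and values shown here are illustrative, not part of the diff):
import { Progress, PROGRESS_TYPES } from "./visual/Progress.js";
// create a horizontal loading bar anchored on its left edge
const loadingBar = new Progress({
	name: "loadingBar",
	win: win,                      // assumed: an already-open psychoJS Window
	units: "pix",
	size: [300, 30],
	pos: [0, 0],
	anchor: "left",
	fillColor: "lightgreen",
	type: PROGRESS_TYPES.BAR,
	progress: 0.0
});
loadingBar.setAutoDraw(true);
// later, e.g. once per frame; setProgress clamps the value to [0, 1]:
loadingBar.setProgress(0.42);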

View File

@ -38,8 +38,9 @@ export class Rect extends ShapeStim
* @param {boolean} [options.interpolate= true] - whether or not the shape is interpolated
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor({ name, win, lineWidth, lineColor, fillColor, opacity, width, height, pos, anchor, size, ori, units, contrast, depth, interpolate, autoDraw, autoLog } = {})
constructor({ name, win, lineWidth, lineColor, fillColor, opacity, width, height, pos, anchor, size, ori, units, contrast, depth, interpolate, autoDraw, autoLog, draggable } = {})
{
super({
name,
@ -58,6 +59,7 @@ export class Rect extends ShapeStim
interpolate,
autoDraw,
autoLog,
draggable
});
this._psychoJS.logger.debug("create a new Rect with name: ", name);

View File

@ -44,10 +44,11 @@ export class ShapeStim extends util.mix(VisualStim).with(ColorMixin, WindowMixin
* @param {boolean} [options.interpolate= true] - whether or not the shape is interpolated
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor({ name, win, lineWidth, lineColor, fillColor, opacity, vertices, closeShape, pos, anchor, size, ori, units, contrast, depth, interpolate, autoDraw, autoLog } = {})
constructor({ name, win, lineWidth, lineColor, fillColor, opacity, vertices, closeShape, pos, anchor, size, ori, units, contrast, depth, interpolate, autoDraw, autoLog, draggable } = {})
{
super({ name, win, units, ori, opacity, pos, anchor, depth, size, autoDraw, autoLog });
super({ name, win, units, ori, opacity, pos, anchor, depth, size, autoDraw, autoLog, draggable });
// the PIXI polygon corresponding to the vertices, in pixel units:
this._pixiPolygon_px = undefined;
@ -163,8 +164,8 @@ export class ShapeStim extends util.mix(VisualStim).with(ColorMixin, WindowMixin
if (typeof objectPos_px === "undefined")
{
throw {
origin: "VisualStim.contains",
context: "when determining whether VisualStim: " + this._name + " contains object: " + util.toString(object),
origin: "ShapeStim.contains",
context: "when determining whether ShapeStim: " + this._name + " contains object: " + util.toString(object),
error: "unable to determine the position of the object",
};
}
@ -176,6 +177,22 @@ export class ShapeStim extends util.mix(VisualStim).with(ColorMixin, WindowMixin
return util.IsPointInsidePolygon(objectPos_px, polygon_px);
}
/**
* Determine whether a point that is known to be in pixel units lies within the shape of the stimulus.
*
* @name module:visual.ShapeStim#containsPointPx
* @public
* @param {number[]} point_px - the point in pixels
* @return {boolean} whether or not the point is inside the shape of the stimulus
*/
containsPointPx (point_px)
{
const pos_px = util.to_px(this.pos, this.units, this.win);
this._getVertices_px();
const polygon_px = this._vertices_px.map((v) => [v[0] + pos_px[0], v[1] + pos_px[1]]);
return util.IsPointInsidePolygon(point_px, polygon_px);
}
/**
* Setter for the anchor attribute.
*
@ -385,4 +402,29 @@ ShapeStim.KnownShapes = {
[-0.39, 0.31],
[-0.09, 0.18],
],
triangle: [
[+0.0, 0.5], // Point
[-0.5, -0.5], // Bottom left
[+0.5, -0.5], // Bottom right
],
rectangle: [
[-.5, .5], // Top left
[ .5, .5], // Top right
[ .5, -.5], // Bottom right
[-.5, -.5], // Bottom left
],
arrow: [
[0.0, 0.5],
[-0.5, 0.0],
[-1/6, 0.0],
[-1/6, -0.5],
[1/6, -0.5],
[1/6, 0.0],
[0.5, 0.0],
],
};
// Alias some names for convenience
ShapeStim.KnownShapes['star'] = ShapeStim.KnownShapes['star7']
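A short sketch of how the new KnownShapes entries can be used, assuming an already-open psychoJS Window named win (the remaining constructor options follow the signature documented above):
// draw the built-in arrow; KnownShapes entries are arrays of normalised vertices
const arrowStim = new ShapeStim({
	name: "arrow",
	win: win,                                // assumed: an already-open psychoJS Window
	vertices: ShapeStim.KnownShapes.arrow,   // KnownShapes.star is an alias for KnownShapes.star7
	size: [0.3, 0.3],
	units: "height",
	fillColor: "white",
	closeShape: true,
	draggable: true                          // new in this release: drag the shape with the pointer
});
arrowStim.setAutoDraw(true);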

View File

@ -65,6 +65,7 @@ export class Slider extends util.mix(VisualStim).with(ColorMixin, WindowMixin)
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every
* frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*
* @param {core.MinimalStim[]} [options.dependentStims = [] ] - the list of dependent stimuli,
* which must be updated when this Slider is updated, e.g. a Form.
@ -99,10 +100,11 @@ export class Slider extends util.mix(VisualStim).with(ColorMixin, WindowMixin)
autoDraw,
autoLog,
dependentStims,
draggable
} = {},
)
{
super({ name, win, units, ori, opacity, depth, pos, size, clipMask, autoDraw, autoLog });
super({ name, win, units, ori, opacity, depth, pos, size, clipMask, autoDraw, autoLog, draggable });
this._needMarkerUpdate = false;

View File

@ -23,26 +23,7 @@ import MatrixBipolar from "./survey/components/MatrixBipolar.js";
import DropdownExtensions from "./survey/components/DropdownExtensions.js";
import customExpressionFunctionsArray from "./survey/extensions/customExpressionFunctions.js";
const CAPTIONS = {
NEXT: "Next"
};
const SURVEY_SETTINGS = {
minWidth: "100px"
};
const SURVEY_COMPLETION_CODES =
{
NORMAL: 0,
SKIP_TO_END_OF_BLOCK: 1,
SKIP_TO_END_OF_SURVEY: 2
};
const NODE_EXIT_CODES =
{
NORMAL: 0,
BREAK_FLOW: 1
};
/**
* Survey Stimulus.
@ -63,6 +44,24 @@ export class Survey extends VisualStim
ENDSURVEY: "END"
};
static CAPTIONS =
{
NEXT: "Next"
};
static SURVEY_COMPLETION_CODES =
{
NORMAL: 0,
SKIP_TO_END_OF_BLOCK: 1,
SKIP_TO_END_OF_SURVEY: 2
};
static NODE_EXIT_CODES =
{
NORMAL: 0,
BREAK_FLOW: 1
};
/**
* @memberOf module:visual
* @param {Object} options
@ -83,19 +82,16 @@ export class Survey extends VisualStim
{
super({ name, win, units, ori, depth, pos, size, autoDraw, autoLog });
// the default surveyId is an uuid based on the experiment id (or name) and the survey name:
// this way, it is always the same within a given experiment
this._hasSelfGeneratedSurveyId = (typeof surveyId === "undefined");
const defaultSurveyId = (this._psychoJS.getEnvironment() === ExperimentHandler.Environment.SERVER) ?
util.makeUuid(`${name}@${this._psychoJS.config.gitlab.projectId}`) :
util.makeUuid(`${name}@${this._psychoJS.config.experiment.name}`);
// Storing all existing signaturePad questions to properly handle their resize.
// Unfortunately, the signaturepad question type can't handle resizing properly by itself.
this._signaturePads = [];
// whether the user is done with the survey, independently of whether the survey is completed:
this.isFinished = false;
// Accumulated completion flag that is being set after completion of one survey node.
// This flag allows to track completion progress while moving through the survey flow.
// Initially set to true and will be flipped if at least one of the survey nodes were not fully completed.
// accumulated completion flag updated after each survey node is completed
// note: this makes it possible to track completion as we move through the survey flow.
// _isCompletedAll will be flipped to false whenever a survey node is not completed
this._isCompletedAll = true;
// timestamps associated to each question:
@ -103,10 +99,9 @@ export class Survey extends VisualStim
// timestamps clock:
this._questionAnswerTimestampClock = new Clock();
this._totalSurveyResults = {};
this._overallSurveyResults = {};
this._surveyData = undefined;
this._surveyModel = undefined;
this._signaturePadRO = undefined;
this._expressionsRunner = undefined;
this._lastPageSwitchHandledIdx = -1;
this._variables = {};
@ -114,23 +109,36 @@ export class Survey extends VisualStim
this._surveyRunningPromise = undefined;
this._surveyRunningPromiseResolve = undefined;
this._surveyRunningPromiseReject = undefined;
// callback triggered when the user is done with the survey: nothing to do by default
this._onFinishedCallback = () => {};
// init SurveyJS
// init SurveyJS:
this._initSurveyJS();
// default size:
if (typeof size === "undefined")
{
this.size = (this.unit === "norm") ? [2.0, 2.0] : [1.0, 1.0];
}
this._addAttribute(
"model",
model
);
// the default surveyId is a uuid based on the experiment id (or name) and the survey name:
// this way, it is always the same within a given experiment
this._hasSelfGeneratedSurveyId = (typeof surveyId === "undefined");
const defaultSurveyId = (this._psychoJS.getEnvironment() === ExperimentHandler.Environment.SERVER) ?
util.makeUuid(`${name}@${this._psychoJS.config.gitlab.projectId}`) :
util.makeUuid(`${name}@${this._psychoJS.config.experiment.name}`);
this._addAttribute(
"surveyId",
surveyId,
defaultSurveyId
);
// estimate the bounding box:
this._estimateBoundingBox();
@ -213,7 +221,7 @@ export class Survey extends VisualStim
logs: []
};
this.psychoJS.logger.debug(`converted the old model to the new super-flow model: ${JSON.stringify(model)}`);
this.psychoJS.logger.debug(`converted the legacy model to the new super-flow model: ${JSON.stringify(model)}`);
}
this._surveyData = model;
@ -227,6 +235,24 @@ export class Survey extends VisualStim
}
}
/**
* Setter for the surveyId attribute.
*
* @param {string} surveyId - the survey Id
* @param {boolean} [log= false] - whether to log
* @return {void}
*/
setSurveyId(surveyId, log = false)
{
this._setAttribute("surveyId", surveyId, log);
// only update the model if a genuine surveyId was given as parameter to the Survey:
if (!this._hasSelfGeneratedSurveyId)
{
this.setModel(`${surveyId}.sid`, log);
}
}
/**
* Set survey variables.
*
@ -254,7 +280,8 @@ export class Survey extends VisualStim
{
if (excludedNames.indexOf(name) === -1)
{
this._surveyData.variables[name] = variables[name];
this._variables[name] = variables[name];
// this._surveyData.variables[name] = variables[name];
}
}
}
@ -282,22 +309,6 @@ export class Survey extends VisualStim
return this._surveyModel.runExpression(expression);
}
/**
* Setter for the surveyId attribute.
*
* @param {string} surveyId - the survey Id
* @param {boolean} [log= false] - whether to log
* @return {void}
*/
setSurveyId(surveyId, log = false)
{
this._setAttribute("surveyId", surveyId, log);
if (!this._hasSelfGeneratedSurveyId)
{
this.setModel(`${surveyId}.sid`, log);
}
}
/**
* Add a callback that will be triggered when the participant finishes the survey.
*
@ -336,7 +347,7 @@ export class Survey extends VisualStim
// return this._surveyModel.data;
return this._totalSurveyResults;
return this._overallSurveyResults;
}
/**
@ -374,7 +385,6 @@ export class Survey extends VisualStim
{}
);
// if the response cannot be uploaded, e.g. the experiment is running locally, or
// if it is in piloting mode, then we offer the response as a file for download:
if (this._psychoJS.getEnvironment() !== ExperimentHandler.Environment.SERVER ||
@ -420,9 +430,7 @@ export class Survey extends VisualStim
*/
hide()
{
// if the survey div exists, remove it:
const surveyId = `survey-${this._name}`;
const surveyDiv = document.getElementById(surveyId);
const surveyDiv = document.getElementById(this._surveyDivId);
if (surveyDiv !== null)
{
document.body.removeChild(surveyDiv);
@ -468,9 +476,9 @@ export class Survey extends VisualStim
this._needPixiUpdate = false;
// if a survey div does not exist, create it:
if (document.getElementById("_survey") === null)
if (document.getElementById(this._surveyDivId) === null)
{
document.body.insertAdjacentHTML("beforeend", "<div id='_survey' class='survey'></div>")
document.body.insertAdjacentHTML("beforeend", `<div id=${this._surveyDivId} class='survey'></div>`)
}
// start the survey flow:
@ -513,8 +521,7 @@ export class Survey extends VisualStim
*/
_registerCustomExpressionFunctions (Survey, customFuncs = [])
{
let i;
for (i = 0; i < customFuncs.length; i++)
for (let i = 0; i < customFuncs.length; i++)
{
Survey.FunctionFactory.Instance.register(customFuncs[i].func.name, customFuncs[i].func, customFuncs[i].isAsync);
}
@ -579,6 +586,7 @@ export class Survey extends VisualStim
this._questionAnswerTimestamps[questionData.name].timestamp = this._questionAnswerTimestampClock.getTime();
}
/*
// This probably needs to be moved to some kind of utils.js.
// https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle
_FisherYatesShuffle (targetArray = [])
@ -613,6 +621,7 @@ export class Survey extends VisualStim
return inOutArray;
}
*/
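For reference, a generic in-place Fisher–Yates shuffle of the kind these commented-out helpers implemented (and that util.shuffle presumably provides) can be sketched as follows; this is an illustration, not the exact Util.js implementation:
// shuffle array elements in place between startIndex and endIndex (inclusive)
function fisherYatesShuffle(array, startIndex = 0, endIndex = array.length - 1)
{
	for (let i = endIndex; i > startIndex; i--)
	{
		// pick a random index in [startIndex, i] and swap it into position i
		const j = startIndex + Math.floor(Math.random() * (i - startIndex + 1));
		[array[i], array[j]] = [array[j], array[i]];
	}
	return array;
}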
_composeModelWithRandomizedQuestions (surveyModel, inBlockRandomizationSettings)
{
@ -621,31 +630,32 @@ export class Survey extends VisualStim
// Hence creating a fresh survey data object with shuffled question order.
let questions = [];
let questionsMap = {};
let shuffledQuestions;
let newSurveyModel =
{
pages:[{ elements: new Array(inBlockRandomizationSettings.questionsPerPage) }]
};
let i, j, k;
for (i = 0; i < surveyModel.pages.length; i++)
for (let i = 0; i < surveyModel.pages.length; i++)
{
for (j = 0; j < surveyModel.pages[i].elements.length; j++)
for (let j = 0; j < surveyModel.pages[i].elements.length; j++)
{
questions.push(surveyModel.pages[i].elements[j]);
k = questions.length - 1;
const k = questions.length - 1;
questionsMap[questions[k].name] = questions[k];
}
}
if (inBlockRandomizationSettings.layout.length > 0)
{
j = 0;
k = 0;
let j = 0;
let k = 0;
let curPage = 0;
let curElement = 0;
const shuffledSet0 = this._FisherYatesShuffle(inBlockRandomizationSettings.set0);
const shuffledSet1 = this._FisherYatesShuffle(inBlockRandomizationSettings.set1);
for (i = 0; i < inBlockRandomizationSettings.layout.length; i++)
const shuffledSet0 = util.shuffle(Array.from(inBlockRandomizationSettings.set0));
const shuffledSet1 = util.shuffle(Array.from(inBlockRandomizationSettings.set1));
// const shuffledSet0 = this._FisherYatesShuffle(inBlockRandomizationSettings.set0);
// const shuffledSet1 = this._FisherYatesShuffle(inBlockRandomizationSettings.set1);
for (let i = 0; i < inBlockRandomizationSettings.layout.length; i++)
{
// Create new page if questionsPerPage reached.
if (curElement === inBlockRandomizationSettings.questionsPerPage)
@ -675,12 +685,14 @@ export class Survey extends VisualStim
else if (inBlockRandomizationSettings.showOnly > 0)
{
// TODO: Check whether questionsPerPage also applies in this case.
shuffledQuestions = this._FisherYatesShuffle(questions);
const shuffledQuestions = util.shuffle(Array.from(questions));
// shuffledQuestions = this._FisherYatesShuffle(questions);
newSurveyModel.pages[0].elements = shuffledQuestions.splice(0, inBlockRandomizationSettings.showOnly);
}
else {
// TODO: Check whether questionsPerPage also applies in this case.
newSurveyModel.pages[0].elements = this._FisherYatesShuffle(questions);
newSurveyModel.pages[0].elements = util.shuffle(Array.from(questions));
// newSurveyModel.pages[0].elements = this._FisherYatesShuffle(questions);
}
console.log("model recomposition took", performance.now() - t);
console.log("recomposed model:", newSurveyModel);
@ -714,12 +726,14 @@ export class Survey extends VisualStim
if (inQuestionRandomizationSettings.randomizeAll)
{
questionData[choicesFieldName] = this._FisherYatesShuffle(questionData[choicesFieldName]);
questionData[choicesFieldName] = util.shuffle(Array.from(questionData[choicesFieldName]));
// questionData[choicesFieldName] = this._FisherYatesShuffle(questionData[choicesFieldName]);
// Handle dynamic choices.
}
else if (inQuestionRandomizationSettings.showOnly > 0)
{
questionData[choicesFieldName] = this._FisherYatesShuffle(questionData[choicesFieldName]).splice(0, inQuestionRandomizationSettings.showOnly);
questionData[choicesFieldName] = util.shuffle(Array.from(questionData[choicesFieldName]).splice(0, inQuestionRandomizationSettings.showOnly));
// questionData[choicesFieldName] = this._FisherYatesShuffle(questionData[choicesFieldName]).splice(0, inQuestionRandomizationSettings.showOnly);
}
else if (inQuestionRandomizationSettings.reverse)
{
@ -739,8 +753,10 @@ export class Survey extends VisualStim
// Creating new array of choices to which we're going to write from randomized/reversed sets.
questionData[choicesFieldName] = new Array(inQuestionRandomizationSettings.layout.length);
const shuffledSet0 = this._FisherYatesShuffle(inQuestionRandomizationSettings.set0);
const shuffledSet1 = this._FisherYatesShuffle(inQuestionRandomizationSettings.set1);
const shuffledSet0 = util.shuffle(Array.from(inQuestionRandomizationSettings.set0));
const shuffledSet1 = util.shuffle(Array.from(inQuestionRandomizationSettings.set1));
// const shuffledSet0 = this._FisherYatesShuffle(inQuestionRandomizationSettings.set0);
// const shuffledSet1 = this._FisherYatesShuffle(inQuestionRandomizationSettings.set1);
const reversedSet = Math.round(Math.random()) === 1 ? inQuestionRandomizationSettings.reverseOrder.reverse() : inQuestionRandomizationSettings.reverseOrder;
for (i = 0; i < inQuestionRandomizationSettings.layout.length; i++)
{
@ -861,12 +877,12 @@ export class Survey extends VisualStim
if (skipLogic.destination === "ENDOFSURVEY")
{
surveyModel.setCompleted();
this._surveyRunningPromiseResolve(SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_SURVEY);
this._surveyRunningPromiseResolve(Survey.SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_SURVEY);
}
else if (skipLogic.destination === "ENDOFBLOCK")
{
surveyModel.setCompleted();
this._surveyRunningPromiseResolve(SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_BLOCK);
this._surveyRunningPromiseResolve(Survey.SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_BLOCK);
}
else
{
@ -896,13 +912,12 @@ export class Survey extends VisualStim
*
* @param surveyModel
* @param options
* @private
* @protected
*/
_onSurveyComplete(surveyModel, options)
{
Object.assign(this._totalSurveyResults, surveyModel.data);
this._detachResizeObservers();
let completionCode = SURVEY_COMPLETION_CODES.NORMAL;
Object.assign(this._overallSurveyResults, surveyModel.data);
let completionCode = Survey.SURVEY_COMPLETION_CODES.NORMAL;
const questions = surveyModel.getAllQuestions();
// It is guaranteed that the question with skip logic is always last on the page.
@ -916,12 +931,12 @@ export class Survey extends VisualStim
{
if (skipLogic.destination === "ENDOFSURVEY")
{
completionCode = SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_SURVEY;
completionCode = Survey.SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_SURVEY;
surveyModel.setCompleted();
}
else if (skipLogic.destination === "ENDOFBLOCK")
{
completionCode = SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_BLOCK;
completionCode = Survey.SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_BLOCK;
}
}
}
@ -976,22 +991,18 @@ export class Survey extends VisualStim
* Run the survey using flow data provided. This method runs recursively.
*
* @protected
* @param {string} surveyId - the id of the DOM div
* @param {Object} surveyData - surveyData / model.
* @param {Object} prevBlockResults - survey results gathered from running previous block of questions.
* @param {Object} surveyFlowBlock - XXX
* @return {void}
*/
_beginSurvey(surveyData, surveyFlowBlock)
{
let j;
let surveyIdx;
this._lastPageSwitchHandledIdx = -1;
surveyIdx = surveyFlowBlock.surveyIdx;
console.log("playing survey with idx", surveyIdx);
const surveyIdx = surveyFlowBlock.surveyIdx;
let surveyModelInput = this._processSurveyData(surveyData, surveyIdx);
this._surveyModel = new window.Survey.Model(surveyModelInput);
for (j in this._variables)
for (let j in this._variables)
{
// Adding variables directly to hash to get higher performance (this is instantaneous compared to .setVariable()).
// At this stage we don't care to trigger all the callbacks like .setVariable() does, since this is the very beginning of the survey presentation.
@ -1010,7 +1021,7 @@ export class Survey extends VisualStim
this._surveyModel.onAfterRenderQuestion.add(this._handleAfterQuestionRender.bind(this));
}
const completeText = surveyIdx < this._surveyData.surveys.length - 1 ? (this._surveyModel.pageNextText || CAPTIONS.NEXT) : undefined;
const completeText = surveyIdx < this._surveyData.surveys.length - 1 ? (this._surveyModel.pageNextText || Survey.CAPTIONS.NEXT) : undefined;
jQuery(".survey").Survey({
model: this._surveyModel,
showItemsInOrder: "column",
@ -1033,15 +1044,11 @@ export class Survey extends VisualStim
async _runSurveyFlow(surveyBlock, surveyData, prevBlockResults = {})
{
// let surveyBlock;
let surveyIdx;
let surveyCompletionCode;
let nodeExitCode = NODE_EXIT_CODES.NORMAL;
let i, j;
let nodeExitCode = Survey.NODE_EXIT_CODES.NORMAL;
if (surveyBlock.type === Survey.SURVEY_FLOW_PLAYBACK_TYPES.CONDITIONAL)
{
const dataset = Object.assign({}, this._totalSurveyResults, this._variables);
const dataset = Object.assign({}, this._overallSurveyResults, this._variables);
this._expressionsRunner.expressionExecutor.setExpression(surveyBlock.condition);
if (this._expressionsRunner.run(dataset) && surveyBlock.nodes[0] !== undefined)
{
@ -1054,13 +1061,14 @@ export class Survey extends VisualStim
}
else if (surveyBlock.type === Survey.SURVEY_FLOW_PLAYBACK_TYPES.RANDOMIZER)
{
this._InPlaceFisherYatesShuffle(surveyBlock.nodes, 0, surveyBlock.nodes.length - 1);
util.shuffle(surveyBlock.nodes, Math.random, 0, surveyBlock.nodes.length - 1);
// this._InPlaceFisherYatesShuffle(surveyBlock.nodes, 0, surveyBlock.nodes.length - 1);
}
else if (surveyBlock.type === Survey.SURVEY_FLOW_PLAYBACK_TYPES.EMBEDDED_DATA)
{
let t = performance.now();
const surveyBlockData = surveyData.embeddedData[surveyBlock.dataIdx];
for (j = 0; j < surveyBlockData.length; j++)
for (let j = 0; j < surveyBlockData.length; j++)
{
// TODO: handle the rest of the data types.
if (surveyBlockData[j].type === "Custom")
@ -1089,28 +1097,28 @@ export class Survey extends VisualStim
this._surveyModel.setCompleted();
}
console.log("EndSurvey block encountered, exiting.");
nodeExitCode = NODE_EXIT_CODES.BREAK_FLOW;
nodeExitCode = Survey.NODE_EXIT_CODES.BREAK_FLOW;
}
else if (surveyBlock.type === Survey.SURVEY_FLOW_PLAYBACK_TYPES.DIRECT)
{
surveyCompletionCode = await this._beginSurvey(surveyData, surveyBlock);
const surveyCompletionCode = await this._beginSurvey(surveyData, surveyBlock);
Object.assign({}, prevBlockResults, this._surveyModel.data);
// SkipLogic had destination set to ENDOFSURVEY.
if (surveyCompletionCode === SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_SURVEY)
if (surveyCompletionCode === Survey.SURVEY_COMPLETION_CODES.SKIP_TO_END_OF_SURVEY)
{
nodeExitCode = NODE_EXIT_CODES.BREAK_FLOW;
nodeExitCode = Survey.NODE_EXIT_CODES.BREAK_FLOW;
}
}
if (nodeExitCode === NODE_EXIT_CODES.NORMAL &&
if (nodeExitCode === Survey.NODE_EXIT_CODES.NORMAL &&
surveyBlock.type !== Survey.SURVEY_FLOW_PLAYBACK_TYPES.CONDITIONAL &&
surveyBlock.nodes instanceof Array)
{
for (i = 0; i < surveyBlock.nodes.length; i++)
for (let i = 0; i < surveyBlock.nodes.length; i++)
{
nodeExitCode = await this._runSurveyFlow(surveyBlock.nodes[i], surveyData, prevBlockResults);
if (nodeExitCode === NODE_EXIT_CODES.BREAK_FLOW)
if (nodeExitCode === Survey.NODE_EXIT_CODES.BREAK_FLOW)
{
break;
}
@ -1131,53 +1139,64 @@ export class Survey extends VisualStim
this._lastPageSwitchHandledIdx = -1;
}
_handleSignaturePadResize (entries)
_handleWindowResize(e)
{
let signatureCanvas;
let q;
let i;
for (i = 0; i < entries.length; i++)
if (this._surveyModel)
{
signatureCanvas = entries[i].target.querySelector("canvas");
q = this._surveyModel.getQuestionByName(entries[i].target.dataset.name);
q.signatureWidth = Math.min(q.maxSignatureWidth, entries[i].contentBoxSize[0].inlineSize);
for (let i = this._signaturePads.length - 1; i >= 0; i--)
{
// As of writing this (24.03.2023), SurveyJS doesn't have a proper event
// for a question being removed from nested locations, such as a dynamic panel.
// However, SurveyJS will set the .signaturePad property to null once the question is removed.
// We use this to keep our list of signature pads in sync.
if (this._signaturePads[ i ].question.signaturePad)
{
this._signaturePads[ i ].question.signatureWidth = Math.min(
this._signaturePads[i].question.maxSignatureWidth,
this._signaturePads[ i ].htmlElement.getBoundingClientRect().width
);
}
else
{
// Signature pad was removed. Syncing list.
this._signaturePads.splice(i, 1);
}
}
}
}
_addEventListeners ()
_addEventListeners()
{
this._signaturePadRO = new ResizeObserver(this._handleSignaturePadResize.bind(this));
window.addEventListener("resize", (e) => this._handleWindowResize(e));
}
_handleAfterQuestionRender (sender, options)
{
if (options.question.getType() === "signaturepad")
{
this._signaturePadRO.observe(options.htmlElement);
this._signaturePads.push(options);
options.question.signatureWidth = Math.min(options.question.maxSignatureWidth, options.htmlElement.getBoundingClientRect().width);
}
}
_detachResizeObservers ()
{
this._signaturePadRO.disconnect();
}
/**
* Init the SurveyJS.io library.
* Init the SurveyJS.io library and various extensions, setup the theme.
*
* @protected
*/
_initSurveyJS()
{
// load the Survey.js libraries, if necessary:
// TODO
// note: the Survey.js libraries must be added to the list of resources in PsychoJS.start:
// psychoJS.start({ resources: [ {'surveyLibrary': true}, ... ], ...});
// id of the SurveyJS html div:
this._surveyDivId = `survey-${this._name}`;
// load the PsychoJS SurveyJS extensions:
this._expressionsRunner = new window.Survey.ExpressionRunner();
this._registerCustomExpressionFunctions(window.Survey, customExpressionFunctionsArray);
this._registerWidgets(window.Survey);
this._registerCustomSurveyProperties(window.Survey);
this._addEventListeners();
this._expressionsRunner = new window.Survey.ExpressionRunner();
// setup the survey theme:
window.Survey.Serializer.getProperty("expression", "minWidth").defaultValue = "100px";

View File

@ -52,6 +52,7 @@ export class TextBox extends util.mix(VisualStim).with(ColorMixin)
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.fitToContent = false] - whether or not to resize itself automatically to fit the text content
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor(
{
@ -86,11 +87,13 @@ export class TextBox extends util.mix(VisualStim).with(ColorMixin)
clipMask,
autoDraw,
autoLog,
fitToContent
fitToContent,
draggable,
boxFn
} = {},
)
{
super({ name, win, pos, anchor, size, units, ori, opacity, depth, clipMask, autoDraw, autoLog });
super({ name, win, pos, anchor, size, units, ori, opacity, depth, clipMask, autoDraw, autoLog, draggable });
this._addAttribute(
"text",
@ -202,12 +205,14 @@ export class TextBox extends util.mix(VisualStim).with(ColorMixin)
// and setSize called from super class would not have a proper effect
this.setSize(size);
this._addAttribute("boxFn", boxFn, null);
// estimate the bounding box:
this._estimateBoundingBox();
if (this._autoLog)
{
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${this.toString()}`);
this._psychoJS.experimentLogger.exp(`Created ${this.name} = ${util.toString(this)}`);
}
}
@ -481,6 +486,26 @@ export class TextBox extends util.mix(VisualStim).with(ColorMixin)
alignmentStyles = ["center", "center"];
}
let box;
if (this._boxFn !== null)
{
box = this._boxFn;
}
else
{
// note: box style properties eventually become PIXI.Graphics settings, so same syntax applies
box = {
fill: new Color(this._fillColor).int,
alpha: this._fillColor === undefined || this._fillColor === null ? 0 : 1,
rounded: 5,
stroke: {
color: new Color(this._borderColor).int,
width: borderWidth_px,
alpha: this._borderColor === undefined || this._borderColor === null ? 0 : 1
}
};
}
return {
// input style properties eventually become CSS, so same syntax applies
input: {
@ -504,41 +529,7 @@ export class TextBox extends util.mix(VisualStim).with(ColorMixin)
overflow: "hidden",
pointerEvents: "none"
},
// box style properties eventually become PIXI.Graphics settings, so same syntax applies
box: {
fill: new Color(this._fillColor).int,
alpha: this._fillColor === undefined || this._fillColor === null ? 0 : 1,
rounded: 5,
stroke: {
color: new Color(this._borderColor).int,
width: borderWidth_px,
alpha: this._borderColor === undefined || this._borderColor === null ? 0 : 1
},
/*default: {
fill: new Color(this._fillColor).int,
rounded: 5,
stroke: {
color: new Color(this._borderColor).int,
width: borderWidth_px
}
},
focused: {
fill: new Color(this._fillColor).int,
rounded: 5,
stroke: {
color: new Color(this._borderColor).int,
width: borderWidth_px
}
},
disabled: {
fill: new Color(this._fillColor).int,
rounded: 5,
stroke: {
color: new Color(this._borderColor).int,
width: borderWidth_px
}
}*/
},
box
};
}
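A hedged sketch of the new boxFn option: the value is forwarded as-is as the TextInput box setting in place of the default object above, so it can presumably also be a drawing function; the (width, height, state) signature shown here is an assumption, not confirmed by this diff:
// assumption: a function returning a PIXI.Graphics is accepted as the "box" setting
const customBox = (width, height, state) =>
{
	const graphics = new PIXI.Graphics();
	graphics.beginFill(0x222222);                       // dark background
	graphics.drawRoundedRect(0, 0, width, height, 12);  // rounded corners
	graphics.endFill();
	return graphics;
};
const feedbackBox = new TextBox({
	name: "feedbackBox",
	win: win,               // assumed: an already-open psychoJS Window
	text: "Well done!",
	size: [0.6, 0.2],
	units: "height",
	boxFn: customBox        // overrides the default fill/stroke box built above
});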

View File

@ -49,6 +49,7 @@ export class TextStim extends util.mix(VisualStim).with(ColorMixin)
* @param {PIXI.Graphics} [options.clipMask= null] - the clip mask
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor(
{
@ -75,10 +76,11 @@ export class TextStim extends util.mix(VisualStim).with(ColorMixin)
clipMask,
autoDraw,
autoLog,
draggable
} = {},
)
{
super({ name, win, units, ori, opacity, depth, pos, anchor, clipMask, autoDraw, autoLog });
super({ name, win, units, ori, opacity, depth, pos, anchor, clipMask, autoDraw, autoLog, draggable });
// callback to deal with text metrics invalidation:
const onChange = (withPixi = false, withBoundingBox = false, withMetrics = false) =>

View File

@ -35,8 +35,9 @@ export class VisualStim extends util.mix(MinimalStim).with(WindowMixin)
* @param {PIXI.Graphics} [options.clipMask= null] - the clip mask
* @param {boolean} [options.autoDraw= false] - whether or not the stimulus should be automatically drawn on every frame flip
* @param {boolean} [options.autoLog= false] - whether or not to log
* @param {boolean} [options.draggable= false] - whether or not to make stim draggable with mouse/touch/other pointer device
*/
constructor({ name, win, units, ori, opacity, depth, pos, anchor, size, clipMask, autoDraw, autoLog } = {})
constructor({ name, win, units, ori, opacity, depth, pos, anchor, size, clipMask, autoDraw, autoLog, draggable } = {})
{
super({ win, name, autoDraw, autoLog });
@ -84,6 +85,12 @@ export class VisualStim extends util.mix(MinimalStim).with(WindowMixin)
null,
this._onChange(false, false),
);
this._addAttribute("draggable", draggable, false);
// data needed to properly support drag and drop functionality
this._associatedPointerId = undefined;
this._initialPointerOffset = [0, 0];
this._pointerEventHandlersUuids = {};
// bounding box of the stimulus, in stimulus units
// note: boundingBox does not take the orientation into account
@ -96,6 +103,14 @@ export class VisualStim extends util.mix(MinimalStim).with(WindowMixin)
this._needPixiUpdate = true;
}
/**
* Whether or not the stimulus is being dragged by a pointer. Works in conjunction with the draggable attribute.
*/
get isDragging()
{
return this._associatedPointerId !== undefined;
}
/**
* Force a refresh of the stimulus.
*
@ -179,15 +194,45 @@ export class VisualStim extends util.mix(MinimalStim).with(WindowMixin)
}
}
/**
* Setter for the draggable attribute.
*
* @name module:visual.VisualStim#setDraggable
* @public
* @param {boolean} [draggable=false] - whether or not to make stim draggable using mouse/touch/other pointer device
* @param {boolean} [log= false] - whether or not to log
*/
setDraggable(draggable = false, log = false)
{
const hasChanged = this._setAttribute("draggable", draggable, log);
if (hasChanged)
{
if (draggable)
{
this._pointerEventHandlersUuids[ "pointerdown" ] = this._win.on("pointerdown", this._handlePointerDown.bind(this));
this._pointerEventHandlersUuids[ "pointerup" ] = this._win.on("pointerup", this._handlePointerUp.bind(this));
this._pointerEventHandlersUuids[ "pointermove" ] = this._win.on("pointermove", this._handlePointerMove.bind(this));
}
else
{
this._win.off("pointerdown", this._pointerEventHandlersUuids[ "pointerdown" ]);
this._win.off("pointerup", this._pointerEventHandlersUuids[ "pointerup" ]);
this._win.off("pointermove", this._pointerEventHandlersUuids[ "pointermove" ]);
}
}
}
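A brief usage sketch for the new draggable attribute, assuming an already-open psychoJS Window named win and an image resource name that is purely illustrative; the on() calls rely on the library's EventEmitter interface implied by the emit() calls in the pointer handlers further down:
// any VisualStim subclass can now be made draggable at construction time, or later via stim.draggable = true
const token = new ImageStim({ name: "token", win: win, image: "token.png", draggable: true });
token.setAutoDraw(true);
// the stimulus re-emits pointer events while a drag is in progress:
token.on("pointermove", () => console.log("token dragged to", token.pos));
token.on("pointerup", () => console.log("drag finished, isDragging =", token.isDragging));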
/**
* Setter for the depth attribute.
*
* @param {number} depth - the order in which the stimulus is rendered, akin to CSS's z-index but with a negative sign.
* @param {boolean} [log= false] - whether or not to log
*/
setDepth (depth = 0, log = false) {
setDepth(depth = 0, log = false)
{
this._setAttribute("depth", depth, log);
if (this._pixi) {
if (this._pixi)
{
this._pixi.zIndex = -this._depth;
}
}
@ -217,6 +262,93 @@ export class VisualStim extends util.mix(MinimalStim).with(WindowMixin)
return this._getBoundingBox_px().contains(objectPos_px[0], objectPos_px[1]);
}
/**
* Determine whether a point that is known to be in pixel units lies within the bounding box of the stimulus.
*
* @name module:visual.VisualStim#containsPointPx
* @public
* @param {number[]} point_px - the point in pixels
* @return {boolean} whether or not the point is inside the bounding box of the stimulus
*/
containsPointPx (point_px)
{
return this._getBoundingBox_px().contains(point_px[0], point_px[1]);
}
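A short sketch of when containsPointPx is useful compared to contains(): it skips the unit conversion and takes a centred pixel coordinate directly, for instance one derived from a raw pointer event in the same way as the pointer handlers further down in this class (stim is an assumed existing VisualStim, and parent-container offsets are ignored in this simplified illustration):
// convert a browser pointer event to the centred, y-up pixel coordinates used by the stimulus
window.addEventListener("pointerdown", (e) =>
{
	const point_px = [
		e.pageX - stim.win.size[0] * 0.5,
		-(e.pageY - stim.win.size[1] * 0.5)
	];
	if (stim.containsPointPx(point_px))
	{
		console.log("pointer went down inside", stim.name);
	}
});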
/**
* Release the PIXI representation, if there is one.
*
* @name module:core.VisualStim#release
* @function
* @public
*
* @param {boolean} [log= false] - whether or not to log
*/
release(log = false)
{
this.draggable = false;
super.release(log);
}
/**
* Handler of pointerdown event.
*
* @name module:visual.VisualStim#_handlePointerDown
* @private
* @param {Object} e - pointerdown event data.
*/
_handlePointerDown (e)
{
if (e.pixi === undefined || e.pixi !== this._pixi)
{
return;
}
let relativePos = [];
let pixPos = util.to_unit(this._pos, this._units, this._win, "pix");
relativePos[0] = e.originalEvent.pageX - this._win.size[0] * 0.5 - this._pixi.parent.position.x;
relativePos[1] = -(e.originalEvent.pageY - this._win.size[1] * 0.5) - this._pixi.parent.position.y;
this._associatedPointerId = e.originalEvent.pointerId;
this._initialPointerOffset[0] = relativePos[0] - pixPos[0];
this._initialPointerOffset[1] = relativePos[1] - pixPos[1];
this.emit("pointerdown", e);
}
/**
* Handler of pointerup event.
*
* @name module:visual.VisualStim#_handlePointerUp
* @private
* @param {Object} e - pointerup event data.
*/
_handlePointerUp (e)
{
if (e.originalEvent.pointerId === this._associatedPointerId)
{
this._associatedPointerId = undefined;
this._initialPointerOffset.fill(0);
this.emit("pointerup", e);
}
}
/**
* Handler of pointermove event.
*
* @name module:visual.VisualStim#_handlePointerMove
* @private
* @param {Object} e - pointermove event data.
*/
_handlePointerMove (e)
{
if (e.originalEvent.pointerId === this._associatedPointerId)
{
let newPos = [];
newPos[ 0 ] = e.originalEvent.pageX - this._win.size[ 0 ] * 0.5 - this._pixi.parent.position.x - this._initialPointerOffset[ 0 ];
newPos[ 1 ] = -(e.originalEvent.pageY - this._win.size[ 1 ] * 0.5) - this._pixi.parent.position.y - this._initialPointerOffset[ 1 ];
this.setPos(util.to_unit(newPos, "pix", this._win, this._units));
this.emit("pointermove", e);
}
}
/**
* Setter for the anchor attribute.
*
@ -261,6 +393,7 @@ export class VisualStim extends util.mix(MinimalStim).with(WindowMixin)
{
anchor[0] = 1.0;
}
if (anchorText.indexOf("top") > -1)
{
anchor[1] = 0.0;

View File

@ -0,0 +1,94 @@
/**
* Provides a class to work with Youtube Iframe API. See https://developers.google.com/youtube/iframe_api_reference
*
* @author Nikita Agafonov
* @version 2023.2.0
* @copyright (c) 2017-2020 Ilixa Ltd. (http://ilixa.com) (c) 2020-2023 Open Science Tools Ltd. (https://opensciencetools.org)
* @license Distributed under the terms of the MIT License
*
*/
import * as util from "../util/Util.js";
class YoutubeIframeAPI
{
constructor ()
{
this.isReady = false;
this._initResolver = undefined;
this._initPromise = undefined;
}
_onYoutubeIframeAPIReady ()
{
this.isReady = true;
this._initResolver();
}
async init ()
{
if (this.isReady)
{
return Promise.resolve();
}
// If init is in progress but not done yet, return the promise.
// This is the case when multiple movie stims are created simultaneously.
if (this._initPromise)
{
return this._initPromise;
}
// Called by Youtube script.
window.onYouTubeIframeAPIReady = this._onYoutubeIframeAPIReady.bind(this);
let el = document.createElement("script");
el.src = "https://www.youtube.com/iframe_api";
let firstScriptTag = document.getElementsByTagName("script")[0];
firstScriptTag.parentNode.insertBefore(el, firstScriptTag);
this._initPromise = new Promise((res, rej) => {
this._initResolver = res;
});
return this._initPromise;
}
createPlayer (params = {})
{
const uuid = util.makeUuid();
document.body.insertAdjacentHTML("beforeend",
`<div class="yt-player-wrapper">
<div id="yt-iframe-placeholder-${uuid}" class="yt-iframe"></div>
</div>`);
document.querySelector(`#yt-iframe-placeholder-${uuid}`).parentElement.classList.add("inprogress");
const originalOnready = params.events.onReady;
params.events.onReady = (event) =>
{
document.querySelector(`#yt-iframe-placeholder-${uuid}`).parentElement.classList.remove("inprogress");
if (typeof originalOnready === "function")
{
originalOnready(event);
}
};
const ytPlayer = new YT.Player(`yt-iframe-placeholder-${uuid}`,
params
);
return ytPlayer;
}
destroyPlayer (ytPlayer)
{
const elementId = ytPlayer.getIframe().id;
ytPlayer.destroy();
// At this point the YouTube API has destroyed the player and restored the placeholder div in its place. Clean up the wrapper element.
document.getElementById(elementId).parentElement.remove();
}
}
const YTAPISingleTon = new YoutubeIframeAPI();
export default YTAPISingleTon;
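A minimal sketch of how this singleton might be consumed, e.g. by a movie stimulus backed by YouTube; the videoId, playerVars and handlers are illustrative, and the parameter object follows the public YouTube Iframe API:
import YoutubeIframeAPI from "./YoutubeIframeAPI.js";
async function playYoutubeClip()
{
	// injects the iframe_api script once and resolves when window.YT is ready
	await YoutubeIframeAPI.init();
	const player = YoutubeIframeAPI.createPlayer({
		videoId: "M7lc1UVf-VE",                        // illustrative video id
		playerVars: { autoplay: 1, controls: 0 },
		events: {
			onReady: () => console.log("YouTube player ready"),
			onStateChange: (event) => console.log("player state:", event.data)
		}
	});
	// later, when the stimulus is released:
	// YoutubeIframeAPI.destroyPlayer(player);
}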

View File

@ -2,6 +2,7 @@ export * from "./ButtonStim.js";
export * from "./Form.js";
export * from "./ImageStim.js";
export * from "./GratingStim.js";
export * from "./GifStim.js";
export * from "./MovieStim.js";
export * from "./Polygon.js";
export * from "./Rect.js";
@ -13,3 +14,5 @@ export * from "./TextStim.js";
export * from "./VisualStim.js";
export * from "./FaceDetector.js";
export * from "./Survey.js";
export * from "./ParticleEmitter.js";
export * from "./Progress.js";

View File

@ -95,18 +95,11 @@ class MaxDiffMatrix
question.setCssRoot(rootClass);
question.cssClasses.mainRoot = rootClass;
}
let html;
let headerCells = "";
let subHeaderCells = "";
let bodyCells = "";
let bodyHTML = "";
let cellGenerator;
let i, j;
// Relying on the fact that there are always 2 columns.
// This is correct according to the current Qualtrics design for MaxDiff matrices.
// Header generation
headerCells =
let headerCells =
`<th class="${CSS_CLASSES.TABLE_HEADER_CELL}">${question.columns[0].text}</th>
<td></td>
<td></td>
@ -114,9 +107,10 @@ class MaxDiffMatrix
<th class="${CSS_CLASSES.TABLE_HEADER_CELL}">${question.columns[1].text}</th>`;
// Body generation
for (i = 0; i < question.rows.length; i++)
let bodyHTML = "";
for (let i = 0; i < question.rows.length; i++)
{
bodyCells =
const bodyCells =
`<td class="${CSS_CLASSES.TABLE_CELL}">
<label class="${CSS_CLASSES.LABEL}">
<input type="radio" class="${CSS_CLASSES.ITEM_VALUE}" name="${question.rows[i].value}" data-column=${question.columns[0].value}>
@ -135,7 +129,7 @@ class MaxDiffMatrix
bodyHTML += `<tr class="${CSS_CLASSES.TABLE_ROW}">${bodyCells}</tr>`;
}
html = `<table class="${CSS_CLASSES.TABLE}">
let html = `<table class="${CSS_CLASSES.TABLE}">
<thead>
<tr>${headerCells}</tr>
</thead>
@ -147,14 +141,15 @@ class MaxDiffMatrix
let inputDOMS = el.querySelectorAll("input");
for (i = 0; i < inputDOMS.length; i++)
for (let i = 0; i < inputDOMS.length; i++)
{
inputDOMS[i].addEventListener("input", this._bindedHandlers._handleInput);
}
}
}
export default function init (Survey) {
export default function init (Survey)
{
var widget = {
// the widget name. It should be unique and written in lowercase.
name: "maxdiffmatrix",

40
vite.config.js Normal file
View File

@ -0,0 +1,40 @@
import glsl from "vite-plugin-glsl"
const fileName = `psychojs-${process.env.npm_package_version}`;
export default {
root: "./src/",
base: "./",
build:
{
outDir: "../out",
emptyOutDir: true,
sourcemap: true,
minify: false,
cssCodeSplit: true,
lib:
{
name: "psychojs",
fileName,
entry: ["index.js", "index.css"]
},
// rollupOptions:
// {
// // make sure to externalize deps that shouldn't be bundled
// // into your library
// external: ['vue'],
// output:
// {
// // Provide global variables to use in the UMD build
// // for externalized deps
// globals: {
// vue: 'Vue',
// },
// },
// }
},
plugins:
[
glsl()
]
}