mirror of
https://github.com/jspsych/jsPsych.git
synced 2025-05-10 11:10:54 +00:00
Merge branch 'master' of https://github.com/jspsych/jsPsych
This commit is contained in:
commit
c72ee5f17a
@ -205,7 +205,7 @@ var block = {
|
||||
choices: ['y', 'n'],
|
||||
prompt: '<p>Press "y" to Continue. Press "n" to end this node of the experiment.</p>',
|
||||
on_finish: function(data) {
|
||||
if (data.key_press == 'n') {
|
||||
if (data.response == 'n') {
|
||||
jsPsych.endCurrentTimeline();
|
||||
}
|
||||
},
|
||||
@ -258,7 +258,7 @@ var trial = {
|
||||
choices: ['y', 'n']
|
||||
prompt: '<p>Press "y" to Continue. Press "n" to end the experiment</p>',
|
||||
on_finish: function(data){
|
||||
if(data.key_press == "n"){
|
||||
if(data.response == "n"){
|
||||
jsPsych.endExperiment('The experiment was ended by pressing "n".');
|
||||
}
|
||||
}
|
||||
@ -390,6 +390,7 @@ The settings object can contain several parameters. The only *required* paramete
|
||||
| minimum_valid_rt | numeric | The minimum valid response time for key presses during the experiment. Any key press response time that is less than this value will be treated as invalid and ignored. Note that this parameter only applies to _keyboard responses_, and not to other response types such as buttons and sliders. The default value is 0. |
|
||||
| override_safe_mode | boolean | Running a jsPsych experiment directly in a web browser (e.g., by double clicking on a local HTML file) will load the page using the `file://` protocol. Some features of jsPsych don't work with this protocol. By default, when jsPsych detects that it's running on a page loaded via the `file://` protocol, it runs in _safe mode_, which automatically disables features that don't work in this context. Specifically, the use of Web Audio is disabled (audio will be played using HTML5 audio instead, even if `use_webaudio` is `true`) and video preloading is disabled. The `override_safe_mode` parameter defaults to `false`, but you can set it to `true` to force these features to operate under the `file://` protocol. In order for this to work, you will need to disable web security (CORS) features in your browser - this is safe to do if you know what you are doing. Note that this parameter has no effect when you are running the experiment on a web server, because the page will be loaded via the `http://` or `https://` protocol. |
|
||||
| case_sensitive_responses | boolean | If true, then jsPsych will make a distinction between uppercase and lowercase keys when evaluating keyboard responses, e.g. "A" (uppercase) will not be recognized as a valid response if the trial only accepts "a" (lowercase). If false, then jsPsych will not make a distinction between uppercase and lowercase keyboard responses, e.g. both "a" and "A" responses will be valid when the trial's key choice parameter is "a". Setting this parameter to false is useful if you want key responses to be treated the same way when CapsLock is turned on or the Shift key is held down. The default value is false. |
|
||||
extensions | array | Array containing information about one or more jsPsych extensions that are used during the experiment. Each extension should be specified as an object with `type` (required), which is the name of the extension, and `params` (optional), which is an object containing any parameter-value pairs to be passed to the extension's `initialize` function. Default value is an empty array. |
|
||||
|
||||
Possible values for the exclusions parameter above.
|
||||
|
||||
@ -465,7 +466,7 @@ var trial = {
|
||||
stimulus: 'Press p to take a 30 second break. Otherwise, press c to continue immediately.',
|
||||
choices: ['p','c'],
|
||||
on_finish: function(data){
|
||||
if(data.key_press == "p") {
|
||||
if(data.response == "p") {
|
||||
jsPsych.pauseExperiment();
|
||||
setTimeout(jsPsych.resumeExperiment, 30000);
|
||||
}
|
||||
@ -534,7 +535,7 @@ var trial = {
|
||||
stimulus: 'Press p to take a 30 second break. Otherwise, press c to continue immediately.',
|
||||
choices: ['p','c'],
|
||||
on_finish: function(data){
|
||||
if(data.key_press == "p") {
|
||||
if(data.response == "p") {
|
||||
jsPsych.pauseExperiment();
|
||||
setTimeout(jsPsych.resumeExperiment, 30000);
|
||||
}
|
||||
|
83
docs/extensions/extensions.md
Normal file
83
docs/extensions/extensions.md
Normal file
@ -0,0 +1,83 @@
|
||||
# Extensions
|
||||
|
||||
Extensions are jsPsych modules that can interface with any plugin to extend the functionality of the plugin. A canonical example of an extension is eye tracking. An eye tracking extension allows a plugin to gather gaze data and add it to the plugin's data object.
|
||||
|
||||
## Using an Extension
|
||||
|
||||
To use an extension in an experiment, you'll load the extension file via a `<script>` tag (just like adding a plugin) and then initialize the extension in the parameters of `jsPsych.init()`.
|
||||
|
||||
```html
|
||||
<head>
|
||||
<script src="jspsych/jspsych.js"></script>
|
||||
<script src="jspsych/extensions/some-extension.js"></script>
|
||||
</head>
|
||||
```
|
||||
|
||||
```js
|
||||
jsPsych.init({
|
||||
timeline: [...],
|
||||
extensions: [
|
||||
{type: 'some-extension', params: {...} }
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
To enable an extension during a trial, add the extension to the `extensions` list for the trial. Some extensions may also support or require an object of parameters to configure the extension:
|
||||
|
||||
```js
|
||||
var trial = {
|
||||
extensions: [
|
||||
{type: 'some-extension', params: {...} }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## List of Extensions
|
||||
|
||||
Extension | Description
|
||||
------ | -----------
|
||||
[jspsych-ext-webgazer.js](/extensions/jspsych-ext-webgazer.md) | Enables eye tracking using the [WebGazer](https://webgazer.cs.brown.edu/) library.
|
||||
|
||||
## Writing an Extension
|
||||
|
||||
To create a new extension you must create an object that supports a few event callbacks. A barebones extension file looks like this:
|
||||
|
||||
```js
|
||||
jsPsych.extensions['new-extension'] = (function () {
|
||||
|
||||
var extension = {};
|
||||
|
||||
extension.initialize = function(params){
|
||||
// params are passed from the extensions parameter in jsPsych.init
|
||||
}
|
||||
|
||||
extension.on_start = function(params){
|
||||
// params are passed from the extensions parameter in the trial object
|
||||
}
|
||||
|
||||
extension.on_load = function(params){
|
||||
// params are passed from the extensions parameter in the trial object
|
||||
}
|
||||
|
||||
extension.on_finish = function(params){
|
||||
// params are passed from the extensions parameter in the trial object
|
||||
return {
|
||||
// any data that the extension returns here will be added to the trial data
|
||||
}
|
||||
}
|
||||
|
||||
return extension;
|
||||
});
|
||||
```
|
||||
|
||||
The four events that an extension must support are shown in the sample code.
|
||||
|
||||
`extension.initialize` is called with `jsPsych.init()`. This is where setup code for the extension can happen. This event will happen once per experiment, unlike the other events which occur with each trial. The `params` object can include whatever parameters are necessary to configure the extension. The `params` object is passed from the call to `jsPsych.init()` to the `extension.initialize` method. `extension.initialize` must return a `Promise` that resolves when the extension is finished initializing.
|
||||
|
||||
`extension.on_start` is called at the start of the plugin execution, prior to calling `plugin.trial`. This is where trial-specific initialization can happen, such as creating empty containers to hold data or resetting internal state. The `params` object is passed from the declaration of the extension in the trial object. You can use `params` to customize the behavior of the extension for each trial.
|
||||
|
||||
`extension.on_load` is called after `plugin.trial` has executed, which is typically when the plugin has finished executing initial DOM-modifying code and has set up various event listeners. This is where the extension can begin actively interacting with the DOM and recording data. The `params` object is passed from the declaration of the extension in the trial object. You can use `params` to customize the behavior of the extension for each trial.
|
||||
|
||||
`extension.on_finish` is called after the plugin completes. This can be used for any teardown at the end of the trial. This method should return an object of data to append to the plugin's data. Note that this event fires *before* the `on_finish` event for the plugin, so data added by the extension is accessible in any trial `on_finish` event handlers. The `params` object is passed from the declaration of the extension in the trial object. You can use `params` to customize the behavior of the extension for each trial.
|
||||
|
||||
The extension can also include any additional methods that are necessary for interacting with it. See the [webgazer extension](/extensions/jspsych-ext-webgazer.md) for an example.
|
106
docs/extensions/jspsych-ext-webgazer.md
Normal file
106
docs/extensions/jspsych-ext-webgazer.md
Normal file
@ -0,0 +1,106 @@
|
||||
# jspsych-ext-webgazer
|
||||
|
||||
This extension supports eye tracking through the [WebGazer](https://webgazer.cs.brown.edu/) library. For a narrative description of how to use this extension see the [eye tracking overview](/overview/eye-tracking.md).
|
||||
|
||||
## Parameters
|
||||
|
||||
### Initialization Parameters
|
||||
|
||||
Initialization parameters can be set when calling `jsPsych.init()`
|
||||
|
||||
```js
|
||||
jsPsych.init({
|
||||
extensions: [
|
||||
{type: 'webgazer', params: {...}}
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
Parameter | Type | Default Value | Description
|
||||
----------|------|---------------|------------
|
||||
webgazer | object | `undefined` | You can explicitly pass a reference to a loaded instance of the webgazer.js library. If no explicit reference is passed then the extension will look for a global `webgazer` object. If you are loading webgazer.js via a `<script>` tag you do not need to set this parameter in most circumstances.
|
||||
round_predictions | bool | true | Whether to round the `x`,`y` coordinates predicted by WebGazer to the nearest whole number. This *greatly* reduces the size of the data, as WebGazer records data to 15 decimal places by default. Given the noise of the system, there's really no need to record data to this level of precision.
|
||||
|
||||
### Trial Parameters
|
||||
|
||||
Trial parameters can be set when adding the extension to a trial object.
|
||||
|
||||
```js
|
||||
var trial = {
|
||||
type: '...',
|
||||
extensions: [
|
||||
{type: 'webgazer', params: {...}}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Parameter | Type | Default Value | Description
|
||||
----------|------|---------------|------------
|
||||
targets | array | [] | A list of elements on the page that you would like to record the coordinates of for comparison with the WebGazer data. Each entry in the array should be a valid [CSS selector string](https://www.w3schools.com/cssref/css_selectors.asp) that identifies the element. The selector string should be valid for exactly one element on the page. If the selector is valid for more than one element then only the first matching element will be recorded.
|
||||
|
||||
## Data Generated
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
webgazer_data | array | An array of objects containing gaze data for the trial. Each object has an `x`, a `y`, and a `t` property. The `x` and `y` properties specify the gaze location in pixels and `t` specifies the time in milliseconds since the start of the trial.
|
||||
webgazer_targets | array | An array of objects containing the pixel coordinates of elements on the screen specified by the `.targets` parameter. Each object contains a `selector` property, containing the CSS selector string used to find the element, plus `top`, `bottom`, `left`, and `right` parameters which specify the [bounding rectangle](https://developer.mozilla.org/en-US/docs/Web/API/Element/getBoundingClientRect) of the element.
|
||||
|
||||
## Functions
|
||||
|
||||
In addition to the jsPsych webgazer-* plugins, the jsPsych webgazer extension provides a set of functions that allow the researcher to interact more directly with WebGazer. These functions can be called at any point during an experiment, and are crucial for building trial plugins that interact with WebGazer. All of the functions below must be prefixed with `jsPsych.extensions.webgazer` (e.g. `jsPsych.extensions.webgazer.faceDetected()`).
|
||||
|
||||
### faceDetected()
|
||||
|
||||
Returns `true` if WebGazer is ready to make predictions (`webgazer.getTracker().predictionReady` is `true`).
|
||||
|
||||
### showPredictions()
|
||||
|
||||
Turns on WebGazer's real-time visualization of predicted gaze location.
|
||||
|
||||
### hidePredictions()
|
||||
|
||||
Turns off WebGazer's real-time visualization of predicted gaze location.
|
||||
|
||||
### showVideo()
|
||||
|
||||
Turns on a display of the webcam image, guiding box for positioning the face, and WebGazer's estimate of the location of facial landmarks.
|
||||
|
||||
### hideVideo()
|
||||
|
||||
Turns off the camera display.
|
||||
|
||||
### resume()
|
||||
|
||||
Turns on gaze prediction. The extension will automatically handle this for you in most cases. You probably only need to use this if you are writing your own plugin that interfaces directly with WebGazer.
|
||||
|
||||
### pause()
|
||||
|
||||
Turns off gaze prediction. The extension will automatically handle this for you in most cases. You probably only need to use this if you are writing your own plugin that interfaces directly with WebGazer.
|
||||
|
||||
### startMouseCalibration()
|
||||
|
||||
Turns on mouse movement and mouse clicks as calibration events. While the `webgazer-calibrate` plugin can also be used to run a parameterized calibration routine, this calibration function call allows you to continuously calibrate WebGazer to any mouse movements or clicks throughout the experiment. For example, any *-button-response trial would also function as a WebGazer calibration event.
|
||||
|
||||
### stopMouseCalibration()
|
||||
|
||||
Stops WebGazer from using mouse movements and mouse clicks as calibration events.
|
||||
|
||||
### calibratePoint(x, y)
|
||||
|
||||
Instructs WebGazer to register the location `x`, `y` (in screen pixel coordinates) as a calibration event. Can be used for passive viewing calibration, i.e., instructing participants to fixate at a particular location.
|
||||
|
||||
### setRegressionType(regression_type)
|
||||
|
||||
Change the method that WebGazer is using to perform feature -> location regression. Valid options are `ridge`, `weightedRidge`, and `threadedRidge`. See the WebGazer docs for more information about these options.
|
||||
The extension uses the default mode specified by WebGazer (currently `ridge`).
|
||||
|
||||
### getCurrentPrediction()
|
||||
|
||||
Get the current predicted gaze location from WebGazer. Returns an object with `x`, `y`, and `eyeFeature` properties. This function is asynchronous, so proper use requires either the `await` keyword in the context of another `async function` or using `.then()`.
|
||||
|
||||
```js
|
||||
jsPsych.extensions.webgazer.getCurrentPrediction().then(function(data){
|
||||
console.log(`Currently looking at coordinate ${data.x}, ${data.y}`)
|
||||
});
|
||||
```
|
||||
|
@ -46,7 +46,7 @@ var trial = {
|
||||
type: 'image-keyboard-response',
|
||||
stimulus: 'imgA.png',
|
||||
on_finish: function(data) {
|
||||
if(data.key_press == 'j'){
|
||||
if(data.response == 'j'){
|
||||
data.correct = true;
|
||||
} else {
|
||||
data.correct = false;
|
||||
|
@ -61,7 +61,7 @@ var trial = {
|
||||
type: 'image-keyboard-response',
|
||||
stimulus: 'imgA.jpg',
|
||||
on_finish: function(data){
|
||||
if(data.key_press == 'j'){
|
||||
if(data.response == 'j'){
|
||||
data.correct = true;
|
||||
} else {
|
||||
data.correct = false;
|
||||
|
@ -22,7 +22,7 @@ var trial = {
|
||||
},
|
||||
on_finish: function(data){
|
||||
// Score the response as correct or incorrect.
|
||||
if(data.key_press == "f"){
|
||||
if(data.response == "f"){
|
||||
data.correct = true;
|
||||
} else {
|
||||
data.correct = false;
|
||||
@ -128,7 +128,7 @@ var trial = {
|
||||
prompt: function() {
|
||||
// this question prompt is dynamic - the text that is shown
|
||||
// will change based on the participant's earlier response
|
||||
var favorite_city = JSON.parse(jsPsych.data.getLastTrialData().values()[0].responses).fav_city;
|
||||
var favorite_city = jsPsych.data.getLastTrialData().values()[0].response.fav_city;
|
||||
var text = "Earlier you said your favorite city is "+favorite_city+". What do you like most about "+favorite_city+"?"
|
||||
return text;
|
||||
},
|
||||
|
@ -138,3 +138,16 @@ jsPsych.init({
|
||||
override_safe_mode: true
|
||||
});
|
||||
```
|
||||
|
||||
## Add extensions
|
||||
|
||||
Extensions are jsPsych modules that can run throughout the experiment and interface with any plugin to extend the functionality of the plugin. One example of an extension is eye tracking, which allows you to gather gaze data during any trial and add it to that trial's data object. If you want to use extensions in your experiment, you must specify this when you initialize the experiment with `jsPsych.init`. The `extensions` parameter in `jsPsych.init` is an array of objects, where each object specifies the extension that you'd like to use in the experiment. Below is an example of adding the webgazer extension.
|
||||
|
||||
```js
|
||||
jsPsych.init({
|
||||
timeline: [...],
|
||||
extensions: [
|
||||
{type: 'webgazer'}
|
||||
]
|
||||
});
|
||||
```
|
237
docs/overview/eye-tracking.md
Normal file
237
docs/overview/eye-tracking.md
Normal file
@ -0,0 +1,237 @@
|
||||
# Eye Tracking
|
||||
|
||||
jsPsych supports eye tracking through the [WebGazer](https://webgazer.cs.brown.edu/) library. WebGazer uses computer vision techniques to identify features of the participant's eyes via a webcam and predicts gaze location. The system is calibrated by having the participant click on or look at known locations on the screen. These locations are linked to eye features. Gaze location is predicted using regression.
|
||||
|
||||
## Getting Started
|
||||
|
||||
First, [download WebGazer.js](https://webgazer.cs.brown.edu/#download) and include it in your experiment file via a `<script>` tag. You'll also need to include jsPsych's [webgazer extension](/extensions/jspsych-ext-webgazer.md).
|
||||
|
||||
```html
|
||||
<head>
|
||||
<script src="jspsych/jspsych.js"></script>
|
||||
<script src="webgazer.js"></script>
|
||||
<script src="jspsych/extensions/jspsych-ext-webgazer.js"></script>
|
||||
</head>
|
||||
```
|
||||
|
||||
!!! tip
|
||||
An example experiment using WebGazer is available in the **/examples** folder of the jsPsych release. See `webgazer.html`.
|
||||
|
||||
To use the WebGazer extension in an experiment, include it in the list of extensions passed to `jsPsych.init()`
|
||||
|
||||
```js
|
||||
jsPsych.init({
|
||||
timeline: [...],
|
||||
extensions: [
|
||||
{type: 'webgazer'}
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
To help the participant position their face correctly for eye tracking you can use the [jspsych-webgazer-init-camera plugin](/plugins/jspsych-webgazer-init-camera.md). This will show the participant what the camera sees, including facial feature landmarks, and prevent the participant from continuing until their face is in good position for eye tracking.
|
||||
|
||||
```js
|
||||
var init_camera_trial = {
|
||||
type: 'webgazer-init-camera'
|
||||
}
|
||||
```
|
||||
|
||||
To calibrate WebGazer, you can use the [jspsych-webgazer-calibrate plugin](/plugins/jspsych-webgazer-calibrate.md). This plugin allows you to specify a set of points on the screen for calibration and to choose the method for calibrating -- either clicking on each point or simply fixating on each point. The location of calibration points is specified in percentages, e.g., `[25,50]` will result in a point that is 25% of the width of the screen from the left edge and 50% of the height of the screen from the top edge. Options for controlling other details of the calibration are explained in the [documentation for the plugin](/plugins/jspsych-webgazer-calibrate.md).
|
||||
|
||||
Note that instructions are not included in the calibration plugin, so you'll likely want to use a different plugin (e.g., `html-button-response`) to display instructions prior to running the calibration.
|
||||
|
||||
```js
|
||||
var calibration_trial = {
|
||||
type: 'webgazer-calibrate',
|
||||
calibration_points: [[25,50], [50,50], [75,50], [50,25], [50,75]],
|
||||
calibration_mode: 'click'
|
||||
}
|
||||
```
|
||||
|
||||
To measure the accuracy and precision of the calibration, you can use the [jspsych-webgazer-validate plugin](/plugins/jspsych-webgazer-validate.md). Like the calibration plugin, you can specify a list of points to perform validation on. Here you can specify the points as either percentages or in terms of the distance from the center of the screen in pixels. Which mode you use will probably depend on how you are defining your stimuli throughout the experiment. You can also specify the radius of tolerance around each point, and the plugin will calculate the percentage of measured gaze samples within that radius. This is a potentially useful heuristic for deciding whether or not to calibrate again. Options for controlling other details of the validation are explained in the [documentation for the plugin](/plugins/jspsych-webgazer-validate.md).
|
||||
|
||||
```js
|
||||
var validation_trial = {
|
||||
type: 'webgazer-validate',
|
||||
validation_points: [[-200,200], [200,200],[-200,-200],[200,-200]],
|
||||
validation_point_coordinates: 'center-offset-pixels',
|
||||
roi_radius: 100
|
||||
}
|
||||
```
|
||||
|
||||
The validation procedure stores the raw gaze data for each validation point, the computed average offset from each validation point, the percentage of samples within the `roi_radius` for each validation point, and the number of samples collected per second.
|
||||
|
||||
```js
|
||||
{
|
||||
raw_gaze: [...],
|
||||
percent_in_roi: [...],
|
||||
average_offset: [...],
|
||||
samples_per_sec: ...
|
||||
}
|
||||
```
|
||||
|
||||
We recommend performing calibration and validation periodically throughout your experiment.
|
||||
|
||||
To enable eye tracking for a trial in your experiment, you can simply add the WebGazer extension to the trial.
|
||||
|
||||
```js
|
||||
var trial = {
|
||||
type: 'html-keyboard-response',
|
||||
stimulus: '<img id="scene" src="my-scene.png"></img>',
|
||||
extensions: [
|
||||
{
|
||||
type: 'webgazer',
|
||||
params: {
|
||||
targets: ['#scene']
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
This will turn on WebGazer at the start of the trial.
|
||||
|
||||
The `params` property in the `extensions` declaration allows you to pass in a list of [CSS selector strings](https://www.w3schools.com/cssref/css_selectors.asp). The [bounding rectangle](https://developer.mozilla.org/en-US/docs/Web/API/Element/getBoundingClientRect) of the DOM element that matches each selector will be recorded in the data for that trial. This allows for easy alignment of the gaze data and objects on the screen.
|
||||
|
||||
```js
|
||||
webgazer_targets : [
|
||||
{selector: ..., top: ..., left: ..., right: ..., bottom:...},
|
||||
{selector: ..., top: ..., left: ..., right: ..., bottom:...},
|
||||
]
|
||||
```
|
||||
|
||||
Gaze data will be added to the trial's data under the property `webgazer_data`. The gaze data is an array of objects. Each object has an `x`, a `y`, and a `t` property. The `x` and `y` properties specify the gaze location in pixels and `t` specifies the time in milliseconds since the start of the trial. Note that establishing the precision and accuracy of these measurements across the variety of web browsers and systems that your experiment participants might be using is quite difficult. For example, different browsers may cause small systematic shifts in the accuracy of `t` values.
|
||||
|
||||
```js
|
||||
webgazer_data: [
|
||||
{x: ..., y: ..., t: ...},
|
||||
{x: ..., y: ..., t: ...},
|
||||
{x: ..., y: ..., t: ...},
|
||||
{x: ..., y: ..., t: ...}
|
||||
]
|
||||
```
|
||||
|
||||
## Tips for Improving Data Quality
|
||||
|
||||
These are some anecdotal observations about factors that improve data quality.
|
||||
|
||||
1. The quality of the camera feed is essential. Good lighting makes a big difference. You may want to encourage participants to perform any eye tracking experiments in a well-lit room.
|
||||
2. Participants need to keep their head relatively still during and after calibration. The calibration is not robust to head movements.
|
||||
3. WebGazer's click-based calibration can be used throughout the experiment. You can turn this on by calling `jsPsych.extensions.webgazer.startMouseCalibration()` at any point in the experiment. If you use a continue button to advance through the experiment and move the location of the continue button around you can be making small adjustments to the calibration throughout.
|
||||
4. Computing the gaze predictions consumes more computational resources than most other things that jsPsych is typically used for. The sampling rate that WebGazer is able to achieve will depend on the computing power of the participant's device. You may want to ask the participant to close any non-essential software and browser windows prior to completing the experiment. You may also want to check that the sampling rate is sufficiently high as part of validation.
|
||||
|
||||
If you have tips based on your own experience please consider sharing them on our [discussion forum](https://github.com/jspsych/jsPsych/discussions) and we'll add to this list!
|
||||
|
||||
## Example
|
||||
|
||||
The code below shows a basic example of what it looks like when you put all of these things together in your experiment's HTML file.
|
||||
|
||||
```html
|
||||
<html>
|
||||
<head>
|
||||
<script src="jspsych/jspsych.js"></script>
|
||||
<script src="jspsych/plugins/jspsych-preload.js"></script>
|
||||
<script src="jspsych/plugins/jspsych-image-keyboard-response.js"></script>
|
||||
<script src="jspsych/plugins/jspsych-html-keyboard-response.js"></script>
|
||||
<script src="jspsych/plugins/jspsych-webgazer-init-camera.js"></script>
|
||||
<script src="jspsych/plugins/jspsych-webgazer-calibrate.js"></script>
|
||||
<script src="jspsych/plugins/jspsych-webgazer-validation.js"></script>
|
||||
<script src="js/webgazer.js"></script>
|
||||
<script src="jspsych/extensions/jspsych-ext-webgazer.js"></script>
|
||||
<link rel="stylesheet" href="jspsych/css/jspsych.css">
|
||||
</head>
|
||||
<body></body>
|
||||
<script>
|
||||
|
||||
var preload = {
|
||||
type: 'preload',
|
||||
images: ['img/blue.png']
|
||||
}
|
||||
|
||||
var init_camera = {
|
||||
type: 'webgazer-init-camera'
|
||||
}
|
||||
|
||||
var calibration = {
|
||||
type: 'webgazer-calibrate'
|
||||
}
|
||||
|
||||
var validation = {
|
||||
type: 'webgazer-validate'
|
||||
}
|
||||
|
||||
var start = {
|
||||
type: 'html-keyboard-response',
|
||||
stimulus: 'Press any key to start.'
|
||||
}
|
||||
|
||||
var trial = {
|
||||
type: 'image-keyboard-response',
|
||||
stimulus: 'img/blue.png',
|
||||
choices: jsPsych.NO_KEYS,
|
||||
trial_duration: 1000,
|
||||
extensions: [
|
||||
{
|
||||
type: 'webgazer',
|
||||
params: {targets: ['#jspsych-image-keyboard-response-stimulus']}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
jsPsych.init({
|
||||
timeline: [init_camera, calibration, validation, start, trial],
|
||||
preload_images: ['img/blue.png'],
|
||||
extensions: [
|
||||
{type: 'webgazer'}
|
||||
]
|
||||
})
|
||||
|
||||
</script>
|
||||
</html>
|
||||
```
|
||||
|
||||
Below is example data from the image-keyboard-response trial taken from the experiment above. In addition to the standard data that is collected for this plugin, you can see the additional `webgazer_data` and `webgazer_targets` arrays. The `webgazer_data` shows 21 gaze location estimates during the 1-second image presentation. The `webgazer_targets` array shows that there was one target, the image-keyboard-response stimulus, and tells you the x- and y-coordinate boundaries for the target (image) rectangle. By comparing each of the x/y locations from the `webgazer_data` locations array with the target boundaries in `webgazer_targets`, you can determine if/when the estimated gaze location was inside the target area.
|
||||
|
||||
```js
|
||||
{
|
||||
"rt": null,
|
||||
"stimulus": "img/blue.png",
|
||||
"response": null,
|
||||
"trial_type": "image-keyboard-response",
|
||||
"trial_index": 4,
|
||||
"time_elapsed": 30701,
|
||||
"internal_node_id": "0.0-4.0",
|
||||
"webgazer_data": [
|
||||
{ "x": 1065, "y": 437, "t": 39},
|
||||
{ "x": 943, "y": 377, "t": 79},
|
||||
{ "x": 835, "y": 332, "t": 110},
|
||||
{ "x": 731, "y": 299, "t": 146},
|
||||
{ "x": 660, "y": 271, "t": 189},
|
||||
{ "x": 606, "y": 251, "t": 238},
|
||||
{ "x": 582, "y": 213, "t": 288},
|
||||
{ "x": 551, "y": 200, "t": 335},
|
||||
{ "x": 538, "y": 183, "t": 394},
|
||||
{ "x": 514, "y": 177, "t": 436},
|
||||
{ "x": 500, "y": 171, "t": 493},
|
||||
{ "x": 525, "y": 178, "t": 542},
|
||||
{ "x": 537, "y": 182, "t": 592},
|
||||
{ "x": 543, "y": 178, "t": 633},
|
||||
{ "x": 547, "y": 177, "t": 691},
|
||||
{ "x": 558, "y": 174, "t": 739},
|
||||
{ "x": 574, "y": 183, "t": 789},
|
||||
{ "x": 577, "y": 197, "t": 838},
|
||||
{ "x": 584, "y": 214, "t": 889},
|
||||
{ "x": 603, "y": 218, "t": 937},
|
||||
{ "x": 606, "y": 221, "t": 987}
|
||||
],
|
||||
"webgazer_targets": [
|
||||
{
|
||||
"selector": "#jspsych-image-keyboard-response-stimulus",
|
||||
"top": 135.33334350585938,
|
||||
"bottom": 435.3333435058594,
|
||||
"left": 490,
|
||||
"right": 790
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
@ -132,7 +132,7 @@ var trial = {
|
||||
correct_response: 'f'
|
||||
},
|
||||
on_finish: function(data){
|
||||
if(data.key_press == data.correct_response){
|
||||
if(data.response == data.correct_response){
|
||||
data.correct = true;
|
||||
} else {
|
||||
data.correct = false;
|
||||
|
@ -352,7 +352,7 @@ var trial = {
|
||||
var loop_node = {
|
||||
timeline: [trial],
|
||||
loop_function: function(data){
|
||||
if(data.values()[0].key_press == 'r'){
|
||||
if(data.values()[0].response == 'r'){
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
@ -382,7 +382,7 @@ var if_node = {
|
||||
// get the data from the previous trial,
|
||||
// and check which key was pressed
|
||||
var data = jsPsych.data.get().last(1).values()[0];
|
||||
if(data.key_press == 's'){
|
||||
if(data.response == 's'){
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
|
@ -22,8 +22,8 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
animation_sequence | JSON | An array, encoded in JSON format. Each element of the array is an object that represents a stimulus in the animation sequence. Each object has a `stimulus` property, which is the image that was displayed, and a `time` property, which is the time in ms, measured from when the sequence began, that the stimulus was displayed.
|
||||
responses | JSON | An array, encoded in JSON format. Each element of the array is an object representing a response given by the subject. Each object has a `stimulus` property, indicating which image was displayed when the key was pressed, an `rt` property, indicating the time of the key press relative to the start of the animation, and a `key_press` property, indicating which key was pressed.
|
||||
animation_sequence | array | An array, where each element is an object that represents a stimulus in the animation sequence. Each object has a `stimulus` property, which is the image that was displayed, and a `time` property, which is the time in ms, measured from when the sequence began, that the stimulus was displayed. The array will be encoded in JSON format when data is saved using either the `.json()` or `.csv()` functions.
|
||||
response | array | An array, where each element is an object representing a response given by the subject. Each object has a `stimulus` property, indicating which image was displayed when the key was pressed, an `rt` property, indicating the time of the key press relative to the start of the animation, and a `key_press` property, indicating which key was pressed. The array will be encoded in JSON format when data is saved using either the `.json()` or `.csv()` functions.
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -32,7 +32,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
| Name | Type | Value |
|
||||
| -------------- | ------- | ---------------------------------------- |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first began playing until the subject's response. |
|
||||
| button_pressed | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on. |
|
||||
| response | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -28,7 +28,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| key_press | string | Indicates which key the subject pressed. If no key was pressed before the trial ended, then the value will be `null`. |
|
||||
| response | string | Indicates which key the subject pressed. If no key was pressed before the trial ended, then the value will be `null`. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first began playing until the subject made a key response. If no key was pressed before the trial ended, then the value will be `null`. |
|
||||
| stimulus | string | Path to the audio file that played during the trial. |
|
||||
|
||||
|
@ -26,7 +26,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response.
|
||||
button_pressed | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
response | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
|
||||
Note: the canvas stimulus is *not* included in the trial data because it is a function. Any stimulus information that should be saved in the trial data can be added via the `data` parameter.
|
||||
|
||||
|
@ -22,7 +22,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
|
||||
Note: the canvas stimulus is *not* included in the trial data because it is a function. Any stimulus information that should be saved in the trial data can be added via the `data` parameter.
|
||||
|
@ -27,8 +27,8 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| stimulus | JSON | JSON encoded representation of the array of stimuli displayed in the trial. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| stimulus | array | Array of stimuli displayed in the trial. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | `true` if the subject got the correct answer, `false` otherwise. |
|
||||
|
||||
|
@ -30,7 +30,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| stimulus | string | Either the path to the image file or the string containing the HTML formatted content that the subject saw on this trial. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | `true` if the subject got the correct answer, `false` otherwise. |
|
||||
|
||||
|
@ -31,7 +31,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| stimulus | string | Either the path to the image file or the string containing the HTML formatted content that the subject saw on this trial. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | `true` if the subject got the correct answer, `false` otherwise. |
|
||||
|
||||
|
@ -18,8 +18,8 @@ In addition to the [parameters available in all plugins](/overview/plugins#param
|
||||
In addition to the [default data collected by all plugins](/overview/plugins#data-collected-by-all-plugins), this plugin collects the following data for each trial.
|
||||
|
||||
| Name | Type | Value |
|
||||
| ------- | ---------------- | --------------------------- |
|
||||
| answers | array of strings | Answers the partcipant gave |
|
||||
| -------- | ---------------- | --------------------------- |
|
||||
| response | array of strings | Answers the participant gave |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -32,9 +32,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
init_locations | JSON string | A JSON-encoded object representing the initial locations of all the stimuli in the sorting area. The object is an array with one element per stimulus. Each element in the array has a "src", "x", and "y" value. "src" is the image path, and "x" and "y" are the object location.
|
||||
moves | JSON string | A JSON-encoded object representing all of the moves the participant made when sorting. The object is an array with each element representing a move. Each element in the array has a "src", "x", and "y" value. "src" is the image path, and "x" and "y" are the object location after the move.
|
||||
final_locations | JSON string | A JSON-encoded object representing the final locations of all the stimuli in the sorting area. The object is an array with one element per stimulus. Each element in the array has a "src", "x", and "y" value. "src" is the image path, and "x" and "y" are the object location.
|
||||
init_locations | array | An array containing objects representing the initial locations of all the stimuli in the sorting area. Each element in the array represents a stimulus, and has a "src", "x", and "y" value. "src" is the image path, and "x" and "y" are the object location. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions.
|
||||
moves | array | An array containing objects representing all of the moves the participant made when sorting. Each object represents a move. Each element in the array has a "src", "x", and "y" value. "src" is the image path, and "x" and "y" are the object location after the move. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions.
|
||||
final_locations | array | An array containing objects representing the final locations of all the stimuli in the sorting area. Each element in the array represents a stimulus, and has a "src", "x", and "y" value. "src" is the image path, and "x" and "y" are the object location. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions.
|
||||
rt | numeric | The response time in milliseconds for the participant to finish all sorting.
|
||||
|
||||
## Examples
|
||||
|
@ -25,7 +25,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response.
|
||||
button_pressed | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
response | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
stimulus | string | The HTML content that was displayed on the screen.
|
||||
|
||||
## Examples
|
||||
|
@ -22,7 +22,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| stimulus | string | The HTML content that was displayed on the screen. |
|
||||
|
||||
|
@ -29,7 +29,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| stimulus | string | Either the path to the image file or the string containing the HTML-formatted content that the subject saw on this trial. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | Boolean indicating whether the user's key press was correct or incorrect for the given stimulus. |
|
||||
|
||||
|
@ -29,7 +29,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| stimulus | string | Either the path to the image file or the string containing the HTML-formatted content that the subject saw on this trial. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | Boolean indicating whether the user's key press was correct or incorrect for the given image. |
|
||||
|
||||
|
@ -31,7 +31,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response.
|
||||
button_pressed | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
response | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
stimulus | string | The path of the image that was displayed.
|
||||
|
||||
## Examples
|
||||
|
@ -27,7 +27,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| stimulus | string | The path of the image that was displayed. |
|
||||
|
||||
|
@ -25,7 +25,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| ------------ | ----------- | ---------------------------------------- |
|
||||
| view_history | JSON string | A JSON string containing the order of pages the subject viewed (including when the subject returned to previous pages) and the time spent viewing each page. |
|
||||
| view_history | array | An array containing the order of pages the subject viewed (including when the subject returned to previous pages) and the time spent viewing each page. Each object in the array represents a single page view, and contains keys called `page_index` (the page number, starting with 0) and `viewing_time` (duration of the page view). This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to view all of the pages. |
|
||||
|
||||
## Example
|
||||
|
@ -23,9 +23,8 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the maxdiff table first appears on the screen until the subject's response.
|
||||
labels | JSON string | A string in JSON format containing the labels corresponding to the left and right response columns.
|
||||
left | string | The alternative endorsed on the left column.
|
||||
right | string | The alternative endorsed on the right column.
|
||||
labels | object | An object with two keys, `left` and `right`, containing the labels (strings) corresponding to the left and right response columns. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions.
|
||||
response | object | An object with two keys, `left` and `right`, containing the alternatives selected on the left and right columns. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions.
|
||||
|
||||
|
||||
## Examples
|
||||
|
@ -64,11 +64,11 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
| Name | Type | Value |
|
||||
| ---------------- | ----------- | ---------------------------------------- |
|
||||
| rt | numeric | The response time in ms for the subject to make a response. |
|
||||
| key_press | string | The key that the subject pressed. |
|
||||
| response | string | The key that the subject pressed. |
|
||||
| correct | boolean | Whether or not the subject's key press corresponded to those provided in correct_choice. |
|
||||
| frame_rate | numeric | The average frame rate for the trial. 0 denotes that the subject responded before the appearance of the second frame. |
|
||||
| number_of_frames | numeric | The number of frames that was shown in this trial. |
|
||||
| frame_rate_array | JSON string | The array that holds the number of miliseconds for each frame in this trial. |
|
||||
| frame_rate_array | array | The array that holds the number of milliseconds for each frame in this trial. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| canvas_width | numeric | The width of the canvas in pixels. |
|
||||
| canvas_height | numeric | The height of the canvas in pixels. |
|
||||
|
||||
|
@ -24,8 +24,8 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| stimulus | string | An JSON-encoded array of length 2 containing the HTML-formatted content that the subject saw for each trial. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| stimulus | array | An array of length 2 containing the HTML-formatted content that the subject saw for each trial. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the second stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | `true` if the subject's response matched the `answer` for this trial. |
|
||||
| answer | string | The correct answer to the trial, either `'same'` or `'different'`. |
|
||||
@ -35,7 +35,7 @@ Additionally, if `first_stim_duration` is null, then the following data is also
|
||||
| Name | Type | Value |
|
||||
| --------------- | ------- | ---------------------------------------- |
|
||||
| rt_stim1 | numeric | The response time in milliseconds for the subject to continue after the first stimulus. The time is measured from when the first stimulus appears on the screen until the subject's response. |
|
||||
| key_press_stim1 | string | Indicates which key the subject pressed to continue. |
|
||||
| response_stim1 | string | Indicates which key the subject pressed to continue. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -24,8 +24,8 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| stimulus | string | An JSON-encoded array of length 2 containing the paths to the image files that the subject saw for each trial. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| stimulus | array | An array of length 2 containing the paths to the image files that the subject saw for each trial. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the second stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | `true` if the subject's response matched the `answer` for this trial. |
|
||||
| answer | string | The correct answer to the trial, either `'same'` or `'different'`. |
|
||||
@ -35,7 +35,7 @@ Additionally, if `first_stim_duration` is null, then the following data is also
|
||||
| Name | Type | Value |
|
||||
| --------------- | ------- | ---------------------------------------- |
|
||||
| rt_stim1 | numeric | The response time in milliseconds for the subject to continue after the first stimulus. The time is measured from when the first stimulus appears on the screen until the subject's response. |
|
||||
| key_press_stim1 | string | Indicates which key the subject pressed to continue. |
|
||||
| response_stim1 | string | Indicates which key the subject pressed to continue. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -25,9 +25,11 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| ------ | ------- | ---------------------------------------- |
|
||||
| grid | JSON | A JSON-encoded representation of the grid. |
|
||||
| target | JSON | A JSON-encoded representation of the target on the grid. |
|
||||
| grid | array | The grid representation. Each inner array represents a single row. The entries in the inner arrays represent the columns. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| target | array | The `[row, column]` target location on the grid. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the second stimulus first appears on the screen until the subject's response. |
|
||||
| response | array | The `[row, column]` response location on the grid. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| correct | boolean | Whether the response location matches the target location (`true`) or not (`false`). |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -27,9 +27,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| grid | JSON | A JSON-encoded representation of the grid. |
|
||||
| target | JSON | A JSON-encoded representation of the target on the grid. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| grid | array | The representation of the grid. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| target | array | The representation of the target location on the grid. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the second stimulus first appears on the screen until the subject's response. |
|
||||
| correct | boolean | `true` if the subject's response matched the target. |
|
||||
|
||||
|
@ -21,8 +21,8 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
responses | string | A JS object encoded in JSON format containing the response for each input. The encoded object will have a separate variable for the response to each input, with each variable being named after its corresponding input element. Each response is a string containing whatever the subject answered for this particular input.
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response.
|
||||
response | object | An object containing the response for each input. The object will have a separate key (variable) for the response to each input, with each variable being named after its corresponding input element. Each response is a string containing whatever the subject answered for this particular input. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -21,9 +21,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
responses | JSON string | A string in JSON format containing the response for each question. The encoded object will have a separate variable for the response to each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. The responses are recorded as integers, representing the position of the slider on the scale. If the `name` parameter is defined for the question, then the response will use the value of `name` as the key for the response in the `responses` object.
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the questions first appear on the screen until the subject's response.
|
||||
question_order | JSON string | A string in JSON format containing an array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`.
|
||||
response | object | An object containing the response for each question. The object will have a separate key (variable) for each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. The responses are recorded as integers, representing the position selected on the likert scale for that question. If the `name` parameter is defined for the question, then the response object will use the value of `name` as the key for each question. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the questions first appear on the screen until the subject's response(s) are submitted. |
|
||||
question_order | array | An array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -20,9 +20,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
responses | JSON string | A string in JSON format containing the response for each question. The encoded object will have a separate variable for the response to each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. The responses are recorded as the name of the option label. If the `name` parameter is defined for the question, then the response will use the value of `name` as the key for the response in the `responses` object.
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the questions first appear on the screen until the subject's response.
|
||||
question_order | JSON string | A string in JSON format containing an array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`.
|
||||
response | object | An object containing the response for each question. The object will have a separate key (variable) for each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. The responses are recorded as the name of the option label selected (string). If the `name` parameter is defined for the question, then the response object will use the value of `name` as the key for each question. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the questions first appear on the screen until the subject's response(s) are submitted. |
|
||||
question_order | array | An array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -21,9 +21,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
responses | JSON string | An array containing all selected choices in JSON format for each question. The encoded object will have a separate variable for the response to each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. The responses are recorded as the name of the option label. If the `name` parameter is defined for the question, then the response will use the value of `name` as the key for the response in the `responses` object.
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the questions first appear on the screen until the subject's response.
|
||||
question_order | JSON string | A string in JSON format containing an array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`.
|
||||
response | object | An object containing the response for each question. The object will have a separate key (variable) for each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. For each question, the responses are recorded as arrays containing any response options that were selected (strings). If the `name` parameter is defined for the question, then the response object will use the value of `name` as the key for each question. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the questions first appear on the screen until the subject's response(s) were submitted. |
|
||||
question_order | array | An array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -20,9 +20,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
responses | JSON string | A string in JSON format containing the response for each question. The encoded object will have a separate variable for the response to each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. Each response is a string containing whatever the subject typed into the associated text box. If the `name` parameter is defined for the question, then the response will use the value of `name` as the key for the response in the `responses` object.
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response.
|
||||
question_order | JSON string | A string in JSON format containing an array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`.
|
||||
response | object | An object containing the response for each question. The object will have a separate key (variable) for each question, with the first question in the trial being recorded in `Q0`, the second in `Q1`, and so on. For each question, the response is a string containing whatever text was in the response box when the responses were submitted. If the `name` parameter is defined for the question, then the response object will use the value of `name` as the key for each question. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the questions first appear on the screen until the subject's response(s) were submitted. |
|
||||
question_order | array | An array with the order of questions. For example `[2,0,1]` would indicate that the first question was `trial.questions[2]` (the third item in the `questions` parameter), the second question was `trial.questions[0]`, and the final question was `trial.questions[1]`. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -35,9 +35,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
button_pressed | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
response | numeric | Indicates which button the subject pressed. The first button in the `choices` array is 0, the second is 1, and so on.
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response.
|
||||
stimulus | string | JSON encoding of the `stimulus` array.
|
||||
stimulus | array | The `stimulus` array. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions.
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -31,9 +31,9 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ------- | ---------------------------------------- |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| stimulus | string | JSON encoding of the `stimulus` array. |
|
||||
stimulus | array | The `stimulus` array. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -41,7 +41,7 @@ Name | Type | Value
|
||||
-----|------|------
|
||||
response | numeric | The numeric value of the slider.
|
||||
rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response.
|
||||
stimulus | string | JSON encoding of the `stimulus` array.
|
||||
stimulus | array | The `stimulus` array. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions.
|
||||
slider_start | numeric | The starting value of the slider.
|
||||
start | numeric | The start time of the video clip.
|
||||
|
||||
|
@ -30,11 +30,11 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
| Name | Type | Value |
|
||||
| -------------- | ----------- | ---------------------------------------- |
|
||||
| correct | boolean | True if the subject gave the correct response. |
|
||||
| key_press | string | Indicates which key the subject pressed. |
|
||||
| response | string | Indicates which key the subject pressed. |
|
||||
| rt | numeric | The response time in milliseconds for the subject to make a response. The time is measured from when the stimulus first appears on the screen until the subject's response. |
|
||||
| set_size | numeric | The number of items in the search array |
|
||||
| target_present | boolean | True if the target is present in the search array |
|
||||
| locations | JSON string | JSON-encoded array where each element of the array is the pixel value of the center of an image in the search array. If the target is present, then the first element will represent the location of the target. |
|
||||
| locations | array | Array where each element is the pixel value of the center of an image in the search array. If the target is present, then the first element will represent the location of the target. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -29,8 +29,8 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| --------- | ----------- | ---------------------------------------- |
|
||||
| stimulus | JSON string | A JSON encoded array where each element of the array is a stimulus from the sequence, in the order that they were shown. |
|
||||
| responses | JSON string | A JSON encoded array containing all response information. The encoded object is an array containing one element for each valid response. Each response item has three properties: `key` the key that was pressed, `stimulus` the index of the stimulus that was displayed when the response was made, and `rt` the response time measured since the start of the sequence. |
|
||||
| stimulus | array | Array where each element is a stimulus from the sequence, in the order that they were shown. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
| response | array | Array containing all response information. Each element in the array is an object representing each valid response. Each response item has three properties: `key` the key that was pressed, `stimulus` the index of the stimulus that was displayed when the response was made, and `rt` the response time measured since the start of the sequence. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -20,7 +20,7 @@ In addition to the [default data collected by all plugins](/overview/plugins#dat
|
||||
|
||||
| Name | Type | Value |
|
||||
| -------- | ----------- | ---------------------------------------- |
|
||||
| stimulus | JSON string | JSON encoded array of the stimulus shown on the trial. |
|
||||
| stimulus | array | Two dimensional array representing the stimulus shown on the trial. This will be encoded as a JSON string when data is saved using the `.json()` or `.csv()` functions. |
|
||||
|
||||
### Stimulus Creation Method
|
||||
|
||||
|
60
docs/plugins/jspsych-webgazer-calibrate.md
Normal file
60
docs/plugins/jspsych-webgazer-calibrate.md
Normal file
@ -0,0 +1,60 @@
|
||||
# jspsych-webgazer-calibrate
|
||||
|
||||
This plugin can be used to calibrate the [WebGazer extension](/extensions/jspsych-ext-webgazer.md). For a narrative description of eye tracking with jsPsych, see the [eye tracking overview](/overview/eye-tracking.md).
|
||||
|
||||
## Parameters
|
||||
|
||||
In addition to the [parameters available in all plugins](overview.md#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
|
||||
|
||||
Parameter | Type | Default Value | Description
|
||||
----------|------|---------------|------------
|
||||
calibration_points | array | `[[10,10], [10,50], [10,90], [50,10], [50,50], [50,90], [90,10], [90,50], [90,90]]` | Array of points in `[x,y]` coordinates. Specified as a percentage of the screen width and height, from the left and top edge. The default grid is 9 points.
|
||||
calibration_mode | string | `'click'` | Can specify `click` to have subjects click on calibration points or `view` to have subjects passively watch calibration points.
|
||||
repetitions_per_point | numeric | 1 | The number of times to repeat the sequence of calibration points.
|
||||
randomize_calibration_order | bool | `false` | Whether to randomize the order of the calibration points.
|
||||
time_to_saccade | numeric | 1000 | If `calibration_mode` is set to `view`, then this is the delay before calibrating after showing a point. Gives the participant time to fixate on the new target before assuming that the participant is looking at the target.
|
||||
time_per_point | numeric | 1000 | If `calibration_mode` is set to `view`, then this is the length of time to show a point while calibrating. Note that if `click` calibration is used then the point will remain on the screen until clicked.
|
||||
|
||||
## Data Generated
|
||||
|
||||
In addition to the [default data collected by all plugins](overview.md#data-collected-by-plugins), this plugin collects the following data for each trial.
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
|
||||
No data currently added by this plugin. Use the [webgazer-validate](/plugins/jspsych-webgazer-validate.md) plugin to measure the precision and accuracy of calibration.
|
||||
|
||||
## Example
|
||||
|
||||
#### Click-based calibration with 5 points
|
||||
|
||||
```javascript
|
||||
var calibration = {
|
||||
type: 'webgazer-calibrate',
|
||||
calibration_points: [[50,50], [25,25], [25,75], [75,25], [75,75]],
|
||||
repetitions_per_point: 2,
|
||||
randomize_calibration_order: true
|
||||
}
|
||||
```
|
||||
|
||||
#### View-based calibration with 39 points, concentrated in the center
|
||||
|
||||
```javascript
|
||||
var calibration = {
|
||||
type: 'webgazer-calibrate',
|
||||
calibration_points: [
|
||||
[10,10],[10,50],[10,90],
|
||||
[30,10],[30,50],[30,90],
|
||||
[40,10],[40,30],[40,40],[40,45],[40,50],[40,55],[40,60],[40,70],[40,90],
|
||||
[50,10],[50,30],[50,40],[50,45],[50,50],[50,55],[50,60],[50,70],[50,90],
|
||||
[60,10],[60,30],[60,40],[60,45],[60,50],[60,55],[60,60],[60,70],[60,90],
|
||||
[70,10],[70,50],[70,90],
|
||||
[90,10],[90,50],[90,90]
|
||||
],
|
||||
repetitions_per_point: 1,
|
||||
randomize_calibration_order: true,
|
||||
calibration_mode: 'view',
|
||||
time_per_point: 500,
|
||||
time_to_saccade: 1000
|
||||
}
|
||||
```
|
31
docs/plugins/jspsych-webgazer-init-camera.md
Normal file
31
docs/plugins/jspsych-webgazer-init-camera.md
Normal file
@ -0,0 +1,31 @@
|
||||
# jspsych-webgazer-init-camera
|
||||
|
||||
This plugin initializes the camera and helps the participant center their face in the camera view for using the [WebGazer extension](/extensions/jspsych-ext-webgazer.md). For a narrative description of eye tracking with jsPsych, see the [eye tracking overview](/overview/eye-tracking.md).
|
||||
|
||||
## Parameters
|
||||
|
||||
In addition to the [parameters available in all plugins](overview.md#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
|
||||
|
||||
Parameter | Type | Default Value | Description
|
||||
----------|------|---------------|------------
|
||||
instructions | string | too long to put here | Instructions for the participant to follow.
|
||||
button_text | string | Continue | The text for the button that participants click to end the trial.
|
||||
|
||||
## Data Generated
|
||||
|
||||
In addition to the [default data collected by all plugins](overview.md#data-collected-by-plugins), this plugin collects the following data for each trial.
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
|
||||
No additional data collected.
|
||||
|
||||
## Example
|
||||
|
||||
#### Parameterless use
|
||||
|
||||
```javascript
|
||||
var init_camera = {
|
||||
type: 'webgazer-init-camera'
|
||||
}
|
||||
```
|
43
docs/plugins/jspsych-webgazer-validate.md
Normal file
43
docs/plugins/jspsych-webgazer-validate.md
Normal file
@ -0,0 +1,43 @@
|
||||
# jspsych-webgazer-validate
|
||||
|
||||
This plugin can be used to measure the accuracy and precision of gaze predictions made by the [WebGazer extension](/extensions/jspsych-ext-webgazer.md). For a narrative description of eye tracking with jsPsych, see the [eye tracking overview](/overview/eye-tracking.md).
|
||||
|
||||
## Parameters
|
||||
|
||||
In addition to the [parameters available in all plugins](overview.md#parameters-available-in-all-plugins), this plugin accepts the following parameters. Parameters with a default value of *undefined* must be specified. Other parameters can be left unspecified if the default value is acceptable.
|
||||
|
||||
Parameter | Type | Default Value | Description
|
||||
----------|------|---------------|------------
|
||||
validation_points | array | `[[10,10], [10,50], [10,90], [50,10], [50,50], [50,90], [90,10], [90,50], [90,90]]` | Array of points in `[x,y]` coordinates. The default grid is 9 points. Meaning of coordinates controlled by `validation_point_coordinates` parameter.
|
||||
validation_point_coordinates | string | `'percent'` | Can specify `percent` to have validation point coordinates specified in percentage of screen width and height, or `center-offset-pixels` to specify each point as the distance in pixels from the center of the screen.
|
||||
roi_radius | numeric | 200 | Tolerance around the validation point in pixels when calculating the percent of gaze measurements within the acceptable range.
|
||||
repetitions_per_point | numeric | 1 | The number of times to repeat the sequence of validation points.
|
||||
randomize_validation_order | bool | `false` | Whether to randomize the order of the validation points.
|
||||
time_to_saccade | numeric | 1000 | The delay before validating after showing a point. Gives the participant time to fixate on the new target before assuming that the participant is looking at the target.
|
||||
validation_duration | numeric | 2000 | The length of time to show each validation point while gaze data is collected.
|
||||
point_size | numeric | 10 | Diameter of the validation points in pixels.
|
||||
show_validation_data | bool | false | If `true` then a visualization of the validation data will be shown on the screen after the validation is complete. This will show each measured gaze location color coded by whether it is within the `roi_radius` of the target point. This is mainly intended for testing and debugging.
|
||||
|
||||
## Data Generated
|
||||
|
||||
In addition to the [default data collected by all plugins](overview.md#data-collected-by-plugins), this plugin collects the following data for each trial.
|
||||
|
||||
Name | Type | Value
|
||||
-----|------|------
|
||||
raw_gaze | array | Raw gaze data for the trial. The array will contain a nested array for each validation point. Within each nested array will be a list of `{dx,dy}` values specifying the distance from the target for that gaze point.
|
||||
percent_in_roi | array | The percentage of samples within the `roi_radius` for each validation point.
|
||||
average_offset | array | The average `x` and `y` distance from each validation point, plus the median distance `r` of the points from this average offset.
|
||||
samples_per_sec | numeric | The average number of samples per second. Calculated by finding samples per second for each point and then averaging these estimates together.
|
||||
|
||||
## Example
|
||||
|
||||
#### 4 point validation using center offset mode
|
||||
|
||||
```javascript
|
||||
var validation = {
|
||||
type: 'webgazer-validate',
|
||||
validation_points: [[-200,-200], [-200,200], [200,-200], [200,200]],
|
||||
validation_point_coordinates: 'center-offset-pixels',
|
||||
show_validation_data: true
|
||||
}
|
||||
```
|
@ -48,3 +48,6 @@ Plugin | Description
|
||||
[jspsych‑visual‑search‑circle](/plugins/jspsych-visual-search-circle) | A customizable visual-search task modelled after [Wang, Cavanagh, & Green (1994)](http://dx.doi.org/10.3758/BF03206946). The subject indicates whether or not a target is present among a set of distractors. The stimuli are displayed in a circle, evenly-spaced, equidistant from a fixation point.
|
||||
[jspsych‑vsl‑animate‑occlusion](/plugins/jspsych-vsl-animate-occlusion) | A visual statistical learning paradigm based on [Fiser & Aslin (2002)](http://dx.doi.org/10.1037//0278-7393.28.3.458). A sequence of stimuli are shown in an oscillatory motion. An occluding rectangle is in the center of the display, and the stimuli change when they are behind the rectangle.
|
||||
[jspsych‑vsl‑grid‑scene](/plugins/jspsych-vsl-grid-scene) | A visual statistical learning paradigm based on [Fiser & Aslin (2001)](http://dx.doi.org/10.1111/1467-9280.00392). A scene made up of individual stimuli arranged in a grid is shown. This plugin can also generate the HTML code to render the stimuli for use in other plugins.
|
||||
[jspsych‑webgazer‑calibrate](/plugins/jspsych-webgazer-calibrate) | Calibrates the WebGazer extension for eye tracking.
|
||||
[jspsych‑webgazer‑init‑camera](/plugins/jspsych-webgazer-init-camera) | Initializes the camera and helps the participant center their face for eye tracking.
|
||||
[jspsych‑webgazer‑validate](/plugins/jspsych-webgazer-validate) | Performs validation to measure precision and accuracy of WebGazer eye tracking predictions.
|
||||
|
@ -17,7 +17,7 @@
|
||||
var loop_node = {
|
||||
timeline: [trial],
|
||||
loop_function: function(data){
|
||||
if(data.values()[0].key_press == 'r'){
|
||||
if(data.values()[0].response == 'r'){
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
@ -40,7 +40,7 @@
|
||||
timeline: [if_trial],
|
||||
conditional_function: function(){
|
||||
var data = jsPsych.data.get().last(1).values()[0];
|
||||
if(data.key_press == 's'){
|
||||
if(data.response == 's'){
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
|
@ -61,9 +61,9 @@
|
||||
data: jsPsych.timelineVariable('data'),
|
||||
on_finish: function(data){
|
||||
var correct = false;
|
||||
if(data.direction == 'left' && data.key_press == 'ArrowLeft' && data.rt > -1){
|
||||
if(data.direction == 'left' && data.response == 'ArrowLeft' && data.rt > -1){
|
||||
correct = true;
|
||||
} else if(data.direction == 'right' && data.key_press == 'ArrowRight' && data.rt > -1){
|
||||
} else if(data.direction == 'right' && data.response == 'ArrowRight' && data.rt > -1){
|
||||
correct = true;
|
||||
}
|
||||
data.correct = correct;
|
||||
|
@ -24,7 +24,7 @@
|
||||
render_on_canvas: false,
|
||||
prompt: '<p>Press "y" to continue. Press "n" to end this node of the experiment.</p>',
|
||||
on_finish: function(data) {
|
||||
if (data.key_press == "n") {
|
||||
if (data.response == "n") {
|
||||
jsPsych.endCurrentTimeline();
|
||||
}
|
||||
},
|
||||
|
@ -28,7 +28,7 @@
|
||||
prompt: '<p>Press "y" to continue. Press "n" to end the experiment.</p>',
|
||||
render_on_canvas: false,
|
||||
on_finish: function(data) {
|
||||
if (data.key_press == 'n') {
|
||||
if (data.response == 'n') {
|
||||
jsPsych.endExperiment('The experiment was ended. This is the end message.');
|
||||
}
|
||||
},
|
||||
|
88886
examples/js/webgazer.js
Normal file
88886
examples/js/webgazer.js
Normal file
File diff suppressed because one or more lines are too long
@ -4,6 +4,7 @@
|
||||
<script src="../jspsych.js"></script>
|
||||
<script src="../plugins/jspsych-audio-button-response.js"></script>
|
||||
<script src="../plugins/jspsych-html-button-response.js"></script>
|
||||
<script src="../plugins/jspsych-preload.js"></script>
|
||||
<link rel="stylesheet" href="../css/jspsych.css">
|
||||
</head>
|
||||
<body></body>
|
||||
@ -11,6 +12,11 @@
|
||||
|
||||
var timeline = [];
|
||||
|
||||
timeline.push({
|
||||
type: 'preload',
|
||||
auto_preload: true
|
||||
});
|
||||
|
||||
timeline.push({
|
||||
type: 'html-button-response',
|
||||
stimulus: '<div style="max-width:600px;"><p>Some browsers now require the user to interact with a page before it can play audio. '+
|
||||
|
@ -4,11 +4,17 @@
|
||||
<script src="../jspsych.js"></script>
|
||||
<script src="../plugins/jspsych-audio-keyboard-response.js"></script>
|
||||
<script src="../plugins/jspsych-html-button-response.js"></script>
|
||||
<script src="../plugins/jspsych-preload.js"></script>
|
||||
<link rel="stylesheet" href="../css/jspsych.css">
|
||||
</head>
|
||||
<body></body>
|
||||
<script>
|
||||
|
||||
var preload = {
|
||||
type: 'preload',
|
||||
auto_preload: true
|
||||
}
|
||||
|
||||
var pre_audio = {
|
||||
type: 'html-button-response',
|
||||
stimulus: '<div style="max-width:600px;"><p>Some browsers now require the user to interact with a page before it can play audio. '+
|
||||
@ -51,7 +57,7 @@
|
||||
}
|
||||
|
||||
jsPsych.init({
|
||||
timeline: [pre_audio, trial_1, trial_2, trial_3, trial_4],
|
||||
timeline: [preload, pre_audio, trial_1, trial_2, trial_3, trial_4],
|
||||
on_finish: function() {
|
||||
jsPsych.data.displayData();
|
||||
},
|
||||
|
@ -4,11 +4,17 @@
|
||||
<script src="../jspsych.js"></script>
|
||||
<script src="../plugins/jspsych-audio-slider-response.js"></script>
|
||||
<script src="../plugins/jspsych-html-button-response.js"></script>
|
||||
<script src="../plugins/jspsych-preload.js"></script>
|
||||
<link rel="stylesheet" href="../css/jspsych.css">
|
||||
</head>
|
||||
<body></body>
|
||||
<script>
|
||||
|
||||
var preload = {
|
||||
type: 'preload',
|
||||
auto_preload: true
|
||||
}
|
||||
|
||||
var pre_audio = {
|
||||
type: 'html-button-response',
|
||||
stimulus: '<div style="max-width:600px;"><p>Some browsers now require the user to interact with a page before it can play audio. '+
|
||||
@ -44,7 +50,7 @@
|
||||
}
|
||||
|
||||
jsPsych.init({
|
||||
timeline: [pre_audio, trial_1, trial_2, trial_3],
|
||||
timeline: [preload, pre_audio, trial_1, trial_2, trial_3],
|
||||
on_finish: function() {
|
||||
jsPsych.data.displayData();
|
||||
},
|
||||
|
@ -90,9 +90,9 @@
|
||||
},
|
||||
on_finish: function(data){
|
||||
if(data.word_validity == 'valid'){
|
||||
var correct = data.key_press == 'y';
|
||||
var correct = data.response == 'y';
|
||||
} else {
|
||||
var correct = data.key_press == 'n';
|
||||
var correct = data.response == 'n';
|
||||
}
|
||||
data.correct = correct;
|
||||
}
|
||||
|
162
examples/webgazer.html
Normal file
162
examples/webgazer.html
Normal file
@ -0,0 +1,162 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<script src="../jspsych.js"></script>
|
||||
<script src="../plugins/jspsych-html-keyboard-response.js"></script>
|
||||
<script src="../plugins/jspsych-html-button-response.js"></script>
|
||||
<script src="../plugins/jspsych-webgazer-init-camera.js"></script>
|
||||
<script src="../plugins/jspsych-webgazer-calibrate.js"></script>
|
||||
<script src="../plugins/jspsych-webgazer-validate.js"></script>
|
||||
<script src="js/webgazer.js"></script>
|
||||
<script src="../extensions/jspsych-ext-webgazer.js"></script>
|
||||
<link rel="stylesheet" href="../css/jspsych.css">
|
||||
<style>
|
||||
.jspsych-content { max-width: 100%;}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body></body>
|
||||
|
||||
<script>
|
||||
|
||||
var init_camera = {
|
||||
type: 'webgazer-init-camera'
|
||||
}
|
||||
|
||||
var calibration_instructions = {
|
||||
type: 'html-button-response',
|
||||
stimulus: `
|
||||
<p>Great! Now the eye tracker will be calibrated to translate the image of your eyes from the webcam to a location on your screen.</p>
|
||||
<p>To do this, you need to click a series of dots.</p>
|
||||
<p>Keep your head still, and click on each dot as it appears. Look at the dot as you click it.</p>
|
||||
`,
|
||||
choices: ['Click to begin'],
|
||||
post_trial_gap: 1000
|
||||
}
|
||||
|
||||
var calibration = {
|
||||
type: 'webgazer-calibrate',
|
||||
calibration_points: [[50,50], [25,25], [25,75], [75,25], [75,75]],
|
||||
//calibration_points: [[10,10],[10,30],[10,50],[10,70],[10,90],[30,10],[30,30],[30,50],[30,70],[30,90],[50,10],[50,30],[50,50],[50,70],[50,90],[70,10],[70,30],[70,50],[70,70],[70,90],[90,10],[90,30],[90,50],[90,70],[90,90]],
|
||||
// calibration_points: [
|
||||
// [10,10],[10,50],[10,90],
|
||||
// [30,10],[30,50],[30,90],
|
||||
// [40,10],[40,30],[40,40],[40,45],[40,50],[40,55],[40,60],[40,70],[40,90],
|
||||
// [50,10],[50,30],[50,40],[50,45],[50,50],[50,55],[50,60],[50,70],[50,90],
|
||||
// [60,10],[60,30],[60,40],[60,45],[60,50],[60,55],[60,60],[60,70],[60,90],
|
||||
// [70,10],[70,50],[70,90],
|
||||
// [90,10],[90,50],[90,90]],
|
||||
repetitions_per_point: 1,
|
||||
randomize_calibration_order: true,
|
||||
}
|
||||
|
||||
var validation_instructions = {
|
||||
type: 'html-button-response',
|
||||
stimulus: `
|
||||
<p>Let's see how accurate the eye tracking is. </p>
|
||||
<p>Keep your head still, and move your eyes to focus on each dot as it appears.</p>
|
||||
<p>You do not need to click on the dots. Just move your eyes to look at the dots.</p>
|
||||
`,
|
||||
choices: ['Click to begin'],
|
||||
post_trial_gap: 1000
|
||||
}
|
||||
|
||||
var validation = {
|
||||
type: 'webgazer-validate',
|
||||
validation_points: [[-200,-200], [-200,200], [200,-200], [200,200]],
|
||||
validation_point_coordinates: 'center-offset-pixels',
|
||||
show_validation_data: true
|
||||
}
|
||||
|
||||
var task_instructions = {
|
||||
type: 'html-button-response',
|
||||
stimulus: `
|
||||
<p>We're ready for the task now.</p>
|
||||
<p>You'll see an arrow symbol (⬅ or ➡) appear on the screen.</p>
|
||||
<p>Your job is to press A if ⬅ appears, and L if ➡ appears.</p>
|
||||
<p>This will repeat 8 times.</p>
|
||||
`,
|
||||
choices: ['I am ready!'],
|
||||
post_trial_gap: 1000
|
||||
}
|
||||
|
||||
var fixation = {
|
||||
type: 'html-keyboard-response',
|
||||
stimulus: '<p style="font-size:40px;">+</p>',
|
||||
choices: jsPsych.NO_KEYS,
|
||||
trial_duration: 500
|
||||
}
|
||||
|
||||
var trial = {
|
||||
type: 'html-keyboard-response',
|
||||
stimulus: function () {
|
||||
return(
|
||||
`<div style="position: relative; width: 400px; height: 400px;">
|
||||
<div style="position: absolute; top:${jsPsych.timelineVariable('top', true)}%; left: ${jsPsych.timelineVariable('left', true)}%">
|
||||
<span id="arrow-target" style="font-size: 40px; transform: translate(-50%, -50%);">${jsPsych.timelineVariable('direction', true) == 'left' ? '⬅' : '➡'}</span>
|
||||
</div>
|
||||
</div>`
|
||||
)
|
||||
},
|
||||
choices: ['a', 'l'],
|
||||
post_trial_gap: 750,
|
||||
data: {
|
||||
top: jsPsych.timelineVariable('top'),
|
||||
left: jsPsych.timelineVariable('left')
|
||||
},
|
||||
extensions: [
|
||||
{type: 'webgazer', params: {targets: ['#arrow-target']}}
|
||||
]
|
||||
}
|
||||
|
||||
var params = [
|
||||
{ left: 0, top: 0, direction: 'left' },
|
||||
{ left: 100, top: 0, direction: 'left' },
|
||||
{ left: 0, top: 100, direction: 'left' },
|
||||
{ left: 100, top: 100, direction: 'left' },
|
||||
{ left: 0, top: 0, direction: 'right' },
|
||||
{ left: 100, top: 0, direction: 'right' },
|
||||
{ left: 0, top: 100, direction: 'right' },
|
||||
{ left: 100, top: 100, direction: 'right' },
|
||||
]
|
||||
|
||||
var trial_proc = {
|
||||
timeline: [fixation, trial],
|
||||
timeline_variables: params,
|
||||
randomize_order: true
|
||||
}
|
||||
|
||||
var done = {
|
||||
type: 'html-button-response',
|
||||
choices: ['CSV', 'JSON'],
|
||||
stimulus: `<p>Done!</p><p>If you'd like to download a copy of the data to explore, click the format you'd like below</p>`,
|
||||
on_finish: function(data){
|
||||
if(data.response == 0){
|
||||
jsPsych.data.get().localSave('csv','webgazer-sample-data.csv');
|
||||
}
|
||||
if(data.response == 1){
|
||||
jsPsych.data.get().localSave('json', 'webgazer-sample-data.json');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var timeline = [];
|
||||
timeline.push(init_camera);
|
||||
timeline.push(calibration_instructions);
|
||||
timeline.push(calibration);
|
||||
timeline.push(validation_instructions);
|
||||
timeline.push(validation);
|
||||
timeline.push(task_instructions);
|
||||
timeline.push(trial_proc);
|
||||
timeline.push(done);
|
||||
|
||||
jsPsych.init({
|
||||
timeline: timeline,
|
||||
extensions: [
|
||||
{type: 'webgazer'}
|
||||
]
|
||||
})
|
||||
</script>
|
||||
|
||||
</html>
|
60
examples/webgazer_image.html
Normal file
60
examples/webgazer_image.html
Normal file
@ -0,0 +1,60 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<script src="../jspsych.js"></script>
|
||||
<script src="../plugins/jspsych-preload.js"></script>
|
||||
<script src="../plugins/jspsych-image-keyboard-response.js"></script>
|
||||
<script src="../plugins/jspsych-html-keyboard-response.js"></script>
|
||||
<script src="../plugins/jspsych-webgazer-init-camera.js"></script>
|
||||
<script src="../plugins/jspsych-webgazer-calibrate.js"></script>
|
||||
<script src="js/webgazer.js"></script>
|
||||
<script src="../extensions/jspsych-ext-webgazer.js"></script>
|
||||
<link rel="stylesheet" href="../css/jspsych.css">
|
||||
</head>
|
||||
<body></body>
|
||||
<script>
|
||||
|
||||
var preload = {
|
||||
type: 'preload',
|
||||
images: ['img/blue.png']
|
||||
}
|
||||
|
||||
var init_camera = {
|
||||
type: 'webgazer-init-camera'
|
||||
}
|
||||
|
||||
var validation = {
|
||||
type: 'webgazer-calibrate',
|
||||
}
|
||||
|
||||
var start = {
|
||||
type: 'html-keyboard-response',
|
||||
stimulus: 'Press any key to start.'
|
||||
}
|
||||
|
||||
var trial = {
|
||||
type: 'image-keyboard-response',
|
||||
stimulus: 'img/blue.png',
|
||||
render_on_canvas: false,
|
||||
choices: jsPsych.NO_KEYS,
|
||||
trial_duration: 1000,
|
||||
extensions: [
|
||||
{
|
||||
type: 'webgazer',
|
||||
params: {targets: ['#jspsych-image-keyboard-response-stimulus']}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
jsPsych.init({
|
||||
timeline: [preload, init_camera, validation, start, trial],
|
||||
extensions: [
|
||||
{type: 'webgazer'}
|
||||
],
|
||||
on_finish: function() {
|
||||
jsPsych.data.displayData();
|
||||
}
|
||||
})
|
||||
|
||||
</script>
|
||||
</html>
|
185
extensions/jspsych-ext-webgazer.js
Normal file
185
extensions/jspsych-ext-webgazer.js
Normal file
@ -0,0 +1,185 @@
|
||||
jsPsych.extensions['webgazer'] = (function () {

  var extension = {};

  // private state for the extension
  // extension authors can define public functions to interact
  // with the state. recommend not exposing state directly
  // so that state manipulations are checked.
  var state = {};

  // required, will be called at jsPsych.init
  // should return a Promise that resolves when the extension is ready.
  // params.webgazer: optional webgazer instance; falls back to window.webgazer.
  // params.round_predictions: round gaze x/y to integer pixels (default true).
  extension.initialize = function (params) {
    return new Promise(function (resolve, reject) {
      if (typeof params.webgazer === 'undefined') {
        if (window.webgazer) {
          state.webgazer = window.webgazer;
        } else {
          reject(new Error('webgazer extension failed to initialize. webgazer.js not loaded. Load webgazer.js before calling jsPsych.init()'));
        }
      } else {
        state.webgazer = params.webgazer;
      }

      if (typeof params.round_predictions === 'undefined') {
        state.round_predictions = true;
      } else {
        state.round_predictions = params.round_predictions;
      }

      // sets up event handler for webgazer data
      state.webgazer.setGazeListener(handleGazeDataUpdate);

      // starts webgazer, and once it initializes we stop mouseCalibration and
      // pause webgazer data.
      state.webgazer.begin().then(function () {
        extension.stopMouseCalibration();
        extension.pause();
        resolve();
      })

      // hide video by default
      extension.hideVideo();

      // hide predictions by default
      extension.hidePredictions();
    })
  }

  // required, will be called when the trial starts (before trial loads)
  extension.on_start = function (params) {
    state.currentTrialData = [];
    state.currentTrialTargets = [];
  }

  // required, will be called when the trial loads.
  // resumes gaze data collection and records the bounding box of any
  // elements listed in params.targets (CSS selectors).
  extension.on_load = function (params) {

    // set current trial start time
    state.currentTrialStart = performance.now();

    // resume data collection
    state.webgazer.resume();

    // set internal flag so the gaze listener stores data for this trial
    state.activeTrial = true;

    // record bounding box of any elements in params.targets
    if (typeof params !== 'undefined') {
      if (typeof params.targets !== 'undefined') {
        for (var i = 0; i < params.targets.length; i++) {
          var target = document.querySelector(params.targets[i]);
          // selectors that match nothing are silently skipped
          if (target !== null) {
            var bounding_rect = target.getBoundingClientRect();
            state.currentTrialTargets.push({
              selector: params.targets[i],
              top: bounding_rect.top,
              bottom: bounding_rect.bottom,
              left: bounding_rect.left,
              right: bounding_rect.right
            })
          }
        }
      }
    }
  }

  // required, will be called when jsPsych.finishTrial() is called
  // must return data object to be merged into data.
  extension.on_finish = function (params) {
    // pause the eye tracker
    state.webgazer.pause();

    // set internal flag so the gaze listener stops storing data
    state.activeTrial = false;

    // send back the gazeData
    return {
      webgazer_data: state.currentTrialData,
      webgazer_targets: state.currentTrialTargets
    }
  }

  // true once webgazer's face tracker has a prediction ready
  extension.faceDetected = function () {
    return state.webgazer.getTracker().predictionReady;
  }

  extension.showPredictions = function () {
    state.webgazer.showPredictionPoints(true);
  }

  extension.hidePredictions = function () {
    state.webgazer.showPredictionPoints(false);
  }

  extension.showVideo = function () {
    state.webgazer.showVideo(true);
    state.webgazer.showFaceOverlay(true);
    state.webgazer.showFaceFeedbackBox(true);
  }

  extension.hideVideo = function () {
    state.webgazer.showVideo(false);
    state.webgazer.showFaceOverlay(false);
    state.webgazer.showFaceFeedbackBox(false);
  }

  extension.resume = function () {
    state.webgazer.resume();
  }

  extension.pause = function () {
    state.webgazer.pause();
  }

  extension.stopMouseCalibration = function () {
    state.webgazer.removeMouseEventListeners()
  }

  extension.startMouseCalibration = function () {
    state.webgazer.addMouseEventListeners()
  }

  // record a calibration click at screen coordinates (x, y)
  extension.calibratePoint = function (x, y) {
    state.webgazer.recordScreenPosition(x, y, 'click');
  }

  // set webgazer's regression model; warns (and leaves the model unchanged)
  // on an invalid name.
  extension.setRegressionType = function (regression_type) {
    // BUGFIX: list previously contained the typo 'weigthedRidge', which both
    // rejected the correct spelling and passed the misspelling to webgazer.
    var valid_regression_models = ['ridge', 'weightedRidge', 'threadedRidge'];
    if (valid_regression_models.includes(regression_type)) {
      state.webgazer.setRegression(regression_type)
    } else {
      console.warn('Invalid regression_type parameter for webgazer.setRegressionType. Valid options are ridge, weightedRidge, and threadedRidge.')
    }
  }

  // resolves with the current gaze prediction, or null if webgazer has no
  // prediction available (e.g. no face detected).
  extension.getCurrentPrediction = async function () {
    var prediction = await state.webgazer.getCurrentPrediction();
    // BUGFIX: guard against null before rounding; webgazer returns null
    // when no prediction is available, and Math.round(null.x) would throw.
    if (prediction !== null && state.round_predictions) {
      prediction.x = Math.round(prediction.x);
      prediction.y = Math.round(prediction.y);
    }
    return prediction;
  }

  // extension.addGazeDataUpdateListener(listener){
  //   state.webgazer.setGazeListener(listener);
  // }

  // gaze listener registered with webgazer; stores one {x, y, t} sample per
  // update while a trial is active. t is ms since the trial loaded.
  function handleGazeDataUpdate(gazeData, elapsedTime) {
    if (gazeData !== null && state.activeTrial) {
      var d = {
        x: state.round_predictions ? Math.round(gazeData.x) : gazeData.x,
        y: state.round_predictions ? Math.round(gazeData.y) : gazeData.y,
        t: Math.round(performance.now() - state.currentTrialStart)
      }
      state.currentTrialData.push(d); // add data to current trial's data
    }
  }

  return extension;

})();
|
||||
|
67
jspsych.js
67
jspsych.js
@ -1,10 +1,3 @@
|
||||
/**
|
||||
* jspsych.js
|
||||
* Josh de Leeuw
|
||||
*
|
||||
* documentation: docs.jspsych.org
|
||||
*
|
||||
**/
|
||||
window.jsPsych = (function() {
|
||||
|
||||
var core = {};
|
||||
@ -106,7 +99,8 @@ window.jsPsych = (function() {
|
||||
'minimum_valid_rt': 0,
|
||||
'experiment_width': null,
|
||||
'override_safe_mode': false,
|
||||
'case_sensitive_responses': false
|
||||
'case_sensitive_responses': false,
|
||||
'extensions': []
|
||||
};
|
||||
|
||||
// detect whether page is running in browser as a local file, and if so, disable web audio and video preloading to prevent CORS issues
|
||||
@ -195,12 +189,39 @@ window.jsPsych = (function() {
|
||||
function(){
|
||||
// success! user can continue...
|
||||
// start experiment
|
||||
startExperiment();
|
||||
loadExtensions();
|
||||
},
|
||||
function(){
|
||||
// fail. incompatible user.
|
||||
}
|
||||
);
|
||||
|
||||
function loadExtensions() {
|
||||
// run the .initialize method of any extensions that are in use
|
||||
// these should return a Promise to indicate when loading is complete
|
||||
if (opts.extensions.length == 0) {
|
||||
startExperiment();
|
||||
} else {
|
||||
var loaded_extensions = 0;
|
||||
for (var i = 0; i < opts.extensions.length; i++) {
|
||||
var ext_params = opts.extensions[i].params;
|
||||
if (!ext_params) {
|
||||
ext_params = {}
|
||||
}
|
||||
jsPsych.extensions[opts.extensions[i].type].initialize(ext_params)
|
||||
.then(() => {
|
||||
loaded_extensions++;
|
||||
if (loaded_extensions == opts.extensions.length) {
|
||||
startExperiment();
|
||||
}
|
||||
})
|
||||
.catch((error_message) => {
|
||||
console.error(error_message);
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
// execute init() when the document is ready
|
||||
@ -262,6 +283,13 @@ window.jsPsych = (function() {
|
||||
// of the DataCollection, for easy access and editing.
|
||||
var trial_data_values = trial_data.values()[0];
|
||||
|
||||
// handle extension callbacks
|
||||
if(Array.isArray(current_trial.extensions)){
|
||||
for(var i=0; i<current_trial.extensions.length; i++){
|
||||
var ext_data_values = jsPsych.extensions[current_trial.extensions[i].type].on_finish(current_trial.extensions[i].params);
|
||||
Object.assign(trial_data_values, ext_data_values);
|
||||
}
|
||||
}
|
||||
// about to execute lots of callbacks, so switch context.
|
||||
jsPsych.internal.call_immediate = true;
|
||||
|
||||
@ -942,6 +970,13 @@ window.jsPsych = (function() {
|
||||
trial.on_start(trial);
|
||||
}
|
||||
|
||||
// call any on_start functions for extensions
|
||||
if(Array.isArray(trial.extensions)){
|
||||
for(var i=0; i<trial.extensions.length; i++){
|
||||
jsPsych.extensions[trial.extensions[i].type].on_start(current_trial.extensions[i].params);
|
||||
}
|
||||
}
|
||||
|
||||
// apply the focus to the element containing the experiment.
|
||||
DOM_container.focus();
|
||||
|
||||
@ -966,6 +1001,13 @@ window.jsPsych = (function() {
|
||||
trial.on_load();
|
||||
}
|
||||
|
||||
// call any on_load functions for extensions
|
||||
if(Array.isArray(trial.extensions)){
|
||||
for(var i=0; i<trial.extensions.length; i++){
|
||||
jsPsych.extensions[trial.extensions[i].type].on_load(current_trial.extensions[i].params);
|
||||
}
|
||||
}
|
||||
|
||||
// done with callbacks
|
||||
jsPsych.internal.call_immediate = false;
|
||||
}
|
||||
@ -1244,6 +1286,10 @@ jsPsych.plugins = (function() {
|
||||
return module;
|
||||
})();
|
||||
|
||||
jsPsych.extensions = (function(){
|
||||
return {};
|
||||
})();
|
||||
|
||||
jsPsych.data = (function() {
|
||||
|
||||
var module = {};
|
||||
@ -1770,6 +1816,9 @@ jsPsych.data = (function() {
|
||||
var line = '';
|
||||
for (var j = 0; j < columns.length; j++) {
|
||||
var value = (typeof array[i][columns[j]] === 'undefined') ? '' : array[i][columns[j]];
|
||||
if(typeof value == 'object') {
|
||||
value = JSON.stringify(value);
|
||||
}
|
||||
var valueString = value + "";
|
||||
line += '"' + valueString.replace(/"/g, '""') + '",';
|
||||
}
|
||||
|
@ -46,6 +46,7 @@ nav:
|
||||
- 'Record Browser Interactions': 'overview/record-browser-interactions.md'
|
||||
- 'Media Preloading': 'overview/media-preloading.md'
|
||||
- 'Fullscreen Experiments': 'overview/fullscreen.md'
|
||||
- 'Eye Tracking': 'overview/eye-tracking.md'
|
||||
- 'Exclude Participants Based on Browser Features': 'overview/exclude-browser.md'
|
||||
- 'Automatic Progress Bar': 'overview/progress-bar.md'
|
||||
- 'Integrating with Prolific': 'overview/prolific.md'
|
||||
@ -103,6 +104,12 @@ nav:
|
||||
- 'jspsych-visual-search-circle': 'plugins/jspsych-visual-search-circle.md'
|
||||
- 'jspsych-vsl-animate-occlusion': 'plugins/jspsych-vsl-animate-occlusion.md'
|
||||
- 'jspsych-vsl-grid-scene': 'plugins/jspsych-vsl-grid-scene.md'
|
||||
- 'jspsych-webgazer-calibrate': 'plugins/jspsych-webgazer-calibrate.md'
|
||||
- 'jspsych-webgazer-init-camera': 'plugins/jspsych-webgazer-init-camera.md'
|
||||
- 'jspsych-webgazer-validate': 'plugins/jspsych-webgazer-validate.md'
|
||||
- Extensions:
|
||||
- 'Extensions': 'extensions/extensions.md'
|
||||
- 'jspsych-ext-webgazer.js': 'extensions/jspsych-ext-webgazer.md'
|
||||
- About:
|
||||
- 'About jsPsych': 'about/about.md'
|
||||
- 'Getting Help': 'about/support.md'
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "jspsych",
|
||||
"version": "6.1.0",
|
||||
"version": "6.3.0",
|
||||
"description": "Behavioral experiments in a browser",
|
||||
"main": "jspsych.js",
|
||||
"directories": {
|
||||
|
@ -177,8 +177,8 @@ jsPsych.plugins.animation = (function() {
|
||||
jsPsych.pluginAPI.cancelKeyboardResponse(response_listener);
|
||||
|
||||
var trial_data = {
|
||||
"animation_sequence": JSON.stringify(animation_sequence),
|
||||
"responses": JSON.stringify(responses)
|
||||
animation_sequence: animation_sequence,
|
||||
response: responses
|
||||
};
|
||||
|
||||
jsPsych.finishTrial(trial_data);
|
||||
|
@ -195,9 +195,9 @@ jsPsych.plugins["audio-button-response"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"button_pressed": response.button
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.button
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -124,9 +124,9 @@ jsPsych.plugins["audio-keyboard-response"] = (function() {
|
||||
response.rt = Math.round(response.rt * 1000);
|
||||
}
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"key_press": response.key
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.key
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -219,10 +219,10 @@ jsPsych.plugins['audio-slider-response'] = (function() {
|
||||
|
||||
// save data
|
||||
var trialdata = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"slider_start": trial.slider_start,
|
||||
"response": response.response
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
slider_start: trial.slider_start,
|
||||
response: response.response
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -168,8 +168,8 @@ jsPsych.plugins["canvas-button-response"] = (function () {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"button_pressed": response.button
|
||||
rt: response.rt,
|
||||
response: response.button
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -96,8 +96,8 @@ jsPsych.plugins["canvas-keyboard-response"] = (function () {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"key_press": response.key
|
||||
rt: response.rt,
|
||||
response: response.key
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -176,9 +176,9 @@ jsPsych.plugins['canvas-slider-response'] = (function () {
|
||||
|
||||
// save data
|
||||
var trialdata = {
|
||||
"rt": response.rt,
|
||||
"response": response.response,
|
||||
"slider_start": trial.slider_start
|
||||
rt: response.rt,
|
||||
response: response.response,
|
||||
slider_start: trial.slider_start
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -237,10 +237,10 @@ jsPsych.plugins["categorize-animation"] = (function() {
|
||||
responded = true;
|
||||
|
||||
trial_data = {
|
||||
"stimulus": JSON.stringify(trial.stimuli),
|
||||
"rt": info.rt,
|
||||
"correct": correct,
|
||||
"key_press": info.key
|
||||
stimulus: trial.stimuli,
|
||||
rt: info.rt,
|
||||
correct: correct,
|
||||
response: info.key
|
||||
};
|
||||
|
||||
jsPsych.pluginAPI.cancelKeyboardResponse(keyboard_listener);
|
||||
|
@ -136,10 +136,10 @@ jsPsych.plugins['categorize-html'] = (function() {
|
||||
|
||||
// save data
|
||||
trial_data = {
|
||||
"rt": info.rt,
|
||||
"correct": correct,
|
||||
"stimulus": trial.stimulus,
|
||||
"key_press": info.key
|
||||
rt: info.rt,
|
||||
correct: correct,
|
||||
stimulus: trial.stimulus,
|
||||
response: info.key
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -138,10 +138,10 @@ jsPsych.plugins['categorize-image'] = (function() {
|
||||
|
||||
// save data
|
||||
trial_data = {
|
||||
"rt": info.rt,
|
||||
"correct": correct,
|
||||
"stimulus": trial.stimulus,
|
||||
"key_press": info.key
|
||||
rt: info.rt,
|
||||
correct: correct,
|
||||
stimulus: trial.stimulus,
|
||||
response: info.key
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -91,7 +91,7 @@ jsPsych.plugins['cloze'] = (function () {
|
||||
if (!trial.check_answers || (trial.check_answers && answers_correct))
|
||||
{
|
||||
var trial_data = {
|
||||
'answers' : answers
|
||||
response: answers
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -362,10 +362,10 @@ jsPsych.plugins['free-sort'] = (function() {
|
||||
}
|
||||
|
||||
const trial_data = {
|
||||
"init_locations": JSON.stringify(init_locations),
|
||||
"moves": JSON.stringify(moves),
|
||||
"final_locations": JSON.stringify(final_locations),
|
||||
"rt": rt
|
||||
init_locations: init_locations,
|
||||
moves: moves,
|
||||
final_locations: final_locations,
|
||||
rt: rt
|
||||
};
|
||||
|
||||
// advance to next part
|
||||
|
@ -69,6 +69,7 @@ jsPsych.plugins.fullscreen = (function() {
|
||||
endTrial();
|
||||
});
|
||||
} else {
|
||||
if ( document.fullscreenElement || document.mozFullScreenElement || document.webkitFullscreenElement ) {
|
||||
if (document.exitFullscreen) {
|
||||
document.exitFullscreen();
|
||||
} else if (document.msExitFullscreen) {
|
||||
@ -78,6 +79,7 @@ jsPsych.plugins.fullscreen = (function() {
|
||||
} else if (document.webkitExitFullscreen) {
|
||||
document.webkitExitFullscreen();
|
||||
}
|
||||
}
|
||||
endTrial();
|
||||
}
|
||||
}
|
||||
|
@ -156,9 +156,9 @@ jsPsych.plugins["html-button-response"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"button_pressed": response.button
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.button
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -89,9 +89,9 @@ jsPsych.plugins["html-keyboard-response"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"key_press": response.key
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.key
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -170,10 +170,10 @@ jsPsych.plugins['html-slider-response'] = (function() {
|
||||
|
||||
// save data
|
||||
var trialdata = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"slider_start": trial.slider_start,
|
||||
"response": response.response
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
slider_start: trial.slider_start,
|
||||
response: response.response
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -165,10 +165,10 @@
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"key_press": response.key,
|
||||
"correct": response.correct
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.key,
|
||||
correct: response.correct
|
||||
};
|
||||
|
||||
// clears the display
|
||||
|
@ -167,10 +167,10 @@
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"key_press": response.key,
|
||||
"correct": response.correct
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.key,
|
||||
correct: response.correct
|
||||
};
|
||||
|
||||
// clears the display
|
||||
|
@ -294,9 +294,9 @@ jsPsych.plugins["image-button-response"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"button_pressed": response.button
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.button
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -202,9 +202,9 @@ jsPsych.plugins["image-keyboard-response"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"key_press": response.key
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.key
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -337,10 +337,10 @@ jsPsych.plugins['image-slider-response'] = (function() {
|
||||
|
||||
// save data
|
||||
var trialdata = {
|
||||
"rt": response.rt,
|
||||
"stimulus": trial.stimulus,
|
||||
"slider_start": trial.slider_start,
|
||||
"response": response.response
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
slider_start: trial.slider_start,
|
||||
response: response.response
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -191,8 +191,8 @@ jsPsych.plugins.instructions = (function() {
|
||||
display_element.innerHTML = '';
|
||||
|
||||
var trial_data = {
|
||||
"view_history": JSON.stringify(view_history),
|
||||
"rt": performance.now() - start_time
|
||||
view_history: view_history,
|
||||
rt: performance.now() - start_time
|
||||
};
|
||||
|
||||
jsPsych.finishTrial(trial_data);
|
||||
|
@ -157,10 +157,9 @@ jsPsych.plugins['maxdiff'] = (function () {
|
||||
|
||||
// data saving
|
||||
var trial_data = {
|
||||
"rt": response_time,
|
||||
"labels": JSON.stringify({"left": trial.labels[0], "right": trial.labels[1]}),
|
||||
"left": get_response('left'),
|
||||
"right": get_response('right')
|
||||
rt: response_time,
|
||||
labels: {left: trial.labels[0], right: trial.labels[1]},
|
||||
response: {left: get_response('left'), right: get_response('right')}
|
||||
};
|
||||
|
||||
// next trial
|
||||
|
@ -522,45 +522,44 @@ jsPsych.plugins["rdk"] = (function() {
|
||||
|
||||
//Place all the data to be saved from this trial in one data object
|
||||
var trial_data = {
|
||||
"rt": response.rt, //The response time
|
||||
"key_press": response.key, //The key that the subject pressed
|
||||
"correct": correctOrNot(), //If the subject response was correct
|
||||
"choices": trial.choices, //The set of valid keys
|
||||
"correct_choice": trial.correct_choice, //The correct choice
|
||||
"trial_duration": trial.trial_duration, //The trial duration
|
||||
"response_ends_trial": trial.response_ends_trial, //If the response ends the trial
|
||||
"number_of_apertures": trial.number_of_apertures,
|
||||
"number_of_dots": trial.number_of_dots,
|
||||
"number_of_sets": trial.number_of_sets,
|
||||
"coherent_direction": trial.coherent_direction,
|
||||
"coherence": trial.coherence,
|
||||
"opposite_coherence": trial.opposite_coherence,
|
||||
"dot_radius": trial.dot_radius,
|
||||
"dot_life": trial.dot_life,
|
||||
"move_distance": trial.move_distance,
|
||||
"aperture_width": trial.aperture_width,
|
||||
"aperture_height": trial.aperture_height,
|
||||
"dot_color": trial.dot_color,
|
||||
"background_color": trial.background_color,
|
||||
"RDK_type": trial.RDK_type,
|
||||
"aperture_type": trial.aperture_type,
|
||||
"reinsert_type": trial.reinsert_type,
|
||||
"frame_rate": frameRate, //The average frame rate for the trial
|
||||
"frame_rate_array": JSON.stringify(frameRateArray), //The array of ms per frame in this trial, in the form of a JSON string
|
||||
"number_of_frames": numberOfFrames, //The number of frames in this trial
|
||||
"aperture_center_x": trial.aperture_center_x,
|
||||
"aperture_center_y": trial.aperture_center_y,
|
||||
"fixation_cross": trial.fixation_cross,
|
||||
"fixation_cross_width": trial.fixation_cross_width,
|
||||
"fixation_cross_height": trial.fixation_cross_height,
|
||||
"fixation_cross_color": trial.fixation_cross_color,
|
||||
"fixation_cross_thickness": trial.fixation_cross_thickness,
|
||||
"border": trial.border,
|
||||
"border_thickness": trial.border_thickness,
|
||||
"border_color": trial.border_color,
|
||||
"canvas_width": canvasWidth,
|
||||
"canvas_height": canvasHeight
|
||||
|
||||
rt: response.rt, //The response time
|
||||
response: response.key, //The key that the subject pressed
|
||||
correct: correctOrNot(), //If the subject response was correct
|
||||
choices: trial.choices, //The set of valid keys
|
||||
correct_choice: trial.correct_choice, //The correct choice
|
||||
trial_duration: trial.trial_duration, //The trial duration
|
||||
response_ends_trial: trial.response_ends_trial, //If the response ends the trial
|
||||
number_of_apertures: trial.number_of_apertures,
|
||||
number_of_dots: trial.number_of_dots,
|
||||
number_of_sets: trial.number_of_sets,
|
||||
coherent_direction: trial.coherent_direction,
|
||||
coherence: trial.coherence,
|
||||
opposite_coherence: trial.opposite_coherence,
|
||||
dot_radius: trial.dot_radius,
|
||||
dot_life: trial.dot_life,
|
||||
move_distance: trial.move_distance,
|
||||
aperture_width: trial.aperture_width,
|
||||
aperture_height: trial.aperture_height,
|
||||
dot_color: trial.dot_color,
|
||||
background_color: trial.background_color,
|
||||
RDK_type: trial.RDK_type,
|
||||
aperture_type: trial.aperture_type,
|
||||
reinsert_type: trial.reinsert_type,
|
||||
frame_rate: frameRate, //The average frame rate for the trial
|
||||
frame_rate_array: frameRateArray, //The array of ms per frame in this trial
|
||||
number_of_frames: numberOfFrames, //The number of frames in this trial
|
||||
aperture_center_x: trial.aperture_center_x,
|
||||
aperture_center_y: trial.aperture_center_y,
|
||||
fixation_cross: trial.fixation_cross,
|
||||
fixation_cross_width: trial.fixation_cross_width,
|
||||
fixation_cross_height: trial.fixation_cross_height,
|
||||
fixation_cross_color: trial.fixation_cross_color,
|
||||
fixation_cross_thickness: trial.fixation_cross_thickness,
|
||||
border: trial.border,
|
||||
border_thickness: trial.border_thickness,
|
||||
border_color: trial.border_color,
|
||||
canvas_width: canvasWidth,
|
||||
canvas_height: canvasHeight
|
||||
}
|
||||
|
||||
//Remove the canvas as the child of the display_element element
|
||||
|
@ -115,9 +115,9 @@ jsPsych.plugins['reconstruction'] = (function() {
|
||||
|
||||
// save data
|
||||
var trial_data = {
|
||||
"rt": response_time,
|
||||
"final_value": param,
|
||||
"start_value": trial.starting_value
|
||||
rt: response_time,
|
||||
final_value: param,
|
||||
start_value: trial.starting_value
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -153,9 +153,9 @@ jsPsych.plugins["resize"] = (function() {
|
||||
// finishes trial
|
||||
|
||||
var trial_data = {
|
||||
'final_height_px': final_height_px,
|
||||
'final_width_px': final_width_px,
|
||||
'scale_factor': scale_factor
|
||||
final_height_px: final_height_px,
|
||||
final_width_px: final_width_px,
|
||||
scale_factor: scale_factor
|
||||
}
|
||||
|
||||
jsPsych.finishTrial(trial_data);
|
||||
|
@ -136,15 +136,15 @@ jsPsych.plugins['same-different-html'] = (function() {
|
||||
}
|
||||
|
||||
var trial_data = {
|
||||
"rt": info.rt,
|
||||
"answer": trial.answer,
|
||||
"correct": correct,
|
||||
"stimulus": JSON.stringify([trial.stimuli[0], trial.stimuli[1]]),
|
||||
"key_press": info.key
|
||||
rt: info.rt,
|
||||
answer: trial.answer,
|
||||
correct: correct,
|
||||
stimulus: [trial.stimuli[0], trial.stimuli[1]],
|
||||
response: info.key
|
||||
};
|
||||
if (first_stim_info) {
|
||||
trial_data["rt_stim1"] = first_stim_info.rt;
|
||||
trial_data["key_press_stim1"] = first_stim_info.key;
|
||||
trial_data["response_stim1"] = first_stim_info.key;
|
||||
}
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -137,15 +137,15 @@ jsPsych.plugins['same-different-image'] = (function() {
|
||||
}
|
||||
|
||||
var trial_data = {
|
||||
"rt": info.rt,
|
||||
"answer": trial.answer,
|
||||
"correct": correct,
|
||||
"stimulus": JSON.stringify([trial.stimuli[0], trial.stimuli[1]]),
|
||||
"key_press": info.key
|
||||
rt: info.rt,
|
||||
answer: trial.answer,
|
||||
correct: correct,
|
||||
stimulus: [trial.stimuli[0], trial.stimuli[1]],
|
||||
response: info.key
|
||||
};
|
||||
if (first_stim_info) {
|
||||
trial_data["rt_stim1"] = first_stim_info.rt;
|
||||
trial_data["key_press_stim1"] = first_stim_info.key;
|
||||
trial_data["response_stim1"] = first_stim_info.key;
|
||||
}
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -151,12 +151,11 @@ jsPsych.plugins["serial-reaction-time-mouse"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"grid": JSON.stringify(trial.grid),
|
||||
"target": JSON.stringify(trial.target),
|
||||
"response_row": response.row,
|
||||
"response_column": response.column,
|
||||
"correct": response.row == trial.target[0] && response.column == trial.target[1]
|
||||
rt: response.rt,
|
||||
grid: trial.grid,
|
||||
target: trial.target,
|
||||
response: [parseInt(response.row,10), parseInt(response.column,10)],
|
||||
correct: response.row == trial.target[0] && response.column == trial.target[1]
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -171,11 +171,11 @@ jsPsych.plugins["serial-reaction-time"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"key_press": response.key,
|
||||
"correct": response.correct,
|
||||
"grid": JSON.stringify(trial.grid),
|
||||
"target": JSON.stringify(trial.target)
|
||||
rt: response.rt,
|
||||
response: response.key,
|
||||
correct: response.correct,
|
||||
grid: trial.grid,
|
||||
target: trial.target
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -105,8 +105,8 @@ jsPsych.plugins['survey-html-form'] = (function() {
|
||||
|
||||
// save data
|
||||
var trialdata = {
|
||||
"rt": response_time,
|
||||
"responses": JSON.stringify(question_data)
|
||||
rt: response_time,
|
||||
response: question_data
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -177,9 +177,9 @@ jsPsych.plugins['survey-likert'] = (function() {
|
||||
|
||||
// save data
|
||||
var trial_data = {
|
||||
"rt": response_time,
|
||||
"responses": JSON.stringify(question_data),
|
||||
"question_order": JSON.stringify(question_order)
|
||||
rt: response_time,
|
||||
response: question_data,
|
||||
question_order: question_order
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -191,9 +191,9 @@ jsPsych.plugins['survey-multi-choice'] = (function() {
|
||||
}
|
||||
// save data
|
||||
var trial_data = {
|
||||
"rt": response_time,
|
||||
"responses": JSON.stringify(question_data),
|
||||
"question_order": JSON.stringify(question_order)
|
||||
rt: response_time,
|
||||
response: question_data,
|
||||
question_order: question_order
|
||||
};
|
||||
display_element.innerHTML = '';
|
||||
|
||||
|
@ -214,9 +214,9 @@ jsPsych.plugins['survey-multi-select'] = (function() {
|
||||
|
||||
// save data
|
||||
var trial_data = {
|
||||
"rt": response_time,
|
||||
"responses": JSON.stringify(question_data),
|
||||
"question_order": JSON.stringify(question_order)
|
||||
rt: response_time,
|
||||
response: question_data,
|
||||
question_order: question_order
|
||||
};
|
||||
display_element.innerHTML = '';
|
||||
|
||||
|
@ -168,8 +168,8 @@ jsPsych.plugins['survey-text'] = (function() {
|
||||
}
|
||||
// save data
|
||||
var trialdata = {
|
||||
"rt": response_time,
|
||||
"responses": JSON.stringify(question_data)
|
||||
rt: response_time,
|
||||
response: question_data
|
||||
};
|
||||
|
||||
display_element.innerHTML = '';
|
||||
|
@ -263,9 +263,9 @@ jsPsych.plugins["video-button-response"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": JSON.stringify(trial.stimulus),
|
||||
"button_pressed": response.button
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.button
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
@ -227,9 +227,9 @@ jsPsych.plugins["video-keyboard-response"] = (function() {
|
||||
|
||||
// gather the data to store for the trial
|
||||
var trial_data = {
|
||||
"rt": response.rt,
|
||||
"stimulus": JSON.stringify(trial.stimulus),
|
||||
"key_press": response.key
|
||||
rt: response.rt,
|
||||
stimulus: trial.stimulus,
|
||||
response: response.key
|
||||
};
|
||||
|
||||
// clear the display
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user