From 0ecfcee475f64d2eeb1c0a0aef01ff8aedf1d1ff Mon Sep 17 00:00:00 2001
From: Becky Gilbert
Date: Wed, 24 Feb 2021 12:47:43 -0800
Subject: [PATCH] Deployed 4a59c86 with MkDocs version: 1.1.2

---
 overview/eye-tracking/index.html | 8 ++++----
 sitemap.xml.gz                   | Bin 238 -> 238 bytes
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/overview/eye-tracking/index.html b/overview/eye-tracking/index.html
index 7b0b19e9..bfd6cb93 100755
--- a/overview/eye-tracking/index.html
+++ b/overview/eye-tracking/index.html
@@ -1427,7 +1427,7 @@

Eye Tracking

jsPsych supports eye tracking through the WebGazer library. WebGazer uses computer vision techniques to identify features of the participant's eyes via a webcam and predicts gaze location. The system is calibrated by having the participant click on or look at known locations on the screen; these locations are linked to the detected eye features, and gaze location is then predicted using regression.

Getting Started

First, download WebGazer.js and include it in your experiment file via a <script> tag. You'll also need to include jsPsych's webgazer extension.

<head>
   <script src="jspsych/jspsych.js"></script>
   <script src="webgazer.js"></script>
@@ -1446,12 +1446,12 @@
   ]
 })
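
To use the extension in an experiment, it also has to be registered when the experiment is initialized. A minimal sketch of that registration is shown below; it assumes the jsPsych 6.x conventions, where the extension file (typically jspsych/extensions/jspsych-ext-webgazer.js, though the exact path depends on your setup) is loaded in the <head> and the extension is passed to jsPsych.init() via the extensions parameter. The timeline variable is a placeholder for your own array of trials.

// Sketch: register the WebGazer extension when initializing jsPsych
// (assumes the jsPsych 6.x extensions API; adjust paths and names to your setup).
jsPsych.init({
  timeline: timeline,        // placeholder for your array of trials
  extensions: [
    {type: 'webgazer'}       // enables the WebGazer extension for this experiment
  ]
})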
 
To help the participant position their face correctly for eye tracking you can use the jspsych-webgazer-init-camera plugin. This will show the participant what the camera sees, including facial feature landmarks, and prevent the participant from continuing until their face is in good position for eye tracking.

var init_camera_trial = {
   type: 'webgazer-init-camera'
 }
 
To calibrate WebGazer, you can use the jspsych-webgazer-calibrate plugin. This plugin allows you to specify a set of points on the screen for calibration and to choose the method for calibrating -- either clicking on each point or simply fixating on each point. The location of calibration points is specified in percentages, e.g., [25,50] will result in a point that is 25% of the width of the screen from the left edge and 50% of the height of the screen from the top edge. Options for controlling other details of the calibration are explained in the documentation for the plugin.

Note that instructions are not included in the calibration plugin, so you'll likely want to use a different plugin (e.g., html-button-response) to display instructions prior to running the calibration.

var calibration_trial = {
   type: 'webgazer-calibrate',
@@ -1459,7 +1459,7 @@
   calibration_mode: 'click'
 }
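
For a concrete example of percentage-based calibration points, a calibration trial might look like the following sketch. The specific point list is illustrative rather than recommended; each [x,y] pair is a percentage of the screen width and height, as described above.

var calibration_trial = {
  type: 'webgazer-calibrate',
  // each [x,y] pair is a percentage of screen width/height; this layout is illustrative
  calibration_points: [[25,25], [75,25], [50,50], [25,75], [75,75]],
  // 'click' mode; a fixation-based mode is also available (see the plugin documentation)
  calibration_mode: 'click'
}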
 
To measure the accuracy and precision of the calibration, you can use the jspsych-webgazer-validate plugin. Like the calibration plugin, you can specify a list of points to perform validation on. Here the points can be specified either as percentages or as distances in pixels from the center of the screen. Which mode you use will probably depend on how you are defining your stimuli throughout the experiment. You can also specify the radius of tolerance around each point, and the plugin will calculate the percentage of measured gaze samples within that radius. This is a potentially useful heuristic for deciding whether or not to calibrate again. Options for controlling other details of the validation are explained in the documentation for the plugin.

var validation_trial = {
   type: 'webgazer-validate',
   validation_points: [[-200,200], [200,200],[-200,-200],[200,-200]],
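
A fuller sketch of a validation trial, including the tolerance radius described above, is shown below. The points here are offsets in pixels from the center of the screen, matching the snippet above; the roi_radius and validation_point_coordinates parameter names are assumptions based on the plugin description, so check the plugin documentation for the exact names and defaults.

var validation_trial = {
  type: 'webgazer-validate',
  // offsets in pixels from the center of the screen
  validation_points: [[-200,200], [200,200], [-200,-200], [200,-200]],
  // assumption: switches point interpretation from percentages to center offsets
  validation_point_coordinates: 'center-offset-pixels',
  // assumption: radius (in pixels) used to compute the percentage of gaze samples near each point
  roi_radius: 100
}

In a typical experiment these trials run in order near the start of the timeline: camera initialization, then calibration, then validation, with calibration repeated if the validation results are poor.
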
diff --git a/sitemap.xml.gz b/sitemap.xml.gz
index 4c733a0d5c135a655b2208e6f7aadb6843a795da..ab12a6d063de1c5a423a7d890619f54aad66e2ab 100755
GIT binary patch
delta 14
VcmaFI_>PfHzMF$1efLDR7XT&u1wa4*

delta 14
VcmaFI_>PfHzMF$%&(?`-F90Wu1&;s#