adding a calibration routine, but it's not great

This commit is contained in:
Josh de Leeuw 2021-01-02 17:20:21 -05:00
parent 0ac9fc0d5a
commit 562e993f9c
4 changed files with 197 additions and 17 deletions

View File

@ -4,6 +4,7 @@
<head>
<script src="../jspsych.js"></script>
<script src="../plugins/jspsych-html-keyboard-response.js"></script>
<script src="../plugins/jspsych-webgazer-calibrate.js"></script>
<script src="js/webgazer.js"></script>
<script src="../extensions/jspsych-ext-webgazer.js"></script>
<link rel="stylesheet" href="../css/jspsych.css">
@ -21,8 +22,7 @@
type: 'html-keyboard-response',
stimulus: '+',
choices: jsPsych.NO_KEYS,
trial_duration: 500,
extensions: ['webgazer']
trial_duration: 500
}
var trial = {
@ -37,7 +37,8 @@
)
},
choices: ['a', 'l'],
post_trial_gap: 250
post_trial_gap: 250,
extensions: ['webgazer']
}
var params = [
@ -58,6 +59,7 @@
}
var timeline = [];
timeline.push(calibration);
timeline.push(trial_proc);
jsPsych.init({

View File

@ -23,17 +23,17 @@ jsPsych.extensions['webgazer'] = (function () {
// sets up event handler for webgazer data
state.webgazer.setGazeListener(handleGazeDataUpdate);
// hide video by default
state.webgazer.showVideo(false);
// hide predictions by default
state.webgazer.showPredictionPoints(false);
// starts webgazer
state.webgazer.begin();
// hide video by default
//state.webgazer.showVideo(false);
// hide predictions by default
//state.webgazer.showPredictionPoints(false);
// immediately pauses data gathering
state.webgazer.pause();
//state.webgazer.pause();
}
// required, will be called when the trial starts (before trial loads)
@ -55,15 +55,33 @@ jsPsych.extensions['webgazer'] = (function () {
// required, will be called when jsPsych.finishTrial() is called
// must return data object to be merged into data.
extension.on_finish = function () {
// pause the eye tracker
state.webgazer.pause();
// send back the gazeData
return {
gazeData: JSON.stringify(state.currentTrialData);
gazeData: JSON.stringify(state.currentTrialData)
}
}
function handleGazeDataUpdate(gazeData, elapsedTime){
gazeData.t = performance.now() - state.currentTrialStart; // add a timestamp to the x,y coords
extension.faceDetected = function(){
return state.webgazer.getTracker().predictionReady;
}
extension.showPredictionPoints = function(b){
state.webgazer.showPredictionPoints(b);
}
state.currentTrialData.push(gazeData); // add data to current trial's data
function handleGazeDataUpdate(gazeData, elapsedTime){
if(gazeData !== null){
var d = {
x: gazeData.x,
y: gazeData.y,
t: performance.now() - state.currentTrialStart
}
state.currentTrialData.push(d); // add data to current trial's data
}
}
return extension;

View File

@ -290,7 +290,7 @@ window.jsPsych = (function() {
// handle extension callbacks
if(Array.isArray(trial.extensions)){
for(var i=0; i<trial.extensions.length; i++){
var ext_data_values = trial.extensions[i].on_finish();
var ext_data_values = jsPsych.extensions[trial.extensions[i]].on_finish();
Object.assign(trial_data_values, ext_data_values);
}
}
@ -904,7 +904,7 @@ window.jsPsych = (function() {
// call any on_start functions for extensions
if(Array.isArray(trial.extensions)){
for(var i=0; i<trial.extensions.length; i++){
trial.extensions[i].on_start();
jsPsych.extensions[trial.extensions[i]].on_start();
}
}
@ -925,7 +925,7 @@ window.jsPsych = (function() {
// call any on_load functions for extensions
if(Array.isArray(trial.extensions)){
for(var i=0; i<trial.extensions.length; i++){
trial.extensions[i].on_load();
jsPsych.extensions[trial.extensions[i]].on_load();
}
}
}

View File

@ -0,0 +1,160 @@
/**
* jspsych-webgazer-calibrate
* Josh de Leeuw
**/
/**
 * webgazer-calibrate plugin.
 *
 * Guides the participant through a two-phase setup for the WebGazer eye
 * tracker: (1) a face-positioning phase where they center their face in the
 * webcam preview, then (2) a click-calibration phase where they click a series
 * of on-screen points while looking at them (each click trains webgazer's
 * gaze-to-screen mapping). Finishes the trial 2s after the last point.
 */
jsPsych.plugins["webgazer-calibrate"] = (function() {

  var plugin = {};

  plugin.info = {
    name: 'webgazer-calibrate',
    description: '',
    parameters: {
      face_detect_threshold: {
        // FIX: was parameterType.IMAGE — this is a numeric 0-1 threshold.
        type: jsPsych.plugins.parameterType.FLOAT,
        pretty_name: 'Face Detection Threshold',
        default: 0.75,
        description: 'A value between 0-1 representing the quality of the face detection that must be achieved before moving to calibration.'
      },
      calibration_points: {
        type: jsPsych.plugins.parameterType.INT,
        array: true,
        pretty_name: 'Calibration points',
        default: [[10,50], [10,90], [30,10], [50,10], [50,50], [50,90], [90,10], [90,50], [90,90]],
        description: 'Calibration target locations as [left, top] percentages of the viewport.'
      },
      clicks_per_point: {
        type: jsPsych.plugins.parameterType.INT,
        pretty_name: 'Clicks per point',
        default: 5,
        description: 'Number of times the participant must click each calibration target.'
      }
    }
  }

  // Possible future work (from original draft): alternate calibration
  // routines (track a moving dot with the mouse) and a validation phase.

  plugin.trial = function(display_element, trial) {

    var html = "<div id='webgazer-calibrate-container' style='position: relative; width:100vw; height:100vh'>";
    html += "</div>";
    display_element.innerHTML = html;

    var wg_container = display_element.querySelector('#webgazer-calibrate-container');

    // Simple state machine: video-detect -> begin-calibrate -> calibrate -> calibration-done.
    var state = "video-detect";
    var points_completed = 0; // index into trial.calibration_points
    var clicks = 0;           // clicks recorded on the current point

    // Main loop. Only the 'video-detect' state polls every frame; the other
    // states are re-entered via event handlers that call requestAnimationFrame(loop).
    function loop(){
      if(state == 'video-detect'){
        show_video_detect_message();
        var score = check_face_score();
        // NOTE(review): check_face_score() returns a boolean, so
        // trial.face_detect_threshold is currently unused — webgazer does not
        // expose a numeric face-quality score here.
        if(score){
          state = "begin-calibrate";
        }
        requestAnimationFrame(loop);
      } else if(state == 'begin-calibrate'){
        show_begin_calibrate_message();
      } else if(state == 'calibrate'){
        calibrate();
      } else if(state == 'calibration-done'){
        wg_container.innerHTML = "";
        jsPsych.extensions['webgazer'].showPredictionPoints(true);
        // FIX: use the pluginAPI timer so clearAllTimeouts() in end_trial
        // (and jsPsych's own cleanup) can actually cancel it.
        jsPsych.pluginAPI.setTimeout(end_trial, 2000);
      }
    }
    requestAnimationFrame(loop);

    // Phase 1 instructions; re-rendered each frame while waiting for the face.
    function show_video_detect_message(){
      wg_container.innerHTML = "<div style='position: absolute; top: 50%; left: calc(50% - 350px); transform: translateY(-50%); width:700px;'>"+
        "<p>To start, you need to position your head so that the webcam has a good view of your eyes.</p>"+
        "<p>Use the video in the upper-left corner as a guide. Center your face in the box.</p>"+
        // WIP quality meter (needs a numeric score from webgazer; see check_face_score):
        // "<p>Quality of detection:</p>"+
        // "<div id='video-detect-quality-container' style='width:700px; height: 20px; background-color:#ccc; position: relative;'>"+
        // "<div id='video-detect-quality-inner' style='width:0%; height:20px; background-color: #5c5;'></div>"+
        // "<div id='video-detect-threshold' style='width: 1px; height: 20px; background-color: #f00; position: absolute; top:0; left:"+(trial.face_detect_threshold*100)+"%;'></div>"+
        // "</div>"+
        "</div>"; // FIX: removed stray extra </div> left over from the commented-out meter
    }

    // webgazer doesn't expose a numeric face/eye quality score, so infer
    // readiness from the color of its feedback box (green = well positioned).
    function check_face_score(){
      var box = document.querySelector('#webgazerFaceFeedbackBox');
      if(box){
        return box.style.borderColor == 'green';
      }
      return false;
    }

    // Phase 2 instructions; advances the state machine on button click.
    function show_begin_calibrate_message(){
      wg_container.innerHTML = "<div style='position: absolute; top: 50%; left: calc(50% - 350px); transform: translateY(-50%); width:700px;'>"+
        "<p>Great! Now the eye tracker will be calibrated to translate the image of your eyes from the webcam to a location on your screen.</p>"+
        "<p>To do this, you need to look at a series of dots and click on them with your mouse. Make sure to look where you are clicking. Each click teaches the eye tracker how to map the image of your eyes onto a location on the page.</p>"+
        // FIX: reflect the configurable clicks_per_point instead of a hard-coded "5".
        "<p>Please click each point "+trial.clicks_per_point+" times.</p>"+
        "<button id='begin-calibrate-btn' class='jspsych-btn'>Click to begin.</button>"+
        "</div>";
      document.querySelector('#begin-calibrate-btn').addEventListener('click', function(){
        state = 'calibrate';
        requestAnimationFrame(loop);
      });
    }

    function calibrate(){
      points_completed = 0;
      next_calibration_point();
    }

    // Show one calibration target; after clicks_per_point clicks, move to the
    // next point or finish.
    function next_calibration_point(){
      clicks = 0;
      var pt = trial.calibration_points[points_completed];
      var pt_html = '<div id="calibration-point" style="width:20px; height:20px; border-radius:10px; border: 2px solid #f00; background-color: #333; position: absolute; left:'+pt[0]+'%; top:'+pt[1]+'%;"></div>';
      wg_container.innerHTML = pt_html;
      wg_container.querySelector('#calibration-point').addEventListener('click', function(){
        clicks++;
        // Fade the point toward (but not fully to) transparent as clicks accumulate.
        wg_container.querySelector('#calibration-point').style.opacity = `${100 - clicks*(80/trial.clicks_per_point)}%`;
        if(clicks >= trial.clicks_per_point){
          points_completed++;
          if(points_completed < trial.calibration_points.length){
            next_calibration_point();
          } else {
            state = 'calibration-done';
            requestAnimationFrame(loop);
          }
        }
      });
    }

    // End the trial: clear pending timeouts and the display, report no data.
    var end_trial = function() {
      jsPsych.pluginAPI.clearAllTimeouts();
      var trial_data = {};
      display_element.innerHTML = '';
      jsPsych.finishTrial(trial_data);
    };
  };

  return plugin;
})();