diff --git a/psychopy/demos/coder/iohub/eyetracking/gcCursor/iohub_config.yaml b/psychopy/demos/coder/iohub/eyetracking/gcCursor/iohub_config.yaml index 43aa3ff0d0..1ffb5ed03d 100644 --- a/psychopy/demos/coder/iohub/eyetracking/gcCursor/iohub_config.yaml +++ b/psychopy/demos/coder/iohub/eyetracking/gcCursor/iohub_config.yaml @@ -2,6 +2,7 @@ monitor_devices: - Display: name: display reporting_unit_type: pix + color_space: rgb255 device_number: 0 physical_dimensions: width: 590 @@ -101,7 +102,7 @@ monitor_devices: # inner_diameter: 6 # outer_color: [255,255,255] # inner_color: [0,0,0] -# model_name: EYELINK 1000 TOWER +# model_name: EYELINK 1000 DESKTOP # Tobii Config (uncomment below device config to use) # - eyetracker.hw.tobii.EyeTracker: @@ -113,7 +114,7 @@ monitor_devices: # # name will be considered as possible candidates for connection. # # If you only have one Tobii system connected to the computer, # # this field can just be left empty. -# model_name: +# model_name: Any Pro Model # # # The serial number of the Tobii device that you wish to connect to. # # If set, only the Tobii system matching that serial number will be diff --git a/psychopy/demos/coder/iohub/eyetracking/selectTracker/base_iohub_config.yaml b/psychopy/demos/coder/iohub/eyetracking/selectTracker/base_iohub_config.yaml index 76f48270bd..ec1d2bf18b 100644 --- a/psychopy/demos/coder/iohub/eyetracking/selectTracker/base_iohub_config.yaml +++ b/psychopy/demos/coder/iohub/eyetracking/selectTracker/base_iohub_config.yaml @@ -2,6 +2,7 @@ monitor_devices: - Display: name: display reporting_unit_type: pix + color_space: rgb device_number: 0 physical_dimensions: width: 590 diff --git a/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/eyelink_config.yaml b/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/eyelink_config.yaml index ca3f9fb8ea..35698d45c1 100644 --- a/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/eyelink_config.yaml +++ b/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/eyelink_config.yaml @@ -150,7 +150,7 @@ monitor_devices: # should be a value between 0 and 255. 0 == black, 255 == white. In general # the last value of the color list (alpha) can be left at 255, indicating # the color not mixed with the background color at all. - screen_background_color: [128,128,128] + screen_background_color: [0.0, 0.0, 0.0] # target type defines what form of calibration graphic should be used # during calibration, validation, etc. modes. @@ -166,8 +166,8 @@ monitor_devices: # outer_diameter and inner_diameter are specified in pixels outer_diameter: 33 inner_diameter: 6 - outer_color: [255,255,255] - inner_color: [0,0,0] + outer_color: [1.0, 1.0, 1.0] + inner_color: [-1.0, -1.0, -1.0] # The model_name setting allows the definition of the eye tracker model being used. 
# For the eyelink implementation, valid values are: diff --git a/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/tobii_config.yaml b/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/tobii_config.yaml index 956e74fbec..26a11cc8bb 100644 --- a/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/tobii_config.yaml +++ b/psychopy/demos/coder/iohub/eyetracking/selectTracker/eyetracker_configs/tobii_config.yaml @@ -11,7 +11,7 @@ monitor_devices: # The model name of the Tobii device that you wish to connect to can be specified here, # and only Tobii systems matching that model name will be considered as possible candidates for connection. # If you only have one Tobii system connected to the computer, this field can just be left empty. - model_name: + model_name: Any Pro Model # The serial number of the Tobii device that you wish to connect to can be specified here, # and only the Tobii system matching that serial number will be connected to, if found. @@ -42,7 +42,7 @@ monitor_devices: # screen_background_color specifies the r,g,b background color to # set the calibration, validation, etc, screens to. Each element of the color # should be a value between 0 and 255. 0 == black, 255 == white. - screen_background_color: [128,128,128] + screen_background_color: [0, 0, 0] # Target type defines what form of calibration graphic should be used # during calibration, validation, etc. modes. @@ -56,13 +56,13 @@ monitor_devices: target_attributes: outer_diameter: 35 outer_stroke_width: 2 - outer_fill_color: [128,128,128] - outer_line_color: [255,255,255] + outer_fill_color: [0,0,0] + outer_line_color: [1,1,1] inner_diameter: 7 inner_stroke_width: 1 - inner_color: [0,0,0] - inner_fill_color: [0,0,0] - inner_line_color: [0,0,0] + inner_color: [-1,-1,-1] + inner_fill_color: [-1,-1,-1] + inner_line_color: [-1,-1,-1] animate: enable: True movement_velocity: 600.0 # 500 pix / sec diff --git a/psychopy/demos/coder/iohub/eyetracking/selectTracker/iohub_config.yaml b/psychopy/demos/coder/iohub/eyetracking/selectTracker/iohub_config.yaml index 66dd7f157d..c613efe283 100644 --- a/psychopy/demos/coder/iohub/eyetracking/selectTracker/iohub_config.yaml +++ b/psychopy/demos/coder/iohub/eyetracking/selectTracker/iohub_config.yaml @@ -2,6 +2,7 @@ data_store: enable: true monitor_devices: - Display: + color_space: named default_eye_distance: surface_center: 500 unit_type: mm @@ -19,16 +20,28 @@ monitor_devices: name: mouse - Experiment: name: experimentRuntime -- eyetracker.hw.mouse.EyeTracker: - controls: - blink: - - LEFT_BUTTON - - RIGHT_BUTTON - move: RIGHT_BUTTON - saccade_threshold: 0.5 +- eyetracker.hw.sr_research.eyelink.EyeTracker: + calibration: + auto_pace: true + pacing_speed: 1.5 + screen_background_color: grey + target_attributes: + inner_color: black + inner_diameter: 6 + outer_color: white + outer_diameter: 33 + target_type: CIRCLE_TARGET + type: NINE_POINTS + default_native_data_file_name: et_data + device_timer: + interval: 0.001 enable: true + enable_interface_without_connection: false + event_buffer_length: 1024 + model_name: EYELINK 1000 DESKTOP monitor_event_types: - MonocularEyeSampleEvent + - BinocularEyeSampleEvent - FixationStartEvent - FixationEndEvent - SaccadeStartEvent @@ -36,5 +49,17 @@ monitor_devices: - BlinkStartEvent - BlinkEndEvent name: tracker + network_settings: 100.1.1.1 runtime_settings: - sampling_rate: 50 + sample_filtering: + FILTER_FILE: FILTER_LEVEL_2 + FILTER_ONLINE: FILTER_OFF + sampling_rate: 500 +
track_eyes: RIGHT_EYE + vog_settings: + pupil_center_algorithm: CENTROID_FIT + pupil_measure_types: PUPIL_AREA + tracking_mode: PUPIL_CR_TRACKING + save_events: true + simulation_mode: false + stream_events: true diff --git a/psychopy/demos/coder/iohub/eyetracking/selectTracker/run.py b/psychopy/demos/coder/iohub/eyetracking/selectTracker/run.py index 725f8d3210..c47f4d5830 100644 --- a/psychopy/demos/coder/iohub/eyetracking/selectTracker/run.py +++ b/psychopy/demos/coder/iohub/eyetracking/selectTracker/run.py @@ -73,7 +73,7 @@ def run(selected_eyetracker_name): instructions_text_stim = visual.TextStim( window, text='', pos=[0, 0], height=24, color=[-1, -1, -1], colorSpace='rgb', - wrapWidth=window.size[0] * .9 + wrapWidth=window.size[0] * .9, units='pix' ) # Update Instruction Text and display on screen. diff --git a/psychopy/demos/coder/iohub/launchHub.py b/psychopy/demos/coder/iohub/launchHub.py index 13b2a6d510..21531adb73 100644 --- a/psychopy/demos/coder/iohub/launchHub.py +++ b/psychopy/demos/coder/iohub/launchHub.py @@ -67,7 +67,7 @@ def testUsingPsychoPyMonitorConfig(): print('Display Default Eye Distance: ', display.getDefaultEyeDistance()) print('Display Physical Dimensions: ', display.getPhysicalDimensions()) - # That's it, shut down the ioHub Proicess and exit. ;) + # That's it, shut down the ioHub Process and exit. ;) io.quit() def testEnabledDataStore(): diff --git a/psychopy/iohub/TODO.txt b/psychopy/iohub/TODO.txt deleted file mode 100644 index 19ccc6e76f..0000000000 --- a/psychopy/iohub/TODO.txt +++ /dev/null @@ -1,74 +0,0 @@ -TODO list / iohub 2021 rework project notes - -eyetracker related ------------------- - -- remove inner_stroke_width setting from tobii -- remove outer_stroke_width setting from tobii -- change inner_fill_color and inner_stroke_color -> inner_color in tobii settings -- change outer_fill_color and outer_stroke_color -> outer_color in tobii settings - -- make eyelink and tobii target setting consistent. - - -Other ------ - -- Python 3 only support is OK - -- Test / start developing with Python 3.9 - -- remove any areas of logic dealing with Python 2 vs Python 3 incompatibilities. - -- remove unneeded params from all supported_config_settings.yaml/default_device.yaml files - - did some obvious ones already, but a device by device review would be good. - -- remove unused / placeholder Constants (iohub.constants.py) - -- stop using lazy_import. Not sure it is really needed for iohub - -- ?? implement a new 'Mouse' base eye tracker implementation so people can use the mouse to simulate eye movements - in their experiment and get sample data like it was from a real eye tracker. - - might also act as a good 'template' for creating a new eye tracker interface with. - -- ?? Assume / enforce only 1 experiment, 1 session per iohub hdf5 file - -- switch iohub source code copywrite to standard psychopy text. - -- remove all references to these no longer supported devices: - - EventPublisher (unless we keep it) - - EventSub (unless we keep it) - - DAQ (was used for labjack and Measurement Computing analog input) - -- macos keyboard and mouse events: - - switch from polling method of CFRunLoopRunInMode to threaded approach used by eventmp. - - if that is not enough, reimplement using usb sniffer or just correct timestamp - for _minimum_ possible event delay. - -- ?? remove event filtering framework (iohub.devices.eventfilters.py) - - not actually used from what I can tell. - -- ?? remove need for / use of experiment_config.yaml. 
Determine what functionality this will affect. -- ?? revert to server side only devices / events accessed via RPC. - remove iohub.client.keyboard.py and iohub.client.wintab.py - move any needed methods / functionality to the server side device class, or deprecate. - experiment would now get 'native' iohub keyboard and wintab events, which is not backwards compatible. -- Documentation ideas: - ioHub Developer 'Notes' - Psychopy ioHub User Docs - Saving data using ioHub DataStore: - device events - experiment and session meta data - condition variables table - hdf5 file structure - reading data / doing analysis from hdf5 files -- Some bigger issues that would be good to address: - Consistent iohub Error / exception reporting framework: - communication of issues to client process. - reporting of issues by client process - cleanup (remove? iohub.errors.py) - Consistent device status reporting after client starts iohub process. - Create files in user writable folder (use OS specific standard folders) diff --git a/psychopy/iohub/changes.txt b/psychopy/iohub/changes.txt new file mode 100644 index 0000000000..e37a04620b --- /dev/null +++ b/psychopy/iohub/changes.txt @@ -0,0 +1,20 @@ +Changes made to iohub for 2021.2 Release +---------------------------------------- + +- Removed iohub ExpRuntime class (use iohub.launchHubServer() function) +- Some eye tracker settings may have changed. If running an experiment created with a previous version of iohub, + please review your eye tracker settings and update as necessary. + - Do we need to list every specific setting change? +- iohub can no longer be copied out of the psychopy package and used as a stand alone package + in your site-packages folder. Change `import iohub` to `import psychopy.iohub` for example. +- Removed most device settings related to device hardware specifics (model_id, manufacture_date, etc) + since they were never really used. +- removed use of .iohpid +- ** Changed datastore schema: + - increased experiment and session data string max length +- Added wintab device for Wacom based digital pens. (Windows only) +- Added iohub MouseGaze eye tracker simulator. +- Added 'color_space' setting to Display (bridging to remove Display class from iohub) +- Eyelink and tobii calibration gfx can now use same color space as window instead of always rgb255. + - TODO: Retest all eyetracking examples with all eye trackers on all OS's +- Removed iohub/devices/display/unit_conversions.py. Moving to psychopy monitorutil functions. \ No newline at end of file diff --git a/psychopy/iohub/client/connect.py b/psychopy/iohub/client/connect.py index 25ebe71353..61ef8fbb8c 100644 --- a/psychopy/iohub/client/connect.py +++ b/psychopy/iohub/client/connect.py @@ -3,12 +3,11 @@ # Copyright (C) 2012-2016 iSolver Software Solutions # Distributed under the terms of the GNU General Public License (GPL). from __future__ import division, absolute_import, print_function - import os - from .. import _DATA_STORE_AVAILABLE, IOHUB_DIRECTORY from . import ioHubConnection from ..util import yload, yLoader, readConfig +from psychopy import logging def launchHubServer(**kwargs): @@ -63,8 +62,14 @@ def launchHubServer(**kwargs): | | | file name different than the | | | | session_code.
| +---------------------+-----------------+---------------+-----------------+ - |psychopy_monitor_name| str | Provides the path of a | - | | | PsychoPy Monitor Center config | + | window | psychopy.visual | The psychoPy experiment window | + | | .Window | being used. Information like | + | | | display size, viewing distance, | + | | | coord / color type is used to | + | | | update the ioHub Display device.| + +---------------------+-----------------+---------------+-----------------+ + | psychopy_monitor | str | Provides the path of a | + | (Deprecated) | | PsychoPy Monitor Center config | | | | file. Information like display | | | | size is read and used to update | | | | the ioHub Display Device config.| @@ -158,10 +163,6 @@ def launchHubServer(**kwargs): elif session_code: datastore_name = session_code - monitor_name = kwargs.get('psychopy_monitor_name') - if monitor_name: - del kwargs['psychopy_monitor_name'] - monitor_devices_config = None iohub_conf_file_name = kwargs.get('iohub_config_name') if iohub_conf_file_name: @@ -172,18 +173,58 @@ def launchHubServer(**kwargs): monitor_devices_config = _temp_conf_read.get('monitor_devices') del kwargs['iohub_config_name'] - iohub_config = None device_dict = {} if monitor_devices_config: device_dict = monitor_devices_config - # iohub_config = dict(monitor_devices=monitor_devices_config) - - if isinstance(device_dict,(list,tuple)): + + if isinstance(device_dict, (list, tuple)): tempdict_ = {} for ddict in device_dict: tempdict_[list(ddict.keys())[0]] = list(ddict.values())[0] device_dict = tempdict_ - + + # PsychoPy Window & Monitor integration + + # Get default iohub display config settings for experiment + display_config = device_dict.get('Display', {}) + if display_config: + del device_dict['Display'] + + # Check for a psychopy_monitor_name name + monitor_name = kwargs.get('psychopy_monitor_name', kwargs.get('monitor_name')) + if monitor_name: + if kwargs.get('psychopy_monitor_name'): + del kwargs['psychopy_monitor_name'] + else: + del kwargs['monitor_name'] + + window = kwargs.get('window') + if window: + kwargs['window'] = None + del kwargs['window'] + # PsychoPy Window has been provided, so read all info needed for iohub Display from Window + if window.units: + display_config['reporting_unit_type'] = window.units + if window.colorSpace: + display_config['color_space'] = window.colorSpace + display_config['device_number'] = window.screen + + if window.monitor.name == "__blank__": + logging.warning("launchHubServer: window.monitor.name is '__blank__'. " + "Create the PsychoPy window with a valid Monitor name.") + elif window.monitor.name: + display_config['psychopy_monitor_name'] = window.monitor.name + display_config['override_using_psycho_settings'] = True + + if not window._isFullScr: + logging.warning("launchHubServer: If using the iohub mouse or eyetracker devices, fullScr should be True.") + + elif monitor_name: + display_config['psychopy_monitor_name'] = monitor_name + display_config['override_using_psycho_settings'] = True + logging.warning("launchHubServer: Use of psychopy_monitor_name is deprecated. 
" + "Please use window= and provide a psychopy window that has a .monitor.") + device_dict.update(kwargs) device_list = [] @@ -195,47 +236,35 @@ def func2str(func): return '%s.%s' % (func.__module__, func.__name__) def configfuncs2str(config): - for k, v in list(config.items()): - if isinstance(v, dict): - configfuncs2str(v) - if isFunction(v): - config[k] = func2str(v) + for key, val in list(config.items()): + if isinstance(val, dict): + configfuncs2str(val) + if isFunction(val): + config[key] = func2str(val) configfuncs2str(device_dict) - # <<< WTF is this for .... ????? - - # Ensure a Display Device has been defined. If not, create one. - # Insert Display device as first device in dev. list. - if 'Display' not in device_dict: - if monitor_name: - display_config = {'psychopy_monitor_name': monitor_name, - 'override_using_psycho_settings': True} - else: - display_config = {'override_using_psycho_settings': False} - device_list.append(dict(Display=display_config)) - else: - device_list.append(dict(Display=device_dict['Display'])) - del device_dict['Display'] + + # Add Display device as first in list of devices to be sent to iohub + device_list.append(dict(Display=display_config)) # Ensure a Experiment, Keyboard, and Mouse Devices have been defined. # If not, create them. check_for_devs = ['Experiment', 'Keyboard', 'Mouse'] for adev_name in check_for_devs: if adev_name not in device_dict: - device_list.append({adev_name : {}}) + device_list.append({adev_name: {}}) else: - device_list.append({adev_name : device_dict[adev_name]}) + device_list.append({adev_name: device_dict[adev_name]}) del device_dict[adev_name] iohub_config = dict() - def_ioconf = readConfig(os.path.join(IOHUB_DIRECTORY,u'default_config.yaml')) + def_ioconf = readConfig(os.path.join(IOHUB_DIRECTORY, u'default_config.yaml')) # Add remaining defined devices to the device list. for class_name, device_config in device_dict.items(): if class_name in def_ioconf: # not a device, a top level iohub config param iohub_config[class_name] = device_config else: - #TODO: Check that class_name is valid before adding to list device_list.append({class_name: device_config}) # Create an ioHub configuration dictionary. @@ -254,9 +283,4 @@ def configfuncs2str(config): filename=datastore_name, experiment_info=experiment_info, session_info=session_info) - - #import pprint - #print() - #print('ioHubConnection(iohub_config):') - #pprint.pprint(iohub_config) return ioHubConnection(iohub_config) diff --git a/psychopy/iohub/devices/deviceConfigValidation.py b/psychopy/iohub/devices/deviceConfigValidation.py index c796d51402..5bbfc48b2e 100644 --- a/psychopy/iohub/devices/deviceConfigValidation.py +++ b/psychopy/iohub/devices/deviceConfigValidation.py @@ -2,20 +2,19 @@ # Part of the psychopy.iohub library. # Copyright (C) 2012-2016 iSolver Software Solutions # Distributed under the terms of the GNU General Public License (GPL). -from __future__ import division, absolute_import - -# Takes a device configuration yaml dict and processes it based on the devices -# support_settings_values.yaml (which must be in the same directory as the -# Device class) to ensure all entries for the device setting are valid values. 
- from past.builtins import basestring import socket import os import numbers # numbers.Integral is like (int, long) but supports Py3 - +from psychopy import colors +from psychopy.tools import arraytools from ..util import yload, yLoader, module_directory from ..errors import print2err +# Takes a device configuration yaml dict and processes it based on the devices +# support_settings_values.yaml (which must be in the same directory as the +# Device class) to ensure all entries for the device setting are valid values. + class ValidationError(Exception): """Base class for exceptions in this module.""" pass @@ -51,7 +50,6 @@ class StringValueError(ValidationError): value_given -- the value read from the experiment configuration file. device_config_param_constraints -- the set of constraints that apply to the parameter. msg -- explanation of the error - """ def __init__( @@ -248,24 +246,34 @@ def __str__(self): MIN_VALID_INT_VALUE = 0 MAX_VALID_INT_VALUE = 1000000 +def is_sequence(arg): + return hasattr(arg, "__getitem__") or hasattr(arg, "__iter__") -def isValidRgb255Color(config_param_name, color, constraints): - if isinstance(color, (list, tuple)): - if len(color) in [3, 4]: - for c in color: - if isinstance(c, int): - if c < 0 or c > 255: - raise ColorValueError(config_param_name, color) - else: - raise ColorValueError(config_param_name, color) +def isValidColor(config_param_name, color, constraints): + """ + Return color if it is a valid psychopy color (regardless of color space) + , otherwise raise error. Color value can be in hex, name, rgb, rgb255 format. + """ + if isinstance(color, str): + if color[0] == '#' or color[0:2].lower() == '0x': + rgb255color = colors.hex2rgb255(color) + if rgb255color is not None: + return color + else: + raise ColorValueError(config_param_name, color) + + if color.lower() in colors.colorNames.keys(): + return color + else: + raise ColorValueError(config_param_name, color) + if isinstance(color, (float, int)) or (is_sequence(color) and len(color) == 3): + colorarray = arraytools.val2array(color, length=3) + if colorarray is not None: + return color else: raise ColorValueError(config_param_name, color) - - return color - raise ColorValueError(config_param_name, color) - def isValidString(config_param_name, value, constraints): if isinstance(value, basestring): if value == constraints: @@ -385,7 +393,7 @@ def isValueValid(config_param_name, value, valid_values): IOHUB_FLOAT=isValidFloat, IOHUB_INT=isValidInt, IOHUB_LIST=isValidList, - IOHUB_RGBA255_COLOR=isValidRgb255Color, + IOHUB_COLOR=isValidColor, IOHUB_IP_ADDRESS_V4=isValidIpAddress) ############################################### diff --git a/psychopy/iohub/devices/display/__init__.py b/psychopy/iohub/devices/display/__init__.py index 9e135258be..e27b390c16 100644 --- a/psychopy/iohub/devices/display/__init__.py +++ b/psychopy/iohub/devices/display/__init__.py @@ -148,6 +148,21 @@ def getCoordinateType(self): """ return self.getConfiguration()['reporting_unit_type'] + def getColorSpace(self): + """ + Returns the color space to use for PsychoPy Windows. + + Please refer to the psychoPy documentation for a detailed description of + supported color spaces. + + Args: + None + + Returns: + str: Display color space + """ + return self.getConfiguration()['color_space'] + def getPixelsPerDegree(self): """Returns the Display's horizontal and vertical pixels per degree This is currently calculated using the PsychoPy built in function. 
Therefore diff --git a/psychopy/iohub/devices/display/default_display.yaml b/psychopy/iohub/devices/display/default_display.yaml index 03cf4ff32b..8d3d09aaa8 100644 --- a/psychopy/iohub/devices/display/default_display.yaml +++ b/psychopy/iohub/devices/display/default_display.yaml @@ -18,7 +18,9 @@ Display: # be represented in. Valid values are pix, deg, norm, or cm. # reporting_unit_type: pix - + + color_space: rgb255 + # The Display index to assign to the device. On a single Display # computer this must always be 0. On a computer with multiple displays, # the value can be between 0 and display_count - 1. diff --git a/psychopy/iohub/devices/display/supported_config_settings.yaml b/psychopy/iohub/devices/display/supported_config_settings.yaml index 8739caf692..0540ec8f8b 100644 --- a/psychopy/iohub/devices/display/supported_config_settings.yaml +++ b/psychopy/iohub/devices/display/supported_config_settings.yaml @@ -9,8 +9,17 @@ Display: IOHUB_INT: min: 0 max: 7 - reporting_unit_type: [ org, pix, pixel, pixels, deg, degree, degrees, cm, mm, inch, norm, normalize, normalized, percent, percentage] - origin: [ center , top_left , bottom_left] + reporting_unit_type: + IOHUB_LIST: + valid_values: [pix, norm, cm, deg, degFlat, degFlatPos] + min_length: 1 + max_length: 1 + color_space: + IOHUB_LIST: + valid_values: [rgb, rgb255, named , hex, hsv, dkl, lms] + min_length: 1 + max_length: 1 + origin: center physical_dimensions: width: IOHUB_FLOAT: @@ -22,14 +31,22 @@ Display: min: 1.0 max: 2048.0 required: True - unit_type: [ mm, cm, inch ] + unit_type: + IOHUB_LIST: + valid_values: [ mm, cm, inch ] + min_length: 1 + max_length: 1 default_eye_distance: surface_center: IOHUB_FLOAT: min: 1.0 max: 2048.0 required: True - unit_type: [ mm, cm, inch ] + unit_type: + IOHUB_LIST: + valid_values: [ mm, cm, inch ] + min_length: 1 + max_length: 1 psychopy_monitor_name: IOHUB_STRING: min_length: 1 diff --git a/psychopy/iohub/devices/display/unit_conversions.py b/psychopy/iohub/devices/display/unit_conversions.py deleted file mode 100644 index a96d932c92..0000000000 --- a/psychopy/iohub/devices/display/unit_conversions.py +++ /dev/null @@ -1,301 +0,0 @@ -# -*- coding: utf-8 -*- -# Part of the psychopy.iohub library. -# Copyright (C) 2012-2013 Josh Borah -# Copyright (C) 2012-2016 iSolver Software Solutions -# Distributed under the terms of the GNU General Public License (GPL). -# -# fileauthor:: Sol Simpson -# fileauthor:: Josh Borah - -from math import atan, tan, sqrt - -# -# distToPixel -# -# Convert between distance coordinates and pixel coordinates. -# -# Distance coordinates are 2D Cartesian coordinates, measured from an origin at the -# center pixel, and are real distance units (inches, centimeters, etc.) along horizontal and -# vertical screen axes. -# - - -def distToPixel( - hpix_per_dist_unit, - vpix_per_dist_unit, - pixHres, - pixVres, - distH, - distV): - pixH = pixHres / 2.0 + (distH * hpix_per_dist_unit) - pixV = pixVres / 2.0 + (distV * vpix_per_dist_unit) - return pixH, pixV - - -def pixelToDist( - hpix_per_dist_unit, - vpix_per_dist_unit, - pixHres, - pixVres, - pixH, - pixV): - distH = (pixH - pixHres / 2.0) / hpix_per_dist_unit - distV = (pixV - pixVres / 2.0) / vpix_per_dist_unit - return distH, distV - -# -# All of following assume a nominal eye point 'eye2display' distance units from display -# with line-of-gaze normal to the display at the display center. Angle variable are -# assumed to have units of degrees. 
-# -# Since the Python math lib trig functions work with radians, -# a radian to angle conversion factor (deg/rad = 57.2958) is included to give angle -# variables 'degree' units. -# - -# -# Convert between distance coordinates (distH, distV) and 'normalized Cartesian -# coordinates' (ndH, ndV). -# -# 'Normalized Cartesian coordinates' are Cartesian distance coordinates, normalized by -# by the distance from the nominal eye point to the display. For very small distances -# from the origin, these values coorespond to visual angle from the origin along the -# horizontal and vertical display axes. A factor of 57.2958 is used so that the values -# correspond to degrees rather than radians. -# - - -def convertDistToNd(eye2display, distH, distV): - ndH = 57.2958 * distH / eye2display - ndV = 57.2958 * distV / eye2display - return ndH, ndV - - -def convertNdToDist(eye2display, ndH, ndV): - distH = ndH * eye2display / 57.2958 - distV = ndV * eye2display / 57.2958 - return distH, distV - -# -# Convert between distance coordinates (distH, distV) and -# 'Cartesian Angles' (caH, caV). -# 'Cartesian Angles' are visual angles (from nominal eye point) along -# horizontal and vertical display axes. In other words, the horizontal coordinate is the -# visual angle between the origin and the intersection of the Cartesian -# coordinate line with the horizontal axes. -# - - -def distToCa(eye2display, distH, distV): - caH = 57.2958 * atan(distH / eye2display) - caV = 57.2958 * atan(distV / eye2display) - return caH, caV - - -def caToDist(eye2display, caH, caV): - distH = eye2display * tan(caH / 57.2958) - distV = eye2display * tan(caV / 57.2968) - return distH, distV - - -# -# Convert between distance coordinates (distH, distV) and Fick Coordinates (as,el) -# -def distToFick(eye2display, distH, distV): - az = 57.2958 * atan(distH / eye2display) - el = 57.2958 * atan(distV / sqrt(eye2display * - eye2display + distH * distH)) - return az, el - - -def fickToDist(eye2display, az, el): - distH = eye2display * tan(az / 57.2958) - distV = sqrt(eye2display * eye2display + distH * distH) * tan(el / 57.2958) - return distH / distV - -# -# Convert between distance coordinates (distH, distV) and 'symmetric angle' -# coordinates (saH, saV). -# 'Symmetric angles' are visual angles between a point on the display and the central -# axes lines, measured along lines parallel to the display axes. The vertical coordinate is -# same as the Fick elevation angle. The horizontal coordinate is measured in a -# symmetrical fashion and is not the same as the Fick azimuth angle. -# - - -def distToSa(eye2display, distH, distV): - saH = 57.2958 * atan(distH / sqrt(eye2display * - eye2display + distV * distV)) - saV = 57.2958 * atan(distV / sqrt(eye2display * - eye2display + distH * distH)) - return saH, saV - - -def saToDist(eye2dsply, saH, saV): - tansaV_sqrd = tan(saV / 57.2958) * tan(saV / 57.2958) - tansaH_sqrd = tan(saH / 57.2958) * tan(saH / 57.2958) - Dsqrd = eye2dsply * eye2dsply - - signsaV = 1.0 - if saV < 0.0: - signsaV = -1.0 - - signsaH = 1.0 - if saH < 0.0: - signsaH = -1.0 - - distV = signsaV * sqrt((Dsqrd * tansaV_sqrd + Dsqrd * - tansaH_sqrd * tansaV_sqrd) / (1 - tansaH_sqrd * tansaV_sqrd)) - - distH = signsaH * sqrt((Dsqrd + distV * distV) * tansaH_sqrd) - - return distV, distH - -#------------------------------------- -# Old code using matrix multiplication to convert between screen pix and -# experiment software coord system... 
- -# if coord_type=='pix': - -# origin=self.getOrigin() -# if origin not in Display._supported_origin_types: -# print2err(" *** Display device error: Unknown origin type: {0}".format(origin)) -# return -# -# x1,y1,x2,y2=self.getBounds() -# print2err('getBounds: ',self.getBounds(), ) -# -# bounds_matrix=np.matrix([[x1,y1,1,0],[-y1,x1,0,1],[x2,y2,1,0],[-y2,x2,0,1]]) -# -# cx1=None -# cy1=None -# cx2=None -# cy2=None -# -## if coord_type == 'org': -## cx1=x1 -## cy1=y1 -## cx2=x2 -## cy2=y2 -# if coord_type == 'pix': -# if origin == 'center': -# cx1=-pixel_width/2.0 -# cy1=pixel_height/2.0 -# cx2=pixel_width/2.0 -# cy2=-pixel_height/2.0 -## elif origin == 'top_left': -## cx1=0 -## cy1=0 -## cx2=pixel_width -## cy2=pixel_height -## elif origin == 'bottom_left': -## cx1=0 -## cy1=pixel_height -## cx2=pixel_width -## cy2=0 -# elif coord_type in ['mm','cm','inch']: -# phys_to_coord_ratio=1.0 -## if coord_type == 'mm': -## if phys_unit_type == 'cm': -## phys_to_coord_ratio=10.0 -## elif phys_unit_type == 'inch': -## phys_to_coord_ratio=25.4 -# if coord_type == 'cm': -# if phys_unit_type == 'mm': -# phys_to_coord_ratio=0.1 -# elif phys_unit_type == 'inch': -# phys_to_coord_ratio=2.54 -## elif coord_type == 'inch': -## if phys_unit_type == 'mm': -## phys_to_coord_ratio=0.0393701 -## elif phys_unit_type == 'cm': -## phys_to_coord_ratio=0.393701 -# -# if origin == 'center': -# phys_to_coord_ratio=2.0*phys_to_coord_ratio -# cx1=-phys_width/phys_to_coord_ratio -# cy1=phys_height/phys_to_coord_ratio -# cx2=phys_width/phys_to_coord_ratio -# cy2=-phys_height/phys_to_coord_ratio -## elif origin == 'top_left': -## cx1=0.0 -## cy1=0.0 -## cx2=phys_width*phys_to_coord_ratio -## cy2=phys_height*phys_to_coord_ratio -## elif origin == 'bottom_left': -## cx1=0.0 -## cy1=phys_height*phys_to_coord_ratio -## cx2=phys_width*phys_to_coord_ratio -## cy2=0.0 -# elif coord_type == 'norm': -# if origin == 'center': -# cx1=-1.0 -# cy1=1.0 -# cx2=1.0 -# cy2=-1.0 -## elif origin == 'top_left': -## cx1=0.0 -## cy1=0.0 -## cx2=1.0 -## cy2=1.0 -## elif origin == 'bottom_left': -## cx1=0.0 -## cy1=1.0 -## cx2=1.0 -## cy2=0.0 -## elif coord_type == 'percent': -## if origin == 'center': -## cx1=-50.0 -## cy1=50.0 -## cx2=50.0 -## cy2=-50.0 -## elif origin == 'top_left': -## cx1=0.0 -## cy1=0.0 -## cx2=100.0 -## cy2=100.0 -## elif origin == 'bottom_left': -## cx1=0.0 -## cy1=100.0 -## cx2=100.0 -## cy2=0.0 -# elif coord_type == 'deg': -# if origin == 'center': -# cx1=-degree_width/2.0 -# cy1=degree_height/2.0 -# cx2=degree_width/2.0 -# cy2=-degree_height/2.0 -## elif origin == 'top_left': -## cx1=0.0 -## cy1=0.0 -## cx2=degree_width -## cy2=degree_height -## elif origin == 'bottom_left': -## cx1=0.0 -## cy1=degree_height -## cx2=degree_width -## cy2=0.0 -# -# if cx1 is not None and cy1 is not None and cx2 is not None and cy2 is not None : -# coord_matrix=np.matrix( [[cx1],[cy1],[cx2],[cy2]] ) -# abcd = np.linalg.solve(bounds_matrix, coord_matrix) -# a,b,c,d=np.array(abcd)[:,0] -# #print2err('abcd: {0}\n a={1}, b={2} , c={3}, d={4}'.format(abcd,a,b,c,d)) -# -# -# def pix2coord(self, x,y,display_index=None): -# #print2err('Display {0} bounds: {1}'.format(display_index,self.getBounds())) -# if display_index == self.getIndex(): -# return a*x+b*y+c, b*x-a*y+d -# return x,y -# -# self._pix2coord=pix2coord -# -# def coord2pix(self,cx,cy,display_index=None): -# if display_index == self.getIndex(): -# aabb=(a**2+b**2) -# return (a*cx+b*cy-b*d-a*c)/aabb, (b*cx-a*cy-b*c+a*d)/aabb -# return cx,cy -# -# self._coord2pix=coord2pix - diff --git 
a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/default_eyetracker.yaml b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/default_eyetracker.yaml index 235a0d07b9..b3c6381648 100644 --- a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/default_eyetracker.yaml +++ b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/default_eyetracker.yaml @@ -112,8 +112,8 @@ eyetracker.hw.sr_research.eyelink.EyeTracker: # target_attributes: # outer_diameter and inner_diameter are specified in pixels - outer_diameter: 33 - inner_diameter: 6 + outer_diameter: 33.0 + inner_diameter: 6.0 outer_color: [255,255,255] inner_color: [0,0,0] diff --git a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyeLinkCoreGraphicsIOHubPsychopy.py b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyeLinkCoreGraphicsIOHubPsychopy.py index 185e0512d5..3edb97e2fd 100644 --- a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyeLinkCoreGraphicsIOHubPsychopy.py +++ b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyeLinkCoreGraphicsIOHubPsychopy.py @@ -20,27 +20,28 @@ class FixationTarget(object): def __init__(self, psychopy_eyelink_graphics): + win = psychopy_eyelink_graphics.window self.calibrationPointOuter = visual.Circle( - psychopy_eyelink_graphics.window, + win, pos=(0, 0), lineWidth=1.0, lineColor=psychopy_eyelink_graphics.CALIBRATION_POINT_OUTER_COLOR, - colorSpace='rgb255', + colorSpace=win.colorSpace, fillColor=psychopy_eyelink_graphics.CALIBRATION_POINT_OUTER_COLOR, radius=psychopy_eyelink_graphics.CALIBRATION_POINT_OUTER_RADIUS, name='CP_OUTER', - units='pix', + units=win.units, opacity=1.0, interpolate=False) self.calibrationPointInner = visual.Circle( - psychopy_eyelink_graphics.window, + win, pos=(0, 0), lineWidth=1.0, lineColor=psychopy_eyelink_graphics.CALIBRATION_POINT_INNER_COLOR, - colorSpace='rgb255', + colorSpace=win.colorSpace, fillColor=psychopy_eyelink_graphics.CALIBRATION_POINT_INNER_COLOR, radius=psychopy_eyelink_graphics.CALIBRATION_POINT_INNER_RADIUS, name='CP_INNER', - units='pix', + units=win.units, opacity=1.0, interpolate=False) @@ -58,11 +59,11 @@ class BlankScreen(object): def __init__(self, psychopy_win, color): self.display_size = psychopy_win.size w, h = self.display_size - self.win = psychopy_win + win = psychopy_win self.color = color - self.background = visual.Rect(self.win, w, h, + self.background = visual.Rect(win, w, h, lineColor=self.color, - colorSpace='rgb255', + colorSpace=win.colorSpace, fillColor=self.color, units='pix', name='BACKGROUND', @@ -78,10 +79,10 @@ class TextLine(object): def __init__(self, psychopy_win): self.display_size = psychopy_win.size - self.win = psychopy_win + win = psychopy_win self.textLine = visual.TextStim( - self.win, + win, text='***********************', pos=( 0, @@ -111,18 +112,18 @@ def draw(self, text=None): class IntroScreen(object): def __init__(self, psychopy_win): self.display_size = psychopy_win.size - self.window = psychopy_win + window = psychopy_win font_height = 24 space_per_lines = font_height * 2.5 - if self.window.useRetina: - topline_y = self.window.size[1]/4-font_height*2 + if window.useRetina: + topline_y = window.size[1]/4-font_height*2 else: - topline_y = self.window.size[1]/2-font_height*2 - wrap_width = self.window.size[1] * .8 + topline_y = window.size[1]/2-font_height*2 + wrap_width = window.size[1] * .8 self.introlines = [] - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='>>>> Eyelink System Setup: 
Keyboard Actions <<<<', pos=( 0, @@ -142,13 +143,13 @@ def __init__(self, psychopy_win): italic=False, wrapWidth=wrap_width)) - if self.window.useRetina: - left_margin = -self.window.size[0]/4 + if window.useRetina: + left_margin = -window.size[0]/4 else: - left_margin = -self.window.size[0]/2 + left_margin = -window.size[0]/2 left_margin = left_margin *.4 topline_y = topline_y - space_per_lines/3 - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='* ENTER: Begin Camera Setup Mode', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -168,7 +169,7 @@ def __init__(self, psychopy_win): anchorHoriz='left', wrapWidth=wrap_width)) - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='* C: Start Calibration Procedure', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -188,7 +189,7 @@ def __init__(self, psychopy_win): anchorHoriz='left', wrapWidth=wrap_width)) - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='* V: Start Validation Procedure', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -205,10 +206,10 @@ def __init__(self, psychopy_win): bold=False, italic=False, alignText='left', - anchorHoriz='left', #anchorHoriz='left', + anchorHoriz='left', wrapWidth=wrap_width)) - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='* ESCAPE: Exit EyeLink System Setup', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -225,11 +226,11 @@ def __init__(self, psychopy_win): bold=True, italic=False, alignText='left', - anchorHoriz='left', #anchorHoriz='left', + anchorHoriz='left', wrapWidth=wrap_width)) topline_y = topline_y - space_per_lines/3 - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='------ Camera Setup Mode Specific Actions ------', pos=(0, topline_y - space_per_lines * (len(self.introlines))), @@ -248,7 +249,7 @@ def __init__(self, psychopy_win): wrapWidth=wrap_width)) topline_y = topline_y - space_per_lines/3 - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='* Left / Right Arrow: Switch Between Camera Views', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -265,10 +266,10 @@ def __init__(self, psychopy_win): bold=False, italic=False, alignText='left', - anchorHoriz='left', #anchorHoriz='left', + anchorHoriz='left', wrapWidth=wrap_width)) - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='* A: Auto-Threshold Image', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -285,10 +286,10 @@ def __init__(self, psychopy_win): bold=False, italic=False, alignText='left', - anchorHoriz='left', #anchorHoriz='left', + anchorHoriz='left', wrapWidth=wrap_width)) - self.introlines.append(visual.TextStim(self.window, + self.introlines.append(visual.TextStim(window, text='* Up / Down Arrow: Manually Adjust Pupil Threshold', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -305,10 +306,10 @@ def __init__(self, psychopy_win): bold=False, italic=False, alignText='left', - anchorHoriz='left', #anchorHoriz='left', + anchorHoriz='left', wrapWidth=wrap_width)) - self.introlines.append(visual.TextStim(self.window, + 
self.introlines.append(visual.TextStim(window, text='* + or -: Manually Adjust CR Threshold.', pos=(left_margin, topline_y - space_per_lines * (len(self.introlines))), @@ -325,7 +326,7 @@ def __init__(self, psychopy_win): bold=False, italic=False, alignText='left', - anchorHoriz='left', #anchorHoriz='left', + anchorHoriz='left', wrapWidth=wrap_width)) def draw(self): @@ -338,13 +339,13 @@ class EyeLinkCoreGraphicsIOHubPsychopy(pylink.EyeLinkCustomDisplay): # micro threads, since one is blocking # on camera setup. - WINDOW_BACKGROUND_COLOR = (128, 128, 128) - CALIBRATION_POINT_OUTER_RADIUS = 15.0, 15.0 + WINDOW_BACKGROUND_COLOR = None + CALIBRATION_POINT_OUTER_RADIUS = None CALIBRATION_POINT_OUTER_EDGE_COUNT = 64 - CALIBRATION_POINT_OUTER_COLOR = (255, 255, 255) - CALIBRATION_POINT_INNER_RADIUS = 3.0, 3.0 - CALIBRATION_POINT_INNER_EDGE_COUNT = 32 - CALIBRATION_POINT_INNER_COLOR = (25, 25, 25) + CALIBRATION_POINT_OUTER_COLOR = None + CALIBRATION_POINT_INNER_RADIUS =None + CALIBRATION_POINT_INNER_EDGE_COUNT = 64 + CALIBRATION_POINT_INNER_COLOR = None def __init__(self, eyetrackerInterface, targetForegroundColor=None, targetBackgroundColor=None, screenColor=None, @@ -355,11 +356,12 @@ def __init__(self, eyetrackerInterface, targetForegroundColor=None, self.tracker = eyetrackerInterface._eyelink self._ioKeyboard = None self._ioMouse = None + display = eyetrackerInterface._display_device self.imgstim_size = None self.rgb_index_array = None - self.screenSize = self._eyetrackerinterface._display_device.getPixelResolution() + self.screenSize = display.getPixelResolution() self.width = self.screenSize[0] self.height = self.screenSize[1] @@ -391,12 +393,11 @@ def __init__(self, eyetrackerInterface, targetForegroundColor=None, self.tracker.sendCommand('autothreshold_repeat=YES') self.tracker.sendCommand('enable_camera_position_detect=YES') - display = self._eyetrackerinterface._display_device self.window = visual.Window(display.getPixelResolution(), monitor=display.getPsychopyMonitorName(), units=display.getCoordinateType(), color=self.WINDOW_BACKGROUND_COLOR, - colorSpace='rgb255', + colorSpace=display.getColorSpace(), fullscr=True, allowGUI=False, screen=display.getIndex() diff --git a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyetracker.py b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyetracker.py index 574706bedb..a4fb8e3044 100644 --- a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyetracker.py +++ b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/eyetracker.py @@ -409,15 +409,10 @@ def runSetupProcedure(self): calibration_properties = self.getConfiguration().get('calibration') circle_attributes = calibration_properties.get('target_attributes') - targetForegroundColor = circle_attributes.get( - 'outer_color') # [r,g,b] of outer circle of targets - targetBackgroundColor = circle_attributes.get( - 'inner_color') # [r,g,b] of inner circle of targets - screenColor = calibration_properties.get( - 'screen_background_color') # [r,g,b] of screen - # diameter of outer target circle (in px) + targetForegroundColor = circle_attributes.get('outer_color') + targetBackgroundColor = circle_attributes.get('inner_color') + screenColor = calibration_properties.get('screen_background_color') targetOuterDiameter = circle_attributes.get('outer_diameter') - # diameter of inner target circle (in px) targetInnerDiameter = circle_attributes.get('inner_diameter') genv = EyeLinkCoreGraphicsIOHubPsychopy( @@ -433,7 +428,8 @@ def runSetupProcedure(self): 
genv._unregisterEventMonitors() genv.clearAllEventBuffers() genv.window.close() - + del genv.window + del genv return EyeTrackerConstants.EYETRACKER_OK except Exception as e: diff --git a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/supported_config_settings.yaml b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/supported_config_settings.yaml index 7fa5b18727..d4850951c1 100644 --- a/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/supported_config_settings.yaml +++ b/psychopy/iohub/devices/eyetracker/hw/sr_research/eyelink/supported_config_settings.yaml @@ -33,19 +33,19 @@ eyetracker.hw.sr_research.eyelink.EyeTracker: IOHUB_FLOAT: min: 0.5 max: 2.5 - screen_background_color: IOHUB_RGBA255_COLOR + screen_background_color: IOHUB_COLOR target_type: [CIRCLE_TARGET,] target_attributes: outer_diameter: - IOHUB_INT: - min: 8 - max: 256 + IOHUB_FLOAT: + min: 0.01 + max: 250.0 inner_diameter: - IOHUB_INT: - min: 0 - max: 255 - outer_color: IOHUB_RGBA255_COLOR - inner_color: IOHUB_RGBA255_COLOR + IOHUB_FLOAT: + min: 0.01 + max: 250.0 + outer_color: IOHUB_COLOR + inner_color: IOHUB_COLOR simulation_mode: IOHUB_BOOL enable_interface_without_connection: IOHUB_BOOL network_settings: IOHUB_IP_ADDRESS_V4 diff --git a/psychopy/iohub/devices/eyetracker/hw/tobii/default_eyetracker.yaml b/psychopy/iohub/devices/eyetracker/hw/tobii/default_eyetracker.yaml index 2bd7e86f37..4987cf404b 100644 --- a/psychopy/iohub/devices/eyetracker/hw/tobii/default_eyetracker.yaml +++ b/psychopy/iohub/devices/eyetracker/hw/tobii/default_eyetracker.yaml @@ -91,10 +91,10 @@ eyetracker.hw.tobii.EyeTracker: # # outer_diameter: The size of the outer circle of the calibration target # - outer_diameter: 35 + outer_diameter: 35.0 # outer_stroke_width: The thickness of the outer circle edge. # - outer_stroke_width: 2 + outer_stroke_width: 2.0 # outer_fill_color: RGB255 color to use to fill the outer circle. # outer_fill_color: [128,128,128] @@ -103,10 +103,10 @@ eyetracker.hw.tobii.EyeTracker: outer_line_color: [255,255,255] # inner_diameter: The size of the inner circle calibration target # - inner_diameter: 7 + inner_diameter: 7.0 # inner_stroke_width: The thickness of the inner circle edge. # - inner_stroke_width: 1 + inner_stroke_width: 1.0 # inner_fill_color: RGB255 color to use to fill the inner circle. 
# inner_fill_color: [0,0,0] diff --git a/psychopy/iohub/devices/eyetracker/hw/tobii/supported_config_settings.yaml b/psychopy/iohub/devices/eyetracker/hw/tobii/supported_config_settings.yaml index 3d4035abd5..1620aca88f 100644 --- a/psychopy/iohub/devices/eyetracker/hw/tobii/supported_config_settings.yaml +++ b/psychopy/iohub/devices/eyetracker/hw/tobii/supported_config_settings.yaml @@ -7,7 +7,7 @@ eyetracker.hw.tobii.EyeTracker: enable: IOHUB_BOOL model_name: IOHUB_LIST: - valid_values: [Any Pro Model, Nano, Fusuion, Spectrum, T120, X120, TX300, X2, X3] + valid_values: [Any Pro Model, Nano, Fusion, Spectrum, T120, X120, TX300, X2, X3] min_length: 1 max_length: 1 serial_number: @@ -46,29 +46,29 @@ eyetracker.hw.tobii.EyeTracker: IOHUB_FLOAT: min: 0.5 max: 2.5 - screen_background_color: IOHUB_RGBA255_COLOR + screen_background_color: IOHUB_COLOR target_type: [CIRCLE_TARGET,] target_attributes: outer_diameter: - IOHUB_INT: - min: 1 - max: 1000 + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 outer_stroke_width: - IOHUB_INT: - min: 1 - max: 1000 - outer_fill_color: IOHUB_RGBA255_COLOR - outer_line_color: IOHUB_RGBA255_COLOR + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 + outer_fill_color: IOHUB_COLOR + outer_line_color: IOHUB_COLOR inner_diameter: - IOHUB_INT: - min: 1 - max: 1000 + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 inner_stroke_width: - IOHUB_INT: - min: 1 - max: 1000 - inner_fill_color: IOHUB_RGBA255_COLOR - inner_line_color: IOHUB_RGBA255_COLOR + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 + inner_fill_color: IOHUB_COLOR + inner_line_color: IOHUB_COLOR animate: enable: IOHUB_BOOL movement_velocity: # 300 pix / sec diff --git a/psychopy/iohub/devices/eyetracker/hw/tobii/tobiiCalibrationGraphics.py b/psychopy/iohub/devices/eyetracker/hw/tobii/tobiiCalibrationGraphics.py index a682663268..25fc05f8a2 100644 --- a/psychopy/iohub/devices/eyetracker/hw/tobii/tobiiCalibrationGraphics.py +++ b/psychopy/iohub/devices/eyetracker/hw/tobii/tobiiCalibrationGraphics.py @@ -25,9 +25,8 @@ class TobiiPsychopyCalibrationGraphics(object): IOHUB_HEARTBEAT_INTERVAL = 0.050 # seconds between forced run through of # micro threads, since one is blocking # on camera setup. 
- WINDOW_BACKGROUND_COLOR = (128, 128, 128) - CALIBRATION_POINT_LIST = [ - (0.5, 0.5), (0.1, 0.1), (0.9, 0.1), (0.9, 0.9), (0.1, 0.9), (0.5, 0.5)] + WINDOW_BACKGROUND_COLOR = None + CALIBRATION_POINT_LIST = [(0.5, 0.5), (0.1, 0.1), (0.9, 0.1), (0.9, 0.9), (0.1, 0.9), (0.5, 0.5)] TEXT_POS = [0, 0] TEXT_COLOR = [0, 0, 0] @@ -97,7 +96,7 @@ def __init__(self, eyetrackerInterface, screenColor=None, screen=display.getIndex(), color=self.WINDOW_BACKGROUND_COLOR[ 0:3], - colorSpace='rgb255') + colorSpace=display.getColorSpace()) self.window.flip(clearBuffer=True) self._createStim() @@ -194,7 +193,6 @@ def _createStim(self): name='CP_OUTER', fillColor=calibration_prefs['outer_fill_color'], lineColor=calibration_prefs['outer_line_color'], - colorSpace='rgb255', opacity=1.0, interpolate=False, edges=64, @@ -210,7 +208,6 @@ def _createStim(self): name='CP_INNER', fillColor=calibration_prefs['inner_fill_color'], lineColor=calibration_prefs['inner_line_color'], - colorSpace='rgb255', opacity=1.0, interpolate=False, edges=64, @@ -245,6 +242,7 @@ def _createStim(self): lineColor='White', fillColor='Firebrick', vertices=bar_vertices, + units='pix', pos=( 0, self.marker_heights[0])) @@ -253,6 +251,7 @@ def _createStim(self): lineColor='White', fillColor='DarkSlateGray', vertices=bar_vertices, + units='pix', pos=( 0, self.marker_heights[1])) @@ -261,6 +260,7 @@ def _createStim(self): lineColor='White', fillColor='GoldenRod', vertices=bar_vertices, + units='pix', pos=( 0, self.marker_heights[2])) @@ -272,6 +272,7 @@ def _createStim(self): lineColor='White', fillColor='Black', vertices=marker_vertices, + units='pix', pos=( 0, self.marker_heights[0])) @@ -279,6 +280,7 @@ def _createStim(self): win=self.window, lineColor='White', fillColor='Black', + units='pix', vertices=marker_vertices, pos=( 0, @@ -287,6 +289,7 @@ def _createStim(self): win=self.window, lineColor='White', fillColor='Black', + units='pix', vertices=marker_vertices, pos=( 0, @@ -295,6 +298,7 @@ def _createStim(self): win=self.window, lineColor='White', fillColor='DimGray', + units='pix', vertices=marker_vertices, pos=( 0, @@ -303,6 +307,7 @@ def _createStim(self): win=self.window, lineColor='White', fillColor='DimGray', + units='pix', vertices=marker_vertices, pos=( 0, @@ -311,6 +316,7 @@ def _createStim(self): win=self.window, lineColor='White', fillColor='DimGray', + units='pix', vertices=marker_vertices, pos=( 0, diff --git a/psychopy/iohub/incompatibilities_list.txt b/psychopy/iohub/incompatibilities_list.txt deleted file mode 100644 index 8c8a0a3797..0000000000 --- a/psychopy/iohub/incompatibilities_list.txt +++ /dev/null @@ -1,11 +0,0 @@ -List of iohub functionality that has been removed or changed as part of the 2021 refactor work ------------------------------------------------------------------------------------------------ - -- Removed iohub ExpRuntime class (use iohub.launchHubServer() function) -- Some eye tracker setting haves may have changed. If running an experiment created with a previous version of iohub, - please review your eye tracker settings and update as necessary. - - Do we need to list every specific setting change? -- iohub can no longer be copied out of the psychopy package and used as a stand alone package - in your site-packages folder. Change `import iohub` to `import psychopy.iohub` for example. -- Removed most device settings related to device hardware specifics (model_id, manfacture_date, etc) - since they were never really used. 
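The calibration-graphics changes above (EyeLink and Tobii setup windows now created with colorSpace=display.getColorSpace() rather than hard-coded 'rgb255') pair with the launchHubServer() window integration added in connect.py earlier in this diff. A minimal usage sketch under those changes follows; it is illustrative only — the monitor name 'testMonitor', the session code, and the specific calibration values are assumptions, not values taken from this patch.

from psychopy import visual
from psychopy.iohub import launchHubServer

# Fullscreen window; launchHubServer() copies units, colorSpace, screen index
# and monitor name from it into the ioHub Display device config.
win = visual.Window((1920, 1080), fullscr=True, screen=0, units='pix',
                    colorSpace='rgb', color=[0, 0, 0], monitor='testMonitor')

# Calibration colors expressed in the window's 'rgb' (-1..1) space; with the
# new IOHUB_COLOR validation, named or hex colors should also be accepted.
eyelink_config = {
    'name': 'tracker',
    'calibration': {
        'screen_background_color': [0, 0, 0],
        'target_attributes': {
            'outer_diameter': 33.0,
            'inner_diameter': 6.0,
            'outer_color': [1, 1, 1],
            'inner_color': [-1, -1, -1],
        },
    },
}

io = launchHubServer(window=win, session_code='S001',
                     **{'eyetracker.hw.sr_research.eyelink.EyeTracker': eyelink_config})
tracker = io.devices.tracker
tracker.runSetupProcedure()   # calibration screen drawn in win.colorSpace / win.units
io.quit()
win.close()

With a window created in 'rgb255' color space, the same settings could instead be given as 0-255 values, which is what the updated gcCursor demo config at the top of this diff does.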
\ No newline at end of file diff --git a/psychopy/iohub/util/__init__.py b/psychopy/iohub/util/__init__.py index 84b8267c14..8d938e7442 100644 --- a/psychopy/iohub/util/__init__.py +++ b/psychopy/iohub/util/__init__.py @@ -212,10 +212,10 @@ def getDeviceDefaultConfig(device_name, builder_hides=True): _iohub2builderValType = dict(IOHUB_STRING='str', IOHUB_BOOL='bool', IOHUB_FLOAT='float', IOHUB_INT='int', - IOHUB_LIST='list', IOHUB_RGBA255_COLOR='color', IOHUB_IP_ADDRESS_V4='str') + IOHUB_LIST='list', IOHUB_COLOR='color', IOHUB_IP_ADDRESS_V4='str') _iohub2builderInputType = dict(IOHUB_STRING='single', IOHUB_BOOL='bool', IOHUB_FLOAT='single', IOHUB_INT='single', - IOHUB_LIST=('choice','multi'), IOHUB_RGBA255_COLOR='color', IOHUB_IP_ADDRESS_V4='single') + IOHUB_LIST=('choice','multi'), IOHUB_COLOR='color', IOHUB_IP_ADDRESS_V4='single') def getDeviceNames(device_name="eyetracker.hw"): """ diff --git a/psychopy/iohub/util/visualangle.py b/psychopy/iohub/util/visualangle.py index ba365eab8a..d728c951af 100644 --- a/psychopy/iohub/util/visualangle.py +++ b/psychopy/iohub/util/visualangle.py @@ -4,13 +4,11 @@ """ Pixel to Visual Angle Calculation. -Uses "symmetric angles" formula provided by Dr. Josh Borah -(jborah AT asleyetracking.com), via email correspondence in 2012. +Uses "symmetric angles" formula. Assumptions: 1) unit origin == position 0.0, 0.0 == screen center 2) Eye is orthogonal to origin of 2D plane - """ import numpy as np @@ -20,7 +18,6 @@ class VisualAngleCalc(object): - def __init__(self, display_size_mm, display_res_pix, eye_distance_mm=None): """Used to store calibrated surface information and eye to screen distance so that pixel positions can be converted to visual degree