diff --git a/doc/changes/latest.inc b/doc/changes/latest.inc
index fdbf8e5e30d..5847cb5f17d 100644
--- a/doc/changes/latest.inc
+++ b/doc/changes/latest.inc
@@ -30,6 +30,7 @@ Enhancements
 - Added :class:`mne.preprocessing.eyetracking.Calibration` to store eye-tracking calibration info, and :func:`mne.preprocessing.eyetracking.read_eyelink_calibration` to read calibration data from EyeLink systems (:gh:`11719` by `Scott Huberty`_)
 - Ocular :class:`mne.Annotations` read in by :func:`mne.io.read_raw_eyelink` are now channel aware. This means if the left eye blinked, the associated annotation will store this in the ``'ch_names'`` key. (:gh:`11746` by `Scott Huberty`_)
 - Added :func:`mne.preprocessing.eyetracking.interpolate_blinks` to linear interpolate eyetrack signals during blink periods. (:gh:`11740` by `Scott Huberty`_)
+- Added a section on combining eye-tracking and EEG data to the preprocessing tutorial "Working with eye tracker data in MNE-Python" (:gh:`11770` by `Scott Huberty`_)
 
 Bugs
 ~~~~
diff --git a/doc/overview/datasets_index.rst b/doc/overview/datasets_index.rst
index b2d0715e8e9..3946ef64be5 100644
--- a/doc/overview/datasets_index.rst
+++ b/doc/overview/datasets_index.rst
@@ -475,14 +475,17 @@ standard.
 
     * :ref:`tut-ssvep`
 
+.. _eyelink-dataset:
+
 EYELINK
 =======
 
 :func:`mne.datasets.eyelink.data_path`
 
-A small example dataset in SR research's proprietary .asc format.
-1 participant fixated on the screen while short light flashes appeared.
-Monocular recording of gaze position and pupil size, 1000 Hz sampling
-frequency.
+A small example dataset from a pupillary light reflex experiment. Both EEG (EGI) and
+eye-tracking (SR Research EyeLink; ASCII format) data were recorded and stored in
+separate files. One participant fixated on the screen while short light flashes
+appeared. Event onsets were recorded by a photodiode attached to the screen and were
+sent to both the EEG and eye-tracking systems.
 
 .. topic:: Examples
diff --git a/mne/datasets/config.py b/mne/datasets/config.py
index 7869f97a78e..e1881b4b774 100644
--- a/mne/datasets/config.py
+++ b/mne/datasets/config.py
@@ -345,9 +345,9 @@
 
 # eyelink dataset
 MNE_DATASETS["eyelink"] = dict(
-    archive_name="eyelink_example_data.zip",
-    hash="md5:081950c05f35267458d9c751e178f161",
-    url=("https://osf.io/r5ndq/download?version=1"),
+    archive_name="eeg-eyetrack_data.zip",
+    hash="md5:c4fc788fe01737e08e9086c90cab642d",
+    url=("https://osf.io/63fjm/download?version=1"),
     folder_name="eyelink-example-data",
     config_key="MNE_DATASETS_EYELINK_PATH",
 )
""" # noqa: E501 -# Authors: Dominik Welke -# Scott Huberty +# Authors: Scott Huberty +# Dominik Welke +# # # License: BSD-3-Clause @@ -19,75 +21,71 @@ # Data loading # ------------ # -# First we will load an eye tracker recording from SR research's proprietary -# ``'.asc'`` file format. -# -# The info structure tells us we loaded a monocular recording with 2 -# ``'eyegaze'``, channels (X/Y), 1 ``'pupil'`` channel, and 1 ``'stim'`` -# channel. +# As usual we start by importing the modules we need and loading some +# :ref:`example data `: eye-tracking data recorded from SR research's +# ``'.asc'`` file format, and EEG data recorded from EGI's ``'.mff'`` file format. We'll +# pass ``create_annotations=["blinks"]`` to :func:`~mne.io.read_raw_eyelink` so that +# only blinks annotations are created (by default, annotations are created for blinks, +# saccades, fixations, and experiment messages). import mne from mne.datasets.eyelink import data_path from mne.preprocessing.eyetracking import read_eyelink_calibration -eyelink_fname = data_path() / "mono_multi-block_multi-DINS.asc" +et_fpath = data_path() / "sub-01_task-plr_eyetrack.asc" +eeg_fpath = data_path() / "sub-01_task-plr_eeg.mff" -raw = mne.io.read_raw_eyelink(eyelink_fname, create_annotations=["blinks", "messages"]) -raw.crop(tmin=0, tmax=130) # for this demonstration, let's take a subset of the data +raw_et = mne.io.read_raw_eyelink(et_fpath, preload=True, create_annotations=["blinks"]) +raw_eeg = mne.io.read_raw_egi(eeg_fpath, preload=True, verbose="warning") +raw_eeg.filter(1, 30) # %% -# Ocular annotations -# ------------------ -# By default, Eyelink files will output events for ocular events (blinks, -# saccades, fixations), and experiment messages. MNE will store these events -# as `mne.Annotations`. Ocular annotations contain channel information, in the -# ``'ch_names'``` key. This means that we can see which eye an ocular event occurred in: +# .. seealso:: :ref:`tut-importing-eyetracking-data` +# :class: sidebar -print(raw.annotations[0]) # a blink in the right eye +# %% +# The info structure of the eye-tracking data tells us we loaded a monocular recording +# with 2 eyegaze channels (x- and y-coordinate positions), 1 pupil channel, 1 stim +# channel, and 3 channels for the head distance and position (since this data was +# collected using EyeLink's Remote mode). + +raw_et.info # %% -# If we are only interested in certain event types from -# the Eyelink file, we can select for these using the ``'create_annotations'`` -# argument of `mne.io.read_raw_eyelink`. above, we only created annotations -# for blinks, and experiment messages. -# -# Note that ``'blink'`` annotations are read in as ``'BAD_blink'``, and MNE will treat -# these as bad segments of data. This means that blink periods will be dropped during -# epoching by default. +# Ocular annotations +# ------------------ +# By default, EyeLink files will output ocular events (blinks, saccades, and +# fixations), and experiment messages. MNE will store these events +# as `mne.Annotations`. Ocular annotations contain channel information in the +# ``'ch_names'`` key. This means that we can see which eye an ocular event occurred in, +# which can be useful for binocular recordings: + +print(raw_et.annotations[0]["ch_names"]) # a blink in the right eye # %% # Checking the calibration # ------------------------ # -# We can also load the calibrations from the recording and visualize them. 
 
 # %%
 # Checking the calibration
 # ------------------------
 #
-# We can also load the calibrations from the recording and visualize them.
-# Checking the quality of the calibration is a useful first step in assessing
-# the quality of the eye tracking data. Note that
+# EyeLink ``.asc`` files can also include calibration information.
+# MNE-Python can load and visualize those eye-tracking calibrations, which
+# is a useful first step in assessing the quality of the eye-tracking data.
 # :func:`~mne.preprocessing.eyetracking.read_eyelink_calibration`
 # will return a list of :class:`~mne.preprocessing.eyetracking.Calibration` instances,
 # one for each calibration. We can index that list to access a specific calibration.
 
-cals = read_eyelink_calibration(eyelink_fname)
+cals = read_eyelink_calibration(et_fpath)
 print(f"number of calibrations: {len(cals)}")
 first_cal = cals[0]  # let's access the first (and only in this case) calibration
 print(first_cal)
 
 # %%
-# Here we can see that a 5-point calibration was performed at the beginning of
-# the recording. Note that you can access the calibration information using
-# dictionary style indexing:
+# Calibrations have dict-like attribute access; in addition to the attributes shown in
+# the output above, they also have ``'positions'`` (the x and y coordinates of each
+# calibration point), ``'gaze'`` (the x and y coordinates of the actual gaze position
+# for each calibration point), and ``'offsets'`` (the offset in visual degrees between
+# the calibration position and the actual gaze position for each calibration point).
+# Below is an example of how to access these data:
 
-print(f"Eye calibrated: {first_cal['eye']}")
-print(f"Calibration model: {first_cal['model']}")
-print(f"Calibration average error: {first_cal['avg_error']}")
-
-# %%
-# The data for individual calibration points are stored as :class:`numpy.ndarray`
-# arrays, in the ``'positions'``, ``'gaze'``, and ``'offsets'`` keys. ``'positions'``
-# contains the x and y coordinates of each calibration point. ``'gaze'`` contains the
-# x and y coordinates of the actual gaze position for each calibration point.
-# ``'offsets'`` contains the offset (in visual degrees) between the calibration position
-# and the actual gaze position for each calibration point. Below is an example of
-# how to access these data:
 print(f"offset of the first calibration point: {first_cal['offsets'][0]}")
 print(f"offset for each calibration point: {first_cal['offsets']}")
 print(f"x-coordinate for each calibration point: {first_cal['positions'].T[0]}")
 
@@ -98,126 +96,115 @@
 # and the offsets (in visual degrees) between the calibration position and the actual
 # gaze position of each calibration point.
 
-first_cal.plot(show_offsets=True)
+first_cal.plot()
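+
+# %%
+# As a rough quality heuristic (a minimal sketch: the 1-degree cutoff below is an
+# arbitrary choice for illustration, not an MNE or EyeLink recommendation), we can
+# summarize the point-wise offsets to decide whether the gaze data deserve extra
+# caution:
+
+import numpy as np
+
+avg_offset = np.mean(first_cal["offsets"])  # mean offset across points, in degrees
+print(f"average calibration offset: {avg_offset:.2f} degrees")
+if avg_offset > 1.0:  # arbitrary illustrative threshold
+    print("Calibration error is large; interpret gaze positions with caution.")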
 
 # %%
-# Get stimulus events from DIN channel
-# ------------------------------------
+# Plot the raw eye-tracking data
+# ------------------------------
 #
-# Eyelink eye trackers have a DIN port that can be used to feed in stimulus
-# or response timings. :func:`mne.io.read_raw_eyelink` loads this data as a
-# ``'stim'`` channel. Alternatively, the onset of stimulus events could be sent
-# to the eyetracker as ``messages`` - these can be read in as
-# `mne.Annotations`.
+# Let's plot the raw eye-tracking data. We'll pass a custom `dict` into
+# the scalings argument to make the eyegaze channel traces legible when plotting,
+# since this file contains pixel position data (as opposed to eye angles,
+# which are reported in radians).
+
+raw_et.plot(scalings=dict(eyegaze=1e3))
+
+# %%
+# Handling blink artifacts
+# ------------------------
 #
-# In the example data, the DIN channel contains the onset of light flashes on
-# the screen. We now extract these events to visualize the pupil response. We will use
-# these later in this tutorial.
+# Naturally, there are blinks in our data, which occur within ``"BAD_blink"``
+# annotations. During blink periods, eyegaze coordinates are not reported, and pupil
+# size data are ``0``. We don't want these blink artifacts biasing our analysis, so we
+# have two options: drop the blink periods from our data during epoching, or interpolate
+# the missing data during the blink periods. For this tutorial, let's interpolate the
+# blink samples. We'll pass ``(0.05, 0.2)`` to
+# :func:`~mne.preprocessing.eyetracking.interpolate_blinks`, expanding the interpolation
+# window 50 ms before and 200 ms after the blink, so that the noisy data surrounding
+# the blink is also interpolated.
 
-events = mne.find_events(raw, shortest_event=1, min_duration=0.02, uint_cast=True)
-event_dict = {"flash": 3}
+mne.preprocessing.eyetracking.interpolate_blinks(raw_et, buffer=(0.05, 0.2))
 
 # %%
+# .. important:: By default, :func:`~mne.preprocessing.eyetracking.interpolate_blinks`
+#                will only interpolate blinks in pupil channels. Passing
+#                ``interpolate_gaze=True`` will also interpolate the blink periods of
+#                the eyegaze channels. Be aware, however, that eye movements can occur
+#                during blinks, which makes the gaze data less suitable for
+#                interpolation.
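+#
+# For example, a call that also interpolates the gaze channels could look like the
+# following (shown as a sketch rather than executed, since we already interpolated
+# the pupil data above; it reuses the same ``buffer`` value)::
+#
+#     mne.preprocessing.eyetracking.interpolate_blinks(
+#         raw_et, buffer=(0.05, 0.2), interpolate_gaze=True
+#     )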
+
+# %%
-# Plot raw data
-# -------------
+# Extract common stimulus events from the data
+# --------------------------------------------
 #
-# As the following plot shows, we now have a raw object with the eye tracker
-# data, eyeblink annotations and stimulus events (from the DIN channel).
+# In this experiment, a photodiode attached to the display screen was connected to both
+# the EEG and eye-tracking systems. The photodiode was triggered by the light flash
+# stimuli, causing a signal to be sent to both systems simultaneously, signifying the
+# onset of the flash. The photodiode signal was recorded as a digital input channel in
+# the EEG and eye-tracking data. MNE loads these data as a :term:`stim channel`.
 #
-# The plot also shows us that there is some noise in the data (not always
-# categorized as blinks). Also, notice that we have passed a custom `dict` into
-# the scalings argument of ``raw.plot``. This is necessary to make the eyegaze
-# channel traces legible when plotting, since the file contains pixel position
-# data (as opposed to eye angles, which are reported in radians). We also could
-# have simply passed ``scalings='auto'``.
+# We'll extract the flash event onsets from both the EEG and eye-tracking data, as they
+# are necessary for aligning the data from the two recordings.
 
-raw.plot(
-    events=events,
-    event_id={"Flash": 3},
-    event_color="g",
-    start=25,
-    duration=45,
-    scalings=dict(eyegaze=1e3),
-)
-
-# %%
-# handling blink artifacts
-# ------------------------
-# We also notice that, naturally, there are blinks in our data, and these blink periods
-# occur within ``"BAD_blink"`` annotations. During blink periods, ``"eyegaze"``
-# coordinates are not reported, and ``"pupil"`` size data are ``0``. We don't want these
-# blink artifacts biasing our analysis, so we have two options: We can either drop the
-# blink periods from our data during epoching, or we can interpolate the missing data
-# during the blink periods. For this tutorial, let's interpolate the missing samples:
-
-mne.preprocessing.eyetracking.interpolate_blinks(raw, buffer=0.05)
-# Let's plot our data again to see the result of the interpolation:
-raw.pick(["pupil_right"])  # Let's pick just the pupil channel
-raw.plot(events=events, event_id={"Flash": 3}, event_color="g")
+et_events = mne.find_events(raw_et, min_duration=0.01, shortest_event=1, uint_cast=True)
+eeg_events = mne.find_events(raw_eeg, stim_channel="DIN3")
 
 # %%
-# :func:`~mne.preprocessing.eyetracking.interpolate_blinks` performs a simple linear
-# interpolation of the pupil size data during blink periods. the ``buffer`` keyword
-# argument specifies the amount of time (in seconds) before and after the blinks to
-# include in the interpolation. This is helpful because the ``blink`` annotations
-# do not always capture the entire blink in the signal. We specified a value of ``.05``
-# seconds (50 ms), which is slightly more than the default value of ``.025``.
+# The output above shows us that both the EEG and EyeLink data used event ID ``2`` for
+# the flash events, so we'll create a dictionary to use later when plotting to label
+# those events.
+
+event_dict = dict(Flash=2)
 
 # %%
-# Dealing with high frequency noise
-# ---------------------------------
-# From the plot above, we notice that there is some high frequency noise in the pupil
-# signal. We can remove this noise by low-pass filtering the data:
+# Align the eye-tracking data with the EEG data
+# ---------------------------------------------
+#
+# In this dataset, eye-tracking and EEG data were recorded simultaneously, but on
+# different systems, so we'll need to align the data before we can analyze them
+# together. We can do this using the :func:`~mne.preprocessing.realign_raw` function,
+# which will align the data based on the timing of the shared events that are present in
+# both :class:`~mne.io.Raw` objects. We'll use the shared photodiode events we extracted
+# above, but first we need to convert the event onsets from samples to seconds. Once the
+# data have been aligned, we'll add the EEG channels to the eye-tracking raw object.
+
+# Convert event onsets from samples to seconds
+et_flash_times = et_events[:, 0] / raw_et.info["sfreq"]
+eeg_flash_times = eeg_events[:, 0] / raw_eeg.info["sfreq"]
+# Align the data
+mne.preprocessing.realign_raw(
+    raw_et, raw_eeg, et_flash_times, eeg_flash_times, verbose="error"
+)
+# Add EEG channels to the eye-tracking raw object
+raw_et.add_channels([raw_eeg], force_update_info=True)
+
+# Define a few channel groups of interest and plot the data
+frontal = ["E19", "E11", "E4", "E12", "E5"]
+occipital = ["E61", "E62", "E78", "E67", "E72", "E77"]
+pupil = ["pupil_right"]
+picks_idx = mne.pick_channels(
+    raw_et.ch_names, frontal + occipital + pupil, ordered=True
+)
+raw_et.plot(events=et_events, event_id=event_dict, event_color="g", order=picks_idx)
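+
+# %%
+# As an optional sanity check (a minimal sketch: it uses the onset times extracted
+# above, i.e. before the realignment, and assumes both systems captured every flash,
+# which :func:`~mne.preprocessing.realign_raw` requires anyway), we can compare the
+# time span between the first and last flash as measured on each system's clock; any
+# difference reflects the drift that the realignment corrected for:
+
+et_span = et_flash_times[-1] - et_flash_times[0]
+eeg_span = eeg_flash_times[-1] - eeg_flash_times[0]
+print(f"clock drift between the two systems: {(et_span - eeg_span) * 1e3:.1f} ms")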
 
-# Apply a low pass filter to the pupil channel
-raw.filter(l_freq=None, h_freq=40, picks=["pupil_right"])
 
 # %%
-# Rejecting bad spans of data
-# ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-# Even after filtering the pupil data and interpolating the blink periods, we still see
-# some artifacts in the data (the large spikes) that we don't want to include in our
-# analysis. Let's epoch our data and then reject any epochs that might contain these
-# artifacts. We'll use :class:`mne.Epochs` to epoch our data, and pass in the
-# ``events`` array and ``event_dict`` that we created earlier. We'll also pass in the
-# ``reject`` keyword argument to reject any epochs that contain data that exceeds a
-# peak-to-peak signal amplitude threshold of ``1500`` in the ``"pupil"`` channel.
-# Note that this threshold is arbitrary, and should be adjusted based on the data.
-# We chose 1500 because eyelink reports pupil size in arbitrary units (AU), which
-# typically ranges from 800 to 3000 units. Our epochs already contains large
-# signal fluctuations due to the pupil response, so a threshold of 1500 is conservative
-# enough to reject epochs only with large artifacts.
+# Showing the pupillary light reflex
+# ----------------------------------
+# Now let's extract epochs around our flash events. We should see a clear pupil
+# constriction response to the flashes.
 
 epochs = mne.Epochs(
-    raw,
-    events,
-    tmin=-0.3,
-    tmax=5,
+    raw_et,
+    events=et_events,
     event_id=event_dict,
+    tmin=-0.3,
+    tmax=3,
     preload=True,
-    reject=dict(pupil=1500),
 )
-epochs.plot(events=events, event_id=event_dict)
+epochs[:8].plot(events=et_events, event_id=event_dict, order=picks_idx)
 
 # %%
-# We can clearly see the prominent decrease in pupil size following the
-# stimulation.
+# Finally, let's plot the evoked responses to the light flashes to get a sense of the
+# average pupillary light response, and the associated ERP in the EEG data.
 
-# %%
-# Plot average pupil response
-# ---------------------------
-#
-# Finally, let's plot the evoked response to the light flashes to get a sense of the
-# average pupillary light response.
-
-epochs.average().plot()
-
-# %%
-# Again, it is important to note that pupil size data are reported by Eyelink (and
-# stored internally by MNE) as arbitrary units (AU). While it often can be
-# preferable to convert pupil size data to millimeters, this requires
-# information that is not present in the file. MNE does not currently
-# provide methods to convert pupil size data.
-# See :ref:`tut-importing-eyetracking-data` for more information on pupil size
-# data.
+epochs.average().plot(picks=occipital + pupil)
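+
+# %%
+# As a final, optional summary (a minimal sketch: pupil size is reported in arbitrary
+# units, so the value below is only meaningful relative to this recording), we can
+# estimate the constriction amplitude as the drop from the pre-flash baseline to the
+# trough of the average pupil response:
+
+pupil_evoked = epochs.average(picks="pupil_right")
+baseline_mean = pupil_evoked.data[0, pupil_evoked.times < 0].mean()
+trough = pupil_evoked.data[0].min()
+print(
+    f"approximate constriction amplitude: {baseline_mean - trough:.0f} arbitrary units"
+)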