-
Notifications
You must be signed in to change notification settings - Fork 38
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
23 changed files
with
1,404 additions
and
94 deletions.
There are no files selected for viewing
Large diffs are not rendered by default.
Oops, something went wrong.
101 changes: 101 additions & 0 deletions
101
demos/demo_eeg_data/Meadows_nsd100annotations_v_v1_devoted-caiman_2_annotations.csv
Large diffs are not rendered by default.
Oops, something went wrong.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
{ | ||
"path": "../../demos/demo_eeg.ipynb", | ||
"extra-media": [ | ||
"../../demos/demo_eeg_data" | ||
] | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -13,5 +13,6 @@ Demos | |
demo_unbalanced | ||
demo_temporal | ||
demo_meg_mne | ||
demo_eeg | ||
demo_searchlight | ||
rescale_partials |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
rsatoolbox.data.ops module | ||
========================== | ||
|
||
.. automodule:: rsatoolbox.data.ops | ||
:members: | ||
:undoc-members: | ||
:show-inheritance: |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
rsatoolbox.io.mne module | ||
======================== | ||
|
||
.. automodule:: rsatoolbox.io.mne | ||
:members: | ||
:undoc-members: | ||
:show-inheritance: |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,97 @@ | ||
"""Operations on multiple Datasets | ||
""" | ||
from __future__ import annotations | ||
from typing import TYPE_CHECKING, Union, List, Set, overload | ||
from copy import deepcopy | ||
from warnings import warn | ||
try: | ||
from typing import Literal # pylint: disable=ungrouped-imports | ||
except ImportError: | ||
from typing_extensions import Literal | ||
from numpy import concatenate, repeat | ||
import rsatoolbox | ||
if TYPE_CHECKING: | ||
DESC_LEVEL = Union[Literal['obs'], Literal['set']] | ||
from rsatoolbox.data.dataset import Dataset, TemporalDataset | ||
|
||
|
||
@overload
def merge_datasets(sets: List[TemporalDataset]) -> TemporalDataset:
    ...


@overload
def merge_datasets(sets: List[Dataset]) -> Dataset:
    ...


def merge_datasets(sets: Union[List[Dataset], List[TemporalDataset]]
                   ) -> Union[Dataset, TemporalDataset]:
    """Concatenate measurements to create one Dataset of the same type

    Only descriptors that exist on all subsets are assigned to the merged
    dataset.
    Dataset-level `descriptors` that are identical across subsets will be
    passed on, those that vary will become `obs_descriptors`.
    Channel and Time descriptors must be identical across subsets.

    Args:
        sets (Union[List[Dataset], List[TemporalDataset]]): List of Dataset
            or TemporalDataset objects. Must all be the same type.

    Returns:
        Union[Dataset, TemporalDataset]: The new dataset combining measurements
            and descriptors from the given subset datasets.
    """
    if not sets:
        warn('[merge_datasets] Received empty list, returning empty Dataset')
        return rsatoolbox.data.dataset.Dataset(measurements=[])
    if len({type(subset) for subset in sets}) > 1:
        raise ValueError('All datasets must be of the same type')
    first = sets[0]
    # numpy pre-allocates the output, so a single concatenate is efficient
    merged_meas = concatenate([subset.measurements for subset in sets], axis=0)
    # observation descriptors shared by every subset: concatenate per key
    merged_obs = {
        key: concatenate([subset.obs_descriptors[key] for subset in sets])
        for key in _shared_descriptors(sets, 'obs')
    }
    merged_set = dict()
    for key in _shared_descriptors(sets):
        if len({subset.descriptors[key] for subset in sets}) == 1:
            # value identical everywhere: keep it at the dataset level
            merged_set[key] = first.descriptors[key]
        else:
            # value varies per subset: expand to one entry per observation
            merged_obs[key] = repeat(
                [subset.descriptors[key] for subset in sets],
                [subset.n_obs for subset in sets]
            )
    # TemporalDataset must be checked first since it inherits from Dataset
    if isinstance(first, rsatoolbox.data.dataset.TemporalDataset):
        return rsatoolbox.data.dataset.TemporalDataset(
            measurements=merged_meas,
            descriptors=merged_set,
            obs_descriptors=merged_obs,
            channel_descriptors=deepcopy(first.channel_descriptors),
            time_descriptors=deepcopy(first.time_descriptors),
        )
    if isinstance(first, rsatoolbox.data.dataset.Dataset):
        return rsatoolbox.data.dataset.Dataset(
            measurements=merged_meas,
            descriptors=merged_set,
            obs_descriptors=merged_obs,
            channel_descriptors=deepcopy(first.channel_descriptors)
        )
    raise ValueError('Unsupported Dataset type')
|
||
|
||
def _shared_descriptors( | ||
datasets: Union[List[Dataset], List[TemporalDataset]], | ||
level: DESC_LEVEL = 'set') -> Set[str]: | ||
"""Find descriptors that all datasets have in common | ||
""" | ||
if level == 'set': | ||
each_keys = [set(d.descriptors.keys()) for d in datasets] | ||
else: | ||
each_keys = [set(d.obs_descriptors.keys()) for d in datasets] | ||
return set.intersection(*each_keys) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
from __future__ import annotations | ||
from typing import Optional, Dict, TYPE_CHECKING | ||
from os.path import basename | ||
from rsatoolbox.data.dataset import TemporalDataset | ||
if TYPE_CHECKING: | ||
from mne.epochs import EpochsFIF | ||
|
||
|
||
def read_epochs(fpath: str) -> TemporalDataset:
    """Create TemporalDataset from epochs in mne FIF file

    Args:
        fpath (str): Full path to epochs file

    Returns:
        TemporalDataset: dataset with epochs
    """
    # pylint: disable-next=import-outside-toplevel
    from mne import read_epochs as mne_read_epochs
    epochs = mne_read_epochs(fpath, preload=True, verbose='error')
    name = basename(fpath)
    # dataset-level descriptors: filename plus any BIDS entities parsed from it
    meta = dict(filename=name, **descriptors_from_bids_filename(name))
    return dataset_from_epochs(epochs, meta)
|
||
|
||
def dataset_from_epochs(
        epochs: EpochsFIF,
        descriptors: Optional[Dict] = None
        ) -> TemporalDataset:
    """Create TemporalDataset from MNE epochs object

    One observation per epoch; the MNE event code becomes the `event`
    observation descriptor, channel names the `name` channel descriptor,
    and epoch sample times the `time` descriptor.

    Args:
        epochs (EpochsFIF): Preloaded MNE epochs object
        descriptors (Optional[Dict]): Dataset-level descriptors to attach.
            Defaults to an empty dict. Note a passed-in falsy value (e.g.
            an empty dict) is replaced by a fresh empty dict.

    Returns:
        TemporalDataset: dataset with epochs
    """
    descriptors = descriptors or dict()
    return TemporalDataset(
        measurements=epochs.get_data(),
        descriptors=descriptors,
        # events[:, 2] holds the integer event code per epoch
        obs_descriptors=dict(event=epochs.events[:, 2]),
        channel_descriptors=dict(name=epochs.ch_names),
        time_descriptors=dict(time=epochs.times)
    )
|
||
|
||
def descriptors_from_bids_filename(fname: str) -> Dict[str, str]:
    """parse a filename for BIDS-style entities

    Scans the underscore-separated segments of the filename for
    `sub-`, `run-` and `task-` prefixes; the last matching segment
    wins for each entity.

    Args:
        fname (str): filename

    Returns:
        Dict[str, str]: sub, run or task descriptors
    """
    segments = fname.split('_')
    entities = dict()
    for entity in ('sub', 'run', 'task'):
        prefix = entity + '-'
        for segment in segments:
            if segment.startswith(prefix):
                entities[entity] = segment[len(prefix):]
    return entities
Oops, something went wrong.