 from ibllib.plots import vertical_lines

 import brainbox.plot
+from brainbox.io.spikeglx import Streamer
 from brainbox.ephys_plots import plot_brain_regions
 from brainbox.metrics.single_units import quick_unit_metrics
 from brainbox.behavior.wheel import interpolate_position, velocity_filtered
@@ -793,6 +794,7 @@ class SpikeSortingLoader:
     datasets: list = None  # list of all datasets belonging to the session
     # the following properties are the outcome of a reading function
     files: dict = None
+    raw_data_files: list = None  # list of raw ap and lf files corresponding to the recording
     collection: str = ''
     histology: str = ''  # 'alf', 'resolved', 'aligned' or 'traced'
     spike_sorter: str = 'pykilosort'
@@ -829,6 +831,7 @@ def __post_init__(self):
         if self.atlas is None:
             self.atlas = AllenAtlas()
         self.files = {}
+        self.raw_data_files = []

     def _load_object(self, *args, **kwargs):
         """
@@ -881,6 +884,11 @@ def load_spike_sorting_object(self, obj, *args, **kwargs):
         self.download_spike_sorting_object(obj, *args, **kwargs)
         return self._load_object(self.files[obj])

+    def get_version(self, spike_sorter='pykilosort'):
+        collection = self._get_spike_sorting_collection(spike_sorter=spike_sorter)
+        dset = self.one.alyx.rest('datasets', 'list', session=self.eid, collection=collection, name='spikes.times.npy')
+        return dset[0]['version'] if len(dset) else 'unknown'
+
     def download_spike_sorting_object(self, obj, spike_sorter='pykilosort', dataset_types=None, collection=None,
                                       missing='raise', **kwargs):
         """
@@ -919,6 +927,46 @@ def download_spike_sorting(self, **kwargs):
             self.download_spike_sorting_object(obj=obj, **kwargs)
         self.spike_sorting_path = self.files['spikes'][0].parent

+    def download_raw_electrophysiology(self, band='ap'):
+        """
+        Downloads raw electrophysiology data files to local disk.
+        :param band: "ap" (default) or "lf" for the LFP band
+        :return: list of full paths to the raw data files (ch, meta and cbin files)
+        """
+        raw_data_files = []
+        for suffix in [f'*.{band}.ch', f'*.{band}.meta', f'*.{band}.cbin']:
+            try:
+                # FIXME: this will fail if multiple LFP segments are found
+                raw_data_files.append(self.one.load_dataset(
+                    self.eid,
+                    download_only=True,
+                    collection=f'raw_ephys_data/{self.pname}',
+                    dataset=suffix,
+                    check_hash=False,
+                ))
+            except ALFObjectNotFound:
+                _logger.debug(f"{self.session_path} can't locate raw data collection raw_ephys_data/{self.pname}, file {suffix}")
+        self.raw_data_files = list(set(self.raw_data_files + raw_data_files))
+        return raw_data_files
+
+    def raw_electrophysiology(self, stream=True, band='ap', **kwargs):
+        """
+        Returns a reader for the raw electrophysiology data.
+        By default this is a Streamer object; if stream is False, the raw data file is downloaded if necessary
+        and a spikeglx.Reader is returned.
+        :param stream: bool (default True) to return a Streamer; False to read from a local file
+        :param band: "ap" (default) or "lf" for the LFP band
+        :param kwargs: additional keyword arguments passed to the Streamer constructor
+        :return: a Streamer or spikeglx.Reader instance
+        """
+        if stream:
+            return Streamer(pid=self.pid, one=self.one, typ=band, **kwargs)
+        else:
+            raw_data_files = self.download_raw_electrophysiology(band=band)
+            cbin_file = next(filter(lambda f: f.name.endswith(f'.{band}.cbin'), raw_data_files), None)
+            if cbin_file is not None:
+                return spikeglx.Reader(cbin_file)
+
     def load_channels(self, **kwargs):
         """
         Loads channels
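A usage sketch for the two methods above: streaming returns a Streamer that fetches chunks on demand, while stream=False first downloads the .ch/.meta/.cbin files and opens them with spikeglx.Reader. The pid and sample indices are placeholders, and the slicing assumes the Streamer exposes the same [samples, channels] indexing as spikeglx.Reader.

>>> from one.api import ONE
>>> from brainbox.io.one import SpikeSortingLoader
>>> one = ONE()
>>> ssl = SpikeSortingLoader(pid='decc8d40-cf74-4263-ae9d-a0cc68b47e86', one=one)  # placeholder pid
>>> sr = ssl.raw_electrophysiology(band='ap')  # Streamer: reads chunks remotely, no full download
>>> raw = sr[10_000:40_000, :]  # (samples, channels) slice streamed on demand
>>> reader = ssl.raw_electrophysiology(stream=False, band='lf')  # downloads the LFP band, returns spikeglx.Reader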
@@ -1282,7 +1330,8 @@ def load_trials(self):
         """
         # itiDuration frequently has a mismatched dimension, and we don't need it, exclude using regex
         self.one.wildcards = False
-        self.trials = self.one.load_object(self.eid, 'trials', collection='alf', attribute=r'(?!itiDuration).*').to_df()
+        self.trials = self.one.load_object(
+            self.eid, 'trials', collection='alf', attribute=r'(?!itiDuration).*').to_df()
         self.one.wildcards = True
         self.data_info.loc[self.data_info['name'] == 'trials', 'is_loaded'] = True