Skip to content

Commit a9cfdf7

Browse files
committed
Merge branch 'release/2.11.0'
2 parents 7e24297 + cb000b2 commit a9cfdf7

File tree

7 files changed

+63
-45
lines changed

7 files changed

+63
-45
lines changed

brainbox/io/one.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -941,17 +941,18 @@ def _get_spike_sorting_collection(self, spike_sorter='pykilosort', revision=None
941941
_logger.debug(f"selecting: {collection} to load amongst candidates: {self.collections}")
942942
return collection
943943

944-
def download_spike_sorting_object(self, obj, spike_sorter='pykilosort', dataset_types=None):
944+
def download_spike_sorting_object(self, obj, spike_sorter='pykilosort', dataset_types=None, collection=None):
945945
"""
946946
Downloads an ALF object
947947
:param obj: object name, str between 'spikes', 'clusters' or 'channels'
948948
:param spike_sorter: (defaults to 'pykilosort')
949-
:param dataset_types: list of extra dataset types
949+
:param dataset_types: list of extra dataset types, for example ['spikes.samples']
950+
:param collection: string specifying the collection, for example 'alf/probe01/pykilosort'
950951
:return:
951952
"""
952953
if len(self.collections) == 0:
953954
return {}, {}, {}
954-
self.collection = self._get_spike_sorting_collection(spike_sorter=spike_sorter)
955+
self.collection = collection or self._get_spike_sorting_collection(spike_sorter=spike_sorter)
955956
_logger.debug(f"loading spike sorting from {self.collection}")
956957
spike_attributes, cluster_attributes = self._get_attributes(dataset_types)
957958
attributes = {'spikes': spike_attributes, 'clusters': cluster_attributes, 'channels': None,

ibllib/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "2.10.6"
1+
__version__ = "2.11.0"
22
import warnings
33

44
from ibllib.misc import logger_config

ibllib/dsp/voltage.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -218,8 +218,8 @@ def interpolate_bad_channels(data, channel_labels=None, h=None, p=1.3, kriging_d
218218
data[i, :] = 0
219219
continue
220220
data[i, :] = np.matmul(weights[imult], data[imult, :])
221-
# from easyqc.gui import viewseis
222-
# f = viewseis(data.T, si=1/30, h=h, title='interp2', taxis=0)
221+
# from viewephys.gui import viewephys
222+
# f = viewephys(data.T, fs=1/30, h=h, title='interp2')
223223
return data
224224

225225

ibllib/oneibl/data_handlers.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,15 @@ def getData(self, one=None):
4747

4848
one = one or self.one
4949
session_datasets = one.list_datasets(one.path2eid(self.session_path), details=True)
50-
df = pd.DataFrame(columns=one._cache.datasets.columns)
50+
dfs = []
5151
for file in self.signature['input_files']:
52-
df = df.append(filter_datasets(session_datasets, filename=file[0], collection=file[1],
53-
wildcards=True, assert_unique=False))
52+
dfs.append(filter_datasets(session_datasets, filename=file[0], collection=file[1],
53+
wildcards=True, assert_unique=False))
54+
df = pd.concat(dfs)
55+
56+
# Some cases the eid is stored in the index. If so we drop this level
57+
if 'eid' in df.index.names:
58+
df = df.droplevel(level='eid')
5459
return df
5560

5661
def uploadData(self, outputs, version):
@@ -228,7 +233,7 @@ def setUp(self):
228233
:return:
229234
"""
230235
df = super().getData()
231-
self.one._download_datasets(df)
236+
self.one._check_filesystem(df)
232237

233238
def uploadData(self, outputs, version, **kwargs):
234239
"""

ibllib/pipes/ephys_preprocessing.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1007,11 +1007,14 @@ def _run(self, cams=None, overwrite=False):
10071007
executable="/bin/bash",
10081008
)
10091009
info, error = process.communicate()
1010-
info_str = info.decode("utf-8").strip()
1011-
_logger.info(info_str)
1010+
# info_str = info.decode("utf-8").strip()
1011+
# _logger.info(info_str)
10121012
if process.returncode != 0:
10131013
error_str = error.decode("utf-8").strip()
1014-
_logger.error(f'DLC failed for {cam}Camera\n {error_str}')
1014+
_logger.error(f'DLC failed for {cam}Camera.\n\n'
1015+
f'++++++++ Output of subprocess for debugging ++++++++\n\n'
1016+
f'{error_str}\n'
1017+
f'++++++++++++++++++++++++++++++++++++++++++++\n')
10151018
self.status = -1
10161019
# We don't run motion energy, or add any files if dlc failed to run
10171020
continue
@@ -1029,11 +1032,14 @@ def _run(self, cams=None, overwrite=False):
10291032
executable="/bin/bash",
10301033
)
10311034
info, error = process.communicate()
1032-
info_str = info.decode("utf-8").strip()
1033-
_logger.info(info_str)
1035+
# info_str = info.decode("utf-8").strip()
1036+
# _logger.info(info_str)
10341037
if process.returncode != 0:
10351038
error_str = error.decode("utf-8").strip()
1036-
_logger.error(f'Motion energy failed for {cam}Camera \n {error_str}')
1039+
_logger.error(f'Motion energy failed for {cam}Camera.\n\n'
1040+
f'++++++++ Output of subprocess for debugging ++++++++\n\n'
1041+
f'{error_str}\n'
1042+
f'++++++++++++++++++++++++++++++++++++++++++++\n')
10371043
self.status = -1
10381044
continue
10391045
actual_outputs.append(next(self.session_path.joinpath('alf').glob(

ibllib/pipes/local_server.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
import ibllib.oneibl.registration as registration
1717

1818
_logger = logging.getLogger('ibllib')
19-
LARGE_TASKS = ['EphysVideoCompress', 'TrainingVideoCompress', 'SpikeSorting', 'EphysDLC'] # 'TrainingDLC',
19+
LARGE_TASKS = ['EphysVideoCompress', 'TrainingVideoCompress', 'SpikeSorting', 'EphysDLC']
2020

2121

2222
def _get_pipeline_class(session_path, one):
@@ -135,39 +135,39 @@ def job_creator(root_path, one=None, dry=False, rerun=False, max_md5_size=None):
135135
return all_datasets
136136

137137

138-
def job_runner(subjects_path, mode='all', lab=None, dry=False, one=None, count=5):
138+
def task_queue(mode='all', lab=None, one=None):
139139
"""
140-
Function to be used as a process to run the jobs as they are created on the database
141-
This will query waiting jobs from the specified Lab
142-
:param subjects_path: on servers: /mnt/s0/Data/Subjects. Contains sessions
143-
:param mode: Whether to run all jobs, or only small or large (video compression, DLC, spike sorting) jobs
144-
:param lab: lab name as per Alyx
145-
:param dry:
146-
:param count:
147-
:return:
140+
Query waiting jobs from the specified Lab
141+
:param mode: Whether to return all waiting tasks, or only small or large (specified in LARGE_TASKS) jobs
142+
:param lab: lab name as per Alyx, otherwise try to infer from local globus install
143+
:param one: ONE instance
144+
-------
145+
148146
"""
149147
if one is None:
150148
one = ONE(cache_rest=None)
151149
if lab is None:
150+
_logger.info("Trying to infer lab from globus installation")
152151
lab = _get_lab(one)
153152
if lab is None:
153+
_logger.error("No lab provided or found")
154154
return # if the lab is none, this will return empty tasks each time
155155
# Filter for tasks
156156
if mode == 'all':
157-
tasks = one.alyx.rest('tasks', 'list', status='Waiting',
158-
django=f'session__lab__name__in,{lab}', no_cache=True)
157+
waiting_tasks = one.alyx.rest('tasks', 'list', status='Waiting',
158+
django=f'session__lab__name__in,{lab}', no_cache=True)
159159
elif mode == 'small':
160160
tasks_all = one.alyx.rest('tasks', 'list', status='Waiting',
161161
django=f'session__lab__name__in,{lab}', no_cache=True)
162-
tasks = [t for t in tasks_all if t['name'] not in LARGE_TASKS]
162+
waiting_tasks = [t for t in tasks_all if t['name'] not in LARGE_TASKS]
163163
elif mode == 'large':
164-
tasks = one.alyx.rest('tasks', 'list', status='Waiting',
165-
django=f'session__lab__name__in,{lab},name__in,{LARGE_TASKS}', no_cache=True)
164+
waiting_tasks = one.alyx.rest('tasks', 'list', status='Waiting',
165+
django=f'session__lab__name__in,{lab},name__in,{LARGE_TASKS}', no_cache=True)
166166

167167
# Order tasks by priority
168-
tasks = sorted(tasks, key=lambda d: d['priority'], reverse=True)
168+
sorted_tasks = sorted(waiting_tasks, key=lambda d: d['priority'], reverse=True)
169169

170-
tasks_runner(subjects_path, tasks, one=one, count=count, time_out=3600, dry=dry)
170+
return sorted_tasks
171171

172172

173173
def tasks_runner(subjects_path, tasks_dict, one=None, dry=False, count=5, time_out=None, **kwargs):

release_notes.md

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,29 @@
1-
## Release Note 2.10
1+
## Release Notes 2.11
22

3-
## Release Note 2.10.6 2022-03-15
3+
### Release Notes 2.11.0 2022-03-08
4+
- brainbox.io.one.SpikeSortingLoader: option to load using `collection` argument
5+
- Restructuring of how jobs are run on local servers, run large jobs as service
6+
7+
## Release Notes 2.10
8+
9+
### Release Notes 2.10.6 2022-03-15
410
- Allow parent tasks to be 'Incomplete' to run task on local server
511
- Change one base_url for dlc_qc_plot on cortexlab
612

7-
## Release Note 2.10.5 2022-03-11
13+
### Release Notes 2.10.5 2022-03-11
814
- Fix moot release accident
915

10-
## Release Note 2.10.4 2022-03-11
16+
### Release Notes 2.10.4 2022-03-11
1117
- Data handler connects to correct alyx database on cortexlab
1218

13-
## Release Note 2.10.3 2022-03-09
19+
### Release Notes 2.10.3 2022-03-09
1420
- Fixes to EphysPostDLC
1521
- Small change to storing in dsp.voltage.decompress_destripe_cbin function
1622

17-
## Release Note 2.10.2 2022-02-28
23+
### Release Notes 2.10.2 2022-02-28
1824
- Small fixes to local server task queues
1925

20-
## Release Note 2.10.1 2022-02-22
26+
### Release Notes 2.10.1 2022-02-22
2127
- Authenticate alyx user in Task class
2228
- Some fixes to make dlc_qc_plot in EphysPostDLC more reliable
2329
- SpikeGlx:
@@ -28,32 +34,32 @@
2834
- voltage: decompress cbin
2935
- add support for custom spikeglx.Reader
3036

31-
## Release Note 2.10.0 2022-02-11
37+
### Release Notes 2.10.0 2022-02-11
3238
- Fix in EphysDLC task to not return multiple copies of outputs
3339
- Loading examples for different IBL data types
3440
- Fix for probe syncing when Nidq and probe pulses don't match
3541
- Account for new ONE tables in ond datahandler
3642
- Add bad channels plots into RawEphysQc task
3743

38-
## Release Note 2.9
44+
## Release Notes 2.9
3945

4046
### Release Notes 2.9.1 2022-01-24
4147
- deprecation warnings and documentation for spike sorting loading method
4248
- bugfix: remove lru_cache on AllenAtlas class for iblviewer
4349

44-
### Release Note 2.9.0 2022-01-24
50+
### Release Notes 2.9.0 2022-01-24
4551
- Adding EphysDLC task in ephys_preprocessing pipeline
4652
- NOTE: requires DLC environment to be set up on local servers!
4753
- Fixes to EphysPostDLC dlc_qc_plot
4854

49-
## Release Note 2.8
55+
## Release Notes 2.8
5056

5157
### Release Notes 2.8.0 2022-01-19
5258
- Add lfp, aprms, spike raster and behaviour report plots to task infrastructure
5359
- Computation of apRMS in decompress_destripe_cbin before conversion to normalised units
5460
- Add SpikeSortingLoader class in brainbox.io.one
5561

56-
## Release Note 2.7
62+
## Release Notes 2.7
5763

5864
### Release Notes 2.7.1 2022-01-05
5965

0 commit comments

Comments
 (0)