- '''
- Processes data from one form into another, e.g. taking spike times and binning them into
- non-overlapping bins and convolving spike times with a gaussian kernel.
- '''
+ """Process data from one form into another.
+
+ For example, taking spike times and binning them into non-overlapping bins and convolving spike
+ times with a gaussian kernel.
+ """

import numpy as np
import pandas as pd
from scipy import interpolate, sparse
from brainbox import core
- from iblutil.numerical import bincount2D as _bincount2D
+ from iblutil.numerical import bincount2D
from iblutil.util import Bunch
import logging
- import warnings
- import traceback

_logger = logging.getLogger(__name__)

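Worth noting for downstream code: because the new import binds the public name `bincount2D` at module level instead of aliasing it to `_bincount2D`, `brainbox.processing.bincount2D` should keep resolving after the wrapper below is removed, now directly to the iblutil implementation. A minimal sanity check of that assumption:

    import iblutil.numerical
    from brainbox import processing

    # The module now re-exports iblutil's implementation rather than wrapping it,
    # so both attribute lookups should yield the same function object.
    assert processing.bincount2D is iblutil.numerical.bincount2D
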
@@ -118,35 +117,6 @@ def sync(dt, times=None, values=None, timeseries=None, offsets=None, interp='zer
    return syncd


- def bincount2D(x, y, xbin=0, ybin=0, xlim=None, ylim=None, weights=None):
-     """
-     Computes a 2D histogram by aggregating values in a 2D array.
-
-     :param x: values to bin along the 2nd dimension (c-contiguous)
-     :param y: values to bin along the 1st dimension
-     :param xbin:
-         scalar: bin size along 2nd dimension
-         0: aggregate according to unique values
-         array: aggregate according to exact values (count reduce operation)
-     :param ybin:
-         scalar: bin size along 1st dimension
-         0: aggregate according to unique values
-         array: aggregate according to exact values (count reduce operation)
-     :param xlim: (optional) 2 values (array or list) that restrict range along 2nd dimension
-     :param ylim: (optional) 2 values (array or list) that restrict range along 1st dimension
-     :param weights: (optional) defaults to None, weights to apply to each value for aggregation
-     :return: 3 numpy arrays MAP [ny,nx] image, xscale [nx], yscale [ny]
-     """
-     for line in traceback.format_stack():
-         print(line.strip())
-     warning_text = """Deprecation warning: bincount2D() is now a part of iblutil.
-     brainbox.processing.bincount2D is deprecated and will be removed in
-     future versions. Please replace imports with iblutil.numerical.bincount2D."""
-     _logger.warning(warning_text)
-     warnings.warn(warning_text, DeprecationWarning)
-     return _bincount2D(x, y, xbin, ybin, xlim, ylim, weights)
-
-
def compute_cluster_average(spike_clusters, spike_var):
    """
    Quickish way to compute the average of some quantity across spikes in each cluster given
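For callers migrating off the removed wrapper, the replacement in `iblutil.numerical` keeps the interface documented above: `x` is binned along the second dimension and `y` along the first, a scalar bin gives a fixed bin size while `0` aggregates by unique values, and three arrays come back (the `[ny, nx]` image plus the x and y scales). A minimal sketch of the new call, using hypothetical spike-time and cluster arrays purely for illustration:

    import numpy as np
    from iblutil.numerical import bincount2D

    # Hypothetical inputs: spike times in seconds and the cluster id of each spike.
    spike_times = np.array([0.001, 0.002, 0.013, 0.021, 0.034])
    spike_clusters = np.array([0, 1, 0, 2, 1])

    # Time (x) binned at 10 ms; clusters (y) aggregated by their unique values (ybin=0).
    # raster is the [n_clusters, n_time_bins] count image; the scales label the two axes.
    raster, t_scale, cluster_scale = bincount2D(spike_times, spike_clusters, xbin=0.01, ybin=0)
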
@@ -197,7 +167,7 @@ def bin_spikes(spikes, binsize, interval_indices=False):


def get_units_bunch(spks_b, *args):
-     '''
+     """
    Returns a bunch, where the bunch keys are keys from `spks` with labels of spike information
    (e.g. unit IDs, times, features, etc.), and the values for each key are arrays with values for
    each unit: these arrays are ordered and can be indexed by unit id.
@@ -223,18 +193,18 @@ def get_units_bunch(spks_b, *args):
    --------
    1) Create a units bunch given a spikes bunch, and get the amps for unit #4 from the units
    bunch.
-     >>> import brainbox as bb
-     >>> import alf.io as aio
+     >>> from brainbox import processing
+     >>> import one.alf.io as alfio
    >>> import ibllib.ephys.spikes as e_spks
    (*Note, if there is no 'alf' directory, make 'alf' directory from 'ks2' output directory):
    >>> e_spks.ks2_to_alf(path_to_ks_out, path_to_alf_out)
-     >>> spks_b = aio.load_object(path_to_alf_out, 'spikes')
-     >>> units_b = bb.processing.get_units_bunch(spks_b)
+     >>> spks_b = alfio.load_object(path_to_alf_out, 'spikes')
+     >>> units_b = processing.get_units_bunch(spks_b)
    # Get amplitudes for unit 4.
    >>> amps = units_b['amps']['4']

    TODO add computation time estimate?
-     '''
+     """

    # Initialize `units`
    units_b = Bunch()
@@ -261,7 +231,7 @@ def get_units_bunch(spks_b, *args):


def filter_units(units_b, t, **kwargs):
-     '''
+     """
    Filters units according to some parameters. **kwargs are the keyword parameters used to filter
    the units.

@@ -299,24 +269,24 @@ def filter_units(units_b, t, **kwargs):
    Examples
    --------
    1) Filter units according to the default parameters.
-     >>> import brainbox as bb
-     >>> import alf.io as aio
+     >>> from brainbox import processing
+     >>> import one.alf.io as alfio
    >>> import ibllib.ephys.spikes as e_spks
    (*Note, if there is no 'alf' directory, make 'alf' directory from 'ks2' output directory):
    >>> e_spks.ks2_to_alf(path_to_ks_out, path_to_alf_out)
    # Get a spikes bunch, units bunch, and filter the units.
-     >>> spks_b = aio.load_object(path_to_alf_out, 'spikes')
-     >>> units_b = bb.processing.get_units_bunch(spks_b, ['times', 'amps', 'clusters'])
+     >>> spks_b = alfio.load_object(path_to_alf_out, 'spikes')
+     >>> units_b = processing.get_units_bunch(spks_b, ['times', 'amps', 'clusters'])
    >>> T = spks_b['times'][-1] - spks_b['times'][0]
-     >>> filtered_units = bb.processing.filter_units(units_b, T)
+     >>> filtered_units = processing.filter_units(units_b, T)

    2) Filter units with no minimum amplitude, a minimum firing rate of 1 Hz, and a max false
    positive rate of 0.2, given a refractory period of 2 ms.
-     >>> filtered_units = bb.processing.filter_units(units_b, T, min_amp=0, min_fr=1)
+     >>> filtered_units = processing.filter_units(units_b, T, min_amp=0, min_fr=1)

    TODO: `units_b` input arg could eventually be replaced by `clstrs_b` if the required metrics
    are in `clstrs_b['metrics']`
-     '''
+     """

    # Set params
    params = {'min_amp': 50e-6, 'min_fr': 0.5, 'max_fpr': 0.2, 'rp': 0.002}  # defaults