Add single-cell std dev #50

Merged
merged 1 commit into from
Dec 7, 2023
2 changes: 1 addition & 1 deletion microutil/btrack/btrack.py
@@ -43,7 +43,7 @@ def gogogo_btrack(labels, config_file, radius, tracks_out):
     tracker.volume = ((0, labels.shape[-2]), (0, labels.shape[-1]), (-1e5, 1e5))

     # track them (in interactive mode)
-    tracker.track_interactive(step_size=100)
+    tracker.track(step_size=100)

     # generate hypotheses and run the global optimizer
     tracker.optimize()
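The only change here follows btrack's API: the diff replaces track_interactive(step_size=100) with track(step_size=100), which suggests the stepped tracking entry point was renamed in the btrack release microutil now targets. If a pipeline has to run against both old and new btrack versions, a small compatibility helper can absorb the difference (run_tracking is a hypothetical sketch, not part of microutil or btrack):

def run_tracking(tracker, step_size=100):
    """Dispatch the stepped tracking call across btrack versions (hypothetical helper)."""
    try:
        # Newer btrack exposes stepped tracking as track(step_size=...).
        return tracker.track(step_size=step_size)
    except (AttributeError, TypeError):
        # Older releases only accepted a step size via track_interactive().
        return tracker.track_interactive(step_size=step_size)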
3 changes: 2 additions & 1 deletion microutil/leica/leica.py
@@ -296,7 +296,8 @@ def ldm_to_time(inds):
     for i, s in inds.groupby('S'):
         new_vals = np.arange(s['T'].nunique())
         old_vals = sorted(s['T'].unique())
-        mapper = mapper.append(pd.Series(data=new_vals, index=old_vals))
+        # mapper = mapper.append(pd.Series(data=new_vals, index=old_vals))
+        mapper = pd.concat([mapper, pd.Series(data=new_vals, index=old_vals)])
     # return mapper
     inds['T'] = mapper[inds['T'].values].values
     return inds
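This hunk swaps pd.Series.append, which was deprecated in pandas 1.4 and removed in pandas 2.0, for pd.concat, keeping the commented-out original call for reference. A standalone sketch of the same pattern on made-up indices (the groups dict and the final lookup are illustrative only):

import numpy as np
import pandas as pd

# Hypothetical sparse frame indices for two stage positions, as in ldm_to_time.
groups = {"S0": [3, 7, 11], "S1": [2, 5]}

pieces = []
for _, old_vals in groups.items():
    new_vals = np.arange(len(old_vals))  # 0, 1, 2, ...
    pieces.append(pd.Series(data=new_vals, index=old_vals))

# Series.append is gone in pandas 2.0; pd.concat is the supported replacement.
mapper = pd.concat(pieces)
print(mapper.loc[7])  # -> 1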
43 changes: 43 additions & 0 deletions microutil/single_cell/single_cell.py
@@ -2,6 +2,7 @@
     "average",
     "center_of_mass",
     "area",
+    "standard_dev",
     "cell_op",
     "bootstrap",
     "regionprops",

@@ -185,6 +186,42 @@ def padded_mean(intensity, labels, Nmax=None):
     )


+def standard_dev(ds, intensity, label_name='labels', cell_dim_name="CellID", dims='STCZYX'):
+    """
+    Compute the standard deviation of the intensity array over each labelled region.
+
+    Parameters
+    ----------
+    ds : xr.Dataset
+        Dataset containing the labels.
+    intensity : xr.DataArray
+        DataArray of values whose standard deviation is computed over each labelled region.
+    label_name : str
+        Name of the variable in ds that contains the labelled regions.
+    cell_dim_name : str, default "CellID"
+        Name of the dimension in the output array that indexes individual cells.
+    dims : str or list of str, default 'STCZYX'
+        Dimension names that correspond to STCZYX.
+    """
+
+    if isinstance(dims, str):
+        S, T, C, Z, Y, X = list(dims)
+    elif isinstance(dims, list):
+        S, T, C, Z, Y, X = dims
+
+    def padded_std(intensity, labels, Nmax=None):
+        # One value per label 1..Nmax-1 so every frame yields an array of the same length.
+        with warnings.catch_warnings():
+            warnings.simplefilter('ignore')
+            out = np.asarray(
+                ndi.standard_deviation(intensity, labels=labels, index=np.arange(1, Nmax))
+            )
+        return out
+
+    return cell_op(
+        ds, padded_std, intensity, label_name=label_name, cell_dim_name=cell_dim_name, dims=dims
+    )


 def center_of_mass(ds, com_name='com', label_name='labels', cell_dim_name='CellID', dims='STCZYX'):
     """
     Compute the center of mass of each labeled cell in a dataset.
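The new standard_dev appears to mirror the existing average helper: per frame it calls scipy.ndimage.standard_deviation over the labelled regions, and cell_op handles looping over the dataset and assembling the cell dimension. The core per-frame call, shown standalone on invented data (array shapes, label layout, and the Nmax choice are made up for illustration):

import numpy as np
import scipy.ndimage as ndi

rng = np.random.default_rng(0)
intensity = rng.random((64, 64))           # hypothetical single-frame image
labels = np.zeros((64, 64), dtype=int)     # hypothetical segmentation with two cells
labels[4:20, 4:20] = 1
labels[30:50, 30:50] = 2

Nmax = labels.max() + 1                    # one slot per label, as in the wrapped call
per_cell_std = ndi.standard_deviation(intensity, labels=labels, index=np.arange(1, Nmax))
print(per_cell_std)                        # two values, one per labelled cell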
@@ -372,6 +409,12 @@ def regionprops_pandas(ds, properties=DEFAULT_PROPERTIES, label_name='labels', d
     Loop over the frames of ds and compute the regionprops for
     each labelled image in each frame.
     """
+    warnings.warn(
+        "Use the implementation in dask-regionprops for new code.",
+        DeprecationWarning,
+        stacklevel=2,
+    )

     if isinstance(dims, str):
         S, T, C, Z, Y, X = list(dims)
     elif isinstance(dims, list):
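The second hunk deprecates regionprops_pandas in favour of dask-regionprops by emitting a DeprecationWarning on entry. Note that Python hides DeprecationWarning by default outside of __main__ and test runners, and that stacklevel=2 attributes the warning to the caller rather than to the warn() line. A standalone illustration (old_api is an invented stand-in, not microutil code):

import warnings

def old_api():
    # Invented stand-in for a deprecated library function.
    warnings.warn(
        "Use the implementation in dask-regionprops for new code.",
        DeprecationWarning,
        stacklevel=2,  # point the warning at the caller of old_api
    )

warnings.simplefilter("always", DeprecationWarning)  # opt in; hidden by default in library code
old_api()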