Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Get com coords #9

Merged
merged 10 commits into from
May 22, 2022
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -130,3 +130,8 @@ dmypy.json

# pycharm
.idea/

# test files
tests/tmp
tests/videos

34 changes: 18 additions & 16 deletions mesmerize_core/algorithms/cnmf.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,14 +58,13 @@ def main(batch_path, uuid, data_path: str = None):
Yr, dims, T = cm.load_memmap(fname_new)
images = np.reshape(Yr.T, [T] + list(dims), order='F')

mean_projection_path = str(Path(input_movie_path).parent.joinpath(f'{uuid}_mean_projection.npy'))
std_projection_path = str(Path(input_movie_path).parent.joinpath(f'{uuid}_std_projection.npy'))
max_projection_path = str(Path(input_movie_path).parent.joinpath(f'{uuid}_max_projection.npy'))
np.save(mean_projection_path, np.mean(images, axis=0))
np.save(std_projection_path, np.std(images, axis=0))
np.save(max_projection_path, np.max(images, axis=0))
# in fname new load in memmap order C
proj_paths = dict()
for proj_type in ['mean', 'std', 'max']:
p_img = getattr(np, f'nan{proj_type}')(images, axis=0)
proj_paths[proj_type] = Path(input_movie_path).parent.joinpath(f'{uuid}_{proj_type}.npy')
np.save(str(proj_paths[proj_type]), p_img)

# in fname new load in memmap order C
cm.stop_server(dview=dview)
c, dview, n_processes = cm.cluster.setup_cluster(
backend='local',
Expand All @@ -90,33 +89,36 @@ def main(batch_path, uuid, data_path: str = None):
print("Eval")
cnm.estimates.evaluate_components(images, cnm.params, dview=dview)

output_path = str(get_full_data_path(input_movie_path).parent.joinpath(f"{uuid}.hdf5").resolve())
output_path = get_full_data_path(input_movie_path).parent.joinpath(f"{uuid}.hdf5").resolve()

cnm.save(output_path)
cnm.save(str(output_path))

Cn = cm.local_correlations(images.transpose(1, 2, 0))
Cn[np.isnan(Cn)] = 0

corr_img_path = Path(input_movie_path).parent.joinpath(f'{uuid}_cn.npy').resolve()
np.save(str(corr_img_path), Cn, allow_pickle=False)

if data_path is not None:
cnmf_hdf5_path = Path(output_path).relative_to(data_path)
# output dict for dataframe row (pd.Series)
d = dict()

if data_path is not None: # relative paths
cnmf_hdf5_path = output_path.relative_to(data_path)
cnmf_memmap_path = Path(fname_new).relative_to(data_path)
corr_img_path = corr_img_path.relative_to(data_path)
else:
for proj_type in proj_paths.keys():
d[f"{proj_type}-projection-path"] = proj_paths[proj_type].relative_to(data_path)
else: # absolute paths
cnmf_hdf5_path = output_path
cnmf_memmap_path = fname_new
for proj_type in proj_paths.keys():
d[f"{proj_type}-projection-path"] = proj_paths[proj_type]

d = dict()
d.update(
{
"cnmf-hdf5-path": cnmf_hdf5_path,
"cnmf-memmap-path": cnmf_memmap_path,
"corr-img-path": corr_img_path,
"mean-projection-path": mean_projection_path,
"std-projection-path": std_projection_path,
"max-projection-path": max_projection_path,
"success": True,
"traceback": None
}
Expand Down
34 changes: 17 additions & 17 deletions mesmerize_core/algorithms/cnmfe.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,12 +49,11 @@ def main(batch_path, uuid, data_path: str = None):
Yr, dims, T = cm.load_memmap(fname_new)
images = np.reshape(Yr.T, [T] + list(dims), order='F')

mean_projection_path = str(Path(input_movie_path).parent.joinpath(f'{uuid}_mean_projection.npy'))
std_projection_path = str(Path(input_movie_path).parent.joinpath(f'{uuid}_std_projection.npy'))
max_projection_path = str(Path(input_movie_path).parent.joinpath(f'{uuid}_max_projection.npy'))
np.save(mean_projection_path, np.mean(images, axis=0))
np.save(std_projection_path, np.std(images, axis=0))
np.save(max_projection_path, np.max(images, axis=0))
proj_paths = dict()
for proj_type in ['mean', 'std', 'max']:
p_img = getattr(np, f'nan{proj_type}')(images, axis=0)
proj_paths[proj_type] = Path(input_movie_path).parent.joinpath(f'{uuid}_{proj_type}.npy')
np.save(str(proj_paths[proj_type]), p_img)

downsample_ratio = params['downsample_ratio']
# in fname new load in memmap order C
Expand All @@ -63,11 +62,11 @@ def main(batch_path, uuid, data_path: str = None):
images[::downsample_ratio], swap_dim=False, gSig=gSig
)

pnr_output_path = str(Path(input_movie_path).parent.joinpath(f"{uuid}_pn.npy").resolve())
cn_output_path = str(Path(input_movie_path).parent.joinpath(f"{uuid}_cn.npy").resolve())
pnr_output_path = Path(input_movie_path).parent.joinpath(f"{uuid}_pn.npy").resolve()
cn_output_path = Path(input_movie_path).parent.joinpath(f"{uuid}_cn.npy").resolve()

np.save(str(cn_output_path), cn_filter, allow_pickle=False)
np.save(str(pnr_output_path), pnr, allow_pickle=False)
np.save(str(cn_output_path), cn_filter, allow_pickle=False)

d = dict() # for output

Expand All @@ -92,13 +91,17 @@ def main(batch_path, uuid, data_path: str = None):
print("evaluating components")
cnm.estimates.evaluate_components(images, cnm.params, dview=dview)

output_path = str(Path(input_movie_path).parent.joinpath(f"{uuid}.hdf5").resolve())
cnm.save(output_path)
output_path = Path(input_movie_path).parent.joinpath(f"{uuid}.hdf5").resolve()
cnm.save(str(output_path))

if data_path is not None:
cnmf_hdf5_path = Path(output_path).relative_to(data_path)
for proj_type in proj_paths.keys():
d[f"{proj_type}-projection-path"] = proj_paths[proj_type].relative_to(data_path)
else:
cnmf_hdf5_path = output_path
for proj_type in proj_paths.keys():
d[f"{proj_type}-projection-path"] = proj_paths[proj_type]

d.update(
{
Expand All @@ -108,8 +111,8 @@ def main(batch_path, uuid, data_path: str = None):

if data_path is not None:
cnmfe_memmap_path = Path(fname_new).relative_to(data_path)
cn_output_path = Path(cn_output_path).relative_to(data_path)
pnr_output_path = Path(pnr_output_path).relative_to(data_path)
cn_output_path = cn_output_path.relative_to(data_path)
pnr_output_path = pnr_output_path.relative_to(data_path)
else:
cnmfe_memmap_path = fname_new

Expand All @@ -118,15 +121,12 @@ def main(batch_path, uuid, data_path: str = None):
"cnmf-memmap-path": cnmfe_memmap_path,
"corr-img-path": cn_output_path,
"pnr-image-path": pnr_output_path,
"mean-projection-path": mean_projection_path,
"std-projection-path": std_projection_path,
"max-projection-path": max_projection_path,
"success": True,
"traceback": None
}
)

print(d)
print(f"Final output dict:\n{d}")

except:
d = {"success": False, "traceback": traceback.format_exc()}
Expand Down
42 changes: 24 additions & 18 deletions mesmerize_core/algorithms/mcorr.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,13 +68,11 @@ def main(batch_path, uuid, data_path: str = None):
Yr, dims, T = cm.load_memmap(str(get_full_data_path(output_path)))
images = np.reshape(Yr.T, [T] + list(dims), order='F')

paths=[]
proj_paths = dict()
for proj_type in ['mean', 'std', 'max']:
p_img = getattr(np, f"nan{proj_type}")(images, axis=0)
proj_path = f"{uuid}_{proj_type}_projection.npy"
np.save(str(Path(input_movie_path).parent.joinpath(proj_path)), p_img)
paths.append(str(Path(input_movie_path).parent.joinpath(proj_path)))

p_img = getattr(np, f'nan{proj_type}')(images, axis=0)
proj_paths[proj_type] = Path(input_movie_path).parent.joinpath(f'{uuid}_{proj_type}.npy')
np.save(str(proj_paths[proj_type]), p_img)

print("Computing correlation image")
Cns = local_correlations_movie_offline([mc.mmap_file[0]],
Expand All @@ -83,11 +81,11 @@ def main(batch_path, uuid, data_path: str = None):
dview=dview)
Cn = Cns.max(axis=0)
Cn[np.isnan(Cn)] = 0
cn_path = str(Path(input_movie_path).parent.joinpath(f'{uuid}_cn.npy'))
np.save(cn_path, Cn, allow_pickle=False)
cn_path = Path(input_movie_path).parent.joinpath(f'{uuid}_cn.npy')
np.save(str(cn_path), Cn, allow_pickle=False)

if data_path is not None:
cn_path = Path(cn_path).relative_to(data_path)
# output dict for pandas series for dataframe row
d = dict()

print("finished computing correlation image")

Expand All @@ -96,26 +94,34 @@ def main(batch_path, uuid, data_path: str = None):
x_shifts = mc.x_shifts_els
y_shifts = mc.y_shifts_els
shifts = [x_shifts, y_shifts]
shift_path = str(Path(input_movie_path).parent.joinpath(f"{uuid}_shifts.npy"))
np.save(shift_path, shifts)
shift_path = Path(input_movie_path).parent.joinpath(f"{uuid}_shifts.npy")
np.save(str(shift_path), shifts)
else:
shifts = mc.shifts_rig
shift_path = str(Path(input_movie_path).parent.joinpath(f"{uuid}_shifts.npy"))
np.save(shift_path, shifts)
shift_path = Path(input_movie_path).parent.joinpath(f"{uuid}_shifts.npy")
np.save(str(shift_path), shifts)

if data_path is not None:
cn_path = cn_path.relative_to(data_path)
output_path = get_full_data_path(output_path).relative_to(data_path)
shift_path = shift_path.relative_to(data_path)
for proj_type in proj_paths.keys():
d[f"{proj_type}-projection-path"] = proj_paths[proj_type].relative_to(data_path)
else:
cn_path = cn_path
output_path = get_full_data_path(output_path)
shift_path = shift_path.resolve()

d = dict()
d.update(
{
"mcorr-output-path": output_path,
"corr-img-path": cn_path,
"mean-projection-path": paths[0],
"std-projection-path": paths[1],
"max-projection-path": paths[2],
"shifts": shift_path,
"success": True,
"traceback": None
}
)

except:
d = {"success": False, "traceback": traceback.format_exc()}
print("mc failed, stored traceback in output")
Expand Down
42 changes: 22 additions & 20 deletions mesmerize_core/caiman_extensions/cnmf.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from functools import lru_cache
from pathlib import Path
from typing import List, Tuple
from typing import *

import numpy as np
import pandas as pd
Expand Down Expand Up @@ -121,7 +121,10 @@ def get_spatial_masks(self, ixs_components: np.ndarray, threshold: float = 0.01)
# TODO: Cache this globally so that a common upper cache limit is valid for ALL batch items
@staticmethod
@lru_cache(5)
def _get_spatial_contour_coors(cnmf_obj: CNMF):
def _get_spatial_contours(cnmf_obj: CNMF, ixs_components: Optional[np.ndarray] = None):
if ixs_components is None:
ixs_components = cnmf_obj.estimates.idx_components

dims = cnmf_obj.dims
if dims is None: # I think that one of these is `None` if loaded from an hdf5 file
dims = cnmf_obj.estimates.dims
Expand All @@ -130,47 +133,46 @@ def _get_spatial_contour_coors(cnmf_obj: CNMF):
dims = dims[1], dims[0]

contours = caiman_get_contours(
cnmf_obj.estimates.A,
cnmf_obj.estimates.A[:, ixs_components],
dims,
swap_dim=True
)

return contours

@validate('cnmf')
def get_spatial_contours(self, ixs_components: np.ndarray) -> List[dict]:
def get_spatial_contours(
self,
ixs_components: Optional[np.ndarray] = None
) -> Tuple[List[np.ndarray], List[np.ndarray]]:
"""
Get the contours for the spatial footprints
Get the contour and center of mass for each spatial footprint

Parameters
----------
ixs_components: np.ndarray
indices for which to return spatial contours
indices for which to return spatial contours.
if `None` just returns according to cnmf.estimates.idx_components

Returns
-------

"""
cnmf_obj = self.get_output()
contours = self._get_spatial_contour_coors(cnmf_obj)

contours_selection = list()
for i in range(len(contours)):
if i in ixs_components:
contours_selection.append(contours[i])
contours = self._get_spatial_contours(cnmf_obj, ixs_components)

return contours_selection
coordinates = list()
coms = list()

@validate('cnmf')
def get_spatial_contour_coors(self, ixs_components: np.ndarray) -> List[np.ndarray]:
contours = self.get_spatial_contours(ixs_components)

coordinates = []
for contour in contours:
coors = contour['coordinates']
coordinates.append(coors[~np.isnan(coors).any(axis=1)])
coors = coors[~np.isnan(coors).any(axis=1)]
coordinates.append(coors)

com = coors.mean(axis=0)
coms.append(com)

return coordinates
return coordinates, coms

@validate('cnmf')
def get_temporal_components(self, ixs_components: np.ndarray = None, add_background: bool = False) -> np.ndarray:
Expand Down
6 changes: 2 additions & 4 deletions mesmerize_core/caiman_extensions/mcorr.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def get_shifts(self, output_type: str, pw_rigid: bool = True) -> Union[np.ndarra
return shifts

if pw_rigid:
x_shifts, y_shifts = shifts
return shifts
else:
n_pts = shifts.shape[0]
n_lines = shifts.shape[1]
Expand All @@ -78,6 +78,4 @@ def get_shifts(self, output_type: str, pw_rigid: bool = True) -> Union[np.ndarra

for i in range(n_lines):
ys.append(shifts[:, i])
return xs, ys


return xs, ys