Reformatting generation scripts #15

Merged: 8 commits, May 28, 2020
13 changes: 13 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,13 @@
repos:
  - repo: https://github.com/python/black
    rev: 19.10b0
    hooks:
      - id: black
        pass_filenames: true
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.7.9
    hooks:
      - id: flake8
        pass_filenames: true
        # this seems to need to be here in addition to setup.cfg
        exclude: __init__.py
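For reference, once this config is committed the hooks run on every git commit; they can also be run on demand. A minimal sketch driving the same checks from Python, to match the rest of the PR (assumes the pre-commit package is installed; install and run --all-files are its standard subcommands):

import subprocess

# Install the git hook once, then run every hook against the whole repo;
# a nonzero exit code means a hook failed or reformatted files:
subprocess.run(["pre-commit", "install"], check=True)
result = subprocess.run(["pre-commit", "run", "--all-files"])
print("all hooks passed" if result.returncode == 0 else "hooks modified files or failed")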
4 changes: 4 additions & 0 deletions brainatlas_api/atlas_gen/__init__.py
@@ -0,0 +1,4 @@
from brainatlas_api.atlas_gen.wrapup import wrapup_atlas_from_dir
from brainatlas_api.atlas_gen.stacks import save_anatomy, save_annotation

from brainatlas_api.atlas_gen import descriptors
Empty file.
91 changes: 91 additions & 0 deletions brainatlas_api/atlas_gen/atlas_scripts/allenbrain_atlas.py
@@ -0,0 +1,91 @@
from allensdk.api.queries.ontologies_api import OntologiesApi
from allensdk.api.queries.reference_space_api import ReferenceSpaceApi
from allensdk.core.reference_space_cache import ReferenceSpaceCache

from requests import exceptions
from pathlib import Path
import tempfile
import json

import tifffile
import pandas as pd

RES_UM = 25
ATLAS_NAME = f"allenbrain{RES_UM}um"

# Generated atlas path:
bg_root_dir = Path.home() / "brainglobe"
bg_root_dir.mkdir(exist_ok=True)

# Temporary folder for nrrd files download:
temp_path = Path(tempfile.mkdtemp())
downloading_path = temp_path / "downloading_path"
downloading_path.mkdir()

# Temporary folder for files before compressing:
uncompr_atlas_path = temp_path / ATLAS_NAME
uncompr_atlas_path.mkdir()

# Download annotated and template volume:
#########################################
spacecache = ReferenceSpaceCache(
    manifest=downloading_path / "manifest.json",
    # downloaded files are stored relative to here
    resolution=RES_UM,
    reference_space_key="annotation/ccf_2017",
    # use the latest version of the CCF
)

# Download
annotated_volume, _ = spacecache.get_annotation_volume()
template_volume, _ = spacecache.get_template_volume()
print("Download completed...")
# Save tiff stacks:
tifffile.imsave(str(uncompr_atlas_path / "reference.tiff"), template_volume)
tifffile.imsave(str(uncompr_atlas_path / "annotated.tiff"), annotated_volume)

# Download structures tree and meshes:
######################################
oapi = OntologiesApi() # ontologies
struct_tree = spacecache.get_structure_tree() # structures tree

# Find id of set of regions with mesh:
select_set = "Structures whose surfaces are represented by a precomputed mesh"

all_sets = pd.DataFrame(oapi.get_structure_sets())
mesh_set_id = all_sets[all_sets.description == select_set].id.values[0]

structs_with_mesh = struct_tree.get_structures_by_set_id([mesh_set_id])

meshes_dir = uncompr_atlas_path / "meshes"  # directory to save meshes into
meshes_dir.mkdir(exist_ok=True)  # ensure the directory exists before downloads
space = ReferenceSpaceApi()
for s in structs_with_mesh:
    name = s["id"]
    try:
        space.download_structure_mesh(
            structure_id=s["id"],
            ccf_version="annotation/ccf_2017",
            file_name=meshes_dir / f"{name}.obj",
        )
    except (exceptions.HTTPError, ConnectionError):
        # Report structures whose mesh could not be downloaded:
        print(s)

# Loop over structures, remove entries not used in brainglobe:
for struct in structs_with_mesh:
    for k in ["graph_id", "structure_set_ids", "graph_order"]:
        struct.pop(k)

with open(uncompr_atlas_path / "structures.json", "w") as f:
    json.dump(structs_with_mesh, f)

metadata_dict = {
    "name": ATLAS_NAME,
    "citation": "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007",
    "atlas_link": "https://www.brain-map.org",
    "species": "Mus musculus",
    "symmetric": True,
    "resolution": (RES_UM, RES_UM, RES_UM),
    "shape": template_volume.shape,
}

with open(uncompr_atlas_path / "atlas_metadata.json", "w") as f:
    json.dump(metadata_dict, f)
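A quick round-trip check of the directory written above, assuming the script has just run (a sketch, not part of the PR; it reuses the script's uncompr_atlas_path):

import json

import tifffile

with open(uncompr_atlas_path / "atlas_metadata.json") as f:
    meta = json.load(f)
with open(uncompr_atlas_path / "structures.json") as f:
    structures = json.load(f)

# The saved reference stack should match the metadata shape:
reference = tifffile.imread(str(uncompr_atlas_path / "reference.tiff"))
assert list(reference.shape) == list(meta["shape"])
print(f"{meta['name']}: {len(structures)} structures, shape {meta['shape']}")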
117 changes: 117 additions & 0 deletions brainatlas_api/atlas_gen/atlas_scripts/fishatlas_atlas.py
@@ -0,0 +1,117 @@
from brainatlas_api.atlas_gen.atlas_scripts.fishatlas_utils import (
    add_path_inplace,
    collect_all_inplace,
)

from pathlib import Path
import tempfile
from brainatlas_api.utils import retrieve_over_http
import json
import tarfile

import nrrd
import numpy as np
import tifffile
import requests

ATLAS_NAME = "fishatlas"

base_url = r"https://fishatlas.neuro.mpg.de"


# Generated atlas path:
bg_root_dir = Path.home() / "brainglobe"
bg_root_dir.mkdir(exist_ok=True)

# Temporary folder for nrrd files download:
temp_path = Path(tempfile.mkdtemp())
download_dir_path = temp_path / "downloading_path"
download_dir_path.mkdir()

# Temporary folder for files before compressing:
uncompr_atlas_path = temp_path / ATLAS_NAME
uncompr_atlas_path.mkdir()

# Download reference:
#####################
reference_url = f"{base_url}/media/brain_browser/Brain/MovieViewBrain/standard_brain_fixed_SYP_T_GAD1b.nrrd"
out_file_path = download_dir_path / "reference.nrrd"

retrieve_over_http(reference_url, out_file_path)

# Cleanup to have in brainglobe order:
refstack_axes = (1, 2, 0)
refstack_flips = [False, True, False]

refstack, h = nrrd.read(str(out_file_path))

refstack = refstack.transpose(refstack_axes)
for i, flip in enumerate(refstack_flips):
    if flip:
        refstack = np.flip(refstack, i)


tifffile.imsave(str(uncompr_atlas_path / "reference.tiff"), refstack)
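For intuition: refstack_axes and refstack_flips together define a single reorientation (permute axes, then mirror selected ones), and the matching transform is applied to mesh vertices in fishatlas_utils below. A self-contained sketch on a dummy array (the array contents are illustrative):

import numpy as np

stack = np.arange(2 * 3 * 4).reshape(2, 3, 4)
axes, flips = (1, 2, 0), [False, True, False]

reoriented = stack.transpose(axes)
for i, flip in enumerate(flips):
    if flip:
        reoriented = np.flip(reoriented, i)  # mirror the second axis

assert reoriented.shape == (3, 4, 2)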

# Download structures tree and meshes:
######################################
regions_url = f"{base_url}/neurons/get_brain_regions"

meshes_dir_path = uncompr_atlas_path / "meshes"
meshes_dir_path.mkdir(exist_ok=True)

# Download structures hierarchy:
regions = requests.get(regions_url).json()["brain_regions"]

# Initiate dictionary with root info:
regions_dict = {
    "name": "root",
    "id": 0,
    "sub_regions": regions.copy(),
    "structure_id_path": [],
    "acronym": "root",
    "files": {
        "file_3D": "/media/Neurons_database/Brain_and_regions/Brains/Outline/Outline_new.txt"
    },
    "color": "#ffffff",
}

# Go through the regions hierarchy and create the structure path entry:
add_path_inplace(regions_dict)

# Create empty list and collect all regions traversing the regions hierarchy:
regions_list = []
collect_all_inplace(
    regions_dict,
    regions_list,
    meshes_dir_path,
    refstack_axes,
    refstack_flips,
    refstack.shape,
)

# save regions list json:
with open(uncompr_atlas_path / "structures.json", "w") as f:
    json.dump(regions_list, f)

# Write metadata:
#################
metadata_dict = {
    "name": ATLAS_NAME,
    "citation": "Kunst et al 2019, https://doi.org/10.1016/j.neuron.2019.04.034",
    "atlas_link": "https://fishatlas.neuro.mpg.de",
    "species": "Danio rerio",
    "symmetric": False,
    "resolution": (0.994, 1, 0.994),
    "shape": refstack.shape,
}

with open(uncompr_atlas_path / "atlas_metadata.json", "w") as f:
    json.dump(metadata_dict, f)

# Compress folder:
output_filename = bg_root_dir / f"{uncompr_atlas_path.name}.tar.gz"
with tarfile.open(output_filename, "w:gz") as tar:
    tar.add(uncompr_atlas_path, arcname=uncompr_atlas_path.name)
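A quick way to confirm the archive contents, assuming the compression step above has run (a sketch reusing the script's tarfile import and output_filename, not part of the PR):

with tarfile.open(output_filename, "r:gz") as tar:
    members = tar.getnames()
# Members are stored under the atlas name, e.g. "fishatlas/reference.tiff":
print(members[:5])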

# Clean temporary directory and remove it:
for f in download_dir_path.glob("*"):
    f.unlink()
download_dir_path.rmdir()
132 changes: 132 additions & 0 deletions brainatlas_api/atlas_gen/atlas_scripts/fishatlas_utils.py
@@ -0,0 +1,132 @@
from brainatlas_api.utils import retrieve_over_http
import trimesh
from brainatlas_api.structures import StructureTree
import requests
import warnings

BASE_URL = r"https://fishatlas.neuro.mpg.de"


def download_convert_mesh(
    url,
    interm_file_path,
    obj_file_path,
    refstack_axes,
    refstack_flips,
    ref_shape,
    cleanup=True,
):
"""
Parameters
----------
url : str
mesh url for download
interm_file_path : Path obj
path of the intermediate .stl mesh
obj_file_path : Path obj
path of the final .obj object
cleanup : bool (default True)
if True, intermediate file is unlinked

Returns
-------

"""
    retrieve_over_http(url, interm_file_path)

    # Apply to the mesh vertices the same permutation and flips used for
    # the reference stack:
    mesh = trimesh.load(interm_file_path)
    mesh.vertices = mesh.vertices[:, refstack_axes]
    for i, (f, size) in enumerate(zip(refstack_flips, ref_shape)):
        if f:
            mesh.vertices[:, i] = size - mesh.vertices[:, i]

    mesh.export(obj_file_path)

    if cleanup:
        interm_file_path.unlink()


def add_path_inplace(parent):
    """Recursively traverse the regions hierarchy and add to each region
    the full path of its ancestors, in the brainglobe standard format.

    Parameters
    ----------
    parent : dict
        node parsed from the fishatlas website, containing a "sub_regions" key
    """
    for ch in parent["sub_regions"]:
        new_root = parent["structure_id_path"] + [
            parent["id"],
        ]

        ch["structure_id_path"] = new_root

        add_path_inplace(ch)
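A usage sketch for add_path_inplace on a toy hierarchy (the dict below is made up for illustration): each child ends up with the ids of all its ancestors.

tree = {
    "id": 0,
    "structure_id_path": [],
    "sub_regions": [{"id": 1, "sub_regions": [{"id": 2, "sub_regions": []}]}],
}
add_path_inplace(tree)

child = tree["sub_regions"][0]
assert child["structure_id_path"] == [0]
assert child["sub_regions"][0]["structure_id_path"] == [0, 1]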


def collect_all_inplace(
    node,
    traversing_list,
    download_path,
    refstack_axes,
    refstack_flips,
    ref_shape,
):
""" Recursively traverse a region hierarchy, download meshes, and append
regions to a list inplace.

Parameters
----------
node
traversing_list
download_path

Returns
-------

"""

    # Append clean dictionary with brainglobe standard info:
    traversing_list.append(
        {
            "name": node["name"],
            "acronym": node["name"],
            "id": node["id"],
            "rgb_triplet": StructureTree.hex_to_rgb(node["color"]),
            "structure_id_path": node["structure_id_path"],
        }
    )

    # URL for the mesh:
    mesh_url = (
        BASE_URL + node["files"]["file_3D"][:-4].replace("\\", "/") + ".stl"
    )

    # Try to download; if the mesh does not exist, the region is removed:
    try:
        download_convert_mesh(
            mesh_url,
            download_path / "mesh.stl",
            download_path / "{}.obj".format(node["id"]),
            refstack_axes,
            refstack_flips,
            ref_shape,
        )
    except requests.exceptions.ConnectionError:
        # Pop the region from the list:
        message = "No mesh found for {}".format(traversing_list.pop()["name"])
        warnings.warn(message)

    for region in node["sub_regions"]:
        collect_all_inplace(
            region,
            traversing_list,
            download_path,
            refstack_axes,
            refstack_flips,
            ref_shape,
        )