Skip to content

Commit

Permalink
Fully working version of central saving function, untested for tiff filenames
Browse files Browse the repository at this point in the history
  • Loading branch information
vigji committed Jun 1, 2020
1 parent 3e198ca commit e87e1fd
Show file tree
Hide file tree
Showing 4 changed files with 186 additions and 103 deletions.
93 changes: 52 additions & 41 deletions atlas_gen/atlas_scripts/allenbrain_atlas.py
Expand Up @@ -4,32 +4,32 @@

from requests import exceptions
from pathlib import Path
import tempfile
import json
from tqdm import tqdm

import tifffile
import pandas as pd
from atlas_gen.wrapup import wrapup_atlas_from_data
from brainatlas_api import descriptors

# Specify information about the atlas:
RES_UM = 25
ATLAS_NAME = f"allenbrain{RES_UM}um"

# Generated atlas path:
bg_root_dir = Path.home() / "brainglobe"
VERSION = 2
ATLAS_NAME = f"example_mouse"
SPECIES = "Mus musculus"
ATLAS_LINK = "http://www.brain-map.org.com"
CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007"
ORIENTATION = "asl"

# Working path on disk:
bg_root_dir = Path.home() / "brainglobe_workdir" / ATLAS_NAME
bg_root_dir.mkdir(exist_ok=True)

# Temporary folder for nrrd files download:
temp_path = Path(tempfile.mkdtemp())
downloading_path = temp_path / "downloading_path"
downloading_path.mkdir()

# Temporary folder for files before compressing:
uncompr_atlas_path = temp_path / ATLAS_NAME
uncompr_atlas_path.mkdir()
download_dir_path = bg_root_dir / "downloading_path"
download_dir_path.mkdir(exist_ok=True)

# Download annotated and template volume:
#########################################
spacecache = ReferenceSpaceCache(
manifest=downloading_path / "manifest.json",
manifest=download_dir_path / "manifest.json",
# downloaded files are stored relative to here
resolution=RES_UM,
reference_space_key="annotation/ccf_2017"
Expand All @@ -40,9 +40,6 @@
annotated_volume, _ = spacecache.get_annotation_volume()
template_volume, _ = spacecache.get_template_volume()
print("Download completed...")
# Save tiff stacks:
tifffile.imsave(str(uncompr_atlas_path / "reference.tiff"), template_volume)
tifffile.imsave(str(uncompr_atlas_path / "annotated.tiff"), annotated_volume)

# Download structures tree and meshes:
######################################
Expand All @@ -52,40 +49,54 @@
# Find id of set of regions with mesh:
select_set = "Structures whose surfaces are represented by a precomputed mesh"

all_sets = pd.DataFrame(oapi.get_structure_sets())
mesh_set_id = all_sets[all_sets.description == select_set].id.values[0]
mesh_set_ids = [
s["id"]
for s in oapi.get_structure_sets()
if s["description"] == select_set
]

structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids)

structs_with_mesh = struct_tree.get_structures_by_set_id([mesh_set_id])
# Directory for mesh saving:
meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME

meshes_dir = uncompr_atlas_path / "meshes" # directory to save meshes into
space = ReferenceSpaceApi()
for s in structs_with_mesh:
meshes_dict = dict()
for s in tqdm(structs_with_mesh):
name = s["id"]
filename = meshes_dir / f"{name}.obj"
try:
space.download_structure_mesh(
structure_id=s["id"],
ccf_version="annotation/ccf_2017",
file_name=meshes_dir / f"{name}.obj",
file_name=filename,
)
meshes_dict[name] = filename
except (exceptions.HTTPError, ConnectionError):
print(s)

# Loop over structures, remove entries not used in brainglobe:
# Loop over structures, remove entries not used:
for struct in structs_with_mesh:
[struct.pop(k) for k in ["graph_id", "structure_set_ids", "graph_order"]]

with open(uncompr_atlas_path / "structures.json", "w") as f:
json.dump(structs_with_mesh, f)

metadata_dict = {
"name": ATLAS_NAME,
"citation": "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007",
"atlas_link": "www.brain-map.org.com",
"species": "Mus musculus",
"symmetric": True,
"resolution": (RES_UM, RES_UM, RES_UM),
"shape": template_volume.shape,
}

with open(uncompr_atlas_path / "atlas_metadata.json", "w") as f:
json.dump(metadata_dict, f)

# Wrap up, compress, and remove file:
print(f"Finalising atlas")
wrapup_atlas_from_data(
atlas_name=ATLAS_NAME,
atlas_minor_version=VERSION,
citation=CITATION,
atlas_link=ATLAS_LINK,
species=SPECIES,
resolution=(RES_UM,) * 3,
orientation=ORIENTATION,
root_id=997,
reference_stack=template_volume,
annotation_stack=annotated_volume,
structures_list=structs_with_mesh,
meshes_dict=meshes_dict,
working_dir=bg_root_dir,
hemispheres_stack=None,
cleanup_files=False,
compress=True,
)
60 changes: 32 additions & 28 deletions atlas_gen/atlas_scripts/example_atlas.py
Expand Up @@ -4,32 +4,28 @@

from requests import exceptions
from pathlib import Path
import json
import shutil
from tqdm import tqdm

from atlas_gen.wrapup import wrapup_atlas_from_dir
from atlas_gen.wrapup import wrapup_atlas_from_data
from brainatlas_api import descriptors

# Specify information about the atlas:
RES_UM = 100
VERSION = "0.1"
ATLAS_NAME = f"test_allen_{RES_UM}um_v{VERSION}"
SPECIES = "mouse (Mus musculus)"
VERSION = 2
ATLAS_NAME = f"example_mouse"
SPECIES = "Mus musculus"
ATLAS_LINK = "http://www.brain-map.org.com"
CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007"
ORIENTATION = "asl"

# Working path on disk:
bg_root_dir = Path.home() / "brainglobe_workdir"
bg_root_dir = Path.home() / "brainglobe_workdir" / ATLAS_NAME
bg_root_dir.mkdir(exist_ok=True)

# Temporary folder for nrrd files download:
download_dir_path = bg_root_dir / "downloading_path"
download_dir_path.mkdir(exist_ok=True)

# Temporary folder for files before compressing:
uncompr_atlas_path = bg_root_dir / ATLAS_NAME
uncompr_atlas_path.mkdir(exist_ok=True)

# Download annotated and template volume:
#########################################
spacecache = ReferenceSpaceCache(
Expand Down Expand Up @@ -62,37 +58,45 @@
structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids)[:3]

# Directory for mesh saving:
meshes_dir = uncompr_atlas_path / descriptors.MESHES_DIRNAME
meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME

space = ReferenceSpaceApi()
for s in structs_with_mesh:
meshes_dict = dict()
for s in tqdm(structs_with_mesh):
name = s["id"]
filename = meshes_dir / f"{name}.obj"
try:
space.download_structure_mesh(
structure_id=s["id"],
ccf_version="annotation/ccf_2017",
file_name=meshes_dir / f"{name}.obj",
file_name=filename,
)
meshes_dict[name] = filename
except (exceptions.HTTPError, ConnectionError):
print(s)

# Loop over structures, remove entries not used:
for struct in structs_with_mesh:
[struct.pop(k) for k in ["graph_id", "structure_set_ids", "graph_order"]]

with open(uncompr_atlas_path / descriptors.STRUCTURES_FILENAME, "w") as f:
json.dump(structs_with_mesh, f)

# Wrap up, compress, and remove file:
print(f"Saving compressed files at {uncompr_atlas_path.parents[0]}")
wrapup_atlas_from_dir(
uncompr_atlas_path,
CITATION,
ATLAS_LINK,
SPECIES,
(RES_UM,) * 3,
cleanup_files=True,

# Wrap up, compress, and remove file:
print(f"Finalising atlas")
wrapup_atlas_from_data(
atlas_name=ATLAS_NAME,
atlas_minor_version=VERSION,
citation=CITATION,
atlas_link=ATLAS_LINK,
species=SPECIES,
resolution=(RES_UM,) * 3,
orientation=ORIENTATION,
root_id=997,
reference_stack=template_volume,
annotation_stack=annotated_volume,
structures_list=structs_with_mesh,
meshes_dict=meshes_dict,
working_dir=bg_root_dir,
hemispheres_stack=None,
cleanup_files=False,
compress=True,
)

shutil.rmtree(download_dir_path)
17 changes: 12 additions & 5 deletions atlas_gen/metadata_utils.py
Expand Up @@ -16,13 +16,19 @@


def generate_metadata_dict(
name, citation, atlas_link, species, symmetric, resolution, version, shape
name,
citation,
atlas_link,
species,
symmetric,
resolution,
version,
shape,
transformation_mat,
):

# We ask for a rigid naming convention to be followed:
parsename = name.split("_")
assert len(parsename) >= 3
assert re.match("[0-9]+um", parsename[-1])
# Name should be author_species
assert len(name.split("_")) >= 2

# Control version formatting:
assert re.match("[0-9]+\\.[0-9]+", version)
Expand Down Expand Up @@ -56,6 +62,7 @@ def generate_metadata_dict(
resolution=resolution,
version=version,
shape=shape,
trasform_to_bg=tuple([tuple(m) for m in transformation_mat]),
)


Expand Down

0 comments on commit e87e1fd

Please sign in to comment.