diff --git a/atlas_gen/atlas_scripts/allenbrain_atlas.py b/atlas_gen/atlas_scripts/allenbrain_atlas.py index 9b03259..1a6fd17 100644 --- a/atlas_gen/atlas_scripts/allenbrain_atlas.py +++ b/atlas_gen/atlas_scripts/allenbrain_atlas.py @@ -4,32 +4,32 @@ from requests import exceptions from pathlib import Path -import tempfile -import json +from tqdm import tqdm -import tifffile -import pandas as pd +from atlas_gen.wrapup import wrapup_atlas_from_data +from brainatlas_api import descriptors +# Specify information about the atlas: RES_UM = 25 -ATLAS_NAME = f"allenbrain{RES_UM}um" - -# Generated atlas path: -bg_root_dir = Path.home() / "brainglobe" +VERSION = 2 +ATLAS_NAME = f"example_mouse" +SPECIES = "Mus musculus" +ATLAS_LINK = "http://www.brain-map.org.com" +CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" +ORIENTATION = "asl" + +# Working path on disk: +bg_root_dir = Path.home() / "brainglobe_workdir" / ATLAS_NAME bg_root_dir.mkdir(exist_ok=True) # Temporary folder for nrrd files download: -temp_path = Path(tempfile.mkdtemp()) -downloading_path = temp_path / "downloading_path" -downloading_path.mkdir() - -# Temporary folder for files before compressing: -uncompr_atlas_path = temp_path / ATLAS_NAME -uncompr_atlas_path.mkdir() +download_dir_path = bg_root_dir / "downloading_path" +download_dir_path.mkdir(exist_ok=True) # Download annotated and template volume: ######################################### spacecache = ReferenceSpaceCache( - manifest=downloading_path / "manifest.json", + manifest=download_dir_path / "manifest.json", # downloaded files are stored relative to here resolution=RES_UM, reference_space_key="annotation/ccf_2017" @@ -40,9 +40,6 @@ annotated_volume, _ = spacecache.get_annotation_volume() template_volume, _ = spacecache.get_template_volume() print("Download completed...") -# Save tiff stacks: -tifffile.imsave(str(uncompr_atlas_path / "reference.tiff"), template_volume) -tifffile.imsave(str(uncompr_atlas_path / 
"annotated.tiff"), annotated_volume) # Download structures tree and meshes: ###################################### @@ -52,40 +49,54 @@ # Find id of set of regions with mesh: select_set = "Structures whose surfaces are represented by a precomputed mesh" -all_sets = pd.DataFrame(oapi.get_structure_sets()) -mesh_set_id = all_sets[all_sets.description == select_set].id.values[0] +mesh_set_ids = [ + s["id"] + for s in oapi.get_structure_sets() + if s["description"] == select_set +] + +structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids) -structs_with_mesh = struct_tree.get_structures_by_set_id([mesh_set_id]) +# Directory for mesh saving: +meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME -meshes_dir = uncompr_atlas_path / "meshes" # directory to save meshes into space = ReferenceSpaceApi() -for s in structs_with_mesh: +meshes_dict = dict() +for s in tqdm(structs_with_mesh): name = s["id"] + filename = meshes_dir / f"{name}.obj" try: space.download_structure_mesh( structure_id=s["id"], ccf_version="annotation/ccf_2017", - file_name=meshes_dir / f"{name}.obj", + file_name=filename, ) + meshes_dict[name] = filename except (exceptions.HTTPError, ConnectionError): print(s) -# Loop over structures, remove entries not used in brainglobe: +# Loop over structures, remove entries not used: for struct in structs_with_mesh: [struct.pop(k) for k in ["graph_id", "structure_set_ids", "graph_order"]] -with open(uncompr_atlas_path / "structures.json", "w") as f: - json.dump(structs_with_mesh, f) - -metadata_dict = { - "name": ATLAS_NAME, - "citation": "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007", - "atlas_link": "www.brain-map.org.com", - "species": "Mus musculus", - "symmetric": True, - "resolution": (RES_UM, RES_UM, RES_UM), - "shape": template_volume.shape, -} - -with open(uncompr_atlas_path / "atlas_metadata.json", "w") as f: - json.dump(metadata_dict, f) + +# Wrap up, compress, and remove file: +print(f"Finalising atlas") 
+wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=VERSION, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(RES_UM,) * 3, + orientation=ORIENTATION, + root_id=997, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structs_with_mesh, + meshes_dict=meshes_dict, + working_dir=bg_root_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, +) diff --git a/atlas_gen/atlas_scripts/example_atlas.py b/atlas_gen/atlas_scripts/example_atlas.py index c4a6c76..fa0f270 100644 --- a/atlas_gen/atlas_scripts/example_atlas.py +++ b/atlas_gen/atlas_scripts/example_atlas.py @@ -4,32 +4,28 @@ from requests import exceptions from pathlib import Path -import json -import shutil +from tqdm import tqdm -from atlas_gen.wrapup import wrapup_atlas_from_dir +from atlas_gen.wrapup import wrapup_atlas_from_data from brainatlas_api import descriptors # Specify information about the atlas: RES_UM = 100 -VERSION = "0.1" -ATLAS_NAME = f"test_allen_{RES_UM}um_v{VERSION}" -SPECIES = "mouse (Mus musculus)" +VERSION = 2 +ATLAS_NAME = f"example_mouse" +SPECIES = "Mus musculus" ATLAS_LINK = "http://www.brain-map.org.com" CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" +ORIENTATION = "asl" # Working path on disk: -bg_root_dir = Path.home() / "brainglobe_workdir" +bg_root_dir = Path.home() / "brainglobe_workdir" / ATLAS_NAME bg_root_dir.mkdir(exist_ok=True) # Temporary folder for nrrd files download: download_dir_path = bg_root_dir / "downloading_path" download_dir_path.mkdir(exist_ok=True) -# Temporary folder for files before compressing: -uncompr_atlas_path = bg_root_dir / ATLAS_NAME -uncompr_atlas_path.mkdir(exist_ok=True) - # Download annotated and template volume: ######################################### spacecache = ReferenceSpaceCache( @@ -62,17 +58,20 @@ structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids)[:3] # Directory for mesh saving: -meshes_dir 
= uncompr_atlas_path / descriptors.MESHES_DIRNAME +meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME space = ReferenceSpaceApi() -for s in structs_with_mesh: +meshes_dict = dict() +for s in tqdm(structs_with_mesh): name = s["id"] + filename = meshes_dir / f"{name}.obj" try: space.download_structure_mesh( structure_id=s["id"], ccf_version="annotation/ccf_2017", - file_name=meshes_dir / f"{name}.obj", + file_name=filename, ) + meshes_dict[name] = filename except (exceptions.HTTPError, ConnectionError): print(s) @@ -80,19 +79,24 @@ for struct in structs_with_mesh: [struct.pop(k) for k in ["graph_id", "structure_set_ids", "graph_order"]] -with open(uncompr_atlas_path / descriptors.STRUCTURES_FILENAME, "w") as f: - json.dump(structs_with_mesh, f) - -# Wrap up, compress, and remove file: -print(f"Saving compressed files at {uncompr_atlas_path.parents[0]}") -wrapup_atlas_from_dir( - uncompr_atlas_path, - CITATION, - ATLAS_LINK, - SPECIES, - (RES_UM,) * 3, - cleanup_files=True, + +# Wrap up, compress, and remove file: +print(f"Finalising atlas") +wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=VERSION, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(RES_UM,) * 3, + orientation=ORIENTATION, + root_id=997, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structs_with_mesh, + meshes_dict=meshes_dict, + working_dir=bg_root_dir, + hemispheres_stack=None, + cleanup_files=False, compress=True, ) - -shutil.rmtree(download_dir_path) diff --git a/atlas_gen/metadata_utils.py b/atlas_gen/metadata_utils.py index f0dd148..b2055cc 100644 --- a/atlas_gen/metadata_utils.py +++ b/atlas_gen/metadata_utils.py @@ -16,13 +16,19 @@ def generate_metadata_dict( - name, citation, atlas_link, species, symmetric, resolution, version, shape + name, + citation, + atlas_link, + species, + symmetric, + resolution, + version, + shape, + transformation_mat, ): - # We ask for a rigid naming convention to be 
followed: - parsename = name.split("_") - assert len(parsename) >= 3 - assert re.match("[0-9]+um", parsename[-1]) + # Name should be author_species + assert len(name.split("_")) >= 2 # Control version formatting: assert re.match("[0-9]+\\.[0-9]+", version) @@ -56,6 +62,7 @@ def generate_metadata_dict( resolution=resolution, version=version, shape=shape, + trasform_to_bg=tuple([tuple(m) for m in transformation_mat]), ) diff --git a/atlas_gen/wrapup.py b/atlas_gen/wrapup.py index a627749..7bf0546 100644 --- a/atlas_gen/wrapup.py +++ b/atlas_gen/wrapup.py @@ -1,17 +1,21 @@ import json +import tarfile +import shutil +from pathlib import Path + +import tifffile +import bgspace as bgs +import meshio as mio + from atlas_gen.metadata_utils import ( create_metadata_files, generate_metadata_dict, ) from atlas_gen.stacks import save_reference, save_annotation - -# from brainatlas_api.utils import read_tiff, read_json from atlas_gen.structures import check_struct_consistency + from brainatlas_api import descriptors -import tarfile -import shutil -# import bgspace as bgs # This should be changed every time we make changes in the atlas # structure: @@ -41,33 +45,104 @@ def wrapup_atlas_from_data( Parameters ---------- - dest_dir : str or Path object - directory with the atlases and regions description + atlas_name : str + Atlas name in the form author_species. + atlas_minor_version : int or str + Minor version number for this particular atlas. citation : str - citation for the atlas, if unpublished specify "unpublished" + Citation for the atlas, if unpublished specify "unpublished". atlas_link : str - valid URL for the atlas + Valid URL for the atlas. species : str - species name formatted as "CommonName (Genus species)" + Species name formatted as "CommonName (Genus species)". 
resolution : tuple - tree elements, resolution on three axes - cleanup_files : bool + Three elements tuple, resolution on three axes + orientation : + Orientation of the original atlas (tuple describing origin for BGSpace). + root_id : + Id of the root element of the atlas. + reference_stack : str or Path or numpy array + Reference stack for the atlas. If str or Path, will be read with tifffile. + annotation_stack : str or Path or numpy array + Annotation stack for the atlas. If str or Path, will be read with tifffile. + structures_list : list of dict + List of valid dictionary for structures. + meshes_dict : dict + dict of meshio-compatible mesh file paths in the form {struct_id: meshpath} + working_dir : str or Path obj + Path where the atlas folder and compressed file will be generated. + hemispheres_stack : str or Path or numpy array, optional + Hemisphere stack for the atlas. If str or Path, will be read with tifffile. + If none is provided, atlas is assumed to be symmetric + cleanup_files : bool, optional (Default value = False) - compress : bool + compress : bool, optional (Default value = True) """ version = f"{ATLAS_VERSION}.{atlas_minor_version}" - shape = reference_stack.shape # If no hemisphere file is given, assume the atlas is symmetric: symmetric = hemispheres_stack is None + # Instantiate BGSpace obj: + space_convention = bgs.SpaceConvention(orientation) + # Check consistency of structures .json file: check_struct_consistency(structures_list) + atlas_dir_name = atlas_name + f"{resolution[0]}um" + "_v" + version + dest_dir = Path(working_dir) / atlas_dir_name + # exist_ok would be more permissive but error-prone here as there might + # be old files + dest_dir.mkdir() + + # write tiff stacks: + for stack, saving_function in zip( + [reference_stack, annotation_stack], [save_reference, save_annotation] + ): + + if isinstance(stack, str) or isinstance(stack, Path): + stack = tifffile.imread(stack) + + # Reorient stacks if required: + original_shape = 
stack.shape + stack = space_convention.map_stack_to( + descriptors.ATLAS_ORIENTATION, stack, copy=False + ) + shape = stack.shape + + saving_function(stack, dest_dir) + + del stack # necessary? + + # Reorient vertices here as we need to know original stack size in um: + volume_shape = tuple(res * s for res, s in zip(resolution, original_shape)) + + mesh_dest_dir = dest_dir / descriptors.MESHES_DIRNAME + mesh_dest_dir.mkdir() + + for mesh_id, meshfile in meshes_dict.items(): + mesh = mio.read(meshfile) + + # Reorient points: + mesh.points = space_convention.map_points_to( + descriptors.ATLAS_ORIENTATION, mesh.points, shape=volume_shape + ) + + # Save in meshes dir: + mio.write(mesh_dest_dir / f"{mesh_id}.obj", mesh) + + transformation_mat = space_convention.transformation_matrix_to( + descriptors.ATLAS_ORIENTATION, shape=volume_shape + ) + + # save regions list json: + with open(dest_dir / descriptors.STRUCTURES_FILENAME, "w") as f: + json.dump(structures_list, f) + # Finalize metadata dictionary: metadata_dict = generate_metadata_dict( name=atlas_name, @@ -78,23 +153,9 @@ def wrapup_atlas_from_data( resolution=resolution, version=version, shape=shape, + transformation_mat=transformation_mat, ) - atlas_dir_name = atlas_name + "_v" + version - dest_dir = working_dir / atlas_dir_name - dest_dir.mkdir() # exist_ok would be more permissive but error-prone here - - # save regions list json: - with open(dest_dir / descriptors.STRUCTURES_FILENAME, "w") as f: - json.dump(structures_list, f) - - # TODO use BGSpace and reorient stacks; - # TODO use BGSpace and reorient mesh; - # TODO find function to save meshes; - # write tiff stacks: - save_reference(reference_stack, dest_dir) - save_annotation(annotation_stack, dest_dir) - # Create human readable .csv and .txt files: create_metadata_files(dest_dir, metadata_dict, structures_list, root_id)