Skip to content

Commit

Permalink
Merge e87e1fd into faaee88
Browse files Browse the repository at this point in the history
  • Loading branch information
vigji committed Jun 1, 2020
2 parents faaee88 + e87e1fd commit 638dc92
Show file tree
Hide file tree
Showing 7 changed files with 275 additions and 184 deletions.
93 changes: 52 additions & 41 deletions atlas_gen/atlas_scripts/allenbrain_atlas.py
Expand Up @@ -4,32 +4,32 @@

from requests import exceptions
from pathlib import Path
import tempfile
import json
from tqdm import tqdm

import tifffile
import pandas as pd
from atlas_gen.wrapup import wrapup_atlas_from_data
from brainatlas_api import descriptors

# Specify information about the atlas:
RES_UM = 25
ATLAS_NAME = f"allenbrain{RES_UM}um"

# Generated atlas path:
bg_root_dir = Path.home() / "brainglobe"
VERSION = 2
ATLAS_NAME = f"example_mouse"
SPECIES = "Mus musculus"
ATLAS_LINK = "http://www.brain-map.org.com"
CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007"
ORIENTATION = "asl"

# Working path on disk:
bg_root_dir = Path.home() / "brainglobe_workdir" / ATLAS_NAME
bg_root_dir.mkdir(exist_ok=True)

# Temporary folder for nrrd files download:
temp_path = Path(tempfile.mkdtemp())
downloading_path = temp_path / "downloading_path"
downloading_path.mkdir()

# Temporary folder for files before compressing:
uncompr_atlas_path = temp_path / ATLAS_NAME
uncompr_atlas_path.mkdir()
download_dir_path = bg_root_dir / "downloading_path"
download_dir_path.mkdir(exist_ok=True)

# Download annotated and template volume:
#########################################
spacecache = ReferenceSpaceCache(
manifest=downloading_path / "manifest.json",
manifest=download_dir_path / "manifest.json",
# downloaded files are stored relative to here
resolution=RES_UM,
reference_space_key="annotation/ccf_2017"
Expand All @@ -40,9 +40,6 @@
annotated_volume, _ = spacecache.get_annotation_volume()
template_volume, _ = spacecache.get_template_volume()
print("Download completed...")
# Save tiff stacks:
tifffile.imsave(str(uncompr_atlas_path / "reference.tiff"), template_volume)
tifffile.imsave(str(uncompr_atlas_path / "annotated.tiff"), annotated_volume)

# Download structures tree and meshes:
######################################
Expand All @@ -52,40 +49,54 @@
# Find id of set of regions with mesh:
select_set = "Structures whose surfaces are represented by a precomputed mesh"

all_sets = pd.DataFrame(oapi.get_structure_sets())
mesh_set_id = all_sets[all_sets.description == select_set].id.values[0]
mesh_set_ids = [
s["id"]
for s in oapi.get_structure_sets()
if s["description"] == select_set
]

structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids)

structs_with_mesh = struct_tree.get_structures_by_set_id([mesh_set_id])
# Directory for mesh saving:
meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME

meshes_dir = uncompr_atlas_path / "meshes" # directory to save meshes into
space = ReferenceSpaceApi()
for s in structs_with_mesh:
meshes_dict = dict()
for s in tqdm(structs_with_mesh):
name = s["id"]
filename = meshes_dir / f"{name}.obj"
try:
space.download_structure_mesh(
structure_id=s["id"],
ccf_version="annotation/ccf_2017",
file_name=meshes_dir / f"{name}.obj",
file_name=filename,
)
meshes_dict[name] = filename
except (exceptions.HTTPError, ConnectionError):
print(s)

# Loop over structures, remove entries not used in brainglobe:
# Loop over structures, remove entries not used:
for struct in structs_with_mesh:
[struct.pop(k) for k in ["graph_id", "structure_set_ids", "graph_order"]]

with open(uncompr_atlas_path / "structures.json", "w") as f:
json.dump(structs_with_mesh, f)

metadata_dict = {
"name": ATLAS_NAME,
"citation": "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007",
"atlas_link": "www.brain-map.org.com",
"species": "Mus musculus",
"symmetric": True,
"resolution": (RES_UM, RES_UM, RES_UM),
"shape": template_volume.shape,
}

with open(uncompr_atlas_path / "atlas_metadata.json", "w") as f:
json.dump(metadata_dict, f)

# Wrap up, compress, and remove files:
print(f"Finalising atlas")
wrapup_atlas_from_data(
atlas_name=ATLAS_NAME,
atlas_minor_version=VERSION,
citation=CITATION,
atlas_link=ATLAS_LINK,
species=SPECIES,
resolution=(RES_UM,) * 3,
orientation=ORIENTATION,
root_id=997,
reference_stack=template_volume,
annotation_stack=annotated_volume,
structures_list=structs_with_mesh,
meshes_dict=meshes_dict,
working_dir=bg_root_dir,
hemispheres_stack=None,
cleanup_files=False,
compress=True,
)
65 changes: 32 additions & 33 deletions atlas_gen/atlas_scripts/example_atlas.py
Expand Up @@ -4,33 +4,28 @@

from requests import exceptions
from pathlib import Path
import json
import shutil
from tqdm import tqdm

from atlas_gen.stacks import save_anatomy, save_annotation
from atlas_gen.wrapup import wrapup_atlas_from_dir
from atlas_gen.wrapup import wrapup_atlas_from_data
from brainatlas_api import descriptors

# Specify information about the atlas:
RES_UM = 100
VERSION = "0.1"
ATLAS_NAME = f"test_allen_{RES_UM}um_v{VERSION}"
SPECIES = "mouse (Mus musculus)"
VERSION = 2
ATLAS_NAME = f"example_mouse"
SPECIES = "Mus musculus"
ATLAS_LINK = "http://www.brain-map.org.com"
CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007"
ORIENTATION = "asl"

# Working path on disk:
bg_root_dir = Path.home() / "brainglobe_workdir"
bg_root_dir = Path.home() / "brainglobe_workdir" / ATLAS_NAME
bg_root_dir.mkdir(exist_ok=True)

# Temporary folder for nrrd files download:
download_dir_path = bg_root_dir / "downloading_path"
download_dir_path.mkdir(exist_ok=True)

# Temporary folder for files before compressing:
uncompr_atlas_path = bg_root_dir / ATLAS_NAME
uncompr_atlas_path.mkdir(exist_ok=True)

# Download annotated and template volume:
#########################################
spacecache = ReferenceSpaceCache(
Expand All @@ -46,10 +41,6 @@
template_volume, _ = spacecache.get_template_volume()
print("Download completed...")

# Save tiff stacks:
save_anatomy(template_volume, uncompr_atlas_path)
save_annotation(annotated_volume, uncompr_atlas_path)

# Download structures tree and meshes:
######################################
oapi = OntologiesApi() # ontologies
Expand All @@ -67,37 +58,45 @@
structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids)[:3]

# Directory for mesh saving:
meshes_dir = uncompr_atlas_path / descriptors.MESHES_DIRNAME
meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME

space = ReferenceSpaceApi()
for s in structs_with_mesh:
meshes_dict = dict()
for s in tqdm(structs_with_mesh):
name = s["id"]
filename = meshes_dir / f"{name}.obj"
try:
space.download_structure_mesh(
structure_id=s["id"],
ccf_version="annotation/ccf_2017",
file_name=meshes_dir / f"{name}.obj",
file_name=filename,
)
meshes_dict[name] = filename
except (exceptions.HTTPError, ConnectionError):
print(s)

# Loop over structures, remove entries not used:
for struct in structs_with_mesh:
[struct.pop(k) for k in ["graph_id", "structure_set_ids", "graph_order"]]

with open(uncompr_atlas_path / descriptors.STRUCTURES_FILENAME, "w") as f:
json.dump(structs_with_mesh, f)

# Wrap up, compress, and remove file:
print(f"Saving compressed files at {uncompr_atlas_path.parents[0]}")
wrapup_atlas_from_dir(
uncompr_atlas_path,
CITATION,
ATLAS_LINK,
SPECIES,
(RES_UM,) * 3,
cleanup_files=True,

# Wrap up, compress, and remove files:
print(f"Finalising atlas")
wrapup_atlas_from_data(
atlas_name=ATLAS_NAME,
atlas_minor_version=VERSION,
citation=CITATION,
atlas_link=ATLAS_LINK,
species=SPECIES,
resolution=(RES_UM,) * 3,
orientation=ORIENTATION,
root_id=997,
reference_stack=template_volume,
annotation_stack=annotated_volume,
structures_list=structs_with_mesh,
meshes_dict=meshes_dict,
working_dir=bg_root_dir,
hemispheres_stack=None,
cleanup_files=False,
compress=True,
)

shutil.rmtree(download_dir_path)
47 changes: 0 additions & 47 deletions atlas_gen/metadata.py

This file was deleted.

71 changes: 65 additions & 6 deletions atlas_gen/metadata_utils.py
Expand Up @@ -3,12 +3,67 @@
. structures.csv
. README.txt
"""
import re
import json
from datetime import datetime
from brainatlas_api import descriptors

import requests
from requests.exceptions import MissingSchema, InvalidURL, ConnectionError

from brainatlas_api.structures.structure_tree import StructureTree
from brainatlas_api.atlas_gen.structure_json_to_csv import (
convert_structure_json_to_csv,
)
from atlas_gen.structure_json_to_csv import convert_structure_json_to_csv


def generate_metadata_dict(
    name,
    citation,
    atlas_link,
    species,
    symmetric,
    resolution,
    version,
    shape,
    transformation_mat,
):
    """Validate atlas metadata fields and assemble them into a dict.

    Parameters
    ----------
    name : str
        Atlas name; must contain at least one underscore (author_species).
    citation : str
        Citation string; must contain "doi" unless equal to "unpublished".
    atlas_link : str
        URL of the atlas source; checked with a GET request.
    species : str
        Species name.
    symmetric : bool
        Whether the atlas is symmetric.
    resolution : sequence of 3 numbers
        Voxel resolution; coerced to a tuple of floats.
    version : str
        Version string matching "major.minor" (digits only).
    shape : sequence of 3 numbers
        Stack shape; coerced to a tuple of ints.
    transformation_mat : sequence of sequences
        Transformation matrix; coerced to a tuple of tuples.

    Returns
    -------
    dict
        Metadata dictionary ready for JSON serialisation.

    Raises
    ------
    AssertionError
        If any field fails validation.
    InvalidURL
        If `atlas_link` is malformed or unreachable.
    """
    # Name should be author_species (at least two underscore-separated parts):
    assert len(name.split("_")) >= 2

    # Control version formatting (major.minor, digits only):
    assert re.match(r"[0-9]+\.[0-9]+", version)

    # We ask for DOI and correct link only if atlas is published:
    if citation != "unpublished":
        assert "doi" in citation

    # Test url by actually requesting it; chain the original error for context:
    try:
        _ = requests.get(atlas_link)
    except (MissingSchema, InvalidURL, ConnectionError) as e:
        raise InvalidURL(
            "Ensure that the url is valid and formatted correctly!"
        ) from e

    # Enforce correct format for symmetric, resolution and shape:
    assert isinstance(symmetric, bool)
    assert len(resolution) == 3
    assert len(shape) == 3

    # Normalise numeric containers to plain tuples of builtin types
    # so the dict serialises cleanly to JSON:
    resolution = tuple(float(v) for v in resolution)
    shape = tuple(int(v) for v in shape)

    return dict(
        name=name,
        citation=citation,
        atlas_link=atlas_link,
        species=species,
        symmetric=symmetric,
        resolution=resolution,
        version=version,
        shape=shape,
        # NOTE(review): key "trasform_to_bg" is misspelled but kept as-is —
        # consumers of the serialised metadata read this exact key.
        trasform_to_bg=tuple(tuple(m) for m in transformation_mat),
    )


def create_readme(uncompr_atlas_path, metadata_dict, structures):
Expand Down Expand Up @@ -59,7 +114,7 @@ def create_structures_csv(uncompr_atlas_path, root):
)


def create_metadata_files(uncompr_atlas_path, metadata_dict, structures, root):
def create_metadata_files(dest_dir, metadata_dict, structures, root_id):
"""
Automatic creation of
. structures.csv
Expand All @@ -71,5 +126,9 @@ def create_metadata_files(uncompr_atlas_path, metadata_dict, structures, root):
:param metadata_dict: dict with atlas metadata
:param structures: list of dictionaries with structures hierarchical info
"""
create_structures_csv(uncompr_atlas_path, root)
create_readme(uncompr_atlas_path, metadata_dict, structures)
# write metadata dict:
with open(dest_dir / descriptors.METADATA_FILENAME, "w") as f:
json.dump(metadata_dict, f)

create_structures_csv(dest_dir, root_id)
create_readme(dest_dir, metadata_dict, structures)
2 changes: 1 addition & 1 deletion atlas_gen/stacks.py
Expand Up @@ -13,7 +13,7 @@ def write_stack(stack, filename):
tifffile.imsave(str(filename), stack)


def save_anatomy(stack, output_dir):
def save_reference(stack, output_dir):
"""
Parameters
----------
Expand Down

0 comments on commit 638dc92

Please sign in to comment.