Commit: Centralising all atlas saving

vigji committed Jun 1, 2020
1 parent 5df3a13 commit 3e198ca
Showing 6 changed files with 114 additions and 106 deletions.
5 changes: 0 additions & 5 deletions atlas_gen/atlas_scripts/example_atlas.py
@@ -7,7 +7,6 @@
import json
import shutil

from atlas_gen.stacks import save_anatomy, save_annotation
from atlas_gen.wrapup import wrapup_atlas_from_dir
from brainatlas_api import descriptors

@@ -46,10 +45,6 @@
template_volume, _ = spacecache.get_template_volume()
print("Download completed...")

# Save tiff stacks:
save_anatomy(template_volume, uncompr_atlas_path)
save_annotation(annotated_volume, uncompr_atlas_path)

# Download structures tree and meshes:
######################################
oapi = OntologiesApi() # ontologies
47 changes: 0 additions & 47 deletions atlas_gen/metadata.py

This file was deleted.

64 changes: 58 additions & 6 deletions atlas_gen/metadata_utils.py
@@ -3,12 +3,60 @@
. structures.csv
. README.txt
"""
import re
import json
from datetime import datetime
from brainatlas_api import descriptors

import requests
from requests.exceptions import MissingSchema, InvalidURL, ConnectionError

from brainatlas_api.structures.structure_tree import StructureTree
from brainatlas_api.atlas_gen.structure_json_to_csv import (
convert_structure_json_to_csv,
)
from atlas_gen.structure_json_to_csv import convert_structure_json_to_csv


def generate_metadata_dict(
name, citation, atlas_link, species, symmetric, resolution, version, shape
):

# We ask for a rigid naming convention to be followed:
parsename = name.split("_")
assert len(parsename) >= 3
assert re.match("[0-9]+um", parsename[-1])

# Control version formatting:
assert re.match("[0-9]+\\.[0-9]+", version)

# We ask for DOI and correct link only if atlas is published:
if citation != "unpublished":
assert "doi" in citation

# Test url:
try:
_ = requests.get(atlas_link)
except (MissingSchema, InvalidURL, ConnectionError):
raise InvalidURL(
"Ensure that the url is valid and formatted correctly!"
)

# Enforce correct format for symmetric, resolution and shape:
assert type(symmetric) == bool
assert len(resolution) == 3
assert len(shape) == 3

resolution = tuple([float(v) for v in resolution])
shape = tuple(int(v) for v in shape)

return dict(
name=name,
citation=citation,
atlas_link=atlas_link,
species=species,
symmetric=symmetric,
resolution=resolution,
version=version,
shape=shape,
)


def create_readme(uncompr_atlas_path, metadata_dict, structures):
@@ -59,7 +107,7 @@ def create_structures_csv(uncompr_atlas_path, root):
)


def create_metadata_files(uncompr_atlas_path, metadata_dict, structures, root):
def create_metadata_files(dest_dir, metadata_dict, structures, root_id):
"""
Automatic creation of
. structures.csv
@@ -71,5 +119,9 @@ def create_metadata_files(uncompr_atlas_path, metadata_dict, structures, root):
:param metadata_dict: dict with atlas metadata
:param structures: list of dictionaries with structures hierarchical info
"""
create_structures_csv(uncompr_atlas_path, root)
create_readme(uncompr_atlas_path, metadata_dict, structures)
# write metadata dict:
with open(dest_dir / descriptors.METADATA_FILENAME, "w") as f:
json.dump(metadata_dict, f)

create_structures_csv(dest_dir, root_id)
create_readme(dest_dir, metadata_dict, structures)
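
As a usage note, here is a minimal sketch of driving the new generate_metadata_dict / create_metadata_files pair directly. The atlas name, URL, destination path and single-entry structures list are hypothetical, and the sketch assumes the structures .json has to be written to dest_dir first (as wrapup_atlas_from_data does) before create_structures_csv can convert it.

import json
from pathlib import Path

from atlas_gen.metadata_utils import (
    create_metadata_files,
    generate_metadata_dict,
)
from brainatlas_api import descriptors

# Name must end in "<N>um" and version must look like "<major>.<minor>",
# otherwise the assertions in generate_metadata_dict fail:
metadata = generate_metadata_dict(
    name="example_mouse_100um",        # hypothetical atlas name
    citation="unpublished",            # skips the DOI check
    atlas_link="https://example.org",  # must be a reachable URL
    species="Mus musculus",
    symmetric=True,
    resolution=(100.0, 100.0, 100.0),
    version="0.1",
    shape=(132, 80, 114),
)

# Minimal structures list: a single root region.
structures = [
    {
        "name": "root",
        "acronym": "root",
        "id": 997,
        "rgb_triplet": [255, 255, 255],
        "structure_id_path": [997],
    }
]

dest_dir = Path("/tmp/example_mouse_100um_v0.1")  # hypothetical destination
dest_dir.mkdir(parents=True, exist_ok=True)

# create_structures_csv presumably reads the structures .json from dest_dir,
# so write it first, mirroring wrapup_atlas_from_data:
with open(dest_dir / descriptors.STRUCTURES_FILENAME, "w") as f:
    json.dump(structures, f)

create_metadata_files(dest_dir, metadata, structures, root_id=997)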
2 changes: 1 addition & 1 deletion atlas_gen/stacks.py
@@ -13,7 +13,7 @@ def write_stack(stack, filename):
tifffile.imsave(str(filename), stack)


def save_anatomy(stack, output_dir):
def save_reference(stack, output_dir):
"""
Parameters
----------
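
For context, a minimal sketch of how the renamed helpers are meant to be called, with dummy volumes and a hypothetical output directory; wrapup_atlas_from_data in the next file does exactly this with the real stacks.

from pathlib import Path

import numpy as np

from atlas_gen.stacks import save_annotation, save_reference

output_dir = Path("/tmp/example_atlas")  # hypothetical output directory
output_dir.mkdir(parents=True, exist_ok=True)

# Dummy volumes with the dtypes declared in brainatlas_api.descriptors:
reference = np.zeros((10, 20, 30), dtype=np.uint16)
annotation = np.zeros((10, 20, 30), dtype=np.int32)

save_reference(reference, output_dir)    # writes the reference .tiff stack
save_annotation(annotation, output_dir)  # writes the annotation .tiff stack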
99 changes: 52 additions & 47 deletions atlas_gen/wrapup.py
@@ -1,31 +1,47 @@
import json
from brainatlas_api.atlas_gen.metadata_utils import create_metadata_files
from brainatlas_api.utils import read_tiff, read_json
from .metadata import generate_metadata_dict
from .structures import check_struct_consistency
from atlas_gen.metadata_utils import (
create_metadata_files,
generate_metadata_dict,
)
from atlas_gen.stacks import save_reference, save_annotation

# from brainatlas_api.utils import read_tiff, read_json
from atlas_gen.structures import check_struct_consistency
from brainatlas_api import descriptors
import tarfile
import shutil

# import bgspace as bgs

def wrapup_atlas_from_dir(
dir_path,
# This should be changed every time we make changes in the atlas
# structure:
ATLAS_VERSION = 0


def wrapup_atlas_from_data(
atlas_name,
atlas_minor_version,
citation,
atlas_link,
species,
resolution,
orientation,
root_id,
reference_stack,
annotation_stack,
structures_list,
meshes_dict,
working_dir,
hemispheres_stack=None,
cleanup_files=False,
compress=True,
root=997,
):
"""
Check compliance of a folder with atlas standards, write metadata, and if required compress and cleanup.
This function should be used to finalize all atlases as it runs the required
controls.
Finalise an atlas with truly consistent format from all the data.
Parameters
----------
dir_path : str or Path object
dest_dir : str or Path object
directory with the atlases and regions description
citation : str
citation for the atlas, if unpublished specify "unpublished"
@@ -43,36 +59,14 @@ def wrapup_atlas_from_dir(
"""

# Check that all core files are contained:
for element in [
descriptors.STRUCTURES_FILENAME,
descriptors.REFERENCE_FILENAME,
descriptors.ANNOTATION_FILENAME,
]:
assert (dir_path / element).exists()

# Get name and version from dir name - in this way multiple
# specifications are avoided:
parsename = dir_path.name.split("_")

atlas_name = "_".join(parsename[:-1])
version = parsename[-1][1:] # version: v0.0 format

# Read stack shape:
ref_stack = read_tiff(dir_path / descriptors.REFERENCE_FILENAME)
shape = ref_stack.shape

# If no hemisphere file is given, ensure the atlas is symmetric:
if not (dir_path / descriptors.HEMISPHERES_FILENAME).exists():
# assert np.allclose(ref_stack[:, :, :shape[2] // 2],
# np.flip(ref_stack[:, :, -shape[2] // 2:], 2))
symmetric = True
else:
symmetric = False
version = f"{ATLAS_VERSION}.{atlas_minor_version}"
shape = reference_stack.shape

# If no hemisphere file is given, assume the atlas is symmetric:
symmetric = hemispheres_stack is None

# Check consistency of structures .json file:
structures = read_json(dir_path / descriptors.STRUCTURES_FILENAME)
check_struct_consistency(structures)
check_struct_consistency(structures_list)

# Finalize metadata dictionary:
metadata_dict = generate_metadata_dict(
@@ -86,21 +80,32 @@
shape=shape,
)

# write metadata dict:
with open(dir_path / descriptors.METADATA_FILENAME, "w") as f:
json.dump(metadata_dict, f)
atlas_dir_name = atlas_name + "_v" + version
dest_dir = working_dir / atlas_dir_name
dest_dir.mkdir() # exist_ok would be more permissive but error-prone here

# save regions list json:
with open(dest_dir / descriptors.STRUCTURES_FILENAME, "w") as f:
json.dump(structures_list, f)

# TODO use BGSpace and reorient stacks;
# TODO use BGSpace and reorient mesh;
# TODO find function to save meshes;
# write tiff stacks:
save_reference(reference_stack, dest_dir)
save_annotation(annotation_stack, dest_dir)

# Create human readable .csv and .txt files
create_metadata_files(dir_path, metadata_dict, structures, root)
# Create human readable .csv and .txt files:
create_metadata_files(dest_dir, metadata_dict, structures_list, root_id)

# Compress if required:
if compress:
output_filename = dir_path.parent / f"{dir_path.name}.tar.gz"
output_filename = dest_dir.parent / f"{dest_dir.name}.tar.gz"
print(f"Saving compressed atlas data at: {output_filename}")
with tarfile.open(output_filename, "w:gz") as tar:
tar.add(dir_path, arcname=dir_path.name)
tar.add(dest_dir, arcname=dest_dir.name)

# Cleanup if required:
if cleanup_files:
# Clean temporary directory and remove it:
shutil.rmtree(dir_path)
shutil.rmtree(dest_dir)
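
To summarise the new entry point, a minimal sketch of a call to wrapup_atlas_from_data; the volumes, single-entry structures list, mesh path, link and directories are hypothetical placeholders that only illustrate the expected argument shapes.

from pathlib import Path

import numpy as np

from atlas_gen.wrapup import wrapup_atlas_from_data

working_dir = Path("/tmp/atlas_build")  # hypothetical working directory
working_dir.mkdir(parents=True, exist_ok=True)

shape = (132, 80, 114)
reference = np.zeros(shape, dtype=np.uint16)  # dummy reference stack
annotation = np.zeros(shape, dtype=np.int32)  # dummy annotation stack

structures = [
    {
        "name": "root",
        "acronym": "root",
        "id": 997,
        "rgb_triplet": [255, 255, 255],
        "structure_id_path": [997],
    }
]
meshes = {997: working_dir / "997.obj"}  # hypothetical mesh path

wrapup_atlas_from_data(
    atlas_name="example_mouse_100um",  # must end in "<resolution>um"
    atlas_minor_version=0,             # final version becomes f"{ATLAS_VERSION}.0"
    citation="unpublished",
    atlas_link="https://example.org",  # checked with requests.get
    species="Mus musculus",
    resolution=(100.0, 100.0, 100.0),
    orientation="als",                 # matches descriptors.ATLAS_ORIENTATION
    root_id=997,
    reference_stack=reference,
    annotation_stack=annotation,
    structures_list=structures,
    meshes_dict=meshes,
    working_dir=working_dir,
    hemispheres_stack=None,  # None -> atlas is treated as symmetric
    cleanup_files=False,
    compress=True,
)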
3 changes: 3 additions & 0 deletions brainatlas_api/descriptors.py
@@ -35,3 +35,6 @@

REFERENCE_DTYPE = np.uint16
ANNOTATION_DTYPE = np.int32

# Standard orientation origin: Anterior, Left, Superior
ATLAS_ORIENTATION = "als"
