Merge branch 'Deep-MI:dev' into dev
jrussell9000 committed Mar 25, 2024
2 parents 8ca55e4 + fe3f85d commit 1a0ca3d
Showing 55 changed files with 1,030 additions and 1,847 deletions.
63 changes: 63 additions & 0 deletions .github/workflows/doc.yml
@@ -0,0 +1,63 @@
name: doc
concurrency:
  group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
  cancel-in-progress: true
on:
  pull_request:
  push:
    branches: [dev]
  workflow_dispatch:

jobs:
  build:
    timeout-minutes: 10
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          path: ./dev
      - name: Setup Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'
          architecture: 'x64'
      - name: Install package
        run: |
          python -m pip install --progress-bar off --upgrade pip setuptools wheel
          python -m pip install --progress-bar off dev/.[doc]
      - name: Build doc
        run: PYTHONPATH=$PYTHONPATH:./dev TZ=UTC sphinx-build ./dev/doc ./doc-build/dev -W --keep-going
      - name: Upload documentation
        uses: actions/upload-artifact@v4
        with:
          name: doc-dev
          path: ./doc-build/dev

  deploy:
    if: github.event_name == 'push'
    needs: build
    timeout-minutes: 10
    runs-on: ubuntu-latest
    permissions:
      contents: write
    defaults:
      run:
        shell: bash
    steps:
      - name: Download documentation
        uses: actions/download-artifact@v4
        with:
          name: doc-dev
          path: ./doc-dev
      - name: Deploy dev documentation
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./doc-dev
          destination_dir: ./dev
          user_name: 'github-actions[bot]'
          user_email: 'github-actions[bot]@users.noreply.github.com'
78 changes: 0 additions & 78 deletions CODE_OF_CONDUCT.md

This file was deleted.

10 changes: 10 additions & 0 deletions CerebNet/__init__.py
@@ -0,0 +1,10 @@
__all__ = [
    "apply_warp",
    "config",
    "datasets",
    "data_loader",
    "inference",
    "models",
    "run_prediction",
    "utils",
]
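The new __all__ lists make the public submodules explicit and control what a wildcard import exposes. A minimal illustration, assuming the listed submodules import cleanly in the current environment:

from CerebNet import *  # pulls in exactly the submodules named in __all__

print(sorted(__all__))
# ['apply_warp', 'config', 'data_loader', 'datasets', 'inference', 'models', 'run_prediction', 'utils']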
6 changes: 6 additions & 0 deletions CerebNet/config/__init__.py
@@ -15,3 +15,9 @@
 # IMPORTS
 from CerebNet.config.cerebnet import get_cfg_cerebnet
 from CerebNet.config.dataset import get_cfg_dataset
+__all__ = [
+    "cerebnet",
+    "dataset",
+    "get_cfg_cerebnet",
+    "get_cfg_dataset",
+]
10 changes: 5 additions & 5 deletions CerebNet/config/checkpoint_paths.yaml
@@ -1,8 +1,8 @@
-URL:
+url:
 - "https://b2share.fz-juelich.de/api/files/c6cf7bc6-2ae5-4d0e-814d-2a3cf0e1a8c5"
 - "https://zenodo.org/records/10390742/files"

-CKPT:
-  AXIAL: "CerebNet/checkpoints/CerebNet_axial_v1.0.0.pkl"
-  CORONAL: "CerebNet/checkpoints/CerebNet_coronal_v1.0.0.pkl"
-  SAGITTAL: "CerebNet/checkpoints/CerebNet_sagittal_v1.0.0.pkl"
+checkpoint:
+  axial: "checkpoints/CerebNet_axial_v1.0.0.pkl"
+  coronal: "checkpoints/CerebNet_coronal_v1.0.0.pkl"
+  sagittal: "checkpoints/CerebNet_sagittal_v1.0.0.pkl"
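The keys are renamed to lowercase (url, checkpoint) and the checkpoint paths drop the leading CerebNet/ directory. A minimal sketch of reading the renamed keys with PyYAML; this is an illustration only, not the project's actual loader, which is not part of this diff:

import yaml

with open("CerebNet/config/checkpoint_paths.yaml") as f:
    cfg = yaml.safe_load(f)

mirrors = cfg["url"]                     # list of download mirrors
axial_ckpt = cfg["checkpoint"]["axial"]  # "checkpoints/CerebNet_axial_v1.0.0.pkl"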
6 changes: 6 additions & 0 deletions CerebNet/data_loader/__init__.py
@@ -0,0 +1,6 @@
__all__ = [
    "augmentation",
    "data_utils",
    "dataset",
    "loader",
]
8 changes: 4 additions & 4 deletions CerebNet/data_loader/augmentation.py
@@ -333,14 +333,14 @@ def sample_intensity_stats_from_image(
     Parameters
     ----------
-    image : np.ndarray
+    image : array_like
         Image from which to evaluate mean intensity and std deviation.
-    segmentation : np.ndarray
+    segmentation : array_like
         Segmentation of the input image. Must have the same size as image.
-    labels_list : np.ndarray
+    labels_list : array_like
         List of labels for which to evaluate mean and std intensity.
         Can be a sequence, a 1d numpy array, or the path to a 1d numpy array.
-    classes_list : np.ndarray, optional
+    classes_list : array_like, optional
         Enables grouping structures into classes of similar intensity statistics.
         The intensities associated with regrouped labels will contribute to the same
         Gaussian during statistics estimation. Can be a sequence, a 1D numpy array,
7 changes: 5 additions & 2 deletions CerebNet/data_loader/data_utils.py
@@ -15,13 +15,14 @@


 # IMPORTS
-from typing import Literal, TypeVar
+from typing import TypeVar

 import numpy as np
 import torch
 from numpy import typing as npt

-Plane = Literal['axial', 'coronal', 'sagittal']
+from FastSurferCNN.utils import Plane

 AT = TypeVar('AT', np.ndarray, torch.Tensor)

 # CLASSES for final evaluation
@@ -146,6 +147,8 @@
     14, 15, 14,
     16, 16
 ])}
+
+
 # Transformation for mapping
 def transform_axial(vol, coronal2axial=True):
     """
7 changes: 2 additions & 5 deletions CerebNet/data_loader/dataset.py
@@ -11,10 +11,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import time

 # IMPORTS
-from typing import Sequence, Tuple, Literal, get_args as _get_args, TypeVar, Dict
+from typing import Tuple, Literal, TypeVar, Dict
 from numbers import Number

 import nibabel as nib
@@ -25,8 +24,7 @@
 from torch.utils.data.dataset import Dataset
 from torchvision.transforms import Compose

-from CerebNet.data_loader.data_utils import Plane
-from FastSurferCNN.utils import logging
+from FastSurferCNN.utils import logging, Plane
 from FastSurferCNN.data_loader.data_utils import (
     get_thick_slices,
     transform_axial,
Expand All @@ -42,7 +40,6 @@
 LocalizerROI = Dict[ROIKeys, Tuple[int, ...]]

 NT = TypeVar("NT", bound=Number)
-PLANES = _get_args(Plane)

 logger = logging.get_logger(__name__)
6 changes: 6 additions & 0 deletions CerebNet/datasets/__init__.py
@@ -0,0 +1,6 @@
__all__ = [
    "generate_hdf5",
    "load_data",
    "utils",
    "wm_merge_clean",
]
10 changes: 5 additions & 5 deletions CerebNet/datasets/utils.py
@@ -695,11 +695,6 @@ def crop_transform(image: AT, offsets=None, target_shape=None, out: Optional[AT]
         Array to store the cropped image in (optional), can be a view on image for memory-efficiency.
     pad : int, str, default=0
         Padding strategy to use when padding is required, if int, pad with that value (default: zero-pad).
-
-    See Also
-    ---------
-    numpy.pad
-        For additional information refer to numpy.pad function.

     Returns
     -------
@@ -718,6 +713,11 @@ def crop_transform(image: AT, offsets=None, target_shape=None, out: Optional[AT]
     RuntimeError
         If the dimensionality of image, out, offset or target_shape is invalid or inconsistent.
+
+    See Also
+    --------
+    numpy.pad
+        For additional information refer to numpy.pad function.

     Notes
     -----
     Either offsets, target_shape or out must be defined.
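The See Also entry now follows the Raises section and still points to numpy.pad, which is what an integer pad value falls back to. A small, self-contained illustration of that zero-padding behavior in plain numpy (not the FastSurfer code path):

import numpy as np

a = np.arange(4).reshape(2, 2)
# Pad one element of zeros on every side, as an integer pad value of 0 would do.
padded = np.pad(a, pad_width=1, mode="constant", constant_values=0)
print(padded.shape)  # (4, 4)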
4 changes: 2 additions & 2 deletions CerebNet/inference.py
@@ -24,7 +24,7 @@
 from torch.utils.data import DataLoader
 from tqdm import tqdm

-from FastSurferCNN.utils import logging
+from FastSurferCNN.utils import logging, Plane, PLANES
 from FastSurferCNN.utils.threads import get_num_threads
 from FastSurferCNN.utils.mapper import JsonColorLookupTable, TSVLookupTable
 from FastSurferCNN.utils.common import (
@@ -34,7 +34,7 @@
     SerialExecutor,
 )
 from CerebNet.data_loader.augmentation import ToTensorTest
-from CerebNet.data_loader.dataset import SubjectDataset, Plane, PLANES
+from CerebNet.data_loader.dataset import SubjectDataset
 from CerebNet.datasets.utils import crop_transform
 from CerebNet.models.networks import build_model
 from CerebNet.utils import checkpoint as cp
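CerebNet.inference now takes Plane and PLANES from FastSurferCNN.utils instead of CerebNet.data_loader.dataset. Their definition is not part of this diff; judging from the lines removed in data_loader/data_utils.py and dataset.py above, it is presumably along these lines:

from typing import Literal, get_args

Plane = Literal["axial", "coronal", "sagittal"]
PLANES = get_args(Plane)  # ("axial", "coronal", "sagittal")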
4 changes: 4 additions & 0 deletions CerebNet/models/__init__.py
@@ -0,0 +1,4 @@
__all__ = [
    "networks",
    "sub_module",
]
12 changes: 8 additions & 4 deletions CerebNet/models/sub_module.py
@@ -21,8 +21,9 @@
 # Building Blocks
 class CompetitiveDenseBlock(nn.Module):
     """
-    Function to define a competitive dense block comprising
-    of 3 convolutional layers, with BN/ReLU.
+    Define a competitive dense block.
+
+    A dense block consists of 3 convolutional layers, with BN/ReLU.

     Parameters
@@ -35,8 +36,10 @@ class CompetitiveDenseBlock(nn.Module):
         'stride_pool' : 2,
         'num_classes' : 44,
         'kernel_c' : 1,
-        'input' : True}.
+        'input' : True
+    }.
     """
+
     def __init__(self, params, outblock=False, discriminator_block=False):
         """
         Constructor to initialize the Competitive Dense Block.
@@ -162,7 +165,8 @@ class CompetitiveDenseBlockInput(nn.Module):
         'stride_pool' : 2,
         'num_classes' : 44,
         'kernel_c' : 1,
-        'input' : True}.
+        'input' : True
+    }.
     """
     def __init__(self, params):
         """
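The reworked docstrings separate the summary line from the description and close the parameter dict on its own line. As a rough usage sketch, a block is built from such a dict; only the keys 'stride_pool' through 'input' appear in the excerpt above, so the remaining keys and all values below are assumptions for illustration only:

import torch.nn as nn

from CerebNet.models.sub_module import CompetitiveDenseBlock

params = {
    "num_channels": 64,  # assumed, not visible in this diff
    "num_filters": 64,   # assumed
    "kernel_h": 3,       # assumed
    "kernel_w": 3,       # assumed
    "stride_conv": 1,    # assumed
    "pool": 2,           # assumed
    "stride_pool": 2,
    "num_classes": 44,
    "kernel_c": 1,
    "input": True,
}
block = CompetitiveDenseBlock(params)  # an nn.Module assembled from the dict
print(isinstance(block, nn.Module))    # True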
(The remaining changed files in this commit are not shown here.)