Commit 3cedfbd

Merge e677e66 into 5d29b90
willgraf committed Mar 20, 2019
2 parents: 5d29b90 + e677e66
Showing 44 changed files with 46,629 additions and 9,310 deletions.
1 change: 1 addition & 0 deletions .coveragerc
@@ -18,3 +18,4 @@ omit =
deepcell/model_zoo/*
deepcell/datasets/*
deepcell/notebooks/*
deepcell/tracking.py
2 changes: 1 addition & 1 deletion .travis.yml
@@ -28,7 +28,7 @@ install:
# install testing requirements
- pip install pytest pytest-cov==2.5.1 pytest-pep8 coveralls
# install deepcell with setup.py
- python setup.py install
- python setup.py install && python setup.py build_ext --inplace

script:
- python -m pytest --cov=deepcell --pep8 deepcell tests
12 changes: 11 additions & 1 deletion README.md
@@ -7,10 +7,16 @@ DeepCell is neural network library for single cell analysis, written in Python a

DeepCell aids in biological analysis by automatically segmenting and classifying cells in optical microscopy images. The framework processes raw images and uniquely annotates each cell in the image. These annotations can be used to quantify a variety of cellular properties.

Read the documentaiton at [deepcell.readthedocs.io](https://deepcell.readthedocs.io)
Read the documentation at [deepcell.readthedocs.io](https://deepcell.readthedocs.io)

For more information on deploying DeepCell in the cloud [refer to the DeepCell Kiosk documentation](https://deepcell-kiosk.readthedocs.io)

## Examples

Raw Image | Segmented and Tracked
:-------------------------:|:-------------------------:
![](/docs/images/raw.gif) | ![](/docs/images/tracked.gif)

## Getting Started

The fastest way to get started with DeepCell is to run the latest docker image:
@@ -41,6 +47,10 @@ This will start a jupyter session, with several example notebooks detailing vari

* [3D Watershed - Sample Based.ipynb](scripts/watershed/Watershed%20Transform%203D%20Sample%20Based.ipynb)

### Cell Tracking in Live Cell Imaging

* [Tracking Example.ipynb](scripts/tracking/Tracking%20Example.ipynb)

## DeepCell for Developers

DeepCell uses `nvidia-docker` and `tensorflow` to enable GPU processing.
18 changes: 10 additions & 8 deletions deepcell/callbacks.py
@@ -31,7 +31,7 @@

from tensorflow.python.keras.callbacks import Callback

from deepcell.utils.retinanet_anchor_utils import evaluate
from deepcell.utils.retinanet_anchor_utils import evaluate, evaluate_mask


class RedirectModel(Callback):
@@ -121,8 +121,10 @@ def __init__(self,
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}

E = evaluate_mask if self.generator.include_masks else evaluate

# run evaluation
avg_precisions = evaluate(
avg_precisions = E(
self.generator,
self.model,
iou_threshold=self.iou_threshold,
@@ -141,20 +143,20 @@ def on_epoch_end(self, epoch, logs=None):
instances.append(num_annotations)
precisions.append(avg_precision)
if self.weighted_average:
self.mean_ap = sum([a * b for a, b in zip(instances, precisions)])
self.mean_ap = self.mean_ap / sum(instances)
mean_ap = sum([a * b for a, b in zip(instances, precisions)])
mean_ap = mean_ap / sum(instances)
else:
self.mean_ap = sum(precisions) / sum(x > 0 for x in instances)
mean_ap = sum(precisions) / sum(x > 0 for x in instances)

if self.tensorboard is not None and self.tensorboard.writer is not None:
import tensorflow as tf
summary = tf.Summary()
summary_value = summary.value.add()
summary_value.simple_value = self.mean_ap
summary_value.simple_value = mean_ap
summary_value.tag = 'mAP'
self.tensorboard.writer.add_summary(summary, epoch)

logs['mAP'] = self.mean_ap
logs['mAP'] = mean_ap

if self.verbose == 1:
print('mAP: {:.4f}'.format(self.mean_ap))
print('mAP: {:.4f}'.format(mean_ap))
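
The hunk above selects `evaluate_mask` when the generator includes masks and replaces the stored `self.mean_ap` attribute with a local `mean_ap`. For reference, a hedged, standalone sketch of that mAP aggregation follows; the function name and the example inputs are illustrative assumptions, not part of the commit.

# Standalone sketch of the mAP aggregation shown in the diff above.
# `avg_precisions` is assumed to map each class label to a tuple of
# (average_precision, num_annotations), mirroring the evaluate helpers.
def mean_average_precision(avg_precisions, weighted_average=True):
    instances, precisions = [], []
    for _, (avg_precision, num_annotations) in avg_precisions.items():
        instances.append(num_annotations)
        precisions.append(avg_precision)
    if weighted_average:
        # weight each class by its number of annotated instances
        return sum(a * b for a, b in zip(instances, precisions)) / sum(instances)
    # otherwise average only over classes with at least one annotation
    return sum(precisions) / sum(x > 0 for x in instances)

print(mean_average_precision({0: (0.75, 10), 1: (0.50, 30)}))  # -> 0.5625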
1 change: 1 addition & 0 deletions deepcell/datasets/__init__.py
@@ -29,6 +29,7 @@
from __future__ import division
from __future__ import print_function

from deepcell.datasets import tracked
from deepcell.datasets import hek293
from deepcell.datasets import hela_s3
from deepcell.datasets import mousebrain
38 changes: 38 additions & 0 deletions deepcell/datasets/tracked/__init__.py
@@ -0,0 +1,38 @@
# Copyright 2016-2019 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/deepcell-tf/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builtin Datasets"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from deepcell.datasets.tracked import nih_3t3
from deepcell.datasets.tracked import hela_s3
from deepcell.datasets.tracked import hek293

del absolute_import
del division
del print_function
71 changes: 71 additions & 0 deletions deepcell/datasets/tracked/hek293.py
@@ -0,0 +1,71 @@
# Copyright 2016-2019 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/deepcell-tf/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tracked HEK293 Nuclear Dataset with lineage data for each batch."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

try:
from tensorflow.python.keras.utils.data_utils import get_file
except ImportError: # tf v1.9 moves get_file from _impl to keras.utils
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file

from deepcell.utils.data_utils import get_data


def load_tracked_data(path='HEK293.trks', test_size=.2, seed=0):
"""Loads the tracked HEK293 dataset.
# Args:
path: path where to cache the dataset locally
(relative to ~/.keras/datasets).
test_size: fraction of data to reserve as test data
seed: the seed for randomly shuffling the dataset
Returns:
Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
"""
basepath = os.path.expanduser(os.path.join('~', '.keras', 'datasets'))
prefix = path.split(os.path.sep)[:-1]
data_dir = os.path.join(basepath, *prefix) if prefix else basepath
if not os.path.exists(data_dir):
os.makedirs(data_dir)
elif not os.path.isdir(data_dir):
raise IOError('{} exists but is not a directory'.format(data_dir))

path = get_file(path,
origin='https://deepcell-data.s3.amazonaws.com/tracked/HEK293.trks',
file_hash='d19e0fe144633a08d41cf6695e11f72b')

train_dict, test_dict = get_data(path, mode='siamese_daughters',
test_size=test_size, seed=seed)

x_train, y_train = train_dict['X'], train_dict['y']
x_test, y_test = test_dict['X'], test_dict['y']
return (x_train, y_train), (x_test, y_test)
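
A minimal usage sketch of the new loader (not part of the diff): it assumes deepcell is installed with this commit, that the HEK293.trks archive can be downloaded to ~/.keras/datasets, and that the loader returns NumPy arrays as its docstring states.

from deepcell.datasets.tracked import hek293

(x_train, y_train), (x_test, y_test) = hek293.load_tracked_data(test_size=0.2, seed=0)
print('train:', x_train.shape, 'test:', x_test.shape)

The hela_s3 and nih_3t3 modules below expose the same load_tracked_data signature, differing only in the archive they download.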
71 changes: 71 additions & 0 deletions deepcell/datasets/tracked/hela_s3.py
@@ -0,0 +1,71 @@
# Copyright 2016-2019 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/deepcell-tf/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tracked HeLa S3 Nuclear Dataset with lineage data for each batch."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

try:
from tensorflow.python.keras.utils.data_utils import get_file
except ImportError: # tf v1.9 moves get_file from _impl to keras.utils
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file

from deepcell.utils.data_utils import get_data


def load_tracked_data(path='HeLa_S3.trks', test_size=.2, seed=0):
"""Loads the tracked HeLa-S3 dataset.
# Args:
path: path where to cache the dataset locally
(relative to ~/.keras/datasets).
test_size: fraction of data to reserve as test data
seed: the seed for randomly shuffling the dataset
Returns:
Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
"""
basepath = os.path.expanduser(os.path.join('~', '.keras', 'datasets'))
prefix = path.split(os.path.sep)[:-1]
data_dir = os.path.join(basepath, *prefix) if prefix else basepath
if not os.path.exists(data_dir):
os.makedirs(data_dir)
elif not os.path.isdir(data_dir):
raise IOError('{} exists but is not a directory'.format(data_dir))

path = get_file(path,
origin='https://deepcell-data.s3.amazonaws.com/tracked/HeLa_S3.trks',
file_hash='590ee37d3c703cfe029a2e60c9dc777b')

train_dict, test_dict = get_data(path, mode='siamese_daughters',
test_size=test_size, seed=seed)

x_train, y_train = train_dict['X'], train_dict['y']
x_test, y_test = test_dict['X'], test_dict['y']
return (x_train, y_train), (x_test, y_test)
71 changes: 71 additions & 0 deletions deepcell/datasets/tracked/nih_3t3.py
@@ -0,0 +1,71 @@
# Copyright 2016-2019 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/deepcell-tf/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tracked 3T3 Nuclear Dataset from the NIH with lineage data for each batch."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

try:
from tensorflow.python.keras.utils.data_utils import get_file
except ImportError: # tf v1.9 moves get_file from _impl to keras.utils
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file

from deepcell.utils.data_utils import get_data


def load_tracked_data(path='3T3_NIH.trks', test_size=.2, seed=0):
"""Loads the tracked 3T3-NIH dataset.
# Args:
path: path where to cache the dataset locally
(relative to ~/.keras/datasets).
test_size: fraction of data to reserve as test data
seed: the seed for randomly shuffling the dataset
Returns:
Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
"""
basepath = os.path.expanduser(os.path.join('~', '.keras', 'datasets'))
prefix = path.split(os.path.sep)[:-1]
data_dir = os.path.join(basepath, *prefix) if prefix else basepath
if not os.path.exists(data_dir):
os.makedirs(data_dir)
elif not os.path.isdir(data_dir):
raise IOError('{} exists but is not a directory'.format(data_dir))

path = get_file(path,
origin='https://deepcell-data.s3.amazonaws.com/tracked/3T3_NIH.trks',
file_hash='0d90ad370e1cb9655727065ada3ded65')

train_dict, test_dict = get_data(path, mode='siamese_daughters',
test_size=test_size, seed=seed)

x_train, y_train = train_dict['X'], train_dict['y']
x_test, y_test = test_dict['X'], test_dict['y']
return (x_train, y_train), (x_test, y_test)
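
All three tracked loaders share the same cache-directory logic. The short sketch below (illustrative only; the nested filename is an assumption, not the default) traces how a `path` containing a subdirectory resolves to a folder under ~/.keras/datasets; with the default flat filenames, `prefix` is empty and `data_dir` is simply the base directory.

import os

path = os.path.join('tracked', '3T3_NIH.trks')  # hypothetical nested path
basepath = os.path.expanduser(os.path.join('~', '.keras', 'datasets'))
prefix = path.split(os.path.sep)[:-1]  # ['tracked']
data_dir = os.path.join(basepath, *prefix) if prefix else basepath
print(data_dir)  # e.g. ~/.keras/datasets/tracked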
