Permalink
Browse files

Initial commit

  • Loading branch information...
0 parents commit 2d238b4daa92103c826b73b17f08f094867e4aca @Yangqing Yangqing committed Oct 21, 2013
Showing with 9,158 additions and 0 deletions.
  1. +57 −0 .gitignore
  2. +44 −0 LICENSE
  3. +20 −0 README.md
  4. +6 −0 decaf/Makefile
  5. +16 −0 decaf/__init__.py
  6. +152 −0 decaf/_blob.py
  7. +615 −0 decaf/base.py
  8. +74 −0 decaf/benchmarks/test_convolution.py
  9. +82 −0 decaf/benchmarks/test_lena_prediction_pipeline.py
  10. 0 decaf/demos/__init__.py
  11. +33 −0 decaf/demos/demo_convolution.py
  12. +93 −0 decaf/demos/demo_mnist_two_layer_classifier.py
  13. +128 −0 decaf/demos/demo_sparse_autoencoder.py
  14. 0 decaf/demos/imagenet/__init__.py
  15. +133 −0 decaf/demos/imagenet/flask_main.py
  16. +10 −0 decaf/demos/imagenet/flask_main.sh
  17. +13 −0 decaf/demos/imagenet/flask_service_is_down.py
  18. BIN decaf/demos/imagenet/static/favicon.ico
  19. +45 −0 decaf/demos/imagenet/templates/about.html
  20. +100 −0 decaf/demos/imagenet/templates/index.html
  21. 0 decaf/demos/notebooks/__init__.py
  22. +510 −0 decaf/demos/notebooks/lena_imagenet.ipynb
  23. +271 −0 decaf/demos/notebooks/mnist.ipynb
  24. +3 −0 decaf/layers/__init__.py
  25. +205 −0 decaf/layers/convolution.py
  26. +36 −0 decaf/layers/core_layers.py
  27. +21 −0 decaf/layers/cpp/Makefile
  28. +4 −0 decaf/layers/cpp/__init__.py
  29. +270 −0 decaf/layers/cpp/fastpool.cpp
  30. +33 −0 decaf/layers/cpp/fastpool.h
  31. +144 −0 decaf/layers/cpp/im2col.cpp
  32. +28 −0 decaf/layers/cpp/im2col.h
  33. +164 −0 decaf/layers/cpp/local_response_normalization.cpp
  34. +20 −0 decaf/layers/cpp/local_response_normalization.h
  35. +29 −0 decaf/layers/cpp/neuron.cpp
  36. +10 −0 decaf/layers/cpp/neuron.h
  37. +53 −0 decaf/layers/cpp/speedtest_lrn.cpp
  38. +159 −0 decaf/layers/cpp/wrapper.py
  39. 0 decaf/layers/data/__init__.py
  40. +104 −0 decaf/layers/data/cifar.py
  41. +115 −0 decaf/layers/data/cub.py
  42. +55 −0 decaf/layers/data/mnist.py
  43. +27 −0 decaf/layers/data/ndarraydata.py
  44. +146 −0 decaf/layers/deconvolution.py
  45. +65 −0 decaf/layers/dropout.py
  46. +121 −0 decaf/layers/fillers.py
  47. +26 −0 decaf/layers/flatten.py
  48. +108 −0 decaf/layers/group_convolution.py
  49. +31 −0 decaf/layers/identity.py
  50. +59 −0 decaf/layers/im2col.py
  51. +85 −0 decaf/layers/innerproduct.py
  52. +137 −0 decaf/layers/loss.py
  53. +181 −0 decaf/layers/normalize.py
  54. +55 −0 decaf/layers/padding.py
  55. +73 −0 decaf/layers/pooling.py
  56. +48 −0 decaf/layers/puffsampler.py
  57. +75 −0 decaf/layers/regularization.py
  58. +38 −0 decaf/layers/relu.py
  59. +77 −0 decaf/layers/sampler.py
  60. +33 −0 decaf/layers/sigmoid.py
  61. +46 −0 decaf/layers/softmax.py
  62. +3 −0 decaf/opt/__init__.py
  63. +5 −0 decaf/opt/core_solvers.py
  64. +100 −0 decaf/opt/lbfgs_solver.py
  65. +305 −0 decaf/opt/stochastic_solver.py
  66. +303 −0 decaf/puff.py
  67. +3 −0 decaf/scripts/__init__.py
  68. +168 −0 decaf/scripts/imagenet.py
  69. 0 decaf/tests/__init__.py
  70. +7 −0 decaf/tests/nompi/mpi4py.py
  71. +22 −0 decaf/tests/test_all.sh
  72. +93 −0 decaf/tests/unittest_base.py
  73. +65 −0 decaf/tests/unittest_blasdot.py
  74. +35 −0 decaf/tests/unittest_dropout.py
  75. +33 −0 decaf/tests/unittest_fillers.py
  76. +33 −0 decaf/tests/unittest_gradcheck_convolution.py
  77. +33 −0 decaf/tests/unittest_gradcheck_deconvolution.py
  78. +25 −0 decaf/tests/unittest_gradcheck_dropout.py
  79. +40 −0 decaf/tests/unittest_gradcheck_group_convolution.py
  80. +27 −0 decaf/tests/unittest_gradcheck_im2col.py
  81. +45 −0 decaf/tests/unittest_gradcheck_innerproduct.py
  82. +128 −0 decaf/tests/unittest_gradcheck_loss.py
  83. +77 −0 decaf/tests/unittest_gradcheck_normalizer.py
  84. +27 −0 decaf/tests/unittest_gradcheck_padding.py
  85. +30 −0 decaf/tests/unittest_gradcheck_pooling.py
  86. +25 −0 decaf/tests/unittest_gradcheck_relu.py
  87. +29 −0 decaf/tests/unittest_gradcheck_sigmoid.py
  88. +29 −0 decaf/tests/unittest_gradcheck_softmax.py
  89. +25 −0 decaf/tests/unittest_gradcheck_split.py
  90. +22 −0 decaf/tests/unittest_identity.py
  91. +34 −0 decaf/tests/unittest_im2col.py
  92. +124 −0 decaf/tests/unittest_imagenet_pipeline.py
  93. +35 −0 decaf/tests/unittest_layers_innerproduct.py
  94. +57 −0 decaf/tests/unittest_local_response_normalization.py
  95. +65 −0 decaf/tests/unittest_opt.py
  96. +51 −0 decaf/tests/unittest_pooling.py
  97. +153 −0 decaf/tests/unittest_puff.py
  98. +69 −0 decaf/tests/unittest_util_mpi.py
  99. +91 −0 decaf/tests/unittest_util_translator.py
  100. +2 −0 decaf/tests/util/__init__.py
  101. +11 −0 decaf/tests/util/dummydata.py
  102. +20 −0 decaf/tests/util/sleeplayer.py
  103. +3 −0 decaf/util/__init__.py
  104. 0 decaf/util/_data/__init__.py
  105. BIN decaf/util/_data/whitened_images.npz
  106. +49 −0 decaf/util/_mpi_dummy.py
  107. +51 −0 decaf/util/_numpy_blasdot.py
  108. +86 −0 decaf/util/blasdot.py
  109. +212 −0 decaf/util/gradcheck.py
  110. +34 −0 decaf/util/logexp.py
  111. +136 −0 decaf/util/mpi.py
  112. +88 −0 decaf/util/pyvml.py
  113. +24 −0 decaf/util/smalldata.py
  114. +65 −0 decaf/util/timer.py
  115. +41 −0 decaf/util/transform.py
  116. +22 −0 decaf/util/translator/__init__.py
  117. +34 −0 decaf/util/translator/conversions.py
  118. +101 −0 decaf/util/translator/registerer.py
  119. +22 −0 decaf/util/translator/translator_cmrnorm.py
  120. +67 −0 decaf/util/translator/translator_conv.py
  121. +47 −0 decaf/util/translator/translator_fc.py
  122. +25 −0 decaf/util/translator/translator_neuron.py
  123. +40 −0 decaf/util/translator/translator_pool.py
  124. +13 −0 decaf/util/translator/translator_softmax.py
  125. +215 −0 decaf/util/visualize.py
  126. +48 −0 setup.py
@@ -0,0 +1,57 @@
+# decaf stuff
+no_git_*
+*.dat
+*.decafnet
+*.dot
+.DS_Store
+*.mat
+*.npy
+*.pdf
+*.pickle
+*.png
+*.pstats
+*.swp
+*.txt
+
+# Python compiled
+*.py[cod]
+
+# slurm
+slurm-*.out
+
+# C extensions
+a.out
+*.o
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.tox
+nosetests.xml
+
+# Translations
+*.mo
+
+# Mr Developer
+.mr.developer.cfg
+.project
+.pydevproject
@@ -0,0 +1,44 @@
+***************************************************************************
+************************** COPYRIGHT NOTICE *******************************
+***************************************************************************
+
+Copyright © 2013. The Regents of the University of California (Regents).
+All Rights Reserved. Permission to use, copy, modify, and distribute
+this software and its documentation for educational, research, and
+not-for-profit purposes, without fee and without a signed licensing
+agreement, is hereby granted, provided that the above copyright notice,
+this paragraph and the following two paragraphs appear in all copies,
+modifications, and distributions. Contact The Office of Technology
+Licensing, UC Berkeley, 2150 Shattuck Avenue, Suite 510, Berkeley, CA
+94720-1620, (510) 643-7201, for commercial licensing opportunities.
+
+Created by Yangqing Jia and Jeff Donahue, Electrical Engineering and
+Computer Science, University of California, Berkeley.
+
+IN NO EVENT SHALL REGENTS BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS,
+ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
+REGENTS HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY,
+PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO
+PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+
+***************************************************************************
+***************************************************************************
+***************************************************************************
+
+Please cite our technical report if you use Decaf in your research.
+Link: http://arxiv.org/abs/1310.1531
+Bibtex:
+
+@article{donahue2013decaf,
+ title={DeCAF: A Deep Convolutional Activation Feature for Generic Visual Recognition},
+ author={Donahue, Jeff and Jia, Yangqing and Vinyals, Oriol and Hoffman, Judy and Zhang, Ning and Tzeng, Eric and Darrell, Trevor},
+ journal={arXiv preprint arXiv:1310.1531},
+ year={2013}
+}
+
@@ -0,0 +1,20 @@
+Decaf
+=====
+
+Decaf is a framework that implements convolutional neural networks, with the
+goal of being efficient and flexible. It allows one to easily construct a
+network in the form of an arbitrary Directed Acyclic Graph (DAG) and to
+perform end-to-end training.
+
+For more usage check out [the wiki](https://github.com/UCB-ICSI-Vision-Group/decaf-release/wiki).
+
+For the pre-trained imagenet DeCAF feature and its analysis, please see our
+[technical report on arXiv](http://arxiv.org/abs/1310.1531). Please consider
+citing our paper if you use Decaf in your research:
+
+ @article{donahue2013decaf,
+ title={DeCAF: A Deep Convolutional Activation Feature for Generic Visual Recognition},
+ author={Donahue, Jeff and Jia, Yangqing and Vinyals, Oriol and Hoffman, Judy and Zhang, Ning and Tzeng, Eric and Darrell, Trevor},
+ journal={arXiv preprint arXiv:1310.1531},
+ year={2013}
+ }
@@ -0,0 +1,6 @@
# Build the compiled C++ layer extensions (delegates to layers/cpp/Makefile).
all:
	make -C layers/cpp/
# Same build, but using the MKL-enabled targets of the cpp Makefile.
all_mkl:
	make -C layers/cpp/ all_mkl
# Run every test module under tests/ with nosetests in verbose mode.
test:
	nosetests -v tests/*.py
@@ -0,0 +1,16 @@
+"""
+Decaf: a deep convolutional neural networks framework
+=====
+
+Decaf is a framework that implements convolutional neural networks, with the
+goal of being efficient and flexible. It allows one to easily construct a
+network in the form of an arbitrary Directed Acyclic Graph (DAG), and to
+perform end-to-end training in a distributed fashion.
+"""
+
+__author__ = 'Yangqing Jia'
+__email__ = 'jiayq84@gmail.com'
+
+import base
+import layers
+import opt
@@ -0,0 +1,152 @@
+"""The module that implements Blob, the basic component that contains a piece
+of matrix in addition to its gradients.
+"""
+
+import cPickle as pickle
+import numpy as np
+
+
# pylint: disable=R0903
class Blob(object):
    """Blob is the data structure that holds a piece of numpy array as well as
    its gradient so that we can accumulate and pass around data more easily.

    We define two numpy matrices: one is data, which stores the data in the
    current blob; the other is diff (short for difference): when a network
    runs its forward and backward pass, diff will store the gradient value;
    when a solver goes through the blobs, diff will then be replaced with the
    value to update.

    The diff matrix will not be created unless you explicitly run init_diff,
    as many Blobs do not need the gradients to be computed.
    """
    def __init__(self, shape=None, dtype=None, filler=None):
        """Creates a blob, allocating (and filling) data when shape is given.

        Args:
            shape: optional shape; when given, init_data is invoked
                immediately to allocate and fill the data.
            dtype: optional numpy dtype, forwarded to numpy (None picks the
                numpy default, float64).
            filler: optional filler object exposing fill(array); used to
                initialize the data whenever it is (re)allocated.
        """
        self._data = None
        self._diff = None
        self._filler = filler
        if shape is not None:
            self.init_data(shape, dtype)

    @staticmethod
    def blob_like(source_blob):
        """Create a blob that is similar to the source blob (same shape, same
        dtype, and same filler).

        Note: source_blob must already have its data allocated.
        """
        return Blob(source_blob._data.shape, source_blob._data.dtype,
                    source_blob._filler)

    def clear(self):
        """Clears a blob data, dropping both data and diff."""
        self._data = None
        self._diff = None

    def mirror(self, input_array, shape=None):
        """Create the data as a view of the input array. This is useful to
        save space and avoid duplication for data layers.

        Args:
            input_array: a Blob (whose data view is shared) or a numpy array
                (a view of which is taken).
            shape: optional shape assigned to the view in place.
        Returns:
            A view of the (possibly reshaped) data.
        """
        if isinstance(input_array, Blob):
            income_data = input_array.data()
        else:
            income_data = input_array.view()
        # If the shape or dtype changed, the stale diff no longer matches the
        # data and must be discarded.
        if (self.has_data() and (self._data.shape != income_data.shape
                                 or self._data.dtype != income_data.dtype)):
            self._diff = None
        self._data = income_data
        if shape is not None:
            self._data.shape = shape
        return self.data()

    def mirror_diff(self, input_array, shape=None):
        """Create the diff as a view of the input array's diff. This is useful
        to save space and avoid duplication for data layers.

        Args:
            input_array: a Blob (whose diff view is shared) or a numpy array
                (a view of which is taken).
            shape: optional shape assigned to the view in place.
        Returns:
            A view of the (possibly reshaped) diff.
        """
        if isinstance(input_array, Blob):
            self._diff = input_array.diff()
        else:
            self._diff = input_array.view()
        if shape is not None:
            self._diff.shape = shape
        return self.diff()

    def has_data(self):
        """Checks if the blob has data."""
        return self._data is not None

    def data(self):
        """Returns a view of the data, or None if no data has been set."""
        if self.has_data():
            return self._data.view()

    def has_diff(self):
        """Checks if the blob has diff."""
        return self._diff is not None

    def diff(self):
        """Returns a view of the diff, or None if no diff has been set."""
        if self.has_diff():
            return self._diff.view()

    def update(self):
        """Update the data field by SUBTRACTING diff to it.

        Note that diff is often used to store the gradients, and most often
        we will perform MINIMIZATION. This is why we always do subtraction
        here.
        """
        self._data -= self._diff

    def init_data(self, shape, dtype, setdata=True):
        """Initializes the data if necessary. The filler will be always
        called even if no reallocation of data takes place.

        Args:
            shape: the shape the data should have.
            dtype: the numpy dtype the data should have.
            setdata: when True (default), fill the data with the filler, or
                with zeros if no filler is set; when False, leave the (maybe
                freshly allocated, hence uninitialized) memory as-is.
        Returns:
            A view of the data.
        """
        if not(self.has_data() and self._data.shape == shape and \
               self._data.dtype == dtype):
            self._data = np.empty(shape, dtype)
            # Since we changed the data, the old diff has to be discarded.
            self._diff = None
        if setdata:
            if self._filler is not None:
                self._filler.fill(self._data)
            else:
                self._data[:] = 0
        return self.data()

    def init_diff(self, setzero=True):
        """Initialize the diff in the same format as data.

        Args:
            setzero: when True (default), an already-existing diff is reset
                to zero; a freshly allocated diff is always zeroed.
        Returns:
            diff for easy access.
        Raises:
            ValueError: if the data has not been initialized yet.
        """
        if not self.has_data():
            raise ValueError('The data should be initialized first!')
        if self.has_diff():
            if setzero:
                self._diff[:] = 0
        else:
            self._diff = np.zeros_like(self._data)
        return self.diff()

    def swap_data(self, other_blob):
        """Swaps the data between two blobs.

        Raises:
            ValueError: if either blob lacks data, or the shapes/dtypes
                do not match.
        """
        if not(self.has_data() and other_blob.has_data() and
               self._data.dtype == other_blob._data.dtype and
               self._data.shape == other_blob._data.shape):
            raise ValueError('Attempting to swap incompatible blobs.')
        self._data, other_blob._data = other_blob._data, self._data

    def __getstate__(self):
        """When pickling, we will simply store the data field and the
        filler of this blob. We do NOT store the diff, since it is often
        binded to a specific run and does not bear much value.
        """
        return self.data(), self._filler

    def __setstate__(self, state):
        """Recovers the state from a (data, filler) tuple."""
        if state[0] is None:
            # Bug fix: state[1] is the filler, not a shape. Passing it
            # positionally (as the original code did) handed the filler to
            # init_data as the shape, crashing for any data-less blob that
            # carried a filler. Pass it by keyword instead.
            Blob.__init__(self, filler=state[1])
        else:
            Blob.__init__(self, state[0].shape, state[0].dtype, state[1])
            self._data[:] = state[0]
+
Oops, something went wrong.

0 comments on commit 2d238b4

Please sign in to comment.