diff --git a/README.md b/README.md
index 4d5102ec5..ee6eaa9f8 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,8 @@ As the community contributions in Keras-Contrib are tested, used, validated, and
 ---
 ## Installation
 
-For instructions on how to install Keras, see https://keras.io/#installation
+For instructions on how to install Keras,
+see [the Keras installation page](https://keras.io/#installation).
 
 ```shell
 git clone https://www.github.com/keras-team/keras-contrib.git
@@ -23,7 +24,7 @@ Alternatively, using pip:
 sudo pip install git+https://www.github.com/keras-team/keras-contrib.git
 ```
 
-For contributor guidelines see [CONTRIBUTING.md](CONTRIBUTING.md)
+For contributor guidelines see [CONTRIBUTING.md](https://github.com/keras-team/keras-contrib/blob/master/CONTRIBUTING.md)
 
 ---
 ## Example Usage
diff --git a/keras_contrib/layers/core.py b/keras_contrib/layers/core.py
index 5c4b1bc0b..04c252489 100644
--- a/keras_contrib/layers/core.py
+++ b/keras_contrib/layers/core.py
@@ -14,9 +14,6 @@ class CosineDense(Layer):
     """A cosine normalized densely-connected NN layer
 
-    Cosine Normalization: Using Cosine Similarity Instead
-    of Dot Product in Neural Networks
-    https://arxiv.org/pdf/1702.05870.pdf
 
 
     # Example
 
@@ -35,7 +32,7 @@ class CosineDense(Layer):
         # the size of the input anymore:
         model.add(CosineDense(32))
 
-        **Note that a regular Dense layer may work better as the final layer
+        # Note that a regular Dense layer may work better as the final layer
     ```
 
     # Arguments
@@ -46,23 +43,25 @@ class CosineDense(Layer):
             initialization. This parameter is only relevant
            if you don't pass a `weights` argument.
         activation: name of activation function to use
-            (see [activations](../activations.md)),
-            or alternatively, elementwise Theano function.
+            (see [activations](https://keras.io/activations)),
+            or alternatively, elementwise Python function.
            If you don't specify anything, no activation is applied
            (ie. "linear" activation: a(x) = x).
         weights: list of Numpy arrays to set as initial weights.
            The list should have 2 elements, of shape `(input_dim, units)`
            and (units,) for weights and biases respectively.
-        kernel_regularizer: instance of [WeightRegularizer](../regularizers.md)
+        kernel_regularizer: instance of [WeightRegularizer](
+            https://keras.io/regularizers)
            (eg. L1 or L2 regularization), applied to the main weights matrix.
-        bias_regularizer: instance of [WeightRegularizer](../regularizers.md),
-            applied to the bias.
-        activity_regularizer: instance of [ActivityRegularizer](../regularizers.md),
-            applied to the network output.
-        kernel_constraint: instance of the [constraints](../constraints.md) module
+        bias_regularizer: instance of [WeightRegularizer](
+            https://keras.io/regularizers), applied to the bias.
+        activity_regularizer: instance of [ActivityRegularizer](
+            https://keras.io/regularizers), applied to the network output.
+        kernel_constraint: instance of the [constraints](
+            https://keras.io/constraints/) module
            (eg. maxnorm, nonneg), applied to the main weights matrix.
-        bias_constraint: instance of the [constraints](../constraints.md) module,
-            applied to the bias.
+        bias_constraint: instance of the [constraints](
+            https://keras.io/constraints/) module, applied to the bias.
         use_bias: whether to include a bias
            (i.e. make the layer affine rather than linear).
         input_dim: dimensionality of the input (integer). This argument
@@ -78,6 +77,10 @@ class CosineDense(Layer):
         nD tensor with shape: `(nb_samples, ..., units)`.
         For instance, for a 2D input with shape `(nb_samples, input_dim)`,
         the output would have shape `(nb_samples, units)`.
+
+    # References
+        - [Cosine Normalization: Using Cosine Similarity Instead
+          of Dot Product in Neural Networks](https://arxiv.org/pdf/1702.05870.pdf)
     """
 
     def __init__(self, units, kernel_initializer='glorot_uniform',
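For quick reference, a minimal usage sketch of the `CosineDense` layer documented in the diff above. It assumes the standard Keras `Sequential`/`Dense` API and the `keras_contrib.layers` import path used in this repository; the layer sizes, activations, and loss are illustrative only.

```python
from keras.models import Sequential
from keras.layers import Dense
from keras_contrib.layers import CosineDense

model = Sequential()
# As the first layer in a sequential model, input_dim must be specified:
model.add(CosineDense(32, input_dim=16, activation='relu'))
# After the first layer, the input size is inferred automatically:
model.add(CosineDense(32, activation='relu'))
# Per the docstring note, a regular Dense layer may work better as the final layer:
model.add(Dense(10, activation='softmax'))
model.compile(optimizer='sgd', loss='categorical_crossentropy')
```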