Skip to content

Commit

Permalink
Reconstruct module files (#5251)
Browse files Browse the repository at this point in the history
* reconstruct modules and align torch

* format

Co-authored-by: oneflow-ci-bot <69100618+oneflow-ci-bot@users.noreply.github.com>
  • Loading branch information
Flowingsun007 and oneflow-ci-bot committed Jun 21, 2021
1 parent 582677f commit f4ef181
Show file tree
Hide file tree
Showing 12 changed files with 619 additions and 750 deletions.
61 changes: 61 additions & 0 deletions oneflow/python/nn/modules/activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,67 @@ def _softmax_need_transpose(x, axis):
return need_transpose, permute


@oneflow_export("nn.PReLU")
@experimental_api
class PReLU(Module):
    # NOTE: the docstring is a raw string (r"""). The original used a plain
    # string, so `\max` and `\min` were invalid escape sequences
    # (DeprecationWarning today, SyntaxError in future CPython).
    r"""Applies the element-wise function:

    .. math::
        PReLU(x) = \max(0,x) + a * \min(0,x)

    Here :math:`a` is a learnable parameter. When called without arguments,
    `nn.PReLU()` uses a single parameter :math:`a` across all input channels.
    If called with `nn.PReLU(nChannels)`, a separate :math:`a` is used for
    each input channel.

    .. note::
        weight decay should not be used when learning :math:`a` for good
        performance.

    .. note::
        Channel dim is the 2nd dim of input. When input has dims < 2, then
        there is no channel dim and the number of channels = 1.

    Args:
        num_parameters (int): number of :math:`a` to learn.
            Although it takes an int as input, there are only two legitimate
            values: 1, or the number of channels at input. Default: 1
        init (float): the initial value of :math:`a`. Default: 0.25

    Shape:
        - Input: :math:`(N, *)` where `*` means, any number of additional
          dimensions
        - Output: :math:`(N, *)`, same shape as the input

    Attr:
        - weight (Tensor): the learnable weights of shape
          (:attr:`num_parameters`).

    .. code-block:: python

        >>> import numpy as np
        >>> import oneflow.experimental as flow
        >>> flow.enable_eager_execution()
        >>> m = flow.nn.PReLU()
        >>> input = flow.Tensor(np.asarray([[[[1, -2], [3, 4]]]]), dtype=flow.float32)
        >>> print(m(input).numpy())
        [[[[ 1.  -0.5]
           [ 3.   4. ]]]]

    """

    def __init__(self, num_parameters: int = 1, init: float = 0.25) -> None:
        super().__init__()
        self.num_parameters = num_parameters
        # One learnable slope per parameter; shape (num_parameters, 1, 1) so
        # it broadcasts over the spatial dims of an NCHW input.
        self.weight = flow.nn.Parameter(flow.Tensor(num_parameters, 1, 1).fill_(init))
        self.op = flow.builtin_op("prelu").Input("x").Input("alpha").Output("y").Build()

    def forward(self, x):
        # The op only supports a single shared slope or one slope per channel
        # (channel dim is x.shape[1]); anything else is a configuration error.
        assert (
            self.num_parameters == 1 or self.num_parameters == x.shape[1]
        ), f"num_parameters in prelu must be 1 or {x.shape[1]}"
        return self.op(x, self.weight)[0]


@oneflow_export("nn.ReLU")
@experimental_api
class ReLU(Module):
Expand Down
181 changes: 0 additions & 181 deletions oneflow/python/nn/modules/avgpooling.py

This file was deleted.

135 changes: 0 additions & 135 deletions oneflow/python/nn/modules/groupnorm.py

This file was deleted.

Loading

0 comments on commit f4ef181

Please sign in to comment.