Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Leakyrelu with changes #11

Closed
wants to merge 10 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions KerasModeltoJSON.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ def save(self):
def __model_to_JSON(self, model):
# Initialization
conf = model.get_config()
print(conf)
md = {'model_type':'Sequential'}
md['descriptors'] = []

Expand Down Expand Up @@ -316,6 +317,8 @@ def __get_activation(self, layers, layer_dscp):
layers.append({'layer':'ELu'})
elif activation_name == 'hard_sigmoid':
layers.append({'layer':'HardSigmoid'})
elif activation_name == 'leakyrelu':
layers.append({'layer':'LeakyReLu'})
elif activation_name == 'sigmoid':
layers.append({'layer':'Sigmoid'})
elif activation_name == 'softplus':
Expand Down
637 changes: 322 additions & 315 deletions NNSharp/IO/ReaderKerasModel.cs

Large diffs are not rendered by default.

20 changes: 20 additions & 0 deletions NNSharp/KernelDescriptors/LeakyReLu.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace NNSharp.KernelDescriptors
{
    /// <summary>
    /// Descriptor for a LeakyReLU activation layer. Carries the negative-slope
    /// coefficient ("alpha" in Keras) used when the corresponding layer/kernel
    /// is instantiated by the layer factory.
    /// </summary>
    public class LeakyReLu : IKernelDescriptor
    {
        // Negative-slope coefficient; immutable once the descriptor is built.
        private readonly double alpha;

        /// <summary>
        /// Creates a LeakyReLU descriptor with the given negative slope.
        /// </summary>
        /// <param name="alpha">Slope applied to negative inputs.</param>
        public LeakyReLu(double alpha)
        {
            this.alpha = alpha;
        }

        /// <summary>Negative-slope coefficient for the activation.</summary>
        public double Alpha
        {
            get { return alpha; }
        }
    }
}
37 changes: 37 additions & 0 deletions NNSharp/Kernels/CPUKernels/LeakyReLuKernel.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NNSharp.DataTypes;

namespace NNSharp.Kernels.CPUKernels
{
    /// <summary>
    /// CPU kernel computing the LeakyReLU activation element-wise, in place:
    /// f(p) = p for p &gt;= 0, alpha * p otherwise.
    /// </summary>
    [Serializable()]
    public class LeakyReLuKernel : IKernel
    {
        /// <summary>
        /// Applies LeakyReLU to <c>input</c> using the configured
        /// <c>alpha</c>, then exposes the same buffer as <c>output</c>.
        /// </summary>
        public void Execute()
        {
            // Bug fix: the original always applied a hardcoded 0.3 slope,
            // ignoring the alpha supplied via LeakyReLuLayer's constructor.
            LeakyReLuLambda(input, alpha);
            // NOTE(review): output aliases input (in-place activation);
            // confirm this matches the other activation kernels' convention.
            output = input;
        }

        /// <summary>
        /// Applies LeakyReLU with the historical default slope of 0.3
        /// (the Keras LeakyReLU default). Kept for backward compatibility
        /// with existing callers of the one-argument form.
        /// </summary>
        /// <param name="data">Tensor whose elements are transformed in place.</param>
        public static void LeakyReLuLambda(IData data)
        {
            LeakyReLuLambda(data, 0.3);
        }

        /// <summary>
        /// Applies LeakyReLU element-wise, in place, with the given slope.
        /// </summary>
        /// <param name="data">Tensor whose elements are transformed in place.</param>
        /// <param name="alpha">Slope applied to negative inputs.</param>
        public static void LeakyReLuLambda(IData data, double alpha)
        {
            data.ApplyToAll(p =>
            {
                if (p >= 0.0)
                    return p;
                else
                    return alpha * p;
            });
        }

        protected IData input;   // set by the owning layer before Execute()
        protected IData output;  // aliases input after Execute() completes

        // Negative-slope coefficient. Defaults to the Keras default (0.3) so a
        // bare kernel behaves exactly as the original did; LeakyReLuLayer's
        // constructor overrides it with the model's configured value.
        protected double alpha = 0.3;
    }
}
4 changes: 4 additions & 0 deletions NNSharp/NNSharp.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@
<Compile Include="KernelDescriptors\GRU.cs" />
<Compile Include="KernelDescriptors\HardSigmoid.cs" />
<Compile Include="KernelDescriptors\Input2D.cs" />
<Compile Include="KernelDescriptors\LeakyReLu.cs" />
<Compile Include="KernelDescriptors\LSTM.cs" />
<Compile Include="KernelDescriptors\MaxPooling1D.cs" />
<Compile Include="KernelDescriptors\MaxPooling2D.cs" />
Expand Down Expand Up @@ -139,6 +140,7 @@
<Compile Include="Kernels\CPUKernels\GlobalMaxPool2DKernel.cs" />
<Compile Include="Kernels\CPUKernels\GRUKernel.cs" />
<Compile Include="Kernels\CPUKernels\HardSigmoidKernel.cs" />
<Compile Include="Kernels\CPUKernels\LeakyReLuKernel.cs" />
<Compile Include="Kernels\CPUKernels\LSTMKernel.cs" />
<Compile Include="Kernels\CPUKernels\MaxPool1DKernel.cs" />
<Compile Include="Kernels\CPUKernels\MaxPool2DKernel.cs" />
Expand Down Expand Up @@ -188,6 +190,8 @@
<Compile Include="SequentialBased\SequentialLayers\GRULayerFactory.cs" />
<Compile Include="SequentialBased\SequentialLayers\HardSigmoidLayer.cs" />
<Compile Include="SequentialBased\SequentialLayers\HardSigmoidLayerFactory.cs" />
<Compile Include="SequentialBased\SequentialLayers\LeakyReLuLayer.cs" />
<Compile Include="SequentialBased\SequentialLayers\LeakyReLuLayerFactory.cs" />
<Compile Include="SequentialBased\SequentialLayers\LSTMLayer.cs" />
<Compile Include="SequentialBased\SequentialLayers\LSTMLayerFactory.cs" />
<Compile Include="SequentialBased\SequentialLayers\MaxPool1DLayer.cs" />
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ public DeafultAbstractLayerFactory()
factories.Add(new GlobalMaxPool2DLayerFactory());
factories.Add(new GRULayerFactory());
factories.Add(new HardSigmoidLayerFactory());
factories.Add(new LeakyReLuLayerFactory());
factories.Add(new Input2DLayerFactory());
factories.Add(new LSTMLayerFactory());
factories.Add(new MaxPool1DLayerFactory());
Expand Down
46 changes: 46 additions & 0 deletions NNSharp/SequentialBased/SequentialLayers/LeakyReLuLayer.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NNSharp.DataTypes;
using NNSharp.Kernels.CPUKernels;
using static NNSharp.DataTypes.SequentialModelData;
using static NNSharp.DataTypes.Data2D;

namespace NNSharp.SequentialBased.SequentialLayers
{
    /// <summary>
    /// Sequential-model layer wrapping <see cref="LeakyReLuKernel"/>.
    /// Element-wise activation: it has no weights, and its output has the
    /// same shape as its input.
    /// </summary>
    [Serializable()]
    public class LeakyReLuLayer : LeakyReLuKernel, ILayer
    {
        /// <summary>
        /// Creates the layer and forwards the negative-slope coefficient
        /// to the underlying kernel.
        /// </summary>
        /// <param name="alpha">Slope applied to negative inputs.</param>
        public LeakyReLuLayer(double alpha)
        {
            this.alpha = alpha;
        }

        /// <summary>Returns the activation result produced by Execute().</summary>
        public IData GetOutput()
        {
            return output;
        }

        /// <summary>Assigns the tensor the kernel will transform.</summary>
        public void SetInput(IData input)
        {
            this.input = input;
        }

        /// <summary>Activation layers carry no weights; intentionally a no-op.</summary>
        public void SetWeights(IData weights)
        {
            // Nothing to store.
        }

        /// <summary>
        /// Summary entry for model reports. Every dimension is -1 because the
        /// input and output simply mirror the previous layer's output sizes.
        /// </summary>
        public LayerData GetLayerSummary()
        {
            return new LayerData(
                this.ToString(),
                -1, -1, -1, -1, -1,
                -1, -1, -1, -1, -1);
        }
    }
}
23 changes: 23 additions & 0 deletions NNSharp/SequentialBased/SequentialLayers/LeakyReLuLayerFactory.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NNSharp.KernelDescriptors;

namespace NNSharp.SequentialBased.SequentialLayers
{
    /// <summary>
    /// Factory producing a <see cref="LeakyReLuLayer"/> from a
    /// <see cref="LeakyReLu"/> descriptor. Returns null for any other
    /// descriptor type so the next factory in the chain can handle it.
    /// </summary>
    public class LeakyReLuLayerFactory : ILayerFactory
    {
        /// <summary>
        /// Builds the layer when the descriptor matches; otherwise null.
        /// </summary>
        /// <param name="descriptor">Descriptor to inspect.</param>
        public ILayer CreateProduct(IKernelDescriptor descriptor)
        {
            var leaky = descriptor as LeakyReLu;
            if (leaky == null)
            {
                return null;
            }

            return new LeakyReLuLayer(leaky.Alpha);
        }
    }
}
Loading