C# interface
The interface is available here.
- Creating a net
- Training a net
- Network architecture
- Saving and loading layer weights
- Setting and getting layer parameters
- Monitoring gradients and weights
Operators
- Input
- Output
- FullyConnected
- Convolution
- Deconvolution
- Pooling
- LossFunction
- Switch
- Lock
- Summator
- Crop
- Concat
- Resize
- Activation
- BatchNorm
- UserLayer
Examples of use
Creating a Network Architecture
/// <summary>
/// create net
/// </summary>
/// <param name="jnNet"> network architecture in JSON </param>
/// <param name="weightPath"> path to file with weight </param>
public Net(string jnNet = "", string weightPath = ""){
if (jnNet.Length > 0)
createNetJN(jnNet);
if ((net_ != null) && (weightPath.Length > 0))
loadAllWeightFromFile(weightPath);
}
Example:
sn.Net snet = new sn.Net();
snet.addNode("Input", new sn.Input(), "C1")
.addNode("C1", new sn.Convolution(15), "C2")
.addNode("C2", new sn.Convolution(15), "P1")
.addNode("P1", new sn.Pooling(sn.calcMode.type.CUDA), "FC1")
.addNode("FC1", new sn.FullyConnected(128), "FC2")
.addNode("FC2", new sn.FullyConnected(10), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
You can train a network in two ways:
- by calling the single function 'training' (which wraps the C API call 'snTraining')
- or in the standard way: run the forward pass ('forward' / 'snForward'), compute your own error, and pass the gradient back ('backward' / 'snBackward'); a sketch of this option follows the accuracy snippet below.
Let's look at the first option.
/// <summary>
/// cycle forward-backward
/// </summary>
/// <param name="lr"> lerning rate</param>
/// <param name="inTns"> in tensor NCHW(bsz, ch, h, w)</param>
/// <param name="outTns"> out tensor NCHW(bsz, ch, h, w)</param>
/// <param name="targetTns"> target tensor</param>
/// <param name="outAccurate"> accurate error</param>
/// <returns> true - ok</returns>
public bool training(float lr, Tensor inTns, Tensor outTns, Tensor targetTns, ref float outAccurate)
{
if ((net_ == null) && !createNet()) return false;
float accurate = 0;
bool ok = snTraining(net_, lr, inTns.size(), inTns.data(),
outTns.size(), outTns.data(), targetTns.data(), &accurate);
outAccurate = accurate;
return ok;
}
Example:
// training
float accurat = 0;
snet.training(lr, inLayer, outLayer, targetLayer, ref accurat);
accuratSumm += accurat;
Console.WriteLine(k.ToString() + " accurate " + (accuratSumm / (k + 1)).ToString() + " " +
snet.getLastErrorStr());
The function takes a batch of input data and the target values.
It returns the network output and an accuracy estimate for the batch.
Internally (in the library's C++ core) the accuracy is calculated as:
snFloat* targetData = targetTens->getData();
snFloat* outData = outTens->getData();
size_t accCnt = 0, osz = outTens->size().size();
for (size_t i = 0; i < osz; ++i){
if (abs(outData[i] - targetData[i]) < 0.1)
++accCnt;
}
return (accCnt * 1.F) / osz;
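For the second option, here is a minimal sketch. It assumes the wrapper exposes forward(isLern, inTns, outTns) and backward(lr, gradTns) mirroring the C API functions snForward/snBackward (the bool flag and the backward signature are assumptions), and it reuses the tensor and size variables from the full examples below; the gradient is a simple (output - target) placeholder that you would replace with the derivative of your own loss.
sn.Tensor gradLayer = new sn.Tensor(new sn.snLSize(classCnt, 1, 1, batchSz));
// forward pass in training mode
snet.forward(true, inLayer, outLayer);
unsafe
{
    float* outData = outLayer.data();
    float* targetData = targetLayer.data();
    float* gradData = gradLayer.data();
    int total = (int)(classCnt * batchSz);
    // user-defined error gradient: here simply (output - target)
    for (int i = 0; i < total; ++i)
        gradData[i] = outData[i] - targetData[i];
}
// propagate the user gradient back through the net
snet.backward(lr, gradLayer);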
Getting the network architecture as JSON.
/// <summary>
/// architecture of net in json
/// </summary>
/// <returns> jn arch</returns>
public string getArchitecNetJN(){
if ((net_ == null) && !createNet()) return "";
char* arch = null;
bool ok = snGetArchitecNet(net_, &arch);
string ret = "";
if (ok){
ret = Marshal.PtrToStringAnsi((IntPtr)arch);
snFreeResources(null, arch);
}
return ret;
}
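For example, the current architecture can be dumped for inspection:
// print the network architecture as JSON
Console.WriteLine(snet.getArchitecNetJN());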
/// <summary>
/// save all weights to a file
/// </summary>
/// <param name="path"> file path</param>
/// <returns> true - ok</returns>
public bool saveAllWeightToFile(string path)
{
if (net_ == null) return false;
IntPtr cpath = Marshal.StringToHGlobalAnsi(path);
bool ok = snSaveAllWeightToFile(net_, cpath);
Marshal.FreeHGlobal(cpath);
return ok;
}
/// <summary>
/// load all weights from a file
/// </summary>
/// <param name="path">file path</param>
/// <returns>true - ok</returns>
public bool loadAllWeightFromFile(string path){
if ((net_ == null) && !createNet()) return false;
IntPtr cpath = Marshal.StringToHGlobalAnsi(path);
bool ok = snLoadAllWeightFromFile(net_, cpath);
Marshal.FreeHGlobal(cpath);
return ok;
}
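A typical pattern, used in the full examples below, is to try to load previously saved weights before training and to save them again afterwards:
string wpath = "c:/cpp/w.dat";
if (snet.loadAllWeightFromFile(wpath))      // resume from saved weights if the file exists
    Console.WriteLine("Load weight ok path: " + wpath);
else
    Console.WriteLine("Load weight err path: " + wpath);
// ... training ...
if (snet.saveAllWeightToFile(wpath))        // persist the trained weights
    Console.WriteLine("Save weight ok path: " + wpath);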
/// <summary>
/// add node (layer)
/// </summary>
/// <typeparam name="T"> operator type </typeparam>
/// <param name="name"> name node in architecture of net</param>
/// <param name="nd"> tensor node</param>
/// <param name="nextNodes"> next nodes through a space</param>
/// <returns>ref Net</returns>
public Net addNode<T>(string name, T nd, string nextNodes){
nodes_.Add(new node(name, ((IOperator)nd).name(), ((IOperator)nd).getParamsJn(), nextNodes));
return this;
}
/// <summary>
/// update node (layer) parameters
/// </summary>
/// <typeparam name="T"> operator type</typeparam>
/// <param name="name"> name node in architecture of net</param>
/// <param name="nd"> tensor node</param>
/// <returns> true ok</returns>
public bool updateNode<T>(string name, T nd)
{
bool ok = false;
if (net_ != null){
.......
}
/// <summary>
/// get output of node
/// </summary>
/// <param name="name"> name node in architecture of net</param>
/// <param name="output"> output tensor NCHW(bsz, ch, h, w)</param>
/// <returns> true - ok</returns>
public bool getOutputNode(string name, ref Tensor output)
{
if (net_ == null) return false;
.......
}
/// <summary>
/// get weight of node
/// </summary>
/// <param name="name"> name node in architecture of net</param>
/// <param name="outWeight"> weight tensor NCHW(bsz, ch, h, w)</param>
/// <returns> true - ok</returns>
public bool getWeightNode(string name, ref Tensor outWeight)
{
if (net_ == null) return false;
...............
}
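These calls can be used for monitoring during training, e.g. to inspect a layer's output or weights after a step. A minimal sketch, assuming the node name "FC1" exists in the architecture and that a tensor passed by ref is filled (or replaced) by the call:
sn.Tensor fc1Weights = new sn.Tensor(new sn.snLSize(1, 1, 1, 1));
if (snet.getWeightNode("FC1", ref fc1Weights))      // current weights of node "FC1"
    Console.WriteLine("FC1 weights read ok");
sn.Tensor fc1Output = new sn.Tensor(new sn.snLSize(1, 1, 1, 1));
if (snet.getOutputNode("FC1", ref fc1Output))       // output of node "FC1" after the last pass
    Console.WriteLine("FC1 output read ok");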
The Input node receives the user data and passes it further along the chain.
sn.Net snet = new sn.Net();
snet.addNode("Input", new sn.Input(), "FC2")
....
The Output node has no dedicated interface, since none is needed:
for the last node, the next node is simply specified as "Output".
Example:
sn.Net snet = new sn.Net();
snet.addNode("Input", new sn.Input(), "FC1")
.addNode("FC1", new sn.FullyConnected(125), "FC2")
.
.
.addNode("LS", sn::LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
/// <summary>
/// Fully connected layer
/// </summary>
public class FullyConnected : IOperator
{
public uint units; ///< Number of out neurons. !Required parameter [0..)
public active act = new active(active.type.relu); ///< Activation function type. Optional parameter
public optimizer opt = new optimizer(optimizer.type.adam); ///< Optimizer of weights. Optional parameter
public float dropOut = 0.0f; ///< Random disconnection of neurons. Optional parameter [0..1.F]
public batchNormType bnorm = new batchNormType(batchNormType.type.none); ///< Type of batch norm. Optional parameter
public uint gpuDeviceId = 0; ///< GPU Id. Optional parameter
public bool freeze = false; ///< Do not change weights. Optional parameter
public bool useBias= true; ///< +bias. Optional parameter
public weightInit wini = new weightInit(weightInit.type.he); ///< Type of initialization of weights. Optional parameter
public float decayMomentDW = 0.9F; ///< Optimizer of weights moment change. Optional parameter [0..1.F]
public float decayMomentWGr = 0.99F; ///< Optimizer of weights moment change of prev. Optional parameter [0..1.F]
public float lmbRegular = 0.000F; ///< Optimizer of weights l2Norm. Optional parameter [0..1.F]
public float batchNormLr = 0.001F; ///< Learning rate for batch norm coef. Optional parameter [0..)
....
}
The default parameters are specified.
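Because the fields are public, optional parameters can be changed on the object before it is added to the net (the same pattern is used for the output convolution of the UNet example below); the node and layer names here are illustrative:
sn.FullyConnected fc = new sn.FullyConnected(256);                      // required: number of output neurons
fc.act = new sn.active(sn.active.type.sigmoid);                         // override the default relu activation
fc.dropOut = 0.5f;                                                      // enable dropout
fc.bnorm = new sn.batchNormType(sn.batchNormType.type.beforeActive);    // switch on batch norm
snet.addNode("FC1", fc, "FC2");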
/// <summary>
/// Convolution layer
/// </summary>
public class Convolution{
public uint filters; ///< Number of output layers. !Required parameter [0..)
public active act = new active(active.type.relu); ///< Activation function type. Optional parameter
public optimizer opt = new optimizer(optimizer.type.adam); ///< Optimizer of weights. Optional parameter
public float dropOut = 0.0f; ///< Random disconnection of neurons. Optional parameter [0..1.F]
public batchNormType bnorm = new batchNormType(batchNormType.type.none); ///< Type of batch norm. Optional parameter
public uint fWidth = 3; ///< Width of mask. Optional parameter(> 0)
public uint fHeight = 3; ///< Height of mask. Optional parameter(> 0)
public int padding = 0; ///< Padding around the edges. Optional parameter
public uint stride = 1; ///< Mask movement step. Optional parameter(> 0)
public uint dilate = 1; ///< Expansion mask. Optional parameter(> 0)
public uint gpuDeviceId = 0; ///< GPU Id. Optional parameter
public bool freeze = false; ///< Do not change weights. Optional parameter
public bool useBias= true; ///< +bias. Optional parameter
public weightInit wini = new weightInit(weightInit.type.he); ///< Type of initialization of weights. Optional parameter
public float decayMomentDW = 0.9F; ///< Optimizer of weights moment change. Optional parameter [0..1.F]
public float decayMomentWGr = 0.99F; ///< Optimizer of weights moment change of prev. Optional parameter [0..1.F]
public float lmbRegular = 0.001F; ///< Optimizer of weights l2Norm. Optional parameter [0..1.F]
public float batchNormLr = 0.001F; ///< Learning rate for batch norm coef. Optional parameter [0..)
....
The default parameters are specified.
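The same applies to Convolution: set the public fields before adding the node. A sketch (node names are illustrative); the output-size comment uses the usual convolution arithmetic and is only an orientation:
// per spatial dimension: out = (in + 2*padding - dilate*(fWidth - 1) - 1) / stride + 1
sn.Convolution conv = new sn.Convolution(20);      // required: number of output feature maps
conv.fWidth = 5;                                   // 5x5 mask instead of the default 3x3
conv.fHeight = 5;
conv.padding = 2;                                  // keeps the spatial size when stride = 1
snet.addNode("C1", conv, "C2");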
/// <summary>
/// Deconvolution layer
/// </summary>
public class Deconvolution
{
public uint filters; ///< Number of output layers. !Required parameter [0..)
public active act = new active(active.type.relu); ///< Activation function type. Optional parameter
public optimizer opt = new optimizer(optimizer.type.adam); ///< Optimizer of weights. Optional parameter
public float dropOut = 0.0f; ///< Random disconnection of neurons. Optional parameter [0..1.F]
public batchNormType bnorm = new batchNormType(batchNormType.type.none); ///< Type of batch norm. Optional parameter
public uint fWidth = 3; ///< Width of mask. Optional parameter(> 0)
public uint fHeight = 3; ///< Height of mask. Optional parameter(> 0)
public uint stride = 1; ///< Mask movement step. Optional parameter(> 0)
public uint gpuDeviceId = 0; ///< GPU Id. Optional parameter
public bool freeze = false; ///< Do not change weights. Optional parameter
public weightInit wini = new weightInit(weightInit.type.he); ///< Type of initialization of weights. Optional parameter
public float decayMomentDW = 0.9F; ///< Optimizer of weights moment change. Optional parameter [0..1.F]
public float decayMomentWGr = 0.99F; ///< Optimizer of weights moment change of prev. Optional parameter [0..1.F]
public float lmbRegular = 0.001F; ///< Optimizer of weights l2Norm. Optional parameter [0..1.F]
public float batchNormLr = 0.001F; ///< Learning rate for batch norm coef. Optional parameter [0..)
.....
The default parameters are specified.
/// <summary>
/// Pooling layer
/// </summary>
public class Pooling
{
public uint kernel = 2; ///< Square Mask Size. Optional parameter (> 0)
public uint stride = 2; ///< Mask movement step. Optional parameter(> 0)
public pooling pool = new pooling(pooling.type.max); ///< Operator Type. Optional parameter
public uint gpuDeviceId = 0; ///< GPU Id. Optional parameter
....
The default parameters are specified.
If the mask does not completely enter the image, the image automatically extends around the edges.
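With the defaults (kernel = 2, stride = 2, max pooling) each spatial dimension is halved; a larger window can be set through the public fields (node names here are illustrative):
snet.addNode("P1", new sn.Pooling(), "FC1");   // default 2x2 max-pooling, stride 2
sn.Pooling p = new sn.Pooling();
p.kernel = 3;                                  // 3x3 window
p.stride = 3;
snet.addNode("P2", p, "FC2");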
Operator for automatic error calculation.
Depending on the network task being solved, the following loss types are supported:
- "softMaxToCrossEntropy" - for multi-class classification
- "binaryCrossEntropy" - for binary classification
- "regressionMSE" - regression with a least-squares estimate
- "userLoss" - user-defined loss
/// <summary>
/// Error function calculation layer
/// </summary>
public class LossFunction{
public lossType loss;
public LossFunction(lossType.type loss_)
{
loss = new lossType(loss_);
}
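For example, a regression network would end with the MSE loss instead (assuming the enum member matches the "regressionMSE" type listed above):
snet.addNode("LS", new sn.LossFunction(sn.lossType.type.regressionMSE), "Output");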
Operator for transferring data to several nodes at once.
During operation the output path can be changed with net.updateNode().
Data can only be received from one node.
/// <summary>
/// Operator for transferring data to several nodes at once.
/// Data can only be received from one node.
/// </summary>
public class Switch
{
public string nextWay; // next nodes through a space
public Switch(string nextWay_)
{
nextWay = nextWay_;
}
Example:
snet.addNode("Input", new sn.Input(), "SW")
.addNode("SW", new sn.Switch(), "FC1 FC2")
.addNode("FC1", new sn.FullyConnected(10, sn.calcMode.type.CPU), "Sum")
.addNode("FC2", new sn.FullyConnected(10, sn.calcMode.type.CPU), "Sum")
.addNode("Sum", new sn.Summator(), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
Operator to block further calculation at the current location.
During operation the blocking can be changed with net.updateNode().
It is designed to dynamically disconnect parallel branches of the network during operation.
/// <summary>
/// Operator to block further calculation at the current location.
/// It is designed for the ability to dynamically disconnect the parallel
/// branches of the network during operation.
/// </summary>
public class Lock{
public lockType lockTp; ///< Blocking activity. Optional parameter
public Lock(lockType.type lockTp_)
{
lockTp= new lockType(lockTp_);
}
Example:
snet.addNode("Input", new sn.Input(), "SW")
.addNode("SW", new sn.Switch(), "FC1 FC2")
.addNode("FC1", new sn.FullyConnected(10, sn.calcMode.type.CPU), "LC")
.addNode("LC", new sn.Lock(sn.lockType.unlock), "Sum")
.addNode("FC2", new sn.FullyConnected(10, sn.calcMode.type.CPU), "Sum")
.addNode("Sum", new sn.Summator(), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
The operator is designed to combine the values of two layers.
The combination can be performed with one of the following options: "summ", "diff", "mean".
The dimensions of the input layers must be the same.
/// <summary>
/// The operator is designed to combine the values of two layers.
/// The combination can be performed with one of the following options: "summ", "diff", "mean".
/// The dimensions of the input layers must be the same.
/// </summary>
public class Summator{
public summatorType summType;
public Summator(summatorType.type summType_ = summatorType.type.summ)
{
summType = new summatorType(summType_);
}
Example:
using sn = SN_API;
snet.addNode("Input", new sn.Input(), "FC1 FC2")
.addNode("FC1", new sn.FullyConnected(10, sn.calcMode.type.CPU), "Sum")
.addNode("FC2", new sn.FullyConnected(10, sn.calcMode.type.CPU), "Sum")
.addNode("Sum", new sn.Summator(), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
ROI clipping in each image of each channel.
/// <summary>
/// ROI clipping in each image of each channel
/// </summary>
public class Crop{
public rect rct; // region of interest
public Crop(rect rct_){
rct = rct_;
}
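Example (the same call as in the UNet example below; rect describes the region of interest):
snet.addNode("Crop1", new sn.Crop(new sn.rect(0, 0, 487, 487)), "Rsz1");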
The operator concatenates the channels of multiple layers.
/// <summary>
/// The operator concatenates the channels of multiple layers
/// </summary>
public class Concat{
public string sequence; // prev nodes through a space
public Concat(string sequence_){
sequence = sequence_;
}
Example:
snet.addNode("Input", new sn.Input(), "C1 C2")
.addNode("C1", new sn.Convolution(20, sn.calcMode.type.CPU), "R1")
.addNode("R1", new sn.Resize(new sn.diap(0 20), new sn.diap(0 20), "Conc")
.addNode("C2", new sn.Convolution(20, sncalcMode.type.CPU), "R2")
.addNode("R2", new sn.Resize(new sn.diap(0 20),new sn.diap(20 40)), "Conc")
.addNode("Conc", new sn.Concat("R1 R2"), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
Changes the number of channels passed on.
Works in conjunction with "Concat".
/// <summary>
/// Change the number of channels
/// </summary>
public class Resize{
public diap fwdDiap, bwdDiap; // channel ranges for the forward and backward passes
public Resize(diap fwdDiap_, diap bwdDiap_){
fwdDiap = fwdDiap_;
bwdDiap = bwdDiap_;
}
Example:
using sn = SN_API;
sn.Net snet = new sn.Net();
snet.addNode("Input", new sn.Input(), "C1 C2")
.addNode("C1", new sn.Convolution(20, sn.calcMode.type.CPU), "R1")
.addNode("R1", new sn.Resize(new sn.diap(0 20), new sn.diap(0 20), "Conc")
.addNode("C2", new sn.Convolution(20, sn.calcMode.type.CPU), "R2")
.addNode("R2", new sn.Resize(new sn.diap(0 20), new sn.diap(20 40)), "Conc")
.addNode("Conc", new sn.Concat("R1 R2"), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
Activation function operator.
/// <summary>
/// Activation function
/// </summary>
public class Activation : IOperator
{
public active act = new active(active.type.relu); ///< Activation function type. Optional parameter
public Activation(active act_)
{
act = act_;
}
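Example of a standalone activation node (node names are illustrative):
snet.addNode("A1", new sn.Activation(new sn.active(sn.active.type.sigmoid)), "FC2");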
/// <summary>
/// Batch norm
/// </summary>
public class BatchNormLayer : IOperator
{
public batchNormType bnorm = new batchNormType();
public BatchNormLayer(){}
UserLayer: not implemented.
MNIST example:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using System.Drawing;
using System.IO;
using sn = SN_API;
namespace Test
{
class Program
{
static bool loadImage(string imgPath, uint classCnt, List<List<string>> imgName, List<int> imgCntDir)
{
for (int i = 0; i < classCnt; ++i){
string dir = imgPath + i.ToString() + "/";
if (!Directory.Exists(dir)) continue;
imgName.Add(new List<string>());
string[] files = Directory.GetFiles(dir);
foreach (string s in files)
{
imgName[i].Add(s);
}
imgCntDir.Add(files.Count());
}
bool ok = imgCntDir.Count == classCnt;
foreach (int cnt in imgCntDir)
if (cnt == 0) ok = false;
return ok;
}
static void Main(string[] args)
{
sn.Net snet = new sn.Net();
string ver = snet.versionLib();
Console.WriteLine("Version snlib " + ver);
snet.addNode("Input", new sn.Input(), "C1")
.addNode("C1", new sn.Convolution(15, 0), "C2")
.addNode("C2", new sn.Convolution(15, 0), "P1")
.addNode("P1", new sn.Pooling(), "FC1")
.addNode("FC1", new sn.FullyConnected(128), "FC2")
.addNode("FC2", new sn.FullyConnected(10), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
string imgPath = "c://cpp//sunnet//example//mnist//images//";
uint batchSz = 100, classCnt = 10, w = 28, h = 28; float lr = 0.001F;
List<List<string>> imgName = new List<List<string>>();
List<int> imgCntDir = new List<int>(10);
Dictionary<string, Bitmap> images = new Dictionary<string, Bitmap>();
if (!loadImage(imgPath, classCnt, imgName, imgCntDir))
{
Console.WriteLine("Error 'loadImage' path: " + imgPath);
Console.ReadKey();
return;
}
string wpath = "c:/cpp/w.dat";
if (snet.loadAllWeightFromFile(wpath))
Console.WriteLine("Load weight ok path: " + wpath);
else
Console.WriteLine("Load weight err path: " + wpath);
sn.Tensor inLayer = new sn.Tensor(new sn.snLSize(w, h, 1, batchSz));
sn.Tensor targetLayer = new sn.Tensor(new sn.snLSize(classCnt, 1, 1, batchSz));
sn.Tensor outLayer = new sn.Tensor(new sn.snLSize(classCnt, 1, 1, batchSz));
float accuratSumm = 0;
for (int k = 0; k < 1000; ++k){
targetLayer.reset();
Random rnd = new Random();
for (int i = 0; i < batchSz; ++i)
{
// directory
int ndir = rnd.Next(0, (int)classCnt);
while (imgCntDir[ndir] == 0) ndir = rnd.Next(0, (int)classCnt);
// image
int nimg = rnd.Next(0, imgCntDir[ndir]);
// read
Bitmap img;
string fn = imgName[ndir][nimg];
if (images.ContainsKey(fn))
img = images[fn];
else
{
img = new Bitmap(fn);
images.Add(fn, img);
}
unsafe
{
float* refData = inLayer.data() + i * w * h;
int nr = img.Height, nc = img.Width;
System.Drawing.Imaging.BitmapData bmd = img.LockBits(new Rectangle(0, 0, img.Width, img.Height),
System.Drawing.Imaging.ImageLockMode.ReadWrite, img.PixelFormat);
IntPtr pt = bmd.Scan0;
for (int r = 0; r < nr; ++r)
{
for (int c = 0; c < nc; ++c)
{
refData[r * nc + c] = Marshal.ReadByte(pt);
pt += 4;
}
}
img.UnlockBits(bmd);
float* tarData = targetLayer.data() + classCnt * i;
tarData[ndir] = 1;
}
}
// training
float accurat = 0;
snet.training(lr, inLayer, outLayer, targetLayer, ref accurat);
// calc error
int accCnt = 0;
unsafe
{
float* targetData = targetLayer.data();
float* outData = outLayer.data();
int bsz = (int)batchSz;
for (int i = 0; i < bsz; ++i)
{
float* refOutput = outData + i * classCnt;
float maxval = refOutput[0];
int maxOutInx = 0;
for (int j = 1; j < classCnt; ++j){
if (refOutput[j] > maxval){
maxval = refOutput[j];
maxOutInx = j;
}
}
float* refTarget = targetData + i * classCnt;
maxval = refTarget[0];
int maxTargInx = 0;
for (int j = 1; j < classCnt; ++j){
if (refTarget[j] > maxval){
maxval = refTarget[j];
maxTargInx = j;
}
}
if (maxTargInx == maxOutInx)
++accCnt;
}
}
accuratSumm += (float)accCnt / batchSz;
Console.WriteLine(k.ToString() + " accurate " + (accuratSumm / (k + 1)).ToString() + " " +
snet.getLastErrorStr());
}
if (snet.saveAllWeightToFile(wpath))
Console.WriteLine("Save weight ok path: " + wpath);
else
Console.WriteLine("Save weight err path: " + wpath);
Console.ReadKey();
return;
}
}
}
CIFAR-10 example:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using System.Drawing;
using System.IO;
using sn = SN_API;
namespace Test
{
class Program
{
static bool loadImage(string imgPath, uint classCnt, List<List<string>> imgName, List<int> imgCntDir)
{
for (int i = 0; i < classCnt; ++i){
string dir = imgPath + i.ToString() + "/";
if (!Directory.Exists(dir)) continue;
imgName.Add(new List<string>());
string[] files = Directory.GetFiles(dir);
foreach (string s in files)
{
imgName[i].Add(s);
}
imgCntDir.Add(files.Count());
}
bool ok = imgCntDir.Count == classCnt;
foreach (int cnt in imgCntDir)
if (cnt == 0) ok = false;
return ok;
}
static void Main(string[] args)
{
sn.Net snet = new sn.Net();
string ver = snet.versionLib();
Console.WriteLine("Version snlib " + ver);
snet.addNode("Input", new sn.Input(), "C1")
.addNode("C1", new sn.Convolution(15, -1, sn.batchNormType.type.beforeActive), "C2")
.addNode("C2", new sn.Convolution(15, 0, sn.batchNormType.type.beforeActive), "P1")
.addNode("P1", new sn.Pooling(), "C3")
.addNode("C3", new sn.Convolution(25, -1, sn.batchNormType.type.beforeActive), "C4")
.addNode("C4", new sn.Convolution(25, 0, sn.batchNormType.type.beforeActive), "P2")
.addNode("P2", new sn.Pooling(), "C5")
.addNode("C5", new sn.Convolution(25, -1, sn.batchNormType.type.beforeActive), "C6")
.addNode("C6", new sn.Convolution(25, 0, sn.batchNormType.type.beforeActive), "P3")
.addNode("P3", new sn.Pooling(), "FC1")
.addNode("FC1", new sn.FullyConnected(2048), "FC2")
.addNode("FC2", new sn.FullyConnected(128), "FC3")
.addNode("FC3", new sn.FullyConnected(10), "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.softMaxToCrossEntropy), "Output");
string imgPath = "c://cpp//sunnet//example//cifar10//images//";
uint batchSz = 100, classCnt = 10, w = 32, h = 32, d = 3; float lr = 0.001F;
List<List<string>> imgName = new List<List<string>>();
List<int> imgCntDir = new List<int>(10);
Dictionary<string, Bitmap> images = new Dictionary<string, Bitmap>();
if (!loadImage(imgPath, classCnt, imgName, imgCntDir))
{
Console.WriteLine("Error 'loadImage' path: " + imgPath);
Console.ReadKey();
return;
}
string wpath = "c:/cpp/w.dat";
// if (snet.loadAllWeightFromFile(wpath))
// Console.WriteLine("Load weight ok path: " + wpath);
// else
// Console.WriteLine("Load weight err path: " + wpath);
sn.Tensor inLayer = new sn.Tensor(new sn.snLSize(w, h, 3, batchSz));
sn.Tensor targetLayer = new sn.Tensor(new sn.snLSize(classCnt, 1, 1, batchSz));
sn.Tensor outLayer = new sn.Tensor(new sn.snLSize(classCnt, 1, 1, batchSz));
float accuratSumm = 0;
for (int k = 0; k < 1000; ++k){
targetLayer.reset();
Random rnd = new Random();
for (int i = 0; i < batchSz; ++i)
{
// directory
int ndir = rnd.Next(0, (int)classCnt);
while (imgCntDir[ndir] == 0) ndir = rnd.Next(0, (int)classCnt);
// image
int nimg = rnd.Next(0, imgCntDir[ndir]);
// read
Bitmap img;
string fn = imgName[ndir][nimg];
if (images.ContainsKey(fn))
img = images[fn];
else
{
img = new Bitmap(fn);
images.Add(fn, img);
}
unsafe
{
float* refData = inLayer.data() + i * w * h * d;
int nr = img.Height, nc = img.Width;
System.Drawing.Imaging.BitmapData bmd = img.LockBits(new Rectangle(0, 0, img.Width, img.Height),
System.Drawing.Imaging.ImageLockMode.ReadWrite, img.PixelFormat);
IntPtr pt = bmd.Scan0;
for (int r = 0; r < nr; ++r)
{
for (int c = 0; c < nc * 3; c += 3)
{
refData[r * nc + c] = Marshal.ReadByte(pt);
refData[r * nc + c + 1] = Marshal.ReadByte(pt + 1);
refData[r * nc + c + 2] = Marshal.ReadByte(pt + 2);
pt += 3;
}
}
img.UnlockBits(bmd);
float* tarData = targetLayer.data() + classCnt * i;
tarData[ndir] = 1;
}
}
// training
float accurat = 0;
snet.training(lr, inLayer, outLayer, targetLayer, ref accurat);
// calc error
int accCnt = 0;
unsafe
{
float* targetData = targetLayer.data();
float* outData = outLayer.data();
int bsz = (int)batchSz;
for (int i = 0; i < bsz; ++i)
{
float* refOutput = outData + i * classCnt;
float maxval = refOutput[0];
int maxOutInx = 0;
for (int j = 1; j < classCnt; ++j){
if (refOutput[j] > maxval){
maxval = refOutput[j];
maxOutInx = j;
}
}
float* refTarget = targetData + i * classCnt;
maxval = refTarget[0];
int maxTargInx = 0;
for (int j = 1; j < classCnt; ++j){
if (refTarget[j] > maxval){
maxval = refTarget[j];
maxTargInx = j;
}
}
if (maxTargInx == maxOutInx)
++accCnt;
}
}
accuratSumm += (float)accCnt / batchSz;
Console.WriteLine(k.ToString() + " accurate " + (accuratSumm / (k + 1)).ToString() + " " +
snet.getLastErrorStr());
}
if (snet.saveAllWeightToFile(wpath))
Console.WriteLine("Save weight ok path: " + wpath);
else
Console.WriteLine("Save weight err path: " + wpath);
Console.ReadKey();
return;
}
}
}
UNet example:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using System.Drawing;
using System.IO;
using sn = SN_API;
namespace Test
{
class Program
{
static bool loadImage(string imgPath, List<string> imgName)
{
string dir = imgPath;
string[] files = Directory.GetFiles(dir);
foreach (string s in files)
{
imgName.Add(s);
}
return imgName.Count > 0;
}
static void Main(string[] args)
{
sn.Net snet = new sn.Net();
string ver = snet.versionLib();
Console.WriteLine("Version snlib " + ver);
snet.addNode("In", new sn.Input(), "C1")
.addNode("C1", new sn.Convolution(10, -1), "C2")
.addNode("C2", new sn.Convolution(10, 0), "P1 Crop1")
.addNode("Crop1", new sn.Crop(new sn.rect(0, 0, 487, 487)), "Rsz1")
.addNode("Rsz1", new sn.Resize(new sn.diap(0, 10), new sn.diap(0, 10)), "Conc1")
.addNode("P1", new sn.Pooling(), "C3")
.addNode("C3", new sn.Convolution(10, -1), "C4")
.addNode("C4", new sn.Convolution(10, 0), "P2 Crop2")
.addNode("Crop2", new sn.Crop(new sn.rect(0, 0, 247, 247)), "Rsz2")
.addNode("Rsz2", new sn.Resize(new sn.diap(0, 10), new sn.diap(0, 10)), "Conc2")
.addNode("P2", new sn.Pooling(), "C5")
.addNode("C5", new sn.Convolution(10, 0), "C6")
.addNode("C6", new sn.Convolution(10, 0), "DC1")
.addNode("DC1", new sn.Deconvolution(10, 0), "Rsz3")
.addNode("Rsz3", new sn.Resize(new sn.diap(0, 10), new sn.diap(10, 20)), "Conc2")
.addNode("Conc2", new sn.Concat("Rsz2 Rsz3"), "C7")
.addNode("C7", new sn.Convolution(10, 0), "C8")
.addNode("C8", new sn.Convolution(10, 0), "DC2")
.addNode("DC2", new sn.Deconvolution(10, 0), "Rsz4")
.addNode("Rsz4", new sn.Resize(new sn.diap(0, 10), new sn.diap(10, 20)), "Conc1")
.addNode("Conc1", new sn.Concat("Rsz1 Rsz4"), "C9")
.addNode("C9", new sn.Convolution(10, 0), "C10");
sn.Convolution convOut = new sn.Convolution(1, 0);
convOut.act = new sn.active(sn.active.type.sigmoid);
snet.addNode("C10", convOut, "LS")
.addNode("LS", new sn.LossFunction(sn.lossType.type.binaryCrossEntropy), "Output");
string imgPath = "c://cpp//other//sunnet//example//unet//images//";
string targPath = "c://cpp//other//sunnet//example//unet//labels//";
uint batchSz = 3, w = 512, h = 512, wo = 483, ho = 483; float lr = 0.001F;
List<string> imgName = new List<string>();
List<string> targName = new List<string>();
if (!loadImage(imgPath, imgName) ||
!loadImage(targPath, targName))
{
Console.WriteLine("Error 'loadImage' path: " + imgPath);
Console.ReadKey();
return;
}
string wpath = "c:/cpp/w.dat";
// if (snet.loadAllWeightFromFile(wpath))
// Console.WriteLine("Load weight ok path: " + wpath);
// else
// Console.WriteLine("Load weight err path: " + wpath);
sn.Tensor inLayer = new sn.Tensor(new sn.snLSize(w, h, 1, batchSz));
sn.Tensor targetLayer = new sn.Tensor(new sn.snLSize(wo, ho, 1, batchSz));
sn.Tensor outLayer = new sn.Tensor(new sn.snLSize(wo, ho, 1, batchSz));
float accuratSumm = 0;
for (int k = 0; k < 1000; ++k)
{
targetLayer.reset();
Random rnd = new Random();
for (int i = 0; i < batchSz; ++i)
{
// image
int nimg = rnd.Next(0, imgName.Count);
// read
Bitmap img = new Bitmap(imgName[nimg]);
unsafe
{
float* refData = inLayer.data() + i * w * h;
int nr = img.Height, nc = img.Width;
System.Drawing.Imaging.BitmapData bmd = img.LockBits(new Rectangle(0, 0, img.Width, img.Height),
System.Drawing.Imaging.ImageLockMode.ReadWrite, img.PixelFormat);
IntPtr ptData = bmd.Scan0;
for (int r = 0; r < nr; ++r)
{
for (int c = 0; c < nc; ++c)
{
refData[r * nc + c] = Marshal.ReadByte(ptData);
ptData += 4;
}
}
img.UnlockBits(bmd);
Bitmap imgTrg = new Bitmap(new Bitmap(targName[nimg]), new Size((int)wo, (int)ho));
nr = imgTrg.Height; nc = imgTrg.Width;
float* targData = targetLayer.data() + i * wo * ho;
System.Drawing.Imaging.BitmapData bmdTrg = imgTrg.LockBits(new Rectangle(0, 0, nc, nr),
System.Drawing.Imaging.ImageLockMode.ReadWrite, imgTrg.PixelFormat);
IntPtr ptTrg = bmdTrg.Scan0;
for (int r = 0; r < nr; ++r)
{
for (int c = 0; c < nc; ++c)
{
targData[r * nc + c] = (float)(Marshal.ReadByte(ptTrg) / 255.0);
ptTrg += 4;
}
}
imgTrg.UnlockBits(bmdTrg);
}
}
// training
float accurat = 0;
snet.training(lr, inLayer, outLayer, targetLayer, ref accurat);
// calc error
accuratSumm += accurat;
Console.WriteLine(k.ToString() + " accurate " + (accuratSumm / (k + 1)).ToString() + " " +
snet.getLastErrorStr());
}
if (snet.saveAllWeightToFile(wpath))
Console.WriteLine("Save weight ok path: " + wpath);
else
Console.WriteLine("Save weight err path: " + wpath);
Console.ReadKey();
return;
}
}
}
ResNet50 example (inference with pre-trained weights):
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using System.Drawing;
using System.IO;
using sn = SN_API;
using SN_API;
namespace Test
{
class Program
{
static void Main(string[] args)
{
// using python for create file 'resNet50Weights.dat' as:
// CMD: cd c:\cpp\other\sunnet\example\resnet50\
// CMD: python createNet.py
string arch = File.ReadAllText(@"c:\cpp\other\sunnet\example\resnet50\resNet50Struct.json", Encoding.UTF8);
sn.Net snet = new sn.Net(arch, @"c:\cpp\other\sunnet\example\resnet50\resNet50Weights.dat");
if (snet.getLastErrorStr().Count() > 0)
{
Console.WriteLine("Error loadAllWeightFromFile: " + snet.getLastErrorStr());
Console.ReadKey();
return;
}
string imgPath = @"c:\cpp\other\sunnet\example\resnet50\images\elephant.jpg";
int classCnt = 1000, w = 224, h = 224;
sn.Tensor inLayer = new sn.Tensor(new snLSize((UInt64)w, (UInt64)h, 3, 1));
sn.Tensor outLayer = new sn.Tensor(new snLSize((UInt64)classCnt, 1, 1, 1));
// read
Bitmap img = new Bitmap(Image.FromFile(imgPath), new Size(w, h));
unsafe
{
float* refData = inLayer.data();
System.Drawing.Imaging.BitmapData bmd = img.LockBits(new Rectangle(0, 0, img.Width, img.Height),
System.Drawing.Imaging.ImageLockMode.ReadWrite, img.PixelFormat);
// B
IntPtr pt = bmd.Scan0;
for (int r = 0; r < h; ++r)
{
for (int c = 0; c < w; ++c)
{
refData[r * w + c] = Marshal.ReadByte(pt + 3);
pt += 4;
}
}
// G
pt = bmd.Scan0;
refData += h * w;
for (int r = 0; r < h; ++r)
{
for (int c = 0; c < w; ++c)
{
refData[r * w + c] = Marshal.ReadByte(pt + 2);
pt += 4;
}
}
// R
pt = bmd.Scan0;
refData += h * w;
for (int r = 0; r < h; ++r)
{
for (int c = 0; c < w; ++c)
{
refData[r * w + c] = Marshal.ReadByte(pt + 1);
pt += 4;
}
}
img.UnlockBits(bmd);
}
// forward pass (inference)
snet.forward(false, inLayer, outLayer);
float maxval = 0;
int maxOutInx = 0;
unsafe{
float* refOutput = outLayer.data();
maxval = refOutput[0];
for (int j = 1; j < classCnt; ++j){
if (refOutput[j] > maxval){
maxval = refOutput[j];
maxOutInx = j;
}
}
}
// for check: c:\cpp\other\sunnet\example\resnet50\imagenet_class_index.json
Console.WriteLine("inx " + maxOutInx.ToString() + " accurate " + maxval.ToString() + " " + snet.getLastErrorStr());
Console.ReadKey();
return;
}
}
}