public void Test_Input2D_NullInput()
{
    Data2D data = null;
    Input2DLayer inp = new Input2DLayer();
    inp.SetInput(data);
}
public void Test_AvgPool2D_Null_Input()
{
    Data2D data = null;
    AvgPool2DLayer pool = new AvgPool2DLayer(1, 1, 1, 1, 2, 2);
    pool.SetInput(data);
}
public void Test_Reshape2D_WrongSizes()
{
    Data2D data = new Data2D(2, 3, 5, 2);
    Reshape2DLayer res = new Reshape2DLayer(1, 2, 1, 4);
    res.SetInput(data);
}
public void Test_MaxPool1D_Execute()
{
    // Initialize data.
    Data2D data = new Data2D(1, 3, 2, 1);
    data[0, 0, 0, 0] = 1; data[0, 1, 0, 0] = 2; data[0, 2, 0, 0] = 0;
    data[0, 0, 1, 0] = 3; data[0, 1, 1, 0] = 4; data[0, 2, 1, 0] = 0;

    MaxPool1DLayer pool = new MaxPool1DLayer(0, 1, 2);
    pool.SetInput(data);
    pool.Execute();
    Data2D output = pool.GetOutput() as Data2D;

    // Checking sizes.
    Dimension dim = output.GetDimension();
    Assert.AreEqual(dim.b, 1);
    Assert.AreEqual(dim.c, 2);
    Assert.AreEqual(dim.h, 1);
    Assert.AreEqual(dim.w, 2);

    // Checking calculation.
    Assert.AreEqual(output[0, 0, 0, 0], 2.0, 0.0000001);
    Assert.AreEqual(output[0, 1, 0, 0], 2.0, 0.0000001);
    Assert.AreEqual(output[0, 0, 1, 0], 4.0, 0.0000001);
    Assert.AreEqual(output[0, 1, 1, 0], 4.0, 0.0000001);
}
public void Test_ReLu_Execute()
{
    relu = new ReLuLayer();

    Data2D data = new Data2D(2, 3, 1, 1);
    data[0, 0, 0, 0] = 4; data[0, 1, 0, 0] = 2; data[0, 2, 0, 0] = -2;
    data[1, 0, 0, 0] = 3; data[1, 1, 0, 0] = -1; data[1, 2, 0, 0] = -3;

    relu.SetInput(data);
    relu.Execute();
    Data2D output = relu.GetOutput() as Data2D;

    Assert.AreEqual(output[0, 0, 0, 0], 4.0, 0.00000001);
    Assert.AreEqual(output[0, 1, 0, 0], 2.0, 0.00000001);
    Assert.AreEqual(output[0, 2, 0, 0], 0.0, 0.00000001);
    Assert.AreEqual(output[1, 0, 0, 0], 3.0, 0.00000001);
    Assert.AreEqual(output[1, 1, 0, 0], 0.0, 0.00000001);
    Assert.AreEqual(output[1, 2, 0, 0], 0.0, 0.00000001);
}
public void Test_Cropping2D_Negative_Trim()
{
    Data2D data = new Data2D(8, 4, 3, 5);
    Cropping2DLayer crop = new Cropping2DLayer(4, -5, -1, 1);
    crop.SetInput(data);
}
public double[] Evaluate(Bitmap img)
{
    // Lazily load the Keras model the first time Evaluate is called.
    if (model == null)
    {
        var reader = new ReaderKerasModel(cnn_nn);
        model = reader.GetSequentialExecutor();
    }

    // Convert the bitmap to a 28x28 single-channel input,
    // inverting the alpha value and normalizing it to [0, 1].
    var array = new Data2D(28, 28, 1, 1);
    for (int i = 0; i < img.Height; i++)
    {
        for (int j = 0; j < img.Width; j++)
        {
            Color pixel = img.GetPixel(j, i);
            double value = (255 - pixel.A) / 255.0;
            array[i, j, 0, 0] = value;
        }
    }

    // Run the network and copy the 10 class scores into a plain array.
    var result = model.ExecuteNetwork(array) as Data2D;
    double[] toreturn = new double[10];
    for (int i = 0; i < 10; i++)
    {
        toreturn[i] = result[0, 0, i, 0];
    }

    return toreturn;
}
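// Usage sketch (illustrative, not part of the library): a caller would typically take the index
// of the largest of the 10 scores returned by Evaluate as the predicted digit. The helper name
// 'Predict' and the 'digit' parameter are assumptions for this example only.
public int Predict(Bitmap digit)
{
    double[] scores = Evaluate(digit);

    // Argmax over the 10 class scores.
    int predicted = 0;
    for (int k = 1; k < scores.Length; k++)
    {
        if (scores[k] > scores[predicted])
        {
            predicted = k;
        }
    }
    return predicted;
}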
public void Test_GlobalMaxPool1D_Null_Input()
{
    Data2D data = null;
    GlobalMaxPool1DLayer pool = new GlobalMaxPool1DLayer();
    pool.SetInput(data);
}
public void Test_Cropping1D_Not1DInput_Trim()
{
    Data2D data = new Data2D(2, 4, 3, 5);
    Cropping1DLayer crop = new Cropping1DLayer(1, 1);
    crop.SetInput(data);
}
public void Test_GRU_Null_Weights()
{
    Data2D weights = null;
    GRULayer rnn = new GRULayer(5, 3, TanHLayer.TanHLambda, TanHLayer.TanHLambda);
    rnn.SetWeights(weights);
}
public void Test_GRU_WrongSizeinWidth_Weights()
{
    Data2D weights = new Data2D(1, 3, 5, 4);
    GRULayer rnn = new GRULayer(5, 3, TanHLayer.TanHLambda, p => { });
    rnn.SetWeights(weights);
}
public void Test_Sigmoid_KerasModel()
{
    string path = @"tests\test_sigmoid_model.json";
    var reader = new ReaderKerasModel(path);
    SequentialModel model = reader.GetSequentialExecutor();

    Data2D inp = new Data2D(1, 8, 1, 1);
    inp[0, 0, 0, 0] = 1; inp[0, 1, 0, 0] = 2; inp[0, 2, 0, 0] = -1; inp[0, 3, 0, 0] = 0;
    inp[0, 4, 0, 0] = 3; inp[0, 5, 0, 0] = 1; inp[0, 6, 0, 0] = 1; inp[0, 7, 0, 0] = 2;

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    Assert.AreEqual(ou[0, 0, 0, 0], 0.2689414322376251, 0.00001);
    Assert.AreEqual(ou[0, 0, 1, 0], 0.9959298968315125, 0.00001);
    Assert.AreEqual(ou[0, 0, 2, 0], 1.0, 0.00001);
    Assert.AreEqual(ou[0, 0, 3, 0], 0.9998766183853149, 0.00001);
}
public void Test_Softmax_KerasModel()
{
    string path = @"tests\test_softmax_model.json";
    var reader = new ReaderKerasModel(path);
    SequentialModel model = reader.GetSequentialExecutor();

    Data2D inp = new Data2D(1, 8, 1, 1);
    inp[0, 0, 0, 0] = 1; inp[0, 1, 0, 0] = 2; inp[0, 2, 0, 0] = -1; inp[0, 3, 0, 0] = 0;
    inp[0, 4, 0, 0] = 3; inp[0, 5, 0, 0] = 1; inp[0, 6, 0, 0] = 1; inp[0, 7, 0, 0] = 2;

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    Assert.AreEqual(ou[0, 0, 0, 0], 3.3980058766758248e-09, 1e-10);
    Assert.AreEqual(ou[0, 0, 1, 0], 2.26015504267707e-06, 1e-7);
    Assert.AreEqual(ou[0, 0, 2, 0], 0.9999228715896606, 0.00001);
    Assert.AreEqual(ou[0, 0, 3, 0], 7.484605885110795e-05, 1e-6);
}
public void Test_Softmax_DifferentData()
{
    Data2D data = new Data2D(5, 4, 5, 10);
    SoftmaxLayer soft = new SoftmaxLayer();
    soft.SetInput(data);
}
public void Test_Softsign_Execute()
{
    softsign = new SoftsignLayer();

    Data2D data = new Data2D(2, 3, 1, 1);
    data[0, 0, 0, 0] = 4; data[0, 1, 0, 0] = 2; data[0, 2, 0, 0] = -2;
    data[1, 0, 0, 0] = 3; data[1, 1, 0, 0] = -1; data[1, 2, 0, 0] = -3;

    softsign.SetInput(data);
    softsign.Execute();
    Data2D output = softsign.GetOutput() as Data2D;

    Assert.AreEqual(output[0, 0, 0, 0], SoftsignFunc(4.0), 0.00000001);
    Assert.AreEqual(output[0, 1, 0, 0], SoftsignFunc(2.0), 0.00000001);
    Assert.AreEqual(output[0, 2, 0, 0], SoftsignFunc(-2.0), 0.00000001);
    Assert.AreEqual(output[1, 0, 0, 0], SoftsignFunc(3.0), 0.00000001);
    Assert.AreEqual(output[1, 1, 0, 0], SoftsignFunc(-1.0), 0.00000001);
    Assert.AreEqual(output[1, 2, 0, 0], SoftsignFunc(-3.0), 0.00000001);
}
public void Test_AvgPool1D_1_KerasModel()
{
    string path = @"tests\test_avgpool_1D_1_model.json";
    var reader = new ReaderKerasModel(path);
    SequentialModel model = reader.GetSequentialExecutor();

    Data2D inp = new Data2D(1, 5, 2, 1);
    inp[0, 0, 0, 0] = 0; inp[0, 0, 1, 0] = 1;
    inp[0, 1, 0, 0] = 2; inp[0, 1, 1, 0] = 1;
    inp[0, 2, 0, 0] = 0; inp[0, 2, 1, 0] = 0;
    inp[0, 3, 0, 0] = 2; inp[0, 3, 1, 0] = 1;
    inp[0, 4, 0, 0] = 2; inp[0, 4, 1, 0] = 1;

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 2);
    Assert.AreEqual(ou.GetDimension().w, 3);

    Assert.AreEqual(ou[0, 0, 0, 0], 0.6666666865348816, 0.00001);
    Assert.AreEqual(ou[0, 0, 1, 0], 0.6666666865348816, 0.00001);
    Assert.AreEqual(ou[0, 1, 0, 0], 1.3333333730697632, 0.00001);
    Assert.AreEqual(ou[0, 1, 1, 0], 0.6666666865348816, 0.00001);
    Assert.AreEqual(ou[0, 2, 0, 0], 1.3333333730697632, 0.00001);
    Assert.AreEqual(ou[0, 2, 1, 0], 0.6666666865348816, 0.00001);
}
public void Test_Cropping2D_Null_Input()
{
    Data2D data = null;
    Cropping2DLayer crop = new Cropping2DLayer(1, 2, 1, 1);
    crop.SetInput(data);
}
public void Test_RepeatVector_WrongSizesWidth()
{
    Data2D data = new Data2D(1, 3, 5, 2);
    RepeatVectorLayer rep = new RepeatVectorLayer(3);
    rep.SetInput(data);
}
public void Test_Cropping2D_TooMuchCropping_Trim()
{
    Data2D data = new Data2D(4, 4, 3, 5);
    Cropping2DLayer crop = new Cropping2DLayer(2, 3, 1, 1);
    crop.SetInput(data);
}
public void Test_RepeatVector_Null_Input()
{
    Data2D data = null;
    RepeatVectorLayer rep = new RepeatVectorLayer(4);
    rep.SetInput(data);
}
public void Test_HardSigmoid_Execute()
{
    hardsigmoid = new HardSigmoidLayer();

    Data2D data = new Data2D(2, 3, 1, 1);
    data[0, 0, 0, 0] = 4; data[0, 1, 0, 0] = 2; data[0, 2, 0, 0] = -2;
    data[1, 0, 0, 0] = 3; data[1, 1, 0, 0] = -1; data[1, 2, 0, 0] = -3;

    hardsigmoid.SetInput(data);
    hardsigmoid.Execute();
    Data2D output = hardsigmoid.GetOutput() as Data2D;

    Assert.AreEqual(output[0, 0, 0, 0], HardSigmoidFunc(4.0), 0.00000001);
    Assert.AreEqual(output[0, 1, 0, 0], HardSigmoidFunc(2.0), 0.00000001);
    Assert.AreEqual(output[0, 2, 0, 0], HardSigmoidFunc(-2.0), 0.00000001);
    Assert.AreEqual(output[1, 0, 0, 0], HardSigmoidFunc(3.0), 0.00000001);
    Assert.AreEqual(output[1, 1, 0, 0], HardSigmoidFunc(-1.0), 0.00000001);
    Assert.AreEqual(output[1, 2, 0, 0], HardSigmoidFunc(-3.0), 0.00000001);
}
public void Test_RepeatVector_WrongSizesHeight()
{
    Data2D data = new Data2D(2, 1, 5, 2);
    RepeatVectorLayer rep = new RepeatVectorLayer(3);
    rep.SetInput(data);
}
public void Test_MaxPool1D_Null_Input()
{
    Data2D data = null;
    MaxPool1DLayer pool = new MaxPool1DLayer(1, 1, 2);
    pool.SetInput(data);
}
public void Test_Conv1D_NullConv_Weights()
{
    Data2D weights = null;
    Conv1DLayer conv = new Conv1DLayer(1, 1);
    conv.SetWeights(weights);
}
public void Test_ReLu_KerasModel()
{
    string path = @"tests\test_relu_model.json";
    var reader = new ReaderKerasModel(path);
    SequentialModel model = reader.GetSequentialExecutor();

    Data2D inp = new Data2D(1, 8, 1, 1);
    inp[0, 0, 0, 0] = 1; inp[0, 1, 0, 0] = 2; inp[0, 2, 0, 0] = -1; inp[0, 3, 0, 0] = 0;
    inp[0, 4, 0, 0] = 3; inp[0, 5, 0, 0] = 1; inp[0, 6, 0, 0] = 1; inp[0, 7, 0, 0] = 2;

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    Assert.AreEqual(ou[0, 0, 0, 0], 0.0, 0.00001);
    Assert.AreEqual(ou[0, 0, 1, 0], 5.5, 0.00001);
    Assert.AreEqual(ou[0, 0, 2, 0], 18.5, 0.00001);
    Assert.AreEqual(ou[0, 0, 3, 0], 9.0, 0.00001);
}
public void SetInput(IData input)
{
    if (input == null)
    {
        throw new Exception("BatchNormLayer: input is null.");
    }
    else if (!(input is Data2D))
    {
        throw new Exception("BatchNormLayer: input is not Data2D.");
    }

    this.input = input as Data2D;
    Dimension dimI = this.input.GetDimension();

    // Each per-channel parameter list must hold exactly one entry per input channel.
    int kGamma = this.gamma.Count;
    int kBeta = this.beta.Count;
    int kBias = this.bias.Count;
    int kVariance = this.variance.Count;

    if (dimI.c != kBias || dimI.c != kGamma || dimI.c != kBeta || dimI.c != kVariance)
    {
        throw new Exception("Number of parameters does not equal the number of features (channels).");
    }

    // The output has the same shape as the input.
    int outputH = dimI.h;
    int outputW = dimI.w;
    int outputC = dimI.c;
    int outputB = dimI.b;

    output = new Data2D(outputH, outputW, outputC, outputB);
}
public void Test_Reshape2D_Null_Input()
{
    Data2D data = null;
    Reshape2DLayer res = new Reshape2DLayer(1, 2, 1, 4);
    res.SetInput(data);
}
public void SetWeights(IData parameters)
{
    if (parameters == null)
    {
        throw new Exception("BatchNormLayer: parameters is null.");
    }
    else if (!(parameters is Data2D))
    {
        throw new Exception("BatchNormLayer: parameters is not Data2D.");
    }

    Data2D pms = parameters as Data2D;
    if (pms.GetDimension().h != 1 || pms.GetDimension().w != 1)
    {
        throw new Exception("BatchNormLayer: parameters' height and width should be 1.");
    }

    // Parameters are packed per channel along the last index:
    // 0 = gamma, 1 = beta, 2 = bias, 3 = variance.
    for (int feature = 0; feature < pms.GetDimension().c; ++feature)
    {
        gamma.Add(pms[0, 0, feature, 0]);
        beta.Add(pms[0, 0, feature, 1]);
        bias.Add(pms[0, 0, feature, 2]);
        variance.Add(pms[0, 0, feature, 3]);
    }
}
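// Illustrative sketch (not from the source): building the parameter tensor in the layout that
// SetWeights above expects, i.e. height = 1, width = 1, one channel per feature, and the four
// parameter kinds (gamma, beta, bias, variance) along the last index. The helper name
// 'PackBatchNormParameters' and its array arguments are assumptions for this example only.
public static Data2D PackBatchNormParameters(double[] gamma, double[] beta, double[] bias, double[] variance)
{
    int numFeatures = gamma.Length;
    Data2D pms = new Data2D(1, 1, numFeatures, 4);

    for (int feature = 0; feature < numFeatures; ++feature)
    {
        pms[0, 0, feature, 0] = gamma[feature];
        pms[0, 0, feature, 1] = beta[feature];
        pms[0, 0, feature, 2] = bias[feature];
        pms[0, 0, feature, 3] = variance[feature];
    }

    return pms;
}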
public static void Main(string[] args)
{
    // Keras speed with the same: 60 ms.
    /*
    ReaderKerasModel reader = new ReaderKerasModel("test_cnn_model.json");
    SequentialModel model = reader.GetSequentialExecutor();

    Console.WriteLine((model.GetSummary() as SequentialModelData).GetStringRepresentation());
    Console.ReadKey();

    int[] idx = { 1, 2, 3 };
    Console.WriteLine(idx[1]);
    Console.ReadKey();
    */

    Conv2DLayer layer = new Conv2DLayer(0, 0, 1, 1);

    // 6x5 input with 3 channels (batch size 1).
    Data2D input = new Data2D(6, 5, 3, 1);

    // Channel 0.
    input[0, 0, 0, 0] = 1; input[0, 1, 0, 0] = 2; input[0, 2, 0, 0] = 2; input[0, 3, 0, 0] = 1; input[0, 4, 0, 0] = 4;
    input[1, 0, 0, 0] = 3; input[1, 1, 0, 0] = 1; input[1, 2, 0, 0] = 0; input[1, 3, 0, 0] = 2; input[1, 4, 0, 0] = 1;
    input[2, 0, 0, 0] = 0; input[2, 1, 0, 0] = 2; input[2, 2, 0, 0] = 2; input[2, 3, 0, 0] = 5; input[2, 4, 0, 0] = 2;
    input[3, 0, 0, 0] = 6; input[3, 1, 0, 0] = -2; input[3, 2, 0, 0] = -1; input[3, 3, 0, 0] = 3; input[3, 4, 0, 0] = 1;
    input[4, 0, 0, 0] = 2; input[4, 1, 0, 0] = 1; input[4, 2, 0, 0] = 2; input[4, 3, 0, 0] = 4; input[4, 4, 0, 0] = 0;
    input[5, 0, 0, 0] = 5; input[5, 1, 0, 0] = -3; input[5, 2, 0, 0] = -1; input[5, 3, 0, 0] = -4; input[5, 4, 0, 0] = 0;

    // Channel 1.
    input[0, 0, 1, 0] = 2; input[0, 1, 1, 0] = 0; input[0, 2, 1, 0] = 2; input[0, 3, 1, 0] = -1; input[0, 4, 1, 0] = 3;
    input[1, 0, 1, 0] = 2; input[1, 1, 1, 0] = 5; input[1, 2, 1, 0] = -1; input[1, 3, 1, 0] = 3; input[1, 4, 1, 0] = 5;
    input[2, 0, 1, 0] = 1; input[2, 1, 1, 0] = 1; input[2, 2, 1, 0] = 1; input[2, 3, 1, 0] = 0; input[2, 4, 1, 0] = 1;
    input[3, 0, 1, 0] = -3; input[3, 1, 1, 0] = 2; input[3, 2, 1, 0] = -1; input[3, 3, 1, 0] = 4; input[3, 4, 1, 0] = 1;
    input[4, 0, 1, 0] = 2; input[4, 1, 1, 0] = 1; input[4, 2, 1, 0] = 2; input[4, 3, 1, 0] = 2; input[4, 4, 1, 0] = 1;
    input[5, 0, 1, 0] = 0; input[5, 1, 1, 0] = -3; input[5, 2, 1, 0] = 1; input[5, 3, 1, 0] = -2; input[5, 4, 1, 0] = -1;

    // Channel 2.
    input[0, 0, 2, 0] = 4; input[0, 1, 2, 0] = 5; input[0, 2, 2, 0] = 0; input[0, 3, 2, 0] = -1; input[0, 4, 2, 0] = -3;
    input[1, 0, 2, 0] = 2; input[1, 1, 2, 0] = 3; input[1, 2, 2, 0] = 1; input[1, 3, 2, 0] = 6; input[1, 4, 2, 0] = 0;
    input[2, 0, 2, 0] = 0; input[2, 1, 2, 0] = -4; input[2, 2, 2, 0] = -3; input[2, 3, 2, 0] = -2; input[2, 4, 2, 0] = -4;
    input[3, 0, 2, 0] = 4; input[3, 1, 2, 0] = 2; input[3, 2, 2, 0] = 1; input[3, 3, 2, 0] = 0; input[3, 4, 2, 0] = 4;
    input[4, 0, 2, 0] = 3; input[4, 1, 2, 0] = 3; input[4, 2, 2, 0] = 0; input[4, 3, 2, 0] = 1; input[4, 4, 2, 0] = 1;
    input[5, 0, 2, 0] = -2; input[5, 1, 2, 0] = 1; input[5, 2, 2, 0] = 1; input[5, 3, 2, 0] = 0; input[5, 4, 2, 0] = 5;

    // 3x3 kernel with 3 channels.
    Data2D kernel = new Data2D(3, 3, 3, 1);

    // Channel 0.
    kernel[0, 0, 0, 0] = 1; kernel[0, 1, 0, 0] = 1; kernel[0, 2, 0, 0] = 0;
    kernel[1, 0, 0, 0] = 2; kernel[1, 1, 0, 0] = 0; kernel[1, 2, 0, 0] = 0;
    kernel[2, 0, 0, 0] = 1; kernel[2, 1, 0, 0] = 2; kernel[2, 2, 0, 0] = 1;

    // Channel 1.
    kernel[0, 0, 1, 0] = 3; kernel[0, 1, 1, 0] = 1; kernel[0, 2, 1, 0] = -1;
    kernel[1, 0, 1, 0] = 2; kernel[1, 1, 1, 0] = -1; kernel[1, 2, 1, 0] = -2;
    kernel[2, 0, 1, 0] = 0; kernel[2, 1, 1, 0] = 1; kernel[2, 2, 1, 0] = 2;

    // Channel 2.
    kernel[0, 0, 2, 0] = 0; kernel[0, 1, 2, 0] = 1; kernel[0, 2, 2, 0] = 1;
    kernel[1, 0, 2, 0] = -1; kernel[1, 1, 2, 0] = 2; kernel[1, 2, 2, 0] = 1;
    kernel[2, 0, 2, 0] = 3; kernel[2, 1, 2, 0] = 0; kernel[2, 2, 2, 0] = 1;

    layer.SetWeights(kernel);
    layer.SetInput(input);
    layer.Execute();

    Data2D output = layer.GetOutput() as Data2D;
}
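// Illustrative follow-up (not in the source): the convolution result computed in Main above
// could be inspected by printing its dimensions and values. The helper name 'PrintOutput' is an
// assumption; the Dimension fields (b, c, h, w) and the [h, w, c, b] indexer follow the usage
// seen in the tests in this repository.
public static void PrintOutput(Data2D output)
{
    Dimension dim = output.GetDimension();
    Console.WriteLine($"Output size: b={dim.b}, c={dim.c}, h={dim.h}, w={dim.w}");

    // Print every element of the first batch entry, channel by channel.
    for (int c = 0; c < dim.c; c++)
    {
        for (int h = 0; h < dim.h; h++)
        {
            for (int w = 0; w < dim.w; w++)
            {
                Console.Write($"{output[h, w, c, 0]} ");
            }
            Console.WriteLine();
        }
        Console.WriteLine();
    }
}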
public void Test_LSTM_WrongSizeinChannel_Weights()
{
    Data2D weights = new Data2D(1, 5, 7, 4);
    LSTMLayer rnn = new LSTMLayer(5, 3, TanHLayer.TanHLambda, p => { });
    rnn.SetWeights(weights);
}