/// <summary>
/// Builds a training set of <paramref name="samples"/> examples. Each input tensor of shape
/// <paramref name="inShape"/> is filled with pseudo-random values (deterministically seeded per
/// sample), and its target is produced by <paramref name="f"/> from <paramref name="expectedParams"/>.
/// </summary>
private List<Data> GenerateTrainingData(int samples, Shape inShape, Tensor expectedParams, TrainDataFunc f)
{
    var dataSet = new List<Data>(samples);
    for (int n = 0; n < samples; ++n)
    {
        var x = new Tensor(inShape);
        x.FillWithRand(3 * n); // fixed per-sample seed keeps runs reproducible
        dataSet.Add(new Data(x, f(x, expectedParams)));
    }
    return dataSet;
}
/// <summary>
/// Trains a single-layer convolution network on data generated from randomly initialized
/// "expected" kernels, then asserts the learned kernels converged to them element-wise
/// within a tolerance.
/// </summary>
private void TestConvolutionLayer(Shape inputShape, int kernelSize, int kernelsNum, int stride, int samples, int batchSize, int epochs, TrainDataFunc convFunc)
{
    var net = new NeuralNetwork("convolution_test", 7);
    var model = new Sequential();
    // Linear activation + constant-1 kernel initialization give a deterministic start.
    model.AddLayer(new Convolution(inputShape, kernelSize, kernelsNum, stride, Activation.Linear) { KernelInitializer = new Initializers.Constant(1) });
    net.Model = model;

    // Target kernels the network is expected to recover via training (seed 17 for reproducibility).
    var expectedKernels = new Tensor(new Shape(kernelSize, kernelSize, inputShape.Depth, kernelsNum));
    expectedKernels.FillWithRand(17);

    var tData = GenerateTrainingData(samples, model.LastLayer.InputShape, expectedKernels, convFunc);

    net.Optimize(new SGD(0.02f), Loss.MeanSquareError);
    net.Fit(tData, batchSize, epochs, null, 0, Track.Nothing);

    // Hoist the learned kernel tensor out of the loop — GetParametersAndGradients() is
    // loop-invariant. Note: Assert.AreEqual takes (expected, actual, delta); the original
    // passed them swapped, which only garbles failure messages but is worth fixing.
    var learnedKernels = model.LastLayer.GetParametersAndGradients()[0].Parameters;
    for (int i = 0; i < expectedKernels.Length; ++i)
    {
        // Tolerance 1e-2: SGD training will not reproduce the targets exactly.
        Assert.AreEqual(expectedKernels.GetFlat(i), learnedKernels.GetFlat(i), 1e-2);
    }
}