/// <summary>
/// Verifies that <c>FitBatched</c> can train the last dense layer to recover a
/// known weight matrix when all training samples are packed into a single
/// batched input/output tensor pair.
/// </summary>
public void Fit_Batched_Tensors()
{
    NeuralNetwork net = CreateFitTestNet();
    Sequential seqModel = net.Model as Sequential;

    // Ground-truth weights used to synthesize the training targets; training
    // should converge the layer's parameters back to these values.
    var expectedWeights = new Tensor(new[] { 1.1f, 0.1f, -1.3f, 0.2f, -0.9f, 0.7f }, new Shape(3, 2));
    var tData = GenerateTrainingData(50, seqModel.LastLayer.InputShapes[0], expectedWeights, MatMult);

    // Pack every sample into one batched tensor pair; the 4th shape dimension
    // is the batch size (one slot per training sample).
    var inputs = new Tensor(new Shape(seqModel.Layer(0).InputShape.Width,
                                      seqModel.Layer(0).InputShape.Height,
                                      seqModel.Layer(0).InputShape.Depth,
                                      tData.Count));
    var outputs = new Tensor(new Shape(seqModel.LastLayer.OutputShape.Width,
                                       seqModel.LastLayer.OutputShape.Height,
                                       seqModel.LastLayer.OutputShape.Depth,
                                       tData.Count));
    for (int i = 0; i < tData.Count; ++i)
    {
        tData[i].Input.CopyBatchTo(0, i, inputs);
        tData[i].Output.CopyBatchTo(0, i, outputs);
    }

    net.FitBatched(inputs, outputs, 300, 0, Track.Nothing);

    var paramsAndGrads = seqModel.LastLayer.GetParametersAndGradients();
    for (int i = 0; i < expectedWeights.Length; ++i)
    {
        // FIX: Assert.AreEqual(expected, actual, delta) — the expected value
        // goes first; the original call had the arguments swapped, producing
        // misleading failure messages.
        Assert.AreEqual(expectedWeights.GetFlat(i), paramsAndGrads[0].Parameters.GetFlat(i), 1e-2);
    }
}
/// <summary>
/// Builds a three-layer linear dense network, trains it on deterministic
/// pseudo-random samples whose targets are the inputs scaled by 1.7, and
/// asserts that every training sample is predicted within a 0.01 tolerance.
/// </summary>
/// <param name="inputs">Width of the network's input (and output) layer.</param>
/// <param name="samples">Number of training samples to generate.</param>
/// <param name="batchSize">Mini-batch size passed to <c>Fit</c>.</param>
/// <param name="epochs">Number of training epochs.</param>
private void TestDenseNetwork(int inputs, int samples, int batchSize, int epochs)
{
    var network = new NeuralNetwork("deep_dense_test", 7);

    // 3 stacked linear dense layers: inputs -> 5 -> 4 -> inputs.
    var seq = new Sequential();
    seq.AddLayer(new Dense(inputs, 5, Activation.Linear));
    seq.AddLayer(new Dense(seq.LastLayer, 4, Activation.Linear));
    seq.AddLayer(new Dense(seq.LastLayer, inputs, Activation.Linear));
    network.Model = seq;

    // Each sample is seeded with 10*i so the data set is reproducible;
    // the target is simply the input scaled by 1.7.
    var trainingData = new List<Data>();
    for (int s = 0; s < samples; ++s)
    {
        var sampleInput = new Tensor(seq.Layer(0).InputShape);
        sampleInput.FillWithRand(10 * s, -2, 2);
        trainingData.Add(new Data(sampleInput, sampleInput.Mul(1.7f)));
    }

    network.Optimize(new SGD(0.02f), Loss.MeanSquareError);
    network.Fit(trainingData, batchSize, epochs, null, 2, Track.TrainError);

    // Every training sample must be reproduced to within the 0.01 tolerance.
    foreach (var sample in trainingData)
    {
        Assert.IsTrue(sample.Output.Equals(network.Predict(sample.Input)[0], 0.01f));
    }
}