/// <summary>
/// Runs the full training loop: builds the layer pipeline
/// (image -> conv -> conv -> pool -> concat -> hidden -> output), then for each
/// epoch forward-propagates shuffled mini-batches, applies softmax to the output
/// layer, accumulates the loss, back-propagates the error, and adjusts weights
/// once per batch. Appends progress to <c>log</c> and returns it.
/// </summary>
/// <returns>The accumulated training log text.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <c>learningRate</c> or <c>momentum</c> has not been configured.
/// </exception>
public override string Next()
{
    // 0. Validate parameters before any work is done.
    // FIX: was `throw new Exception()` — bare, message-less, and of the base type.
    if ((learningRate == null) || (momentum == null))
    {
        throw new InvalidOperationException(
            "Training requires both learningRate and momentum to be configured.");
    }

    // Randomizers used to shuffle batch order and sample order each epoch.
    // Hoist the LINQ Count() — the collection size does not change during training.
    int batchCount = batchImages.Count();
    int[] batchRandomizer = new int[batchCount];
    int[] dataSetRandomizer = new int[nofSet];

    log += "\nTraining data...";

    // 1. Build the layer pipeline. Each layer's Input is wired to the
    // previous layer's Output before Configure is called.
    imagLyrG = new GrayImage();
    imagLyrG.Configure(28, 28, -1.0, 1.0);
    imagLyrG.fData = batchImages[0].fData[0];

    convLyr = new Convolution();
    convLyr.Input = imagLyrG.Output;
    convLyr.Configure("depth=8; activation=relu(void); kernel=convolution(size = 5, stride = 1, padding = 0, weightfieldsize = 3); outputfieldSize =2");

    convLyr1 = new Convolution();
    convLyr1.Input = convLyr.Output;
    convLyr1.Configure("depth=8; activation=relu(void); kernel=convolution(size = 5, stride = 1, padding = 0, weightfieldsize = 3); outputfieldSize =2");

    poolLyr = new Pooling();
    poolLyr.Input = convLyr1.Output;
    poolLyr.Configure("kernel=maxpool(size=2, stride=2); outputfieldsize=2");

    concLyr = new Concatenation();
    concLyr.Input = poolLyr.Output;
    concLyr.Configure("outputfieldsize=2");

    hiddLyr = new Connected();
    hiddLyr.Input = concLyr.Output;
    hiddLyr.Configure("neuron=hiddenperceptron; activation=relu(void); nodes=128; outputfieldsize=2(def:2)");

    outpLyr = new Connected();
    outpLyr.Input = hiddLyr.Output;
    outpLyr.Configure("neuron=outputperceptron; activation=linear(void); nodes=10; outputfieldsize=2(def:2)");

    // 2. Epoch loop.
    for (int i = 0; i < Epochs; i++)
    {
        // Re-shuffle sample and batch visiting orders once per epoch.
        Global.NextIntArray(0, nofSet, dataSetRandomizer);
        Global.NextIntArray(0, batchCount, batchRandomizer);

        // NOTE(review): a fixed 100 batches per epoch is assumed here (as in the
        // original); this throws IndexOutOfRange if batchImages.Count() < 100 —
        // confirm the intended batch count against the data loader.
        for (int j = 0; j < 100; j++)
        {
            batchLoss = 0;

            // FIX: the inner loop originally ran a hard-coded 6 samples while
            // the loss below is averaged over batchSize; use batchSize so the
            // accumulated and averaged sample counts agree.
            for (int k = 0; k < batchSize; k++)
            {
                // Hoist the repeatedly-indexed sample into a local.
                var sample = batchImages[batchRandomizer[j]].fData[dataSetRandomizer[k]];
                imagLyrG.fData = sample;

                // 2.1 Forward pass through the whole pipeline.
                convLyr.Next(Propagate.Signal);
                convLyr1.Next(Propagate.Signal);
                poolLyr.Next(Propagate.Signal);
                concLyr.Next(Propagate.Signal);
                hiddLyr.Next(Propagate.Signal);
                outpLyr.Next(Propagate.Signal);

                // 2.2 Read the raw output signals and store each target label
                // value into the error slot of the corresponding output node.
                Node node;
                probs = new double[outpLyr.Output[0].Rows];
                for (int l = 0; l < sample.Label.Length; l++)
                {
                    node = outpLyr.Output[0].GetElement(l, 0);
                    probs[l] = (double)((double?[])node.Element)[Global.Sig];
                    ((double?[])node.Element)[Global.Err] = sample.Label[l];
                }

                // 2.3 Replace the raw output signals with their softmax
                // probabilities so the error propagation sees normalized values.
                double[] softmaxOutput = Softmax(probs);
                for (int l = 0; l < probs.Length; l++)
                {
                    node = outpLyr.Output[0].GetElement(l, 0);
                    ((double?[])node.Element)[Global.Sig] = softmaxOutput[l];
                }

                // 2.4 Accumulate the per-sample loss into the batch loss.
                singleLoss = lossfunc.GetLoss(softmaxOutput, sample.Label);
                batchLoss += singleLoss;

                // 2.5 Backward pass: propagate the error in reverse layer
                // order, then record each layer's gradients for this sample.
                outpLyr.Next(Propagate.Error);
                hiddLyr.Next(Propagate.Error);
                concLyr.Next(Propagate.Error);
                poolLyr.Next(Propagate.Error);
                convLyr1.Next(Propagate.Error);
                convLyr.Next(Propagate.Error);

                SaveError(outpLyr);
                SaveError(hiddLyr);
                SaveError(concLyr);
                SaveError(convLyr1);
                SaveError(convLyr);
            }

            // 2.6 Apply the gradients accumulated over the batch, then clear
            // the stored errors before the next batch.
            AdjustWeights(outpLyr);
            AdjustWeights(hiddLyr);
            AdjustWeights(concLyr);
            AdjustWeights(convLyr1);
            AdjustWeights(convLyr);

            ClearError(outpLyr);
            ClearError(hiddLyr);
            ClearError(concLyr);
            ClearError(convLyr1);
            ClearError(convLyr);

            // 2.7 Report the mean loss for this batch (build the progress
            // string once instead of duplicating the concatenation).
            batchLoss = batchLoss / (double)batchSize;
            string progress = "Epoch[" + i.ToString() + "]" + "Batch[" + j.ToString() + "]" + "Loss: " + batchLoss.ToString("e4");
            Console.WriteLine(progress);
            log += "\n\n" + progress;
        }
    }

    return log;
}