/// <summary>
/// Runs a single forward pass for <paramref name="image"/> and returns the
/// raw signal value of every node in the output layer.
/// </summary>
/// <param name="image">The sample to feed into the network's input layer.</param>
/// <returns>One value per output-layer row, read from the Sig slot of each node.</returns>
public double[] Predict(Core.fData image)
{
    // Route the sample to the matching input-layer type (3 channels => color).
    if (Input.Output.Count == 3)
    {
        ((ColorImage)Input).fData = image;
    }
    else
    {
        ((GrayImage)Input).fData = image;
    }

    // Forward-propagate the signal through the network.
    Next(ANN.Enums.Propagate.Signal);

    // Collect the signal of every output node into the result vector.
    // NOTE(review): unlike training, no softmax is applied here — these are
    // the raw output signals; confirm callers expect that.
    double[] scores = new double[Output.Output[0].Rows];
    for (int row = 0; row < scores.Length; row++)
    {
        Foundation.Node outputNode = Output.Output[0].GetElement(row, 0);
        scores[row] = (double)((double?[])outputNode.Element)[Global.Sig];
    }
    return scores;
}
/// <summary>
/// Trains the model for <c>Epochs</c> epochs using mini-batches: every sample
/// of each (shuffled) batch is forward-propagated, its softmax output and loss
/// are computed, the error is back-propagated and accumulated via
/// <c>SaveError</c>, and the weights are adjusted once per batch.
/// </summary>
/// <returns>The accumulated training log text.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <c>learningRate</c> or <c>momentum</c> has not been configured.
/// </exception>
public override string Next()
{
    // 0. validate parameters
    if ((learningRate == null) || (momentum == null))
    {
        throw new InvalidOperationException("learningRate and momentum must be set before training.");
    }

    // 3. configure randomizers: one permutation over batches, one over samples.
    int batchCount = batchImages.Count;
    int[] batchRandomizer = new int[batchCount];
    int[] dataSetRandomizer = new int[nofSet];
    log += "\nTraining data...";

    for (int i = 0; i < Epochs; i++)
    {
        // Reshuffle both visiting orders at the start of every epoch.
        Global.NextIntArray(0, nofSet, dataSetRandomizer);
        Global.NextIntArray(0, batchCount, batchRandomizer);

        // single epoch
        for (int j = 0; j < batchCount; j++) // batch
        {
            batchLoss = 0;
            for (int k = 0; k < nofSet; k++)
            {
                // Hoist the shuffled sample lookup used repeatedly below.
                var sample = batchImages[batchRandomizer[j]].fData[dataSetRandomizer[k]];

                // Route the sample to the matching input-layer type (3 channels => color).
                if (Model.Input.Output.Count == 3)
                {
                    ((ColorImage)Model.Input).fData = sample;
                }
                else
                {
                    ((GrayImage)Model.Input).fData = sample;
                }

                // 4.1 propagate signal (forward pass)
                Model.Next(Propagate.Signal);

                // 4.2 read the raw output signals and write the targets into
                // each node's error slot.
                Layer lyr = Model.Output;
                Node node;
                probs = new double[lyr.Output[0].Rows];
                // NOTE(review): only the first Label.Length entries of probs
                // are filled; if the output layer has more rows the remainder
                // stay 0 and still take part in the softmax below — confirm intended.
                for (int l = 0; l < sample.Label.Length; l++)
                {
                    node = lyr.Output[0].GetElement(l, 0);
                    probs[l] = (double)((double?[])node.Element)[Global.Sig];
                    ((double?[])node.Element)[Global.Err] = sample.Label[l];
                }

                // 4.3 replace each node's raw signal with its softmax value.
                double[] softmaxOutput = Softmax(probs);
                for (int l = 0; l < probs.Length; l++)
                {
                    node = lyr.Output[0].GetElement(l, 0);
                    ((double?[])node.Element)[Global.Sig] = softmaxOutput[l];
                }

                // Accumulate the per-sample loss into the batch loss.
                singleLoss = lossfunc.GetLoss(softmaxOutput, sample.Label);
                batchLoss += singleLoss;

                // 4.4 propagate error (backward pass) and stash the gradients.
                Model.Next(Propagate.Error);
                SaveError();
            }

            // 4.5 adjust weights once per batch, then clear accumulated error.
            AdjustWeights();
            ClearError();

            // NOTE(review): the inner loop runs nofSet samples but the mean is
            // taken over batchSize — verify these are the same quantity.
            batchLoss = batchLoss / (double)batchSize;
            Console.WriteLine("Epoch[" + i.ToString() + "]" + "Batch[" + j.ToString() + "]" + "Loss: " + batchLoss.ToString("e4"));
            log += "\n\nEpoch[" + i.ToString() + "]" + "Batch[" + j.ToString() + "]" + "Loss: " + batchLoss.ToString("e4");
        }
    }
    return log;
}
/// <summary>
/// Trains the model with per-sample (stochastic) gradient descent: each epoch
/// visits every sample of the data set in a shuffled order, forward-propagates
/// it, computes the softmax output, back-propagates the error and adjusts the
/// weights immediately after every single sample.
/// </summary>
/// <returns>The accumulated training log text.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <c>learningRate</c> or <c>momentum</c> has not been configured.
/// </exception>
public override string Next()
{
    // 0. validate parameters
    if ((learningRate == null) || (momentum == null))
    {
        throw new InvalidOperationException("learningRate and momentum must be set before training.");
    }

    // 3. configure the shuffled visiting order over the data set.
    int[] dataSetRandomizer = new int[nofSet];
    log += "\nTraining data...";

    // 4. train epoch by epoch...
    for (int i = 0; i < Epochs; i++)
    {
        Console.WriteLine("Start");
        log += "\n\n////////// Epoch[" + i + "]... ///////////////////////////////////////////////////////";
        Global.NextIntArray(0, nofSet, dataSetRandomizer);

        // single epoch
        double mean_loss = 0;
        for (int j = 0; j < nofSet; j++)
        {
            // Hoist the shuffled sample lookup used repeatedly below.
            var sample = DataSet.fData[dataSetRandomizer[j]];

            // Route the sample to the matching input-layer type (3 channels => color).
            if (Model.Input.Output.Count == 3)
            {
                ((ColorImage)Model.Input).fData = sample;
            }
            else
            {
                ((GrayImage)Model.Input).fData = sample;
            }

            // 4.1 propagate signal (forward pass)
            Model.Next(Propagate.Signal);

            // 4.2 read the raw output signals and write the targets into
            // each node's error slot.
            Layer lyr = Model.Output;
            Node node;
            probs = new double[lyr.Output[0].Rows];
            // NOTE(review): only the first Label.Length entries of probs are
            // filled; if the output layer has more rows the remainder stay 0
            // and still take part in the softmax below — confirm intended.
            for (int l = 0; l < sample.Label.Length; l++)
            {
                node = lyr.Output[0].GetElement(l, 0);
                probs[l] = (double)((double?[])node.Element)[Global.Sig];
                ((double?[])node.Element)[Global.Err] = sample.Label[l];
            }

            // 4.3 replace each node's raw signal with its softmax value.
            double[] softmaxOutput = Softmax(probs);
            for (int l = 0; l < probs.Length; l++)
            {
                node = lyr.Output[0].GetElement(l, 0);
                ((double?[])node.Element)[Global.Sig] = softmaxOutput[l];
            }

            // 4.4 propagate error and set new weights
            Model.Next(Propagate.Error);

            // 4.5 adjust weights after every single sample
            AdjustWeights();

            // 4.6 calculate and accumulate the loss for the epoch mean.
            loss = lossfunc.GetLoss(softmaxOutput, sample.Label);
            mean_loss += loss;
            // Fixed label: j indexes images in this loop, not batches.
            Console.WriteLine("Epoch[" + i.ToString() + "]" + "Image[" + j.ToString() + "]" + "Loss: " + loss.ToString("e4"));
        }
        Console.WriteLine("Epoch[" + i.ToString() + "]" + "Loss: " + (mean_loss / nofSet).ToString("e4"));
    }
    return log;
}