protected override void __Show(object showObj_, params object[] params_)
{
    base.__Show(showObj_);
    m_loader = showObj_ as AbstractLoader;
    m_isComplete = false;
    m_curPer = 0;
    m_tmpPer = 0;
    RefreshPercent();
    UpdateBar(m_tmpPer);
    m_txtTips.text = "Welcome to the new world~~";
}
/// <summary>
/// Trains the neural network according to the current settings.
/// </summary>
/// <param name="loader">Loader which implements loading from the current dataset.</param>
/// <param name="epochCount">Number of epochs for training.</param>
/// <param name="useValidationSet">Whether the validation set is evaluated after each epoch.</param>
/// <returns>List of per-epoch results.</returns>
public abstract List<EpochHistory> Fit(AbstractLoader loader, int epochCount, bool useValidationSet = false);
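// --- Usage sketch (not part of the library) ---------------------------------
// A minimal example of calling Fit on an already compiled model. The concrete
// types SequentialModel and MnistLoader, as well as the Compile() call, are
// assumptions made for illustration only; the code above only guarantees
// AbstractLoader, Fit, EpochHistory and that an uncompiled model returns an
// empty history.
AbstractLoader loader = new MnistLoader();        // hypothetical AbstractLoader subclass
var model = new SequentialModel();                // hypothetical concrete model type
model.Compile();                                  // assumed step that makes IsAlreadyCompiled() true
List<EpochHistory> history = model.Fit(loader, epochCount: 10, useValidationSet: true);
Logger.Log.Info("Trained for " + history.Count + " epochs.");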
public override List<EpochHistory> Fit(AbstractLoader loader, int epochCount, bool useValidationSet = false)
{
    if (!IsAlreadyCompiled())
    {
        Logger.Log.Warning("It is not possible to train the model before it has been compiled.");
        return new List<EpochHistory>();
    }

    alreadyTrained = true;

    double[][][][] currentInput;
    double[][][][] currentGradient;
    double[][] trainLabels;
    string outputString = "";

    // proceed through the requested number of epochs
    List<EpochHistory> history = new List<EpochHistory>();
    for (int epochCounter = 0; epochCounter < epochCount; epochCounter++)
    {
        // proceed through the iterations of one epoch, batch by batch
        List<Tuple<double, double>> trainResults = new List<Tuple<double, double>>();
        for (int batchIndex = 0; batchIndex < loader.TrainBatchCount; batchIndex++)
        {
            var batch = loader.LoadBatch(batchIndex, train: true);
            currentInput = batch.Item1;
            trainLabels = batch.Item2;

            // feed-forward step through all layers
            for (int i = 0; i < layers.Count; i++)
            {
                currentInput = layers[i].ForwardPropagation(currentInput);
            }

            // gradient of the loss at the output layer
            currentGradient = lossFunction.Derivate(currentInput, trainLabels);

            // backward pass through the layers in reverse order
            for (int i = layers.Count - 1; i >= 0; i--)
            {
                currentGradient = layers[i].BackwardPropagation(currentGradient);
            }

            // update the weights of all learnable layers
            for (int i = 0; i < layers.Count; i++)
            {
                if (layers[i] is ILearnable learnable)
                {
                    optimizer.UpdateWeights(learnable, epochCounter + 1);
                }
            }

            trainResults.Add(Evaluate(batch.Item1, trainLabels, print: false));
        }

        // compute the average accuracy and loss over the epoch's iterations
        Tuple<double, double> trainResult = ComputeFinalTrainAcc(trainResults);
        outputString = "Epoch " + (epochCounter + 1) + "/" + epochCount + " "
            + loader.TrainItemCount + " samples"
            + " - train_loss: " + trainResult.Item2.ToString("0.####")
            + " - train_acc: " + trainResult.Item1.ToString("0.####");

        // if a validation set is used, compute its accuracy and loss as well
        if (useValidationSet)
        {
            List<Tuple<double, double>> testResults = new List<Tuple<double, double>>();
            for (int batchIndex = 0; batchIndex < loader.TestBatchCount; batchIndex++)
            {
                var batch = loader.LoadBatch(batchIndex, train: false);
                testResults.Add(Evaluate(batch.Item1, batch.Item2, print: false));
            }

            Tuple<double, double> testResult = ComputeFinalTrainAcc(testResults);
            history.Add(new EpochHistory(testResult.Item2, trainResult.Item2,
                testResult.Item1, trainResult.Item1));
            outputString += " - test_loss: " + testResult.Item2.ToString("0.####")
                + " - test_acc: " + testResult.Item1.ToString("0.####");
        }
        else
        {
            history.Add(new EpochHistory(0d, trainResult.Item2, 0d, trainResult.Item1));
        }

        Logger.Log.Info(outputString);
    }

    return history;
}
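// --- Helper sketch (assumption) ----------------------------------------------
// A hedged sketch of what ComputeFinalTrainAcc could look like. Fit above only
// implies that it averages the per-batch (accuracy, loss) pairs returned by
// Evaluate; the signature is derived from the call site, but this body and the
// access modifier are assumptions, not the project's actual implementation.
private static Tuple<double, double> ComputeFinalTrainAcc(List<Tuple<double, double>> batchResults)
{
    double accuracySum = 0d;
    double lossSum = 0d;
    foreach (Tuple<double, double> result in batchResults)
    {
        accuracySum += result.Item1;  // Item1 is read as accuracy in Fit (train_acc)
        lossSum += result.Item2;      // Item2 is read as loss in Fit (train_loss)
    }
    int count = Math.Max(batchResults.Count, 1);  // guard against an empty epoch
    return Tuple.Create(accuracySum / count, lossSum / count);
}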
protected override void __Destroy()
{
    m_loader = null;
}