Example #1
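        /// <summary>
        /// Creates the network and the associated trainer for the readout unit, according to the configured network type.
        /// </summary>
        /// <param name="settings">Readout unit configuration parameters</param>
        /// <param name="trainingPredictorsCollection">Collection of the predictors for training</param>
        /// <param name="trainingIdealOutputsCollection">Collection of ideal outputs for training</param>
        /// <param name="rand">Random object to be used</param>
        /// <param name="net">Created network</param>
        /// <param name="trainer">Created trainer</param>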
        private static void CreateNetAndTreainer(ReadoutLayerSettings.ReadoutUnitSettings settings,
                                                 List <double[]> trainingPredictorsCollection,
                                                 List <double[]> trainingIdealOutputsCollection,
                                                 System.Random rand,
                                                 out INonRecurrentNetwork net,
                                                 out INonRecurrentNetworkTrainer trainer
                                                 )
        {
            if (settings.NetType == ReadoutLayerSettings.ReadoutUnitSettings.ReadoutUnitNetworkType.FF)
            {
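                //Feed forward network: the trainer depends on the configured training method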
                FeedForwardNetworkSettings netCfg = (FeedForwardNetworkSettings)settings.NetSettings;
                FeedForwardNetwork         ffn    = new FeedForwardNetwork(trainingPredictorsCollection[0].Length, 1, netCfg);
                net = ffn;
                switch (netCfg.RegressionMethod)
                {
                case FeedForwardNetworkSettings.TrainingMethodType.Linear:
                    trainer = new LinRegrTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, settings.RegressionAttemptEpochs, rand, netCfg.LinRegrTrainerCfg);
                    break;

                case FeedForwardNetworkSettings.TrainingMethodType.Resilient:
                    trainer = new RPropTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, netCfg.RPropTrainerCfg);
                    break;

                default:
                    throw new ArgumentException($"Unsupported regression method {netCfg.RegressionMethod}");
                }
            }
            else
            {
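                //Parallel perceptron network trained by the P-Delta rule trainer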
                ParallelPerceptronSettings netCfg = (ParallelPerceptronSettings)settings.NetSettings;
                ParallelPerceptron         ppn    = new ParallelPerceptron(trainingPredictorsCollection[0].Length, netCfg);
                net     = ppn;
                trainer = new PDeltaRuleTrainer(ppn, trainingPredictorsCollection, trainingIdealOutputsCollection, netCfg.PDeltaRuleTrainerCfg);
            }
            net.RandomizeWeights(rand);
            return;
        }
Example #2
        /// <summary>
        /// Prepares a trained readout unit for the specified output field and task.
        /// </summary>
        /// <param name="taskType">Type of the task</param>
        /// <param name="readoutUnitIdx">Index of the readout unit (informative only)</param>
        /// <param name="outputFieldName">Name of the corresponding output field (informative only)</param>
        /// <param name="foldNum">Current fold number</param>
        /// <param name="numOfFolds">Total number of the folds</param>
        /// <param name="refBinDistr">Reference bin distribution (if task type is Classification)</param>
        /// <param name="trainingPredictorsCollection">Collection of the predictors for training</param>
        /// <param name="trainingIdealOutputsCollection">Collection of ideal outputs for training. Note that the double array always has only one member.</param>
        /// <param name="testingPredictorsCollection">Collection of the predictors for testing</param>
        /// <param name="testingIdealOutputsCollection">Collection of ideal outputs for testing. Note that the double array always has only one member.</param>
        /// <param name="rand">Random object to be used</param>
        /// <param name="readoutUnitSettings">Readout unit configuration parameters</param>
        /// <param name="controller">Regression controller</param>
        /// <param name="controllerUserObject">An user object to be passed to controller</param>
        /// <returns>Prepared readout unit</returns>
        public static ReadoutUnit CreateTrained(CommonEnums.TaskType taskType,
                                                int readoutUnitIdx,
                                                string outputFieldName,
                                                int foldNum,
                                                int numOfFolds,
                                                BinDistribution refBinDistr,
                                                List <double[]> trainingPredictorsCollection,
                                                List <double[]> trainingIdealOutputsCollection,
                                                List <double[]> testingPredictorsCollection,
                                                List <double[]> testingIdealOutputsCollection,
                                                System.Random rand,
                                                ReadoutLayerSettings.ReadoutUnitSettings readoutUnitSettings,
                                                RegressionCallbackDelegate controller = null,
                                                Object controllerUserObject           = null
                                                )
        {
            ReadoutUnit bestReadoutUnit = new ReadoutUnit();
            //Regression attempts
            bool stopRegression = false;

            for (int regrAttemptNumber = 1; regrAttemptNumber <= readoutUnitSettings.RegressionAttempts; regrAttemptNumber++)
            {
                //Create network and trainer
                INonRecurrentNetwork        net;
                INonRecurrentNetworkTrainer trainer;
                CreateNetAndTreainer(readoutUnitSettings,
                                     trainingPredictorsCollection,
                                     trainingIdealOutputsCollection,
                                     rand,
                                     out net,
                                     out trainer
                                     );
                //Iterate training cycles
                for (int epoch = 1; epoch <= readoutUnitSettings.RegressionAttemptEpochs; epoch++)
                {
                    trainer.Iteration();
                    List <double[]> trainingComputedOutputsCollection = null;
                    List <double[]> testingComputedOutputsCollection  = null;
                    //Compute current error statistics after training iteration
                    ReadoutUnit currReadoutUnit = new ReadoutUnit();
                    currReadoutUnit.Network           = net;
                    currReadoutUnit.TrainingErrorStat = net.ComputeBatchErrorStat(trainingPredictorsCollection, trainingIdealOutputsCollection, out trainingComputedOutputsCollection);
                    if (taskType == CommonEnums.TaskType.Classification)
                    {
                        currReadoutUnit.TrainingBinErrorStat = new BinErrStat(refBinDistr, trainingComputedOutputsCollection, trainingIdealOutputsCollection);
                        currReadoutUnit.CombinedBinaryError  = currReadoutUnit.TrainingBinErrorStat.TotalErrStat.Sum;
                        //currReadoutUnit.CombinedBinaryError = currReadoutUnit.TrainingBinErrorStat.ProportionalErr;
                    }
                    currReadoutUnit.CombinedPrecisionError = currReadoutUnit.TrainingErrorStat.ArithAvg;
                    if (testingPredictorsCollection != null && testingPredictorsCollection.Count > 0)
                    {
                        currReadoutUnit.TestingErrorStat       = net.ComputeBatchErrorStat(testingPredictorsCollection, testingIdealOutputsCollection, out testingComputedOutputsCollection);
                        currReadoutUnit.CombinedPrecisionError = Math.Max(currReadoutUnit.CombinedPrecisionError, currReadoutUnit.TestingErrorStat.ArithAvg);
                        if (taskType == CommonEnums.TaskType.Classification)
                        {
                            currReadoutUnit.TestingBinErrorStat = new BinErrStat(refBinDistr, testingComputedOutputsCollection, testingIdealOutputsCollection);
                            currReadoutUnit.CombinedBinaryError = Math.Max(currReadoutUnit.CombinedBinaryError, currReadoutUnit.TestingBinErrorStat.TotalErrStat.Sum);
                            //currReadoutUnit.CombinedBinaryError = Math.Max(currReadoutUnit.CombinedBinaryError, currReadoutUnit.TestingBinErrorStat.ProportionalErr);
                        }
                    }
                    //Current results processing
                    bool better = false, stopTrainingCycle = false;
                    //First initialization of the best result
                    if (bestReadoutUnit.CombinedPrecisionError == -1)
                    {
                        //Adopt current regression results
                        bestReadoutUnit = currReadoutUnit.DeepClone();
                    }
                    //Perform the callback if it is defined
                    RegressionControlOutArgs cbOut = null;
                    if (controller != null)
                    {
                        //Evaluation of the improvement is driven externally
                        RegressionControlInArgs cbIn = new RegressionControlInArgs();
                        cbIn.TaskType                          = taskType;
                        cbIn.ReadoutUnitIdx                    = readoutUnitIdx;
                        cbIn.OutputFieldName                   = outputFieldName;
                        cbIn.FoldNum                           = foldNum;
                        cbIn.NumOfFolds                        = numOfFolds;
                        cbIn.RegrAttemptNumber                 = regrAttemptNumber;
                        cbIn.RegrMaxAttempts                   = readoutUnitSettings.RegressionAttempts;
                        cbIn.Epoch                             = epoch;
                        cbIn.MaxEpochs                         = readoutUnitSettings.RegressionAttemptEpochs;
                        cbIn.TrainingPredictorsCollection      = trainingPredictorsCollection;
                        cbIn.TrainingIdealOutputsCollection    = trainingIdealOutputsCollection;
                        cbIn.TrainingComputedOutputsCollection = trainingComputedOutputsCollection;
                        cbIn.TestingPredictorsCollection       = testingPredictorsCollection;
                        cbIn.TestingIdealOutputsCollection     = testingIdealOutputsCollection;
                        cbIn.TestingComputedOutputsCollection  = testingComputedOutputsCollection;
                        cbIn.CurrReadoutUnit                   = currReadoutUnit;
                        cbIn.BestReadoutUnit                   = bestReadoutUnit;
                        cbIn.UserObject                        = controllerUserObject;
                        cbOut             = controller(cbIn);
                        better            = cbOut.CurrentIsBetter;
                        stopTrainingCycle = cbOut.StopCurrentAttempt;
                        stopRegression    = cbOut.StopRegression;
                    }
                    else
                    {
                        //Default implementation
                        better = IsBetter(taskType, currReadoutUnit, bestReadoutUnit);
                    }
                    //Best?
                    if (better)
                    {
                        //Adopt current regression results
                        bestReadoutUnit = currReadoutUnit.DeepClone();
                    }
                    //Training stop conditions
                    if (stopTrainingCycle || stopRegression)
                    {
                        break;
                    }
                }
                //Regression stop conditions
                if (stopRegression)
                {
                    break;
                }
            }
            //Create statistics of the best network weights
            bestReadoutUnit.OutputWeightsStat = bestReadoutUnit.Network.ComputeWeightsStat();
            return(bestReadoutUnit);
        }
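
The controller callback lets the caller decide when a training epoch counts as an improvement and when the whole regression should stop. Below is a minimal usage sketch, not part of the library: it assumes CreateTrained is a static member of ReadoutUnit (as its signature suggests), that RegressionCallbackDelegate takes a RegressionControlInArgs and returns a RegressionControlOutArgs (as inferred from the call above), and that the out-args members shown are settable. The helper name TrainFold, the placeholder output field name, and the stop threshold are hypothetical.

        //Usage sketch (illustrative only): trains one readout unit on a single fold
        //with a custom regression controller.
        private static ReadoutUnit TrainFold(CommonEnums.TaskType taskType,
                                             int foldNum,
                                             int numOfFolds,
                                             BinDistribution refBinDistr,
                                             List<double[]> trainPredictors,
                                             List<double[]> trainOutputs,
                                             List<double[]> testPredictors,
                                             List<double[]> testOutputs,
                                             ReadoutLayerSettings.ReadoutUnitSettings unitCfg
                                             )
        {
            //Custom controller: adopt the current epoch only when its combined precision error improves
            //and stop the whole regression once the best error falls below a (hypothetical) threshold.
            RegressionCallbackDelegate controller = (RegressionControlInArgs cbIn) =>
            {
                RegressionControlOutArgs cbOut = new RegressionControlOutArgs();
                cbOut.CurrentIsBetter    = cbIn.CurrReadoutUnit.CombinedPrecisionError < cbIn.BestReadoutUnit.CombinedPrecisionError;
                cbOut.StopCurrentAttempt = false;
                cbOut.StopRegression     = cbIn.BestReadoutUnit.CombinedPrecisionError < 1e-6;
                return cbOut;
            };
            return ReadoutUnit.CreateTrained(taskType,
                                             0,              //readoutUnitIdx (informative only)
                                             "OutputField",  //outputFieldName (placeholder)
                                             foldNum,
                                             numOfFolds,
                                             refBinDistr,
                                             trainPredictors,
                                             trainOutputs,
                                             testPredictors,
                                             testOutputs,
                                             new System.Random(0),
                                             unitCfg,
                                             controller);
        }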