Example #1
 /// <summary>
 /// Creates new network and associated trainer.
 /// </summary>
 /// <param name="settings">Non-recurrent-network settings</param>
 /// <param name="trainingInputVectors">Collection of training input samples</param>
 /// <param name="trainingOutputVectors">Collection of training output (desired) samples</param>
 /// <param name="rand">Random object to be used</param>
 /// <param name="net">Created network</param>
 /// <param name="trainer">Created associated trainer</param>
 public static void CreateNetworkAndTrainer(INonRecurrentNetworkSettings settings,
                                             List<double[]> trainingInputVectors,
                                             List<double[]> trainingOutputVectors,
                                            Random rand,
                                            out INonRecurrentNetwork net,
                                            out INonRecurrentNetworkTrainer trainer
                                            )
 {
     if (IsFF(settings))
     {
         //Feed forward network
         FeedForwardNetworkSettings netCfg = (FeedForwardNetworkSettings)settings;
         FeedForwardNetwork         ffn    = new FeedForwardNetwork(trainingInputVectors[0].Length, trainingOutputVectors[0].Length, netCfg);
         net = ffn;
         if (netCfg.TrainerCfg.GetType() == typeof(QRDRegrTrainerSettings))
         {
             trainer = new QRDRegrTrainer(ffn, trainingInputVectors, trainingOutputVectors, (QRDRegrTrainerSettings)netCfg.TrainerCfg, rand);
         }
         else if (netCfg.TrainerCfg.GetType() == typeof(RidgeRegrTrainerSettings))
         {
             trainer = new RidgeRegrTrainer(ffn, trainingInputVectors, trainingOutputVectors, (RidgeRegrTrainerSettings)netCfg.TrainerCfg);
         }
         else if (netCfg.TrainerCfg.GetType() == typeof(ElasticRegrTrainerSettings))
         {
             trainer = new ElasticRegrTrainer(ffn, trainingInputVectors, trainingOutputVectors, (ElasticRegrTrainerSettings)netCfg.TrainerCfg);
         }
         else if (netCfg.TrainerCfg.GetType() == typeof(RPropTrainerSettings))
         {
             trainer = new RPropTrainer(ffn, trainingInputVectors, trainingOutputVectors, (RPropTrainerSettings)netCfg.TrainerCfg, rand);
         }
         else
         {
             throw new ArgumentException($"Unknown trainer {netCfg.TrainerCfg}");
         }
     }
     else if (IsPP(settings))
     {
         //Parallel perceptron network
         //Check output
         if (trainingOutputVectors[0].Length != 1)
         {
             throw new InvalidOperationException("Can't create ParallelPerceptron. Only single output value is allowed.");
         }
         ParallelPerceptronSettings netCfg = (ParallelPerceptronSettings)settings;
         ParallelPerceptron         ppn    = new ParallelPerceptron(trainingInputVectors[0].Length, netCfg);
         net     = ppn;
         trainer = new PDeltaRuleTrainer(ppn, trainingInputVectors, trainingOutputVectors, netCfg.PDeltaRuleTrainerCfg, rand);
     }
     else
     {
         throw new InvalidOperationException("Unknown network settings");
     }
     net.RandomizeWeights(rand);
     return;
 }
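
For orientation, a minimal caller-side sketch for Example #1 follows. It is not taken from the library: the XOR-style sample data, the BuildSettings() helper, and the variable names are hypothetical placeholders, and the snippet assumes the usual using directives (System, System.Collections.Generic, and the network namespaces) are already in place.

 // Hypothetical caller of CreateNetworkAndTrainer (illustrative sketch only).
 // BuildSettings() stands in for whatever code produces the concrete
 // INonRecurrentNetworkSettings (e.g. a FeedForwardNetworkSettings whose
 // TrainerCfg is an RPropTrainerSettings).
 List<double[]> trainingInputs = new List<double[]>
 {
     new double[] { 0d, 0d },
     new double[] { 0d, 1d },
     new double[] { 1d, 0d },
     new double[] { 1d, 1d }
 };
 List<double[]> trainingOutputs = new List<double[]>
 {
     new double[] { 0d },
     new double[] { 1d },
     new double[] { 1d },
     new double[] { 0d }
 };
 Random rand = new Random(0);
 INonRecurrentNetworkSettings settings = BuildSettings();

 CreateNetworkAndTrainer(settings,
                         trainingInputs,
                         trainingOutputs,
                         rand,
                         out INonRecurrentNetwork net,
                         out INonRecurrentNetworkTrainer trainer
                         );

 // At this point net has randomized weights and trainer is bound to the
 // training vectors; the caller then drives the trainer's iteration loop
 // (trainer API not shown here).
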
Example #2
 private static void CreateNetAndTreainer(ReadoutLayerSettings.ReadoutUnitSettings settings,
                                           List<double[]> trainingPredictorsCollection,
                                           List<double[]> trainingIdealOutputsCollection,
                                          Random rand,
                                          out INonRecurrentNetwork net,
                                          out INonRecurrentNetworkTrainer trainer
                                          )
 {
     if (settings.NetType == ReadoutLayerSettings.ReadoutUnitSettings.ReadoutUnitNetworkType.FF)
     {
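         //Feed forward network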
         FeedForwardNetworkSettings netCfg = (FeedForwardNetworkSettings)settings.NetSettings;
         FeedForwardNetwork         ffn    = new FeedForwardNetwork(trainingPredictorsCollection[0].Length, 1, netCfg);
         net = ffn;
         if (netCfg.TrainerCfg.GetType() == typeof(QRDRegrTrainerSettings))
         {
             trainer = new QRDRegrTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, (QRDRegrTrainerSettings)netCfg.TrainerCfg, rand);
         }
         else if (netCfg.TrainerCfg.GetType() == typeof(RidgeRegrTrainerSettings))
         {
             trainer = new RidgeRegrTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, (RidgeRegrTrainerSettings)netCfg.TrainerCfg, rand);
         }
         else if (netCfg.TrainerCfg.GetType() == typeof(ElasticRegrTrainerSettings))
         {
             trainer = new ElasticRegrTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, (ElasticRegrTrainerSettings)netCfg.TrainerCfg);
         }
         else if (netCfg.TrainerCfg.GetType() == typeof(RPropTrainerSettings))
         {
             trainer = new RPropTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, (RPropTrainerSettings)netCfg.TrainerCfg, rand);
         }
         else
         {
             throw new ArgumentException($"Unknown trainer {netCfg.TrainerCfg}");
         }
     }
     else
     {
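         //Parallel perceptron network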
         ParallelPerceptronSettings netCfg = (ParallelPerceptronSettings)settings.NetSettings;
         ParallelPerceptron         ppn    = new ParallelPerceptron(trainingPredictorsCollection[0].Length, netCfg);
         net     = ppn;
         trainer = new PDeltaRuleTrainer(ppn, trainingPredictorsCollection, trainingIdealOutputsCollection, netCfg.PDeltaRuleTrainerCfg, rand);
     }
     net.RandomizeWeights(rand);
     return;
 }
Example #3
 private static void CreateNetAndTreainer(ReadoutLayerSettings.ReadoutUnitSettings settings,
                                          List<double[]> trainingPredictorsCollection,
                                          List<double[]> trainingIdealOutputsCollection,
                                          Random rand,
                                          out INonRecurrentNetwork net,
                                          out INonRecurrentNetworkTrainer trainer
                                          )
 {
     if (settings.NetType == ReadoutLayerSettings.ReadoutUnitSettings.ReadoutUnitNetworkType.FF)
     {
         //Feed forward network
         FeedForwardNetworkSettings netCfg = (FeedForwardNetworkSettings)settings.NetSettings;
         FeedForwardNetwork         ffn    = new FeedForwardNetwork(trainingPredictorsCollection[0].Length, 1, netCfg);
         net = ffn;
         switch (netCfg.RegressionMethod)
         {
         case FeedForwardNetworkSettings.TrainingMethodType.Linear:
             trainer = new LinRegrTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, settings.RegressionAttemptEpochs, rand, netCfg.LinRegrTrainerCfg);
             break;

         case FeedForwardNetworkSettings.TrainingMethodType.Resilient:
             trainer = new RPropTrainer(ffn, trainingPredictorsCollection, trainingIdealOutputsCollection, netCfg.RPropTrainerCfg);
             break;

         default:
             throw new ArgumentException($"Unsupported regression method {netCfg.RegressionMethod}");
         }
     }
     else
     {
         //Parallel perceptron network
         ParallelPerceptronSettings netCfg = (ParallelPerceptronSettings)settings.NetSettings;
         ParallelPerceptron         ppn    = new ParallelPerceptron(trainingPredictorsCollection[0].Length, netCfg);
         net     = ppn;
         trainer = new PDeltaRuleTrainer(ppn, trainingPredictorsCollection, trainingIdealOutputsCollection, netCfg.PDeltaRuleTrainerCfg);
     }
     net.RandomizeWeights(rand);
     return;
 }