Example #1
0
        /// <summary>
        /// Entry point: loads the 60/40 CUP train/test split (standardized),
        /// configures an Adamax run (100k epochs, batch size 10, L2 regularization,
        /// 100 hidden units), and trains via <c>LastTrain</c>.
        /// (Earlier commented-out screening / SGD / Adam experiment code was
        /// removed; recover it from version control if needed.)
        /// </summary>
        /// <param name="args">Command-line arguments (unused).</param>
        static void Main(string[] args)
        {
            // Load and standardize the CUP splits: 10 input columns, 2 target columns.
            // NOTE(review): hard-coded absolute paths — consider moving these to
            // settings or command-line arguments.
            AA1_MLP.DataManagers.CupDataManager dm = new AA1_MLP.DataManagers.CupDataManager();
            DataSet trainDS = dm.LoadData(@"C:\Users\Ronin\Documents\monks\Monks\UsedFiles\TrainValSplits\60percenttrain.txt", 10, 2, standardize: true);
            DataSet testDS = dm.LoadData(@"C:\Users\Ronin\Documents\monks\Monks\UsedFiles\TrainValSplits\60percenttest.txt", 10, 2, standardize: true);

            Console.WriteLine("Training Adamax");

            // Fix: the original constructed a throwaway Adam instance that was
            // immediately overwritten by Adamax — instantiate Adamax directly.
            IOptimizer adamtrainer = new Adamax();

            AdamParams adampassedParams = new AdamParams();
            adampassedParams.numberOfEpochs = 100000;
            adampassedParams.batchSize = 10;
            adampassedParams.trainingSet = trainDS;
            // NOTE(review): the test split doubles as the validation set here —
            // fine for monitoring the loss curve, but it leaks test data into
            // any model selection based on it.
            adampassedParams.validationSet = testDS;
            adampassedParams.learningRate = 0.001;
            adampassedParams.regularization = Regularizations.L2;
            adampassedParams.regularizationRate = 0.001;
            adampassedParams.NumberOfHiddenUnits = 100;
            adampassedParams.parallelize = false;

            // NOTE(review): the first argument is the test set, matching the
            // original call — confirm LastTrain expects the evaluation set here
            // rather than the training set.
            LastTrain(testDS, adampassedParams, adamtrainer, "100kadamax", 1);
        }
Example #2
0
        /// <summary>
        /// Entry point: trains an Encog 10-100(tanh)-2(linear) regression network
        /// on the standardized CUP split with SGD + Nesterov momentum, logging
        /// per-epoch train/validation error to a CSV file, then serializes the
        /// trained network and reports final MEE/MSE on the test set.
        /// </summary>
        /// <param name="args">Command-line arguments (unused).</param>
        static void Main(string[] args)
        {
            // Load CUP splits: train via the local LoadData helper, test via the
            // data manager (10 input columns, 2 target columns).
            // NOTE(review): hard-coded absolute paths — consider settings/args.
            AA1_MLP.DataManagers.CupDataManager dm = new AA1_MLP.DataManagers.CupDataManager();
            var trainDS = LoadData("D:\\dropbox\\Dropbox\\Master Course\\SEM-3\\ML\\CM_CUP_Datasets\\Standardized60percenttrain.txt");
            var testDS = dm.LoadData("D:\\dropbox\\Dropbox\\Master Course\\SEM-3\\ML\\CM_CUP_Datasets\\Standardized60percenttest.txt", 10, 2);

            // Build the network: 10 inputs -> 100 tanh hidden units -> 2 linear outputs.
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(10));
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, 100));
            network.AddLayer(new BasicLayer(new ActivationLinear(), true, 2));
            network.Structure.FinalizeStructure();
            network.Reset();

            var train = new Encog.Neural.Networks.Training.Propagation.SGD.StochasticGradientDescent(network, trainDS);
            var update = new Encog.Neural.Networks.Training.Propagation.SGD.Update.NesterovUpdate();

            // Fix: attach the Nesterov rule to the trainer. The original only
            // called update.Init(train) without assigning it, so the trainer
            // kept running its default update rule and the NesterovUpdate was
            // never used. TODO(review): confirm UpdateRule is the attachment
            // point in this Encog version.
            train.UpdateRule = update;
            update.Init(train);

            train.LearningRate = 0.001;
            train.Momentum = 0.5;
            train.L2 = 0.001;
            train.BatchSize = 10;

            // Time the whole training loop.
            var watch = System.Diagnostics.Stopwatch.StartNew();
            int epoch = 0;

            // Per-epoch log: "<trainError>,<validationMSE>" per line.
            using (System.IO.StreamWriter file =
                       new System.IO.StreamWriter(@"C:\Users\ahmad\Documents\monks\Monks\AA1_Monks\AA1_CUP\bin\Release\encog.txt"))
            {
                do
                {
                    train.Iteration();

                    // Evaluate on the held-out split after every epoch.
                    double valMEE = 0.0, valMSE = 0.0;
                    TesterCUPRegression(testDS, network, out valMEE, out valMSE);

                    Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error + " valMSE:" + valMSE);
                    file.WriteLine("{0},{1}", train.Error, valMSE);
                    epoch++;
                    // Stop after 5000 epochs or if the error bottoms out at 0.
                } while ((epoch < 5000) && (train.Error > 0));
            }

            watch.Stop();
            var elapsedMs = watch.ElapsedMilliseconds;
            Console.WriteLine("elapsed Time:{0} ms", elapsedMs);

            // SECURITY(review): BinaryFormatter is insecure and deprecated
            // (removed in .NET 9). Deserializing these model files from an
            // untrusted source is dangerous; prefer a safe serializer. Left
            // as-is here to keep the on-disk model format unchanged.
            var serializer = new BinaryFormatter();
            using (var s = new FileStream(@"C:\Users\ahmad\Documents\monks\Monks\AA1_Monks\AA1_CUP\bin\Release\encog.n", FileMode.Create))
            {
                serializer.Serialize(s, network);
            }

            // Final evaluation on the test split.
            double MEE = 0.0, MSE = 0.0;
            TesterCUPRegression(testDS, network, out MEE, out MSE);
            Console.WriteLine("MEE{0},MSE{1}", MEE, MSE);
        }