Example #1
File: Program.cs Project: lilanpei/Monks
        static void Main(string[] args)
        {
            // Loading and parsing the CUP dataset

            /* CupDataManager dm = new CupDataManager();
             * DataSet wholeSet = dm.LoadData(Properties.Settings.Default.TrainingSetLocation, 10, 2, permute: true, seed: 1);
             * List<double> momentums = new List<double> { 0, 0.5 };
             * List<double> learningRates = new List<double> { 0.005, 0.01 };
             * List<double> regularizationRates = new List<double> { 0, 0.001 };
             * List<int> numberOfHiddenNeurons = new List<int> { 80 };
             * // Screening SGD+Momentum experiments
             * GradientDescentParams passedParams = new GradientDescentParams();
             * passedParams.nestrov = false;
             * passedParams.resilient = false;
             * passedParams.resilientUpdateAccelerationRate = 0.3;
             * passedParams.resilientUpdateSlowDownRate = 0.1;
             * new KFoldValidation().ScreenGD(wholeSet, 5, momentums, learningRates, regularizationRates, numberOfHiddenNeurons, passedParams, 5000); */
            // Screening Adam
            //new KFoldValidation().ScreenAdam(wholeSet, 5, learningRates, regularizationRates, numberOfHiddenNeurons, 5000);


            //ReportHowCloseWeightsAcquiredFromDifferentSeedsAre();


            // Load the pre-split CUP training and test sets (10 input features, 2 targets), standardized
            AA1_MLP.DataManagers.CupDataManager dm = new AA1_MLP.DataManagers.CupDataManager();
            DataSet trainDS = dm.LoadData(@"C:\Users\Ronin\Documents\monks\Monks\UsedFiles\TrainValSplits\60percenttrain.txt", 10, 2, standardize: true);
            DataSet testDS  = dm.LoadData(@"C:\Users\Ronin\Documents\monks\Monks\UsedFiles\TrainValSplits\60percenttest.txt", 10, 2, standardize: true);

            Console.WriteLine("Training Adamax");
            AdamParams adampassedParams = new AdamParams();
            IOptimizer adamtrainer      = new Adam();

            //adampassedParams.numberOfEpochs = 10000;
            //adampassedParams.batchSize = 10;
            //adampassedParams.trainingSet = trainDS;
            //adampassedParams.validationSet = testDS;
            //adampassedParams.learningRate = 0.001;
            //adampassedParams.regularization = Regularizations.L2;
            //adampassedParams.regularizationRate = 0.001;
            //adampassedParams.NumberOfHiddenUnits = 100;


            //adampassedParams.parallelize = false;
            //LastTrain(testDS, adampassedParams, adamtrainer, "10kt100adam", 1);

            adamtrainer = new Adamax(); // switch the optimizer to Adamax for the actual run

            // Adamax hyperparameters: 100k epochs, mini-batch size 10, L2 regularization
            adampassedParams.numberOfEpochs      = 100000;
            adampassedParams.batchSize           = 10;
            adampassedParams.trainingSet         = trainDS;
            adampassedParams.validationSet       = testDS;
            adampassedParams.learningRate        = 0.001;
            adampassedParams.regularization      = Regularizations.L2;
            adampassedParams.regularizationRate  = 0.001;
            adampassedParams.NumberOfHiddenUnits = 100;

            //adampassedParams.PrintLoss = true;
            adampassedParams.parallelize = false;
            LastTrain(testDS, adampassedParams, adamtrainer, "100kadamax", 1);



            /*Console.WriteLine("Training Adam");
             * AdamParams adampassedParams = new AdamParams();
             * IOptimizer adamtrainer = new Adamax();
             *
             * adampassedParams.numberOfEpochs = 100;
             * adampassedParams.batchSize = 10;
             * adampassedParams.trainingSet = trainDS;
             * adampassedParams.validationSet = testDS;
             * adampassedParams.learningRate = 0.001;
             * adampassedParams.regularization = Regularizations.L2;
             * adampassedParams.regularizationRate = 0.001;
             * adampassedParams.NumberOfHiddenUnits = 100;
             * adampassedParams.t = 1000000000;
             *
             * adampassedParams.parallelize = false;
             * LastTrain(testDS, adampassedParams, adamtrainer, "100adam", 1);*/
            /* Console.WriteLine("training SGD");
             * GradientDescentParams passedParams = new GradientDescentParams();
             * Gradientdescent trainer = new Gradientdescent();
             * passedParams.numberOfEpochs = 100;
             * passedParams.batchSize = 10;
             * passedParams.trainingSet = trainDS;
             * passedParams.validationSet = testDS;
             * passedParams.learningRate = 0.001;
             * passedParams.regularization = Regularizations.L2;
             * passedParams.regularizationRate = 0.001;
             * passedParams.nestrov = true;
             * passedParams.resilient = false;
             * passedParams.resilientUpdateAccelerationRate = 2;
             * passedParams.resilientUpdateSlowDownRate = 0.5;
             * passedParams.momentum = 0.5;
             * passedParams.NumberOfHiddenUnits = 100;
             * passedParams.parallelize = true;
             * LastTrain(testDS, passedParams, trainer, "5kepochsprofiling_seq", 1);
             *
             * Console.WriteLine();*/


            /*
             * List<int> seeds = new List<int>() { 1, 15, 40, 4, 73, 2 };
             *
             * foreach (var seed in seeds)
             * {
             *   Console.WriteLine("Seed:{0}", seed);
             *
             *   // Alternative: AdamParams passedParams = new AdamParams();
             *   //              IOptimizer trainer = new Adam();
             *   Console.WriteLine("Training SGD");
             *   GradientDescentParams passedParams = new GradientDescentParams();
             *   Gradientdescent trainer = new Gradientdescent();
             *   passedParams.numberOfEpochs = 20000;
             *   passedParams.batchSize = 10;
             *   passedParams.trainingSet = trainDS;
             *   passedParams.validationSet = testDS;
             *   passedParams.learningRate = 0.001;
             *   passedParams.regularization = Regularizations.L2;
             *   passedParams.regularizationRate = 0.001;
             *   passedParams.nestrov = true;
             *   passedParams.resilient = false;
             *   passedParams.resilientUpdateAccelerationRate = 2;
             *   passedParams.resilientUpdateSlowDownRate = 0.5;
             *
             *   passedParams.momentum = 0.5;
             *   passedParams.NumberOfHiddenUnits = 100;
             *
             *   LastTrain(testDS, passedParams, trainer, "20kseed_" + seed + "_", seed);
             * }
             */
            /* Console.WriteLine("Training Adam");
             * AdamParams adampassedParams = new AdamParams();
             * IOptimizer adamtrainer = new Adam();
             *
             * adampassedParams.numberOfEpochs = 30000;
             * adampassedParams.batchSize = 50;
             * adampassedParams.trainingSet = trainDS;
             * adampassedParams.validationSet = testDS;
             * adampassedParams.learningRate = 0.001;
             * adampassedParams.regularization = Regularizations.L2;
             * adampassedParams.regularizationRate = 0.001;
             * adampassedParams.NumberOfHiddenUnits = 100;
             *
             * LastTrain(testDS, adampassedParams, adamtrainer, "BS50_30kepochs_100_final_adam_hdn", 1);
             */


            // Loading and parsing the CUP test dataset and producing predictions

            //CupDataManager dm = new CupDataManager();
            // Loading the test dataset
            //DataSet TestSet = dm.LoadData(Properties.Settings.Default.TestSetLocation, 10, reportOutput: false);
            // Loading the trained model
            //var n = AA1_MLP.Utilities.ModelManager.LoadNetwork("Final_hidn18_reg0.01_mo0.5_lr9E-06_model.AA1");

            //double MEE = 0;
            // Applying the model on the test data
            //var predictions = ModelManager.GeneratorCUP(TestSet, n);
            // Writing the results
            //File.WriteAllText("OMG_LOC-OSM2-TS.txt", string.Join("\n", predictions.Select(s => string.Join(",", s))));
        }
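
For reference, the commented-out ScreenGD call above sweeps the full Cartesian product of the hyperparameter lists with 5-fold cross-validation. A minimal, self-contained sketch of that grid expansion (plain C# only; KFoldValidation and the rest of the AA1_MLP API are not reproduced, and the printed format is illustrative):

using System;
using System.Collections.Generic;

class GridDemo
{
    static void Main()
    {
        // Hyperparameter lists mirroring the screening block above
        var momentums = new List<double> { 0, 0.5 };
        var learningRates = new List<double> { 0.005, 0.01 };
        var regularizationRates = new List<double> { 0, 0.001 };
        var numberOfHiddenNeurons = new List<int> { 80 };

        // ScreenGD-style sweep: each combination is one k-fold experiment
        foreach (var momentum in momentums)
            foreach (var lr in learningRates)
                foreach (var reg in regularizationRates)
                    foreach (var hidden in numberOfHiddenNeurons)
                        Console.WriteLine($"momentum={momentum} lr={lr} reg={reg} hidden={hidden}");

        // 2 * 2 * 2 * 1 = 8 configurations, each validated with 5 folds
    }
}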
Example #2
File: Network.cs Project: xuan2261/XNet
        public void InitNetwork(ECostType costType, CostSettings costSettings, EOptimizerType optimizerType, OptimizerSettings optimizerSettings)
        {
            Utility.Dims InShape;
            Utility.Dims OutShape;
            Utility.Dims WShape;

            // Initialize activations, biases, and weights for layers 1..N-1
            for (int i = 1; i < Layers.Count; i++)
            {
                // Activation vector a_i
                Data.Data["a" + i.ToString()] = new Matrix(Layers[i].NCount, 1);
                InShape = new Utility.Dims(Layers[i].NCount, 1);

                // Bias vector b_i, drawn from a Gaussian distribution
                Data.Data["b" + i.ToString()] = Matrix.RandomMatrix(Layers[i].NCount, 1, 1, EDistrubution.Gaussian);

                OutShape = new Utility.Dims(Layers[i].NCount, 1);

                // Weight matrix W_i connecting layer i-1 to layer i
                Data.Data["W" + i.ToString()] = Matrix.RandomMatrix(Layers[i - 1].NCount, Layers[i].NCount, 1, EDistrubution.Gaussian);
                WShape = new Utility.Dims(Layers[i - 1].NCount, Layers[i].NCount);

                Layers[i].SetSettings(new LayerSettings(InShape, OutShape, WShape));
            }

            Data.Data["a0"] = new Matrix(Layers[0].NCount, 1);
            InShape         = new Utility.Dims(Layers[0].NCount, 1);

            Data.Data["b0"] = new Matrix(Layers[0].NCount, 1);
            OutShape        = new Utility.Dims(Layers[0].NCount, 1);

            Data.Data["W0"] = new Matrix(Layers[0].NCount * Layers[1].NCount, Layers[1].NCount);
            WShape          = new Utility.Dims(Layers[0].NCount * Layers[1].NCount, Layers[1].NCount);

            Layers[0].SetSettings(new LayerSettings(InShape, OutShape, WShape));

            switch (costType)
            {
            case ECostType.Invalid:
                throw new ArgumentException("Invalid Cost Function Selected!");

            case ECostType.CrossEntropyCost:
                CostFunction = new CrossEntropyCost((CrossEntropyCostSettings)costSettings);
                break;

            case ECostType.ExponentionalCost:
                CostFunction = new ExponentionalCost((ExponentionalCostSettings)costSettings);
                break;

            case ECostType.GeneralizedKullbackLeiblerDivergence:
                CostFunction = new GeneralizedKullbackLeiblerDivergence((GeneralizedKullbackLeiblerDivergenceSettings)costSettings);
                break;

            case ECostType.HellingerDistance:
                CostFunction = new HellingerDistance((HellingerDistanceSettings)costSettings);
                break;

            case ECostType.ItakuraSaitoDistance:
                CostFunction = new ItakuraSaitoDistance((ItakuraSaitoDistanceSettings)costSettings);
                break;

            case ECostType.KullbackLeiblerDivergence:
                CostFunction = new KullbackLeiblerDivergence((KullbackLeiblerDivergenceSettings)costSettings);
                break;

            case ECostType.QuadraticCost:
                CostFunction = new QuadraticCost((QuadraticCostSettings)costSettings);
                break;

            default:
                throw new ArgumentException("Invalid Cost Function Selected!");
            }

            switch (optimizerType)
            {
            case EOptimizerType.Invalid:
                throw new ArgumentException("Invalid Optimizer Function Selected!");

            case EOptimizerType.AdaDelta:
                OptimizerFunction = new AdaDelta((AdaDeltaSettings)optimizerSettings);
                break;

            case EOptimizerType.AdaGrad:
                OptimizerFunction = new AdaGrad((AdaGradSettings)optimizerSettings);
                break;

            case EOptimizerType.Adam:
                OptimizerFunction = new Adam((AdamSettings)optimizerSettings);
                break;

            case EOptimizerType.Adamax:
                OptimizerFunction = new Adamax((AdamaxSettings)optimizerSettings);
                break;

            case EOptimizerType.GradientDescent:
                OptimizerFunction = new GradientDescent((GradientDescentSettings)optimizerSettings);
                break;

            case EOptimizerType.Momentum:
                OptimizerFunction = new Momentum((MomentumSettings)optimizerSettings);
                break;

            case EOptimizerType.Nadam:
                OptimizerFunction = new Nadam((NadamSettings)optimizerSettings);
                break;

            case EOptimizerType.NesterovMomentum:
                OptimizerFunction = new NesterovMomentum((NesterovMomentumSettings)optimizerSettings);
                break;

            case EOptimizerType.RMSProp:
                OptimizerFunction = new RMSProp((RMSPropSettings)optimizerSettings);
                break;

            default:
                throw new ArgumentException("Invalid Optimizer Function Selected!");
            }
        }
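
Both switch statements follow the same factory pattern: an enum value selects the concrete strategy class, and the base-typed settings object is downcast to the matching settings subtype. A minimal, self-contained illustration of that pattern (all types below are invented for the example; XNet's real hierarchy is larger):

using System;

// Invented miniature hierarchy, mirroring CostSettings/cost classes in shape only
abstract class CostSettings { }
sealed class QuadraticCostSettings : CostSettings { public double Scale = 1.0; }

interface ICost { double Evaluate(double predicted, double expected); }

sealed class QuadraticCost : ICost
{
    private readonly QuadraticCostSettings _settings;
    public QuadraticCost(QuadraticCostSettings settings) { _settings = settings; }
    public double Evaluate(double predicted, double expected)
        => _settings.Scale * 0.5 * (predicted - expected) * (predicted - expected);
}

enum ECostType { Invalid, QuadraticCost }

static class CostFactory
{
    // Same shape as InitNetwork's switch: enum -> concrete cost, settings downcast.
    // An InvalidCastException here means the settings type does not match the enum.
    public static ICost Create(ECostType type, CostSettings settings) => type switch
    {
        ECostType.QuadraticCost => new QuadraticCost((QuadraticCostSettings)settings),
        _ => throw new ArgumentException("Invalid Cost Function Selected!"),
    };
}

class Demo
{
    static void Main()
    {
        ICost cost = CostFactory.Create(ECostType.QuadraticCost, new QuadraticCostSettings());
        Console.WriteLine(cost.Evaluate(1.5, 1.0)); // prints 0.125
    }
}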