Code example #1
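This example probes whether the cost surface looks convex along individual weight directions: a fixed 10-100-2 network (tanh hidden layer, Xavier initialization) is built on the CUP training set, and 100 one-dimensional cost slices are written to xcurve{i}.txt files.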
        private static void CheckConvexity(string dsLocation = "D:\\dropbox\\Dropbox\\Master Course\\SEM-3\\ML\\CM_CUP_Datasets\\60percenttrain.txt")
        {
            AA1_MLP.DataManagers.CupDataManager dm = new AA1_MLP.DataManagers.CupDataManager();
            DataSet trainDS = dm.LoadData(dsLocation, 10, 2, standardize: true);

            // Alternatively, a previously saved network could be loaded instead of building a new one:
            // var nOriginal = ModelManager.LoadNetwork(@"C:\Users\ahmad\Documents\monks\Monks\5kitr_mo0.5_100_final_sgdnestrov_hdn100_lr0.001_reg0.001.n");
            var nOriginal = new Network(new List<Layer>()
            {
                new Layer(new ActivationIdentity(), true, 10),   // input layer: 10 features
                new Layer(new ActivationTanh(), true, 100),      // hidden layer: 100 tanh units
                //new Layer(new ActivationLeakyRelu(), true, 40),
                new Layer(new ActivationIdentity(), false, 2),   // output layer: 2 linear outputs
            }, false, AA1_MLP.Enums.WeightsInitialization.Xavier);
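            // Each GeneratePlot call presumably sweeps a single weight over [-10, 10] in steps
            // of 0.01 and returns (weight value, cost) pairs, which are written to disk sorted
            // by weight value.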

            for (int i = 0; i < 100; i++)
            {
                Console.WriteLine(i);

                List<double[]> weightValVsCost = GeneratePlot(trainDS, nOriginal, -10, 10, 0.01);

                File.WriteAllLines(@"xcurve" + i + ".txt", weightValVsCost.OrderBy(s => s[0]).Select(x => string.Join(",", x)).ToArray());
            }
            Console.WriteLine();
        }
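GeneratePlot itself is not included in this snippet. The sketch below only illustrates the underlying idea of a one-dimensional cost slice: sweep one scalar weight across a range and record the cost at each value. The costOfWeight delegate and the SweepWeight name are hypothetical stand-ins, not part of the AA1_MLP API.

        // Hypothetical sketch of the one-dimensional cost-slice idea behind GeneratePlot.
        // costOfWeight(w) is assumed to set the chosen weight to w and return the resulting
        // cost on the training set; the actual AA1_MLP implementation may differ.
        private static List<double[]> SweepWeight(Func<double, double> costOfWeight,
                                                  double min, double max, double step)
        {
            var curve = new List<double[]>();
            for (double w = min; w <= max; w += step)
            {
                curve.Add(new[] { w, costOfWeight(w) });
            }
            return curve;
        }

CheckConvexity above would then plot each (weight value, cost) curve to see whether the slice looks convex.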
Code example #2
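This example trains the final model for ML-CUP17: it loads the full training file and the blind test file, runs 5000 epochs of mini-batch gradient descent with Nesterov momentum and L2 regularization on a 10-100-2 network, records the timing and the learning curve, saves the trained network, and writes the blind-test predictions to OMG_LOC-OSM2-TS.txt.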
        /// <summary>
        /// Trains the final model on the ML-CUP17 training set and writes the predictions
        /// for the blind test set (the final CUP results).
        /// </summary>
        private static void TrainAndPRoduceFinalResult()
        {
            AA1_MLP.DataManagers.CupDataManager dm = new AA1_MLP.DataManagers.CupDataManager();
            DataSet trainDS     = dm.LoadData(@"D:\dropbox\Dropbox\Master Course\SEM-3\ML\CM_CUP_Datasets\ML-17-PRJ lecture  package-20171225\ML-CUP17-TR.csv", 10, 2, skip: 1, standardize: true);
            DataSet FinalTestDS = dm.LoadData(@"D:\dropbox\Dropbox\Master Course\SEM-3\ML\CM_CUP_Datasets\ML-17-PRJ lecture  package-20171225\ML-CUP17-TS.csv", 10, skip: 1, reportOsutput: false, standardize: true);



            // Alternative optimizer, kept here commented out:
            // AdamParams passedParams = new AdamParams();
            // IOptimizer trainer = new Adam();
            GradientDescentParams passedParams = new GradientDescentParams();
            Gradientdescent       trainer      = new Gradientdescent();
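            // Training hyper-parameters: 5000 epochs of mini-batch gradient descent (batch size 10)
            // with Nesterov momentum 0.5, learning rate 0.001, and L2 regularization (rate 0.001);
            // the resilient (Rprop-style) update options are set but disabled.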

            passedParams.numberOfEpochs     = 5000;
            passedParams.batchSize          = 10;
            passedParams.trainingSet        = trainDS;
            passedParams.learningRate       = 0.001;
            passedParams.regularization     = Regularizations.L2;
            passedParams.regularizationRate = 0.001;
            passedParams.nestrov            = true;
            passedParams.resilient          = false;
            passedParams.resilientUpdateAccelerationRate = 2;
            passedParams.resilientUpdateSlowDownRate     = 0.5;

            passedParams.momentum            = 0.5;
            passedParams.NumberOfHiddenUnits = 100;
            passedParams.trueThreshold       = null;

            string path = "cupTrain" + passedParams.NumberOfHiddenUnits + "_lr" + passedParams.learningRate + "_reg" + passedParams.regularizationRate;
            //building the architecture
            Network n = new Network(new List<Layer>()
            {
                new Layer(new ActivationIdentity(), true, 10),                              // input layer: 10 features
                new Layer(new ActivationTanh(), true, passedParams.NumberOfHiddenUnits),    // hidden layer: tanh units
                //new Layer(new ActivationLeakyRelu(), true, 40),
                new Layer(new ActivationIdentity(), false, 2),                              // output layer: 2 linear outputs
            }, false, AA1_MLP.Enums.WeightsInitialization.Xavier);

            passedParams.network = n;
            var             watch         = System.Diagnostics.Stopwatch.StartNew();
            List<double[]>  learningCurve = trainer.Train(passedParams);

            watch.Stop();
            var elapsedMs = watch.ElapsedMilliseconds;

            Console.WriteLine("Elapsed time: {0} ms", elapsedMs);



            File.WriteAllText(path + ".txt", string.Join("\n", learningCurve.Select(s => string.Join(",", s))));


            ModelManager.SaveNetowrk(n, path + ".n");

            var predictions = ModelManager.GeneratorCUP(FinalTestDS, n);

            File.WriteAllText("OMG_LOC-OSM2-TS.txt", string.Join("\n", predictions.Select(s => string.Join(",", s))));
        }
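After training, the saved model can presumably be reloaded and reused for prediction with the same calls that appear in these snippets (ModelManager.LoadNetwork in example #1, ModelManager.GeneratorCUP above). A minimal sketch, assuming those two methods keep the signatures implied here; PredictFromSavedModel is a hypothetical helper name:

        // Hypothetical helper: reload a saved network and regenerate the CUP predictions.
        // ModelManager.LoadNetwork and ModelManager.GeneratorCUP are taken from the snippets
        // above; their exact signatures and return types are assumed, not verified.
        private static void PredictFromSavedModel(string modelPath, DataSet finalTestDS, string outputPath)
        {
            var n = ModelManager.LoadNetwork(modelPath);
            var predictions = ModelManager.GeneratorCUP(finalTestDS, n);
            File.WriteAllText(outputPath, string.Join("\n", predictions.Select(s => string.Join(",", s))));
        }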