Code Example #1
File: Program.cs  Project: lilanpei/Monks
        private static void LastTrain(DataSet testDS, INeuralTrainerParams passedParams, IOptimizer trainer, string prefix, int seed)
        {
            string path = prefix + passedParams.NumberOfHiddenUnits + "_lr" + passedParams.learningRate + "_reg" + passedParams.regularizationRate;
            // Build the network: 10 input units (identity), one tanh hidden layer, 2 linear output units.
            Network n = new Network(new List<Layer>()
            {
                new Layer(new ActivationIdentity(), true, 10),
                new Layer(new ActivationTanh(), true, passedParams.NumberOfHiddenUnits),
                //   new Layer(new ActivationLeakyRelu(),true,40),
                new Layer(new ActivationIdentity(), false, 2),
            }, false, AA1_MLP.Enums.WeightsInitialization.Xavier, seed);

            passedParams.network = n;
            var watch = System.Diagnostics.Stopwatch.StartNew();
            List<double[]> learningCurve = trainer.Train(passedParams);

            watch.Stop();
            var elapsedMs = watch.ElapsedMilliseconds;

            Console.WriteLine("elapsed Time:{0} ms", elapsedMs);
            // Evaluate the trained network on the test set, producing MEE/MSE and a predicted-vs-actual log.
            double MEE = 0;
            double MSE = 0;

            var log = ModelManager.TesterCUPRegression(testDS, n, out MEE, out MSE);

            // Persist the learning curve, the final MEE/MSE, the predicted-vs-actual log, and the trained network.
            File.WriteAllText(path + ".txt", string.Join("\n", learningCurve.Select(s => string.Join(",", s))));
            File.AppendAllText(path + ".txt", "\nMEE:" + MEE + " MSE:" + MSE);
            File.WriteAllText(path + "predVsActual.txt", string.Join("\n", log.Select(s => string.Join(",", s))));

            ModelManager.SaveNetowrk(n, path + ".n");
        }
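
A minimal calling sketch for LastTrain, assuming hypothetical concrete types: CupTrainerParams, GradientDescentOptimizer, and LoadCupTestSet are placeholder names, not part of the shown code, and only the parameter members that LastTrain actually reads are set.

        static void Main(string[] args)
        {
            DataSet testDS = LoadCupTestSet();                    // placeholder loader
            INeuralTrainerParams p = new CupTrainerParams         // placeholder implementation
            {
                NumberOfHiddenUnits = 20,
                learningRate = 0.01,
                regularizationRate = 0.001
            };
            IOptimizer trainer = new GradientDescentOptimizer();  // placeholder implementation

            // Writes "<prefix>20_lr0.01_reg0.001.txt", the predVsActual file, and the serialized network.
            LastTrain(testDS, p, trainer, "cup_final_", seed: 1234);
        }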
Code Example #2
File: KFoldValidation.cs  Project: lilanpei/Monks
        public List <double[]> RunExperiment(IOptimizer optimizer, INeuralTrainerParams passedParams, out double MEE, out double MSE)
        {
            // Build the network: 10 input units (identity), one tanh hidden layer, 2 linear output units.
            Network n = new Network(new List<Layer>()
            {
                new Layer(new ActivationIdentity(), true, 10),
                new Layer(new ActivationTanh(), true, passedParams.NumberOfHiddenUnits),
                //   new Layer(new ActivationLeakyRelu(),true,40),
                new Layer(new ActivationIdentity(), false, 2),
            }, false, AA1_MLP.Enums.WeightsInitialization.Xavier);

            passedParams.network = n;
            List<double[]> learningCurve = optimizer.Train(passedParams);

            MEE = 0;
            MSE = 0;
            // Evaluate the trained network on this fold's validation set.
            var log = ModelManager.TesterCUPRegression(passedParams.validationSet, n, out MEE, out MSE);

            return learningCurve;
        }
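
A hedged sketch of how a caller might average the validation error over k folds. The folds collection with Training/Validation members and the trainingSet property are assumed names; only validationSet appears in the code above.

            double sumMEE = 0, sumMSE = 0;
            foreach (var fold in folds)                        // assumed list of train/validation splits
            {
                passedParams.trainingSet = fold.Training;      // assumed member names
                passedParams.validationSet = fold.Validation;

                double mee, mse;
                List<double[]> curve = RunExperiment(optimizer, passedParams, out mee, out mse);

                sumMEE += mee;
                sumMSE += mse;
            }
            Console.WriteLine("average MEE:{0} average MSE:{1}", sumMEE / folds.Count, sumMSE / folds.Count);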
Code Example #3
        // Delegate training to the injected optimizer, passing the configured learning rate.
        internal void Train(Predictor predictor, double[] weightArray, DataSetObject dataSet)
        {
            _optimizer.Train(predictor, weightArray, dataSet, _learningRate);
        }
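
This fragment relies on two fields of its enclosing class. Below is a minimal sketch of that surrounding type, assuming constructor injection; the class name Learner and its constructor are assumptions, while Predictor, DataSetObject, and the four-argument IOptimizer.Train come from the fragment itself.

        internal class Learner
        {
            private readonly IOptimizer _optimizer;
            private readonly double _learningRate;

            internal Learner(IOptimizer optimizer, double learningRate)
            {
                _optimizer = optimizer;
                _learningRate = learningRate;
            }

            internal void Train(Predictor predictor, double[] weightArray, DataSetObject dataSet)
            {
                _optimizer.Train(predictor, weightArray, dataSet, _learningRate);
            }
        }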