Example 1
            /// <summary>
            /// Dumps the trainer configuration, the training algorithm's hyper-parameters,
            /// and the loss function's parameters to the event log.
            /// </summary>
            /// <param name="logFile">The destination log file.</param>
            /// <param name="trainer">The trainer whose parameters are logged.</param>
            /// <param name="algorithm">The training algorithm whose hyper-parameters are logged.</param>
            /// <param name="loss">The loss function whose parameters are logged.</param>
            private void WriteTrainerParameters(
                StreamWriter logFile,
                ClassificationNetworkTrainer trainer,
                ITrainingAlgorithm algorithm,
                ILoss<int[]> loss)
            {
                this.WriteLine(logFile, "Trainer parameters:");
                this.WriteLine(logFile, "  Batch Size: {0}", trainer.BatchSize);
                this.WriteLine(logFile, "  L1 Rate: {0}", trainer.RateL1);
                this.WriteLine(logFile, "  L2 Rate: {0}", trainer.RateL2);
                this.WriteLine(logFile, "  Clip Value: {0}", trainer.ClipValue);

                this.WriteLine(logFile, "Algorithm parameters:");
                this.WriteLine(logFile, "  Algorithm: {0}", algorithm.GetType().Name);

                // The checks below are kept as independent 'if' statements (not a switch)
                // so that the output is unchanged even if an algorithm type matches
                // more than one pattern.
                if (algorithm is Adadelta ada)
                {
                    this.WriteLine(logFile, "  Learning Rate: {0}", ada.LearningRate);
                    this.WriteLine(logFile, "  Decay: {0}", ada.Decay);
                    this.WriteLine(logFile, "  Rho: {0}", ada.Rho);
                    this.WriteLine(logFile, "  Eps: {0}", ada.Eps);
                }

                if (algorithm is Adagrad adg)
                {
                    this.WriteLine(logFile, "  Learning Rate: {0}", adg.LearningRate);
                    this.WriteLine(logFile, "  Eps: {0}", adg.Eps);
                }

                if (algorithm is Adam adm)
                {
                    this.WriteLine(logFile, "  Learning Rate: {0}", adm.LearningRate);
                    this.WriteLine(logFile, "  Beta1: {0}", adm.Beta1);
                    this.WriteLine(logFile, "  Beta2: {0}", adm.Beta2);
                    this.WriteLine(logFile, "  Eps: {0}", adm.Eps);
                }

                if (algorithm is RMSProp rms)
                {
                    this.WriteLine(logFile, "  Learning Rate: {0}", rms.LearningRate);
                    this.WriteLine(logFile, "  Rho: {0}", rms.Rho);
                    this.WriteLine(logFile, "  Eps: {0}", rms.Eps);
                }

                if (algorithm is SGD stochastic)
                {
                    this.WriteLine(logFile, "  Learning Rate: {0}", stochastic.LearningRate);
                    this.WriteLine(logFile, "  Decay: {0}", stochastic.Decay);
                    this.WriteLine(logFile, "  Momentum: {0}", stochastic.Momentum);
                    this.WriteLine(logFile, "  Nesterov: {0}", stochastic.Nesterov);
                }

                this.WriteLine(logFile, "Loss parameters:");
                this.WriteLine(logFile, "  Loss: {0}", loss.GetType().Name);
                if (loss is LogLikelihoodLoss nll)
                {
                    this.WriteLine(logFile, "  LSR: {0}", nll.LSR);
                }
            }
Example 2
                /// <summary>
                /// Builds a <see cref="ClassificationNetworkTrainer"/> and populates it
                /// from the JSON trainer parameters attached to this task.
                /// </summary>
                /// <returns>The configured trainer instance.</returns>
                private ClassificationNetworkTrainer CreateTrainer()
                {
                    var trainer = new ClassificationNetworkTrainer();
                    var serializer = new JsonSerializer();

                    // Read the parameters directly from the in-memory JToken tree.
                    using (var reader = new JTokenReader(this.TaskParameters.TrainerParameters))
                    {
                        serializer.Populate(reader, trainer);
                    }

                    return trainer;
                }
Example 3
        /// <summary>
        /// Trains a small network on 100 random (point, label) pairs and verifies that
        /// a single SGD epoch increases the predicted probability of the true class.
        /// L1/L2 regularization are off, so each step should move the prediction
        /// toward the ground-truth label (a too-large step size could still make this fail).
        /// </summary>
        public void ForwardVolumes()
        {
            // Fixed seed keeps the test deterministic.
            Random random = new Random(0);

            ClassificationNetworkTrainer trainer = new ClassificationNetworkTrainer();

            SGD sgd = new SGD()
            {
                LearningRate = 0.0001f,
                Momentum     = 0.0f
            };

            ClassificationNetwork network = ClassificationNetwork.FromArchitecture("1x1x2~5N~5N~3N", this.classes);

            for (int k = 0; k < 100; k++)
            {
                // Random ground-truth class index in [0, 3).
                int gti = (int)Math.Floor(random.NextDouble() * 3);

                // Random 2-d input point with coordinates in [-1, 1).
                Tensor x = new Tensor(null, new Shape(Shape.BWHC, 1, 1, 1, 2));
                x.Set(new float[] { ((float)random.NextDouble() * 2) - 1, ((float)random.NextDouble() * 2) - 1 });

                // Direct cast instead of 'as': fail fast with InvalidCastException
                // rather than a silent null followed by a NullReferenceException.
                Tensor pv = (Tensor)network.Forward(null, x).Clone();

                trainer.RunEpoch(
                    k,
                    network,
                    Enumerable.Repeat((x, new string[] { this.classes[gti] }), 1),
                    sgd,
                    new LogLikelihoodLoss(),
                    CancellationToken.None);

                Tensor pv2 = (Tensor)network.Forward(null, x).Clone();

                // One training step must raise the true class's predicted weight.
                Assert.IsTrue(pv2.Weights[gti] > pv.Weights[gti], "k: {0}, gti: {1}, pv2[gti]: {2}, pv[gti]: {3}", k, gti, pv2.Weights[gti], pv.Weights[gti]);
            }
        }