/// <summary>
        /// Return a summary description of the neural network.
        /// </summary>
        /// <param name="model">The neural network to describe</param>
        /// <returns>A string description of the neural network</returns>
        public static string ToSummary(this CNTK.Function model)
        {
            var sb = new StringBuilder();

            sb.AppendFormat("\tInput = " + model.Arguments[0].Shape.AsString());
            sb.Append(Environment.NewLine);
            for (int i = 0; i < model.Outputs.Count; i++)
            {
                sb.AppendFormat("\tOutput = " + model.Outputs[i].Shape.AsString());
                sb.Append(Environment.NewLine);
            }
            sb.Append(Environment.NewLine);

            var numParameters = 0;

            foreach (var x in model.Parameters())
            {
                var shape = x.Shape;
                var p     = shape.TotalSize;
                sb.AppendFormat(string.Format("\tFilter Shape:{0,-30} Params:{1}", shape.AsString(), p));
                sb.Append(Environment.NewLine);
                numParameters += p;
            }
            sb.AppendFormat(string.Format("\tTotal Number of Parameters: {0:N0}", numParameters));
            sb.Append(Environment.NewLine);
            return(sb.ToString());
        }
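        // Hedged usage sketch (not part of the original example): assuming a CNTK.Function
        // named "model" has already been built, the ToSummary extension above can be sent
        // straight to the console or to a log file. PrintSummary is a hypothetical helper name.
        public static void PrintSummary(CNTK.Function model)
        {
            Console.WriteLine("Model Summary");
            Console.WriteLine(model.ToSummary());
        }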
        public static void log_number_of_parameters(CNTK.Function model)
        {
            Console.WriteLine("\nModel Summary");
            Console.WriteLine("\tInput = " + model.Arguments[0].Shape.AsString());
            Console.WriteLine("\tOutput = " + model.Output.Shape.AsString());
            Console.WriteLine("");

            var numParameters = 0;

            foreach (var x in model.Parameters())
            {
                var shape = x.Shape;
                var p     = shape.TotalSize;
                Console.WriteLine(string.Format("\tFilter Shape:{0,-30} Param #:{1}", shape.AsString(), p));
                numParameters += p;
            }
            Console.WriteLine(string.Format("\nTotal Number of Parameters: {0:N0}", numParameters));
            Console.WriteLine("---\n");
        }
Example #3
        void train(CNTK.Function model, float[][] labels)
        {
            var content_and_style_outputs = traverse_content_and_styles_nodes(model);
            var label_variables           = new List <CNTK.Variable>();

            for (int i = 0; i < labels.Length; i++)
            {
                var shape          = content_and_style_outputs[i].Shape;
                var input_variable = CNTK.Variable.InputVariable(shape, CNTK.DataType.Float, "content_and_style_" + i);
                label_variables.Add(input_variable);
            }

            var loss_function = create_loss_function(model, content_and_style_outputs, label_variables);
            var pv            = new CNTK.ParameterVector((System.Collections.ICollection)model.Parameters());
            var learner       = CNTK.CNTKLib.AdamLearner(pv, new CNTK.TrainingParameterScheduleDouble(10), new CNTK.TrainingParameterScheduleDouble(0.95));
            var trainer       = CNTK.CNTKLib.CreateTrainer(model, loss_function, loss_function, new CNTK.LearnerVector()
            {
                learner
            });

            var batch = create_batch(loss_function, labels);

            Console.WriteLine("Training on a " + computeDevice.AsString());
            var startTime = DateTime.Now;

            for (int i = 0; i < 301; i++)
            {
                trainer.TrainMinibatch(batch, true, computeDevice);
                if (i % 100 == 0)
                {
                    Console.WriteLine($"epoch {i}, loss={trainer.PreviousMinibatchLossAverage():F3}");
                }
            }
            var elapsedTime = DateTime.Now.Subtract(startTime);

            Console.WriteLine($"Done in {elapsedTime.TotalSeconds:F1} seconds");
        }
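        // Hedged sketch (not from the original source): the create_batch helper used in
        // train() is not shown above. One plausible shape for it is below: pair each
        // argument of the loss function with a CNTK.Value built from the matching row of
        // the float[][] labels array. The argument/label ordering is an assumption, and
        // create_batch_sketch is a hypothetical name.
        Dictionary<CNTK.Variable, CNTK.Value> create_batch_sketch(CNTK.Function loss_function, float[][] labels, CNTK.DeviceDescriptor device)
        {
            var batch     = new Dictionary<CNTK.Variable, CNTK.Value>();
            var arguments = loss_function.Arguments;

            for (int i = 0; i < labels.Length; i++)
            {
                // Value.CreateBatch copies the flat float array onto the compute device,
                // using the variable's shape to interpret it.
                batch.Add(arguments[i], CNTK.Value.CreateBatch <float>(arguments[i].Shape, labels[i], device));
            }
            return(batch);
        }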
        /// <summary>
        /// Get an RMSProp learner to train the network.
        /// </summary>
        /// <param name="input">The network to train.</param>
        /// <param name="learningRateSchedule">The learning rate schedule.</param>
        /// <param name="gamma">The gamma value.</param>
        /// <param name="inc">The inc value.</param>
        /// <param name="dec">The dec value.</param>
        /// <param name="max">The max value.</param>
        /// <param name="min">The min value.</param>
        /// <returns>An RMSProp learner to train the network.</returns>
        public static CNTK.Learner GetRMSPropLearner(
            this CNTK.Function input,
            double learningRateSchedule,
            double gamma,
            double inc,
            double dec,
            double max,
            double min)
        {
            var parameterVector = new CNTK.ParameterVector((System.Collections.ICollection)input.Parameters());

            return(CNTK.CNTKLib.RMSPropLearner(
                       parameterVector,
                       new CNTK.TrainingParameterScheduleDouble(learningRateSchedule),
                       gamma,
                       inc,
                       dec,
                       max,
                       min));
        }
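        // Hedged usage sketch (not part of the original example): wiring the RMSProp
        // learner above into a trainer. The hyper-parameter values and the helper name
        // CreateRMSPropTrainer are placeholders, not values taken from the source.
        public static CNTK.Trainer CreateRMSPropTrainer(CNTK.Function model, CNTK.Function loss, CNTK.Function metric)
        {
            var learner = model.GetRMSPropLearner(
                learningRateSchedule: 0.001,
                gamma: 0.95,
                inc: 2.0,
                dec: 0.5,
                max: 2.0,
                min: 0.5);

            return(CNTK.CNTKLib.CreateTrainer(model, loss, metric, new CNTK.LearnerVector()
            {
                learner
            }));
        }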
Example #5
        List <List <double> > train_mse_cntk(bool sequence_mode, CNTK.Variable x, CNTK.Variable y, CNTK.Function model, GeneratorsInfo gi, int epochs, int steps_per_epoch, CNTK.DeviceDescriptor computeDevice)
        {
            var loss_function     = CNTK.CNTKLib.SquaredError(model, y);
            var accuracy_function = loss_function;

            var lr = 0.001;
            var parameterVector = new CNTK.ParameterVector((System.Collections.ICollection)model.Parameters());
            var learner         = CNTK.CNTKLib.AdamLearner(parameterVector,
                                                           new CNTK.TrainingParameterScheduleDouble(lr /*, (uint)batch_size*/),
                                                           new CNTK.TrainingParameterScheduleDouble(0.9 /*, (uint)batch_size*/),
                                                           unitGain: false);
            var trainer = CNTK.CNTKLib.CreateTrainer(model, loss_function, accuracy_function, new CNTK.LearnerVector()
            {
                learner
            });
            var evaluator = CNTK.CNTKLib.CreateEvaluator(accuracy_function);
            var history   = fit_generator(sequence_mode, x, y, model, trainer, evaluator, gi, epochs, steps_per_epoch, computeDevice);

            return(history);
        }
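        // Hedged sketch (not from the original source): fit_generator is not shown above.
        // A minimal per-epoch loop compatible with the trainer built in train_mse_cntk
        // could look like this; next_batch stands in for whatever the generator produces
        // (a mapping from input/label variables to Value batches), and fit_sketch is a
        // hypothetical name.
        List <List <double> > fit_sketch(CNTK.Trainer trainer, int epochs, int steps_per_epoch,
                                         Func <Dictionary <CNTK.Variable, CNTK.Value> > next_batch, CNTK.DeviceDescriptor computeDevice)
        {
            var history = new List <List <double> >();

            for (int epoch = 0; epoch < epochs; epoch++)
            {
                var losses = new List <double>();
                for (int step = 0; step < steps_per_epoch; step++)
                {
                    // Each step feeds one minibatch to the trainer and records the loss it reports.
                    trainer.TrainMinibatch(next_batch(), false, computeDevice);
                    losses.Add(trainer.PreviousMinibatchLossAverage());
                }
                Console.WriteLine($"epoch {epoch + 1}/{epochs}, loss={losses[losses.Count - 1]:F3}");
                history.Add(losses);
            }
            return(history);
        }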