Example #1
        CNTK.Value create_x_minibatch(bool sequence_mode, CNTK.Variable x, GeneratorsInfo gi, SamplesTargets st, CNTK.DeviceDescriptor computeDevice)
        {
            // Dense mode: pack the flat sample buffer directly into a batch of x.Shape-sized samples.
            if (sequence_mode == false)
            {
                return(CNTK.Value.CreateBatch(x.Shape, st.samples, computeDevice));
            }
            // Sequence mode: reshape the flat buffer into [values_per_record, sequence_length, minibatch_size]
            // (gi.num_records holds the number of values per record, see Example #2) and wrap it in a Value.
            var sequence_length = gi.lookback / gi.step;
            var minibatch_size  = st.samples.Length / sequence_length / gi.num_records;
            var x_shape         = CNTK.NDShape.CreateNDShape(new int[] { gi.num_records, sequence_length, minibatch_size });
            var ndArrayView     = new CNTK.NDArrayView(x_shape, st.samples, computeDevice, readOnly: true);

            return(new CNTK.Value(ndArrayView));
        }
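A quick way to sanity-check the reshape arithmetic in create_x_minibatch is the small standalone method below. It is illustrative only: lookback and step mirror Example #2, while 14 values per record and a batch of 128 samples are assumed figures, not taken from this listing.

        void check_x_minibatch_shape()
        {
            // Illustrative figures: lookback = 1440 and step = 6 mirror Example #2;
            // 14 values per record and a batch of 128 samples are assumptions.
            int lookback = 1440, step = 6, num_records = 14, batch_size = 128;

            int sequence_length = lookback / step;                                // 240 records per sample
            int buffer_length   = num_records * sequence_length * batch_size;     // length of the flat samples buffer
            int minibatch_size  = buffer_length / sequence_length / num_records;  // recovers 128, as in create_x_minibatch

            Console.WriteLine($"sequence_length={sequence_length}, minibatch_size={minibatch_size}");
        }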
Example #2
        GeneratorsInfo create_generators(float[][] float_data)
        {
            var rtrn = new GeneratorsInfo();

            rtrn.lookback    = 1440;                    // how many timesteps back each input window reaches
            rtrn.step        = 6;                       // sample one record every `step` timesteps
            rtrn.delay       = 144;                     // predict the target `delay` timesteps in the future
            rtrn.batch_size  = 128;
            rtrn.num_records = float_data[0].Length;    // number of values per record

            rtrn.train_gen = generator(float_data,
                                       lookback: rtrn.lookback,
                                       delay: rtrn.delay,
                                       min_index: 0,
                                       max_index: 200000,
                                       shuffle: true,
                                       step: rtrn.step,
                                       batch_size: rtrn.batch_size);

            rtrn.val_gen = generator(float_data,
                                     lookback: rtrn.lookback,
                                     delay: rtrn.delay,
                                     min_index: 200001,
                                     max_index: 300000,
                                     step: rtrn.step,
                                     batch_size: rtrn.batch_size);

            rtrn.test_gen = generator(float_data,
                                      lookback: rtrn.lookback,
                                      delay: rtrn.delay,
                                      min_index: 300001,
                                      max_index: -1,
                                      step: rtrn.step,
                                      batch_size: rtrn.batch_size);

            // This is how many steps to draw from `val_gen`
            // in order to see the whole validation set:
            rtrn.val_steps = (300000 - 200001 - rtrn.lookback) / rtrn.batch_size;


            // This is how many steps to draw from `test_gen`
            // in order to see the whole test set:
            rtrn.test_steps = (float_data.Length - 300001 - rtrn.lookback) / rtrn.batch_size;

            return(rtrn);
        }
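The SamplesTargets and GeneratorsInfo containers are not shown in this listing. The sketch below is a hypothetical reconstruction, with field types inferred from how Examples #1 to #4 use them, and it assumes the listing's usual using System.Collections.Generic; context.

        // Hypothetical definitions, inferred from usage in Examples #1 to #4.
        class SamplesTargets
        {
            public float[] samples;    // flat input buffer for one minibatch
            public float[] targets;    // one target value per sample in the minibatch
        }

        class GeneratorsInfo
        {
            public int lookback, step, delay, batch_size, num_records;
            public int val_steps, test_steps;
            public IEnumerable<SamplesTargets> train_gen, val_gen, test_gen;
        }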
Example #3
        List <List <double> > fit_generator(bool sequence_mode, CNTK.Variable x, CNTK.Variable y, CNTK.Function model, CNTK.Trainer trainer, CNTK.Evaluator evaluator, GeneratorsInfo gi, int epochs, int steps_per_epoch, CNTK.DeviceDescriptor computeDevice)
        {
            var history = new List <List <double> >()
            {
                new List <double>(), new List <double>()
            };

            var train_enumerator = gi.train_gen.GetEnumerator();
            var val_enumerator   = gi.val_gen.GetEnumerator();

            var x_minibatch_dims = new List <int>(x.Shape.Dimensions);

            if (sequence_mode == false)
            {
                x_minibatch_dims.Add(gi.batch_size);
            }

            for (int current_epoch = 0; current_epoch < epochs; current_epoch++)
            {
                var epoch_start_time = DateTime.Now;

                // Training pass: accumulate the metric, weighted by the number of samples in each minibatch.
                var epoch_training_error = 0.0;
                {
                    var num_total_samples = 0;
                    for (int s = 0; s < steps_per_epoch; s++)
                    {
                        train_enumerator.MoveNext();
                        var st          = train_enumerator.Current;
                        var x_minibatch = create_x_minibatch(sequence_mode, x, gi, st, computeDevice);
                        var y_minibatch = CNTK.Value.CreateBatch(y.Shape, st.targets, computeDevice);

                        var feed_dictionary = new Dictionary <CNTK.Variable, CNTK.Value> {
                            { x, x_minibatch }, { y, y_minibatch }
                        };
                        bool isSweepEndInArguments = (s == (steps_per_epoch - 1));
                        trainer.TrainMinibatch(feed_dictionary, isSweepEndInArguments, computeDevice);
                        var minibatch_metric = trainer.PreviousMinibatchEvaluationAverage();
                        epoch_training_error += minibatch_metric * st.targets.Length;
                        num_total_samples    += st.targets.Length;
                        x_minibatch.Erase();
                        y_minibatch.Erase();
                    }
                    epoch_training_error /= num_total_samples;
                }
                history[0].Add(epoch_training_error);

                // Validation pass: evaluate the metric without updating the model parameters.
                var epoch_validation_error = 0.0;
                {
                    var num_total_samples = 0;
                    for (int s = 0; s < gi.val_steps; s++)
                    {
                        val_enumerator.MoveNext();
                        var st              = val_enumerator.Current;
                        var x_minibatch     = create_x_minibatch(sequence_mode, x, gi, st, computeDevice);
                        var y_minibatch     = CNTK.Value.CreateBatch(y.Shape, st.targets, computeDevice);
                        var feed_dictionary = new CNTK.UnorderedMapVariableValuePtr()
                        {
                            { x, x_minibatch }, { y, y_minibatch }
                        };
                        var minibatch_metric = evaluator.TestMinibatch(feed_dictionary, computeDevice);
                        epoch_validation_error += minibatch_metric * st.targets.Length;
                        num_total_samples      += st.targets.Length;
                        x_minibatch.Erase();
                        y_minibatch.Erase();
                    }
                    epoch_validation_error /= num_total_samples;
                }
                history[1].Add(epoch_validation_error);

                var elapsedTime = DateTime.Now.Subtract(epoch_start_time);
                Console.WriteLine($"Epoch {current_epoch + 1:D2}/{epochs}, Elapsed time: {elapsedTime.TotalSeconds:F3} seconds. " +
                                  $"Training Error: {epoch_training_error:F3}. Validation Error: {epoch_validation_error:F3}.");
            }

            return(history);
        }
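The returned history pairs the per-epoch training errors (history[0]) with the per-epoch validation errors (history[1]). A minimal, illustrative helper for printing it, not part of the original listing:

        // Illustrative helper: dump the per-epoch errors collected by fit_generator.
        void print_history(List <List <double> > history)
        {
            for (int epoch = 0; epoch < history[0].Count; epoch++)
            {
                Console.WriteLine($"Epoch {epoch + 1:D2}: training error {history[0][epoch]:F3}, " +
                                  $"validation error {history[1][epoch]:F3}");
            }
        }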
Example #4
        List <List <double> > train_mse_cntk(bool sequence_mode, CNTK.Variable x, CNTK.Variable y, CNTK.Function model, GeneratorsInfo gi, int epochs, int steps_per_epoch, CNTK.DeviceDescriptor computeDevice)
        {
            // Squared error is used both as the training loss and as the reported metric.
            var loss_function     = CNTK.CNTKLib.SquaredError(model, y);
            var accuracy_function = loss_function;

            var lr = 0.001;
            var parameterVector = new CNTK.ParameterVector((System.Collections.ICollection)model.Parameters());
            var learner         = CNTK.CNTKLib.AdamLearner(parameterVector,
                                                           new CNTK.TrainingParameterScheduleDouble(lr /*, (uint)batch_size*/),
                                                           new CNTK.TrainingParameterScheduleDouble(0.9 /*, (uint)batch_size*/),
                                                           unitGain: false);
            var trainer = CNTK.CNTKLib.CreateTrainer(model, loss_function, accuracy_function, new CNTK.LearnerVector()
            {
                learner
            });
            var evaluator = CNTK.CNTKLib.CreateEvaluator(accuracy_function);
            var history   = fit_generator(sequence_mode, x, y, model, trainer, evaluator, gi, epochs, steps_per_epoch, computeDevice);

            return(history);
        }
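To show how the helpers above might be wired together, here is a hedged end-to-end sketch. The run and create_model names, the choice of input shape, and the epoch and step counts are illustrative assumptions, not taken from this listing.

        // Hypothetical driver; create_model(...) stands in for whatever network builder is used.
        void run(float[][] float_data)
        {
            var computeDevice = CNTK.DeviceDescriptor.UseDefaultDevice();
            var gi            = create_generators(float_data);

            // Dense (non-sequence) mode: each sample is one window of (lookback / step)
            // records, each record carrying num_records values.
            bool sequence_mode = false;
            var  x = CNTK.Variable.InputVariable(new int[] { gi.num_records, gi.lookback / gi.step }, CNTK.DataType.Float, "x");
            var  y = CNTK.Variable.InputVariable(new int[] { 1 }, CNTK.DataType.Float, "y");

            var model   = create_model(x);   // assumed helper, not shown in this listing
            var history = train_mse_cntk(sequence_mode, x, y, model, gi,
                                         epochs: 20, steps_per_epoch: 500, computeDevice: computeDevice);
        }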