// Exemplo n.º 1
// 0
        public History fit_generator(IEnumerator <List <Dictionary <string, Array> > > generator, int steps_per_epoch,
                                     int epochs = 1, int verbose = 1, CallbackList callbacks = null,
                                     List <Dictionary <string, Array> > validation_data = null, int?validation_steps = null,
                                     Dictionary <int, Tensor> class_weight = null,
                                     int max_q_size = 10, int workers = 1, int initial_epoch = 0)
        {
            // Trains the model on batches produced on the fly by `generator`, which runs
            // in parallel with training (e.g. for real-time data augmentation on CPU
            // while the GPU trains). The generator must loop over its data indefinitely,
            // yielding either (inputs, targets) or (inputs, targets, sample_weights);
            // one epoch ends after `steps_per_epoch` batches have been consumed.
            //
            //   generator:        endless batch source (see above).
            //   steps_per_epoch:  batches per epoch; typically dataset size / batch size.
            //   epochs:           total number of passes over the data.
            //   verbose:          0 = silent, 1 = progress bar, 2 = one line per epoch.
            //   callbacks:        callbacks invoked during training.
            //   validation_data:  a validation generator, or an (inputs, targets[, sample_weights]) tuple.
            //   validation_steps: batches to draw from a validation *generator* each epoch.
            //   class_weight:     maps class index -> loss weight for that class.
            //   max_q_size:       maximum size of the generator queue.
            //   workers:          maximum number of worker processes to spin up.
            //   initial_epoch:    epoch to resume from (useful when continuing a previous run).
            //
            // Returns the `History` object produced by the underlying model.
            // Throws InvalidOperationException if the model has not been compiled yet.
            // (The Python API's `pickle_safe` flag is intentionally not forwarded here.)

            if (this.model == null)
            {
                throw new InvalidOperationException("The model needs to be compiled before being used.");
            }

            History history = this.model.fit_generator(generator,
                                                       steps_per_epoch,
                                                       epochs,
                                                       verbose: verbose,
                                                       callbacks: callbacks,
                                                       validation_data: validation_data,
                                                       validation_steps: validation_steps,
                                                       class_weight: class_weight,
                                                       max_queue_size: max_q_size,
                                                       workers: workers,
                                                       initial_epoch: initial_epoch);
            return history;
        }
// Exemplo n.º 2
// 0
        public override History fit(Dictionary <string, Array> x = null, Dictionary <string, Array> y = null, int batch_size = 32, int epochs = 1, int verbose = 1,
                                    CallbackList callbacks       = null, double validation_split      = 0, IList <Dictionary <string, Array> > validation_data = null, Shuffle shuffle = Shuffle.True,
                                    Dictionary <string, Dictionary <string, double> > class_weight = null, Dictionary <string, Array> sample_weight = null, int initial_epoch = 0, object kwargs = null)
        {
            // Trains the model for a fixed number of epochs on in-memory data.
            //
            //   x:                input data (one array per named input).
            //   y:                target labels.
            //   batch_size:       samples per gradient update.
            //   epochs:           number of passes over the training data.
            //   verbose:          0 = silent, 1 = progress bar, 2 = one line per epoch.
            //   callbacks:        callbacks applied during training.
            //   validation_split: fraction in (0, 1) of the data held out for validation.
            //   validation_data:  explicit (x_val, y_val[, val_sample_weights]) held-out
            //                     data; overrides `validation_split`.
            //   shuffle:          whether/how to shuffle samples each epoch
            //                     (Shuffle.Batch shuffles in batch-sized chunks, for HDF5 data).
            //   class_weight:     per-class loss-scaling weights (training only).
            //   sample_weight:    per-sample loss-scaling weights; 1-D (one weight per
            //                     sample) or 2-D (samples, sequence_length) for temporal
            //                     weighting with sample_weight_mode = "temporal".
            //   initial_epoch:    epoch to resume from (useful when continuing a previous run).
            //
            // Returns a `History` whose `history` attribute records losses and metrics
            // (and validation values, when applicable) per epoch.
            // Throws InvalidOperationException if the model has not been compiled yet.

            if (this.model == null)
            {
                throw new InvalidOperationException("The model needs to be compiled before being used.");
            }

            History history = this.model.fit(x, y,
                                             batch_size,
                                             epochs, verbose,
                                             callbacks,
                                             validation_split,
                                             validation_data,
                                             shuffle,
                                             class_weight,
                                             sample_weight,
                                             initial_epoch,
                                             kwargs);
            return history;
        }