/// <summary>
        /// Generates output predictions for the input samples. Computation is done in batches.
        /// </summary>
        /// <param name="x">The input data frame to run prediction.</param>
        /// <param name="batch_size">Size of the batch.</param>
        /// <returns></returns>
        public DataFrame Predict(DataFrame x, int batch_size)
        {
            DataFrameIter dataFrameIter = new DataFrameIter(x);
            List <float>  predictions   = new List <float>();

            dataFrameIter.SetBatchSize(batch_size);

            while (dataFrameIter.Next())
            {
                var        data   = dataFrameIter.GetBatchX();
                SuperArray output = data;
                foreach (var layer in Layers)
                {
                    if (layer.SkipPred)
                    {
                        continue;
                    }

                    layer.Forward(output);
                    output = layer.Output;
                }

                predictions.AddRange(output.List <float>());
            }

            DataFrame result = new DataFrame();

            result.Load(predictions.ToArray());

            return result;
        }
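
        // A minimal usage sketch (assumed caller code, not part of the original API):
        // it shows how the DataFrame-returning Predict above would typically be invoked.
        // The method and parameter names here are illustrative only.
        public DataFrame PredictInBatches(DataFrame features)
        {
            // 32 samples per forward pass; any positive batch size works,
            // trading memory for fewer passes as it grows.
            return Predict(features, 32);
        }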
        // Example 2: a variant of Predict that returns a Tensor instead of a DataFrame.
        /// <summary>
        /// Generates output predictions for the input samples. Computation is done in batches.
        /// </summary>
        /// <param name="x">The input data frame to run prediction.</param>
        /// <param name="batch_size">Size of the batch.</param>
        /// <returns></returns>
        public Tensor Predict(DataFrame x, int batch_size)
        {
            DataFrameIter dataFrameIter = new DataFrameIter(x);
            List <float>  predictions   = new List <float>();

            dataFrameIter.SetBatchSize(batch_size);
            long[] outshape = null; // shape of the final layer output, captured during the batch loop

            while (dataFrameIter.Next())
            {
                var    data   = dataFrameIter.GetBatchX();
                Tensor output = data;
                foreach (var layer in Layers)
                {
                    if (layer.SkipPred)
                    {
                        continue;
                    }

                    layer.Forward(output);
                    output = layer.Output;
                }

                // Record the shape of the network output (assumes the Tensor type exposes a Shape property).
                outshape = output.Shape;
                predictions.AddRange(output.ToArray().Cast<float>());
            }

            if (outshape != null)
            {
                // The leading dimension must span every predicted sample, not just the last batch.
                long featureSize = outshape.Skip(1).Aggregate(1L, (a, b) => a * b);
                outshape[0] = predictions.Count / featureSize;
            }

            return K.CreateVariable(predictions.ToArray(), outshape);
        }
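
        // A minimal usage sketch (assumed caller code, not part of the original API):
        // the Tensor returned by the variant above can be pulled back into managed
        // memory with ToArray(), just as the method does per batch. Names are illustrative.
        public float[] PredictToFloats(DataFrame features)
        {
            Tensor preds = Predict(features, 32);
            return preds.ToArray().Cast<float>().ToArray();
        }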
        /// <summary>
        /// Trains the model for a given number of epochs (iterations on a dataset).
        /// </summary>
        /// <param name="train">The train dataset which is an instance of DataFrame Iter.</param>
        /// <param name="epochs">Integer. Number of epochs to train the model. An epoch is an iteration over the entire x and y data provided. Note that in conjunction with initial_epoch,  epochs is to be understood as "final epoch". The model is not trained for a number of iterations given by epochs, but merely until the epoch of index epochs is reached.</param>
        /// <param name="batchSize">Integer or None. Number of samples per gradient update. If unspecified, batch_size will default to 32.</param>
        /// <param name="val">The validation set of data to evaluate the model at every epoch.</param>
        public void Train(DataFrameIter train, int epochs, int batchSize, DataFrameIter val = null)
        {
            LearningHistory = new History();
            Stopwatch trainWatch = new Stopwatch();

            try
            {
                Stopwatch batchWatch = new Stopwatch();

                long n = train.DataSize;
                trainWatch.Start();
                train.SetBatchSize(batchSize);
                if (val != null)
                {
                    val.SetBatchSize(batchSize);
                }
                for (int iteration = 1; iteration <= epochs; iteration++)
                {
                    batchWatch.Restart();

                    OnEpochStart(iteration);
                    RunEpoch(iteration, train, val);

                    batchWatch.Stop();
                    // Approximate throughput in samples per second (+1 guards against division by zero).
                    long samplesSeen = n * 1000 / (batchWatch.ElapsedMilliseconds + 1);
                    if (val == null)
                    {
                        OnEpochEnd(iteration, samplesSeen, train_losses.Average(), 0, train_metrics.Average(), 0, batchWatch.ElapsedMilliseconds);
                    }
                    else
                    {
                        OnEpochEnd(iteration, samplesSeen, train_losses.Average(), val_losses.Average(), train_metrics.Average(), val_metrics.Average(), batchWatch.ElapsedMilliseconds);
                    }

                    LearningHistory.Add(train_losses, train_metrics, val_losses, val_metrics);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }

            trainWatch.Stop();
            OnTrainingEnd(LearningHistory, trainWatch.ElapsedMilliseconds);
        }
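
        // A minimal usage sketch (assumed caller code, not part of the original API):
        // wires a training and a validation iterator into Train. The epoch count and
        // batch size are illustrative defaults, not values mandated by the library.
        public void TrainExample(DataFrameIter trainSet, DataFrameIter valSet)
        {
            // 10 full passes over the data, 32 samples per gradient update,
            // evaluating on valSet at the end of every epoch.
            Train(trainSet, epochs: 10, batchSize: 32, val: valSet);
        }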