Example #1
0
        /// <summary>
        /// Performs one pass of mini-batch training over the dataset.
        /// Sequences are sampled uniformly at random (with replacement) into
        /// mini-batches of <c>BatchSize</c>; gradients are accumulated per batch
        /// and the model parameters are updated once per batch.
        /// </summary>
        /// <param name="learningRate">Learning rate used for the parameter update</param>
        /// <param name="network">Neural network to activate (and, optionally, train)</param>
        /// <param name="sequences">Dataset: list of sequences of input/target steps</param>
        /// <param name="isTraining">Whether gradients are backpropagated and parameters updated</param>
        /// <param name="lossFunction">Loss function used to measure (and backpropagate) the error</param>
        /// <returns>
        /// Mean loss over all measured steps. If the loss diverges (NaN/Infinity),
        /// that value is returned immediately. Returns 0 when no step had a target output.
        /// </returns>
        public double PassBatch(double learningRate, INetwork network, List <DataSequence> sequences,
                                bool isTraining, ILoss lossFunction)
        {
            double numerLoss = 0;
            double denomLoss = 0;

            // Number of mini-batch passes: ceiling of sequences.Count / BatchSize,
            // so every element is statistically covered once per epoch on average.
            int passes = (sequences.Count + BatchSize - 1) / BatchSize;

            for (int j = 0; j < passes; j++)
            {
                GraphCPU g = new GraphCPU(isTraining);

                for (int i = 0; i < BatchSize; i++)
                {
                    // Sample a sequence uniformly at random (with replacement).
                    int index = random.Next(sequences.Count);
                    var seq = sequences[index];

                    network.ResetState();
                    foreach (DataStep step in seq.Steps)
                    {
                        NNValue output = network.Activate(step.Input, g);
                        if (step.TargetOutput != null)
                        {
                            double loss = lossFunction.Measure(output, step.TargetOutput);
                            // Abort early on divergence so the caller can react
                            // (e.g. lower the learning rate) instead of training on garbage.
                            if (double.IsNaN(loss) || double.IsInfinity(loss))
                            {
                                return loss;
                            }
                            numerLoss += loss;
                            denomLoss++;
                            if (isTraining)
                            {
                                lossFunction.Backward(output, step.TargetOutput);
                            }
                        }
                    }
                }

                if (isTraining)
                {
                    g.Backward();                             // backprop dw values
                    UpdateModelParams(network, learningRate); // update params
                }
            }

            // Guard against 0/0 (NaN) when no step carried a target output —
            // a NaN here would be indistinguishable from a diverged loss.
            return denomLoss > 0 ? numerLoss / denomLoss : 0;
        }
Example #2
0
        /// <summary>
        /// Online (per-sequence) training: iterates the dataset in order and,
        /// when training, backpropagates and updates parameters after every sequence.
        /// </summary>
        /// <param name="learningRate">Learning rate used for the parameter update</param>
        /// <param name="network">Neural network to activate (and, optionally, train)</param>
        /// <param name="sequences">Dataset: list of sequences of input/target steps</param>
        /// <param name="applyTraining">Whether gradients are backpropagated and parameters updated</param>
        /// <param name="lossTraining">Loss function used to measure (and backpropagate) the error</param>
        /// <returns>
        /// Mean loss over all measured steps. If the loss diverges (NaN/Infinity),
        /// that value is returned immediately. Returns 0 when no step had a target output.
        /// </returns>
        public double Pass(double learningRate, INetwork network, List <DataSequence> sequences,
                           bool applyTraining, ILoss lossTraining)
        {
            double numerLoss = 0;
            double denomLoss = 0;

            foreach (DataSequence seq in sequences)
            {
                GraphCPU g = new GraphCPU(applyTraining);

                network.ResetState();
                foreach (DataStep step in seq.Steps)
                {
                    NNValue output = network.Activate(step.Input, g);
                    if (step.TargetOutput != null)
                    {
                        double loss = lossTraining.Measure(output, step.TargetOutput);
                        // Abort early on divergence so the caller can react
                        // instead of continuing to train on garbage gradients.
                        if (double.IsNaN(loss) || double.IsInfinity(loss))
                        {
                            return loss;
                        }
                        numerLoss += loss;
                        denomLoss++;
                        if (applyTraining)
                        {
                            lossTraining.Backward(output, step.TargetOutput);
                        }
                    }
                }

                if (applyTraining)
                {
                    g.Backward();                             // backprop dw values
                    UpdateModelParams(network, learningRate); // update params
                }
            }

            // Guard against 0/0 (NaN) when no step carried a target output —
            // a NaN here would be indistinguishable from a diverged loss.
            return denomLoss > 0 ? numerLoss / denomLoss : 0;
        }