Example #1
        private void Train(ITrainingExample example)
        {
            var exampleOutputView = example.Output.ViewAs<float>();

            // forward pass: compute every layer's outputs for this example
            evaluator.ComputeOutputs(example.Input);

            var layers         = net.Layers;
            var layersCount    = layers.Length;
            var outputLayerIdx = layersCount - 1;

            for (int ilayer = outputLayerIdx; ilayer >= 0; ilayer--)
            {
                var layer       = layers[ilayer];
                var output      = layer.Outputs;
                var lamda       = layer.Lamdas;
                var weightsView = layer.Weights.ViewAs<float>();
                var biasesView  = layer.Biases.ViewAs<float>();

                var lamdaView  = lamda.ViewAs<float>();
                var outputView = output.ViewAs<float>();

                var activation = evaluator.GetActivation(ilayer);
                activation.EvalDerivate(output, lamda, ilayer, layer.Neurons, net);

                if (ilayer == outputLayerIdx)
                {
                    // compute the error term lamda(j) = f'(net(j)) * (target(j) - output(j)) for the output units
                    Parallel.For(0, layer.Neurons, neuron =>
                    {
                        lamdaView[neuron] *= exampleOutputView[neuron] - outputView[neuron];
                    });
                }
                else
                {
                    // compute the error terms for the hidden units by back-propagating the next layer's error terms through its weights
                    var nextLayer = layers[ilayer + 1];
                    DotTranspose(nextLayer.Weights, nextLayer.Lamdas, lamda, nextLayer.Neurons, nextLayer.WeightsPerNeurons);
                }

                var input     = ilayer == 0 ? example.Input : layers[ilayer - 1].Outputs;
                var inputView = input.ViewAs<float>();

                // gradient-descent update: w += step * lamda * input, b += step * lamda
                Parallel.For(0, layer.Neurons, n =>
                {
                    for (int j = 0; j < layer.WeightsPerNeurons; j++)
                    {
                        weightsView[n * layer.WeightsPerNeurons + j] += step * lamdaView[n] * inputView[j];
                    }

                    biasesView[n] += step * lamdaView[n];
                });
            }
        }
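
The DotTranspose helper is not included in this example. Judging from the call above, it plausibly multiplies the next layer's error terms by the transpose of that layer's weight matrix and folds the result into the current layer's lamda buffer, which EvalDerivate has already filled with the activation derivatives. The sketch below is an assumption inferred from that call site; the float[] parameters stand in for whatever buffer/view type the project actually passes.

        // Hedged sketch of DotTranspose, inferred from its call site above.
        // result[j] is expected to already hold f'(net_j); the dot product with
        // column j of the next layer's weight matrix completes the hidden-unit
        // error term lamda(j) = f'(net_j) * sum_n w(n, j) * lamda_next(n).
        private static void DotTranspose(float[] nextWeights, float[] nextLamdas, float[] result, int neurons, int weightsPerNeuron)
        {
            Parallel.For(0, weightsPerNeuron, j =>
            {
                float sum = 0f;

                for (int n = 0; n < neurons; n++)
                {
                    sum += nextWeights[n * weightsPerNeuron + j] * nextLamdas[n];
                }

                result[j] *= sum;
            });
        }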
Example #2
        private float ComputeExampleError(ITrainingExample example)
        {
            // forward pass, then measure the squared error against the expected output
            evaluator.ComputeOutputs(example.Input);

            return 0.5f * DistanceSquare(example.Output, net.Output);
        }
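
DistanceSquare is likewise not shown. Given the 0.5f factor, it presumably returns the sum of squared differences between the expected and the actual output vectors, so ComputeExampleError yields the usual squared-error measure E = 0.5 * sum_k (t_k - o_k)^2 for one example. The float[] signature below is an assumption; the real code most likely takes the same buffer/view types used in Example #1.

        // Hedged sketch of DistanceSquare, assuming plain float[] vectors of equal length.
        private static float DistanceSquare(float[] expected, float[] actual)
        {
            float sum = 0f;

            for (int k = 0; k < expected.Length; k++)
            {
                float diff = expected[k] - actual[k];
                sum += diff * diff;
            }

            return sum;
        }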