Example No. 1
    // Output-layer deltas: the difference between the training-set target and the unit's
    // activation, scaled by the sigmoid derivative activation * (1 - activation).
    public void CalculateDeltas(ref IFeedForwardNetLayerRepository currentLayer, ITrainingSetItemRepository trainingSetItem)
    {
        for (int toNo = 0; toNo < currentLayer.GetToUnitCount(); toNo++)
        {
            double outputActivation = currentLayer.GetToUnitActivation(toNo);
            double outputDelta = (trainingSetItem.GetOutputNodeValue(toNo) - outputActivation) * outputActivation * (1 - outputActivation);
            currentLayer.SetToUnitDelta(toNo, outputDelta);
        }
    }
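For reference, the loop above implements the standard output-unit delta for a sigmoid activation trained on squared error, with the target taken from the training-set item:

    outputDelta = (target - outputActivation) * outputActivation * (1 - outputActivation)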
Example No. 2
    // Hidden-layer deltas: for each "from" unit, accumulate the deltas already computed for the
    // downstream layer (previousLayer, i.e. the layer processed just before this one in the
    // backward pass), weighted by the corresponding layer weights, then scale the sum by the
    // sigmoid derivative of the unit's activation.
    public void CalculateDeltas(ref IFeedForwardNetLayerRepository currentLayer, IFeedForwardNetLayerRepository previousLayer)
    {
        double[] hiddenDSum = new double[currentLayer.GetFromUnitCount()];
        for (int fromNo = 0; fromNo < currentLayer.GetFromUnitCount(); fromNo++)
        {
            hiddenDSum[fromNo] = 0.0;
            for (int toNo = 0; toNo < previousLayer.GetToUnitCount(); toNo++)
            {
                hiddenDSum[fromNo] += previousLayer.GetToUnitDelta(toNo) * currentLayer.GetLayerWeight(fromNo, toNo);
            }
            double fromUnitActivation = previousLayer.GetFromUnitActivation(fromNo);
            double hiddenDelta        = fromUnitActivation * (1 - fromUnitActivation) * hiddenDSum[fromNo];
            currentLayer.SetToUnitDelta(fromNo, hiddenDelta);
        }
    }
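The hidden-unit delta follows the usual chain-rule form: the sigmoid derivative of the unit's activation times the weighted sum of the deltas already computed for the downstream layer:

    hiddenDelta = fromUnitActivation * (1 - fromUnitActivation) * sum_over_toNo( delta(toNo) * weight(fromNo, toNo) )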
Example No. 3
    // Weight and bias updates with momentum: each change is the learning rate times the source
    // activation (1 for the bias) times the destination delta, plus the previous change scaled
    // by the momentum constant; the change is stored for the next iteration and then applied.
    public void UpdateWeights(ref IFeedForwardNetLayerRepository currentLayer)
    {
        for (int toNo = 0; toNo < currentLayer.GetToUnitCount(); toNo++)
        {
            for (int fromNo = 0; fromNo < currentLayer.GetFromUnitCount(); fromNo++)
            {
                double weightChange = backPropagationConstants.LearningRate * currentLayer.GetFromUnitActivation(fromNo) * currentLayer.GetToUnitDelta(toNo) +
                                      currentLayer.GetLayerWeightChange(fromNo, toNo) * backPropagationConstants.Momentum;
                currentLayer.SetLayerWeightChange(fromNo, toNo, weightChange);
                currentLayer.AddLayerWeight(fromNo, toNo, weightChange);
            }
            double biasChange = backPropagationConstants.LearningRate * currentLayer.GetToUnitDelta(toNo) +
                                currentLayer.GetLayerBiasChange(toNo) * backPropagationConstants.Momentum;
            currentLayer.SetLayerBiasChange(toNo, biasChange);
            currentLayer.AddLayerBias(toNo, biasChange);
        }
    }
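Written out, the update applies the delta rule with a momentum term; the bias update is the same with the activation factor dropped:

    weightChange = LearningRate * fromUnitActivation * toUnitDelta + Momentum * previousWeightChange
    biasChange   = LearningRate * toUnitDelta                      + Momentum * previousBiasChange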
Example No. 4
    // Walk the layers from the output layer back towards the input layer, updating each one with
    // the strategy that matches its position; previousLayer tracks the layer processed in the
    // preceding iteration, i.e. the downstream layer whose deltas are already known.
    public void UpdateWeights(ITrainingSetItemRepository trainingSetItem)
    {
        IFeedForwardNetLayerRepository previousLayer = null;

        for (int layerNo = feedForwardNet.LayerCount() - 1; layerNo >= 0; layerNo--)
        {
            IFeedForwardNetLayerRepository currentLayer = feedForwardNet.GetLayer(layerNo);
            if (feedForwardNet.IsLastLayer(layerNo))
            {
                outputUnitWeightStrategy.UpdateWeights(ref currentLayer);
            }
            else
            {
                hiddenUnitWeightStrategy.UpdateWeights(ref currentLayer);
            }
            previousLayer = currentLayer;
        }
    }
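As shown here, the driver only tracks previousLayer; the sketch below illustrates how it could be wired to the UpdateWeightsDeltas overloads shown in Examples No. 5 and No. 6 below. It assumes the same fields as Example No. 4, and the method name and exact wiring are assumptions for illustration, not code taken from the repository:

    // Hypothetical sketch: drive the backward pass through the strategies' UpdateWeightsDeltas
    // overloads, handing the previously processed (downstream) layer to the hidden-unit strategy.
    public void UpdateWeightsDeltasSketch(ITrainingSetItemRepository trainingSetItem)
    {
        IFeedForwardNetLayerRepository previousLayer = null;

        for (int layerNo = feedForwardNet.LayerCount() - 1; layerNo >= 0; layerNo--)
        {
            IFeedForwardNetLayerRepository currentLayer = feedForwardNet.GetLayer(layerNo);
            if (feedForwardNet.IsLastLayer(layerNo))
            {
                // Output layer: deltas come from the training-set targets (Example No. 5).
                outputUnitWeightStrategy.UpdateWeightsDeltas(ref currentLayer, trainingSetItem);
            }
            else
            {
                // Hidden layer: deltas come from the layer processed in the previous iteration (Example No. 6).
                hiddenUnitWeightStrategy.UpdateWeightsDeltas(ref currentLayer, previousLayer);
            }
            previousLayer = currentLayer;
        }
    }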
Example No. 5
    // IBackPropagationConstants backPropagationConstants;

    // Output-unit strategy: compute the output deltas against the training-set item, then apply
    // the weight updates to the layer.
    public void UpdateWeightsDeltas(ref IFeedForwardNetLayerRepository currentLayer, ITrainingSetItemRepository trainingSetItem)
    {
        CalculateDeltas(ref currentLayer, trainingSetItem);
        UpdateWeights(ref currentLayer);
    }
Example No. 6
    // Hidden-unit strategy: compute the hidden deltas from the downstream layer's deltas, then
    // apply the weight updates to the layer.
    public void UpdateWeightsDeltas(ref IFeedForwardNetLayerRepository currentLayer, IFeedForwardNetLayerRepository previousLayer)
    {
        CalculateDeltas(ref currentLayer, previousLayer);
        UpdateWeights(ref currentLayer);
    }