Example #1
 public NNFeedForwardNetwork(int[] unitsPerLayer)
 {
     // One NNLayer per entry in unitsPerLayer, plus per-layer weight matrices,
     // weighted-input buffers and output buffers.
     layers               = new NNLayer[unitsPerLayer.Length];
     weightsPerLayer      = new NNMatrix[getLayerCount()];
     weightedinputOfLayer = new double[getLayerCount()][];
     outputOfLayer        = new double[getLayerCount()][];
     for (int layer = 0; layer < getLayerCount(); layer++)
     {
         layers[layer] = new NNLayer(unitsPerLayer[layer], ActivationFunctions.sigmoid);
         weightedinputOfLayer[layer] = new double[layers[layer].getUnitCount()];
         outputOfLayer[layer]        = new double[layers[layer].getUnitCount()];
         if (layer == 0)
         {
             // The input layer has no incoming connections; give it a square
             // identity weight matrix so inputs pass through unchanged.
             weightsPerLayer[layer] = new NNMatrix(getLayer(layer).getUnitCount(), getLayer(layer).getUnitCount());
             for (int unit = 0; unit < getLayer(layer).getUnitCount(); unit++)
             {
                 weightsPerLayer[layer][unit, unit] = 1.0;
             }
         }
         else
         {
             // Hidden/output layers: one row per unit in this layer,
             // one column per unit in the previous layer.
             weightsPerLayer[layer] = new NNMatrix(getLayer(layer).getUnitCount(), getLayer(layer - 1).getUnitCount());
         }
     }
     // Sleep for a millisecond so that networks created in quick succession
     // are less likely to share the same millisecond-based random seed.
     System.Threading.Thread.Sleep(1);
     rnd = new Random(System.DateTime.Now.Millisecond);
     callUpdateCallbacks();
 }
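
A minimal usage sketch for the constructor above. The class name and constructor signature are taken from the example itself; the layer sizes are made up for illustration, and the sketch assumes the surrounding NNLayer, NNMatrix and ActivationFunctions types compile as shown.

 // Hypothetical usage: 4 input units, 6 hidden units, 2 output units.
 // The first entry is the input layer; its weight matrix is initialized to the identity.
 int[] unitsPerLayer = { 4, 6, 2 };
 NNFeedForwardNetwork net = new NNFeedForwardNetwork(unitsPerLayer);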
 public NNRestrictedBoltzmannMachine(int visibleUnitCnt, int hiddenUnitCnt)
 {
     rnd         = new Random(System.DateTime.Now.Millisecond);
     // Both layers use sigmoid activations; biases start at zero.
     visible     = new NNLayer(visibleUnitCnt, ActivationFunctions.sigmoid);
     hidden      = new NNLayer(hiddenUnitCnt, ActivationFunctions.sigmoid);
     biasVisible = new double[visibleUnitCnt];
     biasHidden  = new double[hiddenUnitCnt];
     // Weight matrix: one row per hidden unit, one column per visible unit.
     weights     = new NNMatrix(hiddenUnitCnt, visibleUnitCnt);
 }
 // Trains the RBM with one-step contrastive divergence (CD-1): positive statistics
 // come from the training vector, negative statistics from a single
 // hidden -> visible -> hidden reconstruction of that vector.
 public void train(double[][] trainingset, int epochs = 1, double learningRate = 1.0)
 {
     double[] visibleSample = new double[visible.getUnitCount()];
     double[] hiddenSample  = new double[hidden.getUnitCount()];
     double[] hiddenSample2 = new double[hidden.getUnitCount()];
     NNMatrix posGrad       = new NNMatrix(hidden.getUnitCount(), visible.getUnitCount());
     NNMatrix negGrad       = new NNMatrix(hidden.getUnitCount(), visible.getUnitCount());

     int e, t, b;

     for (e = 0; e < epochs; e++)
     {
         for (t = 0; t < trainingset.Length; t++)
         {
             // Positive phase: sample hidden states driven by the training vector.
             sample(propagateVisibleToHidden(trainingset[t], hiddenSample), hiddenSample);
             NNMatrix.outerProduct(trainingset[t], hiddenSample, posGrad);

             // Negative phase: reconstruct the visible units, then resample the hidden units.
             sample(propagateHiddenToVisible(hiddenSample, visibleSample), visibleSample);
             sample(propagateVisibleToHidden(visibleSample, hiddenSample2), hiddenSample2);
             NNMatrix.outerProduct(visibleSample, hiddenSample2, negGrad);

             // weights += (posGrad - negGrad) * learningRate
             weights.applyOperatorToThis(posGrad, negGrad, (weight, posG, negG) => (weight + (posG - negG) * learningRate));

             // biasVisible += (trainingset[t] - visibleSample) * learningRate
             for (b = 0; b < biasVisible.Length; b++)
             {
                 biasVisible[b] += ((trainingset[t][b] - visibleSample[b]) * learningRate);
             }
             // biasHidden += (hiddenSample - hiddenSample2) * learningRate
             for (b = 0; b < biasHidden.Length; b++)
             {
                 biasHidden[b] += ((hiddenSample[b] - hiddenSample2[b]) * learningRate);
             }
         }
     }
 }
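
A usage sketch for the two members above. The constructor and train signatures come from the example; the dataset, epoch count and learning rate are illustrative assumptions only.

 // Hypothetical usage: an RBM with 6 visible and 2 hidden units,
 // trained on a tiny hand-made binary dataset.
 double[][] trainingset =
 {
     new double[] { 1, 1, 1, 0, 0, 0 },
     new double[] { 1, 0, 1, 0, 0, 0 },
     new double[] { 0, 0, 0, 1, 1, 1 },
     new double[] { 0, 0, 1, 1, 1, 0 }
 };
 NNRestrictedBoltzmannMachine rbm = new NNRestrictedBoltzmannMachine(6, 2);
 rbm.train(trainingset, epochs: 1000, learningRate: 0.1);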
Example #4
 public NNMatrix(NNMatrix other)
 {
     // Deep copy: applyOp builds a fresh matrix by applying the identity
     // function element-wise, and this constructor adopts its internal storage m.
     m = applyOp(other, other, (x, y) => x).m;
 }
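
A brief sketch of the copy semantics. Only the copy constructor itself is shown above; the two-argument constructor and the [row, col] indexer used below are assumed from their appearance in Example #1.

 // Hypothetical usage: the copy owns its own storage, so editing it leaves the original intact.
 NNMatrix a = new NNMatrix(2, 3);
 a[0, 0] = 1.0;
 NNMatrix b = new NNMatrix(a);    // deep copy via the constructor above
 b[0, 0] = 5.0;                   // a[0, 0] is still 1.0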