        // Propagates activation into layer PWhichLayer from the adjacent layer:
        // from the layer below when PForward is true, from the layer above otherwise.
        private void UpdateLayer(int PWhichLayer, bool PForward, AutoencoderWeights PWeights)
        {
            int beginlayer = PWhichLayer - 1;

            if (PForward)
            {
                Utility.WithinBounds("Cannot update this layer!!!", PWhichLayer, 1, numlayers);
            }
            else
            {
                Utility.WithinBounds("Cannot update this layer!!!", PWhichLayer, 0, numlayers - 1);
                beginlayer = PWhichLayer + 1;
            }
            RBMLayer thislayer     = layers[PWhichLayer];
            RBMLayer previouslayer = layers[beginlayer];

            double[] states = previouslayer.GetStates();
            for (int i = 0; i < thislayer.Count; i++)
            {
                // Weighted sum of the source layer's states feeding neuron i.
                double input = 0;
                for (int j = 0; j < previouslayer.Count; j++)
                {
                    if (!PForward)
                    {
                        // Weight sets are indexed by the lower layer: GetWeight(lower neuron, upper neuron).
                        input += PWeights.GetWeightSet(beginlayer - 1).GetWeight(i, j) * states[j];
                    }
                    else
                    {
                        input += PWeights.GetWeightSet(beginlayer).GetWeight(j, i) * states[j];
                    }
                }
                thislayer.SetState(i, input);
            }
        }
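For reference, the propagation above is a plain weighted sum of the source layer's states. The weight matrices appear to be indexed as (lower-layer neuron, upper-layer neuron), which is why the forward pass reads GetWeight(j, i) and the backward pass GetWeight(i, j). A minimal standalone sketch with plain arrays, not part of the library:

        // Sketch only (assumed indexing weights[lowerNeuron, upperNeuron]); not library code.
        private static double[] Propagate(double[] sourceStates, double[,] weights, bool forward)
        {
            int targetCount = forward ? weights.GetLength(1) : weights.GetLength(0);
            var result = new double[targetCount];
            for (int i = 0; i < targetCount; i++)
            {
                double input = 0;
                for (int j = 0; j < sourceStates.Length; j++)
                {
                    // Forward: source is the lower layer; backward: source is the upper layer.
                    input += forward ? weights[j, i] * sourceStates[j]
                                     : weights[i, j] * sourceStates[j];
                }
                result[i] = input;
            }
            return result;
        }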
Code example #2
        // Applies a momentum-smoothed bias update to unit PWhich from its
        // positive-phase and negative-phase activations.
        private static void TrainBias(RBMLayer PLayer, int PWhich, double PPosPhase, double PNegPhase)
        {
            double biaschange = (learnrate.mombiases * PLayer.GetBiasChange(PWhich)) + (learnrate.lrbiases * (PPosPhase - PNegPhase));

            PLayer.SetBiasChange(PWhich, biaschange);
            PLayer.SetBias(PWhich, PLayer.GetBias(PWhich) + biaschange);
        }
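The update above is the usual momentum rule for bias learning under contrastive divergence: the new step blends the previous step with the learning-rate-scaled difference between the positive-phase and negative-phase activations of the unit. A minimal sketch over plain arrays (the names below are illustrative, not the library's API):

        // Sketch only: biasChanges remembers each unit's previous step for the momentum term.
        private static void UpdateBiases(double[] biases, double[] biasChanges,
                                         double[] posPhase, double[] negPhase,
                                         double momentum, double learningRate)
        {
            for (int i = 0; i < biases.Length; i++)
            {
                double step = momentum * biasChanges[i] + learningRate * (posPhase[i] - negPhase[i]);
                biasChanges[i] = step;
                biases[i]     += step;
            }
        }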
Code example #3
 public RBM(RBMLayer PVisibles, RBMLayer PHiddens, RBMLearningRate PLearnRate, IWeightInitializer PWeightInit)
 {
     numvisibles = PVisibles.Count;
     numhiddens  = PHiddens.Count;
     InitLayers(PVisibles, PHiddens);
     InitWeights(PWeightInit);
     InitTrainingData();
     learnrate = PLearnRate;
 }
Code example #4
 // Copy constructor: clones the layers and weights; the learning-rate and
 // training-data references are shared with the source RBM.
 public RBM(RBM PA)
 {
     visibles     = (RBMLayer)PA.visibles.Clone();
     hiddens      = (RBMLayer)PA.hiddens.Clone();
     weights      = (RBMWeightSet)PA.weights.Clone();
     learnrate    = PA.learnrate;
     trainingdata = PA.trainingdata;
     numvisibles  = PA.numvisibles;
     numhiddens   = PA.numhiddens;
 }
Code example #5
 // Registers default pre-training and fine-tuning rates for the new layer's biases
 // (and, from the second layer on, for the weight set connecting it to the previous
 // layer), then appends the layer.
 private void AddLayer(RBMLayer PLayer)
 {
     learnrate.prelrbiases.Add(0.001);
     learnrate.premombiases.Add(0.5);
     learnrate.finelrbiases.Add(0.001);
     if (layers.Count >= 1)
     {
         learnrate.prelrweights.Add(0.001);
         learnrate.premomweights.Add(0.5);
         learnrate.finelrweights.Add(0.001);
     }
     layers.Add(PLayer);
 }
Code example #6
        // Pre-training backward pass: propagates the states of layer PWhich + 1
        // down into layer PWhich through the recognition weights.
        private void UpdateLayerBackwardPreTrain(int PWhich)
        {
            Utility.WithinBounds("Cannot update this layer!!!", PWhich, numlayers - 1);
            RBMLayer thislayer = layers[PWhich];
            RBMLayer nextlayer = layers[PWhich + 1];

            double[] states = nextlayer.GetStates();
            for (int i = 0; i < thislayer.Count; i++)
            {
                // Weighted sum of the upper layer's states feeding neuron i.
                double input = 0;
                for (int j = 0; j < nextlayer.Count; j++)
                {
                    input += recognitionweights.GetWeightSet(PWhich).GetWeight(i, j) * states[j];
                }
                thislayer.SetState(i, input);
            }
        }
Code example #7
 // Updates every weight from the positive/negative-phase statistics in PData,
 // then updates the visible and hidden biases.
 public static void Train(RBMLayer PLayerVis, RBMLayer PLayerHid, TrainingData PData
                          , RBMLearningRate PLearnRate, RBMWeightSet PWeightSet)
 {
     weightset = PWeightSet;
     learnrate = PLearnRate;
     for (int i = 0; i < PLayerVis.Count; i++)
     {
         for (int j = 0; j < PLayerHid.Count; j++)
         {
             TrainWeight(i, j, CalculateTrainAmount(PData.posvis[i], PData.poshid[j]
                                                    , PData.negvis[i], PData.neghid[j]));
         }
         TrainBias(PLayerVis, i, PData.posvis[i], PData.negvis[i]);
     }
     for (int j = 0; j < PLayerHid.Count; j++)
     {
         TrainBias(PLayerHid, j, PData.poshid[j], PData.neghid[j]);
     }
 }
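CalculateTrainAmount and TrainWeight are not shown in this example; in a standard CD-1 implementation the per-weight step combines momentum with the learning-rate-scaled difference between the positive-phase and negative-phase pairwise statistics. A hedged sketch of that rule (the signature below is an assumption, not the library's API):

 // Sketch only: presumed CD-1 weight step, not taken from the library source.
 private static double CdWeightStep(double posVis, double posHid, double negVis, double negHid,
                                    double previousStep, double momentum, double learningRate)
 {
     // <v_i * h_j> under the data minus under the reconstruction, scaled and smoothed with momentum.
     return momentum * previousStep + learningRate * (posVis * posHid - negVis * negHid);
 }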
Code example #8
 private void InitLayers(RBMLayer PVisibles, RBMLayer PHiddens)
 {
     if (PVisibles == null)
     {
         throw new Exception("You need a visible layer...");
     }
     if (PHiddens == null)
     {
         throw new Exception("You need a hidden layer...");
     }
     if (PVisibles.Count <= 0)
     {
         throw new Exception("You need at least one visible neuron...");
     }
     if (PHiddens.Count <= 0)
     {
         throw new Exception("You need at least one hidden neuron...");
     }
     hiddens  = PHiddens;
     visibles = PVisibles;
 }