Example #1
        //Task execution
        public override void Execute()
        {
            if (ReconstructionSource == ReconstructionSource.INPUT)
            {
                InputLayer.RBMInputForward();

                for (int i = 0; i < CD_k; i++)
                {
                    RBMForwardPass(CurrentLayerIndex, i >= (CD_k - 1));

                    RBMBackwardPass(CurrentLayerIndex, i >= (CD_k - 1));
                }
            }
            else
            {
                MyRBMLayer layer = ((MyRBMLayer)layers[CurrentLayerIndex]);
                if (layer.Target != null && layer.Target.Count > 0)
                {
                    layer.Output.CopyFromMemoryBlock(layer.Target, 0, 0, layer.Neurons);
                }

                RBMBackwardPass(CurrentLayerIndex, false);

                for (int i = 0; i < CD_k; i++)
                {
                    RBMForwardPass(CurrentLayerIndex, i >= (CD_k - 1));
                    RBMBackwardPass(CurrentLayerIndex, i >= (CD_k - 1));
                }
            }

            MyLog.DEBUG.WriteLine("RBM reconstruction between layers [" + CurrentLayerIndex + ";" + (CurrentLayerIndex + 1) + "], step " + step + ".");
            ++step;
        }
Example #2
        //Task execution
        public override void Execute()
        {
            if (layers.Count <= CurrentLayerIndex)
            {
                MyLog.ERROR.WriteLine("Invalid CurrentLayerIndex " + CurrentLayerIndex +
                                      ". Must be smaller than the number of layers, which is " + layers.Count);
                return;
            }
            if (ReconstructionSource == ReconstructionSource.INPUT)
            {
                InputLayer.RBMInputForward();

                for (int i = 0; i < CD_k; i++)
                {
                    RBMForwardPass(CurrentLayerIndex, i >= (CD_k - 1));

                    RBMBackwardPass(CurrentLayerIndex, i >= (CD_k - 1));
                }
            }
            else
            {
                // we want to reconstruct from a hidden layer; if the index points at the visible layer, move it to the first hidden layer
                if (CurrentLayerIndex == 0)
                {
                    CurrentLayerIndex = 1;
                }

                MyRBMLayer layer = ((MyRBMLayer)layers[CurrentLayerIndex]);
                if (layer.Target != null && layer.Target.Count > 0)
                {
                    layer.Output.CopyFromMemoryBlock(layer.Target, 0, 0, layer.Neurons);
                }

                RBMBackwardPass(CurrentLayerIndex, false);

                for (int i = 0; i < CD_k; i++)
                {
                    RBMForwardPass(CurrentLayerIndex, i >= (CD_k - 1));
                    RBMBackwardPass(CurrentLayerIndex, i >= (CD_k - 1));
                }
            }

            MyLog.DEBUG.WriteLine("RBM reconstruction between layers [" + CurrentLayerIndex + ";" + (CurrentLayerIndex + 1) + "], step " + step + ".");
            ++step;
        }
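The loop in Examples #1 and #2 is the whole reconstruction pass: clamp one layer, then alternate forward and backward passes CD_k times (a short Gibbs chain). For readers without the MyRBMLayer sources at hand, below is a minimal, self-contained sketch of what that alternation computes on a single data vector. The layer sizes, the logistic sigmoid, the random weight initialization, and every name in the sketch are illustrative assumptions; this is not the Brain Simulator API the examples call into.

using System;

public static class RbmReconstructionSketch
{
    // Illustrative sizes and parameters; nothing here comes from the examples above.
    const int Visible = 4, Hidden = 3, CD_k = 5;

    static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

    // "Forward" pass: visible activations -> hidden probabilities.
    static double[] Forward(double[] v, double[,] w, double[] hBias)
    {
        var h = new double[Hidden];
        for (int j = 0; j < Hidden; j++)
        {
            double sum = hBias[j];
            for (int i = 0; i < Visible; i++) sum += v[i] * w[i, j];
            h[j] = Sigmoid(sum);
        }
        return h;
    }

    // "Backward" pass: hidden activations -> reconstructed visible probabilities.
    static double[] Backward(double[] h, double[,] w, double[] vBias)
    {
        var v = new double[Visible];
        for (int i = 0; i < Visible; i++)
        {
            double sum = vBias[i];
            for (int j = 0; j < Hidden; j++) sum += h[j] * w[i, j];
            v[i] = Sigmoid(sum);
        }
        return v;
    }

    public static void Main()
    {
        var rng = new Random(0);
        var w = new double[Visible, Hidden];
        for (int i = 0; i < Visible; i++)
            for (int j = 0; j < Hidden; j++)
                w[i, j] = rng.NextDouble() * 0.1 - 0.05;
        var vBias = new double[Visible];
        var hBias = new double[Hidden];

        // ReconstructionSource.INPUT case: clamp the visible layer to the input,
        // then alternate forward/backward passes CD_k times.
        double[] v = { 1, 0, 1, 0 };
        for (int step = 0; step < CD_k; step++)
        {
            double[] h = Forward(v, w, hBias);
            v = Backward(h, w, vBias);
        }
        Console.WriteLine("reconstruction: " + string.Join(", ", v));
    }
}

Reconstructing from a hidden layer (the else branch above) starts the same alternation with a backward pass first, optionally after copying a target pattern into the hidden layer's output.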
Example #3
        //Task execution
        public override void Execute()
        {
            // sampling between input and hidden layer
            if (CurrentLayerIndex <= 0)
            {
                InputLayer.RBMInputForwardAndStore();

                MyRBMInputLayer visible = InputLayer;
                hidden = (MyRBMLayer)layers[CurrentLayerIndex + 1];

                if (hidden.Dropout > 0)
                    hidden.CreateDropoutMask();

                // forward to visible and hidden layers and store for sampling biases
                hidden.RBMForwardAndStore(SigmoidSteepness);

                // sample positive weight data
                hidden.RBMSamplePositive();

                if (RandomHidden)
                    hidden.RBMRandomActivation();

                // do k-step Contrastive Divergence (go back and forth between visible and hidden)
                for (int i = 0; i < CD_k - 1; i++)
                {
                    // back
                    hidden.RBMBackward(visible.Bias, SigmoidSteepness);

                    if (RandomVisible)
                        visible.RBMRandomActivation();

                    // and forth
                    hidden.RBMForward(SigmoidSteepness, true);

                    // randomly activate the just updated hidden neurons if needed
                    if (RandomHidden)
                        hidden.RBMRandomActivation();
                }

                // in the last (= k-th) step of CD-k, we use probabilistic modeling -> we sample both with probability, not random activation
                hidden.RBMBackward(visible.Bias, SigmoidSteepness);
                hidden.RBMForward(SigmoidSteepness, true);

                if (hidden.StoreEnergy)
                    hidden.Energy.Fill(0);

                // update biases of both layers based on the sampled data stored by RBMForwardAndStore()
                visible.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);
                hidden.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);

                // sample negative weight data AND adapt weights at the same time
                hidden.RBMUpdateWeights(LearningRate, Momentum, WeightDecay);

            }

            // sampling between hidden layers

            // wrong indexing!
            else if (layers.Count > 2) // if the number of hidden layers is greater than 1
            {
                InputLayer.RBMInputForward();

                // hidden layers, up to the one that will be visible (excluding)
                for (int i = 1; i < CurrentLayerIndex; i++)
                {
                    ((MyRBMLayer)layers[i]).RBMForward(SigmoidSteepness, false);
                    if (RandomPrevious)
                        ((MyRBMLayer)layers[i]).RBMRandomActivation();
                }

                MyRBMLayer visible = (MyRBMLayer)layers[CurrentLayerIndex];
                hidden = (MyRBMLayer)layers[CurrentLayerIndex + 1];

                if (hidden.Dropout > 0)
                    hidden.CreateDropoutMask();

                // forward to visible and hidden layers and store for sampling biases
                visible.RBMForwardAndStore(SigmoidSteepness);
                hidden.RBMForwardAndStore(SigmoidSteepness);

                // sample positive weight data
                hidden.RBMSamplePositive();

                if (RandomHidden)
                    hidden.RBMRandomActivation();

                // do k-step Contrastive Divergence (go back and forth between visible and hidden)
                for (int i = 0; i < CD_k - 1; i++)
                {
                    // back
                    hidden.RBMBackward(visible.Bias, SigmoidSteepness);

                    if (RandomVisible)
                        visible.RBMRandomActivation();

                    // and forth
                    hidden.RBMForward(SigmoidSteepness, true);

                    // randomly activate the just updated hidden neurons if needed
                    if (RandomHidden)
                        hidden.RBMRandomActivation();
                }

                // in the last (= k-th) step of CD-k, we use probabilistic modeling -> we sample both with probability, not random activation
                hidden.RBMBackward(visible.Bias, SigmoidSteepness);
                hidden.RBMForward(SigmoidSteepness, true);

                if (hidden.StoreEnergy)
                    hidden.Energy.Fill(0);

                // update biases of both layers based on the sampled data stored by RBMForwardAndStore()
                visible.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);
                hidden.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);

                // sample negative weight data AND adapt weights at the same time
                hidden.RBMUpdateWeights(LearningRate, Momentum, WeightDecay);

            }
            else
            {
                MyLog.ERROR.WriteLine("Wrong index parameter. There are " + layers.Count + " total layers, can't sample from " + CurrentLayerIndex);
            }

            MyLog.DEBUG.WriteLine("RBM initialization between layers [" + CurrentLayerIndex + ";" + (CurrentLayerIndex + 1) + "], step " + step + ".");
            ++step;
        }
Example #4
        //Task execution
        public override void Execute()
        {
            // sampling between input and hidden layer
            if (CurrentLayerIndex <= 0)
            {
                InputLayer.RBMInputForwardAndStore();

                MyRBMInputLayer visible = InputLayer;
                hidden = (MyRBMLayer)layers[CurrentLayerIndex + 1];

                if (hidden.Dropout > 0)
                {
                    hidden.CreateDropoutMask();
                }

                // forward to visible and hidden layers and store for sampling biases
                hidden.RBMForwardAndStore(SigmoidSteepness);

                // sample positive weight data
                hidden.RBMSamplePositive();

                if (RandomHidden)
                {
                    hidden.RBMRandomActivation();
                }

                // do k-step Contrastive Divergence (go back and forth between visible and hidden)
                for (int i = 0; i < CD_k - 1; i++)
                {
                    // back
                    hidden.RBMBackward(visible.Bias, SigmoidSteepness);


                    if (RandomVisible)
                    {
                        visible.RBMRandomActivation();
                    }

                    // and forth
                    hidden.RBMForward(SigmoidSteepness, true);

                    // randomly activate the just updated hidden neurons if needed
                    if (RandomHidden)
                    {
                        hidden.RBMRandomActivation();
                    }
                }

                // in the last (= k-th) step of CD-k, we use probabilistic modeling -> we sample both with probability, not random activation
                hidden.RBMBackward(visible.Bias, SigmoidSteepness);
                hidden.RBMForward(SigmoidSteepness, true);

                if (hidden.StoreEnergy)
                {
                    hidden.Energy.Fill(0);
                }

                // update biases of both layers based on the sampled data stored by RBMForwardAndStore()
                visible.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);
                hidden.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);

                // sample negative weight data AND adapt weights at the same time
                hidden.RBMUpdateWeights(LearningRate, Momentum, WeightDecay);
            }

            // sampling between hidden layers

            else if (layers.Count > 2) // if the number of hidden layers is greater than 1
            {
                InputLayer.RBMInputForward();

                // hidden layers, up to the one that will be visible (excluding)
                for (int i = 1; i < CurrentLayerIndex; i++)
                {
                    ((MyRBMLayer)layers[i]).RBMForward(SigmoidSteepness, false);
                    if (RandomPrevious)
                    {
                        ((MyRBMLayer)layers[i]).RBMRandomActivation();
                    }
                }

                MyRBMLayer visible = (MyRBMLayer)layers[CurrentLayerIndex];
                hidden = (MyRBMLayer)layers[CurrentLayerIndex + 1];

                if (hidden.Dropout > 0)
                {
                    hidden.CreateDropoutMask();
                }

                // forward to visible and hidden layers and store for sampling biases
                visible.RBMForwardAndStore(SigmoidSteepness);
                hidden.RBMForwardAndStore(SigmoidSteepness);

                // sample positive weight data
                hidden.RBMSamplePositive();

                if (RandomHidden)
                {
                    hidden.RBMRandomActivation();
                }

                // do k-step Contrastive Divergence (go back and forth between visible and hidden)
                for (int i = 0; i < CD_k - 1; i++)
                {
                    // back
                    hidden.RBMBackward(visible.Bias, SigmoidSteepness);


                    if (RandomVisible)
                    {
                        visible.RBMRandomActivation();
                    }

                    // and forth
                    hidden.RBMForward(SigmoidSteepness, true);

                    // randomly activate the just updated hidden neurons if needed
                    if (RandomHidden)
                    {
                        hidden.RBMRandomActivation();
                    }
                }

                // in the last (= k-th) step of CD-k, we use probabilistic modeling -> we sample both with probability, not random activation
                hidden.RBMBackward(visible.Bias, SigmoidSteepness);
                hidden.RBMForward(SigmoidSteepness, true);

                if (hidden.StoreEnergy)
                {
                    hidden.Energy.Fill(0);
                }

                // update biases of both layers based on the sampled data stored by RBMForwardAndStore()
                visible.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);
                hidden.RBMUpdateBiases(LearningRate, Momentum, WeightDecay);

                // sample negative weight data AND adapt weights at the same time
                hidden.RBMUpdateWeights(LearningRate, Momentum, WeightDecay);
            }
            else
            {
                MyLog.ERROR.WriteLine("Wrong CurrentLayerIndex parameter. There are " + layers.Count + " total layers, can't sample from " + CurrentLayerIndex);
            }


            MyLog.DEBUG.WriteLine("RBM initialization between layers [" + CurrentLayerIndex + ";" + (CurrentLayerIndex + 1) + "], step " + step + ".");
            ++step;
        }
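Examples #3 and #4 are the training counterpart of the reconstruction task: a positive phase driven by the data (RBMForwardAndStore plus RBMSamplePositive), k-1 Gibbs steps, a final probabilistic step, and then bias and weight updates with learning rate, momentum, and weight decay. The sketch below shows one CD-1 weight update on plain arrays, under the same caveat as before: the sizes, the sigmoid, and the update form (biases omitted for brevity) are illustrative assumptions, not the RBMUpdateWeights implementation.

using System;

public static class CdUpdateSketch
{
    // Illustrative sizes; not taken from the examples above.
    const int Visible = 3, Hidden = 2;

    static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

    public static void Main()
    {
        var rng = new Random(0);
        var w = new double[Visible, Hidden];
        var mom = new double[Visible, Hidden];   // momentum accumulator
        for (int i = 0; i < Visible; i++)
            for (int j = 0; j < Hidden; j++)
                w[i, j] = rng.NextDouble() * 0.1 - 0.05;

        double learningRate = 0.1, momentum = 0.9, weightDecay = 1e-4;
        double[] v0 = { 1, 0, 1 };               // one training example (visible data)

        // Positive phase: hidden probabilities driven by the data,
        // the role of RBMForwardAndStore + RBMSamplePositive above.
        var h0 = new double[Hidden];
        for (int j = 0; j < Hidden; j++)
        {
            double s = 0;
            for (int i = 0; i < Visible; i++) s += v0[i] * w[i, j];
            h0[j] = Sigmoid(s);
        }

        // Negative phase (CD-1): one backward/forward Gibbs step,
        // the role of RBMBackward + RBMForward above.
        var v1 = new double[Visible];
        for (int i = 0; i < Visible; i++)
        {
            double s = 0;
            for (int j = 0; j < Hidden; j++) s += h0[j] * w[i, j];
            v1[i] = Sigmoid(s);
        }
        var h1 = new double[Hidden];
        for (int j = 0; j < Hidden; j++)
        {
            double s = 0;
            for (int i = 0; i < Visible; i++) s += v1[i] * w[i, j];
            h1[j] = Sigmoid(s);
        }

        // Weight update: <v0 h0> - <v1 h1> with momentum and weight decay,
        // the role RBMUpdateWeights plays above.
        for (int i = 0; i < Visible; i++)
            for (int j = 0; j < Hidden; j++)
            {
                double grad = v0[i] * h0[j] - v1[i] * h1[j];
                mom[i, j] = momentum * mom[i, j]
                          + learningRate * (grad - weightDecay * w[i, j]);
                w[i, j] += mom[i, j];
            }

        Console.WriteLine("w[0,0] after one CD-1 update: " + w[0, 0]);
    }
}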