Example #1
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            // Cache the previous layer when it also implements the backward pass
            if (previousLayer is MyAbstractFBLayer)
                m_previousBackwardLayer = (previousLayer as MyAbstractFBLayer);
        }
Example #2
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            // Pass the input dimensions through unchanged
            m_output.Nb = PreviousLayer.Output.Nb;
            m_output.Width = PreviousLayer.Output.Width;
            m_output.Height = PreviousLayer.Output.Height;
        }
Example #3
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            // Just a clone of the previous layer's output
            m_output = m_previousBackwardLayer.Output;

            // Set the weights (1 weight + 1 bias per output)
            m_weight = m_output;
            m_bias = m_output;
        }
Example #4
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            if (PreviousLayer.Output.Count != m_originalLayer.Output.Count)
                throw new MyFeedForwardLayerException("MirrorPoolLayer: Input (" + PreviousLayer.Output.Size + "x" + PreviousLayer.Output.Nb + ") doesn't fit the output dimension of the referenced MyPoolLayer (" + m_originalLayer.Output.Size + "x" + m_originalLayer.Output.Nb + ")");

            // Set the output
            m_output.Nb = m_originalLayer.PreviousLayer.Output.Nb;
            m_output.Height = m_originalLayer.PreviousLayer.Output.Height;
            m_output.Width = m_originalLayer.PreviousLayer.Output.Width;

            if (Output.Size == 0)
                throw new MyFeedForwardLayerException("MirrorPoolLayer: Output size cannot be 0");
        }
Example #5
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            if (PreviousLayer.Output.Count != m_originalLayer.Output.Count)
                throw new MyFeedForwardLayerException("MirrorNeuronLayer: input (" + PreviousLayer.Output.Size + "x" + PreviousLayer.Output.Nb + ") doesn't fit the output dimension of the referenced MyNeuronLayer (" + m_originalLayer.Output.Size + "x" + m_originalLayer.Output.Nb + ")");

            // Set the output
            m_output.Nb = m_originalLayer.PreviousLayer.Output.Nb;
            m_output.Height = m_originalLayer.PreviousLayer.Output.Height;
            m_output.Width = m_originalLayer.PreviousLayer.Output.Width;

            // There are only biases since the synaptic weights are shared with the original layer
            m_bias.Width = Output.Width;
            m_bias.Height = Output.Height;
            m_bias.Nb = Output.Nb;
        }
Example #6
 public virtual void RBMUpdate(MyAbstractFLayer previousLayer, float LearningRate, float LearningMomentum, float WeightDecay)
 {
     throw new NotImplementedException();
 }
Example #7
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            DimensionRoutageTable(previousLayer);

            // One 2D kernel per (feature map, routed input) pair
            uint kernel2dCount = 0;
            for (uint featureMapId = 0; featureMapId < m_output.Nb; featureMapId++)
                kernel2dCount += (uint)FeatureInputs[featureMapId].Length;
            m_weight.Nb += kernel2dCount;
            m_weight.Depth = 1;

            m_bias.Width = 1;
            m_bias.Height = 1;
            m_bias.Nb = m_output.Nb;

            if (PreviousLayer.Output.Width < m_weight.Width)
                throw new MyFeedForwardLayerException("ConvolutionLayer: Input width is smaller than kernel width");
            if (PreviousLayer.Output.Height < m_weight.Height)
                throw new MyFeedForwardLayerException("ConvolutionLayer: Input height is smaller than kernel height");

            m_output.Width = (PreviousLayer.Output.Width - m_weight.Width) / XStride + 1;
            m_output.Height = (PreviousLayer.Output.Height - m_weight.Height) / YStride + 1;
        }
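Example #7's last two lines apply the standard valid-convolution size formula, output = (input - kernel) / stride + 1, with integer division. A minimal standalone sketch of that arithmetic, using made-up sizes rather than values from any of the examples:

    using System;

    class ConvDimDemo
    {
        // Valid convolution: output = (input - kernel) / stride + 1 (integer division)
        static uint OutDim(uint input, uint kernel, uint stride)
        {
            return (input - kernel) / stride + 1;
        }

        static void Main()
        {
            Console.WriteLine(OutDim(28, 5, 1)); // 24: 28-wide input, 5-wide kernel, stride 1
            Console.WriteLine(OutDim(28, 5, 2)); // 12: the division truncates (23 / 2 = 11, + 1)
        }
    }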
Example #8
 public virtual void RBMForwardAndStore(MyAbstractFLayer previousLayer)
 {
     throw new NotImplementedException();
 }
Example #9
 public virtual void RBMSamplePositive(MyAbstractFLayer previousLayer)
 {
     throw new NotImplementedException();
 }
Example #10
        public virtual void Dimension(MyAbstractFLayer previousLayer)
        {
            PreviousLayer = previousLayer;

            if (PreviousLayer != null)
            {
                if (PreviousLayer.Output.Size == 0)
                    throw new MyFeedForwardLayerException("AbstractFLayer: Input size is 0");

                if (PreviousLayer.Output.Width == 0)
                    throw new MyFeedForwardLayerException("AbstractFLayer: Input width is 0");

                if (PreviousLayer.Output.Width * PreviousLayer.Output.Height != PreviousLayer.Output.Size)
                    throw new MyFeedForwardLayerException("AbstractFLayer: Requires a rectangular input");
            }
        }
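Example #10 is the base implementation every override above chains up to: it stores the previous layer and rejects empty or non-rectangular inputs before any derived sizing runs. A minimal sketch of a derived override against that contract (MyIdentityLayer is a hypothetical name, not one of the classes shown; its body mirrors Example #2):

    // Hypothetical pass-through layer that copies the input dimensions unchanged
    public class MyIdentityLayer : MyAbstractFLayer
    {
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            // Chain up first so PreviousLayer is set and validated
            base.Dimension(previousLayer);

            m_output.Nb = PreviousLayer.Output.Nb;
            m_output.Width = PreviousLayer.Output.Width;
            m_output.Height = PreviousLayer.Output.Height;
        }
    }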
Example #11
 public virtual void RBMBackward(MyAbstractFLayer nextLayer)
 {
     throw new NotImplementedException();
 }
Example #12
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            m_output.Width = PreviousLayer.Output.Width / Stride;
            m_output.Height = PreviousLayer.Output.Height / Stride;
            m_output.Nb = PreviousLayer.Output.Nb;

            if (Output.Size == 0)
                throw new MyFeedForwardLayerException("PoolLayer: Output size cannot be 0");
        }
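Both divisions in Example #12 are integer, so an input whose side is not a multiple of Stride silently loses its border pixels; the Size check only catches the degenerate case where nothing is left. A quick illustration with made-up sizes:

    using System;

    class PoolDimDemo
    {
        static void Main()
        {
            Console.WriteLine(29 / 2); // 14: a 29x29 input at Stride = 2 pools to 14x14, dropping one row and column
            Console.WriteLine(3 / 4);  // 0: a Stride larger than the input collapses the output and triggers the exception above
        }
    }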
Example #13
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            // Just a clone of the previous layer's output (in-place computation)
            m_output = m_previousBackwardLayer.Output;
        }
Example #14
        public override void Dimension(MyAbstractFLayer previousLayer)
        {
            base.Dimension(previousLayer);

            // Set the weights
            m_weight.Nb = m_neuronsCount;
            m_weight.Width = PreviousLayer.Output.Width;
            m_weight.Height = PreviousLayer.Output.Height;
            m_weight.Depth = PreviousLayer.Output.Nb;

            m_bias.Nb = m_neuronsCount;
            m_bias.Width = 1;
            m_bias.Height = 1;
        }
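In Example #14 each of the m_neuronsCount neurons owns a full-input-sized weight volume (the previous layer's Width x Height, with one slice per feature map) plus a single bias. A quick count under made-up sizes:

    using System;

    class NeuronDimDemo
    {
        static void Main()
        {
            // Hypothetical previous layer: 8 feature maps of 12x12
            uint neurons = 100, width = 12, height = 12, depth = 8;
            Console.WriteLine(neurons * width * height * depth); // 115200 weights
            Console.WriteLine(neurons);                          // 100 biases
        }
    }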
Example #15
        protected void DimensionRoutageTable(MyAbstractFLayer previousLayer)
        {
            // Create or validate the routing table
            if (FeatureInputs == null) // No table provided: fully connect and autogenerate it
            {
                FeatureInputs = new uint[m_output.Nb][];
                uint inputNb = PreviousLayer.Output.Nb;
                for (uint featureMapId = 0; featureMapId < m_output.Nb; featureMapId++)
                {
                    FeatureInputs[featureMapId] = new uint[inputNb];
                    for (uint inputId = 0; inputId < inputNb; inputId++)
                    {
                        FeatureInputs[featureMapId][inputId] = inputId;
                    }
                }
            }
            else // If the array is provided by the user, check the index validity
            {
                uint nbInput = PreviousLayer.Output.Nb;
                for (uint featureMapId = 0; featureMapId < m_output.Nb; featureMapId++)
                {
                    for (uint i = 0; i < FeatureInputs[featureMapId].Length; i++)
                        if (FeatureInputs[featureMapId][i] >= nbInput)
                            throw new MyFeedForwardLayerException("ConvolutionLayer: Input index " + FeatureInputs[featureMapId][i] + " out of range [0.." + (nbInput - 1) + "]");
                }
            }

            // Example layout of the flattened routing vector:
            //
            //  ||    NbSources    ||      Offset     ||              SourceId             ||
            //  |-------------------------------------------------------------------------|
            //  ||  1  |  3  |  2  ||  0  |  1  |  4  ||  0  |  1  |  2  |  4  |  0  |  3  ||
            //  ---------------------------------------------------------------------------
            //
            // NbSources[i] is the number of inputs routed to feature map i, and Offset[i]
            // is the index of its first entry in the SourceId block: here map 0 reads
            // source {0}, map 1 reads {1, 2, 4} and map 2 reads {0, 3}.

            // Dimension the routing info vector
            uint totalSize = (uint)FeatureInputs.Length /* Sizes */ + (uint)FeatureInputs.Length /* Offset */;
            for (uint featureMapId = 0; featureMapId < FeatureInputs.Length; featureMapId++)
                totalSize += (uint)FeatureInputs[featureMapId].Length;
            m_extraSize = totalSize;
        }
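The comment block in Example #15 describes a flattened routing vector: per-map source counts, then offsets into the id block, then the concatenated source ids. A standalone sketch of that packing, fed with the sample data from the diagram (Flatten is a hypothetical helper, not a method of the layer):

    using System;
    using System.Collections.Generic;

    class RoutingDemo
    {
        // Packs a jagged feature -> sources table into [NbSources | Offset | SourceId]
        static uint[] Flatten(uint[][] featureInputs)
        {
            var result = new List<uint>();
            foreach (var sources in featureInputs)
                result.Add((uint)sources.Length);   // NbSources block
            uint offset = 0;
            foreach (var sources in featureInputs)
            {
                result.Add(offset);                 // Offset block
                offset += (uint)sources.Length;
            }
            foreach (var sources in featureInputs)
                result.AddRange(sources);           // SourceId block
            return result.ToArray();
        }

        static void Main()
        {
            // The sample from the diagram: three maps reading 1, 3 and 2 sources
            var featureInputs = new uint[][]
            {
                new uint[] { 0 },
                new uint[] { 1, 2, 4 },
                new uint[] { 0, 3 },
            };
            Console.WriteLine(string.Join(" ", Flatten(featureInputs)));
            // Prints: 1 3 2 0 1 4 0 1 2 4 0 3
        }
    }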
Example #16
        public override void RBMBackward(MyAbstractFLayer nextLayer)
        {
            GenerateRBMRandom();

            m_RBMBackwardKernel.SetupExecution(Output.Count);
            m_RBMBackwardKernel.Run(nextLayer.OutputDataPtr, OutputDataPtr, nextLayer.WeightDataPtr, BiasDataPtr, 1, m_network.RBMRandom.GetDevicePtr(m_network));
        }
Example #17
        public override void RBMForwardAndStore(MyAbstractFLayer previousLayer)
        {
            GenerateRBMRandom();

            m_RBMForwardAndStoreKernel.SetupExecution(Output.Count);
            m_RBMForwardAndStoreKernel.Run(previousLayer.OutputDataPtr, OutputDataPtr, WeightDataPtr, BiasDataPtr, StoredOutputDataPtr, m_network.RBMRandom.GetDevicePtr(m_network));
        }
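The kernel source for Example #17 isn't shown, but given the random buffer that is generated and passed in, the conventional RBM hidden pass is a sigmoid activation followed by a Bernoulli sample, with the probability kept for the later update. A CPU sketch of that textbook computation (an assumption about what the kernel does, not its actual source):

    using System;

    class RbmForwardSketch
    {
        static double Sigmoid(double x)
        {
            return 1.0 / (1.0 + Math.Exp(-x));
        }

        // p(h_j = 1 | v) = sigmoid(b_j + sum_i v_i * w_ij), then a Bernoulli draw
        static void ForwardAndStore(double[] visible, double[,] weights, double[] bias,
                                    double[] hidden, double[] stored, Random rng)
        {
            for (int j = 0; j < hidden.Length; j++)
            {
                double sum = bias[j];
                for (int i = 0; i < visible.Length; i++)
                    sum += visible[i] * weights[i, j];
                stored[j] = Sigmoid(sum);                              // keep the activation probability
                hidden[j] = rng.NextDouble() < stored[j] ? 1.0 : 0.0;  // stochastic binary state
            }
        }

        static void Main()
        {
            double[] v = { 1, 0, 1 };
            var w = new double[3, 2]; // all zeros
            double[] b = new double[2], h = new double[2], s = new double[2];
            ForwardAndStore(v, w, b, h, s, new Random(0));
            Console.WriteLine(string.Join(" ", s)); // 0.5 0.5: zero weights and biases give sigmoid(0)
        }
    }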
Example #18
 public void UpdateObserver(MyAbstractFLayer previousLayer)
 {
     m_RBMObserverKernel.SetupExecution(previousLayer.Output.Count);
     m_RBMObserverKernel.Run(previousLayer.OutputDataPtr, m_network.RBMObserver.GetDevicePtr(m_network));
 }
Example #19
        public override void RBMUpdate(MyAbstractFLayer previousLayer, float LearningRate, float Momentum, float WeightDecay)
        {
            m_RBMUpdateWeightKernel.SetupExecution(Weight.Count);
            m_RBMUpdateWeightKernel.Run(m_network.RBMPositiveMemoryBlock.GetDevicePtr(m_network), previousLayer.OutputDataPtr, OutputDataPtr, WeightDataPtr, m_network.RBMWeightMomentum.GetDevicePtr(m_network), LearningRate, Momentum, WeightDecay, m_network.Energy.GetDevicePtr(m_network), 1);

            // Update the biases of this (hidden) layer
            m_RBMUpdateBiasKernel.SetupExecution(Output.Count);
            m_RBMUpdateBiasKernel.Run(StoredOutputDataPtr, OutputDataPtr, BiasDataPtr, m_network.RBMBiasMomentum2.GetDevicePtr(m_network), m_network.Energy.GetDevicePtr(m_network), LearningRate, Momentum, WeightDecay, 0);

            // Update the biases of the previous (visible) layer
            if (!(previousLayer is MyInputLayer))
            {
                m_RBMUpdateBiasKernel.SetupExecution(previousLayer.Output.Count);
                m_RBMUpdateBiasKernel.Run(previousLayer.StoredOutputDataPtr, previousLayer.OutputDataPtr, previousLayer.BiasDataPtr, m_network.RBMBiasMomentum1.GetDevicePtr(m_network), m_network.Energy.GetDevicePtr(m_network), LearningRate, Momentum, WeightDecay, 0);
            }
        }
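Taken together, the three launches in Example #19 follow the contrastive-divergence (CD-1) rule: weights move along the difference between the positive statistics sampled earlier and the current reconstruction, with momentum and weight decay, while each layer's biases move along the difference between stored and reconstructed activations. A per-weight CPU sketch of that standard rule (an interpretation of the kernel's role, not its source):

    using System;

    class RbmUpdateSketch
    {
        // Standard CD-1 weight step; pos and neg are <v_i * h_j> under the
        // data and under the reconstruction, respectively
        static void UpdateWeight(ref double weight, ref double velocity,
                                 double pos, double neg,
                                 double lr, double momentum, double decay)
        {
            velocity = momentum * velocity + lr * ((pos - neg) - decay * weight);
            weight += velocity;
        }

        static void Main()
        {
            double w = 0.1, v = 0.0;
            UpdateWeight(ref w, ref v, pos: 0.8, neg: 0.3, lr: 0.1, momentum: 0.9, decay: 0.0001);
            Console.WriteLine(w); // ~0.14999: 0.1 + 0.1 * (0.5 - 0.0001 * 0.1)
        }
    }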
Example #20
 public override void RBMSamplePositive(MyAbstractFLayer previousLayer)
 {
     m_RBMSampleKernel.SetupExecution(Weight.Count);
     //m_RBMSampleKernel.Run(previousLayer.OutputDataPtr, OutputDataPtr, RBMPositiveDataPtr, Weight.Count);
     m_RBMSampleKernel.Run(previousLayer.OutputDataPtr, OutputDataPtr, m_network.RBMPositiveMemoryBlock.GetDevicePtr(m_network), Weight.Count);
 }