Example #1
        /// <summary>
        /// Constructor for creating a new dense (fully connected) layer
        /// </summary>
        /// <param name="numNeurons">number of neurons in the layer</param>
        /// <param name="biasInitializer">bias initializer (defaults to constant zero when null)</param>
        /// <param name="weightInitializer">weight initializer (defaults to Xavier when null)</param>
        /// <param name="regularizer">optional regularizer</param>
        public LinearLayer(int numNeurons, AbstractBiasInitializer biasInitializer = null,
                           AbstractWeightInitializer weightInitializer = null, AbstractRegularizer regularizer = null)
        {
            this.numNeurons        = numNeurons;
            this.regularizer       = regularizer;
            this.biasInitializer   = biasInitializer;
            this.weightInitializer = weightInitializer;
            layerName              = "Linear";
        }
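A minimal usage sketch, assuming only the constructor shown above; the layer sizes and initializer arguments are illustrative, not taken from the library's tests:

        // illustrative only: a dense layer with 64 neurons and default (null) initializers
        var dense = new LinearLayer(64);

        // or with explicit initializers, using types that appear elsewhere in these examples
        var dense2 = new LinearLayer(64,
                                     biasInitializer: new ConstantBiasInitializer(0d),
                                     weightInitializer: new XavierWeightInitializer(128, 64));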
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer == null && inputDimension == null)
            {
                string msg = "Dimension of first layer of network not set!";
                Utils.ThrowException(msg);
            }
            else if (previousLayer != null)
            {
                inputDimension = previousLayer.outputDimension;
                index          = previousLayer.Index + 1;
            }

            // validate hyper-parameters and compute the convolution output size
            TestHyperParametersAndSetOutputSize(this.inputDimension.width,
                                                this.inputDimension.height, this.inputDimension.depth);

            outputDimension = new Dimension(this.inputDimension.imageCount, filterCount,
                                            1, outputSize);

            #region Init initializers
            if (biasInitializer == null)
            {
                biasInitializer = new ConstantBiasInitializer(0d);
            }
            biases = biasInitializer.Initialize(filterCount);

            // https://stats.stackexchange.com/questions/198840/cnn-xavier-weight-initialization
            // https://github.com/keras-team/keras/blob/998efc04eefa0c14057c1fa87cab71df5b24bf7e/keras/initializations.py
            int fanIn  = filterSize * filterSize * inputDimension.depth;
            int fanOut = filterCount;
            if (weightInitializer == null)
            {
                weightInitializer = new XavierWeightInitializer(fanIn, fanOut);
            }
            #endregion Init initializers

            filters = weightInitializer.Initialize(new Dimension(filterCount,
                                                                 inputDimension.depth, inputDimension.width, filterSize));

            // initialize the activations array
            activations = Utils.Init4dArr(inputDimension.imageCount,
                                          filterCount, inputDimension.width, outputSize);

            compiled = true;

            return this;
        }
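For reference, a common Glorot/Xavier uniform rule built from the fanIn and fanOut values computed above; this is a sketch of the usual formula, not necessarily what this library's XavierWeightInitializer does internally:

        // sketch: sample each weight from U(-limit, limit) with limit = sqrt(6 / (fanIn + fanOut))
        int    fanIn  = 3 * 3 * 16;                        // e.g. filterSize * filterSize * depth
        int    fanOut = 8;                                 // e.g. filterCount
        double limit  = Math.Sqrt(6.0 / (fanIn + fanOut));
        var    rng    = new Random();
        double weight = (rng.NextDouble() * 2.0 - 1.0) * limit;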
        /// <summary>
        /// Constructor for initialization of convolution layer
        /// </summary>
        /// <param name="inputDimension">dimension of input data</param>
        /// <param name="filterSize">size of filters which will be used</param>
        /// <param name="filterCount">count of filters which will be used</param>
        /// <param name="stride">stride of filters during convolution</param>
        /// <param name="zeroPadding">if is true zero padding will be used</param>
        public Convolution1DLayer(Dimension inputDimension = null, int filterSize             = DEFAULT_FILTER_SIZE,
                                  int filterCount          = DEFAULT_FILTER_COUNT, int stride = DEFAULT_STRIDE,
                                  bool zeroPadding         = DEFAULT_ZERO_PADDING, AbstractBiasInitializer biasInitializer = null,
                                  AbstractWeightInitializer weightInitializer = null, AbstractRegularizer regularizer      = null)
        {
            // init variables
            this.filterSize     = filterSize;
            this.filterCount    = filterCount;
            this.stride         = stride;
            this.zeroPadding    = zeroPadding;
            this.inputDimension = inputDimension;
            this.regularizer    = regularizer;
            layerName           = CONV_LAYER_NAME;
            outputSize          = 0;

            this.biasInitializer   = biasInitializer;
            this.weightInitializer = weightInitializer;
        }
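A minimal construction sketch for this layer; the Dimension argument order (imageCount, depth, width, height) is inferred from how it is used in the Compile methods above, and all values are illustrative:

        // illustrative only: 32 samples of depth 1, width 1, height 100, convolved with 16 filters of size 3
        var conv = new Convolution1DLayer(new Dimension(32, 1, 1, 100),
                                          filterSize: 3, filterCount: 16, stride: 1);
        conv.Compile(null);   // allowed as the first layer because inputDimension was set in the constructor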
Example #4
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer == null)
            {
                string msg = "Dimension of first layer of network not set!";
                Utils.ThrowException(msg);
            }
            else
            {
                inputDimension = previousLayer.outputDimension;
                index          = previousLayer.Index + 1;
            }

            outputDimension = new Dimension(inputDimension.imageCount, 1, 1, numNeurons);

            activations = Utils.Init4dArr(inputDimension.imageCount, 1, 1, numNeurons);

            #region Init initializers
            if (biasInitializer == null)
            {
                biasInitializer = new ConstantBiasInitializer(0d);
            }
            biases = biasInitializer.Initialize(numNeurons);


            // Xavier fan-in/fan-out: size of the incoming dimension and the number of neurons
            int fanIn  = inputDimension.height;
            int fanOut = numNeurons;
            if (weightInitializer == null)
            {
                weightInitializer = new XavierWeightInitializer(fanIn, fanOut);
            }
            weights = weightInitializer.Initialize(new Dimension(inputDimension.height, 1, 1, numNeurons));
            #endregion Init initializers

            compiled = true;

            return this;
        }
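Putting the two Compile examples together, a hedged end-to-end sketch (sizes are illustrative; Compile returns the layer itself, so the result can be passed straight to the next layer):

        // illustrative only: compile a conv layer as the first layer, then a dense layer on top of it
        var conv  = new Convolution1DLayer(new Dimension(32, 1, 1, 100), filterSize: 3, filterCount: 16);
        var dense = new LinearLayer(10);

        AbstractLayer first  = conv.Compile(null);     // uses the inputDimension supplied to the constructor
        AbstractLayer second = dense.Compile(first);   // takes outputDimension and index from the conv layer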