Example #1
0
        /// <summary>
        /// Checks the layer dimensions, wires this layer to the previous one
        /// and computes the output dimension (outputSize x outputSize per channel).
        /// </summary>
        /// <param name="previousLayer">previous layer in the model; null for the first layer</param>
        /// <returns>current layer</returns>
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer == null && inputDimension == null)
            {
                string msg = "Dimension of first layer of network not set!";
                Utils.ThrowException(msg);
            }
            else if (previousLayer != null)
            {
                // Adopt dimensions from the previous layer; the index follows it.
                // NOTE(fix): the old code set index = 0 whenever inputDimension was
                // unset, even for non-first layers, breaking the layer ordering.
                inputDimension = previousLayer.outputDimension;
                index          = previousLayer.Index + 1;
            }

            TestHyperParametersAndSetOutputSize(inputDimension.width);

            outputDimension = new Dimension(inputDimension.imageCount, inputDimension.depth,
                                            outputSize, outputSize);

            compiled = true;

            return this;
        }
Example #2
0
        /// <summary>
        /// Validates hyper-parameters, wires this convolutional layer to the
        /// previous one and initializes filters, biases and the activation buffer.
        /// </summary>
        /// <param name="previousLayer">previous layer in the model; null for the first layer</param>
        /// <returns>current layer</returns>
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer == null && inputDimension == null)
            {
                string msg = "Dimension of first layer of network not set!";
                Utils.ThrowException(msg);
            }
            else if (previousLayer != null)
            {
                index          = previousLayer.Index + 1;
                inputDimension = previousLayer.outputDimension;
            }

            TestHyperParametersAndSetOutputSize(inputDimension.width,
                                                inputDimension.height, inputDimension.depth);

            outputDimension = new Dimension(inputDimension.imageCount, filterCount,
                                            1, outputSize);

            #region Init initializers
            // Default bias initializer: constant zero.
            biasInitializer = biasInitializer ?? new ConstantBiasInitializer(0d);
            biases          = biasInitializer.Initialize(filterCount);

            // Xavier initialization sized by the receptive field; see:
            // https://stats.stackexchange.com/questions/198840/cnn-xavier-weight-initialization
            // https://github.com/keras-team/keras/blob/998efc04eefa0c14057c1fa87cab71df5b24bf7e/keras/initializations.py
            int fanInCount  = filterSize * filterSize * inputDimension.depth;
            int fanOutCount = filterCount;
            weightInitializer = weightInitializer ?? new XavierWeightInitializer(fanInCount, fanOutCount);
            #endregion Init initializers

            filters = weightInitializer.Initialize(new Dimension(filterCount,
                                                                 inputDimension.depth, inputDimension.width, filterSize));

            // Pre-allocate the buffer that forward passes will fill.
            activations = Utils.Init4dArr(inputDimension.imageCount,
                                          filterCount, inputDimension.width, outputSize);

            compiled = true;

            return this;
        }
Example #3
0
        /// <summary>
        /// Wires this layer to the previous one; the output dimension is the
        /// input dimension, unchanged.
        /// </summary>
        /// <param name="previousLayer">previous layer in the model; null for the first layer</param>
        /// <returns>current layer</returns>
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer != null)
            {
                inputDimension = previousLayer.outputDimension;
                index          = previousLayer.Index + 1;
            }
            else if (inputDimension == null)
            {
                string msg = "Dimension of first layer of network not set!";
                Utils.ThrowException(msg);
            }

            compiled        = true;
            outputDimension = inputDimension;

            return this;
        }
Example #4
0
        /// <summary>
        /// Validates that this dropout layer follows a flatten-shaped layer
        /// (depth == 1 and width == 1) and adopts that layer's output dimension.
        /// </summary>
        /// <param name="previousLayer">previous layer in the model; must not be null</param>
        /// <returns>current layer</returns>
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer == null)
            {
                string msg = "Dropout layer cannot be first layer!";
                Utils.ThrowException(msg);
            }

            Dimension prevOut = previousLayer.outputDimension;
            bool isFlat       = prevOut.depth == 1 && prevOut.width == 1;
            if (!isFlat)
            {
                string msg = "Dropout layer is adapted only for flatten layers!";
                Utils.ThrowException(msg);
            }

            outputDimension = prevOut;
            index           = previousLayer.Index + 1;
            compiled        = true;

            return this;
        }
Example #5
0
        /// <summary>
        /// Flattens the incoming dimension into a single vector of
        /// depth * height * width values per image.
        /// </summary>
        /// <param name="previousLayer">previous layer in the model; null for the first layer</param>
        /// <returns>current layer</returns>
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer != null)
            {
                index          = previousLayer.Index + 1;
                inputDimension = previousLayer.outputDimension;
            }
            else if (inputDimension == null)
            {
                string msg = "Dimension of first layer of network not set!";
                Utils.ThrowException(msg);
            }

            // Total number of values once a single image is flattened.
            size            = inputDimension.depth * inputDimension.height * inputDimension.width;
            outputDimension = new Dimension(inputDimension.imageCount, 1, 1, size);

            compiled = true;

            return this;
        }
Example #6
0
        /// <summary>
        /// Wires this fully-connected layer to the previous one and initializes
        /// weights, biases and the activation buffer.
        /// </summary>
        /// <param name="previousLayer">previous layer in the model; must not be null</param>
        /// <returns>current layer</returns>
        public override AbstractLayer Compile(AbstractLayer previousLayer)
        {
            if (previousLayer == null)
            {
                string msg = "Dimension of first layer of network not set!";
                Utils.ThrowException(msg);
            }

            // NOTE(fix): removed the dead `else if (previousLayer != null)` guard
            // (we are past the null check) and a duplicated assignment of
            // inputDimension that followed the if/else chain. Behavior unchanged.
            inputDimension = previousLayer.outputDimension;
            index          = previousLayer.Index + 1;

            outputDimension = new Dimension(inputDimension.imageCount, 1, 1, numNeurons);

            activations = Utils.Init4dArr(inputDimension.imageCount, 1, 1, numNeurons);

            #region Init initializers
            if (biasInitializer == null)
            {
                biasInitializer = new ConstantBiasInitializer(0d);
            }
            biases = biasInitializer.Initialize(numNeurons);

            // Xavier initialization sized by the flattened input length.
            int fanIn  = inputDimension.height;
            int fanOut = numNeurons;
            if (weightInitializer == null)
            {
                weightInitializer = new XavierWeightInitializer(fanIn, fanOut);
            }
            weights = weightInitializer.Initialize(new Dimension(inputDimension.height, 1, 1, numNeurons));
            #endregion Init initializers

            compiled = true;

            return this;
        }
Example #7
0
 /// <summary>
 /// Checks the dimensions of the layer and initializes
 /// all values the layer needs - e.g. weights and biases.
 /// </summary>
 /// <param name="previousLayer">previous layer in the model</param>
 /// <returns>current layer</returns>
 public abstract AbstractLayer Compile(AbstractLayer previousLayer);