Example #1
0
        /// <summary>
        /// Creates a fully connected (dense) layer and applies the given activation function to it.
        /// </summary>
        /// <param name="input">Input variable (previous layer) whose first shape dimension is the input width</param>
        /// <param name="outputDim">Output capacity (number of neurons)</param>
        /// <param name="activationFunction">Activation function; pass null for a purely linear layer</param>
        /// <param name="device">The device on which the calculation is made</param>
        /// <param name="name">Layer name (applied via a function alias)</param>
        /// <returns>The aliased layer function: activation(W * input + b)</returns>
        private static Function createFullyConnectedLinearLayer(Variable input, int outputDim, ActivationFunction activationFunction, DeviceDescriptor device, string name)
        {
            var inputDim = input.Shape[0];

            // Glorot (Xavier) uniform initialization for the weight matrix; ranks are inferred by CNTK.
            var weightInitializer = CNTKLib.GlorotUniformInitializer(
                CNTKLib.DefaultParamInitScale,
                CNTKLib.SentinelValueForInferParamInitRank,
                CNTKLib.SentinelValueForInferParamInitRank, 1);

            var weights = new Parameter(new int[] { outputDim, inputDim }, input.DataType, weightInitializer, device);
            var biases  = new Parameter(new int[] { outputDim }, input.DataType, 0, device);

            // Linear transform: W * x + b.
            var linear = CNTKLib.Times(weights, input) + biases;

            // Apply the activation only when one was supplied.
            var output = activationFunction == null
                ? linear
                : activationFunction.ApplyActivationFunction(linear, device);

            return Function.Alias(output, name);
        }
Example #2
0
        /// <summary>
        /// Adds a convolution layer for a two-dimensional vector. If the previous layer has a non-two-dimensional output, an exception is thrown
        /// </summary>
        /// <param name="input">Input variable (previous layer) with a two-dimensional output shape</param>
        /// <param name="kernelWidth">Convolution core width (columns in a two-dimensional matrix)</param>
        /// <param name="kernelHeight">Convolution core height (rows in a two-dimensional matrix)</param>
        /// <param name="device">The device on which the calculation is made</param>
        /// <param name="outFeatureMapCount">Bit depth of the output cell after convolution</param>
        /// <param name="hStride">The step of moving down the convolution window horizontally (along the matrix columns)</param>
        /// <param name="vStride">Step of shifting the convolution window vertically (along the rows of the matrix)</param>
        /// <param name="padding">Fill when using convolution</param>
        /// <param name="activationFunction">Activation function for the output layer. If not required, pass null</param>
        /// <param name="name">Layer name (applied via a function alias)</param>
        /// <returns>The aliased convolution layer function</returns>
        /// <exception cref="System.ArgumentOutOfRangeException">Thrown when <paramref name="padding"/> is not a supported value</exception>
        public static Function Build(Variable input, int kernelWidth, int kernelHeight, DeviceDescriptor device, int outFeatureMapCount = 1, int hStride = 1, int vStride = 1, Padding padding = Padding.Valid, ActivationFunction activationFunction = null, string name = "Conv2D")
        {
            // Map the padding mode to CNTK's per-axis autopadding flags.
            // Fix: the original left paddingVector null for any unrecognized enum value
            // and silently passed null into CNTKLib.Convolution; fail fast instead.
            bool[] paddingVector;
            switch (padding)
            {
                case Padding.Valid:
                    paddingVector = new bool[] { false, false, false };
                    break;
                case Padding.Same:
                    paddingVector = new bool[] { true, true, false };
                    break;
                default:
                    throw new System.ArgumentOutOfRangeException(nameof(padding), padding, "Unsupported padding mode.");
            }

            // Kernel shape: width x height x 1 input channel x outFeatureMapCount output maps,
            // initialized with Glorot (Xavier) uniform weights.
            var convMap              = new Parameter(new int[] { kernelWidth, kernelHeight, 1, outFeatureMapCount }, input.DataType, CNTKLib.GlorotUniformInitializer(), device);
            var convolution          = CNTKLib.Convolution(convMap, input, new int[] { hStride, vStride, 1 }, new bool[] { true }, paddingVector);
            var activatedConvolution = activationFunction?.ApplyActivationFunction(convolution, device) ?? convolution;

            return(Function.Alias(activatedConvolution, name));
        }
Example #3
0
        /// <summary>
        /// Creates a residual block of two dense layers: the input is passed through
        /// Dense(activated) then Dense(linear), summed with a skip connection around
        /// both layers, and the activation is applied to the sum.
        /// </summary>
        /// <param name="input">Input function (previous layer)</param>
        /// <param name="outputDimension">Output width of both dense layers</param>
        /// <param name="activationFunction">Activation for the first layer and for the final sum; pass null to skip activation</param>
        /// <param name="device">The device on which the calculation is made</param>
        /// <param name="name">Layer name (applied via a function alias)</param>
        /// <returns>The aliased residual block function</returns>
        private static Function createResidualLayer2(Function input, int outputDimension, ActivationFunction activationFunction, DeviceDescriptor device, string name)
        {
            var dataType       = input.Output.DataType;
            var inputDimension = input.Output.Shape[0];

            // Skip connection that carries the input past both dense layers.
            var skip = input;
            if (inputDimension != outputDimension)
            {
                // Dimensions differ, so project the skip path to outputDimension
                // with a learned linear map before the elementwise sum.
                var projection = new Parameter(new int[] { outputDimension, inputDimension }, dataType, CNTKLib.UniformInitializer(CNTKLib.DefaultParamInitScale), device);
                skip = CNTKLib.Times(projection, skip);
            }

            // First dense layer (with activation), second dense layer (linear).
            var firstLayer  = Dense.Build(input, outputDimension, activationFunction, device, "");
            var secondLayer = Dense.Build(firstLayer, outputDimension, null, device, "");

            // Add the skip connection and apply the final activation, if any.
            var summed    = CNTKLib.Plus(secondLayer, skip);
            var activated = activationFunction == null
                ? summed
                : activationFunction.ApplyActivationFunction(summed, device);

            return Function.Alias(activated, name);
        }
Example #4
0
        /// <summary>
        /// Creates a residual block of two dense layers: the input is passed through
        /// Dense(activated) then Dense(linear), summed with a skip connection around
        /// both layers, and the activation is applied to the sum.
        /// </summary>
        /// <param name="input">Input function (previous layer)</param>
        /// <param name="outputDimension">Output width of both dense layers</param>
        /// <param name="activationFunction">Activation for the first layer and for the final sum; pass null to skip activation</param>
        /// <param name="device">The device on which the calculation is made</param>
        /// <param name="name">Layer name (applied via a function alias)</param>
        /// <returns>The aliased residual block function</returns>
        private static Function createResidualLayer2(Function input, int outputDimension, ActivationFunction activationFunction, DeviceDescriptor device, string name)
        {
            var dataType = input.Output.DataType;

            // Forward the input past layer 1 (skip connection).
            var forwarding = input;

            if (outputDimension != input.Output.Shape[0])
            {
                // Dimensions differ: project the skip path to outputDimension with a
                // learned linear map so it can be added elementwise to the layer output.
                var scales = new Parameter(new int[] { outputDimension, input.Output.Shape[0] }, dataType, CNTKLib.UniformInitializer(CNTKLib.DefaultParamInitScale), device);
                forwarding = CNTKLib.Times(scales, forwarding);
            }

            // Create layer 1 (with activation).
            input = Dense.Build(input, outputDimension, activationFunction, device, "");
            // Create layer 2 without an activation function.
            input = Dense.Build(input, outputDimension, null, device, "");
            // Join with the forwarded input.
            input = CNTKLib.Plus(input, forwarding);

            // Apply the final activation to the sum, if one was supplied.
            input = activationFunction?.ApplyActivationFunction(input, device) ?? input;
            return(Function.Alias(input, name));
        }