Exemplo n.º 1
0
        /// <summary>
        /// Builds a fused (cuDNN-style) RNN stack over <paramref name="input"/> and
        /// returns the last element of the output sequence.
        /// </summary>
        /// <param name="input">Sequence input; its first dimension is the feature size.</param>
        /// <param name="hiddenSize">Hidden-state size of each recurrent layer.</param>
        /// <param name="layerSize">Number of stacked recurrent layers.</param>
        /// <param name="bidirectional">True to run the stack in both directions.</param>
        /// <param name="cellType">Recurrent cell kind, e.g. "lstm".</param>
        /// <param name="name">Base name for the created nodes and the node group.</param>
        public static Function OptimizedRNNStack(Variable input, int hiddenSize, int layerSize = 1, bool bidirectional = false, string cellType = "lstm", string name = "")
        {
            try
            {
                NodeGroup.EnterNewGroup(name);

                var inputDim = input.Shape.Dimensions[0];

                // Size of the single flattened weight blob the fused kernel consumes:
                // first-layer input weights + inter-layer weights + remaining gate
                // weights/biases.
                // NOTE(review): the 4x/8x factors assume a 4-gate cell (LSTM);
                // confirm before using with other cellType values.
                var totalWeights =
                    (inputDim - 1) * 4 * hiddenSize
                    + (layerSize - 1) * (8 * hiddenSize * hiddenSize + 8 * hiddenSize)
                    + 4 * hiddenSize * hiddenSize + 12 * hiddenSize;

                var weights = new Parameter(new int[] { totalWeights }, DataType.Float, CNTKLib.GlorotUniformInitializer(), DeviceDescriptor.UseDefaultDevice(), name + "_w");
                Register(weights);

                var stack = CNTKLib.OptimizedRNNStack(input, weights, (uint)hiddenSize, (uint)layerSize, bidirectional, cellType, name + "_rnn");
                Register(stack);

                // Keep only the final step of the output sequence.
                var last = CNTKLib.SequenceLast(stack);
                Register(last);

                last.RootFunction.SetName(name);

                return last;
            }
            finally
            {
                NodeGroup.LeaveGroup();
            }
        }
Exemplo n.º 2
0
        /// <summary>
        /// Emits a DOT "subgraph cluster" for <paramref name="group"/>, writes the
        /// nodes it owns directly, then recurses into its subgroups.
        /// </summary>
        /// <param name="group">Group to render; skipped if already rendered.</param>
        /// <param name="grouping">All nodes, bucketed by their owning group.</param>
        /// <param name="visited">Groups already rendered, to avoid duplicate clusters.</param>
        /// <param name="depth">Nesting depth; drives indentation (4 spaces per level).</param>
        private void WriteNodeInGroup(NodeGroup group, IEnumerable <IGrouping <NodeGroup, NodeGroup.Node> > grouping, HashSet <NodeGroup> visited, int depth)
        {
            // HashSet<T>.Add returns false when the item is already present, so one
            // call replaces the original Contains-then-Add double lookup.
            if (!visited.Add(group))
            {
                return;
            }

            var indent = new string(' ', depth * 4);

            _output.AppendFormat("{0}subgraph cluster_{1} {{\r\n", indent, group.UniqueName);
            _output.AppendFormat("{0}    label = \"{1}\";\r\n", indent, group.Name);
            _output.AppendFormat("{0}    labelloc = \"t\";\r\n", indent);
            _output.AppendFormat("{0}    labeljust = \"r\";\r\n", indent);
            _output.AppendFormat("{0}    style = \"dotted, filled\";\r\n", indent);
            _output.AppendFormat("{0}    fillcolor = \"#f0f0f0\";\r\n", indent);

            // Predicate overload avoids the Where(...).FirstOrDefault() chain.
            // May be null when the group owns no nodes directly; WriteNodes is
            // expected to tolerate that (unchanged from the original behavior).
            var nodes = grouping.FirstOrDefault(x => x.Key == group);

            WriteNodes(nodes, depth + 1);

            foreach (var g in group.Subgroups)
            {
                WriteNodeInGroup(g, grouping, visited, depth + 1);
            }

            _output.AppendFormat("{0}}}\r\n", indent);
        }
Exemplo n.º 3
0
        // Renders every registered node: the output node first, then each group of
        // nodes (grouped nodes are emitted starting from their outermost ancestor so
        // each cluster tree is written exactly once), and finally all edges.
        private void WriteAllNodes()
        {
            WriteOutputNode(1);

            var groups = _nodes.GroupBy(x => NodeGroup.FindGroup(x.Uid));
            var visited = new HashSet <NodeGroup>();

            foreach (var grouping in groups)
            {
                var root = grouping.Key;

                if (root == null)
                {
                    // Nodes without a group go straight to the top level.
                    WriteNodes(grouping, 1);
                    continue;
                }

                // Climb to the outermost ancestor; WriteNodeInGroup recurses back
                // down through the subgroups, and `visited` prevents re-rendering.
                while (root.Parent != null)
                {
                    root = root.Parent;
                }

                WriteNodeInGroup(root, groups, visited, 1);
            }

            // Emit all edges after every node/cluster has been declared.
            foreach (var link in _links)
            {
                _output.AppendFormat("    {0} -> {1};\r\n", link.From, link.To);
            }
        }
Exemplo n.º 4
0
        /// <summary>
        /// Builds a batch-normalization node with learnable scale/bias parameters and
        /// non-learnable running statistics, all shape-inferred from the input.
        /// </summary>
        public static Function BatchNormalization(Variable input, bool spatial, double initScale, double normalizationTimeConstant, double blendTimeConstant, double epsilon, bool useCuDNNEngine, bool disableRegularization, string name)
        {
            try
            {
                NodeGroup.EnterNewGroup(name);

                var device = DeviceDescriptor.UseDefaultDevice();

                // Let CNTK infer the statistics shape from the operand.
                var statShape = new int[] { CNTK.NDShape.InferredDimension };

                // Learnable scale (gamma) and bias (beta).
                var gamma = new Parameter(statShape, DataType.Float, initScale, device, name + "/scale");
                Register(gamma);
                var beta = new Parameter(statShape, DataType.Float, 0, device, name + "/bias");
                Register(beta);

                // Running statistics are Constants: updated by the engine, not by the learner.
                var runningMean = new Constant(statShape, 0.0f, device);
                Register(runningMean);
                var runningInvStd = new Constant(statShape, 0.0f, device);
                Register(runningInvStd);
                var runningCount = Constant.Scalar(0.0f, device);
                Register(runningCount);

                var output = CNTKLib.BatchNormalization(
                    input, gamma, beta,
                    runningMean, runningInvStd, runningCount,
                    spatial,
                    normalizationTimeConstant, blendTimeConstant, epsilon,
                    useCuDNNEngine, disableRegularization,
                    "");
                Register(output);

                output.RootFunction.SetName(name);

                return output;
            }
            finally
            {
                NodeGroup.LeaveGroup();
            }
        }
Exemplo n.º 5
0
        // Assume input shape is such as (x [, y [, z]], channels)
        public static Function ConvolutionTranspose(Variable input, int[] filterShape, int numFilters, string activation, CNTKDictionary initializer, bool[] padding, int[] strides, bool useBias, CNTKDictionary biasInitializer, int[] outputShape, int[] dilation, int reductionRank, int maxTempMemSizeInSamples, string name)
        {
            try
            {
                NodeGroup.EnterNewGroup(name);

                // Initializers

                if (initializer == null)
                {
                    initializer = CNTKLib.GlorotUniformInitializer();
                }

                if (useBias && biasInitializer == null)
                {
                    biasInitializer = CNTKLib.ConstantInitializer(0);
                }

                // Convolution map
                // (kernelWidth, kernelHeight, featureMapCount, kernelChannel)

                var convDims = new int[filterShape.Length + 2];
                filterShape.CopyTo(convDims, 0);
                convDims[convDims.Length - 2] = numFilters;
                convDims[convDims.Length - 1] = input.Shape.Dimensions[filterShape.Length]; // input channel

                var convolutionMap = new Parameter(convDims, DataType.Float, initializer, DeviceDescriptor.UseDefaultDevice(), name + "/weight");
                Register(convolutionMap);

                var conv = CNTKLib.ConvolutionTranspose(
                    convolutionMap,                      // CNTK.Variable convolutionMap
                    input,                               // CNTK.Variable operand
                    strides,                             // CNTK.NDShape strides
                    new BoolVector(new bool[] { true }), // CNTK.BoolVector sharing
                    new BoolVector(padding),             // CNTK.BoolVector autoPadding
                    outputShape,                         // CNTK.NDShape outputShape
                    dilation,                            // CNTK.NDShape dilation
                    (uint)reductionRank,                 // uint reductionRank
                    (uint)maxTempMemSizeInSamples,       // uint maxTempMemSizeInSamples
                    ""                                   // string name
                    );
                Register(conv);

                if (useBias)
                {
                    var bias = new Parameter(conv.Output.Shape, DataType.Float, biasInitializer, DeviceDescriptor.UseDefaultDevice(), name + "/bias");
                    Register(bias);
                    conv = CNTKLib.Plus(conv, bias);
                    Register(conv);
                }

                conv = ApplyActivation(conv, activation);

                conv.RootFunction.SetName(name);

                return(conv);
            }
            finally
            {
                NodeGroup.LeaveGroup();
            }
        }
Exemplo n.º 6
0
        /// <summary>
        /// Fully-connected layer: flattens higher-rank input, applies an affine
        /// transform (weight, optional bias), with an optional stabilizer before the
        /// affine step, an optional reshape to multi-dimensional output, and finally
        /// the requested activation.
        /// </summary>
        /// <param name="input">Input variable; flattened to rank 1 when its rank exceeds 1.</param>
        /// <param name="outputDimensions">Desired output shape; null means "same dimensions as the input".</param>
        /// <param name="initializer">Weight initializer; defaults to Glorot uniform when null.</param>
        /// <param name="useBias">Whether to add a bias term.</param>
        /// <param name="biasInitializer">Bias initializer; defaults to constant 0 when null.</param>
        /// <param name="stabilize">Whether to insert a stabilizer before the affine transform.</param>
        /// <param name="steepness">Steepness parameter forwarded to the stabilizer.</param>
        /// <param name="activation">Activation name applied to the final output.</param>
        /// <param name="device">Device on which parameters are allocated.</param>
        /// <param name="name">Base name for created nodes and the node group.</param>
        public static Function Dense(Variable input, int[] outputDimensions, CNTKDictionary initializer, bool useBias, CNTKDictionary biasInitializer, bool stabilize, double steepness, string activation, DeviceDescriptor device, string name)
        {
            try
            {
                NodeGroup.EnterNewGroup(name);

                if (outputDimensions == null)
                {
                    // BUG FIX: the original assigned `new Shape(...)` to this int[]
                    // variable, which cannot compile / breaks the Aggregate and
                    // Length uses below. The raw dimension array (captured before
                    // any flattening) is what the rest of the method expects.
                    outputDimensions = input.Shape.Dimensions.ToArray();
                }

                if (initializer == null)
                {
                    initializer = CNTKLib.GlorotUniformInitializer();
                }

                if (useBias && biasInitializer == null)
                {
                    biasInitializer = CNTKLib.ConstantInitializer(0);
                }

                // Flatten multi-dimensional input into a single vector.
                if (input.Shape.Rank > 1)
                {
                    int newDim = input.Shape.Dimensions.Aggregate((d1, d2) => d1 * d2);
                    input = CNTKLib.Reshape(input, new int[] { newDim });
                    Register(input);
                }

                var inputDimensions = input.Shape.Dimensions[0];

                // Total number of output units across all requested dimensions.
                int hiddenSize = outputDimensions.Aggregate((d1, d2) => d1 * d2);

                var weight = new Parameter(new int[] { hiddenSize, inputDimensions }, DataType.Float, initializer, device, name + "/weight");
                Register(weight);

                Parameter bias = null;
                if (useBias)
                {
                    bias = new Parameter(new int[] { hiddenSize }, DataType.Float, biasInitializer, device, name + "/bias");
                    Register(bias);
                }

                if (stabilize)
                {
                    input = Stabilize(input, steepness, device, name + "/stabilizer");
                }

                var output = GetAffine(input, weight, bias);

                // Restore the requested multi-dimensional output shape.
                if (outputDimensions.Length > 1)
                {
                    output = CNTKLib.Reshape(output, outputDimensions);
                    Register(output);
                }

                output = ApplyActivation(output, activation);

                output.RootFunction.SetName(name);

                return(output);
            }
            finally
            {
                NodeGroup.LeaveGroup();
            }
        }