Example #1
        // works on IRModel
        public bool InferAllLayersChannelOrder(Model model, out Dictionary <string, ChannelsOrder> layerChannelOrder)
        {
            layerChannelOrder = new Dictionary <string, ChannelsOrder>();

            IDictionary <string, TensorShape?> shapesByName = new Dictionary <string, TensorShape?>();
            IDictionary <string, int?>         ranksByName  = new Dictionary <string, int?>();

            foreach (var i in model.inputs)
            {
                ranksByName[i.name] = i.rank;
                if (!ModelAnalyzer.IsInputShapeAcceptablyKnowForShapeInference(i))
                {
                    continue;
                }
                shapesByName[i.name] = new TensorShape(i.shape);
            }

            IRShapeInferenceAndConstantFusing shapeInferencePass = new IRShapeInferenceAndConstantFusing();

            shapeInferencePass.InferAllShapes(model, ref shapesByName, ref ranksByName);

            // flood-fill approach: NCHW layout is propagated from NCHW ops
            //  * onnx-nchw ops are flagged as being native nchw
            //  * nchw layout is propagated to upstream and downstream nodes
            //  foreach node:
            //    take layout being propagated to
            //    if T or T-1 flip layout depending on upstream/downstream direction
            //    - stop if layout is the same as previously propagated
            //    - native nchw layout has priority
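            //
            //  illustrative walk-through on a hypothetical 3-layer graph (for explanation only, not taken from any real model):
            //    Conv -> Transpose(0,2,3,1) -> Relu
            //    seed:       Conv is enqueued and flagged NativeNCHW
            //    downstream: Transpose(0,2,3,1) inherits the NCHW side, is layout-changing, so it is flagged TransposeToNHWC
            //    downstream: Relu is flagged NHWC
            //    upstream:   Conv is revisited with NCHW but keeps its NativeNCHW flag (native layout has priority)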
            Queue <(string, ChannelsOrder, FlowDirection)> layersToInferLayout = new Queue <(string, ChannelsOrder, FlowDirection)>();

            for (int l = 0; l < model.layers.Count; l++)
            {
                var layer = model.layers[l];
                if (!IsLayerNecessarilyNCHWOnnx(layer))
                {
                    continue;
                }

                layersToInferLayout.Enqueue((layer.name, ChannelsOrder.NativeNCHW, FlowDirection.Seed));
            }

            while (layersToInferLayout.Any())
            {
                (string, ChannelsOrder, FlowDirection) layerData = layersToInferLayout.Dequeue();
                string        name                = layerData.Item1;
                ChannelsOrder deducedChannelOrder = layerData.Item2;
                // FlowDirection: Seed = in-place native flag, Downstream / Upstream = direction the layout is being propagated
                FlowDirection flowDirection       = layerData.Item3;


                if (!layerChannelOrder.ContainsKey(name))
                {
                    layerChannelOrder[name] = deducedChannelOrder;
                }
                else if (deducedChannelOrder == layerChannelOrder[name])
                {
                    continue;
                }
                else if (layerChannelOrder[name] == ChannelsOrder.NativeNCHW)
                {
                    continue;
                }
                // heuristic to stop ping-pong loop, prioritize NHWC over NCHW as it implies less transposes
                // if incoming is NativeNCHW always propagate that
                // TODO: count # of transpose swaps
                else if (layerChannelOrder[name] == ChannelsOrder.NHWC && deducedChannelOrder != ChannelsOrder.NativeNCHW)
                {
                    continue;
                }

                Layer layer;
                bool  found = ModelAnalyzer.FindLayerByName(model, name, out layer);
                if (IsLayerChangingLayoutToNHWC(layer, shapesByName, ranksByName))
                {
                    // NCHW -> T -> NHWC
                    if (((deducedChannelOrder == ChannelsOrder.NCHW) || (deducedChannelOrder == ChannelsOrder.NativeNCHW)) && (flowDirection == FlowDirection.Downstream))
                    {
                        deducedChannelOrder = ChannelsOrder.TransposeToNHWC;
                    }
                    // NCHW <- T <- NHWC
                    else if ((deducedChannelOrder == ChannelsOrder.NHWC) && (flowDirection == FlowDirection.Upstream))
                    {
                        deducedChannelOrder = ChannelsOrder.TransposeToNHWC;
                    }
                }
                else if (IsLayerChangingLayoutToNCHW(layer, shapesByName, ranksByName))
                {
                    // NHWC -> T-1 -> NCHW
                    if ((deducedChannelOrder == ChannelsOrder.NHWC) && (flowDirection == FlowDirection.Downstream))
                    {
                        deducedChannelOrder = ChannelsOrder.TransposeToNCHW;
                    }
                    // NHWC <- T-1 <- NCHW
                    else if (((deducedChannelOrder == ChannelsOrder.NCHW) || (deducedChannelOrder == ChannelsOrder.NativeNCHW)) && (flowDirection == FlowDirection.Upstream))
                    {
                        deducedChannelOrder = ChannelsOrder.TransposeToNCHW;
                    }
                }

                if ((deducedChannelOrder == ChannelsOrder.TransposeToNCHW || deducedChannelOrder == ChannelsOrder.TransposeToNHWC) && (deducedChannelOrder == layerChannelOrder[name]))
                {
                    continue;
                }

                layerChannelOrder[name] = deducedChannelOrder;

                foreach (var input in layer.inputs)
                {
                    if (deducedChannelOrder == ChannelsOrder.TransposeToNCHW)
                    {
                        layersToInferLayout.Enqueue((input, ChannelsOrder.NHWC, FlowDirection.Upstream));
                    }
                    else if (deducedChannelOrder == ChannelsOrder.TransposeToNHWC)
                    {
                        layersToInferLayout.Enqueue((input, ChannelsOrder.NCHW, FlowDirection.Upstream));
                    }
                    else
                    {
                        layersToInferLayout.Enqueue((input, deducedChannelOrder, FlowDirection.Upstream));
                    }
                }

                var outputs = ModelAnalyzer.FindLayerOutputs(model, layer.name);
                foreach (var output in outputs)
                {
                    if (deducedChannelOrder == ChannelsOrder.TransposeToNCHW)
                    {
                        layersToInferLayout.Enqueue((output, ChannelsOrder.NCHW, FlowDirection.Downstream));
                    }
                    else if (deducedChannelOrder == ChannelsOrder.TransposeToNHWC)
                    {
                        layersToInferLayout.Enqueue((output, ChannelsOrder.NHWC, FlowDirection.Downstream));
                    }
                    else
                    {
                        layersToInferLayout.Enqueue((output, deducedChannelOrder, FlowDirection.Downstream));
                    }
                }
            }

            bool modelExportedAsNHWC = false;

            foreach (string key in layerChannelOrder.Keys.ToList())
            {
                var value = layerChannelOrder[key];
                if (value == ChannelsOrder.NativeNCHW)
                {
                    layerChannelOrder[key] = ChannelsOrder.NCHW;
                }

                if (value == ChannelsOrder.NHWC)
                {
                    modelExportedAsNHWC = true;
                }
            }

            return modelExportedAsNHWC;
        }
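
For reference, a minimal sketch of the two enums these examples pivot on, reconstructed only from the members referenced in the code above (a sketch, not the actual Barracuda definitions, which may carry additional values):

        public enum ChannelsOrder
        {
            NCHW,            // channels-first activations
            NHWC,            // channels-last activations
            NativeNCHW,      // ONNX op that is inherently NCHW (seed for the flood fill)
            TransposeToNCHW, // layer that converts NHWC activations back to NCHW
            TransposeToNHWC  // layer that converts NCHW activations to NHWC
        }

        public enum FlowDirection
        {
            Seed,       // in-place native flag
            Downstream, // propagate towards consumers
            Upstream    // propagate towards producers
        }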
Example #2

        // works on IRModel
        public bool InferAllLayersChannelOrder(Model model, out Dictionary <string, ChannelsOrder> layerChannelOrder)
        {
            // TF2Onnx : pattern T (.* Conv .*) T-1
            // * being transpose commutative layer
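            //
            // e.g. a tf2onnx-style export (hypothetical graph, for illustration only):
            //   Transpose(0,3,1,2) -> ... -> Conv -> ... -> Transpose(0,2,3,1)
            // matching this bracket means the exporter kept the original NHWC inputs and wrapped the NCHW ops in transposes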
            layerChannelOrder = new Dictionary <string, ChannelsOrder>();

            IDictionary <string, TensorShape?> shapesByName = new Dictionary <string, TensorShape?>();
            IDictionary <string, int?>         ranksByName  = new Dictionary <string, int?>();

            foreach (var i in model.inputs)
            {
                ranksByName[i.name] = i.rank;
                if (!ModelAnalyzer.IsInputShapeAcceptablyKnowForShapeInference(i))
                {
                    continue;
                }
                shapesByName[i.name] = new TensorShape(i.shape);
            }

            IRShapeInferenceAndConstantFusing shapeInferencePass = new IRShapeInferenceAndConstantFusing();

            shapeInferencePass.InferAllShapes(model, ref shapesByName, ref ranksByName);

            bool inputsNHWC = false;
            bool inputsNHWCExportedInputsAsNCHW = false;

            bool patternMatchStart = false;
            bool patternMatchConv  = false;
            // tf to onnx does not swizzle axis, need to match * Conv * T-1 ...
            bool patternMatchStartInputsAsNCHWConv = false;

            for (int l = 0; l < model.layers.Count; l++)
            {
                var layer = model.layers[l];
                if (!patternMatchStart &&
                    IsLayerTranpose(layer) && Enumerable.SequenceEqual(layer.pool, new[] { 0, 3, 1, 2 }) ||
                    IsLayerReshape(layer) && (shapesByName[layer.inputs[0]] != null) && IsReshapeTransposeToNCHW(layer, shapesByName[layer.inputs[0]].Value))
                {
                    patternMatchStart = true;
                }
                else if (patternMatchStart && patternMatchConv &&
                         ((IsLayerTranpose(layer) && Enumerable.SequenceEqual(layer.pool, new[] { 0, 2, 3, 1 })) ||
                          (IsLayerReshape(layer) && (shapesByName[layer.inputs[0]] != null) && IsReshapeTransposeToNHWC(layer, shapesByName[layer.inputs[0]].Value)) ||
                          (IsLayerSqueeze(layer) && (ranksByName[layer.inputs[0]] != null) && IsSqueezeTransposeToNHWC(layer, ranksByName[layer.inputs[0]].Value)) ||
                          (IsLayerFlatten(layer) && (ranksByName[layer.inputs[0]] != null) && IsFlattenTransposeToNHWC(layer, ranksByName[layer.inputs[0]].Value))))
                {
                    inputsNHWC = true;
                }
                else if (patternMatchStart && IsLayerConv(layer))
                {
                    patternMatchConv = true;
                }

                if (!inputsNHWCExportedInputsAsNCHW && patternMatchStartInputsAsNCHWConv &&
                    ((IsLayerTranpose(layer) && Enumerable.SequenceEqual(layer.pool, new[] { 0, 2, 3, 1 })) ||
                     (IsLayerReshape(layer) && (shapesByName[layer.inputs[0]] != null) && IsReshapeTransposeToNHWC(layer, shapesByName[layer.inputs[0]].Value))))
                {
                    inputsNHWCExportedInputsAsNCHW = true;
                }
                else if (!patternMatchStartInputsAsNCHWConv && !patternMatchStart && IsLayerConv(layer))
                {
                    patternMatchStartInputsAsNCHWConv = true;
                }
            }
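
            // summary of the flags set above (added note):
            //   inputsNHWC                     -> the T (.* Conv .*) T-1 bracket matched, i.e. the exporter kept NHWC inputs
            //   inputsNHWCExportedInputsAsNCHW -> only .* Conv .* T-1 matched, i.e. the inputs were already exported as NCHW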

            // flag each layer as being NHWC or NCHW
            for (int i = 0; i < model.inputs.Count; i++)
            {
                Model.Input input = model.inputs[i];
                if (!inputsNHWCExportedInputsAsNCHW)
                {
                    layerChannelOrder[input.name] = inputsNHWC ? ChannelsOrder.NHWC : ChannelsOrder.NCHW;
                }
                else
                {
                    layerChannelOrder[input.name] = ChannelsOrder.NCHW;
                }
            }

            for (int l = 0; l < model.layers.Count; l++)
            {
                var layer = model.layers[l];

                // note: && binds tighter than ||, so the layerChannelOrder check below only guards the Reshape branch
                if ((IsLayerTranpose(layer) && Enumerable.SequenceEqual(layer.pool, new[] { 0, 3, 1, 2 })) ||
                    (IsLayerReshape(layer) && (shapesByName[layer.inputs[0]] != null) && IsReshapeTransposeToNCHW(layer, shapesByName[layer.inputs[0]].Value) &&
                     layerChannelOrder[layer.inputs[0]] == ChannelsOrder.NHWC))
                {
                    layerChannelOrder[layer.name] = ChannelsOrder.TransposeToNCHW;
                }
                // same precedence note: the layerChannelOrder check only guards the Reshape branch
                else if ((IsLayerTranpose(layer) && Enumerable.SequenceEqual(layer.pool, new[] { 0, 2, 3, 1 })) ||
                         (IsLayerReshape(layer) && (shapesByName[layer.inputs[0]] != null) && IsReshapeTransposeToNHWC(layer, shapesByName[layer.inputs[0]].Value) &&
                          layerChannelOrder[layer.inputs[0]] == ChannelsOrder.NCHW))
                {
                    layerChannelOrder[layer.name] = ChannelsOrder.TransposeToNHWC;
                }
                else
                {
                    string inputWithKnownOrder = null;
                    for (int i = 0; i < layer.inputs.Length; i++)
                    {
                        var input = layer.inputs[i];
                        if (layerChannelOrder.ContainsKey(input))
                        {
                            inputWithKnownOrder = input;
                            break;
                        }
                    }

                    if (inputWithKnownOrder == null)
                    {
                        continue;
                    }
                    Assert.IsNotNull(inputWithKnownOrder);
                    ChannelsOrder inputOrder = layerChannelOrder[inputWithKnownOrder];

                    if (inputOrder == ChannelsOrder.TransposeToNCHW)
                    {
                        inputOrder = ChannelsOrder.NCHW;
                    }
                    else if (inputOrder == ChannelsOrder.TransposeToNHWC)
                    {
                        inputOrder = ChannelsOrder.NHWC;
                    }

                    // all layers with unknown layout are const
                    for (int i = 0; i < layer.inputs.Length; i++)
                    {
                        var input = layer.inputs[i];
                        if (!layerChannelOrder.ContainsKey(input))
                        {
                            layerChannelOrder[input] = inputOrder;
                        }
                    }

                    layerChannelOrder[layer.name] = inputOrder;
                }
            }

            // TODO Assert that all layers have a channel order
            // Assert that all layers are NHWC if inputsNHWC
            return inputsNHWC || inputsNHWCExportedInputsAsNCHW;
        }
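
A minimal usage sketch, assuming the method above lives on an instantiable layout-inference pass (the class name LayoutInferencePass and the pre-loaded model variable are placeholders for illustration, not Barracuda API):

        // run the pass on an imported IR model and inspect the inferred per-layer layouts
        var pass = new LayoutInferencePass();                 // hypothetical host class for InferAllLayersChannelOrder
        Dictionary<string, ChannelsOrder> layerChannelOrder;
        bool exportedAsNHWC = pass.InferAllLayersChannelOrder(model, out layerChannelOrder);

        foreach (var kv in layerChannelOrder)
            UnityEngine.Debug.Log($"{kv.Key} -> {kv.Value}");
        UnityEngine.Debug.Log($"model exported as NHWC: {exportedAsNHWC}");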