        // Appends the requested layer outputs to the model and, when trimOutputs is given,
        // trims away every layer that does not contribute to the final set of outputs.
        internal static Model PatchModel(Model model, string[] additionalOutputs, string[] trimOutputs = null)
        {
            bool trimModel = trimOutputs != null;

            if (model.flags.HasFlag(Model.Flags.NeedsCompilation))
            {
                model.Compile();
            }

            if (trimModel)
            {
                // warn about requested trim outputs that are neither existing model outputs
                // nor layers about to be surfaced via additionalOutputs
                foreach (var o in trimOutputs.Except(model.outputs))
                {
                    if (additionalOutputs == null || !additionalOutputs.Contains(o))
                    {
                        D.LogWarning($"Output specified in trimOutputs was not found in the model: {o}");
                    }
                }

                var newModel = model.ShallowCopy();
                newModel.outputs = trimOutputs.Intersect(model.outputs).ToList();
                model            = newModel;
            }

            if (additionalOutputs != null)
            {
                foreach (var o in additionalOutputs.Except(model.layers.Select(l => l.name)))
                {
                    D.LogWarning($"Layer specified in additionalOutputs was not found in the model: {o}");
                }

                // 'new' means that output name does not yet exist in model.outputs
                // 'valid' means that output name matches one of the existing model.layer names
                var newAndValidAdditionalOutputs =
                    additionalOutputs.Except(model.outputs).Intersect(model.layers.Select(l => l.name));

                var newModel = model.ShallowCopy();
                newModel.outputs.AddRange(newAndValidAdditionalOutputs);
                model = newModel;
            }

            if (trimModel)
            {
                // drop every layer that none of the remaining outputs depends on
                var newModel = model.ShallowCopy();
                var upstream = ModelAnalyzer.FindUpstreamLayers(model, newModel.outputs.ToArray());
                foreach (var l in model.layers)
                {
                    if (!upstream.Contains(l))
                    {
                        newModel.layers.Remove(l);
                    }
                }

                model = newModel;
            }

            // strip no-op layers from the patched model
            model = ModelOptimizer.RemoveNoop(model);

            return model;
        }
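
        // --- Usage sketch (assumption: not part of the original source) ---
        // PatchModel is internal, so a caller like this would have to live in the same
        // assembly. The helper name and the choice to keep the first original output are
        // hypothetical; the sketch surfaces one hidden layer as a debug output and trims
        // the model down to the layers that output actually needs.
        internal static Model ExposeLayerAndTrim(Model model, string hiddenLayerName)
        {
            string keptOutput = model.outputs[0];   // keep at least one original output
            return PatchModel(model,
                additionalOutputs: new[] { hiddenLayerName },
                trimOutputs:       new[] { hiddenLayerName, keptOutput });
        }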
        // Reinterprets a layer's activation as its fused counterpart; asserts that the
        // layer type supports fusing and throws if the activation itself is not fusable.
        private static Layer.FusedActivation GetAndVerifyFusedActivation(Layer l)
        {
            Assert.IsTrue(ModelOptimizer.IsLayerSupportingActivationFusing(l.type));
            if (!ModelOptimizer.IsActivationFusable(l.activation))
            {
                throw new NotImplementedException("This activation function is not yet implemented as a fusable activation. See Layer.FusedActivation for the supported set.");
            }

            return (Layer.FusedActivation)l.activation;
        }
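
        // --- Usage sketch (assumption: not part of the original source) ---
        // GetAndVerifyFusedActivation is the read side of activation fusing: an optimizer
        // pass stores a fusable activation on the main layer, and execution code later
        // recovers it as a Layer.FusedActivation via this method. A minimal write side:
        private static void FuseActivationInto(Layer mainLayer, Layer activationLayer)
        {
            if (ModelOptimizer.IsLayerSupportingActivationFusing(mainLayer.type) &&
                ModelOptimizer.IsActivationFusable(activationLayer.activation))
            {
                // after this, GetAndVerifyFusedActivation(mainLayer) yields the fused form
                mainLayer.activation = activationLayer.activation;
            }
        }
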
        // Applies load-time fix-ups to a freshly appended layer; may mutate 'layer' in
        // place and append extra layers to 'layers'.
        private static void PatchLayer(List<Layer> layers, Layer layer)
        {
            // Split Load so that each constant tensor gets its own layer
            // for the sake of simplicity of the execution code
            if (layer.type == Layer.Type.Load &&
                layer.datasets.Length > 1)
            {
                foreach (var t in layer.datasets)
                {
                    Layer layerC = new Layer(t.name, Layer.Type.Load);    // load using tensor name
                    layerC.inputs   = layer.inputs;
                    layerC.datasets = new[] { t };

                    layers.Add(layerC);
                }

                // patch original layer
                layer.name     = layer.name + "_nop";
                layer.type     = Layer.Type.Nop;
                layer.datasets = new Layer.DataSet[] {};
            }

            // Split activation part into separate layer when activation fusing is not supported.
            // NOTE: Keras specific. Only Keras exporter packs both Dense/Conv and Activation into the same layer.
            // @TODO: move layer split directly into Keras exporter
            if (layer.type != Layer.Type.Activation &&
                layer.activation != Layer.Activation.None &&
                (!ModelOptimizer.IsLayerSupportingActivationFusing(layer.type) || !ModelOptimizer.IsActivationFusable(layer.activation)))
            {
                var affineOutput = layer.name + "_tmp";

                Layer layerA = new Layer(layer.name, layer.activation);   // take the original layer name
                layerA.inputs = new[] { affineOutput };

                // patch original layer
                layer.name       = affineOutput;
                layer.activation = Layer.Activation.None;
                // sanity check: the layer being patched must be the most recently added one
                Assert.AreEqual(layers[layers.Count - 1].name, layer.name);
                Assert.AreEqual(layers[layers.Count - 1].activation, layer.activation);

                layers.Add(layerA);
            }

            // @TODO: Enable Dropout
            // @TEMP: disabled runtime Dropout noise to get more predictable results for auto testing
            if (layer.type == Layer.Type.Dropout)
            {
                layer.type       = Layer.Type.Activation;
                layer.activation = Layer.Activation.None;
            }
        }
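
        // --- Usage sketch (assumption: not part of the original source) ---
        // PatchLayer expects the layer being patched to be the most recently added entry
        // in 'layers' (see the asserts in the activation-split branch), so a loader would
        // call it right after appending each layer:
        private static List<Layer> PatchAllLayers(IEnumerable<Layer> sourceLayers)
        {
            var layers = new List<Layer>();
            foreach (var layer in sourceLayers)
            {
                layers.Add(layer);
                PatchLayer(layers, layer);
            }
            return layers;
        }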