Example #1
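        // Checks that the layer's type supports activation fusing and that its
        // activation has a fused counterpart, then returns it as a FusedActivation.
        // (Shown outside a full source file: List<T> needs System.Collections.Generic,
        // NotImplementedException needs System, and Assert here is Unity's
        // UnityEngine.Assertions.Assert.)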
        private static Layer.FusedActivation GetAndVerifyFusedActivation(Layer l)
        {
            Assert.IsTrue(ModelOptimizer.IsLayerSupportingActivationFusing(l.type));
            if (!ModelOptimizer.IsActivationFusable(l.activation))
            {
                throw new NotImplementedException("This activation function is not implemented as a fusable one yet! Check Layer.FusedActivation for supported ones.");
            }

            // The direct cast relies on fusable Layer.Activation entries sharing their
            // numeric values with the corresponding Layer.FusedActivation entries;
            // anything else was rejected above.
            return (Layer.FusedActivation)l.activation;
        }

        private static void PatchLayer(List<Layer> layers, Layer layer)
        {
            // Split Load so that each constant tensor gets its own layer
            // for the sake of simplicity of the execution code
            if (layer.type == Layer.Type.Load &&
                layer.datasets.Length > 1)
            {
                foreach (var t in layer.datasets)
                {
                    Layer layerC = new Layer(t.name, Layer.Type.Load);    // load using tensor name
                    layerC.inputs   = layer.inputs;
                    layerC.datasets = new[] { t };

                    layers.Add(layerC);
                }

                // patch original layer
                layer.name     = layer.name + "_nop";
                layer.type     = Layer.Type.Nop;
                layer.datasets = new Layer.DataSet[] {};
            }

            // Split activation part into separate layer when activation fusing is not supported.
            // NOTE: Keras specific. Only Keras exporter packs both Dense/Conv and Activation into the same layer.
            // @TODO: move layer split directly into Keras exporter
            if (layer.type != Layer.Type.Activation &&
                layer.activation != Layer.Activation.None &&
                (!ModelOptimizer.IsLayerSupportingActivationFusing(layer.type) || !ModelOptimizer.IsActivationFusable(layer.activation)))
            {
                var affineOutput = layer.name + "_tmp";

                // The activation becomes its own layer and takes the original layer name,
                // so downstream layers that reference that name now read the activation
                // output; the affine part keeps running under the temporary name.
                Layer layerA = new Layer(layer.name, layer.activation);
                layerA.inputs = new[] { affineOutput };

                // patch original layer
                layer.name       = affineOutput;
                layer.activation = Layer.Activation.None;
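                // Sanity check: the layer being patched must be the tail of the list,
                // so that layerA lands directly after it and consumes its output.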
                Assert.AreEqual(layers[layers.Count - 1].name, layer.name);
                Assert.AreEqual(layers[layers.Count - 1].activation, layer.activation);

                layers.Add(layerA);
            }

            // @TODO: Enable Dropout
            // @TEMP: disabled runtime Dropout noise to get more predictable results for auto testing
            if (layer.type == Layer.Type.Dropout)
            {
                layer.type       = Layer.Type.Activation;
                layer.activation = Layer.Activation.None;
            }
        }
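
For context, here is a sketch of how PatchLayer might be driven while a model's layer list is rebuilt during loading. The PatchAllLayers helper and the rebuild loop are illustrative assumptions, not Barracuda's actual loader code:

        // Hypothetical driver: PatchLayer expects the layer being patched to be the
        // last element of the list (see the asserts above) and may append extra
        // Load/Activation layers after it.
        private static List<Layer> PatchAllLayers(IEnumerable<Layer> originalLayers)
        {
            var patched = new List<Layer>();
            foreach (var layer in originalLayers)
            {
                patched.Add(layer);           // keep the layer at the tail before patching
                PatchLayer(patched, layer);   // may mutate layer and append new layers
            }
            return patched;
        }

Appending each layer before patching it keeps it at the tail of the list, which is exactly the invariant the asserts in the activation-split branch verify.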