Example #1
0
        /// <summary>
        /// Appends the activation implied by <paramref name="fusedActivationFunction"/> or
        /// <paramref name="nonTrivialActivation"/> to the graph output <paramref name="y"/>.
        /// </summary>
        /// <param name="graph">Graph that the activation nodes are added to.</param>
        /// <param name="y">Output tensor the activation is applied to.</param>
        /// <param name="fusedActivationFunction">Fused activation; <c>Linear</c> means "none fused".</param>
        /// <param name="nonTrivialActivation">
        /// Optional stand-alone activation layer; only consulted when no fused activation is present.
        /// </param>
        /// <returns>The activated output, or <paramref name="y"/> unchanged when no activation applies.</returns>
        /// <exception cref="System.NotSupportedException">
        /// Thrown when <paramref name="nonTrivialActivation"/> is a layer type this converter cannot lower.
        /// </exception>
        private TFOutput AddActivation(TFGraph graph, TFOutput y, ActivationFunctionType fusedActivationFunction, Layer nonTrivialActivation)
        {
            if (fusedActivationFunction != ActivationFunctionType.Linear)
            {
                return graph.AddActivation(y, fusedActivationFunction);
            }

            if (nonTrivialActivation is LeakyRelu leakyRelu)
            {
                // LeakyRelu(x) = max(x, slope * x), assuming 0 <= slope < 1.
                return graph.Maximum(y, graph.Mul(y, graph.Const(leakyRelu.Slope)));
            }

            if (nonTrivialActivation != null)
            {
                // Previously an unrecognized activation layer was silently dropped, which
                // yields a graph that no longer matches the source model. Fail loudly instead.
                throw new System.NotSupportedException(
                    $"Unsupported activation layer: {nonTrivialActivation.GetType().Name}.");
            }

            return y;
        }