Example #1
        /// <summary>
        /// Runs the specified activation function over the given vector.
        /// </summary>
        /// <param name="activation">The values for the input layer.</param>
        /// <param name="activationFunction">The activation function to use.</param>
        /// <param name="derivative">Whether we want the derivative of the values.</param>
        /// <returns>A vector to give to the next layer of the net.</returns>
        internal Vector<double> RunActivation(Vector<double> activation, NonLinearFunction activationFunction, bool derivative = false)
        {
            switch (activationFunction)
            {
                case NonLinearFunction.Sigmoid:
                    return NonLinearTransformations.Sigmoid(activation, derivative);

                case NonLinearFunction.Tanh:
                    return NonLinearTransformations.Tanh(activation, derivative);

                case NonLinearFunction.ReLU:
                    return NonLinearTransformations.ReLU(activation, derivative);

                case NonLinearFunction.LReLU:
                    return NonLinearTransformations.LReLU(activation, derivative);

                default:
                    return null;
            }
        }
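
For reference, the sketch below shows one way an element-wise sigmoid like the one RunActivation dispatches to could be written. It assumes the Vector<double> type comes from MathNet.Numerics.LinearAlgebra and that the derivative flag selects s(x) * (1 - s(x)); the project's real NonLinearTransformations class is not shown here and may differ.

        // Hypothetical sketch -- not the project's NonLinearTransformations class.
        using System;
        using MathNet.Numerics.LinearAlgebra;

        internal static class SigmoidSketch
        {
            internal static Vector<double> Sigmoid(Vector<double> activation, bool derivative = false)
            {
                // Element-wise logistic function 1 / (1 + e^(-x)).
                // Zeros.Include forces zero entries to be mapped too, since sigmoid(0) = 0.5.
                Vector<double> sigmoid = activation.Map(x => 1.0 / (1.0 + Math.Exp(-x)), Zeros.Include);

                // When the derivative is requested, return s(x) * (1 - s(x)) element-wise.
                return derivative
                    ? sigmoid.Map(s => s * (1.0 - s))
                    : sigmoid;
            }
        }

A call such as RunActivation(layerOutput, NonLinearFunction.Sigmoid) would then squash every component of the vector into the (0, 1) range before it is passed to the next layer.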
Example #2
        /// <summary>
        /// Runs through a non-linear layer of the network.
        /// </summary>
        /// <param name="layerInfo">The layer info used for this layer.</param>
        /// <param name="inputImages">
        /// A matrix of all the images that will be run through the non-linear function. Each row is
        /// an image.
        /// </param>
        /// <returns>A matrix of all the resulting images. Each row is an image.</returns>
        internal Matrix<double> NonLinear(NonLinearLayerInformation layerInfo, Matrix<double> inputImages)
        {
            switch (layerInfo.NonLinearFunction)
            {
                case NonLinearFunction.Sigmoid:
                    return NonLinearTransformations.Sigmoid(inputImages);

                case NonLinearFunction.Tanh:
                    return NonLinearTransformations.Tanh(inputImages);

                case NonLinearFunction.ReLU:
                    return NonLinearTransformations.ReLU(inputImages);

                case NonLinearFunction.LReLU:
                    return NonLinearTransformations.LReLU(inputImages);

                default:
                    return null;
            }
        }
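
Similarly, here is a minimal sketch of a matrix-wide ReLU like the one this layer dispatches to, again assuming the Matrix<double> type comes from MathNet.Numerics.LinearAlgebra; the actual NonLinearTransformations implementation is not shown and may differ.

        // Hypothetical sketch -- not the project's NonLinearTransformations class.
        using System;
        using MathNet.Numerics.LinearAlgebra;

        internal static class ReLUSketch
        {
            internal static Matrix<double> ReLU(Matrix<double> inputImages)
            {
                // max(0, x) applied to every element; the image-per-row layout is preserved.
                return inputImages.Map(x => Math.Max(0.0, x));
            }
        }

Because the transformation is purely element-wise, the same call works whether the matrix holds one image per row or the whole batch at once.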