Code example #1
 /// <summary>
 /// Adds a non-linear layer to the network.
 /// </summary>
 /// <param name="function">The non-linear function to use in this layer.</param>
 public void AddNonLinearLayer(NonLinearFunction function)
 {
     this.LayerInformation.Add(new NonLinearLayerInformation
     {
         NonLinearFunction = function
     });
 }
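
The NonLinearFunction type is not defined in these examples. A minimal sketch of what it might look like, inferred from the cases handled in code example #3 (the actual declaration may differ):

 // Hedged sketch: the members are inferred from the switch in code example #3;
 // the real enum in the project may contain additional values.
 public enum NonLinearFunction
 {
     Sigmoid,
     Tanh,
     ReLU,
     LReLU
 }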
Code example #2
 /// <summary>
 /// Adds a new layer to the network.
 /// </summary>
 /// <param name="nodeCount">How many nodes that should be in this layer.</param>
 /// <param name="activationFunction">The activation function to use for this layer.</param>
 public void AddLayer(int nodeCount, NonLinearFunction activationFunction)
 {
     this.LayerInformation.Add(new FullyConnectedLayerInformation
     {
         NodeCount          = nodeCount,
         ActivationFunction = activationFunction
     });
     if (this.LayerInformation.Count > 1)
     {
         this.Biases.Add(Vector<double>.Build.Random(nodeCount, new Normal(0.0, 1.0)));
         this.Weights.Add(Matrix<double>.Build.Random(nodeCount, this.LayerInformation[this.LayerCount].NodeCount, new Normal(0.0, 1.0)));
     }
 }
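
Note that the weight matrix added here has nodeCount rows and, presumably, as many columns as the previous layer has nodes (assuming this.LayerCount indexes the previously added layer); both weights and biases are drawn from a standard normal distribution via MathNet.Numerics. The layer-information types themselves are not shown. Since the same LayerInformation list stores both kinds of entries, they likely share a common base type; a rough sketch, where the interface name is an assumption:

 // Hedged sketch: only the properties assigned in the examples above are known; the
 // shared interface (ILayerInformation) and its name are assumptions.
 public interface ILayerInformation
 {
 }

 public class NonLinearLayerInformation : ILayerInformation
 {
     public NonLinearFunction NonLinearFunction { get; set; }
 }

 public class FullyConnectedLayerInformation : ILayerInformation
 {
     public int NodeCount { get; set; }
     public NonLinearFunction ActivationFunction { get; set; }
 }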
Code example #3
 /// <summary>
 /// Runs the defined activation function over the given vector.
 /// </summary>
 /// <param name="activation">The values for the input layer.</param>
 /// <param name="activationFunction">The activation function to use.</param>
 /// <param name="derivative">Whether we want the derivative of the values.</param>
 /// <returns>A vector to give to the next layer of the net.</returns>
 internal Vector<double> RunActivation(Vector<double> activation, NonLinearFunction activationFunction, bool derivative = false)
 {
     switch (activationFunction)
     {
         case NonLinearFunction.Sigmoid:
             return NonLinearTransformations.Sigmoid(activation, derivative);

         case NonLinearFunction.Tanh:
             return NonLinearTransformations.Tanh(activation, derivative);

         case NonLinearFunction.ReLU:
             return NonLinearTransformations.ReLU(activation, derivative);

         case NonLinearFunction.LReLU:
             return NonLinearTransformations.LReLU(activation, derivative);

         default:
             return null;
     }
 }
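
The NonLinearTransformations helper is not included in these examples. To illustrate the expected contract (return the function applied element-wise, or its derivative when requested), the sigmoid case might look roughly like the sketch below; this is an assumption, not the project's actual implementation.

 // Hedged sketch of NonLinearTransformations.Sigmoid; the real implementation is not
 // shown in these examples and may differ.
 using System;
 using MathNet.Numerics.LinearAlgebra;

 internal static class NonLinearTransformations
 {
     internal static Vector<double> Sigmoid(Vector<double> activation, bool derivative = false)
     {
         // sigma(x) = 1 / (1 + e^-x); Zeros.Include keeps zero entries from being skipped.
         Vector<double> sigmoid = activation.Map(x => 1.0 / (1.0 + Math.Exp(-x)), Zeros.Include);

         // sigma'(x) = sigma(x) * (1 - sigma(x)).
         return derivative
             ? sigmoid.Map(s => s * (1.0 - s))
             : sigmoid;
     }
 }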
Code example #4
 /// <summary>
 /// Adds a fully connected layer to the network. This is the final stage, and at least one
 /// fully connected layer is required. Once you add a fully connected layer, you cannot add
 /// other kinds of layers.
 /// </summary>
 /// <param name="nodeCount">The number of nodes in this fully connected layer.</param>
 /// <param name="activationFunction">The activation function to use in this layer.</param>
 public void AddFullyConnectedLayer(int nodeCount, NonLinearFunction activationFunction = NonLinearFunction.Sigmoid)
 {
     this.FullyConnectedNetwork.AddLayer(nodeCount, activationFunction);
 }
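
Putting the pieces together, building a small network could look like the following. The class name (ConvolutionalNeuralNetwork) and the layer sizes are illustrative assumptions, and it is assumed that AddNonLinearLayer from code example #1 lives on the same class as AddFullyConnectedLayer; only the method names and the Sigmoid default come from the examples above.

 // Hypothetical usage; the class name and layer sizes are assumptions for illustration.
 var network = new ConvolutionalNeuralNetwork();
 network.AddNonLinearLayer(NonLinearFunction.ReLU);

 // Fully connected layers come last; at least one is required.
 network.AddFullyConnectedLayer(30);                            // uses the Sigmoid default
 network.AddFullyConnectedLayer(10, NonLinearFunction.Sigmoid);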