Initializes weights from a Gaussian distribution with mean 0 and standard deviation 1.0/sqrt(inputWeightCount), and biases from a Gaussian distribution with mean 0 and standard deviation 1.
Implements: IWeightInitializer
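A minimal sketch of what such an initializer could look like is shown below. Only the GetRandomWeight(int) and GetRandomBias() signatures are taken from the examples that follow; the Random field, the Box-Muller sampling helper, and the assumption that IWeightInitializer declares exactly these two methods are illustrative, not the library's actual implementation.

        using System;

        // Illustrative sketch only: samples weights from N(0, 1/sqrt(inputWeightCount))
        // and biases from N(0, 1), as described above. The Box-Muller helper and the
        // Random field are assumptions, not taken from the library source.
        public class DefaultWeightInitializer : IWeightInitializer
        {
            private readonly Random rng = new Random();

            // Weight ~ N(0, 1/sqrt(inputWeightCount)); scaling by the fan-in keeps the
            // summed input to each neuron at roughly unit variance.
            public float GetRandomWeight(int inputWeightCount)
            {
                return (float)(NextGaussian() / Math.Sqrt(inputWeightCount));
            }

            // Bias ~ N(0, 1)
            public float GetRandomBias()
            {
                return (float)NextGaussian();
            }

            // Standard normal sample via the Box-Muller transform.
            private double NextGaussian()
            {
                double u1 = 1.0 - rng.NextDouble(); // in (0, 1], avoids log(0)
                double u2 = rng.NextDouble();
                return Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
            }
        }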
Example 1
        /// <summary>
        /// Create a network with random weights and biases
        /// </summary>
        /// <param name="layerConfig">The layer configuration containing the number of neurons in each layer in this order: [input layer][1st hidden layer][2nd hidden layer]...[nth hidden layer][output layer]</param>
        /// <param name="activationFunction">The activation function to use</param>
        /// <param name="weightInitializer">The weight and bias initializer to use</param>
        /// <returns>The newly created network</returns>
        public static Network CreateNetworkInitRandom(int[] layerConfig, IActivationFunction activationFunction, IWeightInitializer weightInitializer = null)
        {
            if (weightInitializer == null)
            {
                weightInitializer = new DefaultWeightInitializer();
            }

            List <List <Tuple <List <float>, float> > > inputLayers = new List <List <Tuple <List <float>, float> > >();

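            // The input layer (index 0) has no weights of its own, so construction starts at layId = 1;
            // each neuron in a layer gets one weight per neuron in the previous layer, plus a bias.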
            for (int layId = 1; layId < layerConfig.Length; ++layId)
            {
                int prevLayerSize = layerConfig[layId - 1];
                int layerSize     = layerConfig[layId];
                List <Tuple <List <float>, float> > neuronList = new List <Tuple <List <float>, float> >();
                for (int i = 0; i < layerSize; i++)
                {
                    List <float> weights = new List <float>();
                    for (int j = 0; j < prevLayerSize; ++j)
                    {
                        weights.Add(weightInitializer.GetRandomWeight(prevLayerSize));
                    }
                    neuronList.Add(new Tuple <List <float>, float>(weights, weightInitializer.GetRandomBias()));
                }
                inputLayers.Add(neuronList);
            }

            return(CreateNetwork(inputLayers, activationFunction));
        }
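For reference, a call to this overload might look like the following; the network shape and the SigmoidActivationFunction name are placeholders, and it is assumed the factory method is declared on the Network class.

        // Hypothetical usage: 784 inputs, one hidden layer with 30 neurons, 10 outputs.
        // SigmoidActivationFunction stands in for whatever IActivationFunction
        // implementation the library provides; the default weight initializer is used.
        int[] layerConfig = new int[] { 784, 30, 10 };
        Network network = Network.CreateNetworkInitRandom(layerConfig, new SigmoidActivationFunction());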
Example 2
        /// <summary>
        /// Create a network with random weights and biases
        /// </summary>
        /// <param name="input_layer">The neurons in the input layer</param>
        /// <param name="hidden_and_output_layers">The layer configuration containing the activation function and the number of neurons for each layer, in this order: [1st hidden layer][2nd hidden layer]...[nth hidden layer][output layer]</param>
        /// <param name="weightInitializer">The weight and bias initializer to use</param>
        /// <returns>The newly created network</returns>
        public static Network CreateNetworkInitRandom(int input_layer, List <Tuple <IActivationFunction, int> > hidden_and_output_layers, IWeightInitializer weightInitializer = null)
        {
            if (weightInitializer == null)
            {
                weightInitializer = new DefaultWeightInitializer();
            }

            var inputLayers = new List <Tuple <IActivationFunction, List <Tuple <List <float>, float> > > >();

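            // Each entry pairs a layer's activation function (Item1) with its neuron count (Item2);
            // the first hidden layer's fan-in is input_layer, later layers use the previous entry's count.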
            for (int layId = 0; layId < hidden_and_output_layers.Count; ++layId)
            {
                int prevLayerSize = layId == 0 ? input_layer : hidden_and_output_layers[layId - 1].Item2;
                int layerSize     = hidden_and_output_layers[layId].Item2;
                IActivationFunction activationFunction         = hidden_and_output_layers[layId].Item1;
                List <Tuple <List <float>, float> > neuronList = new List <Tuple <List <float>, float> >();
                for (int i = 0; i < layerSize; i++)
                {
                    List <float> weights = new List <float>();
                    for (int j = 0; j < prevLayerSize; ++j)
                    {
                        weights.Add(weightInitializer.GetRandomWeight(prevLayerSize));
                    }
                    neuronList.Add(new Tuple <List <float>, float>(weights, weightInitializer.GetRandomBias()));
                }
                inputLayers.Add(new Tuple <IActivationFunction, List <Tuple <List <float>, float> > >(activationFunction, neuronList));
            }

            return(CreateNetwork(inputLayers));
        }
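A hypothetical call to this overload, again with placeholder activation-function names and the assumption that the method is declared on the Network class:

        // Hypothetical usage: 784 inputs, a 30-neuron hidden layer and a 10-neuron
        // output layer, each paired with its own activation function.
        var layers = new List<Tuple<IActivationFunction, int>>
        {
            new Tuple<IActivationFunction, int>(new ReLUActivationFunction(), 30),
            new Tuple<IActivationFunction, int>(new SigmoidActivationFunction(), 10)
        };
        Network network = Network.CreateNetworkInitRandom(784, layers);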