/// <summary>
/// Initializes all weights with uniformly distributed random values.
/// Larger mults mean larger variance away from 0 after the activation function.
/// </summary>
/// <param name="nn">Network whose weight matrices are overwritten in place.</param>
/// <param name="mult1">Scale for input→hidden weights. Optimal values: 1.5 for inputs close to 1, and 5.0 when ~10% of inputs are ones and the rest zeroes.</param>
/// <param name="mult2">Scale for hidden→output weights. Optimal is 3.5 for all sets of inputs if the previous layer was initialized well.</param>
public static void RandomizeUniform(ISimpleMLP nn, double mult1 = 5, double mult2 = 3.5)
{
    // Input -> hidden: uniform in (-limit, +limit), limit = sqrt(3 / fan_in) * mult1.
    // NOTE(review): the <= bound covers one extra source column (index NInput),
    // presumably the bias unit — confirm against GwInputHidden's dimensions.
    double inputHiddenLimit = Sqrt(3.0 / nn.NInput) * mult1;
    for (int src = 0; src <= nn.NInput; src++)
    {
        for (int dst = 0; dst < nn.NHidden; dst++)
        {
            nn.GwInputHidden[dst, src] = (rand.NextDouble() * 2 - 1) * inputHiddenLimit;
        }
    }

    // Hidden -> output: same scheme, scaled by mult2 and the hidden layer's fan-in.
    double hiddenOutputLimit = Sqrt(3.0 / nn.NHidden) * mult2;
    for (int src = 0; src <= nn.NHidden; src++)
    {
        for (int dst = 0; dst < nn.NOutput; dst++)
        {
            nn.GwHiddenOutput[dst, src] = (rand.NextDouble() * 2 - 1) * hiddenOutputLimit;
        }
    }
}
/// <summary>
/// Initializes all weights with uniform random values scaled by the source
/// layer's size (fan-in): each weight is drawn from U(-range, +range) / fanIn.
/// The default range of 6 preserves the original hard-coded (-6, 6) behavior.
/// </summary>
/// <param name="nn">Network whose weight matrices are overwritten in place.</param>
/// <param name="range">Half-width of the uniform distribution before fan-in scaling. Defaults to 6.</param>
public static void RandomizeWeights(ISimpleMLP nn, double range = 6.0)
{
    // Set weights between input and hidden.
    // NOTE(review): the <= bound covers one extra source column (index NInput),
    // presumably the bias unit — confirm against GwInputHidden's dimensions.
    //--------------------------------------------------------------------------------------------------------
    for (int i = 0; i <= nn.NInput; i++)
    {
        for (int j = 0; j < nn.NHidden; j++)
        {
            // rand.NextDouble() * 2 * range - range == U(-range, +range);
            // with range == 6 this matches the original "* 12 - 6" exactly.
            nn.GwInputHidden[j, i] = (rand.NextDouble() * 2 * range - range) / nn.NInput;
        }
    }

    // Set weights between hidden and output.
    //--------------------------------------------------------------------------------------------------------
    for (int i = 0; i <= nn.NHidden; i++)
    {
        for (int j = 0; j < nn.NOutput; j++)
        {
            nn.GwHiddenOutput[j, i] = (rand.NextDouble() * 2 * range - range) / nn.NHidden;
        }
    }
}