public void AddLayerToNoHiddens()
{
    // Start from a net with no hidden layers, insert one, and verify the wiring.
    var net = new LMMCNet(5, 0, new int[] { }, 5, true);
    int layerCountBefore = net.Net.Count;

    net.AddLayer(1, 5);

    // Exactly one layer must have been added.
    Assert.AreEqual(net.Net.Count, layerCountBefore + 1);

    // Each neuron in the inserted layer fans out to every neuron in the next layer.
    foreach (Neuron neuron in net.Net[1])
    {
        Assert.AreEqual(neuron.WeightsOut.GetUpperBound(0) + 1, net.Net[2].Count);
    }

    // Each input neuron now fans out to the inserted layer instead of the output layer.
    foreach (Neuron neuron in net.Net[0])
    {
        Assert.AreEqual(neuron.WeightsOut.GetUpperBound(0) + 1, net.Net[1].Count);
    }

    // The net must still produce one value per declared output neuron.
    var outputs = net.Predict(Helper.GetInputs(net.NumberOfInputs));
    Assert.AreEqual(outputs.GetUpperBound(0) + 1, net.NumberOfOutputs);
}
public void NetGeneration_AllWeights0_AllOutputsEqualPointFive_1HiddenLayer()
{
    // With all weights initialised to 0 every sigmoid neuron should emit 0.5,
    // regardless of layer width. Exercise widths 1 through 10 with one hidden layer.
    for (int width = 1; width <= 10; width++)
    {
        var net = new LMMCNet(width, 1, new int[] { 5 }, width, false);

        var output = net.Predict(Helper.GetInputs(width));

        foreach (double value in output)
        {
            Assert.AreEqual(value, 0.5);
        }
    }
}
public void NetGeneration_AllWeights0_AllOutputsEqualPointFive_NoHiddenLayers()
{
    //create a neural net with all weights are 0's, all inputs are 1 expected output should be all 0.5's output.
    //test for number of neurons per layer 1~10 and 0 hidden layers
    // FIX: the loop previously started at NeuronsPerLayer = 0, contradicting the
    // comment above ("1~10") and the sibling tests (which start at 1). The 0
    // iteration built a degenerate net with zero inputs/outputs whose empty
    // output array made the assertion loop vacuous — it tested nothing.
    for (int NeuronsPerLayer = 1; NeuronsPerLayer <= 10; NeuronsPerLayer++)
    {
        LMMCNet LMM = new LMMCNet(NeuronsPerLayer, 0, new int[] { }, NeuronsPerLayer, false);
        var output = LMM.Predict(Helper.GetInputs(NeuronsPerLayer));
        foreach (double d in output)
        {
            // sigmoid(0) is exactly 0.5, so exact equality is safe here.
            Assert.AreEqual(d, 0.5);
        }
    }
}
public void NetGeneration_AllWeights0_AllOutputsEqualPointFive_MultipleHiddenLayers()
{
    // Zero-weight nets must emit 0.5 from every output neuron, for every
    // combination of 1-9 hidden layers and 1-10 neurons per layer.
    for (int hiddenLayers = 1; hiddenLayers < 10; hiddenLayers++)
    {
        for (int width = 1; width <= 10; width++)
        {
            var hiddenShape = Helper.GetHLayerArray(hiddenLayers, width);
            var net = new LMMCNet(width, hiddenLayers, hiddenShape, width, false);

            var output = net.Predict(Helper.GetInputs(width));

            foreach (double value in output)
            {
                Assert.AreEqual(value, 0.5);
            }
        }
    }
}
//Put in random weights and inputs, check all outputs are between 1 and minus 1
public void NetGeneration_RandomWeights_AllOutputsBetweenMinus1And1()
{
    // Randomly-weighted nets must keep every output inside [-1, 1] and emit
    // exactly one value per output neuron, across 0-10 hidden layers and
    // 1-10 neurons per layer.
    for (int hiddenLayers = 0; hiddenLayers <= 10; hiddenLayers++)
    {
        for (int width = 1; width <= 10; width++)
        {
            var hiddenShape = Helper.GetHLayerArray(hiddenLayers, width);
            var net = new LMMCNet(width, hiddenLayers, hiddenShape, width, true);

            var output = net.Predict(Helper.GetInputs(width));

            // One output value per output neuron.
            Assert.AreEqual(output.GetUpperBound(0) + 1, width);
            foreach (double value in output)
            {
                Assert.IsTrue(value >= -1 && value <= 1);
            }
        }
    }
}
public void InternalMechanism_CheckSumMethod_NoHiddenLayer()
{
    // Independently recompute each output neuron's value from the input layer's
    // out-values, weights and biases, and check it matches what Predict produced.
    Random random = new Random();
    int NeuronsPerLayer = random.Next(5, 10); //make a net with varying neurons per layer
    var LMM = new LMMCNet(NeuronsPerLayer, 0, new int[] { }, NeuronsPerLayer, true);
    var output = LMM.Predict(Helper.GetInputs(NeuronsPerLayer));
    int index = 0;
    foreach (double d in output)
    {
        double sum = 0;
        foreach (Neuron N in LMM.Net[0])
        {
            // NOTE(review): the bias is added once per *source* neuron here;
            // a bias is conventionally added once per receiving neuron —
            // confirm this mirrors the production forward-pass formula.
            sum += ((N.OutValue * N.WeightsOut[index]) + N.Bias);
        }
        sum = Helper.Squish(sum);
        // FIX: compare doubles with a tolerance instead of exact equality —
        // Predict may accumulate the sum in a different order, so bit-exact
        // equality on recomputed floating-point values is flaky.
        Assert.AreEqual(sum, d, 1e-9);
        index++;
    }
}
public void InternalMechanism_CheckSumMethod_OneHiddenLayer()
{
    //check sum between input and hidden
    //then sum between hidden and output
    // Recompute each hidden neuron's activation from the input layer and check
    // it matches the OutValue that Predict stored on the hidden neuron.
    Random random = new Random();
    int NeuronsPerLayer = random.Next(5, 10);
    var LMM = new LMMCNet(NeuronsPerLayer, 1, new int[] { random.Next(5, 10) }, NeuronsPerLayer, true);
    var output = LMM.Predict(Helper.GetInputs(NeuronsPerLayer));
    int indexer = 0;
    foreach (Neuron N in LMM.Net[1])
    {
        double sum = 0;
        foreach (Neuron OuterN in LMM.Net[0])
        {
            // NOTE(review): bias added once per source neuron — verify against
            // the production forward-pass formula (see the no-hidden-layer test).
            sum += ((OuterN.OutValue * OuterN.WeightsOut[indexer]) + OuterN.Bias);
        }
        indexer++;
        sum = Helper.Squish(sum);
        // FIX: tolerance-based comparison; exact double equality on a
        // recomputed floating-point sum is brittle.
        Assert.AreEqual(N.OutValue, sum, 1e-9);
    }
}
// End-to-end console driver: parses a text dump of MNIST digits, renders a
// sample of them to PNG files, trains an LMMCNet on (a subset of) the data,
// then re-predicts the training set and reports accuracy to the console.
static void MNIST()
{
    Console.WriteLine("Parsing data -------------------");
    List<Mnist> TrainingList = new List<Mnist>();
    // NOTE(review): hard-coded machine-specific path; reader is not wrapped in
    // a using block, so it leaks if an exception is thrown mid-parse.
    StreamReader sw = new StreamReader(@"E:\Music\training.txt");
    List<string> charstr = new List<string>(); // unused — candidate for removal
    string build = "";     // accumulates digit characters of the current number
    int index = -1;        // -1 = next number is the label; >=0 = pixel index
    int label = 0;
    double[] data = new double[28 * 28];
    while (!sw.EndOfStream)
    {
        // '0'..'9' map to 0..9; a comma (ASCII 44) maps to -4 and delimits numbers.
        int next = sw.Read() - 48;
        if (next == -4)
        {
            if (index == -1)
            {
                label = Convert.ToInt32(build);
                index++;
            }
            else
            {
                data[index] = Convert.ToInt32(build);
                index++;
            }
            // NOTE(review): this fires when index == 783, i.e. after storing
            // pixels 0..782 — only 783 of the 784 pixels appear to be stored
            // before the record is flushed. Confirm against the file format
            // (the two sw.Read() calls below may be skipping the last value
            // plus a line break). Possible off-by-one.
            if (index == (28 * 28) - 1)
            {
                TrainingList.Add(new Mnist(data, label));
                index = -1;
                data = new double[28 * 28];
                build = "";
                sw.Read();
                sw.Read();
            }
            build = "";
        }
        else
        {
            //check for line breaks & spaces
            // Strips a partially-accumulated escape-looking sequence before
            // appending the next digit.
            if (build.Contains(@"\"))
            {
                build = build.Remove(build.IndexOf(@"\"));
            }
            if (build.Contains(@"n"))
            {
                build = build.Remove(build.IndexOf(@"n"));
            }
            build += next;
        }
    }
    sw.Close();
    Random random = new Random();
    //choose random object
    // Render 50 randomly-chosen digits to PNGs as a visual sanity check.
    for (int i = 0; i < 50; i++)
    {
        // NOTE(review): Random.Next's upper bound is exclusive, so
        // Count - 1 means the last element can never be selected.
        Mnist mn = TrainingList[random.Next(0, TrainingList.Count - 1)];
        Bitmap bm = new Bitmap(28, 28);
        index = 0;
        for (int x = 0; x < 28; x++)
        {
            for (int y = 0; y < 28; y++)
            {
                int bright = Convert.ToInt32(mn.Data[index]);
                // (y, x) swap transposes the image relative to row-major data.
                bm.SetPixel(y, x, Color.FromArgb(255, bright, bright, bright));
                index++;
            }
        }
        string filename = @"E:\Music\Imagetest" + i + " " + mn.Label + ".png";
        bm.Save(filename);
    }
    Console.WriteLine("Files output | press enter to continue");
    Console.ReadLine();
    // Truncate the training set to the first ~15002 records.
    var tempList = new List<Mnist>();
    int count2 = 0;
    foreach (Mnist nn in TrainingList)
    {
        tempList.Add(nn);
        if (count2 > 15000)
        {
            break;
        }
        count2++;
    }
    TrainingList = tempList;
    int isn = 0;
    Console.WriteLine("Checking data -------------------");
    // Sanity-check that every label is a digit 0-9; pauses on bad data.
    foreach (Mnist mn in TrainingList)
    {
        int value = Convert.ToInt32(mn.Label);
        if (value > 9 || value < 0)
        {
            Console.WriteLine("error at {0}", isn);
            Console.ReadLine();
        }
        isn++;
    }
    // 784 inputs, one hidden layer of 10, 10 outputs (one per digit class).
    var LMM = new LMMCNet(28 * 28, 1, new int[] { 10 }, 10, new Random());
    Console.WriteLine("Training-------------------");
    int count = 0;
    int total = TrainingList.Count;
    foreach (Mnist mn in TrainingList)
    {
        count++;
        Console.WriteLine("Executing {0} of {1}", count, total);
        // One-hot target vector for the record's label.
        var ExpectedOut = new double[10];
        ExpectedOut[mn.Label] = 1;
        // Normalise pixel values 0-255 into 0-1 (mutates mn.Data in place,
        // which is why prediction below reuses the already-normalised data).
        for (int i = 0; i <= mn.Data.GetUpperBound(0); i++)
        {
            mn.Data[i] = mn.Data[i] / 255;
        }
        LMM.Train(mn.Data, ExpectedOut);
    }
    int totalSuccesses = 0;
    int totalGuesses = 0;
    int secondGuesses = 0;
    Console.WriteLine("Guessing -----------------------");
    // Evaluate on the same data that was trained on (training accuracy only).
    foreach (Mnist mn in TrainingList)
    {
        totalGuesses++;
        Console.WriteLine("On guess {0}", totalGuesses);
        Console.WriteLine("{0} success from {1} guesses", totalSuccesses, totalGuesses);
        Console.WriteLine("{0} first successes and {1} second guesses", totalSuccesses, secondGuesses);
        var output = LMM.Predict(mn.Data);
        Hessian(LMM.Net);
        Console.WriteLine("-----------------");
        int HighestIndex = 0;
        double HighestValue = 0;
        int secondIndex = 0;
        double secondValue = 0; // written but never read
        // NOTE(review): this only demotes the previous maximum into second
        // place; a value that is below the max but above the current second
        // never updates secondIndex, so the "second guess" stat is unreliable.
        for (int i = 0; i <= output.GetUpperBound(0); i++)
        {
            if (output[i] > HighestValue)
            {
                secondIndex = HighestIndex;
                secondValue = HighestValue;
                HighestIndex = i;
                HighestValue = output[i];
            }
        }
        Console.WriteLine("Array is");
        int coun = 0;
        foreach (double d in output)
        {
            Console.WriteLine("{0} = {1}", coun, d);
            coun++;
        }
        Console.WriteLine("");
        Console.WriteLine("Highest value is {0}", HighestValue);
        Console.WriteLine("Index of highest is {0}", HighestIndex);
        Console.WriteLine("Label is {0}", mn.Label);
        if (mn.Label == HighestIndex)
        {
            Console.WriteLine("Is matching");
        }
        else if (mn.Label == secondIndex)
        {
            secondGuesses++;
        }
        else
        {
            Console.WriteLine("Is not matching");
        }
        if (HighestIndex == (mn.Label))
        {
            totalSuccesses++;
        }
    }
    Console.WriteLine("{0} successes, {1} failures", totalSuccesses, totalGuesses - totalSuccesses);
    Console.ReadLine();
}