// Trains a 100-input perceptron on five 'A' and five 'B' letter images,
// then checks that it generalises to an unseen sample (7.jpg) of each class.
public void GeneralisesSmallImages()
{
    var network = new PerceptronFactory().BuildPerceptron(100);
    var aSet = new InputProcessor(@"..\..\trainingsets\dataset1\A");
    var bSet = new InputProcessor(@"..\..\trainingsets\dataset1\B");

    // Build the labelled set in the same order as before: all five 'A'
    // examples (label 1.0) followed by all five 'B' examples (label 0.0).
    var trainingSet = new List<KeyValuePair<double, double[]>>();
    for (var i = 1; i <= 5; i++)
    {
        trainingSet.Add(new KeyValuePair<double, double[]>(1.0, aSet.BmpToBinaryArr(i + ".jpg")));
    }
    for (var i = 1; i <= 5; i++)
    {
        trainingSet.Add(new KeyValuePair<double, double[]>(0.0, bSet.BmpToBinaryArr(i + ".jpg")));
    }

    network.Train(trainingSet);

    // 7.jpg was never shown during training for either class.
    Assert.AreEqual(1.0, network.Classify(aSet.BmpToBinaryArr(@"7.jpg")));
    Assert.AreEqual(0.0, network.Classify(bSet.BmpToBinaryArr(@"7.jpg")));
}
// The fault-tolerant perceptron should fit a biased two-input data set
// exactly: after training, every training example classifies to its label.
public void FTClassifiesBiasedData()
{
    // data has 2 double inputs and an expected binary out
    var network = new PerceptronFactory().BuildFTPerceptron(2);
    var trainingSet = new TabSeparatedListReader().Read(@"..\..\trainingsets\dataset2\biased.txt");

    network.Train(trainingSet);

    foreach (var example in trainingSet)
    {
        Assert.AreEqual(example.Key, network.Classify(example.Value));
    }
}
// A single perceptron can represent NAND (it is linearly separable).
// The first input of every row is the constant 1.0 bias term.
public void ClassifiesNAND()
{
    var network = new PerceptronFactory().BuildPerceptron(3);

    // Full NAND truth table: output is 0 only when both real inputs are 1.
    var nandTable = new List<KeyValuePair<double, double[]>>
    {
        new KeyValuePair<double, double[]>(1.0, new[] { 1.0, 0.0, 0.0 }),
        new KeyValuePair<double, double[]>(1.0, new[] { 1.0, 0.0, 1.0 }),
        new KeyValuePair<double, double[]>(1.0, new[] { 1.0, 1.0, 0.0 }),
        new KeyValuePair<double, double[]>(0.0, new[] { 1.0, 1.0, 1.0 }),
    };

    network.Train(nandTable);

    // Spot-check one positive and one negative case.
    Assert.AreEqual(1.0, network.Classify(new[] { 1.0, 0.0, 0.0 }));
    Assert.AreEqual(0.0, network.Classify(new[] { 1.0, 1.0, 1.0 }));
}
// A sigmoid-activated fault-tolerant perceptron should also learn NAND.
// The first input of every row is the constant 1.0 bias term.
public void SigmoidFTClassifiesNAND()
{
    FTPerceptron network = new PerceptronFactory().BuildSigmoidFTPerceptron(3);

    var nandTable = new List<KeyValuePair<double, double[]>>
    {
        new KeyValuePair<double, double[]>(1.0, new[] { 1.0, 0.0, 0.0 }),
        new KeyValuePair<double, double[]>(1.0, new[] { 1.0, 0.0, 1.0 }),
        new KeyValuePair<double, double[]>(1.0, new[] { 1.0, 1.0, 0.0 }),
        new KeyValuePair<double, double[]>(0.0, new[] { 1.0, 1.0, 1.0 }),
    };

    // Training reliably converges well within an error of 0.8, so 0.2 is safe.
    network.Train(nandTable, maxAllowedError: 0.2);

    // Sigmoid activation asymptotically approaches — but never reaches —
    // 0 and 1, so assert against thresholds instead of exact labels.
    Assert.Greater(network.Classify(new[] { 1.0, 0.0, 0.0 }), 0.9);
    Assert.Less(network.Classify(new[] { 1.0, 1.0, 1.0 }), 0.1);
}
// The multi-layer perceptron should reproduce a small fixed mapping from
// 4-element input vectors to 2-element expected outputs.
// Pair layout (note: reversed vs. the other tests): Key = expected output
// vector, Value = input vector.
public void MultiLayerPerceptronClassifies()
{
    // fixed arrangement for now ...
    var network = new PerceptronFactory().BuildMultiLayerPerceptron();

    var trainingSet = new List<KeyValuePair<double[], double[]>>
    {
        new KeyValuePair<double[], double[]>(new[] { 0.0, 1.0 }, new[] { 0.0, 1.0, 1.0, 0.0 }),
        // Was { 1.0, 0.1 }: the 0.1 appears to be a typo — every other
        // expected element is binary, and the exact-equality AreEqual below
        // could never match 0.1 from a binary-output classifier.
        new KeyValuePair<double[], double[]>(new[] { 1.0, 0.0 }, new[] { 1.0, 0.0, 1.0, 0.0 }),
        new KeyValuePair<double[], double[]>(new[] { 1.0, 1.0 }, new[] { 1.0, 1.0, 0.0, 0.0 }),
    };

    network.Train(trainingSet, maxAllowedError: 0.2, maxIterations: 10000);

    trainingSet.ForEach(kvp =>
    {
        // Classify once per example rather than once per asserted element.
        var output = network.Classify(kvp.Value);
        Assert.AreEqual(kvp.Key[0], output[0]);
        Assert.AreEqual(kvp.Key[1], output[1]);
    });
}