/// <summary>
/// Run the example: load the embedded iris data, normalize the four
/// numeric columns, one-of-n encode the species column, and train an
/// RBF network with simulated annealing.
/// </summary>
public void Process()
{
    // Read the iris data from the embedded resources.
    Assembly assembly = Assembly.GetExecutingAssembly();
    var res = assembly.GetManifestResourceStream("AIFH_Vol1.Resources.iris.csv");

    // Did we fail to read the resource?
    if (res == null)
    {
        Console.WriteLine("Can't read iris data from embedded resources.");
        return;
    }

    // Load the data. The using statement guarantees the reader (and the
    // underlying resource stream) is closed even if Load throws, where the
    // original explicit Close() would have been skipped.
    DataSet ds;
    using (var istream = new StreamReader(res))
    {
        ds = DataSet.Load(istream);
    }

    // The following ranges are setup for the Iris data set. If you wish to
    // normalize other files you will need to modify the below function calls.
    ds.NormalizeRange(0, 0, 1);
    ds.NormalizeRange(1, 0, 1);
    ds.NormalizeRange(2, 0, 1);
    ds.NormalizeRange(3, 0, 1);
    IDictionary<String, int> species = ds.EncodeOneOfN(4);

    // Ideal vectors are 3 wide (one-of-n over the 3 iris species)...
    IList<BasicData> trainingData = ds.ExtractSupervised(0, 4, 4, 3);

    // ...so the network needs 3 outputs. The original "2" mismatched the
    // 3-element ideal vectors produced by ExtractSupervised above.
    var network = new RBFNetwork(4, 4, 3);
    network.Reset(new MersenneTwisterGenerateRandom());

    IScoreFunction score = new ScoreRegressionData(trainingData);
    var train = new TrainAnneal(network, score);
    PerformIterations(train, 100000, 0.01, true);
    QueryOneOfN(network, trainingData, species);
}
/// <summary>
/// Verify ComputeRegression against a hand-wired 2-1-1 network whose
/// long-term memory values are chosen so the expected output is easy
/// to compute by hand.
/// </summary>
public void TestComputeRegression()
{
    var network = new RBFNetwork(2, 1, 1);

    // Hand-crafted long-term memory: two input weights, the RBF width,
    // two RBF centers, the RBF-to-output weight, and the output bias.
    double[] memory =
    {
        2.0, // input 1 -> RBF 1
        2.0, // input 2 -> RBF 1
        5.0, // RBF width
        2.0, // RBF center 0
        4.0, // RBF center 1
        3.0, // RBF 1 -> output 1
        4.0  // bias -> output 1
    };
    memory.CopyTo(network.LongTermMemory, 0);

    double[] input = { 1, 2 };
    double actual = network.ComputeRegression(input)[0];

    // Weighted inputs: (2*1) + (2*2) = 6
    // RBF: Gaussian(6) = 1
    // Output: (1*3) + (1*4) = 7
    Assert.AreEqual(7, actual, AIFH.DefaultPrecision);
}
/// <summary>
/// Perform the example: train an RBF network on the XOR data set
/// using greedy random search, then query the trained network.
/// </summary>
public void Process()
{
    var trainingData = BasicData.ConvertArrays(XorInput, XorIdeal);

    // 2 inputs, 5 RBFs, 1 output.
    var network = new RBFNetwork(2, 5, 1);
    var score = new ScoreRegressionData(trainingData);
    var train = new TrainGreedyRandom(true, network, score);

    PerformIterations(train, 1000000, 0.01, true);
    Query(network, trainingData);
}
/// <summary>
/// Check the long-term memory size and the string form of a freshly
/// constructed 2-1-1 network.
/// </summary>
public void TestBasics()
{
    var network = new RBFNetwork(2, 1, 1);

    // Expected memory length: (2 inputs * 1 RBF) + ((1 RBF + 1 bias) * 1 output)
    // + 3 RBF parameters = 2 + 2 + 3 = 7.
    Assert.AreEqual(7, network.LongTermMemory.Length);

    const string expected =
        "[RBFNetwork:inputCount=2,outputCount=1,RBFs=[GaussianFunction:width=0.00,center=0.00,0.00],]";
    Assert.AreEqual(expected, network.ToString());
}
/// <summary>
/// A freshly created network starts with all-zero long-term memory;
/// after Reset the memory should contain non-zero (random) values.
/// </summary>
public void TestResetCompute()
{
    var network = new RBFNetwork(2, 1, 1);

    double sum = network.LongTermMemory.Sum();
    Assert.AreEqual(0, sum, AIFH.DefaultPrecision);

    network.Reset(new BasicGenerateRandom());

    sum += network.LongTermMemory.Sum();
    Assert.IsTrue(Math.Abs(sum) > AIFH.DefaultPrecision);
}
/// <summary>
/// Verify both regression outputs and the classification result on a
/// hand-wired 2-1-2 network.
/// </summary>
public void TestComputeClassification()
{
    var network = new RBFNetwork(2, 1, 2);

    // Hand-crafted long-term memory: input weights, RBF width and
    // centers, then weight + bias for each of the two outputs.
    double[] memory =
    {
        2.0, // input 1 -> RBF 1
        2.0, // input 2 -> RBF 1
        5.0, // RBF width
        2.0, // RBF center 0
        4.0, // RBF center 1
        3.0, // RBF 1 -> output 1
        4.0, // bias -> output 1
        5.0, // RBF 1 -> output 2
        6.0  // bias -> output 2
    };
    memory.CopyTo(network.LongTermMemory, 0);

    double[] input = { 1, 2 };
    double[] output = network.ComputeRegression(input);

    // Weighted inputs: (2*1) + (2*2) = 6; Gaussian(6) = 1.
    // Output 1: (1*3) + (1*4) = 7
    Assert.AreEqual(7, output[0], AIFH.DefaultPrecision);

    // Output 2: (1*5) + (1*6) = 11
    Assert.AreEqual(11, output[1], AIFH.DefaultPrecision);

    // Class 1 scores higher than class 0.
    int cls = network.ComputeClassification(input);
    Assert.AreEqual(1, cls);
}