/// <summary>
/// The test harness.
/// </summary>
/// <param name="args">The command line arguments.</param>
public static void Main(string[] args)
{
    // Load the XOR training set from disk.
    TrainingSet trainingSet = new TrainingSet("XOR.trainingSet");

    // Describe the network: an input layer sized to the training data,
    // a single hidden layer of two logistic neurons, and a logistic
    // output layer sized to the expected outputs.
    LayerBlueprint inputLayerBlueprint = new LayerBlueprint(trainingSet.InputVectorLength);
    ActivationLayerBlueprint[] hiddenLayerBlueprints =
    {
        new ActivationLayerBlueprint(2, new LogisticActivationFunction())
    };
    ActivationLayerBlueprint outputLayerBlueprint = new ActivationLayerBlueprint(trainingSet.OutputVectorLength, new LogisticActivationFunction());
    NetworkBlueprint networkBlueprint = new NetworkBlueprint(inputLayerBlueprint, hiddenLayerBlueprints, outputLayerBlueprint);

    // Build the network from its blueprint.
    INetwork network = new Network(networkBlueprint);

    // Exercise each of the four teachers against the same network,
    // each with its own iteration budget and a 1e-3 error target.
    Test(1, "A backpropagation (BP) teacher", new BackpropagationTeacher(trainingSet, null, null), network, 100000, 1e-3);
    Console.WriteLine();

    Test(2, "The genetic algorithm (GA) teacher", new GeneticAlgorithmTeacher(trainingSet, null, null), network, 1000, 1e-3);
    Console.WriteLine();

    Test(3, "The simulated annealing (SA) teacher", new SimulatedAnnealingTeacher(trainingSet, null, null), network, 1000000, 1e-3);
    Console.WriteLine();

    Test(4, "The ant colony optimization (ACO) teacher", new AntColonyOptimizationTeacher(trainingSet, null, null), network, 1000, 1e-3);
    Console.WriteLine();
}
/// <summary>
/// Demonstrates the full workflow: build a training set, a network
/// blueprint, a network and a teacher; train the network; then evaluate
/// it on the training patterns.
/// </summary>
/// <param name="args">The command line arguments (unused).</param>
static void Main(string[] args)
{
    // NOTE(review): the ".10" format specifier is ignored for string
    // arguments (String is not IFormattable) — looks like leftover debug
    // output; confirm whether it can be removed.
    Console.WriteLine("{0:.10}, {1}", "Hello", "World");

    // Step 1 : Alternative A : Building a training set manually
    // ---------------------------------------------------------
    int inputVectorLength = 2;
    int outputVectorLength = 1;
    TrainingSet trainingSet = new TrainingSet(inputVectorLength, outputVectorLength);

    // The four XOR patterns: each input pair with its expected output.
    double[][] patternInputs =
    {
        new double[] { 0.0, 0.0 },
        new double[] { 0.0, 1.0 },
        new double[] { 1.0, 0.0 },
        new double[] { 1.0, 1.0 }
    };
    double[] patternOutputs = { 0.0, 1.0, 1.0, 0.0 };
    for (int i = 0; i < patternInputs.Length; i++)
    {
        trainingSet.Add(new TrainingPattern(patternInputs[i], new double[] { patternOutputs[i] }));
    }

    // Step 2 : Building a blueprint of a network
    // ------------------------------------------
    // One hidden layer of two logistic neurons between a 2-neuron input
    // layer and a 1-neuron logistic output layer.
    NetworkBlueprint networkBlueprint = new NetworkBlueprint(
        new LayerBlueprint(inputVectorLength),
        new ActivationLayerBlueprint[] { new ActivationLayerBlueprint(2, new LogisticActivationFunction()) },
        new ActivationLayerBlueprint(outputVectorLength, new LogisticActivationFunction()));

    // Step 3 : Building a network
    // ---------------------------
    Network network = new Network(networkBlueprint);
    Console.WriteLine(network.ToString());

    // Step 4 : Building a teacher
    // ---------------------------
    ITeacher teacher = new AntColonyOptimizationTeacher(trainingSet, null, null);

    // Step 5 : Training the network
    // -----------------------------
    int maxIterationCount = 10000;
    double maxTolerableNetworkError = 1e-3;
    TrainingLog trainingLog = teacher.Train(network, maxIterationCount, maxTolerableNetworkError);

    Console.WriteLine("Number of runs used : " + trainingLog.RunCount);
    Console.WriteLine("Number of iterations used : " + trainingLog.IterationCount);
    Console.WriteLine("Minimum network error achieved : " + trainingLog.NetworkError);

    // Step 6 : Using the trained network
    // ----------------------------------
    // Feed every training pattern back through the trained network and
    // print the pattern alongside the network's actual output.
    foreach (TrainingPattern pattern in trainingSet.TrainingPatterns)
    {
        double[] outputVector = network.Evaluate(pattern.InputVector);
        Console.WriteLine(pattern.ToString() + " -> " + TrainingPattern.VectorToString(outputVector));
    }
}
/// <summary>
/// Runs one forecasting experiment: reads the experiment parameters from the
/// session, builds a network and trains it on the time series, and writes the
/// goodness-of-fit statistics into the log.
/// </summary>
/// <param name="forecastingSession">The session to read the experiment parameters from.</param>
/// <param name="forecastingLog">The log to write the training statistics into.</param>
private static void Forecast(ForecastingSession forecastingSession, ForecastingLog forecastingLog)
{
    // Step 0 : Read from the Forecasting Session
    // ------------------------------------------
    string[] words = forecastingSession.Read();

    // The size of the test set.
    // Parsed with the invariant culture: the session is machine-written data,
    // so its number format must not depend on the host locale (CA1305).
    string testSetSizeString = words[0].Trim();
    int testSetSize = Int32.Parse(testSetSizeString, System.Globalization.CultureInfo.InvariantCulture);

    // The lags (a comma-separated list of integers).
    string lagsString = words[1].Trim();
    int[] lags = ParseInt32List(lagsString);

    // The leaps (a comma-separated list of integers).
    string leapsString = words[2].Trim();
    int[] leaps = ParseInt32List(leapsString);

    // The number of hidden neurons.
    string hiddenNeuronCountString = words[3].Trim();
    int hiddenNeuronCount = Int32.Parse(hiddenNeuronCountString, System.Globalization.CultureInfo.InvariantCulture);

    // DEBUG : "Lags; Number of hidden neurons"
    Console.WriteLine(lagsString + "; " + hiddenNeuronCountString);

    // Step 1 : Alternative A : Building a training set (and a testing set) manually
    // -----------------------------------------------------------------------------

    // The training set is built from the timeSeries field declared elsewhere
    // in this class.
    TrainingSet trainingSet = timeSeries.BuildTrainingSet(lags, leaps);

    // The testing set is split off the tail end of the training set.
    TrainingSet testSet = trainingSet.SeparateTestSet(trainingSet.Size - testSetSize, testSetSize);

    // Step 2 : Building a blueprint of a network
    // ------------------------------------------
    // One input neuron per lag, the requested number of hidden neurons, and
    // one linear output neuron per leap.
    LayerBlueprint inputLayerBlueprint = new LayerBlueprint(lags.Length);
    ActivationLayerBlueprint hiddenLayerBlueprint = new ActivationLayerBlueprint(hiddenNeuronCount);
    ActivationLayerBlueprint outputLayerBlueprint = new ActivationLayerBlueprint(leaps.Length, new LinearActivationFunction());
    NetworkBlueprint networkBlueprint = new NetworkBlueprint(inputLayerBlueprint, hiddenLayerBlueprint, outputLayerBlueprint);

    // Step 3 : Building a network
    // ---------------------------
    Network network = new Network(networkBlueprint);

    // Step 4 : Building a teacher
    // ---------------------------
    BackpropagationTeacher teacher = new BackpropagationTeacher(trainingSet, null, testSet);

    // Step 5 : Training the network
    // -----------------------------
    int maxRunCount = 10;
    int maxIterationCount = 10000;
    // A zero tolerable error means every run uses its full iteration budget.
    double maxTolerableNetworkError = 0.0;
    TrainingLog tl = teacher.Train(network, maxRunCount, maxIterationCount, maxTolerableNetworkError);

    // Step 6 : Write into the Forecasting Log
    // ---------------------------------------
    words = new string[10]
    {
        lagsString,
        trainingSet.Size.ToString(),
        hiddenNeuronCountString,
        network.SynapseCount.ToString(),
        tl.RSS_TrainingSet.ToString(),
        tl.RSD_TrainingSet.ToString(),
        tl.AIC.ToString(),
        tl.BIC.ToString(),
        tl.RSS_TestSet.ToString(),
        tl.RSD_TestSet.ToString()
    };
    forecastingLog.Write(words);

    // DEBUG : "RSS (within-sample); RSD (within-sample); AIC; BIC; RSS (out-of-sample); RSD (out-of-sample)"
    Console.WriteLine(tl.RSS_TrainingSet.ToString() + "; " + tl.RSD_TrainingSet.ToString() + "; " + tl.AIC.ToString() + "; " + tl.BIC.ToString() + "; " + tl.RSS_TestSet.ToString() + "; " + tl.RSD_TestSet.ToString());
}

/// <summary>
/// Parses a comma-separated list of integers (e.g. "1,2,12") using the
/// invariant culture.
/// </summary>
/// <param name="csv">The comma-separated list of integers.</param>
/// <returns>The parsed integers, in order.</returns>
private static int[] ParseInt32List(string csv)
{
    string[] parts = csv.Split(',');
    int[] values = new int[parts.Length];
    for (int i = 0; i < parts.Length; i++)
    {
        values[i] = Int32.Parse(parts[i], System.Globalization.CultureInfo.InvariantCulture);
    }
    return values;
}