/// <summary>
        /// Creates a new Backpropagation Synapse connecting the given neurons
        /// </summary>
        /// <param name="sourceNeuron">
        /// The source neuron
        /// </param>
        /// <param name="targetNeuron">
        /// The target neuron
        /// </param>
        /// <param name="parent">
        /// Parent connector containing this synapse
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// If any of the arguments is <c>null</c>.
        /// </exception>
        public BackpropagationSynapse(
            ActivationNeuron sourceNeuron, ActivationNeuron targetNeuron, BackpropagationConnector parent)
        {
            Helper.ValidateNotNull(sourceNeuron, "sourceNeuron");
            Helper.ValidateNotNull(targetNeuron, "targetNeuron");
            Helper.ValidateNotNull(parent, "parent");

            // Weight defaults to 1 and no delta is accumulated until training begins.
            this.weight = 1f;
            this.delta = 0f;

            // Register this synapse with both endpoint neurons so signals can
            // propagate forward and errors can propagate back through it.
            sourceNeuron.TargetSynapses.Add(this);
            targetNeuron.SourceSynapses.Add(this);

            this.sourceNeuron = sourceNeuron;
            this.targetNeuron = targetNeuron;
            this.parent = parent;
        }
// Example #2
// 0
    /// <summary>
    /// Builds a fresh 4-layer backpropagation network (linear in/out, two sigmoid
    /// hidden layers) from this component's size fields and installs it into the
    /// NetworkManager singleton, overwriting any existing network.
    /// </summary>
    void CreateNewNetwork()
    {
        Debug.Log("Creating new network...");

        LinearLayer  inputLayer   = new LinearLayer(neurons);
        SigmoidLayer hiddenLayer  = new SigmoidLayer(hidden1Neurons);
        SigmoidLayer hiddenLayer2 = new SigmoidLayer(hidden2Neurons);
        LinearLayer  outputLayer  = new LinearLayer(outputNum);

        // Connect consecutive layers; very small random initial weights.
        BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
        conn1.Initializer = new RandomFunction(0d, 0.00001d);

        BackpropagationConnector conn3 = new BackpropagationConnector(hiddenLayer, hiddenLayer2);
        conn3.Initializer = new RandomFunction(0d, 0.00001d);

        BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer2, outputLayer);
        conn2.Initializer = new RandomFunction(0d, 0.00001d);

        conn1.Initialize();
        conn2.Initialize();
        conn3.Initialize();

        if (NetworkManager.Instance._neuralNetwork != null)
        {
            Debug.Log("A network already exists... new network will overwrite it");
        }

        NetworkManager.Instance._neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
        NetworkManager.Instance._neuralNetwork.SetLearningRate(learningRate);
        NetworkManager.Instance.setNeuralNetwork(NetworkManager.Instance._neuralNetwork);

        NetworkManager.Instance.InitializeNetwork(neurons);

        // BUGFIX: previously logged before the network was actually created;
        // report success only after creation and installation have completed.
        Debug.Log("Created.");
    }
 /// <summary>
 /// Initializes weights of all backpropagation synapses in the backpropagation connector,
 /// assigning the same configured constant value to every synapse.
 /// </summary>
 /// <param name="connector">
 /// The backpropagation connector to initialize.
 /// </param>
 /// <exception cref="ArgumentNullException">
 /// If <c>connector</c> is <c>null</c>
 /// </exception>
 public void Initialize(BackpropagationConnector connector)
 {
     Helper.ValidateNotNull(connector, "connector");

     // Every synapse receives the one fixed weight this initializer was built with.
     foreach (BackpropagationSynapse s in connector.Synapses)
     {
         s.Weight = constant;
     }
 }
        /// <summary>
        /// Initializes weights of all backpropagation synapses in the backpropagation
        /// connector using the Nguyen-Widrow scheme: each target neuron's incoming
        /// weights are drawn as a random vector scaled by the Nguyen-Widrow factor.
        /// </summary>
        /// <param name="connector">
        /// The backpropagation connector to initialize.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <c>connector</c> is <c>null</c>
        /// </exception>
        public void Initialize(BackpropagationConnector connector)
        {
            Helper.ValidateNotNull(connector, "connector");

            // Scale factor derived from the source and target layer sizes.
            double nGuyenWidrowFactor = NGuyenWidrowFactor(
                connector.SourceLayer.NeuronCount, connector.TargetLayer.NeuronCount);

            // NOTE(review): integer division assumes synapses are distributed evenly
            // across target neurons — confirm for partially-connected connectors.
            int synapsesPerNeuron = connector.SynapseCount / connector.TargetLayer.NeuronCount;

            foreach (INeuron neuron in connector.TargetLayer.Neurons)
            {
                int i = 0;
                // A fresh scaled random vector per target neuron; consumed in the
                // order GetSourceSynapses yields the neuron's incoming synapses.
                double[] normalizedVector = Helper.GetRandomVector(synapsesPerNeuron, nGuyenWidrowFactor);
                foreach (BackpropagationSynapse synapse in connector.GetSourceSynapses(neuron))
                {
                    synapse.Weight = normalizedVector[i++];
                }
            }
        }
// Example #5
// 0
        /// <summary>
        /// Constructs a training procedure for standard backpropagation: builds a
        /// 2-3-1 network, trains it on the XOR truth table, and prints the result
        /// of running each of the four input patterns.
        /// </summary>
        /// <param name="writer">Stream supplied by the caller; not written to here.</param>
        public TestingNdn(StreamWriter writer)
        {
            // Training hyper-parameters. We might make a gui for this later.
            int    numberOfNeurons = 3;
            double learningRate    = 0.5;
            int    numberOfCycles  = 10000;

            // Per-epoch mean squared error, kept for plotting/analysis.
            double[] errorList = new double[numberOfCycles];

            LinearLayer  inputLayer  = new LinearLayer(2);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            // The connectors register themselves with the layers they join, so they
            // are needed for their side effects even though the locals are not read.
            BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(learningRate);

            // XOR truth table: 2 inputs, 1 output.
            TrainingSet trainingSet = new TrainingSet(2, 1);
            trainingSet.Add(new TrainingSample(new double[] { 0, 0 }, new double[] { 0 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1 }, new double[] { 1 }));
            trainingSet.Add(new TrainingSample(new double[] { 1, 0 }, new double[] { 1 }));
            trainingSet.Add(new TrainingSample(new double[] { 1, 1 }, new double[] { 0 }));

            double max = 0;

            // Capture the error value of each iteration and report percent complete.
            network.EndEpochEvent +=
                delegate(object networkInput, TrainingEpochEventArgs args)
            {
                errorList[args.TrainingIteration] = network.MeanSquaredError;
                max             = Math.Max(max, network.MeanSquaredError);
                PercentComplete = args.TrainingIteration * 100 / numberOfCycles;
            };

            network.Learn(trainingSet, numberOfCycles);

            // BUGFIX: removed dead code — an empty TrainingSample, an unused
            // 'indices' array, an empty loop, and an unread OutputLayer snapshot.
            Console.WriteLine("final output");

            double[] r1 = new double[] { 0, 0 };
            double[] r2 = new double[] { 0, 1 };
            double[] r3 = new double[] { 1, 0 };
            double[] r4 = new double[] { 1, 1 };

            Console.WriteLine(" 0 0 => " + network.Run(r1)[0]);
            Console.WriteLine(" 0 1 => " + network.Run(r2)[0]);
            Console.WriteLine(" 1 0 => " + network.Run(r3)[0]);
            Console.WriteLine(" 1 1 => " + network.Run(r4)[0]);
        }
        /// <summary>
        /// Initializes weights of all backpropagation synapses in the backpropagation
        /// connector from a single normalized random vector.
        /// </summary>
        /// <param name="connector">
        /// The backpropagation connector to initialize.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <c>connector</c> is <c>null</c>
        /// </exception>
        public void Initialize(BackpropagationConnector connector)
        {
            Helper.ValidateNotNull(connector, "connector");

            // One random value per synapse, drawn as a unit-scaled vector and
            // handed out in the connector's enumeration order.
            double[] weights = Helper.GetRandomVector(connector.SynapseCount, 1d);
            int index = 0;
            foreach (BackpropagationSynapse synapse in connector.Synapses)
            {
                synapse.Weight = weights[index];
                index++;
            }
        }
// Example #7
// 0
 /// <summary>
 /// Wires the input→hidden and hidden→output connectors and starts the
 /// background learning task.
 /// </summary>
 public NeuralNetwork()
 {
     // NOTE(review): assumes _inputLayer, _hiddenLayer and _outputLayer are
     // assigned by field initializers before this body runs — confirm.
     _inputHiddenBackpropagationConnector  = new BackpropagationConnector(_inputLayer, _hiddenLayer);
     _hiddenOutputBackpropagationConnector = new BackpropagationConnector(_hiddenLayer, _outputLayer);
     // NOTE(review): fire-and-start async work from a constructor; any exception
     // surfaces only when _learnNetworkTask is awaited or observed.
     _learnNetworkTask = LearnNetworkAsync();
 }
        /// <summary>
        /// This constructor creates a default network to work with: a 10-10-8
        /// backpropagation network trained on the stored AI dataset, then used to
        /// drive hill-climbing topology plots and (if needed) trigger a new game.
        /// </summary>
        /// <param name="aoe2Directory">Directory of your age of empires game.</param>
        /// <param name="aiScript">Name of your ai script that you want to generate.</param>
        public AITrainingModule(string aoe2Directory, string aiScript)
        {
            _logger = Program.Logger;
            _logger.Info("Initializing Training module.");

            _aoe2Directory = aoe2Directory;
            _aiScript      = aiScript;

            _numberOfInitialCycles    = 100000;
            _numberOfContinuousCycles = 10000;
            _numberOfNeuronRefreshes  = 0;

            // Fixed layer sizes for the default network.
            int numberOfInputNeurons  = 10;
            int numberOfHiddenNeurons = 10;
            int numberOfOutputNeurons = 8;

            double learningRate = 0.25;

            _errorList = new LinkedList <double>();

            LinearLayer  inputLayer  = new LinearLayer(numberOfInputNeurons);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfHiddenNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(numberOfOutputNeurons);

            // The connectors register themselves with the layers they join, which is
            // what wires the hidden layer into the network even though only the
            // input and output layers are passed to the network constructor below.
            var conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            var conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            // NOTE(review): "_nueralNetwork" is misspelled, but the field is declared
            // elsewhere in this class — rename at the declaration, not here.
            _nueralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
            _nueralNetwork.SetLearningRate(learningRate);
            _nueralNetwork.EndEpochEvent += BackgroundTasks;             // per-epoch housekeeping callback

            // Needs to make initial configuration of AI.
            // NOTE(review): "Begining" typo in the log text below — runtime string,
            // left unchanged here.
            _logger.Warn("Begining initial training cycle...");

            // If this module is being instantiated for the first time, create a comprehensive
            // knowledge base/ network so it can continue where it last left off. Tweak the
            // query to filter outliers.
            _rawMgxStats = StreamUtilities.GetAiDataSet();

            // Log progress once per 1% of the initial cycle count.
            // NOTE(review): "Precent" typo in the log text below — runtime string.
            _nueralNetwork.EndEpochEvent +=
                (object networkInput, TrainingEpochEventArgs args) =>
            {
                if (_percent % (_numberOfInitialCycles / 100) == 0 && _percent > 0)
                {
                    _logger.Info(string.Format("Precent completed {0}%", _percent / (_numberOfInitialCycles / 100)));
                }

                _percent++;
            };

            _nueralNetwork.Learn(CompileTrainingSet(_rawMgxStats), _numberOfInitialCycles);
            _logger.Warn("Finished initial training cycle.");

            // Get the latest dataset so we can generate some kind of graph and push the data set to database.
            var knowledgeBase = StreamUtilities.GetLatestAiEntry().ToList();
            var aiTrainingSet = CompileTrainingSet(knowledgeBase);

            // Most recent entry is treated as the current game stats.
            _currentStats = knowledgeBase[knowledgeBase.Count - 1];

            // push data, hacked to show simple output
            //double[] veryFirstInput
            //	 =
            //{
            //	0.2,0.2,0.2,0.2,0.2,
            //	0.2,0.2,0.2,0.2,0.2
            //};

            // Seed hill-climbing from the first training sample's input vector.
            _climber = new HillClimbing(aiTrainingSet[0].InputVector, _nueralNetwork);
            // _climber = new HillClimbing(veryFirstInput, _nueralNetwork);


            // Emit plot data for every pair (i, j), i < j, of the first five input
            // axes — dimensions are hardcoded here.
            int ordinalTracker = 1;

            for (int i = 0; i < 4; i++)
            {
                for (int j = i + 1; j < 5; j++)
                {
                    //write normalized data
                    StreamUtilities.SubmitPlotableData(_climber.GenerateTopologyData(i, j), ordinalTracker);

                    //write unnormalized data.
                    StreamUtilities.SubmitPlotableUnnormailizedData(_climber.GenerateUnormalizedTopologyData(i, j), ordinalTracker);
                    _logger.Debug(string.Format("Writing Axis{0} and Axis{1} with ordinal {2}.", i, j, ordinalTracker));
                    ordinalTracker++;
                }
            }

            // If input table == output, then a new game is needed
            if (StreamUtilities.CheckIfNewGameIsNeeded())
            {
                TriggerNewGame();
            }
        }
// Example #9
// 0
 /// <summary>
 /// Initializes weights of all backpropagation synapses in the backpropagation
 /// connector with independent random values drawn between the configured limits.
 /// </summary>
 /// <param name="connector">
 /// The backpropagation connector to initialize.
 /// </param>
 /// <exception cref="ArgumentNullException">
 /// If <c>connector</c> is <c>null</c>
 /// </exception>
 public void Initialize(BackpropagationConnector connector)
 {
     Helper.ValidateNotNull(connector, "connector");

     // Each synapse gets its own random draw from [minLimit, maxLimit].
     foreach (BackpropagationSynapse s in connector.Synapses)
     {
         s.Weight = Helper.GetRandom(minLimit, maxLimit);
     }
 }
// Example #10
// 0
        /// <summary>
        /// Entry point: builds a 25-100-4 backpropagation network, then runs a
        /// console labyrinth game loop driven by the arrow keys.
        /// </summary>
        static void Main(string[] args)
        {
            bool finished = false;

            //Layer
            LinearLayer  inputLayer   = new LinearLayer(25);
            SigmoidLayer hiddenLayer1 = new SigmoidLayer(100);
            SigmoidLayer outputLayer  = new SigmoidLayer(4);

            // Connectors register themselves with the layers they join, so they are
            // needed for their side effects even though the locals are not read again.
            BackpropagationConnector connector  = new BackpropagationConnector(inputLayer, hiddenLayer1);
            BackpropagationConnector connector3 = new BackpropagationConnector(hiddenLayer1, outputLayer);

            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.Initialize();

            labyrinth laby = new labyrinth();
            player    plyr = new player();

            laby.Update(laby);

            Console.WriteLine(laby.GetPlayerPosition(laby.Laby));

            // Game loop: map an arrow key to a (row, column) delta plus a direction
            // code, then move when the destination cell is not a wall. This replaces
            // four copy-pasted switch arms that differed only in the delta and code.
            // NOTE(review): 'finished' is never set to true, so the loop runs until
            // the process is killed — preserved from the original behavior.
            do
            {
                string pressedKey = Console.ReadKey(true).Key.ToString();

                int dRow = 0, dCol = 0;
                string direction = null;

                switch (pressedKey)
                {
                case "RightArrow":
                    dCol = 1;
                    direction = "R";
                    break;

                case "LeftArrow":
                    dCol = -1;
                    direction = "L";
                    break;

                case "UpArrow":
                    dRow = -1;
                    direction = "U";
                    break;

                case "DownArrow":
                    dRow = 1;
                    direction = "D";
                    break;
                }

                if (direction != null)
                {
                    var position = plyr.getPlayerPosition();
                    if (laby.Laby[position.Item1 + dRow, position.Item2 + dCol] != laby.Wall)
                    {
                        laby.Laby = laby.SetPosition(plyr.getPlayerPosition(), plyr.move(direction), laby.Laby);
                        laby.Update(laby);
                    }
                }
            } while (!finished);

            Console.Read();
        }
// Example #11
// 0
        /// <summary>
        /// Trains the face-recognition backpropagation network on the given eigenvalue
        /// data, serializes it to "ann.dat" under the F-Spot base directory, and
        /// records per-label sample counts as training status.
        /// </summary>
        /// <param name="eigen">
        /// A <see cref="EigenValueTags"/> holding one eigenvalue vector and tag per face.
        /// </param>
        private static void TrainNetwork(EigenValueTags eigen)
        {
            Log.Debug("================ Train Started ================ ");

            string[] dLabels = eigen.FacesLabel;
            int numInstances = eigen.eigenTaglist.Count;
            // Input width comes from the first instance's eigenvalue vector;
            // one output node per distinct label.
            int inputNodes = eigen.eigenTaglist[0].val.Length;
            int outputNodes = dLabels.Length;
            int hiddenNodes = inputNodes+outputNodes;

            float[][] trainInput = new float[numInstances][];
            float[][] trainOutput = new float[numInstances][];

            // Build one training pair per instance. Targets are soft: 0.9 for the
            // matching label, 0.1 for all others (suits sigmoid output units).
            //Random r = new Random();
            int numstrain = 0;
            for(int i=0;i<numInstances;i++){

                trainInput[numstrain] = new float[inputNodes];
                trainOutput[numstrain] = new float[outputNodes];

                for(int j=0;j<dLabels.Length;j++){
                    if(eigen.eigenTaglist[i].tag.Equals(dLabels[j]))
                        trainOutput[numstrain][j] = 0.9f;
                    else
                        trainOutput[numstrain][j] = 0.1f;
                }

                for(int j=0;j<inputNodes;j++){
                    trainInput[numstrain][j] = eigen.eigenTaglist[i].val[j];
                }
                numstrain++;
            }

            // Convert float arrays to double for the network API.
            // NOTE(review): numstrain always equals numInstances here; it only
            // differs if the loop above is later changed to skip instances.
            Log.Debug("nums train = "+ numstrain);
            double[][] trainInputD = new double[numstrain][];
            double[][] trainOutputD = new double[numstrain][];
            for(int i=0;i<numstrain;i++){
                trainInputD[i] = new double[inputNodes];
                trainOutputD[i] = new double[outputNodes];
                for(int j=0;j<outputNodes;j++){
                    trainOutputD[i][j] = trainOutput[i][j];
                }

                for(int j=0;j<inputNodes;j++){
                    trainInputD[i][j] = trainInput[i][j];
                }
            }

            //			TimeSpan tp = System.DateTime.Now.TimeOfDay;

            // Fully-qualified layer types: this file also uses other layer classes.
            Log.Debug("#in = {0}, #hid = {1}, #out = {2}",inputNodes,hiddenNodes,outputNodes);
            NeuronDotNet.Core.Backpropagation.SigmoidLayer inputLayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(inputNodes);
            NeuronDotNet.Core.Backpropagation.SigmoidLayer hiddenlayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(hiddenNodes);
            NeuronDotNet.Core.Backpropagation.SigmoidLayer outputlayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(outputNodes);
            Log.Debug("BackpropagationConnector input_hidden =  new BackpropagationConnector(inputLayer, hiddenlayer);");
            BackpropagationConnector input_hidden =  new BackpropagationConnector(inputLayer, hiddenlayer);
            BackpropagationConnector hidden_output =  new BackpropagationConnector(hiddenlayer, outputlayer);

            input_hidden.Momentum = 0.3;
            hidden_output.Momentum = 0.3;
            Log.Debug("bpnet = new BackpropagationNetwork(inputLayer,outputlayer);");
            bpnet = new BackpropagationNetwork(inputLayer,outputlayer);
            Log.Debug("TrainingSet tset = new TrainingSet(inputNodes, outputNodes);");
            TrainingSet tset = new TrainingSet(inputNodes, outputNodes);
            for(int i=0;i<numstrain;i++)
                tset.Add(new TrainingSample(trainInputD[i], trainOutputD[i]));

            // Jitter noise helps prevent getting stuck in local minima.
            bpnet.JitterNoiseLimit = 0.0001;
            bpnet.Initialize();

            int numEpoch = 200;
            bpnet.SetLearningRate(0.2);
            bpnet.Learn(tset, numEpoch);

            //			Log.Debug("error = {0}",bpnet.MeanSquaredError);

            //			string savepath = facedbPath + "object/";
            //			if(!Directory.Exists(savepath))
            //				Directory.CreateDirectory(savepath);

            // Serialize the trained network so later sessions can reload it.
            string path = Path.Combine (FSpot.Global.BaseDirectory, "ann.dat");
            SerializeUtil.Serialize(path, bpnet);

            // Deserialize
            //BackpropagationNetwork testnet = (BackpropagationNetwork)SerializeUtil.DeSerialize("nn.dat");
            //			Log.Debug("error = {0}",bpnet.MeanSquaredError);
            //bpnet = (BackpropagationNetwork)SerializeUtil.DeSerialize("/home/hyperjump/nn.dat");
            //Log.Debug("error = {0}",bpnet.MeanSquaredError);

            // test by using training data
            //			int correct = 0;
            //			for(int i=0;i<numInstances;i++){
            //
            //				double[] v = new double[inputNodes];
            //				for(int j=0;j<v.Length;j++){
            //					v[j] = (double)eigen.eigenTaglist[i].val[j];
                    //Console.Write("{0},",v[j]);
            //				}
                //Console.WriteLine();

            //				double[] netOutput = bpnet.Run(v);
                //Console.WriteLine("net out:");
            //				for(int j=0;j<netOutput.Length;j++)
            //					Console.Write("{0},",netOutput[j]);

            //				string result = FaceClassifier.Instance.AnalyseNetworkOutput(eigen, netOutput);
            //				if(eigen.eigenTaglist[i].tag.Equals(result))
            //					correct++;
            //			}
            //			Log.Debug("% correct = " + (float)correct/(float)numInstances * 100);

            // Persist per-label sample counts as the training status.

            Log.Debug("Saving Train Status...");

            List<Tstate> tstateList = new List<Tstate>();
            int[] num = new int[dLabels.Length];
            Log.Debug("num length = {0}",num.Length);

            // Count how many instances carry each label.
            foreach(VTag vt in eigen.eigenTaglist){
                for(int k=0;k<num.Length;k++)
                    if(vt.tag.Equals(dLabels[k]))
                        num[k]++;
            }
            for(int k=0;k<dLabels.Length;k++){
                tstateList.Add(new Tstate(dLabels[k], num[k]));
            }

            FaceSpotDb.Instance.TrainingData.Trainstat = tstateList;

            //			Log.Debug("time ="+  System.DateTime.Now.TimeOfDay.Subtract(tp));
            Log.Debug("================ Train ended ================ ");
        }
// Example #12
// 0
 /// <summary>
 /// Initializes weights of all backpropagation synapses in the backpropagation
 /// connector. This initializer deliberately leaves every weight unchanged.
 /// </summary>
 /// <param name="connector">
 /// The backpropagation connector to initialize.
 /// </param>
 /// <exception cref="ArgumentNullException">
 /// If <c>connector</c> is <c>null</c>
 /// </exception>
 public void Initialize(BackpropagationConnector connector)
 {
     // BUGFIX: the documented ArgumentNullException was never thrown. Validate
     // the argument like every other initializer does, then intentionally do
     // nothing so existing weights are preserved.
     Helper.ValidateNotNull(connector, "connector");
 }
        /// <summary>
        /// Trains a backpropagation network on comma-separated samples read from a
        /// file, dumps the per-epoch error values, and prints the network's output
        /// for a fixed demo input. Each file line holds the input values, one
        /// separator column, then the output values.
        /// </summary>
        /// <param name="file">Path of the comma-separated training-data file.</param>
        /// <param name="numberOfInputNeurons">Input layer size (input vector width).</param>
        /// <param name="numberOfHiddenNeurons">Hidden layer size.</param>
        /// <param name="numberOfOutputNeurons">Output layer size (output vector width).</param>
        /// <param name="numberOfCycles">Number of training epochs.</param>
        /// <param name="learningRate">Backpropagation learning rate.</param>
        public static void Test(
            string file,
            int numberOfInputNeurons,
            int numberOfHiddenNeurons,
            int numberOfOutputNeurons,
            int numberOfCycles  = 50000,
            double learningRate = 0.25)
        {
            // Per-epoch mean squared error, dumped to disk after training.
            double[] errorList = new double[numberOfCycles];

            LinearLayer  inputLayer  = new LinearLayer(numberOfInputNeurons);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfHiddenNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(numberOfOutputNeurons);

            // The connectors register themselves with the layers they join, so they
            // are needed for their side effects even though the locals are not read.
            BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(learningRate);

            // BUGFIX: sizes were hardcoded to 10/8, silently ignoring the method's
            // own size parameters.
            TrainingSet trainingSet = new TrainingSet(numberOfInputNeurons, numberOfOutputNeurons);

            // BUGFIX: the original redirected Console.In to the file reader and never
            // restored it, leaving Console.In aimed at a disposed reader afterwards.
            // We read from the StreamReader directly instead.
            using (StreamReader reader = new StreamReader(file))
            {
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    string[] array       = line.Split(',');
                    double[] inputArray  = new double[numberOfInputNeurons];
                    double[] outputArray = new double[numberOfOutputNeurons];

                    for (int i = 0; i < numberOfInputNeurons; i++)
                    {
                        inputArray[i] = Convert.ToDouble(array[i]);
                    }

                    // Outputs begin one column past the inputs; the column between
                    // them is skipped (matches the original's fixed offset of 11).
                    for (int i = 0; i < numberOfOutputNeurons; i++)
                    {
                        outputArray[i] = Convert.ToDouble(array[i + numberOfInputNeurons + 1]);
                    }

                    trainingSet.Add(new TrainingSample(inputArray, outputArray));
                }
            }

            double max = 0;

            // Capture the error value of each epoch for later analysis.
            network.EndEpochEvent +=
                delegate(object networkInput, TrainingEpochEventArgs args)
            {
                errorList[args.TrainingIteration] = network.MeanSquaredError;
                max = Math.Max(max, network.MeanSquaredError);
            };

            network.Learn(trainingSet, numberOfCycles);

            // Print out the error list for scientific evaluation.
            StreamUtilities.DumpData("dumpErrorValues.txt", errorList);

            // NOTE(review): this demo query assumes a 10-input network and will
            // throw for other sizes — preserved from the original hardcoded call.
            double[] outputResult = network.Run(new double[] { 0.47, 0.41, 0.12, 0.05, 0.1, 0.5, 0.1, 0.1, 0.05, 0.1 });

            foreach (var d in outputResult)
            {
                Console.WriteLine("output: " + d);
            }
        }