Example #1
    public ANN_Controller(int nInputLayerNeurons, int nHiddenLayerNeurons, int nOutputLayerNeurons)
    {
        m_ANN = new ArtificialNeuralNetwork(nInputLayerNeurons, nHiddenLayerNeurons, nOutputLayerNeurons);

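        // Fit the network on the bundled training set; the last two arguments are presumably the
        // number of training iterations (5000) and the learning rate (0.05).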
        m_ANN.FitNetwork(TrainingDatabase.inputTraining1, TrainingDatabase.desiredOutputTraining1, 5000, 0.05f);
        ShowLayers();
    }
Example #2
        static void Main(string[] args)
        {
            //var dataSource = InitSingleValueDataSource();
            // var dataSource = InitListDataSource();

            var dataSource = InitMnistDataSource();

            //var config = new ArtificialNeuralNetworkConfig
            //{
            //    InputDimensions = dataSource.InputDimensions,
            //    NeuronCounts = new int[] { 32, dataSource.OutputDimensions },
            //    LearningRate = 0.001,
            //    ActivationType = ActivationTypes.ReLU
            //};

            //var ann = new ArtificialNeuralNetwork(config);
            var dir      = @"C:\Projects\simpleneuralnetwork\";
            var filename = "dc1cc91c-aa52-4aae-ac68-582c3c7d9f8c-593-p-0,0626084964120264.ann";
            var fullPath = dir + filename;

            var ann = ArtificialNeuralNetwork.Load(fullPath);

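            // Fine-tune the loaded network: drop to a much smaller learning rate and keep training
            // (the 10000 argument is presumably an epoch/iteration count).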
            ann.LearningRate = 0.00001;
            ann.Train(dataSource, 10000);

            //var ann = ArtificialNeuralNetwork.Load(fullPath);
            //CalculatePercentCorrect(ann);

            Console.WriteLine("Press <enter> to quit...");
            Console.ReadLine();
        }
Example #3
 /// <summary>
 /// Starts the neural network with full control over its setup.
 /// </summary>
 /// <param name="inputAmmount">Input amount.</param>
 /// <param name="outputAmmount">Output amount.</param>
 /// <param name="hiddenLayerAmmount">Number of hidden layers.</param>
 /// <param name="neuronsInHiddenLayer">Neurons per hidden layer.</param>
 /// <param name="errorAlpha">Alpha.</param>
 /// <param name="inputActivationMethod">Input layer activation method.</param>
 /// <param name="hiddenActivationMethod">Hidden layer activation method.</param>
 /// <param name="outputActivationMethod">Output layer activation method.</param>
 public void StartNeuralNetwork(int inputAmmount, int outputAmmount, int hiddenLayerAmmount, int neuronsInHiddenLayer, double errorAlpha, Neuron.ActivationType inputActivationMethod, Neuron.ActivationType hiddenActivationMethod, Neuron.ActivationType outputActivationMethod)
 {
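     // Build the network only once; if one already exists, this call is ignored.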
     if (neuralNetwork != null)
     {
         return;
     }
     neuralNetwork = new ArtificialNeuralNetwork(inputAmmount, outputAmmount, hiddenLayerAmmount, neuronsInHiddenLayer, errorAlpha, inputActivationMethod, hiddenActivationMethod, outputActivationMethod);
 }
Example #4
        static private void Train(double[][] Data, int input, int output)
        {
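            // Build a simple Affine -> BatchNorm -> Tanh -> Affine stack, train it for 100 epochs while a
            // BackgroundWorker monitors the error, then save the trained model to a timestamped folder.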
            //set parameters in layers
            int numHidden = 10;

            //create layers and construct them
            List <NNlayers> Nlist = new List <NNlayers>();
            NNlayers        N1    = new NNlayers(NNlayers.Layers_family.Affine, input, numHidden);
            NNlayers        N2    = new NNlayers(NNlayers.Layers_family.BN, numHidden, numHidden);
            NNlayers        N3    = new NNlayers(NNlayers.Layers_family.Tanh, numHidden, numHidden);
            NNlayers        N4    = new NNlayers(NNlayers.Layers_family.Affine, numHidden, output);

            Nlist.Add(N1);
            Nlist.Add(N2);
            Nlist.Add(N3);
            Nlist.Add(N4);

            //create a NN class
            Ann = new ArtificialNeuralNetwork(Nlist.ToArray(), input, output);
            Ann.PositiveLimit = 0.5;//default = 0.7
            int    maxEpochs = 100;
            double learnRate = 0.05;

            //create an error-monitoring BackgroundWorker
            BackgroundWorker BGW = new BackgroundWorker();

            BGW.DoWork += new DoWorkEventHandler(backgroundWorker_NN_DoWork);
            BGW.RunWorkerAsync(maxEpochs);

            //train
            Ann.TrainModel(Data, maxEpochs, learnRate, 0);



            double trainAcc = Ann.Accu_train;

            Console.Write("\nFinal accuracy on train data = " +
                          trainAcc.ToString("F4"));

            double testAcc = Ann.Accu_test;

            Console.Write("\nFinal accuracy on test data = " +
                          testAcc.ToString("F4"));
            Console.Write("\nTraining finished");

            string site = System.DateTime.Now.ToString("yyMMddHHmm") + "_learnproj";

            Directory.CreateDirectory(site);
            Ann.Save_network(site, learnRate);
            Ann.Save_H5files(site);

            Console.Write("\nVariables have been saved in " + site);
        }
Example #5
 // Use this for initialization
 void Start()
 {
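     // Build the network from the training data's input/output sizes and the configured hyperparameters
     // (presumably exposed as inspector fields), then train for trainingData.epochCount epochs.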
     if (trainingData != null)
     {
         neuralNetwork = new ArtificialNeuralNetwork(
             trainingData.InputAmmount,
             trainingData.OutputAmmount,
             hiddenLayers,
             neuronsPerHiddenLayer,
             alpha,
             inputLayerActivationMethod,
             hiddenLayersActivationMethod,
             outputLayerActivationMethod
             );
         Train(trainingData.epochCount);
     }
 }
Example #6
        static void ManuallyInitWeightsForSingleValueSource(ArtificialNeuralNetwork ann)
        {
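            // Hard-code the initial weights and biases so a single forward/backward pass can be checked by hand;
            // the values appear to match a widely used step-by-step backpropagation worked example.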
            ann.Layers[0].Neurons[0].IncomingConnections[1].Weight = 0.15;
            ann.Layers[0].Neurons[0].IncomingConnections[2].Weight = 0.20;
            ann.Layers[0].Neurons[0].Bias = 0.35;

            ann.Layers[0].Neurons[1].IncomingConnections[1].Weight = 0.25;
            ann.Layers[0].Neurons[1].IncomingConnections[2].Weight = 0.30;
            ann.Layers[0].Neurons[1].Bias = 0.35;

            ann.Layers[1].Neurons[0].IncomingConnections[1].Weight = 0.40;
            ann.Layers[1].Neurons[0].IncomingConnections[2].Weight = 0.45;
            ann.Layers[1].Neurons[0].Bias = 0.60;

            ann.Layers[1].Neurons[1].IncomingConnections[1].Weight = 0.50;
            ann.Layers[1].Neurons[1].IncomingConnections[2].Weight = 0.55;
            ann.Layers[1].Neurons[1].Bias = 0.60;
        }
Example #7
        static void CalculatePercentCorrect(ArtificialNeuralNetwork ann)
        {
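            // Run the trained network over the MNIST test set, then report overall accuracy and per-digit accuracy.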
            var testDataSource = InitMnistTesttDataSource();
            int numPoints      = 0;
            int numCorrect     = 0;

            var classCounts = new Dictionary <int, (int numCorrect, int numTotal)>();

            foreach (var dataPoint in testDataSource.DataPoints)
            {
                numPoints++;
                var estimated = ann.Classify(dataPoint);
                var expected  = dataPoint.Label;

                var classEstimated = getClass(estimated);
                var classExpected  = getClass(expected);

                var classCountsTouple =
                    classCounts.TryGetValue(classExpected, out var touple) ? touple : (numCorrect : 0, numTotal : 0);

                classCountsTouple.numTotal++;

                if (classEstimated == classExpected)
                {
                    numCorrect++;
                    classCountsTouple.numCorrect++;
                }

                classCounts[classExpected] = classCountsTouple;
            }

            Console.WriteLine("Percent correct: {0}", (100.0 * numCorrect) / (double)numPoints);
            for (int i = 0; i < 10; i++)
            {
                var classCountsTouple =
                    classCounts.TryGetValue(i, out var touple) ? touple : (numCorrect : 0, numTotal : 0);

                var percent = 100.0 * classCountsTouple.numCorrect / (double)classCountsTouple.numTotal;

                Console.WriteLine("{0}: {1}", i, percent);
            }
        }
Example #8
    private void Start()
    {
        // int amountInputs, int amountOutputs, int amountHiddenLayers, int amountNeuronsPerHiddenLayer, double alpha
        artificialNeuralNetwork = new ArtificialNeuralNetwork(2, 1, 1, 2, trainingRate);
        List <double> result;

        if (runTimes == 1)
        {
            sumSquareError  = 0;
            result          = Train(1, 1, 0);
            sumSquareError += Mathf.Pow((float)result[0] - 0, 2);
        }
        else
        {
            // Train the network runTimes times over the four XOR cases
            for (int i = 0; i < runTimes; i++)
            {
                sumSquareError  = 0;
                result          = Train(1, 1, 0);
                sumSquareError += Mathf.Pow((float)result[0] - 0, 2);
                result          = Train(1, 0, 1);
                sumSquareError += Mathf.Pow((float)result[0] - 1, 2);
                result          = Train(0, 1, 1);
                sumSquareError += Mathf.Pow((float)result[0] - 1, 2);
                result          = Train(0, 0, 0);
                sumSquareError += Mathf.Pow((float)result[0] - 0, 2);
            }
        }

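        // Note: Train() also updates the weights as a side effect, so these calls nudge the network
        // one more step while printing its output for each XOR case.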
        Debug.Log("Sum squared errors: " + sumSquareError);
        result = Train(1, 1, 0);
        Debug.Log("1 1 : " + result[0]);
        result = Train(1, 0, 1);
        Debug.Log("1 0 : " + result[0]);
        result = Train(0, 1, 1);
        Debug.Log("0 1 : " + result[0]);
        result = Train(0, 0, 0);
        Debug.Log("0 0 : " + result[0]);
    }
Example #9
    // Use this for initialization
    private void Start()
    {
        artificialNeuralNetwork = new ArtificialNeuralNetwork(2, 1, 1, 2, 0.8);

        List <double> results;

        var    epochs        = 100000;
        double desiredOutput = 0d;

        for (var i = 0; i < epochs; i++)
        {
            sumSquareError = 0;

            // Training set for an XOR operator.
            desiredOutput   = 0;
            results         = Train(1, 1, desiredOutput);
            sumSquareError += Mathf.Pow((float)results[0] - (float)desiredOutput, 2);

            desiredOutput   = 1;
            results         = Train(1, 0, desiredOutput);
            sumSquareError += Mathf.Pow((float)results[0] - (float)desiredOutput, 2);

            desiredOutput   = 1;
            results         = Train(0, 1, desiredOutput);
            sumSquareError += Mathf.Pow((float)results[0] - (float)desiredOutput, 2);

            desiredOutput   = 0;
            results         = Train(0, 0, desiredOutput);
            sumSquareError += Mathf.Pow((float)results[0] - (float)desiredOutput, 2);

            // Training set for an XNOR operator:
            // Switch all desiredOutput values of the XOR training set
            // from 1 to 0 and from 0 to 1, respectively.
        }
        Debug.LogFormat("SumSquareError: {0}", sumSquareError);

        // Run training set again.
        // Side-effect: updates weights as well inside GO().
        // XOR training set.

        double input1 = 1;
        double input2 = 1;

        desiredOutput = 0;
        results       = Train(input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Input values)", input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Output value rounded)", input1, input2, Mathf.Round((float)results[0]));
        Debug.LogFormat("{0}, {1}, {2} (Output value)\n", input1, input2, results[0]);

        input1        = 1;
        input2        = 0;
        desiredOutput = 1;
        results       = Train(input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Input values)", input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Output value rounded)", input1, input2, Mathf.Round((float)results[0]));
        Debug.LogFormat("{0}, {1}, {2} (Output value)\n", input1, input2, results[0]);

        input1        = 0;
        input2        = 1;
        desiredOutput = 1;
        results       = Train(input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Input values)", input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Output value rounded)", input1, input2, Mathf.Round((float)results[0]));
        Debug.LogFormat("{0}, {1}, {2} (Output value)\n", input1, input2, results[0]);

        input1        = 0;
        input2        = 0;
        desiredOutput = 0;
        results       = Train(input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Input values)", input1, input2, desiredOutput);
        Debug.LogFormat("{0}, {1}, {2} (Output value rounded)", input1, input2, Mathf.Round((float)results[0]));
        Debug.LogFormat("{0}, {1}, {2} (Output value)\n", input1, input2, results[0]);
    }
Example #10
        static private void Compute(double[][] Data, double[][] Raw, int input, int output, double[] delta, double[] offset, int[] peak)
        {
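            // Let the user pick a previously saved learning project, run each data frame through the network,
            // rescale the outputs with delta/offset, and write the reconstructed rows to a timestamped CSV.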
            //create a NN class
            Ann = new ArtificialNeuralNetwork(input, output);
            Ann.PositiveLimit = 0.5;//default = 0.7
            DirectoryInfo di = new DirectoryInfo(System.Environment.CurrentDirectory);

            int           date     = 0;
            string        path     = "";
            List <string> projects = new List <string>();

            Console.WriteLine("\nChoose a learning project:");
            int N = 0;

            foreach (var fi in di.GetDirectories())
            {
                if (fi.Name.Contains("learnproj"))
                {
                    projects.Add(fi.Name);
                    Console.WriteLine(N + " = " + fi.Name);
                    N++;
                }
            }
            if (N == 0)
            {
                Console.Write("\nCannot find any learning project");
                return;
            }
            string n       = Console.ReadLine();
            bool   success = Ann.ImportOldProject(projects[Convert.ToInt32(n)]);

            if (!success)
            {
                Console.Write("\nCannot import the learning project");
                return;
            }

            string site = System.DateTime.Now.ToString("yyMMddHHmm") + "_Compute.csv";

            using (StreamWriter SW = new StreamWriter(site))
            {
                int i = 0;
                foreach (double[] temp in Data)
                {
                    double[] result = new double[framesize];
                    Array.Copy(temp, result, framesize);
                    //List<NNlayers> layers = Ann.
                    result = Ann.Compute(result);
                    string line = "";
                    int    idx  = peak[i] - (int)(framesize / 2);
                    foreach (double t in result)
                    {
                        Raw[i][idx] = t * delta[i] + offset[i];
                        idx++;
                    }
                    foreach (double t in Raw[i])
                    {
                        line += t.ToString() + ",";
                    }
                    SW.WriteLine(line);
                    i++;
                }
                SW.Close();
            }

            Console.Write("\nResults have been saved in " + site);
        }
Example #11
        static private void Improve(double[][] Data, int input, int output)
        {
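            // Same flow as Train(), but resumes from a previously saved learning project and continues
            // training it via ImproveModel() instead of starting from scratch.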
            //create a NN class
            Ann = new ArtificialNeuralNetwork(input, output);
            Ann.PositiveLimit = 0.5;//default = 0.7
            int    maxEpochs = 100;
            double learnRate = 0.05;

            //import old learning project

            DirectoryInfo di = new DirectoryInfo(System.Environment.CurrentDirectory);

            int           date     = 0;
            string        path     = "";
            List <string> projects = new List <string>();

            Console.WriteLine("\nChoose a learning project:");
            int N = 0;

            foreach (var fi in di.GetDirectories())
            {
                if (fi.Name.Contains("_learnproj"))
                {
                    projects.Add(fi.Name);
                    Console.WriteLine(N + " = " + fi.Name);
                    N++;
                }
            }
            if (N == 0)
            {
                Console.Write("\nCannot find any learning project");
                return;
            }
            string n = Console.ReadLine();


            bool success = Ann.ImportOldProject(projects[Convert.ToInt32(n)]);

            if (!success)
            {
                Console.Write("\nCannot import the learning project");
                return;
            }
            //create an error-monitoring BackgroundWorker
            BackgroundWorker BGW = new BackgroundWorker();

            BGW.DoWork += new DoWorkEventHandler(backgroundWorker_NN_DoWork);
            BGW.RunWorkerAsync(maxEpochs);

            //train
            Ann.ImproveModel(Data, maxEpochs, learnRate, 0);

            double trainAcc = Ann.Accu_train;

            Console.Write("\nFinal accuracy on train data = " +
                          trainAcc.ToString("F4"));

            double testAcc = Ann.Accu_test;

            Console.Write("\nFinal accuracy on test data = " +
                          testAcc.ToString("F4"));
            Console.Write("\nTraining finished");

            string site = System.DateTime.Now.ToString("yyMMddHHmm") + "_learnproj";

            Directory.CreateDirectory(site);
            Ann.Save_network(site, learnRate);
            Ann.Save_H5files(site);

            Console.Write("\nVariables have been saved in " + site);
        }
Example #12
        static private void Train(double[][] Data, int input, int output)
        {
            //set parameters in layers
            int numHidden  = 100;
            int numHidden2 = 100;
            int maps       = 16;
            int Deep1      = 20;
            int Deep2      = 40;
            int windows    = 2;
            //create layers and construct them
            List <NNlayers> Nlist = new List <NNlayers>();

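            // Layer stack, as assembled below: Convolution -> MaxPool -> BatchNorm -> ReLU ->
            // Convolution -> MeanPool -> BatchNorm -> ReLU -> Affine output layer.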
            NNlayers C1     = new NNlayers(NNlayers.Layers_family.Convolution, input, Deep1, 3, false);
            NNlayers C2     = new NNlayers(NNlayers.Layers_family.Maxpool, (input - 3 + 1), Deep1, 2);
            int      numCov = (int)Math.Ceiling((double)(input - 3 + 1) / 2);
            NNlayers N1     = new NNlayers(NNlayers.Layers_family.BN, numCov * Deep1, numCov * Deep1);
            NNlayers H2     = new NNlayers(NNlayers.Layers_family.ReLU, numCov * Deep1, numCov * Deep1);

            NNlayers C3      = new NNlayers(NNlayers.Layers_family.Convolution, numCov, Deep2, 3, Deep1, false);
            NNlayers C4      = new NNlayers(NNlayers.Layers_family.Meanpool, (numCov - 3 + 1), Deep2, 2);
            int      numCov2 = (int)Math.Ceiling((double)(numCov - 3 + 1) / 2) * Deep2;
            //NNlayers N1 = new NNlayers(NNlayers.Layers_family.Affine, (int)Math.Ceiling((double)(input - mapsize + 1)/windows)* maps, numHidden);
            NNlayers N2 = new NNlayers(NNlayers.Layers_family.BN, numCov2, numCov2);
            //NNlayers N3 = new NNlayers(NNlayers.Layers_family.ReLU, numHidden, numHidden);

            NNlayers H3  = new NNlayers(NNlayers.Layers_family.ReLU, numCov2, numCov2);
            NNlayers N10 = new NNlayers(NNlayers.Layers_family.Affine, numCov2, output);

            //NNlayers N11 = new NNlayers(NNlayers.Layers_family.Sigmoid, output, output);
            Nlist.Add(C1);
            Nlist.Add(C2);
            Nlist.Add(N1);
            Nlist.Add(H2);
            Nlist.Add(C3);
            Nlist.Add(C4);
            //Nlist.Add(N1);
            Nlist.Add(N2);
            Nlist.Add(H3);

            Nlist.Add(N10);
            //Nlist.Add(N11);

            //create a NN class
            Ann = new ArtificialNeuralNetwork(Nlist.ToArray(), input, output);
            Ann.PositiveLimit = 0.7;//default = 0.7
            Ann.Batchsize     = 100;
            int    maxEpochs = 4000;
            double learnRate = 0.005;

            //create an error-monitoring BackgroundWorker
            BackgroundWorker BGW = new BackgroundWorker();

            BGW.DoWork += new DoWorkEventHandler(backgroundWorker_NN_DoWork);
            BGW.RunWorkerAsync(maxEpochs);

            //train
            Ann.TrainModel(Data, maxEpochs, learnRate, 0);



            double trainAcc = Ann.Accu_train;

            Console.Write("\nFinal accuracy on train data = " +
                          trainAcc.ToString("F4"));

            double testAcc = Ann.Accu_test;

            Console.Write("\nFinal accuracy on test data = " +
                          testAcc.ToString("F4"));
            Console.Write("\nTraining finished");

            string site = System.DateTime.Now.ToString("yyMMddHHmm") + "_learnproj";

            Directory.CreateDirectory(site);
            Ann.Save_network(site, learnRate);
            Ann.Save_H5files(site);

            Console.Write("\nVariables have been saved in " + site);
        }
Example #13
        static public void SaveNetwork(string dataFileUsed, ArtificialNeuralNetwork network) //public void SaveNetwork(ArtificialNeuralNetwork network)
        {
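            // Serialize the network's settings, layer structure, optimization strategy, weights, and biases
            // to NeuralNetwork.xml under the network's FilePath.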
            XmlWriter writer = XmlWriter.Create(network.FilePath.ToString() + "/NeuralNetwork.xml");

            writer.WriteStartElement("NeuralNetwork"); //doc start

            writer.WriteAttributeString("Date", DateTime.Now.ToString());

            writer.WriteAttributeString("Type", "BackPropagation");

            writer.WriteAttributeString("DataFileUsedForTraining", dataFileUsed);


            //Network settings------------8::::::::::::>---------------------------


            writer.WriteStartElement("Settings");

            writer.WriteStartElement("DistributionType");

            writer.WriteElementString("InitialRandomDistributionType", network.RandomDistribution.ToString());

            writer.WriteEndElement();


            //Layer settings------------8::::::::::::>---------------------------


            writer.WriteStartElement("LayerStrucure");

            writer.WriteElementString("NumberOfInputNeurons", network.LayerStructure.numberOfInputNodes.ToString());
            writer.WriteElementString("NumberOfOutputNeurons", network.LayerStructure.numberOfOutputNodes.ToString());
            writer.WriteElementString("NumberOfHiddenLayers", network.LayerStructure.HiddenLayerList.Count.ToString());

            for (int i = 0; i < network.LayerStructure.HiddenLayerList.Count; i++)
            {
                writer.WriteElementString("NumberOfNeuronsInHiddenLayer" + (i + 1).ToString(), network.LayerStructure.HiddenLayerList[i].ToString());
            }

            writer.WriteEndElement();


            //Strategy settings------------8::::::::::::>---------------------------


            writer.WriteStartElement("Strategy");

            writer.WriteElementString("OptimizationStrategy", network.Strategy.ToString());
            writer.WriteElementString("HiddenLayerActivationFunction", network.Strategy.HiddenActivationFunction.ToString());
            writer.WriteElementString("OutputLayerActivationFunction", network.Strategy.OutputActivationFunction.ToString());
            writer.WriteElementString("CostFunction", network.Strategy.CostFunction.ToString());

            writer.WriteEndElement();


            //Weights settings------------8::::::::::::>---------------------------


            writer.WriteStartElement("Weights");

            for (int i = 0; i < network.Weights.Count; i++)
            {
                writer.WriteStartElement("WeightsForLayer" + (i + 1).ToString());
                writer.WriteAttributeString("NumberOfRows", network.Weights[i].GetLength(0).ToString());
                writer.WriteAttributeString("NumberOfColumns", network.Weights[i].GetLength(1).ToString());

                int counter = 0;
                foreach (double weight in network.Weights[i])
                {
                    writer.WriteElementString("Weight" + (counter + 1).ToString(), weight.ToString());
                    counter++;
                }

                writer.WriteEndElement();
            }
            writer.WriteEndElement();


            //Bias settings------------8::::::::::::>---------------------------


            writer.WriteStartElement("Biases"); //Biases start

            for (int i = 0; i < network.Biases.Count; i++)
            {
                writer.WriteStartElement("BiasesForLayer" + (i + 1).ToString());
                writer.WriteAttributeString("NumberOfRows", network.Biases[i].GetLength(0).ToString());

                int counter = 0;
                foreach (double bias in network.Biases[i])
                {
                    writer.WriteElementString("Weight" + (counter + 1).ToString(), bias.ToString());
                    counter++;
                }

                writer.WriteEndElement();
            }
            writer.WriteEndElement();

            writer.WriteEndElement(); //settings end

            writer.WriteEndElement(); // doc end

            writer.Flush();
            writer.Close();
        }
Example #14
        static void Main(string[] args)
        {
            //Set the path of the file containing the data set
            //string dataFilePath = @"C:\Users\kevin\Desktop\squaredtest.csv"; NutrioxDataset
            string dataFilePath = @"C:\Users\Bruker\Desktop\NutrioxDataset.csv";

            //string dataFilePath = @"C:\Users\Bruker\Desktop\-5to5-200Rows.csv";

            //Create a new data set
            DataSet.DataSet dataSet = new DataSet.DataSet(dataFilePath, true);

            //Apply desired data preprocessing to the data set
            dataSet.PreProcessDataSet(NormalizationType.MinMax, 2, EncodingType.None, null);

            //Create a model hyperparameter layer structure
            LayerStructure layerStructure = new LayerStructure()
            {
                numberOfInputNodes  = 2,
                HiddenLayerList     = new List <int> { 5, 5 },
                numberOfOutputNodes = 1
            };

            //Create an instance of the desired optimization strategy to use

            var regularizationStrategyFactory = new RegularizationStrategyFactory();
            StochasticGradientDescent SGD     = new StochasticGradientDescent(new SigmoidFunction(), new IdentityFunction(), new MeanSquaredError(), RegularizationType.None, regularizationStrategyFactory);

            //Create training hyperparameters
            TrainingParameters trainingParams = new TrainingParameters()
            {
                epochs = 500, learningRate = 0.01, momentum = 0.01, RegularizationLambda = 0.00
            };

            //Create an instance of a neural network
            //ArtificialNeuralNetwork ann = new ArtificialNeuralNetwork(layerStructure, trainingParams, dataSet, SGD, new GaussianDistribution());

            //Or Load a Network from XML

            XML xml = new XML();

            ArtificialNeuralNetwork ann = xml.LoadNetwork(@"C:\Users\Bruker\Desktop\BestNet.xml", dataSet) as ArtificialNeuralNetwork;

            //Apply the desired training/test data set split ratios.
            ann.SplitDataSetIntoTrainAndTestSets(0.7);

            //Initiate network training
            //ann.TrainNetwork();

            var crossValidationStrategyFactory = new CrossValidationStrategyFactory();
            NetworkEvaluator evaluator         = new NetworkEvaluator(ann);
            CrossValidator   crossValidator    = new CrossValidator(ann, evaluator, crossValidationStrategyFactory);

            //Cross-validate the fitted model
            //crossValidator.KFold(10, 0.007);

            //Evaluate the fitted model on the test set
            evaluator.EvaluateNetwork(0.007);


            //--Optional--//

            //Serialize and save the fitted model

            //XML xml = new XML();
            //xml.SaveNetwork(dataFilePath, ann);

            //Extract model information

            //ann.SaveListOfErrors();

            //ann.GetApproximatedFunction(ann.SavePath + "/Function.txt");

            Console.ReadLine();
        }