public void LoadData_Check()
        {
            // Arrange
            ANeuralNetwork        ANN            = new ANeuralNetwork();
            List <List <double> > ExpectedOutput = new List <List <double> > {
                new List <double> {
                    1, 2, 3
                }, new List <double> {
                    4, 5, 6
                }
            };

            // Act
            List <List <double> > Output = ANN.Load_Data("TestDataInput.dat");

            // Assert
            bool ItemsMatch = true;

            for (int row = 0; row < Output.Count; row++)
            {
                for (int column = 0; column < Output[row].Count; column++)
                {
                    Console.Write(Output[row][column] + " ");
                    if (ExpectedOutput[row][column] != Output[row][column])
                    {
                        ItemsMatch = false;
                    }
                }
                Console.WriteLine();
            }
            Assert.IsTrue(ItemsMatch);
        }
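        // A minimal stand-in for what Load_Data appears to do in the test above (an
        // assumption -- the real ANeuralNetwork.Load_Data is not shown in this excerpt):
        // read the .dat file line by line and parse each line as whitespace-separated
        // doubles, so a file containing the lines "1 2 3" and "4 5 6" would reproduce
        // ExpectedOutput. Requires System.IO and System.Linq.
        static List <List <double> > LoadDataSketch(string path)
        {
            var rows = new List <List <double> >();

            foreach (var line in File.ReadLines(path))
            {
                if (string.IsNullOrWhiteSpace(line))
                {
                    continue;                                   // skip blank lines
                }
                rows.Add(line.Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries)
                             .Select(double.Parse)
                             .ToList());
            }
            return rows;
        }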
        public void CheckOne()
        {
            ANeuralNetwork network = new ANeuralNetwork();

            network.Load("..\\..\\..\\..\\savedData.txt");
            var sampleDirs  = Directory.GetDirectories("..\\..\\..\\..\\Data\\TestData\\").ToList();
            int fCount      = Directory.GetFiles("..\\..\\..\\..\\Data\\GroundData\\", "*.png", SearchOption.AllDirectories).Length;
            var poliManager = new PolynomialManager();

            poliManager.InitBasis(15, 100);
            double count  = 0;
            Random random = new Random();   // single RNG instance, reused across all sample folders

            foreach (var dir in sampleDirs)
            {
                var files = Directory.GetFiles(dir, "*.png", SearchOption.TopDirectoryOnly).ToList();
                var index = random.Next(files.Count);                               // pick one random image per folder
                var file  = files[index];
                var value = Convert.ToInt32(File.ReadAllText(dir + "\\value.txt")); // expected label stored next to the images (unused in this visual spot-check)

                ++count;
                ComplexMoments tmpMoments;
                ProcessOneImage(file, poliManager, 100, out tmpMoments);
                CvInvoke.Imshow(dir, tmpMoments.Real);
                var tmpInput = tmpMoments.ToListOfDouble();

                var output         = network.Predict(tmpInput);
                var predictedValue = Convert.ToInt32(output.IndexOf(output.Max()));

                Console.WriteLine(predictedValue);
                CvInvoke.WaitKey();
            }
        }
        public void TrainMiniBatch_ConvergenceTest_ValuesNOT01()
        {
            // Arrange
            ANeuralNetwork ANN = new ANeuralNetwork();

            ANN.Load_Network("Test.net");
            List <List <double> > InputData = new List <List <double> > {
                new List <double> {
                    10, 10
                }
            };
            List <List <double> > OutputData = new List <List <double> > {
                new List <double> {
                    10
                }
            };

            // Act
            ANN.Train_MiniBatch(InputData, OutputData, InputData.Count, "MiniBatch_Convergence.dat");
            List <double> Result = ANN.Get_Network_Output(InputData[0]);

            // Assert
            Console.Write("Output = " + Result[0]);
            Assert.AreNotEqual(OutputData[0][0], Math.Round(Result[0], 1));
        }
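        // A likely reading of the test above (an assumption -- the activation used by the
        // network stored in "Test.net" is not shown in this excerpt): a sigmoid output is
        // bounded to (0, 1), so a target value of 10 can never be reached and the assertion
        // expects the training NOT to converge. A minimal sketch of that bound:
        static void SigmoidBoundSketch()
        {
            foreach (var x in new[] { -100.0, 0.0, 10.0, 100.0 })
            {
                double sigmoid = 1.0 / (1.0 + Math.Exp(-x));   // always strictly between 0 and 1
                Console.WriteLine($"sigmoid({x}) = {sigmoid}");
            }
        }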
        public void SplitData_SampleSizeLargerThanDataSize()
        {
            // Arrange
            ANeuralNetwork        ANN      = new ANeuralNetwork();
            List <List <double> > TestData = new List <List <double> > {
                new List <double> {
                    1, 2
                }, new List <double> {
                    3, 4
                }, new List <double> {
                    5, 6
                }, new List <double> {
                    7, 8
                }
            };
            Exception expectedException = null;

            // Act
            try
            {
                ANN.SplitDataSet(TestData, 5);
            }
            catch (Exception ex)
            {
                expectedException = ex;
            }

            // Assert
            Assert.IsNotNull(expectedException);
        }
        void CreatePopulation(int count)
        {
            List <uint> Configurate = new List <uint>
            {
                2,
                2,
                1
            };

            Nr      = new List <NeuralIndividual>();
            inputs  = new List <List <double> >();
            outputs = new List <List <double> >();
            for (int i = 0; i < count; i++)
            {
                inputs.Clear();
                outputs.Clear();
                var tmpNr = new ANeuralNetwork(Configurate, ActivationType.PositiveSygmoid, 1);
                tmpNr.LoadData("..\\..\\..\\..\\PerceptronData.txt", inputs, outputs);
                tmpNr.RandomInit();
                //var tmpError = tmpNr.BackPropTraining(inputs, outputs, 1, 0.0001, 1, false); // train the network with back-propagation (left disabled here)
                tmpNr.IsTrained = true;
                var tm = CheckAccuracy(tmpNr);
                NeuralIndividual tmpNi = new NeuralIndividual(tmpNr, tm);
                Nr.Add(tmpNi);
            }
        }
        public void TrainMiniBatch_XORGateTest_SingleBatch()
        {
            // Arrange
            ANeuralNetwork ANN = new ANeuralNetwork();

            ANN.Create_Network(new List <int> {
                2, 3, 1
            });
            List <List <double> > InputData = new List <List <double> > {
                new List <double> {
                    1, 1
                }, new List <double> {
                    0, 1
                }, new List <double> {
                    1, 0
                }, new List <double> {
                    0, 0
                },
            };
            List <List <double> > OutputData = new List <List <double> > {
                new List <double> {
                    0
                }, new List <double> {
                    1
                }, new List <double> {
                    1
                }, new List <double> {
                    0
                }
            };
            List <List <double> > ValidationData = new List <List <double> > {
                new List <double> {
                    1, 1
                }, new List <double> {
                    0, 1
                }, new List <double> {
                    1, 0
                }, new List <double> {
                    0, 0
                },
            };
            List <double> ExpectedOutput = new List <double> {
                0, 1, 1, 0
            };

            // Act
            ANN.Train_MiniBatch(InputData, OutputData, InputData.Count, "XOR_SinleBatch.dat");
            List <double> Results = new List <double> {
            };

            for (int i = 0; i < 4; i++)
            {
                List <double> result = ANN.Get_Network_Output(ValidationData[i]);
                Results.Add(Math.Round(result[0], 0));
                Console.Write("Output = " + Results[i] + " ");
            }

            // Assert
            CollectionAssert.AreEqual(ExpectedOutput, Results);
        }
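        // The same XOR workflow outside of a unit test, as a sketch (assumptions:
        // Create_Network, Train_MiniBatch and Get_Network_Output behave exactly as used in
        // the tests above, and "XOR_Demo.dat" is an arbitrary output file name).
        static void XorDemoSketch()
        {
            ANeuralNetwork xorNet = new ANeuralNetwork();

            xorNet.Create_Network(new List <int> {
                2, 3, 1
            });
            List <List <double> > inputs = new List <List <double> > {
                new List <double> { 0, 0 }, new List <double> { 0, 1 },
                new List <double> { 1, 0 }, new List <double> { 1, 1 }
            };
            List <List <double> > targets = new List <List <double> > {
                new List <double> { 0 }, new List <double> { 1 },
                new List <double> { 1 }, new List <double> { 0 }
            };

            // a single batch containing the whole data set, as in TrainMiniBatch_XORGateTest_SingleBatch
            xorNet.Train_MiniBatch(inputs, targets, inputs.Count, "XOR_Demo.dat");

            for (int i = 0; i < inputs.Count; i++)
            {
                double prediction = Math.Round(xorNet.Get_Network_Output(inputs[i])[0]);
                Console.WriteLine(inputs[i][0] + " XOR " + inputs[i][1] + " = " + prediction);
            }
        }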
        string Recognize(ComplexMoments moments)
        {
            ANeuralNetwork network = new ANeuralNetwork();
            var            output  = network.Predict(moments.ToListOfDouble());

            return(output.IndexOf(output.Max()).ToString());
        }
        public void Train(double accuracy, int populationCount)
        {
            Stopwatch stopwatch = new Stopwatch();
            double    curError  = 1;

            CreatePopulation(populationCount);
            int iter = 0;

            while (curError > accuracy)
            {
                stopwatch.Start();
                CrossPopulation();
                curError = CheckAccuracy();
                if (iter % 100 == 0)
                {
                    stopwatch.Stop();
                    var ts = stopwatch.Elapsed;
                    stopwatch.Reset();
                    Console.WriteLine("Iteration: " + iter + "\tError: " + curError + "\tTime: " + ts.TotalSeconds);
                }

                iter++;
            }

            result = Nr[0].network;
        }
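        // A minimal usage sketch for the genetic trainer above (assumptions: the members
        // Train, CreatePopulation, CrossPopulation, Nr and result belong to one enclosing
        // class, here given the hypothetical name GeneticTrainer, and the trained network
        // is exposed through the result field).
        static void GeneticTrainingSketch()
        {
            var trainer = new GeneticTrainer();

            trainer.Train(accuracy: 0.05, populationCount: 50);         // evolve until the RMSE drops below 0.05
            var output = trainer.result.Predict(new List <double> { 1, 0 });
            Console.WriteLine(output[0]);
        }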
        public void SplitData()
        {
            // Arrange
            ANeuralNetwork        ANN      = new ANeuralNetwork();
            List <List <double> > TestData = new List <List <double> > {
                new List <double> {
                    1, 2, 3, 4
                }, new List <double> {
                    5, 6, 7, 8
                }
            };
            List <List <List <double> > > ExpectedOutput = new List <List <List <double> > > {
                new List <List <double> > {
                    new List <double> {
                        1, 2
                    }
                }, new List <List <double> > {
                    new List <double> {
                        3, 4
                    }
                }, new List <List <double> > {
                    new List <double> {
                        5, 6
                    }
                }, new List <List <double> > {
                    new List <double> {
                        7, 8
                    }
                }
            };

            // Act
            ANN.Create_Network(new List <int> {
                2, 1, 2
            });
            List <List <List <double> > > Output = ANN.SplitDataSet(TestData, 1);

            // Assert
            bool ItemsMatch = true;

            for (int ItemInOutput = 0; ItemInOutput < Output.Count; ItemInOutput++)
            {
                for (int row = 0; row < Output[ItemInOutput].Count; row++)
                {
                    for (int column = 0; column < Output[ItemInOutput][row].Count; column++)
                    {
                        Console.Write(Output[ItemInOutput][row][column] + " ");
                        if (ExpectedOutput[ItemInOutput][row][column] != Output[ItemInOutput][row][column])
                        {
                            ItemsMatch = false;
                        }
                    }
                    Console.WriteLine();
                }
                Console.WriteLine();
            }
            Assert.IsTrue(ItemsMatch);
        }
        double CheckAccuracy(ANeuralNetwork ann)
        {
            double error = 0;

            for (int i = 0; i < inputs.Count; i++)
            {
                var predicted = ann.Predict(inputs[i]);              // predict once per sample
                for (int j = 0; j < outputs[i].Count; j++)
                {
                    var diff = outputs[i][j] - predicted[j];
                    error += diff * diff;                            // accumulate the squared error
                }
            }

            return(Math.Sqrt(error / inputs.Count));                 // root of the mean per-sample squared error
        }
        public bool Train(List <uint> layers, SortedDictionary <string, List <ComplexMoments> > moments, int maxIters = 100000, double eps = 0.1, double speed = 0.1)
        {
            ANeuralNetwork network = new ANeuralNetwork(layers, ANeuralNetwork.ActivationType.BipolarSygmoid, 1);
            var            inputs  = new List <List <double> >();
            var            outputs = new List <List <double> >();

            var max = int.MinValue;

            foreach (var value in moments)
            {
                if (value.Value.Count > max)
                {
                    max = value.Value.Count;
                }
            }

            int iter    = 0;
            int counter = 0;

            while (iter < max && counter < layers[^1])
            {
                for (var i = 0; i < moments.Keys.Count; ++i)
                {
                    var output = new List <double>();
                    for (var j = 0; j < moments.Keys.Count; j++)
                    {
                        output.Add(j == i ? 1 : 0);
                    }

                    counter++;
                    if (moments[i.ToString()].Count - 1 <= iter)
                    {
                        continue;
                    }
                    var input = moments[i.ToString()][iter].ToListOfDouble();
                    inputs.Add(input);
                    outputs.Add(output);
                    counter--;
                    iter++;
                }
            }

            Console.WriteLine("Данные на входе, начинаем обучение");

            network.BackPropTraining(inputs, outputs, maxIters, eps, speed, true, 1);
            network.Save("..\\..\\..\\..\\savedData.txt");
            return(true);
        }
        public void GetNetworkOutput_CheckOutputCorrect()
        {
            // Arrange
            ANeuralNetwork ANN = new ANeuralNetwork();

            ANN.Load_Network("Test.net");
            List <double> InputData = new List <double> {
                0.1, 0.9
            };
            double DesiredOutput = 0.60761339;

            // Act
            List <double> Output = ANN.Get_Network_Output(InputData);

            // Assert
            Assert.AreEqual(DesiredOutput, Math.Round(Output[0], 8));
        }
        static void Main(string[] args)
        {
            ANeuralNetwork tmp = new ANeuralNetwork();
            tmp.Load("..\\..\\..\\..\\PerceptronSavedData.txt");
            var line = Console.ReadLine();
            while (line != null && line != "q")           // read lines of space-separated inputs until "q" or end of input
            {
                var tmpInput = line.Split().Select(double.Parse).ToList();
                var tmpOutput = tmp.Predict(tmpInput);
                foreach (var element in tmpOutput)
                {
                    Console.WriteLine(element);
                }

                line = Console.ReadLine();
            }
        }
        static void BackPropagation()
        {
            List <uint> Configurate = new List <uint>(); // Network configuration: four layers in total

            Configurate.Add(2);                          // input layer with two neurons
            Configurate.Add(5);                          // first hidden layer with five neurons
            Configurate.Add(3);                          // second hidden layer with three neurons
            Configurate.Add(1);                          // output layer with one neuron
            ANeuralNetwork        NR      = new ANeuralNetwork(Configurate, ANeuralNetwork.ActivationType.PositiveSygmoid, 1);
            List <List <double> > inputs  = new List <List <double> >();
            List <List <double> > outputs = new List <List <double> >();        // empty containers for the input and output data

            NR.LoadData("..\\..\\..\\..\\PerceptronData.txt", inputs, outputs); // load the training data from file
            NR.RandomInit();                                                    // initialize the network weights randomly
            NR.BackPropTraining(inputs, outputs, 3000000, 0.01, 1, true);       // train with back-propagation; progress is printed every 100 iterations
            Console.WriteLine(NR.GetType());
            NR.Save("..\\..\\..\\..\\PerceptronSavedData.txt");                 // save the trained network to a file
        }
        public void LoadInputFile_InvaildNetworkDescription()
        {
            // Arrange
            string         Filename          = "Test_InvalidNetworkDescription.net";
            ANeuralNetwork ANN               = new ANeuralNetwork();
            Exception      expectedException = null;

            // Act
            try
            {
                ANN.Load_Network(Filename);
            }
            catch (Exception ex)
            {
                expectedException = ex;
            }

            // Assert
            Assert.IsNotNull(expectedException);
        }
        public void LoadInputFile_FileNotExists()
        {
            // Arrange
            string         Filename          = "DoesNotExist.net";
            ANeuralNetwork ANN               = new ANeuralNetwork();
            Exception      expectedException = null;

            // Act
            try
            {
                ANN.Load_Network(Filename);
            }
            catch (Exception ex)
            {
                expectedException = ex;
            }

            // Assert
            Assert.IsNotNull(expectedException);
        }
        public void LoadData_FileExists()
        {
            // Arrange
            string         Filename          = "TestDataInput.dat";
            ANeuralNetwork ANN               = new ANeuralNetwork();
            Exception      expectedException = null;

            // Act
            try
            {
                ANN.Load_Data(Filename);
            }
            catch (Exception ex)
            {
                expectedException = ex;
            }

            // Assert
            Assert.IsNull(expectedException);
        }
        public void Check()
        {
            ANeuralNetwork network = new ANeuralNetwork();

            network.Load("..\\..\\..\\..\\savedData.txt");
            var    sampleDirs  = Directory.GetDirectories("..\\..\\..\\..\\Data\\TestData\\").ToList();
            int    fCount      = Directory.GetFiles("..\\..\\..\\..\\Data\\GroundData\\", "*.png", SearchOption.AllDirectories).Length;
            double precision   = 0;
            var    poliManager = new PolynomialManager();

            poliManager.InitBasis(15, 100);
            double count     = 0;
            double trueCount = 0;

            foreach (var dir in sampleDirs)
            {
                var files = Directory.GetFiles(dir, "*.png", SearchOption.TopDirectoryOnly).ToList();
                var value = Convert.ToInt32(File.ReadAllText(dir + "\\value.txt")); // expected class label stored next to the images

                foreach (var file in files)
                {
                    ++count;
                    ComplexMoments tmpMoments;
                    ProcessOneImage(file, poliManager, 100, out tmpMoments);
                    var tmpInput = tmpMoments.ToListOfDouble();

                    var output         = network.Predict(tmpInput);
                    var predictedValue = Convert.ToInt32(output.IndexOf(output.Max()));
                    if (predictedValue == value)
                    {
                        //++trueCount;
                        precision += Convert.ToDouble(100 / (double)fCount);
                    }
                }

                Console.WriteLine("Точность " + precision + "%");
                //Console.WriteLine("Точность " + trueCount/count + "%");
            }
        }
        public void LoadInputFile_CheckLoadedWeights()
        {
            // Arrange
            string         Filename        = "Test.net";
            ANeuralNetwork ANN             = new ANeuralNetwork();
            List <double>  ExpectedWeights = new List <double> {
                0.1, 0.2, 0.1
            };

            // Act
            ANN.Load_Network(Filename);
            List <double> Weights = new List <double> {
            };

            for (int Weight = 0; Weight < ANN.NeuralNetwork[0][0].Weights.Count; Weight++)
            {
                Weights.Add(ANN.NeuralNetwork[0][0].Weights[Weight]);
            }

            // Assert
            CollectionAssert.AreEqual(ExpectedWeights, Weights);
        }
        public void SaveNetwork()
        {
            // Arrange
            ANeuralNetwork ANN = new ANeuralNetwork();

            ANN.Create_Network(new List <int> {
                2, 3, 1
            });
            Exception expectedException = null;

            // Act
            try
            {
                ANN.Save_Network("TestSave.net");
            }
            catch (Exception ex)
            {
                expectedException = ex;
            }

            // Assert
            Assert.IsNull(expectedException);
        }
        public NeuralIndividual(ANeuralNetwork tmpNr, double tmpError) : this()
        {
            network = tmpNr;
            error   = tmpError;
        }
        void CrossPopulation()
        {
            List <uint> Configurate = new List <uint>
            {
                2,
                2,
                1
            };
            Random rnd = new Random();                  // single RNG instance, reused for every random draw below

            int        tmpCount  = Nr.Count;
            List <int> usedIndex = new List <int>();

            for (int i = 0; i < tmpCount; i++)
            {
                // pick a parent index that has not been used yet
                int rand = rnd.Next(tmpCount);
                while (usedIndex.Contains(rand))
                {
                    rand = rnd.Next(tmpCount);
                }
                usedIndex.Add(rand);

                // each offspring needs its own network instance; reusing a single instance
                // would make every NeuralIndividual share the same weight matrices
                ANeuralNetwork ann = new ANeuralNetwork(Configurate, ActivationType.PositiveSygmoid, 1);
                ann.RandomInit();

                // the second parent is the best individual (Nr[0]) when rand is the last index,
                // otherwise the individual right after rand
                var partner = (rand == tmpCount - 1) ? Nr[0] : Nr[rand + 1];

                for (var layerIdx = 0; layerIdx < Nr[i].network.Weights.Count; layerIdx++)
                {
                    for (var fromIdx = 0; fromIdx < Nr[i].network.Weights[layerIdx].Count; fromIdx++)
                    {
                        for (var toIdx = 0; toIdx < Nr[i].network.Weights[layerIdx][fromIdx].Count; toIdx++)
                        {
                            // uniform crossover: ~95% of the weights come from Nr[rand], ~5% from the partner
                            if (rnd.Next(1000) >= 50)
                            {
                                ann.Weights[layerIdx][fromIdx][toIdx] = Nr[rand].network.Weights[layerIdx][fromIdx][toIdx];
                            }
                            else
                            {
                                ann.Weights[layerIdx][fromIdx][toIdx] = partner.network.Weights[layerIdx][fromIdx][toIdx];
                            }
                        }
                    }
                }

                ann.IsTrained = true;
                var err = CheckAccuracy(ann);
                Nr.Add(new NeuralIndividual(ann, err));
            }

            // keep the enlarged population sorted by fitness (ascending error), then trim it below
            Nr.Sort((a, b) => a.error.CompareTo(b.error));
            Nr.RemoveRange(tmpCount, tmpCount);
        }
        public void IrisClasificationTest()
        {
            #region Training Data

            //Encoded
            // Iris setosa = 1 0 0
            // Iris versicolor = 0 1 0
            // Iris virginica = 0 0 1

            string[] rawData = new[]
            {
                "setosa",
                "versicolor",
                "virginica"
            };

            double[][] encodedData = new double[150][];

            encodedData[0]   = new double[] { 5.1, 3.5, 1.4, 0.2, 0, 0, 1 };
            encodedData[1]   = new double[] { 4.9, 3.0, 1.4, 0.2, 0, 0, 1 };
            encodedData[2]   = new double[] { 4.7, 3.2, 1.3, 0.2, 0, 0, 1 };
            encodedData[3]   = new double[] { 4.6, 3.1, 1.5, 0.2, 0, 0, 1 };
            encodedData[4]   = new double[] { 5.0, 3.6, 1.4, 0.2, 0, 0, 1 };
            encodedData[5]   = new double[] { 5.4, 3.9, 1.7, 0.4, 0, 0, 1 };
            encodedData[6]   = new double[] { 4.6, 3.4, 1.4, 0.3, 0, 0, 1 };
            encodedData[7]   = new double[] { 5.0, 3.4, 1.5, 0.2, 0, 0, 1 };
            encodedData[8]   = new double[] { 4.4, 2.9, 1.4, 0.2, 0, 0, 1 };
            encodedData[9]   = new double[] { 4.9, 3.1, 1.5, 0.1, 0, 0, 1 };
            encodedData[10]  = new double[] { 5.4, 3.7, 1.5, 0.2, 0, 0, 1 };
            encodedData[11]  = new double[] { 4.8, 3.4, 1.6, 0.2, 0, 0, 1 };
            encodedData[12]  = new double[] { 4.8, 3.0, 1.4, 0.1, 0, 0, 1 };
            encodedData[13]  = new double[] { 4.3, 3.0, 1.1, 0.1, 0, 0, 1 };
            encodedData[14]  = new double[] { 5.8, 4.0, 1.2, 0.2, 0, 0, 1 };
            encodedData[15]  = new double[] { 5.7, 4.4, 1.5, 0.4, 0, 0, 1 };
            encodedData[16]  = new double[] { 5.4, 3.9, 1.3, 0.4, 0, 0, 1 };
            encodedData[17]  = new double[] { 5.1, 3.5, 1.4, 0.3, 0, 0, 1 };
            encodedData[18]  = new double[] { 5.7, 3.8, 1.7, 0.3, 0, 0, 1 };
            encodedData[19]  = new double[] { 5.1, 3.8, 1.5, 0.3, 0, 0, 1 };
            encodedData[20]  = new double[] { 5.4, 3.4, 1.7, 0.2, 0, 0, 1 };
            encodedData[21]  = new double[] { 5.1, 3.7, 1.5, 0.4, 0, 0, 1 };
            encodedData[22]  = new double[] { 4.6, 3.6, 1.0, 0.2, 0, 0, 1 };
            encodedData[23]  = new double[] { 5.1, 3.3, 1.7, 0.5, 0, 0, 1 };
            encodedData[24]  = new double[] { 4.8, 3.4, 1.9, 0.2, 0, 0, 1 };
            encodedData[25]  = new double[] { 5.0, 3.0, 1.6, 0.2, 0, 0, 1 };
            encodedData[26]  = new double[] { 5.0, 3.4, 1.6, 0.4, 0, 0, 1 };
            encodedData[27]  = new double[] { 5.2, 3.5, 1.5, 0.2, 0, 0, 1 };
            encodedData[28]  = new double[] { 5.2, 3.4, 1.4, 0.2, 0, 0, 1 };
            encodedData[29]  = new double[] { 4.7, 3.2, 1.6, 0.2, 0, 0, 1 };
            encodedData[30]  = new double[] { 4.8, 3.1, 1.6, 0.2, 0, 0, 1 };
            encodedData[31]  = new double[] { 5.4, 3.4, 1.5, 0.4, 0, 0, 1 };
            encodedData[32]  = new double[] { 5.2, 4.1, 1.5, 0.1, 0, 0, 1 };
            encodedData[33]  = new double[] { 5.5, 4.2, 1.4, 0.2, 0, 0, 1 };
            encodedData[34]  = new double[] { 4.9, 3.1, 1.5, 0.1, 0, 0, 1 };
            encodedData[35]  = new double[] { 5.0, 3.2, 1.2, 0.2, 0, 0, 1 };
            encodedData[36]  = new double[] { 5.5, 3.5, 1.3, 0.2, 0, 0, 1 };
            encodedData[37]  = new double[] { 4.9, 3.1, 1.5, 0.1, 0, 0, 1 };
            encodedData[38]  = new double[] { 4.4, 3.0, 1.3, 0.2, 0, 0, 1 };
            encodedData[39]  = new double[] { 5.1, 3.4, 1.5, 0.2, 0, 0, 1 };
            encodedData[40]  = new double[] { 5.0, 3.5, 1.3, 0.3, 0, 0, 1 };
            encodedData[41]  = new double[] { 4.5, 2.3, 1.3, 0.3, 0, 0, 1 };
            encodedData[42]  = new double[] { 4.4, 3.2, 1.3, 0.2, 0, 0, 1 };
            encodedData[43]  = new double[] { 5.0, 3.5, 1.6, 0.6, 0, 0, 1 };
            encodedData[44]  = new double[] { 5.1, 3.8, 1.9, 0.4, 0, 0, 1 };
            encodedData[45]  = new double[] { 4.8, 3.0, 1.4, 0.3, 0, 0, 1 };
            encodedData[46]  = new double[] { 5.1, 3.8, 1.6, 0.2, 0, 0, 1 };
            encodedData[47]  = new double[] { 4.6, 3.2, 1.4, 0.2, 0, 0, 1 };
            encodedData[48]  = new double[] { 5.3, 3.7, 1.5, 0.2, 0, 0, 1 };
            encodedData[49]  = new double[] { 5.0, 3.3, 1.4, 0.2, 0, 0, 1 };
            encodedData[50]  = new double[] { 7.0, 3.2, 4.7, 1.4, 0, 1, 0 };
            encodedData[51]  = new double[] { 6.4, 3.2, 4.5, 1.5, 0, 1, 0 };
            encodedData[52]  = new double[] { 6.9, 3.1, 4.9, 1.5, 0, 1, 0 };
            encodedData[53]  = new double[] { 5.5, 2.3, 4.0, 1.3, 0, 1, 0 };
            encodedData[54]  = new double[] { 6.5, 2.8, 4.6, 1.5, 0, 1, 0 };
            encodedData[55]  = new double[] { 5.7, 2.8, 4.5, 1.3, 0, 1, 0 };
            encodedData[56]  = new double[] { 6.3, 3.3, 4.7, 1.6, 0, 1, 0 };
            encodedData[57]  = new double[] { 4.9, 2.4, 3.3, 1.0, 0, 1, 0 };
            encodedData[58]  = new double[] { 6.6, 2.9, 4.6, 1.3, 0, 1, 0 };
            encodedData[59]  = new double[] { 5.2, 2.7, 3.9, 1.4, 0, 1, 0 };
            encodedData[60]  = new double[] { 5.0, 2.0, 3.5, 1.0, 0, 1, 0 };
            encodedData[61]  = new double[] { 5.9, 3.0, 4.2, 1.5, 0, 1, 0 };
            encodedData[62]  = new double[] { 6.0, 2.2, 4.0, 1.0, 0, 1, 0 };
            encodedData[63]  = new double[] { 6.1, 2.9, 4.7, 1.4, 0, 1, 0 };
            encodedData[64]  = new double[] { 5.6, 2.9, 3.6, 1.3, 0, 1, 0 };
            encodedData[65]  = new double[] { 6.7, 3.1, 4.4, 1.4, 0, 1, 0 };
            encodedData[66]  = new double[] { 5.6, 3.0, 4.5, 1.5, 0, 1, 0 };
            encodedData[67]  = new double[] { 5.8, 2.7, 4.1, 1.0, 0, 1, 0 };
            encodedData[68]  = new double[] { 6.2, 2.2, 4.5, 1.5, 0, 1, 0 };
            encodedData[69]  = new double[] { 5.6, 2.5, 3.9, 1.1, 0, 1, 0 };
            encodedData[70]  = new double[] { 5.9, 3.2, 4.8, 1.8, 0, 1, 0 };
            encodedData[71]  = new double[] { 6.1, 2.8, 4.0, 1.3, 0, 1, 0 };
            encodedData[72]  = new double[] { 6.3, 2.5, 4.9, 1.5, 0, 1, 0 };
            encodedData[73]  = new double[] { 6.1, 2.8, 4.7, 1.2, 0, 1, 0 };
            encodedData[74]  = new double[] { 6.4, 2.9, 4.3, 1.3, 0, 1, 0 };
            encodedData[75]  = new double[] { 6.6, 3.0, 4.4, 1.4, 0, 1, 0 };
            encodedData[76]  = new double[] { 6.8, 2.8, 4.8, 1.4, 0, 1, 0 };
            encodedData[77]  = new double[] { 6.7, 3.0, 5.0, 1.7, 0, 1, 0 };
            encodedData[78]  = new double[] { 6.0, 2.9, 4.5, 1.5, 0, 1, 0 };
            encodedData[79]  = new double[] { 5.7, 2.6, 3.5, 1.0, 0, 1, 0 };
            encodedData[80]  = new double[] { 5.5, 2.4, 3.8, 1.1, 0, 1, 0 };
            encodedData[81]  = new double[] { 5.5, 2.4, 3.7, 1.0, 0, 1, 0 };
            encodedData[82]  = new double[] { 5.8, 2.7, 3.9, 1.2, 0, 1, 0 };
            encodedData[83]  = new double[] { 6.0, 2.7, 5.1, 1.6, 0, 1, 0 };
            encodedData[84]  = new double[] { 5.4, 3.0, 4.5, 1.5, 0, 1, 0 };
            encodedData[85]  = new double[] { 6.0, 3.4, 4.5, 1.6, 0, 1, 0 };
            encodedData[86]  = new double[] { 6.7, 3.1, 4.7, 1.5, 0, 1, 0 };
            encodedData[87]  = new double[] { 6.3, 2.3, 4.4, 1.3, 0, 1, 0 };
            encodedData[88]  = new double[] { 5.6, 3.0, 4.1, 1.3, 0, 1, 0 };
            encodedData[89]  = new double[] { 5.5, 2.5, 4.0, 1.3, 0, 1, 0 };
            encodedData[90]  = new double[] { 5.5, 2.6, 4.4, 1.2, 0, 1, 0 };
            encodedData[91]  = new double[] { 6.1, 3.0, 4.6, 1.4, 0, 1, 0 };
            encodedData[92]  = new double[] { 5.8, 2.6, 4.0, 1.2, 0, 1, 0 };
            encodedData[93]  = new double[] { 5.0, 2.3, 3.3, 1.0, 0, 1, 0 };
            encodedData[94]  = new double[] { 5.6, 2.7, 4.2, 1.3, 0, 1, 0 };
            encodedData[95]  = new double[] { 5.7, 3.0, 4.2, 1.2, 0, 1, 0 };
            encodedData[96]  = new double[] { 5.7, 2.9, 4.2, 1.3, 0, 1, 0 };
            encodedData[97]  = new double[] { 6.2, 2.9, 4.3, 1.3, 0, 1, 0 };
            encodedData[98]  = new double[] { 5.1, 2.5, 3.0, 1.1, 0, 1, 0 };
            encodedData[99]  = new double[] { 5.7, 2.8, 4.1, 1.3, 0, 1, 0 };
            encodedData[100] = new double[] { 6.3, 3.3, 6.0, 2.5, 1, 0, 0 };
            encodedData[101] = new double[] { 5.8, 2.7, 5.1, 1.9, 1, 0, 0 };
            encodedData[102] = new double[] { 7.1, 3.0, 5.9, 2.1, 1, 0, 0 };
            encodedData[103] = new double[] { 6.3, 2.9, 5.6, 1.8, 1, 0, 0 };
            encodedData[104] = new double[] { 6.5, 3.0, 5.8, 2.2, 1, 0, 0 };
            encodedData[105] = new double[] { 7.6, 3.0, 6.6, 2.1, 1, 0, 0 };
            encodedData[106] = new double[] { 4.9, 2.5, 4.5, 1.7, 1, 0, 0 };
            encodedData[107] = new double[] { 7.3, 2.9, 6.3, 1.8, 1, 0, 0 };
            encodedData[108] = new double[] { 6.7, 2.5, 5.8, 1.8, 1, 0, 0 };
            encodedData[109] = new double[] { 7.2, 3.6, 6.1, 2.5, 1, 0, 0 };
            encodedData[110] = new double[] { 6.5, 3.2, 5.1, 2.0, 1, 0, 0 };
            encodedData[111] = new double[] { 6.4, 2.7, 5.3, 1.9, 1, 0, 0 };
            encodedData[112] = new double[] { 6.8, 3.0, 5.5, 2.1, 1, 0, 0 };
            encodedData[113] = new double[] { 5.7, 2.5, 5.0, 2.0, 1, 0, 0 };
            encodedData[114] = new double[] { 5.8, 2.8, 5.1, 2.4, 1, 0, 0 };
            encodedData[115] = new double[] { 6.4, 3.2, 5.3, 2.3, 1, 0, 0 };
            encodedData[116] = new double[] { 6.5, 3.0, 5.5, 1.8, 1, 0, 0 };
            encodedData[117] = new double[] { 7.7, 3.8, 6.7, 2.2, 1, 0, 0 };
            encodedData[118] = new double[] { 7.7, 2.6, 6.9, 2.3, 1, 0, 0 };
            encodedData[119] = new double[] { 6.0, 2.2, 5.0, 1.5, 1, 0, 0 };
            encodedData[120] = new double[] { 6.9, 3.2, 5.7, 2.3, 1, 0, 0 };
            encodedData[121] = new double[] { 5.6, 2.8, 4.9, 2.0, 1, 0, 0 };
            encodedData[122] = new double[] { 7.7, 2.8, 6.7, 2.0, 1, 0, 0 };
            encodedData[123] = new double[] { 6.3, 2.7, 4.9, 1.8, 1, 0, 0 };
            encodedData[124] = new double[] { 6.7, 3.3, 5.7, 2.1, 1, 0, 0 };
            encodedData[125] = new double[] { 7.2, 3.2, 6.0, 1.8, 1, 0, 0 };
            encodedData[126] = new double[] { 6.2, 2.8, 4.8, 1.8, 1, 0, 0 };
            encodedData[127] = new double[] { 6.1, 3.0, 4.9, 1.8, 1, 0, 0 };
            encodedData[128] = new double[] { 6.4, 2.8, 5.6, 2.1, 1, 0, 0 };
            encodedData[129] = new double[] { 7.2, 3.0, 5.8, 1.6, 1, 0, 0 };
            encodedData[130] = new double[] { 7.4, 2.8, 6.1, 1.9, 1, 0, 0 };
            encodedData[131] = new double[] { 7.9, 3.8, 6.4, 2.0, 1, 0, 0 };
            encodedData[132] = new double[] { 6.4, 2.8, 5.6, 2.2, 1, 0, 0 };
            encodedData[133] = new double[] { 6.3, 2.8, 5.1, 1.5, 1, 0, 0 };
            encodedData[134] = new double[] { 6.1, 2.6, 5.6, 1.4, 1, 0, 0 };
            encodedData[135] = new double[] { 7.7, 3.0, 6.1, 2.3, 1, 0, 0 };
            encodedData[136] = new double[] { 6.3, 3.4, 5.6, 2.4, 1, 0, 0 };
            encodedData[137] = new double[] { 6.4, 3.1, 5.5, 1.8, 1, 0, 0 };
            encodedData[138] = new double[] { 6.0, 3.0, 4.8, 1.8, 1, 0, 0 };
            encodedData[139] = new double[] { 6.9, 3.1, 5.4, 2.1, 1, 0, 0 };
            encodedData[140] = new double[] { 6.7, 3.1, 5.6, 2.4, 1, 0, 0 };
            encodedData[141] = new double[] { 6.9, 3.1, 5.1, 2.3, 1, 0, 0 };
            encodedData[142] = new double[] { 5.8, 2.7, 5.1, 1.9, 1, 0, 0 };
            encodedData[143] = new double[] { 6.8, 3.2, 5.9, 2.3, 1, 0, 0 };
            encodedData[144] = new double[] { 6.7, 3.3, 5.7, 2.5, 1, 0, 0 };
            encodedData[145] = new double[] { 6.7, 3.0, 5.2, 2.3, 1, 0, 0 };
            encodedData[146] = new double[] { 6.3, 2.5, 5.0, 1.9, 1, 0, 0 };
            encodedData[147] = new double[] { 6.5, 3.0, 5.2, 2.0, 1, 0, 0 };
            encodedData[148] = new double[] { 6.2, 3.4, 5.4, 2.3, 1, 0, 0 };
            encodedData[149] = new double[] { 5.9, 3.0, 5.1, 1.8, 1, 0, 0 };

            #endregion

            var networkData = new NeuralNetworkData(rawData, ' ');

            networkData.PrepareTrainingSet(
                encodedData: encodedData,
                trainDataPercentage: 0.80d,
                seed: 72);

            ANeuralNetwork nn = new ANeuralNetwork(
                inputNodes: 4,
                hiddenNodes: 7,
                outputNodes: 3,
                hiddenNodeActivation: Activation.HyperTan,
                outputNodeActivation: Activation.Softmax);

            nn.Train(
                networkData.TrainingData,
                maxEpochs: 1000,
                learnRate: 0.05d,
                momentum: 0.01d,
                meanSquaredErrorThreshold: 0.040d);

            Assert.IsTrue(nn.GetAccuracyFor(networkData.TrainingData) > 0.95d);
            Assert.IsTrue(nn.GetAccuracyFor(networkData.TestData) > 0.95d);
            Assert.AreEqual("virginica", networkData.DecodeOutput(nn.Predict(new double[] { 5.1, 3.5, 1.4, 0.2 })));
            Assert.AreEqual("versicolor", networkData.DecodeOutput(nn.Predict(new double[] { 7.0, 3.2, 4.7, 1.4 })));
            Assert.AreEqual("setosa", networkData.DecodeOutput(nn.Predict(new double[] { 7.4, 2.8, 6.1, 1.9 })));
        }