Example #1
        private static Topography BridgedMultiLayerPerceptron(ref VectorHorizontal lbl)
        {
            try
            {
                var vals = new System.Collections.Generic.List<double>();
                int nl = lbl.Length;
                for (int i = 1; i < nl; i++)
                {
                    int s = lbl.Sum(0, i - 1).ToInt();
                    for (int j = 0; j < lbl.getValue(i); j++)
                    {
                        vals.Add(s + j);
                        double[] tmp = new double[s];
                        for (int ii = 0; ii < s; ii++)
                        {
                            tmp[ii] = ii;
                        }
                        vals.AddRange(tmp);
                    }
                }

                var topo = new Topography(vals.ToArray());
                topo.Type = TopographyType.BMLP;
                return topo;
            }
            catch
            {
                return null;
            }
        }
Example #2
        /// <summary>
        /// Convert VectorHorizontal to List
        /// </summary>
        /// <param name="vh">VectorHorizontal</param>
        /// <returns>List - generic</returns>
        public static List <double> ToList(this VectorHorizontal vh)
        {
            List <double> list = new List <double>();

            list.AddRange(vh.Data[0]);
            return(list);
        }
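A minimal usage sketch (assuming the NBN library types are referenced): the extension simply copies the first row of the vector's backing Data array into a new List<double>.

        VectorHorizontal vh = new VectorHorizontal(3);
        vh[0] = 1; vh[1] = 2; vh[2] = 3;
        List<double> values = vh.ToList();   // values now holds 1, 2, 3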
Example #3
        private static Topography MultiLayerPerceptron(ref VectorHorizontal lbl)
        {
            try
            {
                var vals = new System.Collections.Generic.List<double>();
                int nl = lbl.Length;
                for (int i = 1; i < nl; i++)
                {
                    int lw = 0;
                    for (int a = 0; a < i; a++) lw += (int)lbl[a];

                    for (int j = 0; j < lbl[i]; j++)
                    {
                        vals.Add(lw + j);
                        for (int k = (int)(lw - lbl[i - 1] + 1); k <= lw; k++)
                        {
                            vals.Add(k-1);
                        }
                    }
                }
                var topo = new Topography(vals.ToArray());
                topo.Type = TopographyType.MLP;
                return topo;
            }
            catch
            {
                return null;
            }
        }
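To see what the two generators produce, take layer sizes [2 1 1] (two inputs, one hidden neuron, one output). The bridged version yields the zero-based topography [2 0 1 3 0 1 2], as asserted in the unit test later in this listing; hand-tracing the loops above for the plain MLP gives [2 0 1 3 2] instead, because neuron 3 is fed only by neuron 2 rather than by the inputs as well. A hypothetical check, assuming the library types are referenced (the MLP values are a hand trace, not taken from the source):

        var layers = new VectorHorizontal(3);
        layers[0] = 2; layers[1] = 1; layers[2] = 1;
        Topography bmlp = Topography.Generate(TopographyType.BMLP, layers); // expected: 2 0 1 3 0 1 2 (asserted in the test below)
        Topography mlp  = Topography.Generate(TopographyType.MLP,  layers); // expected: 2 0 1 3 2 (hand-traced)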
Example #4
 /// <summary>
 /// Generate network topography
 /// </summary>
 /// <param name="type">TopographyType - topo type</param>
 /// <param name="lbl">VectorHorizontal</param>
 /// <returns>Topography - generated topography</returns>
 public static Topography Generate(TopographyType type, VectorHorizontal lbl)
 {
     switch (type)
     {
         case TopographyType.BMLP: return BridgedMultiLayerPerceptron(ref lbl);
         case TopographyType.MLP: return MultiLayerPerceptron(ref lbl);
         default: return null;//oops, not implemented
     }
 }//generate
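Both generator helpers swallow exceptions and return null, and the default branch above also returns null for unimplemented types, so the result needs a null check before use (compare the guard in RunFirstVersion later in this listing). A defensive caller-side sketch, assuming the library types are referenced:

    var layerSizes = new VectorHorizontal(3);
    layerSizes[0] = 2; layerSizes[1] = 1; layerSizes[2] = 1;
    Topography topo = Topography.Generate(TopographyType.MLP, layerSizes);
    if (topo == null)
    {
        throw new InvalidOperationException("Network topography was not generated.");
    }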
Example #5
        /// <summary>
        /// Convert MatrixMB to VectorHorizontal
        /// </summary>
        /// <param name="mat">MatrixMB</param>
        /// <returns>VectorHorizontal</returns>
        public static VectorHorizontal ToVectorHorizontal(this MatrixMB mat)
        {
            VectorHorizontal vh = new VectorHorizontal(mat.Cols);

            for (int col = 0; col < mat.Cols; col++)
            {
                vh.Data[0][col] = mat.Data[0][col];
            }
            return(vh);
        }
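Note that only row 0 of the matrix is copied, so any further rows are ignored by the conversion. A small illustration with hypothetical values, assuming the NBN matrix types are referenced:

        var mat = new MatrixMB(2, 2);
        mat[0, 0] = 1; mat[0, 1] = 2;   // row 0: this is what gets copied
        mat[1, 0] = 3; mat[1, 1] = 4;   // row 1: ignored by the conversion
        VectorHorizontal vh = mat.ToVectorHorizontal();   // vh holds [1, 2]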
Example #6
        public void VectorHorizontalTests()
        {
            try
            {
                VectorHorizontal vh = new VectorHorizontal(3);
                vh[0] = 1;
                vh[1] = 2;
                vh[2] = 3;
                Assert.AreEqual(1, vh[0]);
                Assert.AreEqual(2, vh[1]);
                Assert.AreEqual(3, vh[2]);

            }
            catch(Exception ex)
            {
                Assert.Fail(ex.Message);
            }
        }
Example #7
File: NBN.cs Project: DrZeil/nbn-csharp
        /// <summary>
        /// Run NBN training and its test - this is the first version
        /// </summary>
        /// <param name="trials">int - number of learning trials</param>
        /// <returns>LearnResult</returns>
        private LearnResult RunFirstVersion(int trials)
        {
            backupSettings               = new NeuralNetworkSettings();
            backupSettings.MaxError      = settings.MaxError;
            backupSettings.MaxIterations = settings.MaxIterations;
            backupSettings.MU            = settings.MU;
            backupSettings.MUH           = settings.MUH;
            backupSettings.MUL           = settings.MUL;
            backupSettings.Scale         = settings.Scale;

            Trials = trials;
            LearnResult result = new LearnResult();

            result.Filename = Filename;
            if (MatLabCompareDataFolder.Length > 0)
            {
                result.Filename = string.Format("{0}\\{1}.dat", MatLabCompareDataFolder, Path.GetFileNameWithoutExtension(Directory.GetFiles(MatLabCompareDataFolder, "*.dat")[0]));
            }

            if (!loadInputData(Filename))
            {
                updateErrorNBN("Dane nie zostały wczytane.");
                return(result);
            }
            if (OnDebug != null)
            {
                debug("Data loaded from file: " + Filename);
            }

            var tmp = new System.Collections.Generic.List <int>();

            tmp.Add(inputLearn.Cols);

            //setting number of hidden neurons
            for (int i = 0; i < Handle; i++)
            {
                tmp.Add(1);
            }
            tmp.Add(1);

            var vh = new VectorHorizontal(tmp.Count);

            for (int i = 0; i < vh.Length; i++)
            {
                vh[i] = tmp[i];
            }

            switch (NBN_Topography)
            {
            case 0:
            {
                topo = Topography.Generate(TopographyType.BMLP, vh);
            } break;

            case 1:
            {
                topo = Topography.Generate(TopographyType.MLP, vh);
            } break;
            }

            if (topo == null)
            {
                updateErrorNBN("Topologia sieci nie została utworzona.");
                return(result);
            }

            result.Topo = topo.Data[0];
            if (OnDebug != null)
            {
                debug(topo.ToString());
            }

            info = this.checkInputs(ref inputLearn, ref outputLearn, ref topo, out indexes);//here are set indexes

            result.TopoIndex = indexes.Data[0];
            result.Info      = info;
            if (OnDebug != null)
            {
                debug(indexes.ToString());
                debug(info.ToString());
            }

            Activation act = new Activation(info.nn);

            act.FillWithNumber(NBN_Activation);
            act.setValue(info.nn - 1, 0);
            result.ActivationFunction = act.Data[0];

            Gain gain = new Gain(info.nn);

            gain.FillWithNumber(NBN_Gain);
            result.GainValue = gain.Data[0];

            result.Settings = this.settings;

            for (trial = 0; trial < trials; trial++)
            {
                Weights initialWeights = new Weights(info.nw);
                if (MatLabCompareDataFolder.Length > 0)
                {
                    initialWeights = MatrixMB.Load(string.Format("{0}\\poczatkowe_wagi_proba_{1}.txt", MatLabCompareDataFolder, trial + 1)).ToWeights();
                }
                else
                {
                    initialWeights = Weights.Generate(info.nw);
                }

                if (IsResearchMode)
                {
                    string initialWeightsFile = String.Format("{0}\\{1}{2}_initial_weights.dat", _reasearch_folder, trial, Path.GetFileNameWithoutExtension(result.Filename));
                    initialWeights.Store(initialWeightsFile);
                }

                initialWeights.Name = "Initial";
                if (OnDebug != null)
                {
                    debug(String.Format("\r\nTrial {0} from {1}\r\n", trial + 1, trials));
                    debug(initialWeights.ToString());
                }

                settings               = null;
                settings               = NeuralNetworkSettings.Default();
                settings.MaxError      = backupSettings.MaxError;
                settings.MaxIterations = backupSettings.MaxIterations;
                settings.MU            = backupSettings.MU;
                settings.MUH           = backupSettings.MUH;
                settings.MUL           = backupSettings.MUL;
                settings.Scale         = backupSettings.Scale;

                I = MatrixMB.Eye(info.nw);

                tic();//learn time measure start
                var tr = Train(ref this.settings, ref this.info, ref this.inputLearn, ref this.outputLearn,
                               ref this.topo, initialWeights, ref act, ref gain, ref indexes);

                String LearnExecutionTime = toc(); //learn time measure stop
                LearnTimeList = time.ElapsedTicks; //learn time measure save

                result.Add(tr.weights.Data[0], SSE.ToDoubleArray(), RMSE.ToDoubleArray());

                result.LearnRMSE = (double)RMSE[RMSE.Count];
                LearnRmseList    = LastRMSE;

                if (OnDebug != null)
                {
                    debug(tr.weights.ToString());
                    debug("\r\nLearn execution time: " + LearnExecutionTime + "(hours:minutes:seconds:miliseconds)\r\n");
                    debug("\r\nLearn SSE: " + tr.sse.ToString() + "\r\n");
                    debug("\r\nLearn RMSE: " + result.LearnRMSE.ToString() + "\r\n");
                }

                updateError(result.LearnRMSE);

                NetworkInfo infoTest = info.Copy();
                infoTest.np = inputTest.Rows;

                tic();

                error.CalculateError(ref infoTest, ref inputTest, ref outputTest, ref topo, tr.weights, ref act, ref gain, ref indexes);

                var TestExecutionTime = toc();
                TestTimeList = time.ElapsedTicks;

                result.TestRMSE = Math.Sqrt(error.Error / infoTest.np);
                TestRmseList    = result.TestRMSE;
                result.TestingRmseList.Add(result.TestRMSE);
                if (OnDebug != null)
                {
                    debug("\r\nTest execution time: " + TestExecutionTime + "(hours:minutes:seconds:miliseconds)\r\n");
                    debug("\r\nTest SSE: " + error.Error.ToString() + "\r\n");
                    debug("\r\nTest RMSE: " + result.TestRMSE.ToString() + "\r\n");
                }

                //if (result.LearnRMSE < Threshold) IsTrainOK++;
                if (result.SSE[trial][result.SSE[trial].Length - 1] < Threshold)
                {
                    IsTrainOK++;
                }
            }

            result.LearnRMSE = AverageLearnRMSE;
            result.TestRMSE  = AverageTestRMSE;

            result.setStatisticsData(LearnRMSE, TestRMSE, LearnTime, TestTime, Trials);
            result.SuccessRate = (double)IsTrainOK / Trials;

            if (IsResearchMode)//save research
            {
                try
                {
                    string filename = extractionFolder + "\\research result.pdf";

                    PDFGenerate data = new PDFGenerate();
                    data.Filename      = filename;
                    data.Result        = result;
                    data.ChartFilename = GeneratePlot(result.RMSE, Path.GetFileNameWithoutExtension(result.Filename));
                    HistoryPDF pdf = new HistoryPDF(data.Result, data.ChartFilename, true);
                    pdf.Save(data.Filename);
                }
                catch { }
            }

            return(result);
        }
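To make the layer-vector construction above concrete, here is a short hand trace with hypothetical sizes (assuming the library types are referenced); the resulting VectorHorizontal is exactly what gets passed to Topography.Generate:

        // Hypothetical values, not from the source: assume inputLearn.Cols == 3
        // and Handle == 2. The loop above then builds tmp = [3, 1, 1, 1]:
        // the input width, two single-neuron hidden layers, and one output neuron.
        var tmp = new System.Collections.Generic.List<int> { 3, 1, 1, 1 };
        var vh = new VectorHorizontal(tmp.Count);
        for (int i = 0; i < vh.Length; i++)
        {
            vh[i] = tmp[i];
        }
        var topo = Topography.Generate(TopographyType.BMLP, vh);   // the NBN_Topography == 0 case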
Example #8
        public void Test___Error___Calculation___Test()
        {
            /*
             *
             *
             MATLAB test code
             *
            function obliczanie_bledu()
            inp = [-1 -1;-1 1; 1 -1];
            dout = [1;0;0];
            topo = [3 1 2 4 1 2 3];
            w = [1 1 1 1 1 1 1];
            act = [2 0];
            gain = [1 1];
            param = [3 2 1 7 2];
            iw = [1 4 8];
            format long;
            error = calculate_error(inp,dout,topo,w,act,gain,param,iw);
            fprintf('Obliczanie błędu uczenia sieci\nBłąd wynosi: %.20f\n', error);
            %Otrzymany wynik
            %Obliczanie błędu uczenia sieci
            %Błąd wynosi: 13.83283022280404000000
            end
             *
             *
             */
            Input input = new Input(3, 2);//inp = [-1 -1;-1 1; 1 -1];
            input[0, 0] = -1;
            input[0, 1] = -1;
            input[1, 0] = -1;
            input[1, 1] = 1;
            input[2, 0] = 1;
            input[2, 1] = -1;

            Output output = new Output(3, 1);//dout = [1;0;0];
            output[0, 0] = 1;
            output[1, 0] = 0;
            output[2, 0] = 0;

            NetworkInfo info = new NetworkInfo();//param = [3 2 1 7 2];
            info.ni = 2;
            info.nn = 2;
            info.no = 1;
            info.np = 3;
            info.nw = 7;

            VectorHorizontal vh = new VectorHorizontal(3);
            vh[0, 0] = 2;
            vh[0, 1] = 1;
            vh[0, 2] = 1;

            Topography topo = Topography.Generate(TopographyType.BMLP, vh);//topo = [3 1 2 4 1 2 3];
            //in C#, indices start at 0 rather than 1, so everything here is shifted down by one
            Assert.AreEqual(2, topo[0]);
            Assert.AreEqual(0, topo[1]);
            Assert.AreEqual(1, topo[2]);
            Assert.AreEqual(3, topo[3]);
            Assert.AreEqual(0, topo[4]);
            Assert.AreEqual(1, topo[5]);
            Assert.AreEqual(2, topo[6]);

            Weights weights = new Weights(info.nw);//w = [1 1 1 1 1 1 1];
            weights.FillWithNumber(1);//done

            Activation act = new Activation(2);//act = [2 0];
            act[0] = 2;
            act[1] = 0;

            Gain gain = new Gain(2);//gain = [1 1];
            gain[0] = 1;
            gain[1] = 1;

            Index iw = Index.Find(ref topo);//iw = [1 4 8];
            //same situation: the indices start at 0
            Assert.AreEqual(0, iw[0]);
            Assert.AreEqual(3, iw[1]);
            Assert.AreEqual(7, iw[2]);

            NetworkError ne = new NetworkError();
            var error = ne.CalculateError(ref info, ref input, ref output, ref topo, weights, ref act, ref gain, ref iw);

            double errorFromMatLab = 13.83283022280404000000;

            Console.WriteLine("Testowanie obliczania błędu");
            Console.WriteLine("Użyte dane:");
            Console.WriteLine("\nDane wejściowe:");
            Console.WriteLine(input.MatrixToString());
            Console.WriteLine("\nDane wyjściowe:");
            Console.WriteLine(output.MatrixToString());
            Console.WriteLine("\nWagi;");
            Console.WriteLine(weights.MatrixToString());
            Console.WriteLine("\nTopologia:");
            Console.WriteLine(topo.MatrixToString());
            Console.WriteLine("\nIndeksy topologii:");
            Console.WriteLine(iw.MatrixToString());
            Console.WriteLine("\nFunkcje aktywacji:");
            Console.WriteLine(act.MatrixToString());
            Console.WriteLine("\nWzmocnienia (gains):");
            Console.WriteLine(gain.MatrixToString());
            Console.WriteLine("\nParametry (param):");
            Console.WriteLine(info.ToString());

            Assert.AreEqual(errorFromMatLab, error);
            Console.WriteLine(string.Format("{0} - wynik NBN C#",error));
            Console.WriteLine(string.Format("{0} - wynik NBN w MatLabie",errorFromMatLab));
        }
Example #9
        public void Hessian___Gradient___Calculation___Test()
        {
            int accuracy = 15;
            /*
             function obliczanie_hesjan_gradient()
            clear();
            inp = [-1 -1;-1 1; 1 -1];
            dout = [1;0;0];
            topo = [3 1 2 4 1 2 3];
            ww = [1 1 1 1 1 1 1];
            act = [2 0];
            gain = [1 1];
            param = [3 2 1 7 2];
            iw = [1 4 8];
            format long;

            [gradient,hessian] = Hessian(inp,dout,topo,ww,act,gain,param,iw);
            fprintf('Otrzymany gradient:\n');
            disp(gradient);
            fprintf('\nOtrzymany hesjan:\n');
            disp(hessian);

            % To jest otrzymywane
            % Otrzymany gradient:
            %   -0.839948683228052
            %    2.319597374905329
            %    2.319597374905329
            %   -2.000000000000000
            %    5.523188311911530
            %    5.523188311911531
            %    6.889667569278484
            %
            %
            % Otrzymany hesjan:
            %   Columns 1 through 6
            %
            %    1.058270685684809  -0.705513790456539  -0.705513790456539   2.519846049684157  -1.679897366456105  -1.679897366456105
            %   -0.705513790456539   1.058270685684809   0.352756895228269  -1.679897366456105   2.519846049684157   0.839948683228052
            %   -0.705513790456539   0.352756895228269   1.058270685684809  -1.679897366456105   0.839948683228052   2.519846049684157
            %    2.519846049684157  -1.679897366456105  -1.679897366456105   6.000000000000000  -4.000000000000000  -4.000000000000000
            %   -1.679897366456105   2.519846049684157   0.839948683228052  -4.000000000000000   6.000000000000000   2.000000000000000
            %   -1.679897366456105   0.839948683228052   2.519846049684157  -4.000000000000000   2.000000000000000   6.000000000000000
            %   -0.639700008449225   1.279400016898449   1.279400016898449  -1.523188311911530   3.046376623823059   3.046376623823059
            %
            %   Column 7
            %
            %   -0.639700008449225
            %    1.279400016898449
            %    1.279400016898449
            %   -1.523188311911530
            %    3.046376623823059
            %    3.046376623823059
            %    3.480153950315843
            end
             */
            Input input = new Input(3, 2);//inp = [-1 -1;-1 1; 1 -1];
            input[0, 0] = -1;
            input[0, 1] = -1;
            input[1, 0] = -1;
            input[1, 1] = 1;
            input[2, 0] = 1;
            input[2, 1] = -1;

            Output output = new Output(3, 1);//dout = [1;0;0];
            output[0, 0] = 1;
            output[1, 0] = 0;
            output[2, 0] = 0;

            NetworkInfo info = new NetworkInfo();//param = [3 2 1 7 2];
            info.ni = 2;
            info.nn = 2;
            info.no = 1;
            info.np = 3;
            info.nw = 7;

            VectorHorizontal vh = new VectorHorizontal(3);
            vh[0, 0] = 2;
            vh[0, 1] = 1;
            vh[0, 2] = 1;

            Topography topo = Topography.Generate(TopographyType.BMLP, vh);//topo = [3 1 2 4 1 2 3];
            //in C#, indices start at 0 rather than 1, so everything here is shifted down by one
            Assert.AreEqual(2, topo[0]);
            Assert.AreEqual(0, topo[1]);
            Assert.AreEqual(1, topo[2]);
            Assert.AreEqual(3, topo[3]);
            Assert.AreEqual(0, topo[4]);
            Assert.AreEqual(1, topo[5]);
            Assert.AreEqual(2, topo[6]);

            Weights weights = new Weights(info.nw);//w = [1 1 1 1 1 1 1];
            weights.FillWithNumber(1);//done

            Activation act = new Activation(2);//act = [2 0];
            act[0] = 2;
            act[1] = 0;

            Gain gain = new Gain(2);//gain = [1 1];
            gain[0] = 1;
            gain[1] = 1;

            Index iw = Index.Find(ref topo);//iw = [1 4 8];
            //same situation: the indices start at 0
            Assert.AreEqual(0, iw[0]);
            Assert.AreEqual(3, iw[1]);
            Assert.AreEqual(7, iw[2]);
            Console.WriteLine("Testowanie obliczania gradientu i macierzy hesjana");
            Console.WriteLine("Użyte dane:");
            Console.WriteLine("\nDane wejściowe:");
            Console.WriteLine(input.MatrixToString());
            Console.WriteLine("\nDane wyjściowe:");
            Console.WriteLine(output.MatrixToString());
            Console.WriteLine("\nWagi;");
            Console.WriteLine(weights.MatrixToString());
            Console.WriteLine("\nTopologia:");
            Console.WriteLine(topo.MatrixToString());
            Console.WriteLine("\nIndeksy topologii:");
            Console.WriteLine(iw.MatrixToString());
            Console.WriteLine("\nFunkcje aktywacji:");
            Console.WriteLine(act.MatrixToString());
            Console.WriteLine("\nWzmocnienia (gains):");
            Console.WriteLine(gain.MatrixToString());
            Console.WriteLine("\nParametry (param):");
            Console.WriteLine(info.ToString());
            Hessian hess = new Hessian(ref info);
            hess.Compute(ref info, ref input, ref output, ref topo, weights, ref act, ref gain, ref iw);
            var g = hess.GradientMat;
            var h = hess.HessianMat;

            Console.WriteLine("\nSprawdzanie gradientu z dokładnością do 15 miejsc po przecinku");
            var matG = new double[] { -0.839948683228052, 2.319597374905329, 2.319597374905329, -2.000000000000000, 5.523188311911530, 5.523188311911531, 6.889667569278484 };
            /*
            % Otrzymany gradient:
            %   -0.839948683228052
            %    2.319597374905329
            %    2.319597374905329
            %   -2.000000000000000
            %    5.523188311911530
            %    5.523188311911531
            %    6.889667569278484
             */
            for (int i = 0; i < matG.Length; i++)
            {
                Console.WriteLine(string.Format("NBN C#: {0}\tMatLab NBN: {1}\t{2}", Math.Round(g[i, 0], accuracy), matG[i], Math.Round(g[i, 0], accuracy) == matG[i] ? "OK" : "źle"));
            }

            Assert.AreEqual(-0.839948683228052, Math.Round(g[0, 0], accuracy));
            Assert.AreEqual(2.319597374905329, Math.Round(g[1, 0], accuracy));
            Assert.AreEqual(2.319597374905329, Math.Round(g[2, 0], accuracy));
            Assert.AreEqual(-2.000000000000000, Math.Round(g[3, 0], accuracy));
            Assert.AreEqual(5.523188311911530, Math.Round(g[4, 0], accuracy));
            Assert.AreEqual(5.523188311911531, Math.Round(g[5, 0], accuracy));
            Assert.AreEqual(6.889667569278484, Math.Round(g[6, 0], accuracy));

            Console.WriteLine("\nSprawdzanie macierzy hesjana\nPorównania z dokładnością do 15 miejsc po przecinku");
            MatrixMB matH = new MatrixMB(7, 7);
            //col 1
            matH[0, 0] = 1.058270685684809;
            matH[1, 0] = -0.705513790456539;
            matH[2, 0] = -0.705513790456539;
            matH[3, 0] = 2.519846049684157;
            matH[4, 0] = -1.679897366456105;
            matH[5, 0] = -1.679897366456105;
            matH[6, 0] = -0.639700008449225;

            //col 2
            matH[0, 1] = -0.705513790456539;
            matH[1, 1] = 1.058270685684809;
            matH[2, 1] = 0.352756895228269;
            matH[3, 1] = -1.679897366456105;
            matH[4, 1] = 2.519846049684157;
            matH[5, 1] = 0.839948683228052;
            matH[6, 1] = 1.279400016898449;

            //col 3
            matH[0, 2] = -0.705513790456539;
            matH[1, 2] = 0.352756895228269;
            matH[2, 2] = 1.058270685684809;
            matH[3, 2] = -1.679897366456105;
            matH[4, 2] = 0.839948683228052;
            matH[5, 2] = 2.519846049684157;
            matH[6, 2] = 1.279400016898449;

            //col 4
            matH[0, 3] = 2.519846049684157;
            matH[1, 3] = -1.679897366456105;
            matH[2, 3] = -1.679897366456105;
            matH[3, 3] = 6.000000000000000;
            matH[4, 3] = -4.000000000000000;
            matH[5, 3] = -4.000000000000000;
            matH[6, 3] = -1.523188311911530;

            //col 5
            matH[0, 4] = -1.679897366456105;
            matH[1, 4] = 2.519846049684157;
            matH[2, 4] = 0.839948683228052;
            matH[3, 4] = -4.000000000000000;
            matH[4, 4] = 6.000000000000000;
            matH[5, 4] = 2.000000000000000;
            matH[6, 4] = 3.046376623823059;

            //col 6
            matH[0, 5] = -1.679897366456105;
            matH[1, 5] = 0.839948683228052;
            matH[2, 5] = 2.519846049684157;
            matH[3, 5] = -4.000000000000000;
            matH[4, 5] = 2.000000000000000;
            matH[5, 5] = 6.000000000000000;
            matH[6, 5] = 3.046376623823059;

            //col 7
            matH[0, 6] = -0.639700008449225;
            matH[1, 6] = 1.279400016898449;
            matH[2, 6] = 1.279400016898449;
            matH[3, 6] = -1.523188311911530;
            matH[4, 6] = 3.046376623823059;
            matH[5, 6] = 3.046376623823059;
            matH[6, 6] = 3.480153950315843;

            for (int k = 0; k < h.Cols; k++)
            {
                Console.WriteLine(string.Format("Kolumna {0}", k + 1));
                for (int w = 0; w < h.Rows; w++)
                {
                    decimal dh = Math.Round((decimal)h[w, k], accuracy);
                    decimal dmh = Math.Round((decimal)matH[w, k], accuracy);
                    Console.WriteLine(string.Format("NBN C#: {0}\tMatLab NBN: {1}\t{2}", dh, dmh, dh == dmh ? "OK" : "źle"));
                }
                Console.WriteLine("");
            }

            for (int k = 0; k < h.Cols; k++)
            {
                for (int w = 0; w < h.Rows; w++)
                {
                    decimal dh = Math.Round((decimal)h[w, k], accuracy);
                    decimal dmh = Math.Round((decimal)matH[w, k], accuracy);
                    Assert.AreEqual(dmh, dh);
                }
            }
            /*
            % Otrzymany hesjan:
            %   Columns 1 through 6
            %
            %    1.058270685684809  -0.705513790456539  -0.705513790456539   2.519846049684157  -1.679897366456105  -1.679897366456105
            %   -0.705513790456539   1.058270685684809   0.352756895228269  -1.679897366456105   2.519846049684157   0.839948683228052
            %   -0.705513790456539   0.352756895228269   1.058270685684809  -1.679897366456105   0.839948683228052   2.519846049684157
            %    2.519846049684157  -1.679897366456105  -1.679897366456105   6.000000000000000  -4.000000000000000  -4.000000000000000
            %   -1.679897366456105   2.519846049684157   0.839948683228052  -4.000000000000000   6.000000000000000   2.000000000000000
            %   -1.679897366456105   0.839948683228052   2.519846049684157  -4.000000000000000   2.000000000000000   6.000000000000000
            %   -0.639700008449225   1.279400016898449   1.279400016898449  -1.523188311911530   3.046376623823059   3.046376623823059
            %
            %   Column 7
            %
            %   -0.639700008449225
            %    1.279400016898449
            %    1.279400016898449
            %   -1.523188311911530
            %    3.046376623823059
            %    3.046376623823059
            %    3.480153950315843
             */
        }