Example no. 1
        public Classifiers.IClassifier learn_random_forest_on_known_points(Func <double[], double> meFunc, Func <double[], double[]> calcDerivative, double allowErr)
        {
            int[] count = new int[N]; for (int i = 0; i < N; i++)
            {
                count[i] = (Min[i] == Max[i]) ? 1 : NGRID;
            }
            create_grid(count);
            analyse_voronoi();
            analyse_error();

            int n = xf.Length;

            Classifiers.LabeledData[] ldata = new Classifiers.LabeledData[n];
            int featureCount = 0;

            for (int i = 0; i < n; i++)
            {
                double[] feature = build_fetures_from_existing_points(i, calcDerivative);
                ldata[i]     = new Classifiers.LabeledData(feature, 1);
                featureCount = feature.Length;
            }


            Classifiers.IClassifier cls = new Classifiers.RandomForest();

            Classifiers.RandomForestParams ps = new Classifiers.RandomForestParams(ldata, n /* samples count */,
                                                                                   featureCount /* features count */,
                                                                                   2 /* classes count */,
                                                                                   n / 10 /* trees count */,
                                                                                   5 /* number of features considered at each split */,
                                                                                   0.7 /* fraction of the training set used to build each tree */);

            cls.train(ps);

            double trainModelPrecision;

            cls.validate(ldata, out trainModelPrecision);

            Console.WriteLine("Model precision on training dataset: " + trainModelPrecision);
            return(cls);
        }
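
As a side note, the train/validate pattern used above can be exercised in isolation. The sketch below is a minimal, hypothetical usage of the Classifiers API exactly as it appears in this example (RandomForest, RandomForestParams, LabeledData, train, validate); the feature values and forest parameters are made up for illustration.

        // Minimal sketch of the train/validate cycle shown above (hypothetical standalone usage;
        // assumes the Classifiers namespace from these examples).
        public static Classifiers.IClassifier TrainToyForest()
        {
            // Two hand-made samples with 3 placeholder features each, one per class.
            var ldata = new Classifiers.LabeledData[]
            {
                new Classifiers.LabeledData(new double[] { 0.1, 0.5, 1.0 }, 0),
                new Classifiers.LabeledData(new double[] { 0.9, 0.2, 3.0 }, 1),
            };

            Classifiers.IClassifier cls = new Classifiers.RandomForest();
            var ps = new Classifiers.RandomForestParams(ldata,
                                                        ldata.Length /* samples count */,
                                                        3            /* features count */,
                                                        2            /* classes count */,
                                                        5            /* trees count */,
                                                        2            /* features per split */,
                                                        0.7          /* training fraction per tree */);
            cls.train(ps);

            double precision;
            cls.validate(ldata, out precision);   // precision measured on the training data itself
            return cls;
        }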
Example no. 2
        public void do_random_forest_analyse(Classifiers.IClassifier cls, double allowErr, Func <double[], double> meFunc, Func <double[], double[]> calcDerivative)
        {
            int[] count = new int[N]; for (int i = 0; i < N; i++)
            {
                count[i] = (Min[i] == Max[i]) ? 1 : NGRID;
            }
            create_grid(count);
            analyse_voronoi();
            analyse_error();


            int n = candidates.Length;

            Console.WriteLine("Candidates before filtering: " + candidates.Length);
            //int n = grid.Node.Length;
            Classifiers.LabeledData[] ldata = new Classifiers.LabeledData[n];
            int featureCount = 0;

            for (int i = 0; i < n; i++)
            {
                // min, max in locality
                double maxNeighbours = double.MinValue;
                double minNeighbours = double.MaxValue;
                foreach (var neighbour in grid.Neighbours(candidates[i]))
                //foreach (var neighbour in grid.Neighbours(i))
                {
                    double[] calcNeighbour = (double[])grid.Node[neighbour].Clone();
                    this.func.Calculate(calcNeighbour);
                    if (calcNeighbour[calcNeighbour.Length - 1] < minNeighbours)
                    {
                        minNeighbours = calcNeighbour[calcNeighbour.Length - 1];
                    }
                    if (calcNeighbour[calcNeighbour.Length - 1] > maxNeighbours)
                    {
                        maxNeighbours = calcNeighbour[calcNeighbour.Length - 1];
                    }
                }
                // value of the approximation at the current candidate node
                double[] currentNode = (double[])grid.Node[candidates[i]].Clone();
                // double[] currentNode = (double[])grid.Node[i].Clone();
                this.func.Calculate(currentNode);
                double currentNodeVal = currentNode[currentNode.Length - 1];
                if (currentNodeVal < minNeighbours)
                {
                    minNeighbours = currentNodeVal;
                }
                if (currentNodeVal > maxNeighbours)
                {
                    maxNeighbours = currentNodeVal;
                }

                // the true class of a candidate is unknown here; the trained classifier predicts it below

                //derivative
                double[] derivative = calcDerivative(grid.Node[candidates[i]]);
                //double[] derivative = calcDerivative(grid.Node[i]);

                // build features vector (same layout as in learn_random_forest_on_grid);
                // borderdist/error are indexed by grid node, hence candidates[i]
                double[] features = new double[5 + derivative.Length];
                features[0] = borderdist[candidates[i]];
                features[1] = error[candidates[i]];
                features[2] = maxNeighbours;
                features[3] = minNeighbours;
                features[4] = currentNodeVal;
                for (int k = 0; k < derivative.Length; k++)
                {
                    features[5 + k] = derivative[k];
                }

                ldata[i]     = new Classifiers.LabeledData(features, 0);
                featureCount = features.Length;
            }
            List <int> newCandidates = new List <int>();

            int[] y = new int[ldata.Length];
            for (int i = 0; i < ldata.Length; i++)
            {
                cls.infer(ldata[i].data, out y[i]);
                if (y[i] == 1)
                {
                    // keep the grid node index so Tools.Sub(grid.Node, candidates) below selects the right nodes
                    newCandidates.Add(candidates[i]);
                }
            }
            candidates = newCandidates.ToArray();
            Console.WriteLine("Candidates after filtering: " + candidates.Length);

            xfcandidates = Tools.Sub(grid.Node, candidates);
        }
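
Read together with Example no. 1, the intended flow seems to be: train a forest on the known points, then let it prune the candidate grid nodes. The sketch below is a hypothetical driver under the assumption that both methods live on one class, here given the made-up name AdaptiveSampler; the model and derivative lambdas are placeholders, not part of the original code.

        // Hypothetical driver; 'AdaptiveSampler' stands in for the (unnamed here) class that the two
        // methods above belong to, and the lambdas are placeholders for the real model and derivative.
        static void RunAdaptiveStep(AdaptiveSampler sampler)
        {
            Func<double[], double>   model         = x => x[0] * x[0];          // placeholder target function
            Func<double[], double[]> modelGradient = x => new[] { 2 * x[0] };   // placeholder derivative
            double allowErr = 1e-3;

            Classifiers.IClassifier cls = sampler.learn_random_forest_on_known_points(model, modelGradient, allowErr);
            sampler.do_random_forest_analyse(cls, allowErr, model, modelGradient);
            // Afterwards, sampler's candidates / xfcandidates hold only the grid nodes the forest
            // flagged as likely to exceed the error tolerance.
        }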
Example no. 3
        public Classifiers.IClassifier learn_random_forest_on_grid(Func <double[], double> meFunc, Func <double[], double[]> calcDerivative, double allowErr)
        {
            int[] count = new int[N]; for (int i = 0; i < N; i++)
            {
                count[i] = (Min[i] == Max[i]) ? 1 : NGRID;
            }
            create_grid(count);
            analyse_voronoi();
            analyse_error();

            int n = grid.Node.Length + xf.Length;

            // int n = grid.Node.Length;
            Classifiers.LabeledData[] ldata = new Classifiers.LabeledData[n];
            int featureCount = 0;

            for (int i = 0; i < grid.Node.Length; i++)
            {
                // min, max in locality
                double maxNeighbours = double.MinValue;
                double minNeighbours = double.MaxValue;
                foreach (var neighbour in grid.Neighbours(i))
                {
                    double[] calcNeighbour = (double[])grid.Node[neighbour].Clone();
                    this.func.Calculate(calcNeighbour);
                    if (calcNeighbour[calcNeighbour.Length - 1] < minNeighbours)
                    {
                        minNeighbours = calcNeighbour[calcNeighbour.Length - 1];
                    }
                    if (calcNeighbour[calcNeighbour.Length - 1] > maxNeighbours)
                    {
                        maxNeighbours = calcNeighbour[calcNeighbour.Length - 1];
                    }
                }
                // value of the approximation at the current grid node
                double[] currentNode = (double[])grid.Node[i].Clone();
                this.func.Calculate(currentNode);
                double currentNodeVal = currentNode[currentNode.Length - 1];
                if (currentNodeVal < minNeighbours)
                {
                    minNeighbours = currentNodeVal;
                }
                if (currentNodeVal > maxNeighbours)
                {
                    maxNeighbours = currentNodeVal;
                }

                // class for the point: 1 if the real function and the approximation differ by more than allowErr
                int pointClass = 0;
                if (Math.Abs(meFunc(grid.Node[i]) - cuurentNodeVal) > allowErr)
                {
                    pointClass = 1;
                }

                //derivative
                double[] derivative = calcDerivative(grid.Node[i]);

                // build features vector
                double[] features = new double[5 + derivative.Length];
                features[0] = borderdist[i];
                features[1] = error[i];
                features[2] = maxNeighbours;
                features[3] = minNeighbours;
                features[4] = currentNodeVal;
                for (int k = 0; k < derivative.Length; k++)
                {
                    features[5 + k] = derivative[k];
                }

                ldata[i]     = new Classifiers.LabeledData(features, pointClass);
                featureCount = features.Length;
            }
            for (int i = 0; i < xf.Length; i++)
            {
                double[] feature = build_fetures_from_existing_points(i, calcDerivative);
                ldata[grid.Node.Length + i] = new Classifiers.LabeledData(feature, 0);
                featureCount = feature.Length;
            }


            Classifiers.IClassifier        cls = new Classifiers.RandomForest();
            Classifiers.RandomForestParams ps  = new Classifiers.RandomForestParams(ldata, n /* samples count */,
                                                                                    featureCount /* features count */,
                                                                                    2 /* classes count */,
                                                                                    n / 10 /* trees count */,
                                                                                    6 /* number of features considered at each split */,
                                                                                    0.7 /* fraction of the training set used to build each tree */);

            cls.train(ps);
            double trainModelPrecision;

            cls.validate(ldata, out trainModelPrecision);

            Console.WriteLine("Model precision on training dataset: " + trainModelPrecision);
            return(cls);
        }
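
All three examples assemble the same feature vector: border distance, local error estimate, max/min of the approximation over the neighbourhood, the node value itself, then the derivative components. A small hypothetical helper (not part of the original code) spells out that layout.

        // Hypothetical helper mirroring the feature layout used above:
        // [ borderdist, error, max over neighbours, min over neighbours, node value, derivative... ]
        private static double[] BuildNodeFeatures(double borderDist, double nodeError,
                                                  double maxNeighbours, double minNeighbours,
                                                  double nodeValue, double[] derivative)
        {
            double[] features = new double[5 + derivative.Length];
            features[0] = borderDist;
            features[1] = nodeError;
            features[2] = maxNeighbours;
            features[3] = minNeighbours;
            features[4] = nodeValue;
            Array.Copy(derivative, 0, features, 5, derivative.Length);
            return features;
        }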
Example no. 4
        private void analyse_voronoi()
        {
            //assign each grid node to a domain (graph algorithm based on a level structure over the adjacency graph)
            SortedSet <int>[] adjncy = new SortedSet <int> [xf.Length];
            for (int i = 0; i < xf.Length; i++)
            {
                adjncy[i] = new SortedSet <int>();
            }

            Console.WriteLine("Разбиение пространства на домены");
            Queue <int> queue = new Queue <int>();

            domain = new int[grid.Node.Length];
            double[] dist = new double[grid.Node.Length];
            for (int i = 0; i < domain.Length; i++)
            {
                domain[i] = -1;
                dist[i]   = double.PositiveInfinity;
            }
            for (int i = 0; i < xf.Length; i++)
            {
                int index;
                grid.ToIndex(xf[i], out index);
                dist[index]   = distanceX(grid.Node[index], xf[i]);
                domain[index] = i;
                //setvalue(index, xf[i]);
                queue.Enqueue(index);
            }
            while (queue.Count > 0)
            {
                int index = queue.Dequeue();
                int i     = domain[index];
                foreach (var adj in grid.Neighbours(index))
                {
                    double d = distanceX(grid.Node[adj], xf[i]);
                    if (domain[adj] >= 0)
                    {
                        adjncy[domain[adj]].Add(i);
                        adjncy[i].Add(domain[adj]);
                        if (d < dist[adj])
                        {
                            domain[adj] = i;
                            dist[adj]   = d;
                        }
                        continue;
                    }
                    domain[adj] = i;
                    dist[adj]   = d;
                    //setvalue(adj, xf[i]);
                    queue.Enqueue(adj);
                }
            }

            Console.WriteLine("Построение графа доменов");
            //строю граф соседства доменов
            graph = new int[xf.Length][];
            for (int i = 0; i < xf.Length; i++)
            {
                adjncy[i].Add(i);
                graph[i] = adjncy[i].ToArray();
            }

            Console.WriteLine("Построение диграммы Вороного на сетке");
            //уточняю домены (диаграмма вороного на сетке)
            for (int i = 0; i < grid.Node.Length; i++)
            {
                double[] xy  = grid.Node[i];
                int[]    adj = graph[domain[i]];
                double   min = double.PositiveInfinity;
                for (int j = 0; j < adj.Length; j++)
                {
                    double d = distanceX(xy, xf[adj[j]]);
                    if (min > d)
                    {
                        min = d; domain[i] = adj[j];
                    }
                }
            }

            Console.WriteLine("Вычисление границ доменов");
            //вычисляю границы доменов
            borderdist = new double[grid.Node.Length];
            bordernear = new int[grid.Node.Length];
            for (int i = 0; i < grid.Node.Length; i++)
            {
                borderdist[i] = double.PositiveInfinity;
                bordernear[i] = -1;
                int dom = domain[i];
                foreach (var adj in grid.Neighbours(i))
                {
                    if (domain[adj] != dom)
                    {
                        borderdist[i] = 0;
                        bordernear[i] = i;
                        queue.Enqueue(i);
                        break;
                    }
                }
            }
            candidates = queue.ToArray();


//--------------------------TO REMOVE AFTER PROPER CLASSIFIER USAGE-------------------------
//--------------------------------------Too silly example-----------------------------------

            // Classification is binary... should we use a simpler binary classifier instead?
            Classifiers.LabeledData[] ldata = new Classifiers.LabeledData[3];
            ldata[0] = new Classifiers.LabeledData(new double[3] {
                grid.Node[0][0], borderdist[0], bordernear[0]
            }, 1);
            ldata[1] = new Classifiers.LabeledData(new double[3] {
                grid.Node[1][0], borderdist[1], bordernear[1]
            }, 1);
            ldata[2] = new Classifiers.LabeledData(new double[3] {
                grid.Node[2][0], borderdist[2], bordernear[2]
            }, 0);

            Classifiers.IClassifier        cls = new Classifiers.RandomForest();
            Classifiers.RandomForestParams ps  = new Classifiers.RandomForestParams(ldata, 3 /* samples count */,
                                                                                    3 /* features count */,
                                                                                    2 /* classes count */,
                                                                                    3 /* trees count */,
                                                                                    2 /* number of features considered at each split */,
                                                                                    0.7 /* fraction of the training set used to build each tree */);

            cls.train(ps);
            int[] y = new int[3];
            cls.infer(ldata[0].data, out y[0]);
            cls.infer(ldata[1].data, out y[1]);
            cls.infer(ldata[2].data, out y[2]);

            for (int i = 0; i < 3; i++)
            {
                Console.WriteLine("{0} is predicted y[{1}] from trained data sample and {2} is ground truth", y[i], i, ldata[i].label);
            }

            double trainModelPrecision;

            cls.validate(ldata, out trainModelPrecision);

            Console.WriteLine("Model precision on training dataset: " + trainModelPrecision);

            //------------------------------------------------------------------------------------------
            //--------------------------TO REMOVE AFTER PROPER CLASSIFIER USAGE-------------------------


            Console.WriteLine("Построение функции расстояний до границ доменов");
            //вычисляю расстояния от границ
            while (queue.Count > 0)
            {
                int index = queue.Dequeue();
                int dom   = domain[index];
                int brd   = bordernear[index];
                foreach (var adj in grid.Neighbours(index))
                {
                    if (domain[adj] != dom)
                    {
                        continue;
                    }
                    double d = distanceX(grid.Node[adj], grid.Node[brd]);
                    if (bordernear[adj] >= 0)
                    {
                        if (d < borderdist[adj])
                        {
                            bordernear[adj] = brd;
                            borderdist[adj] = d;
                        }
                        continue;
                    }
                    bordernear[adj] = brd;
                    borderdist[adj] = d;
                    queue.Enqueue(adj);
                }
            }

            Console.WriteLine("Нормировка функции расстояний до границ доменов");
            //нормирую расстояния от границ
            for (int i = 0; i < grid.Node.Length; i++)
            {
                int    dom = domain[i];
                int    brd = bordernear[i];
                double a   = distanceX(grid.Node[i], xf[dom]);
                double b   = distanceX(grid.Node[i], grid.Node[brd]);
                double c   = a + b;
                borderdist[i] = (c == 0) ? 0 : b / c;
            }
        }
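
analyse_voronoi is built around two multi-source breadth-first passes over the grid graph: one grows domains outward from the sample points xf, the other grows distances inward from the domain boundaries. The sketch below isolates the first pass on an abstract graph; the neighbours and distance delegates are hypothetical stand-ins for grid.Neighbours and distanceX, it is not the project's actual API, and the usual System / System.Collections.Generic usings are assumed.

        // Simplified multi-source BFS domain assignment (illustrative sketch only).
        // neighbours(i) returns the grid neighbours of node i; distance(i, s) is the distance
        // from node i to seed s.
        public static int[] AssignDomains(int nodeCount, int[] seeds,
                                          Func<int, IEnumerable<int>> neighbours,
                                          Func<int, int, double> distance)
        {
            var domain = new int[nodeCount];
            var dist   = new double[nodeCount];
            for (int i = 0; i < nodeCount; i++) { domain[i] = -1; dist[i] = double.PositiveInfinity; }

            var queue = new Queue<int>();
            for (int s = 0; s < seeds.Length; s++)
            {
                domain[seeds[s]] = s;       // each seed node starts its own domain
                dist[seeds[s]]   = 0;
                queue.Enqueue(seeds[s]);
            }

            while (queue.Count > 0)
            {
                int index = queue.Dequeue();
                int owner = domain[index];
                foreach (int adj in neighbours(index))
                {
                    double d = distance(adj, owner);
                    if (domain[adj] < 0)            // unvisited: claim it and keep expanding
                    {
                        domain[adj] = owner;
                        dist[adj]   = d;
                        queue.Enqueue(adj);
                    }
                    else if (d < dist[adj])         // already claimed: keep the closer seed
                    {
                        domain[adj] = owner;
                        dist[adj]   = d;
                    }
                }
            }
            return domain;
        }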