private NearestNeighbourModel(NearestNeighbourModel original, Cloner cloner)
      : base(original, cloner) {
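      // The ALGLIB kd-tree is cloned field by field below: scalar fields are assigned directly,
      // array fields are duplicated via Clone() so the clone shares no mutable state with the original tree.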
      kdTree = new alglib.nearestneighbor.kdtree();
      kdTree.approxf = original.kdTree.approxf;
      kdTree.boxmax = (double[])original.kdTree.boxmax.Clone();
      kdTree.boxmin = (double[])original.kdTree.boxmin.Clone();
      kdTree.buf = (double[])original.kdTree.buf.Clone();
      kdTree.curboxmax = (double[])original.kdTree.curboxmax.Clone();
      kdTree.curboxmin = (double[])original.kdTree.curboxmin.Clone();
      kdTree.curdist = original.kdTree.curdist;
      kdTree.debugcounter = original.kdTree.debugcounter;
      kdTree.idx = (int[])original.kdTree.idx.Clone();
      kdTree.kcur = original.kdTree.kcur;
      kdTree.kneeded = original.kdTree.kneeded;
      kdTree.n = original.kdTree.n;
      kdTree.nodes = (int[])original.kdTree.nodes.Clone();
      kdTree.normtype = original.kdTree.normtype;
      kdTree.nx = original.kdTree.nx;
      kdTree.ny = original.kdTree.ny;
      kdTree.r = (double[])original.kdTree.r.Clone();
      kdTree.rneeded = original.kdTree.rneeded;
      kdTree.selfmatch = original.kdTree.selfmatch;
      kdTree.splits = (double[])original.kdTree.splits.Clone();
      kdTree.tags = (int[])original.kdTree.tags.Clone();
      kdTree.x = (double[])original.kdTree.x.Clone();
      kdTree.xy = (double[,])original.kdTree.xy.Clone();

      k = original.k;
      targetVariable = original.targetVariable;
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
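This copy constructor is one half of HeuristicLab's deep-cloning pattern; the other half is a Clone override that hands the Cloner to it. A minimal sketch of that usual counterpart (shown here for context, it is not part of the listing above):

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NearestNeighbourModel(this, cloner);
    }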
        private NearestNeighbourModel(NearestNeighbourModel original, Cloner cloner)
            : base(original, cloner)
        {
            kdTree              = new alglib.nearestneighbor.kdtree();
            kdTree.approxf      = original.kdTree.approxf;
            kdTree.boxmax       = (double[])original.kdTree.boxmax.Clone();
            kdTree.boxmin       = (double[])original.kdTree.boxmin.Clone();
            kdTree.buf          = (double[])original.kdTree.buf.Clone();
            kdTree.curboxmax    = (double[])original.kdTree.curboxmax.Clone();
            kdTree.curboxmin    = (double[])original.kdTree.curboxmin.Clone();
            kdTree.curdist      = original.kdTree.curdist;
            kdTree.debugcounter = original.kdTree.debugcounter;
            kdTree.idx          = (int[])original.kdTree.idx.Clone();
            kdTree.kcur         = original.kdTree.kcur;
            kdTree.kneeded      = original.kdTree.kneeded;
            kdTree.n            = original.kdTree.n;
            kdTree.nodes        = (int[])original.kdTree.nodes.Clone();
            kdTree.normtype     = original.kdTree.normtype;
            kdTree.nx           = original.kdTree.nx;
            kdTree.ny           = original.kdTree.ny;
            kdTree.r            = (double[])original.kdTree.r.Clone();
            kdTree.rneeded      = original.kdTree.rneeded;
            kdTree.selfmatch    = original.kdTree.selfmatch;
            kdTree.splits       = (double[])original.kdTree.splits.Clone();
            kdTree.tags         = (int[])original.kdTree.tags.Clone();
            kdTree.x            = (double[])original.kdTree.x.Clone();
            kdTree.xy           = (double[,])original.kdTree.xy.Clone();

            k = original.k;
            allowedInputVariables = (string[])original.allowedInputVariables.Clone();
            if (original.classValues != null)
            {
                this.classValues = (double[])original.classValues.Clone();
            }
        }
Example #4
 private NearestNeighbourModel(NearestNeighbourModel original, Cloner cloner)
     : base(original, cloner)
 {
     kdTree              = new alglib.nearestneighbor.kdtree();
     kdTree.approxf      = original.kdTree.approxf;
     kdTree.boxmax       = (double[])original.kdTree.boxmax.Clone();
     kdTree.boxmin       = (double[])original.kdTree.boxmin.Clone();
     kdTree.buf          = (double[])original.kdTree.buf.Clone();
     kdTree.curboxmax    = (double[])original.kdTree.curboxmax.Clone();
     kdTree.curboxmin    = (double[])original.kdTree.curboxmin.Clone();
     kdTree.curdist      = original.kdTree.curdist;
     kdTree.debugcounter = original.kdTree.debugcounter;
     kdTree.idx          = (int[])original.kdTree.idx.Clone();
     kdTree.kcur         = original.kdTree.kcur;
     kdTree.kneeded      = original.kdTree.kneeded;
     kdTree.n            = original.kdTree.n;
     kdTree.nodes        = (int[])original.kdTree.nodes.Clone();
     kdTree.normtype     = original.kdTree.normtype;
     kdTree.nx           = original.kdTree.nx;
     kdTree.ny           = original.kdTree.ny;
     kdTree.r            = (double[])original.kdTree.r.Clone();
     kdTree.rneeded      = original.kdTree.rneeded;
     kdTree.selfmatch    = original.kdTree.selfmatch;
     kdTree.splits       = (double[])original.kdTree.splits.Clone();
     kdTree.tags         = (int[])original.kdTree.tags.Clone();
     kdTree.x            = (double[])original.kdTree.x.Clone();
     kdTree.xy           = (double[,])original.kdTree.xy.Clone();
     selfMatch           = original.selfMatch;
     k = original.k;
     isCompatibilityLoaded = original.IsCompatibilityLoaded;
     if (!IsCompatibilityLoaded)
     {
         weights = new double[original.weights.Length];
         Array.Copy(original.weights, weights, weights.Length);
         offsets = new double[original.offsets.Length];
         Array.Copy(original.offsets, this.offsets, this.offsets.Length);
     }
     allowedInputVariables = (string[])original.allowedInputVariables.Clone();
     if (original.classValues != null)
     {
         this.classValues = (double[])original.classValues.Clone();
     }
 }
    public NearestNeighbourModel(IDataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) {
      Name = ItemName;
      Description = ItemDescription;
      this.k = k;
      this.targetVariable = targetVariable;
      this.allowedInputVariables = allowedInputVariables.ToArray();

      var inputMatrix = AlglibUtil.PrepareInputMatrix(dataset,
                                   allowedInputVariables.Concat(new string[] { targetVariable }),
                                   rows);

      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException(
          "Nearest neighbour classification does not support NaN or infinity values in the input dataset.");

      this.kdTree = new alglib.nearestneighbor.kdtree();

      var nRows = inputMatrix.GetLength(0);
      var nFeatures = inputMatrix.GetLength(1) - 1;

      if (classValues != null) {
        this.classValues = (double[])classValues.Clone();
        int nClasses = classValues.Length;
        // map original class values to values [0..nClasses-1]
        var classIndices = new Dictionary<double, double>();
        for (int i = 0; i < nClasses; i++)
          classIndices[classValues[i]] = i;

        for (int row = 0; row < nRows; row++) {
          inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
        }
      }
      alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
    }
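kdtreebuild above stores the matrix with nx = nFeatures input columns, ny = 1 target column and normtype = 2 (Euclidean distance). Prediction then asks the tree for the k nearest stored rows. A rough sketch of such a query against ALGLIB's internal nearestneighbor API (this is not the class's actual prediction code; selfMatch is assumed false, and nFeatures, k, kdTree, classValues are the names used in the constructor above):

      // hypothetical query for a single point; x holds the point's nFeatures input values
      double[] x = new double[nFeatures];
      int found = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
      var neighbours = new double[0, 0];
      alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);
      // each of the 'found' neighbour rows ends with the target column: regression averages it,
      // classification counts votes per class index and maps the winner back through classValues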
 private NearestNeighbourModel(bool deserializing)
   : base(deserializing) {
   if (deserializing)
     kdTree = new alglib.nearestneighbor.kdtree();
 }
        public NearestNeighbourModel(IDataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> weights = null, double[] classValues = null)
            : base(targetVariable)
        {
            Name        = ItemName;
            Description = ItemDescription;
            this.k      = k;
            this.allowedInputVariables = allowedInputVariables.ToArray();
            double[,] inputMatrix;
            if (IsCompatibilityLoaded)
            {
                // no scaling
                inputMatrix = dataset.ToArray(
                    this.allowedInputVariables.Concat(new string[] { targetVariable }),
                    rows);
            }
            else
            {
                this.offsets = this.allowedInputVariables
                               .Select(name => dataset.GetDoubleValues(name, rows).Average() * -1)
                               .Concat(new double[] { 0 }) // no offset for target variable
                               .ToArray();
                if (weights == null)
                {
                    // automatic determination of weights (all features should have variance = 1)
                    this.weights = this.allowedInputVariables
                                   .Select(name => 1.0 / dataset.GetDoubleValues(name, rows).StandardDeviationPop())
                                   .Concat(new double[] { 1.0 }) // no scaling for target variable
                                   .ToArray();
                }
                else
                {
                    // user specified weights (+ 1 for target)
                    this.weights = weights.Concat(new double[] { 1.0 }).ToArray();
                    if (this.weights.Length - 1 != this.allowedInputVariables.Length)
                    {
                        throw new ArgumentException("The number of elements in the weight vector must match the number of input variables");
                    }
                }
                inputMatrix = CreateScaledData(dataset, this.allowedInputVariables.Concat(new string[] { targetVariable }), rows, this.offsets, this.weights);
            }

            if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
            {
                throw new NotSupportedException(
                          "Nearest neighbour model does not support NaN or infinity values in the input dataset.");
            }

            this.kdTree = new alglib.nearestneighbor.kdtree();

            var nRows     = inputMatrix.GetLength(0);
            var nFeatures = inputMatrix.GetLength(1) - 1;

            if (classValues != null)
            {
                this.classValues = (double[])classValues.Clone();
                int nClasses = classValues.Length;
                // map original class values to values [0..nClasses-1]
                var classIndices = new Dictionary<double, double>();
                for (int i = 0; i < nClasses; i++)
                {
                    classIndices[classValues[i]] = i;
                }

                for (int row = 0; row < nRows; row++)
                {
                    inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
                }
            }
            alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
        }
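CreateScaledData itself is not part of this listing. Given how the offsets (negated column means) and weights (inverse population standard deviations, or user-supplied factors) are built above, a plausible sketch is a column-wise linear transformation of the raw matrix; the actual helper may be implemented differently:

    // hypothetical sketch: scaled[row, col] = (raw[row, col] + offsets[col]) * weights[col]
    private static double[,] CreateScaledData(IDataset dataset, IEnumerable<string> variables,
                                              IEnumerable<int> rows, double[] offsets, double[] weights) {
      var raw = dataset.ToArray(variables, rows);
      var result = new double[raw.GetLength(0), raw.GetLength(1)];
      for (int row = 0; row < raw.GetLength(0); row++)
        for (int col = 0; col < raw.GetLength(1); col++)
          result[row, col] = (raw[row, col] + offsets[col]) * weights[col];
      return result;
    }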
Example #9
 private NearestNeighbourModel(StorableConstructorFlag _) : base(_)
 {
     kdTree = new alglib.nearestneighbor.kdtree();
 }
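The two deserialization constructors in this listing reflect two generations of HeuristicLab's persistence layer: the bool-flag overload belongs to the older storable infrastructure, the StorableConstructorFlag overload to the newer HEAL.Attic-based one. In both generations such a constructor is marked with the StorableConstructor attribute and only creates an empty kd-tree object, whose contents are presumably restored from the storable members after deserialization. A sketch of how the attribute is typically attached (the attribute itself is not visible in the snippets above):

     [StorableConstructor]
     private NearestNeighbourModel(StorableConstructorFlag _) : base(_) {
         kdTree = new alglib.nearestneighbor.kdtree();
     }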