private GaussianProcessModel(GaussianProcessModel original, Cloner cloner)
            : base(original, cloner)
        {
            // Cloning constructor: deep-clone the parameterizable components,
            // value-copy the scalars, and share the training arrays (they are
            // never mutated after the model has been fitted).
            meanFunction       = cloner.Clone(original.meanFunction);
            covarianceFunction = cloner.Clone(original.covarianceFunction);
            inputScaling       = original.inputScaling == null ? null : cloner.Clone(original.inputScaling);
            trainingDataset    = cloner.Clone(original.trainingDataset);

            negativeLogLikelihood = original.negativeLogLikelihood;
            targetVariable        = original.targetVariable;
            sqrSigmaNoise         = original.sqrSigmaNoise;

            // Hyperparameter arrays are mutable, so clone them when present.
            meanParameter = original.meanParameter == null
                ? null
                : (double[])original.meanParameter.Clone();
            covarianceParameter = original.covarianceParameter == null
                ? null
                : (double[])original.covarianceParameter.Clone();

            // Shared on purpose (shallow copies): not modified after training.
            trainingRows          = original.trainingRows;
            allowedInputVariables = original.allowedInputVariables;
            alpha = original.alpha;
            l     = original.l;
            x     = original.x;
        }
Example #2
0
 protected Scaling(Scaling original, Cloner cloner)
     : base(original, cloner)
 {
     // Cloning constructor: copy the per-variable scaling parameters.
     // Tuple<,> is immutable, so the entries can be shared directly instead
     // of being rebuilt with Tuple.Create (same observable behavior, no
     // redundant allocation per pair).
     foreach (var pair in original.scalingParameters)
     {
         scalingParameters.Add(pair.Key, pair.Value);
     }
 }
Example #3
0
    public static double[,] PrepareAndScaleInputMatrix(IDataset dataset, IEnumerable<string> variables, IEnumerable<int> rows, Scaling scaling) {
      // Builds a rows x variables matrix of scaled input values.
      //
      // Fix: the original materialized variablesList/rowsList and then ignored
      // them, re-enumerating the caller's `variables` and `rows` sequences
      // (once per column for `rows`). For lazy IEnumerables that is repeated
      // work and, if the sequence is not repeatable, wrong. Enumerate the
      // materialized lists instead, matching the backwards-compatible variant
      // of this routine elsewhere in the file.
      List<string> variablesList = variables.ToList();
      List<int> rowsList = rows.ToList();

      double[,] matrix = new double[rowsList.Count, variablesList.Count];

      int col = 0;
      foreach (string column in variablesList) {
        // One scaled column per input variable; values come back in row order.
        var values = scaling.GetScaledValues(dataset, column, rowsList);
        int row = 0;
        foreach (var value in values) {
          matrix[row, col] = value;
          row++;
        }
        col++;
      }

      return matrix;
    }
 private static double[,] GetData(IDataset ds, IEnumerable <string> allowedInputs, IEnumerable <int> rows, Scaling scaling)
 {
     // Build the input matrix, applying the configured scaling when present.
     return scaling == null
         ? AlglibUtil.PrepareInputMatrix(ds, allowedInputs, rows)
         : AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputs, rows, scaling);
 }
        private void CalculateModel(IDataset ds, IEnumerable <int> rows, bool scaleInputs = true)
        {
            // Fits the Gaussian process on the given dataset rows: builds the
            // (optionally scaled) input matrix x, the lower-triangular Cholesky
            // factor l of the covariance matrix, the weight vector alpha, the
            // negative log likelihood, and the gradients of the likelihood
            // w.r.t. the mean, covariance and noise hyperparameters.
            // NOTE(review): assumes meanParameter/covarianceParameter already
            // hold the hyperparameter values to fit — confirm with callers.
            this.trainingDataset = (IDataset)ds.Clone();
            this.trainingRows    = rows.ToArray();
            this.inputScaling    = scaleInputs ? new Scaling(ds, allowedInputVariables, rows) : null;

            x = GetData(ds, this.allowedInputVariables, this.trainingRows, this.inputScaling);

            IEnumerable <double> y;

            y = ds.GetDoubleValues(targetVariable, rows);

            // n = number of training points (rows of x).
            int n = x.GetLength(0);

            // calculate cholesky decomposed (lower triangular) covariance matrix
            var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));

            // presumably CalculateL factors the covariance scaled by
            // sqrSigmaNoise (see the /sqrSigmaNoise corrections below) — TODO
            // confirm against CalculateL.
            this.l = CalculateL(x, cov, sqrSigmaNoise);

            // calculate mean
            var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, x.GetLength(1)));

            // Prior mean evaluated at every training point.
            double[] m = Enumerable.Range(0, x.GetLength(0))
                         .Select(r => mean.Mean(x, r))
                         .ToArray();

            // calculate sum of diagonal elements for likelihood
            // (sum of log diag(L) = 0.5 * log det of the factored matrix)
            double diagSum = Enumerable.Range(0, n).Select(i => Math.Log(l[i, i])).Sum();

            // solve for alpha
            // ym = y - m: the mean-corrected targets.
            double[] ym = y.Zip(m, (a, b) => a - b).ToArray();

            int info;

            alglib.densesolverreport denseSolveRep;

            // Solve the SPD system via the existing Cholesky factor l;
            // false = factor stored in the lower triangle.
            alglib.spdmatrixcholeskysolve(l, n, false, ym, out info, out denseSolveRep, out alpha);
            for (int i = 0; i < alpha.Length; i++)
            {
                // Undo the sqrSigmaNoise scaling of the factored matrix.
                alpha[i] = alpha[i] / sqrSigmaNoise;
            }
            negativeLogLikelihood = 0.5 * Util.ScalarProd(ym, alpha) + diagSum + (n / 2.0) * Math.Log(2.0 * Math.PI * sqrSigmaNoise);

            // derivatives
            int nAllowedVariables = x.GetLength(1);

            alglib.matinvreport matInvRep;
            // Work on a copy: the inverse overwrites its input in place.
            double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
            Array.Copy(l, lCopy, lCopy.Length);

            alglib.spdmatrixcholeskyinverse(ref lCopy, n, false, out info, out matInvRep);
            if (info != 1)
            {
                throw new ArgumentException("Can't invert matrix to calculate gradients.");
            }
            // Lower triangle of lCopy now holds the gradient pre-factor
            // (inverse covariance minus alpha outer product); only the lower
            // half is filled and used below.
            for (int i = 0; i < n; i++)
            {
                for (int j = 0; j <= i; j++)
                {
                    lCopy[i, j] = lCopy[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
                }
            }

            // Gradient w.r.t. the noise variance: trace of the pre-factor.
            double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => lCopy[i, i]).Sum();

            // Gradient of each mean hyperparameter: -<dm/dtheta_k, alpha>.
            double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
            for (int k = 0; k < meanGradients.Length; k++)
            {
                var meanGrad = Enumerable.Range(0, alpha.Length)
                               .Select(r => mean.Gradient(x, r, k));
                meanGradients[k] = -Util.ScalarProd(meanGrad, alpha);
            }

            // Gradients of the covariance hyperparameters: sum over the lower
            // triangle; the diagonal is weighted 0.5 (off-diagonal terms stand
            // in for both symmetric halves).
            double[] covGradients = new double[covarianceFunction.GetNumberOfParameters(nAllowedVariables)];
            if (covGradients.Length > 0)
            {
                for (int i = 0; i < n; i++)
                {
                    for (int j = 0; j < i; j++)
                    {
                        var g = cov.CovarianceGradient(x, i, j).ToArray();
                        for (int k = 0; k < covGradients.Length; k++)
                        {
                            covGradients[k] += lCopy[i, j] * g[k];
                        }
                    }

                    var gDiag = cov.CovarianceGradient(x, i, i).ToArray();
                    for (int k = 0; k < covGradients.Length; k++)
                    {
                        // diag
                        covGradients[k] += 0.5 * lCopy[i, i] * gDiag[k];
                    }
                }
            }

            // Gradient layout: [mean params | covariance params | noise].
            hyperparameterGradients =
                meanGradients
                .Concat(covGradients)
                .Concat(new double[] { noiseGradient }).ToArray();
        }
 private static double[,] GetData(IDataset ds, IEnumerable<string> allowedInputs, IEnumerable<int> rows, Scaling scaling) {
   // Prepare the input matrix; scale the values when a scaling is configured.
   if (scaling == null) {
     return AlglibUtil.PrepareInputMatrix(ds, allowedInputs, rows);
   }
   return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputs, rows, scaling);
 }
    private void CalculateModel(IDataset ds, IEnumerable<int> rows, bool scaleInputs = true) {
      // Trains the GP model: computes the input matrix x, the Cholesky factor
      // l of the covariance, the weights alpha, the negative log likelihood
      // and the hyperparameter gradients (mean, covariance, noise).
      // NOTE(review): assumes meanParameter/covarianceParameter are already
      // set by the caller — confirm.
      this.trainingDataset = (IDataset)ds.Clone();
      this.trainingRows = rows.ToArray();
      this.inputScaling = scaleInputs ? new Scaling(ds, allowedInputVariables, rows) : null;

      x = GetData(ds, this.allowedInputVariables, this.trainingRows, this.inputScaling);

      IEnumerable<double> y;
      y = ds.GetDoubleValues(TargetVariable, rows);

      // n = number of training points.
      int n = x.GetLength(0);

      var columns = Enumerable.Range(0, x.GetLength(1)).ToArray();
      // calculate cholesky decomposed (lower triangular) covariance matrix
      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, columns);
      // presumably CalculateL factors the covariance scaled by sqrSigmaNoise
      // (see /sqrSigmaNoise corrections below) — TODO confirm.
      this.l = CalculateL(x, cov, sqrSigmaNoise);

      // calculate mean
      var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, columns);
      // Prior mean at every training point.
      double[] m = Enumerable.Range(0, x.GetLength(0))
        .Select(r => mean.Mean(x, r))
        .ToArray();

      // calculate sum of diagonal elements for likelihood
      // (sum log diag(L) = 0.5 * log det of the factored matrix)
      double diagSum = Enumerable.Range(0, n).Select(i => Math.Log(l[i, i])).Sum();

      // solve for alpha
      // ym = y - m: mean-corrected targets.
      double[] ym = y.Zip(m, (a, b) => a - b).ToArray();

      int info;
      alglib.densesolverreport denseSolveRep;

      // Solve the SPD system using the existing lower-triangular factor.
      alglib.spdmatrixcholeskysolve(l, n, false, ym, out info, out denseSolveRep, out alpha);
      for (int i = 0; i < alpha.Length; i++)
        // Undo the sqrSigmaNoise scaling of the factored matrix.
        alpha[i] = alpha[i] / sqrSigmaNoise;
      negativeLogLikelihood = 0.5 * Util.ScalarProd(ym, alpha) + diagSum + (n / 2.0) * Math.Log(2.0 * Math.PI * sqrSigmaNoise);

      // derivatives
      int nAllowedVariables = x.GetLength(1);

      alglib.matinvreport matInvRep;
      // Copy: the Cholesky inverse overwrites its input in place.
      double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
      Array.Copy(l, lCopy, lCopy.Length);

      alglib.spdmatrixcholeskyinverse(ref lCopy, n, false, out info, out matInvRep);
      if (info != 1) throw new ArgumentException("Can't invert matrix to calculate gradients.");
      // Lower triangle of lCopy becomes the gradient pre-factor
      // (inverse covariance minus alpha outer product).
      for (int i = 0; i < n; i++) {
        for (int j = 0; j <= i; j++)
          lCopy[i, j] = lCopy[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
      }

      // Noise-variance gradient: trace of the pre-factor.
      double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => lCopy[i, i]).Sum();

      // Mean hyperparameter gradients: -<dm/dtheta_k, alpha>.
      double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
      for (int k = 0; k < meanGradients.Length; k++) {
        var meanGrad = new double[alpha.Length];
        for (int g = 0; g < meanGrad.Length; g++)
          meanGrad[g] = mean.Gradient(x, g, k);
        meanGradients[k] = -Util.ScalarProd(meanGrad, alpha);
      }

      // Covariance hyperparameter gradients over the lower triangle; the
      // diagonal gets weight 0.5 (off-diagonal terms cover both halves of the
      // symmetric matrix).
      double[] covGradients = new double[covarianceFunction.GetNumberOfParameters(nAllowedVariables)];
      if (covGradients.Length > 0) {
        for (int i = 0; i < n; i++) {
          for (int j = 0; j < i; j++) {
            var g = cov.CovarianceGradient(x, i, j);
            for (int k = 0; k < covGradients.Length; k++) {
              covGradients[k] += lCopy[i, j] * g[k];
            }
          }

          var gDiag = cov.CovarianceGradient(x, i, i);
          for (int k = 0; k < covGradients.Length; k++) {
            // diag
            covGradients[k] += 0.5 * lCopy[i, i] * gDiag[k];
          }
        }
      }

      // Gradient layout: [mean params | covariance params | noise].
      hyperparameterGradients =
        meanGradients
        .Concat(covGradients)
        .Concat(new double[] { noiseGradient }).ToArray();

    }
    private GaussianProcessModel(GaussianProcessModel original, Cloner cloner)
      : base(original, cloner) {
      // Cloning constructor: deep-clone parameterizable parts, value-copy the
      // scalars, and share the training arrays with the original.
      meanFunction = cloner.Clone(original.meanFunction);
      covarianceFunction = cloner.Clone(original.covarianceFunction);
      inputScaling = original.inputScaling == null ? null : cloner.Clone(original.inputScaling);
      trainingDataset = cloner.Clone(original.trainingDataset);
      negativeLogLikelihood = original.negativeLogLikelihood;
      sqrSigmaNoise = original.sqrSigmaNoise;
      // Hyperparameter arrays are mutable; clone them when present.
      meanParameter = original.meanParameter == null ? null : (double[])original.meanParameter.Clone();
      covarianceParameter = original.covarianceParameter == null ? null : (double[])original.covarianceParameter.Clone();

      // Shared on purpose (shallow copies): never modified after training.
      trainingRows = original.trainingRows;
      allowedInputVariables = original.allowedInputVariables;
      alpha = original.alpha;
      l = original.l;
      x = original.x;
    }
Example #9
0
        private static double[,] GetData(IDataset ds, IEnumerable <string> allowedInputs, IEnumerable <int> rows, Scaling scaling)
        {
            // Without a scaling the dataset builds the matrix directly.
            if (scaling == null)
            {
                return(ds.ToArray(allowedInputs, rows));
            }

            // BackwardsCompatibility3.3
            #region Backwards compatible code, remove with 3.4
            // TODO: completely remove Scaling class
            var inputList = allowedInputs.ToList();
            var rowList   = rows.ToList();

            var result = new double[rowList.Count, inputList.Count];

            // Fill the matrix column by column with the scaled values of each
            // input variable; values arrive in row order.
            for (int c = 0; c < inputList.Count; c++)
            {
                int r = 0;
                foreach (var v in scaling.GetScaledValues(ds, inputList[c], rowList))
                {
                    result[r, c] = v;
                    r++;
                }
            }
            return(result);

            #endregion
        }
Example #10
0
        public static double[,] PrepareAndScaleInputMatrix(IDataset dataset, IEnumerable <string> variables, IEnumerable <int> rows, Scaling scaling)
        {
            // Builds a rows x variables matrix of scaled input values.
            //
            // Fix: the original materialized variablesList/rowsList but then
            // re-enumerated the caller's `variables` and `rows` sequences
            // (`rows` once per column). For lazy or non-repeatable
            // IEnumerables that is wasteful or incorrect; enumerate the
            // materialized lists instead.
            List <string> variablesList = variables.ToList();
            List <int>    rowsList      = rows.ToList();

            double[,] matrix = new double[rowsList.Count, variablesList.Count];

            int col = 0;

            foreach (string column in variablesList)
            {
                // One scaled column per variable; values come back in row order.
                var values = scaling.GetScaledValues(dataset, column, rowsList);
                int row    = 0;
                foreach (var value in values)
                {
                    matrix[row, col] = value;
                    row++;
                }
                col++;
            }

            return(matrix);
        }
Example #11
0
 protected Scaling(Scaling original, Cloner cloner)
   : base(original, cloner) {
   // Cloning constructor: copy the per-variable scaling parameters.
   // Tuple<,> is immutable, so the value can be shared directly instead of
   // being rebuilt with Tuple.Create (identical behavior, one less
   // allocation per entry).
   foreach (var pair in original.scalingParameters)
     scalingParameters.Add(pair.Key, pair.Value);
 }