private GaussianProcessModel(GaussianProcessModel original, Cloner cloner)
            : base(original, cloner)
        {
            this.meanFunction       = cloner.Clone(original.meanFunction);
            this.covarianceFunction = cloner.Clone(original.covarianceFunction);
            if (original.inputScaling != null)
            {
                this.inputScaling = cloner.Clone(original.inputScaling);
            }
            this.trainingDataset       = cloner.Clone(original.trainingDataset);
            this.negativeLogLikelihood = original.negativeLogLikelihood;
            this.targetVariable        = original.targetVariable;
            this.sqrSigmaNoise         = original.sqrSigmaNoise;
            if (original.meanParameter != null)
            {
                this.meanParameter = (double[])original.meanParameter.Clone();
            }
            if (original.covarianceParameter != null)
            {
                this.covarianceParameter = (double[])original.covarianceParameter.Clone();
            }

            // shallow copies of arrays suffice because their contents are never modified after construction
            this.trainingRows          = original.trainingRows;
            this.allowedInputVariables = original.allowedInputVariables;
            this.alpha = original.alpha;
            this.l     = original.l;
            this.x     = original.x;
        }
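
        // Hedged sketch (not part of the original excerpt): in HeuristicLab, a cloning
        // constructor like the one above is conventionally paired with a Clone override
        // that routes deep-cloning through the Cloner, so shared sub-objects are cloned
        // only once. Sketch assuming the usual HeuristicLab pattern:
        public override IDeepCloneable Clone(Cloner cloner)
        {
            return new GaussianProcessModel(this, cloner);
        }
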
        private void ObjectiveFunction(double[] x, ref double func, double[] grad, object obj)
        {
            // we want to optimize the model likelihood by changing the hyperparameters and also return the gradient for each hyperparameter
            var data                  = (Tuple<IDataset, string, string[], int[], IMeanFunction, ICovarianceFunction, double[]>)obj;
            var ds                    = data.Item1;
            var targetVariable        = data.Item2;
            var allowedInputVariables = data.Item3;
            var trainingRows          = data.Item4;
            var meanFunction          = data.Item5;
            var covarianceFunction    = data.Item6;
            var bestObjValue          = data.Item7;
            var hyperParameters       = x; // the decision variable vector

            try
            {
                var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, trainingRows, hyperParameters, meanFunction, covarianceFunction);

                func            = model.NegativeLogLikelihood;      // mincgoptimize minimizes, so we return the negative log likelihood
                bestObjValue[0] = Math.Max(bestObjValue[0], -func); // the underlying problem is likelihood maximization, so track the largest value seen
                var gradients = model.HyperparameterGradients;
                Array.Copy(gradients, grad, gradients.Length);
            }
            catch (ArgumentException)
            {
                // building the GaussianProcessModel might fail, in this case we return the worst possible objective value
                func = 1.0E+300;
                Array.Clear(grad, 0, grad.Length);
            }
        }
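
        // Hedged sketch (not part of the original excerpt): ObjectiveFunction matches ALGLIB's
        // ndimensional_grad delegate, so it would typically be driven by alglib.mincgoptimize
        // roughly as below. The method name, initialHyperParameters, and maxIterations are
        // illustrative assumptions, not the original implementation.
        private double[] OptimizeHyperparametersSketch(IDataset ds, string targetVariable,
                                                       string[] allowedInputVariables, int[] trainingRows,
                                                       IMeanFunction meanFunction, ICovarianceFunction covarianceFunction,
                                                       double[] initialHyperParameters, int maxIterations)
        {
            // state object unpacked by ObjectiveFunction; bestObjValue[0] tracks the best likelihood seen so far
            var bestObjValue = new double[] { double.MinValue };
            var state = new Tuple<IDataset, string, string[], int[], IMeanFunction, ICovarianceFunction, double[]>(
                ds, targetVariable, allowedInputVariables, trainingRows, meanFunction, covarianceFunction, bestObjValue);

            // conjugate-gradient minimization of the negative log likelihood over the hyperparameter vector
            alglib.mincgstate cgState;
            alglib.mincgcreate(initialHyperParameters, out cgState);
            alglib.mincgsetcond(cgState, 0, 0, 0, maxIterations);
            alglib.mincgoptimize(cgState, ObjectiveFunction, null, state);

            double[] optimizedHyperParameters;
            alglib.mincgreport report;
            alglib.mincgresults(cgState, out optimizedHyperParameters, out report);
            return optimizedHyperParameters;
        }
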
        private IItem CreateSolution()
        {
            var problemData           = ProblemData;
            var ds                    = problemData.Dataset;
            var targetVariable        = problemData.TargetVariable;
            var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
            var trainingRows          = problemData.TrainingIndices.ToArray();

            lock (problemStateLocker)
            {
                var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, trainingRows, bestHyperParameters, (IMeanFunction)meanFunc.Clone(), (ICovarianceFunction)covFunc.Clone());
                model.FixParameters();
                return model.CreateRegressionSolution((IRegressionProblemData)ProblemData.Clone());
            }
        }