// Cloning constructor: copies the accumulated online state so the clone
// resumes from statistics identical to the original's.
protected OnlineMeanSquaredErrorCalculator(OnlineMeanSquaredErrorCalculator original, Cloner cloner)
    : base(original, cloner)
{
    this.sse = original.sse;               // running sum of squared errors
    this.n = original.n;                   // number of samples added so far
    this.errorState = original.errorState; // last reported calculator error
}
Example 2
0
        /// <summary>
        /// Computes the mean squared error between two equally long value sequences
        /// using an online (single-pass) calculator.
        /// </summary>
        /// <param name="originalValues">The observed (target) values.</param>
        /// <param name="estimatedValues">The predicted values.</param>
        /// <param name="errorState">Set to the calculator's error state; the return value is only meaningful when this is <c>None</c>.</param>
        /// <returns>The mean squared error of the two sequences.</returns>
        /// <exception cref="ArgumentException">Thrown when the two sequences differ in length.</exception>
        public static double Calculate(IEnumerable<double> originalValues, IEnumerable<double> estimatedValues, out OnlineCalculatorError errorState)
        {
            OnlineMeanSquaredErrorCalculator mseCalculator = new OnlineMeanSquaredErrorCalculator();

            // IEnumerator<T> is IDisposable; dispose both enumerators even on the exception path.
            using (IEnumerator<double> originalEnumerator = originalValues.GetEnumerator())
            using (IEnumerator<double> estimatedEnumerator = estimatedValues.GetEnumerator())
            {
                // always move forward both enumerators (do not use short-circuit evaluation!)
                while (originalEnumerator.MoveNext() & estimatedEnumerator.MoveNext())
                {
                    mseCalculator.Add(originalEnumerator.Current, estimatedEnumerator.Current);
                    if (mseCalculator.ErrorState != OnlineCalculatorError.None)
                    {
                        break;
                    }
                }

                // check if both enumerators are at the end to make sure both enumerations have the same length
                if (mseCalculator.ErrorState == OnlineCalculatorError.None &&
                    (estimatedEnumerator.MoveNext() || originalEnumerator.MoveNext()))
                {
                    throw new ArgumentException("Number of elements in originalValues and estimatedValues enumerations doesn't match.");
                }

                errorState = mseCalculator.ErrorState;
                return(mseCalculator.MeanSquaredError);
            }
        }
Example 3
0
        /// <summary>
        /// Recalculates the regression quality measures (MSE, MAE, R², relative error,
        /// NMSE, RMSE and — for old results — ME) on the training and test partitions
        /// and stores them in the corresponding result properties.
        /// Each measure falls back to <c>double.NaN</c> when its calculator reports an error.
        /// </summary>
        protected void CalculateRegressionResults()
        {
            // Materialize each sequence once: every sequence is consumed by several
            // online calculators below, and the deferred enumerables (model evaluation,
            // dataset access) would otherwise be re-evaluated on every enumeration.
            IEnumerable <double> estimatedTrainingValues = EstimatedTrainingValues.ToArray(); // cache values
            IEnumerable <double> originalTrainingValues  = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).ToArray();
            IEnumerable <double> estimatedTestValues     = EstimatedTestValues.ToArray();     // cache values
            IEnumerable <double> originalTestValues      = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices).ToArray();

            OnlineCalculatorError errorState;
            double trainingMSE = OnlineMeanSquaredErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            TrainingMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingMSE : double.NaN;
            double testMSE = OnlineMeanSquaredErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);

            TestMeanSquaredError = errorState == OnlineCalculatorError.None ? testMSE : double.NaN;

            double trainingMAE = OnlineMeanAbsoluteErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            TrainingMeanAbsoluteError = errorState == OnlineCalculatorError.None ? trainingMAE : double.NaN;
            double testMAE = OnlineMeanAbsoluteErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);

            TestMeanAbsoluteError = errorState == OnlineCalculatorError.None ? testMAE : double.NaN;

            // R² is reported as the square of Pearson's correlation coefficient.
            double trainingR = OnlinePearsonsRCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            TrainingRSquared = errorState == OnlineCalculatorError.None ? trainingR * trainingR : double.NaN;
            double testR = OnlinePearsonsRCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);

            TestRSquared = errorState == OnlineCalculatorError.None ? testR * testR : double.NaN;

            double trainingRelError = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            TrainingRelativeError = errorState == OnlineCalculatorError.None ? trainingRelError : double.NaN;
            double testRelError = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);

            TestRelativeError = errorState == OnlineCalculatorError.None ? testRelError : double.NaN;

            double trainingNMSE = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            TrainingNormalizedMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingNMSE : double.NaN;
            double testNMSE = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);

            TestNormalizedMeanSquaredError = errorState == OnlineCalculatorError.None ? testNMSE : double.NaN;

            // RMSE derived from the already-computed MSE values (NaN propagates through Sqrt).
            TrainingRootMeanSquaredError = Math.Sqrt(TrainingMeanSquaredError);
            TestRootMeanSquaredError     = Math.Sqrt(TestMeanSquaredError);

            // BackwardsCompatibility3.3
            #region Backwards compatible code, remove with 3.5
            // Mean-error results only exist in solutions created by older versions;
            // recompute them only when the result entries are already present.
            if (ContainsKey(TrainingMeanErrorResultName))
            {
                double trainingME = OnlineMeanErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);
                TrainingMeanError = errorState == OnlineCalculatorError.None ? trainingME : double.NaN;
            }
            if (ContainsKey(TestMeanErrorResultName))
            {
                double testME = OnlineMeanErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);
                TestMeanError = errorState == OnlineCalculatorError.None ? testME : double.NaN;
            }
            #endregion
        }
        /// <summary>
        /// Computes prognosis (multi-step-ahead forecast) quality measures on the training
        /// partition: error measures on the flattened prognosed values, plus directional
        /// symmetry and Theil's U statistics against a constant-mean and an AR(1) baseline.
        /// </summary>
        private void CalculateTrainingPrognosisResults()
        {
            OnlineCalculatorError errorState;
            var problemData = Solution.ProblemData;

            // Nothing to evaluate without training rows.
            if (!problemData.TrainingIndices.Any())
            {
                return;
            }
            var model = Solution.Model;
            //mean model
            double trainingMean = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).Average();
            var    meanModel    = new ConstantModel(trainingMean);

            //AR1 model
            double alpha, beta;
            // Target values immediately preceding each training row; these serve as the
            // start values for each prognosis window and as the AR(1) regressors.
            // NOTE(review): Where(r => r > 0) also drops index 0 (the predecessor of row 1);
            // if row 0 is a valid dataset row this looks like an off-by-one vs. r >= 0 — confirm.
            IEnumerable <double> trainingStartValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Select(r => r - 1).Where(r => r > 0)).ToList();

            // Fit y[t] ≈ alpha + beta * y[t-1] via linear scaling to obtain the AR(1) baseline.
            OnlineLinearScalingParameterCalculator.Calculate(problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Where(x => x > 0)), trainingStartValues, out alpha, out beta, out errorState);
            var AR1model = new TimeSeriesPrognosisAutoRegressiveModel(problemData.TargetVariable, new double[] { beta }, alpha);

            // Forecast horizon per training row, clipped so no window runs past the
            // end of the training partition.  `trainingHorizon` is a member field.
            var trainingHorizions = problemData.TrainingIndices.Select(r => Math.Min(trainingHorizon, problemData.TrainingPartition.End - r)).ToList();
            // Zip with Enumerable.Range(start, count) turns each (row, horizon) pair into
            // the index window [row, row + horizon) whose target values are extracted.
            IEnumerable <IEnumerable <double> > trainingTargetValues         = problemData.TrainingIndices.Zip(trainingHorizions, Enumerable.Range).Select(r => problemData.Dataset.GetDoubleValues(problemData.TargetVariable, r)).ToList();
            IEnumerable <IEnumerable <double> > trainingEstimatedValues      = model.GetPrognosedValues(problemData.Dataset, problemData.TrainingIndices, trainingHorizions).ToList();
            IEnumerable <IEnumerable <double> > trainingMeanModelPredictions = meanModel.GetPrognosedValues(problemData.Dataset, problemData.TrainingIndices, trainingHorizions).ToList();
            IEnumerable <IEnumerable <double> > trainingAR1ModelPredictions  = AR1model.GetPrognosedValues(problemData.Dataset, problemData.TrainingIndices, trainingHorizions).ToList();

            // Flatten the per-row windows so the scalar error measures treat all
            // prognosed values as one long sequence.
            IEnumerable <double> originalTrainingValues  = trainingTargetValues.SelectMany(x => x).ToList();
            IEnumerable <double> estimatedTrainingValues = trainingEstimatedValues.SelectMany(x => x).ToList();

            // Each measure falls back to NaN when its online calculator reports an error.
            double trainingMSE = OnlineMeanSquaredErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            PrognosisTrainingMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingMSE : double.NaN;
            double trainingMAE = OnlineMeanAbsoluteErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            PrognosisTrainingMeanAbsoluteError = errorState == OnlineCalculatorError.None ? trainingMAE : double.NaN;
            double trainingR = OnlinePearsonsRCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            PrognosisTrainingRSquared = errorState == OnlineCalculatorError.None ? trainingR * trainingR : double.NaN;
            double trainingRelError = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            PrognosisTrainingRelativeError = errorState == OnlineCalculatorError.None ? trainingRelError : double.NaN;
            double trainingNMSE = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            PrognosisTrainingNormalizedMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingNMSE : double.NaN;
            double trainingME = OnlineMeanErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);

            PrognosisTrainingMeanError = errorState == OnlineCalculatorError.None ? trainingME : double.NaN;

            // The window-based statistics use different error fallbacks:
            // 0.0 for the symmetry measures, +infinity for Theil's U.
            PrognosisTrainingDirectionalSymmetry         = OnlineDirectionalSymmetryCalculator.Calculate(trainingStartValues, trainingTargetValues, trainingEstimatedValues, out errorState);
            PrognosisTrainingDirectionalSymmetry         = errorState == OnlineCalculatorError.None ? PrognosisTrainingDirectionalSymmetry : 0.0;
            PrognosisTrainingWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(trainingStartValues, trainingTargetValues, trainingEstimatedValues, out errorState);
            PrognosisTrainingWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? PrognosisTrainingWeightedDirectionalSymmetry : 0.0;
            PrognosisTrainingTheilsUStatisticAR1         = OnlineTheilsUStatisticCalculator.Calculate(trainingStartValues, trainingTargetValues, trainingAR1ModelPredictions, trainingEstimatedValues, out errorState);
            PrognosisTrainingTheilsUStatisticAR1         = errorState == OnlineCalculatorError.None ? PrognosisTrainingTheilsUStatisticAR1 : double.PositiveInfinity;
            PrognosisTrainingTheilsUStatisticMean        = OnlineTheilsUStatisticCalculator.Calculate(trainingStartValues, trainingTargetValues, trainingMeanModelPredictions, trainingEstimatedValues, out errorState);
            PrognosisTrainingTheilsUStatisticMean        = errorState == OnlineCalculatorError.None ? PrognosisTrainingTheilsUStatisticMean : double.PositiveInfinity;
        }
Example 5
0
        /// <summary>
        /// Post-deserialization hook: backfills the model's target variable and adds
        /// result entries (MAE, RMSE on both partitions) that did not exist in
        /// solutions serialized by older versions.
        /// </summary>
        private void AfterDeserialization()
        {
            // Older serialized models may lack a target variable; take it from the problem data.
            if (string.IsNullOrEmpty(Model.TargetVariable))
            {
                Model.TargetVariable = this.ProblemData.TargetVariable;
            }

            // BackwardsCompatibility3.4
            #region Backwards compatible code, remove with 3.5
            OnlineCalculatorError errorState;

            // Each block: register the missing result entry first, then compute its value
            // (NaN when the calculator reports an error).
            if (!ContainsKey(TrainingMeanAbsoluteErrorResultName))
            {
                Add(new Result(TrainingMeanAbsoluteErrorResultName, "Mean of absolute errors of the model on the training partition", new DoubleValue()));
                double maeTrain = OnlineMeanAbsoluteErrorCalculator.Calculate(EstimatedTrainingValues, ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices), out errorState);
                TrainingMeanAbsoluteError = errorState == OnlineCalculatorError.None ? maeTrain : double.NaN;
            }

            if (!ContainsKey(TestMeanAbsoluteErrorResultName))
            {
                Add(new Result(TestMeanAbsoluteErrorResultName, "Mean of absolute errors of the model on the test partition", new DoubleValue()));
                double maeTest = OnlineMeanAbsoluteErrorCalculator.Calculate(EstimatedTestValues, ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices), out errorState);
                TestMeanAbsoluteError = errorState == OnlineCalculatorError.None ? maeTest : double.NaN;
            }

            if (!ContainsKey(TrainingRootMeanSquaredErrorResultName))
            {
                Add(new Result(TrainingRootMeanSquaredErrorResultName, TrainingRootMeanSquaredErrorResultDescription, new DoubleValue()));
                double mseTrain = OnlineMeanSquaredErrorCalculator.Calculate(EstimatedTrainingValues, ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices), out errorState);
                TrainingRootMeanSquaredError = errorState == OnlineCalculatorError.None ? Math.Sqrt(mseTrain) : double.NaN;
            }

            if (!ContainsKey(TestRootMeanSquaredErrorResultName))
            {
                Add(new Result(TestRootMeanSquaredErrorResultName, TestRootMeanSquaredErrorResultDescription, new DoubleValue()));
                double mseTest = OnlineMeanSquaredErrorCalculator.Calculate(EstimatedTestValues, ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices), out errorState);
                TestRootMeanSquaredError = errorState == OnlineCalculatorError.None ? Math.Sqrt(mseTest) : double.NaN;
            }
            #endregion
        }
    /// <summary>
    /// Computes the mean squared error between two equally long value sequences
    /// using an online (single-pass) calculator.
    /// </summary>
    /// <param name="originalValues">The observed (target) values.</param>
    /// <param name="estimatedValues">The predicted values.</param>
    /// <param name="errorState">Set to the calculator's error state; the return value is only meaningful when this is <c>None</c>.</param>
    /// <returns>The mean squared error of the two sequences.</returns>
    /// <exception cref="ArgumentException">Thrown when the two sequences differ in length.</exception>
    public static double Calculate(IEnumerable<double> originalValues, IEnumerable<double> estimatedValues, out OnlineCalculatorError errorState) {
      OnlineMeanSquaredErrorCalculator mseCalculator = new OnlineMeanSquaredErrorCalculator();

      // IEnumerator<T> is IDisposable; dispose both enumerators even on the exception path.
      using (IEnumerator<double> originalEnumerator = originalValues.GetEnumerator())
      using (IEnumerator<double> estimatedEnumerator = estimatedValues.GetEnumerator()) {
        // always move forward both enumerators (do not use short-circuit evaluation!)
        while (originalEnumerator.MoveNext() & estimatedEnumerator.MoveNext()) {
          mseCalculator.Add(originalEnumerator.Current, estimatedEnumerator.Current);
          if (mseCalculator.ErrorState != OnlineCalculatorError.None) break;
        }

        // check if both enumerators are at the end to make sure both enumerations have the same length
        if (mseCalculator.ErrorState == OnlineCalculatorError.None &&
           (estimatedEnumerator.MoveNext() || originalEnumerator.MoveNext())) {
          throw new ArgumentException("Number of elements in originalValues and estimatedValues enumerations doesn't match.");
        }

        errorState = mseCalculator.ErrorState;
        return mseCalculator.MeanSquaredError;
      }
    }
Example 7
0
        /// <summary>
        /// Recalculates MSE, R² and the normalized Gini coefficient on the training and
        /// test partitions.  Each measure falls back to <c>double.NaN</c> when its
        /// calculator reports an error.
        /// </summary>
        protected void CalculateRegressionResults()
        {
            // Materialize once; each array is consumed by several calculators below.
            double[] estimatedTrain = EstimatedTrainingValues.ToArray(); // cache values
            double[] targetTrain = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).ToArray();
            double[] estimatedTest = EstimatedTestValues.ToArray();      // cache values
            double[] targetTest = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices).ToArray();

            OnlineCalculatorError errorState;

            double mse = OnlineMeanSquaredErrorCalculator.Calculate(targetTrain, estimatedTrain, out errorState);
            TrainingMeanSquaredError = errorState == OnlineCalculatorError.None ? mse : double.NaN;

            mse = OnlineMeanSquaredErrorCalculator.Calculate(targetTest, estimatedTest, out errorState);
            TestMeanSquaredError = errorState == OnlineCalculatorError.None ? mse : double.NaN;

            // R² is reported as the square of Pearson's correlation coefficient.
            double r = OnlinePearsonsRCalculator.Calculate(targetTrain, estimatedTrain, out errorState);
            TrainingRSquared = errorState == OnlineCalculatorError.None ? r * r : double.NaN;

            r = OnlinePearsonsRCalculator.Calculate(targetTest, estimatedTest, out errorState);
            TestRSquared = errorState == OnlineCalculatorError.None ? r * r : double.NaN;

            // Compute both Gini values first, then assign both properties
            // (preserves the original assignment order).
            double giniTrain = NormalizedGiniCalculator.Calculate(targetTrain, estimatedTrain, out errorState);
            giniTrain = errorState == OnlineCalculatorError.None ? giniTrain : double.NaN;

            double giniTest = NormalizedGiniCalculator.Calculate(targetTest, estimatedTest, out errorState);
            giniTest = errorState == OnlineCalculatorError.None ? giniTest : double.NaN;

            TrainingNormalizedGiniCoefficient = giniTrain;
            TestNormalizedGiniCoefficient = giniTest;
        }