Example #1
            /// <summary>
            /// VIDYA
            /// </summary>
            /// <param name="pNav">Navigator</param>
            /// <param name="pSource">Field Source</param>
            /// <param name="periods">Periods</param>
            /// <param name="R2Scale">R2 Scale</param>
            /// <param name="Alias">Alias</param>
            /// <returns>Recordset</returns>
            public Recordset VIDYA(Navigator pNav, Field pSource, int periods, double R2Scale, string Alias)
            {
                int Record;
                LinearRegression LR = new LinearRegression();

                int RecordCount = pNav.RecordCount;

                Field Field1 = new Field(RecordCount, Alias);

                Recordset Results = LR.Regression(pNav, pSource, periods);

                const int Start = 2;

                pNav.Position = Start;
                for (Record = Start; Record < RecordCount + 1; Record++)
                {
                    pNav.MovePrevious();
                    double PreviousValue = pSource.ValueEx(pNav.Position);
                    pNav.MoveNext();
                    double R2Scaled = Results.ValueEx("RSquared", pNav.Position) * R2Scale;
                    Field1.Value(pNav.Position, R2Scaled *
                                 pSource.Value(pNav.Position) + (1 - R2Scaled) * PreviousValue);
                    pNav.MoveNext();
                }//Record

                Results.AddField(Field1);
                return(Results);
            }
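The loop above amounts to an R²-weighted smoothing: each output value is k * price[t] + (1 - k) * price[t - 1], where k = RSquared[t] * R2Scale. Below is a minimal stand-alone sketch of that recurrence over plain arrays; the rSquared inputs are assumed to be precomputed (for example by a rolling regression such as LR.Regression above), so treat this as an illustration rather than the Navigator/Recordset implementation.

            public static double[] VidyaLikeSmoothing(double[] price, double[] rSquared, double r2Scale)
            {
                // Illustrative sketch only; mirrors the weighting used in the VIDYA loop above.
                var output = new double[price.Length];
                if (price.Length > 0)
                {
                    output[0] = price[0];                     // no previous value to blend with
                }
                for (int t = 1; t < price.Length; t++)
                {
                    double k = rSquared[t] * r2Scale;         // adaptive weight from the rolling R²
                    output[t] = k * price[t] + (1 - k) * price[t - 1];
                }
                return output;
            }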
Example #2
File: Program.cs Project: 9nick9/MRPCode
        static void RegressAtLag(CompanyObservations compObs, int lag, SqlCommand cmd, CompanyObservations.RegressionType type)
        {
            DoubleVector prices       = compObs.GetPrices(type, compObs.Length - lag, 1);
            DoubleVector laggedPrices = compObs.GetPrices(type, compObs.Length - lag, lag);

            //Regress price & priceLag
            LinearRegression      priceRegression = new LinearRegression(new DoubleMatrix(laggedPrices), prices);
            LinearRegressionAnova priceAnova      = new LinearRegressionAnova(priceRegression);

            DoubleMatrix companyGen = compObs.GetCompanyGenerated(type, lag);

            //Regress Price and compLag
            LinearRegression      companyRegression = new LinearRegression(companyGen, prices);
            LinearRegressionAnova companyAnova      = new LinearRegressionAnova(companyRegression);

            DoubleMatrix customerGen = compObs.GetCustomerGenerated(type, lag);

            //Regress Price and cust lag
            LinearRegression      customerRegression = new LinearRegression(customerGen, prices);
            LinearRegressionAnova customerAnova      = new LinearRegressionAnova(customerRegression);

            double priceExplain    = Double.IsInfinity(priceAnova.AdjustedRsquared) ? 0 : priceAnova.AdjustedRsquared;
            double companyExplain  = Double.IsInfinity(companyAnova.AdjustedRsquared) ? 0 : companyAnova.AdjustedRsquared;
            double customerExplain = Double.IsInfinity(customerAnova.AdjustedRsquared) ? 0 : customerAnova.AdjustedRsquared;

            cmd.CommandText = $"Insert INTO RegressionResults VALUES ({compObs.CompanyID}, {lag}, {priceExplain}, {customerExplain}, {companyExplain}, {(int) type})";
            cmd.ExecuteNonQuery();
        }
Example #3
        public static IInterpolator1D RegressNotContinuous(double[] Xs, double[] Ys, int nSegments)
        {
            var nSamples = Xs.Length;
            //var x = new double[nSegments + 1];
            //var y = new double[nSegments + 1];
            var samplesPerSegment = nSamples / nSegments;
            var interps           = new IInterpolator1D[nSegments];
            var uBounds           = new double[nSegments];

            for (var i = 0; i < nSegments; i++)
            {
                var sampleXs = Xs.Skip(i * samplesPerSegment).Take(samplesPerSegment).ToArray();
                var sampleYs = Ys.Skip(i * samplesPerSegment).Take(samplesPerSegment).ToArray();
                var lr       = LinearRegression.LinearRegressionVector(sampleXs, sampleYs);
                var xLo      = sampleXs.First();
                var xHi      = sampleXs.Last();
                var yLo      = lr.Alpha + lr.Beta * xLo;
                var yHi      = lr.Alpha + lr.Beta * xHi;

                interps[i] = InterpolatorFactory.GetInterpolator(new[] { xLo, xHi }, new[] { yLo, yHi }, Interpolator1DType.Linear);
                uBounds[i] = xHi;
            }
            uBounds[uBounds.Length - 1] = double.MaxValue;
            return(new NonContinuousInterpolator(uBounds, interps));
        }
Example #4
        public override void Init(IRepository repository, IEnumerable<string> releases)
        {
            base.Init(repository, releases);

            double dd = repository.SelectionDSL()
                .Commits().TillRevision(PredictionRelease)
                .Modifications().InCommits()
                .CodeBlocks().InModifications().CalculateDefectDensity(PredictionRelease);

            context.SetCommits(null, PredictionRelease);

            regression = new LinearRegression();
            foreach (var file in GetFilesInRevision(PredictionRelease))
            {
                double ddForFile = repository.SelectionDSL()
                    .Commits().TillRevision(PredictionRelease)
                    .Files().IdIs(file.ID)
                    .Modifications().InCommits().InFiles()
                    .CodeBlocks().InModifications().CalculateDefectDensity(PredictionRelease);

                if (ddForFile >= dd)
                {
                    context.SetFiles(e => e.IdIs(file.ID));

                    regression.AddTrainingData(
                        GetPredictorValuesFor(context)[0],
                        NumberOfFixedDefectsForFile(file.ID)
                    );
                }
            }

            regression.Train();
        }
Example #5
        public void Calculate()
        {
            double summary = 0;

            foreach (var pair in pairs)
            {
                var regression = new LinearRegression();
                regression.Compute(pair.DeltaValues.ToDecimal(), synthIndex.ToDecimal());

                pair.Weight = 1 / (1 + Math.Abs(regression.Beta.ToDouble()));

                summary += pair.Weight;
            }

            foreach (var pair in pairs)
            {
                pair.Weight      = pair.Weight / summary;
                pair.TradeVolume = Balance * pair.Weight;
            }

            foreach (var pair in pairs)
            {
                double beta   = pair.Regression.Beta.ToDouble();
                double weight = 1.0 / (1.0 + Math.Abs(beta));

                pair.X.TradeVolume = pair.TradeVolume * (weight * Math.Abs(beta));
                pair.Y.TradeVolume = pair.TradeVolume * weight;
            }
        }
Example #6
 public void CheckRegression(LinearRegression lr)
 {
     Assert.AreEqual(86.7434, lr.Alpha.ToDouble(), 0.001);
     Assert.AreEqual(0.0189, lr.Beta.ToDouble(), 0.001);
     Assert.AreEqual(0.4164, lr.RValue.ToDouble(), 0.001);
     Assert.AreEqual(0.1734, lr.RSquared.ToDouble(), 0.001);
 }
Example #7
        /// <summary>
        /// Calibrates an input image using VizieR stars catalogs.
        /// </summary>
        /// <param name="VizieRStars">List of VizieR stars.</param>
        /// <param name="DetectedStars">List of locally detected stars.</param>
        /// <param name="PositionError">Maximum position error of stars. Value in arcseconds.</param>
        /// <returns>The Zero Point magnitude.</returns>
        public static double Calibrate(List <StarInfo> VizieRStars, List <Star> DetectedStars, double PositionError)
        {
            double T = double.MaxValue, B = double.MinValue, L = double.MaxValue, R = double.MinValue;
            List <Tuple <Star, StarInfo> > Pairs = new List <Tuple <Star, StarInfo> >();

            foreach (Star s in DetectedStars)
            {
                if (s.EqCenter.Dec < T)
                {
                    T = s.EqCenter.Dec;
                }
                if (s.EqCenter.Dec > B)
                {
                    B = s.EqCenter.Dec;
                }
                if (s.EqCenter.RA > R)
                {
                    R = s.EqCenter.RA;
                }
                if (s.EqCenter.RA < L)
                {
                    L = s.EqCenter.RA;
                }
            }
            QuadTree <Star> Tree = new QuadTree <Star>(10, T, B, L, R);

            foreach (Star s in DetectedStars)
            {
                Tree.Add(s, s.EqCenter.RA, s.EqCenter.Dec);
            }

            foreach (StarInfo si in VizieRStars)
            {
                var Stars = Tree.Query(si.Coordinate.RA, si.Coordinate.Dec, Arc1Sec * PositionError);
                var St2   = Tree.Query(si.Coordinate.RA, si.Coordinate.Dec, Arc1Sec * PositionError * 5);
                if (St2.Count == 1 & Stars.Count == 1)
                {
                    Pairs.Add(new Tuple <Star, StarInfo>(Stars[0], si));
                }
            }

            if (Pairs.Count < 5)
            {
                throw new IndexOutOfRangeException("Could not find enough pairs for calibration.");
            }

            var Rpairs1 = Pairs.ToArray();
            var ZPSet   = Pairs.Select((x) => x.Item2.Magnitude + 2.5 * Math.Log10(x.Item1.Flux)).ToArray();

            Array.Sort(ZPSet, Rpairs1);
            var Rpairs2 = Rpairs1.Skip(Rpairs1.Length / 4).Take(Rpairs1.Length / 2).ToList();

            LinearRegression.LinearRegressionParameters LRP = LinearRegression.ComputeLinearRegression(Rpairs2.Select((x) => Math.Log10(x.Item1.Flux)).ToArray(), Rpairs2.Select((x) => x.Item2.Magnitude).ToArray());
            if (LRP.PearsonR * LRP.PearsonR < CalibMinR * CalibMinR)
            {
                throw new ArgumentOutOfRangeException("Could not calibrate the fluxes with enough accuracy.");
            }

            return(ZPSet[ZPSet.Length / 2]);
        }
Example #8
        private IRegressionSolution CreateLinearRegressionSolution()
        {
            if (Content == null)
            {
                throw new InvalidOperationException();
            }
            double rmse, cvRmsError;
            var    problemData = (IRegressionProblemData)ProblemData.Clone();

            if (!problemData.TrainingIndices.Any())
            {
                return(null);                              // don't create an LR model if the problem does not have a training set (e.g. loaded into an existing model)
            }
            //clear checked inputVariables
            foreach (var inputVariable in problemData.InputVariables.CheckedItems)
            {
                problemData.InputVariables.SetItemCheckedState(inputVariable.Value, false);
            }

            //check inputVariables used in the symbolic regression model
            var usedVariables =
                Content.Model.SymbolicExpressionTree.IterateNodesPostfix().OfType <VariableTreeNode>().Select(
                    node => node.VariableName).Distinct();

            foreach (var variable in usedVariables)
            {
                problemData.InputVariables.SetItemCheckedState(
                    problemData.InputVariables.First(x => x.Value == variable), true);
            }

            var solution = LinearRegression.CreateLinearRegressionSolution(problemData, out rmse, out cvRmsError);

            solution.Name = "Baseline (linear subset)";
            return(solution);
        }
Example #9
        private void PlotLinearRegression(List <GPoint> points)
        {
            var ls = new OxyPlot.Series.LineSeries()
            {
                MarkerType   = ShowMarker ? MarkerType.None : MarkerType.Plus,
                MarkerStroke = OxyPlot.OxyColors.Blue
            };

            LinearFunction func;

            if (LinearRegression.Regression(points.ToArray(), out func))
            {
                int    TOTAL = 10;
                double x     = points.First().X;
                double stepX = (points.Last().X - x) / TOTAL;
                for (int i = 0; i <= TOTAL; i++)
                {
                    double y = func.Slope * x + func.Intercept;
                    ls.Points.Add(new OxyPlot.DataPoint(x, y));
                    x += stepX;
                }
                ls.Title = $"Linear Regression(R: {func.CorrelationCoeff:f5})";
            }
            else
            {
                ls.Title = "Linear Regression(Failed)";
            }

            base.Series.Add(ls);
        }
Example #10
        /// <summary>
        /// Computes the next value of this indicator from the given state
        /// </summary>
        /// <param name="input">The input given to the indicator</param>
        /// <returns>
        /// A new value for this indicator
        /// </returns>
        protected override decimal ComputeNextValue(IndicatorDataPoint input)
        {
            _standardDeviation.Update(input);
            LinearRegression.Update(input);

            return(LinearRegression.Current.Value);
        }
Example #11
        private void RunRl()
        {
            var list             = GetAllModelForWeekday(selectedWeekday);
            var takeNumber       = list.Count;
            var comeHomingValues =
                list.OrderByDescending(u => u.Date)
                .Select(z => new TimeSpan(0, int.Parse(z.Hour), int.Parse(z.Minutes), 0))
                .Take(takeNumber)
                .ToList();
            var xs    = new double[takeNumber];
            var value = 0.1;

            for (var i = 0; i < xs.Length; i++)
            {
                xs[i]  = value;
                value += 0.1;
            }
            var comeHomingHourValues =
                comeHomingValues.Select(y => TimeConverter.ConvertFromTimeToDouble(y.TotalMinutes)).ToArray();
            double r;
            double yintercept;
            double slope;

            LinearRegression.Execute(xs, comeHomingHourValues, 1, takeNumber - 1, out r, out yintercept, out slope);
            var predictionValue = slope * (value + 0.1) + yintercept;

            _predictRL = predictionValue;
        }
Example #12
    public void Predict()
    {
        if (this.model == null)
        {
            Debug.Log("Create model before");
            return;
        }

        // Call lib to predict test spheres
        foreach (var testSphere in testSpheres)
        {
            var      position  = testSphere.position;
            double[] inputs    = { position.x, position.z };
            var      predicted = LinearRegression.linear_model_predict_regression(this.model.Value, 2, inputs);

            position = new Vector3(
                position.x,
                (float)predicted,
                position.z
                );
            testSphere.position = position;
        }

        Debug.Log("Predicted");
    }
Example #13
        public override void Init(IRepository repository, IEnumerable <string> releases)
        {
            base.Init(repository, releases);

            double dd = repository.SelectionDSL()
                        .Commits().TillRevision(PredictionRelease)
                        .Modifications().InCommits()
                        .CodeBlocks().InModifications().CalculateDefectDensity(PredictionRelease);

            context.SetCommits(null, PredictionRelease);

            regression = new LinearRegression();
            foreach (var file in GetFilesInRevision(PredictionRelease))
            {
                double ddForFile = repository.SelectionDSL()
                                   .Commits().TillRevision(PredictionRelease)
                                   .Files().IdIs(file.ID)
                                   .Modifications().InCommits().InFiles()
                                   .CodeBlocks().InModifications().CalculateDefectDensity(PredictionRelease);

                if (ddForFile >= dd)
                {
                    context.SetFiles(e => e.IdIs(file.ID));

                    regression.AddTrainingData(
                        GetPredictorValuesFor(context)[0],
                        NumberOfFixedDefectsForFile(file.ID)
                        );
                }
            }

            regression.Train();
        }
Example #14
        public void TestLinearRegression()
        {
            // Control.LinearAlgebraProvider = new MklLinearAlgebraProvider();
            // a simple dataset
            var x = DenseMatrix.OfArray(new double[, ] {
                { 1 }, { 2 }
            });
            var y = DenseVector.OfEnumerable(new double[] { 1, 2 });

            var clf = new LinearRegression();

            clf.Fit(x, y);

            Assert.AreEqual(1.0, clf.Coef.Column(0)[0], 1E-5);
            //Assert.AreEqual(0.0, clf.Intercept[0]);
            Assert.IsTrue(DenseVector.OfEnumerable(new double[] { 1, 2 }).AlmostEquals(clf.Predict(x).Column(0)));

            // test it also for degenerate input
            x = DenseMatrix.OfArray(new double[, ] {
                { 1 }
            });
            y = DenseVector.OfEnumerable(new double[] { 0 });


            clf = new LinearRegression(fitIntercept: false);
            clf.Fit(x, y);
            Assert.AreEqual(0.0, clf.Coef.Column(0)[0]);
            //assert_array_almost_equal(clf.intercept_, [0])
            Assert.AreEqual(0.0, clf.Predict(x).Column(0)[0]);
        }
Example #15
        public void ParallelNSampler()
        {
            var prior = from a in Normal(0, 100)
                        from b in Normal(0, 100)
                        select new Param(a, b);

            var smc = LinearRegression.CreateLinearRegression(prior, LinearRegression.BeachSandData).SmcStandard(5000);

            var sTimer = new Stopwatch();

            sTimer.Start();
            var serial = from s1 in smc
                         from s2 in smc
                         from s3 in smc
                         select new Tuple <Samples <Param>, Samples <Param>, Samples <Param> >(s1, s2, s3);
            var result = serial.SampleN(2).ToList();

            sTimer.Stop();

            var pTimer = new Stopwatch();

            pTimer.Start();
            var parallel = from s1 in Independent(smc)
                           from s2 in Independent(smc)
                           from s3 in Independent(smc)
                           from triple in RunIndependent(s1, s2, s3)
                           select triple;
            var thing = parallel.SampleNParallel(2).ToList();

            pTimer.Stop();


            Trace.WriteLine($"parallel: {pTimer.ElapsedMilliseconds}ms, serial: {sTimer.ElapsedMilliseconds}ms");
            Assert.IsTrue(pTimer.ElapsedMilliseconds < sTimer.ElapsedMilliseconds);
        }
Example #16
        /// <summary>
        ///      Computes the next value of this indicator from the given state
        /// </summary>
        /// <param name="time"></param>
        /// <param name="input">The input given to the indicator</param>
        /// <returns>
        ///      A new value for this indicator
        /// </returns>
        protected override DoubleArray Forward(long time, DoubleArray input)
        {
            _standardDeviation.Update(time, input);
            LinearRegression.Update(time, input);

            return(LinearRegression.Current);
        }
Example #17
        public void TestFitIntercept()
        {
            var x2 = DenseMatrix.OfArray(new[, ]
            {
                { 0.38349978, 0.61650022 },
                { 0.58853682, 0.41146318 }
            });
            var x3 = DenseMatrix.OfArray(new[, ]
            {
                { 0.27677969, 0.70693172, 0.01628859 },
                { 0.08385139, 0.20692515, 0.70922346 }
            });
            var y = DenseVector.OfEnumerable(new double[] { 1, 1 });


            var lr2WithoutIntercept = new LinearRegression(fitIntercept: false);

            lr2WithoutIntercept.Fit(x2, y);
            var lr2WithIntercept = new LinearRegression(fitIntercept: true);

            lr2WithIntercept.Fit(x2, y);

            var lr3WithoutIntercept = new LinearRegression(fitIntercept: false);

            lr3WithoutIntercept.Fit(x3, y);
            var lr3WithIntercept = new LinearRegression(fitIntercept: true);

            lr3WithIntercept.Fit(x3, y);

            Assert.AreEqual(lr2WithIntercept.Coef.Column(0).Count,
                            lr2WithoutIntercept.Coef.Column(0).Count);
            Assert.AreEqual(lr3WithIntercept.Coef.Column(0).Count,
                            lr3WithoutIntercept.Coef.Column(0).Count);
        }
Example #18
        public void Example4a()
        {
            var line = new LinearRegression(table4a);

            Assert.AreEqual(13.67, line.a, 0.01);
            Assert.AreEqual(7.03, line.b, 0.01);
        }
Example #19
        private IRegressionSolution CreateLinearRegressionSolution()
        {
            if (Content == null)
            {
                throw new InvalidOperationException();
            }
            double rmse, cvRmsError;
            var    problemData = (IRegressionProblemData)ProblemData.Clone();

            if (!problemData.TrainingIndices.Any())
            {
                return(null);                              // don't create an LR model if the problem does not have a training set (e.g. loaded into an existing model)
            }
            var usedVariables = Content.Model.VariablesUsedForPrediction;

            var usedDoubleVariables = usedVariables
                                      .Where(name => problemData.Dataset.VariableHasType <double>(name))
                                      .Distinct();

            var usedFactorVariables = usedVariables
                                      .Where(name => problemData.Dataset.VariableHasType <string>(name))
                                      .Distinct();

            // gkronber: for binary factors we actually produce a binary variable in the new dataset
            // but only if the variable is not used as a full factor anyway (LR creates binary columns anyway)
            var usedBinaryFactors =
                Content.Model.SymbolicExpressionTree.IterateNodesPostfix().OfType <BinaryFactorVariableTreeNode>()
                .Where(node => !usedFactorVariables.Contains(node.VariableName))
                .Select(node => Tuple.Create(node.VariableValue, node.VariableValue));

            // create a new problem and dataset
            var variableNames =
                usedDoubleVariables
                .Concat(usedFactorVariables)
                .Concat(usedBinaryFactors.Select(t => t.Item1 + "=" + t.Item2))
                .Concat(new string[] { problemData.TargetVariable })
                .ToArray();
            var variableValues =
                usedDoubleVariables.Select(name => (IList)problemData.Dataset.GetDoubleValues(name).ToList())
                .Concat(usedFactorVariables.Select(name => problemData.Dataset.GetStringValues(name).ToList()))
                .Concat(
                    // create binary variable
                    usedBinaryFactors.Select(t => problemData.Dataset.GetReadOnlyStringValues(t.Item1).Select(val => val == t.Item2 ? 1.0 : 0.0).ToList())
                    )
                .Concat(new[] { problemData.Dataset.GetDoubleValues(problemData.TargetVariable).ToList() });

            var newDs          = new Dataset(variableNames, variableValues);
            var newProblemData = new RegressionProblemData(newDs, variableNames.Take(variableNames.Length - 1), variableNames.Last());

            newProblemData.TrainingPartition.Start = problemData.TrainingPartition.Start;
            newProblemData.TrainingPartition.End   = problemData.TrainingPartition.End;
            newProblemData.TestPartition.Start     = problemData.TestPartition.Start;
            newProblemData.TestPartition.End       = problemData.TestPartition.End;

            var solution = LinearRegression.CreateLinearRegressionSolution(newProblemData, out rmse, out cvRmsError);

            solution.Name = "Baseline (linear subset)";
            return(solution);
        }
Example #20
        public void Example4b()
        {
            var line = new LinearRegression(table4b);

            Assert.AreEqual(-2.49, line.a, 0.01);
            Assert.AreEqual(244.18, line.b, 0.01);
            Assert.AreEqual(-0.767, line.r, 0.001);
        }
Example #21
        private IRegressionSolution CreateLinearSolution()
        {
            double rmsError, cvRmsError;
            var    solution = LinearRegression.CreateLinearRegressionSolution((IRegressionProblemData)ProblemData.Clone(), out rmsError, out cvRmsError);

            solution.Name = "Baseline (linear)";
            return(solution);
        }
Example #22
 public void PredictTest()
 {
     double[,] features = { { 1d, 0d }, { 1d, 1d }, { 1d, 2d }, { 1d, 3d }, { 1d, 4d } };
     double[] theta       = { 1d, -1d };
     double[] labels      = { 1d, 0d, -1d, -2d, -3d };
     double[] predictions = LinearRegression.Predict(features, theta);
     CollectionAssert.AreEqual(labels, predictions);
 }
Example #23
 /// <summary>
 /// Resets this indicator and all sub-indicators (StandardDeviation, LowerBand, MiddleBand, UpperBand)
 /// </summary>
 public override void Reset()
 {
     _standardDeviation.Reset();
     LinearRegression.Reset();
     LowerChannel.Reset();
     UpperChannel.Reset();
     base.Reset();
 }
Example #24
        public void PredictCurrencyExchangeRate_ValidData_ShouldSucceed(int[] xVals, double[] yVals, int monthToPredict, double expectedValue)
        {
            // Act
            double actualValue = LinearRegression.PredictCurrencyExchangeRate(xVals, yVals, monthToPredict);

            // Assert
            Assert.Equal(expectedValue, actualValue);
        }
Example #25
        public void PredictCurrencyExchangeRate_DifferentArrayLengths_ShouldFail(int[] xVals, double[] yVals, int monthToPredict)
        {
            // Act
            Action act = () => LinearRegression.PredictCurrencyExchangeRate(xVals, yVals, monthToPredict);

            // Assert
            Assert.Throws <ArgumentException>(act);
        }
Example #26
        private static void TestLinearRegression(FeatureVector training, FeatureVector test)
        {
            LinearRegression      lr          = new LinearRegression();
            LinearRegressionModel lrModel     = (LinearRegressionModel)lr.Fit(training);
            FeatureVector         predictions = lrModel.transform(test);

            PrintPredictionsAndEvaluate(predictions);
        }
Example #27
 public void PerfectFitTest()
 {
     double[,] features = { { 1d, 0d }, { 1d, 1d }, { 1d, 2d }, { 1d, 3d }, { 1d, 4d } };
     double[] theta  = { 1d, -1d };
     double[] labels = { 1d, 0d, -1d, -2d, -3d };
     Assert.AreEqual(0d, LinearRegression.Cost(features, labels, theta), 0.001d);
     Assert.AreEqual(0d, LinearRegression.Gradient(features, labels, theta).Sum(), 0.001d);
 }
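With features {1, x} and theta = (1, -1), the model reproduces the labels exactly (1 - x gives 1, 0, -1, -2, -3), so a least-squares cost and its gradient are both zero, which is what the assertions check. The sketch below shows the conventional least-squares definitions these assertions imply; the actual Cost/Gradient implementations of this LinearRegression class are not shown here, so the forms below are an assumption.

 // Sketch of the standard least-squares cost and gradient; assumes the
 // LinearRegression.Cost/Gradient used in the test follow these conventional forms.
 public static double Cost(double[,] x, double[] y, double[] theta)
 {
     int m = y.Length;
     double sum = 0;
     for (int i = 0; i < m; i++)
     {
         double h = 0;
         for (int j = 0; j < theta.Length; j++)
         {
             h += x[i, j] * theta[j];                // hypothesis h(x_i) = x_i · theta
         }
         sum += (h - y[i]) * (h - y[i]);
     }
     return sum / (2 * m);                           // half the mean squared error
 }

 public static double[] Gradient(double[,] x, double[] y, double[] theta)
 {
     int m = y.Length;
     var grad = new double[theta.Length];
     for (int i = 0; i < m; i++)
     {
         double h = 0;
         for (int j = 0; j < theta.Length; j++)
         {
             h += x[i, j] * theta[j];
         }
         for (int j = 0; j < theta.Length; j++)
         {
             grad[j] += (h - y[i]) * x[i, j] / m;    // ∂J/∂theta_j
         }
     }
     return grad;
 }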
Example #28
        public Task <byte[]> Execute(byte[] data)
        {
            // prep the data
            Int32 questionId = BitConverter.ToInt32(data, 0);

            Console.WriteLine("Node task created with parameter: " + questionId);

            return(Task.Factory.StartNew(() =>
            {
                // To prevent any x-threaded issues
                Int32 q = questionId;

                Random rnd = new Random(q);

                // prep the regres algorithm
                LinearRegression regres = new LinearRegression();

                double[] y;
                double[,] x;
                double[] w;     // weighting

                using (SqlConnection conn = new SqlConnection(@"Data Source=kivu;Initial Catalog=StackoverflowAug2012;uid=sblackler;pwd=password"))
                {
                    conn.Open();

                    var results = conn.Query <PostResult>(@"SELECT TOP 8000 id, PostScore, ViewCount FROM Posts WHERE AcceptedAnswerId IS NULL AND AnswerCount < 10 AND PostScore > (SELECT TOP 1 PostScore FROM Posts WHERE ID = @Q) ORDER BY PostScore DESC", new { Q = rnd.Next(0, 11750761) }).ToList();

                    y = results.Select(p => p.ID).ToArray();

                    Int32 i = 0;
                    x = new double[results.Count, results.Count];
                    foreach (var el in results)
                    {
                        for (Int32 j = 1; j < results.Count; j++)
                        {
                            x[i, j - 1] = (el.PostScore < 1 ? 1 : el.PostScore / el.ViewCount < 1 ? 1 : el.ViewCount) * j / 100;
                        }
                    }
                }

                w = new double[Math.Max(y.Length, x.Length)];

                // Blank the array with the value 1
                Extensions.MemSet(w, 1);
                try
                {
                    if (regres.Regress(y, x, w))
                    {
                        return BitConverter.GetBytes((int)regres.SEC[0]);
                    }
                }
                catch { return BitConverter.GetBytes(-1); }
                // In case the regression fails.
                return BitConverter.GetBytes(rnd.Next(0, 11750761));
            }));
        }
Example #29
        private void TestLinearRegression()
        {
            LinearRegression linearRegression = new LinearRegression(trainingEpochs, learningRate);

            linearRegression.Train(dataX, dataY);
            (float[] results, float optimalWeights, float optimalBias) = linearRegression.Predict(testX);
            //analytical solution: coefficientVector = inv(X'*X)*X'*Y where X is the inputX with ones in the first column and Y is inputY: bias=0.7988 and weight=0.2516
            Assert.Equal(0.309068531, optimalWeights, 6);
            Assert.Equal(0.373608261, optimalBias, 6);
        }
Example #30
        public void WrongDataSetVariableImpactRegressionTest()
        {
            IRegressionProblemData problemData = LoadDefaultTowerProblem();
            double rmsError;
            double cvRmsError;
            var    solution = LinearRegression.CreateSolution(problemData, out rmsError, out cvRmsError);

            solution.ProblemData = LoadDefaultMibaProblem();
            RegressionSolutionVariableImpactsCalculator.CalculateImpacts(solution);
        }
Example #31
        public void LinearRegressionModelVariableImpactMibaTest()
        {
            IRegressionProblemData problemData = LoadDefaultMibaProblem();
            double rmsError;
            double cvRmsError;
            var    solution = LinearRegression.CreateSolution(problemData, out rmsError, out cvRmsError);
            Dictionary <string, double> expectedImpacts = GetExpectedValuesForLRMiba();

            CheckDefaultAsserts(solution, expectedImpacts);
        }
Example #32
        public void test1dLinearRegression()
        {
            //BOOST_MESSAGE("Testing 1d simple linear least-squares regression...");

            /* Example taken from the QuantLib-User list, see posting
             * Multiple linear regression/weighted regression, Boris Skorodumov */

            //SavedSettings backup;

            List<double> x = new InitializedList<double>(9),
                         y = new InitializedList<double>(9);
            x[0] = 2.4; x[1] = 1.8; x[2] = 2.5; x[3] = 3.0;
            x[4] = 2.1; x[5] = 1.2; x[6] = 2.0; x[7] = 2.7; x[8] = 3.6;

            y[0] = 7.8; y[1] = 5.5; y[2] = 8.0; y[3] = 9.0;
            y[4] = 6.5; y[5] = 4.0; y[6] = 6.3; y[7] = 8.4; y[8] = 10.2;

            List<Func<double, double>> v = new List<Func<double, double>>();
            v.Add(a => 1.0);
            v.Add(a => a);

            LinearRegression m = new LinearRegression(x, y);

            const double tol = 0.0002;
            double[] coeffExpected = new double[] { 0.9448, 2.6853 };
            double[] errorsExpected = new double[] { 0.3654, 0.1487 };

            for (int i = 0; i < 2; ++i) {
                if (Math.Abs(m.standardErrors()[i] - errorsExpected[i]) > tol) {
                    Assert.Fail("Failed to reproduce linear regression standard errors"
                                + "\n    calculated: " + m.standardErrors()[i]
                                + "\n    expected:   " + errorsExpected[i]
                                + "\n    tolerance:  " + tol);
                }

                if (Math.Abs(m.coefficients()[i] - coeffExpected[i]) > tol) {
                    Assert.Fail("Failed to reproduce linear regression coef."
                                + "\n    calculated: " + m.coefficients()[i]
                                + "\n    expected:   " + coeffExpected[i]
                                + "\n    tolerance:  " + tol);
                }
            }
        }
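For the nine points above, simple OLS has the closed form slope = Σ(xᵢ - x̄)(yᵢ - ȳ) / Σ(xᵢ - x̄)² and intercept = ȳ - slope·x̄, which reproduces the expected coefficients of roughly 0.9448 and 2.6853. A short stand-alone cross-check, independent of the QuantLib-style LinearRegression class used in the test (System.Linq assumed for Average):

        public static (double Intercept, double Slope) SimpleOlsCrossCheck()
        {
            // Same data as test1dLinearRegression above; closed-form simple OLS.
            double[] x = { 2.4, 1.8, 2.5, 3.0, 2.1, 1.2, 2.0, 2.7, 3.6 };
            double[] y = { 7.8, 5.5, 8.0, 9.0, 6.5, 4.0, 6.3, 8.4, 10.2 };

            double xMean = x.Average();               // requires System.Linq
            double yMean = y.Average();

            double sxy = 0, sxx = 0;
            for (int i = 0; i < x.Length; i++)
            {
                sxy += (x[i] - xMean) * (y[i] - yMean);
                sxx += (x[i] - xMean) * (x[i] - xMean);
            }

            double slope = sxy / sxx;                 // ≈ 2.6853
            double intercept = yMean - slope * xMean; // ≈ 0.9448
            return (intercept, slope);
        }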
Example #33
 public float GetSlope(SectionPathLossCalcParam param, short[] altitudes, int nIndex)
 {
     int fNumber = (int) (param.MergeEdgeMaxDis / (4f * param.CalcResolution));
     if (fNumber < 1)
     {
         fNumber = 1;
     }
     fNumber = Math.Min(this.Trim(fNumber, 5, 10), nIndex);
     float[] a = new float[fNumber * 2];
     float[] y = new float[fNumber];
     int num2 = nIndex;
     for (int i = 0; i < fNumber; i++)
     {
         a[i] = num2 * param.CalcResolution;
         y[i] = altitudes[num2 - 1];
         num2--;
     }
     float[] numArray3 = new float[2];
     float[] numArray4 = new float[3];
     LinearRegression regression = new LinearRegression();
     MatrixOperation operation = new MatrixOperation();
     regression.mvfitgn(a, y, numArray3, fNumber, 2, false);
     return numArray3[1];
 }
Example #34
    private static LinearRegression getRegression(double[] dependent_, double[] independent_)
    {
      var yV = new DoubleVector(dependent_);
      var matrix = new DoubleMatrix(independent_.ToColumn());

      var lin = new LinearRegression(obs: yV, A: matrix, addIntercept: true);

      return lin;
    }
Example #35
        public void TestRidgeVsLstsq()
        {
            var random = new Random(0);
            // we need more samples than features
            const int nSamples = 5;
            const int nFeatures = 4;
            var y = DenseVector.CreateRandom(nSamples, new Normal { RandomSource = random });
            var x = DenseMatrix.CreateRandom(nSamples, nFeatures, new Normal { RandomSource = random });

            var ridge = new RidgeRegression(alpha: 0.0, fitIntercept: false);
            var ols = new LinearRegression(fitIntercept: false);

            ridge.Fit(x, y);
            ols.Fit(x, y);
            Assert.IsTrue(ridge.Coef.AlmostEquals(ols.Coef));

            ridge.Fit(x, y);
            ols.Fit(x, y);
            Assert.IsTrue(ridge.Coef.AlmostEquals(ols.Coef));
        }
Example #36
File: SPMAdjustDT.cs Project: xiaoyj/Space
 private ReturnValue Recursion(float[] valueMatrix, float[] actualLossMatrix, float[] coeMatrix, int validDataNumber, int coeNumNeedAdjust, bool isAdjustClutter)
 {
     ReturnValue value2 = ReturnValue.PROPADJ_SUCCESS;
     if (coeNumNeedAdjust == 0)
     {
         return ReturnValue.PROPCOR_NOSUCESS;
     }
     if (validDataNumber < 10)
     {
         return ReturnValue.PROPCOR_LACKDATAERROR;
     }
     float[] numArray = new float[validDataNumber];
     LinearRegression regression = new LinearRegression();
     MatrixOperation operation = new MatrixOperation();
     int num = 0;
     try
     {
         num = regression.mvfitgn(valueMatrix, actualLossMatrix, coeMatrix, validDataNumber, coeNumNeedAdjust, isAdjustClutter);
     }
     catch (Exception exception)
     {
         WriteLog.Logger.Error(exception.StackTrace);
     }
     if (num != 0)
     {
         return ReturnValue.PROPCOR_NOTFIT;
     }
     return value2;
 }
Example #37
        /// <summary>
        /// Demonstrates linear regression in two dimensions.
        /// </summary>
        void LinearRegression()
        {
            // Set up example description
            nRichDescription.Text = "A linear regression model is computed from the sample data containing random normal noise. \n\nA point is then predicted from the model with a 95% confidence interval - meaning the predicted point is expected to lie within the confidence interval 95% of the time.";

            // First read in the independent (or predictor) values. This is a matrix
            // with one column and a row for each amounts measurement.
            DoubleVector raw_data = new DoubleVector(25, 0, 1);
            DoubleMatrix measurements = new DoubleMatrix(raw_data);

            // Next, read in the responses
            // Build a linear polynomial and add noise for interest
            RandomNumberGenerator rand = new RandGenNormal(0, noiselevel_);
            Polynomial poly = new Polynomial(new DoubleVector("0, 1"));
            DoubleVector responses = poly.Evaluate(raw_data) + new DoubleVector(raw_data.Length, rand);

            // Construct a linear regression. If we want our regression to calculate a
            // y-intercept we must send in true for the "addIntercept" parameter (the
            // third parameter in the constructor).
            LinearRegression regression = new LinearRegression(measurements, responses, true);
            DoubleVector residues = regression.Residuals;

            // Use the linear regression class to make a prediction according to the model
            DoubleVector xvalues = new DoubleVector("30"); // Use the model to predict the observation at 30.
            Interval pi = regression.PredictionInterval(xvalues, 0.95);

            // Build some data points along the regression line for display
            DoubleMatrix abcissae = new DoubleMatrix(new DoubleVector(raw_data));
            DoubleVector predicted_ys = regression.PredictedObservations(abcissae);

            // Build the chart
            SetupChartLayout("Linear Regression");

            NChart chart = nChartControl1.Charts[0];

            SetupChartAxes(chart);

            // Set up the line series
            NLineSeries line = new NLineSeries();
            chart.Series.Add(line);

            // tell the series to regard the X values
            line.UseXValues = true;

            // no data labels
            line.DataLabelStyle.Visible = false;

            // Set the line color
            line.BorderStyle = new NStrokeStyle(2.0f, Color.Tomato);

            // name data set
            line.Name = "Linear Regression";

            // Add the Linear Regression line data to the line series
            line.XValues.AddRange(abcissae.DataBlock.Data);
            line.Values.AddRange(predicted_ys.DataBlock.Data);

            // Draw the raw data points
            NPointSeries point = new NPointSeries();
            chart.Series.Add(point);

            point.UseXValues = true;
            point.DataLabelStyle.Visible = false;

            // Set the point appearance properties
            point.FillStyle = new NColorFillStyle(Color.SkyBlue);
            point.BorderStyle = new NStrokeStyle(1.0f,Color.DarkGray);
            point.PointShape = PointShape.Cross;
            point.Size = new NLength(6.0f);

            // Points must fit in the chart area
            point.InflateMargins = true;

            // Name point set
            point.Name = "Observations";

            // set the point data
            point.Values.AddRange(responses.DataBlock.Data);
            point.XValues.AddRange(measurements.DataBlock.Data);

            double m = (pi.Min + pi.Max) / 2.0;

            // Display the predicted value with an error bar series
            NErrorBarSeries predicted_points = new NErrorBarSeries();
            chart.Series.Add(predicted_points);
            predicted_points.Name = "Predicted Point";
            predicted_points.UseXValues = true;
            predicted_points.InflateMargins = true;
            predicted_points.FillStyle = new NColorFillStyle(Color.Crimson);
            predicted_points.BorderStyle = new NStrokeStyle(1.0f, Color.DarkGray);
            predicted_points.DataLabelStyle.Visible = false;
            predicted_points.MarkerStyle.Visible = true;
            predicted_points.MarkerStyle.FillStyle = new NColorFillStyle(Color.Crimson);
            predicted_points.MarkerStyle.BorderStyle = new NStrokeStyle(1.0f, Color.DarkGray);
            predicted_points.MarkerStyle.PointShape = PointShape.Bar;
            predicted_points.MarkerStyle.Width = new NLength(5);
            predicted_points.MarkerStyle.Height = new NLength(5);
            predicted_points.SizeY = new NLength(5);

            // Fill the data for the predicted point
            predicted_points.XValues.AddRange(xvalues.DataBlock.Data);
            predicted_points.Values.Add(m);
            predicted_points.UpperErrorsY.Add(pi.Max - m);
            predicted_points.LowerErrorsY.Add(m - pi.Min);

            // Create a label to display the predicted value
            NLabel label = new NLabel();
            label.BoundsMode = BoundsMode.None;
            label.ContentAlignment = ContentAlignment.BottomLeft;
            label.Location = new NPointL(
                new NLength(87, NRelativeUnit.ParentPercentage),
                new NLength(3, NRelativeUnit.ParentPercentage));

            label.TextStyle.TextFormat = TextFormat.XML;
            label.TextStyle.FontStyle = new NFontStyle("Arial", 9);
            label.TextStyle.StringFormatStyle.HorzAlign = Nevron.HorzAlign.Right;
            label.TextStyle.BackplaneStyle.Visible = true;
            label.TextStyle.BackplaneStyle.FillStyle = new NGradientFillStyle(GradientStyle.Horizontal, GradientVariant.Variant1, Color.FromArgb(130, 255, 255, 255), Color.FromArgb(130, 233, 233, 255));
            label.TextStyle.BackplaneStyle.Shape = BackplaneShape.SmoothEdgeRectangle;
            label.TextStyle.BackplaneStyle.StandardFrameStyle.InnerBorderColor = Color.White;
            label.Text = "<font color = 'crimson'>" + m.ToString("0.###") + "</font> - predicted value with 95% confidence interval";

            chart.ChildPanels.Add(label);

            nChartControl1.Refresh();
        }
Example #38
 /// <summary>
 /// Returns a new line chart by interpolating over the given linear regression.
 /// </summary>
 /// <param name="lr">Linear Regression.</param>
 /// <param name="predictorIndex">The predictor (independent) variable to plot on the x-axis.</param>
 /// <returns>A new chart.</returns>
 /// <exception cref="InvalidArgumentException">
 /// Thrown if the given predictorIndex is outside the range of columns in lr.PredictorMatrix.
 /// </exception>
 /// <remarks>
 /// The multidimensional linear regression fit is plotted projected onto the plane of the specified 
 /// predictor variable.
 /// </remarks>
 public static ChartControl ToChart( LinearRegression lr, int predictorIndex )
 {
     ChartControl chart = GetDefaultChart();
     Update( ref chart, lr, predictorIndex );
     return chart;
 }
Example #39
        /// <summary>
        /// Updates the given chart with the specified linear regression.
        /// </summary>
        /// <param name="chart">A chart.</param>
        /// <param name="lr">Linear Regression.</param>
        /// <param name="predictorIndex">The predictor (independent) variable to plot on the x-axis.</param>
        /// <exception cref="InvalidArgumentException">
        /// Thrown if the given predictorIndex is outside the range of columns in lr.PredictorMatrix.
        /// </exception>
        /// <remarks>
        /// The multidimensional linear regression fit is plotted projected onto the plane of the specified 
        /// predictor variable.
        /// <br/>
        /// Titles are added only if chart does not currently contain any titles.
        /// <br/>
        /// The first two data series are replaced, or added if necessary.
        /// </remarks>
        public static void Update( ref ChartControl chart, LinearRegression lr, int predictorIndex )
        {
            if( predictorIndex < 0 || predictorIndex > lr.PredictorMatrix.Cols )
            {
                throw new Core.IndexOutOfRangeException( predictorIndex );
            }

            List<string> titles = new List<string>()
            {
                "LinearRegression",
            };
            string xTitle = "Independent Variable " + predictorIndex;
            string yTitle = "Dependent Variable";

            // create version of predictor matrix with all other columns zeroed out
            DoubleMatrix projection = new DoubleMatrix( lr.PredictorMatrix.Rows, lr.PredictorMatrix.Cols );
            projection[Slice.All, predictorIndex] = lr.PredictorMatrix.Col( predictorIndex );

            DoubleMatrix data = new DoubleMatrix( lr.NumberOfObservations, 3 );
            data[Slice.All, 0] = lr.PredictorMatrix.Col( predictorIndex );          // x
            data[Slice.All, 1] = lr.Observations;                                   // y
            data[Slice.All, 2] = lr.PredictedObservations( projection );            // y predicted

            data = NMathFunctions.SortByColumn( data, 0 );

            List<ChartSeries> series = new List<ChartSeries>()
            {
                BindXY( data.Col(0), data.Col(1), ChartSeriesType.Scatter, DefaultMarker ),

                // only necessary to plot endpoints of line
                BindXY( data.Col(0)[new Slice(0, 2, data.Rows - 1)], data.Col(2)[new Slice(0, 2, data.Rows - 1)], ChartSeriesType.Line, ChartSymbolShape.None )
            };
            series[0].Text = "Observed";
            series[1].Text = "Predicted";

            Update( ref chart, series, titles, xTitle, yTitle );
        }
Example #40
 /// <summary>
 /// Shows a new chart in a default form.
 /// </summary>
 /// <param name="lr">Linear Regression.</param>
 /// <param name="predictorIndex">The predictor (independent) variable to plot on the x-axis.</param>
 /// <exception cref="InvalidArgumentException">
 /// Thrown if the given predictorIndex is outside the range of columns in lr.PredictorMatrix.
 /// </exception>
 /// <remarks>
 /// The multidimensional linear regression fit is plotted projected onto the plane of the specified 
 /// predictor variable.
 /// <br/>
 /// Equivalent to:
 /// <code>
 /// NMathStatsChart.Show( ToChart( lr, predictorIndex ) );
 /// </code>
 /// </remarks>
 public static void Show( LinearRegression lr, int predictorIndex )
 {
     Show( ToChart( lr, predictorIndex ) );
 }