public override void Recompute()
{
    var toLog = GetInputBundle();
    var logged = new List<Data.TimeSeries>(toLog.Count);
    var failed = false;
    foreach (var inp in toLog)
    {
        var ts = new Data.TimeSeries();
        ts.Title = $"Log({inp.Title})";
        for (var t = 0; t < inp.Count; ++t)
        {
            if (inp[t] > 0)
            {
                ts.Add(inp.TimeStamp(t), Math.Log(inp[t]), false);
            }
            else
            {
                failed = true;
            }
        }
        logged.Add(ts);
    }
    outputs = logged;
    IsValid = true;
    if (failed)
    {
        Console.WriteLine(
            "One or more values were non-positive. Logs of these numbers are not included in the output.");
    }
}
/// <summary>
/// Constructor for a heavy-tailed ARMAX model.
/// </summary>
/// <param name="arOrder">autoregressive order</param>
/// <param name="maOrder">moving average order</param>
/// <param name="exOrder">number of exogenous inputs</param>
/// <param name="tailDoF">degrees of freedom for t-distributed innovations</param>
/// <param name="data">data to perform analysis on</param>
public ARMAXModel(int arOrder, int maOrder, int exOrder, int tailDoF, Data.TimeSeries data)
    : base(arOrder, maOrder, data, tailDoF)
{
    numExogenous = exOrder;
    exogenous = new Data.TimeSeries[numExogenous];
    LocalInitializeParameters();
}
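// A minimal usage sketch (hedged: "observed" is a placeholder Data.TimeSeries, not
// part of this file, and nothing beyond the constructor signature is verified here).
// Builds an ARMAX(2,1) model with one exogenous input and t(5)-distributed innovations;
// the exogenous series would then be attached via the exogenous[] slots:
//
//   var model = new ARMAXModel(arOrder: 2, maOrder: 1, exOrder: 1, tailDoF: 5, data: observed);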
public override void Recompute()
{
    IsValid = false;
    if (GetInputType(-1) != InputType.UnivariateTS)
    {
        return;
    }
    var series = GetInputBundle();
    if (series.Count != 1)
    {
        return; // something wrong!
    }
    var input = series[0];
    var agg = new Data.TimeSeries();
    double accumulated = 0;
    for (var t = 0; t < input.Count; ++t)
    {
        accumulated += input[t];
        if (t % Period == Period - 1)
        {
            // emit one aggregated point per block of Period observations,
            // stamped with the time of the block's last observation
            agg.Add(input.TimeStamp(t), accumulated, false);
            accumulated = 0;
        }
    }
    outputs = new List<Data.TimeSeries>();
    outputs.Add(agg);
    IsValid = true;
}
public override void Recompute()
{
    var ins = GetInputBundle();
    outputs = new List<Data.TimeSeries>();
    foreach (var ts in ins)
    {
        var ots = new Data.TimeSeries { Title = ts.Title };
        for (var t = 0; t < ts.Count; ++t)
        {
            // clamp each value into [Minimum, Maximum]
            if (ts[t] > Maximum)
            {
                ots.Add(ts.TimeStamp(t), Maximum, false);
            }
            else if (ts[t] < Minimum)
            {
                ots.Add(ts.TimeStamp(t), Minimum, false);
            }
            else
            {
                ots.Add(ts.TimeStamp(t), ts[t], false);
            }
        }
        outputs.Add(ots);
    }
    IsValid = true;
}
public override Data.TimeSeries SimulateData(List<DateTime> inputs, int simSeed)
{
    var times = inputs;
    if (times == null)
    {
        return null; // inputs should be a list of DateTimes
    }
    var n = times.Count;
    var simulated = new Data.TimeSeries();
    var randomSource = new Palf(simSeed);
    var stdnormal = new Normal();
    stdnormal.RandomSource = randomSource;
    var mVar = GetVariance(Parameters);
    var ss = Vector<double>.Build.Dense(n);
    for (var i = 0; i < n; ++i)
    {
        var variance = GetConditionalSig2(i, simulated, ss, Parameters, mVar);
        // draw a standard normal innovation and scale by the conditional std. deviation;
        // the original called RandomSource.NextDouble(), which yields a uniform variate
        var simLR = stdnormal.Sample() * Math.Sqrt(variance);
        simulated.Add(times[i], simLR, false);
    }
    simulated.Title = "Simulation";
    simulated.Description = $"Simulation from {Description}";
    return simulated;
}
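// Sketch of the recursion implemented above (notation only, not additional API):
//
//   sigma_t^2 = GetConditionalSig2(t, ...)    conditional variance given the simulated past
//   X_t       = sigma_t * Z_t,   Z_t ~ N(0,1) i.i.d.
//
// Each simulated log-return is appended to "simulated" and so feeds back into the
// conditional variance used at the next step.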
public override void Recompute()
{
    IsValid = false;
    var tsList = GetInputBundle();
    if (tsList != null)
    {
        outputs = new List<Data.TimeSeries>(tsList.Count);
        foreach (var ts in tsList)
        {
            var stripped = new Data.TimeSeries { Title = ts.Title };
            for (var t = 0; t < ts.Count; ++t)
            {
                // keep only values inside [LowerValue, UpperValue]
                if (!(ts[t] < LowerValue || ts[t] > UpperValue))
                {
                    stripped.Add(ts.TimeStamp(t), ts[t], true);
                }
            }
            outputs.Add(stripped);
        }
        if (outputs.Count > 0)
        {
            IsValid = true;
        }
    }
}
public override void Recompute()
{
    IsValid = false;
    tsOutput = null;
    mvtsOutput = null;
    if (SkipDates == null)
    {
        SkipDates = new List<DateTime>();
    }
    var mvts = GetInput(0) as MVTimeSeries;
    var ts = GetInput(0) as Data.TimeSeries;
    // the input must be a univariate or multivariate time series
    var tp = GetInputType(-1);
    if (tp != InputType.UnivariateTS && tp != InputType.MultivariateTS)
    {
        return;
    }
    if (tp == InputType.MultivariateTS)
    {
        // split the multivariate series into components and sample each one
        var individuals = mvts.ExtractList();
        var nc = individuals.Count;
        var sampled = new List<Data.TimeSeries>(nc);
        IsValid = true;
        for (var i = 0; i < nc; ++i)
        {
            var res = DoUnivariateSampling(individuals[i]);
            if (res.Count > 0)
            {
                sampled.Add(res);
            }
            else
            {
                IsValid = false;
            }
        }
        if (IsValid)
        {
            mvtsOutput = new MVTimeSeries(sampled, false) { Title = mvts.Title };
        }
    }
    if (tp == InputType.UnivariateTS)
    {
        tsOutput = DoUnivariateSampling(ts);
        IsValid = tsOutput.Count > 0;
    }
}
private Data.TimeSeries DoUnivariateSampling(Data.TimeSeries ofTS)
{
    var retval = new Data.TimeSeries();
    retval.Title = ofTS.Title;
    if (SamplingInterval.Ticks == 0)
    {
        // no sampling interval: copy all points stamped within [StartTime, EndTime]
        int i0, i1;
        bool gotHit;
        i0 = ofTS.IndexAtOrBefore(StartTime, out gotHit);
        if (!gotHit)
        {
            ++i0;
        }
        i1 = ofTS.IndexAtOrBefore(EndTime, out gotHit);
        ++i1;
        if (i1 > ofTS.Count)
        {
            i1 = ofTS.Count;
        }
        for (var t = i0; t < i1; ++t)
        {
            retval.Add(ofTS.TimeStamp(t), ofTS[t], false);
        }
    }
    else
    {
        for (var current = new DateTime(StartTime.Ticks);
             current < EndTime;
             current += SamplingInterval)
        {
            foreach (var sampleOffset in SamplingBaseOffsets)
            {
                var adjusted = current + sampleOffset;
                var skipping = false;
                if (SkipWeekends)
                {
                    if (adjusted.DayOfWeek == DayOfWeek.Saturday || adjusted.DayOfWeek == DayOfWeek.Sunday)
                    {
                        skipping = true;
                    }
                }
                if (IsWithinSkipIntervals(adjusted))
                {
                    skipping = true;
                }
                if (!skipping)
                {
                    var sampled = ofTS.ValueAtTime(adjusted);
                    retval.Add(adjusted, sampled, false);
                }
            }
        }
    }
    return retval;
}
/// <summary>
/// Basic constructor for a GARCH(m,s) model.
/// </summary>
/// <param name="modelType">type of GARCH model to use</param>
/// <param name="dataOrder">m order</param>
/// <param name="intrinsicOrder">s order</param>
/// <param name="data">data to associate with this model</param>
public GARCHModel(GARCHType modelType, int dataOrder, int intrinsicOrder, Data.TimeSeries data)
{
    this.modelType = modelType;
    this.dataOrder = dataOrder;
    this.intrinsicOrder = intrinsicOrder;
    values = data;
    LocalInitializeParameters();
}
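// A minimal usage sketch (hedged: "returns" is a placeholder Data.TimeSeries of
// log-returns, not part of this file). Constructs a standard GARCH(1,1) model:
//
//   var model = new GARCHModel(GARCHType.Standard, dataOrder: 1, intrinsicOrder: 1, data: returns);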
public override void Recompute()
{
    IsValid = false;
    var tp = GetInputType(-1);
    if (tp == InputType.MultivariateTS)
    {
        var mts = GetInput(0) as MVTimeSeries;
        if (mts == null)
        {
            return;
        }
        mvIntegral = new MVTimeSeries(mts.Dimension) { Title = "MV" };
        for (var i = 0; i < mts.Dimension; ++i)
        {
            mvIntegral.SubTitle[i] = mts.SubTitle[i];
        }
        integral = null;
        var sum = new double[mts.Dimension];
        for (var t = 0; t < mts.Count; ++t)
        {
            for (var j = 0; j < mts.Dimension; ++j)
            {
                if (!double.IsNaN(mts[t][j]))
                {
                    sum[j] += mts[t][j]; // NaNs are skipped rather than propagated
                }
            }
            var temp = new double[mts.Dimension];
            Array.Copy(sum, temp, mts.Dimension);
            mvIntegral.Add(mts.TimeStamp(t), temp, false);
        }
    }
    if (tp == InputType.UnivariateTS)
    {
        var ts = GetInput(0) as Data.TimeSeries;
        if (ts == null)
        {
            return;
        }
        integral = new Data.TimeSeries { Title = ts.Title };
        mvIntegral = null;
        double sum = 0;
        for (var t = 0; t < ts.Count; ++t)
        {
            sum += ts[t];
            integral.Add(ts.TimeStamp(t), sum, false);
        }
    }
    IsValid = true;
}
private Data.TimeSeries ApplyFilterTo(Data.TimeSeries ts)
{
    var retval = new Data.TimeSeries();
    var maOrder = maCoeffs.Length;
    var arOrder = arCoeffs.Length;
    double arSum = 0;
    for (var i = 1; i < arOrder; ++i)
    {
        arSum += arCoeffs[i];
    }
    double maSum = 0;
    for (var i = 0; i < maOrder; ++i)
    {
        maSum += maCoeffs[i];
    }
    // now go through and apply the filter
    for (var t = 0; t < ts.Count; ++t)
    {
        var tx = 0.0;
        if (TimeInterval.Ticks == 0) // apply the filter to successive data points
        {
            // get MA part
            for (var i = 0; i < maOrder; ++i)
            {
                tx += t >= i ? maCoeffs[i] * ts[t - i] : ts[0];
            }
            // get AR part
            for (var i = 1; i < arOrder; ++i)
            {
                tx += t >= i ? arCoeffs[i] * retval[t - i] : ts[0];
            }
            tx /= arSum + maSum;
        }
        else // apply the filter to data points at the specified sampling interval
        {
            // get MA part
            for (var i = 0; i < maOrder; ++i)
            {
                tx += maCoeffs[i] * ts.ValueAtTime(ts.TimeStamp(t) - new TimeSpan(i * TimeInterval.Ticks));
            }
            // get AR part
            for (var i = 1; i < arOrder; ++i)
            {
                tx += arCoeffs[i] * retval.ValueAtTime(ts.TimeStamp(t) - new TimeSpan(i * TimeInterval.Ticks));
            }
            tx /= arSum + maSum;
        }
        retval.Add(ts.TimeStamp(t), tx, false);
    }
    return retval;
}
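// The recursion above, in the index-based branch (notation only; x = ts, y = retval,
// b = maCoeffs, a = arCoeffs, q = maOrder, p = arOrder):
//
//   y_t = ( sum_{i=0}^{q-1} b_i x_{t-i} + sum_{j=1}^{p-1} a_j y_{t-j} ) / ( sum_i b_i + sum_j a_j )
//
// Normalizing by the coefficient sums keeps a constant input at the same level.
// Before enough history exists (t < i), the code falls back to x_0 for the missing terms.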
protected override void OnDataConnection()
{
    values = TheData as Data.TimeSeries;
    longitudinalValues = TheData as Longitudinal;
    if (values == null && longitudinalValues == null)
    {
        throw new ApplicationException("Invalid data connection.");
    }
}
public override void Recompute()
{
    IsValid = false;
    if (string.IsNullOrEmpty(Expression))
    {
        return;
    }
    // recompile only when the expression has changed since the last compilation
    if (methodInfo == null || methodInfoString == null || methodInfoString != Expression)
    {
        CompileStuff();
    }
    if (methodInfo == null)
    {
        Console.WriteLine("Unable to parse expression.");
        return;
    }
    var tsList = GetInputBundle();
    outputs = new List<Data.TimeSeries>();
    var tempxArray = new double[tsList[0].Count];
    var tempyArray = new double[tsList[0].Count];
    var tempzArray = new double[tsList[0].Count];
    for (var t = 0; t < tsList[0].Count; ++t)
    {
        // x comes from the first series; y and z are sampled from the second and
        // third series (when present) at the first series' timestamps
        tempxArray[t] = tsList[0][t];
        if (tsList.Count > 1)
        {
            tempyArray[t] = tsList[1].ValueAtTime(tsList[0].TimeStamp(t));
        }
        if (tsList.Count > 2)
        {
            tempzArray[t] = tsList[2].ValueAtTime(tsList[0].TimeStamp(t));
        }
    }
    methodInfo.Invoke(null, new object[] { tempxArray, tempyArray, tempzArray });
    var newTS = new Data.TimeSeries();
    for (var t = 0; t < tsList[0].Count; ++t)
    {
        newTS.Add(tsList[0].TimeStamp(t), tempxArray[t], false);
    }
    outputs.Add(newTS);
    IsValid = true;
}
public override void Recompute()
{
    IsValid = false;
    var inputs = GetInputBundle();
    if (inputs.Count != 1)
    {
        return;
    }
    var input = inputs[0];
    var values = new Data.TimeSeries();
    var lower = new Data.TimeSeries();
    var upper = new Data.TimeSeries();
    var indicators = new Data.TimeSeries();
    var acc = new List<double>(); // rolling window over the most recent values
    for (var t = 0; t < input.Count; ++t)
    {
        acc.Add(input[t]);
        if (acc.Count > NumPeriods)
        {
            var v = Vector<double>.Build.DenseOfArray(acc.ToArray());
            var sigma = v.PopulationStandardDeviation();
            var mean = v.Mean();
            acc.Remove(input[t - NumPeriods]); // drop the oldest value in the window
            values.Add(input.TimeStamp(t), input[t], false);
            lower.Add(input.TimeStamp(t), mean - Width * sigma, false);
            upper.Add(input.TimeStamp(t), mean + Width * sigma, false);
            var sig = sigma;
            indicators.Add(input.TimeStamp(t), sig != 0 ? (input[t] - mean) / sig : 0, false);
        }
    }
    values.Title = "Level";
    lower.Title = "Lower Band";
    upper.Title = "Upper Band";
    indicators.Title = "Indicator";
    outputs = new List<Data.TimeSeries>(4) { values, lower, upper, indicators };
    IsValid = true;
}
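// Band formulas as computed above, with mu_t and sigma_t the mean and population
// standard deviation of the rolling window ending at t, and k = Width:
//
//   lower_t     = mu_t - k * sigma_t
//   upper_t     = mu_t + k * sigma_t
//   indicator_t = (x_t - mu_t) / sigma_t    (defined as 0 when sigma_t = 0)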
private Data.TimeSeries ApplyFilterTo(Data.TimeSeries ts)
{
    var retval = new Data.TimeSeries();
    // now go through and apply the filter
    for (var t = 0; t < ts.Count; ++t)
    {
        var tx = 0.0;
        // get MA part
        tx += (1 - SmoothFactor) * ts[t];
        // get AR part
        tx += t >= 1 ? SmoothFactor * retval[t - 1] : 0;
        retval.Add(ts.TimeStamp(t), tx, false);
    }
    return retval;
}
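// This is the standard exponentially weighted moving average with alpha = SmoothFactor:
//
//   y_t = (1 - alpha) * x_t + alpha * y_{t-1},    y_0 = (1 - alpha) * x_0
//
// so a larger SmoothFactor puts more weight on the running state and smooths more heavily.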
public override void Recompute()
{
    IsValid = false;
    var ts = GetInput(0) as Data.TimeSeries;
    var mts = GetInput(0) as MVTimeSeries;
    if (ts == null && mts == null)
    {
        return; // not valid
    }
    if (ts != null)
    {
        // merge univariate time series
        var result = new Data.TimeSeries();
        for (var ii = 0; ii < NumberOfInputs; ++ii)
        {
            var pts = GetInput(ii) as Data.TimeSeries;
            if (pts != null)
            {
                result.Add(pts, true);
            }
        }
        outputResult = result;
        IsValid = true;
    }
    else if (mts != null)
    {
        // merge multivariate time series
        var result = new MVTimeSeries(mts.Dimension);
        result.SubTitle = mts.SubTitle;
        for (var ii = 0; ii < NumberOfInputs; ++ii)
        {
            var pts = GetInput(ii) as MVTimeSeries;
            if (pts != null)
            {
                result.Add(pts, true);
            }
        }
        outputResult = result;
        IsValid = true;
    }
}
public override void Recompute()
{
    outputs = new List<Data.TimeSeries>();
    IsValid = false;
    if (FutureTimes == null)
    {
        return;
    }
    if (FutureTimes.Length == 0)
    {
        return;
    }
    var tsm = GetInput(0) as UnivariateTimeSeriesModel;
    var tsd = GetInput(1) as Data.TimeSeries;
    if (tsm == null || tsd == null)
    {
        return;
    }
    var fdt = new List<DateTime>(FutureTimes);
    var forecasts = tsm.BuildForecasts(tsd, fdt) as TimeSeriesBase<DistributionSummary>;
    if (forecasts == null)
    {
        return;
    }
    // keep just the predictive means; the forecasts carry full distribution summaries
    var predictiveMeans = new Data.TimeSeries();
    for (var t = 0; t < forecasts.Count; ++t)
    {
        predictiveMeans.Add(forecasts.TimeStamp(t), forecasts[t].Mean, false);
    }
    outputs.Add(predictiveMeans);
    IsValid = predictiveMeans.Count > 0;
}
private List<Data.TimeSeries> DoUnivariateSampling(Data.TimeSeries ofTS)
{
    var retval = new List<Data.TimeSeries>();
    for (var current = new DateTime(StartTime.Ticks);
         current < EndTime;
         current += SamplingInterval)
    {
        var slice = new Data.TimeSeries();
        foreach (var sampleOffset in SamplingBaseOffsets)
        {
            var adjusted = current + sampleOffset;
            var skipping = false;
            if (SkipWeekends)
            {
                if (adjusted.DayOfWeek == DayOfWeek.Saturday || adjusted.DayOfWeek == DayOfWeek.Sunday)
                {
                    skipping = true;
                }
            }
            if (IsWithinSkipIntervals(adjusted))
            {
                skipping = true;
            }
            if (!skipping)
            {
                var sampled = ofTS.ValueAtTime(adjusted);
                slice.Add(adjusted, sampled, false);
            }
        }
        slice.Title = current.ToString();
        if (slice.Count > 0)
        {
            retval.Add(slice);
        }
    }
    return retval;
}
public override void Recompute()
{
    IsValid = false;
    var inputs = GetInputBundle();
    outputs = new List<Data.TimeSeries>(inputs.Count);
    var failure = false;
    for (var i = 0; i < inputs.Count; ++i)
    {
        var lrs = new Data.TimeSeries();
        for (var t = 1; t < inputs[i].Count; ++t)
        {
            var x1 = inputs[i][t - 1];
            var x2 = inputs[i][t];
            if (x1 > 0 && x2 > 0)
            {
                lrs.Add(inputs[i].TimeStamp(t), Math.Log(x2) - Math.Log(x1), false);
            }
            else
            {
                failure = true;
            }
        }
        lrs.Title = inputs[i].Title;
        outputs.Add(lrs);
    }
    multivariateOutputPrefix = "LR";
    IsValid = true;
    if (failure)
    {
        Console.WriteLine("One or more values were non-positive; the corresponding log-returns were left out.");
    }
}
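// Log-return definition used above, valid only when both values are positive:
//
//   r_t = ln(x_t) - ln(x_{t-1}) = ln(x_t / x_{t-1})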
public override void Recompute()
{
    IsValid = false;
    tsOutput = null;
    mvtsOutput = null;
    // both the series to be sampled and the reference series must be time series
    var tp = GetInputType(0);
    if (tp != InputType.UnivariateTS && tp != InputType.MultivariateTS)
    {
        return;
    }
    var rp = GetInputType(1);
    if (rp != InputType.UnivariateTS && rp != InputType.MultivariateTS)
    {
        return;
    }
    // first get the reference times
    var times = new List<DateTime>(1000);
    var input1 = GetInput(1);
    var mvref = input1 as MVTimeSeries;
    var uref = input1 as Data.TimeSeries;
    if (rp == InputType.UnivariateTS)
    {
        if (uref == null)
        {
            return;
        }
        for (var t = 0; t < uref.Count; ++t)
        {
            times.Add(uref.TimeStamp(t));
        }
    }
    else if (rp == InputType.MultivariateTS)
    {
        if (mvref == null)
        {
            return;
        }
        for (var t = 0; t < mvref.Count; ++t)
        {
            times.Add(mvref.TimeStamp(t));
        }
    }
    else
    {
        throw new ApplicationException("Invalid input type; this should not happen!");
    }
    var refDim = uref != null ? 1 : mvref.Dimension;
    // then sample the first input at the reference times to get the output
    if (tp == InputType.UnivariateTS)
    {
        var uts = GetInput(0) as Data.TimeSeries;
        if (uts == null)
        {
            return;
        }
        if (!IncludeReferenceInOutput)
        {
            tsOutput = new Data.TimeSeries();
            for (var t = 0; t < times.Count; ++t)
            {
                tsOutput.Add(times[t], uts.ValueAtTime(times[t]), true);
            }
        }
        else
        {
            // the sampled value goes in component 0, the reference values after it
            mvtsOutput = new MVTimeSeries(1 + refDim);
            for (var t = 0; t < times.Count; ++t)
            {
                var val = new double[1 + refDim];
                val[0] = uts.ValueAtTime(times[t]);
                for (var i = 1; i <= refDim; ++i)
                {
                    val[i] = mvref != null ? mvref[t][i - 1] : uref[t];
                }
                mvtsOutput.Add(times[t], val, true);
            }
        }
        IsValid = true;
    }
    if (tp == InputType.MultivariateTS)
    {
        var mts = GetInput(0) as MVTimeSeries;
        if (mts == null)
        {
            return;
        }
        if (!IncludeReferenceInOutput)
        {
            mvtsOutput = new MVTimeSeries();
            for (var t = 0; t < times.Count; ++t)
            {
                mvtsOutput.Add(times[t], mts.ValueAtTime(times[t]), true);
            }
        }
        else
        {
            // sampled components first, then the reference components
            var thisDim = mts.Dimension;
            mvtsOutput = new MVTimeSeries(thisDim + refDim);
            for (var t = 0; t < times.Count; ++t)
            {
                var val = new double[thisDim + refDim];
                var dv = mts.ValueAtTime(times[t]);
                for (var i = 0; i < thisDim; ++i)
                {
                    val[i] = dv[i];
                }
                for (var i = 0; i < refDim; ++i)
                {
                    val[i + thisDim] = mvref != null ? mvref[t][i] : uref[t];
                }
                mvtsOutput.Add(times[t], val, true);
            }
        }
        IsValid = true;
    }
}
public override void Recompute()
{
    IsValid = false;
    if (GetInputType(-1) != InputType.MultivariateTS)
    {
        return;
    }
    midPoints = null;
    var series = GetInputBundle();
    if (series.Count != 2)
    {
        return; // something wrong!
    }
    // now we should be able to do something
    var t0 = 0; // index of next one to check
    var t1 = 0;
    var T0 = series[0].Count;
    var T1 = series[1].Count;
    midPoints = new Data.TimeSeries();
    if (string.IsNullOrEmpty(AssignedName))
    {
        midPoints.Title = "Mid";
    }
    else
    {
        midPoints.Title = AssignedName;
    }
    var done = false;
    while (!done)
    {
        var dt0 = t0 < T0 ? series[0].TimeStamp(t0) : DateTime.MaxValue;
        var dt1 = t1 < T1 ? series[1].TimeStamp(t1) : DateTime.MaxValue;
        if (dt0 == dt1)
        {
            // got one! only record the midpoint if the spread is below SpreadLimit
            var gap = Math.Abs(series[0][t0] - series[1][t1]);
            if (gap < SpreadLimit)
            {
                midPoints.Add(series[0].TimeStamp(t0), (series[0][t0] + series[1][t1]) / 2.0, true);
            }
            ++t0;
            ++t1;
        }
        if (dt0 < dt1)
        {
            ++t0;
        }
        if (dt0 > dt1)
        {
            ++t1;
        }
        done = t0 == T0 && t1 == T1;
    }
    outputs = new List<Data.TimeSeries>(1);
    outputs.Add(midPoints);
    IsValid = true;
}
public override void Recompute()
{
    var mts = GetInput(0) as MVTimeSeries;
    var uts = GetInput(0) as Data.TimeSeries;
    var lts = GetInput(0) as Longitudinal;
    if (Spacing == 0)
    {
        Spacing = 1; // just fix it here: this is for backwards-compatibility
    }
    mvDifferences = null;
    differences = null;
    longDifferences = null;
    IsValid = false;
    if (mts != null)
    {
        mvDifferences = new MVTimeSeries(mts.Dimension) { Title = "Diffs" };
        for (var i = 0; i < mts.Dimension; ++i)
        {
            mvDifferences.SubTitle[i] = mts.SubTitle[i];
        }
        var zerov = new double[mts.Dimension];
        if (!LeftTimeStamps && PadWithZeroes)
        {
            // pad the beginning, where no lagged value is available yet
            for (var t = 0; t < Lag; ++t)
            {
                if (t % Spacing == Phase)
                {
                    mvDifferences.Add(mts.TimeStamp(t), zerov, false);
                }
            }
        }
        for (var t = Phase; t < mts.Count; t += Spacing)
        {
            if (t >= Lag)
            {
                var diff = new double[mts.Dimension];
                for (var j = 0; j < mts.Dimension; ++j)
                {
                    diff[j] = mts[t][j] - mts[t - Lag][j];
                }
                var stamp = LeftTimeStamps ? mts.TimeStamp(t - Lag) : mts.TimeStamp(t);
                mvDifferences.Add(stamp, diff, false);
            }
        }
        if (LeftTimeStamps && PadWithZeroes)
        {
            // pad the end when differences are stamped at the left endpoint
            for (var t = mts.Count - Lag; t < mts.Count; ++t)
            {
                if (t % Spacing == Phase)
                {
                    mvDifferences.Add(mts.TimeStamp(t), zerov, false);
                }
            }
        }
    }
    if (uts != null)
    {
        differences = new Data.TimeSeries { Title = $"Diff({uts.Title})" };
        if (!LeftTimeStamps && PadWithZeroes)
        {
            for (var t = 0; t < Lag; ++t)
            {
                if (t % Spacing == Phase)
                {
                    differences.Add(uts.TimeStamp(t), 0, false);
                }
            }
        }
        for (var t = Phase; t < uts.Count; t += Spacing)
        {
            if (t >= Lag)
            {
                var diff = uts[t] - uts[t - Lag];
                var stamp = LeftTimeStamps ? uts.TimeStamp(t - Lag) : uts.TimeStamp(t);
                differences.Add(stamp, diff, false);
            }
        }
        if (LeftTimeStamps && PadWithZeroes)
        {
            for (var t = uts.Count - Lag; t < uts.Count; ++t)
            {
                if (t % Spacing == Phase)
                {
                    differences.Add(uts.TimeStamp(t), 0, false);
                }
            }
        }
    }
    if (lts != null)
    {
        // difference each segment of the longitudinal data separately
        var segments = new List<Data.TimeSeries>(lts.Count);
        for (var i = 0; i < lts.Count; ++i)
        {
            uts = lts[i];
            var du = new Data.TimeSeries();
            for (var t = Lag; t < uts.Count; ++t)
            {
                var diff = uts[t] - uts[t - Lag];
                var stamp = LeftTimeStamps ? uts.TimeStamp(t - Lag) : uts.TimeStamp(t);
                du.Add(stamp, diff, false);
            }
            segments.Add(du);
        }
        longDifferences = new Longitudinal(segments);
    }
    IsValid = true;
}
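// The transform computed above, for t = Phase, Phase + Spacing, ... with t >= Lag:
//
//   d_t = x_t - x_{t-Lag}
//
// stamped at time t (or at t - Lag when LeftTimeStamps is set), and optionally
// zero-padded so the output spans the same time range as the input.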
public override void Recompute()
{
    IsValid = false;
    combination = null;
    if (GetInputType(-1) != InputType.UnivariateTS)
    {
        return;
    }
    // hacky fix to use old saved files
    if (Coefficients == null)
    {
        Coefficients = new[] { 1.0, -1.0 };
    }
    var n = Coefficients.Length;
    var series = GetInputBundle();
    if (series.Count != Coefficients.Length)
    {
        return;
    }
    // now we should be able to do something
    var ts = new int[n]; // index of next one in each ts to check
    var Ts = new int[n]; // counts
    for (var i = 0; i < n; ++i)
    {
        Ts[i] = series[i].Count;
    }
    combination = new Data.TimeSeries();
    if (n == 2)
    {
        combination.Title = string.Format("{0:0.0}x{1} {2} {3:0.0}x{4}",
            Coefficients[0], series[0].Title,
            Coefficients[1] >= 0 ? '+' : '-',
            Math.Abs(Coefficients[1]), series[1].Title);
    }
    else
    {
        combination.Title = "Linear Comb.";
    }
    if (UseTimesFromFirst)
    {
        // evaluate the combination at the first series' timestamps
        for (var t = 0; t < series[0].Count; ++t)
        {
            var sum = series[0][t] * Coefficients[0];
            var tstamp = series[0].TimeStamp(t);
            for (var i = 1; i < n; ++i)
            {
                sum += series[i].ValueAtTime(tstamp) * Coefficients[i];
            }
            combination.Add(tstamp, sum, true);
        }
    }
    else // either require exact time matching or else use step functions
    {
        var done = false;
        while (!done)
        {
            var dts = new DateTime[n];
            var allDatesSame = true;
            var argmin = 0;
            var minval = DateTime.MaxValue;
            for (var i = 0; i < n; ++i)
            {
                dts[i] = ts[i] < Ts[i] ? series[i].TimeStamp(ts[i]) : DateTime.MaxValue;
                if (dts[i] < minval)
                {
                    argmin = i;
                    minval = dts[i];
                }
                if (i > 0)
                {
                    allDatesSame &= dts[i] == dts[i - 1];
                }
            }
            if (allDatesSame)
            {
                var sum = 0.0; // got one!
                for (var i = 0; i < n; ++i)
                {
                    sum += series[i][ts[i]] * Coefficients[i];
                }
                combination.Add(series[0].TimeStamp(ts[0]), sum, false);
                for (var i = 0; i < n; ++i)
                {
                    ++ts[i];
                }
            }
            else if (!RequiresExactTimeMatch)
            {
                // evaluate at the minimum timestamp, treating lagging series as
                // step functions (last value carried forward), then advance
                var sum = 0.0;
                var valid = true;
                for (var i = 0; i < n; ++i)
                {
                    if (ts[i] < Ts[i] && series[i].TimeStamp(ts[i]) <= minval)
                    {
                        sum += Coefficients[i] * series[i][ts[i]];
                    }
                    else
                    {
                        if (ts[i] > 0)
                        {
                            sum += Coefficients[i] * series[i][ts[i] - 1];
                        }
                        else
                        {
                            valid = false;
                        }
                    }
                }
                if (valid)
                {
                    combination.Add(minval, sum, false);
                }
                for (var i = 0; i < n; ++i)
                {
                    if (ts[i] < Ts[i])
                    {
                        if (series[i].TimeStamp(ts[i]) <= minval)
                        {
                            ++ts[i];
                        }
                    }
                }
            }
            else if (ts[argmin] < Ts[argmin])
            {
                ++ts[argmin];
            }
            done = true;
            for (var i = 0; i < n; ++i)
            {
                done &= ts[i] == Ts[i];
            }
        }
    }
    if (combination.Count == 0)
    {
        combination = null;
    }
    else
    {
        IsValid = true;
    }
}
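// In every branch the output value is the same linear combination
//
//   c(t) = sum_{i=0}^{n-1} Coefficients[i] * x_i(t)
//
// the branches differ only in which timestamps t are used and in how x_i(t) is
// obtained when the series do not share a timestamp (interpolation, step functions,
// or exact matching).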
private double GetConditionalSig2(int t, Data.TimeSeries localLogReturns, Vector<double> sigmaSquared,
    Vector<double> param, double marginalVariance)
{
    double ls2;
    switch (modelType)
    {
        case GARCHType.EGARCH:
            // EGARCH works on the log of the conditional variance
            ls2 = alpha(0, param);
            for (var i = 1; i <= dataOrder; ++i)
            {
                if (t - i >= 0)
                {
                    var ztmi = localLogReturns[t - i] / Math.Exp(sigmaSquared[t - i] / 2.0);
                    // clip standardized residuals to [-5, 5] for numerical stability
                    if (ztmi > 5.0)
                    {
                        ztmi = 5;
                    }
                    if (ztmi < -5.0)
                    {
                        ztmi = -5;
                    }
                    ls2 += alpha(i, param) * (Math.Abs(ztmi) + gamma(i, param) * ztmi);
                }
                else
                {
                    ls2 += alpha(i, param) * root2onpi * marginalVariance;
                }
            }
            for (var i = 1; i <= intrinsicOrder; ++i)
            {
                if (t - i >= 0)
                {
                    ls2 += beta(i, param) * sigmaSquared[t - i];
                }
                else
                {
                    ls2 += beta(i, param) * marginalVariance;
                }
            }
            // clip the log-variance to avoid overflow in Math.Exp
            if (ls2 < -80)
            {
                ls2 = -80;
            }
            if (ls2 > 80)
            {
                ls2 = 80;
            }
            sigmaSquared[t] = ls2;
            return Math.Exp(ls2);

        case GARCHType.Standard:
            ls2 = alpha(0, param);
            for (var i = 1; i <= dataOrder; ++i)
            {
                if (t - i >= 0)
                {
                    var ztmi = localLogReturns[t - i];
                    ls2 += alpha(i, param) * ztmi * ztmi;
                }
                else
                {
                    ls2 += alpha(i, param) * marginalVariance;
                }
            }
            for (var i = 1; i <= intrinsicOrder; ++i)
            {
                if (t - i >= 0)
                {
                    ls2 += beta(i, param) * sigmaSquared[t - i];
                }
                else
                {
                    ls2 += beta(i, param) * marginalVariance;
                }
            }
            sigmaSquared[t] = ls2;
            return ls2;
    }
    return 0;
}
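// Recursions implemented above, with m = dataOrder, s = intrinsicOrder, a = alpha,
// b = beta, g = gamma; pre-sample terms are filled in using the marginal variance,
// and for EGARCH the sigmaSquared[] vector actually stores ln(sigma_t^2):
//
//   Standard GARCH(m,s):  sigma_t^2    = a_0 + sum_{i=1}^{m} a_i x_{t-i}^2 + sum_{j=1}^{s} b_j sigma_{t-j}^2
//   EGARCH:               ln sigma_t^2 = a_0 + sum_{i=1}^{m} a_i (|z_{t-i}| + g_i z_{t-i}) + sum_{j=1}^{s} b_j ln sigma_{t-j}^2
//
// where z_t = x_t / sigma_t is the standardized return.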
public override double LogLikelihood(Vector<double> parameter, double penaltyFactor, bool fillOutputs)
{
    var pbak = Parameters;
    if (values == null)
    {
        return double.NaN;
    }
    if (parameter != null)
    {
        Parameters = parameter;
    }
    var sigmaSquared = Vector<double>.Build.Dense(values.Count);
    var mVar = GetVariance(Parameters);
    double logLikelihood = 0;
    var allLLs = Vector<double>.Build.Dense(values.Count);
    for (var t = 0; t < values.Count; ++t)
    {
        allLLs[t] = conditionalLL(t, sigmaSquared, Parameters, mVar);
        logLikelihood += allLLs[t];
    }
    if (fillOutputs)
    {
        // standardized residuals and one-step-ahead predictive standard deviations
        var rts = new Data.TimeSeries { Title = $"{values.Title}[GARCH Res]" };
        predictiveStdDevAtAvail = new Data.TimeSeries { Title = $"{values.Title}Pred.StDev[AA]" };
        for (var t = 0; t < values.Count; ++t)
        {
            var rx = values[t] / Math.Sqrt(sigmaSquared[t]);
            rts.Add(values.TimeStamp(t), rx, false);
            if (t > 0)
            {
                predictiveStdDevAtAvail.Add(values.TimeStamp(t - 1), Math.Sqrt(sigmaSquared[t]), false);
            }
        }
        Residuals = rts;
        GoodnessOfFit = logLikelihood;
    }
    if (parameter != null)
    {
        Parameters = pbak; // restore the original parameter vector
    }
    var llp = new LogLikelihoodPenalizer(allLLs);
    return llp.LogLikelihood - llp.Penalty * penaltyFactor;
}