/// <summary>
/// Reads the raw numeric rows and maps them onto <see cref="Forecasting"/> entries.
/// Rows 0-11 carry the full column set, rows 12-47 carry time and demand only,
/// and any remaining rows carry just the time value.
/// NOTE(review): 12 and 48 look like "one seasonal year" / "history horizon"
/// boundaries of the input file — confirm against the data source.
/// </summary>
/// <returns>One <see cref="Forecasting"/> item per input row, in input order.</returns>
public List<Forecasting> ParseData()
{
    List<List<double>> data = Read();
    List<Forecasting> timeSeries = new List<Forecasting>();

    for (int i = 0; i < data.Count; i++)
    {
        Forecasting time = new Forecasting();

        // Every row has at least the time column; it was previously assigned
        // redundantly in all three branches.
        time.Time = data[i][0];

        if (i < 12)
        {
            // Fully populated historical rows (8 columns).
            time.Demand = data[i][1];
            time.Level = data[i][2];
            time.TrendLine = data[i][3];
            time.Seasonal_Adj = data[i][4];
            time.Onestep_for = data[i][5];
            time.For_err = data[i][6];
            time.Squered_err = data[i][7];
        }
        else if (i < 48) // FIX: `i >= 12 &&` was redundant — guaranteed by the first branch failing
        {
            time.Demand = data[i][1];
        }

        timeSeries.Add(time);
    }

    return timeSeries;
}
/// <summary>
/// Exploratory test: prints the story-delivery forecast distribution
/// (10 stories, starting 2019-11-18) for manual inspection.
/// </summary>
public void Story_delivery()
{
    var forecaster = new Forecasting();

    var forecast = forecaster.WhenWillTheStoriesBeDone(new DateTime(2019, 11, 18), 10, _historicalData);

    foreach (var entry in forecast.Entries)
    {
        _testOutputHelper.WriteLine($"{entry.DaysUntilDelivery}\t{entry.Probability}\t{entry.Percentile}");
    }
}
/// <summary>
/// Exploratory test: prints the issue-delivery forecast distribution
/// (5 issues, 2 resources) for manual inspection.
/// </summary>
public void Issue_delivery_based_on_resources()
{
    var forecaster = new Forecasting();

    var forecast = forecaster.WhenWillTheIssuesBeDone(2, 5, _historicalData);

    foreach (var entry in forecast.Entries)
    {
        _testOutputHelper.WriteLine($"{entry.DaysUntilDelivery}\t{entry.Probability}\t{entry.Percentile}");
    }
}
/// <summary>
/// Builds a forecast for every row of the raw data using the model expression
/// from the text box, and shows the result as CSV (id column, forecast column)
/// in <c>txtForecasts</c>.
/// </summary>
/// <param name="sender">WPF event source (unused).</param>
/// <param name="e">WPF event args (unused).</param>
private void btnForecast_Click(object sender, RoutedEventArgs e)
{
    // Ask the user which header identifies each row.
    InputBox inputBox = new InputBox();
    inputBox.Label1.Text = "Enter the header name you want to use as the id column.";
    inputBox.ShowDialog();
    string selectedHeaderName = inputBox.TextBox1.Text;

    // FIX: was `selectedHeaderName == "" | selectedHeaderName == null`, which used the
    // non-short-circuiting `|` operator and tested for empty before null.
    if (string.IsNullOrEmpty(selectedHeaderName))
    {
        return;
    }

    NumericTable numericTable = ProcessDefinition.ToNumericTableUseProcessDefinition(RawData, ",", -1);
    StringTable stringTable = ProcessData.RawTableToStringTable(ProcessData.ParseCSV(RawData, ","), -1);

    int idHeaderIndex = numericTable.IndependentHeaders.ToList().IndexOf(selectedHeaderName);
    if (idHeaderIndex == -1)
    {
        MessageBox.Show("Id header not found, aborted.");
        return;
    }

    int totalRows = numericTable.Independents.GetUpperBound(0) + 1;
    int totalColumns = numericTable.Independents.GetUpperBound(1) + 1;

    List<string> rows = new List<string>();
    rows.Add(selectedHeaderName + "," + "Forecast");

    // Parse the model expression once up front; the per-row loop only substitutes numbers.
    List<Operator> operators = Operator.BuildOperators();
    Node modelExpression = Node.Parse(txtModelExpression.Text, operators);
    List<Node> branches = modelExpression.DescendantsAndSelf();
    List<Pointer> pointers = Node.BuildPointers("Independent", branches);

    for (int rowIndex = 0; rowIndex < totalRows; rowIndex++)
    {
        string id = stringTable.Independents[rowIndex, idHeaderIndex];

        List<double> numbers = new List<double>();
        for (int columnIndex = 0; columnIndex < totalColumns; columnIndex++)
        {
            numbers.Add(numericTable.Independents[rowIndex, columnIndex]);
        }

        double forecast = Forecasting.ForecastFast(numbers, branches, pointers);
        forecast = Forecasting.ScaleAndOffsetForecast(forecast, ProcessDefinition.Scale, ProcessDefinition.Offset);

        rows.Add(id + "," + forecast.ToString());
    }

    txtForecasts.Text = string.Join(Environment.NewLine, rows.ToArray());
}
/// <summary>
/// End-to-end happy path: create a history (partly from pre-parsed datapoints,
/// partly from a raw string), load it back by id and by name, then calculate
/// a forecast over it.
/// </summary>
public void Acceptance()
{
    const int NUMBER_OF_SIMULATIONS = 1000;
    const int GRANULARITY_OF_FORECAST = 20;

    var historyRepository = new HistoryRepository(REPO_PATH);
    var forecasting = new Forecasting(new MonteCarloSimulation(), NUMBER_OF_SIMULATIONS, GRANULARITY_OF_FORECAST);
    var sut = new RequestHandler(historyRepository, forecasting);

    var newHistory = new NewHistoryDto
    {
        Id = "abc",
        Name = "test history",
        Email = "*****@*****.**",
        HistoricalData = new[]
        {
            new DatapointDto { Value = 1f, Tags = new[] { "a" } },
            new DatapointDto { Value = 2f, Tags = new[] { "a" } }
        },
        HistoricalDataToParse = "2;a,x\n3;a\n3;a\n4;a\n10;b,x\n10;b\n20;b\n20;b\n30;b,x"
    };

    var historyId = sut.Create_history(newHistory);
    Assert.AreEqual("abc", historyId);

    var history = sut.Load_history_by_id(historyId);
    Assert.AreEqual("test history", history.Name);
    Assert.AreEqual(11, history.HistoricalData.Length);
    Assert.AreEqual(new[] { "a", "b", "x" }, history.Tags);

    history = sut.Load_history_by_name("test history");
    Assert.AreEqual("*****@*****.**", history.Email);

    var forecast = sut.Calculate_forecast(historyId, new[]
    {
        new FeatureDto { Quantity = 2, Tags = new[] { "a" } },
        new FeatureDto { Quantity = 1, Tags = new[] { "b" } },
    });

    // only prognosises with p>=0.5
    Assert.IsTrue(forecast.Distribution.Select(p => p.Count).Sum() >= 500);
}
/// <summary>
/// Persists a new <see cref="Forecasting"/> entity.
/// </summary>
/// <param name="entity">The entity to insert.</param>
/// <returns>The id assigned to the saved entity.</returns>
public int ForecastingAdd(Forecasting entity)
{
    context.Forecasting.Add(entity);
    context.SaveChanges();

    return entity.Id;
}
/// <summary>
/// Marks the given <see cref="Forecasting"/> entity as modified and saves it.
/// </summary>
/// <param name="entity">The (detached or attached) entity carrying the new values.</param>
public void ForecastingUpdate(Forecasting entity)
{
    var entry = context.Entry<Forecasting>(entity);
    entry.State = EntityState.Modified;

    context.SaveChanges();
}
/// <summary>
/// Replays a real delivery history day by day: exports the initial cycle-time /
/// throughput distributions, re-forecasts after every delivered issue, and
/// finally produces a "rear mirror" forecast now that the full history is known.
/// FIX: the deliver/decrement/re-forecast/export sequence was copy-pasted ten
/// times; it is extracted into a local function with identical statement order.
/// </summary>
public void Run()
{
    const int INITIAL_NUMBER_OF_ISSUES = 5;

    var sut = new Forecasting();
    var issues = IssueRepository.Import().ToList();

    // Distributions before the simulated period.
    var cts = issues.Select(x => x.CycleTime.Days).ToArray();
    var ctsDistribution = Statistics.Distribution(cts);
    Export("CTs 2019-11-06.csv", ctsDistribution);

    var tps = issues.BusinessDayThroughputs();
    var tpsDistribution = Statistics.Distribution(tps);
    Export("TPs 2019-11-06.csv", tpsDistribution);

    // Initial forecast.
    var numberOfIssues = INITIAL_NUMBER_OF_ISSUES;
    var fc = sut.WhenWillTheIssuesBeDone(new DateTime(2019, 11, 6), numberOfIssues, issues.ToArray());
    Export(new DateTime(2019, 11, 6), fc);

    // Records one delivery, adjusts the remaining backlog (newlyDiscovered
    // covers scope added since the previous forecast), and re-forecasts from
    // the delivery day.
    void DeliverAndReforecast(DateTime started, DateTime delivered, int newlyDiscovered = 0)
    {
        issues.Add(new Issue(started, delivered, null, null, false));
        numberOfIssues += -1 + newlyDiscovered;
        fc = sut.WhenWillTheIssuesBeDone(delivered, numberOfIssues, issues.ToArray());
        Export(delivered, fc);
    }

    DeliverAndReforecast(new DateTime(2019, 11, 6), new DateTime(2019, 11, 7));
    DeliverAndReforecast(new DateTime(2019, 11, 7), new DateTime(2019, 11, 8));
    DeliverAndReforecast(new DateTime(2019, 11, 6), new DateTime(2019, 11, 11), 2);
    DeliverAndReforecast(new DateTime(2019, 11, 11), new DateTime(2019, 11, 13));
    DeliverAndReforecast(new DateTime(2019, 11, 8), new DateTime(2019, 11, 14), 1);
    DeliverAndReforecast(new DateTime(2019, 11, 11), new DateTime(2019, 11, 15));
    DeliverAndReforecast(new DateTime(2019, 11, 13), new DateTime(2019, 11, 18), 2);
    DeliverAndReforecast(new DateTime(2019, 11, 15), new DateTime(2019, 11, 19));
    DeliverAndReforecast(new DateTime(2019, 11, 18), new DateTime(2019, 11, 20));

    // Last delivery empties the backlog; no further forecast is exported for it.
    issues.Add(new Issue(new DateTime(2019, 11, 18), new DateTime(2019, 11, 21), null, null, false));
    numberOfIssues += -1;
    Assert.Equal(0, numberOfIssues);

    // Rear mirror wisdom: re-forecast the start date knowing the full history
    // (initial backlog plus the 5 issues discovered along the way).
    fc = sut.WhenWillTheIssuesBeDone(new DateTime(2019, 11, 6), INITIAL_NUMBER_OF_ISSUES + 5, issues.ToArray());
    File.Move("2019-11-06.csv", "2019-11-06-v1.csv", true);
    Export(new DateTime(2019, 11, 6), fc);
    File.Move("2019-11-06.csv", "2019-11-06-v2.csv", true);

    // Distributions after the simulated period.
    cts = issues.Select(x => x.CycleTime.Days).ToArray();
    ctsDistribution = Statistics.Distribution(cts);
    Export("CTs 2019-11-21.csv", ctsDistribution);

    tps = issues.BusinessDayThroughputs();
    tpsDistribution = Statistics.Distribution(tps);
    Export("TPs 2019-11-21.csv", tpsDistribution);
}
/// <summary>
/// Wires the handler to its collaborators.
/// </summary>
/// <param name="repo">Store used to create and look up histories.</param>
/// <param name="forecasting">Service that calculates forecast distributions.</param>
internal RequestHandler(HistoryRepository repo, Forecasting forecasting)
{
    _repo = repo;
    _forecasting = forecasting;
}
/// <summary>
/// Verifies a full forecast calculation with a deterministic "Monte Carlo"
/// simulation: the injected delegate dequeues a fixed sequence of indices, so
/// the expected distribution can be computed by hand (see the trace below).
/// </summary>
public void Forecast()
{
    // Deterministic stand-in for the RNG: each call consumes the next index.
    var rndNumbers = new Queue<int>(new[] { 0, 2, 1, 1, 5, 3, 3, 4, 2 });
    var montecarlo = new MonteCarloSimulation(_ => rndNumbers.Dequeue());
    var sut = new Forecasting(montecarlo, 3, 2); //TODO: integration test for forecasting

    var historicalData = new[]
    {
        new History.Datapoint { Value = 1f, Tags = new[] { "a" } },
        new History.Datapoint { Value = 2f, Tags = new[] { "a" } },
        new History.Datapoint { Value = 2f, Tags = new[] { "a" } },
        new History.Datapoint { Value = 3f, Tags = new[] { "a" } },
        new History.Datapoint { Value = 3f, Tags = new[] { "a" } },
        new History.Datapoint { Value = 4f, Tags = new[] { "a" } },
        new History.Datapoint { Value = 10f, Tags = new[] { "b" } },
        new History.Datapoint { Value = 10f, Tags = new[] { "b" } },
        new History.Datapoint { Value = 20f, Tags = new[] { "b" } },
        new History.Datapoint { Value = 20f, Tags = new[] { "b" } },
        new History.Datapoint { Value = 30f, Tags = new[] { "b" } }
    };
    var features = new[]
    {
        new Feature { Quantity = 2, Tags = new[] { "a" } },
        new Feature { Quantity = 1, Tags = new[] { "b" } },
    };

    /*
     * 3 simulations, 2 intervals
     * 2a(0..5), 1b(0..4)
     *
     * a: 0=1  1=2  3=3
     * a: 2=2  5=4  4=3
     * b: 1=10 3=20 2=20
     *      13   26   26
     *
     * 26-13=13
     * 13/2=6.5
     * 13..19.5, 19.5..26
     * (13,1,0.33), (26,2,1.0)
     */

    var result = sut.Calculate(historicalData, features);

    foreach (var f in result.Features)
    {
        Debug.WriteLine($"{string.Join(",", f)}");
    }
    foreach (var po in result.Distribution)
    {
        Debug.WriteLine($"{po.Prognosis}, {po.CummulatedProbability}");
    }

    Assert.AreEqual(new[] { "a", "a", "b" }, result.Features);
    Assert.AreEqual(2, result.Distribution.Length);
    Assert.AreEqual(3, result.Distribution.Select(g => g.Count).Sum());
    Assert.AreEqual(13f, result.Distribution[0].Prognosis);
    Assert.AreEqual(1, result.Distribution[0].Count);
    Assert.AreEqual(0.33f, result.Distribution[0].CummulatedProbability, 0.01f);
    Assert.AreEqual(26f, result.Distribution[1].Prognosis);
    Assert.AreEqual(2, result.Distribution[1].Count);
    Assert.AreEqual(1f, result.Distribution[1].CummulatedProbability, 0.01f);
}
/// <summary>
/// Forecast from the last point of the original time series up to
/// <paramref name="stepsAhead"/> steps ahead.
/// </summary>
/// <param name="group">Component indices to reconstruct the series from.</param>
/// <param name="stepsAhead">Number of steps to forecast beyond the series end.</param>
/// <param name="wholeSeries">Whether to return the whole series or only the forecast part.</param>
/// <param name="method">Which forecasting algorithm to apply.</param>
/// <returns>The forecast values.</returns>
public double[] Forecast(int[] group, int stepsAhead, bool wholeSeries = true, Forecasting method = Forecasting.Rforecasting)
{
    return method == Forecasting.Rforecasting
        ? Rforecasting(group, stepsAhead, wholeSeries)
        : Vforecasting(group, stepsAhead, wholeSeries);
}
/// <summary>
/// Forecast using the leading components 1..<paramref name="compCount"/>;
/// convenience overload that delegates to <see cref="Forecast(int[], int, bool, Forecasting)"/>.
/// </summary>
public double[] Forecast(int compCount, int stepsAhead, bool wholeSeries = true, Forecasting method = Forecasting.Rforecasting)
{
    // Build the 1-based component group explicitly.
    var componentGroup = new int[compCount];
    for (var i = 0; i < compCount; i++)
    {
        componentGroup[i] = i + 1;
    }

    return Forecast(componentGroup, stepsAhead, wholeSeries, method);
}