public void TestTimeSeriesMonitorAutoUpdate() {
  var env = new Simulation();
  var stat = new TimeSeriesMonitor(env);
  // first run: the monitor is updated from within the process
  env.Process(StatProcess(env, stat));
  env.Run();
  // after a reset, an identical second run must update the monitor just as before
  stat.Reset();
  env.Process(StatProcess(env, stat));
  env.Run();
}
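// The StatProcess helper referenced above is not part of this excerpt. The sketch below is a
// hypothetical stand-in, assuming it simply drives the monitor with a few UpdateTo calls at
// successive D-API time steps; the values used here are illustrative, not the original ones.
private IEnumerable<Event> StatProcess(Simulation env, TimeSeriesMonitor stat) {
  stat.UpdateTo(3);             // level 3 from t = 0
  yield return env.TimeoutD(1);
  stat.UpdateTo(1);             // level 1 from t = 1
  yield return env.TimeoutD(1);
  stat.UpdateTo(0);             // back to 0 at t = 2; time-weighted mean so far: (3 + 1) / 2 = 2
}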
public void Simulate(int repetitions = 5) {
  // analytical M/M/1 reference values: arrival rate lambda, service rate mu, utilization rho
  var lambda = 1 / OrderArrivalTime.TotalDays;
  var mu = 1 / ProcessingTime.TotalDays;
  var rho = lambda / mu;
  var analyticWIP = rho / (1 - rho);
  var analyticLeadtime = 1 / (mu - lambda);
  var analyticWaitingtime = rho / (mu - lambda);

  var env = new Simulation(randomSeed: 1, defaultStep: TimeSpan.FromDays(1));
  var utilization = new TimeSeriesMonitor(env, name: "Utilization");
  var wip = new TimeSeriesMonitor(env, name: "WIP", collect: true);
  var leadtime = new SampleMonitor(name: "Lead time", collect: true);
  var waitingtime = new SampleMonitor(name: "Waiting time", collect: true);

  env.Log("Analytical results of this system:");
  env.Log("Time\tUtilization.Mean\tWIP.Mean\tLeadtime.Mean\tWaitingTime.Mean");
  env.Log("{4}\t{0}\t{1}\t{2}\t{3}", rho, analyticWIP, analyticLeadtime, analyticWaitingtime, double.PositiveInfinity);
  env.Log("");

  // example to create a running report of these measures every simulated week
  //var report = Report.CreateBuilder(env)
  //  .Add("Utilization", utilization, Report.Measures.Mean | Report.Measures.StdDev)
  //  .Add("WIP", wip, Report.Measures.Min | Report.Measures.Mean | Report.Measures.Max)
  //  .Add("Leadtime", leadtime, Report.Measures.Min | Report.Measures.Mean | Report.Measures.Max)
  //  .Add("WaitingTime", waitingtime, Report.Measures.Min | Report.Measures.Mean | Report.Measures.Max)
  //  .SetOutput(env.Logger) // use a "new StreamWriter("report.csv")" to direct to a file
  //  .SetSeparator("\t")
  //  .SetPeriodicUpdate(TimeSpan.FromDays(7), withHeaders: true)
  //  .Build();

  var summary = Report.CreateBuilder(env)
    .Add("Utilization", utilization, Report.Measures.Mean)
    .Add("WIP", wip, Report.Measures.Mean)
    .Add("Leadtime", leadtime, Report.Measures.Mean)
    .Add("WaitingTime", waitingtime, Report.Measures.Mean)
    .SetOutput(env.Logger)
    .SetSeparator("\t")
    .SetFinalUpdate(withHeaders: false) // creates a summary of the means at the end
    .SetTimeAPI(useDApi: true)
    .Build();

  env.Log("Simulated results of this system (" + repetitions + " repetitions):");
  env.Log("");
  summary.WriteHeader(); // write the header just once

  for (var i = 0; i < repetitions; i++) {
    env.Reset(i + 1); // reset environment with a new random seed for each repetition
    utilization.Reset(); // reset monitors
    wip.Reset();
    leadtime.Reset();
    waitingtime.Reset();

    var server = new Resource(env, capacity: 1) {
      Utilization = utilization,
      WIP = wip,
      LeadTime = leadtime,
      WaitingTime = waitingtime,
    };

    env.Process(Source(env, server));
    env.Process(HandleWarmup(env, TimeSpan.FromDays(32), utilization, wip, leadtime, waitingtime));
    env.Run(TimeSpan.FromDays(365));
  }

  env.Log("");
  env.Log("Detailed results from the last run:");
  env.Log("");
  env.Log(utilization.Summarize());
  env.Log(wip.Summarize(maxBins: 10, binWidth: 2));
  env.Log(leadtime.Summarize(maxBins: 10, binWidth: 5 / 1440.0)); // 5-minute bins (time unit is days)
  env.Log(waitingtime.Summarize(maxBins: 10, binWidth: 4 / 1440.0)); // 4-minute bins
}
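// The fields and processes referenced in Simulate above (OrderArrivalTime, ProcessingTime,
// Source, HandleWarmup) are not shown in this excerpt. The sketch below illustrates one way
// they could look for an M/M/1 station: the time constants, the Order helper, and the use of
// Sim#'s TimeoutExponential convenience method are assumptions, not taken from the original.
private readonly TimeSpan OrderArrivalTime = TimeSpan.FromMinutes(4); // assumed mean inter-arrival time
private readonly TimeSpan ProcessingTime = TimeSpan.FromMinutes(3);   // assumed mean processing time (rho = 0.75)

private IEnumerable<Event> Source(Simulation env, Resource server) {
  // generate orders with exponentially distributed inter-arrival times
  while (true) {
    yield return env.TimeoutExponential(OrderArrivalTime);
    env.Process(Order(env, server));
  }
}

private IEnumerable<Event> Order(Simulation env, Resource server) {
  // an order waits for the single server and is then processed; the monitors attached to the
  // resource (Utilization, WIP, LeadTime, WaitingTime) are updated by the resource itself
  using (var req = server.Request()) {
    yield return req;
    yield return env.TimeoutExponential(ProcessingTime);
  }
}

private IEnumerable<Event> HandleWarmup(Simulation env, TimeSpan warmupTime,
    TimeSeriesMonitor utilization, TimeSeriesMonitor wip,
    SampleMonitor leadtime, SampleMonitor waitingtime) {
  // discard all observations made during the warm-up period
  yield return env.Timeout(warmupTime);
  utilization.Reset();
  wip.Reset();
  leadtime.Reset();
  waitingtime.Reset();
}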
public void TestTimeSeriesMonitor(double[] times, double[] values, double min, double max, double mean, double variance, double area, double median) {
  var env = new Simulation();
  // one monitor that only tracks summary statistics and one that also collects the full series
  var stat = new TimeSeriesMonitor(env, collect: false) { Active = false };
  var stat_collect = new TimeSeriesMonitor(env, collect: true) { Active = false };

  var count = 0;
  foreach (var v in times.Zip(values, Tuple.Create)) {
    if (v.Item1 > 0) {
      env.RunD(v.Item1);
    }
    // the monitors are active only for the updates at indices 3 .. times.Length - 3,
    // i.e. values.Length - 5 updates are recorded
    if (count == 3) {
      stat.Active = stat_collect.Active = true;
    }
    stat.UpdateTo(v.Item2);
    stat_collect.UpdateTo(v.Item2);
    if (count == times.Length - 3) {
      stat.Active = stat_collect.Active = false;
    }
    count++;
  }

  Assert.Equal(min, stat.Min);
  Assert.Equal(max, stat.Max);
  Assert.Equal(mean, stat.Mean, 14);
  Assert.Equal(variance, stat.Variance, 14);
  Assert.Equal(area, stat.Area);
  // without collect: true, median, percentiles, and the series are not available
  Assert.True(double.IsNaN(stat.GetMedian()));
  Assert.True(double.IsNaN(stat.GetPercentile(0.25)));
  Assert.True(double.IsNaN(stat.GetPercentile(0.75)));
  Assert.Empty(stat.Series);

  Assert.Equal(min, stat_collect.Min);
  Assert.Equal(max, stat_collect.Max);
  Assert.Equal(mean, stat_collect.Mean, 14);
  Assert.Equal(variance, stat_collect.Variance, 14);
  Assert.Equal(area, stat_collect.Area);
  Assert.Equal(median, stat_collect.GetMedian());
  Assert.True(stat_collect.GetPercentile(0.25) <= median);
  Assert.True(stat_collect.GetPercentile(0.75) >= median);
  Assert.Equal(values.Length - 5, stat_collect.Series.Count());

  // resetting must deactivate the monitors again
  stat.Reset();
  Assert.False(stat.Active);
  stat_collect.Reset();
  Assert.False(stat_collect.Active);

  // a second, identical pass after the reset must reproduce the same statistics
  count = 0;
  foreach (var v in times.Zip(values, Tuple.Create)) {
    if (v.Item1 > 0) {
      env.RunD(v.Item1);
    }
    if (count == 3) {
      stat.Active = stat_collect.Active = true;
    }
    stat.UpdateTo(v.Item2);
    stat_collect.UpdateTo(v.Item2);
    if (count == times.Length - 3) {
      stat.Active = stat_collect.Active = false;
    }
    count++;
  }

  Assert.Equal(min, stat.Min);
  Assert.Equal(max, stat.Max);
  Assert.Equal(mean, stat.Mean, 14);
  Assert.Equal(variance, stat.Variance, 14);
  Assert.Equal(area, stat.Area);
  Assert.True(double.IsNaN(stat.GetMedian()));
  Assert.True(double.IsNaN(stat.GetPercentile(0.25)));
  Assert.True(double.IsNaN(stat.GetPercentile(0.75)));
  Assert.Empty(stat.Series);

  Assert.Equal(min, stat_collect.Min);
  Assert.Equal(max, stat_collect.Max);
  Assert.Equal(mean, stat_collect.Mean, 14);
  Assert.Equal(variance, stat_collect.Variance, 14);
  Assert.Equal(area, stat_collect.Area);
  Assert.Equal(median, stat_collect.GetMedian());
  Assert.True(stat_collect.GetPercentile(0.25) <= median);
  Assert.True(stat_collect.GetPercentile(0.75) >= median);
  Assert.Equal(values.Length - 5, stat_collect.Series.Count());
}