public void Simulate(int repetitions = 5) {
    // Analytical steady-state measures of the M/M/1 system
    // (arrival rate lambda, service rate mu, traffic intensity rho).
    var arrivalRate = 1 / OrderArrivalTime.TotalDays;
    var serviceRate = 1 / ProcessingTime.TotalDays;
    var trafficIntensity = arrivalRate / serviceRate; // rho = lambda / mu
    var expectedWip = trafficIntensity / (1 - trafficIntensity);
    var expectedLeadTime = 1 / (serviceRate - arrivalRate);
    var expectedWaitingTime = trafficIntensity / (serviceRate - arrivalRate);

    var env = new Simulation(randomSeed: 1, defaultStep: TimeSpan.FromDays(1));
    var utilization = new TimeSeriesMonitor(env, name: "Utilization");
    var wip = new TimeSeriesMonitor(env, name: "WIP", collect: true);
    var leadtime = new SampleMonitor(name: "Lead time", collect: true);
    var waitingtime = new SampleMonitor(name: "Waiting time", collect: true);

    env.Log("Analytical results of this system:");
    env.Log("Time\tUtilization.Mean\tWIP.Mean\tLeadtime.Mean\tWaitingTime.Mean");
    env.Log("{4}\t{0}\t{1}\t{2}\t{3}", trafficIntensity, expectedWip, expectedLeadTime, expectedWaitingTime, double.PositiveInfinity);
    env.Log("");

    // example to create a running report of these measures every simulated week
    //var report = Report.CreateBuilder(env)
    //  .Add("Utilization", utilization, Report.Measures.Mean | Report.Measures.StdDev)
    //  .Add("WIP", wip, Report.Measures.Min | Report.Measures.Mean | Report.Measures.Max)
    //  .Add("Leadtime", leadtime, Report.Measures.Min | Report.Measures.Mean | Report.Measures.Max)
    //  .Add("WaitingTime", waitingtime, Report.Measures.Min | Report.Measures.Mean | Report.Measures.Max)
    //  .SetOutput(env.Logger) // use a "new StreamWriter("report.csv")" to direct to a file
    //  .SetSeparator("\t")
    //  .SetPeriodicUpdate(TimeSpan.FromDays(7), withHeaders: true)
    //  .Build();

    var summary = Report.CreateBuilder(env)
        .Add("Utilization", utilization, Report.Measures.Mean)
        .Add("WIP", wip, Report.Measures.Mean)
        .Add("Leadtime", leadtime, Report.Measures.Mean)
        .Add("WaitingTime", waitingtime, Report.Measures.Mean)
        .SetOutput(env.Logger)
        .SetSeparator("\t")
        .SetFinalUpdate(withHeaders: false) // creates a summary of the means at the end
        .SetTimeAPI(useDApi: true)
        .Build();

    env.Log("Simulated results of this system (" + repetitions + " repetitions):");
    env.Log("");
    summary.WriteHeader(); // write the header just once

    for (var rep = 0; rep < repetitions; rep++) {
        env.Reset(rep + 1); // new seed per repetition
        utilization.Reset();
        wip.Reset();
        leadtime.Reset();
        waitingtime.Reset();

        var server = new Resource(env, capacity: 1) {
            Utilization = utilization,
            WIP = wip,
            LeadTime = leadtime,
            WaitingTime = waitingtime,
        };

        env.Process(Source(env, server));
        // warm-up: statistics are discarded for the first 32 simulated days
        env.Process(HandleWarmup(env, TimeSpan.FromDays(32), utilization, wip, leadtime, waitingtime));
        env.Run(TimeSpan.FromDays(365));
    }

    env.Log("");
    env.Log("Detailed results from the last run:");
    env.Log("");
    env.Log(utilization.Summarize());
    env.Log(wip.Summarize(maxBins: 10, binWidth: 2));
    env.Log(leadtime.Summarize(maxBins: 10, binWidth: 5 / 1440.0));
    env.Log(waitingtime.Summarize(maxBins: 10, binWidth: 4 / 1440.0));
}
public void TestSampleMonitor(IEnumerable<double> data) {
    // SampleMonitor must ignore Add() calls while inactive: the first two
    // samples are fed with Active == false and must not influence any statistic.
    var stat = new SampleMonitor(collect: false) { Active = false };
    var stat_collect = new SampleMonitor(collect: true) { Active = false };
    var data_list = data.ToList();

    if (data_list.All(x => !double.IsNaN(x) && !double.IsInfinity(x))) {
        // Feeds every sample; both monitors are activated just before the third one.
        void AddAll() {
            var count = 0;
            foreach (var d in data_list) {
                if (count == 2) { stat.Active = stat_collect.Active = true; }
                stat.Add(d);
                stat_collect.Add(d);
                count++;
            }
        }

        // Expected statistics over the samples seen while active.
        // NOTE(review): like the original, this assumes at least 3 finite samples;
        // Average()/Min()/Max() throw on an empty sequence otherwise.
        var active = data_list.Skip(2).ToList(); // materialize once instead of re-enumerating per statistic
        var cnt = active.Count;
        var avg = active.Average();
        var sum = active.Sum();
        var min = active.Min();
        var max = active.Max();
        var pvar = active.Sum(d => (d - avg) * (d - avg)) / cnt; // population variance
        var ordered = active.OrderBy(x => x).ToList();
        // BUGFIX: the odd-count expectation previously read index cnt / 2 - 1
        // (via Skip(cnt / 2 - 1).First()), one left of the true middle; the middle
        // element of a 0-based sorted list of odd length cnt is at index cnt / 2.
        var med = cnt % 2 == 1
            ? ordered[cnt / 2]
            : (ordered[cnt / 2 - 1] + ordered[cnt / 2]) / 2.0;

        // Asserts both monitors against the expected values; only the collecting
        // monitor retains the raw samples and can therefore report a median.
        void AssertAll() {
            Assert.Equal(avg, stat.Mean);
            Assert.Equal(sum, stat.Total);
            Assert.Equal(cnt, stat.Count);
            Assert.Equal(min, stat.Min);
            Assert.Equal(max, stat.Max);
            Assert.Equal(pvar, stat.Variance, 14);
            Assert.True(double.IsNaN(stat.GetMedian())); // no samples kept -> NaN
            Assert.Empty(stat.Samples);

            Assert.Equal(avg, stat_collect.Mean);
            Assert.Equal(sum, stat_collect.Total);
            Assert.Equal(cnt, stat_collect.Count);
            Assert.Equal(min, stat_collect.Min);
            Assert.Equal(max, stat_collect.Max);
            Assert.Equal(pvar, stat_collect.Variance, 14);
            Assert.Equal(med, stat_collect.GetMedian());
            Assert.Equal(cnt, stat_collect.Samples.Count());
        }

        AddAll();
        AssertAll();

        // Reset must clear the statistics but leave the Active flag as set.
        stat.Active = false;
        stat.Reset();
        Assert.False(stat.Active);
        stat_collect.Active = false;
        stat_collect.Reset();
        Assert.False(stat_collect.Active);

        // A second identical run after Reset must reproduce the same statistics.
        AddAll();
        AssertAll();
    } else {
        // NaN / infinite samples must be rejected with an ArgumentException.
        stat.Active = true;
        Assert.Throws<ArgumentException>(() => {
            foreach (var d in data_list) {
                stat.Add(d);
            }
        });
    }
}