//cnm DefaultHeuristics
// Experiment 4: submits the cnm.wf workflow with a scheduler heuristic chosen by mode
// ("MinMin" for mode 2, otherwise "Stub"); on completion it advances mode and re-submits
// once so both heuristics get measured. Mirrors the mode-stepping pattern of Experiment5.
private void Experiment4(int mode)
{
    if (finished == 0) { DeleteFiles(); } // first run of the session: clear stale result files
    started = DateTime.Now;
    LogWrite(started.ToLongTimeString() + " Starting experiment 4 mode " + mode);
    SwitchParameter("DefaultHeuristics", (mode == 2 ? "MinMin" : "Stub"));
    var wfDescriptionReader = new DescriptionReader("cnm.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    LogWrite(_api.Script);
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
        CopyRows();
        // FIX: advance mode before the re-run check. The original tested the unchanged
        // mode, so a mode-2 run re-submitted itself forever; Experiment5 increments first.
        mode++;
        if (mode == 2) { Experiment4(mode); }
    };
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " state: pushed");
}
//Just one bsm
// Experiment 6: currently only submits a parasite testp load on b14.b14-113 and returns;
// the single-bsm submission below the return is short-circuited.
private void Experiment6()
{
    if (finished == 0) { DeleteFiles(); }
    started = DateTime.Now;
    // NOTE(review): "mode" is not a parameter of this method — presumably a class field
    // left over from another experiment; confirm it is initialized before this runs.
    LogWrite(started.ToLongTimeString() + " Starting experiment 6 mode " + mode);
    ParasiteLoading("b14.b14-113");
    //ParasiteLoading("b14.b14-22");
    // NOTE(review): everything below this return is unreachable dead code — apparently
    // disabled while debugging the parasite loading; confirm before deleting.
    return;
    var wfDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        finished++;
        Statuses[monitor.JobId.ToString()] = "Finished";
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " " + Statuses[monitor.JobId.ToString()] + " after " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Minutes + " min " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Seconds + " sec ");
        //CopyRows();
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}
//virtual starting
// Experiment 2: stops all VMs, then pushes up to `workflows` testp workflows from a
// timer (2 steps for the first two pushes, 1 step afterwards), re-arming the timer to
// `sleep` seconds after each push. The VM launcher runs alongside the pusher.
private void Experiment2()
{
    vmlauncher = new VMLauncher();
    vmlauncher.StopAll(20);
    steps = 2;
    workflows = 25;
    int sleep = 30;  // seconds between pushes; also passed as testp's in1 argument
    int delay = 140; // testp timeToWait argument
    var testpDescriptionReader = new DescriptionReader("testp.wf");
    int i = 0; // workflows pushed so far; captured by the timer closure
    timer = new Timer();
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        if (i >= workflows)
        {
            timer.Stop();
            LogWrite("Pusher stopped");
        }
        else
        {
            StringBuilder sb = new StringBuilder();
            sb.Append(testpDescriptionReader.Script);
            if (i > 1) { steps = 1; } // drop to single-step workflows from the third push on
            for (int step = 0; step < steps; step++)
            {
                sb.Append("\n step s" + step + " runs testp (\n in0=" + step + ",\n in1=" + sleep + ",\n timeToWait=" + (delay).ToString() + "\n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp2;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + (i + 1) + "/" + workflows + " testp pushed; " + steps + " steps");
            jobMonitor.Run();
            timer.Interval = sleep * 1000; // first tick fires after 100 ms; later ticks every `sleep` seconds
        }
        i++;
    });
    LogWrite("Sending jobs to " + ConfigurationManager.AppSettings["ServerName"] + ". Press any key to stop monitoring...");
    timer.Interval = 100;
    timer.Start();
    vmlauncher.Run();
    Console.ReadLine(); // block the caller until the operator stops monitoring
}
//estimation learning
// Pushes a series of bsm workflows pinned to a single resource to generate the timing
// history that Experiment3 later uses to train the BSM performance model.
private void Experiment3PushBsm()
{
    if (finished == 0) { DeleteFiles(); } // first run: clear stale result files
    var resourse = "b4.b4-131";
    LogWrite("Pushing BSM to resourse " + resourse);
    //workflows = 5;
    steps = 1;
    int[] sizes = { 1, 3, 4, 5, 6 };
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);
    for (int i = 0; i < sizes.Length; i++)
    {
        StringBuilder sb = new StringBuilder();
        //sb.Append(bsmDescriptionReader.Script);
        int stepDiffer = 0;
        for (int step = 1; step <= steps; step++)
        //foreach(var size in sizes)
        {
            // NOTE(review): always reads sizes[0] — the outer loop index i is never used to
            // pick a size, so every submission uses ForecastSize = 1. The commented-out
            // foreach suggests this changed during experimentation; confirm it is intended.
            var size = sizes[0];
            //if (stepDiffer >= sizes.Length) stepDiffer = 0;
            sb.Append("[Resource = " + '"' + resourse + '"' + "]\n");
            // Chain steps sequentially via "after" so each bsm step waits for the previous one.
            sb.Append("step MaskedFullBSM_" + stepDiffer + " runs bsm " + (stepDiffer > 0 ? " after MaskedFullBSM_" + (stepDiffer - 1) : "") + " \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
            stepDiffer++;
        }
        _api.Script = sb.ToString();
        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows();
        };
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + workflows + "; id:" + jobMonitor.JobId);
        LogWrite(_api.Script.Substring(0, 70));
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
    }
}
//cnm DefaultUrgentHeuristics
// Experiment 5: loads two resources with parasite tasks, switches the urgent-scheduling
// heuristic ("UBestFirst" for mode 2, otherwise "UGreedy"), submits cnm2.wf, and on
// completion advances mode and re-submits once so both heuristics are measured.
private void Experiment5(int mode)
{
    if (finished == 0) { DeleteFiles(); } // first run of the session: clear stale result files
    started = DateTime.Now;
    // FIX: original log line lacked a space between the timestamp and the message.
    LogWrite(started.ToLongTimeString() + " Starting experiment 5 mode " + mode);
    ParasiteLoading("b14.b14-113");
    ParasiteLoading("b14.b14-22");
    // FIX: the original prepended DateTime.Now.ToLongTimeString() to the parameter name,
    // so the scheduler parameter "DefaultUrgentHeuristics" could never match by name
    // (compare Experiment4, which passes the bare name "DefaultHeuristics").
    SwitchParameter("DefaultUrgentHeuristics", (mode == 2 ? "UBestFirst" : "UGreedy"));
    var wfDescriptionReader = new DescriptionReader("cnm2.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        finished++;
        Statuses[monitor.JobId.ToString()] = "Finished";
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " " + Statuses[monitor.JobId.ToString()] + " after " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Minutes + " min " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Seconds + " sec ");
        CopyRows();
        mode++;                               // step to the next heuristic
        if (mode == 2) { Experiment5(mode); } // re-run once for the second heuristic
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}
// Submits the parasite.wf testp workflow — optionally pinned to a resource — to keep
// that resource busy, then blocks until the parasite job reports the Active state.
// resourse: resource name to pin the workflow to, or null to let the scheduler choose.
private void ParasiteLoading(string resourse = null)
{
    LogWrite(DateTime.Now.ToLongTimeString() + " Loading " + (resourse != null?resourse:"system") + " by parasite testp task");
    var parasiteWfDescriptionReader = new DescriptionReader("parasite.wf");
    // Prepend a [Resource = "..."] pin only when a specific resource was requested.
    _api.Script = (resourse != null?"[Resource = \"" + resourse + "\"]\n":"") + parasiteWfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.Active += (sender, jobDecriptionEventArgs) =>
    {
        // NOTE(review): Statuses[id] is read here, but the key is only added after Run()
        // below — if Active can fire before that Add, this throws KeyNotFoundException.
        // Verify Run() cannot raise Active synchronously.
        if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] != jobDecriptionEventArgs.JobInfo.State.ToString())
        {
            Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] = jobDecriptionEventArgs.JobInfo.State.ToString();
            if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] == "Active")
            {
                //var sleep = 15;
                //LogWrite(DateTime.Now.ToLongTimeString() + " Sleep " + sleep + " sec");
                //System.Threading.Thread.Sleep(sleep * 1000);
                waitOne.Set(); // release the WaitOne() blocking at the end of this method
            }
        }
    };
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        Statuses[monitor.JobId.ToString()] = "Finished";
        // Failed parasite runs are logged with the error and skip the CopyRows bookkeeping.
        if (jobDecriptionEventArgs.JobInfo != null && jobDecriptionEventArgs.JobInfo.ErrorComment != null)
        {
            LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()] + (resourse != null ? " on " + resourse : "") + " with error " + jobDecriptionEventArgs.JobInfo.ErrorComment);
            return;
        }
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()] + (resourse != null ? " on " + resourse : ""));
        CopyRows();
    };
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " parasite pushed " + (resourse != null ? " on " + resourse : ""));
    LogWrite(DateTime.Now.ToLongTimeString() + " Waiting till parasite WF become active");
    waitOne.WaitOne(); // block until the Active handler above fires
}
// Experiment 3: trains a BSM performance model from history rows, logs predicted
// computation/overhead time per ForecastSize, then submits one bsm workflow per size
// and records the predictions for later comparison against the measured times.
private void Experiment3()
{
    // First run of the session: clear old result files and push the training workload instead.
    if (finished == 0)
    {
        if (DeleteFiles())
        {
            finished++;
            Experiment3PushBsm();
            return;
        }
    }
    var resourse = "b4.b4-131";
    LogWrite("Starting experiment 3 on resourse " + resourse);
    steps = 1; //max 20
    int[] sizes = { 1, 3, 4, 5, 6 };
    workflows = sizes.Length;
    /*
     * foreach (var pair in bsmDescriptionReader.InputFiles)
     * {
     * var val = ConfigurationManager.AppSettings[pair.Key];
     * _api.BindFile(pair.Key, val);
     * }
     */
    IEnumerable <string> lines = new List <string>();
    IEnumerable <string> overlines = new List <string>();
    if (File.Exists(ConfigurationManager.AppSettings["HistoryFile"]))
    {
        // model_coef file holds computation rows; the parallel "over" file holds overhead rows.
        lines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"]).Where(line => line.Contains("bsm") && line.Contains(resourse));
        overlines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"].Replace("model_coef", "over"));
    }
    var runs = new List <RunRecord>();
    //.Where(l=>l.Contains(@"{""ForecastSize"": ""1""}"))
    foreach (var line in lines)
    {
        var rows = line.Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        // NOTE(review): FirstOrDefault() may return null when no overhead row matches the
        // run id in rows[0]; the immediate Split would then throw NullReferenceException.
        // Confirm the two history files are always written in lockstep.
        var rows2 = overlines.Where(l => l.Contains(rows[0])).FirstOrDefault().Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        //Select(s => double.Parse(s)).ToArray();
        var rx = new Regex(@"""ForecastSize"": ""([^,]+)""");
        var match = rx.Match(rows[11]);
        if (match.Success)
        {
            // NOTE(review): "." -> "," before Parse implies a comma-decimal current culture;
            // this breaks on dot-decimal locales — consider CultureInfo.InvariantCulture.
            var rec = new RunRecord { ComputationTime = double.Parse(rows[7].Replace(".", ",")) };
            rec.OverheadTime = TimeSpan.Parse(rows2[7]).TotalSeconds; // package database
            rec.OverheadTime += TimeSpan.Parse(rows2[8]).TotalSeconds; // resource estimation
            rec.OverheadTime += TimeSpan.Parse(rows2[9]).TotalSeconds; // T_Scheduler
            rec.OverheadTime += TimeSpan.Parse(rows2[10]).TotalSeconds; // communication
            rec.OverheadTime += TimeSpan.Parse(rows2[11]).TotalSeconds; // T_InputFilesCopy
            rec.OverheadTime += TimeSpan.Parse(rows2[12]).TotalSeconds; // T_OutputFilesCopy
            rec.RunContext.Add("ForecastSize", double.Parse(match.Groups[1].Value));
            runs.Add(rec);
        }
    }
    PerformanceModel model = new BsmModel();
    // Fit computation and overhead model parameters from the collected run records.
    var sp = ParametersOptimizer.UpdateServiceComputationParameters(new Dictionary <string, double>(), runs, model);
    var spp = ParametersOptimizer.UpdateServiceOverheadParameters(new Dictionary <string, double>(), runs);
    foreach (var p in sp) { LogWrite(String.Format("{0}: {1}", p.Key, p.Value.ToString("0.0000"))); }
    /*
     * foreach (var p in spp)
     * LogWrite(String.Format("{0}: {1}", p.Key, p.Value.ToString("0.0000")));
     */
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);
    int i = 0;
    foreach (var size in sizes)
    {
        LogWrite(String.Format("ForecastSize: {0}", size));
        var runRecord = new RunRecord();
        runRecord.RunContext.Add("ForecastSize", (double)size);
        // Predict times for this size so they can be compared with the measured run later.
        var compuTime = model.GetComputationTime(sp, runRecord.RunContext, runRecord.ExecutionParams);
        var compuError = model.GetComputationErrorRelative(sp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Calculation time: {0}+/-{1}", compuTime.ToString("0.000"), compuError.ToString("0.000")));
        var overheadTime = model.GetOverheadTime(spp, runRecord.RunContext, runRecord.ExecutionParams);
        var overheadError = model.GetOverheadError(spp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Overhead time: {0}+/-{1}", overheadTime.ToString("0.000"), overheadError.ToString("0.000")));
        StringBuilder sb = new StringBuilder();
        //int stepDiffer = 0;
        for (int step = 1; step <= steps; step++)
        {
            //if (stepDiffer >= sizes.Length) stepDiffer = 0;
            sb.Append("[Resource = " + '"' + resourse + '"' + "]\n");
            sb.Append("step MaskedFullBSM_" + step + " runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
            //stepDiffer++;
        }
        _api.Script = sb.ToString();
        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows(new string[] { "ForecastSize" });
        };
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + workflows + "; id:" + jobMonitor.JobId);
        //LogWrite(_api.Script.Substring(0, 70));
        started = DateTime.Now;
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        // Remember the predictions keyed by job id for later accuracy reporting.
        CalculationTimes.Add(jobMonitor.JobId.ToString(), compuTime.ToString("0.000"));
        CalculationTimesErr.Add(jobMonitor.JobId.ToString(), compuError.ToString("0.000"));
        OverheadTimes.Add(jobMonitor.JobId.ToString(), overheadTime.ToString("0.000"));
        OverheadTimesErr.Add(jobMonitor.JobId.ToString(), overheadError.ToString("0.000"));
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
        i++;
    }
}
// NOTE(review): this file contains two textually identical definitions of
// ParasiteLoading — duplicate members will not compile; one copy should be removed.
// Submits the parasite.wf testp workflow (optionally pinned to a resource) to keep it
// busy, then blocks until the parasite job reports the Active state.
private void ParasiteLoading(string resourse=null)
{
    LogWrite(DateTime.Now.ToLongTimeString() + " Loading "+(resourse!=null?resourse:"system")+" by parasite testp task");
    var parasiteWfDescriptionReader = new DescriptionReader("parasite.wf");
    // Prepend a [Resource = "..."] pin only when a specific resource was requested.
    _api.Script = (resourse!=null?"[Resource = \""+resourse+"\"]\n":"")+parasiteWfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.Active += (sender, jobDecriptionEventArgs) =>
    {
        // NOTE(review): Statuses[id] is read before the key is added after Run() below —
        // a synchronously raised Active event would throw KeyNotFoundException; verify.
        if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] != jobDecriptionEventArgs.JobInfo.State.ToString())
        {
            Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] = jobDecriptionEventArgs.JobInfo.State.ToString();
            if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] == "Active")
            {
                //var sleep = 15;
                //LogWrite(DateTime.Now.ToLongTimeString() + " Sleep " + sleep + " sec");
                //System.Threading.Thread.Sleep(sleep * 1000);
                waitOne.Set(); // release the WaitOne() at the end of this method
            }
        }
    };
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        Statuses[monitor.JobId.ToString()] = "Finished";
        // Failed parasite runs log the error and skip the CopyRows bookkeeping.
        if (jobDecriptionEventArgs.JobInfo != null && jobDecriptionEventArgs.JobInfo.ErrorComment != null)
        {
            LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()]+(resourse != null ? " on " + resourse : "")+" with error "+jobDecriptionEventArgs.JobInfo.ErrorComment);
            return;
        }
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()]+(resourse != null ? " on " + resourse : ""));
        CopyRows();
    };
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " parasite pushed " + (resourse != null ? " on "+resourse : ""));
    LogWrite(DateTime.Now.ToLongTimeString() + " Waiting till parasite WF become active");
    waitOne.WaitOne(); // block until the Active handler above fires
}
//3 modes
// Experiment 1: from a timer, pushes one large testp workflow first, then one bsm
// workflow annotated by mode (mode 2: urgent + Min/MaxTime; mode 3: urgent, then
// Min/MaxTime appended by the separate mode > 2 branch; mode 1: plain).
private void Experiment1(int mode)
{
    LogWrite("Starting experiment mode " + mode.ToString());
    workflows = 2;
    steps = 20; //max 20
    int delay = 120; //150
    var testpDescriptionReader = new DescriptionReader("testp.wf");
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    // Bind bsm input files to the paths configured in AppSettings.
    foreach (var pair in bsmDescriptionReader.InputFiles)
    {
        var val = ConfigurationManager.AppSettings[pair.Key];
        _api.BindFile(pair.Key, val);
    }
    var timer = new Timer();
    int i = 1; // 1 => push testp, 2 => push bsm, then stop
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        Random random = new Random();
        if (i > workflows)
        {
            i = 0;
            timer.Stop();
        }
        else if (i == 1)
        {
            StringBuilder sb = new StringBuilder();
            sb.Append(testpDescriptionReader.Script);
            for (int step = 1; step <= 2 * steps; step++)
            {
                //if (step > amount/2) offset = 40;
                // Note: "in0=" + i + step concatenates both numbers as text (e.g. 1 and 12 -> "112").
                sb.Append("\n step s" + step + " runs testp (\n in0=" + i + step + ",\n in1=1,\n timeToWait=" + (delay / i + random.Next(0, 20)).ToString() + "\n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " testp pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 57));
            if (mode == 1 || mode == 3)
            {
                // NOTE(review): the interval is 20 s but the log message says 25 sec.
                timer.Interval = 20 * 1000;
                LogWrite("Waiting 25 sec");
            }
            jobMonitor.Run();
        }
        else
        {
            StringBuilder sb = new StringBuilder();
            //sb.Append(bsmDescriptionReader.Script);
            if (mode > 1) //2,3
            {
                sb.Append("[flow:priority = @urgent]\n");
                if (mode == 2)
                {
                    sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
                }
            }
            if (mode > 2)
            {
                sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
            }
            for (int step = 1; step <= steps; step++)
            {
                sb.Append("\n step MaskedFullBSM_" + step + " runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam6,\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = 3 \n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 5;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 70));
            jobMonitor.Run();
            //timer.Interval = 120 * 1000;
            timer.Stop();
        }
        i++;
    });
    timer.Interval = 1000;
    timer.Start();
}
//cnm DefaultUrgentHeuristics
// Experiment 5 (duplicate definition — the file contains this method twice; one copy
// should be removed): loads two resources with parasite tasks, switches the urgent
// heuristic ("UBestFirst" for mode 2, otherwise "UGreedy"), submits cnm2.wf, and on
// completion advances mode and re-submits once so both heuristics are measured.
private void Experiment5(int mode)
{
    if (finished == 0) { DeleteFiles(); } // first run of the session: clear stale result files
    started = DateTime.Now;
    // FIX: original log line lacked a space between the timestamp and the message.
    LogWrite(started.ToLongTimeString() + " Starting experiment 5 mode " + mode);
    ParasiteLoading("b14.b14-113");
    ParasiteLoading("b14.b14-22");
    // FIX: the original prepended DateTime.Now.ToLongTimeString() to the parameter name,
    // so the scheduler parameter "DefaultUrgentHeuristics" could never match by name
    // (compare Experiment4, which passes the bare name "DefaultHeuristics").
    SwitchParameter("DefaultUrgentHeuristics", (mode == 2 ? "UBestFirst" : "UGreedy"));
    var wfDescriptionReader = new DescriptionReader("cnm2.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        finished++;
        Statuses[monitor.JobId.ToString()] = "Finished";
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " " + Statuses[monitor.JobId.ToString()] + " after " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Minutes + " min " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Seconds + " sec ");
        CopyRows();
        mode++;                               // step to the next heuristic
        if (mode == 2) { Experiment5(mode); } // re-run once for the second heuristic
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}
//Just one bsm
// Experiment 6 (duplicate definition — this method also appears earlier in the file):
// currently only submits a parasite testp load on b14.b14-113 and returns; the
// single-bsm submission below the return is short-circuited.
private void Experiment6()
{
    if (finished == 0) DeleteFiles();
    started = DateTime.Now;
    // NOTE(review): "mode" is not a parameter — presumably a class field; confirm it is set.
    LogWrite(started.ToLongTimeString() + " Starting experiment 6 mode " + mode);
    ParasiteLoading("b14.b14-113");
    //ParasiteLoading("b14.b14-22");
    // NOTE(review): everything below this return is unreachable dead code.
    return;
    var wfDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        finished++;
        Statuses[monitor.JobId.ToString()] = "Finished";
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " " + Statuses[monitor.JobId.ToString()] + " after " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Minutes + " min " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Seconds + " sec ");
        //CopyRows();
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}
//cnm DefaultHeuristics
// Experiment 4 (duplicate definition — this method also appears earlier in the file):
// submits cnm.wf with a scheduler heuristic chosen by mode ("MinMin" for mode 2,
// otherwise "Stub"); on completion it advances mode and re-submits once so both
// heuristics get measured. Mirrors the mode-stepping pattern of Experiment5.
private void Experiment4(int mode)
{
    if (finished == 0) { DeleteFiles(); } // first run of the session: clear stale result files
    started = DateTime.Now;
    LogWrite(started.ToLongTimeString() + " Starting experiment 4 mode " + mode);
    SwitchParameter("DefaultHeuristics", (mode == 2 ? "MinMin" : "Stub"));
    var wfDescriptionReader = new DescriptionReader("cnm.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    LogWrite(_api.Script);
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
        CopyRows();
        // FIX: advance mode before the re-run check. The original tested the unchanged
        // mode, so a mode-2 run re-submitted itself forever; Experiment5 increments first.
        mode++;
        if (mode == 2) { Experiment4(mode); }
    };
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " state: pushed");
}
//estimation learning
// Duplicate definition — this method also appears earlier in the file; one copy should
// be removed. Pushes a series of bsm workflows pinned to one resource to generate the
// timing history that Experiment3 later uses to train the BSM performance model.
private void Experiment3PushBsm()
{
    if (finished == 0) DeleteFiles();
    var resourse = "b4.b4-131";
    LogWrite("Pushing BSM to resourse " + resourse);
    //workflows = 5;
    steps = 1;
    int[] sizes = { 1,3,4,5,6 };
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);
    for (int i = 0; i < sizes.Length; i++)
    {
        StringBuilder sb = new StringBuilder();
        //sb.Append(bsmDescriptionReader.Script);
        int stepDiffer = 0;
        for (int step = 1; step <= steps; step++)
        //foreach(var size in sizes)
        {
            // NOTE(review): always reads sizes[0] — the outer loop index i never selects
            // a size, so every submission uses ForecastSize = 1; confirm this is intended.
            var size = sizes[0];
            //if (stepDiffer >= sizes.Length) stepDiffer = 0;
            sb.Append("[Resource = " + '"' + resourse + '"' + "]\n");
            // Chain steps sequentially via "after" so each bsm step waits for the previous one.
            sb.Append("step MaskedFullBSM_" + stepDiffer + " runs bsm " + (stepDiffer > 0 ? " after MaskedFullBSM_" + (stepDiffer - 1) : "") + " \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
            stepDiffer++;
        }
        _api.Script = sb.ToString();
        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows();
        };
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + workflows + "; id:" + jobMonitor.JobId);
        LogWrite(_api.Script.Substring(0, 70));
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
    }
}
// Experiment 3 (duplicate definition — this method also appears earlier in the file):
// trains a BSM performance model from history rows, logs predicted computation/overhead
// time per ForecastSize, then submits one bsm workflow per size and records the
// predictions for later comparison against the measured times.
private void Experiment3()
{
    // First run of the session: clear old result files and push the training workload instead.
    if (finished == 0)
        if (DeleteFiles())
        {
            finished++;
            Experiment3PushBsm();
            return;
        }
    var resourse = "b4.b4-131";
    LogWrite("Starting experiment 3 on resourse " + resourse);
    steps = 1; //max 20
    int[] sizes = { 1,3,4,5,6 };
    workflows = sizes.Length;
    /* foreach (var pair in bsmDescriptionReader.InputFiles) { var val = ConfigurationManager.AppSettings[pair.Key]; _api.BindFile(pair.Key, val); } */
    IEnumerable<string> lines = new List<string>();
    IEnumerable<string> overlines = new List<string>();
    if (File.Exists(ConfigurationManager.AppSettings["HistoryFile"]))
    {
        // model_coef file holds computation rows; the parallel "over" file holds overhead rows.
        lines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"]).Where(line => line.Contains("bsm") && line.Contains(resourse));
        overlines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"].Replace("model_coef", "over"));
    }
    var runs = new List<RunRecord>();
    //.Where(l=>l.Contains(@"{""ForecastSize"": ""1""}"))
    foreach (var line in lines)
    {
        var rows = line.Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        // NOTE(review): FirstOrDefault() may return null when no overhead row matches the
        // run id in rows[0]; the immediate Split would then throw NullReferenceException.
        var rows2 = overlines.Where(l => l.Contains(rows[0])).FirstOrDefault().Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        //Select(s => double.Parse(s)).ToArray();
        var rx = new Regex(@"""ForecastSize"": ""([^,]+)""");
        var match = rx.Match(rows[11]);
        if (match.Success)
        {
            // NOTE(review): "." -> "," before Parse implies a comma-decimal current culture;
            // breaks on dot-decimal locales — consider CultureInfo.InvariantCulture.
            var rec = new RunRecord { ComputationTime = double.Parse(rows[7].Replace(".", ",")) };
            rec.OverheadTime = TimeSpan.Parse(rows2[7]).TotalSeconds; // package database
            rec.OverheadTime += TimeSpan.Parse(rows2[8]).TotalSeconds; // resource estimation
            rec.OverheadTime += TimeSpan.Parse(rows2[9]).TotalSeconds; // T_Scheduler
            rec.OverheadTime += TimeSpan.Parse(rows2[10]).TotalSeconds; // communication
            rec.OverheadTime += TimeSpan.Parse(rows2[11]).TotalSeconds; // T_InputFilesCopy
            rec.OverheadTime += TimeSpan.Parse(rows2[12]).TotalSeconds; // T_OutputFilesCopy
            rec.RunContext.Add("ForecastSize", double.Parse(match.Groups[1].Value));
            runs.Add(rec);
        }
    }
    PerformanceModel model = new BsmModel();
    // Fit computation and overhead model parameters from the collected run records.
    var sp = ParametersOptimizer.UpdateServiceComputationParameters(new Dictionary<string, double>(), runs, model);
    var spp = ParametersOptimizer.UpdateServiceOverheadParameters(new Dictionary<string, double>(), runs);
    foreach (var p in sp)
        LogWrite(String.Format("{0}: {1}", p.Key, p.Value.ToString("0.0000")));
    /* foreach (var p in spp) LogWrite(String.Format("{0}: {1}", p.Key, p.Value.ToString("0.0000"))); */
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);
    int i = 0;
    foreach (var size in sizes)
    {
        LogWrite(String.Format("ForecastSize: {0}",size));
        var runRecord = new RunRecord();
        runRecord.RunContext.Add("ForecastSize", (double)size);
        // Predict times for this size so they can be compared with the measured run later.
        var compuTime = model.GetComputationTime(sp, runRecord.RunContext, runRecord.ExecutionParams);
        var compuError = model.GetComputationErrorRelative(sp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Calculation time: {0}+/-{1}", compuTime.ToString("0.000"), compuError.ToString("0.000")));
        var overheadTime = model.GetOverheadTime(spp, runRecord.RunContext, runRecord.ExecutionParams);
        var overheadError = model.GetOverheadError(spp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Overhead time: {0}+/-{1}", overheadTime.ToString("0.000"), overheadError.ToString("0.000")));
        StringBuilder sb = new StringBuilder();
        //int stepDiffer = 0;
        for (int step = 1; step <= steps; step++){
            //if (stepDiffer >= sizes.Length) stepDiffer = 0;
            sb.Append("[Resource = " + '"' + resourse + '"' + "]\n");
            sb.Append("step MaskedFullBSM_"+step+" runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
            //stepDiffer++;
        }
        _api.Script = sb.ToString();
        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows(new string[]{"ForecastSize"});
        };
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + workflows + "; id:" + jobMonitor.JobId);
        //LogWrite(_api.Script.Substring(0, 70));
        started = DateTime.Now;
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        // Remember the predictions keyed by job id for later accuracy reporting.
        CalculationTimes.Add(jobMonitor.JobId.ToString(), compuTime.ToString("0.000"));
        CalculationTimesErr.Add(jobMonitor.JobId.ToString(), compuError.ToString("0.000"));
        OverheadTimes.Add(jobMonitor.JobId.ToString(), overheadTime.ToString("0.000"));
        OverheadTimesErr.Add(jobMonitor.JobId.ToString(), overheadError.ToString("0.000"));
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
        i++;
    }
}
//virtual starting
// Experiment 2 (duplicate definition — this method also appears earlier in the file):
// stops all VMs, then pushes up to `workflows` testp workflows from a timer (2 steps
// for the first two pushes, 1 step afterwards), re-arming the timer to `sleep` seconds
// after each push. The VM launcher runs alongside the pusher.
private void Experiment2()
{
    vmlauncher = new VMLauncher();
    vmlauncher.StopAll(20);
    steps = 2;
    workflows = 25;
    int sleep = 30;  // seconds between pushes; also passed as testp's in1 argument
    int delay = 140; // testp timeToWait argument
    var testpDescriptionReader = new DescriptionReader("testp.wf");
    int i = 0; // workflows pushed so far; captured by the timer closure
    timer = new Timer();
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        if (i >= workflows)
        {
            timer.Stop();
            LogWrite("Pusher stopped");
        }
        else{
            StringBuilder sb = new StringBuilder();
            sb.Append(testpDescriptionReader.Script);
            if (i > 1) steps = 1; // drop to single-step workflows from the third push on
            for (int step = 0; step < steps; step++)
                sb.Append("\n step s"+step+" runs testp (\n in0=" + step+",\n in1="+sleep+",\n timeToWait=" + (delay).ToString() + "\n)\n");
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp2;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + (i + 1) + "/" + workflows + " testp pushed; " + steps + " steps");
            jobMonitor.Run();
            timer.Interval = sleep*1000; // first tick fires after 100 ms; later ticks every `sleep` seconds
        }
        i++;
    });
    LogWrite("Sending jobs to " + ConfigurationManager.AppSettings["ServerName"] + ". Press any key to stop monitoring...");
    timer.Interval = 100;
    timer.Start();
    vmlauncher.Run();
    Console.ReadLine(); // block the caller until the operator stops monitoring
}
//3 modes
// Experiment 1 (duplicate definition — this method also appears earlier in the file):
// from a timer, pushes one large testp workflow first, then one bsm workflow annotated
// by mode (mode 2: urgent + Min/MaxTime; mode 3: urgent, then Min/MaxTime appended by
// the separate mode > 2 branch; mode 1: plain).
private void Experiment1(int mode)
{
    LogWrite("Starting experiment mode " + mode.ToString());
    workflows = 2;
    steps = 20; //max 20
    int delay = 120; //150
    var testpDescriptionReader = new DescriptionReader("testp.wf");
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    // Bind bsm input files to the paths configured in AppSettings.
    foreach (var pair in bsmDescriptionReader.InputFiles)
    {
        var val = ConfigurationManager.AppSettings[pair.Key];
        _api.BindFile(pair.Key, val);
    }
    var timer = new Timer();
    int i = 1; // 1 => push testp, 2 => push bsm, then stop
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        Random random = new Random();
        if (i > workflows)
        {
            i = 0;
            timer.Stop();
        }
        else if (i == 1)
        {
            StringBuilder sb = new StringBuilder();
            sb.Append(testpDescriptionReader.Script);
            for (int step = 1; step <= 2*steps; step++)
            {
                //if (step > amount/2) offset = 40;
                // Note: "in0=" + i + step concatenates both numbers as text (e.g. 1 and 12 -> "112").
                sb.Append("\n step s" + step + " runs testp (\n in0=" + i + step + ",\n in1=1,\n timeToWait=" + (delay / i + random.Next(0, 20)).ToString() + "\n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " testp pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 57));
            if (mode == 1 || mode==3)
            {
                // NOTE(review): the interval is 20 s but the log message says 25 sec.
                timer.Interval = 20 * 1000;
                LogWrite("Waiting 25 sec");
            }
            jobMonitor.Run();
        }
        else
        {
            StringBuilder sb = new StringBuilder();
            //sb.Append(bsmDescriptionReader.Script);
            if (mode > 1)
            { //2,3
                sb.Append("[flow:priority = @urgent]\n");
                if (mode == 2)
                    sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
            }
            if (mode > 2)
                sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
            for (int step = 1; step <=steps; step++)
            {
                sb.Append("\n step MaskedFullBSM_" + step + " runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam6,\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = 3 \n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 5;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 70));
            jobMonitor.Run();
            //timer.Interval = 120 * 1000;
            timer.Stop();
        }
        i++;
    });
    timer.Interval = 1000;
    timer.Start();
}