private void btnConvert_Click(object sender, EventArgs e)
{
    // Selected input TDF file (combobox items are <path, label> pairs):
    string inputTDF = ((KeyValuePair <string, string>) this.cbxInput.SelectedItem).Key;

    // Frame range: either the user-selected subset or the full range.
    bool useSubset = this.chkConsiderSubset.Checked;
    int firstFrame = useSubset ? Convert.ToInt32(this.nudTDFToTIFFFrom.Value) : 0;
    int lastFrame = useSubset ? Convert.ToInt32(this.nudTDFToTIFFTo.Value)
                              : Convert.ToInt32(this.nudTDFToTIFFTo.Maximum);

    // "-" tells the job not to extract the corresponding dataset:
    string projPrefix = this.chkData.Checked ? tbxProjectionPrefix.Text : "-";
    string flatPrefix = (this.tbxFlatPrefix.Enabled && this.chkDataWhite.Checked) ? tbxFlatPrefix.Text : "-";
    string darkPrefix = (this.tbxDarkPrefix.Enabled && this.chkDataDark.Checked) ? tbxDarkPrefix.Text : "-";

    // Create an instance for the TDF-to-TIFF conversion job:
    IJob job = new TDF2TIFFJob(
        inputTDF,
        this.zOutputPathTxb.Text,
        firstFrame,
        lastFrame,
        projPrefix,
        flatPrefix,
        darkPrefix,
        this.rbtDirectOrder.Checked,
        this.btnTIFFFormat.Checked,
        1
    );

    // Execute the job, splitting it into several processes (if specified):
    JobExecuter executer = new JobExecuter(job);
    executer.Run();

    // Monitor whichever dataset the job writes first:
    if (chkData.Checked)
    {
        mJobMonitor.Run(executer, tbxProjectionPrefix.Text);
    }
    else if (chkDataDark.Checked && !chkDataWhite.Checked)
    {
        // Only darks are created:
        mJobMonitor.Run(executer, this.tbxDarkPrefix.Text, TDFReader.GetNumberOfDarks(inputTDF));
    }
    else if (!chkDataDark.Checked && chkDataWhite.Checked)
    {
        // Only flats are created:
        mJobMonitor.Run(executer, this.tbxFlatPrefix.Text, TDFReader.GetNumberOfFlats(inputTDF));
    }
    else
    {
        // Both flats and darks are created, but STP-Core first creates
        // flats and then darks, so monitor the dark prefix:
        mJobMonitor.Run(executer, this.tbxDarkPrefix.Text, TDFReader.GetNumberOfDarks(inputTDF));
    }
}
private void btnRun_Click(object sender, EventArgs e)
{
    // Output path: input name with "_corr" appended (".tdf" assumed, hence Length - 4):
    string outputTDF = this.mInputTDF.Remove(this.mInputTDF.Length - 4) + "_corr.tdf";

    // Pre-processing job with conventional flat fielding over the whole slice range:
    IJob job = new PreProcessingJob(
        // Get combobox selection (in handler)
        this.mInputTDF,
        outputTDF,
        0,
        TDFReader.GetNumberOfSlices(mInputTDF) - 1,
        mAirSx,
        mAirDx,
        this.mFlatEnd, // use flat at the end
        this.mHalfHalf,
        this.mHalfHalfLine,
        this.mExtFOV,
        this.mExtFOVRight,
        this.mExtFOVOverlap,
        this.mExtFOVNormalize,
        this.mExtFOVAverage,
        this.mRingRemoval,
        this.mDynamicFlatFielding,
        Convert.ToInt32(Properties.Settings.Default.FormSettings_NrOfProcesses),
        false,
        "-"
    );

    // Execute the job, splitting it with several processes (if specified):
    JobExecuter executer = new JobExecuter(job);
    executer.Run();

    // Monitor the produced sinograms:
    mJobMonitor.Run(executer, "sino");

    // Reflect the active phase in the UI:
    this.mPreProcessOnGoing = true;
    this.lblPreProcessing.Enabled = true;
    this.lblPhaseRetrieval.Enabled = false;
    this.lblReconstruction.Enabled = false;

    // Reset status bar:
    this.toolStripStatusLabel1.Text = string.Empty;
}
//cnm DefaultHeuristics
// Pushes the "cnm.wf" workflow after switching the scheduler's
// DefaultHeuristics parameter (MinMin when mode == 2, Stub otherwise),
// then monitors it until completion.
private void Experiment4(int mode)
{
    // Clean previous output files on the very first run:
    if (finished == 0) { DeleteFiles(); }
    started = DateTime.Now;
    LogWrite(started.ToLongTimeString() + " Starting experiment 4 mode " + mode);
    SwitchParameter("DefaultHeuristics", (mode == 2 ? "MinMin" : "Stub"));
    var wfDescriptionReader = new DescriptionReader("cnm.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    LogWrite(_api.Script);
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
        CopyRows();
        // NOTE(review): mode is captured unchanged, so with mode == 2 this
        // re-runs Experiment4(2) after EVERY completion - i.e. indefinitely.
        // Compare Experiment5, which increments mode before the check.
        // Confirm an endless repeat is intended here.
        if (mode == 2) { Experiment4(mode); }
    };
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " state: pushed");
}
//Just one bsm
// Currently only loads one resource with a parasite task; the BSM push
// below the early return is disabled.
private void Experiment6()
{
    // Clean previous output files on the very first run:
    if (finished == 0) { DeleteFiles(); }
    started = DateTime.Now;
    // NOTE(review): "mode" is not declared in this method - presumably a
    // field set elsewhere; verify it holds a meaningful value for this log.
    LogWrite(started.ToLongTimeString() + " Starting experiment 6 mode " + mode);
    ParasiteLoading("b14.b14-113");
    //ParasiteLoading("b14.b14-22");
    // NOTE(review): this early return makes everything below unreachable
    // (compiler warning CS0162). Looks like a debugging leftover - either
    // remove the return or delete the dead code.
    return;
    var wfDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        finished++;
        Statuses[monitor.JobId.ToString()] = "Finished";
        // Log completion with elapsed minutes/seconds since the experiment started:
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " " + Statuses[monitor.JobId.ToString()] + " after " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Minutes + " min " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Seconds + " sec ");
        //CopyRows();
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}
//virtual starting
// Restarts the experiment VMs, then pushes "workflows" testp workflows on a
// timer; the first two pushes get 2 steps each, later ones a single step.
private void Experiment2()
{
    vmlauncher = new VMLauncher();
    vmlauncher.StopAll(20);
    steps = 2;       // steps per workflow (shrinks to 1 after the second push)
    workflows = 25;  // total workflows to push
    int sleep = 30;  // seconds between pushes; also each step's in1 argument
    int delay = 140; // each step's timeToWait argument
    var testpDescriptionReader = new DescriptionReader("testp.wf");
    int i = 0; // workflows pushed so far (captured by the timer handler)
    timer = new Timer();
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        if (i >= workflows)
        {
            timer.Stop();
            LogWrite("Pusher stopped");
        }
        else
        {
            // Build the script: base description plus one testp step per "steps":
            StringBuilder sb = new StringBuilder();
            sb.Append(testpDescriptionReader.Script);
            if (i > 1)
            {
                steps = 1; // from the third push onward, single-step workflows
            }
            for (int step = 0; step < steps; step++)
            {
                sb.Append("\n step s" + step + " runs testp (\n in0=" + step + ",\n in1=" + sleep + ",\n timeToWait=" + (delay).ToString() + "\n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp2;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + (i + 1) + "/" + workflows + " testp pushed; " + steps + " steps");
            jobMonitor.Run();
            // After the immediate first tick, push every "sleep" seconds:
            timer.Interval = sleep * 1000;
        }
        i++;
    });
    LogWrite("Sending jobs to " + ConfigurationManager.AppSettings["ServerName"] + ". Press any key to stop monitoring...");
    timer.Interval = 100; // fire almost immediately for the first push
    timer.Start();
    vmlauncher.Run();
    Console.ReadLine(); // keep the experiment alive until a key is pressed
}
private void btnConvert_Click(object sender, EventArgs e)
{
    // Run Job Convert To TDF:
    IMonitoredJob zJob;

    // Final output TDF path (working path + user-typed name + TDF extension).
    // FIX: this expression used to be duplicated verbatim in File.Exists and
    // File.Delete; computed once so the two calls cannot drift apart.
    string zOutputTDF = Properties.Settings.Default.FormSettings_WorkingPath + Path.DirectorySeparatorChar +
        Path.GetFileNameWithoutExtension(txbOutputTDF.Text) + Properties.Settings.Default.TomoDataFormatExtension;

    // Check output file and ask before overwriting:
    if (File.Exists(zOutputTDF))
    {
        if (MessageBox.Show("The specified TDF file already exists. Overwrite it?", "SYRMEP Tomo Project",
                MessageBoxButtons.YesNo, MessageBoxIcon.Warning) == System.Windows.Forms.DialogResult.Yes)
        {
            File.Delete(zOutputTDF);
        }
        else
        {
            return;
        }
    }

    // The job writes directly to the user-specified name in the working path:
    mTempOutputName = Properties.Settings.Default.FormSettings_WorkingPath + Path.DirectorySeparatorChar + txbOutputTDF.Text;

    // Create an instance for the HIS-to-TDF conversion job:
    zJob = new HIS2TDFJob(
        this.txbProjectionHIS.Text,
        this.txbDarkHIS.Text,
        this.txbFlatHIS.Text,
        this.txbPostDarkHIS.Text,
        this.txbPostFlatHIS.Text,
        mTempOutputName,
        // Frame range: user subset or the full range:
        (this.chkConsiderSubset.Checked) ? Convert.ToInt32(this.nudConvertToTDFFrom.Value) : 0,
        (this.chkConsiderSubset.Checked) ? Convert.ToInt32(this.nudConvertToTDFTo.Value) : Convert.ToInt32(this.nudConvertToTDFTo.Maximum),
        Convert.ToInt32(this.nudConvertToTDF_CropLeft.Value),
        Convert.ToInt32(this.nudConvertToTDF_CropRight.Value),
        Convert.ToInt32(this.nudConvertToTDF_CropTop.Value),
        Convert.ToInt32(this.nudConvertToTDF_CropBottom.Value),
        true,
        Convert.ToInt32(this.nudOutputCompression.Value)
    );

    // Create an instance of JobExecuter with the conversion job,
    // splitting it into several processes (if specified):
    JobExecuter zExecuter = new JobExecuter(zJob);

    // Execute the job:
    zExecuter.Run();

    // Start the monitoring of the job:
    mJobMonitor.Run(zExecuter, Properties.Settings.Default.FormSettings_ProjectionPrefix);
}
//estimation learning
// Pushes one single-step BSM workflow per entry in "sizes" to seed the run
// history that Experiment3 later uses to fit its performance model.
private void Experiment3PushBsm()
{
    // Clean previous output files on the very first push:
    if (finished == 0) { DeleteFiles(); }
    var resourse = "b4.b4-131"; // target resource
    LogWrite("Pushing BSM to resourse " + resourse);
    //workflows = 5;
    steps = 1;
    int[] sizes = { 1, 3, 4, 5, 6 };
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);
    for (int i = 0; i < sizes.Length; i++)
    {
        StringBuilder sb = new StringBuilder();
        //sb.Append(bsmDescriptionReader.Script);
        int stepDiffer = 0;
        for (int step = 1; step <= steps; step++)
        //foreach(var size in sizes)
        {
            // NOTE(review): always sizes[0], so every pushed workflow uses
            // ForecastSize = 1 regardless of i. If each iteration was meant
            // to use sizes[i], this is a bug - confirm against Experiment3.
            var size = sizes[0];
            //if (stepDiffer >= sizes.Length) stepDiffer = 0;
            sb.Append("[Resource = " + '"' + resourse + '"' + "]\n");
            // Chain each step after the previous one ("after MaskedFullBSM_<n-1>"):
            sb.Append("step MaskedFullBSM_" + stepDiffer + " runs bsm " + (stepDiffer > 0 ? " after MaskedFullBSM_" + (stepDiffer - 1) : "") + " \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
            stepDiffer++;
        }
        _api.Script = sb.ToString();
        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows();
        };
        // NOTE(review): JobId is logged before jobMonitor.Run() - confirm the
        // id is already assigned at CreateMonitor time.
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + workflows + "; id:" + jobMonitor.JobId);
        LogWrite(_api.Script.Substring(0, 70));
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
    }
}
//cnm DefaultUrgentHeuristics
// Loads two resources with parasite tasks, switches the scheduler's
// DefaultUrgentHeuristics parameter (UBestFirst when mode == 2, UGreedy
// otherwise), then pushes and monitors the "cnm2.wf" workflow.
private void Experiment5(int mode)
{
    // Clean previous output files on the very first run:
    if (finished == 0) { DeleteFiles(); }
    started = DateTime.Now;
    // FIX: added the missing space before "Starting" (matches Experiment4/6 logs):
    LogWrite(started.ToLongTimeString() + " Starting experiment 5 mode " + mode);
    ParasiteLoading("b14.b14-113");
    ParasiteLoading("b14.b14-22");
    // FIX: the parameter name was previously built as
    // DateTime.Now.ToLongTimeString() + " DefaultUrgentHeuristics", so the
    // timestamp-prefixed name could never match the real parameter. Pass the
    // bare name, mirroring Experiment4's SwitchParameter("DefaultHeuristics", ...):
    SwitchParameter("DefaultUrgentHeuristics", (mode == 2 ? "UBestFirst" : "UGreedy"));
    var wfDescriptionReader = new DescriptionReader("cnm2.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        finished++;
        Statuses[monitor.JobId.ToString()] = "Finished";
        // Log completion with elapsed minutes/seconds since the experiment started:
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " " + Statuses[monitor.JobId.ToString()] + " after " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Minutes + " min " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Seconds + " sec ");
        CopyRows();
        // A first call with mode == 1 chains into exactly one mode == 2
        // re-run (which then increments to 3 and stops):
        mode++;
        if (mode == 2) { Experiment5(mode); }
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}
// Pushes a "parasite" testp workflow to keep the given resource (or the
// whole system, when resourse is null) busy, then blocks the caller until
// the workflow reports the "Active" state.
private void ParasiteLoading(string resourse = null)
{
    LogWrite(DateTime.Now.ToLongTimeString() + " Loading " + (resourse != null?resourse:"system") + " by parasite testp task");
    var parasiteWfDescriptionReader = new DescriptionReader("parasite.wf");
    // Pin the workflow to the given resource via a [Resource = "..."] header:
    _api.Script = (resourse != null?"[Resource = \"" + resourse + "\"]\n":"") + parasiteWfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.Active += (sender, jobDecriptionEventArgs) =>
    {
        // Track state transitions; signal the waiting caller on "Active".
        // NOTE(review): this handler indexes Statuses[...] but the job id is
        // only Add-ed AFTER jobMonitor.Run() below - if Active can fire
        // before that Add, this throws KeyNotFoundException. Confirm the
        // monitor's event timing/threading.
        if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] != jobDecriptionEventArgs.JobInfo.State.ToString())
        {
            Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] = jobDecriptionEventArgs.JobInfo.State.ToString();
            if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] == "Active")
            {
                //var sleep = 15;
                //LogWrite(DateTime.Now.ToLongTimeString() + " Sleep " + sleep + " sec");
                //System.Threading.Thread.Sleep(sleep * 1000);
                waitOne.Set();
            }
        }
    };
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        Statuses[monitor.JobId.ToString()] = "Finished";
        // On error: log and skip row copying.
        if (jobDecriptionEventArgs.JobInfo != null && jobDecriptionEventArgs.JobInfo.ErrorComment != null)
        {
            LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()] + (resourse != null ? " on " + resourse : "") + " with error " + jobDecriptionEventArgs.JobInfo.ErrorComment);
            return;
        }
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()] + (resourse != null ? " on " + resourse : ""));
        CopyRows();
    };
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " parasite pushed " + (resourse != null ? " on " + resourse : ""));
    LogWrite(DateTime.Now.ToLongTimeString() + " Waiting till parasite WF become active");
    // Block until the Active handler calls waitOne.Set():
    waitOne.WaitOne();
}
private void btnConvert_Click(object sender, EventArgs e)
{
    // Run Job Convert To TDF:
    IJob zJob;

    // Output TDF goes into the working path under the user-specified name:
    mTempOutputName = Properties.Settings.Default.FormSettings_WorkingPath + Path.DirectorySeparatorChar + txbOutputTDF.Text;

    // True when the input is NOT sinograms (i.e. the projections radio
    // option is in effect). FIX: replaces the "? false : true" anti-idiom.
    bool zInputIsNotSinograms = !this.rbtInputSinograms.Checked;

    // Create an instance for the TIFF-to-TDF conversion job:
    zJob = new TIFF2TDFJob(
        mInputPath,
        mTempOutputName,
        // Frame range: user subset or the full range:
        (this.chkConsiderSubset.Checked) ? Convert.ToInt32(this.nudConvertToTDFFrom.Value) : 0,
        (this.chkConsiderSubset.Checked) ? Convert.ToInt32(this.nudConvertToTDFTo.Value) : Convert.ToInt32(this.nudConvertToTDFTo.Maximum),
        Convert.ToInt32(this.nudConvertToTDF_CropLeft.Value),
        Convert.ToInt32(this.nudConvertToTDF_CropRight.Value),
        Convert.ToInt32(this.nudConvertToTDF_CropTop.Value),
        Convert.ToInt32(this.nudConvertToTDF_CropBottom.Value),
        tbxProjectionPrefix.Text,
        tbxFlatPrefix.Text,
        tbxDarkPrefix.Text,
        zInputIsNotSinograms,
        Convert.ToInt32(this.nudOutputCompression.Value),
        Convert.ToInt32(Properties.Settings.Default.FormSettings_NrOfProcesses)
    );

    // Create an instance of JobExecuter with the conversion job,
    // splitting it into several processes (if specified):
    JobExecuter zExecuter = new JobExecuter(zJob);

    // Execute the job:
    zExecuter.Run();

    // Start the monitoring of the job:
    mJobMonitor.Run(zExecuter, this.tbxProjectionPrefix.Text);

    // Reset status bar:
    this.toolStripStatusLabel1.Text = string.Empty;
}
//virtual starting
// Restarts the experiment VMs, then pushes a fixed number of testp
// workflows on a timer (2 steps for the first two pushes, 1 step after).
private void Experiment2()
{
    vmlauncher = new VMLauncher();
    vmlauncher.StopAll(20);

    steps = 2;
    workflows = 25;
    int sleepSec = 30;   // seconds between pushes; also each step's in1 value
    int waitSec = 140;   // each step's timeToWait value
    var reader = new DescriptionReader("testp.wf");
    int pushed = 0;      // workflows pushed so far

    timer = new Timer();
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        if (pushed >= workflows)
        {
            timer.Stop();
            LogWrite("Pusher stopped");
        }
        else
        {
            // Compose the workflow script: base description + testp steps.
            StringBuilder script = new StringBuilder();
            script.Append(reader.Script);
            if (pushed > 1)
            {
                steps = 1;
            }
            for (int stepIdx = 0; stepIdx < steps; stepIdx++)
            {
                script.Append("\n step s" + stepIdx + " runs testp (\n in0=" + stepIdx + ",\n in1=" + sleepSec + ",\n timeToWait=" + (waitSec).ToString() + "\n)\n");
            }
            _api.Script = script.ToString();

            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp2;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + (pushed + 1) + "/" + workflows + " testp pushed; " + steps + " steps");
            jobMonitor.Run();

            // From now on, push at the regular cadence:
            timer.Interval = sleepSec * 1000;
        }
        pushed++;
    });

    LogWrite("Sending jobs to " + ConfigurationManager.AppSettings["ServerName"] + ". Press any key to stop monitoring...");
    timer.Interval = 100; // first tick fires almost immediately
    timer.Start();
    vmlauncher.Run();
    Console.ReadLine();
}
// Fits a BSM performance model from the recorded run history, then pushes
// one BSM workflow per ForecastSize and records the model's predictions so
// they can be compared with the measured times.
private void Experiment3()
{
    // First run: clear old outputs and seed the history by pushing the BSM
    // workflows once; later calls fall through and use that history.
    if (finished == 0)
        if (DeleteFiles()) { finished++; Experiment3PushBsm(); return; }
    var resourse = "b4.b4-131"; // target resource (sic: "resourse" throughout)
    LogWrite("Starting experiment 3 on resourse " + resourse);
    steps = 1; //max 20
    int[] sizes = { 1, 3, 4, 5, 6 }; // ForecastSize values to test
    workflows = sizes.Length;
    /* foreach (var pair in bsmDescriptionReader.InputFiles) { var val = ConfigurationManager.AppSettings[pair.Key]; _api.BindFile(pair.Key, val); } */
    // Load run history: "HistoryFile" holds the computation records and the
    // sibling file (model_coef -> over) holds the overhead breakdown rows.
    IEnumerable<string> lines = new List<string>();
    IEnumerable<string> overlines = new List<string>();
    if (File.Exists(ConfigurationManager.AppSettings["HistoryFile"]))
    {
        lines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"]).Where(line => line.Contains("bsm") && line.Contains(resourse));
        overlines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"].Replace("model_coef", "over"));
    }
    var runs = new List<RunRecord>();
    //.Where(l=>l.Contains(@"{""ForecastSize"": ""1""}"))
    foreach (var line in lines)
    {
        var rows = line.Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        // NOTE(review): FirstOrDefault() is dereferenced without a null check -
        // if no overhead row matches rows[0] this throws NullReferenceException.
        // Confirm the two history files are always written in lock-step.
        var rows2 = overlines.Where(l => l.Contains(rows[0])).FirstOrDefault().Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        //Select(s => double.Parse(s)).ToArray();
        var rx = new Regex(@"""ForecastSize"": ""([^,]+)""");
        var match = rx.Match(rows[11]);
        if (match.Success)
        {
            // NOTE(review): Replace(".", ",") suggests the file stores "."
            // decimals while the current culture expects "," - consider
            // double.Parse with CultureInfo.InvariantCulture instead.
            var rec = new RunRecord { ComputationTime = double.Parse(rows[7].Replace(".", ",")) };
            rec.OverheadTime = TimeSpan.Parse(rows2[7]).TotalSeconds;   // package base
            rec.OverheadTime += TimeSpan.Parse(rows2[8]).TotalSeconds;  // resource estimation
            rec.OverheadTime += TimeSpan.Parse(rows2[9]).TotalSeconds;  // T_Scheduler
            rec.OverheadTime += TimeSpan.Parse(rows2[10]).TotalSeconds; // communication
            rec.OverheadTime += TimeSpan.Parse(rows2[11]).TotalSeconds; // T_InputFilesCopy
            rec.OverheadTime += TimeSpan.Parse(rows2[12]).TotalSeconds; // T_OutputFilesCopy
            rec.RunContext.Add("ForecastSize", double.Parse(match.Groups[1].Value));
            runs.Add(rec);
        }
    }
    // Fit computation and overhead model parameters from the history:
    PerformanceModel model = new BsmModel();
    var sp = ParametersOptimizer.UpdateServiceComputationParameters(new Dictionary<string, double>(), runs, model);
    var spp = ParametersOptimizer.UpdateServiceOverheadParameters(new Dictionary<string, double>(), runs);
    foreach (var p in sp) LogWrite(String.Format("{0}: {1}", p.Key, p.Value.ToString("0.0000")));
    /* foreach (var p in spp) LogWrite(String.Format("{0}: {1}", p.Key, p.Value.ToString("0.0000"))); */
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);
    int i = 0;
    foreach (var size in sizes)
    {
        LogWrite(String.Format("ForecastSize: {0}", size));
        // Predict time and error for this ForecastSize before running it:
        var runRecord = new RunRecord();
        runRecord.RunContext.Add("ForecastSize", (double)size);
        var compuTime = model.GetComputationTime(sp, runRecord.RunContext, runRecord.ExecutionParams);
        var compuError = model.GetComputationErrorRelative(sp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Calculation time: {0}+/-{1}", compuTime.ToString("0.000"), compuError.ToString("0.000")));
        var overheadTime = model.GetOverheadTime(spp, runRecord.RunContext, runRecord.ExecutionParams);
        var overheadError = model.GetOverheadError(spp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Overhead time: {0}+/-{1}", overheadTime.ToString("0.000"), overheadError.ToString("0.000")));
        // Build the workflow script pinned to the target resource:
        StringBuilder sb = new StringBuilder();
        //int stepDiffer = 0;
        for (int step = 1; step <= steps; step++)
        {
            //if (stepDiffer >= sizes.Length) stepDiffer = 0;
            sb.Append("[Resource = " + '"' + resourse + '"' + "]\n");
            sb.Append("step MaskedFullBSM_" + step + " runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
            //stepDiffer++;
        }
        _api.Script = sb.ToString();
        // Push and monitor; record predictions keyed by job id so the
        // Finished handler can compare them with measured times:
        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows(new string[] { "ForecastSize" });
        };
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + workflows + "; id:" + jobMonitor.JobId);
        //LogWrite(_api.Script.Substring(0, 70));
        started = DateTime.Now;
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        CalculationTimes.Add(jobMonitor.JobId.ToString(), compuTime.ToString("0.000"));
        CalculationTimesErr.Add(jobMonitor.JobId.ToString(), compuError.ToString("0.000"));
        OverheadTimes.Add(jobMonitor.JobId.ToString(), overheadTime.ToString("0.000"));
        OverheadTimesErr.Add(jobMonitor.JobId.ToString(), overheadError.ToString("0.000"));
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
        i++;
    }
}
// Fits a BSM performance model from the recorded run history, then pushes
// one BSM workflow per ForecastSize and records the model's predictions so
// they can be compared with the measured times.
private void Experiment3()
{
    // First run: clear old outputs and seed the history by pushing the BSM
    // workflows once; later calls fall through and use that history.
    if (finished == 0)
    {
        if (DeleteFiles()) { finished++; Experiment3PushBsm(); return; }
    }
    var resourse = "b4.b4-131"; // target resource (sic: "resourse" throughout)
    LogWrite("Starting experiment 3 on resourse " + resourse);
    steps = 1; //max 20
    int[] sizes = { 1, 3, 4, 5, 6 }; // ForecastSize values to test
    workflows = sizes.Length;
    // Load run history: "HistoryFile" holds the computation records and the
    // sibling file (model_coef -> over) holds the overhead breakdown rows.
    IEnumerable<string> lines = new List<string>();
    IEnumerable<string> overlines = new List<string>();
    if (File.Exists(ConfigurationManager.AppSettings["HistoryFile"]))
    {
        lines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"]).Where(line => line.Contains("bsm") && line.Contains(resourse));
        overlines = File.ReadAllLines(ConfigurationManager.AppSettings["HistoryFile"].Replace("model_coef", "over"));
    }
    var runs = new List<RunRecord>();
    // Hoisted out of the loop - same pattern for every history line:
    var rx = new Regex(@"""ForecastSize"": ""([^,]+)""");
    foreach (var line in lines)
    {
        var rows = line.Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        // FIX: guard against a missing overhead row. Previously
        // FirstOrDefault() was dereferenced unconditionally, throwing a
        // NullReferenceException when the "over" file had no matching entry;
        // now such history lines are skipped.
        var overline = overlines.FirstOrDefault(l => l.Contains(rows[0]));
        if (overline == null) continue;
        var rows2 = overline.Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        var match = rx.Match(rows[11]);
        if (match.Success)
        {
            // NOTE(review): Replace(".", ",") suggests the file stores "."
            // decimals while the current culture expects "," - consider
            // double.Parse with CultureInfo.InvariantCulture instead.
            var rec = new RunRecord { ComputationTime = double.Parse(rows[7].Replace(".", ",")) };
            rec.OverheadTime = TimeSpan.Parse(rows2[7]).TotalSeconds;   // package base
            rec.OverheadTime += TimeSpan.Parse(rows2[8]).TotalSeconds;  // resource estimation
            rec.OverheadTime += TimeSpan.Parse(rows2[9]).TotalSeconds;  // T_Scheduler
            rec.OverheadTime += TimeSpan.Parse(rows2[10]).TotalSeconds; // communication
            rec.OverheadTime += TimeSpan.Parse(rows2[11]).TotalSeconds; // T_InputFilesCopy
            rec.OverheadTime += TimeSpan.Parse(rows2[12]).TotalSeconds; // T_OutputFilesCopy
            rec.RunContext.Add("ForecastSize", double.Parse(match.Groups[1].Value));
            runs.Add(rec);
        }
    }
    // Fit computation and overhead model parameters from the history:
    PerformanceModel model = new BsmModel();
    var sp = ParametersOptimizer.UpdateServiceComputationParameters(new Dictionary<string, double>(), runs, model);
    var spp = ParametersOptimizer.UpdateServiceOverheadParameters(new Dictionary<string, double>(), runs);
    foreach (var p in sp)
    {
        LogWrite(String.Format("{0}: {1}", p.Key, p.Value.ToString("0.0000")));
    }
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);
    int i = 0;
    foreach (var size in sizes)
    {
        LogWrite(String.Format("ForecastSize: {0}", size));
        // Predict time and error for this ForecastSize before running it:
        var runRecord = new RunRecord();
        runRecord.RunContext.Add("ForecastSize", (double)size);
        var compuTime = model.GetComputationTime(sp, runRecord.RunContext, runRecord.ExecutionParams);
        var compuError = model.GetComputationErrorRelative(sp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Calculation time: {0}+/-{1}", compuTime.ToString("0.000"), compuError.ToString("0.000")));
        var overheadTime = model.GetOverheadTime(spp, runRecord.RunContext, runRecord.ExecutionParams);
        var overheadError = model.GetOverheadError(spp, runRecord.RunContext, runRecord.ExecutionParams);
        LogWrite(String.Format("Overhead time: {0}+/-{1}", overheadTime.ToString("0.000"), overheadError.ToString("0.000")));
        // Build the workflow script pinned to the target resource:
        StringBuilder sb = new StringBuilder();
        for (int step = 1; step <= steps; step++)
        {
            sb.Append("[Resource = " + '"' + resourse + '"' + "]\n");
            sb.Append("step MaskedFullBSM_" + step + " runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
        }
        _api.Script = sb.ToString();
        // Push and monitor; record predictions keyed by job id so the
        // Finished handler can compare them with measured times:
        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows(new string[] { "ForecastSize" });
        };
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + workflows + "; id:" + jobMonitor.JobId);
        started = DateTime.Now;
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        CalculationTimes.Add(jobMonitor.JobId.ToString(), compuTime.ToString("0.000"));
        CalculationTimesErr.Add(jobMonitor.JobId.ToString(), compuError.ToString("0.000"));
        OverheadTimes.Add(jobMonitor.JobId.ToString(), overheadTime.ToString("0.000"));
        OverheadTimesErr.Add(jobMonitor.JobId.ToString(), overheadError.ToString("0.000"));
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
        i++;
    }
}
//estimation learning
// Pushes one single-step BSM workflow per entry in "sizes" to seed the run
// history used by Experiment3's performance model.
private void Experiment3PushBsm()
{
    // Clean previous output files on the very first push:
    if (finished == 0)
    {
        DeleteFiles();
    }

    var resourse = "b4.b4-131"; // target resource
    LogWrite("Pushing BSM to resourse " + resourse);
    //workflows = 5;
    steps = 1;
    int[] sizes = { 1, 3, 4, 5, 6 };

    var bsmDescriptionReader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(bsmDescriptionReader.InputFiles);

    for (int wf = 0; wf < sizes.Length; wf++)
    {
        // Compose the workflow script for this push:
        var script = new StringBuilder();
        int stepIndex = 0;
        for (int step = 1; step <= steps; step++)
        {
            // All steps currently use the first size only:
            var size = sizes[0];
            script.Append("[Resource = \"" + resourse + "\"]\n");
            // Chain each step after the previous one:
            string dependsOn = stepIndex > 0 ? " after MaskedFullBSM_" + (stepIndex - 1) : "";
            script.Append("step MaskedFullBSM_" + stepIndex + " runs bsm " + dependsOn + " \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam" + size + ",\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = " + size + " \n)\n");
            stepIndex++;
        }
        _api.Script = script.ToString();

        jobMonitor = _api.CreateMonitor();
        jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
        jobMonitor.Active += JobMonitorOnActive;
        jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
        {
            JobMonitorStandardFinished(sender, jobDecriptionEventArgs);
            CopyRows();
        };
        LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + wf + "/" + workflows + "; id:" + jobMonitor.JobId);
        LogWrite(_api.Script.Substring(0, 70));
        jobMonitor.Run();
        Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
    }
}
//Just one bsm
// Currently only loads one resource with a parasite task; the BSM push
// below the early return is left in place but never executes.
private void Experiment6()
{
    // Clean previous output files on the very first run:
    if (finished == 0)
    {
        DeleteFiles();
    }
    started = DateTime.Now;
    LogWrite(started.ToLongTimeString() + " Starting experiment 6 mode " + mode);

    ParasiteLoading("b14.b14-113");
    //ParasiteLoading("b14.b14-22");
    return;

    // --- unreachable while the early return above remains ---
    var reader = new DescriptionReader("bsm.wf");
    _api.UploadFiles(reader.InputFiles);
    _api.Script = reader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5; // poll every 5 seconds
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, args) =>
    {
        var jm = (sender as JobMonitor);
        finished++;
        var key = jm.JobId.ToString();
        Statuses[key] = "Finished";
        // Log completion with elapsed minutes/seconds since the experiment started:
        LogWrite(DateTime.Now.ToLongTimeString() + " " + jm.JobId + " " + Statuses[key] + " after " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Minutes + " min " + TimeSpan.FromTicks(DateTime.Now.Ticks - started.Ticks).Seconds + " sec ");
        //CopyRows();
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}
private void btnConvert_Click(object sender, EventArgs e)
{
    // Run GDEI Job:
    IJob zJob;
    // Input TDF files from the three GDEI comboboxes.
    // NOTE(review): zInputFile_1 reads cbxGDEI_Input2 and zInputFile_2 reads
    // cbxGDEI_Input1 - verify this crossed mapping is intentional and not a
    // copy/paste slip.
    string zInputFile_1 = ((KeyValuePair <string, string>) this.cbxGDEI_Input2.SelectedItem).Key;
    string zInputFile_2 = ((KeyValuePair <string, string>) this.cbxGDEI_Input1.SelectedItem).Key;
    string zInputFile_3 = ((KeyValuePair <string, string>) this.cbxGDEI_Input3.SelectedItem).Key;
    // Output prefix: the first input's file name without its last "_"-section
    // (note the "_" separators themselves are not re-added between sections):
    String[] zSubStrings = Path.GetFileName(zInputFile_1).Split('_');
    string zOutputPrefix = string.Empty;
    for (int i = 0; i < zSubStrings.Length - 1; i++)
    {
        zOutputPrefix += zSubStrings[i];
    }
    // One output TDF per GDEI channel (absorption / refraction / scattering):
    string zOutputFile_Abs = Properties.Settings.Default.FormSettings_WorkingPath + Path.DirectorySeparatorChar + zOutputPrefix + "_abs" + Properties.Settings.Default.TomoDataFormatExtension;
    string zOutputFile_Refr = Properties.Settings.Default.FormSettings_WorkingPath + Path.DirectorySeparatorChar + zOutputPrefix + "_ref" + Properties.Settings.Default.TomoDataFormatExtension;
    string zOutputFile_Sca = Properties.Settings.Default.FormSettings_WorkingPath + Path.DirectorySeparatorChar + zOutputPrefix + "_sca" + Properties.Settings.Default.TomoDataFormatExtension;
    // Create an instance for the GDEI job. The whole slice range of the
    // first input is processed; the subset controls are currently ignored
    // (see the commented-out arguments):
    zJob = new GDEIJob(
        zInputFile_1,
        zInputFile_2,
        zInputFile_3,
        zOutputFile_Abs,
        zOutputFile_Refr,
        zOutputFile_Sca,
        0, //(this.chkConsiderSubset.Checked) ? Convert.ToInt32(this.nudConvertToTDFFrom.Value) : 0,
        TDFReader.GetNumberOfSlices(zInputFile_1) - 1, //-1, //(this.chkConsiderSubset.Checked) ? Convert.ToInt32(this.nudConvertToTDFTo.Value) : -1,
        this.mAirSx,
        this.mAirDx,
        this.mFlatEnd,
        this.mHalfHalf,
        this.mHalfHalfLine,
        this.mExtFOV,
        this.mExtFOVRight,
        this.mExtFOVOverlap,
        this.mExtFOVNormalize,
        this.mExtFOVAverage,
        this.mRingRemoval,
        this.mDynamicFlatFielding,
        // Per-input vertical/horizontal shifts:
        Convert.ToInt32(this.nudGDEI_ShiftVert1.Value),
        Convert.ToInt32(this.nudGDEI_ShiftHoriz1.Value),
        Convert.ToInt32(this.nudGDEI_ShiftVert2.Value),
        Convert.ToInt32(this.nudGDEI_ShiftHoriz2.Value),
        Convert.ToInt32(this.nudGDEI_ShiftVert3.Value),
        Convert.ToInt32(this.nudGDEI_ShiftHoriz3.Value),
        // r/d/dd coefficients entered as mantissa * 10^exponent:
        Convert.ToDouble(this.nudGDEI_r1.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_r1Exp.Value)),
        Convert.ToDouble(this.nudGDEI_r2.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_r2Exp.Value)),
        Convert.ToDouble(this.nudGDEI_r3.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_r3Exp.Value)),
        Convert.ToDouble(this.nudGDEI_d1.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_d1Exp.Value)),
        Convert.ToDouble(this.nudGDEI_d2.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_d2Exp.Value)),
        Convert.ToDouble(this.nudGDEI_d3.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_d3Exp.Value)),
        Convert.ToDouble(this.nudGDEI_dd1.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_dd1Exp.Value)),
        Convert.ToDouble(this.nudGDEI_dd2.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_dd2Exp.Value)),
        Convert.ToDouble(this.nudGDEI_dd3.Value) * Math.Pow(10, Convert.ToDouble(this.nudGDEI_dd3Exp.Value)),
        Convert.ToInt32(Properties.Settings.Default.FormSettings_NrOfProcesses)
    );
    // Create an instance of JobExecuter with the Phase Retrieval job
    // splitting it into several processes (if specified):
    JobExecuter zExecuter = new JobExecuter(zJob);
    // Execute the job:
    zExecuter.Run();
    // Start the monitoring of the job:
    mJobMonitor.Run(zExecuter, "sino");
    // Reset status bar:
    this.toolStripStatusLabel1.Text = string.Empty;
}
//3 modes
// Experiment 1: submits `workflows` workflows, one per timer tick. Tick 1
// pushes a synthetic "testp" workflow (2*steps steps with pseudo-random
// waits); the next tick pushes a "bsm" workflow whose scheduling annotations
// depend on `mode` (modes 2 and 3 get @urgent priority plus zero Min/MaxTime
// bounds; mode 1 gets no annotations).
private void Experiment1(int mode)
{
    LogWrite("Starting experiment mode " + mode.ToString());
    workflows = 2;
    steps = 20; //max 20
    int delay = 120; //150
    var testpDescriptionReader = new DescriptionReader("testp.wf");
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");

    // Bind every input file declared by the bsm workflow to the path
    // configured in the application settings under the same key:
    foreach (var pair in bsmDescriptionReader.InputFiles)
    {
        var val = ConfigurationManager.AppSettings[pair.Key];
        _api.BindFile(pair.Key, val);
    }

    var timer = new Timer();
    int i = 1; // tick counter, captured by the Elapsed handler below
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        // NOTE(review): a new clock-seeded Random per tick; acceptable here,
        // but one shared instance would be cleaner.
        Random random = new Random();
        if (i > workflows)
        {
            i = 0;
            timer.Stop();
        }
        else if (i == 1)
        {
            // First tick: build and push the testp workflow.
            StringBuilder sb = new StringBuilder();
            sb.Append(testpDescriptionReader.Script);
            for (int step = 1; step <= 2*steps; step++)
            {
                //if (step > amount/2) offset = 40;
                sb.Append("\n step s" + step + " runs testp (\n in0=" + i + step + ",\n in1=1,\n timeToWait=" + (delay / i + random.Next(0, 20)).ToString() + "\n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " testp pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 57));
            if (mode == 1 || mode==3)
            {
                // NOTE(review): interval is 20 s but the log claims 25 —
                // confirm which value is intended.
                timer.Interval = 20 * 1000;
                LogWrite("Waiting 25 sec");
            }
            jobMonitor.Run();
        }
        else
        {
            // Later tick: build and push the bsm workflow.
            StringBuilder sb = new StringBuilder();
            //sb.Append(bsmDescriptionReader.Script);
            if (mode > 1)
            { //2,3
                sb.Append("[flow:priority = @urgent]\n");
                if (mode == 2) sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
            }
            if (mode > 2) sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
            for (int step = 1; step <=steps; step++)
            {
                sb.Append("\n step MaskedFullBSM_" + step + " runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam6,\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = 3 \n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 5;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 70));
            jobMonitor.Run();
            //timer.Interval = 120 * 1000;
            timer.Stop();
        }
        i++;
    });
    timer.Interval = 1000;
    timer.Start();
}
// Launches the pre-processing (flat-fielding) job on the remote SYRMEP HPC
// front-end: queries the remote TDF for its dimensions, then submits a
// RemotePreProcessingJob over the full slice range and starts monitoring the
// produced "sino" output.
private void btnRun_Click(object sender, EventArgs e)
{
    // Run Job Convert To TDF:
    IJob zJob;

    string zFileName = Path.GetFileName(mInputTDF);

    // Remote interpreter: explicit SYRMEP_HPC_PythonPath when configured,
    // otherwise whatever "python" resolves to on the remote host.
    string pythoncmd = (string.IsNullOrEmpty(Properties.Settings.Default.SYRMEP_HPC_PythonPath)) ? "python " : Properties.Settings.Default.SYRMEP_HPC_PythonPath + "/python ";

    // Unix-like command line querying the TDF dimensions remotely:
    string zString = pythoncmd + Properties.Settings.Default.SYRMEP_HPC_SourcePath + '/' + Properties.Settings.Default.GetTDFDimensionJob + " " + mInputTDF;
    string zResult = SYRMEP_HPC.Execute(zString);

    // Expected remote output: line 0 = "<label> <projections>" (unused here),
    // line 1 = "<label> <slices>"; the second space-separated token is the number.
    string[] zLines = zResult.Split('\n');
    int zSlices = Int32.Parse((zLines[1].Split(' '))[1]);
    int zTo = zSlices - 1; // process the full slice range [0, zSlices - 1]
    int zThreads = Convert.ToInt32(Properties.Settings.Default.SYRMEP_HPC_Processes);

    // Execute with conventional flat fielding:
    zJob = new RemotePreProcessingJob( // Get combobox selection (in handler)
        zFileName,
        // Output name "<input>_corr.tdf". Path.GetFileNameWithoutExtension
        // replaces the old Remove(Length - 4), which silently assumed a
        // 4-character ".tdf" extension.
        Path.GetFileNameWithoutExtension(zFileName) + "_corr.tdf",
        0,
        zTo,
        mAirSx,
        mAirDx,
        this.mFlatEnd, // use flat at the end
        this.mHalfHalf,
        this.mHalfHalfLine,
        this.mExtFOV,
        this.mExtFOVRight,
        this.mExtFOVOverlap,
        this.mExtFOVNormalize,
        this.mExtFOVAverage,
        this.mRingRemoval,
        this.mDynamicFlatFielding,
        zThreads,
        false,
        "-"
    );

    // Create an instance of JobExecuter with the pre processing job:
    IJobExecuter zExecuter = new RemoteJobExecuter(zJob);

    // Execute the job splitting it with several processes (if specified):
    zExecuter.Run();

    // Start the monitoring of the job:
    mJobMonitor.Run(zExecuter, "sino");

    // Update the workflow indicators: pre-processing is now the active stage.
    this.mPreProcessOnGoing = true;
    this.lblPreProcessing.Enabled = true;
    this.lblPhaseRetrieval.Enabled = false;
    this.lblReconstruction.Enabled = false;

    // Reset status bar:
    this.toolStripStatusLabel1.Text = string.Empty;
}
//cnm DefaultHeuristics
// Experiment 4: runs the "cnm" workflow with the default-heuristics switch
// chosen from `mode` (2 -> "MinMin", otherwise "Stub"); when a run finishes it
// archives its result rows and, for mode 2, immediately re-submits itself.
private void Experiment4(int mode)
{
    // Clean old result files only at the very start of the series.
    if (finished == 0)
    {
        DeleteFiles();
    }

    started = DateTime.Now;
    LogWrite(started.ToLongTimeString()+" Starting experiment 4 mode " + mode);

    string heuristic = mode == 2 ? "MinMin" : "Stub";
    SwitchParameter("DefaultHeuristics", heuristic);

    var reader = new DescriptionReader("cnm.wf");
    _api.UploadFiles(reader.InputFiles);
    _api.Script = reader.Script;
    LogWrite(_api.Script);

    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5;
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (src, args) =>
    {
        JobMonitorStandardFinished(src, args);
        CopyRows();
        // Mode 2 chains indefinitely: each finished run triggers the next.
        if (mode == 2)
        {
            Experiment4(mode);
        }
    };
    jobMonitor.Run();

    Statuses.Add(jobMonitor.JobId.ToString(),"Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " state: pushed");
}
// Pushes a lightweight "parasite" testp workflow to keep `resourse` (or the
// whole system when null) busy, then blocks until that workflow reports the
// Active state. The misspelled parameter name is kept: renaming it would
// break callers using named arguments.
private void ParasiteLoading(string resourse=null)
{
    LogWrite(DateTime.Now.ToLongTimeString() + " Loading "+(resourse!=null?resourse:"system")+" by parasite testp task");
    var parasiteWfDescriptionReader = new DescriptionReader("parasite.wf");
    // Pin the workflow to the requested resource (when given) via a
    // [Resource = "..."] script annotation prepended to the script:
    _api.Script = (resourse!=null?"[Resource = \""+resourse+"\"]\n":"")+parasiteWfDescriptionReader.Script;
    jobMonitor = _api.CreateMonitor();
    jobMonitor.Active += (sender, jobDecriptionEventArgs) =>
    {
        // React only to actual state transitions (the monitor may fire
        // repeatedly with the same state):
        if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] != jobDecriptionEventArgs.JobInfo.State.ToString())
        {
            Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] = jobDecriptionEventArgs.JobInfo.State.ToString();
            if (Statuses[jobDecriptionEventArgs.JobInfo.ID.ToString()] == "Active")
            {
                //var sleep = 15;
                //LogWrite(DateTime.Now.ToLongTimeString() + " Sleep " + sleep + " sec");
                //System.Threading.Thread.Sleep(sleep * 1000);
                // Unblock the WaitOne() at the bottom of this method:
                waitOne.Set();
            }
        }
    };
    jobMonitor.Finished += (sender, jobDecriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        Statuses[monitor.JobId.ToString()] = "Finished";
        // If the job reported an error, log it and skip the result copy:
        if (jobDecriptionEventArgs.JobInfo != null && jobDecriptionEventArgs.JobInfo.ErrorComment != null)
        {
            LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()]+(resourse != null ? " on " + resourse : "")+" with error "+jobDecriptionEventArgs.JobInfo.ErrorComment);
            return;
        }
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " parasite " + Statuses[monitor.JobId.ToString()]+(resourse != null ? " on " + resourse : ""));
        CopyRows();
    };
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " parasite pushed " + (resourse != null ? " on "+resourse : ""));
    LogWrite(DateTime.Now.ToLongTimeString() + " Waiting till parasite WF become active");
    // Block the caller until the Active handler above signals the event:
    waitOne.WaitOne();
}
//3 modes
// Experiment 1 (second copy of this method in the chunk): submits `workflows`
// workflows, one per timer tick. Tick 1 pushes a synthetic "testp" workflow
// (2 * steps steps with pseudo-random waits); the next tick pushes a "bsm"
// workflow whose scheduling annotations depend on `mode` (modes 2 and 3 get
// @urgent priority plus zero Min/MaxTime bounds; mode 1 gets none).
private void Experiment1(int mode)
{
    LogWrite("Starting experiment mode " + mode.ToString());
    workflows = 2;
    steps = 20; //max 20
    int delay = 120; //150
    var testpDescriptionReader = new DescriptionReader("testp.wf");
    var bsmDescriptionReader = new DescriptionReader("bsm.wf");

    // Bind each bsm input file to the path configured under the same key:
    foreach (var pair in bsmDescriptionReader.InputFiles)
    {
        var val = ConfigurationManager.AppSettings[pair.Key];
        _api.BindFile(pair.Key, val);
    }

    var timer = new Timer();
    int i = 1; // tick counter, captured by the Elapsed handler below
    timer.Elapsed += new ElapsedEventHandler((s, e) =>
    {
        // NOTE(review): a fresh clock-seeded Random per tick.
        Random random = new Random();
        if (i > workflows)
        {
            i = 0;
            timer.Stop();
        }
        else if (i == 1)
        {
            // First tick: build and push the testp workflow.
            StringBuilder sb = new StringBuilder();
            sb.Append(testpDescriptionReader.Script);
            for (int step = 1; step <= 2 * steps; step++)
            {
                //if (step > amount/2) offset = 40;
                sb.Append("\n step s" + step + " runs testp (\n in0=" + i + step + ",\n in1=1,\n timeToWait=" + (delay / i + random.Next(0, 20)).ToString() + "\n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 50;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " testp pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 57));
            if (mode == 1 || mode == 3)
            {
                // NOTE(review): interval is 20 s but the log claims 25 —
                // confirm which value is intended.
                timer.Interval = 20 * 1000;
                LogWrite("Waiting 25 sec");
            }
            jobMonitor.Run();
        }
        else
        {
            // Later tick: build and push the bsm workflow.
            StringBuilder sb = new StringBuilder();
            //sb.Append(bsmDescriptionReader.Script);
            if (mode > 1) //2,3
            {
                sb.Append("[flow:priority = @urgent]\n");
                if (mode == 2)
                {
                    sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
                }
            }
            if (mode > 2)
            {
                sb.Append("[flow:MinTime = " + '"' + "0" + '"' + "]\n[flow:MaxTime = " + '"' + "0" + '"' + "]\n");
            }
            for (int step = 1; step <= steps; step++)
            {
                sb.Append("\n step MaskedFullBSM_" + step + " runs bsm \n (\n inMeasurement = measurementFile,\n inHirlam = hirlam6,\n swan = swanFile,\n inBSH = BSHFile,\n useAssimilation = true,\n useSWAN = true,\n useBSH = true,\n useOldProject = false,\n useMask = false,\n startCalcDate = \"09/01/2007 12:00:00\",\n inAssFields = assFields,\n inProject = projects,\n controlPoints = inControlPoints,\n deleteDirs = true,\n ForecastSize = 3 \n)\n");
            }
            _api.Script = sb.ToString();
            jobMonitor = _api.CreateMonitor();
            jobMonitor.UpdatePeriod = 1000 * 5;
            jobMonitor.Finished += JobMonitorOnFinishedExp1;
            jobMonitor.Active += JobMonitorOnActive;
            LogWrite(DateTime.Now.ToLongTimeString() + " " + steps + " bsm pushed: " + i + "/" + "; id:" + jobMonitor.JobId);
            LogWrite(_api.Script.Substring(0, 70));
            jobMonitor.Run();
            //timer.Interval = 120 * 1000;
            timer.Stop();
        }
        i++;
    });
    timer.Interval = 1000;
    timer.Start();
}
// Builds a MultiAngleJob from the current form/model state and the selected
// index range, runs it, and monitors until the expected number of output
// images (one per index in [From, To]) has appeared.
private void btnConvert_Click(object sender, EventArgs e)
{
    // Index subset to process, taken from the two spinners:
    int zFrom = Convert.ToInt32(this.nudMultiAngle_From.Value);
    int zTo = Convert.ToInt32(this.nudMultiAngle_To.Value);

    // Create an instance for the multi angle job:
    IJob zJob = new MultiAngleJob(
        this.mSlicePrefix,
        this.mImageIndex,
        this.mInputTDF,
        this.zOutputPathTxb.Text,
        this.mPreProcess,
        this.mAirSx,
        this.mAirDx,
        this.mFlatEnd,
        this.mHalfHalf,
        this.mHalfHalfLine,
        this.mExtFOV,
        this.mExtFOVRight,
        this.mExtFOVOverlap,
        this.mExtFOVNormalize,
        this.mExtFOVAverage,
        this.mRingRemoval,
        this.mAngles,
        this.mCenter,
        this.mReconFunc,
        this.mReconParam1,
        this.mScale,
        this.mOverPaddding,
        this.mLogTransform,
        this.mCircle,
        this.mZeroneMode,
        this.mCorrectionOffset,
        this.mDecimateFactor,
        this.mDownscaleFactor,
        this.mPostProcess,
        this.mPostProcessConvertArgs,
        this.mPostProcessCropArgs,
        this.mPhaseRetrieval,
        this.mPhrtMethod,
        this.mParam1,
        this.mParam2,
        this.mDistance,
        this.mEnergy,
        this.mPixelsize,
        this.mPhrtPad,
        zFrom,
        zTo
    );

    // Wrap the job in an executer (it may split the work into several
    // processes, if so configured) and launch it:
    JobExecuter zExecuter = new JobExecuter(zJob);
    zExecuter.Run();

    // Monitor until one output per requested index has been produced; the
    // count is computed on the raw decimal values exactly as before:
    int zLines = (Convert.ToInt32(this.nudMultiAngle_To.Value - this.nudMultiAngle_From.Value)) + 1;
    mJobMonitor.Run(zExecuter, this.mSlicePrefix, zLines);
}
//cnm DefaultUrgentHeuristics
// Experiment 5: pre-loads two specific resources with parasite workflows so
// the urgent heuristics have contention to resolve, switches the
// urgent-heuristics parameter (mode 2 -> "UBestFirst", otherwise "UGreedy"),
// then submits the "cnm2" workflow and logs its completion time. When the
// mode-1 run finishes it chains exactly one follow-up run as mode 2.
private void Experiment5(int mode)
{
    if (finished == 0) DeleteFiles();
    started = DateTime.Now;
    // BUGFIX: the missing space after the timestamp made the log read
    // "...PMStarting experiment 5..." (cf. Experiment4's message).
    LogWrite(started.ToLongTimeString() + " Starting experiment 5 mode " + mode);

    ParasiteLoading("b14.b14-113");
    ParasiteLoading("b14.b14-22");

    // BUGFIX: the timestamp was previously concatenated into the parameter
    // NAME passed to SwitchParameter, so "DefaultUrgentHeuristics" was never
    // actually switched. Log the switch separately instead.
    LogWrite(DateTime.Now.ToLongTimeString() + " DefaultUrgentHeuristics");
    SwitchParameter("DefaultUrgentHeuristics", (mode == 2 ? "UBestFirst" : "UGreedy"));

    var wfDescriptionReader = new DescriptionReader("cnm2.wf");
    _api.UploadFiles(wfDescriptionReader.InputFiles);
    _api.Script = wfDescriptionReader.Script;

    jobMonitor = _api.CreateMonitor();
    jobMonitor.UpdatePeriod = 1000 * 5;
    jobMonitor.Active += JobMonitorOnActive;
    jobMonitor.Finished += (sender, jobDescriptionEventArgs) =>
    {
        var monitor = (sender as JobMonitor);
        finished++;
        Statuses[monitor.JobId.ToString()] = "Finished";
        // Compute the elapsed time once so minutes and seconds come from the
        // same instant (previously DateTime.Now was sampled twice):
        TimeSpan elapsed = DateTime.Now - started;
        LogWrite(DateTime.Now.ToLongTimeString() + " " + monitor.JobId + " " + Statuses[monitor.JobId.ToString()] + " after " + elapsed.Minutes + " min " + elapsed.Seconds + " sec ");
        CopyRows();
        mode++;
        // Chain exactly one follow-up run: 1 -> 2, then stop.
        if (mode == 2) Experiment5(mode);
    };
    LogWrite(_api.Script);
    jobMonitor.Run();
    Statuses.Add(jobMonitor.JobId.ToString(), "Pushed");
    LogWrite(DateTime.Now.ToLongTimeString() + " " + jobMonitor.JobId + " pushed");
}