/// <summary> /// Process one or more .NCC Review measurement data files, with related .NOP/.COP files. /// Presence of NOP/COP files is optional /// See Import Operator Declarations from Operator Review and Measurements from Radiation Review p. 24, /// See Operator Declaration File Format p. 87, /// See Operator Declaration File Format for Curium Ratio Measurements p. 88, /// See Radiation Review Measurement Data File Format p. 93, INCC Software Users Manual, March 29, 2009 /// </summary> void INCCReviewFileProcessing() { FireEvent(EventType.ActionPrep, this); NC.App.Opstate.StampOperationStartTime(); // This code would be the same for an interactive version of this operation // > Start here // first find and process and .NOP files List <string> exts = new List <string>() { ".nop", ".cop" }; FileList <CSVFile> hdlr = new FileList <CSVFile>(); hdlr.Init(exts, ctrllog); FileList <CSVFile> files = null; OPFiles opfiles = new OPFiles(); // The INCC5 semantics if (NC.App.AppContext.FileInputList == null) { files = (FileList <CSVFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true); } else { files = (FileList <CSVFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList); } if (files != null && files.Count > 0) { // construct lists of isotopics and items from the NOP and COP files opfiles.Process(files); ctrllog.TraceEvent(LogLevels.Verbose, 33085, "NOP items " + opfiles.Results.ItemIds.Count); //ctrllog.TraceEvent(LogLevels.Verbose, 33085, "COP entries " + opfiles.Results.COPRatioRecs.Count); } else { ctrllog.TraceEvent(LogLevels.Warning, 33085, "No operator declarations available, continuing with default values"); } // process the NCC files only INCCFileOrFolderInfo foo = new INCCFileOrFolderInfo(ctrllog, "*.NCC"); if (NC.App.AppContext.FileInputList == null) { foo.SetPath(NC.App.AppContext.FileInput); } else { foo.SetFileList(NC.App.AppContext.FileInputList); } foo.eh += new TransferEventHandler((s, e) => { FireEvent(EventType.ActionInProgress, e); }); List <INCCTransferBase> res = foo.Restore(); // use RemoveAll to cull those NCC files that reference a non-existent detector DetectorList dl = NC.App.DB.Detectors; foreach (INCCReviewFile rf in res) { List <Detector> tdk = dl.FindAll(d => 0 == string.Compare(d.Id.DetectorName, rf.detector, true)); if (tdk.Count < 1) { rf.skip = true; ctrllog.TraceEvent(LogLevels.Warning, 33085, "No detector " + rf.detector + " defined, cannot complete processing NCC review file " + System.IO.Path.GetFileName(rf.Path)); } } res.RemoveAll(rf => (rf as INCCReviewFile).skip); res.Sort((rf1, rf2) => // sort chronologically { return(DateTime.Compare((rf1 as INCCReviewFile).dt, (rf2 as INCCReviewFile).dt)); }); /// end here > The sorted, filtered and processed list here would be returned to the UI for display and interactive selection if (res == null || res.Count < 1) { NC.App.Opstate.StopTimer(); NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionStop, this); ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable NCC review files found in " + System.IO.Path.GetFullPath(foo.GetPath())); return; } AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState); // Each NCC file is a separate measurement foreach (INCCReviewFile irf in res) { ResetMeasurement(); if (NC.App.Opstate.IsQuitRequested) { break; } // Find the detector named in the NCC file (existence shown in earlier processing above) Detector curdet = NC.App.DB.Detectors.Find(d => string.Compare(d.Id.DetectorName, irf.detector, 
true) == 0); if (curdet.AB.Unset) { ABKey abkey = new ABKey(curdet.MultiplicityParams, 512); // NEXT: maxbins is arbitrary, just the first of many for VSR LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB); } // set up acquire state based on the NCC file content AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, irf); AssaySelector.MeasurementOption mo = NCCMeasOption(irf); // make a temp MeasId MeasId thisone = new MeasId(mo, newacq.MeasDateTime); // get the list of measurement Ids for this detector List <MeasId> list = NC.App.DB.MeasurementIds(curdet.Id.DetectorName, mo.PrintName()); MeasId already = list.Find(mid => mid.Equals(thisone)); if (already != null) { // URGENT: do the replacement as it says ctrllog.TraceEvent(LogLevels.Warning, 33085, "Replacing the matching {0} measurement, timestamp {1} (in a future release)", already.MeasOption.PrintName(), already.MeasDateTime.ToString()); } IntegrationHelpers.BuildMeasurement(newacq, curdet, mo); Measurement meas = NC.App.Opstate.Measurement; meas.MeasDate = newacq.MeasDateTime; // use the date and time from the NCC file content meas.Persist(); // preserve the basic results record if (AssaySelector.ForMass(meas.MeasOption) && !meas.INCCAnalysisState.Methods.AnySelected()) { ctrllog.TraceEvent(LogLevels.Warning, 437, "No mass methods for " + meas.INCCAnalysisState.Methods.selector.ToString()); } try { ushort total_number_runs = 0; double run_count_time = 0; double total_good_count_time = 0; if (irf.num_runs == 0) { ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles."); continue; } if (meas.MeasOption == AssaySelector.MeasurementOption.holdup) { continue; } /* convert to total count time */ total_number_runs = irf.num_runs; total_good_count_time = irf.num_runs * run_count_time; meas.RequestedRepetitions = total_number_runs; // convert runs to cycles for (int i = 0; i < irf.num_runs; i++) { /* run date and time (IAEA format) */ AddReviewFileCycle(i, irf.runs[i], irf.times[i], meas, irf.Path); if (i % 8 == 0) { FireEvent(EventType.ActionInProgress, this); } } FireEvent(EventType.ActionInProgress, this); // NEXT: handle true AAS cycles as in INCC5 if (meas.MeasOption == AssaySelector.MeasurementOption.verification && meas.INCCAnalysisState.Methods.Has(AnalysisMethod.AddASource) && meas.AcquireState.well_config == WellConfiguration.Passive) { ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data processed because the implementation is incomplete. AAS"); //AddASourceSetup aass = IntegrationHelpers.GetCurrentAASSParams(meas.Detector); //for (int n = 1; n <= aass.number_positions; n++) //{ // /* number of good runs */ // string l = td.reader.ReadLine(); // if (td.reader.EndOfStream) // { // ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data found in disk file. " + "AAS p" + n.ToString()); // } //} } } catch (Exception e) { NC.App.Opstate.SOH = OperatingState.Trouble; ctrllog.TraceException(e, true); ctrllog.TraceEvent(LogLevels.Error, 437, "NCC file processing stopped with error: '" + e.Message + "'"); } finally { NC.App.Loggers.Flush(); } FireEvent(EventType.ActionInProgress, this); ComputeFromINCC5SRData(meas); FireEvent(EventType.ActionInProgress, this); } NC.App.Opstate.ResetTokens(); NC.App.Opstate.SOH = OperatingState.Stopping; NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionFinished, this); }
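// --- Illustrative sketch (added for clarity, not called by the code above) ---
// The cull-then-sort step over the restored NCC review files reduces to: drop entries whose
// detector name is not defined (case-insensitive), then order the rest chronologically.
// ReviewFileInfo, ReviewFileSelection and FilterAndSort are hypothetical names used only for
// this sketch; it assumes the file's existing System, System.Collections.Generic and System.Linq usings.
internal sealed class ReviewFileInfo
{
    public string DetectorName;    // detector named inside the .NCC file
    public DateTime MeasDateTime;  // measurement date/time taken from the file content
    public string Path;            // full path to the .NCC file
}

internal static class ReviewFileSelection
{
    public static List<ReviewFileInfo> FilterAndSort(IEnumerable<ReviewFileInfo> candidates, IEnumerable<string> knownDetectorNames)
    {
        HashSet<string> known = new HashSet<string>(knownDetectorNames, StringComparer.OrdinalIgnoreCase);
        return candidates
            .Where(r => known.Contains(r.DetectorName))  // cull files naming an undefined detector
            .OrderBy(r => r.MeasDateTime)                // chronological, oldest first
            .ToList();
    }
}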
void TestDataAssay() { List <string> exts = new List <string>() { ".dat", ".cnn" }; FileList <TestDataFile> hdlr = new FileList <TestDataFile>(); hdlr.Init(exts, ctrllog); FileList <TestDataFile> files = null; // initialize operation timer here NC.App.Opstate.ResetTimer(filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds); FireEvent(EventType.ActionPrep, this); NC.App.Opstate.StampOperationStartTime(); if (NC.App.AppContext.FileInputList == null) { files = (FileList <TestDataFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true); } else { files = (FileList <TestDataFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList); } if (files == null || files.Count < 1) { NC.App.Opstate.StopTimer(); NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionStop, this); ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable Test data/Disk .dat or .cnn files found"); return; } AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState); Detector curdet = NC.App.Opstate.Measurement.Detector; AssaySelector.MeasurementOption mo = NC.App.Opstate.Measurement.MeasOption; if (curdet.AB.Unset) { ABKey abkey = new ABKey(curdet.MultiplicityParams, 512); // NEXT: maxbins is arbitrary, just the first of many for VSR LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB); } // Each .dat file is a separate measurement foreach (TestDataFile td in files) { Measurement meas = null; ResetMeasurement(); try { if (!td.OpenForReading()) { continue; } if (NC.App.Opstate.IsQuitRequested) { break; } uint run_seconds; ushort number_good_runs = 0; ushort total_number_runs = 0; double run_count_time = 0; double total_good_count_time = 0; if (mo != AssaySelector.MeasurementOption.holdup) { /* number of good runs */ string l = td.reader.ReadLine(); ushort.TryParse(l, out number_good_runs); if (number_good_runs == 0) { ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles."); continue; } /* run count time */ l = td.reader.ReadLine(); double.TryParse(l, out run_count_time); if (run_count_time <= 0.0) { ctrllog.TraceEvent(LogLevels.Error, 441, "Count time is <= 0."); continue; } } // update acq and then meas here AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, DateTimeOffset.Now, number_good_runs, td.Filename); IntegrationHelpers.BuildMeasurement(newacq, curdet, mo); meas = NC.App.Opstate.Measurement; meas.MeasDate = newacq.MeasDateTime; meas.Persist(); // preserve the basic results record /* convert to total count time */ total_number_runs = number_good_runs; total_good_count_time = number_good_runs * run_count_time; meas.RequestedRepetitions = total_number_runs; // update the acq status and do the persist /* read in run data */ for (int i = 0; i < number_good_runs; i++) { /* run date and time (IAEA format) */ run_seconds = (uint)(i * (ushort)run_count_time); // from start time AddTestDataCycle(i, run_seconds, run_count_time, meas, td); if (i % 8 == 0) { FireEvent(EventType.ActionInProgress, this); } } FireEvent(EventType.ActionInProgress, this); if (meas.MeasOption == AssaySelector.MeasurementOption.verification && meas.INCCAnalysisState.Methods.Has(AnalysisMethod.AddASource) && meas.AcquireState.well_config == WellConfiguration.Passive) { AddASourceSetup aass = IntegrationHelpers.GetCurrentAASSParams(meas.Detector); for (int n = 1; n <= aass.number_positions; n++) { /* number of good runs */ string l = td.reader.ReadLine(); if (td.reader.EndOfStream) { 
ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data found in disk file. AAS p" + n.ToString()); } ushort.TryParse(l, out number_good_runs); if (number_good_runs == 0) { ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles. AAS p" + n.ToString()); continue; } /* run count time */ l = td.reader.ReadLine(); double.TryParse(l, out run_count_time); if (run_count_time <= 0.0) { ctrllog.TraceEvent(LogLevels.Error, 441, "Count time is <= 0. AAS p" + n.ToString()); continue; } /* read in run data */ for (int i = 0; i < number_good_runs; i++) { /* run date and time (IAEA format) */ run_seconds = (uint)((n + 1) * (i + 1) * (ushort)run_count_time); // from start time AddTestDataCycle(i, run_seconds, run_count_time, meas, td, " AAS p" + n.ToString(), n); if (i % 8 == 0) { FireEvent(EventType.ActionInProgress, this); } } FireEvent(EventType.ActionInProgress, this); } } } catch (Exception e) { NC.App.Opstate.SOH = OperatingState.Trouble; ctrllog.TraceException(e, true); ctrllog.TraceEvent(LogLevels.Error, 437, "Test data file processing stopped with error: '" + e.Message + "'"); } finally { td.CloseReader(); NC.App.Loggers.Flush(); } FireEvent(EventType.ActionInProgress, this); ComputeFromINCC5SRData(meas); FireEvent(EventType.ActionInProgress, this); } NC.App.Opstate.ResetTokens(); NC.App.Opstate.SOH = OperatingState.Stopping; NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionFinished, this); }
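// --- Illustrative sketch (added for clarity, not called by the code above) ---
// TestDataAssay reads two header lines from each .dat/.cnn file before the cycle data:
// the number of good runs (ushort) and the per-run count time in seconds (double), and it
// rejects the file when either value is missing or non-positive. A minimal, self-contained
// version of that header check (TestDataHeader and TryRead are hypothetical names):
internal static class TestDataHeader
{
    // Returns false when the header is malformed; mirrors the TryParse + range checks above.
    public static bool TryRead(System.IO.TextReader reader, out ushort goodRuns, out double runCountTime)
    {
        goodRuns = 0;
        runCountTime = 0.0;
        string l = reader.ReadLine();                          // line 1: number of good runs
        if (!ushort.TryParse(l, out goodRuns) || goodRuns == 0)
            return false;
        l = reader.ReadLine();                                 // line 2: run count time (seconds)
        if (!double.TryParse(l, out runCountTime) || runCountTime <= 0.0)
            return false;
        return true;
    }
}
// Usage: TryRead(new System.IO.StringReader("6\n100\n"), out runs, out secs) yields runs == 6, secs == 100.0.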
protected void PTRFilePairAssay() { List<string> ext = new List<string>() { ".chn", ".bin" }; FileList<PTRFilePair> hdlr = new FileList<PTRFilePair>(); FileList<PTRFilePair> files = null; hdlr.Init(ext, datalog); // initialize operation timer here NC.App.Opstate.ResetTimer(0, filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds); FireEvent(EventType.ActionPrep, this); NC.App.Opstate.StampOperationStartTime(); // get the list of files from the named folder, or use the supplied list if (NC.App.AppContext.FileInputList == null) files = (FileList<PTRFilePair>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, false); else files = (FileList<PTRFilePair>)hdlr.BuildFileList(NC.App.AppContext.FileInputList); if (files == null || files.Count() < 1) { NC.App.Opstate.StopTimer(0); return; } int removed = files.RemoveAll(f => f.PairEntryFileExtension(".chn")); if (files.Count() < 1) { NC.App.Opstate.StopTimer(0); return; } Measurement meas = NC.App.Opstate.Measurement; PseudoInstrument = new LMDAQ.LMInstrument(meas.Detectors[0]); // psuedo LM until we can map from user or deduce from file content at run-time PseudoInstrument.selected = true; if (!Instruments.Active.Contains(PseudoInstrument)) Instruments.Active.Add(PseudoInstrument); // add to global runtime list // Force RDT.State to be a LM ptrFile file RDT, this shows a design failure, so need to rework the entire scheme, (like there is still time . . .) LMRawDataTransform rdt = (PseudoInstrument as LMDAQ.LMInstrument).RDT; rdt.SetLMState(((LMConnectionInfo)(PseudoInstrument.id.FullConnInfo)).NetComm); PTRFileProcessingState c = new PTRFileProcessingState(rdt.State.maxValuesInBuffer * 2, (LMProcessingState)PseudoInstrument.RDT.State); // expects 8 bytes, we only use 4 PseudoInstrument.RDT.State = c; rdt.Init(NC.App.Loggers.Logger(LMLoggers.AppSection.Data), NC.App.Loggers.Logger(LMLoggers.AppSection.Analysis)); rdt.RawDataBuff = null;// free it back up, don't need it NC.App.Opstate.ResetTimer(0, this.neutronCountingPrep, 0, 170, (int)NC.App.AppContext.StatusTimerMilliseconds / 4); DataSourceIdentifier did = meas.Detectors[0].Id; rdt.SetupCountingAnalyzerHandler(NC.App.Config, did.source.TimeBase(did.SRType), // 1e-8 expected here (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Verbose, 439, "Neutron counting processing complete: '" + s + "'"); }, (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Error, 438, "Neutron counting processing stopped with error: '" + s + "'"); rdt.EndAnalysisImmediately(); throw new FatalNeutronCountingException(s); // emergency exit, caught and noted in file processing loop below }, (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Error, 437, "Neutron counting processing [Block] stopped with error: '" + s + "'"); rdt.EndAnalysisImmediately(); throw new FatalNeutronCountingException(s); // emergency exit, caught and noted in file processing loop below } ); meas.AcquireState.num_runs = (ushort)files.Count(); // RequestedRepetitions rdt.PrepareAndStartCountingAnalyzers(meas.AnalysisParams); NC.App.Opstate.StopTimer(0); FireEvent(EventType.ActionStart, this); meas.CurrentRepetition = 0; NC.App.Opstate.ResetTimer(0, filerawprocessing, PseudoInstrument, 250, (int)NC.App.AppContext.StatusTimerMilliseconds); PTRFileProcessingState pps = PseudoInstrument.RDT.State as PTRFileProcessingState; foreach (var ptrFile in files) { if (NC.App.Opstate.IsQuitRequested) // cancellation occurs here and at 
selected steps in the internal file and analyzer processing break; if (!ptrFile.OpenForReading()) continue; PseudoInstrument.PendingReset(); if (meas.CurrentRepetition == 0) { meas.MeasDate = new DateTimeOffset(ptrFile.DTO.Ticks, ptrFile.DTO.Offset); meas.Detectors[0].Id.source = ConstructedSource.PTRFile; PseudoInstrument.id.source = ConstructedSource.PTRFile; } Cycle cycle = new Cycle(ctrllog); cycle.UpdateDataSourceId(ConstructedSource.PTRFile, InstrType.PTR32 /* revisit this, it could be from any source */, ptrFile.DTO, ptrFile.Filename); meas.Add(cycle); rdt.StartCycle(cycle); meas.CurrentRepetition++; pps.Reset(); NC.App.Opstate.SOH = NCC.OperatingState.Living; PseudoInstrument.id.FileName = ptrFile.Filename; ctrllog.TraceEvent(LogLevels.Info, 3335, "Assaying with {0}", ptrFile.Filename); /// unique here UInt32 deltaTime = 0; Double ShakeTime; // like pulse files 10^e-8 int maxValuesInBuffer = (int)pps.maxValuesInBuffer; try { UInt32[] chnmask = new UInt32[32]; for (int i = 0; i < 32; i++) { chnmask[i] = (uint)1 << i; } rdt.NumProcessedRawDataBuffers = 0; string issue = String.Empty; //read the header from the BIN file ptrFile.Events.ReadHeader(); ShakeTime = 0; //Add this as check. We should have this count - big T as final count rate pps.PTRReportedCountTime += ptrFile.Events.ReportedCountTime; if (!ptrFile.Channels.Active) { ptrFile.Channels.fulllen = ptrFile.Events.stream.Length; } while (ptrFile.Channels.read < ptrFile.Channels.fulllen && String.IsNullOrEmpty(issue)) { rdt.NumProcessedRawDataBuffers++; // divide file size into discrete lengths of a reasonable size, say 50Mb or a 128 Mb default int elen = (ptrFile.Events.eventsectionlen > pps.eventBufferLength ? (int)pps.eventBufferLength : (int)ptrFile.Events.eventsectionlen); int clen = (ptrFile.Channels.fulllen > maxValuesInBuffer ? 
maxValuesInBuffer : (int)ptrFile.Channels.fulllen); // read the times up to the buffer limit int events = Math.Min(maxValuesInBuffer, elen / sizeof(UInt32)); events = Math.Min(events, ptrFile.Events.EventsYetToRead()); // might be at the last buffers-worth, check and constrain ptrFile.Events.thisread = ptrFile.Events.ReadUInt32Array(pps.timeInBuffer, 0, events); ptrFile.Events.read += ptrFile.Events.thisread; // read or simulate the channel bytes if (ptrFile.Channels.Active) { ptrFile.Channels.thisread = ptrFile.Channels.reader.Read(pps.chnInBuffer, 0, clen); ptrFile.Channels.read += ptrFile.Channels.thisread; } else // fake it with a single channel, an empty channel mask means channel 1 { ptrFile.Channels.read += clen; } // Todo: fix this because could be or'ing over the buffer end, so this will blow int edi = -1; // actual events for (int ec = 0; ec < events; ec++) // raw event counts { deltaTime = pps.timeInBuffer[ec]; if (deltaTime == 0) // or the channel mask { pps.channels[edi] |= chnmask[pps.chnInBuffer[ec]]; ptrFile.Log.TraceEvent(LogLevels.Verbose, 3246, "multiple hits {0:x8} at {1} ({2} {3})", pps.channels[edi], ShakeTime, edi, ec); continue; } edi++; // the next event ShakeTime += deltaTime; // accumulate total time //set the neutron event with the new neutron pps.channels[edi] = chnmask[pps.chnInBuffer[ec]]; //set the event time with the new neutron time pps.times[edi] = ShakeTime; if (pps.channels[edi] == 0) ptrFile.Log.TraceEvent(LogLevels.Warning, 3334, "0 event at {0}", ShakeTime); // ptrFile.Log.TraceEvent(LogLevels.Verbose, 777, "{0} {1} [{2:x8}]", ae, pps.times[ae], pps.channels[ae]); } int counted = edi + 1; ptrFile.Log.TraceEvent(LogLevels.Verbose, 3336, "constructed buffer {0} of {1} hits for {2} neutrons", rdt.NumProcessedRawDataBuffers, counted, events); // push the time doubles through the pipeline StreamStatusBlock ssb = rdt.PassBufferToTheCounters(counted); if (ssb != null) { rdt.ParseStatusBlock(ssb, cycle); ctrllog.TraceEvent(LogLevels.Verbose, 412, "End of stream, status message at byte {0}, len {1}", ssb.index, ssb.msglen); } rdt.StartNewBuffer(); ctrllog.TraceEvent(LogLevels.Verbose, 411, "[{0}] Counted {1} triggers, {2} hits, over {3} secs", rdt.NumProcessedRawDataBuffers, cycle.TotalEvents, cycle.Totals, cycle.TS.TotalSeconds); NC.App.Loggers.Flush(); if (NC.App.Opstate.IsQuitRequested) // cancellation in between buffers { ctrllog.TraceEvent(LogLevels.Warning, 428, "Processing cancelled, stopped at " + BufferStateSnapshot(PseudoInstrument)); break; } } } catch (FatalNeutronCountingException e) { NC.App.Opstate.SOH = NCC.OperatingState.Trouble; ctrllog.TraceException(e); ctrllog.TraceEvent(LogLevels.Warning, 429, "Neutron counting incomplete: {0}, processing stopped at {1}", e.Message, BufferStateSnapshot(PseudoInstrument)); } catch (Exception e) { NC.App.Opstate.SOH = NCC.OperatingState.Trouble; ctrllog.TraceException(e, true); ctrllog.TraceEvent(LogLevels.Warning, 430, "Processing stopped at cycle " + BufferStateSnapshot(PseudoInstrument)); } finally { ptrFile.CloseReader(); if (meas.CurrentRepetition == 1)// this is the first file, create the results before they get used { meas.PrepareINCCResults(); meas.Persist(); } rdt.EndOfCycleProcessing(meas); rdt.FlushCycleSummaryResults(); NC.App.Loggers.Flush(); } FireEvent(EventType.ActionInProgress, this); } // loop over each PTR-32 file (pair) rdt.EndOfCycleProcessing(meas, last:true); PseudoInstrument.selected = false; FireEvent(EventType.ActionInProgress, this); NC.App.Opstate.StopTimer(0); if 
(!NC.App.Opstate.IsAbortRequested) // stop/quit means continue with what is available { //if (meas.HasReportableData) //{ // if we have more than one cycle (one per file), and the cycles are combined into a 'measurement', then do the meta-processing across the results cycle list here NC.App.Opstate.ResetTimer(1, postprocessing, meas, 50, (int)NC.App.AppContext.StatusTimerMilliseconds); meas.CalculateMeasurementResults(); NC.App.Opstate.StopTimer(1); FireEvent(EventType.ActionInProgress, this); ReportMangler rm = new ReportMangler(ctrllog); rm.GenerateReports(meas); meas.SaveMeasurementResults(); } NC.App.Opstate.ResetTokens(); Instruments.All.Remove(PseudoInstrument); NC.App.Opstate.SOH = NCC.OperatingState.Stopping; NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionStop, this); }
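// --- Illustrative sketch (added for clarity, not called by the code above) ---
// The PTR-32 decode loop above turns a buffer of 10 ns ("shake") delta times plus one channel
// byte per hit into absolute times and 32-bit channel masks; a delta of zero means another hit
// at the same instant, so its channel bit is OR'ed into the previous event. A trimmed,
// self-contained version of that inner loop (Ptr32Decode and DecodeBuffer are hypothetical names).
// Unlike the loop above, this sketch guards the first-hit case explicitly (edi >= 0) rather than
// relying on the first delta being non-zero.
internal static class Ptr32Decode
{
    // Returns the number of distinct events written into times[] and masks[].
    public static int DecodeBuffer(uint[] deltaTimes, byte[] channels, int count, double[] times, uint[] masks)
    {
        double shakes = 0;   // running absolute time in 10 ns units
        int edi = -1;        // index of the last distinct event
        for (int ec = 0; ec < count; ec++)
        {
            uint dt = deltaTimes[ec];
            if (dt == 0 && edi >= 0)
            {
                masks[edi] |= 1u << channels[ec];   // simultaneous hit: merge the channel bit
                continue;
            }
            edi++;
            shakes += dt;                           // accumulate total elapsed time
            masks[edi] = 1u << channels[ec];
            times[edi] = shakes;
        }
        return edi + 1;
    }
}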
protected void SortedPulseFileToNCD()
{
    List<string> ext = new List<string>() { ".pulse.sorted" };
    FileList<SortedPulseFile> hdlr = new FileList<SortedPulseFile>();
    hdlr.Init(ext, ctrllog);
    // get the list of files from the named folder
    FileList<SortedPulseFile> files = (FileList<SortedPulseFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, false);
    if (files == null || files.Count() < 1)
    {
        return;
    }
    Random rand = new Random();
    int eventBufferLength = (int)50 * 1024 * 1024;
    int maxValuesInBuffer = eventBufferLength / sizeof(double);
    double[] timeInBuffer = new double[maxValuesInBuffer];
    int NumProcessedRawDataBuffers = 0;
    foreach (var pf in files)
    {
        if (NC.App.Opstate.IsQuitRequested) // cancellation occurs here and at selected steps in the internal file and analyzer processing
            break;
        if (!pf.OpenForReading())
            continue;
        NCDFile ncdfile = PrepNCDFile(NC.App.AppContext.RootLoc, pf, 0);
        if (ncdfile == null)
            continue;
        ctrllog.TraceEvent(LogLevels.Info, 3335, "Converting {0} to {1}", pf.Filename, ncdfile.Filename);
        try
        {
            int a = rand.Next(0, 3);
            byte byt = (byte)(1 << (byte)rand.Next(1, 7));
            byte[] chnbytes = new byte[4] { 0x0, 0x0, 0x0, 0x0 }; // 1 channel is active
            chnbytes[a] = byt;
            NumProcessedRawDataBuffers = 0;
            long read = 0;
            string issue = String.Empty;
            while (!pf.reader.EndOfStream && String.IsNullOrEmpty(issue))
            {
                string s;
                int rb = 0;
                NumProcessedRawDataBuffers++;
                do
                {
                    s = pf.reader.ReadLine();
                    read += 8; // buffer is used up in 8-byte chunks
                    double res = 0;
                    if (Double.TryParse(s, out res))
                    {
                        timeInBuffer[rb++] = res;
                    }
                } while (!pf.reader.EndOfStream && rb < maxValuesInBuffer);
                // now transform the doubles to ncd
                issue = ncdfile.TransferToTraditionalNCDFormat(timeInBuffer, chnbytes, (ulong)rb);
                pf.Log.TraceEvent(LogLevels.Verbose, 3332, "Processing buffer {0} of {1} doubles", NumProcessedRawDataBuffers, rb);
            }
            ncdfile.CustomStatusBlock("Pulse", NC.App.Name + " " + NC.App.Config.VersionString, pf.Filename, issue);
            ncdfile.WriteTagAndStatusBlock();
        }
        catch (Exception e)
        {
            NC.App.Opstate.SOH = NCC.OperatingState.Trouble;
            ctrllog.TraceException(e, true);
            ctrllog.TraceEvent(LogLevels.Warning, 3361, "Processing stopped at cycle " + NumProcessedRawDataBuffers);
        }
        finally
        {
            pf.CloseReader();
            ncdfile.CloseWriter();
            NC.App.Loggers.Flush();
        }
    } // loop over each file
}
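// --- Illustrative sketch (added for clarity, not called by the code above) ---
// SortedPulseFileToNCD refills a fixed double[] buffer from a one-value-per-line text file
// until the buffer is full or the file ends, then hands the buffer off for NCD conversion.
// A minimal version of that refill step (PulseTextBuffering and FillBuffer are hypothetical names):
internal static class PulseTextBuffering
{
    // Reads up to buffer.Length parseable doubles; returns how many were stored.
    public static int FillBuffer(System.IO.StreamReader reader, double[] buffer)
    {
        int rb = 0;
        while (!reader.EndOfStream && rb < buffer.Length)
        {
            string s = reader.ReadLine();
            double res;
            if (double.TryParse(s, out res))
                buffer[rb++] = res;    // unparseable lines are skipped, as in the loop above
        }
        return rb;
    }
}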
protected void PTRFilePairToNCD() { List<string> ext = new List<string>() { ".chn", ".bin" }; FileList<PTRFilePair> hdlr = new FileList<PTRFilePair>(); hdlr.Init(ext, ctrllog); // get the list of files from the named folder FileList<PTRFilePair> files = (FileList<PTRFilePair>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, false); if (files == null || files.Count() < 2) { return; } int removed = files.RemoveAll(f => f.PairEntryFileExtension(".chn")); if (files.Count() < 1) { return; } int eventBufferLength = (int)50 * 1024 * 1024; int maxValuesInBuffer = eventBufferLength / sizeof(UInt32); maxValuesInBuffer /= 4; Byte[] chnInBuffer = new Byte[maxValuesInBuffer]; UInt32[] timeInBuffer = new UInt32[maxValuesInBuffer]; UInt32[] chnmask = new UInt32[32]; for (int i = 0; i < 32; i++) { chnmask[i] = (uint)1 << i; } int NumProcessedRawDataBuffers = 0; UInt32[] channels = new UInt32[maxValuesInBuffer]; Double[] times = new Double[maxValuesInBuffer]; foreach (var ptr32file in files) { if (NC.App.Opstate.IsQuitRequested) // cancellation occurs here and at selected steps in the internal file and analyzer processing break; if (!ptr32file.OpenForReading()) continue; NCDFile ncdfile = PrepNCDFile(NC.App.AppContext.RootLoc, ptr32file.Channels, ptr32file.CycleNumber); if (ncdfile == null) continue; ctrllog.TraceEvent(LogLevels.Verbose, 3335, "Converting {0}+{1} to {2}", ptr32file.Events.Filename, ptr32file.Channels.Filename, ncdfile.Filename); UInt32 deltaTime = 0; Double ShakeTime; // 10ns, like pulse files 10^e-8 try { string issue = String.Empty; //read the header from the BIN file ptr32file.Events.ReadHeader(); ShakeTime = 0; while (ptr32file.Channels.read < ptr32file.Channels.fulllen && String.IsNullOrEmpty(issue)) { NumProcessedRawDataBuffers++; // divide file size into discrete lengths of a reasonable size, say 50Mb or a 128 Mb default int elen = (ptr32file.Events.eventsectionlen > eventBufferLength ? eventBufferLength : (int)ptr32file.Events.eventsectionlen); int clen = (ptr32file.Channels.fulllen > maxValuesInBuffer ? 
maxValuesInBuffer : (int)ptr32file.Channels.fulllen); // read the times up to the buffer limit int events = Math.Min(maxValuesInBuffer, elen / sizeof(UInt32)); events = Math.Min(events, ptr32file.Events.EventsYetToRead()); // might be at the last buffers-worth, check and constrain ptr32file.Events.thisread = ptr32file.Events.ReadUInt32Array(timeInBuffer, 0, events); ptr32file.Events.read += ptr32file.Events.thisread; // read the channel bytes ptr32file.Channels.thisread = ptr32file.Channels.reader.Read(chnInBuffer, 0, clen); ptr32file.Channels.read += ptr32file.Channels.thisread; // todo: fix this because could be or'ing over the buffer end, so this will blow int edi = -1; // actual events for (int ec = 0; ec < events; ec++) // raw event counts { deltaTime = timeInBuffer[ec]; if (deltaTime == 0) // or the channel mask { channels[edi] |= chnmask[chnInBuffer[ec]]; ptr32file.Log.TraceEvent(LogLevels.Verbose, 3246, "multiple hits {0:x8} at {1} ({2} {3})", channels[edi], ShakeTime, edi, ec); continue; } edi++; // the next event ShakeTime += deltaTime; // accumulate total time //set the neutron event with the new neutron channels[edi] = chnmask[chnInBuffer[ec]]; //set the event time with the new neutron time times[edi] = ShakeTime; if (channels[edi] == 0) ptr32file.Log.TraceEvent(LogLevels.Warning, 3334, "0 event at {0}", ShakeTime); // ptrFile.Log.TraceEvent(LogLevels.Verbose, 777, "{0} {1} [{2:x8}]", ae, times[ae], channels[ae]); } // now transform to ncd issue = ncdfile.TransferToTraditionalNCDFormat(times: times, chns: channels, num: (ulong)edi + 1, combineDuplicateHits: true); ptr32file.Log.TraceEvent(LogLevels.Verbose, 3336, "transferred buffer {0} of {1} hits for {2} neutrons", NumProcessedRawDataBuffers, edi + 1, events); } issue = ptr32file.Events.headerstr; ncdfile.CustomStatusBlock("PTR-32", NC.App.Name + " " + NC.App.Config.VersionString, ptr32file.Filename, issue); ncdfile.WriteTagAndStatusBlock(); } catch (Exception e) { NC.App.Opstate.SOH = NCC.OperatingState.Trouble; ctrllog.TraceException(e, true); ctrllog.TraceEvent(LogLevels.Warning, 3361, "Processing stopped at cycle " + NumProcessedRawDataBuffers); } finally { ptr32file.CloseReader(); ncdfile.CloseWriter(); NC.App.Loggers.Flush(); } } // loop over each file }
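// --- Illustrative sketch (added for clarity, not called by the code above) ---
// PTR-32 data arrives as .bin/.chn file pairs (event delta times and channel bytes) sharing a
// base name; the code above relies on FileList and PairEntryFileExtension(".chn") to keep only
// paired entries. A stand-alone version of that pairing step (Ptr32Pairing and PairFiles are
// hypothetical names), assuming the file's existing System and System.Linq usings:
internal static class Ptr32Pairing
{
    // Returns (binPath, chnPath) pairs for every base name that has both extensions.
    public static List<Tuple<string, string>> PairFiles(IEnumerable<string> paths)
    {
        var byBase = paths
            .Where(p => { string e = System.IO.Path.GetExtension(p).ToLowerInvariant(); return e == ".bin" || e == ".chn"; })
            .GroupBy(p => System.IO.Path.Combine(System.IO.Path.GetDirectoryName(p) ?? string.Empty,
                                                 System.IO.Path.GetFileNameWithoutExtension(p)),
                     StringComparer.OrdinalIgnoreCase);
        var pairs = new List<Tuple<string, string>>();
        foreach (var g in byBase)
        {
            string bin = g.FirstOrDefault(p => p.EndsWith(".bin", StringComparison.OrdinalIgnoreCase));
            string chn = g.FirstOrDefault(p => p.EndsWith(".chn", StringComparison.OrdinalIgnoreCase));
            if (bin != null && chn != null)
                pairs.Add(Tuple.Create(bin, chn));   // unmatched files are dropped, as above
        }
        return pairs;
    }
}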
protected void PulseFileSort()
{
    //uint eventBufferLength = 50 * 1024 * 1024;
    FileList<UnsortedPulseFile> hdlr = new FileList<UnsortedPulseFile>();
    hdlr.Init(UnsortedPulseFile.ExtensionList, ctrllog);
    // get the list of files from the named folder
    FileList<UnsortedPulseFile> files = (FileList<UnsortedPulseFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, false);
    if (files == null || files.Count() < 1)
    {
        return;
    }
    foreach (var pf in files)
    {
        if (NC.App.Opstate.IsQuitRequested) // cancellation occurs here and at selected steps in the internal file and analyzer processing
            break;
        string derivedDataFilenamePrefix = pf.GenerateDerivedName();
        ctrllog.TraceEvent(LogLevels.Verbose, 3330, "Sorting {0} to {1}", pf.Filename, derivedDataFilenamePrefix + ".sorted");
        var opt = new ExternalMergeSort.Options(ExternalMergeSort.sizeFromMB(50), ExternalMergeSort.sizeFromMB(10) / 10, 1024 * 10);
        opt.SkipInitialSort = false;        // a single file only, so no external merge at this time, save that feature for later
        opt.RemoveIntermediateFiles = true; // no merge here, we are breaking up a single pulse file into n temp files, so we need to remove the n temp files
        ExternalMergeSort.Logger = pf.Log;
        ExternalMergeSort.Sort(pf.Filename, derivedDataFilenamePrefix + ".sorted", NC.App.Config.RootLoc, opt);
    } // loop over each file
}
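// --- Illustrative sketch (added for clarity, not called by the code above) ---
// PulseFileSort delegates the heavy lifting to ExternalMergeSort; the underlying idea is the
// classic split / sort-in-memory / k-way-merge pattern for a file too large to sort at once.
// A compact stand-alone version for one-double-per-line pulse files (PulseSortSketch and its
// members are hypothetical names), assuming System, System.Collections.Generic and System.Linq:
internal static class PulseSortSketch
{
    public static void Sort(string inPath, string outPath, int chunkSize = 1000000)
    {
        // 1) split: read fixed-size chunks, sort each in memory, spill to temp files
        var chunkFiles = new List<string>();
        using (var reader = new System.IO.StreamReader(inPath))
        {
            var chunk = new List<double>(chunkSize);
            while (!reader.EndOfStream)
            {
                double d;
                if (double.TryParse(reader.ReadLine(), out d))
                    chunk.Add(d);
                if (chunk.Count == chunkSize || reader.EndOfStream)
                {
                    chunk.Sort();
                    string tmp = System.IO.Path.GetTempFileName();
                    System.IO.File.WriteAllLines(tmp, chunk.Select(v => v.ToString("R")));
                    chunkFiles.Add(tmp);
                    chunk.Clear();
                }
            }
        }
        // 2) merge: repeatedly emit the smallest head value across all chunk readers
        var readers = chunkFiles.Select(f => new System.IO.StreamReader(f)).ToList();
        var heads = readers.Select(r => r.EndOfStream ? (double?)null : double.Parse(r.ReadLine())).ToList();
        using (var w = new System.IO.StreamWriter(outPath))
        {
            while (heads.Any(h => h.HasValue))
            {
                int i = 0;
                for (int j = 1; j < heads.Count; j++)
                    if (heads[j].HasValue && (!heads[i].HasValue || heads[j] < heads[i]))
                        i = j;
                w.WriteLine(heads[i].Value.ToString("R"));
                heads[i] = readers[i].EndOfStream ? (double?)null : double.Parse(readers[i].ReadLine());
            }
        }
        foreach (var r in readers) r.Dispose();
        foreach (var f in chunkFiles) System.IO.File.Delete(f);   // remove intermediate files, like the option set above
    }
}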
/// <summary> /// Process a list of NCD files /// </summary> void NCDFileAssay() { if (!NC.App.Opstate.Measurement.Detectors[0].ListMode) ctrllog.TraceEvent(LogLevels.Warning, 439, NC.App.Opstate.Measurement.Detectors[0].Id.DetectorName + " is not a list mode detector."); List<string> ext = new List<string>() { ".ncd" }; FileList<NCDFile> hdlr = new FileList<NCDFile>(); hdlr.Init(ext, ctrllog); FileList<NCDFile> files = null; /// this section now occurs in the caller, the state is set up earlier Measurement meas = NC.App.Opstate.Measurement; PseudoInstrument = new LMDAQ.LMInstrument(meas.Detectors[0]); // psuedo LM until we can map from user or deduce from file content at run-time PseudoInstrument.selected = true; if (!Instruments.Active.Contains(PseudoInstrument)) Instruments.Active.Add(PseudoInstrument); // add to global runtime list DataSourceIdentifier did = meas.Detectors[0].Id; LMRawDataTransform rdt = (LMRawDataTransform)PseudoInstrument.RDT; rdt.SetLMState(((LMConnectionInfo)(PseudoInstrument.id.FullConnInfo)).NetComm); // initialize operation timer here NC.App.Opstate.ResetTimer(0, filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds); FireEvent(EventType.ActionPrep, this); NC.App.Opstate.StampOperationStartTime(); // get the list of files from the named folder, or use the supplied list if (NC.App.AppContext.FileInputList == null) files = (FileList<NCDFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, false); else files = (FileList<NCDFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList); if (files == null || files.Count() < 1) { NC.App.Opstate.StopTimer(0); return; } NC.App.Opstate.ResetTimer(0, this.neutronCountingPrep, 0, 170, (int)NC.App.AppContext.StatusTimerMilliseconds / 4); rdt.SetupCountingAnalyzerHandler(NC.App.Config, did.source.TimeBase(did.SRType), (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Verbose, 439, "Neutron counting processing complete: '" + s + "'"); }, (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Error, 438, "Neutron counting processing stopped with error: '" + s + "'"); rdt.EndAnalysisImmediately(); throw new FatalNeutronCountingException(s); // emergency exit, caught and noted in file processing loop below }, (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Error, 437, "Neutron counting processing [Block] stopped with error: '" + s + "'"); rdt.EndAnalysisImmediately(); throw new FatalNeutronCountingException(s); // emergency exit, caught and noted in file processing loop below } ); meas.AcquireState.num_runs = (ushort)files.Count(); // RequestedRepetitions rdt.PrepareAndStartCountingAnalyzers(meas.AnalysisParams); NC.App.Opstate.StopTimer(0); FireEvent(EventType.ActionStart, this); meas.CurrentRepetition = 0; NC.App.Opstate.ResetTimer(0, filerawprocessing, PseudoInstrument, 250, (int)NC.App.AppContext.StatusTimerMilliseconds); foreach (var ncd in files) { if (NC.App.Opstate.IsQuitRequested) // cancellation occurs here and at selected steps in the internal file and analyzer processing break; if (!ncd.OpenForReading()) continue; PseudoInstrument.PendingReset(); if (meas.CurrentRepetition == 0) { meas.MeasDate = ncd.DTO; meas.Detectors[0].Id.source = ConstructedSource.NCDFile; PseudoInstrument.id.source = ConstructedSource.NCDFile; } Cycle cycle = new Cycle(ctrllog); cycle.UpdateDataSourceId(ConstructedSource.NCDFile, InstrType.LMMM /* revisit this, it could be from any source */, ncd.DTO, ncd.Filename); 
meas.Add(cycle); rdt.StartCycle(cycle); meas.CurrentRepetition++; rdt.NumProcessedRawDataBuffers = 0; int thisread = 0; long read = 0, fulllen = ncd.stream.Length; NC.App.Opstate.SOH = NCC.OperatingState.Living; PseudoInstrument.id.FileName = ncd.Filename; ctrllog.TraceEvent(LogLevels.Info, 3335, "Assaying with {0}", ncd.Filename); try { while (read < fulllen) { rdt.NumProcessedRawDataBuffers++; // divide file size into discrete lengths of a reasonable size, say 50Mb or a 128 Mb default int len = (fulllen > (int)rdt.CurEventBuffLen ? (int)rdt.CurEventBuffLen : (int)fulllen); thisread = ncd.reader.Read(rdt.RawDataBuff, 0, len); read += thisread; ctrllog.TraceEvent(LogLevels.Verbose, 410, "Processing buffer {0} of {1} bytes", rdt.NumProcessedRawDataBuffers, len); // push the bytes through the pipeline StreamStatusBlock res = rdt.PassBufferToTheCounters(thisread); if (res != null) { rdt.ParseStatusBlock(res, cycle); // assert read >= fullen here, because we found a valid status block at the end of the file ctrllog.TraceEvent(LogLevels.Verbose, 412, "End of stream, status message at byte {0}, len {1}", res.index, res.msglen); } rdt.StartNewBuffer(); ctrllog.TraceEvent(LogLevels.Verbose, 411, "[{0}] Counted {1} triggers, {2} hits, over {3} secs", rdt.NumProcessedRawDataBuffers, cycle.TotalEvents, cycle.Totals, cycle.TS.TotalSeconds); NC.App.Loggers.Flush(); if (NC.App.Opstate.IsQuitRequested) // cancellation in between buffers { ctrllog.TraceEvent(LogLevels.Warning, 428, "Processing cancelled, stopped at " + BufferStateSnapshot(PseudoInstrument)); break; } } } catch (FatalNeutronCountingException e) { NC.App.Opstate.SOH = NCC.OperatingState.Trouble; ctrllog.TraceException(e); ctrllog.TraceEvent(LogLevels.Warning, 429, "Neutron counting incomplete: {0}, processing stopped at {1}", e.Message, BufferStateSnapshot(PseudoInstrument)); } catch (Exception e) { NC.App.Opstate.SOH = NCC.OperatingState.Trouble; ctrllog.TraceException(e, true); ctrllog.TraceEvent(LogLevels.Warning, 430, "Processing stopped at cycle " + BufferStateSnapshot(PseudoInstrument)); } finally { ncd.CloseReader(); if (meas.CurrentRepetition == 1)// this is the first file, create the results before they get used { meas.PrepareINCCResults(); meas.Persist(); } rdt.EndOfCycleProcessing(meas); NC.App.Loggers.Flush(); } FireEvent(EventType.ActionInProgress, this); } // loop over each NCD file PseudoInstrument.selected = false; rdt.EndOfCycleProcessing(meas, last: true); FireEvent(EventType.ActionInProgress, this); NC.App.Opstate.StopTimer(0); if (!NC.App.Opstate.IsAbortRequested) // stop/quit means continue with what is available { //if (meas.HasReportableData) //{ // if we have more than one cycle (one per file), and the cycles are combined into a 'measurement', then do the meta-processing across the results cycle list here NC.App.Opstate.ResetTimer(1, postprocessing, meas, 50, (int)NC.App.AppContext.StatusTimerMilliseconds); meas.CalculateMeasurementResults(); NC.App.Opstate.StopTimer(1); FireEvent(EventType.ActionInProgress, this); ReportMangler rm = new ReportMangler(ctrllog); rm.GenerateReports(meas); meas.SaveMeasurementResults(); //} } NC.App.Opstate.ResetTokens(); Instruments.All.Remove(PseudoInstrument); NC.App.Opstate.SOH = NCC.OperatingState.Stopping; NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionStop, this); }
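// --- Illustrative sketch (added for clarity, not called by the code above) ---
// The NCD loop above streams each file through the counting pipeline in fixed-size buffers
// instead of loading the file whole: read up to buffer.Length bytes, hand the filled portion to
// a consumer (there, rdt.PassBufferToTheCounters), and repeat until the file is exhausted.
// A minimal version of that loop (BufferedFileStreaming and StreamInBuffers are hypothetical names):
internal static class BufferedFileStreaming
{
    // consumer receives (buffer, bytesInThisChunk) once per chunk; returns total bytes pushed.
    public static long StreamInBuffers(string path, byte[] buffer, Action<byte[], int> consumer)
    {
        long pushed = 0;
        using (var fs = System.IO.File.OpenRead(path))
        {
            int thisread;
            while ((thisread = fs.Read(buffer, 0, buffer.Length)) > 0)
            {
                consumer(buffer, thisread);
                pushed += thisread;
            }
        }
        return pushed;
    }
}
// Usage: StreamInBuffers("run1.ncd", new byte[50 * 1024 * 1024], (b, n) => { /* push n bytes to the counters */ });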
protected void SortedPulseFileAssay() { List<string> ext = new List<string>() { ".pulse.sorted", ".txt.sorted" }; FileList<SortedPulseFile> hdlr = new FileList<SortedPulseFile>(); hdlr.Init(ext, ctrllog); FileList<SortedPulseFile> files = null; // initialize operation timer here NC.App.Opstate.ResetTimer(0, filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds); FireEvent(EventType.ActionPrep, this); NC.App.Opstate.StampOperationStartTime(); // get the list of files from the named folder files = (FileList<SortedPulseFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true); if (files == null || files.Count() < 1) { NC.App.Opstate.StopTimer(0); return; } Measurement meas = NC.App.Opstate.Measurement; PseudoInstrument = new LMDAQ.LMInstrument(meas.Detectors[0]); // psuedo LM until we can map from user or deduce from file content at run-time PseudoInstrument.selected = true; if (!Instruments.Active.Contains(PseudoInstrument)) Instruments.Active.Add(PseudoInstrument); // add to global runtime list // Force RDT.State To be a pulse file RDT, this shows a design failure, so need to rework the entire scheme LMRawDataTransform rdt = (PseudoInstrument as LMDAQ.LMInstrument).RDT; PulseProcessingState c = new PulseProcessingState(rdt.State.maxValuesInBuffer); PseudoInstrument.RDT.State = null; PseudoInstrument.RDT.State = c; rdt.Init(NC.App.Loggers.Logger(LMLoggers.AppSection.Data), NC.App.Loggers.Logger(LMLoggers.AppSection.Analysis)); rdt.SetLMState(((LMConnectionInfo)(PseudoInstrument.id.FullConnInfo)).NetComm); rdt.RawDataBuff = null;// free it back up, don't need it NC.App.Opstate.ResetTimer(0, this.neutronCountingPrep, 0, 170, (int)NC.App.AppContext.StatusTimerMilliseconds / 4); DataSourceIdentifier did = meas.Detectors[0].Id; rdt.SetupCountingAnalyzerHandler(NC.App.Config, did.source.TimeBase(did.SRType), // 1e-8 expected here (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Verbose, 439, "Neutron counting processing complete: '" + s + "'"); }, (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Error, 438, "Neutron counting processing stopped with error: '" + s + "'"); rdt.EndAnalysisImmediately(); throw new FatalNeutronCountingException(s); // emergency exit, caught and noted in file processing loop below }, (string s) => { PseudoInstrument.PendingComplete(); ctrllog.TraceEvent(LogLevels.Error, 437, "Neutron counting processing [Block] stopped with error: '" + s + "'"); rdt.EndAnalysisImmediately(); throw new FatalNeutronCountingException(s); // emergency exit, caught and noted in file processing loop below } ); meas.AcquireState.num_runs = (ushort)files.Count(); // RequestedRepetitions rdt.PrepareAndStartCountingAnalyzers(meas.AnalysisParams); NC.App.Opstate.StopTimer(0); FireEvent(EventType.ActionStart, this); meas.CurrentRepetition = 0; NC.App.Opstate.ResetTimer(0, filerawprocessing, PseudoInstrument, 250, (int)NC.App.AppContext.StatusTimerMilliseconds); Random rand = new Random(); int a = rand.Next(0, 3); byte byt = (byte)(1 << (byte)rand.Next(1, 7)); PulseProcessingState pps = PseudoInstrument.RDT.State as PulseProcessingState; foreach (var sortedpulse in files) { if (NC.App.Opstate.IsQuitRequested) // cancellation occurs here and at selected steps in the internal file and analyzer processing break; if (!sortedpulse.OpenForReading()) continue; PseudoInstrument.PendingReset(); if (meas.CurrentRepetition == 0) { meas.MeasDate = new DateTimeOffset(sortedpulse.DTO.Ticks, 
sortedpulse.DTO.Offset); meas.Detectors[0].Id.source = ConstructedSource.SortedPulseTextFile; PseudoInstrument.id.source = ConstructedSource.SortedPulseTextFile; } Cycle cycle = new Cycle(ctrllog); cycle.UpdateDataSourceId(ConstructedSource.SortedPulseTextFile, InstrType.LMMM, // MCNPX is common source of pulse files, but we need an explicit LM type here /* revisit this, it could be from any source */, sortedpulse.DTO, sortedpulse.Filename); meas.Add(cycle); rdt.StartCycle(cycle); meas.CurrentRepetition++; pps.Reset(); NC.App.Opstate.SOH = NCC.OperatingState.Living; PseudoInstrument.id.FileName = sortedpulse.Filename; ctrllog.TraceEvent(LogLevels.Info, 3335, "Assaying with {0}", sortedpulse.Filename); try { pps.chnbytes[a] = byt; // use the same rand value for all these files, makes for consistent per-channel rates counting rdt.NumProcessedRawDataBuffers = 0; string issue = String.Empty; while (!sortedpulse.reader.EndOfStream && String.IsNullOrEmpty(issue)) { string s; int rb = 0; rdt.NumProcessedRawDataBuffers++; do { s = sortedpulse.reader.ReadLine(); double res = 0; if (Double.TryParse(s, out res)) { pps.timeInBuffer[rb++] = res; } } while (!sortedpulse.reader.EndOfStream && rb < pps.maxValuesInBuffer); // now transform the doubles list and assay at the same time ctrllog.TraceEvent(LogLevels.Verbose, 410, "Processing buffer {0} of {1} times", rdt.NumProcessedRawDataBuffers, rb); // push the time doubles through the pipeline StreamStatusBlock ssb = rdt.PassBufferToTheCounters(rb); if (ssb != null) { rdt.ParseStatusBlock(ssb, cycle); // assert read >= fullen here, because we found a valid status block at the end of the file ctrllog.TraceEvent(LogLevels.Verbose, 412, "End of stream, status message at byte {0}, len {1}", ssb.index, ssb.msglen); } rdt.StartNewBuffer(); ctrllog.TraceEvent(LogLevels.Verbose, 411, "[{0}] Counted {1} triggers, {2} hits, over {3} secs", rdt.NumProcessedRawDataBuffers, cycle.TotalEvents, cycle.Totals, cycle.TS.TotalSeconds); NC.App.Loggers.Flush(); if (NC.App.Opstate.IsQuitRequested) // cancellation in between buffers { ctrllog.TraceEvent(LogLevels.Warning, 428, "Processing cancelled, stopped at " + BufferStateSnapshot(PseudoInstrument)); break; } } // Analysis.StreamStatusBlock sb = NCDFile.CustomStatusBlock("Pulse", NC.App.name + " " + NC.App.Config.VersionString, pf.Filename, issue); } catch (FatalNeutronCountingException e) { NC.App.Opstate.SOH = NCC.OperatingState.Trouble; ctrllog.TraceException(e); ctrllog.TraceEvent(LogLevels.Warning, 429, "Neutron counting incomplete: {0}, processing stopped at {1}", e.Message, BufferStateSnapshot(PseudoInstrument)); } catch (Exception e) { NC.App.Opstate.SOH = NCC.OperatingState.Trouble; ctrllog.TraceException(e, true); ctrllog.TraceEvent(LogLevels.Warning, 430, "Processing stopped at cycle " + BufferStateSnapshot(PseudoInstrument)); } finally { sortedpulse.CloseReader(); if (meas.CurrentRepetition == 1)// this is the first file, create the results before they get used { meas.PrepareINCCResults(); meas.Persist(); } rdt.EndOfCycleProcessing(meas); NC.App.Loggers.Flush(); } FireEvent(EventType.ActionInProgress, this); } // loop over each NCD file rdt.EndOfCycleProcessing(meas, last: true); PseudoInstrument.selected = false; FireEvent(EventType.ActionInProgress, this); NC.App.Opstate.StopTimer(0); if (!NC.App.Opstate.IsAbortRequested) // stop/quit means continue with what is available { //if (meas.HasReportableData) //{ // if we have more than one cycle (one per file), and the cycles are combined into a 'measurement', then 
do the meta-processing across the results cycle list here NC.App.Opstate.ResetTimer(1, postprocessing, meas, 50, (int)NC.App.AppContext.StatusTimerMilliseconds); meas.CalculateMeasurementResults(); NC.App.Opstate.StopTimer(1); FireEvent(EventType.ActionInProgress, this); ReportMangler rm = new ReportMangler(ctrllog); rm.GenerateReports(meas); meas.SaveMeasurementResults(); //} } NC.App.Opstate.ResetTokens(); Instruments.All.Remove(PseudoInstrument); NC.App.Opstate.SOH = NCC.OperatingState.Stopping; NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionStop, this); }
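// --- Illustrative sketch (added for clarity, not called by the code above) ---
// Every list-mode assay path wires the same three callbacks into SetupCountingAnalyzerHandler:
// a completion notice, a stream error, and a block error, where both error paths stop the
// analyzers and escape via FatalNeutronCountingException so the per-file loop can log the
// failure and move on. A reduced sketch of that contract (CountingCallbacks is a hypothetical
// name; InvalidOperationException stands in for FatalNeutronCountingException here):
internal sealed class CountingCallbacks
{
    public Action<string> OnComplete;     // e.g. "Neutron counting processing complete"
    public Action<string> OnStreamError;  // log, end analysis immediately, then throw to unwind the file loop
    public Action<string> OnBlockError;   // same handling as OnStreamError, different log code

    public static CountingCallbacks Create(Action<string> log, Action stopAnalysis)
    {
        return new CountingCallbacks
        {
            OnComplete = s => log("complete: " + s),
            OnStreamError = s => { log("stream error: " + s); stopAnalysis(); throw new InvalidOperationException(s); },
            OnBlockError = s => { log("block error: " + s); stopAnalysis(); throw new InvalidOperationException(s); }
        };
    }
}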
void DatazFileAssay() { List <string> exts = new List <string>() { ".dataz" }; FileList <DatazFile> hdlr = new FileList <DatazFile>(); hdlr.Init(exts, ctrllog); FileList <DatazFile> files = null; // initialize operation timer here NC.App.Opstate.ResetTimer(filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds); FireEvent(EventType.ActionPrep, this); NC.App.Opstate.StampOperationStartTime(); if (NC.App.AppContext.FileInputList == null) { files = (FileList <DatazFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true); } else { files = (FileList <DatazFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList); } if (files == null || files.Count < 1) { NC.App.Opstate.StopTimer(); NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionStop, this); ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable Dataz files found"); return; } AssaySelector.MeasurementOption mo = NC.App.Opstate.Measurement.MeasOption; foreach (DatazFile mc in files) { Measurement meas = null; try { if (!mc.OpenForReading()) { continue; } if (NC.App.Opstate.IsQuitRequested) { break; } mc.ScanSections(); mc.ProcessSections(); if (mc.Cycles.Count == 0) { ctrllog.TraceEvent(LogLevels.Error, 404, "This Dataz file has no good cycles."); } if (mc.Plateaux.Count == 0) { ctrllog.TraceEvent(LogLevels.Error, 404, $"This Dataz file has no defined sequences, over {mc.Cycles.Count} cycles."); } else { AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState); Detector curdet = NC.App.Opstate.Measurement.Detector; if (mc.AcquistionStateChanged) { orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState); curdet = mc.DataZDetector; if (curdet.AB.Unset) { ABKey abkey = new ABKey(curdet.MultiplicityParams, mc.MaxBins); LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB); } } ctrllog.TraceInformation($"{mc.Cycles.Count} cycles and {mc.Plateaux.Count} sequences encountered in Dataz file {mc.Filename}"); System.Collections.IEnumerator iter = mc.GetSequences(); while (iter.MoveNext()) { DatazFile.Plateau pla = (DatazFile.Plateau)iter.Current; ResetMeasurement(); // update acq and then meas here AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, pla.Cycles[0].DTO, (ushort)pla.Num, mc.Filename); newacq.data_src = ConstructedSource.DatazFile; IntegrationHelpers.BuildMeasurement(newacq, curdet, mo); meas = NC.App.Opstate.Measurement; meas.MeasDate = newacq.MeasDateTime; meas.Persist(); // preserve the basic results record meas.RequestedRepetitions = (ushort)pla.Num; for (int i = 0; i < meas.RequestedRepetitions; i++) { /* run date and time (IAEA format) */ AddMCSRDataCycle(i, pla.Cycles[i], meas, mc.Filename); if (i % 8 == 0) { FireEvent(EventType.ActionInProgress, this); } } FireEvent(EventType.ActionInProgress, this); ComputeFromINCC5SRData(meas); FireEvent(EventType.ActionInProgress, this); } } } catch (Exception e) { NC.App.Opstate.SOH = OperatingState.Trouble; ctrllog.TraceException(e, true); ctrllog.TraceEvent(LogLevels.Error, 437, "Dataz data file processing stopped with error: '" + e.Message + "'"); } finally { mc.CloseReader(); NC.App.Loggers.Flush(); } } NC.App.Opstate.ResetTokens(); NC.App.Opstate.SOH = OperatingState.Stopping; NC.App.Opstate.StampOperationStopTime(); FireEvent(EventType.ActionFinished, this); }
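// --- Illustrative sketch (added for clarity, not called by the code above) ---
// DatazFileAssay builds one Measurement per sequence ("plateau"), each holding a run of
// consecutive cycles from the Dataz file. The actual grouping rule lives inside DatazFile and is
// not shown here; as a stand-in, this sketch groups consecutive cycles that share a key (for
// example an acquisition-state tag), which is one plausible way such sequences could be formed.
// CycleStub, SequenceGrouping and GroupConsecutive are hypothetical names.
internal sealed class CycleStub
{
    public DateTimeOffset DTO;    // cycle timestamp
    public string SequenceKey;    // whatever marks cycles as belonging to the same sequence
}

internal static class SequenceGrouping
{
    public static List<List<CycleStub>> GroupConsecutive(IEnumerable<CycleStub> cycles)
    {
        var groups = new List<List<CycleStub>>();
        List<CycleStub> current = null;
        string lastKey = null;
        foreach (CycleStub c in cycles)
        {
            if (current == null || !string.Equals(c.SequenceKey, lastKey, StringComparison.Ordinal))
            {
                current = new List<CycleStub>();   // a key change starts a new sequence
                groups.Add(current);
                lastKey = c.SequenceKey;
            }
            current.Add(c);
        }
        return groups;
    }
}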