// Acquire from Database, Manual data entry and Reanalysis work the same from this point.
// Validates the current Measurement (count time > 0, at least one usable cycle), ensures the
// detector's alpha/beta arrays exist, then recomputes the INCC5 shift-register results.
// The Opstate call sequence (ResetTimer / StampOperationStartTime ... ResetTokens / SOH /
// StampOperationStopTime) is the operation-lifecycle protocol shared by the sibling assay methods.
// NOTE(review): ResetTimer is called here but no matching StopTimer is visible in this method — confirm intended.
void DBDataAssay()
{
    NC.App.Opstate.ResetTimer(filegather, null, 170, (int)NC.App.AppContext.StatusTimerMilliseconds);
    FireEvent(EventType.ActionPrep, this);
    NC.App.Opstate.StampOperationStartTime();
    Measurement meas = NC.App.Opstate.Measurement;
    if (meas.AcquireState.run_count_time <= 0.0)
    {
        ctrllog.TraceEvent(LogLevels.Error, 441, "Count time is <= 0.");
    }
    else if (meas.Cycles.GetUseableCycleCount() < 1)
    {
        ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles.");
    }
    else
    {
        // Alpha/beta must be computed at least once before cycle conditioning can use it.
        if (meas.Detector.AB.Unset)
        {
            ABKey abkey = new ABKey(meas.Detector.MultiplicityParams, 512); // NEXT: maxbins is arbitrary, just the first of many for VSR
            LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, meas.Detector.AB);
        }
        meas.Persist(); // preserve the basic results record
        FireEvent(EventType.ActionInProgress, this);
        // APluralityOfMultiplicityAnalyzers: set to None, CycleConditioning sets each cycle anew
        meas.Cycles.ResetStatus(meas.Detector.MultiplicityParams);
        ComputeFromINCC5SRData(meas);
        FireEvent(EventType.ActionInProgress, this);
    }
    // Operation epilogue: release tokens, mark stopping, stamp the stop time, notify listeners.
    NC.App.Opstate.ResetTokens();
    NC.App.Opstate.SOH = OperatingState.Stopping;
    NC.App.Opstate.StampOperationStopTime();
    FireEvent(EventType.ActionFinished, this);
}
// Process Test data/Disk files (.dat, .cnn): each file is parsed into one Measurement,
// converted to cycles, then run through the INCC5 shift-register computation.
// File layout (per the INCC5 disk format): number of good runs, run count time, then run data;
// optionally followed by per-position add-a-source (AAS) sections for passive verification.
void TestDataAssay()
{
    List <string> exts = new List <string>() { ".dat", ".cnn" };
    FileList <TestDataFile> hdlr = new FileList <TestDataFile>();
    hdlr.Init(exts, ctrllog);
    FileList <TestDataFile> files = null;
    // initialize operation timer here (files is still null at this point; the timer callback gets a null state object)
    NC.App.Opstate.ResetTimer(filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds);
    FireEvent(EventType.ActionPrep, this);
    NC.App.Opstate.StampOperationStartTime();
    // Build the candidate file list either by scanning a path (with optional recursion) or from an explicit list.
    if (NC.App.AppContext.FileInputList == null)
    {
        files = (FileList <TestDataFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true);
    }
    else
    {
        files = (FileList <TestDataFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList);
    }
    // Nothing to do: close out the operation cleanly and leave.
    if (files == null || files.Count < 1)
    {
        NC.App.Opstate.StopTimer();
        NC.App.Opstate.StampOperationStopTime();
        FireEvent(EventType.ActionStop, this);
        ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable Test data/Disk .dat or .cnn files found");
        return;
    }
    // Snapshot the acquire state; each file derives a fresh acquire state from this original.
    AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState);
    Detector curdet = NC.App.Opstate.Measurement.Detector;
    AssaySelector.MeasurementOption mo = NC.App.Opstate.Measurement.MeasOption;
    // Alpha/beta must exist before cycle conditioning uses it.
    if (curdet.AB.Unset)
    {
        ABKey abkey = new ABKey(curdet.MultiplicityParams, 512); // NEXT: maxbins is arbitrary, just the first of many for VSR
        LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB);
    }
    // Each .dat file is a separate measurement
    foreach (TestDataFile td in files)
    {
        Measurement meas = null;
        ResetMeasurement();
        try
        {
            if (!td.OpenForReading())
            {
                continue;
            }
            if (NC.App.Opstate.IsQuitRequested)
            {
                break;
            }
            uint run_seconds;
            ushort number_good_runs = 0;
            ushort total_number_runs = 0;
            double run_count_time = 0;
            double total_good_count_time = 0;
            if (mo != AssaySelector.MeasurementOption.holdup)
            {
                /* number of good runs */
                string l = td.reader.ReadLine();
                ushort.TryParse(l, out number_good_runs); // unparsable input leaves 0 and is rejected below
                if (number_good_runs == 0)
                {
                    ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles.");
                    continue;
                }
                /* run count time */
                l = td.reader.ReadLine();
                double.TryParse(l, out run_count_time);
                if (run_count_time <= 0.0)
                {
                    ctrllog.TraceEvent(LogLevels.Error, 441, "Count time is <= 0.");
                    continue;
                }
            }
            // update acq and then meas here
            AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, DateTimeOffset.Now, number_good_runs, td.Filename);
            IntegrationHelpers.BuildMeasurement(newacq, curdet, mo);
            meas = NC.App.Opstate.Measurement;
            meas.MeasDate = newacq.MeasDateTime;
            meas.Persist(); // preserve the basic results record
            /* convert to total count time */
            total_number_runs = number_good_runs;
            total_good_count_time = number_good_runs * run_count_time;
            meas.RequestedRepetitions = total_number_runs;
            // update the acq status and do the persist
            /* read in run data */
            for (int i = 0; i < number_good_runs; i++)
            {
                /* run date and time (IAEA format) */
                run_seconds = (uint)(i * (ushort)run_count_time); // from start time
                AddTestDataCycle(i, run_seconds, run_count_time, meas, td);
                if (i % 8 == 0)
                {
                    FireEvent(EventType.ActionInProgress, this); // periodic progress ping every 8 cycles
                }
            }
            FireEvent(EventType.ActionInProgress, this);
            // Add-a-source sections apply only to passive verification measurements using the AAS method.
            if (meas.MeasOption == AssaySelector.MeasurementOption.verification &&
                meas.INCCAnalysisState.Methods.Has(AnalysisMethod.AddASource) &&
                meas.AcquireState.well_config == WellConfiguration.Passive)
            {
                AddASourceSetup aass = IntegrationHelpers.GetCurrentAASSParams(meas.Detector);
                for (int n = 1; n <= aass.number_positions; n++)
                {
                    /* number of good runs */
                    // NOTE(review): the line is read before the EndOfStream check; at EOF ReadLine returns
                    // null, TryParse leaves 0 and the position is skipped via the "no good cycles" branch — confirm intended.
                    string l = td.reader.ReadLine();
                    if (td.reader.EndOfStream)
                    {
                        ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data found in disk file. AAS p" + n.ToString());
                    }
                    ushort.TryParse(l, out number_good_runs);
                    if (number_good_runs == 0)
                    {
                        ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles. AAS p" + n.ToString());
                        continue;
                    }
                    /* run count time */
                    l = td.reader.ReadLine();
                    double.TryParse(l, out run_count_time);
                    if (run_count_time <= 0.0)
                    {
                        ctrllog.TraceEvent(LogLevels.Error, 441, "Count time is <= 0. AAS p" + n.ToString());
                        continue;
                    }
                    /* read in run data */
                    for (int i = 0; i < number_good_runs; i++)
                    {
                        /* run date and time (IAEA format) */
                        run_seconds = (uint)((n + 1) * (i + 1) * (ushort)run_count_time); // from start time
                        AddTestDataCycle(i, run_seconds, run_count_time, meas, td, " AAS p" + n.ToString(), n);
                        if (i % 8 == 0)
                        {
                            FireEvent(EventType.ActionInProgress, this);
                        }
                    }
                    FireEvent(EventType.ActionInProgress, this);
                }
            }
        }
        catch (Exception e)
        {
            NC.App.Opstate.SOH = OperatingState.Trouble;
            ctrllog.TraceException(e, true);
            ctrllog.TraceEvent(LogLevels.Error, 437, "Test data file processing stopped with error: '" + e.Message + "'");
        }
        finally
        {
            td.CloseReader();
            NC.App.Loggers.Flush();
        }
        // NOTE(review): if an exception (or a failed OpenForReading on a later iteration) leaves meas null,
        // ComputeFromINCC5SRData receives null here — confirm it tolerates a null measurement.
        FireEvent(EventType.ActionInProgress, this);
        ComputeFromINCC5SRData(meas);
        FireEvent(EventType.ActionInProgress, this);
    }
    // Operation epilogue shared with the other assay operations.
    NC.App.Opstate.ResetTokens();
    NC.App.Opstate.SOH = OperatingState.Stopping;
    NC.App.Opstate.StampOperationStopTime();
    FireEvent(EventType.ActionFinished, this);
}
/// <summary>
/// Process one or more .NCC Review measurement data files, with related .NOP/.COP files.
/// Presence of NOP/COP files is optional.
/// See Import Operator Declarations from Operator Review and Measurements from Radiation Review p. 24,
/// See Operator Declaration File Format p. 87,
/// See Operator Declaration File Format for Curium Ratio Measurements p. 88,
/// See Radiation Review Measurement Data File Format p. 93, INCC Software Users Manual, March 29, 2009
/// </summary>
void INCCReviewFileProcessing()
{
    FireEvent(EventType.ActionPrep, this);
    NC.App.Opstate.StampOperationStartTime();
    // This code would be the same for an interactive version of this operation
    // > Start here
    // first find and process the .NOP files
    List <string> exts = new List <string>() { ".nop", ".cop" };
    FileList <CSVFile> hdlr = new FileList <CSVFile>();
    hdlr.Init(exts, ctrllog);
    FileList <CSVFile> files = null;
    OPFiles opfiles = new OPFiles();
    // The INCC5 semantics: scan a path or use the explicit file list.
    if (NC.App.AppContext.FileInputList == null)
    {
        files = (FileList <CSVFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true);
    }
    else
    {
        files = (FileList <CSVFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList);
    }
    if (files != null && files.Count > 0)
    {
        // construct lists of isotopics and items from the NOP and COP files
        opfiles.Process(files);
        ctrllog.TraceEvent(LogLevels.Verbose, 33085, "NOP items " + opfiles.Results.ItemIds.Count);
        //ctrllog.TraceEvent(LogLevels.Verbose, 33085, "COP entries " + opfiles.Results.COPRatioRecs.Count);
    }
    else
    {
        // Declarations are optional; processing continues with defaults.
        ctrllog.TraceEvent(LogLevels.Warning, 33085, "No operator declarations available, continuing with default values");
    }
    // process the NCC files only
    INCCFileOrFolderInfo foo = new INCCFileOrFolderInfo(ctrllog, "*.NCC");
    if (NC.App.AppContext.FileInputList == null)
    {
        foo.SetPath(NC.App.AppContext.FileInput);
    }
    else
    {
        foo.SetFileList(NC.App.AppContext.FileInputList);
    }
    // Forward transfer progress events to this operation's listeners.
    foo.eh += new TransferEventHandler((s, e) => {
        FireEvent(EventType.ActionInProgress, e);
    });
    List <INCCTransferBase> res = foo.Restore();
    // use RemoveAll to cull those NCC files that reference a non-existent detector
    DetectorList dl = NC.App.DB.Detectors;
    foreach (INCCReviewFile rf in res)
    {
        List <Detector> tdk = dl.FindAll(d => 0 == string.Compare(d.Id.DetectorName, rf.detector, true));
        if (tdk.Count < 1)
        {
            rf.skip = true;
            ctrllog.TraceEvent(LogLevels.Warning, 33085, "No detector " + rf.detector + " defined, cannot complete processing NCC review file " + System.IO.Path.GetFileName(rf.Path));
        }
    }
    res.RemoveAll(rf => (rf as INCCReviewFile).skip);
    res.Sort((rf1, rf2) => // sort chronologically
    {
        return(DateTime.Compare((rf1 as INCCReviewFile).dt, (rf2 as INCCReviewFile).dt));
    });
    // end here > The sorted, filtered and processed list here would be returned to the UI for display and interactive selection
    if (res == null || res.Count < 1)
    {
        NC.App.Opstate.StopTimer();
        NC.App.Opstate.StampOperationStopTime();
        FireEvent(EventType.ActionStop, this);
        ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable NCC review files found in " + System.IO.Path.GetFullPath(foo.GetPath()));
        return;
    }
    // Snapshot the acquire state; each NCC file derives a fresh acquire state from this original.
    AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState);
    // Each NCC file is a separate measurement
    foreach (INCCReviewFile irf in res)
    {
        ResetMeasurement();
        if (NC.App.Opstate.IsQuitRequested)
        {
            break;
        }
        // Find the detector named in the NCC file (existence shown in earlier processing above)
        Detector curdet = NC.App.DB.Detectors.Find(d => string.Compare(d.Id.DetectorName, irf.detector, true) == 0);
        if (curdet.AB.Unset)
        {
            ABKey abkey = new ABKey(curdet.MultiplicityParams, 512); // NEXT: maxbins is arbitrary, just the first of many for VSR
            LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB);
        }
        // set up acquire state based on the NCC file content
        AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, irf);
        AssaySelector.MeasurementOption mo = NCCMeasOption(irf);
        // make a temp MeasId
        MeasId thisone = new MeasId(mo, newacq.MeasDateTime);
        // get the list of measurement Ids for this detector
        List <MeasId> list = NC.App.DB.MeasurementIds(curdet.Id.DetectorName, mo.PrintName());
        MeasId already = list.Find(mid => mid.Equals(thisone));
        if (already != null)
        {
            // URGENT: do the replacement as it says
            ctrllog.TraceEvent(LogLevels.Warning, 33085, "Replacing the matching {0} measurement, timestamp {1} (in a future release)", already.MeasOption.PrintName(), already.MeasDateTime.ToString());
        }
        IntegrationHelpers.BuildMeasurement(newacq, curdet, mo);
        Measurement meas = NC.App.Opstate.Measurement;
        meas.MeasDate = newacq.MeasDateTime; // use the date and time from the NCC file content
        meas.Persist(); // preserve the basic results record
        if (AssaySelector.ForMass(meas.MeasOption) && !meas.INCCAnalysisState.Methods.AnySelected())
        {
            ctrllog.TraceEvent(LogLevels.Warning, 437, "No mass methods for " + meas.INCCAnalysisState.Methods.selector.ToString());
        }
        try
        {
            ushort total_number_runs = 0;
            double run_count_time = 0; // NOTE(review): never assigned from irf, so total_good_count_time is always 0 — confirm intended
            double total_good_count_time = 0;
            if (irf.num_runs == 0)
            {
                ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles.");
                continue;
            }
            if (meas.MeasOption == AssaySelector.MeasurementOption.holdup)
            {
                continue;
            }
            /* convert to total count time */
            total_number_runs = irf.num_runs;
            total_good_count_time = irf.num_runs * run_count_time;
            meas.RequestedRepetitions = total_number_runs;
            // convert runs to cycles
            for (int i = 0; i < irf.num_runs; i++)
            {
                /* run date and time (IAEA format) */
                AddReviewFileCycle(i, irf.runs[i], irf.times[i], meas, irf.Path);
                if (i % 8 == 0)
                {
                    FireEvent(EventType.ActionInProgress, this); // periodic progress ping every 8 cycles
                }
            }
            FireEvent(EventType.ActionInProgress, this);
            // NEXT: handle true AAS cycles as in INCC5
            if (meas.MeasOption == AssaySelector.MeasurementOption.verification &&
                meas.INCCAnalysisState.Methods.Has(AnalysisMethod.AddASource) &&
                meas.AcquireState.well_config == WellConfiguration.Passive)
            {
                ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data processed because the implementation is incomplete. AAS");
                //AddASourceSetup aass = IntegrationHelpers.GetCurrentAASSParams(meas.Detector);
                //for (int n = 1; n <= aass.number_positions; n++)
                //{
                //    /* number of good runs */
                //    string l = td.reader.ReadLine();
                //    if (td.reader.EndOfStream)
                //    {
                //        ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data found in disk file. " + "AAS p" + n.ToString());
                //    }
                //}
            }
        }
        catch (Exception e)
        {
            NC.App.Opstate.SOH = OperatingState.Trouble;
            ctrllog.TraceException(e, true);
            ctrllog.TraceEvent(LogLevels.Error, 437, "NCC file processing stopped with error: '" + e.Message + "'");
        }
        finally
        {
            NC.App.Loggers.Flush();
        }
        FireEvent(EventType.ActionInProgress, this);
        ComputeFromINCC5SRData(meas);
        FireEvent(EventType.ActionInProgress, this);
    }
    // Operation epilogue shared with the other assay operations.
    NC.App.Opstate.ResetTokens();
    NC.App.Opstate.SOH = OperatingState.Stopping;
    NC.App.Opstate.StampOperationStopTime();
    FireEvent(EventType.ActionFinished, this);
}
/// <summary>
/// Convenience overload: builds the alpha/beta cache key from the multiplicity
/// parameters and counting results, then delegates to the keyed overload,
/// filling in the results' AB arrays.
/// </summary>
public static void SetAlphaBeta(Multiplicity mkey, MultiplicityCountingRes mcr)
{
    SetAlphaBeta(new ABKey(mkey, mcr), mcr.AB);
}
/// <summary>
/// Calculate the alpha/beta arrays for multiplicity dead-time correction and save the result in the cache.
/// NEXT: extend with Numerics.BigInteger replacing BigFloat, as per above (for when phi is non-zero)
/// </summary>
/// <param name="key">three values needed for alpha beta (max bin count, gatewidth, phi aka multiplicity T)</param>
/// <param name="AB">The alpha beta array as used on detectors and multiplicity counting results</param>
public static void SetAlphaBeta(ABKey key, AlphaBeta AB)
{
    // Cache hit: reuse previously computed arrays and return immediately.
    AlphaBeta cached = AlphaBetaCache.GetAlphaBeta(key);
    if (cached != null)
    {
        AB.α = cached.α;
        AB.β = cached.β;
        return;
    }

    if (key.bins != AB.α.Length)
    {
        AB.Resize((int)key.bins + 1);
    }
    AB.α[0] = 0.0;
    AB.β[0] = 0.0;

    if (key.deadTimeCoefficientTinNanoSecs == 0.0)
    {
        // No dead time: closed form, α(i) = i and β(i) = i(i-1)/2.
        for (uint i = 1; i < key.bins; i++)
        {
            double v = i;
            AB.α[i] = v;
            AB.β[i] = (v * (v - 1.0)) / 2.0;
        }
    }
    else if (key.gateWidthTics != 0.0)
    {
        uint top = key.bins - 1;
        AB.Init((int)key.bins);
        // Nothing beyond the trivial entries to compute unless at least index 2 exists.
        if (top > 1)
        {
            double gateInSeconds = key.gateWidthTics * 1e-7;
            double phi = (key.deadTimeCoefficientTinNanoSecs / 1E9) / gateInSeconds;
            AB.α[0] = 0.0;
            AB.α[1] = 1.0;
            AB.β[0] = 0.0;
            AB.β[1] = 0.0;
            for (int n = 2; n <= top; n++)
            {
                AB.α[n] = 1.0;
                if (phi > 1e-20)
                {
                    // Sum the dead-time correction series for α(n).
                    for (int k = 0; k <= (n - 2); k++)
                    {
                        AB.α[n] += binomialCoefficient(n - 1, k + 1) * Math.Pow((k + 1) * phi, k) / Math.Pow(1.0 - ((k + 1) * phi), k + 2);
                    }
                }
            }
            AB.β[0] = 0.0;
            AB.β[1] = 0.0;
            AB.β[2] = AB.α[2] - 1.0;
            for (int n = 3; n <= top; n++)
            {
                if (phi > 1e-20)
                {
                    // β(n) builds on α(n) plus its own correction series.
                    AB.β[n] = AB.α[n] - 1.0;
                    for (int k = 0; k <= (n - 3); k++)
                    {
                        AB.β[n] += binomialCoefficient(n - 1, k + 2) * (k + 1) * Math.Pow((k + 2) * phi, k) / Math.Pow(1.0 - ((k + 2) * phi), k + 3);
                    }
                }
                else
                {
                    AB.β[n] = 0.0;
                }
            }
        }
    }

    AlphaBetaCache.AddAlphaBeta(key, AB);
}
// Convert one INCC5 run record into a Cycle on the current measurement.
// cf index only for AAS positional cycles; returns the cycle's MaxBins (0 on failure).
unsafe ulong AddToCycleList(run_rec run, Detector det, int cfindex = -1)
{
    ulong maxBins = 0;
    Cycle c = new Cycle(mlogger);
    try
    {
        var when = INCC.DateTimeFrom(TransferUtils.str(run.run_date, INCC.DATE_TIME_LENGTH),
                                     TransferUtils.str(run.run_time, INCC.DATE_TIME_LENGTH));
        // becomes transfer if reanalysis occurs
        c.UpdateDataSourceId(ConstructedSource.INCCTransferCopy, det.Id.SRType, when, det.Id.FileName);
        meas.Add(c, cfindex);
        // mcr is created and stored in the cycle's CountingAnalysisResults
        MultiplicityCountingRes mcr = RunToCycle(run, c, det.MultiplicityParams);
        // AB is calculated at runtime when conditioning a cycle, so it has to be computed here at least once
        if (det.AB.Unset)
        {
            LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(new ABKey(det.MultiplicityParams, mcr), det.AB);
        }
        mcr.AB.TransferIntermediates(det.AB); // copy alpha beta onto the cycle's results
        maxBins = mcr.MaxBins;
    }
    catch (Exception e)
    {
        mlogger.TraceEvent(LogLevels.Warning, 33085, "Cycle processing error {0} {1}", run.run_number, e.Message);
    }
    return maxBins;
}
// Build an in-memory Detector (plus its background, norm, add-a-source and unattended parameter sets)
// from the first records of an INCC5 initial-data detector file, and register everything in the DB maps.
// Overwrite behavior is governed by AppContext.OverwriteImportedDefs.
public unsafe void BuildDetector(INCCInitialDataDetectorFile iddf, int num)
{
    INCCDB DB = NC.App.DB;
    // First record of each section of the transfer file.
    detector_rec d = iddf.Detector[0];
    sr_parms_rec sr = iddf.SRParms[0];
    bkg_parms_rec bkh = iddf.BKGParms[0];
    norm_parms_rec norm = iddf.NormParms[0];
    tm_bkg_parms_rec tmbkg = iddf.TMBKGParms[0];
    add_a_source_setup_rec aass = iddf.AASParms[0];
    InstrType srtype = (InstrType)sr.sr_type;
    bool overwrite = NC.App.AppContext.OverwriteImportedDefs;
    mlogger.TraceEvent(LogLevels.Verbose, 34100, "Building '{0}' detector '{1}' from {2} {3}", srtype.ToString(), TransferUtils.str(d.detector_id, INCC.MAX_DETECTOR_ID_LENGTH), num, iddf.Path);
    // if the detector is not known internally, then
    // add the detector to the list of detectors in memory, and
    // associate a new set of SR, Bkg and Norm and AB params with the new detector
    // todo: What are the HV Params from INCC5, and once that is known, can they be transferred to the INCC6 HV Param and results tables
    Detector det = new Detector();
    if (srtype.IsListMode()) det.Id.FullConnInfo = new LMConnectionInfo();
    try
    {
        // this transfer should be a method in this class, it will be used elsewhere too
        det.Id.DetectorId = TransferUtils.str(d.detector_id, INCC.MAX_DETECTOR_ID_LENGTH);
        det.Id.SRType = srtype;
        det.Id.Type = TransferUtils.str(d.detector_type, INCC.DETECTOR_TYPE_LENGTH);
        det.Id.ElectronicsId = TransferUtils.str(d.electronics_id, INCC.ELECTRONICS_ID_LENGTH);
        det.Id.ConnInfo = sr.sr_port_number.ToString();
        det.Id.source = ConstructedSource.INCCTransferCopy;
        det.MultiplicityParams.FA = srtype.DefaultFAFor();
        det.MultiplicityParams.gateWidthTics = (ulong)(sr.gate_length * 10.0); // shift down to tics from microseconds
        // Shift-register dead-time and gating parameters, copied field-for-field.
        det.SRParams.deadTimeCoefficientAinMicroSecs = sr.coeff_a_deadtime;
        det.SRParams.deadTimeCoefficientBinPicoSecs = sr.coeff_b_deadtime;
        det.SRParams.deadTimeCoefficientCinNanoSecs = sr.coeff_c_deadtime;
        det.SRParams.deadTimeCoefficientMultiplicityinNanoSecs = sr.multiplicity_deadtime;
        det.SRParams.dieAwayTime = sr.die_away_time * 10.0; // shift down to tics from microseconds
        det.SRParams.doublesGateFraction = sr.doubles_gate_fraction;
        det.SRParams.efficiency = sr.efficiency;
        det.SRParams.gateLength = (ulong)(sr.gate_length * 10.0); // shift down to tics from microseconds
        //det.SRParams.gateLength2 = sr.gate_length2;
        det.SRParams.highVoltage = sr.high_voltage;
        det.SRParams.predelay = (ulong)(sr.predelay * 10.0); // shift down to tics from microseconds
        det.SRParams.triplesGateFraction = sr.triples_gate_fraction;
        // = sr.sr_type , sr.sr_port_number, sr.sr_detector_id these are in the Id now, not the SRparams, but they travel together.
        // NOTE(review): this re-reads AppContext.OverwriteImportedDefs instead of using the local 'overwrite'
        // captured above — same source, but confirm they are meant to stay in lockstep.
        if (NC.App.AppContext.OverwriteImportedDefs)
            DB.Detectors.Replace(det);
        else
            DB.Detectors.AddOnlyIfNotThere(det);
        DetectorIndex = DB.Detectors.Count - 1;
        // Passive/active background rates and their errors.
        BackgroundParameters bkg = new BackgroundParameters();
        bkg.DeadtimeCorrectedRates.Singles.v = bkh.curr_passive_bkg_singles_rate;
        bkg.DeadtimeCorrectedRates.Doubles.v = bkh.curr_passive_bkg_doubles_rate;
        bkg.DeadtimeCorrectedRates.Triples.v = bkh.curr_passive_bkg_triples_rate;
        bkg.DeadtimeCorrectedRates.Singles.err = bkh.curr_passive_bkg_singles_err;
        bkg.DeadtimeCorrectedRates.Doubles.err = bkh.curr_passive_bkg_doubles_err;
        bkg.DeadtimeCorrectedRates.Triples.err = bkh.curr_passive_bkg_triples_err;
        bkg.Scaler1.v = bkh.curr_passive_bkg_scaler1_rate;
        bkg.Scaler2.v = bkh.curr_passive_bkg_scaler2_rate;
        bkg.INCCActive.Singles.v = bkh.curr_active_bkg_singles_rate;
        bkg.INCCActive.Singles.err = bkh.curr_active_bkg_singles_err;
        bkg.INCCActive.Scaler1Rate = bkh.curr_active_bkg_scaler1_rate;
        bkg.INCCActive.Scaler2Rate = bkh.curr_active_bkg_scaler2_rate;
        // Truncated-multiplicity background values.
        bkg.TMBkgParams.Singles.v = tmbkg.tm_singles_bkg;
        bkg.TMBkgParams.Ones.v = tmbkg.tm_ones_bkg;
        bkg.TMBkgParams.Twos.v = tmbkg.tm_twos_bkg;
        bkg.TMBkgParams.Zeros.v = tmbkg.tm_zeros_bkg;
        bkg.TMBkgParams.Singles.err = tmbkg.tm_singles_bkg_err;
        bkg.TMBkgParams.Ones.err = tmbkg.tm_ones_bkg_err;
        bkg.TMBkgParams.Twos.err = tmbkg.tm_twos_bkg_err;
        bkg.TMBkgParams.Zeros.err = tmbkg.tm_zeros_bkg_err;
        bkg.TMBkgParams.ComputeTMBkg = (tmbkg.tm_bkg == 0 ? false : true);
        if (DB.BackgroundParameters.Get(det.Id.DetectorName) == null)
        {
            DB.BackgroundParameters.GetMap().Add(det, bkg); // saved to DB at below
        }
        else if (overwrite)
        {
            bkg.modified = true;
            NC.App.DB.BackgroundParameters.GetMap().Remove(det);
            NC.App.DB.BackgroundParameters.GetMap().Add(det, bkg);
            NC.App.DB.BackgroundParameters.Set(det, bkg);
        }
        // save the params listed here using the detector as the key
        NormParameters normp = new NormParameters();
        normp.acceptanceLimitPercent = norm.acceptance_limit_percent;
        normp.acceptanceLimitStdDev = norm.acceptance_limit_std_dev;
        normp.amliRefSinglesRate = norm.amli_ref_singles_rate;
        normp.biasMode = OldToNewBiasTestId(norm.bias_mode);
        normp.biasPrecisionLimit = norm.bias_precision_limit;
        normp.biasTestAddasrcPosition = norm.bias_test_addasrc_position;
        normp.biasTestUseAddasrc = (norm.bias_test_use_addasrc == 0 ? false : true);
        normp.cf252RefDoublesRate.v = norm.cf252_ref_doubles_rate;
        normp.currNormalizationConstant.v = norm.curr_normalization_constant;
        normp.currNormalizationConstant.err = norm.curr_normalization_constant_err;
        normp.initSrcPrecisionLimit = norm.init_src_precision_limit;
        normp.measRate.v = norm.meas_rate;
        normp.measRate.err = norm.meas_rate_err;
        normp.refDate = INCC.DateFrom(TransferUtils.str(norm.ref_date, INCC.DATE_TIME_LENGTH));
        normp.sourceId = TransferUtils.str(norm.source_id, INCC.SOURCE_ID_LENGTH);
        normp.yieldRelativeToMrc95 = norm.yield_relative_to_mrc_95;
        if (DB.NormParameters.Get(det.Id.DetectorName) == null)
        {
            DB.NormParameters.GetMap().Add(det, normp); // saved to DB at end
        }
        else if (overwrite)
        {
            normp.modified = true;
            DB.NormParameters.GetMap().Remove(det);
            DB.NormParameters.GetMap().Add(det, normp); // the in-memory map
            DB.NormParameters.Set(det, normp); // the DB table
        }
        // Add-a-source hardware/motion setup, copied field-for-field.
        AddASourceSetup aassp = new AddASourceSetup();
        aassp.type = OldToNewAASId(aass.ad_type);
        aassp.port_number = aass.ad_port_number;
        aassp.forward_over_travel = aass.ad_forward_over_travel;
        aassp.reverse_over_travel = aass.ad_reverse_over_travel;
        aassp.number_positions = aass.ad_number_positions;
        aassp.dist_to_move = TransferUtils.Copy(aass.ad_dist_to_move, INCC.MAX_ADDASRC_POSITIONS);
        aassp.cm_steps_per_inch = aass.cm_steps_per_inch;
        aassp.cm_forward_mask = aass.cm_forward_mask;
        aassp.cm_reverse_mask = aass.cm_reverse_mask;
        aassp.cm_axis_number = aass.cm_axis_number;
        aassp.cm_over_travel_state = aass.cm_over_travel_state;
        aassp.cm_step_ratio = aass.cm_step_ratio;
        aassp.cm_slow_inches = aass.cm_slow_inches;
        aassp.plc_steps_per_inch = aass.plc_steps_per_inch;
        aassp.scale_conversion_factor = aass.scale_conversion_factor;
        aassp.cm_rotation = (aass.cm_rotation == 0 ? false : true);
        if (!DB.AASSParameters.GetMap().ContainsKey(det))
        {
            DB.AASSParameters.GetMap().Add(det, aassp);
        }
        else if (overwrite)
        {
            aassp.modified = true;
            DB.AASSParameters.GetMap().Remove(det);
            DB.AASSParameters.GetMap().Add(det, aassp); // todo: in-memory and db
        }
        if (!DB.UnattendedParameters.GetMap().ContainsKey(det))
        {
            DB.UnattendedParameters.GetMap().Add(det, new UnattendedParameters());
        }
        else if (overwrite)
        {
            DB.UnattendedParameters.GetMap().Remove(det);
            DB.UnattendedParameters.GetMap().Add(det, new UnattendedParameters());
        }
        // the alpha beta arrays must be sized here, for first use by the calc_alpha_beta code in the cycle conditioning code
        Multiplicity mkey = new Multiplicity(srtype.DefaultFAFor());
        mkey.SR = new ShiftRegisterParameters(det.SRParams);
        MultiplicityCountingRes mcr = new MultiplicityCountingRes(srtype.DefaultFAFor(), 0);
        ABKey abkey = new ABKey(mkey, 512); // NEXT: maxbins is arbitrary
        LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, det.AB);
        mcr.AB.TransferIntermediates(det.AB);
    }
    catch (Exception e)
    {
        mlogger.TraceEvent(LogLevels.Warning, 34064, "Detector transfer processing error {0} {1} ({2})", det.Id.DetectorName, e.Message, System.IO.Path.GetFileName(iddf.Path));
    }
}
/// <summary>
/// Process one or more .NCC Review measurement data files, with related .NOP/.COP files.
/// Presence of NOP/COP files is optional.
/// See Import Operator Declarations from Operator Review and Measurements from Radiation Review p. 24,
/// See Operator Declaration File Format p. 87,
/// See Operator Declaration File Format for Curium Ratio Measurements p. 88,
/// See Radiation Review Measurement Data File Format p. 93, INCC Software Users Manual, March 29, 2009
/// </summary>
void INCCReviewFileProcessing()
{
    FireEvent(EventType.ActionPrep, this);
    NC.App.Opstate.StampOperationStartTime();
    // This code would be the same for an interactive version of this operation
    // > Start here
    // first find and process the .NOP files
    List<string> exts = new List<string>() { ".nop", ".cop" };
    FileList<CSVFile> hdlr = new FileList<CSVFile>();
    hdlr.Init(exts, ctrllog);
    FileList<CSVFile> files = null;
    OPFiles opfiles = new OPFiles();
    // The INCC5 semantics: scan a path or use the explicit file list.
    if (NC.App.AppContext.FileInputList == null)
        files = (FileList<CSVFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true);
    else
        files = (FileList<CSVFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList);
    if (files != null && files.Count > 0)
    {
        // construct lists of isotopics and items from the NOP and COP files
        opfiles.Process(files);
        ctrllog.TraceEvent(LogLevels.Verbose, 33085, "NOP items " + opfiles.Results.ItemIds.Count);
    }
    else
        // Declarations are optional; processing continues with defaults.
        ctrllog.TraceEvent(LogLevels.Warning, 33085, "No operator declarations available, continuing with default values");
    // process the NCC files only
    INCCFileOrFolderInfo foo = new INCCFileOrFolderInfo(ctrllog, "*.NCC");
    if (NC.App.AppContext.FileInputList == null)
        foo.SetPath(NC.App.AppContext.FileInput);
    else
        foo.SetFileList(NC.App.AppContext.FileInputList);
    // Forward transfer progress events to this operation's listeners.
    foo.eh += new TransferEventHandler((s, e) => { FireEvent(EventType.ActionInProgress, e); });
    List<INCCTransferBase> res = foo.Restore();
    // use RemoveAll to cull those NCC files that reference a non-existent detector
    DetectorList dl = NC.App.DB.Detectors;
    foreach (INCCReviewFile rf in res)
    {
        List<Detector> tdk = dl.FindAll(d => 0 == string.Compare(d.Id.DetectorName, rf.detector, true));
        if (tdk.Count < 1)
        {
            rf.skip = true;
            ctrllog.TraceEvent(LogLevels.Warning, 33085, "No detector " + rf.detector + " defined, cannot complete processing NCC review file " + System.IO.Path.GetFileName(rf.Path));
        }
    }
    res.RemoveAll(rf => (rf as INCCReviewFile).skip);
    res.Sort((rf1, rf2) => // sort chronologically
    {
        return DateTime.Compare((rf1 as INCCReviewFile).dt, (rf2 as INCCReviewFile).dt);
    });
    // end here > The sorted, filtered and processed list here would be returned to the UI for display and interactive selection
    if (res == null || res.Count < 1)
    {
        NC.App.Opstate.StopTimer();
        NC.App.Opstate.StampOperationStopTime();
        FireEvent(EventType.ActionStop, this);
        ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable NCC review files found in " + System.IO.Path.GetFullPath(foo.GetPath()));
        return;
    }
    // Snapshot the acquire state; each NCC file derives a fresh acquire state from this original.
    AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState);
    // Each NCC file is a separate measurement
    foreach (INCCReviewFile irf in res)
    {
        ResetMeasurement();
        if (NC.App.Opstate.IsQuitRequested)
            break;
        // Find the detector named in the NCC file (existence shown in earlier processing above)
        Detector curdet = NC.App.DB.Detectors.Find(d => string.Compare(d.Id.DetectorName, irf.detector, true) == 0);
        if (curdet.AB.Unset)
        {
            ABKey abkey = new ABKey(curdet.MultiplicityParams, 512); // NEXT: maxbins is arbitrary, just the first of many for VSR
            LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB);
        }
        // set up acquire state based on the NCC file content
        AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, irf);
        AssaySelector.MeasurementOption mo = NCCMeasOption(irf);
        // make a temp MeasId
        MeasId thisone = new MeasId(mo, newacq.MeasDateTime);
        // get the list of measurement Ids for this detector
        List<MeasId> list = NC.App.DB.MeasurementIds(curdet.Id.DetectorName, mo.PrintName());
        MeasId already = list.Find(mid => mid.Equals(thisone));
        if (already != null)
        {
            // URGENT: do the replacement as it says
            ctrllog.TraceEvent(LogLevels.Warning, 33085, "Replacing the matching {0} measurement, timestamp {1} (in a future release)", already.MeasOption.PrintName(), already.MeasDateTime.ToString());
        }
        IntegrationHelpers.BuildMeasurement(newacq, curdet, mo);
        Measurement meas = NC.App.Opstate.Measurement;
        meas.MeasDate = newacq.MeasDateTime; // use the date and time from the NCC file content
        meas.Persist(); // preserve the basic results record
        if (AssaySelector.ForMass(meas.MeasOption) && !meas.INCCAnalysisState.Methods.AnySelected())
            ctrllog.TraceEvent(LogLevels.Warning, 437, "No mass methods for " + meas.INCCAnalysisState.Methods.selector.ToString());
        try
        {
            ushort total_number_runs = 0;
            double run_count_time = 0; // NOTE(review): never assigned from irf, so total_good_count_time is always 0 — confirm intended
            double total_good_count_time = 0;
            if (irf.num_runs == 0)
            {
                ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles.");
                continue;
            }
            if (meas.MeasOption == AssaySelector.MeasurementOption.holdup)
                continue;
            /* convert to total count time */
            total_number_runs = irf.num_runs;
            total_good_count_time = irf.num_runs * run_count_time;
            meas.RequestedRepetitions = total_number_runs;
            // convert runs to cycles
            for (int i = 0; i < irf.num_runs; i++)
            {
                /* run date and time (IAEA format) */
                AddReviewFileCycle(i, irf.runs[i], irf.times[i], meas, irf.Path);
                if (i % 8 == 0)
                    FireEvent(EventType.ActionInProgress, this); // periodic progress ping every 8 cycles
            }
            FireEvent(EventType.ActionInProgress, this);
            // NEXT: handle true AAS cycles as in INCC5
            if (meas.MeasOption == AssaySelector.MeasurementOption.verification &&
                meas.INCCAnalysisState.Methods.Has(AnalysisMethod.AddASource) &&
                meas.AcquireState.well_config == WellConfiguration.Passive)
            {
                ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data processed because the implementation is incomplete. AAS");
                //AddASourceSetup aass = IntegrationHelpers.GetCurrentAASSParams(meas.Detector);
                //for (int n = 1; n <= aass.number_positions; n++)
                //{
                //    /* number of good runs */
                //    string l = td.reader.ReadLine();
                //    if (td.reader.EndOfStream)
                //    {
                //        ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data found in disk file. " + "AAS p" + n.ToString());
                //    }
                //}
            }
        }
        catch (Exception e)
        {
            NC.App.Opstate.SOH = OperatingState.Trouble;
            ctrllog.TraceException(e, true);
            ctrllog.TraceEvent(LogLevels.Error, 437, "NCC file processing stopped with error: '" + e.Message + "'");
        }
        finally
        {
            NC.App.Loggers.Flush();
        }
        FireEvent(EventType.ActionInProgress, this);
        ComputeFromINCC5SRData(meas);
        FireEvent(EventType.ActionInProgress, this);
    }
    // Operation epilogue shared with the other assay operations.
    NC.App.Opstate.ResetTokens();
    NC.App.Opstate.SOH = OperatingState.Stopping;
    NC.App.Opstate.StampOperationStopTime();
    FireEvent(EventType.ActionFinished, this);
}
/// <summary>
/// Assay from Test data/Disk files (.dat, .cnn). Gathers the candidate files,
/// then treats each file as a separate measurement: parses the good-run count and
/// run count time from the file header, builds a new Measurement from the current
/// acquire state, adds one cycle per run, optionally reads add-a-source (AAS)
/// position data for passive verification measurements, and finally computes the
/// INCC5 shift-register results for the measurement.
/// Progress and state transitions are reported via FireEvent; errors are logged
/// through ctrllog and do not abort processing of the remaining files.
/// </summary>
void TestDataAssay()
{
    List<string> exts = new List<string>() { ".dat", ".cnn" };
    FileList<TestDataFile> hdlr = new FileList<TestDataFile>();
    hdlr.Init(exts, ctrllog);
    FileList<TestDataFile> files = null;
    // initialize operation timer here
    NC.App.Opstate.ResetTimer(filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds);
    FireEvent(EventType.ActionPrep, this);
    NC.App.Opstate.StampOperationStartTime();

    // Build the work list either by scanning a directory or from an explicit file list.
    if (NC.App.AppContext.FileInputList == null)
        files = (FileList<TestDataFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true);
    else
        files = (FileList<TestDataFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList);

    if (files == null || files.Count < 1)
    {
        NC.App.Opstate.StopTimer();
        NC.App.Opstate.StampOperationStopTime();
        FireEvent(EventType.ActionStop, this);
        ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable Test data/Disk .dat or .cnn files found");
        return;
    }

    AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState);
    Detector curdet = NC.App.Opstate.Measurement.Detector;
    AssaySelector.MeasurementOption mo = NC.App.Opstate.Measurement.MeasOption;
    if (curdet.AB.Unset)
    {
        ABKey abkey = new ABKey(curdet.MultiplicityParams, 512); // NEXT: maxbins is arbitrary, just the first of many for VSR
        LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB);
    }

    // Each .dat file is a separate measurement
    foreach (TestDataFile td in files)
    {
        Measurement meas = null;
        ResetMeasurement();
        try
        {
            if (!td.OpenForReading())
                continue;
            if (NC.App.Opstate.IsQuitRequested)
                break;

            uint run_seconds;
            ushort number_good_runs = 0;
            ushort total_number_runs = 0;
            double run_count_time = 0;
            double total_good_count_time = 0;

            if (mo != AssaySelector.MeasurementOption.holdup)
            {
                /* number of good runs */
                string l = td.reader.ReadLine();
                ushort.TryParse(l, out number_good_runs);
                if (number_good_runs == 0)
                {
                    ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles.");
                    continue;
                }
                /* run count time */
                l = td.reader.ReadLine();
                double.TryParse(l, out run_count_time);
                if (run_count_time <= 0.0)
                {
                    ctrllog.TraceEvent(LogLevels.Error, 441, "Count time is <= 0.");
                    continue;
                }
            }

            // update acq and then meas here
            AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, DateTimeOffset.Now, number_good_runs, td.Filename);
            IntegrationHelpers.BuildMeasurement(newacq, curdet, mo);
            meas = NC.App.Opstate.Measurement;
            meas.MeasDate = newacq.MeasDateTime;
            meas.Persist(); // preserve the basic results record

            /* convert to total count time */
            total_number_runs = number_good_runs;
            total_good_count_time = number_good_runs * run_count_time;
            meas.RequestedRepetitions = total_number_runs;

            // update the acq status and do the persist
            /* read in run data */
            for (int i = 0; i < number_good_runs; i++)
            {
                /* run date and time (IAEA format) */
                run_seconds = (uint)(i * (ushort)run_count_time); // from start time
                AddTestDataCycle(i, run_seconds, run_count_time, meas, td);
                if (i % 8 == 0)
                    FireEvent(EventType.ActionInProgress, this);
            }
            FireEvent(EventType.ActionInProgress, this);

            // Passive verification with add-a-source: read the per-position AAS blocks.
            if (meas.MeasOption == AssaySelector.MeasurementOption.verification &&
                meas.INCCAnalysisState.Methods.Has(AnalysisMethod.AddASource) &&
                meas.AcquireState.well_config == WellConfiguration.Passive)
            {
                AddASourceSetup aass = IntegrationHelpers.GetCurrentAASSParams(meas.Detector);
                for (int n = 1; n <= aass.number_positions; n++)
                {
                    /* number of good runs */
                    string l = td.reader.ReadLine();
                    if (td.reader.EndOfStream)
                    {
                        ctrllog.TraceEvent(LogLevels.Error, 440, "No add-a-source data found in disk file. AAS p" + n.ToString());
                    }
                    ushort.TryParse(l, out number_good_runs);
                    if (number_good_runs == 0)
                    {
                        ctrllog.TraceEvent(LogLevels.Error, 440, "This measurement has no good cycles. AAS p" + n.ToString());
                        continue;
                    }
                    /* run count time */
                    l = td.reader.ReadLine();
                    double.TryParse(l, out run_count_time);
                    if (run_count_time <= 0.0)
                    {
                        ctrllog.TraceEvent(LogLevels.Error, 441, "Count time is <= 0. AAS p" + n.ToString());
                        continue;
                    }
                    /* read in run data */
                    for (int i = 0; i < number_good_runs; i++)
                    {
                        /* run date and time (IAEA format) */
                        run_seconds = (uint)((n + 1) * (i + 1) * (ushort)run_count_time); // from start time
                        AddTestDataCycle(i, run_seconds, run_count_time, meas, td, " AAS p" + n.ToString(), n);
                        if (i % 8 == 0)
                            FireEvent(EventType.ActionInProgress, this);
                    }
                    FireEvent(EventType.ActionInProgress, this);
                }
            }
        }
        catch (Exception e)
        {
            NC.App.Opstate.SOH = OperatingState.Trouble;
            ctrllog.TraceException(e, true);
            ctrllog.TraceEvent(LogLevels.Error, 437, "Test data file processing stopped with error: '" + e.Message + "'");
        }
        finally
        {
            td.CloseReader();
            NC.App.Loggers.Flush();
        }

        // FIX: when an exception is thrown before 'meas' is assigned, the catch above
        // swallows it and control reaches this point with meas == null, which previously
        // caused a NullReferenceException in ComputeFromINCC5SRData. Guard against that.
        if (meas != null)
        {
            FireEvent(EventType.ActionInProgress, this);
            ComputeFromINCC5SRData(meas);
            FireEvent(EventType.ActionInProgress, this);
        }
    }

    NC.App.Opstate.ResetTokens();
    NC.App.Opstate.SOH = OperatingState.Stopping;
    NC.App.Opstate.StampOperationStopTime();
    FireEvent(EventType.ActionFinished, this);
}
// for report recalculation
/// <summary>
/// Recomputes the results of an existing measurement from its stored cycles, for report
/// regeneration. Loads the cycles from the database, restores the alpha/beta dead-time
/// coefficient arrays (per-cycle for list-mode detectors, detector-wide otherwise),
/// re-applies the per-cycle deadtime correction and cycle-conditioning steps, and
/// finally recalculates the measurement-level results.
/// </summary>
/// <param name="m">The measurement to recalculate; becomes the app's current measurement.</param>
public static void ReportRecalc(this Measurement m)
{
    NC.App.Opstate.Measurement = m;
    CycleList cl;
    if (m.Detector.ListMode)
    {
        // List-mode: cycles are fetched per analysis-parameter set.
        cl = NC.App.DB.GetCycles(m.Detector, m.MeasurementId, m.AcquireState.data_src, m.AnalysisParams);
        // APluralityOfMultiplicityAnalyzers:
        // URGENT: get all the cycles associated with each analyzer, restoring into the correct key->result pair
        foreach (Cycle c in cl) // restore Alpha Beta coefficients on each mcr on each cycle for later per-cycle DTC rate recalculation
        {
            IEnumerator iter = c.CountingAnalysisResults.GetMultiplicityEnumerator();
            while (iter.MoveNext())
                try
                {
                    // Each enumerator entry pairs a multiplicity analyzer key with its counting result.
                    Multiplicity mkey = (Multiplicity)((KeyValuePair<SpecificCountingAnalyzerParams, object>)(iter.Current)).Key; // clean up this syntax, it's silly
                    MultiplicityCountingRes mcr = (MultiplicityCountingRes)((KeyValuePair<SpecificCountingAnalyzerParams, object>)(iter.Current)).Value;
                    LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(mkey, mcr);
                }
                catch (Exception) { } // deliberately best-effort: skip entries that do not cast to multiplicity results
        }
    }
    else
    {
        // Non-list-mode: one detector-wide alpha/beta array pair is restored.
        cl = NC.App.DB.GetCycles(m.Detector, m.MeasurementId, m.AcquireState.data_src);
        uint maximalmax = 0;
        if (m.Detector.AB.Unset)
        {
            // Size the detector's alpha/beta arrays to the largest bin count seen on any cycle.
            foreach (Cycle c in cl)
                maximalmax = (uint)Math.Max(maximalmax, c.MultiplicityResults(m.Detector.MultiplicityParams).MaxBins);
            ABKey akey = new ABKey(m.Detector.MultiplicityParams, maximalmax); // max of all cycles mcr.MaxBins
            LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(akey, m.Detector.AB);
        }
        ABKey abkey = new ABKey(m.Detector.MultiplicityParams, m.Detector.AB.MaxBins);
        foreach (Cycle c in cl) // restore Alpha Beta coefficients on each cycle
            LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, c.MultiplicityResults(m.Detector.MultiplicityParams).AB);
    }

    // Re-add each stored cycle to a fresh cycle list, applying deadtime correction
    // (when enabled for this measurement option) and the cycle-conditioning steps.
    m.Cycles = new CycleList();
    m.CurrentRepetition = 0;
    foreach (Cycle c in cl)
    {
        m.CurrentRepetition++;
        m.Add(c);
        IEnumerator iter = c.CountingAnalysisResults.GetMultiplicityEnumerator();
        while (iter.MoveNext())
            try
            {
                Multiplicity mkey = (Multiplicity)((KeyValuePair<SpecificCountingAnalyzerParams, object>)(iter.Current)).Key; // clean up this syntax, it's silly
                MultiplicityCountingRes mcr = (MultiplicityCountingRes)((KeyValuePair<SpecificCountingAnalyzerParams, object>)(iter.Current)).Value;
                if (CycleProcessing.ConductStep(CycleProcessing.CycleStep.Deadtime, m.MeasOption))
                    CycleProcessing.DeadtimeCorrection(m, mkey, mcr, did: m.Detector.Id, isCycle: false);
            }
            catch (Exception) { } // best-effort, as above
        CycleProcessing.ApplyTheCycleConditioningSteps(c, m);
        m.CycleStatusTerminationCheck(c);
    }
    m.CalculateMeasurementResults();
}
/// <summary>
/// Assay from .dataz files. Gathers the candidate files, then for each file scans and
/// processes its sections, and turns every defined sequence (Plateau) into a separate
/// measurement: builds the measurement from the acquire state, adds one cycle per
/// repetition, and computes the INCC5 shift-register results.
/// Progress and state transitions are reported via FireEvent; errors are logged through
/// ctrllog and do not abort processing of the remaining files.
/// </summary>
void DatazFileAssay()
{
    List<string> exts = new List<string>() { ".dataz" };
    FileList<DatazFile> hdlr = new FileList<DatazFile>();
    hdlr.Init(exts, ctrllog);
    FileList<DatazFile> files = null;
    // initialize operation timer here
    NC.App.Opstate.ResetTimer(filegather, files, 170, (int)NC.App.AppContext.StatusTimerMilliseconds);
    FireEvent(EventType.ActionPrep, this);
    NC.App.Opstate.StampOperationStartTime();
    // Build the work list either by scanning a directory or from an explicit file list.
    if (NC.App.AppContext.FileInputList == null)
    {
        files = (FileList<DatazFile>)hdlr.BuildFileList(NC.App.AppContext.FileInput, NC.App.AppContext.Recurse, true);
    }
    else
    {
        files = (FileList<DatazFile>)hdlr.BuildFileList(NC.App.AppContext.FileInputList);
    }
    if (files == null || files.Count < 1)
    {
        NC.App.Opstate.StopTimer();
        NC.App.Opstate.StampOperationStopTime();
        FireEvent(EventType.ActionStop, this);
        ctrllog.TraceEvent(LogLevels.Warning, 33085, "No usable Dataz files found");
        return;
    }
    AssaySelector.MeasurementOption mo = NC.App.Opstate.Measurement.MeasOption;
    foreach (DatazFile mc in files)
    {
        Measurement meas = null;
        try
        {
            if (!mc.OpenForReading())
            {
                continue;
            }
            if (NC.App.Opstate.IsQuitRequested)
            {
                break;
            }
            // Parse the file: first locate the sections, then materialize cycles/plateaux.
            mc.ScanSections();
            mc.ProcessSections();
            if (mc.Cycles.Count == 0)
            {
                // NOTE(review): logged but not skipped — the Plateaux check below decides whether processing continues.
                ctrllog.TraceEvent(LogLevels.Error, 404, "This Dataz file has no good cycles.");
            }
            if (mc.Plateaux.Count == 0)
            {
                ctrllog.TraceEvent(LogLevels.Error, 404, $"This Dataz file has no defined sequences, over {mc.Cycles.Count} cycles.");
            }
            else
            {
                AcquireParameters orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState);
                Detector curdet = NC.App.Opstate.Measurement.Detector;
                if (mc.AcquistionStateChanged)
                {
                    // The file carries its own acquisition state/detector; use those instead.
                    orig_acq = new AcquireParameters(NC.App.Opstate.Measurement.AcquireState);
                    curdet = mc.DataZDetector;
                    if (curdet.AB.Unset)
                    {
                        // Initialize alpha/beta coefficient arrays sized to the file's bin count.
                        ABKey abkey = new ABKey(curdet.MultiplicityParams, mc.MaxBins);
                        LMRawAnalysis.SDTMultiplicityCalculator.SetAlphaBeta(abkey, curdet.AB);
                    }
                }
                ctrllog.TraceInformation($"{mc.Cycles.Count} cycles and {mc.Plateaux.Count} sequences encountered in Dataz file {mc.Filename}");
                // Each sequence (Plateau) becomes its own measurement.
                System.Collections.IEnumerator iter = mc.GetSequences();
                while (iter.MoveNext())
                {
                    DatazFile.Plateau pla = (DatazFile.Plateau)iter.Current;
                    ResetMeasurement();
                    // update acq and then meas here
                    AcquireParameters newacq = ConfigureAcquireState(curdet, orig_acq, pla.Cycles[0].DTO, (ushort)pla.Num, mc.Filename);
                    newacq.data_src = ConstructedSource.DatazFile;
                    IntegrationHelpers.BuildMeasurement(newacq, curdet, mo);
                    meas = NC.App.Opstate.Measurement;
                    meas.MeasDate = newacq.MeasDateTime;
                    meas.Persist(); // preserve the basic results record
                    meas.RequestedRepetitions = (ushort)pla.Num;
                    for (int i = 0; i < meas.RequestedRepetitions; i++)
                    {
                        /* run date and time (IAEA format) */
                        AddMCSRDataCycle(i, pla.Cycles[i], meas, mc.Filename);
                        if (i % 8 == 0)
                        {
                            // Periodic progress ping so the UI stays responsive on long sequences.
                            FireEvent(EventType.ActionInProgress, this);
                        }
                    }
                    FireEvent(EventType.ActionInProgress, this);
                    ComputeFromINCC5SRData(meas);
                    FireEvent(EventType.ActionInProgress, this);
                }
            }
        }
        catch (Exception e)
        {
            NC.App.Opstate.SOH = OperatingState.Trouble;
            ctrllog.TraceException(e, true);
            ctrllog.TraceEvent(LogLevels.Error, 437, "Dataz data file processing stopped with error: '" + e.Message + "'");
        }
        finally
        {
            mc.CloseReader();
            NC.App.Loggers.Flush();
        }
    }
    NC.App.Opstate.ResetTokens();
    NC.App.Opstate.SOH = OperatingState.Stopping;
    NC.App.Opstate.StampOperationStopTime();
    FireEvent(EventType.ActionFinished, this);
}
/// <summary>
/// Calc alpha beta, save result in cache.
/// NEXT: extend with Numerics.BigInteger replacing BigFloat, as per above (for when phi is non-zero)
/// </summary>
/// <param name="key">three values needed for alpha beta (max bin count, gatewidth, phi aka multiplicity T)</param>
/// <param name="AB">The alpha beta array as used on detectors and multiplicity counting results</param>
public static void SetAlphaBeta(ABKey key, AlphaBeta AB)
{
    // Fast path: on a cache hit, alias the cached coefficient arrays and return.
    AlphaBeta cached = AlphaBetaCache.GetAlphaBeta(key);
    if (cached != null)
    {
        AB.α = cached.α;
        AB.β = cached.β;
        return;
    }

    if (key.bins != AB.α.Length)
        AB.Resize((int)key.bins + 1);

    AB.α[0] = 0.0;
    AB.β[0] = 0.0;

    if (key.deadTimeCoefficientTinNanoSecs == 0.0)
    {
        // Zero dead time: alpha[i] is just the multiplicity i, beta[i] is i-choose-2.
        for (uint i = 1; i < key.bins; i++)
        {
            double m = i;
            AB.α[i] = m;
            AB.β[i] = m * (m - 1.0) / 2.0;
        }
    }
    else if (key.gateWidthTics != 0.0)
    {
        uint highest = key.bins - 1;
        AB.Init((int)key.bins);
        // With two or fewer bins there is nothing beyond the trivial entries;
        // the (re)initialized arrays are cached as-is.
        if (highest > 1)
        {
            double gateInSeconds = key.gateWidthTics * 1e-7;
            double phi = (key.deadTimeCoefficientTinNanoSecs / 1E9) / gateInSeconds;

            AB.α[0] = 0.0;
            AB.α[1] = 1.0;
            AB.β[0] = 0.0;
            AB.β[1] = 0.0;

            // alpha[n]: starts at 1 and, when phi is non-negligible, accumulates the
            // dead-time correction series terms.
            for (int n = 2; n <= highest; n++)
            {
                AB.α[n] = 1.0;
                if (phi > 1e-20)
                {
                    for (int k = 0; k <= n - 2; k++)
                    {
                        double term = binomialCoefficient(n - 1, k + 1) * Math.Pow((k + 1) * phi, k) / Math.Pow(1.0 - ((k + 1) * phi), k + 2);
                        AB.α[n] += term;
                    }
                }
            }

            // beta[n]: derived from alpha[n] with its own correction series.
            AB.β[0] = 0.0;
            AB.β[1] = 0.0;
            AB.β[2] = AB.α[2] - 1.0;
            for (int n = 3; n <= highest; n++)
            {
                if (phi > 1e-20)
                {
                    AB.β[n] = AB.α[n] - 1.0;
                    for (int k = 0; k <= n - 3; k++)
                    {
                        double term = binomialCoefficient(n - 1, k + 2) * (k + 1) * Math.Pow((k + 2) * phi, k) / Math.Pow(1.0 - ((k + 2) * phi), k + 3);
                        AB.β[n] += term;
                    }
                }
                else
                {
                    AB.β[n] = 0.0;
                }
            }
        }
    }

    AlphaBetaCache.AddAlphaBeta(key, AB);
}