Example #1
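A DAO method that loads every measurement stored for an experiment, resolves the matching beginning and end samples, sets the result, and attaches the full sample list with Measurement.Add.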
        public List <Measurement> GetAll(Experiment e)
        {
            var columns = new List <string> {
                "ID", "result", "beginning", "end"
            };
            var data = Db.GetAll("Measurements", "experiment", e.Id.ToString(), columns);

            var res = new List <Measurement>();

            foreach (var row in data)
            {
                var m = new Measurement(Convert.ToInt32(row["ID"]));

                var samples = _samplesDAO.GetAll(m, e);

                var bs        = samples.Where(s => s.Id == Convert.ToInt32(row["beginning"])).ToList();
                var es        = samples.Where(s => s.Id == Convert.ToInt32(row["end"])).ToList();
                var beginning = bs.Any() ? bs[0] : null;
                var end       = es.Any() ? es[0] : null;
                var result    = Convert.ToDouble(row["result"]);

                m.Beginning = beginning;
                m.End       = end;
                m.Result    = result;
                m.Add(samples);

                res.Add(m);
            }

            return(res);
        }
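Example #2
A test fixture that creates three measurements against an in-memory database and populates each one with its samples via Measurement.Add.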
        public void SetUp()
        {
            db             = new InMemoryDatabase();
            measurementDAO = new Measurements(db);

            m1 = new Measurement(1)
            {
                Result = 3.14, Beginning = s1, End = s2
            };
            m2 = new Measurement(2)
            {
                Result = 6.28, Beginning = s4, End = s6
            };
            m3 = new Measurement(3)
            {
                Result = 0, Beginning = s7, End = s8
            };

            m1.Add(new List <Sample> {
                s1, s2, s3
            });
            m2.Add(new List <Sample> {
                s4, s5, s6
            });
            m3.Add(new List <Sample> {
                s7, s8
            });
        }
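A minimal usage sketch, assuming the fixture above has been saved to db under a hypothetical Experiment e (the Measurements DAO and GetAll come from Examples #1 and #2; everything else is illustrative):

        // Hypothetical follow-up to the fixture above: reload the measurements
        // stored for experiment e (a hypothetical Experiment the fixture was
        // saved under) and print what was reconstructed.
        List<Measurement> loaded = measurementDAO.GetAll(e);
        foreach (Measurement m in loaded)
        {
            Console.WriteLine($"result={m.Result} begin={m.Beginning?.Id} end={m.End?.Id}");
        }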
Example #3
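A dialog handler that lets the user pick an earlier verification measurement for reanalysis, reloads its cycles from the database, and attaches them to the measurement with Measurement.Add before opening the reanalysis assay dialog.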
        public void ShowOther()
        {
            Detector           det      = Integ.GetCurrentAcquireDetector();
            IDDMeasurementList measlist = new IDDMeasurementList();

            measlist.Init(mlist,
                          AssaySelector.MeasurementOption.verification,
                          goal: IDDMeasurementList.EndGoal.Reanalysis, lmonly: false, inspnum: "All", detector: det);
            if (measlist.bGood)
            {
                DialogResult = measlist.ShowDialog();
                if (DialogResult == DialogResult.OK)
                {
                    Measurement m = measlist.GetSingleSelectedMeas();
                    if (m == null)
                    {
                        return;
                    }
                    det = m.Detector;                                                                                             // use detector as re-constructed from the original measurement
                    // get the cycles for the selected measurement from the database, and add them to the current measurement
                    CycleList cl = N.App.DB.GetCycles(det, m.MeasurementId, DetectorDefs.ConstructedSource.DB, m.AnalysisParams); // APluralityOfMultiplicityAnalyzers: // URGENT: get all the cycles associated with each analyzer, restoring into the correct key->result pair
                    m.Add(cl);
                    new IDDReanalysisAssay(m, det).ShowDialog();
                }
            }
            else
            {
                DialogResult = DialogResult.None;
            }
        }
Example #4
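Builds a Cycle from an INCC review-file run record, adds it to the measurement with Measurement.Add, and fills in the multiplicity counting results: totals, scalers, singles rate, and the reals-plus-accidentals and accidentals bins.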
        unsafe void AddReviewFileCycle(int i, run_rec_ext run, INCCReviewFile.run_rec_ext_plus rrep, Measurement meas, string fn)
        {
            Cycle cycle = new Cycle(datalog);

            try
            {
                cycle.UpdateDataSourceId(ConstructedSource.ReviewFile, meas.Detector.Id.SRType,
                                         rrep.dt, fn);
                cycle.seq = (run.run_number > 0 ? run.run_number : i); // INCC run record sequence numbers start at 1
                cycle.TS  = TimeSpan.FromSeconds(run.run_count_time);

                /* init run tests */
                cycle.SetQCStatus(meas.Detector.MultiplicityParams, QCTestStatus.Pass, run.run_high_voltage); // APluralityOfMultiplicityAnalyzers: creates entry if not found, expand from the single mult key from detector here
                meas.Add(cycle);
                /* singles, reals + accidentals, accidentals */
                cycle.Totals = (ulong)run.run_singles;
                MultiplicityCountingRes mcr = new MultiplicityCountingRes(meas.Detector.MultiplicityParams.FA, cycle.seq); // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
                cycle.CountingAnalysisResults.Add(meas.Detector.MultiplicityParams, mcr);                                  // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
                mcr.AB.TransferIntermediates(meas.Detector.AB);                                                            // copy alpha beta onto the cycle's results
                mcr.Totals        = cycle.Totals;
                mcr.TS            = cycle.TS;
                mcr.ASum          = run.run_acc;
                mcr.RASum         = run.run_reals_plus_acc;
                mcr.Scaler1.v     = run.run_scaler1;
                mcr.Scaler2.v     = run.run_scaler2;
                cycle.SinglesRate = run.run_singles / run.run_count_time;

                // assign the hits to a single channel (0)
                cycle.HitsPerChannel[0] = cycle.Totals;

                mcr.RawSinglesRate.v = cycle.SinglesRate;

                // now back-compute the actual limits of the bins
                for (int n = rrep.n_mult - 1; n >= 0; n--)
                {
                    if ((run.run_mult_reals_plus_acc[n] > 0.0) || (run.run_mult_acc[n] > 0.0))
                    {
                        mcr.MinBins = mcr.MaxBins = (ulong)(n + 1);
                        break;
                    }
                }
                mcr.RAMult      = new ulong[mcr.MaxBins];
                mcr.NormedAMult = new ulong[mcr.MaxBins];
                mcr.UnAMult     = new ulong[mcr.MaxBins]; // todo: compute this

                // copy the bin values, if any
                for (ushort j = 0; j < mcr.MaxBins; j++)
                {
                    mcr.RAMult[j]      = (ulong)run.run_mult_reals_plus_acc[j];
                    mcr.NormedAMult[j] = (ulong)run.run_mult_acc[j];
                }
                ctrllog.TraceEvent(LogLevels.Verbose, 5439, "Cycle " + cycle.seq.ToString() + (rrep.n_mult > 0 ? " n_:" + rrep.n_mult.ToString() + " max:" + mcr.MaxBins.ToString() : " *"));
            }
            catch (Exception e)
            {
                ctrllog.TraceEvent(LogLevels.Warning, 33085, "Cycle processing error {0} {1}", run, e.Message);
            }
        }
Example #5
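The same pattern for a DatazFile cycle: the cycle is added to the measurement and its multiplicity counting results are populated from the file's singles, accidentals, and multiplicity bin arrays.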
        void AddMCSRDataCycle(int run, DatazFile.Cycle c, Measurement meas, string fname)
        {
            Cycle cycle = new Cycle(datalog);

            try
            {
                cycle.UpdateDataSourceId(ConstructedSource.DatazFile, meas.Detector.Id.SRType, c.DTO, fname);
                cycle.seq = run;
                cycle.TS  = c.Duration;
                /* init run tests */
                cycle.SetQCStatus(meas.Detector.MultiplicityParams, QCTestStatus.None); // APluralityOfMultiplicityAnalyzers: creates entry if not found, expand from the single mult key from detector here
                meas.Add(cycle);
                /* singles, reals + accidentals, accidentals */
                cycle.Totals = c.Singles;
                MultiplicityCountingRes mcr = new MultiplicityCountingRes(meas.Detector.MultiplicityParams.FA, cycle.seq); // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
                cycle.CountingAnalysisResults.Add(meas.Detector.MultiplicityParams, mcr);                                  // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
                mcr.AB.TransferIntermediates(meas.Detector.AB);                                                            // copy alpha beta onto the cycle's results
                mcr.Totals        = cycle.Totals;
                mcr.TS            = cycle.TS;
                mcr.ASum          = c.Accidentals;
                mcr.RASum         = c.RealsPlusAccidentals;
                mcr.Scaler1.v     = 0;
                mcr.Scaler2.v     = 0;
                cycle.SinglesRate = cycle.Totals / c.Duration.TotalSeconds;

                // assign the hits to a single channel (0)
                cycle.HitsPerChannel[0] = cycle.Totals;

                mcr.RawSinglesRate.v = cycle.SinglesRate;

                /* number of multiplicity values */
                mcr.MinBins     = mcr.MaxBins = (ulong)c.BinLen;
                mcr.RAMult      = new ulong[c.BinLen];
                mcr.NormedAMult = new ulong[c.BinLen];
                mcr.UnAMult     = new ulong[c.BinLen]; // not used, not LM
                /* multiplicity values */
                for (ushort j = 0; j < c.BinLen; j++)
                {
                    mcr.RAMult[j]      = c.MultBins[j];
                    mcr.NormedAMult[j] = c.MultAccBins[j];
                }
                ctrllog.TraceEvent(LogLevels.Verbose, 5439, "Cycle " + cycle.seq.ToString() + ((mcr.RAMult[0] + mcr.NormedAMult[0]) > 0 ? " max:" + mcr.MaxBins.ToString() : " *"));
            }
            catch (Exception e)
            {
                ctrllog.TraceEvent(LogLevels.Warning, 33085, "cycle processing error {0} {1}", run, e.Message);
            }
        }
Example #6
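Parses one cycle from a plain-text test data file, adds it to the measurement, and reads the singles/scaler line and the multiplicity bins into the counting results.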
        void AddTestDataCycle(int run, uint run_seconds, double run_count_time, Measurement meas, TestDataFile td, string pivot = "", int cfindex = -1)
        {
            Cycle cycle = new Cycle(datalog);

            try
            {
                cycle.UpdateDataSourceId(ConstructedSource.CycleFile, meas.Detector.Id.SRType,
                                         td.DTO.AddSeconds(run_seconds), td.Filename);
                cycle.seq = run;
                cycle.TS  = TimeSpan.FromSeconds(run_count_time); // dev note: check if this is always only in seconds, or fractions of a second
                                                                  // hn -- 9/4/14 -- not integer for count time.  Convert from double seconds here.
                                                                  // Joe still has force to int.  bleck!

                /* init run tests */
                cycle.SetQCStatus(meas.Detector.MultiplicityParams, QCTestStatus.None); // APluralityOfMultiplicityAnalyzers: creates entry if not found, expand from the single mult key from detector here
                meas.Add(cycle, cfindex);
                /* singles, reals + accidentals, accidentals */
                string   l     = td.reader.ReadLine();
                string[] zorks = l.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
                double[] v     = new double[5];
                for (int z = 0; z < 5; z++)
                {
                    double d;
                    bool   b = double.TryParse(zorks[z], out d);
                    if (b)
                    {
                        v[z] = d;
                    }
                }
                cycle.Totals = (ulong)v[0];
                MultiplicityCountingRes mcr = new MultiplicityCountingRes(meas.Detector.MultiplicityParams.FA, cycle.seq); // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
                cycle.CountingAnalysisResults.Add(meas.Detector.MultiplicityParams, mcr);                                  // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
                mcr.AB.TransferIntermediates(meas.Detector.AB);                                                            // copy alpha beta onto the cycle's results
                mcr.Totals        = cycle.Totals;
                mcr.TS            = cycle.TS;
                mcr.ASum          = v[4];
                mcr.RASum         = v[3];
                mcr.Scaler1.v     = v[1];
                mcr.Scaler2.v     = v[2];
                cycle.SinglesRate = v[0] / run_count_time;

                // assign the hits to a single channel (0)
                cycle.HitsPerChannel[0] = cycle.Totals;

                mcr.RawSinglesRate.v = cycle.SinglesRate;

                /* number of multiplicity values */
                string mv = td.reader.ReadLine();
                ushort k  = 0;
                ushort.TryParse(mv, out k);
                if (k == 0)  // test data files require an entry with 1 bin set 0s for the absence of multiplicity, go figure
                {
                    ctrllog.TraceEvent(LogLevels.Error, 440, "This" + pivot + " cycle " + run.ToString() + " has no good multiplicity data.");
                    return;
                }
                mcr.MinBins     = mcr.MaxBins = k;
                mcr.RAMult      = new ulong[k];
                mcr.NormedAMult = new ulong[k];
                mcr.UnAMult     = new ulong[k]; // todo: compute this
                /* multiplicity values */
                for (ushort j = 0; j < k; j++)
                {
                    string   ra     = td.reader.ReadLine();
                    string[] blorks = ra.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
                    double[] ve     = new double[2];
                    for (int z = 0; z < 2; z++)
                    {
                        double d;
                        bool   b = double.TryParse(blorks[z], out d);
                        if (b)
                        {
                            ve[z] = d;
                        }
                    }
                    mcr.RAMult[j]      = (ulong)ve[0];
                    mcr.NormedAMult[j] = (ulong)ve[1];
                }
                ctrllog.TraceEvent(LogLevels.Verbose, 5439, "Cycle " + cycle.seq.ToString() + pivot + ((mcr.RAMult[0] + mcr.NormedAMult[0]) > 0 ? " max:" + mcr.MaxBins.ToString() : " *"));
            }
            catch (Exception e)
            {
                ctrllog.TraceEvent(LogLevels.Warning, 33085, pivot + "cycle processing error {0} {1} {2}", run, e.Message, pivot);
            }
        }
Example #7
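A variant of Example #4 from a build where the measurement exposes a detector list, so the multiplicity parameters are keyed off meas.Detectors[0] instead of meas.Detector.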
        unsafe void AddReviewFileCycle(int i, run_rec_ext run, INCCReviewFile.run_rec_ext_plus rrep, Measurement meas, string fn)
        {
            Cycle cycle = new Cycle(datalog);
            try
            {
                cycle.UpdateDataSourceId(ConstructedSource.ReviewFile, meas.Detectors[0].Id.SRType,
                        rrep.dt, fn);
                cycle.seq = (run.run_number > 0 ? run.run_number : i); // INCC run record sequence numbers start at 1
                cycle.TS = TimeSpan.FromSeconds(run.run_count_time);

                /* init run tests */
                cycle.SetQCStatus(meas.Detectors[0].MultiplicityParams, QCTestStatus.Pass, run.run_high_voltage); // multmult creates entry if not found
                meas.Add(cycle);
                /* singles, reals + accidentals, accidentals */
                cycle.Totals = (ulong)run.run_singles;
                MultiplicityCountingRes mcr = new MultiplicityCountingRes(meas.Detectors[0].MultiplicityParams.FA, cycle.seq); // multmult
                cycle.CountingAnalysisResults.Add(meas.Detectors[0].MultiplicityParams, mcr); // multmult
                mcr.AB.TransferIntermediates(meas.Detectors[0].AB);  // copy alpha beta onto the cycle's results 
                mcr.Totals = cycle.Totals;
                mcr.TS = cycle.TS;
                mcr.ASum = run.run_acc;
                mcr.RASum = run.run_reals_plus_acc;
                mcr.Scaler1.v = run.run_scaler1;
                mcr.Scaler2.v = run.run_scaler2;
                cycle.SinglesRate = run.run_singles / run.run_count_time;

                // assign the hits to a single channel (0)
                cycle.HitsPerChannel[0] = cycle.Totals;

                mcr.RawSinglesRate.v = cycle.SinglesRate;

                // now back-compute the actual limits of the bins
                for (int n = rrep.n_mult - 1; n >= 0; n--)
                {
                    if ((run.run_mult_reals_plus_acc[n] > 0.0) || (run.run_mult_acc[n] > 0.0))
                    {
                        mcr.MinBins = mcr.MaxBins = (ulong)(n + 1);
                        break;
                    }
                }
                mcr.RAMult = new ulong[mcr.MaxBins];
                mcr.NormedAMult = new ulong[mcr.MaxBins];
                mcr.UnAMult = new ulong[mcr.MaxBins]; // todo: compute this

                // copy the bin values, if any
                for (UInt16 j = 0; j < mcr.MaxBins; j++)
                {
                    mcr.RAMult[j] = (ulong)run.run_mult_reals_plus_acc[j];
                    mcr.NormedAMult[j] = (ulong)run.run_mult_acc[j];
                }
                ctrllog.TraceEvent(LogLevels.Verbose, 5439, "Cycle " + cycle.seq.ToString() + (rrep.n_mult > 0 ? " n_:" + rrep.n_mult.ToString() + " max:" + mcr.MaxBins.ToString() : " *"));
            }
            catch (Exception e)
            {
                ctrllog.TraceEvent(LogLevels.Warning, 33085, "Cycle processing error {0} {1}", run, e.Message);
            }
        }
Example #8
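The corresponding detector-list variant of Example #6, again keying the counting results off meas.Detectors[0].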
        void AddTestDataCycle(int run, uint run_seconds, double run_count_time, Measurement meas, TestDataFile td, string pivot = "", int cfindex = -1)
        {
            Cycle cycle = new Cycle(datalog);
            try
            {
                cycle.UpdateDataSourceId(ConstructedSource.CycleFile, meas.Detectors[0].Id.SRType,
                        td.DTO.AddSeconds(run_seconds), td.Filename);
                cycle.seq = run;
                cycle.TS = TimeSpan.FromSeconds(run_count_time);  // dev note: check if this is always only in seconds, or fractions of a second
                                                                  // hn -- 9/4/14 -- not integer for count time.  Convert from double seconds here.
                                                                  // Joe still has force to int.  bleck!

                /* init run tests */
                cycle.SetQCStatus(meas.Detectors[0].MultiplicityParams, QCTestStatus.None); // multmult creates entry if not found
                meas.Add(cycle, cfindex);
                /* singles, reals + accidentals, accidentals */
                string l = td.reader.ReadLine();
                string[] zorks = l.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
                double[] v = new double[5];
                for (int z = 0; z < 5; z++)
                {
                    double d;
                    bool b = Double.TryParse(zorks[z], out d);
                    if (b)
                        v[z] = d;
                }
                cycle.Totals = (ulong)v[0];
                MultiplicityCountingRes mcr = new MultiplicityCountingRes(meas.Detectors[0].MultiplicityParams.FA, cycle.seq); // multmult
                cycle.CountingAnalysisResults.Add(meas.Detectors[0].MultiplicityParams, mcr);  // multmult
                mcr.AB.TransferIntermediates(meas.Detectors[0].AB);  // copy alpha beta onto the cycle's results 
                mcr.Totals = cycle.Totals;
                mcr.TS = cycle.TS;
                mcr.ASum = v[4];
                mcr.RASum = v[3];
                mcr.Scaler1.v = v[1];
                mcr.Scaler2.v = v[2];
                cycle.SinglesRate = v[0] / run_count_time;

                // assign the hits to a single channel (0)
                cycle.HitsPerChannel[0] = cycle.Totals;

                mcr.RawSinglesRate.v = cycle.SinglesRate;

                /* number of multiplicity values */
                string mv = td.reader.ReadLine();
                UInt16 k = 0;
                UInt16.TryParse(mv, out k);
                if (k == 0)  // test data files require an entry with 1 bin set 0s for the absence of multiplicity, go figure
                {
                    ctrllog.TraceEvent(LogLevels.Error, 440, "This" + pivot + " cycle " + run.ToString() + " has no good multiplicity data.");
                    return;
                }
                mcr.MinBins = mcr.MaxBins = k;
                mcr.RAMult = new ulong[k];
                mcr.NormedAMult = new ulong[k];
                mcr.UnAMult = new ulong[k]; // todo: compute this
                /* multiplicity values */
                for (UInt16 j = 0; j < k; j++)
                {
                    string ra = td.reader.ReadLine();
                    string[] blorks = ra.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
                    double[] ve = new double[2];
                    for (int z = 0; z < 2; z++)
                    {
                        double d;
                        bool b = Double.TryParse(blorks[z], out d);
                        if (b)
                            ve[z] = d;
                    }
                    mcr.RAMult[j] = (ulong)ve[0];
                    mcr.NormedAMult[j] = (ulong)ve[1];
                }
                ctrllog.TraceEvent(LogLevels.Verbose, 5439, "Cycle " + cycle.seq.ToString() + pivot + ((mcr.RAMult[0] + mcr.NormedAMult[0]) > 0 ? " max:" + mcr.MaxBins.ToString() : " *"));

            }
            catch (Exception e)
            {
                ctrllog.TraceEvent(LogLevels.Warning, 33085, pivot + "cycle processing error {0} {1} {2}", run, e.Message, pivot);
            }
        }
Example #9
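Here Measurement is a benchmark accumulator rather than an assay: each Reed-Solomon encode and parity-check timing is folded into a running average with Measurement.Add, then reported as a summary line and a CSV row.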
        internal void Run()
        {
            Console.WriteLine("preparing...");
            BufferSet[] bufferSets = new BufferSet [NUMBER_OF_BUFFER_SETS];

            for (int iBufferSet = 0; iBufferSet < NUMBER_OF_BUFFER_SETS; iBufferSet++)
            {
                bufferSets[iBufferSet] = new BufferSet();
            }

            byte[] tempBuffer = new byte [BUFFER_SIZE];

            List <string> summaryLines = new List <string>();
            var           csv          = new StringBuilder();

            csv.Append("Outer,Middle,Inner,Multiply,Encode,Check\n");

            foreach (ICodingLoop codingLoop in CodingLoopBase.ALL_CODING_LOOPS)
            {
                var encodeAverage = new Measurement();

                {
                    string testName = codingLoop.GetType().Name + " encodeParity";
                    Console.WriteLine("\nTEST: " + testName);
                    var codec = new ReedSolomon(DATA_COUNT, PARITY_COUNT, codingLoop);
                    Console.WriteLine("    warm up...");
                    DoOneEncodeMeasurement(codec, bufferSets);
                    DoOneEncodeMeasurement(codec, bufferSets);
                    Console.WriteLine("    testing...");

                    for (int iMeasurement = 0; iMeasurement < 10; iMeasurement++)
                    {
                        encodeAverage.Add(DoOneEncodeMeasurement(codec, bufferSets));
                    }

                    Console.WriteLine("\nAVERAGE: {0}", encodeAverage);
                    summaryLines.Add($"    {testName,-45} {encodeAverage}");
                }

                // The encoding test should have filled all of the buffers with
                // correct parity, so we can benchmark parity checking.
                var checkAverage = new Measurement();

                {
                    string testName = codingLoop.GetType().Name + " isParityCorrect";
                    Console.WriteLine("\nTEST: " + testName);
                    var codec = new ReedSolomon(DATA_COUNT, PARITY_COUNT, codingLoop);
                    Console.WriteLine("    warm up...");
                    DoOneEncodeMeasurement(codec, bufferSets);
                    DoOneEncodeMeasurement(codec, bufferSets);
                    Console.WriteLine("    testing...");

                    for (int iMeasurement = 0; iMeasurement < 10; iMeasurement++)
                    {
                        checkAverage.Add(DoOneCheckMeasurement(codec, bufferSets, tempBuffer));
                    }

                    Console.WriteLine("\nAVERAGE: {0}", checkAverage);
                    summaryLines.Add($"    {testName,-45} {checkAverage}");
                }

                csv.Append(CodingLoopNameToCsvPrefix(codingLoop.GetType().Name));
                csv.Append(encodeAverage.GetRate());
                csv.Append(",");
                csv.Append(checkAverage.GetRate());
                csv.Append("\n");
            }

            Console.WriteLine("\n");
            Console.WriteLine(csv.ToString());

            Console.WriteLine("\nSummary:\n");

            foreach (string line in summaryLines)
            {
                Console.WriteLine(line);
            }
        }
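The Measurement type used above is not the assay class from the other examples. A minimal sketch of an accumulator consistent with the calls made here (hypothetical, not the benchmark's actual implementation):

        // Hypothetical accumulator matching the usage above: a parameterless
        // constructor, Add(Measurement), GetRate() for the CSV column, and
        // ToString() for the console/summary output.
        public class Measurement
        {
            private double rateSum;
            private int    count;

            public Measurement() { }
            public Measurement(double rate) { rateSum = rate; count = 1; }

            public void Add(Measurement other)
            {
                rateSum += other.rateSum;
                count   += other.count;
            }

            public double GetRate() => count == 0 ? 0.0 : rateSum / count;

            public override string ToString() => GetRate().ToString("F1");
        }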
Example #10
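A high-voltage calibration step for an MCA527 device: it creates a single live cycle, adds it to the measurement, runs a one-repetition assay for the requested duration, and copies the resulting counts into the HV status record.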
        /// <summary>
        /// Performs a high voltage calibration operation.
        /// </summary>
        /// <param name="voltage">The voltage to set in volts.</param>
        /// <param name="duration">The length of the measurement to take.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> to monitor for cancellation requests.</param>
        /// <exception cref="OperationCanceledException">Cancellation was requested.</exception>
        /// <exception cref="MCADeviceLostConnectionException">An error occurred communicating with the device.</exception>
        private void PerformHVCalibration(Measurement measurement, int voltage, TimeSpan duration, CancellationToken cancellationToken)
        {
            Thread.CurrentThread.CurrentCulture = System.Globalization.CultureInfo.InvariantCulture;
            try
            {
                m_logger.TraceEvent(LogLevels.Info, 0, "MCA527[{0}]: Started HV calibration", DeviceName);
                m_logger.Flush();
                cancellationToken.ThrowIfCancellationRequested();

                uint x = SetVoltage((ushort)voltage, MaxSetVoltageTime, cancellationToken);

                cancellationToken.ThrowIfCancellationRequested();

                HVControl.HVStatus status = new HVControl.HVStatus();
                status.HVread  = (int)m_device.GetHighVoltage();
                status.HVsetpt = voltage;

                /// begin TakeMeasurement
                measurement.Cycles.Clear();
                Cycle cycle = new Cycle(m_logger);
                cycle.UpdateDataSourceId(DetectorDefs.ConstructedSource.Live, DetectorDefs.InstrType.MCA527, DateTimeOffset.Now, string.Empty);
                measurement.Add(cycle);
                RDT.StartCycle(cycle);
                cycle.ExpectedTS = new TimeSpan(duration.Ticks);                      // expected is that requested for HV, not acquire
                ((MCA527ProcessingState)(RDT.State)).writingFile = false;             // never for HV
                measurement.AnalysisParams = new CountingAnalysisParameters();
                measurement.AnalysisParams.Add(new BaseRate());                       // prep for a single cycle
                measurement.CountTimeInSeconds   = duration.TotalSeconds;             // the requested count time
                measurement.RequestedRepetitions = measurement.CurrentRepetition = 1; // 1 rep, always set to complete
                measurement.InitializeResultsSummarizers();                           // reset data structures
                RDT.SetupCountingAnalyzerHandler(NC.App.Config, DetectorDefs.ConstructedSourceExtensions.TimeBase(DetectorDefs.ConstructedSource.Live, DetectorDefs.InstrType.MCA527));
                RDT.PrepareAndStartCountingAnalyzers(measurement.AnalysisParams);     // 1 rate counter
                m_setvoltage = false;                                                 // override DB settings here
                PerformAssay(measurement, new MeasTrackParams()
                {
                    seq = 1, interval = duration.TotalSeconds
                }, cancellationToken);
                /// end TakeMeasurement

                for (int i = 0; i < 1; i++)
                {
                    status.counts[i] = (ulong)cycle.HitsPerChannel[i];
                }

                if (m_cancellationTokenSource != null)
                {
                    lock (m_monitor)
                    {
                        m_cancellationTokenSource.Dispose();
                        m_cancellationTokenSource = null;
                    }
                }

                DAQControl.gControl.AppendHVCalibration(status);
                DAQControl.gControl.StepHVCalibration();
            }
            catch (OperationCanceledException)
            {
                m_logger.TraceEvent(LogLevels.Info, 0, "MCA527[{0}]: Stopped HV calibration", DeviceName);
                m_logger.Flush();
                DAQControl.gControl.MajorOperationCompleted();                  // causes pending control thread caller to move forward
                PendingComplete();
                //throw;
            }
            catch (Exception ex)
            {
                m_logger.TraceEvent(LogLevels.Error, 0, "MCA527[{0}]: Error during HV calibration: {1}", DeviceName, ex.Message);
                m_logger.TraceException(ex, true);
                m_logger.Flush();
                DAQControl.gControl.MajorOperationCompleted();                  // causes pending control thread caller to move forward
                PendingComplete();
                //throw;
            }
        }
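Example #11
A larger test fixture that builds two experiments with their parameter rows and three sample-filled measurements (via Measurement.Add), then assigns the measurements to the experiments.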
        public void SetUp()
        {
            db             = new InMemoryDatabase();
            experimentsDAO = new Experiments(db);
            e = new Experiment("e")
            {
                Description = "d",
                Result      = 3.14,
                Goal        = "Sky",
                Summary     = "",
                Parameters  = new Dictionary <string, string>
                {
                    { "a", "a" },
                    { "b", "b" }
                }
            };
            e2 = new Experiment("e2")
            {
                Description = "d",
                Result      = 3.14,
                Goal        = "Sky",
                Summary     = "",
                Parameters  = new Dictionary <string, string>
                {
                    { "c", "c" },
                    { "d", "d" }
                }
            };
            d = new Dictionary <string, string>
            {
                { "ID", "1" },
                { "name", e.Name },
                { "description", e.Description },
                { "goal", e.Goal },
                { "result", e.Result.ToString() },
                { "summary", e.Summary }
            };
            var p1 = new Dictionary <string, string>
            {
                { "experiment", e.Id.ToString() },
                { "name", "a" },
                { "value", "a" }
            };
            var p2 = new Dictionary <string, string>
            {
                { "experiment", e.Id.ToString() },
                { "name", "b" },
                { "value", "b" }
            };

            m1 = new Measurement(1)
            {
                Result = 3.14, Beginning = s1, End = s2
            };
            m2 = new Measurement(2)
            {
                Result = 6.28, Beginning = s4, End = s6
            };
            m3 = new Measurement(3)
            {
                Result = 0, Beginning = s7, End = s8
            };

            m1.Add(new List <Sample> {
                s1, s2, s3
            });
            m2.Add(new List <Sample> {
                s4, s5, s6
            });
            m3.Add(new List <Sample> {
                s7, s8
            });

            p = new List <Dictionary <string, string> > {
                p1, p2
            };
            e.AddMeasurements(new List <Measurement> {
                m1, m2
            });
            e2.AddMeasurements(new List <Measurement> {
                m3
            });
        }