Пример #1
0
        /// <summary>
        /// Processes the next video frame during a spectroscopy measurement run: tracks the
        /// target star, reads its spectrum for this frame and, once the run is complete
        /// (last frame, cancellation, or enough frames measured), combines all per-frame
        /// spectra into the master spectrum and switches the operation to display mode.
        /// </summary>
        /// <param name="frameNo">Number of the frame being processed.</param>
        /// <param name="movementType">Playback movement type (not used by this method).</param>
        /// <param name="isLastFrame">True when this is the final frame of the video.</param>
        /// <param name="astroImage">Pixel data of the current frame.</param>
        /// <param name="firstFrameInIntegrationPeriod">First frame of the integration period (not used by this method).</param>
        /// <param name="fileName">Source file name (not used by this method).</param>
        public void NextFrame(int frameNo, MovementType movementType, bool isLastFrame, AstroImage astroImage, int firstFrameInIntegrationPeriod, string fileName)
        {
            m_CurrentFrameNo = frameNo;

            // Only act while a measurement run is in progress.
            if (m_OperationState != SpectroscopyState.RunningMeasurements)
                return;

            if (m_FirstMeasuredFrame == null)
            {
                // First measured frame: remember its number, timestamp and display bitmap.
                m_FirstMeasuredFrame = m_CurrentFrameNo;

                // Both embedded and system timestamps are read through the same
                // GetCurrentFrameTime() call, so the original duplicated if/else-if
                // branches collapse into one condition.
                if (m_VideoController.HasEmbeddedTimeStamps() || m_VideoController.HasSystemTimeStamps())
                {
                    m_FirstFrameTimeStamp = m_VideoController.GetCurrentFrameTime();
                }

                m_FrameBitmapPixels = astroImage.Pixelmap.DisplayBitmapPixels;
            }

            m_Tracker.NextFrame(frameNo, astroImage);
            if (m_Tracker.IsTrackedSuccessfully)
            {
                TrackedObject trackedStar = m_Tracker.TrackedStar;
                SelectedStar = trackedStar.Center;

                // Read this frame's spectrum along the best-fit dispersion angle.
                m_Reader = new SpectraReader(astroImage, SelectedStarBestAngle, m_SpectroscopyController.SpectraReductionContext.PixelValueCoefficient);

                Spectra thisFrameSpectra = m_Reader.ReadSpectra(
                    trackedStar.ThisFrameX,
                    trackedStar.ThisFrameY,
                    m_SpectroscopyController.SpectraReductionContext.MeasurementAreaWing,
                    m_SpectroscopyController.SpectraReductionContext.BackgroundAreaWing,
                    m_SpectroscopyController.SpectraReductionContext.BackgroundAreaGap,
                    m_SpectroscopyController.SpectraReductionContext.BackgroundMethod);

                // FWHM of the zero-order star image; NaN when no PSF fit is available.
                thisFrameSpectra.ZeroOrderFWHM = trackedStar.PSFFit != null ? (float)trackedStar.PSFFit.FWHM : float.NaN;

                m_AllFramesSpectra.Add(thisFrameSpectra);
            }

            bool measurementsComplete =
                isLastFrame ||
                m_CancelMeasurementsRequested ||
                m_AllFramesSpectra.Count >= m_SpectroscopyController.SpectraReductionContext.FramesToMeasure;

            if (measurementsComplete)
            {
                m_FramePlayer.Stop();

                // Combine all per-frame spectra into the master spectrum.
                m_MasterSpectra = m_SpectroscopyController.ComputeResult(
                    m_AllFramesSpectra,
                    m_SpectroscopyController.SpectraReductionContext.FrameCombineMethod,
                    m_SpectroscopyController.SpectraReductionContext.UseFineAdjustments,
                    m_SpectroscopyController.SpectraReductionContext.AlignmentAbsorptionLinePos);

                m_AllFramesSpectra.Clear();

                m_MasterSpectra.MeasurementInfo = m_SpectroscopyController.GetMeasurementInfo();
                m_MasterSpectra.MeasurementInfo.FirstMeasuredFrame  = m_FirstMeasuredFrame.Value;
                m_MasterSpectra.MeasurementInfo.LastMeasuredFrame   = m_CurrentFrameNo;
                m_MasterSpectra.MeasurementInfo.FirstFrameTimeStamp = m_FirstFrameTimeStamp;

                // Same duplicated-branch collapse as above: both timestamp sources
                // resolve via GetCurrentFrameTime().
                if (m_VideoController.HasEmbeddedTimeStamps() || m_VideoController.HasSystemTimeStamps())
                {
                    m_MasterSpectra.MeasurementInfo.LastFrameTimeStamp = m_VideoController.GetCurrentFrameTime();
                }

                FrameStateData frameStatus = m_VideoController.GetCurrentFrameState();
                m_MasterSpectra.MeasurementInfo.Gain            = frameStatus.Gain;
                m_MasterSpectra.MeasurementInfo.ExposureSeconds = m_SpectroscopyController.SpectraReductionContext.ExposureSeconds;

                m_MasterSpectra.MeasurementInfo.FrameBitmapPixels = m_FrameBitmapPixels;

                m_SpectroscopyController.PopulateMasterSpectraObservationDetails(m_MasterSpectra);

                // Switch to display mode and show the combined result.
                m_OperationState = SpectroscopyState.DisplayingMeasurements;
                m_ControlPanel.MeasurementsFinished();
                DisplaySpectra();
            }

            // NOTE(review): Application.DoEvents() pumps the Windows message queue so the
            // UI stays responsive during playback; it can cause re-entrancy and is kept
            // only for compatibility with the existing behavior.
            Application.DoEvents();
        }
Пример #2
0
        /// <summary>
        /// Form Load handler: seeds the multi-frame measurement UI from the current
        /// measurement context, configures instrumental-delay and integration controls,
        /// and derives an initial UTC timestamp for non-AVI (notably AAV) video formats.
        /// Statement order matters here: SelectedIndex assignments can fire change
        /// handlers, and enable/disable state is layered on top of the seeded values.
        /// </summary>
        private void frmRunMultiFrameMeasurements_Load(object sender, EventArgs e)
        {
            // Seed the combo boxes from the measurement context (enum value -> item index).
            cbxExpectedMotion.SelectedIndex = (int)m_MeasurementContext.MovementExpectation;
            cbxSignalType.SelectedIndex     = (int)m_MeasurementContext.ObjectExposureQuality;
            cbxFrameTimeType.SelectedIndex  = (int)m_MeasurementContext.FrameTimeType;
            nudInstrDelay.SetNUDValue(m_MeasurementContext.InstrumentalDelay);
            cbxInstDelayUnit.SelectedIndex = (int)m_MeasurementContext.InstrumentalDelayUnits;
            nudIntegratedFrames.SetNUDValue(m_MeasurementContext.IntegratedFramesCount);

            // Start with all delay/integration controls enabled; format-specific
            // branches below may disable some of them again.
            nudInstrDelay.Enabled        = true;
            cbxInstDelayUnit.Enabled     = true;
            nudIntegratedFrames.Enabled  = true;
            btnDetectIntegration.Enabled = true;

            // Look up the configured camera model; IndexOf returns -1 when not found.
            cbxInstDelayCamera.SelectedIndex = cbxInstDelayCamera.Items.IndexOf(TangraConfig.Settings.PlateSolve.SelectedCameraModel);
            cbxInstDelayMode.SelectedIndex   = 0;           // Automatic
            if (cbxInstDelayCamera.SelectedIndex == -1)
            {
                // Unknown/unset camera: fall back to the first item and manual delay mode.
                cbxInstDelayCamera.SelectedIndex = 0;               // No camera selected
                cbxInstDelayMode.SelectedIndex   = 1;               // Manual
            }

            m_AavStacking     = false;
            m_AavIntegration  = false;
            m_VideoFileFormat = m_VideoController.GetVideoFileFormat();
            if (m_VideoFileFormat != VideoFileFormat.AVI)
            {
                // Non-AVI formats carry their own frame timing: lock the frame-time type.
                cbxFrameTimeType.SelectedIndex = 0;
                cbxFrameTimeType.Enabled       = false;

                DateTime?timeStamp = m_VideoController.GetCurrentFrameTime();
                if (timeStamp != null && timeStamp != DateTime.MinValue)
                {
                    DateTime timestamp = timeStamp.Value;

                    if (m_VideoFileFormat.IsAAV())
                    {
                        m_NativeFormat = m_VideoController.GetVideoFormat(m_VideoFileFormat);

                        // Compute the first timestamp value
                        if (m_VideoController.HasTimestampOCR())
                        {
                            // OCR gives time-of-day only; combine with the solve context's date.
                            timestamp = m_FieldSolveContext.UtcTime.Date.Add(timestamp.TimeOfDay);
                        }
                        else
                        {
                            // Shift back by half the exposure to approximate mid-exposure time.
                            FrameStateData frameState = m_VideoController.GetCurrentFrameState();
                            timestamp = timestamp.AddMilliseconds(-0.5 * frameState.ExposureInMilliseconds);

                            // NOTE(review): 20 ms (PAL) / 16.68 ms (NTSC) look like one
                            // video field duration offsets — confirm against the AAV
                            // timing specification before relying on this.
                            if (m_NativeFormat == "PAL")
                            {
                                timestamp = timestamp.AddMilliseconds(20);
                            }
                            else if (m_NativeFormat == "NTSC")
                            {
                                timestamp = timestamp.AddMilliseconds(16.68);
                            }
                        }
                    }

                    ucUtcTimePicker.DateTimeUtc = timestamp;
                    ucUtcTimePicker.Enabled     = false;                 // The video has embedded timestamp so the users should not enter times manually
                }

                if (m_VideoFileFormat.IsAAV())
                {
                    // Integration count comes from the file itself for AAV; no manual entry.
                    nudIntegratedFrames.Enabled = false;

                    if (m_VideoController.AstroAnalogueVideoStackedFrameRate > 0)
                    {
                        // AAV file with frame stacking.
                        cbxFrameTimeType.Items[0]    = "AAV stacked frame";
                        pnlIntegration.Visible       = true;
                        btnDetectIntegration.Visible = false;
                        lblFrames.Text            = string.Format("frames (stacking: {0} frames)", m_VideoController.AstroAnalogueVideoStackedFrameRate);
                        m_AavStacking             = true;
                        nudIntegratedFrames.Value = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
                    }
                    else if (m_VideoController.AstroAnalogueVideoIntegratedAAVFrames > 0)
                    {
                        // AAV file with frame integration (no stacking).
                        cbxFrameTimeType.Items[0]    = "AAV integrated frame";
                        pnlIntegration.Visible       = true;
                        btnDetectIntegration.Visible = false;
                        nudIntegratedFrames.Value    = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
                        m_AavIntegration             = true;
                    }
                }
            }
            else
            {
                // This will be set in the exported CSV file and will then be used to identify repeated measurements from integrated AAV files
                m_NativeFormat = "AVI-Integrated";
            }

            activePage = 0;
            DisplayActivePage();

            // Allow time corrections to be set automatically even when the integration is not recognized ("use suggested value" button) or something
        }