/// <summary>
/// Drives the per-frame light-curve state machine: depending on the current mode it
/// either refines the tracking, measures the tracked objects, or synchronises the
/// light-curve viewer with the displayed frame.
/// </summary>
/// <param name="frameNo">Zero-based number of the frame being presented.</param>
/// <param name="movementType">How the player arrived at this frame (step/jump/play). Not used directly here.</param>
/// <param name="isLastFrame">True when this is the final frame of the video; triggers end-of-file handling.</param>
/// <param name="astroImage">The pixel data of the current frame.</param>
/// <param name="firstFrameInIntegrationPeriod">First frame number of the current integration 'step' (stepped averaging).</param>
/// <param name="fileName">Name of the file the frame came from.</param>
public void NextFrame(int frameNo, MovementType movementType, bool isLastFrame, AstroImage astroImage, int firstFrameInIntegrationPeriod, string fileName)
        {
            // Remember which frame we were on before this call so that state tied to the
            // previous frame can be invalidated further down.
            int prevFrameNo = m_CurrFrameNo;

            m_AstroImage = astroImage;
            m_CurrFrameNo = frameNo;
            m_CurrFileName = fileName;

            TangraContext.Current.CrashReportInfo.FrameNumber = frameNo;

            if (m_Correcting)
                // Do not track or process the frame while correcting the tracking
                return;

            if (m_Configuring)
            {
                // Invalidate frame-specific configuration state when we have moved to a new frame.
                // NOTE(review): this previously compared m_CurrFrameNo (already reassigned to
                // frameNo above) against frameNo, which could never be true, so the stacked
                // image was never cleared. Compare against the frame we were on before.
                if (prevFrameNo != frameNo) m_StackedAstroImage = null;
                if (frameNo != m_StateMachine.SelectedObjectFrameNo) m_StateMachine.SelectedObject = null;
            }

            if (m_Refining || m_Measuring)
            {
                if (m_VideoController.IsAstroAnalogueVideo && frameNo == 0)
                {
                    // Do not attempt refining on the first AAV frame as it has unique dynamic range and the refining is not going to work at all.
                }
                else
                {
                    m_Tracker.NextFrame(frameNo, astroImage);

                    if (!m_Tracker.IsTrackedSuccessfully) m_NumberFramesWithBadTracking++;

                    m_VideoController.SetDisplayHueBackgroundModeTargets(m_Tracker.TrackedObjects);

                    if (m_Refining)
                    {
                        if (m_Tracker.RefiningPercentageWorkLeft <= 0)
                        {
                            bool canSwitchFromRefiningToMeasuringNow = true;

                            if (m_VideoController.IsUsingSteppedAveraging)
                            {
                                // When using stepped averaging the measurements should start at the first frame of an integration 'step'
                                // which means we need to flip the switch from 'Refining' to 'Measuring' one frame before that
                                if (firstFrameInIntegrationPeriod + m_VideoController.FramesToIntegrate - 1 > frameNo)
                                {
                                    Trace.WriteLine(string.Format("Skipping frame {0}, waiting for the first frame in the next integration period to start measurements.", frameNo));
                                    canSwitchFromRefiningToMeasuringNow = false;
                                }
                            }

                            if (canSwitchFromRefiningToMeasuringNow)
                            {
                                float oneSigma;
                                m_AavNtpTimestampError = m_VideoController.AstroAnalogueVideoNormaliseNtpDataIfNeeded(out oneSigma);
                                m_AavNtpFitOneSigmaError = oneSigma;

                                // Begin measurements: flip the state machine and reset all
                                // per-run counters and the elapsed-time stopwatch.
                                m_Measuring = true;
                                m_Refining = false;

                                m_ProcessedFrames = 0;
                                m_UnsuccessfulFrames = 0;
                                m_PartiallySuccessfulFrames = 0;
                                m_StopWatch.Reset();
                                m_StopWatch.Start();

                                m_Tracker.BeginMeasurements(astroImage);

                                // IMPORTANT: The finalHeader must be changed as well if changing this
                                LCFile.NewOnTheFlyOutputFile(
                                    m_VideoController.CurrentVideoFileName,
                                    string.Format("Video ({0})", m_VideoController.CurrentVideoFileType),
                                    (byte)m_Tracker.TrackedObjects.Count, (float)m_Tracker.PositionTolerance,
                                    LightCurveReductionContext.Instance.LightCurveReductionType == LightCurveReductionType.TotalLunarReppearance);

                                // Sentinels: min starts at MaxValue / max at MinValue so the
                                // first measured frame initialises both.
                                m_MinFrame = uint.MaxValue;
                                m_MaxFrame = uint.MinValue;
                                m_TotalFrames = 0;
                                m_FirstMeasuredFrame = frameNo;

                                m_AverageFWHM = astroImage.GetAverageFWHM();

                                m_VideoController.StatusChanged("Measuring");
                            }
                        }
                    }
                }

                if (m_Measuring)
                {
                    if (!m_Tracker.IsTrackedSuccessfully &&
                         LightCurveReductionContext.Instance.StopOnLostTracking)
                    {
                        // Tracking lost and the user asked to stop: pause the run, optionally
                        // alert with a sound/popup, and bail out without measuring this frame.
                        m_ControlPanel.StopMeasurements(ucLightCurves.StopReason.LostTracking);

                        if (TangraConfig.Settings.Tracking.PlaySound)
                            Console.Beep(800, 750);

                        if (TangraConfig.Settings.Tracking.PopUpOnLostTracking)
                            m_VideoController.ShowMessageBox(
                                "Use the mouse to pan the object apertures back to the object location and press 'Continue' to continue with the measurements. You can also do adjustments to the objects individually or skip the frame all together.",
                                "Tracking has been lost",
                                MessageBoxButtons.OK,
                                MessageBoxIcon.Exclamation);

                        return;
                    }

                    // Per-frame bookkeeping: a frame is 'partially successful' when tracking
                    // succeeded overall but at least one object could not be located.
                    m_ProcessedFrames++;
                    if (!m_Tracker.IsTrackedSuccessfully) m_UnsuccessfulFrames++;
                    if (m_Tracker.IsTrackedSuccessfully && m_Tracker.TrackedObjects.Any(x => !x.IsLocated))
                        m_PartiallySuccessfulFrames++;

                    SaveEmbeddedOrORCedTimeStamp(frameNo);

                    MeasureObjects();

                    m_PrevMeasuredFrame = m_CurrFrameNo;

                    // Stop the watch only to read a consistent elapsed value, then resume.
                    m_StopWatch.Stop();
                    m_ControlPanel.UpdateProcessedFrames(m_ProcessedFrames, m_UnsuccessfulFrames, m_PartiallySuccessfulFrames, (int)(m_StopWatch.ElapsedMilliseconds / 1000));
                    m_StopWatch.Start();
                }
            }
            else if (m_ViewingLightCurve && m_LightCurveController.LcFile != null)
            {
                // Viewing mode: show the readings of all objects for the displayed frame,
                // provided the frame falls inside the measured range of the light curve file.
                var currentSelection = new LCMeasurement[m_LightCurveController.LcFile.Header.ObjectCount];

                if (m_LightCurveController.LcFile.Header.MinFrame <= m_CurrFrameNo &&
                    m_LightCurveController.LcFile.Header.MaxFrame >= m_CurrFrameNo)
                {
                    for (int i = 0; i < m_LightCurveController.LcFile.Header.ObjectCount; i++)
                    {
                        // NOTE(review): assumes AllReadings[i] holds one reading per frame,
                        // indexed from MinFrame — verify against the reader.
                        List<LCMeasurement> measurements = m_LightCurveController.Context.AllReadings[i];
                        currentSelection[i] = measurements[(int)(m_CurrFrameNo - m_LightCurveController.LcFile.Header.MinFrame)];
                    }

                    m_LightCurveController.OnNewSelectedMeasurements(currentSelection.ToArray());
                }
            }

            if (m_ControlPanel != null)
                m_ControlPanel.UpdateState();

            // End-of-file handling: finish whichever phase was running when the video ended.
            if (isLastFrame)
            {
                if (m_Refining)
                {
                    m_ControlPanel.StopRefining();
                    if (TangraConfig.Settings.Tracking.PlaySound)
                        Console.Beep();
                }
                else if (m_Measuring)
                {
                    m_ControlPanel.StopMeasurements(ucLightCurves.StopReason.EndOfFile);

                    if (TangraConfig.Settings.Tracking.PlaySound)
                        Console.Beep();

                    m_ControlPanel.StoppedAtLastFrame();
                }
            }
        }