Code Example #1
        public void BeginMeasurements()
        {
            bool isColourBitmap = false;
            AstroImage currentImage = m_VideoController.GetCurrentAstroImage(false);
            //using (Bitmap nonIntegratedBmp = m_Host.FramePlayer.GetFrame(m_CurrFrameNo, true))
            //{
            //    isColourBitmap = BitmapFilter.IsColourBitmap(nonIntegratedBmp);
            //}
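            // NOTE: with the detection above commented out, isColourBitmap remains false and the prompt below is skipped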

            LightCurveReductionContext.Instance.IsColourVideo = isColourBitmap;

            if (isColourBitmap &&
                TangraConfig.Settings.Photometry.ColourChannel != TangraConfig.ColourChannel.GrayScale)
            {
                string channel = TangraConfig.Settings.Photometry.ColourChannel.ToString();
                DialogResult dlgRes = m_VideoController.ShowMessageBox(
                    "Would you like to use the GrayScale band for this measurement only?\r\n\r\n" +
                    "This appears to be a colour video but the current band to measure is not set to GrayScale. It is recommended to use the GrayScale band for colour videos. \r\n\r\n" +
                    "To use the GrayScale band for this reduction only - press 'Yes', to use the currently set [" + channel + "] " +
                    "band press 'No'. To manually set a different band for this and other reductions press 'Cancel' and configure the band from the Tangra settings form before you continue.",
                    "Warning",
                    MessageBoxButtons.YesNoCancel,
                    MessageBoxIcon.Warning);

                if (dlgRes == DialogResult.Cancel)
                    return;

                if (dlgRes == DialogResult.Yes)
                    LightCurveReductionContext.Instance.ColourChannel = TangraConfig.ColourChannel.GrayScale;
                else
                    LightCurveReductionContext.Instance.ColourChannel = TangraConfig.Settings.Photometry.ColourChannel;
            }
            else
            {
                // Not a colour video (or GrayScale already selected): keep the configured colour channel
                LightCurveReductionContext.Instance.ColourChannel = TangraConfig.Settings.Photometry.ColourChannel;
            }

            //LightCurveReductionContext.Instance.OSDFrame =
            //    RegistryConfig.Instance.OSDSizes.GetOSDRectangleForFrameSize(
            //        VideoContext.Current.VideoStream.Width, VideoContext.Current.VideoStream.Height);

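            // Back up the current state-machine state and switch the operation into the 'refining' phase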
            m_BackedUpSelectMeasuringStarsState = m_StateMachine.m_CurrentStateObject;
            m_Measuring = false;
            m_Refining = true;
            m_ViewingLightCurve = false;
            m_Configuring = false;
            m_Correcting = false;

            if (m_StackedAstroImage == null)
            {
                EnsureStackedAstroImage();
                m_AveragedFrame = new AveragedFrame(m_StackedAstroImage);
            }

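            // Create a tracker suited to the reduction type, video file format and selected measuring stars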
            string usedTrackerType;
            m_Tracker = TrackerFactory.CreateTracker(
                m_StackedAstroImage.Width,
                m_StackedAstroImage.Height,
                LightCurveReductionContext.Instance.LightCurveReductionType,
                m_VideoController.GetVideoFileFormat(),
                m_StateMachine.MeasuringStars,
                out usedTrackerType);

            LightCurveReductionContext.Instance.UsedTracker = usedTrackerType;
            TangraContext.Current.CrashReportInfo.Tracker = usedTrackerType;

            if (!m_Tracker.InitializeNewTracking(m_VideoController.GetCurrentAstroImage(false)))
                return;

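            // Clear any manual tracking offsets left over from a previous run (up to four tracked objects)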
            for (int i = 0; i < 4; i++)
            {
                m_ManualTrackingDeltaX[i] = 0;
                m_ManualTrackingDeltaY[i] = 0;
            }

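            // Set up OCR of on-screen timestamps; OCR-ed or natively embedded time stamps both count as embedded timing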
            m_VideoController.InitializeTimestampOCR();

            LightCurveReductionContext.Instance.HasEmbeddedTimeStamps =
                m_VideoController.HasTimestampOCR() || m_VideoController.HasEmbeddedTimeStamps();

            m_VideoController.StatusChanged("Refining");

            m_StateMachine.ChangeState(LightCurvesState.Running);

            MeasuringStarted();

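            // Record the camera model and AAV-specific details (frame integration, native video standard, stacked frame rate)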
            m_CameraName = m_VideoController.IsAstroDigitalVideo || m_VideoController.IsAstroAnalogueVideo ? m_VideoController.AstroVideoCameraModel : string.Empty;
            m_AavFrameIntegration = m_VideoController.IsAstroAnalogueVideo ? m_VideoController.AstroAnalogueVideoIntegratedAAVFrames : -1;
            m_AavNativeVideoFormat = m_VideoController.IsAstroAnalogueVideo ? m_VideoController.AstroVideoNativeVideoStandard : string.Empty;
            m_AavStackedFrameRate = m_VideoController.IsAstroAnalogueVideo ? m_VideoController.AstroAnalogueVideoStackedFrameRate : 0;

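            // When debugging the tracking just enable playback and refresh the UI; otherwise start playing from the current frame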
            if (LightCurveReductionContext.Instance.DebugTracking)
            {
                TangraContext.Current.CanPlayVideo = true;
                m_VideoController.UpdateViews();
            }
            else
                m_VideoController.PlayVideo(m_CurrFrameNo > -1 ? (int?)m_CurrFrameNo : null, 1);
        }
Code Example #2
        internal LCFile FlushLightCurveFile()
        {
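            // Collect the per-object photometry settings (PSF fit matrix size, aperture, weak-signal flag, PSF group id)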
            var matrixSizes = new List<int>();
            var apertures = new List<float>();
            var fixedFlags = new List<bool>();
            var psfGroupIds = new List<int>();

            m_Tracker.TrackedObjects.ForEach(o =>
            {
                matrixSizes.Add(o.OriginalObject.PsfFitMatrixSize);
                apertures.Add(o.OriginalObject.ApertureInPixels);
                fixedFlags.Add(o.OriginalObject.IsWeakSignalObject);
                psfGroupIds.Add(o.OriginalObject.GroupId);
            });

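            // Determine how frame times were obtained: embedded timestamps, OCR-ed timestamps, or user-entered frame references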
            MeasurementTimingType measurementTimingType = MeasurementTimingType.UserEnteredFrameReferences;
            if (m_VideoController.HasEmbeddedTimeStamps())
                measurementTimingType = MeasurementTimingType.EmbeddedTimeForEachFrame;
            else if (m_VideoController.HasTimestampOCR())
                measurementTimingType = MeasurementTimingType.OCRedTimeForEachFrame;

            SerUseTimeStamp serTimingType = SerUseTimeStamp.None;
            if (m_VideoController.IsSerVideo)
                serTimingType = m_VideoController.GetSerTimingType();

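            // Assemble the header describing the video file, the measured frame range and the reduction settings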
            LCMeasurementHeader finalHeader = new LCMeasurementHeader(
                m_VideoController.CurrentVideoFileName,
                string.Format("Video ({0})", m_VideoController.CurrentVideoFileType),
                m_VideoController.VideoFirstFrame,
                m_VideoController.VideoCountFrames,
                m_VideoController.VideoFrameRate,
                m_MinFrame,
                m_MaxFrame,
                (uint)m_TotalFrames,
                (uint)m_MeasurementInterval,
                (byte)m_Tracker.TrackedObjects.Count,
                LightCurveReductionContext.Instance.LightCurveReductionType,
                measurementTimingType,
                serTimingType,
                (int)LightCurveReductionContext.Instance.NoiseMethod,
                (int)LightCurveReductionContext.Instance.DigitalFilter,
                matrixSizes.ToArray(), apertures.ToArray(), fixedFlags.ToArray(), psfGroupIds.ToArray(), (float)m_Tracker.PositionTolerance);

            finalHeader.FirstTimedFrameTime = m_StartFrameTime;
            finalHeader.SecondTimedFrameTime = m_EndFrameTime;

            finalHeader.FirstTimedFrameNo = m_StartTimeFrame;
            finalHeader.LastTimedFrameNo = m_EndTimeFrame;

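            // The footer stores an averaged frame; build one from the stacked image if it does not exist yet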
            if (m_AveragedFrame == null)
            {
                if (m_StackedAstroImage == null) EnsureStackedAstroImage();
                m_AveragedFrame = new AveragedFrame(m_StackedAstroImage);
            }

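            // Assemble the footer: configuration snapshot, measuring stars, tracker state, OCR info and camera metadata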
            LCMeasurementFooter footer = new LCMeasurementFooter(
                m_AveragedFrame.Pixelmap,
                TangraConfig.Settings,
                LightCurveReductionContext.Instance,
                m_StateMachine.MeasuringStars,
                m_Tracker,
                m_VideoController.GetTimestampOCRNameAndVersion(),
                null,
                m_InstumentalDelaySelectedConfig,
                m_InstumentalDelaySelectedCamera,
                m_CameraName,
                m_AavNativeVideoFormat,
                m_AavFrameIntegration,
                m_AavNtpTimestampError,
                m_AavNtpFitOneSigmaError,
                TangraConfig.Settings.AAV.NtpTimeUseDirectTimestamps,
                PSFFit.BitPix,
                PSFFit.NormVal,
                m_AavStackedFrameRate);

            return LCFile.FlushOnTheFlyOutputFile(finalHeader, footer, m_VideoController);
        }