Example #1
        private void SyncTimeStampControlWithExpectedFrameTime()
        {
            VideoFileFormat format = m_VideoController.GetVideoFileFormat();

            if (format == VideoFileFormat.AVI ||
                format == VideoFileFormat.AAV || /* Old AAV files with manually entered timestamps */
                (format == VideoFileFormat.AAV2 && !m_VideoController.HasEmbeddedTimeStamps()))
            {
                // Extrapolate the UTC time of the current frame from the field-solve reference frame
                double millisecondsDiff =
                    (m_VideoController.CurrentFrameIndex - AstrometryContext.Current.FieldSolveContext.FrameNoOfUtcTime) * 1000.0 / m_VideoController.VideoFrameRate;

                ucUtcTimePicker.DateTimeUtc = AstrometryContext.Current.FieldSolveContext.UtcTime.AddMilliseconds(millisecondsDiff);
                if (m_VideoController.HasTimestampOCR())
                {
                    m_VideoController.AssertOCRTimestamp(ucUtcTimePicker.DateTimeUtc, true);
                }
            }
            else
            {
                ucUtcTimePicker.DateTimeUtc = m_VideoController.GetBestGuessDateTimeForCurrentFrame();
            }

            if (format == VideoFileFormat.AAV || /* Old AAV files with manually entered timestamps */
                (format == VideoFileFormat.AAV2 && !m_VideoController.HasEmbeddedTimeStamps()))
            {
                // Refresh the current frame to split the AAV timestamp for manual validation
                // NOTE: This assumes the Split option is selected in the settings (which will be by default)
                m_VideoController.RefreshCurrentFrame();
            }
        }
Example #2
        private void SyncTimeStampControlWithExpectedFrameTime()
        {
            VideoFileFormat format = m_VideoController.GetVideoFileFormat();

            if (format == VideoFileFormat.AVI ||
                format == VideoFileFormat.AAV || /* Old AAV files with manually entered timestamps */
                (format == VideoFileFormat.AAV2 && !m_VideoController.HasEmbeddedTimeStamps()))
            {
                // Extrapolate the UTC time of the current frame from the field-solve reference frame
                double millisecondsDiff =
                    (m_VideoController.CurrentFrameIndex - AstrometryContext.Current.FieldSolveContext.FrameNoOfUtcTime) * 1000.0 / m_VideoController.VideoFrameRate;

                ucUtcTimePicker.DateTimeUtc = AstrometryContext.Current.FieldSolveContext.UtcTime.AddMilliseconds(millisecondsDiff);
                if (m_VideoController.HasTimestampOCR())
                {
                    m_VideoController.AssertOCRTimestamp(ucUtcTimePicker.DateTimeUtc, true);
                }
            }
            else
            {
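                // Use the frame time reported by the video (DateTime.MinValue means no timestamp is available)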
                DateTime? timeStamp = m_VideoController.GetCurrentFrameTime();
                if (timeStamp != null && timeStamp != DateTime.MinValue)
                {
                    ucUtcTimePicker.DateTimeUtc = timeStamp.Value;
                }
            }
        }
Example #3
        public SpectraFileHeader GetSpectraFileHeader()
        {
            var rv = new SpectraFileHeader()
            {
                PathToVideoFile  = m_VideoController.CurrentVideoFileName,
                Width            = TangraContext.Current.FrameWidth,
                Height           = TangraContext.Current.FrameHeight,
                BitPix           = m_VideoController.VideoBitPix,
                DataAav16NormVal = m_VideoController.VideoAav16NormVal,
                SourceInfo       = string.Format("Video ({0})", m_VideoController.CurrentVideoFileType),
                ObjectName       = "",
                Telescope        = "",
                Instrument       = "",
                Recorder         = "",
                Observer         = "",
                RA        = float.NaN,
                DEC       = float.NaN,
                Longitude = float.NaN,
                Latitude  = float.NaN
            };

            VideoFileFormat fileFormat = m_VideoController.GetVideoFileFormat();

            if (fileFormat.IsAAV())
            {
                Dictionary <string, string> tags = m_VideoController.GetVideoFileTags();
                tags.TryGetValue("ObjectName", out rv.ObjectName);
                tags.TryGetValue("Telescope", out rv.Telescope);
                tags.TryGetValue("Instrument", out rv.Instrument);
                tags.TryGetValue("Recorder", out rv.Recorder);
                tags.TryGetValue("Observer", out rv.Observer);

                string ra, dec, lng, lat;
                tags.TryGetValue("RA", out ra);
                tags.TryGetValue("DEC", out dec);
                tags.TryGetValue("Longitude", out lng);
                tags.TryGetValue("Latitude", out lat);

                if (!string.IsNullOrEmpty(ra))
                {
                    float.TryParse(ra, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.RA);
                }
                if (!string.IsNullOrEmpty(dec))
                {
                    float.TryParse(dec, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.DEC);
                }
                if (!string.IsNullOrEmpty(lng))
                {
                    float.TryParse(lng, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.Longitude);
                }
                if (!string.IsNullOrEmpty(lat))
                {
                    float.TryParse(lat, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.Latitude);
                }
            }

            return(rv);
        }
Example #4
        internal void StartExport(string fileName, bool fitsCube, Rectangle roi, UsedTimeBase timeBase, bool usesOCR, bool ocrHasDatePart)
        {
            m_FrameWidth     = TangraContext.Current.FrameWidth;
            m_FrameHeight    = TangraContext.Current.FrameHeight;
            m_IsFitsSequence = m_VideoController.IsFitsSequence;

            m_RegionOfInterest = roi;
            m_UsesROI          = roi.Width != m_FrameWidth || roi.Height != m_FrameHeight;

            m_FitsCube = fitsCube;
            if (!fitsCube)
            {
                m_FolderName = fileName;

                if (!Directory.Exists(m_FolderName))
                {
                    Directory.CreateDirectory(m_FolderName);
                }
            }

            m_Note = string.Format("Converted from {0} file.", m_VideoController.GetVideoFileFormat());
            if (m_Note.Length > HeaderCard.MAX_VALUE_LENGTH)
            {
                m_Note = m_Note.Substring(0, HeaderCard.MAX_VALUE_LENGTH);
            }

            if (m_UsesROI)
            {
                m_Note += string.Format(" Selected ROI: ({0},{1},{2},{3})", roi.Left, roi.Top, roi.Right, roi.Bottom);
                if (m_Note.Length > HeaderCard.MAX_VALUE_LENGTH)
                {
                    m_Note = m_Note.Substring(0, HeaderCard.MAX_VALUE_LENGTH);
                }
            }

            if (timeBase == UsedTimeBase.UserEnterred)
            {
                m_DateObsComment = "Date and Time are user entered & computed";
            }
            else if (timeBase == UsedTimeBase.EmbeddedTimeStamp)
            {
                if (usesOCR)
                {
                    if (ocrHasDatePart)
                    {
                        m_DateObsComment = "Date and Time are ORC-ed";
                    }
                    else
                    {
                        m_DateObsComment = "Time is ORC-ed, Date is user entered";
                    }
                }
                else if (!m_IsFitsSequence)
                {
                    m_DateObsComment = "Date and Time are saved by recorder";
                }
            }
        }
Example #5
 public DialogResult EnteredTimeIntervalLooksOkay()
 {
     if (m_VideoController.IsAstroAnalogueVideo && !m_VideoController.AstroAnalogueVideoHasOcrOrNtpData)
     {
         return(CheckAavRate(m_VideoController.GetVideoFileFormat()));
     }
     else
     {
         return(CheckPALOrNTSCRate());
     }
 }
Example #6
        public ucConvertVideoToFits(ConvertVideoToFitsOperation operation, VideoController videoController)
            : this()
        {
            m_Operation       = operation;
            m_VideoController = videoController;

            var videoFileFormat = m_VideoController.GetVideoFileFormat();

            // For AAV files the very first frame is excluded from the selectable range, hence the +1 offset
            nudFirstFrame.Minimum = videoController.VideoFirstFrame + (videoFileFormat == VideoFileFormat.AAV ? 1 : 0);
            nudFirstFrame.Maximum = videoController.VideoLastFrame - 1;
            nudFirstFrame.Value   = nudFirstFrame.Minimum;

            nudLastFrame.Minimum = videoController.VideoFirstFrame + (videoFileFormat == VideoFileFormat.AAV ? 1 : 0);
            nudLastFrame.Maximum = videoController.VideoLastFrame - 1;
            nudLastFrame.Value   = nudLastFrame.Maximum;

            cbxEveryFrame.SelectedIndex = 0;
        }
Example #7
        public bool InitializeOperation(IVideoController videoController, Panel controlPanel, IFramePlayer framePlayer, Form topForm)
        {
            m_Status          = ConvertVideoToFitsState.Configuring;
            m_VideoController = (VideoController)videoController;

            // We don't allow loading of calibration frames for now. Doing so will complicate the export
            TangraContext.Current.CanLoadFlatFrame = false;
            TangraContext.Current.CanLoadDarkFrame = false;
            TangraContext.Current.CanLoadBiasFrame = false;

            // Lazily create the control panel, using double-checked locking to guard against repeat creation
            if (m_ControlPanel == null)
            {
                lock (m_SyncRoot)
                {
                    if (m_ControlPanel == null)
                    {
                        m_ControlPanel = new ucConvertVideoToFits(this, (VideoController)videoController);
                    }
                }
            }

            controlPanel.Controls.Clear();
            controlPanel.Controls.Add(m_ControlPanel);
            m_ControlPanel.Dock = DockStyle.Fill;

            if (topForm.Height < 736)
            {
                // Make sure all controls of the panel are visible
                topForm.Height = 736;
            }

            var videoFileFormat = m_VideoController.GetVideoFileFormat();

            // For AAV files step off the very first frame, which is excluded from the export range (see ucConvertVideoToFits)
            if (videoFileFormat == VideoFileFormat.AAV &&
                m_VideoController.CurrentFrameIndex == m_VideoController.VideoFirstFrame)
            {
                m_VideoController.StepForward();
            }

            return(true);
        }
Example #8
        public FlyByMotionFitter.FrameTimeInfo GetFrameTimeInfo(int frameId)
        {
            if (m_VideoController.IsAstroAnalogueVideo && m_VideoController.HasTimestampOCR())
            {
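                // Total exposure of the integrated AAV frame in milliseconds (frames integrated x ms per video frame)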
                var exposure  = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames * m_VideoController.FramePlayer.Video.MillisecondsPerFrame;
                var timestamp = m_VideoController.OCRTimestamp();
                if (timestamp != DateTime.MinValue)
                {
                    timestamp = AstrometryContext.Current.FieldSolveContext.UtcTime.Date.Add(timestamp.TimeOfDay);
                    timestamp = timestamp.AddMilliseconds(0.5 * exposure);
                    var nativeFormat = m_VideoController.GetVideoFormat(m_VideoController.GetVideoFileFormat());
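                    // Shift back by one video field duration (a PAL field is 20 ms, an NTSC field is ~16.68 ms)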
                    if (nativeFormat == "PAL")
                    {
                        timestamp = timestamp.AddMilliseconds(-20);
                    }
                    else if (nativeFormat == "NTSC")
                    {
                        timestamp = timestamp.AddMilliseconds(-16.68);
                    }
                }

                return(new FlyByMotionFitter.FrameTimeInfo()
                {
                    CentralExposureTime = timestamp,
                    ExposureInMilliseconds = exposure
                });
            }

            var frameStateInfo = m_VideoController.GetFrameStateData(frameId);

            return(new FlyByMotionFitter.FrameTimeInfo()
            {
                CentralExposureTime = frameStateInfo.CentralExposureTime,
                ExposureInMilliseconds = frameStateInfo.ExposureInMilliseconds
            });
        }
Example #9
        internal void StartExport(string fileName, bool fitsCube, Rectangle roi, UsedTimeBase timeBase, bool usesOCR, bool ocrHasDatePart)
        {
            m_FrameWidth     = TangraContext.Current.FrameWidth;
            m_FrameHeight    = TangraContext.Current.FrameHeight;
            m_IsFitsSequence = m_VideoController.IsFitsSequence;

            if (m_VideoController.IsAstroAnalogueVideo)
            {
                m_IntegrationRate = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
            }
            else
            {
                m_IntegrationRate = 0;
            }

            m_NativeFormat = m_VideoController.GetVideoFormat(m_VideoController.GetVideoFileFormat());
            m_VideoCamera  = m_VideoController.AstroVideoCameraModel;

            m_RegionOfInterest = roi;
            m_UsesROI          = roi.Width != m_FrameWidth || roi.Height != m_FrameHeight;

            m_ExportAs8BitFloat = false;
            m_NormalValue       = m_VideoController.EffectiveMaxPixelValue;
            m_VideoFormat       = m_VideoController.GetVideoFileFormat();

            if (m_VideoFormat == VideoFileFormat.AAV || m_VideoFormat == VideoFileFormat.AAV2 || m_VideoFormat == VideoFileFormat.AVI)
            {
                if (m_VideoController.VideoBitPix == 8 || m_VideoController.EffectiveMaxPixelValue > 255)
                {
                    // For video from analogue cameras we export as 8-bit floating point numbers
                    m_ExportAs8BitFloat = true;
                }
            }

            m_AdditionalFileHeaders.Clear();
            var fileHeaderProvider = m_VideoController.FramePlayer.Video as IFileHeaderProvider;

            if (fileHeaderProvider != null)
            {
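                // Map of AAV file header tags to FITS keyword names (FITS keywords are limited to 8 characters, e.g. LONGITUDE -> LONGITUD)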
                var AAV_HEADERS = new Dictionary <string, string>
                {
                    { "LATITUDE", "LATITUDE" },
                    { "LONGITUDE", "LONGITUD" },
                    { "OBSERVER", "OBSERVER" },
                    { "TELESCOP", "TELESCOP" },
                    { "OBJECT", "OBJECT" },
                    { "RA_OBJ", "RA_OBJ" },
                    { "DEC_OBJ", "DEC_OBJ" },
                    { "RECORDER-SOFTWARE", "REC-SOFT" },
                    { "RECORDER-SOFTWARE-VERSION", "REC-VER" },
                    { "ADVLIB-VERSION", "ADVLIB" }
                };

                var copiedFromHeaderDescription = string.Format("Copied from {0} file headers", m_VideoFormat);
                var fileHeaders = fileHeaderProvider.GetFileHeaders();
                fileHeaders
                .Where(kvp => AAV_HEADERS.ContainsKey(kvp.Key))
                .ToList()
                .ForEach(kvp => m_AdditionalFileHeaders[AAV_HEADERS[kvp.Key]] = Tuple.Create(kvp.Value, copiedFromHeaderDescription));

                if (m_VideoFormat == VideoFileFormat.AAV || m_VideoFormat == VideoFileFormat.AAV2)
                {
                    var camera = m_VideoCamera;
                    if (camera.IndexOf(m_NativeFormat, StringComparison.InvariantCultureIgnoreCase) == -1)
                    {
                        camera = camera.Trim() + string.Format(" ({0})", m_NativeFormat);
                    }
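                    // Look up the instrumental delay configured for this camera model and AAV integration rate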
                    var   instrumentalDelaySelectedConfig = InstrumentalDelayConfigManager.GetConfigurationForCamera(camera);
                    float instDelay;
                    if (instrumentalDelaySelectedConfig.TryGetValue(m_VideoController.AstroAnalogueVideoIntegratedAAVFrames, out instDelay))
                    {
                        m_AdditionalFileHeaders["INSTDELY"] = Tuple.Create(
                            instDelay.ToString(CultureInfo.InvariantCulture),
                            string.Format("Instr. delay in sec. for x{0} frames integration for '{1}' camera. This has not been applied to DATE-OBS", m_VideoController.AstroAnalogueVideoIntegratedAAVFrames, camera));
                    }
                }
            }

            m_FitsCube = fitsCube;
            if (!fitsCube)
            {
                m_FolderName = fileName;

                if (!Directory.Exists(m_FolderName))
                {
                    Directory.CreateDirectory(m_FolderName);
                }
            }

            m_Note = string.Format("Converted from {0} file.", m_VideoController.GetVideoFileFormat());
            if (m_Note.Length > HeaderCard.MAX_VALUE_LENGTH)
            {
                m_Note = m_Note.Substring(0, HeaderCard.MAX_VALUE_LENGTH);
            }

            if (m_UsesROI)
            {
                m_Note += string.Format(" Selected ROI: ({0},{1},{2},{3})", roi.Left, roi.Top, roi.Right, roi.Bottom);
                if (m_Note.Length > HeaderCard.MAX_VALUE_LENGTH)
                {
                    m_Note = m_Note.Substring(0, HeaderCard.MAX_VALUE_LENGTH);
                }
            }

            if (timeBase == UsedTimeBase.UserEnterred)
            {
                m_DateObsComment = "Date and Time are user entered & computed";
            }
            else if (timeBase == UsedTimeBase.EmbeddedTimeStamp)
            {
                if (usesOCR)
                {
                    if (ocrHasDatePart)
                    {
                        m_DateObsComment = "Date and Time are ORC-ed";
                    }
                    else
                    {
                        m_DateObsComment = "Time is ORC-ed, Date is user entered";
                    }
                }
                else if (!m_IsFitsSequence)
                {
                    m_DateObsComment = "Date and Time are saved by recorder";
                }
            }
        }