Example #1
        private void SyncTimeStampControlWithExpectedFrameTime()
        {
            VideoFileFormat format = m_VideoController.GetVideoFileFormat();

            if (format == VideoFileFormat.AVI ||
                format == VideoFileFormat.AAV || /* Old AAV files with manually entered timestamps */
                (format == VideoFileFormat.AAV2 && !m_VideoController.HasEmbeddedTimeStamps()))
            {
                double millisecondsDiff =
                    (m_VideoController.CurrentFrameIndex - AstrometryContext.Current.FieldSolveContext.FrameNoOfUtcTime) * 1000.0 / m_VideoController.VideoFrameRate;

                ucUtcTimePicker.DateTimeUtc = AstrometryContext.Current.FieldSolveContext.UtcTime.AddMilliseconds(millisecondsDiff);
                if (m_VideoController.HasTimestampOCR())
                {
                    m_VideoController.AssertOCRTimestamp(ucUtcTimePicker.DateTimeUtc, true);
                }
            }
            else
            {
                ucUtcTimePicker.DateTimeUtc = m_VideoController.GetBestGuessDateTimeForCurrentFrame();
            }

            if (format == VideoFileFormat.AAV || /* Old AAV files with manually entered timestamps */
                (format == VideoFileFormat.AAV2 && !m_VideoController.HasEmbeddedTimeStamps()))
            {
                // Refresh the current frame to split the AAV timestamp for manual validation
                // NOTE: This assumes the Split option is selected in the settings (which it will be by default)
                m_VideoController.RefreshCurrentFrame();
            }
        }
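
The extrapolation above is linear: the expected UTC time of the current frame is the reference UTC time plus the frame offset converted to milliseconds. A minimal self-contained sketch of that calculation (the helper name and parameters are illustrative, not part of the Tangra API; VideoFrameRate is assumed to be in frames per second):

        static DateTime ExtrapolateFrameTimeUtc(DateTime referenceUtc, int referenceFrame, int targetFrame, double frameRate)
        {
            // Frame offset converted to milliseconds at the given frame rate
            double millisecondsDiff = (targetFrame - referenceFrame) * 1000.0 / frameRate;
            return referenceUtc.AddMilliseconds(millisecondsDiff);
        }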
Example #2
        private void SyncTimeStampControlWithExpectedFrameTime()
        {
            VideoFileFormat format = m_VideoController.GetVideoFileFormat();

            if (format == VideoFileFormat.AVI ||
                format == VideoFileFormat.AAV || /* Old AAV files with manually entered timestamps */
                (format == VideoFileFormat.AAV2 && !m_VideoController.HasEmbeddedTimeStamps()))
            {
                double millisecondsDiff =
                    (m_VideoController.CurrentFrameIndex - AstrometryContext.Current.FieldSolveContext.FrameNoOfUtcTime) * 1000.0 / m_VideoController.VideoFrameRate;

                ucUtcTimePicker.DateTimeUtc = AstrometryContext.Current.FieldSolveContext.UtcTime.AddMilliseconds(millisecondsDiff);
                if (m_VideoController.HasTimestampOCR())
                {
                    m_VideoController.AssertOCRTimestamp(ucUtcTimePicker.DateTimeUtc, true);
                }
            }
            else
            {
                DateTime? timeStamp = m_VideoController.GetCurrentFrameTime();
                if (timeStamp != null && timeStamp != DateTime.MinValue)
                {
                    ucUtcTimePicker.DateTimeUtc = timeStamp.Value;
                }
            }
        }
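
This variant treats both null and DateTime.MinValue as "no timestamp available" and leaves the picker unchanged in that case. The same guard, factored into a reusable sketch (a hypothetical helper, not part of the Tangra API):

        static bool TryGetValidTimestamp(DateTime? timeStamp, out DateTime value)
        {
            // DateTime.MinValue doubles as a 'no timestamp' sentinel in these examples
            if (timeStamp.HasValue && timeStamp.Value != DateTime.MinValue)
            {
                value = timeStamp.Value;
                return true;
            }

            value = default(DateTime);
            return false;
        }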
Example #3
        public void Start(VideoFileFormat videoFileFormat, int frameRate, string outputPath)
        {
            Size blockRegionSize;

            //If the EntireScreen option is set, capture the entire virtual screen
            if (Settings.Instance.EntireScreen)
            {
                blockRegionSize = new Size(SystemInformation.VirtualScreen.Width, SystemInformation.VirtualScreen.Height);
            }
            else
            {
                Delimiter delimiter = Delimiter.Instance;
                delimiter.TopMost = true;
                blockRegionSize   = delimiter.getInnerDelimiterSize();
                upperLeftSource   = delimiter.getInnerDelimiterUpperLeftLocation();
                delimiter.Lock();
            }
            Recorder.blockRegionSize = blockRegionSize;
            video = Video.Create(videoFileFormat, frameRate, outputPath);
            delay = 1000 / frameRate;

            //Initialize buffers
            bitmapBuffer    = new Bitmap(blockRegionSize.Width, blockRegionSize.Height);
            graphicsBuffer  = Graphics.FromImage(bitmapBuffer);
            recordingThread = new Thread(tick);

            //Start the countdown; when it finishes it will start recording
            (new Countdown()).Show();
        }
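
Note that delay = 1000 / frameRate is integer division: at 30 fps the tick interval truncates to 33 ms (an effective rate of roughly 30.3 fps), and the error accumulates over a long capture. A sketch of drift-free pacing, scheduling each tick from an absolute start time (illustrative, not part of this recorder):

            var start = DateTime.UtcNow;
            double frameIntervalMs = 1000.0 / frameRate;   // 33.33 ms at 30 fps
            // Frame n is due at start.AddMilliseconds(n * frameIntervalMs),
            // so per-tick rounding never accumulates across frames.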
Example #4
        public SpectraFileHeader GetSpectraFileHeader()
        {
            var rv = new SpectraFileHeader()
            {
                PathToVideoFile  = m_VideoController.CurrentVideoFileName,
                Width            = TangraContext.Current.FrameWidth,
                Height           = TangraContext.Current.FrameHeight,
                BitPix           = m_VideoController.VideoBitPix,
                DataAav16NormVal = m_VideoController.VideoAav16NormVal,
                SourceInfo       = string.Format("Video ({0})", m_VideoController.CurrentVideoFileType),
                ObjectName       = "",
                Telescope        = "",
                Instrument       = "",
                Recorder         = "",
                Observer         = "",
                RA        = float.NaN,
                DEC       = float.NaN,
                Longitude = float.NaN,
                Latitude  = float.NaN
            };

            VideoFileFormat fileFormat = m_VideoController.GetVideoFileFormat();

            if (fileFormat.IsAAV())
            {
                Dictionary <string, string> tags = m_VideoController.GetVideoFileTags();
                tags.TryGetValue("ObjectName", out rv.ObjectName);
                tags.TryGetValue("Telescope", out rv.Telescope);
                tags.TryGetValue("Instrument", out rv.Instrument);
                tags.TryGetValue("Recorder", out rv.Recorder);
                tags.TryGetValue("Observer", out rv.Observer);

                string ra, dec, lng, lat;
                tags.TryGetValue("RA", out ra);
                tags.TryGetValue("DEC", out dec);
                tags.TryGetValue("Longitude", out lng);
                tags.TryGetValue("Latitude", out lat);

                if (!string.IsNullOrEmpty(ra))
                {
                    float.TryParse(ra, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.RA);
                }
                if (!string.IsNullOrEmpty(dec))
                {
                    float.TryParse(dec, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.DEC);
                }
                if (!string.IsNullOrEmpty(lng))
                {
                    float.TryParse(lng, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.Longitude);
                }
                if (!string.IsNullOrEmpty(lat))
                {
                    float.TryParse(lat, NumberStyles.Float, CultureInfo.InvariantCulture, out rv.Latitude);
                }
            }

            return(rv);
        }
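
The four coordinate parses are identical apart from the tag name; a small helper removes the copy-paste risk (a sketch, the helper name is illustrative and not part of the Tangra API):

        private static float ParseTagOrNaN(Dictionary<string, string> tags, string key)
        {
            string value;
            float  result;

            // Culture-invariant parse mirroring the calls above; NaN marks a missing tag
            if (tags.TryGetValue(key, out value) &&
                !string.IsNullOrEmpty(value) &&
                float.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out result))
            {
                return result;
            }

            return float.NaN;
        }

Call sites then read rv.RA = ParseTagOrNaN(tags, "RA"); and so on for DEC, Longitude and Latitude.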
Example #5
        public DialogResult CheckAavRate(VideoFileFormat videoFileFormat)
        {
            int totalIntegratedFrames = 0;

            m_VideoController.SetPictureBoxCursor(Cursors.WaitCursor);
            m_VideoController.NotifyFileProgress(-1, m_EndTimeFrame - m_StartTimeFrame);
            try
            {
                for (int i = m_StartTimeFrame; i < m_EndTimeFrame; i++)
                {
                    FrameStateData frameState = m_VideoController.GetFrameStateData(i);

                    totalIntegratedFrames += frameState.NumberIntegratedFrames.Value;
                    m_VideoController.NotifyFileProgress(i, m_EndTimeFrame - m_StartTimeFrame);
                }
            }
            finally
            {
                m_VideoController.NotifyFileProgress(-1, 0);
                m_VideoController.SetPictureBoxCursor(Cursors.Default);
            }

            // The actual integration could even be of PAL or NTSC frames

            TimeSpan ts = new TimeSpan(Math.Abs(m_EndFrameTime.Ticks - m_StartFrameTime.Ticks) /*Taking ABS to handle backwards measuring*/);
            double   videoTimeInSecPAL  = totalIntegratedFrames / 25.0;
            double   videoTimeInSecNTSC = totalIntegratedFrames / 29.97;

            double timeDiscrepancy_PAL = Math.Abs((videoTimeInSecPAL - ts.TotalSeconds) * 1000);
            bool   isTimeOk_PAL        = (videoTimeInSecPAL > 0 && timeDiscrepancy_PAL < TangraConfig.Settings.Special.MaxAllowedTimestampShiftInMilliSecs);

            double timeDiscrepancy_NTSC = Math.Abs((videoTimeInSecNTSC - ts.TotalSeconds) * 1000);
            bool   isTimeOk_NTSC        = (videoTimeInSecNTSC > 0 && timeDiscrepancy_NTSC < TangraConfig.Settings.Special.MaxAllowedTimestampShiftInMilliSecs);

            bool   isTimeOk        = isTimeOk_NTSC || isTimeOk_PAL;
            double timeDiscrepancy = Math.Min(timeDiscrepancy_PAL, timeDiscrepancy_NTSC);

            if (!isTimeOk)
            {
                if (MessageBox.Show(
                        string.Format(
                            "The time computed from the measured number of frames in this AAV video is off by {0} ms from the entered time. This may indicate " +
                            "incorrectly entered start or end time or an almanac update or a leap second event. Please enter the start and end times again. The export operation can " +
                            "only continute if the times match exactly.",
                            timeDiscrepancy.ToString("0.00")),
                        "Error",
                        MessageBoxButtons.RetryCancel, MessageBoxIcon.Error, MessageBoxDefaultButton.Button1) == DialogResult.Retry)
                {
                    return(DialogResult.Retry);
                }

                return(DialogResult.Abort);
            }

            return(DialogResult.OK);
        }
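
As a worked example: 2500 integrated frames at the PAL rate correspond to 2500 / 25.0 = 100.0 s of video. If the entered start and end times are 100.12 s apart, the PAL discrepancy is |100.0 - 100.12| * 1000 = 120 ms; the NTSC branch is computed the same way at 29.97 fps, and the export proceeds if either discrepancy is under MaxAllowedTimestampShiftInMilliSecs.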
Example #6
        internal ucLCFileInfo(LCFile lcFile, IVideoController videoController)
        {
            InitializeComponent();

            m_lcFile          = lcFile;
            m_VideoController = videoController;
            DisplayLCFileInfo();

            // Only analogue video (or derived from it) has fields
            VideoFileFormat videoFileFormat = m_lcFile.Header.GetVideoFileFormat();

            btnShowFields.Visible = videoFileFormat == VideoFileFormat.AVI || videoFileFormat.IsAAV();
            m_ShowingFields       = false;
        }
Example #7
        private void frmRunMultiFrameMeasurements_Load(object sender, EventArgs e)
        {
            cbxExpectedMotion.SelectedIndex = (int)m_MeasurementContext.MovementExpectation;
            cbxSignalType.SelectedIndex = (int)m_MeasurementContext.ObjectExposureQuality;
            cbxFrameTimeType.SelectedIndex = (int)m_MeasurementContext.FrameTimeType;
            nudInstrDelay.SetNUDValue(m_MeasurementContext.InstrumentalDelay);
            cbxInstDelayUnit.SelectedIndex = (int)m_MeasurementContext.InstrumentalDelayUnits;
            nudIntegratedFrames.SetNUDValue(m_MeasurementContext.IntegratedFramesCount);

            nudInstrDelay.Enabled = true;
            cbxInstDelayUnit.Enabled = true;
            nudIntegratedFrames.Enabled = true;
            btnDetectIntegration.Enabled = true;

            cbxInstDelayCamera.SelectedIndex = cbxInstDelayCamera.Items.IndexOf(TangraConfig.Settings.PlateSolve.SelectedCameraModel);
            cbxInstDelayMode.SelectedIndex = 0; // Automatic
            if (cbxInstDelayCamera.SelectedIndex == -1)
            {
                cbxInstDelayCamera.SelectedIndex = 0; // No camera selected
                cbxInstDelayMode.SelectedIndex = 1; // Manual
            }

            m_AavStacking = false;
            m_AavIntegration = false;
            m_VideoFileFormat = m_VideoController.GetVideoFileFormat();
            if (m_VideoFileFormat != VideoFileFormat.AVI)
            {
                cbxFrameTimeType.SelectedIndex = 0;
                cbxFrameTimeType.Enabled = false;

                DateTime? timeStamp = m_VideoController.GetCurrentFrameTime();
                if (timeStamp != null && timeStamp != DateTime.MinValue)
                {
                    DateTime timestamp = timeStamp.Value;

                    if (m_VideoFileFormat.IsAAV())
                    {
                        m_NativeFormat = m_VideoController.GetVideoFormat(m_VideoFileFormat);

                        // Compute the first timestamp value
                        if (m_VideoController.HasTimestampOCR())
                        {
                            timestamp = m_FieldSolveContext.UtcTime.Date.Add(timestamp.TimeOfDay);
                        }
                        else
                        {
                            FrameStateData frameState = m_VideoController.GetCurrentFrameState();
                            timestamp = timestamp.AddMilliseconds(-0.5 * frameState.ExposureInMilliseconds);

                            if (m_NativeFormat == "PAL") timestamp = timestamp.AddMilliseconds(20);
                            else if (m_NativeFormat == "NTSC") timestamp = timestamp.AddMilliseconds(16.68);
                        }
                    }

                    ucUtcTimePicker.DateTimeUtc = timestamp;
                    ucUtcTimePicker.Enabled = false; // The video has an embedded timestamp so the user should not enter times manually
                }

                if (m_VideoFileFormat.IsAAV())
                {
                    nudIntegratedFrames.Enabled = false;

                    if (m_VideoController.AstroAnalogueVideoStackedFrameRate > 0)
                    {
                        cbxFrameTimeType.Items[0] = "AAV stacked frame";
                        pnlIntegration.Visible = true;
                        btnDetectIntegration.Visible = false;
                        lblFrames.Text = string.Format("frames (stacking: {0} frames)", m_VideoController.AstroAnalogueVideoStackedFrameRate);
                        m_AavStacking = true;
                        nudIntegratedFrames.Value = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
                    }
                    else if (m_VideoController.AstroAnalogueVideoIntegratedAAVFrames > 0)
                    {
                        cbxFrameTimeType.Items[0] = "AAV integrated frame";
                        pnlIntegration.Visible = true;
                        btnDetectIntegration.Visible = false;
                        nudIntegratedFrames.Value = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
                        m_AavIntegration = true;
                    }
                }
            }

            activePage = 0;
            DisplayActivePage();

            // Allow time corrections to be set automatically even when the integration is not recognized ("use suggested value" button) or something
        }
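
As a worked example of the AAV correction above: with x8 integration of PAL frames the exposure is 8 * 40 ms = 320 ms, so the mid-exposure correction is -160 ms and the PAL field offset adds +20 ms, i.e. the picker is initialized to the recorded timestamp minus 140 ms.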
Example #8
 public static bool IsAAV(this VideoFileFormat format)
 {
     return
         (format == VideoFileFormat.AAV ||
          format == VideoFileFormat.AAV2);
 }
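
Being an extension method, IsAAV keeps call sites compact. A usage sketch matching the checks in the examples above:

     VideoFileFormat format = m_VideoController.GetVideoFileFormat();
     if (format == VideoFileFormat.AVI || format.IsAAV())
     {
         // Analogue or analogue-derived video: fields and OCR timestamps may apply
     }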
Example #9
        internal void StartExport(string fileName, bool fitsCube, Rectangle roi, UsedTimeBase timeBase, bool usesOCR, bool ocrHasDatePart)
        {
            m_FrameWidth     = TangraContext.Current.FrameWidth;
            m_FrameHeight    = TangraContext.Current.FrameHeight;
            m_IsFitsSequence = m_VideoController.IsFitsSequence;

            if (m_VideoController.IsAstroAnalogueVideo)
            {
                m_IntegrationRate = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
            }
            else
            {
                m_IntegrationRate = 0;
            }

            m_NativeFormat = m_VideoController.GetVideoFormat(m_VideoController.GetVideoFileFormat());
            m_VideoCamera  = m_VideoController.AstroVideoCameraModel;

            m_RegionOfInterest = roi;
            m_UsesROI          = roi.Width != m_FrameWidth || roi.Height != m_FrameHeight;

            m_ExportAs8BitFloat = false;
            m_NormalValue       = m_VideoController.EffectiveMaxPixelValue;
            m_VideoFormat       = m_VideoController.GetVideoFileFormat();

            if (m_VideoFormat == VideoFileFormat.AAV || m_VideoFormat == VideoFileFormat.AAV2 || m_VideoFormat == VideoFileFormat.AVI)
            {
                if (m_VideoController.VideoBitPix == 8 || m_VideoController.EffectiveMaxPixelValue > 255)
                {
                    // For video from analogue cameras we export as 8-bit floating point numbers
                    m_ExportAs8BitFloat = true;
                }
            }

            m_AdditionalFileHeaders.Clear();
            var fileHeaderProvider = m_VideoController.FramePlayer.Video as IFileHeaderProvider;

            if (fileHeaderProvider != null)
            {
                var AAV_HEADERS = new Dictionary <string, string>
                {
                    { "LATITUDE", "LATITUDE" },
                    { "LONGITUDE", "LONGITUD" },
                    { "OBSERVER", "OBSERVER" },
                    { "TELESCOP", "TELESCOP" },
                    { "OBJECT", "OBJECT" },
                    { "RA_OBJ", "RA_OBJ" },
                    { "DEC_OBJ", "DEC_OBJ" },
                    { "RECORDER-SOFTWARE", "REC-SOFT" },
                    { "RECORDER-SOFTWARE-VERSION", "REC-VER" },
                    { "ADVLIB-VERSION", "ADVLIB" }
                };

                var copiedFromHeaderDescription = string.Format("Copied from {0} file headers", m_VideoFormat);
                var fileHeaders = fileHeaderProvider.GetFileHeaders();
                fileHeaders
                .Where(kvp => AAV_HEADERS.ContainsKey(kvp.Key))
                .ToList()
                .ForEach(kvp => m_AdditionalFileHeaders[AAV_HEADERS[kvp.Key]] = Tuple.Create(kvp.Value, copiedFromHeaderDescription));

                if (m_VideoFormat == VideoFileFormat.AAV || m_VideoFormat == VideoFileFormat.AAV2)
                {
                    var camera = m_VideoCamera;
                    if (camera.IndexOf(m_NativeFormat, StringComparison.InvariantCultureIgnoreCase) == -1)
                    {
                        camera = camera.Trim() + string.Format(" ({0})", m_NativeFormat);
                    }
                    var   instrumentalDelaySelectedConfig = InstrumentalDelayConfigManager.GetConfigurationForCamera(camera);
                    float instDelay;
                    if (instrumentalDelaySelectedConfig.TryGetValue(m_VideoController.AstroAnalogueVideoIntegratedAAVFrames, out instDelay))
                    {
                        m_AdditionalFileHeaders["INSTDELY"] = Tuple.Create(
                            instDelay.ToString(CultureInfo.InvariantCulture),
                            string.Format("Instr. delay in sec. for x{0} frames integration for '{1}' camera. This has not been applied to DATE-OBS", m_VideoController.AstroAnalogueVideoIntegratedAAVFrames, camera));
                    }
                }
            }

            m_FitsCube = fitsCube;
            if (!fitsCube)
            {
                m_FolderName = fileName;

                if (!Directory.Exists(m_FolderName))
                {
                    Directory.CreateDirectory(m_FolderName);
                }
            }

            m_Note = string.Format("Converted from {0} file.", m_VideoController.GetVideoFileFormat());
            if (m_Note.Length > HeaderCard.MAX_VALUE_LENGTH)
            {
                m_Note = m_Note.Substring(0, HeaderCard.MAX_VALUE_LENGTH);
            }

            if (m_UsesROI)
            {
                m_Note += string.Format(" Selected ROI: ({0},{1},{2},{3})", roi.Left, roi.Top, roi.Right, roi.Bottom);
                if (m_Note.Length > HeaderCard.MAX_VALUE_LENGTH)
                {
                    m_Note = m_Note.Substring(0, HeaderCard.MAX_VALUE_LENGTH);
                }
            }

            if (timeBase == UsedTimeBase.UserEnterred)
            {
                m_DateObsComment = "Date and Time are user entered & computed";
            }
            else if (timeBase == UsedTimeBase.EmbeddedTimeStamp)
            {
                if (usesOCR)
                {
                    if (ocrHasDatePart)
                    {
                        m_DateObsComment = "Date and Time are ORC-ed";
                    }
                    else
                    {
                        m_DateObsComment = "Time is ORC-ed, Date is user entered";
                    }
                }
                else if (!m_IsFitsSequence)
                {
                    m_DateObsComment = "Date and Time are saved by recorder";
                }
            }
        }
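
The LINQ pipeline that copies the headers materializes a list just to call ForEach; a plain loop expresses the same key mapping (a behavior-equivalent sketch using the same AAV_HEADERS dictionary and locals):

            foreach (var kvp in fileHeaders)
            {
                string fitsKey;
                if (AAV_HEADERS.TryGetValue(kvp.Key, out fitsKey))
                {
                    m_AdditionalFileHeaders[fitsKey] = Tuple.Create(kvp.Value, copiedFromHeaderDescription);
                }
            }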
Example #10
        public static ITracker CreateTracker(int imageWidth, int imageHeight, LightCurveReductionType lightCurveReductionType, VideoFileFormat fileFormat, List<TrackedObjectConfig> measuringStars, out string usedTrackerType)
        {
            // NOTE: Figure out what tracker to create based on the type of event, number of objects and their intensity
            bool createRefiningTracker = TangraConfig.Settings.Tracking.SelectedEngine == TangraConfig.TrackingEngine.TrackingWithRefining;

            if (TangraConfig.Settings.Tracking.SelectedEngine == TangraConfig.TrackingEngine.LetTangraChoose)
            {
                if (LightCurveReductionContext.Instance.WindOrShaking ||
                    LightCurveReductionContext.Instance.StopOnLostTracking ||
                    LightCurveReductionContext.Instance.IsDriftThrough ||
                    LightCurveReductionContext.Instance.HighFlickeringOrLargeStars ||
                    LightCurveReductionContext.Instance.FullDisappearance)
                {
                    createRefiningTracker = true;
                }
            }

            bool createFITSFileTracker = lightCurveReductionType == LightCurveReductionType.VariableStarOrTransit &&
                                         fileFormat == VideoFileFormat.FITS;

            if (lightCurveReductionType == LightCurveReductionType.Asteroidal || lightCurveReductionType == LightCurveReductionType.VariableStarOrTransit)
            {
                if (createFITSFileTracker && lightCurveReductionType == LightCurveReductionType.VariableStarOrTransit)
                {
                    usedTrackerType = "Star field tracker";
                    return new StarFieldTracker(measuringStars);
                }
                else if (createRefiningTracker)
                {
                    if (measuringStars.Count == 1)
                    {
                        usedTrackerType = "One star tracking";
                        return new OneStarTracker(measuringStars);
                    }
                    else
                    {
                        usedTrackerType = "Tracking with recovery";
                        return new OccultationTracker(measuringStars);
                    }
                }
                else
                {
            #if WIN32
                    if (TangraConfig.Settings.Tracking.UseNativeTracker)
                    {
                        usedTrackerType = "Simplified Native";
                        return new NativeSimplifiedTracker(imageWidth, imageHeight, measuringStars, LightCurveReductionContext.Instance.FullDisappearance);
                    }
                    else
            #endif
                    {
                        usedTrackerType = "Simplified";
                        return new SimplifiedTracker(measuringStars);
                    }
                }
            }
            else if (lightCurveReductionType == LightCurveReductionType.MutualEvent)
            {
                if (measuringStars.Any(x => x.ProcessInPsfGroup))
                {
                    usedTrackerType = "Mutual Event M";
                    return new MutualEventTracker(measuringStars, LightCurveReductionContext.Instance.FullDisappearance);
                }
                else
                {
                    if (createRefiningTracker)
                    {
                        usedTrackerType = "Mutual Event O";
                        return new OccultationTracker(measuringStars);
                    }

                    usedTrackerType = "Mutual Event S";

            #if WIN32
                    if (TangraConfig.Settings.Tracking.UseNativeTracker)
                        return new NativeSimplifiedTracker(imageWidth, imageHeight, measuringStars, LightCurveReductionContext.Instance.FullDisappearance);
                    else
            #endif
                        return new SimplifiedTracker(measuringStars);
                }
            }
            else if (lightCurveReductionType == LightCurveReductionType.TotalLunarDisappearance ||
                lightCurveReductionType == LightCurveReductionType.TotalLunarReppearance ||
                lightCurveReductionType == LightCurveReductionType.LunarGrazingOccultation)
            {
                usedTrackerType = "Lunar Occuration Tracker";
                return new LunarOccultationTracker(measuringStars);
            }
            else if (lightCurveReductionType == LightCurveReductionType.UntrackedMeasurement)
            {
                usedTrackerType = "Untracked";
                return new UntrackedTracker(measuringStars);
            }

            throw new NotSupportedException();
        }
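
A hedged usage sketch (the measuringStars list is assumed to come from the caller's reduction setup; variable names are illustrative):

            string usedTrackerType;
            ITracker tracker = CreateTracker(
                TangraContext.Current.FrameWidth, TangraContext.Current.FrameHeight,
                LightCurveReductionType.Asteroidal,
                VideoFileFormat.AVI,
                measuringStars,
                out usedTrackerType);
            // usedTrackerType now names the selected strategy, e.g. "One star tracking"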
Example #11
        private void frmRunMultiFrameMeasurements_Load(object sender, EventArgs e)
        {
            cbxExpectedMotion.SelectedIndex = (int)m_MeasurementContext.MovementExpectation;
            cbxSignalType.SelectedIndex     = (int)m_MeasurementContext.ObjectExposureQuality;
            cbxFrameTimeType.SelectedIndex  = (int)m_MeasurementContext.FrameTimeType;
            nudInstrDelay.SetNUDValue(m_MeasurementContext.InstrumentalDelay);
            cbxInstDelayUnit.SelectedIndex = (int)m_MeasurementContext.InstrumentalDelayUnits;
            nudIntegratedFrames.SetNUDValue(m_MeasurementContext.IntegratedFramesCount);

            nudInstrDelay.Enabled        = true;
            cbxInstDelayUnit.Enabled     = true;
            nudIntegratedFrames.Enabled  = true;
            btnDetectIntegration.Enabled = true;

            cbxInstDelayCamera.SelectedIndex = cbxInstDelayCamera.Items.IndexOf(TangraConfig.Settings.PlateSolve.SelectedCameraModel);
            cbxInstDelayMode.SelectedIndex   = 0;           // Automatic
            if (cbxInstDelayCamera.SelectedIndex == -1)
            {
                cbxInstDelayCamera.SelectedIndex = 0;               // No camera selected
                cbxInstDelayMode.SelectedIndex   = 1;               // Manual
            }

            m_AavStacking     = false;
            m_AavIntegration  = false;
            m_VideoFileFormat = m_VideoController.GetVideoFileFormat();
            if (m_VideoFileFormat != VideoFileFormat.AVI)
            {
                cbxFrameTimeType.SelectedIndex = 0;
                cbxFrameTimeType.Enabled       = false;

                DateTime? timeStamp = m_VideoController.GetCurrentFrameTime();
                if (timeStamp != null && timeStamp != DateTime.MinValue)
                {
                    DateTime timestamp = timeStamp.Value;

                    if (m_VideoFileFormat.IsAAV())
                    {
                        m_NativeFormat = m_VideoController.GetVideoFormat(m_VideoFileFormat);

                        // Compute the first timestamp value
                        if (m_VideoController.HasTimestampOCR())
                        {
                            timestamp = m_FieldSolveContext.UtcTime.Date.Add(timestamp.TimeOfDay);
                        }
                        else
                        {
                            FrameStateData frameState = m_VideoController.GetCurrentFrameState();
                            timestamp = timestamp.AddMilliseconds(-0.5 * frameState.ExposureInMilliseconds);

                            if (m_NativeFormat == "PAL")
                            {
                                timestamp = timestamp.AddMilliseconds(20);
                            }
                            else if (m_NativeFormat == "NTSC")
                            {
                                timestamp = timestamp.AddMilliseconds(16.68);
                            }
                        }
                    }

                    ucUtcTimePicker.DateTimeUtc = timestamp;
                    ucUtcTimePicker.Enabled     = false;                 // The video has an embedded timestamp so the user should not enter times manually
                }

                if (m_VideoFileFormat.IsAAV())
                {
                    nudIntegratedFrames.Enabled = false;

                    if (m_VideoController.AstroAnalogueVideoStackedFrameRate > 0)
                    {
                        cbxFrameTimeType.Items[0]    = "AAV stacked frame";
                        pnlIntegration.Visible       = true;
                        btnDetectIntegration.Visible = false;
                        lblFrames.Text            = string.Format("frames (stacking: {0} frames)", m_VideoController.AstroAnalogueVideoStackedFrameRate);
                        m_AavStacking             = true;
                        nudIntegratedFrames.Value = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
                    }
                    else if (m_VideoController.AstroAnalogueVideoIntegratedAAVFrames > 0)
                    {
                        cbxFrameTimeType.Items[0]    = "AAV integrated frame";
                        pnlIntegration.Visible       = true;
                        btnDetectIntegration.Visible = false;
                        nudIntegratedFrames.Value    = m_VideoController.AstroAnalogueVideoIntegratedAAVFrames;
                        m_AavIntegration             = true;
                    }
                }
            }
            else
            {
                // This will be set in the exported CSV file and will then be used to identify repeated measurements from integrated AAV files
                m_NativeFormat = "AVI-Integrated";
            }

            activePage = 0;
            DisplayActivePage();

            // Allow time corrections to be set automatically even when the integration is not recognized ("use suggested value" button) or something
        }
Example #12
        public static Video Create(VideoFileFormat videoFileFormat, int frameRate, string outputPath)
        {
            // NOTE: Only GIF encoding is implemented at present, so the format argument is ignored.
            return(new VideoGIF(frameRate, outputPath));
        }
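
If further encoders are added, the factory would branch on the requested format before throwing. A sketch under the assumption that the enum has a GIF member (this listing does not show VideoFileFormat's members):

        public static Video Create(VideoFileFormat videoFileFormat, int frameRate, string outputPath)
        {
            switch (videoFileFormat)
            {
            case VideoFileFormat.GIF:     // assumed member name
                return(new VideoGIF(frameRate, outputPath));

            default:
                throw new ArgumentException("Unsupported video file format: " + videoFileFormat);
            }
        }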
Example #13
        public static ITracker CreateTracker(int imageWidth, int imageHeight, LightCurveReductionType lightCurveReductionType, VideoFileFormat fileFormat, List <TrackedObjectConfig> measuringStars, out string usedTrackerType)
        {
            // NOTE: Figure out what tracker to create based on the type of event, number of objects and their intensity
            bool createRefiningTracker = TangraConfig.Settings.Tracking.SelectedEngine == TangraConfig.TrackingEngine.TrackingWithRefining;

            if (TangraConfig.Settings.Tracking.SelectedEngine == TangraConfig.TrackingEngine.LetTangraChoose)
            {
                if (LightCurveReductionContext.Instance.WindOrShaking ||
                    LightCurveReductionContext.Instance.StopOnLostTracking ||
                    LightCurveReductionContext.Instance.IsDriftThrough ||
                    LightCurveReductionContext.Instance.HighFlickeringOrLargeStars ||
                    LightCurveReductionContext.Instance.FullDisappearance)
                {
                    createRefiningTracker = true;
                }
            }

            bool createFITSFileTracker = lightCurveReductionType == LightCurveReductionType.VariableStarOrTransit &&
                                         fileFormat == VideoFileFormat.FITS;

            if (lightCurveReductionType == LightCurveReductionType.Asteroidal || lightCurveReductionType == LightCurveReductionType.VariableStarOrTransit)
            {
                if (createFITSFileTracker && lightCurveReductionType == LightCurveReductionType.VariableStarOrTransit)
                {
                    usedTrackerType = "Star field tracker";
                    return(new StarFieldTracker(measuringStars));
                }
                else if (createRefiningTracker)
                {
                    if (measuringStars.Count == 1)
                    {
                        usedTrackerType = "One star tracking";
                        return(new OneStarTracker(measuringStars));
                    }
                    else
                    {
                        usedTrackerType = "Tracking with recovery";
                        return(new OccultationTracker(measuringStars));
                    }
                }
                else
                {
#if WIN32
                    if (TangraConfig.Settings.Tracking.UseNativeTracker)
                    {
                        usedTrackerType = "Simplified Native";
                        return(new NativeSimplifiedTracker(imageWidth, imageHeight, measuringStars, LightCurveReductionContext.Instance.FullDisappearance));
                    }
                    else
#endif
                    {
                        usedTrackerType = "Simplified";
                        return(new SimplifiedTracker(measuringStars));
                    }
                }
            }
            else if (lightCurveReductionType == LightCurveReductionType.MutualEvent)
            {
                if (measuringStars.Any(x => x.ProcessInPsfGroup))
                {
                    usedTrackerType = "Mutual Event M";
                    return(new MutualEventTracker(measuringStars, LightCurveReductionContext.Instance.FullDisappearance));
                }
                else
                {
                    if (createRefiningTracker)
                    {
                        usedTrackerType = "Mutual Event O";
                        return(new OccultationTracker(measuringStars));
                    }

                    usedTrackerType = "Mutual Event S";

#if WIN32
                    if (TangraConfig.Settings.Tracking.UseNativeTracker)
                    {
                        return(new NativeSimplifiedTracker(imageWidth, imageHeight, measuringStars, LightCurveReductionContext.Instance.FullDisappearance));
                    }
                    else
#endif
                    return(new SimplifiedTracker(measuringStars));
                }
            }
            else if (lightCurveReductionType == LightCurveReductionType.TotalLunarDisappearance ||
                     lightCurveReductionType == LightCurveReductionType.TotalLunarReppearance ||
                     lightCurveReductionType == LightCurveReductionType.LunarGrazingOccultation)
            {
                usedTrackerType = "Lunar Occuration Tracker";
                return(new LunarOccultationTracker(measuringStars));
            }
            else if (lightCurveReductionType == LightCurveReductionType.UntrackedMeasurement)
            {
                usedTrackerType = "Untracked";
                return(new UntrackedTracker(measuringStars));
            }

            throw new NotSupportedException();
        }