Example #1
        public ProcessingReturnValues FitAndPlotFastFlyby(
            Dictionary <int, SingleMultiFrameMeasurement> measurements,
            FlybyMeasurementContext meaContext,
            FittingContext fittingContext,
            FittingValue fittingValue,
            GetFrameStateDataCallback getFrameStateDataCallback,
            Graphics g, FlybyPlottingContext plottingContext, float xScale, float yScale, int imageWidth, int imageHeight,
            out double motionRate)
        {
            // Do linear regression, use residual based exclusion rules
            // Two possible modes: (A) non trailed and (B) trailed images
            // A) Non Trailed Images
            // Report interpolated times for video normal position [How to use the MPC page for this?]
            // Don't forget to add the video normal position flag in the OBS file
            // Expect elongated images and apply instrumental delay corrections (in both integrated and non integrated modes)
            // B) Trailed Images
            // TODO: R&D Required

            if (fittingContext.ObjectExposureQuality == ObjectExposureQuality.GoodSignal)
            {
                return(FitAndPlotSlowFlyby(
                           measurements, meaContext, fittingContext, fittingValue, getFrameStateDataCallback,
                           g, plottingContext, xScale, yScale, imageWidth, imageHeight, out motionRate));
            }
            else
            {
                MessageBox.Show("This operation is currently not supported.");
                motionRate = double.NaN;
                return(new ProcessingReturnValues());
            }
        }
Example #2
        private FrameTime GetTimeForFrame(FittingContext context, int frameNumber, int firstVideoFrame, GetFrameStateDataCallback getFrameStateData, DateTime? ocrTime)
        {
            var rv = new FrameTime();
            rv.RequestedFrameNo = frameNumber;

            double instrumentalDelayFrames = 0;
            double instrumentalDelaySeconds = 0;

            if (context.InstrumentalDelayUnits == InstrumentalDelayUnits.Frames)
                instrumentalDelayFrames = context.InstrumentalDelay;
            else if (context.InstrumentalDelayUnits == InstrumentalDelayUnits.Seconds)
                instrumentalDelaySeconds = context.InstrumentalDelay;

            int precision = 1000000;
            double sigma = 0.000005;
            if (context.FrameTimeType == FrameTimeType.NonIntegratedFrameTime)
            {
                if (context.VideoFileFormat == VideoFileFormat.AVI)
                {
                    // No integration is used, directly derive the time and apply instrumental delay
                    rv.ResolvedFrameNo = frameNumber - firstVideoFrame;
                    rv.UT =
                        context.FirstFrameUtcTime.AddSeconds(
                        ((frameNumber - context.FirstFrameIdInIntegrationPeroid - instrumentalDelayFrames) / context.FrameRate) - instrumentalDelaySeconds);
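                    // A worked example with hypothetical values: at FrameRate = 25 fps, frameNumber = 110,
                    // FirstFrameIdInIntegrationPeroid = 100 and a 2-frame instrumental delay this gives
                    // UT = FirstFrameUtcTime + (110 - 100 - 2) / 25 s = FirstFrameUtcTime + 0.32 s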
                }
                else if (context.VideoFileFormat == VideoFileFormat.AAV2 && ocrTime.HasValue)
                {
                    rv.ResolvedFrameNo = frameNumber - firstVideoFrame;
                    var dateTime = context.FirstFrameUtcTime.Date.Add(ocrTime.Value.TimeOfDay);
                    rv.UT = dateTime.AddSeconds(-instrumentalDelayFrames/context.FrameRate-instrumentalDelaySeconds);
                }
                else
                {
                    rv.ResolvedFrameNo = frameNumber;

                    var frameState = getFrameStateData(frameNumber);
                    rv.UT = frameState.CentralExposureTime;

                    // Convert Central time to the timestamp of the end of the first field
                    rv.UT = rv.UT.AddMilliseconds(-0.5 * frameState.ExposureInMilliseconds);

                    if (context.NativeVideoFormat == "PAL") rv.UT = rv.UT.AddMilliseconds(20);
                    else if (context.NativeVideoFormat == "NTSC") rv.UT = rv.UT.AddMilliseconds(16.68);
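                    // (20 ms is one PAL field at 25 fps / 50 fields per second;
                    // 16.68 ms is one NTSC field at 29.97 fps / 59.94 fields per second)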

                    if (context.AavStackedMode)
                    {
                        throw new NotSupportedException("AAV Stacked Mode is not supported!");
                    }
                    else
                    {
                        // Then apply instrumental delay
                        rv.UT = rv.UT.AddSeconds(-1 * instrumentalDelaySeconds);
                    }
                }
            }
            else
            {
                // Integration was used. Find the integrated frame no
                int integratedFrameNo = frameNumber - firstVideoFrame;

                double deltaMiddleFrame = 0;
                if (context.IntegratedFramesCount > 1)
                    deltaMiddleFrame = (context.IntegratedFramesCount / 2) - 0.5
                        /* This 0.5 frame correction is only used for the 'visual' mapping between where the user clicked and what is the most 'logical' normal frame to where they clicked */;
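                // e.g. with IntegratedFramesCount = 4 the integer division gives deltaMiddleFrame = (4 / 2) - 0.5 = 1.5 frames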

                // The instrumental delay is always from the timestamp of the first frame of the integrated interval

                rv.ResolvedFrameNo = integratedFrameNo;

                rv.UT =
                    context.FirstFrameUtcTime.AddSeconds(
                    ((integratedFrameNo - deltaMiddleFrame - context.FirstFrameIdInIntegrationPeroid - instrumentalDelayFrames) / context.FrameRate) - instrumentalDelaySeconds);
            }

            if (context.MovementExpectation == MovementExpectation.Slow)
            {
                precision = 100000;
                sigma = 0.000005;
            }
            else
            {
                precision = 1000000;
                sigma = 0.0000005;
            }

            double utTime = (rv.UT.Hour + rv.UT.Minute / 60.0 + (rv.UT.Second + (rv.UT.Millisecond / 1000.0)) / 3600.0) / 24;

            double roundedTime = Math.Truncate(Math.Round(utTime * precision)) / precision;

            rv.ClosestNormalFrameTime = new DateTime(rv.UT.Year, rv.UT.Month, rv.UT.Day).AddDays(roundedTime);

            TimeSpan delta = new TimeSpan(rv.ClosestNormalFrameTime.Ticks - rv.UT.Ticks);
            rv.ClosestNormalFrameNo = rv.ResolvedFrameNo + (delta.TotalSeconds * context.FrameRate);

            double framesEachSide = sigma * 24 * 3600 * context.FrameRate;
            rv.ClosestNormalIntervalFirstFrameNo = (int)Math.Floor(rv.ClosestNormalFrameNo - framesEachSide);
            rv.ClosestNormalIntervalLastFrameNo = (int)Math.Ceiling(rv.ClosestNormalFrameNo + framesEachSide);
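            // A worked example with a hypothetical 25 fps video: sigma = 0.0000005 days is ~0.0432 s,
            // so framesEachSide = 0.0000005 * 24 * 3600 * 25 = 1.08 frames either side of the normal frame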

            //Trace.WriteLine(string.Format("{0} / {1}; {2} / {3}; {4} / {5}",
            //    roundedTime.ToString("0.00000"), utTime,
            //    rv.ClosestNormalFrameNo, rv.ResolvedFrameNo,
            //    rv.UT.ToString("HH:mm:ss.fff"), rv.ClosestNormalFrameTime.ToString("HH:mm:ss.fff")));

            return rv;
        }
Example #3
        public ProcessingReturnValues FitAndPlotSlowFlyby(
            Dictionary<int, SingleMultiFrameMeasurement> measurements,
            FlybyMeasurementContext meaContext,
            FittingContext fittingContext,
            FittingValue fittingValue,
            GetFrameStateDataCallback getFrameStateDataCallback,
            Graphics g, FlybyPlottingContext plottingContext, float xScale, float yScale, int imageWidth, int imageHeight,
            out double motionRate)
        {
            try
            {
                #region Building Test Cases
                if (m_DumpTestCaseData)
                {
                    var mvSer = new XmlSerializer(typeof(FlybyMeasurementContext));
                    var sb = new StringBuilder();
                    using (var wrt = new StringWriter(sb))
                    {
                        mvSer.Serialize(wrt, meaContext);
                    }
                    Trace.WriteLine(sb.ToString());
                    var fcSer = new XmlSerializer(typeof(FittingContext));
                    sb.Clear();
                    using (var wrt = new StringWriter(sb))
                    {
                        fcSer.Serialize(wrt, fittingContext);
                    }
                    Trace.WriteLine(sb.ToString());

                    var smfmSer = new XmlSerializer(typeof(SingleMultiFrameMeasurement));
                    foreach (int key in measurements.Keys)
                    {
                        sb.Clear();
                        using (var wrt2 = new StringWriter(sb))
                        {
                            smfmSer.Serialize(wrt2, measurements[key]);
                            if (measurements[key].FrameNo != key)
                                throw new InvalidOperationException();
                            Trace.WriteLine(sb.ToString());
                        }
                    }
                }
                #endregion

                // Do linear regression, use residual based exclusion rules
                // Report the interpolated position at the middle of the measured interval
                // Don't forget to add the video normal position flag in the OBS file
                // Expect elongated images and apply instrumental delay corrections

                motionRate = double.NaN;

                var rv = new ProcessingReturnValues();

                int numFramesUser = 0;

                rv.EarliestFrame = int.MaxValue;
                rv.LatestFrame = int.MinValue;

                var intervalValues = new Dictionary<int, Tuple<List<double>, List<double>>>();
                var intervalMedians = new Dictionary<double, double>();
                var intervalWeights = new Dictionary<double, double>();

                LinearRegression regression = null;
                if (measurements.Values.Count > 1)
                {
                    rv.EarliestFrame = measurements.Values.Select(m => m.FrameNo).Min();
                    rv.LatestFrame = measurements.Values.Select(m => m.FrameNo).Max();

                    var minUncertainty = meaContext.MinPositionUncertaintyPixels * meaContext.ArsSecsInPixel;

                    foreach (SingleMultiFrameMeasurement measurement in measurements.Values)
                    {
                        int integrationInterval = (measurement.FrameNo - fittingContext.FirstFrameIdInIntegrationPeroid) / fittingContext.IntegratedFramesCount;

                        Tuple<List<double>,List<double>> intPoints;
                        if (!intervalValues.TryGetValue(integrationInterval, out intPoints))
                        {
                            intPoints = Tuple.Create(new List<double>(), new List<double>());
                            intervalValues.Add(integrationInterval, intPoints);
                        }

                        if (fittingValue == FittingValue.RA)
                        {
                            intPoints.Item1.Add(measurement.RADeg);
                            intPoints.Item2.Add(ComputePositionWeight(measurement.SolutionUncertaintyRACosDEArcSec, measurement, minUncertainty, fittingContext.Weighting));
                        }
                        else
                        {
                            intPoints.Item1.Add(measurement.DEDeg);
                            intPoints.Item2.Add(ComputePositionWeight(measurement.SolutionUncertaintyDEArcSec, measurement, minUncertainty, fittingContext.Weighting));
                        }
                    }

                    if (intervalValues.Count > 2)
                    {
                        regression = new LinearRegression();

                        foreach (int integratedFrameNo in intervalValues.Keys)
                        {
                            Tuple<List<double>, List<double>> data = intervalValues[integratedFrameNo];

                            double median;
                            double medianWeight;

                            WeightedMedian(data, out median, out medianWeight);

                            // Assign the data point to the middle of the integration interval (using frame numbers)
                            //
                            // |--|--|--|--|--|--|--|--|
                            // |           |           |
                            //
                            // Because the time associated with the first frame is the middle of the frame, but the
                            // time associated with the middle of the interval is the end of the field then the correction
                            // is (N / 2) - 0.5 frames

                            double dataPointFrameNo =
                                rv.EarliestFrame +
                                fittingContext.IntegratedFramesCount * integratedFrameNo
                                + (fittingContext.IntegratedFramesCount / 2)
                                - 0.5;
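                            // A worked example: for a hypothetical 8-frame integration the data point lands
                            // 8 * integratedFrameNo + (8 / 2) - 0.5 frames after the earliest frame,
                            // i.e. 3.5 frames into its own integration interval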

                            intervalMedians.Add(dataPointFrameNo, median);
                            intervalWeights.Add(dataPointFrameNo, medianWeight);
                            if (fittingContext.Weighting != WeightingMode.None)
                                regression.AddDataPoint(dataPointFrameNo, median, medianWeight);
                            else
                                regression.AddDataPoint(dataPointFrameNo, median);
                        }

                        regression.Solve();

                        var firstPos = measurements[rv.EarliestFrame];
                        var lastPos = measurements[rv.LatestFrame];
                        double distanceArcSec = AngleUtility.Elongation(firstPos.RADeg, firstPos.DEDeg, lastPos.RADeg, lastPos.DEDeg) * 3600;
                        var firstTime = GetTimeForFrame(fittingContext, rv.EarliestFrame, meaContext.FirstVideoFrame, getFrameStateDataCallback, firstPos.OCRedTimeStamp);
                        var lastTime = GetTimeForFrame(fittingContext, rv.LatestFrame, meaContext.FirstVideoFrame, getFrameStateDataCallback, lastPos.OCRedTimeStamp);
                        double elapsedSec = new TimeSpan(lastTime.UT.Ticks - firstTime.UT.Ticks).TotalSeconds;
                        motionRate = distanceArcSec / elapsedSec;
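                        // distanceArcSec / elapsedSec makes motionRate the apparent rate in arcseconds per second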
                    }
                }

                FrameTime resolvedTime = null;
                if (int.MinValue != meaContext.UserMidFrame)
                {
                    // Find the closest video 'normal' MPC time and compute the frame number for it
                    // Now compute the RA/DE for the computed 'normal' frame
                    resolvedTime = GetTimeForFrame(fittingContext, meaContext.UserMidFrame, meaContext.FirstVideoFrame, getFrameStateDataCallback, measurements[meaContext.UserMidFrame].OCRedTimeStamp);

                    #region Plotting Code
                    if (g != null)
                    {
                        float xPosBeg = (float)(resolvedTime.ClosestNormalIntervalFirstFrameNo - rv.EarliestFrame) * xScale + 5;
                        float xPosEnd = (float)(resolvedTime.ClosestNormalIntervalLastFrameNo - rv.EarliestFrame) * xScale + 5;

                        g.FillRectangle(s_NormalTimeIntervalHighlightBrush, xPosBeg, 1, (xPosEnd - xPosBeg), imageHeight - 2);
                    }
                    #endregion
                }

                Dictionary<double, double> secondPassData = new Dictionary<double, double>();

                int minFrameId = measurements.Keys.Min();

                #region Plotting Code
                if (g != null)
                {
                    foreach (SingleMultiFrameMeasurement measurement in measurements.Values)
                    {
                        float x = (measurement.FrameNo - minFrameId) * xScale + 5;

                        ProcessingValues val = new ProcessingValues()
                        {
                            Value = fittingValue == FittingValue.RA ? measurement.RADeg : measurement.DEDeg,
                            StdDev = fittingValue == FittingValue.RA ? measurement.StdDevRAArcSec / 3600.0 : measurement.StdDevDEArcSec / 3600.0
                        };

                        double valueFrom = val.Value - val.StdDev;
                        double valueTo = val.Value + val.StdDev;

                        float yFrom = (float)(valueFrom - plottingContext.MinValue) * yScale + 5;
                        float yTo = (float)(valueTo - plottingContext.MinValue) * yScale + 5;

                        g.DrawLine(plottingContext.IncludedPen, x, yFrom, x, yTo);
                        g.DrawLine(plottingContext.IncludedPen, x - 1, yFrom, x + 1, yFrom);
                        g.DrawLine(plottingContext.IncludedPen, x - 1, yTo, x + 1, yTo);
                    }
                }
                #endregion

                foreach (double integrFrameNo in intervalMedians.Keys)
                {
                    double val = intervalMedians[integrFrameNo];

                    double fittedValAtFrame = regression != null
                        ? regression.ComputeY(integrFrameNo)
                        : double.NaN;

                    // regression is non-null whenever intervalMedians has entries, but guard the
                    // 3-sigma exclusion test as well so it cannot dereference a null regression
                    bool included = regression != null && Math.Abs(fittedValAtFrame - val) < 3 * regression.StdDev;

                    #region Plotting Code
                    if (g != null)
                    {
                        if (fittingContext.IntegratedFramesCount > 1)
                        {
                            Pen mPen = included ? plottingContext.IncludedPen : plottingContext.ExcludedPen;

                            float x = (float)(integrFrameNo - minFrameId) * xScale + 5;
                            float y = (float)(val - plottingContext.MinValue) * yScale + 5;

                            g.DrawEllipse(mPen, x - 3, y - 3, 6, 6);
                            g.DrawLine(mPen, x - 5, y - 5, x + 5, y + 5);
                            g.DrawLine(mPen, x + 5, y - 5, x - 5, y + 5);
                        }
                    }
                    #endregion

                    if (included) secondPassData.Add(integrFrameNo, val);
                }

                #region Second Pass
                regression = null;
                if (secondPassData.Count > 2)
                {
                    regression = new LinearRegression();
                    foreach (double frameNo in secondPassData.Keys)
                    {
                        if (fittingContext.Weighting != WeightingMode.None)
                            regression.AddDataPoint(frameNo, secondPassData[frameNo], intervalWeights[frameNo]);
                        else
                            regression.AddDataPoint(frameNo, secondPassData[frameNo]);
                    }
                    regression.Solve();
                }
                #endregion

                if (regression != null)
                {
                    #region Plotting Code
                    if (g != null)
                    {
                        double leftFittedVal = regression.ComputeY(rv.EarliestFrame);
                        double rightFittedVal = regression.ComputeY(rv.LatestFrame);

                        double err = 3 * regression.StdDev;

                        float leftAve = (float)(leftFittedVal - plottingContext.MinValue) * yScale + 5;
                        float rightAve = (float)(rightFittedVal - plottingContext.MinValue) * yScale + 5;
                        float leftX = 5 + (float)(rv.EarliestFrame - rv.EarliestFrame) * xScale;
                        float rightX = 5 + (float)(rv.LatestFrame - rv.EarliestFrame) * xScale;

                        g.DrawLine(plottingContext.AveragePen, leftX, leftAve - 1, rightX, rightAve - 1);
                        g.DrawLine(plottingContext.AveragePen, leftX, leftAve, rightX, rightAve);
                        g.DrawLine(plottingContext.AveragePen, leftX, leftAve + 1, rightX, rightAve + 1);

                        float leftMin = (float)(leftFittedVal - err - plottingContext.MinValue) * yScale + 5;
                        float leftMax = (float)(leftFittedVal + err - plottingContext.MinValue) * yScale + 5;
                        float rightMin = (float)(rightFittedVal - err - plottingContext.MinValue) * yScale + 5;
                        float rightMax = (float)(rightFittedVal + err - plottingContext.MinValue) * yScale + 5;

                        g.DrawLine(plottingContext.AveragePen, leftX, leftMin, rightX, rightMin);
                        g.DrawLine(plottingContext.AveragePen, leftX, leftMax, rightX, rightMax);
                    }
                    #endregion

                    if (int.MinValue != meaContext.UserMidFrame &&
                        resolvedTime != null)
                    {
                        // Find the closest video 'normal' MPC time and compute the frame number for it
                        // Now compute the RA/DE for the computed 'normal' frame

                        double fittedValueUncertainty;
                        double fittedValueAtMiddleFrame = regression.ComputeYWithError(resolvedTime.ClosestNormalFrameNo, out fittedValueUncertainty);

                        Trace.WriteLine(string.Format("{0}; Included: {1}; Normal Frame No: {2}; Fitted Val: {3} +/- {4:0.00}",
                            meaContext.UserMidValue.ToString("0.00000"),
                            numFramesUser, resolvedTime.ClosestNormalFrameNo,
                            AstroConvert.ToStringValue(fittedValueAtMiddleFrame, "+HH MM SS.T"),
                            regression.StdDev * 60 * 60));

                        // Report the interpolated position at the middle of the measured interval
                        // Don't forget to add the video normal position flag in the OBS file
                        // Expect elongated images and apply instrumental delay corrections

                        rv.FittedValue = fittedValueAtMiddleFrame;
                        rv.FittedValueTime = resolvedTime.ClosestNormalFrameTime;
                        rv.IsVideoNormalPosition = true;
                        rv.FittedNormalFrame = resolvedTime.ClosestNormalFrameNo;
                        rv.FittedValueUncertaintyArcSec = fittedValueUncertainty * 60 * 60;

                        #region Plotting Code
                        if (g != null)
                        {
                            // Plot the frame
                            float xPos = (float)(resolvedTime.ClosestNormalFrameNo - rv.EarliestFrame) * xScale + 5;
                            float yPos = (float)(rv.FittedValue - plottingContext.MinValue) * yScale + 5;
                            g.DrawLine(Pens.Yellow, xPos, 1, xPos, imageHeight - 2);
                            g.FillEllipse(Brushes.Yellow, xPos - 3, yPos - 3, 6, 6);
                        }
                        #endregion
                    }
                    else
                        rv.FittedValue = double.NaN;
                }
                else
                    rv.FittedValue = double.NaN;

                return rv;
            }
            catch (Exception ex)
            {
                Trace.WriteLine(ex.GetFullStackTrace());
                motionRate = 0;
                return null;
            }
        }
Example #4
        public ProcessingReturnValues FitAndPlotFastFlyby(
            Dictionary<int, SingleMultiFrameMeasurement> measurements,
            FlybyMeasurementContext meaContext,
            FittingContext fittingContext,
            FittingValue fittingValue,
            GetFrameStateDataCallback getFrameStateDataCallback,
            Graphics g, FlybyPlottingContext plottingContext, float xScale, float yScale, int imageWidth, int imageHeight,
            out double motionRate)
        {
            // Do linear regression, use residual based exclusion rules
            // Two possible modes: (A) non trailed and (B) trailed images
            // A) Non Trailed Images
            // Report interpolated times for video normal position [How to use the MPC page for this?]
            // Don't forget to add the video normal position flag in the OBS file
            // Expect elongated images and apply instrumental delay corrections (in both integrated and non integrated modes)
            // B) Trailed Images
            // TODO: R&D Required

            if (fittingContext.ObjectExposureQuality == ObjectExposureQuality.GoodSignal)
            {
                return FitAndPlotSlowFlyby(
                    measurements, meaContext, fittingContext, fittingValue, getFrameStateDataCallback,
                    g, plottingContext, xScale, yScale, imageWidth, imageHeight, out motionRate);
            }
            else
            {
                MessageBox.Show("This operation is currently not supported.");
                motionRate = double.NaN;
                return new ProcessingReturnValues();
            }
        }
Example #5
        public ProcessingReturnValues FitAndPlotSlowFlyby(
            Dictionary <int, SingleMultiFrameMeasurement> measurements,
            FlybyMeasurementContext meaContext,
            FittingContext fittingContext,
            FittingValue fittingValue,
            GetFrameStateDataCallback getFrameStateDataCallback,
            Graphics g, FlybyPlottingContext plottingContext, float xScale, float yScale, int imageWidth, int imageHeight,
            out double motionRate)
        {
            try
            {
                #region Building Test Cases
                if (m_DumpTestCaseData)
                {
                    var mvSer = new XmlSerializer(typeof(FlybyMeasurementContext));
                    var sb    = new StringBuilder();
                    using (var wrt = new StringWriter(sb))
                    {
                        mvSer.Serialize(wrt, meaContext);
                    }
                    Trace.WriteLine(sb.ToString());
                    var fcSer = new XmlSerializer(typeof(FittingContext));
                    sb.Clear();
                    using (var wrt = new StringWriter(sb))
                    {
                        fcSer.Serialize(wrt, fittingContext);
                    }
                    Trace.WriteLine(sb.ToString());

                    var smfmSer = new XmlSerializer(typeof(SingleMultiFrameMeasurement));
                    foreach (int key in measurements.Keys)
                    {
                        sb.Clear();
                        using (var wrt2 = new StringWriter(sb))
                        {
                            smfmSer.Serialize(wrt2, measurements[key]);
                            if (measurements[key].FrameNo != key)
                            {
                                throw new InvalidOperationException();
                            }
                            Trace.WriteLine(sb.ToString());
                        }
                    }
                }
                #endregion

                // Do linear regression, use residual based exclusion rules
                // Report the interpolated position at the middle of the measured interval
                // Don't forget to add the video normal position flag in the OBS file
                // Expect elongated images and apply instrumental delay corrections

                motionRate = double.NaN;

                var rv = new ProcessingReturnValues();

                int numFramesUser = 0;

                rv.EarliestFrame = int.MaxValue;
                rv.LatestFrame   = int.MinValue;

                var intervalValues  = new Dictionary <int, Tuple <List <double>, List <double> > >();
                var intervalMedians = new Dictionary <double, double>();
                var intervalWeights = new Dictionary <double, double>();

                LinearRegression regression = null;
                if (measurements.Values.Count > 1)
                {
                    rv.EarliestFrame = measurements.Values.Select(m => m.FrameNo).Min();
                    rv.LatestFrame   = measurements.Values.Select(m => m.FrameNo).Max();

                    var minUncertainty = meaContext.MinPositionUncertaintyPixels * meaContext.ArsSecsInPixel;

                    foreach (SingleMultiFrameMeasurement measurement in measurements.Values)
                    {
                        int integrationInterval = (measurement.FrameNo - fittingContext.FirstFrameIdInIntegrationPeroid) / fittingContext.IntegratedFramesCount;

                        Tuple <List <double>, List <double> > intPoints;
                        if (!intervalValues.TryGetValue(integrationInterval, out intPoints))
                        {
                            intPoints = Tuple.Create(new List <double>(), new List <double>());
                            intervalValues.Add(integrationInterval, intPoints);
                        }

                        if (fittingValue == FittingValue.RA)
                        {
                            intPoints.Item1.Add(measurement.RADeg);
                            intPoints.Item2.Add(ComputePositionWeight(measurement.SolutionUncertaintyRACosDEArcSec, measurement, minUncertainty, fittingContext.Weighting));
                        }
                        else
                        {
                            intPoints.Item1.Add(measurement.DEDeg);
                            intPoints.Item2.Add(ComputePositionWeight(measurement.SolutionUncertaintyDEArcSec, measurement, minUncertainty, fittingContext.Weighting));
                        }
                    }

                    if (intervalValues.Count > 2)
                    {
                        regression = new LinearRegression();

                        foreach (int integratedFrameNo in intervalValues.Keys)
                        {
                            Tuple <List <double>, List <double> > data = intervalValues[integratedFrameNo];

                            double median;
                            double medianWeight;

                            WeightedMedian(data, out median, out medianWeight);

                            // Assign the data point to the middle of the integration interval (using frame numbers)
                            //
                            // |--|--|--|--|--|--|--|--|
                            // |           |           |
                            //
                            // Because the time associated with the first frame is the middle of the frame, but the
                            // time associated with the middle of the interval is the end of the field then the correction
                            // is (N / 2) - 0.5 frames when integration is used or no correction when integration of x1 is used.

                            double dataPointFrameNo =
                                rv.EarliestFrame +
                                fittingContext.IntegratedFramesCount * integratedFrameNo
                                + (fittingContext.IntegratedFramesCount / 2)
                                - (fittingContext.IntegratedFramesCount > 1 ? 0.5 : 0);
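                            // e.g. at x4 integration the offset is 4 * integratedFrameNo + 2 - 0.5 frames;
                            // at x1 integration no half-frame correction is applied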

                            intervalMedians.Add(dataPointFrameNo, median);
                            intervalWeights.Add(dataPointFrameNo, medianWeight);
                            if (fittingContext.Weighting != WeightingMode.None)
                            {
                                regression.AddDataPoint(dataPointFrameNo, median, medianWeight);
                            }
                            else
                            {
                                regression.AddDataPoint(dataPointFrameNo, median);
                            }
                        }

                        regression.Solve();

                        var    firstPos       = measurements[rv.EarliestFrame];
                        var    lastPos        = measurements[rv.LatestFrame];
                        double distanceArcSec = AngleUtility.Elongation(firstPos.RADeg, firstPos.DEDeg, lastPos.RADeg, lastPos.DEDeg) * 3600;
                        var    firstTime      = GetTimeForFrame(fittingContext, rv.EarliestFrame, meaContext.FirstVideoFrame, getFrameStateDataCallback, firstPos.FrameTimeStamp);
                        var    lastTime       = GetTimeForFrame(fittingContext, rv.LatestFrame, meaContext.FirstVideoFrame, getFrameStateDataCallback, lastPos.FrameTimeStamp);
                        double elapsedSec     = new TimeSpan(lastTime.UT.Ticks - firstTime.UT.Ticks).TotalSeconds;
                        motionRate = distanceArcSec / elapsedSec;
                    }
                }

                FrameTime resolvedTime = null;
                if (int.MinValue != meaContext.UserMidFrame)
                {
                    // Find the closest video 'normal' MPC time and compute the frame number for it
                    // Now compute the RA/DE for the computed 'normal' frame
                    resolvedTime = GetTimeForFrame(fittingContext, meaContext.UserMidFrame, meaContext.FirstVideoFrame, getFrameStateDataCallback, measurements[meaContext.UserMidFrame].FrameTimeStamp);

                    #region Plotting Code
                    if (g != null)
                    {
                        float xPosBeg = (float)(resolvedTime.ClosestNormalIntervalFirstFrameNo - rv.EarliestFrame) * xScale + 5;
                        float xPosEnd = (float)(resolvedTime.ClosestNormalIntervalLastFrameNo - rv.EarliestFrame) * xScale + 5;

                        g.FillRectangle(s_NormalTimeIntervalHighlightBrush, xPosBeg, 1, (xPosEnd - xPosBeg), imageHeight - 2);
                    }
                    #endregion
                }

                Dictionary <double, double> secondPassData = new Dictionary <double, double>();

                int minFrameId = measurements.Keys.Min();

                #region Plotting Code
                if (g != null)
                {
                    foreach (SingleMultiFrameMeasurement measurement in measurements.Values)
                    {
                        float x = (measurement.FrameNo - minFrameId) * xScale + 5;

                        ProcessingValues val = new ProcessingValues()
                        {
                            Value  = fittingValue == FittingValue.RA ? measurement.RADeg : measurement.DEDeg,
                            StdDev = fittingValue == FittingValue.RA ? measurement.StdDevRAArcSec / 3600.0 : measurement.StdDevDEArcSec / 3600.0
                        };

                        double valueFrom = val.Value - val.StdDev;
                        double valueTo   = val.Value + val.StdDev;

                        float yFrom = (float)(valueFrom - plottingContext.MinValue) * yScale + 5;
                        float yTo   = (float)(valueTo - plottingContext.MinValue) * yScale + 5;

                        g.DrawLine(plottingContext.IncludedPen, x, yFrom, x, yTo);
                        g.DrawLine(plottingContext.IncludedPen, x - 1, yFrom, x + 1, yFrom);
                        g.DrawLine(plottingContext.IncludedPen, x - 1, yTo, x + 1, yTo);
                    }
                }
                #endregion

                foreach (double integrFrameNo in intervalMedians.Keys)
                {
                    double val = intervalMedians[integrFrameNo];

                    double fittedValAtFrame = regression != null
                        ? regression.ComputeY(integrFrameNo)
                        : double.NaN;

                    // Guard against a null regression, matching the null check above
                    bool included = regression != null && Math.Abs(fittedValAtFrame - val) < 3 * regression.StdDev;

                    #region Plotting Code
                    if (g != null)
                    {
                        if (fittingContext.IntegratedFramesCount > 1)
                        {
                            Pen mPen = included ? plottingContext.IncludedPen : plottingContext.ExcludedPen;

                            float x = (float)(integrFrameNo - minFrameId) * xScale + 5;
                            float y = (float)(val - plottingContext.MinValue) * yScale + 5;

                            g.DrawEllipse(mPen, x - 3, y - 3, 6, 6);
                            g.DrawLine(mPen, x - 5, y - 5, x + 5, y + 5);
                            g.DrawLine(mPen, x + 5, y - 5, x - 5, y + 5);
                        }
                    }
                    #endregion

                    if (included)
                    {
                        secondPassData.Add(integrFrameNo, val);
                    }
                }

                #region Second Pass
                regression = null;
                if (secondPassData.Count > 2)
                {
                    regression = new LinearRegression();
                    foreach (double frameNo in secondPassData.Keys)
                    {
                        if (fittingContext.Weighting != WeightingMode.None)
                        {
                            regression.AddDataPoint(frameNo, secondPassData[frameNo], intervalWeights[frameNo]);
                        }
                        else
                        {
                            regression.AddDataPoint(frameNo, secondPassData[frameNo]);
                        }
                    }
                    regression.Solve();
                }
                #endregion

                if (regression != null)
                {
                    #region Plotting Code
                    if (g != null)
                    {
                        double leftFittedVal  = regression.ComputeY(rv.EarliestFrame);
                        double rightFittedVal = regression.ComputeY(rv.LatestFrame);

                        double err = 3 * regression.StdDev;

                        float leftAve  = (float)(leftFittedVal - plottingContext.MinValue) * yScale + 5;
                        float rightAve = (float)(rightFittedVal - plottingContext.MinValue) * yScale + 5;
                        float leftX    = 5 + (float)(rv.EarliestFrame - rv.EarliestFrame) * xScale;
                        float rightX   = 5 + (float)(rv.LatestFrame - rv.EarliestFrame) * xScale;

                        g.DrawLine(plottingContext.AveragePen, leftX, leftAve - 1, rightX, rightAve - 1);
                        g.DrawLine(plottingContext.AveragePen, leftX, leftAve, rightX, rightAve);
                        g.DrawLine(plottingContext.AveragePen, leftX, leftAve + 1, rightX, rightAve + 1);

                        float leftMin  = (float)(leftFittedVal - err - plottingContext.MinValue) * yScale + 5;
                        float leftMax  = (float)(leftFittedVal + err - plottingContext.MinValue) * yScale + 5;
                        float rightMin = (float)(rightFittedVal - err - plottingContext.MinValue) * yScale + 5;
                        float rightMax = (float)(rightFittedVal + err - plottingContext.MinValue) * yScale + 5;

                        g.DrawLine(plottingContext.AveragePen, leftX, leftMin, rightX, rightMin);
                        g.DrawLine(plottingContext.AveragePen, leftX, leftMax, rightX, rightMax);
                    }
                    #endregion

                    if (int.MinValue != meaContext.UserMidFrame &&
                        resolvedTime != null)
                    {
                        // Find the closest video 'normal' MPC time and compute the frame number for it
                        // Now compute the RA/DE for the computed 'normal' frame

                        double fittedValueUncertainty;
                        double fittedValueAtMiddleFrame = regression.ComputeYWithError(resolvedTime.ClosestNormalFrameNo, out fittedValueUncertainty);

                        Trace.WriteLine(string.Format("{0}; Included: {1}; Normal Frame No: {2}; Fitted Val: {3} +/- {4:0.00}",
                                                      meaContext.UserMidValue.ToString("0.00000"),
                                                      numFramesUser, resolvedTime.ClosestNormalFrameNo,
                                                      AstroConvert.ToStringValue(fittedValueAtMiddleFrame, "+HH MM SS.T"),
                                                      regression.StdDev * 60 * 60));

                        // Report the interpolated position at the middle of the measured interval
                        // Don't forget to add the video normal position flag in the OBS file
                        // Expect elongated images and apply instrumental delay corrections

                        rv.FittedValue                  = fittedValueAtMiddleFrame;
                        rv.FittedValueTime              = resolvedTime.ClosestNormalFrameTime;
                        rv.IsVideoNormalPosition        = true;
                        rv.FittedNormalFrame            = resolvedTime.ClosestNormalFrameNo;
                        rv.FittedValueUncertaintyArcSec = fittedValueUncertainty * 60 * 60;

                        #region Plotting Code
                        if (g != null)
                        {
                            // Plot the frame
                            float xPos = (float)(resolvedTime.ClosestNormalFrameNo - rv.EarliestFrame) * xScale + 5;
                            float yPos = (float)(rv.FittedValue - plottingContext.MinValue) * yScale + 5;
                            g.DrawLine(Pens.Yellow, xPos, 1, xPos, imageHeight - 2);
                            g.FillEllipse(Brushes.Yellow, xPos - 3, yPos - 3, 6, 6);
                        }
                        #endregion
                    }
                    else
                    {
                        rv.FittedValue = double.NaN;
                    }
                }
                else
                {
                    rv.FittedValue = double.NaN;
                }

                return(rv);
            }
            catch (Exception ex)
            {
                Trace.WriteLine(ex.GetFullStackTrace());
                motionRate = 0;
                return(null);
            }
        }
Example #6
        private FrameTime GetTimeForFrame(FittingContext context, int frameNumber, int firstVideoFrame, GetFrameStateDataCallback getFrameStateData, DateTime? ocrTime)
        {
            var rv = new FrameTime();

            rv.RequestedFrameNo = frameNumber;

            double instrumentalDelayFrames  = 0;
            double instrumentalDelaySeconds = 0;

            if (context.InstrumentalDelayUnits == InstrumentalDelayUnits.Frames)
            {
                instrumentalDelayFrames = context.InstrumentalDelay;
            }
            else if (context.InstrumentalDelayUnits == InstrumentalDelayUnits.Seconds)
            {
                instrumentalDelaySeconds = context.InstrumentalDelay;
            }

            int    precision = 1000000;
            double sigma     = 0.000005;

            if (context.FrameTimeType == FrameTimeType.NonIntegratedFrameTime)
            {
                if (context.VideoFileFormat == VideoFileFormat.AVI)
                {
                    // No integration is used, directly derive the time and apply instrumental delay
                    rv.ResolvedFrameNo = frameNumber - firstVideoFrame;
                    rv.UT =
                        context.FirstFrameUtcTime.AddSeconds(
                            ((frameNumber - context.FirstFrameIdInIntegrationPeroid - instrumentalDelayFrames) / context.FrameRate) - instrumentalDelaySeconds);
                }
                else if (context.VideoFileFormat == VideoFileFormat.AAV2 && ocrTime.HasValue)
                {
                    rv.ResolvedFrameNo = frameNumber - firstVideoFrame;
                    var dateTime = context.FirstFrameUtcTime.Date.Add(ocrTime.Value.TimeOfDay);
                    rv.UT = dateTime.AddSeconds(-instrumentalDelayFrames / context.FrameRate - instrumentalDelaySeconds);
                }
                else
                {
                    rv.ResolvedFrameNo = frameNumber;

                    var frameState = getFrameStateData(frameNumber);
                    rv.UT = frameState.CentralExposureTime;

                    // Convert Central time to the timestamp of the end of the first field
                    rv.UT = rv.UT.AddMilliseconds(-0.5 * frameState.ExposureInMilliseconds);

                    if (context.NativeVideoFormat == "PAL")
                    {
                        rv.UT = rv.UT.AddMilliseconds(20);
                    }
                    else if (context.NativeVideoFormat == "NTSC")
                    {
                        rv.UT = rv.UT.AddMilliseconds(16.68);
                    }

                    if (context.AavStackedMode)
                    {
                        throw new NotSupportedException("AAV Stacked Mode is not supported!");
                    }
                    else
                    {
                        // Then apply instrumental delay
                        rv.UT = rv.UT.AddSeconds(-1 * instrumentalDelaySeconds);
                    }
                }
            }
            else
            {
                // Integration was used. Find the integrated frame no
                int integratedFrameNo = frameNumber - firstVideoFrame;

                double deltaMiddleFrame = 0;
                if (context.IntegratedFramesCount > 1)
                {
                    deltaMiddleFrame = (context.IntegratedFramesCount / 2) - 0.5
                                       /* This 0.5 frame correction is only used for the 'visual' mapping between where the user clicked and what is the most 'logical' normal frame to where they clicked */;
                }
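            // e.g. a hypothetical x8 integration at 25 fps gives deltaMiddleFrame = (8 / 2) - 0.5 = 3.5 frames,
            // which shifts the derived time 3.5 / 25 = 0.14 s earlier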

                // The instrumental delay is always from the timestamp of the first frame of the integrated interval

                rv.ResolvedFrameNo = integratedFrameNo;

                rv.UT =
                    context.FirstFrameUtcTime.AddSeconds(
                        ((integratedFrameNo - deltaMiddleFrame - context.FirstFrameIdInIntegrationPeroid - instrumentalDelayFrames) / context.FrameRate) - instrumentalDelaySeconds);
            }

            if (context.MovementExpectation == MovementExpectation.Slow)
            {
                precision = 100000;
                sigma     = 0.000005;
            }
            else
            {
                precision = 1000000;
                sigma     = 0.0000005;
            }

            double utTime = (rv.UT.Hour + rv.UT.Minute / 60.0 + (rv.UT.Second + (rv.UT.Millisecond / 1000.0)) / 3600.0) / 24;
            double roundedTime = Math.Truncate(Math.Round(utTime * precision)) / precision;

            rv.ClosestNormalFrameTime = new DateTime(rv.UT.Year, rv.UT.Month, rv.UT.Day).AddDays(roundedTime);

            TimeSpan delta = new TimeSpan(rv.ClosestNormalFrameTime.Ticks - rv.UT.Ticks);

            rv.ClosestNormalFrameNo = rv.ResolvedFrameNo + (delta.TotalSeconds * context.FrameRate);

            double framesEachSide = sigma * 24 * 3600 * context.FrameRate;

            rv.ClosestNormalIntervalFirstFrameNo = (int)Math.Floor(rv.ClosestNormalFrameNo - framesEachSide);
            rv.ClosestNormalIntervalLastFrameNo  = (int)Math.Ceiling(rv.ClosestNormalFrameNo + framesEachSide);

            //Trace.WriteLine(string.Format("{0} / {1}; {2} / {3}; {4} / {5}",
            //    roundedTime.ToString("0.00000"), utTime,
            //    rv.ClosestNormalFrameNo, rv.ResolvedFrameNo,
            //    rv.UT.ToString("HH:mm:ss.fff"), rv.ClosestNormalFrameTime.ToString("HH:mm:ss.fff")));

            return(rv);
        }
Example #7
        public void TestInstrumentalDelayInAVI(int integratedFrames, int measureFramesPerInterval, double frameRate, double instrumentalDelaySec, bool missFirstFrame, int? clickedFrameIndex)
        {
            // Simulates 10 measured integration intervals, with integration starting at frame 100
            // Simulated motion in RA at a rate of 2.34567" per minute
            // Simulated motion in DEC at a rate of 34.5678" per minute

            double frameDurationSec = 1.0 / frameRate;
            const double raArcSecRatePerMinute = 2.34567;
            double raArcSecRatePerIntegrationPeriod = integratedFrames * frameDurationSec * raArcSecRatePerMinute / 60.0;
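            // e.g. with hypothetical arguments integratedFrames = 10 at 25 fps:
            // 10 * 0.04 s * 2.34567"/min / 60 = ~0.0156" of RA motion per integration period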
            const double startingRaDeg = 123.0;
            const int firstFrameId = 100;
            const int numIntegrationIntervalsMeasured = 10;
            DateTime firstIntegratedFrameRealMidExposureUT = DateTime.ParseExact("2016 09 08 13:45:04.718", "yyyy MM dd HH:mm:ss.fff", CultureInfo.InvariantCulture);

            const double decArcSecRatePerMinute = 34.5678;
            double decArcSecRatePerIntegrationPeriod = integratedFrames * frameDurationSec * decArcSecRatePerMinute / 60.0;
            const double startingDecDeg = 42.0;

            DateTime midExpWithDelay = firstIntegratedFrameRealMidExposureUT.AddSeconds(instrumentalDelaySec);
            DateTime endFirstFieldTimeStamp = midExpWithDelay;

            var measurements = new Dictionary<int, SingleMultiFrameMeasurement>();

            var fittingContext = new FittingContext()
            {
                AavStackedMode = false,
                FirstFrameIdInIntegrationPeroid = firstFrameId,
                FirstFrameUtcTime = endFirstFieldTimeStamp,
                FrameRate = frameRate,
                FrameTimeType = FrameTimeType.TimeStampOfFirstIntegratedFrame,
                InstrumentalDelay = instrumentalDelaySec,
                InstrumentalDelayUnits = InstrumentalDelayUnits.Seconds,
                IntegratedFramesCount = integratedFrames,
                MovementExpectation = MovementExpectation.SlowFlyby,
                NativeVideoFormat = null,
                ObjectExposureQuality = ObjectExposureQuality.GoodSignal
            };

            var meaContext = new FlybyMeasurementContext()
            {
                FirstVideoFrame = 0, /* The first frame in the whole video file */
                MaxStdDev = 0,
                UserMidValue = 0 /* This is used for plotting only */
            };

            for (int i = 0; i < numIntegrationIntervalsMeasured; i++)
            {
                double de = startingDecDeg + (decArcSecRatePerIntegrationPeriod * i) / 3600;
                double ra = startingRaDeg + (raArcSecRatePerIntegrationPeriod * i) / 3600;

                for (int f = 0; f < measureFramesPerInterval; f++)
                {
                    var mea = new SingleMultiFrameMeasurement()
                    {
                        RADeg = ra,
                        DEDeg = de,
                        FrameNo = firstFrameId + i * integratedFrames + f,
                        Mag = 14
                    };

                    measurements.Add(mea.FrameNo, mea);
                }
            }
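            // Note: every frame within one integration interval above carries the same RA/DE,
            // mirroring how frames of an integrated video repeat the same stacked image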

            if (missFirstFrame)
                // Simulate a previous buggy condition: a missing first frame
                measurements.Remove(firstFrameId);

            meaContext.MinFrameNo = measurements.Keys.Min();
            meaContext.MaxFrameNo = measurements.Keys.Max();
            if (clickedFrameIndex == null)
                // When not specified explicitly, compute a normal position close to the last measured frame
                meaContext.UserMidFrame = meaContext.MaxFrameNo;
            else
                meaContext.UserMidFrame = meaContext.MinFrameNo + clickedFrameIndex.Value;

            var fitter = new FlyByMotionFitter();
            double motionRate;

            var raFit = fitter.FitAndPlotSlowFlyby(
                measurements, meaContext, fittingContext, FittingValue.RA,
                null, null, null, 0, 0, 0, 0, out motionRate);

            var deFit = fitter.FitAndPlotSlowFlyby(
                measurements, meaContext, fittingContext, FittingValue.DEC,
                null, null, null, 0, 0, 0, 0, out motionRate);

            Assert.IsNotNull(raFit);
            Assert.IsNotNull(deFit);

            Assert.IsTrue(raFit.IsVideoNormalPosition);
            Assert.IsTrue(deFit.IsVideoNormalPosition);

            #region Compute Expected Normal Position and Time of Normal Position

            int userFrameIntegrationInterval = (meaContext.UserMidFrame - firstFrameId) / integratedFrames;
            double userFrameIntegrationIntervalMidFrame = firstFrameId + userFrameIntegrationInterval * integratedFrames + integratedFrames / 2 - (frameDurationSec / 2 /* Correction for finding the middle even frames block */);
            DateTime userFrameIntegrationIntervalMidExposureRealTimeStamp = firstIntegratedFrameRealMidExposureUT.AddSeconds(userFrameIntegrationInterval * integratedFrames * frameDurationSec);

            double userFrameOffsetFromMidExposureSec = (meaContext.UserMidFrame - userFrameIntegrationIntervalMidFrame) * frameDurationSec;
            DateTime userFrameRealProjectedTimeStamp = userFrameIntegrationIntervalMidExposureRealTimeStamp.AddSeconds(userFrameOffsetFromMidExposureSec);

            double fractionalDaysProjectedTimeStamp = GetFractionalDays(userFrameRealProjectedTimeStamp);

            double fractionalDaysClosestNormalTimeStamp = Math.Round(fractionalDaysProjectedTimeStamp * 1E6) / 1E6;
            DateTime closestNormalTimeStamp =
                new DateTime(userFrameRealProjectedTimeStamp.Year, userFrameRealProjectedTimeStamp.Month,
                    userFrameRealProjectedTimeStamp.Day).AddDays(fractionalDaysClosestNormalTimeStamp);
            #endregion

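            // The fitter may round to the neighbouring microday, so the comparison below retries
            // shifted by +/- 0.000001 days before failing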
            TimeSpan diffExpectedMinusFitted = closestNormalTimeStamp - raFit.FittedValueTime;
            if (diffExpectedMinusFitted.TotalMilliseconds > 0.5)
            {
                diffExpectedMinusFitted = closestNormalTimeStamp.AddDays(0.000001) - raFit.FittedValueTime;
                if (diffExpectedMinusFitted.TotalMilliseconds > 0.5)
                {
                    diffExpectedMinusFitted = closestNormalTimeStamp.AddDays(-0.000001) - raFit.FittedValueTime;
                    if (diffExpectedMinusFitted.TotalMilliseconds > 0.5)
                    {
                        Assert.Fail("Normal Time Difference. Expected: {0:0.0000000}, Actual: {1:0.0000000}",
                                GetFractionalDays(closestNormalTimeStamp),
                                GetFractionalDays(raFit.FittedValueTime));
                    }
                }
            }

            // Make sure we calculate the position for the selected normal frame by the fitter, which may be 1 microday away from ours
            closestNormalTimeStamp = raFit.FittedValueTime;

            TimeSpan diffNormalMinusStartTime = closestNormalTimeStamp - firstIntegratedFrameRealMidExposureUT;
            double raAtNormalTimeArcSec = startingRaDeg * 3600 + raArcSecRatePerMinute * diffNormalMinusStartTime.TotalMinutes;
            double deAtNormalTimeArcSec = startingDecDeg * 3600 + decArcSecRatePerMinute * diffNormalMinusStartTime.TotalMinutes;

            double fittedRaArcSec = raFit.FittedValue * 3600;
            double fittedDecArcSec = deFit.FittedValue * 3600;

            Trace.WriteLine(string.Format("RA-Diff={0}\", DEC-Diff-{1}\"", Math.Abs(raAtNormalTimeArcSec - fittedRaArcSec), Math.Abs(deAtNormalTimeArcSec - fittedDecArcSec)));

            Assert.AreEqual(raAtNormalTimeArcSec, fittedRaArcSec, 0.1 /* 0.1 arcsec */);
            Assert.AreEqual(deAtNormalTimeArcSec, fittedDecArcSec, 0.1 /* 0.1 arcsec */);
        }