Exemplo n.º 1
0
        /// <summary>
        /// Retrieves the current simulated camera frame together with its number and
        /// processing status. Returns false (with empty out values) when not running.
        /// </summary>
        public bool GetCurrentFrame(out Bitmap cameraFrame, out int frameNumber, out FrameProcessingStatus status)
        {
            // Not running: hand back empty results and report failure.
            if (!IsRunning)
            {
                cameraFrame = null;
                frameNumber = -1;
                status = FrameProcessingStatus.Empty;
                return false;
            }

            cameraFrame = null;
            frameNumber = m_FrameCounter;
            status = new FrameProcessingStatus();

            // Once at least one frame has been produced, pull the current image and
            // its processing status from the native layer.
            if (m_FrameCounter > 0)
            {
                ImageStatus imgStatus;
                cameraFrame = NativeHelpers.GetCurrentImage(out imgStatus);
                status = new FrameProcessingStatus(imgStatus);
            }

            return true;
        }
Exemplo n.º 2
0
        /// <summary>
        /// Retrieves the current frame of the simulated AAV stream, its frame number
        /// and processing status. Returns false (with empty out values) when not running.
        /// </summary>
        public bool GetCurrentFrame(out Bitmap bmp, out int frameNumber, out FrameProcessingStatus status)
        {
            // Not running: hand back empty results and report failure.
            if (!IsRunning)
            {
                bmp = null;
                frameNumber = -1;
                status = FrameProcessingStatus.Empty;
                return false;
            }

            long snappedFrameNo = 0;
            var snappedStatus = new FrameProcessingStatus();

            // Take a consistent snapshot of the frame counter and status under the
            // non-blocking lock shared with the simulated capture loop.
            NonBlockingLock.Lock(
                NonBlockingLock.LOCK_ID_GetNextFrame,
                () =>
                {
                    // Map the monotonically increasing counter onto the stream's frame range.
                    snappedFrameNo = aavStream.FirstFrame + (frameCounter % (aavStream.LastFrame - aavStream.FirstFrame));
                    snappedStatus = FrameProcessingStatus.Clone(frameStatus);
                });

            frameNumber = (int)snappedFrameNo;
            status = snappedStatus;

            if (fullAAVSimulation)
            {
                // Full simulation: the image and its status come from the native pipeline.
                ImageStatus imgStatus;
                bmp = NativeHelpers.GetCurrentImage(out imgStatus);
                status = new FrameProcessingStatus(imgStatus);
            }
            else
            {
                // Otherwise serve the frame directly from the AAV stream.
                bmp = aavStream.GetFrame(frameNumber);
            }

            return true;
        }
Exemplo n.º 3
0
        /// <summary>
        /// Background loop that simulates a camera: advances the frame counter at the
        /// configured frame rate, optionally pushes frames through the native AAV
        /// pipeline and/or the managed OCR tester, and publishes the results.
        /// </summary>
        private void Run(object state)
        {
            // Delay between simulated frames, derived from the configured frame rate.
            int frameIntervalMs = (int)(1000 / frameRate);

            while (IsRunning)
            {
                long newFrameCounter = frameCounter + 1;
                FrameProcessingStatus newFrameStatus = FrameProcessingStatus.Empty;

                Thread.Sleep(frameIntervalMs);

                if (Settings.Default.SimulatorRunOCR || fullAAVSimulation)
                {
                    // Map the monotonically increasing counter onto the stream's frame range.
                    long frameNo = aavStream.FirstFrame + (frameCounter % (aavStream.LastFrame - aavStream.FirstFrame));

                    using (Bitmap bmp = aavStream.GetFrame((int)frameNo))
                    {
                        int[,] pixels = ImageUtils.GetPixelArray(aavStream.Width, aavStream.Height, bmp, AdvImageSection.GetPixelMode.Raw8Bit);

                        // Full AAV simulation: feed the frame through the native pipeline.
                        if (fullAAVSimulation && pixels != null)
                            newFrameStatus = NativeHelpers.ProcessVideoFrame2(pixels);

                        // Managed OCR path (native OCR disabled): run the tester and
                        // report results through the callbacks object.
                        if (Settings.Default.SimulatorRunOCR &&
                            !Settings.Default.OcrSimulatorNativeCode &&
                            ocrTester != null)
                        {
                            OsdFrameInfo frameInfo = ocrTester.ProcessFrame(pixels, frameNo);
                            if (callbacksObject != null && frameInfo != null)
                            {
                                callbacksObject.OnEvent(0, frameInfo.ToDisplayString());
                                if (!frameInfo.FrameInfoIsOk())
                                    callbacksObject.OnEvent(1, null);
                            }
                        }
                    }
                }

                // Publish the new counter and status atomically with respect to readers.
                NonBlockingLock.Lock(
                    NonBlockingLock.LOCK_ID_BufferCB,
                    () =>
                    {
                        frameCounter = newFrameCounter;
                        frameStatus = FrameProcessingStatus.Clone(newFrameStatus);
                    });
            }
        }
Exemplo n.º 4
0
        /// <summary>
        /// Retrieves the current simulated frame, its number and processing status.
        /// Returns false (with empty out values) when the simulator is not running.
        /// </summary>
        public bool GetCurrentFrame(out Bitmap cameraFrame, out int frameNumber, out FrameProcessingStatus status)
        {
            // Not running: hand back empty results and report failure.
            if (!IsRunning)
            {
                cameraFrame = null;
                frameNumber = -1;
                status = FrameProcessingStatus.Empty;
                return false;
            }

            long snappedFrameNo = 0;
            var snappedStatus = new FrameProcessingStatus();

            // Take a consistent snapshot of the frame counter and status under the
            // non-blocking lock shared with the simulated capture loop.
            NonBlockingLock.Lock(
                NonBlockingLock.LOCK_ID_GetNextFrame,
                () =>
                {
                    // Wrap the monotonically increasing counter into [0, m_FrameCount).
                    snappedFrameNo = frameCounter % m_FrameCount;
                    snappedStatus = FrameProcessingStatus.Clone(frameStatus);
                });

            frameNumber = (int)snappedFrameNo;
            status = snappedStatus;

            if (fullAAVSimulation)
            {
                // Full simulation: the image and its status come from the native pipeline.
                ImageStatus imgStatus;
                cameraFrame = NativeHelpers.GetCurrentImage(out imgStatus);
                status = new FrameProcessingStatus(imgStatus);
            }
            else
            {
                // Otherwise derive the frame's timestamp and render the image for that moment.
                double frameTime = ConvertFrameNumberToSeconds(frameNumber);
                cameraFrame = GetImageAtTime(frameTime);
            }

            return true;
        }
Exemplo n.º 5
0
        /// <summary>
        /// Copies the native tracker's per-frame results into the target and guiding
        /// star models, and records the frame number when anything was updated.
        /// </summary>
        /// <param name="frameNo">Number of the frame the status belongs to.</param>
        /// <param name="status">Per-frame processing status produced by the native tracker.</param>
        public void UpdateFromFrameStatus(long frameNo, FrameProcessingStatus status)
        {
            bool anyStarUpdated = false;

            // Refresh the target star whenever a target object is configured and tracked.
            if (TrackedObjectId != -1 && status.TrkdTargetIsTracked > 0)
            {
                TargetStar.X = status.TrkdTargetXPos;
                TargetStar.Y = status.TrkdTargetYPos;
                TargetStar.FWHM = status.TrkdTargetPsfInfo.FWHM;
                TargetStar.IsLocated = status.TrkdTargetIsLocated > 0;
                TargetStar.HasSaturatedPixels = status.TrkdTargetHasSaturatedPixels > 0;
                TargetStar.Measurement = status.TrkdTargetMeasurement;
                TargetStar.PsfFit.LoadFromNativePsfFitInfo(status.TrkdTargetPsfInfo, status.TrkdTargetResiduals);

                anyStarUpdated = true;
            }

            // Same refresh for the guiding star.
            if (GuidingObjectId != -1 && status.TrkdGuidingIsTracked > 0)
            {
                GuidingStar.X = status.TrkdGuidingXPos;
                GuidingStar.Y = status.TrkdGuidingYPos;
                GuidingStar.FWHM = status.TrkdGuidingPsfInfo.FWHM;
                GuidingStar.IsLocated = status.TrkdGuidingIsLocated > 0;
                GuidingStar.HasSaturatedPixels = status.TrkdGuidingHasSaturatedPixels > 0;
                GuidingStar.Measurement = status.TrkdGuidingMeasurement;
                GuidingStar.PsfFit.LoadFromNativePsfFitInfo(status.TrkdGuidingPsfInfo, status.TrkdGuidingResiduals);

                anyStarUpdated = true;
            }

            // Remember the last frame for which tracking data was applied.
            if (anyStarUpdated)
                LastTrackedFrameNo = frameNo;
        }
Exemplo n.º 6
0
        /// <summary>
        /// Builds a <see cref="BasicVideoFrame"/> from a camera bitmap and its processing
        /// status: extracts the pixel array, formats exposure timing and diagnostic info
        /// strings, and forwards tracking data to the current tracking context.
        /// </summary>
        /// <param name="width">Frame width in pixels.</param>
        /// <param name="height">Frame height in pixels.</param>
        /// <param name="cameraFrame">The bitmap to wrap; also kept as the preview image.</param>
        /// <param name="fameNumber">The frame number (parameter name typo, "fameNumber", kept as-is).</param>
        /// <param name="variant">NOTE(review): not used anywhere in this body (pixelsVariant is
        /// always null) — confirm whether variant-frame support is unfinished or handled elsewhere.</param>
        /// <param name="status">Per-frame processing status from the native pipeline.</param>
        private static BasicVideoFrame InternalCreateFrame(int width, int height, Bitmap cameraFrame, int fameNumber, bool variant, FrameProcessingStatus status)
        {
            var rv = new BasicVideoFrame();

            rv.pixels = ImageUtils.GetPixelArray(width, height, cameraFrame);
            rv.previewBitmap = cameraFrame;
            rv.pixelsVariant = null;

            // TODO: Set these from the unmanaged OCR data, when native OCR is running

            rv.frameNumber = fameNumber;

            // An Empty status carries no timing/diagnostic information at all.
            if (status.Equals(FrameProcessingStatus.Empty))
            {
                rv.exposureStartTime = null;
                rv.exposureDuration = null;
                rv.imageInfo = null;
            }
            else
            {
                if (status.StartExposureSystemTime > 0)
                {
                    // Compose "start - end | startFrame - endFrame" in a single string.
                    // The trailing " - " / " | " separators are literals inside the
                    // custom format strings. Empty catch: a malformed tick value must
                    // not break frame delivery, so formatting failures are ignored.
                    try
                    {
                        rv.exposureStartTime =
                            new DateTime(status.StartExposureSystemTime).ToString("yyyy/MM/dd HH:mm:ss ffff - ") +
                            new DateTime(status.EndExposureSystemTime).ToString("yyyy/MM/dd HH:mm:ss ffff | ") +
                            status.StartExposureFrameNo.ToString("0 - ") +
                            status.EndExposureFrameNo.ToString("0");
                    }
                    catch { }

                    // Exposure duration in milliseconds from the start/end tick difference;
                    // again best-effort, ignoring out-of-range tick values.
                    try
                    {
                        rv.exposureDuration = new TimeSpan(status.EndExposureSystemTime - status.StartExposureSystemTime).TotalMilliseconds;
                    }
                    catch { }
                }
                else
                {
                    rv.exposureStartTime = null;
                    rv.exposureDuration = null;
                }

                // Diagnostic summary: integration rate, exposure frame range, signature
                // ratio, camera/integrated frame numbers and dropped-frame count.
                rv.imageInfo = string.Format("INT:{0};SFID:{1};EFID:{2};CTOF:{3};UFID:{4};IFID:{5};DRPD:{6}",
                    status.DetectedIntegrationRate,
                    status.StartExposureFrameNo,
                    status.EndExposureFrameNo,
                    status.CurrentSignatureRatio,
                    status.CameraFrameNo,
                    status.IntegratedFrameNo,
                    status.DropedFramesSinceIntegrationLock);

                // NOTE(review): max signal is derived as integrationRate * 255 — presumably
                // because integrated 8-bit frames saturate at rate * 255; confirm intent.
                rv.MaxSignalValue = (uint)(status.DetectedIntegrationRate * 255);

                // Optional diagnostic suffixes, appended only when the feature is active.
                if (status.PerformedAction > 0)
                {
                    rv.imageInfo += string.Format(";ACT:{0};ACT%:{1}", status.PerformedAction, status.PerformedActionProgress);
                }

                if (status.OcrWorking > 0)
                {
                    rv.imageInfo += string.Format(";ORER:{0}", status.OcrErrorsSinceLastReset);
                }

                if (status.UserIntegratonRateHint > 0)
                {
                    rv.imageInfo += string.Format(";USRI:{0}", status.UserIntegratonRateHint);
                }

                // Forward the tracker output so target/guiding star models stay current.
                if (TrackingContext.Current.IsTracking)
                {
                    TrackingContext.Current.UpdateFromFrameStatus(fameNumber, status);
                }
            }

            return rv;
        }
Exemplo n.º 7
0
 /// <summary>
 /// Builds a "variant" video frame by delegating to the shared internal
 /// frame factory with the variant flag set.
 /// </summary>
 internal static BasicVideoFrame CreateFrameVariant(int width, int height, Bitmap cameraFrame, int fameNumber, FrameProcessingStatus status)
 {
     // All construction logic lives in InternalCreateFrame; this wrapper only
     // fixes the variant argument to true.
     return InternalCreateFrame(width, height, cameraFrame, fameNumber, true, status);
 }
Exemplo n.º 8
0
        /// <summary>
        /// Processes a managed pixel array through the native video pipeline, stamping
        /// it with NTP-corrected and system-clock timestamps and the current camera settings.
        /// </summary>
        /// <param name="pixels">Raw pixel values of the frame to process.</param>
        /// <returns>The frame status filled in by the native processing call.</returns>
        public static FrameProcessingStatus ProcessVideoFrame2(int[,] pixels)
        {
            // NTP-synchronised capture time, shifted back by the configured hardware delay.
            double ntpTimeError;
            long ntpTicks = NTPTimeKeeper.UtcNow(out ntpTimeError).AddMilliseconds(-1 * Settings.Default.NTPTimingHardwareCorrection).Ticks;

            // Secondary (system clock) timestamp taken alongside the NTP one.
            long secondaryTicks = DateTime.UtcNow.Ticks;

            // Pre-allocate the buffers the native side fills in via the ref parameter.
            var frameStatus = new FrameProcessingStatus
            {
                TrkdTargetResiduals = new double[290],
                TrkdGuidingResiduals = new double[290],
                TrkdTargetPsfInfo = new NativePsfFitInfo(),
                TrkdGuidingPsfInfo = new NativePsfFitInfo()
            };

            // Start-of-day reference used by the native timestamp handling.
            long utcDayTicks = DateTime.UtcNow.Date.Ticks;

            ProcessVideoFrame2(pixels, utcDayTicks, ntpTicks, ntpTimeError, secondaryTicks, CurrentCameraGain, CurrentCameraGamma, CurrentTemperature, CurrentCameraExposure, ref frameStatus);

            return frameStatus;
        }
Exemplo n.º 9
0
        /// <summary>
        /// Processes a raw bitmap buffer through the native video pipeline, stamping it
        /// with NTP-corrected and system-clock timestamps and the current camera settings.
        /// </summary>
        /// <param name="bitmapData">Pointer to the unmanaged bitmap data of the frame.</param>
        /// <returns>The frame status filled in by the native processing call.</returns>
        public static FrameProcessingStatus ProcessVideoFrame(IntPtr bitmapData)
        {
            // NTP-synchronised capture time from the internal high precision clock,
            // shifted back by the configured hardware delay.
            double ntpTimeError;
            long ntpTicks = NTPTimeKeeper.UtcNow(out ntpTimeError).AddMilliseconds(-1 * Settings.Default.NTPTimingHardwareCorrection).Ticks;

            // Secondary (system clock) timestamp taken alongside the NTP one.
            long secondaryTicks = DateTime.UtcNow.Ticks;

            // Pre-allocate the buffers the native side fills in via the ref parameter.
            var frameStatus = new FrameProcessingStatus
            {
                TrkdTargetResiduals = new double[290],
                TrkdGuidingResiduals = new double[290],
                TrkdTargetPsfInfo = new NativePsfFitInfo(),
                TrkdGuidingPsfInfo = new NativePsfFitInfo()
            };

            // Start-of-day reference used by the native timestamp handling.
            long utcDayTicks = DateTime.UtcNow.Date.Ticks;

            ProcessVideoFrame(bitmapData, utcDayTicks, ntpTicks, ntpTimeError, secondaryTicks, CurrentCameraGain, CurrentCameraGamma, CurrentTemperature, CurrentCameraExposure, ref frameStatus);

            return frameStatus;
        }
Exemplo n.º 10
0
        /// <summary>
        /// Creates an independent copy of <paramref name="cloneFrom"/> so callers can keep
        /// a snapshot of the frame status while the producer thread overwrites the original.
        /// </summary>
        /// <param name="cloneFrom">The status to copy.</param>
        /// <returns>A copy whose residual arrays are not shared with the source.</returns>
        public static FrameProcessingStatus Clone(FrameProcessingStatus cloneFrom)
        {
            var rv = new FrameProcessingStatus();
            rv.CameraFrameNo = cloneFrom.CameraFrameNo;
            rv.IntegratedFrameNo = cloneFrom.IntegratedFrameNo;
            rv.CountedFrames = cloneFrom.CountedFrames;
            rv.FrameDiffSignature = cloneFrom.FrameDiffSignature;
            rv.CurrentSignatureRatio = cloneFrom.CurrentSignatureRatio;
            rv.PerformedAction = cloneFrom.PerformedAction;
            rv.PerformedActionProgress = cloneFrom.PerformedActionProgress;
            rv.TrkdGuidingXPos = cloneFrom.TrkdGuidingXPos;
            rv.TrkdGuidingYPos = cloneFrom.TrkdGuidingYPos;
            rv.TrkdGuidingIsTracked = cloneFrom.TrkdGuidingIsTracked;
            rv.TrkdGuidingMeasurement = cloneFrom.TrkdGuidingMeasurement;
            rv.TrkdTargetXPos = cloneFrom.TrkdTargetXPos;
            rv.TrkdTargetYPos = cloneFrom.TrkdTargetYPos;
            rv.TrkdTargetIsTracked = cloneFrom.TrkdTargetIsTracked;
            rv.TrkdTargetMeasurement = cloneFrom.TrkdTargetMeasurement;
            rv.TrkdTargetIsLocated = cloneFrom.TrkdTargetIsLocated;
            rv.TrkdGuidingIsLocated = cloneFrom.TrkdGuidingIsLocated;
            rv.TrkdTargetHasSaturatedPixels = cloneFrom.TrkdTargetHasSaturatedPixels;
            rv.TrkdGuidingHasSaturatedPixels = cloneFrom.TrkdGuidingHasSaturatedPixels;

            // BUGFIX: the residual arrays were previously copied by reference, so the
            // "clone" shared them with the source and later writes by the producer
            // silently mutated the snapshot taken under the non-blocking lock.
            // Deep-copy them (null-safe) so the clone is truly independent.
            rv.TrkdTargetResiduals = cloneFrom.TrkdTargetResiduals != null
                ? (double[])cloneFrom.TrkdTargetResiduals.Clone()
                : null;
            rv.TrkdGuidingResiduals = cloneFrom.TrkdGuidingResiduals != null
                ? (double[])cloneFrom.TrkdGuidingResiduals.Clone()
                : null;

            // Also carry over the PSF fit info: UpdateFromFrameStatus reads these from
            // cloned statuses (FWHM, LoadFromNativePsfFitInfo) and they were dropped before.
            // NOTE(review): assumes NativePsfFitInfo is a value type (native interop
            // struct) so assignment copies it — confirm; if it is a class this shares it.
            rv.TrkdTargetPsfInfo = cloneFrom.TrkdTargetPsfInfo;
            rv.TrkdGuidingPsfInfo = cloneFrom.TrkdGuidingPsfInfo;

            // NOTE(review): exposure/integration fields (e.g. StartExposureSystemTime,
            // DetectedIntegrationRate) are still not copied here — confirm whether
            // callers that pass a cloned status to InternalCreateFrame rely on them.
            return rv;
        }