Code example #1
File: VideoFrame.cs  Project: hpavlov/occurec
        internal static VideoFrame CreateFrameVariant(int width, int height, VideoCameraFrame cameraFrame)
        {
            return InternalCreateFrame(width, height, cameraFrame, true);
        }
Code example #2
File: VideoFrame.cs  Project: hpavlov/occurec
        private static VideoFrame InternalCreateFrame(int width, int height, VideoCameraFrame cameraFrame, bool variant)
        {
            var rv = new VideoFrame();

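            // Monochrome: pixels form a 2D [height, width] array. The 'variant' flag
            // selects a boxed object[,] copy instead of the raw int[,].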
            if (cameraFrame.ImageLayout == VideoFrameLayout.Monochrome)
            {
                if (variant)
                {
                    rv.pixelsVariant = new object[height, width];
                    rv.pixels = null;
                }
                else
                {
                    rv.pixels = new int[height, width];
                    rv.pixelsVariant = null;
                }

                if (variant)
                    Array.Copy((int[,])cameraFrame.Pixels, (object[,])rv.pixelsVariant, ((int[,])cameraFrame.Pixels).Length);
                else
                    rv.pixels = (int[,])cameraFrame.Pixels;
            }
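            // Color: same idea, with a third dimension of size 3 for the color planes.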
            else if (cameraFrame.ImageLayout == VideoFrameLayout.Color)
            {
                if (variant)
                {
                    rv.pixelsVariant = new object[height, width, 3];
                    rv.pixels = null;
                }
                else
                {
                    rv.pixels = new int[height, width, 3];
                    rv.pixelsVariant = null;
                }

                if (variant)
                    Array.Copy((int[, ,])cameraFrame.Pixels, (object[, ,])rv.pixelsVariant, ((int[, ,])cameraFrame.Pixels).Length);
                else
                    rv.pixels = (int[, ,])cameraFrame.Pixels;
            }
            else if (cameraFrame.ImageLayout == VideoFrameLayout.BayerRGGB)
            {
                throw new NotSupportedException();
            }
            else
                throw new NotSupportedException();

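            // Carry over the preview bitmap, frame numbering, timing and status text
            // from the camera frame.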
            rv.previewBitmap = cameraFrame.PreviewBitmap;

            rv.frameNumber = cameraFrame.FrameNumber;
            rv.exposureStartTime = new DateTime(cameraFrame.ImageStatus.StartExposureSystemTime).ToString("HH:mm:ss.fff");
            rv.exposureDuration = null;
            rv.imageInfo = string.Format("INT:{0};SFID:{1};EFID:{2};CTOF:{3};UFID:{4};IFID:{5};DRPD:{6}",
                cameraFrame.ImageStatus.DetectedIntegrationRate,
                cameraFrame.ImageStatus.StartExposureFrameNo,
                cameraFrame.ImageStatus.EndExposureFrameNo,
                cameraFrame.ImageStatus.CutOffRatio,
                cameraFrame.ImageStatus.UniqueFrameNo,
                cameraFrame.ImageStatus.IntegratedFrameNo,
                cameraFrame.ImageStatus.DropedFramesSinceIntegrationLock);

            rv.MaxSignalValue = (uint)(cameraFrame.ImageStatus.DetectedIntegrationRate*255);

            if (cameraFrame.ImageStatus.PerformedAction > 0)
            {
                rv.imageInfo += string.Format(";ACT:{0};ACT%:{1}", cameraFrame.ImageStatus.PerformedAction, cameraFrame.ImageStatus.PerformedActionProgress);
            }

            if (cameraFrame.ImageStatus.OcrWorking > 0)
            {
                rv.imageInfo += string.Format(";ORER:{0}", cameraFrame.ImageStatus.OcrErrorsSinceLastReset);
            }

            if (cameraFrame.ImageStatus.UserIntegratonRateHint > 0)
            {
                rv.imageInfo += string.Format(";USRI:{0}", cameraFrame.ImageStatus.UserIntegratonRateHint);
            }

            if (TrackingContext.Current.IsTracking)
            {
                TrackingContext.Current.UpdateFromFrameStatus(cameraFrame.FrameNumber, cameraFrame.ImageStatus);
            }
            return rv;
        }
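For orientation, here is a minimal calling sketch (not part of the project code) showing how a captured VideoCameraFrame might be handed to the factory above from within the same assembly (CreateFrameVariant is internal). The videoCapture instance and the imageWidth/imageHeight values are assumptions taken from the pattern of example #3 below.

        // Illustrative only: wire a captured VideoCameraFrame into the factory above.
        // videoCapture, imageWidth and imageHeight are assumed to exist in the caller.
        VideoCameraFrame cameraFrame;
        if (videoCapture.GetCurrentFrame(out cameraFrame))
        {
            // Builds the boxed (object[,]) representation; InternalCreateFrame is
            // called with variant = true, so the int[,] pixels field stays null.
            VideoFrame variantFrame = VideoFrame.CreateFrameVariant(imageWidth, imageHeight, cameraFrame);
        }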
Code example #3
File: VideoCapture.cs  Project: hpavlov/occurec
        public bool GetCurrentFrame(out VideoCameraFrame cameraFrame)
        {
            ImageStatus status;
            Bitmap bmp = dsCapture.GetNextFrame(out status);

            if (bmp != null)
            {
                object pixels = cameraImageHelper.GetImageArray(bmp, SimulatedSensorType, LumaConversionMode.R, Settings.Default.HorizontalFlip, Settings.Default.VerticalFlip);
                //object pixels = ImageUtils.GetPixelArray(bmp.Width, bmp.Height, bmp);
                cameraFrame = new VideoCameraFrame()
                {
                    FrameNumber = status.IntegratedFrameNo,
                    UniqueFrameNumber = status.UniqueFrameNo,
                    ImageStatus = status,
                    Pixels = pixels,
                    PreviewBitmap = bmp,
                    ImageLayout = VideoFrameLayout.Monochrome
                };

                return true;
            }

            cameraFrame = null;
            return false;
        }
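As a usage note, a minimal consumer sketch (assumed, not project code): GetCurrentFrame returns false whenever dsCapture yields no bitmap, so a simple polling loop stops at that point. The capture instance and the ProcessFrame handler are hypothetical names used only for illustration.

        // Illustrative consumer loop; ProcessFrame is a hypothetical handler.
        VideoCameraFrame cameraFrame;
        while (capture.GetCurrentFrame(out cameraFrame))
        {
            // Per the method above, Pixels holds a monochrome pixel array and
            // PreviewBitmap holds the raw Bitmap delivered by dsCapture.
            ProcessFrame(cameraFrame);
        }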