Example #1
 public override void Show(object value)
 {
     if (value != null)
     {
         // Cache the tail points and image size, then delegate image display to the base visualizer.
         if (value is TailAngleData<double>)
         {
             TailAngleData<double> newValue = (TailAngleData<double>)value;
             if (newValue.Image != null)
             {
                 tailPoints = newValue.Points;
                 imageSize = newValue.Image.Size;
                 base.Show(newValue.Image);
             }
         }
         else if (value is TailAngleData<double[]>)
         {
             TailAngleData<double[]> newValue = (TailAngleData<double[]>)value;
             if (newValue.Image != null)
             {
                 tailPoints = newValue.Points;
                 imageSize = newValue.Image.Size;
                 base.Show(newValue.Image);
             }
         }
     }
     else if (tailPoints.Length != 0)
     {
         // Clear the cached points when the sequence produces a null value.
         tailPoints = new Point2f[0];
     }
 }
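The TailAngleData<TResult> type itself is not shown on this page. A minimal sketch of the shape assumed by Example #1 could look like the following; only the Image and Points members are implied by the code above, while the Angle property and the constructor are assumptions:

 // Hypothetical sketch of the payload type used in Example #1. Image and Points are
 // implied by the usage above; Angle and the constructor are assumptions.
 public class TailAngleData<TResult>
 {
     public IplImage Image { get; private set; }
     public Point2f[] Points { get; private set; }
     public TResult Angle { get; private set; }

     public TailAngleData(IplImage image, Point2f[] points, TResult angle)
     {
         Image = image;
         Points = points;
         Angle = angle;
     }
 }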
Example #2
 void RenderRegion(Point[] region, PrimitiveType mode, Color color, Size imageSize)
 {
     // Emit the region vertices with the requested primitive mode, converting each
     // pixel coordinate to OpenGL normalized device coordinates first.
     GL.Color4(color);
     GL.Begin(mode);
     for (int i = 0; i < region.Length; i++)
     {
         GL.Vertex2(DrawingHelper.NormalizePoint(region[i], imageSize));
     }
     GL.End();
 }
Example #3
 public static Vector2 NormalizePoint(Point2f point, OpenCV.Net.Size imageSize)
 {
     // Map a pixel coordinate to OpenGL normalized device coordinates: both axes are
     // scaled to [-1, 1] and the y-axis is flipped, so the top-left pixel maps to (-1, 1).
     return new Vector2(
         (point.X * 2f / imageSize.Width) - 1,
         -((point.Y * 2f / imageSize.Height) - 1));
 }
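As a quick check of this mapping (assuming NormalizePoint is the DrawingHelper method referenced in Example #2), the image origin lands on the top-left corner of the OpenGL viewport and the opposite corner on the bottom-right:

 // Sanity check of the Example #3 mapping for a 640x480 image.
 var size = new OpenCV.Net.Size(640, 480);
 var topLeft = DrawingHelper.NormalizePoint(new Point2f(0, 0), size);         // (-1,  1)
 var center = DrawingHelper.NormalizePoint(new Point2f(320, 240), size);      // ( 0,  0)
 var bottomRight = DrawingHelper.NormalizePoint(new Point2f(640, 480), size); // ( 1, -1)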
Example #4
 public static Vector2 NormalizePointForTailAngle(Point2f point, double angle, OpenCV.Net.Size imageSize)
 {
     // Offset the point by 10 pixels along the direction given by angle, then apply
     // the same pixel-to-normalized-coordinates mapping as NormalizePoint.
     return new Vector2(
         ((point.X + (float)Math.Cos(angle) * 10f) * 2f / imageSize.Width) - 1,
         -(((point.Y + (float)Math.Sin(angle) * 10f) * 2f / imageSize.Height) - 1));
 }
Example #5
        public uEyeCapture()
        {
            source = Observable.Create<uEyeDataFrame>(observer =>
            {
                var deviceId = DeviceId;
                var camera   = new Camera();
                try
                {
                    // Initialize the camera, optionally targeting a specific device id.
                    var statusRet = deviceId.HasValue ? camera.Init(deviceId.Value | (int)DeviceEnumeration.UseDeviceID) : camera.Init();
                    HandleResult(statusRet);

                    if (!string.IsNullOrEmpty(ConfigFile))
                    {
                        statusRet = camera.Parameter.Load(ConfigFile);
                        HandleResult(statusRet);
                    }

                    // Allocate image memory and query the layout of the active buffer.
                    statusRet = camera.Memory.Allocate();
                    HandleResult(statusRet);

                    Int32 s32MemID;
                    statusRet = camera.Memory.GetActive(out s32MemID);
                    HandleResult(statusRet);

                    int frameWidth;
                    statusRet = camera.Memory.GetWidth(s32MemID, out frameWidth);
                    HandleResult(statusRet);

                    int frameHeight;
                    statusRet = camera.Memory.GetHeight(s32MemID, out frameHeight);
                    HandleResult(statusRet);

                    int s32Bpp;
                    statusRet = camera.Memory.GetBitsPerPixel(s32MemID, out s32Bpp);
                    HandleResult(statusRet);

                    ColorMode colorMode;
                    statusRet = camera.PixelFormat.Get(out colorMode);
                    HandleResult(statusRet);

                    var frameSize = new OpenCV.Net.Size(frameWidth, frameHeight);
                    var depth     = GetImageDepth(colorMode);
                    var channels  = s32Bpp / (int)depth;
                    statusRet     = camera.Memory.Allocate();
                    HandleResult(statusRet);

                    // On every frame event, wrap the active driver buffer in an IplImage
                    // header and push a clone of it to the observer.
                    camera.EventFrame += (sender, e) =>
                    {
                        Int32 activeMemID;
                        camera.Memory.GetActive(out activeMemID);

                        IntPtr imageBuffer;
                        camera.Memory.ToIntPtr(activeMemID, out imageBuffer);

                        ImageInfo imageInfo;
                        camera.Information.GetImageInfo(activeMemID, out imageInfo);

                        using (var output = new IplImage(frameSize, depth, channels, imageBuffer))
                        {
                            observer.OnNext(new uEyeDataFrame(output.Clone(), imageInfo));
                        }
                    };

                    // Start continuous acquisition; frames are delivered through EventFrame.
                    statusRet = camera.Acquisition.Capture();
                    HandleResult(statusRet);
                }
                catch
                {
                    camera.Exit();
                    throw;
                }

                // The returned teardown action stops acquisition and releases the camera
                // when the last observer unsubscribes.
                return () =>
                {
                    camera.Acquisition.Stop();
                    camera.Exit();
                };
            })
            .PublishReconnectable()
            .RefCount();
        }
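The constructor only builds the shared sequence; the camera does not start until someone subscribes. Below is a hypothetical consumer sketch: it assumes uEyeCapture exposes the source field through a Generate() method returning IObservable<uEyeDataFrame>, and that uEyeDataFrame has an Image property wrapping the cloned IplImage; neither member is shown on this page.

 // Hypothetical consumer of Example #5 (Generate and Image are assumed member names).
 // PublishReconnectable().RefCount() shares a single camera connection: acquisition
 // starts on the first subscription and the teardown above runs when the last
 // subscription is disposed.
 var capture = new uEyeCapture();
 using (capture.Generate().Subscribe(
     frame => Console.WriteLine("Frame: {0}x{1}", frame.Image.Width, frame.Image.Height),
     error => Console.WriteLine("Camera error: {0}", error.Message)))
 {
     Console.ReadLine();
 }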