public void GetSoftWareFrame()
        {
            using (new TimeTicker("AAA 显示消耗的时间"))
            {
                getonecamera.StreamGrabber.Start(1);
                while (getonecamera.StreamGrabber.IsGrabbing)
                {
                    using (new TimeTicker("AAA 等待时间"))
                    {
                        if (getonecamera.WaitForFrameTriggerReady(5, TimeoutHandling.ThrowException))
                        {
                            getonecamera.ExecuteSoftwareTrigger();
                        }
                    }

                    using (new TimeTicker("AAA 时间"))
                    {
                        // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
                        IGrabResult grabResult = null;
                        using (new TimeTicker("AAA RetrieveResult"))
                        {
                            // grabResult = getonecamera.StreamGrabber.GrabOne(1000);
                            grabResult = getonecamera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException);
                        }

                        using (grabResult)
                        {
                            // Image grabbed successfully?
                            if (grabResult.GrabSucceeded)
                            {
                                Bitmap bitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
                                // Lock the bits of the bitmap.
                                BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
                                // Place the pointer to the buffer of the bitmap.
                                converter.OutputPixelFormat = PixelType.BGRA8packed;
                                IntPtr ptrBmp = bmpData.Scan0;
                                converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult); //Exception handling TODO
                                bitmap.UnlockBits(bmpData);
                                // Hand a clone to the callback so the original bitmap can be disposed here.
                                Bitmap temp = (Bitmap)bitmap.Clone();
                                CallFunction(null, temp);
                                bitmap.Dispose();

                                GC.Collect();
                            }
                            else
                            {
                                Console.WriteLine("Error: {0} {1}", grabResult.ErrorCode, grabResult.ErrorDescription);
                            }
                        }
                    }
                }
            }
        }
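TimeTicker is not part of the Basler pylon SDK; the example above assumes a small disposable helper that logs how long a using-scope took. A minimal sketch of such a helper (name and behaviour assumed, not the original implementation) could look like this:

        // Hypothetical scope timer assumed by the example above.
        public sealed class TimeTicker : IDisposable
        {
            private readonly string label;
            private readonly System.Diagnostics.Stopwatch stopwatch = System.Diagnostics.Stopwatch.StartNew();

            public TimeTicker(string label)
            {
                this.label = label;
            }

            public void Dispose()
            {
                stopwatch.Stop();
                Console.WriteLine("{0}: {1} ms", label, stopwatch.ElapsedMilliseconds);
            }
        }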
Example #2
        public void GrabOne(out Bitmap bitmap, out IntPtr ptrBmp)
        {
            try
            {
                // GrabImage = null;
                if (camera.StreamGrabber.IsGrabbing)
                {
                    StopGrabbing();
                    delay(200);
                }

                IGrabResult grabResult = camera.StreamGrabber.GrabOne(3000);
                stopWatch.Restart();    // ****  Restart the acquisition timer  ****
                if (grabResult.PixelTypeValue == PixelType.Mono8)
                {
                    if (latestFrameAddress == IntPtr.Zero)
                    {
                        latestFrameAddress = Marshal.AllocHGlobal((Int32)grabResult.PayloadSize);
                    }
                    converter.OutputPixelFormat = PixelType.Mono8;
                    converter.Convert(latestFrameAddress, grabResult.PayloadSize, grabResult);
                    // Convert to a Halcon image for display
                    //HOperatorSet.GenImage1(out GrabImage, "byte", (HTuple)grabResult.Width, (HTuple)grabResult.Height, (HTuple)latestFrameAddress);


                    //      BitmapData bmpData = m_bitmap.LockBits(new System.Drawing.Rectangle(0, 0, m_bitmap.Width, m_bitmap.Height), ImageLockMode.ReadOnly,
                    //System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
                    //IntPtr intp = bmpData.Scan0;
                    //m_bitmap.UnlockBits(bmpData);


                    bitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format8bppIndexed);
                    BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
                    converter.OutputPixelFormat = PixelType.Mono8;
                    ptrBmp = bmpData.Scan0;
                    converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult); //Exception handling TODO
                    bitmap.UnlockBits(bmpData);
                }
                else
                {
                    bitmap = null; ptrBmp = IntPtr.Zero;
                }
            }
            catch (Exception e)
            {
                bitmap = null; ptrBmp = IntPtr.Zero;
                // ShowException(e);
            }
        }
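None of the snippets on this page free latestFrameAddress, which is allocated with Marshal.AllocHGlobal. A hedged cleanup sketch (method name assumed; call it when the camera is closed) would be:

        // Hypothetical cleanup for the unmanaged frame buffer allocated above.
        public void ReleaseFrameBuffer()
        {
            if (latestFrameAddress != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(latestFrameAddress);
                latestFrameAddress = IntPtr.Zero;
            }
        }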
Example #3
        public static byte[] ConvertToBytes(this PixelDataConverter converter, IGrabResult grabResult)
        {
            if (grabResult.Width == 0 || grabResult.Height == 0)
            {
                return(new byte[0]);
            }

            var bufferSize = grabResult.Width * grabResult.Height;

            switch (grabResult.PixelTypeValue)
            {
            case PixelType.Mono8:
                converter.OutputPixelFormat = PixelType.Mono8;
                break;

            case PixelType.Mono12:
            case PixelType.Mono12p:
            case PixelType.Mono12packed:
                converter.OutputPixelFormat = PixelType.Mono16;
                bufferSize *= 2;
                break;

            default:
                throw new NotSupportedException($"Pixel type {grabResult.PixelTypeValue} not supported");
            }

            var bytes = new byte[bufferSize];

            converter.Convert(bytes, grabResult);

            return(bytes);
        }
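A possible call site for the ConvertToBytes extension above, assuming camera and converter fields like those used in the other examples on this page:

        // Hypothetical usage of the ConvertToBytes extension.
        using (IGrabResult grabResult = camera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException))
        {
            if (grabResult.GrabSucceeded)
            {
                byte[] pixels = converter.ConvertToBytes(grabResult);
                Console.WriteLine("Received {0} bytes for a {1}x{2} frame.", pixels.Length, grabResult.Width, grabResult.Height);
            }
        }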
Example #4
        /// <summary>
        /// Basler camera image-grabbed callback. Usage: Camera camera = new Camera(); camera.CameraOpened +=
        /// Configuration.AcquireContinuous; camera.StreamGrabber.ImageGrabbed +=
        /// HalconCameraConverter.OnImageGrabbed; HalconCameraConverter.ImageReceived +=
        /// YourImageProcessFunction; camera.Open(); camera.StreamGrabber.UserData = deviceName;
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        public static void OnImageGrabbed(object sender, ImageGrabbedEventArgs e)
        {
            IGrabResult grabResult = e.GrabResult;

            if (!grabResult.GrabSucceeded)
            {
                Task.Run(() => { throw new InvalidOperationException($"Basler camera error {grabResult.ErrorCode}: {grabResult.ErrorDescription}"); });
                return;
            }

            HImage ho_Image;

            using (var bitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb))
            {
                // Lock the bits of the bitmap.
                BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
                // Place the pointer to the buffer of the bitmap.
                var converter = new PixelDataConverter {
                    OutputPixelFormat = PixelType.Mono8
                };
                IntPtr ptrBmp = bmpData.Scan0;
                converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult); //Exception handling TODO
                // Create the HImage while the bitmap bits are still locked; the constructor copies the pixel data.
                ho_Image = new HImage("byte", grabResult.Width, grabResult.Height, ptrBmp);
                bitmap.UnlockBits(bmpData);
            }

            OnImageReceived.BeginInvoke(sender, ho_Image, grabResult.StreamGrabberUserData.ToString(), EndingImageReceived, ho_Image);
        }
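The XML comment above already describes the intended wiring; expanded into sequential statements (class, event and variable names taken from that comment, the final Start call assumed):

        // Wiring sketch based on the usage described in the XML comment above.
        Camera camera = new Camera();
        camera.CameraOpened += Configuration.AcquireContinuous;
        camera.StreamGrabber.ImageGrabbed += HalconCameraConverter.OnImageGrabbed;
        HalconCameraConverter.ImageReceived += YourImageProcessFunction;
        camera.Open();
        camera.StreamGrabber.UserData = deviceName;   // deviceName identifies this camera in the handler
        camera.StreamGrabber.Start();                 // assumed: start grabbing once the handlers are attached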
Example #5
 public void ConvertGrabResultToByteArray(IGrabResult grabResult, ref byte[] grabResultBufferRGB)
 {
     //grabResultBufferRGB = new byte[grabResultBufferRGB_size];
     converter.Convert <byte, byte>(grabResultBufferRGB, (byte[])grabResult.PixelData,
                                    sourcePixelType, grabResult.Width, grabResult.Height,
                                    grabResult.PaddingX, grabResult.Orientation);
     initialized = true;
 }
Example #6
        private void StreamGrabber_ImageGrabbed(object sender, ImageGrabbedEventArgs e) // Callback function
        {
            IGrabResult grabResult = e.GrabResult;

            using (grabResult)
            {
                try
                {
                    if (grabResult.GrabSucceeded)
                    {
                        if (IsMonoFormat(grabResult)) // Monochrome image
                        {
                            Bitmap _bitmap  = bitmap;
                            HImage _hImage  = hImage;
                            Bitmap __bitmap = null;
                            __bitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
                            BitmapData bmpData = __bitmap.LockBits(new Rectangle(0, 0, __bitmap.Width, __bitmap.Height), ImageLockMode.ReadWrite, __bitmap.PixelFormat);
                            converter.OutputPixelFormat = PixelType.BGRA8packed;
                            IntPtr ptrBmp = bmpData.Scan0;
                            converter.Convert(ptrBmp, bmpData.Stride * __bitmap.Height, grabResult);
                            __bitmap.UnlockBits(bmpData);
                            bitmap = RotateImage(__bitmap, 180);
                            byte[] buffer   = grabResult.PixelData as byte[];
                            IntPtr p        = Marshal.UnsafeAddrOfPinnedArrayElement(buffer, 0);
                            HImage __hImage = new HImage("byte", grabResult.Width, grabResult.Height, p);
                            hImage = __hImage.RotateImage(180d, "constant");
                            // CameraData.InertData.bitmap = new Bitmap(bitmap); // Without new Bitmap here, memory would leak
                            // CameraData.InertData.hImage = hImage;
                            DoSomething(bitmap, hImage);
                            if (_bitmap != null)
                            {
                                _bitmap.Dispose();
                            }
                            if (_hImage != null)
                            {
                                _hImage.Dispose();
                            }
                        }
                        else
                        {
                            if (grabResult.PixelTypeValue != PixelType.RGB8packed) // Not RGB8packed: convert to RGB8 first, then build the color image (no conversion needed if it is already RGB8)
                            {
                                byte[] buffer_rgb = new byte[grabResult.Width * grabResult.Height * 3];
                                IntPtr p          = Marshal.UnsafeAddrOfPinnedArrayElement(buffer_rgb, 0);
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message.ToString());
                }
                finally
                {
                    e.DisposeGrabResultIfClone();
                }
            }
        }
Example #7
        // Occurs when an image has been acquired and is ready to be processed.
        private void OnImageGrabbed(Object sender, ImageGrabbedEventArgs e)
        {
            if (!Dispatcher.CheckAccess())
            {
                // If called from a different thread, we must use the Invoke method to marshal the call to the proper GUI thread.
                // The grab result will be disposed after the event call. Clone the event arguments for marshaling to the GUI thread.
                Dispatcher.BeginInvoke(new EventHandler <ImageGrabbedEventArgs>(OnImageGrabbed), sender, e.Clone());
                return;
            }

            try
            {
                // Acquire the image from the camera. Only show the latest image. The camera may acquire images faster than the images can be displayed.

                // Get the grab result.
                IGrabResult grabResult = e.GrabResult;

                // Check if the image can be displayed.
                if (grabResult.IsValid)
                {
                    // Reduce the number of displayed images to a reasonable amount if the camera is acquiring images very fast.
                    if (!stopWatch.IsRunning || stopWatch.ElapsedMilliseconds > 33)
                    {
                        stopWatch.Restart();

                        Bitmap bitmap = new Bitmap(grabResult.Width, grabResult.Height, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                        // Lock the bits of the bitmap.
                        BitmapData bmpData = bitmap.LockBits(new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
                        // Place the pointer to the buffer of the bitmap.
                        converter.OutputPixelFormat = PixelType.BGRA8packed;
                        IntPtr ptrBmp = bmpData.Scan0;
                        converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult); //Exception handling TODO
                        bitmap.UnlockBits(bmpData);

                        // Assign a temporary variable to dispose the bitmap after assigning the new bitmap to the display control.
                        //Bitmap bitmapOld = MicroVideo.Image as Bitmap;
                        // Provide the display control with the new bitmap. This action automatically updates the display.
                        //MicroVideo.Image = bitmap;
                        MicroVideo.Source = ImageSourceForBitmap(bitmap);
                        //if (bitmapOld != null)
                        //{
                        //    // Dispose the bitmap.
                        //    bitmapOld.Dispose();
                        //}
                    }
                }
            }
            catch (Exception exception)
            {
                ShowException(exception);
            }
            finally
            {
                // Dispose the grab result if needed for returning it to the grab loop.
                e.DisposeGrabResultIfClone();
            }
        }
Example #8
        Bitmap GrabResult2Bmp(IGrabResult grabResult)
        {
            Bitmap     b       = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
            BitmapData bmpData = b.LockBits(new Rectangle(0, 0, b.Width, b.Height), ImageLockMode.ReadWrite, b.PixelFormat);

            pxConvert.OutputPixelFormat = PixelType.BGRA8packed;
            IntPtr bmpIntpr = bmpData.Scan0;

            pxConvert.Convert(bmpIntpr, bmpData.Stride * b.Height, grabResult);
            b.UnlockBits(bmpData);
            return(b);
        }
        // Occurs when an image has been acquired and is ready to be processed.
        private void OnImageGrabbed(Object sender, ImageGrabbedEventArgs e)
        {
            try
            {
                // Acquire the image from the camera. Only show the latest image. The camera may acquire images faster than the images can be displayed.

                // Get the grab result.
                IGrabResult grabResult = e.GrabResult;

                // Check if the image can be displayed.
                if (grabResult.GrabSucceeded)
                {
                    Bitmap bitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
                    // Lock the bits of the bitmap.
                    BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
                    // Place the pointer to the buffer of the bitmap.
                    converter.OutputPixelFormat = PixelType.BGRA8packed;
                    IntPtr ptrBmp = bmpData.Scan0;
                    converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult);
                    bitmap.UnlockBits(bmpData);

                    // Use the dispatcher to access the UI thread
                    dispatcher.BeginInvoke((Action)(() =>
                    {
                        CameraImage = ConvertImage.BitmapToImageSource(bitmap);
                    }));

                    FrameRate = Param.ResultingFrameRateAbs;

                    if (IsImageSaveAuto == true)
                    {
                        Image_Save();
                    }

                    /*
                     * dispatcher.BeginInvoke((Action)(() =>
                     * {
                     *  CameraImageComplete();
                     * }));
                     */
                }
            }
            catch (Exception exception)
            {
                Log.Set(string.Format("Exception: {0}", exception.Message));
            }
            finally
            {
                // Dispose the grab result if needed for returning it to the grab loop.
                e.DisposeGrabResultIfClone();
            }
        }
        private void StreamGrabber_ImageGrabbed(object sender, ImageGrabbedEventArgs e)
        {
            if (_ngrabCount > 0)
            {
                _ncount++;

                if (_ncount >= _ngrabCount)
                {
                    Set_Stop();
                }
            }

            IGrabResult result = e.GrabResult;

            if (result.GrabSucceeded)
            {
                if (result.PixelTypeValue.IsMonoImage())
                {
                    var src  = result.PixelData as byte[];
                    var data = new byte[src.Length];
                    Array.Copy(src, data, src.Length);
                    if (ImageGrabbed != null)
                    {
                        ImageGrabbed(new tsInfo_Grab(EGRAB_RESULT.eSuccess, result.Width, result.Height, 1, data));
                    }

                    return;
                }
                else
                {
                    var data = new byte[result.Width * result.Height * 3];
                    _converter.Convert(data, result);

                    if (ImageGrabbed != null)
                    {
                        ImageGrabbed(new tsInfo_Grab(EGRAB_RESULT.eSuccess, result.Width, result.Height, 3, data));
                    }

                    return;
                }
            }

            if (ImageGrabbed != null)
            {
                ImageGrabbed(new tsInfo_Grab(EGRAB_RESULT.eError));
            }
        }
Example #11
        public static Bitmap Convert(this PixelDataConverter converter, IGrabResult grabResult)
        {
            if (grabResult.Width == 0 || grabResult.Height == 0)
            {
                return(null);
            }

            var bitmap     = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
            var rectangle  = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
            var bitmapData = bitmap.LockBits(rectangle, ImageLockMode.ReadWrite, bitmap.PixelFormat);

            converter.OutputPixelFormat = PixelType.BGRA8packed;

            var ptrBmp = bitmapData.Scan0;

            converter.Convert(ptrBmp, bitmapData.Stride * bitmap.Height, grabResult);
            bitmap.UnlockBits(bitmapData);
            return(bitmap);
        }
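A hedged usage sketch for the Convert extension above (camera and converter field names assumed, mirroring the grab loops elsewhere on this page):

        // Hypothetical usage of the Convert extension: grab one result and save it as a bitmap.
        using (IGrabResult grabResult = camera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException))
        {
            if (grabResult.GrabSucceeded)
            {
                using (Bitmap frame = converter.Convert(grabResult))
                {
                    frame?.Save("frame.bmp", System.Drawing.Imaging.ImageFormat.Bmp);
                }
            }
        }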
Example #12
        public BitmapSource GetBitmapSourceFromData(IGrabResult grabResult, int width, int height, string pixelFormat = "Gray8")
        {
            try
            {
                BitmapSource bmpSrc = null;

                PixelDataConverter converter = new PixelDataConverter();  // Converts the grab result's pixel format into the Bitmap's PixelFormat
                Bitmap             bitmap    = new Bitmap(width, height, PixelFormat.Format32bppRgb);

                BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat); // Lock the bits of the bitmap.

                converter.OutputPixelFormat = PixelType.BGRA8packed;                                                                                 // Place the pointer to the buffer of the bitmap.
                IntPtr ptrBmp = bmpData.Scan0;
                converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult);                                                               //Exception handling TODO
                bitmap.UnlockBits(bmpData);
                bmpSrc = BitmapToBitmapSource(bitmap);
                return(bmpSrc);
            }
            catch (Exception ex)
            {
                Log.L_I.WriteError(NameClass, ex);
                return(null);
            }
        }
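BitmapToBitmapSource is referenced above but not shown. A common way to implement such a helper (this is an assumed implementation, not the original) is via Imaging.CreateBitmapSourceFromHBitmap:

        // Hypothetical implementation of the BitmapToBitmapSource helper used above.
        [System.Runtime.InteropServices.DllImport("gdi32.dll")]
        private static extern bool DeleteObject(IntPtr hObject);

        private static BitmapSource BitmapToBitmapSource(Bitmap bitmap)
        {
            IntPtr hBitmap = bitmap.GetHbitmap();
            try
            {
                return System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                    hBitmap, IntPtr.Zero, System.Windows.Int32Rect.Empty,
                    System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
            }
            finally
            {
                DeleteObject(hBitmap);   // release the GDI handle created by GetHbitmap
            }
        }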
Example #13
        /****************************************************/


        /**********  Image-grabbed event handlers  **********/


        // Camera image acquisition callback.
        private void OnImageGrabbed(Object sender, ImageGrabbedEventArgs e)
        {
            try
            {
                IGrabResult grabResult = e.GrabResult;
                if (grabResult.GrabSucceeded)
                {
                    grabTime = stopWatch.ElapsedMilliseconds;
                    {
                        if (latestFrameAddress == IntPtr.Zero)
                        {
                            latestFrameAddress = Marshal.AllocHGlobal((Int32)grabResult.PayloadSize);
                        }
                        converter.OutputPixelFormat = PixelType.Mono8;
                        converter.Convert(latestFrameAddress, grabResult.PayloadSize, grabResult);
                        // Convert to a Halcon image for display
                        HOperatorSet.GenImage1(out hPylonImage, "byte", (HTuple)grabResult.Width, (HTuple)grabResult.Height, (HTuple)latestFrameAddress);
                        // Raise the image processing event
                        eventProcessImage(hPylonImage);
                        hPylonImage.Dispose();
                    }
                }
                else
                {
                    MessageBox.Show("Grab faild!\n" + grabResult.ErrorDescription, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
            catch (Exception exception)
            {
                ShowException(exception);
            }
            finally
            {
                e.DisposeGrabResultIfClone();
            }
        }
Example #14
        private void OnImageGrabbed(object sender, ImageGrabbedEventArgs e)
        {
            try
            {
                if (ignoreImage)
                {
                    return;
                }


                //HOperatorSet.CountSeconds(out startTime);
                // Acquire the image from the camera. Only show the latest image. The camera may acquire images faster than the images can be displayed.

                // Get the grab result.
                IGrabResult grabResult = e.GrabResult;

                // Check if the image can be displayed.
                if (grabResult.IsValid)
                {
                    // Reduce the number of displayed images to a reasonable amount if the camera is acquiring images very fast.
                    //if (!stopWatch.IsRunning || stopWatch.ElapsedMilliseconds > 33)
                    {
                        //stopWatch.Restart();
                        width  = grabResult.Width;
                        height = grabResult.Height;
                        //if (hPylonImage != null && hPylonImage.IsInitialized())
                        //{
                        //    hPylonImage.Dispose();
                        //}
                        hPylonImage = new Cognex.VisionPro.CogImage8Grey();
                        if (grabResult.PixelTypeValue == PixelType.Mono8)
                        {
                            // while (grabResult.GrabSucceeded == false) ;
                            if (!grabResult.GrabSucceeded)
                            {/*
                              * Util.Notify(string.Format("Camera {0}: data corrupted, grab failed", cameraIndex));*/
                                Trace.WriteLine("Data corrupted, grab failed " + width + "." + height);
                                return;
                            }
                            if (grabResult.PayloadSize == 0)
                            {/*
                              * Util.Notify(string.Format("Camera {0}: data corrupted, payload size is 0", cameraIndex));*/
                                return;
                            }


                            //Util.Notify(string.Format("Camera {0} data size {1}", cameraIndex, grabResult.PayloadSize));
                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (m_latestFrameAddress == IntPtr.Zero)
                            {
                                m_latestFrameAddress = Marshal.AllocHGlobal((Int32)grabResult.PayloadSize);
                            }
                            g_converter.OutputPixelFormat = PixelType.Mono8;
                            g_converter.Convert(m_latestFrameAddress, grabResult.PayloadSize, grabResult);

                            byte[] ImagebufferPtr = new byte[grabResult.Width * grabResult.Height];

                            // Convert to a Halcon image for display
                            //hPylonImage.Get8GreyPixelMemory( Cognex.VisionPro.CogImageDataModeConstants.Read,);
                            Marshal.Copy(m_latestFrameAddress, ImagebufferPtr, 0, grabResult.Width * grabResult.Height);


                            var cogRoot = new Cognex.VisionPro.CogImage8Root();
                            cogRoot.Initialize(grabResult.Width, grabResult.Height, m_latestFrameAddress, grabResult.Width, null);
                            hPylonImage.SetRoot(cogRoot);
                            //hPylonImage.GenImage1("byte", grabResult.Width, grabResult.Height, m_latestFrameAddress);
                        }
                        else if (grabResult.PixelTypeValue == PixelType.BayerBG8 || grabResult.PixelTypeValue == PixelType.BayerGB8)
                        {
                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (m_latestFrameAddress == IntPtr.Zero)
                            {
                                m_latestFrameAddress = Marshal.AllocHGlobal((Int32)(3 * g_camera.Parameters[PLCamera.PayloadSize].GetValue()));
                            }
                            g_converter.OutputPixelFormat = PixelType.BGR8packed;
                            g_converter.Convert(m_latestFrameAddress, 3 * grabResult.PayloadSize, grabResult);

                            /*hPylonImage.GenImageInterleaved(m_latestFrameAddress, "bgr",
                             *       grabResult.Width, grabResult.Height, -1, "byte", grabResult.Width, grabResult.Height, 0, 0, -1, 0);
                             */
                        }
                        else
                        {/*
                          * Util.Notify(string.Format("Camera {0}: incorrect pixel format", cameraIndex));*/
                        }
                        Trace.WriteLine("采集成功" + width + "." + height);
                        TrigerImageEvent();
                    }
                }
            }
            catch (System.ArgumentException ex)
            {/*
              * Util.WriteLog(this.GetType(), ex);
              * Util.Notify(string.Format("Camera {0}: image data packet lost", cameraIndex));*/
            }
            catch (Exception ex)
            {/*
              * Util.WriteLog(this.GetType(), ex);
              * Util.Notify(string.Format("Camera {0}: exception while returning image data", cameraIndex));*/
            }
            finally
            {
                // Dispose the grab result if needed for returning it to the grab loop.
                e.DisposeGrabResultIfClone();
            }
        }
Example #15
        public void GrabImage(string path)
        {
            try
            {
                // Create a camera object that selects the first camera device found.
                // More constructors are available for selecting a specific camera device.
                using (Camera camera = new Camera())
                {
                    // Print the model name of the camera.
                    Debug.WriteLine("Using camera {0}.", camera.CameraInfo[CameraInfoKey.ModelName]);

                    // Set the acquisition mode to free running continuous acquisition when the camera is opened.
                    camera.CameraOpened += Configuration.AcquireContinuous;

                    // Open the connection to the camera device.
                    camera.Open();

                    // The parameter MaxNumBuffer can be used to control the amount of buffers
                    // allocated for grabbing. The default value of this parameter is 10.
                    camera.Parameters[PLCameraInstance.MaxNumBuffer].SetValue(5);

                    // Start grabbing.
                    camera.StreamGrabber.Start();

                    // Grab an image.
                    // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
                    IGrabResult grabResult = camera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException);

                    using (grabResult)
                    {
                        // Image grabbed successfully?
                        if (grabResult.GrabSucceeded)
                        {
                            // Access the image data.
                            Debug.WriteLine("SizeX: {0}", grabResult.Width);
                            Debug.WriteLine("SizeY: {0}", grabResult.Height);
                            Debug.WriteLine("IsValid: {0}", grabResult.IsValid);
                            Debug.WriteLine("Orientation: {0}", grabResult.Orientation);
                            Debug.WriteLine("PaddingX: {0}", grabResult.PaddingX);
                            Debug.WriteLine("PaddingY: {0}", grabResult.PaddingY);
                            byte[] buffer = grabResult.PixelData as byte[];
                            Debug.WriteLine("PixelData Count: {0}", buffer.Length);
                            //Debug.WriteLine("Gray value of first pixel: {0}", buffer[0]);
                            Debug.WriteLine("");

                            // Display the grabbed image.
                            //ImageWindow.DisplayImage(0, grabResult);

                            // Save the image to the hard disk
                            ImagePersistence.Save((int)ImageFileFormat.BMP, path + "\\" + DateTime.Now.ToString("yyyyMMdd_HHmmss") + "_GrabOrg.bmp", grabResult);
                            //ImagePersistence.Save((int)ImageFileFormat.BMP, @"D:\" + DateTime.Now.ToString("yyyyMMdd_HHmmss") + "_GrabOrg.bmp", grabResult);

                            // Convert the IImage to a Bitmap
                            myBitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
                            // Lock the bits of the bitmap.
                            BitmapData bmpData = myBitmap.LockBits(new Rectangle(0, 0, myBitmap.Width, myBitmap.Height), ImageLockMode.ReadWrite, myBitmap.PixelFormat);
                            // Place the pointer to the buffer of the bitmap.
                            myPixelDataConverter.OutputPixelFormat = PixelType.BGRA8packed;
                            IntPtr ptrBmp = bmpData.Scan0;
                            myPixelDataConverter.Convert(ptrBmp, bmpData.Stride * myBitmap.Height, grabResult);
                            myBitmap.UnlockBits(bmpData);

                        }
                        else
                        {
                            Debug.WriteLine("Error: {0} {1}", grabResult.ErrorCode, grabResult.ErrorDescription);
                        }
                    }

                    // Stop grabbing.
                    camera.StreamGrabber.Stop();

                    // Close the connection to the camera device.
                    camera.Close();
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine("Exception: {0}", e.Message);
            }
        }
Example #16
        private void OnImageGrabbed(object sender, Basler.Pylon.ImageGrabbedEventArgs e)
        {
            try
            {
                // Get the grab result.
                IGrabResult grabResult = e.GrabResult;

                if (!grabResult.GrabSucceeded)
                {
                    BitmapGrabbed?.Invoke(null, false);
                    return;
                }

                // Check if the image can be displayed.
                if (!grabResult.IsValid)
                {
                    BitmapGrabbed?.Invoke(null, false);
                    return;
                }
                lock (this)
                {
                    newBmp = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format8bppIndexed);
                    // Lock the bits of the bitmap.
                    BitmapData bmpData = newBmp.LockBits(new Rectangle(0, 0, newBmp.Width, newBmp.Height), ImageLockMode.ReadWrite, newBmp.PixelFormat);
                    // Place the pointer to the buffer of the bitmap.
                    converter.OutputPixelFormat = PixelType.Mono8;
                    IntPtr ptrBmp = bmpData.Scan0;
                    converter.Convert(ptrBmp, bmpData.Stride * newBmp.Height, grabResult);
                    //palette
                    System.Drawing.Imaging.ColorPalette palette = newBmp.Palette;
                    for (int i = 0; i < 256; ++i)
                    {
                        palette.Entries[i] = Color.FromArgb(255, i, i, i);
                    }
                    newBmp.Palette = palette;
                    //copy the pixel data to a byte array
                    newBytes = new byte[bmpData.Stride * newBmp.Height];
                    Marshal.Copy(ptrBmp, newBytes, 0, newBytes.Length);
                    //reverse y
                    if (this.reverseY)
                    {
                        newBytes = this.bytesReverseY(newBytes, bmpData.Stride, newBmp.Height);
                    }
                    //copy to ptr
                    Marshal.Copy(newBytes, 0, ptrBmp, newBytes.Length);
                    newBmp.UnlockBits(bmpData);

                    this.CurrentBytes = this.newBytes;
                    this.LastBmp      = this.CurrentBmp;
                    this.CurrentBmp   = this.newBmp;
                }
                BmpGrabedEvnet.Set();
                this.TriggerSts.Update(true);
                BitmapGrabbed?.Invoke(CurrentBmp, true);
                BytesSaveBuffer?.Invoke(CurrentBytes);
                // Reduce the number of displayed images to a reasonable amount
                // if the camera is acquiring images very fast.
                this.currDisplayedTime = DateTime.Now;
                if (this.currDisplayedTime - this.lastDisplayedTime > TimeSpan.FromMilliseconds(70))
                {
                    this.BitmapDisplayed?.Invoke(this.CurrentBmp);
                    this.lastDisplayedTime = this.currDisplayedTime;
                    // LastBmp can be disposed after the BitmapDisplayed invocation has executed.
                    this.LastBmp.Dispose();
                }
            }
            catch (Exception)
            {
                BitmapGrabbed?.Invoke(null, false);
            }
        }
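bytesReverseY is used above but not shown; a minimal sketch of such a row-flipping helper (signature inferred from the call site) could be:

        // Hypothetical helper matching bytesReverseY(newBytes, bmpData.Stride, newBmp.Height):
        // returns a copy of the buffer with the row order reversed (vertical flip).
        private byte[] bytesReverseY(byte[] source, int stride, int height)
        {
            byte[] flipped = new byte[source.Length];
            for (int row = 0; row < height; row++)
            {
                // Copy source row 'row' into destination row 'height - 1 - row'.
                Array.Copy(source, row * stride, flipped, (height - 1 - row) * stride, stride);
            }
            return flipped;
        }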
Example #17
        /****************************************************/


        /**********  Image-grabbed event handlers  **********/


        // Camera image acquisition callback.
        private void OnImageGrabbed(Object sender, ImageGrabbedEventArgs e)
        {
            try
            {
                // Acquire the image from the camera. Only show the latest image. The camera may acquire images faster than the images can be displayed.

                // Get the grab result.
                IGrabResult grabResult = e.GrabResult;

                // Check if the image can be displayed.
                if (grabResult.GrabSucceeded)
                {
                    grabTime = stopWatch.ElapsedMilliseconds;
                    eventComputeGrabTime(grabTime);

                    // Reduce the number of displayed images to a reasonable amount if the camera is acquiring images very fast.
                    // ****  Reduce the display frame rate to lower CPU usage  **** //
                    //if (!stopWatch.IsRunning || stopWatch.ElapsedMilliseconds > 33)

                    {
                        //stopWatch.Restart();
                        // Check whether this is a monochrome pixel format
                        if (grabResult.PixelTypeValue == PixelType.Mono8)
                        {
                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (latestFrameAddress == IntPtr.Zero)
                            {
                                latestFrameAddress = Marshal.AllocHGlobal((Int32)grabResult.PayloadSize);
                            }
                            converter.OutputPixelFormat = PixelType.Mono8;
                            converter.Convert(latestFrameAddress, grabResult.PayloadSize, grabResult);

                            // Convert to a Halcon image for display
                            HOperatorSet.GenImage1(out hPylonImage, "byte", (HTuple)grabResult.Width, (HTuple)grabResult.Height, (HTuple)latestFrameAddress);
                        }
                        else if (grabResult.PixelTypeValue == PixelType.BayerBG8 || grabResult.PixelTypeValue == PixelType.BayerGB8 ||
                                 grabResult.PixelTypeValue == PixelType.BayerRG8 || grabResult.PixelTypeValue == PixelType.BayerGR8)
                        {
                            int imageWidth  = grabResult.Width - 1;
                            int imageHeight = grabResult.Height - 1;
                            int payloadSize = imageWidth * imageHeight;

                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (latestFrameAddress == IntPtr.Zero)
                            {
                                latestFrameAddress = Marshal.AllocHGlobal((Int32)(3 * payloadSize));
                            }
                            converter.OutputPixelFormat = PixelType.BGR8packed;     // Switch between this line and the next depending on the Bayer format
                            //converter.OutputPixelFormat = PixelType.RGB8packed;
                            converter.Parameters[PLPixelDataConverter.InconvertibleEdgeHandling].SetValue("Clip");
                            converter.Convert(latestFrameAddress, 3 * payloadSize, grabResult);

                            HOperatorSet.GenImageInterleaved(out hPylonImage, latestFrameAddress, "bgr",
                                                             (HTuple)imageWidth, (HTuple)imageHeight, -1, "byte", (HTuple)imageWidth, (HTuple)imageHeight, 0, 0, -1, 0);
                        }

                        // Raise the image processing event
                        eventProcessImage(hPylonImage);
                        //hPylonImage.Dispose();
                    }
                }
                else
                {
                    MessageBox.Show("Grab faild!\n" + grabResult.ErrorDescription, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
            catch (Exception exception)
            {
                ShowException(exception);
            }
            finally
            {
                // Dispose the grab result if needed for returning it to the grab loop.
                e.DisposeGrabResultIfClone();
            }
        }
Example #18
        // start image grabbing
        public void GrabImages()
        {
            if (camera != null)
            {
                _cameraRecord = true;

                System.Threading.ThreadPool.QueueUserWorkItem(new WaitCallback(delegate(object o)
                {
                    try
                    {
                        // Start grabbing
                        camera.StreamGrabber.Start();

                        // Grab a number of images.
                        while (cameraRecord && systemState.reconThreadFree)
                        {
                            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
                            IGrabResult grabResult = camera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException);
                            using (grabResult)
                            {
                                // Image grabbed successfully?
                                if (grabResult.GrabSucceeded)
                                {
                                    // Access the image data.
                                    int stride    = (int)grabResult.ComputeStride();
                                    byte[] buffer = grabResult.PixelData as byte[];

                                    // new buffer for format conversion
                                    byte[] new_buffer = new byte[grabResult.Width * grabResult.Height * 3];
                                    int new_stride    = 3 * stride;

                                    // pixel conversion from Bayer to rgb
                                    converter.OutputPixelFormat = PixelType.RGB8packed;
                                    converter.Convert <byte>(new_buffer, grabResult);//converter.Convert(buffer, grabResult);

                                    // create Bitmap
                                    bmpSource = BitmapSource.Create(grabResult.Width, grabResult.Height, 0, 0,
                                                                    PixelFormats.Rgb24, null, new_buffer, new_stride);
                                    bmpSource.Freeze();

                                    systemState.currentCameraImage = bmpSource;
                                }
                                else
                                {
                                    Console.WriteLine("ERROR: {0} {1}", grabResult.ErrorCode, grabResult.ErrorDescription);
                                }
                            }
                        }

                        // show monitor mouse message, when recon thread is active
                        if (systemState.reconThreadFree == false)
                        {
                            // show laser warning sign --> no camera means clinical version
                            BitmapImage src = new BitmapImage();
                            src.BeginInit();
                            src.CacheOption = BitmapCacheOption.OnLoad;
                            src.UriSource   = new Uri("pack://application:,,,/ViewMSOTc/ViewsOAM/CameraInactive.png");
                            src.EndInit();
                            src.Freeze();

                            systemState.currentCameraImage = src;
                        }

                        // Stop grabbing.
                        camera.StreamGrabber.Stop();
                    }
                    catch (Exception e)
                    {
                        Console.Error.WriteLine("INFO: {0}", e.Message);

                        // show laser warning sign --> no camera means clinical version
                        BitmapImage src = new BitmapImage();
                        src.BeginInit();
                        src.CacheOption = BitmapCacheOption.OnLoad;
                        src.UriSource   = new Uri("pack://application:,,,/ViewMSOTc/ViewsOAM/LaserSafetyImageWearGoggles.png");
                        src.EndInit();
                        src.Freeze();

                        systemState.currentCameraImage = src;
                    }
                }));
            }
        }
Example #19
        public HObject BuffersToImage24PlanarColor(IGrabResult grabResult)
        {
            HObject Hobj;

            HOperatorSet.GenEmptyObj(out Hobj);
            try
            {
                int imageWidth  = grabResult.Width - 1;
                int imageHeight = grabResult.Height - 1;
                int payloadSize = imageWidth * imageHeight;
                PixelDataConverter converter = new PixelDataConverter();
                //// Do not Bayer-convert the last row and column (clip them)
                converter.Parameters[PLPixelDataConverter.InconvertibleEdgeHandling].SetValue("Clip");
                converter.OutputPixelFormat = PixelType.BGR8packed;
                byte[] buffer = new byte[payloadSize * 3];
                converter.Convert(buffer, grabResult);
                Bitmap     bitmap  = new Bitmap(imageWidth, imageHeight, PixelFormat.Format24bppRgb);
                BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height),
                                                     ImageLockMode.ReadWrite, bitmap.PixelFormat);
                try
                {
                    IntPtr ptrBmp      = bmpData.Scan0;
                    int    imageStride = imageWidth * 3;
                    if (imageStride == bmpData.Stride)
                    {
                        Marshal.Copy(buffer, 0, ptrBmp, bmpData.Stride * bitmap.Height);
                    }
                    else
                    {
                        /* The widths in bytes are not equal, copy line by line.
                         * This can happen if the image width is not divisible by four.*/
                        for (int i = 0; i < bitmap.Height; ++i)
                        {
                            Marshal.Copy(buffer, i * imageStride, new IntPtr(ptrBmp.ToInt64() + i * bmpData.Stride), imageWidth * 3);
                        }
                    }
                    unsafe
                    {
                        int count = imageWidth * imageHeight;
                        //// Split the interleaved BGR bitmap data into separate R, G and B planes.
                        byte[] r = new byte[count];
                        byte[] g = new byte[count];
                        byte[] b = new byte[count];
                        fixed (byte* r1 = r, g1 = g, b1 = b)
                        {
                            byte* r_c1 = r1;
                            byte* g_c1 = g1;
                            byte* b_c1 = b1;
                            byte* p1   = (byte*)bmpData.Scan0;
                            // B G R -> separate R, G, B planes, honoring the bitmap stride.
                            for (int i = 0; i < imageHeight; i++)
                            {
                                byte* row = p1 + i * bmpData.Stride;
                                for (int j = 0; j < imageWidth; j++)
                                {
                                    *r_c1++ = row[j * 3 + 2];   // R channel
                                    *g_c1++ = row[j * 3 + 1];   // G channel
                                    *b_c1++ = row[j * 3 + 0];   // B channel
                                }
                            }
                            // GenImage3 copies the plane data into a new HALCON image.
                            HOperatorSet.GenImage3(out Hobj, "byte", imageWidth, imageHeight, new IntPtr(r1), new IntPtr(g1), new IntPtr(b1));
                        }
                    }
                }
                finally
                {
                    bitmap.UnlockBits(bmpData);
                }
                return(Hobj);
            }
            catch (Exception ex)
            {
                return(null);
            }
        }
        private void OnImageGrabbed(object sender, ImageGrabbedEventArgs e)
        {
            try
            {
                // Acquire the image from the camera. Only show the latest image. The camera may acquire images faster than the images can be displayed.

                // Get the grab result.
                IGrabResult grabResult = e.GrabResult;

                // Check if the image can be displayed.
                if (grabResult.GrabSucceeded)
                {
                    grabTime = stopWatch.ElapsedMilliseconds;
                    base.ProcessGrabTimeCallback(grabTime);

                    // Reduce the number of displayed images to a reasonable amount if the camera is acquiring images very fast.
                    // ****  Reduce the display frame rate to lower CPU usage  **** //
                    //if (!stopWatch.IsRunning || stopWatch.ElapsedMilliseconds > 33)

                    {
                        //stopWatch.Restart();
                        // Check whether this is a monochrome pixel format
                        if (grabResult.PixelTypeValue == PixelType.Mono8)
                        {
                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (latestFrameAddress == IntPtr.Zero)
                            {
                                latestFrameAddress = Marshal.AllocHGlobal((Int32)grabResult.PayloadSize);
                            }
                            converter.OutputPixelFormat = PixelType.Mono8;
                            converter.Convert(latestFrameAddress, grabResult.PayloadSize, grabResult);

                            // Convert to a Halcon image for display
                            Image.GenImage1("byte", grabResult.Width, grabResult.Height, latestFrameAddress);
                        }
                        else if (grabResult.PixelTypeValue == PixelType.BayerBG8 || grabResult.PixelTypeValue == PixelType.BayerGB8 ||
                                 grabResult.PixelTypeValue == PixelType.BayerRG8 || grabResult.PixelTypeValue == PixelType.BayerGR8)
                        {
                            int imageWidth  = grabResult.Width - 1;
                            int imageHeight = grabResult.Height - 1;
                            int payloadSize = imageWidth * imageHeight;

                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (latestFrameAddress == IntPtr.Zero)
                            {
                                latestFrameAddress = Marshal.AllocHGlobal((Int32)(3 * payloadSize));
                            }
                            converter.OutputPixelFormat = PixelType.BGR8packed;     // Switch between this line and the next depending on the Bayer format
                            //converter.OutputPixelFormat = PixelType.RGB8packed;
                            converter.Parameters[PLPixelDataConverter.InconvertibleEdgeHandling].SetValue("Clip");
                            converter.Convert(latestFrameAddress, 3 * payloadSize, grabResult);

                            Image.GenImageInterleaved(latestFrameAddress, "bgr",
                                                      imageWidth, imageHeight, -1, "byte", imageWidth, imageHeight, 0, 0, -1, 0);
                        }
                        HImage newImage = Image.CopyImage();
                        image = newImage;
                        // Raise the image processing event
                        base.ProcessImageCallBack(newImage);

                        Image.Dispose();
                    }
                }
                else
                {
                    WriteErrorLog("Grab faild!\n" + grabResult.ErrorDescription + " of " + UserID);
                }
            }
            catch
            {
                bConnectOk = false;
            }
            finally
            {
                // Dispose the grab result if needed for returning it to the grab loop.
                e.DisposeGrabResultIfClone();
            }
        }
        // Acquisition thread procedure
        public void GrabThreadProcess()
        {
            if ((chooseHIK) && (!chooseBasler))
            {
                MyCamera.MVCC_INTVALUE stParam = new MyCamera.MVCC_INTVALUE();
                int nRet = camera2.MV_CC_GetIntValue_NET("PayloadSize", ref stParam);
                if (MyCamera.MV_OK != nRet)
                {
                    MessageBox.Show("Get PayloadSize failed", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    return;
                }
                UInt32 nPayloadSize = stParam.nCurValue;
                if (nPayloadSize > m_nBufSizeForDriver)
                {
                    if (m_BufForDriver != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(m_BufForDriver);   // The buffer was allocated with AllocHGlobal, so release it with FreeHGlobal.
                    }
                    m_nBufSizeForDriver = nPayloadSize;
                    m_BufForDriver      = Marshal.AllocHGlobal((Int32)m_nBufSizeForDriver);
                }
                if (m_BufForDriver == IntPtr.Zero)
                {
                    return;
                }

                MyCamera.MV_FRAME_OUT_INFO_EX stFrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();  // Output frame info structure
                //IntPtr pTemp = IntPtr.Zero;

                while (hikCanGrab)
                {
                    // Convert the HIKVISION frame data to an OpenCV Mat
                    nRet = camera2.MV_CC_GetOneFrameTimeout_NET(m_BufForDriver, nPayloadSize, ref stFrameInfo, 1000); // m_BufForDriver receives the image data
                    //pTemp = m_BufForDriver;
                    byte[] byteImage = new byte[stFrameInfo.nHeight * stFrameInfo.nWidth];
                    Marshal.Copy(m_BufForDriver, byteImage, 0, stFrameInfo.nHeight * stFrameInfo.nWidth);
                    Mat matImage = new Mat(stFrameInfo.nHeight, stFrameInfo.nWidth, MatType.CV_8UC1, byteImage);
                    // Convert the single-channel image to three channels
                    Mat matImageNew = new Mat();
                    Cv2.CvtColor(matImage, matImageNew, ColorConversionCodes.GRAY2RGB);
                    Bitmap bitmap = matImageNew.ToBitmap();  // Convert the Mat to a Bitmap
                    // Run inference if enabled
                    if (isInference)
                    {
                        bitmap = Inference(bitmap);
                    }
                    if (pictureBox1.InvokeRequired)  // InvokeRequired is true when a thread other than the one that created the control is accessing it
                    {
                        UpdateUI update = delegate { pictureBox1.Image = bitmap; };
                        pictureBox1.BeginInvoke(update);
                    }
                    else
                    {
                        pictureBox1.Image = bitmap;
                    }
                }
            }
            else if ((chooseBasler) && (!chooseHIK))
            {
                while (baslerCanGrab)
                {
                    IGrabResult grabResult;
                    using (grabResult = camera1.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException))
                    {
                        if (grabResult.GrabSucceeded)
                        {
                            // Four-channel RGBA (BGRA8packed)
                            Bitmap bitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
                            // Lock the bits of the bitmap
                            BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
                            // Place the pointer to the buffer of the bitmap
                            converter.OutputPixelFormat = PixelType.BGRA8packed;
                            IntPtr ptrBmp = bmpData.Scan0;
                            converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult);
                            bitmap.UnlockBits(bmpData);
                            // Run inference if enabled
                            if (isInference)
                            {
                                bitmap = Inference(bitmap);
                            }
                            // Controls must not be accessed directly from another thread, so invoke onto the UI thread
                            // References: https://bbs.csdn.net/topics/350050105
                            //             https://www.cnblogs.com/lky-learning/p/14025280.html
                            if (pictureBox1.InvokeRequired)  // InvokeRequired is true when a thread other than the one that created the control is accessing it
                            {
                                UpdateUI update = delegate { pictureBox1.Image = bitmap; };
                                pictureBox1.BeginInvoke(update);
                            }
                            else
                            {
                                pictureBox1.Image = bitmap;
                            }
                        }
                    }
                }
            }
        }
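The UpdateUI delegate used in GrabThreadProcess is not shown; the calls suggest a parameterless void delegate, e.g. this assumed declaration:

        // Hypothetical declaration matching "UpdateUI update = delegate { pictureBox1.Image = bitmap; };" above.
        private delegate void UpdateUI();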
Example #22
        public PylonCapture()
        {
            source = Observable.Create <PylonDataFrame>((observer, cancellationToken) =>
            {
                return(Task.Factory.StartNew(() =>
                {
                    lock (captureLock)
                    {
                        var configFile = ParameterFile;
                        using (var camera = new Camera(SerialNumber))
                            using (var converter = new PixelDataConverter())
                            {
                                camera.Open();
                                if (!string.IsNullOrEmpty(configFile))
                                {
                                    camera.Parameters.Load(configFile, ParameterPath.CameraDevice);
                                }

                                try
                                {
                                    camera.StreamGrabber.ImageGrabbed += (sender, e) =>
                                    {
                                        var result = e.GrabResult;
                                        if (result.IsValid)
                                        {
                                            int channels;
                                            IplDepth depth;
                                            PixelType outputFormat;
                                            var size = new Size(result.Width, result.Height);
                                            GetImageDepth(result.PixelTypeValue, out depth, out channels, out outputFormat);
                                            converter.OutputPixelFormat = outputFormat;
                                            var output = new IplImage(size, depth, channels);
                                            converter.Convert(output.ImageData, output.WidthStep * output.Height, result);
                                            observer.OnNext(new PylonDataFrame(output, result));
                                        }
                                    };

                                    camera.StreamGrabber.GrabStopped += (sender, e) =>
                                    {
                                        if (e.Reason != GrabStopReason.UserRequest)
                                        {
                                            observer.OnError(new CaptureException(e.ErrorMessage));
                                        }
                                    };

                                    camera.Parameters[PLCamera.AcquisitionMode].SetValue(PLCamera.AcquisitionMode.Continuous);
                                    camera.StreamGrabber.Start(GrabStrategy, GrabLoop.ProvidedByStreamGrabber);
                                    cancellationToken.WaitHandle.WaitOne();
                                }
                                finally
                                {
                                    camera.StreamGrabber.Stop();
                                    camera.Close();
                                }
                            }
                    }
                },
                cancellationToken,
                TaskCreationOptions.LongRunning,
                TaskScheduler.Default));
            })
            .PublishReconnectable()
            .RefCount();
        }
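A minimal consumption sketch, assuming the class exposes the observable via a Generate() method (as Bonsai-style sources typically do) and that SerialNumber is a public, settable property; the serial number shown is purely illustrative. Because of PublishReconnectable().RefCount(), grabbing starts with the first subscription and stops when the last subscriber disposes:

        var capture = new PylonCapture { SerialNumber = "21234567" };    // hypothetical serial number
        using (capture.Generate().Subscribe(
            frame => Console.WriteLine("frame received"),                // frame is a PylonDataFrame
            error => Console.Error.WriteLine(error)))
        {
            Console.ReadLine();    // the camera keeps grabbing until the subscription is disposed
        }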
Exemple #23
0
        /// <summary>
        /// Camera image acquisition callback.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void OnImageGrabbed(Object sender, ImageGrabbedEventArgs e)
        {
            try
            {

                // Acquire the image from the camera. Only show the latest image. The camera may acquire images faster than the images can be displayed.
                // Get the grab result.
                // Pin this grab thread to a fixed CPU core and raise its priority (done once per camera).
                if (getcpu)
                {
                    if (sortnum == 0)
                    {
                        GetCpu.SetThreadAffinityMask(GetCpu.GetCurrentThread(), new UIntPtr(0x1));
                    }
                    else
                    {
                        GetCpu.SetThreadAffinityMask(GetCpu.GetCurrentThread(), new UIntPtr(0x2));
                    }

                    Thread.CurrentThread.Priority = ThreadPriority.Highest;
                    getcpu = false;
                }

                IGrabResult grabResult = e.GrabResult;
                // Check if the image can be displayed.
                if (grabResult.GrabSucceeded)
                {
                        //stopWatch.Restart();
                        // Check whether the frame is in monochrome (Mono8) format
                        if (grabResult.PixelTypeValue == PixelType.Mono8)
                        {
                            //allocate the m_stream_size amount of bytes in non-managed environment 
                            if (latestFrameAddress == IntPtr.Zero)
                            {
                                latestFrameAddress = Marshal.AllocHGlobal((Int32)grabResult.PayloadSize);
                            }
                            converter.OutputPixelFormat = PixelType.Mono8;
                            converter.Convert(latestFrameAddress, grabResult.PayloadSize, grabResult);

                            // Convert to a HALCON image for display
                            HOperatorSet.GenImage1(out hPylonImage, "byte", (HTuple)grabResult.Width, (HTuple)grabResult.Height, (HTuple)latestFrameAddress);

                        }
                        else if (grabResult.PixelTypeValue == PixelType.BayerBG8 || grabResult.PixelTypeValue == PixelType.BayerGB8
                                    || grabResult.PixelTypeValue == PixelType.BayerRG8 || grabResult.PixelTypeValue == PixelType.BayerGR8)
                        {
                            int imageWidth = grabResult.Width - 1;
                            int imageHeight = grabResult.Height - 1;
                            int payloadSize = imageWidth * imageHeight;

                            //allocate the m_stream_size amount of bytes in non-managed environment 
                            if (latestFrameAddress == IntPtr.Zero)
                            {
                                latestFrameAddress = Marshal.AllocHGlobal((Int32)(3 * payloadSize));
                            }
                            converter.OutputPixelFormat = PixelType.BGR8packed;     // switch the code below according to the Bayer pattern
                         
                            converter.Parameters[PLPixelDataConverter.InconvertibleEdgeHandling].SetValue("Clip");
                            converter.Convert(latestFrameAddress, 3 * payloadSize, grabResult);

                            HOperatorSet.GenImageInterleaved(out hPylonImage, latestFrameAddress, "bgr",
                                     (HTuple)imageWidth, (HTuple)imageHeight, -1, "byte", (HTuple)imageWidth, (HTuple)imageHeight, 0, 0, -1, 0);

                        }

                        // Raise the image-processing event
                        if (hPylonImage == null)
                        {
                            MessageBox.Show("hPylonImage null");
                        }
                        //Console.WriteLine("CAMmm" + sortnum + "when--1--" + DateTime.Now.ToString("mm-ss-fff"));
                        poscmin = Turntable.Instance.copos[sortnum].pos.FirstOrDefault();
                      //  poscmin = Turntable.Instance.copos[sortnum].pos.First();
                        //Console.WriteLine("w" + sortnum + "ss" + poscmin);
                       Turntable.Instance.copos[sortnum].pos.Clear();
                        imagenum++;
                        //if (kkk > 1 && sortnum == 0)
                        //{
                        //    HOperatorSet.WriteImage(hPylonImage, "bmp", 0, @"C:\Users\mxw\Desktop\a2.bmp");
                        //    Console.WriteLine(sortnum + 1);
                        //}
                        // Copy the frame into a fresh HALCON object and queue it for downstream processing.
                        HObject mimage;
                        HOperatorSet.CopyImage(hPylonImage, out mimage);
                        listimage1.Enqueue(mimage);
                        hPylonImage.Dispose();
                }
                else
                {
                    MessageBox.Show("Grab faild!\n" + grabResult.ErrorDescription, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }

            }
            catch (Exception exception)
            {
                Console.WriteLine("  ShowException(exception);");
             //   ShowException(exception);
              //  Turntable.Instance.copos[sortnum + 1].onwrite = false;
                poscmin = Turntable.Instance.copos[sortnum].pos.FirstOrDefault();
                Turntable.Instance.copos[sortnum].pos.Clear();
            }
            finally
            {
              
                // Dispose the grab result if needed for returning it to the grab loop.
                e.DisposeGrabResultIfClone();
          //    Console.WriteLine("CAM" + sortnum + "when--2--" + DateTime.Now.ToString("mm-ss-fff"));
            }
        }
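The callback above allocates latestFrameAddress with Marshal.AllocHGlobal once and never frees it in the excerpt. A hedged cleanup sketch, to be called after grabbing has stopped (for example when the camera is closed):

        private void ReleaseFrameBuffer()
        {
            if (latestFrameAddress != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(latestFrameAddress);    // free the buffer allocated in OnImageGrabbed
                latestFrameAddress = IntPtr.Zero;           // force a re-allocation if grabbing restarts
            }
        }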
Exemple #24
0
        private void OnImageGrabbed(object sender, ImageGrabbedEventArgs e)
        {
            try
            {
                if (ignoreImage)
                {
                    return;
                }
                if (Command == Command.Video || Command == Command.ExtTrigger)
                {
                    HOperatorSet.CountSeconds(out startTime);
                }
                // Acquire the image from the camera. Only show the latest image. The camera may acquire images faster than the images can be displayed.

                // Get the grab result.
                IGrabResult grabResult = e.GrabResult;

                // Check if the image can be displayed.
                if (grabResult.IsValid)
                {
                    // Reduce the number of displayed images to a reasonable amount if the camera is acquiring images very fast.
                    //if (!stopWatch.IsRunning || stopWatch.ElapsedMilliseconds > 33)
                    {
                        //stopWatch.Restart();
                        width  = grabResult.Width;
                        height = grabResult.Height;
                        //if (hPylonImage != null && hPylonImage.IsInitialized())
                        //{
                        //    hPylonImage.Dispose();
                        //}
                        hPylonImage = new HImage();
                        if (grabResult.PixelTypeValue == PixelType.Mono8)
                        {
                            if (grabResult.GrabSucceeded == false)
                            {
                                Util.Notify(string.Format("相机{0}数据损坏,采集失败", cameraIndex));
                                return;
                            }
                            if (grabResult.PayloadSize == 0)
                            {
                                Util.Notify(string.Format("相机{0}数据损坏,图像包大小为0", cameraIndex));
                                return;
                            }


                            //Util.Notify(string.Format("Camera {0}: payload size {1}", cameraIndex, grabResult.PayloadSize));
                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (m_latestFrameAddress == IntPtr.Zero)
                            {
                                m_latestFrameAddress = Marshal.AllocHGlobal((Int32)grabResult.PayloadSize);
                            }
                            g_converter.OutputPixelFormat = PixelType.Mono8;
                            g_converter.Convert(m_latestFrameAddress, grabResult.PayloadSize, grabResult);

                            // Convert to a HALCON image for display
                            hPylonImage.GenImage1("byte", grabResult.Width, grabResult.Height, m_latestFrameAddress);
                        }
                        else if (grabResult.PixelTypeValue == PixelType.BayerBG8 || grabResult.PixelTypeValue == PixelType.BayerGB8)
                        {
                            //allocate the m_stream_size amount of bytes in non-managed environment
                            if (m_latestFrameAddress == IntPtr.Zero)
                            {
                                m_latestFrameAddress = Marshal.AllocHGlobal((Int32)(3 * g_camera.Parameters[PLCamera.PayloadSize].GetValue()));
                            }
                            g_converter.OutputPixelFormat = PixelType.BGR8packed;
                            g_converter.Convert(m_latestFrameAddress, 3 * grabResult.PayloadSize, grabResult);
                            hPylonImage.GenImageInterleaved(m_latestFrameAddress, "bgr",
                                                            grabResult.Width, grabResult.Height, -1, "byte", grabResult.Width, grabResult.Height, 0, 0, -1, 0);
                        }
                        else
                        {
                            Util.Notify(Common.Basic.Level.Err, string.Format("Camera {0}: unsupported pixel format, current format is {1}", cameraIndex, grabResult.PixelTypeValue));
                        }
                        TrigerImageEvent();
                    }
                }
            }
            catch (System.ArgumentException ex)
            {
                Util.WriteLog(this.GetType(), ex);
                Util.Notify(string.Format("相机{0}图像数据包丢失", cameraIndex));
            }
            catch (Exception ex)
            {
                Util.WriteLog(this.GetType(), ex);
                Util.Notify(string.Format("相机{0}图像数据返回出现异常", cameraIndex));
            }
            finally
            {
                // Dispose the grab result if needed for returning it to the grab loop.
                e.DisposeGrabResultIfClone();
            }
        }
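The excerpt does not show how this handler is registered. A minimal wiring sketch, assuming the g_camera field used inside the handler and the standard Basler.Pylon event/grab API; GrabStrategy.OneByOne is an arbitrary choice here:

        private void StartContinuousGrab()
        {
            g_camera.StreamGrabber.ImageGrabbed += OnImageGrabbed;    // deliver every frame to the callback above
            g_camera.Parameters[PLCamera.AcquisitionMode].SetValue(PLCamera.AcquisitionMode.Continuous);
            g_camera.StreamGrabber.Start(GrabStrategy.OneByOne, GrabLoop.ProvidedByStreamGrabber);
        }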
        private void DetectIncomingTrains()
        {
            _camera.StreamGrabber.Start(GrabStrategy.LatestImages, GrabLoop.ProvidedByUser);

            // Detection class
            var detector = new EntryDetector(() =>
            {
                // Re-run the one-shot automatic exposure and gain correction
                var exposureTime          = _camera.Parameters[PLCamera.ExposureTime].GetValue();
                var gain                  = _camera.Parameters[PLCamera.Gain].GetValue();
                var formattedExposureTime = string.Format(CultureInfo.CurrentCulture, "{0:0,0}", exposureTime);
                var formattedGain         = string.Format(CultureInfo.CurrentCulture, "{0:0,0}", gain);
                Log.Info($"Exposure is {formattedExposureTime}μs. Gain is {formattedGain}db. Automatically adjusting");

                _camera.Parameters[PLCamera.ExposureAuto].SetValue(PLCamera.ExposureAuto.Off);
                _camera.Parameters[PLCamera.GainAuto].TrySetValue(PLCamera.GainAuto.Off);

                _camera.Parameters[PLCamera.ExposureAuto].SetValue(PLCamera.ExposureAuto.Once);
                _camera.Parameters[PLCamera.GainAuto].TrySetValue(PLCamera.GainAuto.Once);
            });

            // Event handlers
            detector.Enter += (sender, args) => Publish(Commands.CaptureStart, FileUtil.GenerateTimestampFilename());
            detector.Exit  += (sender, args) => Publish(Commands.CaptureStop);
            detector.Abort += (sender, args) => Publish(Commands.CaptureAbort);

            // Array of images collected until it is full; the batch is then analyzed.
            var images = new Image <Gray, byte> [AnalyzeSequenceImages];

            // Analyze image array counter
            var i = 0;

            // Buffer to put debayered RGB image into (3 channels)
            var convertedBuffer = new byte[_size.Width * _size.Height * 3];

            // Some precalculated constants that we'll use later
            var roi            = new Rectangle(0, _size.Height - RoiY, _size.Width, RoiHeight);
            var downscaledSize = new Size(_size.Width / 2, RoiHeight / 2);

            // Count error frames
            var errorCount = 0;

            // Grab images.
            while (true)
            {
                HandleStateChange();

                // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
                var grabResult = _camera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException);

                using (grabResult)
                {
                    // Image grabbed successfully?
                    if (!grabResult.GrabSucceeded)
                    {
                        Log.Error($"Error: {grabResult.ErrorCode} {grabResult.ErrorDescription}");
                        errorCount++;

                        if (errorCount > ErrorThreshold)
                        {
                            Log.Error("Too many errors. Exiting detection. Not exiting recoding.");
                            break;
                        }

                        continue;
                    }

                    // Debayering RGB image
                    _converter.Convert(convertedBuffer, grabResult);

                    // Convert into EmguCV image type
                    var image = new Image <Rgb, byte>(grabResult.Width, grabResult.Height)
                    {
                        Bytes = convertedBuffer
                    };

                    // Write to recorder (if the recorder is not recording, it will discard it)
                    _recorder.Write(image.Mat);

                    // Convert to grayscale image for further analysis and cut down the region of interest
                    var grayImage = image.Convert <Gray, byte>();
                    grayImage.ROI = roi;
                    CvInvoke.Resize(grayImage, grayImage, downscaledSize);

                    // Append to analyze array
                    images[i] = grayImage;
                    i++;

                    // Skip the analysis step until a full array of images has been collected
                    if (i != images.Length)
                    {
                        continue;
                    }

                    // Reset array counter
                    i = 0;

                    // Let the detector do its thing (is a train entering? exiting?)
                    detector.Tick(images);

                    // Drop the references so the images can be garbage collected
                    for (var k = 0; k < images.Length; k++)
                    {
                        images[k] = null;
                    }
                }
            }
        }
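The debayering call above fills convertedBuffer in whatever layout _converter is configured for, so the converter setup (not shown in the excerpt) must match the 3-byte-per-pixel RGB order that Image<Rgb, byte> expects. A hedged setup sketch:

        // Assumed converter initialization; only the output pixel format matters here.
        _converter = new PixelDataConverter
        {
            OutputPixelFormat = PixelType.RGB8packed    // 3 bytes per pixel in R, G, B order
        };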