Code example #1
        public void CaptureThreadProc()
        {
            CameraSdkStatus eStatus;
            tSdkFrameHead   FrameHead;
            uint            uRawBuffer; // the raw buffer is allocated internally by the SDK; the application must not free it (e.g. with delete)

            while (m_bExitCaptureThread == false)
            {
                //500 ms timeout; until a frame arrives the thread is suspended and the CPU is released, so no Sleep call is needed in this thread
                eStatus = MvApi.CameraGetImageBuffer(m_hCamera, out FrameHead, out uRawBuffer, 500);

                if (eStatus == CameraSdkStatus.CAMERA_STATUS_SUCCESS)//in trigger mode the call may time out
                {
                    //Image processing: convert the raw output to RGB bitmap data, applying ISP steps such as white balance, saturation and LUT.
                    MvApi.CameraImageProcess(m_hCamera, uRawBuffer, m_ImageBuffer, ref FrameHead);
                    //Overlay the crosshair, auto-exposure window and white-balance window (only items set to visible are drawn).
                    MvApi.CameraImageOverlay(m_hCamera, m_ImageBuffer, ref FrameHead);
                    //Use the display interface wrapped by the SDK to show the preview image.
                    MvApi.CameraDisplayRGB24(m_hCamera, m_ImageBuffer, ref FrameHead);
                    //After a successful CameraGetImageBuffer call the buffer must be released before CameraGetImageBuffer can be called again to capture the next frame.
                    MvApi.CameraReleaseImageBuffer(m_hCamera, uRawBuffer);

                    if (FrameHead.iWidth != m_tFrameHead.iWidth || FrameHead.iHeight != m_tFrameHead.iHeight)
                    {
                        m_bEraseBk   = true;
                        m_tFrameHead = FrameHead;
                    }
                    m_iDisplayedFrames++;
                }
            }
        }
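
All three examples assume that CaptureThreadProc runs on a dedicated thread and exits when m_bExitCaptureThread becomes true. A minimal sketch of how such a thread might be started and stopped is shown below; StartCapture, StopCapture and m_CaptureThread are assumed names, not part of the SDK.

        private Thread m_CaptureThread;                 // requires using System.Threading;
        private volatile bool m_bExitCaptureThread;

        public void StartCapture()
        {
            m_bExitCaptureThread = false;
            m_CaptureThread = new Thread(CaptureThreadProc) { IsBackground = true };
            m_CaptureThread.Start();
        }

        public void StopCapture()
        {
            m_bExitCaptureThread = true;   // CaptureThreadProc polls this flag once per loop iteration
            m_CaptureThread?.Join();       // the CameraGetImageBuffer timeout bounds how long the join waits
        }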
Code example #2
        public void CaptureThreadProc()
        {
            CameraSdkStatus eStatus;
            tSdkFrameHead   pFrameHead;
            IntPtr          uRawBuffer; // the raw buffer is allocated internally by the SDK; the application must not free it (e.g. with delete)


            while (m_bExitCaptureThread == false)
            {
                //5000 ms timeout; until a frame arrives the thread is suspended and the CPU is released, so no Sleep call is needed in this thread
                eStatus = MvApi.CameraGetImageBuffer(m_hCamera, out pFrameHead, out uRawBuffer, 5000);
                if (eStatus == CameraSdkStatus.CAMERA_STATUS_SUCCESS)//in trigger mode the call may time out
                {
                    MvApi.CameraImageProcess(m_hCamera, uRawBuffer, m_ImageBuffer, ref pFrameHead);

                    MvApi.CameraReleaseImageBuffer(m_hCamera, uRawBuffer);

                    //Display with HALCON.
                    //Assumption: the RGB output of CameraImageProcess is tightly packed (row stride = iWidth * 3, no 4-byte row padding),
                    //which matches the copy length below; gen_image3 also expects unpadded channel planes.
                    int bytewidth = pFrameHead.iWidth * 3; //byte width of one interleaved RGB row
                    //Allocate managed buffers for the interleaved image and the three channel planes.
                    byte[] m_pImageData  = new byte[bytewidth * pFrameHead.iHeight];
                    byte[] m_pImageDataR = new byte[pFrameHead.iWidth * pFrameHead.iHeight];
                    byte[] m_pImageDataG = new byte[pFrameHead.iWidth * pFrameHead.iHeight];
                    byte[] m_pImageDataB = new byte[pFrameHead.iWidth * pFrameHead.iHeight];
                    //Copy the interleaved image from the unmanaged SDK buffer into the managed array (requires using System.Runtime.InteropServices;).
                    Marshal.Copy(m_ImageBuffer, m_pImageData, 0, bytewidth * pFrameHead.iHeight);
                    for (int j = 0; j < pFrameHead.iHeight; j++)
                    {
                        for (int i = 0; i < pFrameHead.iWidth; i++)
                        {
                            //Split each interleaved pixel into separate channel planes (the code assumes B, G, R byte order in the buffer).
                            //The channel planes use iWidth bytes per row, matching their allocation and what gen_image3 expects.
                            m_pImageDataB[j * pFrameHead.iWidth + i] = m_pImageData[j * bytewidth + i * 3 + 0];
                            m_pImageDataG[j * pFrameHead.iWidth + i] = m_pImageData[j * bytewidth + i * 3 + 1];
                            m_pImageDataR[j * pFrameHead.iWidth + i] = m_pImageData[j * bytewidth + i * 3 + 2];

                            //Optional grayscale conversion (unused):
                            // m_pImageDatagray[j * pFrameHead.iWidth + i] = (byte)(0.11 * m_pImageData[j * bytewidth + i * 3 + 0] + 0.59 * m_pImageData[j * bytewidth + i * 3 + 1] + 0.30 * m_pImageData[j * bytewidth + i * 3 + 2]);
                        }
                    }
                    unsafe
                    {
                        fixed(byte *pR = m_pImageDataR, pB = m_pImageDataB, pG = m_pImageDataG)
                        {
                            //gen_image3 takes the red, green and blue channel pointers in that order.
                            HOperatorSet.GenImage3(out halcon_image, "byte", pFrameHead.iWidth, pFrameHead.iHeight, new IntPtr(pR), new IntPtr(pG), new IntPtr(pB));
                        }
                    }
                    //Query the image size, then set the displayed part of the window to match.
                    HOperatorSet.GetImageSize(halcon_image, out hv_Width, out hv_Height);
                    HOperatorSet.SetPart(hv_WindowHandle, 0, 0, hv_Height, hv_Width);
                    //If the image appears flipped, HALCON's mirror_image operator can be used to mirror it:
                    //HOperatorSet.MirrorImage(halcon_image, out halcon_image, "row");
                    HOperatorSet.DispObj(halcon_image, hv_WindowHandle);
                    halcon_image.Dispose();
                }
            }
        }
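
If only a grayscale image is needed (as in the commented-out conversion inside the loop above), the channel split can be skipped and a single-channel image created with gen_image1 instead of gen_image3. A minimal sketch, assuming a byte[] m_pImageDatagray of size iWidth * iHeight filled with one gray value per pixel:

                    //Sketch: build a single-channel HALCON image from a grayscale buffer.
                    unsafe
                    {
                        fixed (byte* pGray = m_pImageDatagray)
                        {
                            //gen_image1 copies the data, so the managed buffer may be reused afterwards.
                            HOperatorSet.GenImage1(out halcon_image, "byte", pFrameHead.iWidth, pFrameHead.iHeight, new IntPtr(pGray));
                        }
                    }
                    HOperatorSet.DispObj(halcon_image, hv_WindowHandle);
                    halcon_image.Dispose();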
Code example #3
        public void CaptureThreadProc()
        {
            CameraSdkStatus eStatus;
            tSdkFrameHead   FrameHead;
            IntPtr          uRawBuffer; // the raw buffer is allocated internally by the SDK; the application must not free it (e.g. with delete)

            while (m_bExitCaptureThread == false)
            {
                //500 ms timeout; until a frame arrives the thread is suspended and the CPU is released, so no Sleep call is needed in this thread
                eStatus = MvApi.CameraGetImageBuffer(m_hCamera, out FrameHead, out uRawBuffer, 500);

                if (eStatus == CameraSdkStatus.CAMERA_STATUS_SUCCESS)//in trigger mode the call may time out
                {
                    //Image processing: convert the raw output to RGB bitmap data, applying ISP steps such as white balance, saturation and LUT.
                    MvApi.CameraImageProcess(m_hCamera, uRawBuffer, m_ImageBuffer, ref FrameHead);
                    //Overlay the crosshair, auto-exposure window and white-balance window (only items set to visible are drawn).
                    MvApi.CameraImageOverlay(m_hCamera, m_ImageBuffer, ref FrameHead);
                    //Use the display interface wrapped by the SDK to show the preview image.
                    MvApi.CameraDisplayRGB24(m_hCamera, m_ImageBuffer, ref FrameHead);
                    //After a successful CameraGetImageBuffer call the buffer must be released before CameraGetImageBuffer can be called again to capture the next frame.
                    MvApi.CameraReleaseImageBuffer(m_hCamera, uRawBuffer);

                    if (FrameHead.iWidth != m_tFrameHead.iWidth || FrameHead.iHeight != m_tFrameHead.iHeight)
                    {
                        m_bEraseBk   = true;
                        m_tFrameHead = FrameHead;
                    }

                    m_iDisplayedFrames++;

                    if (m_bSaveImage)
                    {
                        //Save the processed frame to c:\test.bmp and clear the one-shot flag.
                        string file_path = "c:\\test.bmp";
                        byte[] file_path_bytes = Encoding.Default.GetBytes(file_path);
                        MvApi.CameraSaveImage(m_hCamera, file_path_bytes, m_ImageBuffer, ref FrameHead, emSdkFileType.FILE_BMP, 100);
                        m_bSaveImage = false;
                    }
                }
            }
        }
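
The save in example #3 is driven by the m_bSaveImage flag, which the capture thread checks once per captured frame. A minimal sketch of how a UI handler might request a snapshot; the button name and handler are assumptions, not part of the SDK:

        private volatile bool m_bSaveImage;   // one-shot flag polled by CaptureThreadProc

        //Hypothetical WinForms click handler; btnSnapshot is an assumed button on the form.
        private void btnSnapshot_Click(object sender, EventArgs e)
        {
            //The capture thread saves the next successfully captured frame to c:\test.bmp
            //and then clears the flag again.
            m_bSaveImage = true;
        }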