/************************************************************************
 * @fn    IsColorData()
 * @brief Check whether the given pixel format is color data
 * @param enGvspPixelType [IN] pixel format
 * @return true if the format is a color format, otherwise false
 ************************************************************************/
private Boolean IsColorData(MyCamera.MvGvspPixelType enGvspPixelType)
{
    // Formats this class treats as "color": 8/10/12-bit Bayer variants
    // (plain and packed), packed RGB and the YUV flavors.
    MyCamera.MvGvspPixelType[] colorFormats =
    {
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR12_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG12_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB12_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG12_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_YUYV_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_YCBCR411_8_CBYYCRYY
    };
    return Array.IndexOf(colorFormats, enGvspPixelType) >= 0;
}
// ch:去除自定义的像素格式 | en:Remove custom pixel formats
/// <summary>
/// Returns true when the pixel format is a custom (vendor-defined) format,
/// identified by the highest bit (0x80000000) of the pixel-type value.
/// </summary>
/// <param name="enPixelFormat">Pixel format to test.</param>
/// <returns>true for custom formats, false for standard formats.</returns>
private bool RemoveCustomPixelFormats(MyCamera.MvGvspPixelType enPixelFormat)
{
    // Fixed: the original compared an Int32 mask result against the uint
    // literal 0x80000000; both operands were promoted to long, so a set
    // high bit produced -2147483648 == 2147483648 and the method could
    // never return true. Doing the test in unsigned space is correct.
    return ((uint)enPixelFormat & 0x80000000u) != 0;
}
/// <summary>
/// Check whether the given pixel format is monochrome data.
/// </summary>
/// <param name="enGvspPixelType">Pixel format to test.</param>
/// <returns>true for the supported mono formats, otherwise false.</returns>
private Boolean IsMonoData(MyCamera.MvGvspPixelType enGvspPixelType)
{
    // Mono formats this class knows how to handle.
    MyCamera.MvGvspPixelType[] monoFormats =
    {
        MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono12_Packed
    };
    return Array.IndexOf(monoFormats, enGvspPixelType) >= 0;
}
/// <summary>
/// Check whether the given pixel format is one of the supported
/// monochrome formats (Mono8/10/12, plain or packed).
/// </summary>
/// <param name="enType">Pixel format to test.</param>
/// <returns>true for a supported mono format, otherwise false.</returns>
private bool IsMonoPixelFormat(MyCamera.MvGvspPixelType enType)
{
    return enType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8
        || enType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono10
        || enType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono10_Packed
        || enType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono12
        || enType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono12_Packed;
}
/// <summary>
/// Check whether the given pixel format is one of the supported
/// color formats (packed RGB/BGR(A), YUV422 variants, and the
/// 8/10/12-bit Bayer variants, plain or packed).
/// </summary>
/// <param name="enType">Pixel format to test.</param>
/// <returns>true for a supported color format, otherwise false.</returns>
private bool IsColorPixelFormat(MyCamera.MvGvspPixelType enType)
{
    MyCamera.MvGvspPixelType[] colorFormats =
    {
        MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BGR8_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_RGBA8_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BGRA8_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_YUYV_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG8,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR10,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR10_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB12_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG12_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG12_Packed,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR12,
        MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR12_Packed
    };
    return Array.IndexOf(colorFormats, enType) >= 0;
}
/// <summary>
/// Opens the camera for continuous acquisition: reads the current pixel
/// format, switches any YUV format to packed RGB8, registers the frame
/// callback and starts grabbing.
/// </summary>
/// <returns>true on success.</returns>
/// <exception cref="Exception">Thrown (after logging/notifying) when any SDK call fails.</exception>
public override bool Open()
{
    try
    {
        uint pixelFormat = 0;
        int nRet = m_pOperator.GetEnumValue("PixelFormat", ref pixelFormat);
        if (MyCamera.MV_OK != nRet)
        {
            throw new Exception("图像格式获取错误");
        }

        // If the camera is configured for a YUV format, switch it to packed
        // RGB8 before grabbing starts.
        MyCamera.MvGvspPixelType imgType = (MyCamera.MvGvspPixelType)pixelFormat;
        if (imgType == MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV411_Packed
            || imgType == MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_Packed
            || imgType == MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_YUYV_Packed
            || imgType == MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV444_Packed)
        {
            int result = m_pOperator.SetEnumValue("PixelFormat", (uint)MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed);
            if (MyCamera.MV_OK != result)
            {
                throw new Exception("图像格式设置错误");
            }
        }

        // Register the frame callback before grabbing starts.
        ImageCallback = new MyCamera.cbOutputdelegate(SaveImage);
        nRet = m_pOperator.RegisterImageCallBack(ImageCallback, IntPtr.Zero);
        if (MyCamera.MV_OK != nRet)
        {
            throw new Exception("回调函数注册失败");
        }

        ContinuousShotStop();

        // 开始采集 | Start acquisition.
        nRet = m_pOperator.StartGrabbing();
        if (MyCamera.MV_OK != nRet)
        {
            throw new Exception("开始采集失败");
        }

        GetCameraSettingData();
        IsLink = true;
    }
    catch (Exception ex)
    {
        Util.WriteLog(this.GetType(), ex);
        Util.Notify("相机打开出现异常:" + ex.Message);
        // Fixed: "throw ex;" resets the stack trace; "throw;" preserves it.
        throw;
    }
    return true;
}
/// <summary>
/// Console sample: enumerate GigE/USB devices, open the selected camera,
/// verify it outputs a 3D coordinate pixel format, grab 100 frames,
/// convert them to a PLY point cloud and save "PointCloudData.ply".
/// </summary>
static void Main(string[] args)
{
    int nRet = MyCamera.MV_OK;
    MyCamera device = new MyCamera();

    do
    {
        // ch:枚举设备 | en:Enum device
        MyCamera.MV_CC_DEVICE_INFO_LIST stDevList = new MyCamera.MV_CC_DEVICE_INFO_LIST();
        nRet = MyCamera.MV_CC_EnumDevices_NET(MyCamera.MV_GIGE_DEVICE | MyCamera.MV_USB_DEVICE, ref stDevList);
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Enum device failed:{0:x8}", nRet);
            break;
        }
        Console.WriteLine("Enum device count : " + Convert.ToString(stDevList.nDeviceNum));
        if (0 == stDevList.nDeviceNum)
        {
            break;
        }

        MyCamera.MV_CC_DEVICE_INFO stDevInfo; // 通用设备信息 | generic device info

        // ch:打印设备信息 | en:Print device info
        for (Int32 i = 0; i < stDevList.nDeviceNum; i++)
        {
            stDevInfo = (MyCamera.MV_CC_DEVICE_INFO)Marshal.PtrToStructure(stDevList.pDeviceInfo[i], typeof(MyCamera.MV_CC_DEVICE_INFO));
            if (MyCamera.MV_GIGE_DEVICE == stDevInfo.nTLayerType)
            {
                MyCamera.MV_GIGE_DEVICE_INFO stGigEDeviceInfo = (MyCamera.MV_GIGE_DEVICE_INFO)MyCamera.ByteToStruct(stDevInfo.SpecialInfo.stGigEInfo, typeof(MyCamera.MV_GIGE_DEVICE_INFO));
                // Decode the IPv4 address packed into one 32-bit value.
                uint nIp1 = ((stGigEDeviceInfo.nCurrentIp & 0xff000000) >> 24);
                uint nIp2 = ((stGigEDeviceInfo.nCurrentIp & 0x00ff0000) >> 16);
                uint nIp3 = ((stGigEDeviceInfo.nCurrentIp & 0x0000ff00) >> 8);
                uint nIp4 = (stGigEDeviceInfo.nCurrentIp & 0x000000ff);
                Console.WriteLine("\n" + i.ToString() + ": [GigE] User Define Name : " + stGigEDeviceInfo.chUserDefinedName);
                Console.WriteLine("device IP :" + nIp1 + "." + nIp2 + "." + nIp3 + "." + nIp4);
            }
            else if (MyCamera.MV_USB_DEVICE == stDevInfo.nTLayerType)
            {
                MyCamera.MV_USB3_DEVICE_INFO stUsb3DeviceInfo = (MyCamera.MV_USB3_DEVICE_INFO)MyCamera.ByteToStruct(stDevInfo.SpecialInfo.stUsb3VInfo, typeof(MyCamera.MV_USB3_DEVICE_INFO));
                Console.WriteLine("\n" + i.ToString() + ": [U3V] User Define Name : " + stUsb3DeviceInfo.chUserDefinedName);
                Console.WriteLine("\n Serial Number : " + stUsb3DeviceInfo.chSerialNumber);
                Console.WriteLine("\n Device Number : " + stUsb3DeviceInfo.nDeviceNumber);
            }
        }

        // Ask the user which device to open.
        Int32 nDevIndex = 0;
        Console.Write("\nPlease input index (0 -- {0:d}) : ", stDevList.nDeviceNum - 1);
        try
        {
            nDevIndex = Convert.ToInt32(Console.ReadLine());
        }
        catch
        {
            Console.Write("Invalid Input!\n");
            break;
        }
        if (nDevIndex > stDevList.nDeviceNum - 1 || nDevIndex < 0)
        {
            Console.Write("Input Error!\n");
            break;
        }
        stDevInfo = (MyCamera.MV_CC_DEVICE_INFO)Marshal.PtrToStructure(stDevList.pDeviceInfo[nDevIndex], typeof(MyCamera.MV_CC_DEVICE_INFO));

        // ch:创建设备 | en:Create device
        nRet = device.MV_CC_CreateDevice_NET(ref stDevInfo);
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Create device failed:{0:x8}", nRet);
            break;
        }

        // ch:打开设备 | en:Open device
        nRet = device.MV_CC_OpenDevice_NET();
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Open device failed:{0:x8}", nRet);
            break;
        }

        // ch:判断设备是否是设置的3D格式 | en:Judge whether the device is set to a 3D format
        MyCamera.MVCC_ENUMVALUE EnumValue = new MyCamera.MVCC_ENUMVALUE();
        nRet = device.MV_CC_GetEnumValue_NET("PixelFormat", ref EnumValue);
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Get the Camera format fail:{0:x8}", nRet);
            break;
        }
        MyCamera.MvGvspPixelType ePixelFormat = (MyCamera.MvGvspPixelType)EnumValue.nCurValue;
        switch (ePixelFormat)
        {
            case MyCamera.MvGvspPixelType.PixelType_Gvsp_Coord3D_ABC32:
            case MyCamera.MvGvspPixelType.PixelType_Gvsp_Coord3D_ABC32f:
            case MyCamera.MvGvspPixelType.PixelType_Gvsp_Coord3D_AB32:
            case MyCamera.MvGvspPixelType.PixelType_Gvsp_Coord3D_AB32f:
            case MyCamera.MvGvspPixelType.PixelType_Gvsp_Coord3D_AC32:
            case MyCamera.MvGvspPixelType.PixelType_Gvsp_Coord3D_AC32f:
                nRet = MyCamera.MV_OK;
                break;
            default:
                nRet = MyCamera.MV_E_SUPPORT;
                break;
        }
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("This is not a supported 3D format!");
            break;
        }

        // ch:探测网络最佳包大小(只对GigE相机有效) | en:Detect optimal packet size (GigE only)
        if (stDevInfo.nTLayerType == MyCamera.MV_GIGE_DEVICE)
        {
            int nPacketSize = device.MV_CC_GetOptimalPacketSize_NET();
            if (nPacketSize > 0)
            {
                nRet = device.MV_CC_SetIntValue_NET("GevSCPSPacketSize", Convert.ToUInt32(nPacketSize));
                if (nRet != MyCamera.MV_OK)
                {
                    Console.WriteLine("Warning: Set Packet Size failed {0:x8}", nRet);
                }
            }
            else
            {
                Console.WriteLine("Warning: Get Packet Size failed {0:x8}", nPacketSize);
            }
        }

        // ch:获取触发模式的访问模式 | en:Get access mode of trigger mode
        MyCamera.MV_XML_AccessMode pAccessMode = MyCamera.MV_XML_AccessMode.AM_NI;
        if (MyCamera.MV_OK != device.MV_XML_GetNodeAccessMode_NET("TriggerMode", ref pAccessMode))
        {
            // Fixed: the original used the C-style "%x" placeholder, which
            // Console.WriteLine never substitutes.
            Console.WriteLine("Get Access mode of trigger mode fail! nRet [0x{0:x8}]", nRet);
        }
        else
        {
            // ch:设置触发模式为off | en:Set trigger mode as off
            if (MyCamera.MV_OK != device.MV_CC_SetEnumValue_NET("TriggerMode", 0))
            {
                Console.WriteLine("Set TriggerMode failed!");
                break;
            }
        }

        // ch:获取包大小 | en:Get payload size
        MyCamera.MVCC_INTVALUE stParam = new MyCamera.MVCC_INTVALUE();
        nRet = device.MV_CC_GetIntValue_NET("PayloadSize", ref stParam);
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Get PayloadSize failed:{0:x8}", nRet);
            break;
        }
        UInt32 nPayloadSize = stParam.nCurValue;

        // ch:开启抓图 | en:Start grabbing
        nRet = device.MV_CC_StartGrabbing_NET();
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Start grabbing failed:{0:x8}", nRet);
            break;
        }

        uint nImageNum = 100;
        byte[] bSaveImageBuf = null;
        try
        {
            // ch:申请足够大的缓存,用于保存获取到的图像 | en:Buffer large enough for all grabbed frames
            bSaveImageBuf = new byte[nPayloadSize * nImageNum];
        }
        catch (Exception) // fixed: the caught exception variable was never used
        {
            Console.WriteLine("Malloc Save buffer fail!\n");
            break;
        }
        uint nSaveImageSize = nPayloadSize * nImageNum; // capacity of bSaveImageBuf
        // ch:已获取的总图片大小 | en:total bytes of frame data accumulated so far
        uint nSaveDataLen = 0;

        MyCamera.MV_FRAME_OUT stOutFrame = new MyCamera.MV_FRAME_OUT();
        for (uint i = 0; i < nImageNum; i++)
        {
            nRet = device.MV_CC_GetImageBuffer_NET(ref stOutFrame, 1000);
            if (nRet == MyCamera.MV_OK)
            {
                Console.WriteLine("Get One Frame:" + "Width[" + Convert.ToString(stOutFrame.stFrameInfo.nWidth) + "] , Height[" + Convert.ToString(stOutFrame.stFrameInfo.nHeight) + "] , FrameNum[" + Convert.ToString(stOutFrame.stFrameInfo.nFrameNum) + "]");
                if (nSaveImageSize > (nSaveDataLen + stOutFrame.stFrameInfo.nFrameLen))
                {
                    // ch:将图像拷贝到bSaveImageBuf | en:Append this frame to the accumulation buffer
                    Marshal.Copy(stOutFrame.pBufAddr, bSaveImageBuf, Convert.ToInt32(nSaveDataLen), Convert.ToInt32(stOutFrame.stFrameInfo.nFrameLen));
                    nSaveDataLen += stOutFrame.stFrameInfo.nFrameLen;
                }
                nRet = device.MV_CC_FreeImageBuffer_NET(ref stOutFrame);
                if (nRet != MyCamera.MV_OK)
                {
                    Console.WriteLine("Free Image Buffer fail:{0:x8}", nRet);
                }
            }
            else
            {
                Console.WriteLine("No data:{0:x8}", nRet);
            }
        }

        // Build the point-cloud conversion parameters. Frame dimensions are
        // taken from stOutFrame, i.e. the last grabbed frame (zero if no
        // frame succeeded), as in the original sample.
        MyCamera.MV_SAVE_POINT_CLOUD_PARAM stSavePoCloudPar = new MyCamera.MV_SAVE_POINT_CLOUD_PARAM();
        stSavePoCloudPar.nLinePntNum = stOutFrame.stFrameInfo.nWidth;           // points per line
        stSavePoCloudPar.nLineNum = stOutFrame.stFrameInfo.nHeight * nImageNum; // total line count
        uint nDstImageSize = stSavePoCloudPar.nLineNum * stSavePoCloudPar.nLinePntNum * (16 * 3 + 4) + 2048;
        byte[] bDstImageBuf = new byte[nDstImageSize];
        stSavePoCloudPar.enPointCloudFileType = MyCamera.MV_SAVE_POINT_CLOUD_FILE_TYPE.MV_PointCloudFile_PLY;
        stSavePoCloudPar.enSrcPixelType = stOutFrame.stFrameInfo.enPixelType;
        stSavePoCloudPar.nSrcDataLen = nSaveDataLen;

        // Pin the managed buffers while the SDK works on them. Fixed: the
        // original leaked both pins when MV_CC_SavePointCloudData_NET failed.
        GCHandle hSrcData = GCHandle.Alloc(bSaveImageBuf, GCHandleType.Pinned);
        GCHandle hDstData = GCHandle.Alloc(bDstImageBuf, GCHandleType.Pinned);
        try
        {
            stSavePoCloudPar.pSrcData = hSrcData.AddrOfPinnedObject();
            stSavePoCloudPar.pDstBuf = hDstData.AddrOfPinnedObject();
            stSavePoCloudPar.nDstBufSize = nDstImageSize;

            // Save point cloud data
            nRet = device.MV_CC_SavePointCloudData_NET(ref stSavePoCloudPar);
            if (MyCamera.MV_OK != nRet)
            {
                Console.WriteLine("Save point cloud data fail:{0:x8}", nRet);
                break; // the finally block still releases the pins
            }

            // Fixed: dispose the stream deterministically.
            using (FileStream file = new FileStream("PointCloudData.ply", FileMode.Create, FileAccess.Write))
            {
                file.Write(bDstImageBuf, 0, Convert.ToInt32(stSavePoCloudPar.nDstBufLen));
            }
            Console.WriteLine("Save point cloud data succeed");
        }
        finally
        {
            hSrcData.Free();
            hDstData.Free();
        }

        // ch:停止抓图 | en:Stop grab image
        nRet = device.MV_CC_StopGrabbing_NET();
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Stop grabbing failed{0:x8}", nRet);
            break;
        }

        // ch:关闭设备 | en:Close device
        nRet = device.MV_CC_CloseDevice_NET();
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Close device failed{0:x8}", nRet);
            break;
        }

        // ch:销毁设备 | en:Destroy device
        nRet = device.MV_CC_DestroyDevice_NET();
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Destroy device failed:{0:x8}", nRet);
            break;
        }
    } while (false);

    if (MyCamera.MV_OK != nRet)
    {
        // ch:销毁设备 | en:Destroy device on any failure path
        nRet = device.MV_CC_DestroyDevice_NET();
        if (MyCamera.MV_OK != nRet)
        {
            Console.WriteLine("Destroy device failed:{0:x8}", nRet);
        }
    }

    Console.WriteLine("Press enter to exit");
    Console.ReadKey();
}
/// <summary>
/// Saves the most recently grabbed frame to "image.bmp". Mono8 and BGR8
/// frames are written directly; other supported formats are converted to
/// Mono8 or BGR8 first via MV_CC_ConvertPixelType_NET.
/// </summary>
private void bnSaveBmp_Click(object sender, EventArgs e)
{
    if (false == m_bGrabbing)
    {
        ShowErrorMsg("Not Start Grabbing", 0);
        return;
    }
    if (RemoveCustomPixelFormats(m_stFrameInfo.enPixelType))
    {
        ShowErrorMsg("Not Support!", 0);
        return;
    }

    IntPtr pTemp = IntPtr.Zero;
    MyCamera.MvGvspPixelType enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Undefined;

    if (m_stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8
        || m_stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_BGR8_Packed)
    {
        // Already in a directly savable format; no conversion needed.
        pTemp = m_BufForDriver;
        enDstPixelType = m_stFrameInfo.enPixelType;
    }
    else
    {
        UInt32 nSaveImageNeedSize = 0;
        MyCamera.MV_PIXEL_CONVERT_PARAM stConverPixelParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
        lock (BufForDriverLock)
        {
            if (m_stFrameInfo.nFrameLen == 0)
            {
                ShowErrorMsg("Save Bmp Fail!", 0);
                return;
            }
            if (IsMonoData(m_stFrameInfo.enPixelType))
            {
                enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;
                nSaveImageNeedSize = (uint)m_stFrameInfo.nWidth * m_stFrameInfo.nHeight;
            }
            else if (IsColorData(m_stFrameInfo.enPixelType))
            {
                enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_BGR8_Packed;
                nSaveImageNeedSize = (uint)m_stFrameInfo.nWidth * m_stFrameInfo.nHeight * 3;
            }
            else
            {
                ShowErrorMsg("No such pixel type!", 0);
                return;
            }

            // Grow the conversion buffer when the current one is too small.
            if (m_nBufSizeForSaveImage < nSaveImageNeedSize)
            {
                if (m_BufForSaveImage != IntPtr.Zero)
                {
                    // Fixed: the buffer comes from Marshal.AllocHGlobal, so it
                    // must be released with FreeHGlobal. Marshal.Release is for
                    // COM interface pointers and is wrong here.
                    Marshal.FreeHGlobal(m_BufForSaveImage);
                }
                m_nBufSizeForSaveImage = nSaveImageNeedSize;
                m_BufForSaveImage = Marshal.AllocHGlobal((Int32)m_nBufSizeForSaveImage);
            }

            stConverPixelParam.nWidth = m_stFrameInfo.nWidth;
            stConverPixelParam.nHeight = m_stFrameInfo.nHeight;
            stConverPixelParam.pSrcData = m_BufForDriver;
            stConverPixelParam.nSrcDataLen = m_stFrameInfo.nFrameLen;
            stConverPixelParam.enSrcPixelType = m_stFrameInfo.enPixelType;
            stConverPixelParam.enDstPixelType = enDstPixelType;
            stConverPixelParam.pDstBuffer = m_BufForSaveImage;
            stConverPixelParam.nDstBufferSize = m_nBufSizeForSaveImage;
            int nRet = m_MyCamera.MV_CC_ConvertPixelType_NET(ref stConverPixelParam);
            if (MyCamera.MV_OK != nRet)
            {
                ShowErrorMsg("Convert Pixel Type Fail!", nRet);
                return;
            }
            pTemp = m_BufForSaveImage;
        }
    }

    lock (BufForDriverLock)
    {
        if (enDstPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
        {
            // Mono8 -> 8bpp indexed Bitmap with a grayscale palette. The
            // Bitmap wraps pTemp directly, so it is disposed before the
            // buffer can be reused (fixed: was never disposed).
            using (Bitmap bmp = new Bitmap(m_stFrameInfo.nWidth, m_stFrameInfo.nHeight, m_stFrameInfo.nWidth * 1, PixelFormat.Format8bppIndexed, pTemp))
            {
                ColorPalette cp = bmp.Palette;
                // init palette: map every index i to gray level i
                for (int i = 0; i < 256; i++)
                {
                    cp.Entries[i] = Color.FromArgb(i, i, i);
                }
                // set palette back
                bmp.Palette = cp;
                bmp.Save("image.bmp", ImageFormat.Bmp);
            }
        }
        else
        {
            // BGR8 -> 24bpp Bitmap.
            try
            {
                using (Bitmap bmp = new Bitmap(m_stFrameInfo.nWidth, m_stFrameInfo.nHeight, m_stFrameInfo.nWidth * 3, PixelFormat.Format24bppRgb, pTemp))
                {
                    bmp.Save("image.bmp", ImageFormat.Bmp);
                }
            }
            catch
            {
                ShowErrorMsg("Write File Fail!", 0);
                // Fixed: the original fell through and also reported
                // "Save Succeed!" after a write failure.
                return;
            }
        }
    }
    ShowErrorMsg("Save Succeed!", 0);
}
/// <summary>
/// Converts a raw frame buffer to packed RGB8 using the camera SDK.
/// </summary>
/// <param name="obj">The MyCamera instance that produced the frame.</param>
/// <param name="pSrc">Source pixel buffer.</param>
/// <param name="nHeight">Image height in pixels.</param>
/// <param name="nWidth">Image width in pixels.</param>
/// <param name="nPixelType">Pixel format of the source buffer.</param>
/// <param name="pDst">Destination buffer; must hold at least width*height*3 bytes.</param>
/// <returns>MV_OK on success, MV_E_PARAMETER for null pointers, -1 on conversion failure.</returns>
public Int32 ConvertToRGB(object obj, IntPtr pSrc, ushort nHeight, ushort nWidth, MyCamera.MvGvspPixelType nPixelType, IntPtr pDst)
{
    if (IntPtr.Zero == pSrc || IntPtr.Zero == pDst)
    {
        return MyCamera.MV_E_PARAMETER;
    }

    // Fixed: guard the cast — the original "as" result was used without a
    // null check and would have thrown NullReferenceException.
    MyCamera device = obj as MyCamera;
    if (null == device)
    {
        return -1;
    }

    MyCamera.MV_PIXEL_CONVERT_PARAM stPixelConvertParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
    stPixelConvertParam.pSrcData = pSrc;             // 源数据 | source data
    stPixelConvertParam.nWidth = nWidth;             // 图像宽度 | image width
    stPixelConvertParam.nHeight = nHeight;           // 图像高度 | image height
    stPixelConvertParam.enSrcPixelType = nPixelType; // 源数据的格式 | source pixel format
    // Bits-per-pixel is encoded in bits 16..23 of the pixel-type value.
    stPixelConvertParam.nSrcDataLen = (uint)(nWidth * nHeight * ((((uint)nPixelType) >> 16) & 0x00ff) >> 3);
    stPixelConvertParam.enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed;
    stPixelConvertParam.pDstBuffer = pDst;           // 转换后的数据 | converted data
    // RGB8 is 3 bytes per pixel (the original assigned this field twice;
    // only the width*height*3 value took effect).
    stPixelConvertParam.nDstBufferSize = (uint)nWidth * nHeight * 3;

    int nRet = device.MV_CC_ConvertPixelType_NET(ref stPixelConvertParam); // 格式转换 | format conversion
    if (MyCamera.MV_OK != nRet)
    {
        return -1;
    }
    return MyCamera.MV_OK;
}
/// <summary>
/// 生成一个对应的Halcon图像对象 | Generate a Halcon image object for the
/// current frame. Mono8 and planar RGB8 frames are handed to Halcon
/// directly; other supported formats are converted first.
/// </summary>
/// <param name="hoImg">Receives the generated HObject, or null on failure.</param>
/// <returns>ErrorDef.Success on success, otherwise an ErrorDef error code.</returns>
public int GenHalcon(out object hoImg)
{
    hoImg = null;
    HObject hImg = null;
    MyCamera.MvGvspPixelType dstPixType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;

    if (IsMono)
    {
        if (_frameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
        {
            // Mono8 maps directly onto a single-channel byte image.
            try
            {
                HOperatorSet.GenImage1(out hImg, "byte", _frameInfo.nWidth, _frameInfo.nHeight, Marshal.UnsafeAddrOfPinnedArrayElement(_dataBytes, 0));
                hoImg = hImg;
                return (int)ErrorDef.Success;
            }
            catch
            {
                return (int)ErrorDef.MemoryExcp;
            }
        }
        // Other mono formats fall through and are converted to Mono8 below.
    }
    else if (IsColor)
    {
        if (_frameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Planar)
        {
            // Planar RGB: the R/G/B planes are laid out back to back.
            try
            {
                IntPtr pDataR = Marshal.UnsafeAddrOfPinnedArrayElement(_dataBytes, 0);
                IntPtr pDataG = pDataR + _frameInfo.nWidth * _frameInfo.nHeight;
                IntPtr pDataB = pDataG + _frameInfo.nWidth * _frameInfo.nHeight;
                HOperatorSet.GenImage3(out hImg, "byte", _frameInfo.nWidth, _frameInfo.nHeight, pDataR, pDataG, pDataB);
                hoImg = hImg;
                return (int)ErrorDef.Success;
            }
            catch
            {
                return (int)ErrorDef.MemoryExcp;
            }
        }
        dstPixType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Planar; // 将图像转化为RGB平面格式 | convert to planar RGB
    }
    else
    {
        return (int)ErrorDef.PixelFormatError;
    }

    // Bits-per-pixel of the destination format is encoded in bits 16..23
    // of the enum value.
    int tmpBuffLen = (int)(_frameInfo.nWidth * _frameInfo.nHeight * ((((uint)dstPixType) >> 16) & 0x00ff) >> 3);
    IntPtr pTmp = Marshal.AllocHGlobal(tmpBuffLen);
    try
    {
        MyCamera.MV_PIXEL_CONVERT_PARAM stConverPixelParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
        stConverPixelParam.nWidth = _frameInfo.nWidth;
        stConverPixelParam.nHeight = _frameInfo.nHeight;
        stConverPixelParam.pSrcData = Marshal.UnsafeAddrOfPinnedArrayElement(_dataBytes, 0);
        stConverPixelParam.nSrcDataLen = (uint)(_frameInfo.nWidth * _frameInfo.nHeight * ((((uint)_frameInfo.enPixelType) >> 16) & 0x00ff) >> 3);
        stConverPixelParam.enSrcPixelType = _frameInfo.enPixelType;
        stConverPixelParam.enDstPixelType = dstPixType;
        stConverPixelParam.pDstBuffer = pTmp;
        stConverPixelParam.nDstBufferSize = (uint)tmpBuffLen;
        int err = _cmr.MV_CC_ConvertPixelType_NET(ref stConverPixelParam);
        if (MyCamera.MV_OK != err)
        {
            return (int)ErrorDef.InvokeFailed;
        }

        try
        {
            if (dstPixType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
            {
                HOperatorSet.GenImage1(out hImg, "byte", _frameInfo.nWidth, _frameInfo.nHeight, pTmp);
            }
            else
            {
                IntPtr pDataR = pTmp;
                IntPtr pDataG = pDataR + _frameInfo.nWidth * _frameInfo.nHeight;
                IntPtr pDataB = pDataG + _frameInfo.nWidth * _frameInfo.nHeight;
                HOperatorSet.GenImage3(out hImg, "byte", _frameInfo.nWidth, _frameInfo.nHeight, pDataR, pDataG, pDataB);
            }
            hoImg = hImg;
            return (int)ErrorDef.Success;
        }
        catch
        {
            return (int)ErrorDef.MemoryExcp;
        }
    }
    finally
    {
        // The original freed pTmp after GenImage1/GenImage3 on the success
        // path, i.e. Halcon has its own copy of the pixel data by then, so a
        // single finally replaces the three duplicated FreeHGlobal calls.
        Marshal.FreeHGlobal(pTmp);
    }
}
/// <summary>
/// 生成一个对应的bmp图像对象 | Generate a Bitmap for the current frame.
/// Mono frames are converted to Mono8 (8bpp indexed, grayscale palette);
/// color frames are converted to BGR8 (24bpp RGB).
/// </summary>
/// <param name="bmp">Receives the generated Bitmap, or null on failure.</param>
/// <returns>ErrorDef.Success on success, otherwise an ErrorDef error code.</returns>
public int GenBmp(out Bitmap bmp)
{
    bmp = null;
    // 将要生成的BMP对象的图像格式 | target pixel format of the Bitmap.
    MyCamera.MvGvspPixelType dstPixelType;
    // Destination buffer size; bits-per-pixel sits in bits 16..23 of the
    // pixel-type enum value.
    int nBmpLen;
    if (IsMono)
    {
        dstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;
        nBmpLen = (int)(_frameInfo.nWidth * _frameInfo.nHeight * ((((uint)MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8) >> 16) & 0x00ff) >> 3);
    }
    else if (IsColor)
    {
        dstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_BGR8_Packed; // RGB24
        nBmpLen = (int)(_frameInfo.nWidth * _frameInfo.nHeight * ((((uint)MyCamera.MvGvspPixelType.PixelType_Gvsp_BGR8_Packed) >> 16) & 0x00ff) >> 3);
    }
    else
    {
        return (int)ErrorDef.PixelFormatError;
    }

    IntPtr pBmpData = Marshal.AllocHGlobal(nBmpLen);
    try
    {
        MyCamera.MV_PIXEL_CONVERT_PARAM stConverPixelParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
        stConverPixelParam.nWidth = _frameInfo.nWidth;
        stConverPixelParam.nHeight = _frameInfo.nHeight;
        stConverPixelParam.pSrcData = Marshal.UnsafeAddrOfPinnedArrayElement(_dataBytes, 0);
        stConverPixelParam.nSrcDataLen = (uint)(_frameInfo.nWidth * _frameInfo.nHeight * ((((uint)_frameInfo.enPixelType) >> 16) & 0x00ff) >> 3);
        stConverPixelParam.enSrcPixelType = _frameInfo.enPixelType;
        stConverPixelParam.enDstPixelType = dstPixelType;
        stConverPixelParam.pDstBuffer = pBmpData;
        stConverPixelParam.nDstBufferSize = (uint)nBmpLen;
        int err = _cmr.MV_CC_ConvertPixelType_NET(ref stConverPixelParam);
        if (MyCamera.MV_OK != err)
        {
            return (int)ErrorDef.InvokeFailed;
        }

        if (dstPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
        {
            // Wrap the unmanaged buffer, apply a grayscale palette, then copy
            // into a managed Bitmap so the buffer can be released.
            using (Bitmap bmpTmp = new Bitmap(_frameInfo.nWidth, _frameInfo.nHeight, _frameInfo.nWidth, PixelFormat.Format8bppIndexed, pBmpData))
            {
                ColorPalette cp = bmpTmp.Palette;
                // init palette: map every index i to gray level i
                for (int i = 0; i < 256; i++)
                {
                    cp.Entries[i] = Color.FromArgb(i, i, i);
                }
                // set palette back
                bmpTmp.Palette = cp;
                bmp = new Bitmap(bmpTmp);
            }
            return (int)ErrorDef.Success;
        }

        // 将图像转化为RGB24 | color path. Fixed: the original returned a Bitmap
        // that wrapped pBmpData and never freed it, leaking the unmanaged
        // buffer for every color frame. Clone copies the pixels (keeping the
        // 24bpp format) so the buffer can always be released in finally.
        try
        {
            using (Bitmap bmpTmp = new Bitmap(_frameInfo.nWidth, _frameInfo.nHeight, _frameInfo.nWidth * 3, PixelFormat.Format24bppRgb, pBmpData))
            {
                bmp = bmpTmp.Clone(new Rectangle(0, 0, _frameInfo.nWidth, _frameInfo.nHeight), PixelFormat.Format24bppRgb);
            }
        }
        catch
        {
            return (int)ErrorDef.MemoryExcp;
        }
        return (int)ErrorDef.Success;
    }
    finally
    {
        Marshal.FreeHGlobal(pBmpData);
    }
}
/// <summary>
/// 接收图片线程函数 | Image-receiving worker thread: fetches frames from the
/// camera while m_bGrabbing is set, converts them to planar RGB or Mono8
/// and hands them to VisionProDisplay.
/// </summary>
/// <param name="obj">The MyCamera instance to grab from.</param>
public void ReceiveImageWorkThread(object obj)
{
    int nRet = MyCamera.MV_OK;
    MyCamera device = obj as MyCamera;
    MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();

    // Raw frame buffer and conversion buffer. Fixed: both are now released
    // in a finally block — the original returned early on conversion
    // failure and skipped its FreeHGlobal calls, leaking both buffers.
    // (AllocHGlobal throws on failure rather than returning Zero, so the
    // original's IntPtr.Zero checks were dead code.)
    IntPtr pData = Marshal.AllocHGlobal((int)g_nPayloadSize);
    IntPtr pImageBuffer = IntPtr.Zero;
    try
    {
        pImageBuffer = Marshal.AllocHGlobal((int)m_nRowStep * 3);
        IntPtr pTemp = IntPtr.Zero;
        Byte[] byteArrImageData = new Byte[m_nRowStep * 3];

        while (m_bGrabbing)
        {
            nRet = device.MV_CC_GetOneFrameTimeout_NET(pData, g_nPayloadSize, ref pFrameInfo, 1000);
            if (MyCamera.MV_OK != nRet)
            {
                continue; // timeout or transient error: keep polling
            }

            MyCamera.MvGvspPixelType pixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed;
            if (IsColorPixelFormat(pFrameInfo.enPixelType)) // 彩色图像处理 | color frame
            {
                if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
                {
                    pTemp = pData;
                }
                else
                {
                    // 其他格式彩色图像转为RGB | convert other color formats to RGB8
                    nRet = ConvertToRGB(obj, pData, pFrameInfo.nHeight, pFrameInfo.nWidth, pFrameInfo.enPixelType, pImageBuffer);
                    if (MyCamera.MV_OK != nRet)
                    {
                        return; // finally releases the buffers
                    }
                    pTemp = pImageBuffer;
                }

                // Packed转Plane | packed RGB -> three consecutive planes (R,G,B)
                unsafe
                {
                    byte* pBufForSaveImage = (byte*)pTemp;
                    // Row width rounded up to a multiple of 4 bytes.
                    UInt32 nSupWidth = (pFrameInfo.nWidth + (UInt32)3) & 0xfffffffc;
                    // NOTE(review): rows are written with stride nSupWidth but the
                    // plane offsets use nWidth*nHeight; for widths that are not a
                    // multiple of 4 these planes would overlap — verify against
                    // the m_nRowStep layout consumed by VisionProDisplay.
                    for (int nRow = 0; nRow < pFrameInfo.nHeight; nRow++)
                    {
                        for (int col = 0; col < pFrameInfo.nWidth; col++)
                        {
                            byteArrImageData[nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col)];
                            byteArrImageData[pFrameInfo.nWidth * pFrameInfo.nHeight + nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col + 1)];
                            byteArrImageData[pFrameInfo.nWidth * pFrameInfo.nHeight * 2 + nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col + 2)];
                        }
                    }
                    pTemp = Marshal.UnsafeAddrOfPinnedArrayElement(byteArrImageData, 0);
                }
            }
            else if (IsMonoPixelFormat(pFrameInfo.enPixelType)) // Mono图像处理 | mono frame
            {
                if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
                {
                    pTemp = pData;
                }
                else
                {
                    // 其他格式Mono转为Mono8 | convert other mono formats to Mono8
                    nRet = ConvertToMono8(device, pData, pImageBuffer, pFrameInfo.nHeight, pFrameInfo.nWidth, pFrameInfo.enPixelType);
                    if (MyCamera.MV_OK != nRet)
                    {
                        return; // finally releases the buffers
                    }
                    pTemp = pImageBuffer;
                }
                pixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;
            }
            else
            {
                continue; // unsupported format: skip this frame
            }

            VisionProDisplay(pFrameInfo.nHeight, pFrameInfo.nWidth, pTemp, pixelType);
        }
    }
    finally
    {
        if (pImageBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(pImageBuffer);
        }
        Marshal.FreeHGlobal(pData);
    }
}
/// <summary>
/// Display a frame in the CogDisplay control. Mono8 frames are wrapped as a
/// CogImage8Grey; any other format is treated as three 8-bit planes located
/// at pImageBuf, pImageBuf + m_nRowStep and pImageBuf + 2 * m_nRowStep and
/// wrapped as a CogImage24PlanarColor.
/// </summary>
/// <param name="nHeight">Image height in pixels.</param>
/// <param name="nWidth">Image width in pixels.</param>
/// <param name="pImageBuf">Pointer to the pixel data.</param>
/// <param name="enPixelType">Pixel format of the buffer.</param>
public void VisionProDisplay(UInt32 nHeight, UInt32 nWidth, IntPtr pImageBuf, MyCamera.MvGvspPixelType enPixelType)
{
    // ch: 显示 || en: display
    try
    {
        if (enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
        {
            CogImage8Root cogImage8Root = new CogImage8Root();
            // Stride passed as nWidth: the buffer is assumed tightly packed.
            cogImage8Root.Initialize((Int32)nWidth, (Int32)nHeight, pImageBuf, (Int32)nWidth, null);
            CogImage8Grey cogImage8Grey = new CogImage8Grey();
            cogImage8Grey.SetRoot(cogImage8Root);
            this.cogDisplayImage.Image = cogImage8Grey.ScaleImage((int)nWidth, (int)nHeight);
            // NOTE(review): forcing a GC on every frame is costly; presumably
            // here to reclaim the large intermediate images promptly — worth
            // revisiting.
            System.GC.Collect();
        }
        else
        {
            // Color path: one 8-bit root per plane, planes addressed at
            // pImageBuf + k * m_nRowStep. Assumes m_nRowStep is the byte size
            // of one full plane as produced by the grabbing thread — TODO
            // confirm against the buffer producer.
            CogImage8Root image0 = new CogImage8Root();
            IntPtr ptr0 = new IntPtr(pImageBuf.ToInt64());
            image0.Initialize((int)nWidth, (int)nHeight, ptr0, (int)nWidth, null);
            CogImage8Root image1 = new CogImage8Root();
            IntPtr ptr1 = new IntPtr(pImageBuf.ToInt64() + m_nRowStep);
            image1.Initialize((int)nWidth, (int)nHeight, ptr1, (int)nWidth, null);
            CogImage8Root image2 = new CogImage8Root();
            IntPtr ptr2 = new IntPtr(pImageBuf.ToInt64() + m_nRowStep * 2);
            image2.Initialize((int)nWidth, (int)nHeight, ptr2, (int)nWidth, null);
            CogImage24PlanarColor colorImage = new CogImage24PlanarColor();
            colorImage.SetRoots(image0, image1, image2);
            this.cogDisplayImage.Image = colorImage.ScaleImage((int)nWidth, (int)nHeight);
            System.GC.Collect();
        }
    }
    catch (System.Exception ex)
    {
        // Display errors are surfaced to the user rather than crashing the thread.
        MessageBox.Show(ex.ToString());
        return;
    }
    return;
}