/// <summary>
/// Polling grab thread: repeatedly fetches one frame (1 s timeout) and logs its
/// geometry until <c>g_bExit</c> is set. <paramref name="obj"/> must be the MyCamera instance.
/// </summary>
public static void ReceiveImageWorkThread(object obj)
{
    MyCamera device = obj as MyCamera;
    if (device == null)
    {
        // Defensive: a wrong argument type previously caused an NRE on first use.
        return;
    }
    MyCamera.MV_FRAME_OUT_INFO_EX stImageInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
    IntPtr pData = Marshal.AllocHGlobal((int)g_nPayloadSize);
    if (pData == IntPtr.Zero)
    {
        return;
    }
    uint nDataSize = g_nPayloadSize;
    try
    {
        while (true)
        {
            int nRet = device.MV_CC_GetOneFrameTimeout_NET(pData, nDataSize, ref stImageInfo, 1000);
            if (nRet == MyCamera.MV_OK)
            {
                Console.WriteLine("Get One Frame:" + "Width[" + Convert.ToString(stImageInfo.nWidth) + "] , Height[" + Convert.ToString(stImageInfo.nHeight) + "] , FrameNum[" + Convert.ToString(stImageInfo.nFrameNum) + "]");
            }
            else
            {
                Console.WriteLine("No data:{0:x8}", nRet);
            }
            if (g_bExit)
            {
                break;
            }
        }
    }
    finally
    {
        // BUG FIX: the original leaked this unmanaged buffer when the thread exited.
        Marshal.FreeHGlobal(pData);
    }
}
/// <summary>
/// SDK frame-arrival callback used in trigger mode: converts the frame to a
/// Halcon image on a background task and enqueues it for the grab thread.
/// </summary>
private void GrabHal_CallBack(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    try
    {
        // Trigger mode only; free-run frames are ignored here.
        if (g_BaseParCamera.BlUsingTrigger)
        {
            // Copy the frame-info struct so the task captures a stable value.
            MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfoNew = pFrameInfo;
            // NOTE(review): the task also captures the raw pData pointer, which is
            // owned by the SDK and may be reused/invalidated once this callback
            // returns — confirm the conversion always wins that race, or copy the
            // buffer before starting the task.
            new Task(new Action(() =>
            {
                ImageAll imTemp = new ImageAll();
                imTemp.Ho_Image = ConvertHobject(pFrameInfoNew, pData);
                imTemp.TimeTrigger = PathDirectory.P_I.GetTimeName(); // capture timestamp
                if (BlTriggerBitmap) // a bitmap snapshot of this trigger was requested
                {
                    BlTriggerBitmap = false;
                    PFrameInfo_Bit = pFrameInfoNew;
                    PData_Bit = pData;
                }
                ImageAll_Q.Enqueue(imTemp); // hand the frame to GrabImageAll's queue
                if (Trigger_event != null)
                {
                    Trigger_event(); // notify listeners that a frame is ready
                }
            })).Start();
        }
    }
    catch (Exception ex)
    {
        BlTriggerBitmap = false;
        Log.L_I.WriteError(NameClass, ex);
    }
}
/// <summary>
/// Frame callback: copies the incoming frame into the per-camera cache buffer
/// (growing it when needed) and, in trigger mode, signals the waiting consumer.
/// </summary>
private void ImageCallBack(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    int nIndex = (int)pUser; // camera index passed at callback registration
    lock (m_dicSaveImge[nIndex].Lock)
    {
        // (Re)allocate the cache buffer when missing or too small for this frame.
        if (m_dicSaveImge[nIndex].Buf == IntPtr.Zero || pFrameInfo.nFrameLen > m_dicSaveImge[nIndex].Size)
        {
            if (m_dicSaveImge[nIndex].Buf != IntPtr.Zero)
            {
                // BUG FIX: this buffer comes from Marshal.AllocHGlobal, so it must
                // be freed with FreeHGlobal — Marshal.Release is a COM refcount API
                // and corrupts/never frees HGlobal memory.
                Marshal.FreeHGlobal(m_dicSaveImge[nIndex].Buf);
                m_dicSaveImge[nIndex].Buf = IntPtr.Zero;
            }
            m_dicSaveImge[nIndex].Buf = Marshal.AllocHGlobal((Int32)pFrameInfo.nFrameLen);
            if (m_dicSaveImge[nIndex].Buf == IntPtr.Zero)
            {
                StrErrorMsg = "缓存图片分配内存失败";
                return;
            }
            m_dicSaveImge[nIndex].Size = pFrameInfo.nFrameLen;
        }
        m_stFrameInfo[nIndex] = pFrameInfo;
        CopyMemory(m_dicSaveImge[nIndex].Buf, pData, pFrameInfo.nFrameLen);
        if (IsTriggerMode)
        {
            m_dicImageCallBackSignal[nIndex].Set(); // wake the thread waiting for this frame
        }
    }
}
// Grab one image. In trigger mode, consumes a frame previously queued by the
// grab callback; otherwise (SDK build only) pulls a frame directly from the
// driver. Returns null on empty queue, timeout, or any error.
public override ImageAll GrabImageAll()
{
    g_MtGrabImage.WaitOne(); // serialize concurrent grabs
    ImageAll imageAll = new ImageAll();
    UInt32 nDataLen = 0;
    int waitTime = 1000; // driver receive timeout, in ms
    try
    {
        // NOTE(review): the original comment claimed "software trigger", but the
        // condition selects trigger sources OTHER than software — confirm which
        // branch each trigger source is meant to take.
        if (g_BaseParCamera.BlUsingTrigger && TriggerSource_e != TriggerSourceCamera_enum.Software)
        {
            if (ImageAll_Q.Count > 0)
            {
                ImageAll im = ImageAll_Q.Peek();
                // Build the bitmap from the snapshot cached by the trigger callback.
                im.BitmapSource = ConvertImageBitmap(PFrameInfo_Bit, PData_Bit);
                ImageAll_Q.Dequeue(); // remove the consumed frame
                if (ImageAll_Q.Count > 20) // queue considered overflowed
                {
                    ImageAll_Q.Clear();
                }
                im.Time = g_PathDirectory.GetTimeName();
                im.TimeShort = g_PathDirectory.GetShortTimeName(); // short grab timestamp
                return im;
            }
            else
            {
                return null;
            }
        }
        else
        {
#if(SDK)
            IntPtr pData = Marshal.UnsafeAddrOfPinnedArrayElement(g_PBufForDriver, 0);
            MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
            // Fetch one frame; timeout is 1 second (waitTime is in ms).
            int nRet = GetOneFrameTimeout(pData, ref nDataLen, g_BufSizeForDriver, ref pFrameInfo, waitTime);
            imageAll.Ho_Image = ConvertHobject(pFrameInfo, pData);
            imageAll.BitmapSource = ConvertImageBitmap(pFrameInfo, pData);
            imageAll.Time = g_PathDirectory.GetTimeName();
            imageAll.TimeShort = g_PathDirectory.GetShortTimeName(); // short grab timestamp
            return imageAll;
        }
        // NOTE(review): the closing brace above sits INSIDE the #if(SDK) region;
        // when SDK is undefined the preprocessor removes it and the else-branch
        // structure changes — fragile layout worth restructuring.
#endif
        return null;
    }
    catch
    {
        return null;
    }
    finally
    {
        g_MtGrabImage.ReleaseMutex(); // always released, even on early return
    }
}
/// <summary>
/// Frame callback: wraps the Mono8 frame in an 8bpp-indexed Bitmap with a
/// grayscale palette, caches the bytes/bitmap, and fans out to subscribers.
/// </summary>
private void ImageCallbackFunc(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    try
    {
        // NOTE(review): lock(this) is discouraged — external code can lock the
        // same instance and deadlock; prefer a private lock object.
        lock (this)
        {
            newBmp = new Bitmap(pFrameInfo.nWidth, pFrameInfo.nHeight, PixelFormat.Format8bppIndexed);
            // Lock the bits of the bitmap.
            BitmapData bmpData = newBmp.LockBits(new Rectangle(0, 0, newBmp.Width, newBmp.Height), ImageLockMode.ReadWrite, newBmp.PixelFormat);
            IntPtr ptrBmp = bmpData.Scan0;
            // Linear grayscale palette for the indexed pixel format.
            System.Drawing.Imaging.ColorPalette palette = newBmp.Palette;
            for (int i = 0; i < 256; ++i)
            {
                palette.Entries[i] = Color.FromArgb(255, i, i, i);
            }
            newBmp.Palette = palette;
            // Copy the raw frame into a managed byte array.
            // NOTE(review): this reads Stride*Height bytes from pData; if GDI+ pads
            // Stride beyond the frame width this reads past the SDK buffer —
            // confirm widths are always a multiple of 4 here.
            newBytes = new byte[bmpData.Stride * newBmp.Height];
            Marshal.Copy(pData, newBytes, 0, newBytes.Length);
            // Optionally flip the image vertically.
            if (this.reverseY)
            {
                newBytes = this.bytesReverseY(newBytes, bmpData.Stride, newBmp.Height);
            }
            // Write the (possibly flipped) bytes into the bitmap and publish it.
            Marshal.Copy(newBytes, 0, ptrBmp, newBytes.Length);
            newBmp.UnlockBits(bmpData);
            this.CurrentBytes = newBytes;
            this.LastBmp = this.CurrentBmp;
            this.CurrentBmp = newBmp;
        }
        BmpGrabedEvnet.Set();
        this.TriggerSts.Update(true);
        BitmapGrabbed?.Invoke(CurrentBmp, true);
        BytesSaveBuffer?.Invoke(CurrentBytes);
        // Reduce the number of displayed images to a reasonable amount
        // if the camera is acquiring images very fast.
        this.currDisplayedTime = DateTime.Now;
        if (this.currDisplayedTime - this.lastDisplayedTime > TimeSpan.FromMilliseconds(70))
        {
            this.BitmapDisplayed?.Invoke(this.CurrentBmp);
            this.lastDisplayedTime = this.currDisplayedTime;
            // the LastBmp can be dispose after BitmapDisplayed invoke execute.
            this.LastBmp.Dispose();
        }
    }
    catch
    {
        // Any failure is reported to subscribers as an unsuccessful grab.
        BitmapGrabbed?.Invoke(null, false);
    }
}
/// <summary>
/// Pulls one frame into the driver buffer, converts it to a Bitmap via OpenCV
/// (optionally running inference), and displays it on pictureBox2.
/// </summary>
private void ImageCallBack_2(IntPtr m_BufForDriver, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    MyCamera.MVCC_INTVALUE stParam = new MyCamera.MVCC_INTVALUE();
    int nRet = cameraArr1[0].MV_CC_GetIntValue_NET("PayloadSize", ref stParam);
    if (MyCamera.MV_OK != nRet)
    {
        MessageBox.Show("Get PayloadSize failed", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }
    UInt32 nPayloadSize = stParam.nCurValue;
    // Grow the receive buffer if the payload no longer fits.
    if (nPayloadSize > m_nBufSizeForDriver)
    {
        if (m_BufForDriver != IntPtr.Zero)
        {
            // BUG FIX: the buffer comes from AllocHGlobal, so it must be freed with
            // FreeHGlobal — Marshal.Release is a COM refcount API.
            Marshal.FreeHGlobal(m_BufForDriver);
        }
        m_nBufSizeForDriver = nPayloadSize;
        m_BufForDriver = Marshal.AllocHGlobal((Int32)m_nBufSizeForDriver);
    }
    if (m_BufForDriver == IntPtr.Zero)
    {
        MessageBox.Show("采集失败,请重新连接设备", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }
    // Fetch one frame (1 s timeout) into the driver buffer.
    nRet = cameraArr1[0].MV_CC_GetOneFrameTimeout_NET(m_BufForDriver, nPayloadSize, ref pFrameInfo, 1000);
    if (MyCamera.MV_OK != nRet)
    {
        // BUG FIX: the original ignored this result and converted whatever stale
        // bytes were left in the buffer.
        return;
    }
    // Wrap the Mono8 data in an OpenCV Mat, then expand to three channels.
    byte[] byteImage = new byte[pFrameInfo.nHeight * pFrameInfo.nWidth];
    Marshal.Copy(m_BufForDriver, byteImage, 0, pFrameInfo.nHeight * pFrameInfo.nWidth);
    Mat matImage = new Mat(pFrameInfo.nHeight, pFrameInfo.nWidth, MatType.CV_8UC1, byteImage);
    Mat matImageNew = new Mat();
    Cv2.CvtColor(matImage, matImageNew, ColorConversionCodes.GRAY2RGB);
    Bitmap bitmap = matImageNew.ToBitmap(); // Mat -> Bitmap
    // Optionally run deep-learning inference on the frame.
    DeepLearning deepLearning = new DeepLearning();
    if (isInference2)
    {
        bitmap = deepLearning.Inference(model2, modelType, bitmap);
    }
    // Marshal the UI update onto the control's owning thread when required.
    if (pictureBox2.InvokeRequired)
    {
        UpdateUI update = delegate { pictureBox2.Image = bitmap; };
        pictureBox2.BeginInvoke(update);
    }
    else
    {
        pictureBox2.Image = bitmap;
    }
}
/// <summary>
/// Streaming loop: pulls frames while <c>m_bGrabbing</c> is set and renders
/// them into pictureBox1 via the SDK display helper.
/// </summary>
public void ReceiveThreadProcess()
{
    MyCamera.MVCC_INTVALUE stParam = new MyCamera.MVCC_INTVALUE();
    int nRet = m_MyCamera.MV_CC_GetIntValue_NET("PayloadSize", ref stParam);
    if (MyCamera.MV_OK != nRet)
    {
        ShowErrorMsg("Get PayloadSize failed", nRet);
        return;
    }
    UInt32 nPayloadSize = stParam.nCurValue;
    if (nPayloadSize > m_nGrabBufSize)
    {
        // BUG FIX: free the previous grab buffer before replacing it — the
        // original leaked it on every payload-size increase.
        if (m_pGrabBuf != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(m_pGrabBuf);
            m_pGrabBuf = IntPtr.Zero;
        }
        m_nGrabBufSize = nPayloadSize;
        m_pGrabBuf = Marshal.AllocHGlobal((Int32)m_nGrabBufSize);
    }
    if (m_pGrabBuf == IntPtr.Zero)
    {
        return;
    }
    MyCamera.MV_FRAME_OUT_INFO_EX stFrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
    MyCamera.MV_DISPLAY_FRAME_INFO stDisplayInfo = new MyCamera.MV_DISPLAY_FRAME_INFO();
    while (m_bGrabbing)
    {
        nRet = m_MyCamera.MV_CC_GetOneFrameTimeout_NET(m_pGrabBuf, nPayloadSize, ref stFrameInfo, 1000);
        if (nRet == MyCamera.MV_OK)
        {
            stDisplayInfo.hWnd = pictureBox1.Handle;
            stDisplayInfo.pData = m_pGrabBuf;
            stDisplayInfo.nDataLen = stFrameInfo.nFrameLen;
            stDisplayInfo.nWidth = stFrameInfo.nWidth;
            stDisplayInfo.nHeight = stFrameInfo.nHeight;
            stDisplayInfo.enPixelType = stFrameInfo.enPixelType;
            m_MyCamera.MV_CC_DisplayOneFrame_NET(ref stDisplayInfo);
        }
        else
        {
            // In trigger mode a timeout is expected; back off briefly.
            if (bnTriggerMode.Checked)
            {
                Thread.Sleep(5);
            }
        }
    }
}
/****************************************************************************
 * @fn GetOneFrame
 * @brief Fetch one frame of image data (blocking, 10 s timeout).
 * @param pData      IN-OUT destination buffer pointer
 * @param pnDataLen  OUT    number of image bytes produced (0 on failure)
 * @param nDataSize  IN     capacity of the destination buffer
 * @param pFrameInfo OUT    frame metadata
 * @return success: CO_OK; failure: CO_FAIL
 ****************************************************************************/
public int GetOneFrame(IntPtr pData, ref UInt32 pnDataLen, UInt32 nDataSize, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo)
{
    pnDataLen = 0;
    // BUG FIX: the original first called MV_CC_GetOneFrameEx_NET and discarded
    // its result, consuming a frame for nothing; only the timeout variant is needed.
    int nRet = m_pCSI.MV_CC_GetOneFrameTimeout_NET(pData, nDataSize, ref pFrameInfo, 10000);
    if (MyCamera.MV_OK != nRet)
    {
        return(CO_FAIL);
    }
    // Bytes = width * height * bytesPerPixel, where bits-per-pixel is encoded in
    // bits 16-31 of the GVSP pixel-type code.
    // BUG FIX: the original multiplied nWidth * nWidth instead of nWidth * nHeight.
    pnDataLen = (uint)(pFrameInfo.nWidth * pFrameInfo.nHeight * (((((UInt32)pFrameInfo.enPixelType) >> 16) & 0xffff) >> 3));
    return(CO_OK);
}
/// <summary>Grab a single image on demand (device must be open, grabbing, and trigger-free).</summary>
/// <param name="img">Receives the captured image on success; null otherwise.</param>
/// <param name="timeoutMilSeconds">SDK receive timeout in milliseconds; -1 waits indefinitely.</param>
/// <returns>ErrorDef.Success, or a specific ErrorDef failure code.</returns>
public int GrabOne(out IJFImage img, int timeoutMilSeconds = -1)
{
    img = null;
    if (!IsDeviceOpen)
    {
        return((int)ErrorDef.NotOpen);
    }
    if (!IsGrabbing)
    {
        return((int)ErrorDef.NotGrabbing);
    }
    // Single-shot grabbing only works with triggering disabled.
    JFCmrTrigMode currTM = JFCmrTrigMode.disable;
    GetTrigMode(out currTM);
    if (currTM != JFCmrTrigMode.disable)
    {
        return((int)ErrorDef.TrigModeUnMatch);
    }
    int err = 0;
    // Lazily cache the payload size on first use.
    if (_payloadSize <= 0)
    {
        MyCamera.MVCC_INTVALUE stVal = new MyCamera.MVCC_INTVALUE();
        err = _hikCmr.MV_CC_GetIntValue_NET("PayloadSize", ref stVal);
        if (MyCamera.MV_OK != err)
        {
            return((int)ErrorDef.InvokeFailed);
        }
        _payloadSize = (int)stVal.nCurValue;
    }
    byte[] dataBytes = new byte[_payloadSize];
    // BUG FIX: UnsafeAddrOfPinnedArrayElement was used on an UNpinned array; the
    // GC could relocate it while the native SDK writes. Pin it for the call.
    GCHandle hBuf = GCHandle.Alloc(dataBytes, GCHandleType.Pinned);
    try
    {
        MyCamera.MV_FRAME_OUT_INFO_EX frameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
        err = _hikCmr.MV_CC_GetOneFrameTimeout_NET(hBuf.AddrOfPinnedObject(), (uint)_payloadSize, ref frameInfo, timeoutMilSeconds);
        if (err != MyCamera.MV_OK)
        {
            return((int)ErrorDef.InvokeFailed);
        }
        img = new JFImage_Hik(dataBytes, frameInfo, _hikCmr);
        return((int)ErrorDef.Success);
    }
    finally
    {
        hBuf.Free();
    }
}
//public MyCamera.MV_FRAME_OUT_INFO stFrameInfo;
/// <summary>
/// Image-ready handler: pulls one BGR frame into a temporary buffer, refreshes
/// the cached display bitmap, and raises ImageReady for listeners.
/// </summary>
public void OnMVImageReadyEventCallback()
{
    lock (obj)
    {
        try
        {
            // Worst-case BGR frame: width * height * 3 bytes.
            // NOTE(review): UnsafeAddrOfPinnedArrayElement is used on an array that
            // is never actually pinned; the GC may move it while the SDK writes —
            // consider GCHandle.Alloc(buffer, GCHandleType.Pinned).
            byte[] buffer = new byte[SizeMax.Width * SizeMax.Height * 3];
            IntPtr pData = Marshal.UnsafeAddrOfPinnedArrayElement(buffer, 0);
            UInt32 nDataLen = 0;
            //stFrameInfo = new MyCamera.MV_FRAME_OUT_INFO();
            stFrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
            int nRet;
            // Fetch one frame (the commented-out call used a 1 s timeout variant).
            // nRet = GetOneFrame(pData, ref nDataLen, (uint)SizeMax.Width * (uint)SizeMax.Height * 3, ref stFrameInfo);
            nRet = GetImageBGR(pData, (uint)SizeMax.Width * (uint)SizeMax.Height * 3, ref stFrameInfo);
            if (MyCamera.MV_OK == nRet)
            {
                if (m_bitmap != null)
                {
                    /* Update the bitmap with the image data. */
                    CameraOperator_HK.UpdateBitmap(m_bitmap, buffer, stFrameInfo.nWidth, stFrameInfo.nHeight, true);
                    /* To show the new image, request the display control to update itself. */
                }
                else /* A new bitmap is required. */
                {
                    CameraOperator_HK.CreateBitmap(ref m_bitmap, stFrameInfo.nWidth, stFrameInfo.nHeight, true);
                    CameraOperator_HK.UpdateBitmap(m_bitmap, buffer, stFrameInfo.nWidth, stFrameInfo.nHeight, true);
                    //* Provide the display control with the new bitmap. This action automatically updates the display. */
                }
                bitmapInfo.m_Bitmap = (Bitmap)m_bitmap.Clone();
                if (ImageReady != null)
                {
                    ImageReady(bitmapInfo, IsContinuousShot);
                }
            }
        }
        catch (Exception e)
        {
            // NOTE(review): exceptions are silently swallowed here; at minimum they
            // should be logged so grab failures are diagnosable.
        }
    }
}
/// <summary>
/// Wraps a raw mono frame buffer in a Halcon byte image.
/// </summary>
/// <param name="image_grab">Frame metadata (width/height are used).</param>
/// <param name="pData">Pointer to the raw pixel data.</param>
/// <returns>The Halcon image, or null if conversion fails (error is logged).</returns>
HObject ConvertHobject(MyCamera.MV_FRAME_OUT_INFO_EX image_grab, IntPtr pData)
{
    try
    {
        HObject halconImage;
        HOperatorSet.GenImage1(out halconImage, (HTuple)"byte", (HTuple)(image_grab.nWidth), (HTuple)(image_grab.nHeight), pData);
        return halconImage;
    }
    catch (Exception ex)
    {
        Log.L_I.WriteError(NameClass, ex);
        return null;
    }
}
/// <summary>
/// Grab a single frame synchronously (1 s timeout) and return it as a Halcon
/// image. Returns an empty HObject when no frame could be fetched.
/// </summary>
public override HObject SnapShot()
{
    lock (m_lock)
    {
        HObject _image = new HObject();
        MyCamera.MV_FRAME_OUT_INFO_EX FrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
        IntPtr pBufForDriver = IntPtr.Zero;
        try
        {
            // ch:获取包大小 || en: Get Payload Size
            MyCamera.MVCC_INTVALUE stParam = new MyCamera.MVCC_INTVALUE();
            int nRet = m_pMyCamera.MV_CC_GetIntValue_NET("PayloadSize", ref stParam);
            if (MyCamera.MV_OK != nRet)
            {
                // BUG FIX: the original fell through here and allocated a
                // zero-byte receive buffer from the default-initialized struct.
                Console.WriteLine("Get PayloadSize failed:{0:x8}", nRet);
                return(_image);
            }
            UInt32 nPayloadSize = stParam.nCurValue;
            pBufForDriver = Marshal.AllocHGlobal((int)nPayloadSize);
            // ch:获取一帧图像 | en:Get one image
            nRet = m_pMyCamera.MV_CC_GetOneFrameTimeout_NET(pBufForDriver, nPayloadSize, ref FrameInfo, 1000);
            if (MyCamera.MV_OK == nRet)
            {
                _image = GetImage(pBufForDriver, ref FrameInfo);
            }
        }
        catch (Exception ex)
        {
            throw new Exception($"{m_Cameraname} 获取图像失败," + ex.Message);
        }
        finally
        {
            // Free the driver buffer if it was allocated (FreeHGlobal on Zero is a
            // no-op, but the guard documents intent). Removed the always-Zero
            // pBufForSaveImage and unused nCount from the original.
            if (pBufForDriver != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(pBufForDriver);
            }
        }
        return(_image);
    }
}
/// <summary>
/// Starts streaming on every opened camera; on any failure, records the error
/// message and aborts. Returns true when all cameras are grabbing.
/// </summary>
private bool StartGrab()
{
    int nRet;
    foreach (var pMyCamera in m_dicMyCamera)
    {
        // MV_FRAME_OUT_INFO_EX is a struct: mutate a copy, then store it back.
        // BUG FIX: the original modified only the local copy, so the cached frame
        // length / pixel type were never actually reset before grabbing.
        MyCamera.MV_FRAME_OUT_INFO_EX frameInfo = m_stFrameInfo[pMyCamera.Key];
        frameInfo.nFrameLen = 0; // clear stale frame length before streaming starts
        frameInfo.enPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Undefined;
        m_stFrameInfo[pMyCamera.Key] = frameInfo;
        // Start acquisition on this camera.
        nRet = pMyCamera.Value.MV_CC_StartGrabbing_NET();
        if (MyCamera.MV_OK != nRet)
        {
            StrErrorMsg = "相机" + m_listUserDefinedName[pMyCamera.Key] + "开始抓图失败(" + nRet.ToString("X") + ")";
            return(false);
        }
    }
    m_bGrabbing = true;
    return(true);
}
static MyCamera.MV_CHUNK_DATA_CONTENT stChunkInfo; // last parsed chunk-data entry
/// <summary>
/// Frame callback: prints the frame's timing metadata, then walks and prints
/// every entry of the unparsed chunk-data list attached to the frame.
/// </summary>
static void ImageCallbackFunc(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    //Print parse the timestamp information in the frame
    Console.WriteLine("ImageCallBack: ExposureTime[" + Convert.ToString(pFrameInfo.fExposureTime) + "], SecondCount[" + Convert.ToString(pFrameInfo.nSecondCount) + "], CycleCount[" + Convert.ToString(pFrameInfo.nCycleCount) + "], CycleOffset[" + Convert.ToString(pFrameInfo.nCycleOffset) + "], FrameNum[" + Convert.ToString(pFrameInfo.nFrameNum) + "]");
    int nStrSize = Marshal.SizeOf(stChunkInfo);
    // BUG FIX: the original truncated the chunk-list pointer to a 32-bit int,
    // which corrupts the address in 64-bit processes. Use 64-bit arithmetic.
    long nUnparsedChunkContent = pFrameInfo.UnparsedChunkList.pUnparsedChunkContent.ToInt64();
    for (int i = 0; i < pFrameInfo.nUnparsedChunkNum; i++)
    {
        // Each entry is a packed MV_CHUNK_DATA_CONTENT struct laid out contiguously.
        stChunkInfo = (MyCamera.MV_CHUNK_DATA_CONTENT)Marshal.PtrToStructure((IntPtr)(nUnparsedChunkContent + (long)i * nStrSize), typeof(MyCamera.MV_CHUNK_DATA_CONTENT));
        Console.WriteLine("ChunkInfo:" + "ChunkID[0x{0:x8}],ChunkLen[" + Convert.ToString(stChunkInfo.nChunkLen) + "]", stChunkInfo.nChunkID);
    }
    Console.WriteLine("************************************");
}
// ch:保存图片 | en:Save the current frame of camera nIndex as imageN.bmp.
private void SaveImage(IntPtr pData, MyCamera.MV_FRAME_OUT_INFO_EX stFrameInfo, int nIndex)
{
    // Grow the conversion buffer when needed: BMP worst case is
    // frameLen * 3 + 2048 bytes (header slack).
    if ((3 * stFrameInfo.nFrameLen + 2048) > m_nBufSizeForSaveImage)
    {
        // BUG FIX: free the previous buffer before replacing it (was leaked).
        if (m_pBufForSaveImage != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(m_pBufForSaveImage);
            m_pBufForSaveImage = IntPtr.Zero;
        }
        m_nBufSizeForSaveImage = 3 * stFrameInfo.nFrameLen + 2048;
        m_pBufForSaveImage = Marshal.AllocHGlobal((Int32)m_nBufSizeForSaveImage);
    }
    // Ask the SDK to encode the raw frame as BMP into m_pBufForSaveImage.
    MyCamera.MV_SAVE_IMAGE_PARAM_EX stSaveParam = new MyCamera.MV_SAVE_IMAGE_PARAM_EX();
    stSaveParam.enImageType = MyCamera.MV_SAVE_IAMGE_TYPE.MV_Image_Bmp;
    stSaveParam.enPixelType = stFrameInfo.enPixelType;
    stSaveParam.pData = pData;
    stSaveParam.nDataLen = stFrameInfo.nFrameLen;
    stSaveParam.nHeight = stFrameInfo.nHeight;
    stSaveParam.nWidth = stFrameInfo.nWidth;
    stSaveParam.pImageBuffer = m_pBufForSaveImage;
    stSaveParam.nBufferSize = m_nBufSizeForSaveImage;
    //stSaveParam.nJpgQuality = 80; // only relevant when saving JPEG
    int nRet = m_pMyCamera[nIndex].MV_CC_SaveImageEx_NET(ref stSaveParam);
    if (MyCamera.MV_OK != nRet)
    {
        string temp = "No." + (nIndex + 1).ToString() + "Device save Failed!";
        ShowErrorMsg(temp, 0);
    }
    else
    {
        string[] path = { "image1.bmp", "image2.bmp", "image3.bmp", "image4.bmp" };
        Byte[] bArrBufForSaveImage = new Byte[stSaveParam.nImageLen];
        Marshal.Copy(m_pBufForSaveImage, bArrBufForSaveImage, 0, (Int32)stSaveParam.nImageLen);
        // BUG FIX: the original called Marshal.Release (a COM API) on the cached
        // HGlobal buffer here, invalidating it while m_nBufSizeForSaveImage still
        // claimed it was allocated — the next save could write freed memory. The
        // buffer is now reused and freed only when it must grow (above).
        // Also wrapped the FileStream in using so the handle is released on error.
        using (FileStream file = new FileStream(path[nIndex], FileMode.Create, FileAccess.Write))
        {
            file.Write(bArrBufForSaveImage, 0, (int)stSaveParam.nImageLen);
        }
        string temp = "No." + (nIndex + 1).ToString() + "Device Save Succeed!";
        ShowErrorMsg(temp, 0);
    }
}
/// <summary>
/// Converts the raw frame into a Halcon image (Mono8 or BayerGR8 sources) and
/// raises the new-image-acquired event.
/// </summary>
private void ImageOut(IntPtr pixelPointer, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    HObject objImage = new HObject();
    int width = pFrameInfo.nWidth;
    int height = pFrameInfo.nHeight;
    if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
    {
        HOperatorSet.GenImage1(out objImage, "byte", width, height, pixelPointer);
    }
    else if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR8)
    {
        width = (width + 3) & 0xfffc; // round width up to a multiple of 4
        // CLEANUP: the original allocated three plane buffers and ran an empty
        // O(width*height) nested loop whose body was entirely commented out —
        // pure dead work, removed.
        // NOTE(review): feeding BayerGR8 data to GenImageInterleaved as "rgb"
        // only yields a correct color image if the stream was demosaiced
        // upstream — confirm against the camera's pixel-format configuration.
        HOperatorSet.GenImageInterleaved(out objImage, pixelPointer, "rgb", width, height, -1, "byte", 0, 0, 0, 0, -1, 0);
    }
    ImageEventArgs <HObject> outEvent = new ImageEventArgs <HObject>();
    outEvent.image = objImage;
    OnNewImageAcquired(outEvent);
}
/// <summary>
/// Static frame callback: recovers the HikCamera instance from the GCHandle in
/// pUser, enforces the optional grab-count limit, and forwards the converted
/// frame to ImageGrabbed subscribers.
/// </summary>
private static void ImageCallbackFunc(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    var camera = (HikCamera)((GCHandle)pUser).Target;
    // A positive _grabCount caps the number of frames before auto-stop.
    if (camera._grabCount > 0)
    {
        camera._count++;
        if (camera._count > camera._grabCount)
        {
            camera.Stop();
            camera.GrabDone?.Invoke();
        }
    }
    camera.ImageGrabbed?.Invoke(ConvertImage(pFrameInfo, pData, camera._device));
}
/// <summary>
/// Converts a grabbed Mono8 frame into a WPF BitmapSource (stride == width).
/// Returns null when the camera is closed or conversion fails.
/// </summary>
#if(SDK)
public BitmapSource ConvertImageBitmap(MyCamera.MV_FRAME_OUT_INFO_EX image_grab, IntPtr pData)
{
    try
    {
        if (BlOpen)
        {
            // Wrap the raw 8-bit buffer in a GDI+ indexed bitmap.
            Bitmap image_bmp = new Bitmap(image_grab.nWidth, image_grab.nHeight, image_grab.nWidth * 1, PixelFormat.Format8bppIndexed, pData);
            // Install a linear grayscale palette.
            ColorPalette cp = image_bmp.Palette;
            for (int i = 0; i < 256; i++)
            {
                cp.Entries[i] = Color.FromArgb(i, i, i);
            }
            image_bmp.Palette = cp;
            IntPtr p_bmpImage = image_bmp.GetHbitmap();
            BitmapSource BbitmapSource = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(p_bmpImage, IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
            DeleteObject(p_bmpImage); // release the GDI HBITMAP to avoid a handle leak
            image_bmp.Dispose();
            return BbitmapSource;
        }
        else
        {
            return null;
        }
    }
    catch (Exception ex)
    {
        Log.L_I.WriteError(NameClass, ex);
        return null;
    }
    // PERF FIX: removed the original finally { GC.Collect(); } — forcing a full
    // GC on every converted frame stalls the process; Dispose() and
    // DeleteObject() above already reclaim the unmanaged resources.
}
/// <summary>
/// Grab-loop thread: queries the payload size once, then polls one frame at a
/// time (1 s timeout) and logs its geometry until g_bExit is raised.
/// </summary>
static void WorkThread()
{
    // ch:获取包大小 || en: Get Payload Size
    MyCamera.MVCC_INTVALUE payloadParam = new MyCamera.MVCC_INTVALUE();
    int ret = device.MV_CC_GetIntValue_NET("PayloadSize", ref payloadParam);
    if (MyCamera.MV_OK != ret)
    {
        Console.WriteLine("Get PayloadSize failed:{0:x8}", ret);
        return;
    }
    UInt32 payloadSize = payloadParam.nCurValue;
    // One reusable unmanaged receive buffer for the whole loop.
    IntPtr frameBuffer = Marshal.AllocHGlobal((int)payloadSize);
    MyCamera.MV_FRAME_OUT_INFO_EX frameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
    while (true)
    {
        // ch:获取一帧图像 | en:Get image
        ret = device.MV_CC_GetOneFrameTimeout_NET(frameBuffer, payloadSize, ref frameInfo, 1000);
        if (MyCamera.MV_OK == ret)
        {
            Console.WriteLine("Get One Frame:" + "Width[" + Convert.ToString(frameInfo.nWidth) + "] , Height[" + Convert.ToString(frameInfo.nHeight) + "] , FrameNum[" + Convert.ToString(frameInfo.nFrameNum) + "]");
        }
        else
        {
            Console.WriteLine("No data:{0:x8}", ret);
        }
        if (g_bExit)
        {
            break;
        }
    }
    Marshal.FreeHGlobal(frameBuffer);
    return;
}
/// <summary>
/// Packages a raw SDK frame into a GrabInfo. One byte/pixel payloads are copied
/// verbatim; everything else is converted to packed RGB8 via the SDK.
/// </summary>
private static GrabInfo ConvertImage(MyCamera.MV_FRAME_OUT_INFO_EX frameInfo, IntPtr pData, MyCamera device)
{
    // Infer the channel count from the payload size.
    uint channel = frameInfo.nFrameLen / frameInfo.nWidth / frameInfo.nHeight;
    if (channel == 1)
    {
        var data = new byte[frameInfo.nFrameLen];
        Marshal.Copy(pData, data, 0, (int)frameInfo.nFrameLen);
        return(new GrabInfo(EGrabResult.Success, frameInfo.nWidth, frameInfo.nHeight, 1, data));
    }
    else
    {
        var data = new byte[frameInfo.nWidth * frameInfo.nHeight * 3];
        // Pin the destination so the native converter can write into it.
        var handle = GCHandle.Alloc(data, GCHandleType.Pinned);
        try
        {
            var stConverPixelParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
            stConverPixelParam.nWidth = frameInfo.nWidth;
            stConverPixelParam.nHeight = frameInfo.nHeight;
            stConverPixelParam.pSrcData = pData;
            stConverPixelParam.nSrcDataLen = frameInfo.nFrameLen;
            stConverPixelParam.enSrcPixelType = frameInfo.enPixelType;
            stConverPixelParam.enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed;
            stConverPixelParam.pDstBuffer = handle.AddrOfPinnedObject();
            stConverPixelParam.nDstBufferSize = (uint)(frameInfo.nWidth * frameInfo.nHeight * 3);
            if (MyCamera.MV_OK != device.MV_CC_ConvertPixelType_NET(ref stConverPixelParam))
            {
                return(new GrabInfo(EGrabResult.Error));
            }
            return(new GrabInfo(EGrabResult.Success, frameInfo.nWidth, frameInfo.nHeight, 3, data));
        }
        finally
        {
            // BUG FIX: the handle is now released even if the SDK call throws
            // (previously it leaked on an exception path).
            handle.Free();
        }
    }
}
// ch:取流回调函数 | en:Acquisition callback — counts the frame, optionally
// saves it to disk, then renders it to the camera's display window.
private void ImageCallBack(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    int nIndex = (int)pUser; // which camera this callback belongs to
    ++m_nFrames[nIndex];     // total frames acquired for this camera
    // One-shot save request set from the UI.
    if (m_bSaveImg[nIndex])
    {
        SaveImage(pData, pFrameInfo, nIndex);
        m_bSaveImg[nIndex] = false;
    }
    // Hand the frame to the SDK renderer for the camera's display handle.
    MyCamera.MV_DISPLAY_FRAME_INFO stDisplayInfo = new MyCamera.MV_DISPLAY_FRAME_INFO
    {
        hWnd = m_hDisplayHandle[nIndex],
        pData = pData,
        nDataLen = pFrameInfo.nFrameLen,
        nWidth = pFrameInfo.nWidth,
        nHeight = pFrameInfo.nHeight,
        enPixelType = pFrameInfo.enPixelType
    };
    m_pMyCamera[nIndex].MV_CC_DisplayOneFrame_NET(ref stDisplayInfo);
}
/// <summary>
/// Frame callback: decodes the raw frame into a Bitmap and hands it to
/// the image-acquired event.
/// </summary>
private void ImageCallback(IntPtr pdata, ref MyCamera.MV_FRAME_OUT_INFO_EX pframeinfo, IntPtr puser)
{
    OnImageAcquired(ParseRawImageData(pdata, pframeinfo));
}
/// <summary>
/// Converts a raw SDK frame to a displayable Bitmap: mono sources become
/// 8bpp-indexed grayscale, color sources become 24bpp BGR.
/// </summary>
/// <exception cref="InvalidOperationException">Payload query or pixel conversion failed.</exception>
/// <exception cref="NotSupportedException">Pixel type is neither mono nor color.</exception>
private Bitmap ParseRawImageData(IntPtr pData, MyCamera.MV_FRAME_OUT_INFO_EX stFrameInfo)
{
    int nRet;
    UInt32 newBufferSize = 0;
    Bitmap output;
    // Size the working buffers from the current payload size.
    MyCamera.MVCC_INTVALUE stParam = new MyCamera.MVCC_INTVALUE();
    nRet = _myCamera.MV_CC_GetIntValue_NET("PayloadSize", ref stParam);
    if (MyCamera.MV_OK != nRet)
    {
        throw new InvalidOperationException("Can not get payload size");
    }
    newBufferSize = stParam.nCurValue;
    if (newBufferSize > _bufferSize)
    {
        _bufferSize = newBufferSize;
        _buffer = new byte[_bufferSize];
        // ch:同时对保存图像的缓存做大小判断处理 | en:Determine the buffer size to save image
        // ch:BMP图片大小:width * height * 3 + 2048(预留BMP头大小) | en:BMP image size: width * height * 3 + 2048 (Reserved for BMP header)
        _buffSizeForSaveImage = _bufferSize * 3 + 2048;
        _bufForSaveImage = new byte[_buffSizeForSaveImage];
    }
    // Choose the destination pixel format by source family.
    MyCamera.MvGvspPixelType enDstPixelType;
    if (IsMonoData(stFrameInfo.enPixelType))
    {
        enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;
    }
    else if (IsColorData(stFrameInfo.enPixelType))
    {
        enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed;
    }
    else
    {
        throw new NotSupportedException("Can not support such pixel type currently");
    }
    // NOTE(review): _bufForSaveImage is never actually pinned; the address below
    // (and the Bitmaps constructed over it) could be invalidated by a GC
    // compaction — consider GCHandle.Alloc(..., Pinned).
    IntPtr pImage = Marshal.UnsafeAddrOfPinnedArrayElement(_bufForSaveImage, 0);
    MyCamera.MV_PIXEL_CONVERT_PARAM stConverPixelParam = new MyCamera.MV_PIXEL_CONVERT_PARAM
    {
        nWidth = stFrameInfo.nWidth,
        nHeight = stFrameInfo.nHeight,
        pSrcData = pData,
        nSrcDataLen = stFrameInfo.nFrameLen,
        enSrcPixelType = stFrameInfo.enPixelType,
        enDstPixelType = enDstPixelType,
        pDstBuffer = pImage,
        nDstBufferSize = _buffSizeForSaveImage
    };
    nRet = _myCamera.MV_CC_ConvertPixelType_NET(ref stConverPixelParam);
    if (MyCamera.MV_OK != nRet)
    {
        throw new InvalidOperationException("Unable to convert pixel type");
    }
    if (enDstPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
    {
        // Mono8 -> Bitmap: wrap the converted buffer directly (stride == width).
        output = new Bitmap(stFrameInfo.nWidth, stFrameInfo.nHeight, stFrameInfo.nWidth * 1, PixelFormat.Format8bppIndexed, pImage);
        ColorPalette cp = output.Palette;
        // Install a linear grayscale palette for the indexed format.
        for (int i = 0; i < 256; i++)
        {
            cp.Entries[i] = Color.FromArgb(i, i, i);
        }
        output.Palette = cp;
    }
    else
    {
        // RGB8 -> Bitmap: swap R and B in place first, since the converter
        // produces RGB while Format24bppRgb expects BGR byte order.
        for (int i = 0; i < stFrameInfo.nHeight; i++)
        {
            for (int j = 0; j < stFrameInfo.nWidth; j++)
            {
                byte chRed = _bufForSaveImage[i * stFrameInfo.nWidth * 3 + j * 3];
                _bufForSaveImage[i * stFrameInfo.nWidth * 3 + j * 3] = _bufForSaveImage[i * stFrameInfo.nWidth * 3 + j * 3 + 2];
                _bufForSaveImage[i * stFrameInfo.nWidth * 3 + j * 3 + 2] = chRed;
            }
        }
        output = new Bitmap(stFrameInfo.nWidth, stFrameInfo.nHeight, stFrameInfo.nWidth * 3, PixelFormat.Format24bppRgb, pImage);
    }
    return(output);
}
/// <summary>
/// Minimal frame callback: logs the geometry and frame counter of every frame.
/// </summary>
static void ImageCallbackFunc(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    string message = "Get one frame: Width[" + Convert.ToString(pFrameInfo.nWidth) + "] , Height[" + Convert.ToString(pFrameInfo.nHeight) + "] , FrameNum[" + Convert.ToString(pFrameInfo.nFrameNum) + "]";
    Console.WriteLine(message);
}
/// <summary>
/// Creates, opens, and configures every enumerated camera: tunes the GigE
/// packet size, switches to software-trigger mode, and registers the frame
/// callback. Returns false (with StrErrorMsg set) on the first hard failure.
/// </summary>
private bool DeviceOpenAll()
{
    if (m_stDeviceList.nDeviceNum == 0)
    {
        StrErrorMsg = "无相机设备";
        return(false);
    }
    // Keep the delegate in a field so the GC cannot collect it while registered.
    cbImage = new MyCamera.cbOutputExdelegate(ImageCallBack);
    int nIndex;
    string UserDefinedName;
    foreach (var item in m_listUserDefinedName)
    {
        nIndex = item.Key;
        UserDefinedName = item.Value;
        // Fetch the selected device's descriptor from the enumeration list.
        MyCamera.MV_CC_DEVICE_INFO device = (MyCamera.MV_CC_DEVICE_INFO)Marshal.PtrToStructure(m_stDeviceList.pDeviceInfo[nIndex], typeof(MyCamera.MV_CC_DEVICE_INFO));
        m_dicMyCamera[nIndex] = new MyCamera();
        if (m_dicMyCamera[nIndex] == null)
        {
            StrErrorMsg = "相机" + UserDefinedName + "对象创建失败";
            m_dicMyCamera.Remove(nIndex);
            return(false);
        }
        int nRet = m_dicMyCamera[nIndex].MV_CC_CreateDevice_NET(ref device);
        if (MyCamera.MV_OK != nRet)
        {
            StrErrorMsg = "相机" + UserDefinedName + "创建设备失败";
            m_dicMyCamera.Remove(nIndex);
            return(false);
        }
        // Try to open the device, retrying once after a 1 s pause.
        for (int i = 0; i < 2; i++)
        {
            nRet = m_dicMyCamera[nIndex].MV_CC_OpenDevice_NET();
            if (MyCamera.MV_OK != nRet)
            {
                StrErrorMsg = "相机" + UserDefinedName + "打开设备失败,正在尝试重连";
                Thread.Sleep(1000);
            }
            else
            {
                break;
            }
        }
        if (MyCamera.MV_OK != nRet)
        {
            StrErrorMsg = "相机" + UserDefinedName + "打开设备失败(" + nRet.ToString("X") + ")";
            m_dicMyCamera.Remove(nIndex);
            return(false);
        }
        else
        {
            m_pDeviceInfo[nIndex] = device;
            // Probe the optimal network packet size (GigE only) and apply it.
            int nPacketSize = m_dicMyCamera[nIndex].MV_CC_GetOptimalPacketSize_NET();
            if (nPacketSize > 0)
            {
                nRet = m_dicMyCamera[nIndex].MV_CC_SetIntValue_NET("GevSCPSPacketSize", (uint)nPacketSize);
                if (nRet != MyCamera.MV_OK)
                {
                    StrErrorMsg = "相机" + UserDefinedName + "设置数据包大小失败(" + nRet.ToString("X") + ")";
                }
            }
            else
            {
                // NOTE(review): this message formats nRet, but nRet was not set by
                // the packet-size probe (nPacketSize holds that result) — the
                // reported error code is stale.
                StrErrorMsg = "相机" + UserDefinedName + "设置数据包大小失败(" + nRet.ToString("X") + ")";
            }
            // Enable software-trigger mode.
            m_dicMyCamera[nIndex].MV_CC_SetEnumValue_NET("TriggerMode", (uint)MyCamera.MV_CAM_TRIGGER_MODE.MV_TRIGGER_MODE_ON);
            m_dicMyCamera[nIndex].MV_CC_SetEnumValue_NET("TriggerSource", (uint)MyCamera.MV_CAM_TRIGGER_SOURCE.MV_TRIGGER_SOURCE_SOFTWARE);
            // Free-run (continuous) mode, kept for reference:
            //m_dicMyCamera[nIndex].MV_CC_SetEnumValue_NET("TriggerMode", (uint)MyCamera.MV_CAM_TRIGGER_MODE.MV_TRIGGER_MODE_OFF);
            // Initialize per-camera state and register the frame callback with the
            // camera index as user data.
            m_stFrameInfo[nIndex] = new MyCamera.MV_FRAME_OUT_INFO_EX();
            m_dicSaveImge[nIndex] = new SaveImage();
            m_dicImageCallBackSignal[nIndex] = new EventWaitHandle(false, EventResetMode.AutoReset);
            m_dicMyCamera[nIndex].MV_CC_RegisterImageCallBackEx_NET(cbImage, (IntPtr)nIndex);
        }
    }
    return(true);
}
/// <summary>
/// Hardware-trigger frame callback: normalizes the frame to RGB8 (color path)
/// or Mono8 (mono path), wraps it in a Halcon image, and raises HardTriggerEvent.
/// </summary>
private void HardCapture(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    int nIndex = (int)pUser; // camera index passed at callback registration
    int nRet;
    IntPtr pImageBuf = IntPtr.Zero;  // temporary conversion buffer (freed at the bottom)
    int nImageBufSize = 0;
    IntPtr pTemp = IntPtr.Zero;      // points at whichever buffer holds the final pixels
    HObject outImage = null;
    if (IsColorPixelFormat(pFrameInfo.enPixelType))
    {
        if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
        {
            // Already RGB8: use the SDK buffer directly.
            pTemp = pData;
        }
        else
        {
            // NOTE(review): pImageBuf is a fresh local, so this "reuse" check is
            // always true — a per-camera cached buffer may have been intended.
            if (IntPtr.Zero == pImageBuf || nImageBufSize < (pFrameInfo.nWidth * pFrameInfo.nHeight * 3))
            {
                if (pImageBuf != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(pImageBuf);
                    pImageBuf = IntPtr.Zero;
                }
                pImageBuf = Marshal.AllocHGlobal((int)pFrameInfo.nWidth * pFrameInfo.nHeight * 3);
                if (IntPtr.Zero == pImageBuf)
                {
                    return;
                }
                nImageBufSize = pFrameInfo.nWidth * pFrameInfo.nHeight * 3;
            }
            MyCamera.MV_PIXEL_CONVERT_PARAM stPixelConvertParam = new MyCamera.MV_PIXEL_CONVERT_PARAM
            {
                pSrcData = pData,                        // source data
                nWidth = pFrameInfo.nWidth,              // image width
                nHeight = pFrameInfo.nHeight,            // image height
                enSrcPixelType = pFrameInfo.enPixelType, // source pixel format
                nSrcDataLen = pFrameInfo.nFrameLen,
                nDstBufferSize = (uint)nImageBufSize,
                pDstBuffer = pImageBuf,                  // converted data
                enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed
            };
            nRet = m_dicMyCamera[nIndex].MV_CC_ConvertPixelType_NET(ref stPixelConvertParam); // pixel-format conversion
            if (MyCamera.MV_OK != nRet)
            {
                // NOTE(review): early return here leaks pImageBuf (only the
                // bottom-of-method path frees it).
                return;
            }
            pTemp = pImageBuf;
        }
        try
        {
            HOperatorSet.GenImageInterleaved(out outImage, (HTuple)pTemp, (HTuple)"rgb", (HTuple)pFrameInfo.nWidth, (HTuple)pFrameInfo.nHeight, -1, "byte", 0, 0, 0, 0, -1, 0);
        }
        catch (Exception ex)
        {
            // NOTE(review): same leak on this early-return path.
            return;
        }
    }
    else if (IsMonoPixelFormat(pFrameInfo.enPixelType))
    {
        if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
        {
            // Already Mono8: use the SDK buffer directly.
            pTemp = pData;
        }
        else
        {
            // NOTE(review): as above, this check is always true for a fresh local.
            if (IntPtr.Zero == pImageBuf || nImageBufSize < (pFrameInfo.nWidth * pFrameInfo.nHeight))
            {
                if (pImageBuf != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(pImageBuf);
                    pImageBuf = IntPtr.Zero;
                }
                pImageBuf = Marshal.AllocHGlobal((int)pFrameInfo.nWidth * pFrameInfo.nHeight);
                if (IntPtr.Zero == pImageBuf)
                {
                    return;
                }
                nImageBufSize = pFrameInfo.nWidth * pFrameInfo.nHeight;
            }
            MyCamera.MV_PIXEL_CONVERT_PARAM stPixelConvertParam = new MyCamera.MV_PIXEL_CONVERT_PARAM
            {
                pSrcData = pData,                        // source data
                nWidth = pFrameInfo.nWidth,              // image width
                nHeight = pFrameInfo.nHeight,            // image height
                enSrcPixelType = pFrameInfo.enPixelType, // source pixel format
                nSrcDataLen = pFrameInfo.nFrameLen,
                nDstBufferSize = (uint)nImageBufSize,
                pDstBuffer = pImageBuf,                  // converted data
                enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8
            };
            nRet = m_dicMyCamera[nIndex].MV_CC_ConvertPixelType_NET(ref stPixelConvertParam); // pixel-format conversion
            if (MyCamera.MV_OK != nRet)
            {
                return;
            }
            pTemp = pImageBuf;
        }
        try
        {
            HOperatorSet.GenImage1Extern(out outImage, "byte", pFrameInfo.nWidth, pFrameInfo.nHeight, pTemp, IntPtr.Zero);
        }
        catch (Exception ex)
        {
            return;
        }
    }
    // Release the temporary conversion buffer, if one was allocated.
    if (pImageBuf != IntPtr.Zero)
    {
        Marshal.FreeHGlobal(pImageBuf);
        pImageBuf = IntPtr.Zero;
    }
    HardTriggerEvent?.Invoke(m_listUserDefinedName[nIndex], outImage);
}
/// <summary>
/// Frame callback: converts the incoming frame to Mono8 or RGB8, builds a
/// GDI+ Bitmap over the converted buffer and hands it to <c>CallFunction</c>.
/// </summary>
/// <param name="pData">Pointer to the raw frame buffer owned by the SDK.</param>
/// <param name="pFrameInfo">SDK frame descriptor (dimensions, pixel type, length).</param>
/// <param name="pUser">User token registered with the callback (unused here).</param>
private void ImageCallbackFunc(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
    Console.WriteLine("Get one frame: Width[" + Convert.ToString(pFrameInfo.nWidth) + "] , Height[" + Convert.ToString(pFrameInfo.nHeight) + "] , FrameNum[" + Convert.ToString(pFrameInfo.nFrameNum) + "]");

    // Pick the destination pixel format; frames that are neither mono nor
    // color are dropped.
    MyCamera.MvGvspPixelType enDstPixelType;
    if (IsMonoData(pFrameInfo.enPixelType))
    {
        enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;
    }
    else if (IsColorData(pFrameInfo.enPixelType))
    {
        enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed;
    }
    else
    {
        return;
    }

    int nBufSize = 3 * (pFrameInfo.nWidth * pFrameInfo.nHeight) + 2048;
    byte[] m_pBufForSaveImage = new byte[nBufSize];

    // FIX: the original took the array address with UnsafeAddrOfPinnedArrayElement
    // WITHOUT pinning — the GC could relocate the array while the native
    // converter and the Bitmap read/write through the raw pointer. Pin it
    // explicitly for the whole native lifetime of the pointer.
    GCHandle hBuf = GCHandle.Alloc(m_pBufForSaveImage, GCHandleType.Pinned);
    try
    {
        IntPtr pImage = hBuf.AddrOfPinnedObject();

        MyCamera.MV_PIXEL_CONVERT_PARAM stConverPixelParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
        stConverPixelParam.nWidth = pFrameInfo.nWidth;
        stConverPixelParam.nHeight = pFrameInfo.nHeight;
        stConverPixelParam.pSrcData = pData;
        stConverPixelParam.nSrcDataLen = pFrameInfo.nFrameLen;
        stConverPixelParam.enSrcPixelType = pFrameInfo.enPixelType;
        stConverPixelParam.enDstPixelType = enDstPixelType;
        stConverPixelParam.pDstBuffer = pImage;
        stConverPixelParam.nDstBufferSize = (uint)nBufSize;
        nRet = getonecamera.MV_CC_ConvertPixelType_NET(ref stConverPixelParam);
        if (MyCamera.MV_OK != nRet)
        {
            return;
        }

        if (enDstPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
        {
            //************************ Mono8 -> Bitmap *******************************
            Bitmap bmp = new Bitmap(pFrameInfo.nWidth, pFrameInfo.nHeight, pFrameInfo.nWidth * 1, PixelFormat.Format8bppIndexed, pImage);
            // Build a grayscale palette for the 8bpp indexed bitmap.
            ColorPalette cp = bmp.Palette;
            for (int i = 0; i < 256; i++)
            {
                cp.Entries[i] = Color.FromArgb(i, i, i);
            }
            bmp.Palette = cp;
            // NOTE(review): bmp's pixel data points into the pinned buffer;
            // CallFunction must not retain it past this call — confirm.
            CallFunction(this.Name, bmp);
        }
        else
        {
            //********************* RGB8 -> Bitmap **************************
            // Format24bppRgb expects BGR byte order, so swap R and B in place.
            for (int i = 0; i < pFrameInfo.nHeight; i++)
            {
                for (int j = 0; j < pFrameInfo.nWidth; j++)
                {
                    int idx = i * pFrameInfo.nWidth * 3 + j * 3;
                    byte chRed = m_pBufForSaveImage[idx];
                    m_pBufForSaveImage[idx] = m_pBufForSaveImage[idx + 2];
                    m_pBufForSaveImage[idx + 2] = chRed;
                }
            }
            try
            {
                Bitmap bmp = new Bitmap(pFrameInfo.nWidth, pFrameInfo.nHeight, pFrameInfo.nWidth * 3, PixelFormat.Format24bppRgb, pImage);
                CallFunction(this.Name, bmp);
            }
            catch
            {
                // NOTE(review): deliberate best-effort — a bad frame is
                // dropped silently. Consider logging here.
            }
        }
    }
    finally
    {
        hBuf.Free();
    }
}
/// <summary>
/// Grab worker thread: polls frames with MV_CC_GetOneFrameTimeout_NET while
/// <c>m_bGrabbing</c> is set, converts each frame to RGB8/Mono8 as needed,
/// builds a Halcon image and displays it via <c>HalconDisplay</c>.
/// </summary>
/// <param name="obj">The <see cref="MyCamera"/> instance to grab from (thread-start argument).</param>
public void ReceiveImageWorkThread(object obj)
{
    MyCamera device = obj as MyCamera;
    if (device == null)
    {
        return; // wrong thread argument — nothing to grab from
    }

    int nRet;
    MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
    IntPtr pData = IntPtr.Zero;
    IntPtr pImageBuffer = IntPtr.Zero;
    uint nDataSize = g_nPayloadSize * 3;
    HObject Hobj = new HObject();
    IntPtr RedPtr = IntPtr.Zero;
    IntPtr GreenPtr = IntPtr.Zero;
    IntPtr BluePtr = IntPtr.Zero;
    IntPtr pTemp = IntPtr.Zero;

    // FIX: the original leaked both native buffers when a conversion failure
    // hit a `return` inside the loop, and leaked pData when the second
    // allocation failed. The try/finally guarantees release on every path.
    try
    {
        // AllocHGlobal throws OutOfMemoryException rather than returning
        // IntPtr.Zero. x3 payload gives headroom for RGB expansion.
        pData = Marshal.AllocHGlobal((int)g_nPayloadSize * 3);
        pImageBuffer = Marshal.AllocHGlobal((int)g_nPayloadSize * 3);

        while (m_bGrabbing)
        {
            nRet = device.MV_CC_GetOneFrameTimeout_NET(pData, nDataSize, ref pFrameInfo, 1000);
            if (MyCamera.MV_OK != nRet)
            {
                continue; // timeout or transient error — try again
            }

            if (IsColorPixelFormat(pFrameInfo.enPixelType))
            {
                if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
                {
                    pTemp = pData; // already RGB8 packed
                }
                else
                {
                    nRet = ConvertToRGB(obj, pData, pFrameInfo.nHeight, pFrameInfo.nWidth, pFrameInfo.enPixelType, pImageBuffer);
                    if (MyCamera.MV_OK != nRet)
                    {
                        return; // buffers freed in finally
                    }
                    pTemp = pImageBuffer;
                }

                // Split the packed RGB buffer into per-channel planes padded
                // to a 4-byte-aligned row width for Halcon.
                unsafe
                {
                    byte* pBufForSaveImage = (byte*)pTemp;
                    UInt32 nSupWidth = (pFrameInfo.nWidth + (UInt32)3) & 0xfffffffc;
                    for (int nRow = 0; nRow < pFrameInfo.nHeight; nRow++)
                    {
                        for (int col = 0; col < pFrameInfo.nWidth; col++)
                        {
                            m_pDataForRed[nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col)];
                            m_pDataForGreen[nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col + 1)];
                            m_pDataForBlue[nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col + 2)];
                        }
                    }
                }

                // NOTE(review): these arrays are not explicitly pinned;
                // UnsafeAddrOfPinnedArrayElement assumes pinning — confirm
                // the plane arrays are allocated in a way the GC won't move,
                // or pin them with GCHandle.
                RedPtr = Marshal.UnsafeAddrOfPinnedArrayElement(m_pDataForRed, 0);
                GreenPtr = Marshal.UnsafeAddrOfPinnedArrayElement(m_pDataForGreen, 0);
                BluePtr = Marshal.UnsafeAddrOfPinnedArrayElement(m_pDataForBlue, 0);
                try
                {
                    HOperatorSet.GenImage3Extern(out Hobj, (HTuple)"byte", pFrameInfo.nWidth, pFrameInfo.nHeight,
                        (new HTuple(RedPtr)), (new HTuple(GreenPtr)), (new HTuple(BluePtr)), IntPtr.Zero);
                }
                catch (System.Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                }
            }
            else if (IsMonoPixelFormat(pFrameInfo.enPixelType))
            {
                if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
                {
                    pTemp = pData; // already Mono8
                }
                else
                {
                    nRet = ConvertToMono8(device, pData, pImageBuffer, pFrameInfo.nHeight, pFrameInfo.nWidth, pFrameInfo.enPixelType);
                    if (MyCamera.MV_OK != nRet)
                    {
                        return; // buffers freed in finally
                    }
                    pTemp = pImageBuffer;
                }
                try
                {
                    HOperatorSet.GenImage1Extern(out Hobj, "byte", pFrameInfo.nWidth, pFrameInfo.nHeight, pTemp, IntPtr.Zero);
                }
                catch (System.Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    return;
                }
            }
            else
            {
                continue; // unsupported pixel format — skip the frame
            }

            HalconDisplay(m_Window, Hobj, pFrameInfo.nHeight, pFrameInfo.nWidth);
        }
    }
    finally
    {
        if (pData != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(pData);
        }
        if (pImageBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(pImageBuffer);
        }
    }
}
/// <summary>
/// Thin wrapper over MV_CC_GetOneFrameTimeout_NET that also reports the
/// received frame length.
/// </summary>
/// <param name="pData">Destination buffer for the frame data.</param>
/// <param name="pnDataLen">Receives the frame length in bytes; 0 when the grab fails.</param>
/// <param name="nDataSize">Capacity of <paramref name="pData"/> in bytes.</param>
/// <param name="pFrameInfo">Receives the SDK frame descriptor.</param>
/// <param name="nMsec">Grab timeout in milliseconds.</param>
/// <returns>The SDK status code (MyCamera.MV_OK on success).</returns>
public int GetOneFrameTimeout(IntPtr pData, ref UInt32 pnDataLen, UInt32 nDataSize, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, Int32 nMsec)
{
    pnDataLen = 0;
    int nRet = m_pCSI.MV_CC_GetOneFrameTimeout_NET(pData, nDataSize, ref pFrameInfo, nMsec);
    // FIX: only report the frame length on success — the original copied
    // pFrameInfo.nFrameLen unconditionally, exposing a stale/undefined length
    // after a timeout. (It also had a dead `if (fail) return nRet;` branch.)
    if (MyCamera.MV_OK == nRet)
    {
        pnDataLen = pFrameInfo.nFrameLen;
    }
    return nRet;
}
/// <summary>
/// Bundles one grabbed Hikvision frame: the image bytes, the SDK frame
/// descriptor, and the camera that produced it.
/// </summary>
/// <param name="dataBytes">Raw image data for the frame. Presumably a copy of the SDK buffer — confirm against the caller.</param>
/// <param name="frameInfo">SDK frame descriptor (width, height, pixel type, frame length, ...).</param>
/// <param name="cmr">The camera instance the frame came from.</param>
internal JFImage_Hik(byte[] dataBytes, MyCamera.MV_FRAME_OUT_INFO_EX frameInfo, MyCamera cmr)
{
    _dataBytes = dataBytes;
    _frameInfo = frameInfo;
    _cmr = cmr;
}