// Sample 1: fetch depth and color frames, print the depth of the center pixel,
// map it to a 3D point with the depth calibration, and print the center RGB value.
static void FetchFrameLoop(IntPtr handle)
{
    IntPtr color_isp_handle = new IntPtr();

    // Read the depth camera calibration data and print the 3x3 intrinsic matrix.
    uint cal_size = calib_inf.CSize();
    SDK.TYGetStruct(handle, SDK.TY_COMPONENT_DEPTH_CAM, SDK.TY_STRUCT_CAM_CALIB_DATA, calib_inf.getCPtr(), cal_size);
    Console.WriteLine(string.Format("Depth calib inf width:{0} height:{1}", calib_inf.intrinsicWidth, calib_inf.intrinsicHeight));
    Console.WriteLine(string.Format("Depth intrinsic:{0} {1} {2} {3} {4} {5} {6} {7} {8}",
        calib_inf.intrinsic.data[0], calib_inf.intrinsic.data[1], calib_inf.intrinsic.data[2],
        calib_inf.intrinsic.data[3], calib_inf.intrinsic.data[4], calib_inf.intrinsic.data[5],
        calib_inf.intrinsic.data[6], calib_inf.intrinsic.data[7], calib_inf.intrinsic.data[8]));

    SDK.TYEnableComponents(handle, SDK.TY_COMPONENT_DEPTH_CAM);
    SDK.TYEnableComponents(handle, SDK.TY_COMPONENT_RGB_CAM);

    // Set the depth camera resolution.
    SDK.TYSetEnum(handle, SDK.TY_COMPONENT_DEPTH_CAM, SDK.TY_ENUM_IMAGE_MODE, (int)(SDK.TY_RESOLUTION_MODE_640x480 | SDK.TY_PIXEL_FORMAT_DEPTH16));

    SDK.TYISPCreate(ref color_isp_handle);
    SDK_ISP.ColorIspInitSetting(color_isp_handle, handle);

    uint buff_sz;
    SDK.TYGetFrameBufferSize(handle, out buff_sz);

    int width, height;
    SDK.TYGetInt(handle, SDK.TY_COMPONENT_RGB_CAM, SDK.TY_INT_WIDTH, out width);
    SDK.TYGetInt(handle, SDK.TY_COMPONENT_RGB_CAM, SDK.TY_INT_HEIGHT, out height);
    Console.WriteLine(string.Format("RGB Image size:{0} {1}", width, height));

    // Allocate two frame buffers for the driver queue and one RGB conversion buffer.
    int color_size = width * height * 3;
    buffer[0] = new uint8_t_ARRAY((int)buff_sz);
    buffer[1] = new uint8_t_ARRAY((int)buff_sz);
    color_data = new uint8_t_ARRAY(color_size);

    SDK.TYEnqueueBuffer(handle, buffer[0].VoidPtr(), buff_sz);
    SDK.TYEnqueueBuffer(handle, buffer[1].VoidPtr(), buff_sz);
    SDK.TYStartCapture(handle);

    int img_index = 0;
    while (true)
    {
        TY_FRAME_DATA frame = new TY_FRAME_DATA();
        try
        {
            SDK.TYFetchFrame(handle, frame, -1);
            Console.WriteLine(string.Format("capture {0} ", img_index));
            var images = frame.image;
            for (int idx = 0; idx < frame.validCount; idx++)
            {
                var img = images[idx];
                if (img.componentID == SDK.TY_COMPONENT_DEPTH_CAM)
                {
                    // Read the distance of the center pixel and map it to a 3D point.
                    var pixel_arr = uint16_t_ARRAY.FromVoidPtr(img.buffer);
                    IntPtr pt = pixel_arr.VoidPtr2();
                    int offset = img.width * img.height / 2 + img.width / 2;
                    ushort distance = pixel_arr[offset];
                    TY_PIXEL_DESC pix = new TY_PIXEL_DESC();
                    TY_VECT_3F p3d = new TY_VECT_3F();
                    pix.x = (short)(img.width / 2);
                    pix.y = (short)(img.height / 2);
                    pix.depth = distance;
                    SDK.TYMapDepthToPoint3d(calib_inf, (uint)img.width, (uint)img.height, pix, 1, p3d);
                    Console.WriteLine(string.Format("Depth Image Center Pixel Distance:{0}", distance));
                    Console.WriteLine(string.Format("Point Cloud Center Data:(x:{0} y:{1} z:{2})", p3d.x, p3d.y, p3d.z));
                }
                else if (img.componentID == SDK.TY_COMPONENT_RGB_CAM)
                {
                    if (img.pixelFormat == SDK.TY_PIXEL_FORMAT_YVYU)
                    {
                        var pixel_arr = uint8_t_ARRAY.FromVoidPtr(img.buffer);
                        SDK_ISP.ConvertYVYU2RGB(pixel_arr, color_data, img.width, img.height);
                        int offset = 3 * (img.width * img.height / 2 + img.width / 2);
                        byte b = color_data[offset];
                        byte g = color_data[offset + 1];
                        byte r = color_data[offset + 2];
                        Console.WriteLine(string.Format("Color Image Center Pixel value(YVYU):{0} {1} {2}", r, g, b));
                    }
                    else if (img.pixelFormat == SDK.TY_PIXEL_FORMAT_YUYV)
                    {
                        var pixel_arr = uint8_t_ARRAY.FromVoidPtr(img.buffer);
                        SDK_ISP.ConvertYUYV2RGB(pixel_arr, color_data, img.width, img.height);
                        int offset = 3 * (img.width * img.height / 2 + img.width / 2);
                        byte b = color_data[offset];
                        byte g = color_data[offset + 1];
                        byte r = color_data[offset + 2];
                        Console.WriteLine(string.Format("Color Image Center Pixel value(YUYV):{0} {1} {2}", r, g, b));
                    }
                    else if (img.pixelFormat == SDK.TY_PIXEL_FORMAT_BAYER8GB)
                    {
                        // Demosaic the Bayer image through the ISP into a BGR buffer.
                        var pixel_arr = uint8_t_ARRAY.FromVoidPtr(img.buffer);
                        SWIGTYPE_p_void pointer = (SWIGTYPE_p_void)color_data.VoidPtr();
                        TY_IMAGE_DATA out_buff = SDK.TYInitImageData((uint)color_size, pointer, (uint)(img.width), (uint)(img.height));
                        out_buff.pixelFormat = (int)SDK.TY_PIXEL_FORMAT_BGR;
                        SDK.TYISPProcessImage(color_isp_handle, img, out_buff);
                        SDK.TYISPUpdateDevice(color_isp_handle);
                        var color_pixel_arr = uint8_t_ARRAY.FromVoidPtr(out_buff.buffer);
                        int offset = 3 * (img.width * img.height / 2 + img.width / 2);
                        byte b = color_pixel_arr[offset];
                        byte g = color_pixel_arr[offset + 1];
                        byte r = color_pixel_arr[offset + 2];
                        Console.WriteLine(string.Format("Color Image Center Pixel value(Bayer):{0} {1} {2}", r, g, b));
                    }
                    else
                    {
                        Console.WriteLine(string.Format("Color Image Type:{0}", img.pixelFormat));
                    }
                }
            }
            // Return the frame buffer to the driver queue for reuse.
            SDK.TYEnqueueBuffer(handle, frame.userBuffer, (uint)frame.bufferSize);
            img_index++;
        }
        catch (System.ComponentModel.Win32Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
    }
}
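// --- Sketch (not SDK code): what the center-pixel offset and TYMapDepthToPoint3d do conceptually. ---
// A minimal, self-contained illustration under a plain pinhole model: the offset is
// row (height/2) * stride + column (width/2), and the back-projection uses the focal lengths
// and principal point from a row-major 3x3 intrinsic matrix [fx 0 cx; 0 fy cy; 0 0 1] such as
// the one printed from calib_inf.intrinsic.data. The fx/fy/cx/cy values below are hypothetical;
// the real SDK call additionally rescales intrinsics to the streamed resolution and handles distortion.

using System;

static class CenterPixelSketch
{
    static void Main()
    {
        int width = 640, height = 480;
        ushort[] depth = new ushort[width * height];   // stand-in for a DEPTH16 frame
        depth[height / 2 * width + width / 2] = 1000;  // pretend the center pixel is 1000 mm away

        // Same offset arithmetic as the sample above.
        int offset = width * height / 2 + width / 2;
        ushort d = depth[offset];

        // Hypothetical pinhole intrinsics (assumed values, not read from a device).
        double fx = 570.0, fy = 570.0, cx = width / 2.0, cy = height / 2.0;

        // Pinhole back-projection: X = (u - cx) * Z / fx, Y = (v - cy) * Z / fy, Z = depth.
        double u = width / 2.0, v = height / 2.0, z = d;
        double x = (u - cx) * z / fx;
        double y = (v - cy) * z / fy;

        Console.WriteLine($"Center point (mm): x={x:F1} y={y:F1} z={z:F1}");
    }
}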
// Sample 2: undistort the color image, register the depth image into the color camera's
// pixel grid, and print the RGBD value at the image center.
static void FetchFrameLoop(IntPtr handle)
{
    // Read the depth and color camera calibration data (used for undistortion and registration).
    uint cal_size1 = depth_calib.CSize();
    SDK.TYGetStruct(handle, SDK.TY_COMPONENT_DEPTH_CAM, SDK.TY_STRUCT_CAM_CALIB_DATA, depth_calib.getCPtr(), cal_size1);
    uint cal_size2 = color_calib.CSize();
    SDK.TYGetStruct(handle, SDK.TY_COMPONENT_RGB_CAM, SDK.TY_STRUCT_CAM_CALIB_DATA, color_calib.getCPtr(), cal_size2);

    IntPtr color_isp_handle = new IntPtr();
    SDK.TYEnableComponents(handle, SDK.TY_COMPONENT_DEPTH_CAM);
    SDK.TYEnableComponents(handle, SDK.TY_COMPONENT_RGB_CAM);
    SDK.TYISPCreate(ref color_isp_handle);
    SDK_ISP.ColorIspInitSetting(color_isp_handle, handle);

    uint buff_sz;
    SDK.TYGetFrameBufferSize(handle, out buff_sz);

    int depth_width, depth_height;
    SDK.TYGetInt(handle, SDK.TY_COMPONENT_DEPTH_CAM, SDK.TY_INT_WIDTH, out depth_width);
    SDK.TYGetInt(handle, SDK.TY_COMPONENT_DEPTH_CAM, SDK.TY_INT_HEIGHT, out depth_height);
    Console.WriteLine(string.Format("Depth Image size:{0} {1}", depth_width, depth_height));

    int color_width, color_height;
    SDK.TYGetInt(handle, SDK.TY_COMPONENT_RGB_CAM, SDK.TY_INT_WIDTH, out color_width);
    SDK.TYGetInt(handle, SDK.TY_COMPONENT_RGB_CAM, SDK.TY_INT_HEIGHT, out color_height);
    Console.WriteLine(string.Format("RGB Image size:{0} {1}", color_width, color_height));

    // Buffers: raw frame queue, converted RGB, undistorted RGB, and the depth image
    // registered to the color camera's pixel grid.
    int color_size = color_width * color_height * 3;
    buffer[0] = new uint8_t_ARRAY((int)buff_sz);
    buffer[1] = new uint8_t_ARRAY((int)buff_sz);
    color_data = new uint8_t_ARRAY(color_size);
    undistort_color_data = new uint8_t_ARRAY(color_size);
    registration_depth_data = new uint16_t_ARRAY(color_size);

    // Source/destination image descriptors for TYUndistortImage.
    src.width = color_width;
    src.height = color_height;
    src.size = 3 * color_width * color_height;
    src.pixelFormat = (int)SDK.TY_PIXEL_FORMAT_RGB;
    src.buffer = color_data.VoidPtr();
    dst.width = color_width;
    dst.height = color_height;
    dst.size = 3 * color_width * color_height;
    dst.pixelFormat = (int)SDK.TY_PIXEL_FORMAT_RGB;
    dst.buffer = undistort_color_data.VoidPtr();

    SDK.TYEnqueueBuffer(handle, buffer[0].VoidPtr(), buff_sz);
    SDK.TYEnqueueBuffer(handle, buffer[1].VoidPtr(), buff_sz);
    SDK.TYStartCapture(handle);

    int img_index = 0;
    while (true)
    {
        TY_FRAME_DATA frame = new TY_FRAME_DATA();
        try
        {
            SDK.TYFetchFrame(handle, frame, -1);
            Console.WriteLine(string.Format("capture {0} ", img_index));
            bool depth_enable = false;
            bool color_enable = false;
            uint16_t_ARRAY depth_pixel_arr = null;
            var images = frame.image;
            for (int idx = 0; idx < frame.validCount; idx++)
            {
                var img = images[idx];
                if (img.componentID == SDK.TY_COMPONENT_DEPTH_CAM)
                {
                    depth_pixel_arr = uint16_t_ARRAY.FromVoidPtr(img.buffer);
                    depth_enable = true;
                }
                else if (img.componentID == SDK.TY_COMPONENT_RGB_CAM)
                {
                    // Convert the color frame to an RGB buffer according to its pixel format.
                    if (img.pixelFormat == SDK.TY_PIXEL_FORMAT_YVYU)
                    {
                        var pixel_arr = uint8_t_ARRAY.FromVoidPtr(img.buffer);
                        SDK_ISP.ConvertYVYU2RGB(pixel_arr, color_data, img.width, img.height);
                        color_enable = true;
                    }
                    else if (img.pixelFormat == SDK.TY_PIXEL_FORMAT_YUYV)
                    {
                        var pixel_arr = uint8_t_ARRAY.FromVoidPtr(img.buffer);
                        SDK_ISP.ConvertYUYV2RGB(pixel_arr, color_data, img.width, img.height);
                        color_enable = true;
                    }
                    else if (img.pixelFormat == SDK.TY_PIXEL_FORMAT_BAYER8GB)
                    {
                        var pixel_arr = uint8_t_ARRAY.FromVoidPtr(img.buffer);
                        SWIGTYPE_p_void pointer = (SWIGTYPE_p_void)color_data.VoidPtr();
                        TY_IMAGE_DATA out_buff = SDK.TYInitImageData((uint)color_size, pointer, (uint)(img.width), (uint)(img.height));
                        out_buff.pixelFormat = (int)SDK.TY_PIXEL_FORMAT_BGR;
                        SDK.TYISPProcessImage(color_isp_handle, img, out_buff);
                        SDK.TYISPUpdateDevice(color_isp_handle);
                        var color_pixel_arr = uint8_t_ARRAY.FromVoidPtr(out_buff.buffer);
                        color_enable = true;
                    }
                    else
                    {
                        Console.WriteLine(string.Format("Color Image Type:{0}", img.pixelFormat));
                    }
                }
            }
            if ((depth_enable) && (color_enable))
            {
                // Undistort the color image, then map the depth image into the color camera's
                // coordinate frame so that both share the same pixel grid.
                SDK.TYUndistortImage(color_calib, src, color_calib.intrinsic, dst);
                int offset = color_width * color_height / 2 + color_width / 2;
                byte b = undistort_color_data[3 * offset];
                byte g = undistort_color_data[3 * offset + 1];
                byte r = undistort_color_data[3 * offset + 2];
                SDK.TYMapDepthImageToColorCoordinate(depth_calib, (uint)depth_width, (uint)depth_height, depth_pixel_arr.cast(),
                    color_calib, (uint)color_width, (uint)color_height, registration_depth_data.cast());
                ushort distance = registration_depth_data[offset];
                Console.WriteLine(string.Format("The RGBD value at the center of the image: R.{0} G.{1} B.{2} D.{3}", r, g, b, distance));
            }
            SDK.TYEnqueueBuffer(handle, frame.userBuffer, (uint)frame.bufferSize);
            img_index++;
        }
        catch (System.ComponentModel.Win32Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
    }
}
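// --- Sketch (not SDK code): the idea behind depth-to-color registration. ---
// After TYMapDepthImageToColorCoordinate, registration_depth_data[offset] and
// undistort_color_data[3 * offset .. 3 * offset + 2] describe the same scene point.
// The simplified sketch below shows conceptually how such a registered depth map can be
// built under a plain pinhole model: back-project each depth pixel, move it into the color
// camera frame with a hypothetical extrinsic rotation R (row-major 3x3) and translation t,
// project it with the color intrinsics, and keep the nearest depth per color pixel.
// All parameter names are illustrative; the SDK call also handles distortion and should be
// used in real code.

using System;

static class RegistrationSketch
{
    static void Register(ushort[] depth, int dw, int dh,
                         double dfx, double dfy, double dcx, double dcy,
                         double[] R, double[] t,
                         double cfx, double cfy, double ccx, double ccy,
                         ushort[] outDepth, int cw, int ch)
    {
        Array.Clear(outDepth, 0, outDepth.Length);
        for (int v = 0; v < dh; v++)
        {
            for (int u = 0; u < dw; u++)
            {
                ushort d = depth[v * dw + u];
                if (d == 0) continue;                       // 0 means "no measurement"

                // Back-project into the depth camera frame.
                double x = (u - dcx) * d / dfx;
                double y = (v - dcy) * d / dfy;
                double z = d;

                // Transform into the color camera frame: p' = R * p + t.
                double xc = R[0] * x + R[1] * y + R[2] * z + t[0];
                double yc = R[3] * x + R[4] * y + R[5] * z + t[1];
                double zc = R[6] * x + R[7] * y + R[8] * z + t[2];
                if (zc <= 0) continue;

                // Project with the color intrinsics, keep the nearest depth per pixel.
                int uc = (int)Math.Round(cfx * xc / zc + ccx);
                int vc = (int)Math.Round(cfy * yc / zc + ccy);
                if (uc < 0 || uc >= cw || vc < 0 || vc >= ch) continue;
                int idx = vc * cw + uc;
                if (outDepth[idx] == 0 || zc < outDepth[idx])
                    outDepth[idx] = (ushort)zc;
            }
        }
    }

    static void Main()
    {
        // Toy example: identity rotation and zero translation, so the two grids coincide.
        int dw = 4, dh = 4, cw = 4, ch = 4;
        var depth = new ushort[dw * dh];
        depth[2 * dw + 2] = 1000;
        var registered = new ushort[cw * ch];
        double[] R = { 1, 0, 0, 0, 1, 0, 0, 0, 1 };
        double[] t = { 0, 0, 0 };
        Register(depth, dw, dh, 2, 2, 2, 2, R, t, 2, 2, 2, 2, registered, cw, ch);
        Console.WriteLine(registered[2 * cw + 2]);   // prints 1000
    }
}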