/// <summary>
/// Samples the color at row <c>i</c>, column <c>j</c> of a YUV image buffer and
/// converts it to a gamma-corrected (linear-space) RGB vector.
/// </summary>
/// <param name="buffer">The TangoUnityImageData to sample.</param>
/// <param name="i">Row index, in the range [0..height).</param>
/// <param name="j">Column index, in the range [0..width).</param>
/// <returns>The linear-space RGB value at the requested pixel.</returns>
private Vector3 _GetRgbFromImageBuffer(Tango.TangoUnityImageData buffer, int i, int j)
{
    int width = (int)buffer.width;
    int height = (int)buffer.height;

    // The chroma (UV) samples start right after the full-resolution Y plane.
    int uvPlaneOffset = width * height;

    // Chroma samples are interleaved in pairs of columns; snap to the even
    // column that starts this pixel's pair.
    int pairStart = (j % 2 != 0) ? j - 1 : j;

    // Fetch the YUV components for this pixel (one chroma pair per 2x2 block).
    int yValue = buffer.data[(i * width) + j];
    int uValue = buffer.data[uvPlaneOffset + ((i / 2) * width) + pairStart + 1];
    int vValue = buffer.data[uvPlaneOffset + ((i / 2) * width) + pairStart];

    // Standard YUV -> RGB conversion.
    float r = yValue + (1.370705f * (vValue - 128));
    float g = yValue - (0.689001f * (vValue - 128)) - (0.337633f * (uValue - 128));
    float b = yValue + (1.732446f * (uValue - 128));

    Vector3 rgb = new Vector3(r / 255.0f, g / 255.0f, b / 255.0f);

    // Gamma correct each channel to linear scale, clamping negatives to zero first.
    rgb.x = Mathf.Pow(Mathf.Max(0.0f, rgb.x), 2.2f);
    rgb.y = Mathf.Pow(Mathf.Max(0.0f, rgb.y), 2.2f);
    rgb.z = Mathf.Pow(Mathf.Max(0.0f, rgb.z), 2.2f);
    return rgb;
}
/// <summary>
/// Register to get Tango image events.
///
/// NOTE: Tango image events happen on a different thread than the main
/// Unity thread.
/// </summary>
/// <param name="cameraId">Camera identifier to get events for.</param>
/// <param name="useExperimentalOverlay">If true, use the experimental video overlay.</param>
/// <param name="videoOverlayTexture">The video overlay texture to use. Only used in experimental mode.</param>
internal virtual void SetCallback(Tango.TangoEnums.TangoCameraId cameraId, bool useExperimentalOverlay,
                                  YUVTexture videoOverlayTexture)
{
    m_usingExperimentalOverlay = useExperimentalOverlay;

    if (useExperimentalOverlay)
    {
        // Experimental path requires a target texture to render into.
        if (videoOverlayTexture == null)
        {
            Debug.Log("VideoOverlayListener.SetCallback() : No Texture2D found!");
            return;
        }

        m_onUnityFrameAvailable =
            new Tango.VideoOverlayProvider.TangoService_onUnityFrameAvailable(_OnExperimentalUnityFrameAvailable);
        VideoOverlayProvider.ExperimentalConnectTexture(cameraId, videoOverlayTexture, m_onUnityFrameAvailable);
        Debug.Log("VideoOverlayListener.SetCallback() : Experimental Overlay listener hooked up");
    }
    else
    {
        // Standard path: receive raw image buffers via the onImageAvailable callback.
        m_previousImageBuffer = new TangoUnityImageData();
        m_onImageAvailable = new Tango.VideoOverlayProvider.TangoService_onImageAvailable(_OnImageAvailable);
        Tango.VideoOverlayProvider.SetCallback(cameraId, m_onImageAvailable);
    }
}
/// <summary>
/// Register to get Tango image events.
///
/// NOTE: Tango image events happen on a different thread than the main
/// Unity thread.
/// </summary>
/// <param name="cameraId">Camera identifier to get events for.</param>
/// <param name="useExperimentalOverlay">If true, use the experimental video overlay.</param>
/// <param name="videoOverlayTexture">The video overlay texture to use. Only used in experimental mode.</param>
internal virtual void SetCallback(Tango.TangoEnums.TangoCameraId cameraId, bool useExperimentalOverlay, YUVTexture videoOverlayTexture)
{
    m_usingExperimentalOverlay = useExperimentalOverlay;
    if (!useExperimentalOverlay)
    {
        // Standard path: receive raw image buffers via the onImageAvailable callback.
        m_previousImageBuffer = new TangoUnityImageData();
        m_onImageAvailable = new Tango.VideoOverlayProvider.TangoService_onImageAvailable(_OnImageAvailable);
        Tango.VideoOverlayProvider.SetCallback(cameraId, m_onImageAvailable);
    }
    else
    {
        if (videoOverlayTexture != null)
        {
            // Experimental path: the service renders directly into the provided YUV texture.
            m_onUnityFrameAvailable = new Tango.VideoOverlayProvider.TangoService_onUnityFrameAvailable(_OnExperimentalUnityFrameAvailable);
            VideoOverlayProvider.ExperimentalConnectTexture(cameraId, videoOverlayTexture, m_onUnityFrameAvailable);
            Debug.Log("VideoOverlayListener.SetCallback() : Experimental Overlay listener hooked up");
        }
        else
        {
            // Can't hook up the experimental overlay without a target texture.
            Debug.Log("VideoOverlayListener.SetCallback() : No Texture2D found!");
        }
    }
}
/// <summary>
/// INTERNAL USE: Fill out most recent color data for Tango emulation.
/// NOTE: Does not emulate first line of metadata in color buffer.
/// </summary>
/// <param name="colorImageData">TangoUnityImageData structure to update with emulated data.</param>
internal static void GetTangoEmulation(TangoUnityImageData colorImageData)
{
    // YV12-style layout: a full-resolution Y plane followed by half-size chroma data.
    int yDataSize = EMULATED_CAMERA_WIDTH * EMULATED_CAMERA_HEIGHT;
    int cbCrDataSize = yDataSize / 2;
    int dataSize = yDataSize + cbCrDataSize;

    // (Re)allocate the output buffer only when its size is wrong.
    if (colorImageData.data == null || colorImageData.data.Length != dataSize)
    {
        colorImageData.data = new byte[dataSize];
    }

    // Render the emulated camera image through the Y and CbCr filter materials into
    // temporary packed render textures (each ARGB32 pixel carries four sample bytes).
    RenderTexture yRT = RenderTexture.GetTemporary(EMULATED_CAMERA_PACKED_WIDTH, EMULATED_CAMERA_PACKED_Y_HEIGHT, 0, RenderTextureFormat.ARGB32);
    RenderTexture cbCrRT = RenderTexture.GetTemporary(EMULATED_CAMERA_PACKED_WIDTH, EMULATED_CAMERA_PACKED_UV_HEIGHT, 0, RenderTextureFormat.ARGB32);

    // NOTE: ReadPixels reads from the active render target, which Graphics.Blit
    // sets to its destination — each ReadPixels must immediately follow its Blit.
    Graphics.Blit(m_emulatedColorRenderTexture, yRT, m_yuvFilterY);
    m_emulationByteBufferCaptureTextures[0].ReadPixels(new Rect(0, 0, yRT.width, yRT.height), 0, 0);
    Graphics.Blit(m_emulatedColorRenderTexture, cbCrRT, m_yuvFilterCbCr);
    m_emulationByteBufferCaptureTextures[1].ReadPixels(new Rect(0, 0, cbCrRT.width, cbCrRT.height), 0, 0);

    // Unpack the Y plane: each Color32 holds four consecutive Y bytes (r,g,b,a).
    Color32[] colors = m_emulationByteBufferCaptureTextures[0].GetPixels32();
    for (int i = 0; i < yDataSize / 4; i++)
    {
        colorImageData.data[(i * 4)] = colors[i].r;
        colorImageData.data[(i * 4) + 1] = colors[i].g;
        colorImageData.data[(i * 4) + 2] = colors[i].b;
        colorImageData.data[(i * 4) + 3] = colors[i].a;
    }

    // Chroma data starts right after the unpacked Y plane.
    int startOffset = colors.Length * 4;

    // Unpack the chroma plane the same way.
    colors = m_emulationByteBufferCaptureTextures[1].GetPixels32();
    for (int i = 0; i < cbCrDataSize / 4; i++)
    {
        colorImageData.data[(i * 4) + startOffset] = colors[i].r;
        colorImageData.data[(i * 4) + startOffset + 1] = colors[i].g;
        colorImageData.data[(i * 4) + startOffset + 2] = colors[i].b;
        colorImageData.data[(i * 4) + startOffset + 3] = colors[i].a;
    }

    RenderTexture.ReleaseTemporary(yRT);
    RenderTexture.ReleaseTemporary(cbCrRT);

    // Fill in the buffer metadata to match the emulated camera.
    colorImageData.format = TangoEnums.TangoImageFormatType.TANGO_HAL_PIXEL_FORMAT_YV12;
    colorImageData.width = EMULATED_CAMERA_WIDTH;
    colorImageData.height = EMULATED_CAMERA_HEIGHT;
    colorImageData.stride = EMULATED_CAMERA_WIDTH;
    colorImageData.timestamp = m_lastColorEmulationTime;
}
/// <summary>
/// It's backwards, but fill tango image buffer data with already-emulated data.
/// It is the responsibility of the caller to GC pin/free the colorImageData's data array.
/// </summary>
/// <returns>Emulated raw color buffer.</returns>
/// <param name="colorImageData">Emulated color buffer data.</param>
/// <param name="pinnedColorBuffer">Pinned array of imageBuffer.data.</param>
private static TangoImageBuffer _GetEmulatedTangoImageBuffer(TangoUnityImageData colorImageData, GCHandle pinnedColorBuffer)
{
    // Mirror every field of the managed buffer into the native-style struct,
    // pointing its data at the caller-pinned byte array.
    TangoImageBuffer imageBuffer = new TangoImageBuffer
    {
        data = pinnedColorBuffer.AddrOfPinnedObject(),
        width = colorImageData.width,
        height = colorImageData.height,
        stride = colorImageData.stride,
        format = colorImageData.format,
        timestamp = colorImageData.timestamp,
        frame_number = colorImageData.frame_number
    };
    return imageBuffer;
}
/// <summary>
/// Handles a new camera image: runs QR-code marker detection on it and, when at
/// least one marker is found, snaps the QR-code plane to the first marker's pose
/// and sizes it from the marker's 3D corner points.
/// </summary>
/// <param name="cameraId">Camera that produced the image.</param>
/// <param name="imageBuffer">Tango camera image buffer.</param>
public void OnTangoImageAvailableEventHandler(Tango.TangoEnums.TangoCameraId cameraId,
                                              Tango.TangoUnityImageData imageBuffer)
{
    TangoSupport.DetectMarkers(imageBuffer, cameraId, TangoSupport.MarkerType.QRCODE, MARKER_SIZE, markerList);

    if (markerList.Count == 0)
    {
        return;
    }

    // Only the first detected marker drives the plane's pose.
    TangoSupport.Marker marker = markerList[0];
    qrcodePlane.transform.position = marker.m_translation;
    qrcodePlane.transform.rotation = marker.m_orientation;

    // Edge vectors of the marker: P0->P3 spans one side, P0->P1 the other.
    Vector3 heightEdge = marker.m_corner3DP3 - marker.m_corner3DP0;
    Vector3 widthEdge = marker.m_corner3DP1 - marker.m_corner3DP0;

    // NOTE(review): the pose above is applied to `qrcodePlane` but the scale is
    // applied to a different field, `plane` — confirm this is intentional (e.g.
    // `plane` is a child visual) and not a typo for `qrcodePlane`.
    plane.transform.localScale = new Vector3(widthEdge.magnitude, 1, heightEdge.magnitude) * 0.1f;
}
/// <summary>
/// Stop getting Tango image or texture callbacks.
/// </summary>
internal static void Reset()
{
    // Only touch tango_client_api if a callback was actually registered, to
    // avoid calling into it before the correct library is loaded.
    bool anyNativeCallbackRegistered =
        m_onImageAvailable != null ||
        m_onTextureAvailable != null ||
        m_onYUVTextureAvailable != null;

    if (anyNativeCallbackRegistered)
    {
        VideoOverlayProvider.ClearCallback(COLOR_CAMERA_ID);
    }

    // Drop the native-callback delegates.
    m_onImageAvailable = null;
    m_onTextureAvailable = null;
    m_onYUVTextureAvailable = null;

    // Reset buffered frame state and pending-event flags.
    m_previousImageBuffer = new TangoUnityImageData();
    m_shouldSendTextureMethodEvent = false;
    m_shouldSendByteBufferMethodEvent = false;
    m_shouldSendYUVTextureIdMethodEvent = false;

    // Drop the managed event subscribers.
    m_onTangoImageAvailable = null;
    m_onTangoCameraTextureAvailable = null;
    m_onTangoYUVTextureAvailable = null;
    m_onTangoImageMultithreadedAvailable = null;
}
/// <summary>
/// It's backwards, but fill tango image buffer data with already-emulated data.
/// It is the responsibility of the caller to GC pin/free the colorImageData's data array.
/// </summary>
/// <param name="colorImageData">Emulated color buffer data.</param>
/// <param name="pinnedColorBuffer">Pinned array of imageBuffer.data.</param>
/// <param name="image">TangoImage populated with emulator image data.</param>
/// <param name="cameraMetadata">Camera metadata for emulated image.</param>
private static void _GetEmulatedTangoImage(TangoUnityImageData colorImageData, GCHandle pinnedColorBuffer,
                                           out TangoImage image, out TangoCameraMetadata cameraMetadata)
{
    image = new TangoImage();
    cameraMetadata = new TangoCameraMetadata();

    // BUG FIX: the dimensions must be assigned BEFORE computing the plane
    // offsets below.  The original code computed m_planeData2 from the
    // struct's zeroed defaults (m_width == m_height == 0), which placed the
    // chroma plane pointers at the start of the buffer instead of after the
    // luma plane.
    image.m_width = colorImageData.width;
    image.m_height = colorImageData.height;

    // Plane 0 (luma) starts at the pinned buffer; the chroma data follows the
    // full-resolution luma plane, with plane 1 one byte past plane 2 as in the
    // original interleaved layout.
    image.m_planeData0 = pinnedColorBuffer.AddrOfPinnedObject();
    image.m_planeData2 = new IntPtr(pinnedColorBuffer.AddrOfPinnedObject().ToInt64() +
                                    (image.m_width * image.m_height));
    image.m_planeData1 = new IntPtr(image.m_planeData2.ToInt64() + 1);

    // The existing system assumes the whole image has the same row stride across all planes.
    int stride = (int)colorImageData.stride;
    image.m_planeRowStride0 = stride;
    image.m_planeRowStride1 = stride;
    image.m_planeRowStride2 = stride;
    image.m_planeRowStride3 = stride;

    image.m_format = colorImageData.format;
    image.m_timestampNs = Convert.ToInt64(colorImageData.timestamp / Common.SECS_PER_NANOSECS);

    cameraMetadata.m_frameNumber = colorImageData.frame_number;
    cameraMetadata.m_timestampNs = image.m_timestampNs;
}
/// <summary>
/// Computes the spherical harmonic diffuse coefficients for a given
/// TangoImageBuffer and uploads them (plus an exposure estimate) to shader
/// globals. No-op when environmental lighting is disabled.
/// </summary>
/// <param name="imageBuffer">The TangoImageBuffer to sample.</param>
private void _ComputeDiffuseCoefficients(TangoUnityImageData imageBuffer)
{
    if (!m_enableEnvironmentalLighting)
    {
        return;
    }

    // Compute SH coefficients as a weighted sum over the precomputed sample directions.
    float weight = 4.0f * Mathf.PI;
    int numSamples = m_samples.Length;
    int numCoefficients = m_coefficients.Length;
    for (int coeffIdx = 0; coeffIdx < numCoefficients; ++coeffIdx)
    {
        m_coefficients[coeffIdx] = Vector3.zero;
    }

    for (int sampleIdx = 0; sampleIdx < numSamples; ++sampleIdx)
    {
        float theta = m_samples[sampleIdx].sph.x;
        float phi = m_samples[sampleIdx].sph.y;

        // Normalize between 0 and 1.
        float x = 1.0f - Mathf.Pow(Mathf.Cos(theta / 2.0f), 2.0f);
        float y = phi / Mathf.PI / 2.0f;

        // BUG FIX: clamp to the last valid pixel.  When x or y reaches exactly
        // 1.0 the unclamped index equals height/width, which reads past the end
        // of the image buffer in _GetRgbFromImageBuffer.
        int i = Mathf.Min((int)(imageBuffer.height * x), (int)imageBuffer.height - 1);
        int j = Mathf.Min((int)(imageBuffer.width * y), (int)imageBuffer.width - 1);

        Vector3 rgb = _GetRgbFromImageBuffer(imageBuffer, i, j);
        for (int coeffIdx = 0; coeffIdx < numCoefficients; ++coeffIdx)
        {
            m_coefficients[coeffIdx] += rgb * m_samples[sampleIdx].coeff[coeffIdx];
        }
    }

    // Divide the result by weight and number of samples.
    float factor = weight / numSamples;
    for (int coeffIdx = 0; coeffIdx < numCoefficients; ++coeffIdx)
    {
        m_coefficients[coeffIdx] *= factor;
    }

    // Upload one SH matrix per color channel and an exposure estimate.
    Shader.SetGlobalMatrix("_TangoLightingSphericalHarmonicMatrixR", _SetShmMatrix(0));
    Shader.SetGlobalMatrix("_TangoLightingSphericalHarmonicMatrixG", _SetShmMatrix(1));
    Shader.SetGlobalMatrix("_TangoLightingSphericalHarmonicMatrixB", _SetShmMatrix(2));
    Shader.SetGlobalFloat("_TangoLightingExposure", m_coefficients[0].magnitude);
}
/// <summary>
/// Stop getting Tango image or texture callbacks.
/// </summary>
internal static void Reset()
{
    // Avoid calling into tango_client_api before the correct library is loaded:
    // only clear the native callback if one was actually registered.
    if (m_onImageAvailable != null || m_onTextureAvailable != null || m_onYUVTextureAvailable != null)
    {
        VideoOverlayProvider.ClearCallback(COLOR_CAMERA_ID);
    }

    // Drop the native-callback delegates.
    m_onImageAvailable = null;
    m_onTextureAvailable = null;
    m_onYUVTextureAvailable = null;

    // Reset buffered frame state and pending-event flags.
    m_previousImageBuffer = new TangoUnityImageData();
    m_shouldSendTextureMethodEvent = false;
    m_shouldSendByteBufferMethodEvent = false;
    m_shouldSendYUVTextureIdMethodEvent = false;

    // Drop the managed event subscribers.
    m_onTangoImageAvailable = null;
    m_onTangoCameraTextureAvailable = null;
    m_onTangoYUVTextureAvailable = null;
    m_onTangoImageMultithreadedAvailable = null;
}
/// <summary>
/// Register to get Tango image events for getting the texture byte buffer callback.
///
/// NOTE: Tango image events happen on a different thread than the main
/// Unity thread.
/// </summary>
/// <param name="cameraId">Camera identifier to get events for.</param>
internal virtual void SetCallbackByteBufferMethod(Tango.TangoEnums.TangoCameraId cameraId)
{
    // Create the delegate first, then reset the frame-comparison buffer, then
    // hand the delegate to the service.
    m_onImageAvailable = new Tango.VideoOverlayProvider.TangoService_onImageAvailable(_OnImageAvailable);
    m_previousImageBuffer = new TangoUnityImageData();
    Tango.VideoOverlayProvider.SetCallback(cameraId, m_onImageAvailable);
}
/// <summary>
/// This will be called when a new frame is available from the camera.
/// Recomputes the spherical-harmonic diffuse lighting coefficients from the new frame.
/// </summary>
/// <param name="cameraId">Camera identifier.</param>
/// <param name="imageBuffer">Tango camera image buffer.</param>
public void OnTangoImageAvailableEventHandler(TangoEnums.TangoCameraId cameraId, TangoUnityImageData imageBuffer)
{
    _ComputeDiffuseCoefficients(imageBuffer);
}
/// <summary>
/// Register to get Tango image events for getting the texture byte buffer callback.
///
/// NOTE: Tango image events happen on a different thread than the main
/// Unity thread.
/// </summary>
/// <param name="cameraId">Camera identifier to get events for.</param>
internal virtual void SetCallbackByteBufferMethod(Tango.TangoEnums.TangoCameraId cameraId)
{
    // Reset the frame-comparison buffer, then register the raw image-buffer
    // callback with the Tango service.
    m_previousImageBuffer = new TangoUnityImageData();
    m_onImageAvailable = new Tango.VideoOverlayProvider.TangoService_onImageAvailable(_OnImageAvailable);
    Tango.VideoOverlayProvider.SetCallback(cameraId, m_onImageAvailable);
}
/// <summary>
/// Register to get Tango image events for getting the texture byte buffer callback.
///
/// NOTE: Tango image events happen on a different thread than the main
/// Unity thread.
/// </summary>
internal void SetCallbackByteBufferMethod()
{
    // Fresh comparison buffer, fresh delegate, then register against the
    // color camera.
    m_onImageAvailable = new VideoOverlayProvider.TangoService_onImageAvailable(_OnImageAvailable);
    m_previousImageBuffer = new TangoUnityImageData();
    VideoOverlayProvider.SetCallback(COLOR_CAMERA_ID, m_onImageAvailable);
}
/// <summary>
/// It's backwards, but fill tango image buffer data with already-emulated data.
/// It is the responsibility of the caller to GC pin/free the colorImageData's data array.
/// </summary>
/// <returns>Emulated raw color buffer.</returns>
/// <param name="colorImageData">Emulated color buffer data.</param>
/// <param name="pinnedColorBuffer">Pinned array of imageBuffer.data.</param>
private static TangoImageBuffer _GetEmulatedTangoImageBuffer(TangoUnityImageData colorImageData, GCHandle pinnedColorBuffer)
{
    // Copy each field of the managed buffer into the native-style struct; the
    // data pointer targets the caller-pinned byte array.
    TangoImageBuffer buf = new TangoImageBuffer();
    buf.data = pinnedColorBuffer.AddrOfPinnedObject();
    buf.width = colorImageData.width;
    buf.height = colorImageData.height;
    buf.stride = colorImageData.stride;
    buf.format = colorImageData.format;
    buf.timestamp = colorImageData.timestamp;
    buf.frame_number = colorImageData.frame_number;
    return buf;
}
/// <summary>
/// Fill out <c>colorCameraData</c> with emulated values from Tango.
/// </summary>
/// <param name="colorCameraData">The image data to fill out.</param>
private static void _FillEmulatedColorCameraData(TangoUnityImageData colorCameraData)
{
    // Delegates to the video overlay emulation, which populates pixel data and
    // buffer metadata in place.
    VideoOverlayProvider.GetTangoEmulation(colorCameraData);
}
/// <summary>
/// Logs the source camera and pixel format of each image that arrives.
/// </summary>
/// <param name="cameraId">Camera identifier.</param>
/// <param name="imageBuffer">Tango camera image buffer.</param>
public void OnTangoImageAvailableEventHandler(TangoEnums.TangoCameraId cameraId, TangoUnityImageData imageBuffer)
{
    string message = "IMAGE DATA: " + cameraId + "; " + imageBuffer.format;
    Debug.Log(message);
}
/// <summary>
/// Detect one or more markers in the input image.
/// </summary>
/// <param name="imageBuffer">
/// The input image buffer.
/// </param>
/// <param name="cameraId">
/// Camera that is used for detecting markers, can be TangoEnums.TangoCameraId.TANGO_CAMERA_FISHEYE or
/// TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR.
/// </param>
/// <param name="markerType">
/// Target marker's type. Current support marker types are QR marker and Alvar marker.
/// </param>
/// <param name="markerSize">
/// Physical size of marker's length.
/// </param>
/// <param name="markers">
/// The returned marker list.
/// </param>
/// <returns>
/// <c>true</c> on success; <c>false</c> on invalid input or when the underlying
/// detection call fails.
/// </returns>
public static bool DetectMarkers(TangoUnityImageData imageBuffer, TangoEnums.TangoCameraId cameraId,
                                 MarkerType markerType, double markerSize, List<Marker> markers)
{
    if (markers == null)
    {
        Debug.Log("markers is null. " + Environment.StackTrace);
        return false;
    }

    // Clear any existing marker
    markers.Clear();

    // Copy the managed image into a native-compatible buffer, pinning the byte
    // array so the unmanaged detector can read it.
    TangoImageBuffer buffer = new TangoImageBuffer();
    GCHandle gchandle = GCHandle.Alloc(imageBuffer.data, GCHandleType.Pinned);
    IntPtr ptr = gchandle.AddrOfPinnedObject();
    buffer.data = ptr;
    buffer.format = imageBuffer.format;
    buffer.frame_number = imageBuffer.frame_number;
    buffer.height = imageBuffer.height;
    buffer.stride = imageBuffer.stride;
    buffer.timestamp = imageBuffer.timestamp;
    buffer.width = imageBuffer.width;

    // Get the device pose at the time the image was captured.
    TangoPoseData poseData = new TangoPoseData();
    TangoCoordinateFramePair pair;
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
    PoseProvider.GetPoseAtTime(poseData, buffer.timestamp, pair);

    APIMarkerList rawAPIMarkerList = new APIMarkerList();
    APIMarkerParam rawMarkerParam = new APIMarkerParam(markerType, markerSize);

    int ret = TangoSupportAPI.TangoSupport_detectMarkers(ref buffer, cameraId,
                                                         ref poseData.translation,
                                                         ref poseData.orientation,
                                                         ref rawMarkerParam,
                                                         ref rawAPIMarkerList);
    gchandle.Free();

    if (ret != Common.ErrorType.TANGO_SUCCESS)
    {
        return false;
    }

    if (rawAPIMarkerList.markerCount != 0)
    {
        List<APIMarker> apiMarkers = new List<APIMarker>();
        MarshallingHelper.MarshalUnmanagedStructArrayToList<TangoSupport.APIMarker>(
            rawAPIMarkerList.markers,
            rawAPIMarkerList.markerCount,
            apiMarkers);

        for (int i = 0; i < apiMarkers.Count; ++i)
        {
            APIMarker apiMarker = apiMarkers[i];
            Marker marker = new Marker();
            marker.m_type = apiMarker.m_type;
            marker.m_timestamp = apiMarker.m_timestamp;
            marker.m_content = apiMarker.m_content;

            // Convert 2D corner points from pixel space to UV space.
            marker.m_corner2DP0.x = apiMarker.m_corner2DP0.x / buffer.width;
            marker.m_corner2DP0.y = apiMarker.m_corner2DP0.y / buffer.height;
            marker.m_corner2DP1.x = apiMarker.m_corner2DP1.x / buffer.width;
            marker.m_corner2DP1.y = apiMarker.m_corner2DP1.y / buffer.height;
            marker.m_corner2DP2.x = apiMarker.m_corner2DP2.x / buffer.width;
            marker.m_corner2DP2.y = apiMarker.m_corner2DP2.y / buffer.height;
            marker.m_corner2DP3.x = apiMarker.m_corner2DP3.x / buffer.width;
            marker.m_corner2DP3.y = apiMarker.m_corner2DP3.y / buffer.height;

            // Convert 3D corner points from Start of Service space to Unity World space.
            marker.m_corner3DP0 = GetMarkerInUnitySpace(apiMarker.m_corner3DP0);
            marker.m_corner3DP1 = GetMarkerInUnitySpace(apiMarker.m_corner3DP1);
            marker.m_corner3DP2 = GetMarkerInUnitySpace(apiMarker.m_corner3DP2);
            marker.m_corner3DP3 = GetMarkerInUnitySpace(apiMarker.m_corner3DP3);

            // Convert pose from Start of Service to Unity World space.
            Vector3 translation = new Vector3(
                (float)apiMarker.m_translation.x,
                (float)apiMarker.m_translation.y,
                (float)apiMarker.m_translation.z);
            Quaternion orientation = new Quaternion(
                (float)apiMarker.m_rotation.x,
                (float)apiMarker.m_rotation.y,
                (float)apiMarker.m_rotation.z,
                (float)apiMarker.m_rotation.w);

            Matrix4x4 ss_T_marker = Matrix4x4.TRS(translation, orientation, Vector3.one);

            // Note that UNITY_WORLD_T_START_SERVICE is involutory matrix. The actually transform
            // we wanted to multiply on the right hand side is START_SERVICE_T_UNITY_WORLD.
            Matrix4x4 uw_T_u_marker = TangoSupport.UNITY_WORLD_T_START_SERVICE *
                ss_T_marker * TangoSupport.UNITY_WORLD_T_START_SERVICE;

            marker.m_translation = uw_T_u_marker.GetColumn(3);
            marker.m_orientation = Quaternion.LookRotation(uw_T_u_marker.GetColumn(2), uw_T_u_marker.GetColumn(1));

            // Add the marker to the output list
            markers.Add(marker);
        }
    }

    TangoSupportAPI.TangoSupport_freeMarkerList(ref rawAPIMarkerList);

    // BUG FIX: the success path previously fell through to `return false`, so
    // the method could never report success to callers that check the result.
    return true;
}
/// <summary>
/// Fill out <c>colorCameraData</c> with emulated values from Tango.
/// </summary>
/// <param name="colorCameraData">The image data to fill out.</param>
private static void _FillEmulatedColorCameraData(TangoUnityImageData colorCameraData)
{
    // Thin wrapper: the video overlay emulation updates the buffer in place.
    VideoOverlayProvider.GetTangoEmulation(colorCameraData);
}
/// <summary>
/// INTERNAL USE: Fill out most recent color data for Tango emulation.
/// NOTE: Does not emulate first line of metadata in color buffer.
/// </summary>
/// <param name="colorImageData">TangoUnityImageData structure to update with emulated data.</param>
internal static void GetTangoEmulation(TangoUnityImageData colorImageData)
{
    // YV12-style layout: full-resolution Y plane, then half-size chroma data.
    int yDataSize = EMULATED_CAMERA_WIDTH * EMULATED_CAMERA_HEIGHT;
    int cbCrDataSize = yDataSize / 2;
    int dataSize = yDataSize + cbCrDataSize;

    // Reallocate the output buffer only when it is missing or mis-sized.
    if (colorImageData.data == null || colorImageData.data.Length != dataSize)
    {
        colorImageData.data = new byte[dataSize];
    }

    // Render the emulated camera image through the Y and CbCr filter materials
    // into temporary packed render textures (four sample bytes per ARGB32 pixel).
    RenderTexture yRT = RenderTexture.GetTemporary(EMULATED_CAMERA_PACKED_WIDTH, EMULATED_CAMERA_PACKED_Y_HEIGHT, 0, RenderTextureFormat.ARGB32);
    RenderTexture cbCrRT = RenderTexture.GetTemporary(EMULATED_CAMERA_PACKED_WIDTH, EMULATED_CAMERA_PACKED_UV_HEIGHT, 0, RenderTextureFormat.ARGB32);

    // Each ReadPixels must directly follow its Blit: ReadPixels captures from
    // the active render target, which Graphics.Blit points at its destination.
    Graphics.Blit(m_emulatedColorRenderTexture, yRT, m_yuvFilterY);
    m_emulationByteBufferCaptureTextures[0].ReadPixels(new Rect(0, 0, yRT.width, yRT.height), 0, 0);
    Graphics.Blit(m_emulatedColorRenderTexture, cbCrRT, m_yuvFilterCbCr);
    m_emulationByteBufferCaptureTextures[1].ReadPixels(new Rect(0, 0, cbCrRT.width, cbCrRT.height), 0, 0);

    // Unpack the Y plane: each Color32 holds four consecutive Y bytes (r,g,b,a).
    Color32[] colors = m_emulationByteBufferCaptureTextures[0].GetPixels32();
    for (int i = 0; i < yDataSize / 4; i++)
    {
        colorImageData.data[(i * 4)] = colors[i].r;
        colorImageData.data[(i * 4) + 1] = colors[i].g;
        colorImageData.data[(i * 4) + 2] = colors[i].b;
        colorImageData.data[(i * 4) + 3] = colors[i].a;
    }

    // Chroma bytes start right after the unpacked Y plane.
    int startOffset = colors.Length * 4;

    // Unpack the chroma plane the same way.
    colors = m_emulationByteBufferCaptureTextures[1].GetPixels32();
    for (int i = 0; i < cbCrDataSize / 4; i++)
    {
        colorImageData.data[(i * 4) + startOffset] = colors[i].r;
        colorImageData.data[(i * 4) + startOffset + 1] = colors[i].g;
        colorImageData.data[(i * 4) + startOffset + 2] = colors[i].b;
        colorImageData.data[(i * 4) + startOffset + 3] = colors[i].a;
    }

    RenderTexture.ReleaseTemporary(yRT);
    RenderTexture.ReleaseTemporary(cbCrRT);

    // Fill in the buffer metadata to match the emulated camera.
    colorImageData.format = TangoEnums.TangoImageFormatType.TANGO_HAL_PIXEL_FORMAT_YV12;
    colorImageData.width = EMULATED_CAMERA_WIDTH;
    colorImageData.height = EMULATED_CAMERA_HEIGHT;
    colorImageData.stride = EMULATED_CAMERA_WIDTH;
    colorImageData.timestamp = m_lastColorEmulationTime;
}
/// <summary>
/// Records each non-null incoming camera image buffer in the videoImages list.
/// </summary>
/// <param name="cameraId">Camera identifier.</param>
/// <param name="imageBuffer">Tango camera image buffer.</param>
public void OnTangoImageAvailableEventHandler(Tango.TangoEnums.TangoCameraId cameraId,
                                              Tango.TangoUnityImageData imageBuffer)
{
    // Ignore null frames; keep everything else.
    if (imageBuffer == null)
    {
        return;
    }

    videoImages.Add(imageBuffer);
}