public static void GetPlaneDataFast(ref IntPtr pixels, XRCpuImage image)
{
    XRCpuImage.Plane plane = image.GetPlane(0); // use the Y plane
    int width = image.width, height = image.height;

    if (width == plane.rowStride)
    {
        // Rows are tightly packed: hand back a pointer to the native buffer directly.
        unsafe
        {
            pixels = (IntPtr)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(plane.data);
        }
    }
    else
    {
        // Rows are padded: copy row by row into a tightly packed managed buffer.
        unsafe
        {
            ulong handle;
            byte[] data = new byte[width * height];
            byte* srcPtr = (byte*)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(plane.data);
            byte* dstPtr = (byte*)UnsafeUtility.PinGCArrayAndGetDataAddress(data, out handle);
            if (width > 0 && height > 0)
            {
                UnsafeUtility.MemCpyStride(dstPtr, width, srcPtr, plane.rowStride, width, height);
            }
            pixels = (IntPtr)dstPtr;
            // Note: releasing the handle unpins 'data', so the returned pointer is only
            // safe to use before the garbage collector moves the array.
            UnsafeUtility.ReleaseGCObject(handle);
        }
    }
}
public static void GetPlaneData(out byte[] pixels, XRCpuImage image)
{
    XRCpuImage.Plane plane = image.GetPlane(0); // use the Y plane
    int width = image.width, height = image.height;
    pixels = new byte[width * height];

    if (width == plane.rowStride)
    {
        // Rows are tightly packed: copy the whole plane in one call.
        plane.data.CopyTo(pixels);
    }
    else
    {
        // Rows are padded: copy row by row, skipping the padding bytes.
        unsafe
        {
            ulong handle;
            byte* srcPtr = (byte*)NativeArrayUnsafeUtility.GetUnsafePtr(plane.data);
            byte* dstPtr = (byte*)UnsafeUtility.PinGCArrayAndGetDataAddress(pixels, out handle);
            if (width > 0 && height > 0)
            {
                UnsafeUtility.MemCpyStride(dstPtr, width, srcPtr, plane.rowStride, width, height);
            }
            UnsafeUtility.ReleaseGCObject(handle);
        }
    }
}
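A minimal sketch of how helpers like GetPlaneData might be driven from an ARCameraManager frame event; the component name, the serialized field, and the CameraImageUtils holder class for the helper are assumptions, not part of the snippets above.

using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

public class LuminancePlaneReader : MonoBehaviour
{
    [SerializeField] ARCameraManager _cameraManager; // assumed to be assigned in the Inspector
    byte[] _yPixels;

    void OnEnable()  => _cameraManager.frameReceived += OnFrameReceived;
    void OnDisable() => _cameraManager.frameReceived -= OnFrameReceived;

    void OnFrameReceived(ARCameraFrameEventArgs args)
    {
        if (!_cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
            return;

        using (image) // the CPU image wraps native memory and must be disposed
        {
            // CameraImageUtils is a hypothetical static class holding the helper shown above.
            CameraImageUtils.GetPlaneData(out _yPixels, image);
        }
    }
}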
private void UpdateRawImage(ref Texture2D texture, XRCpuImage cpuImage)
{
    if (texture == null || texture.width != cpuImage.width || texture.height != cpuImage.height)
    {
        texture = new Texture2D(cpuImage.width, cpuImage.height, TextureFormat.RGB565, false);
    }

    // R16 and RGB565 are both 16 bits per pixel, so the converted data fills the texture's
    // raw buffer exactly; the 16-bit values are then displayed reinterpreted as RGB565.
    var conversionParams = new XRCpuImage.ConversionParams(cpuImage, TextureFormat.R16);
    var rawTextureData = texture.GetRawTextureData<byte>();
    cpuImage.Convert(conversionParams, rawTextureData);
    texture.Apply();
}
private static void UpdateRawImage(Texture2D texture, XRCpuImage cpuImage)
{
    var conversionParams = new XRCpuImage.ConversionParams(cpuImage, cpuImage.format.AsTextureFormat(),
        XRCpuImage.Transformation.MirrorY);
    var rawTextureData = texture.GetRawTextureData<byte>();

    Debug.Assert(
        rawTextureData.Length == cpuImage.GetConvertedDataSize(conversionParams.outputDimensions,
            conversionParams.outputFormat),
        "The Texture2D is not the same size as the converted data.");

    cpuImage.Convert(conversionParams, rawTextureData);
    texture.Apply();
}
private IEnumerator ProcessImage(XRCpuImage image, Action<Texture2D> callback)
{
    var request = image.ConvertAsync(new XRCpuImage.ConversionParams()
    {
        inputRect = new RectInt(0, 0, image.width, image.height),
        outputDimensions = new Vector2Int(image.width, image.height),
        outputFormat = TextureFormat.RGBA32,
        transformation = XRCpuImage.Transformation.MirrorX
    });

    while (!request.status.IsDone())
    {
        yield return null;
    }

    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        request.Dispose();
        yield break;
    }

    var rawData = request.GetData<byte>();

    if (_receivedTexture == null)
    {
        _receivedTexture = new Texture2D(
            request.conversionParams.outputDimensions.x,
            request.conversionParams.outputDimensions.y,
            request.conversionParams.outputFormat,
            false);
    }

    _receivedTexture.LoadRawTextureData(rawData);
    _receivedTexture.Apply();

    // convert to rgba texture
    if (_rbgaTexture == null)
    {
        _rbgaTexture = new Texture2D(_receivedTexture.width, _receivedTexture.height, TextureFormat.RGBA32, false);
    }
    _rbgaTexture.SetPixels(_receivedTexture.GetPixels());
    _rbgaTexture.Apply();

    _debugMaterial.mainTexture = _rbgaTexture;
    callback.Invoke(_rbgaTexture);
    request.Dispose();
}
IEnumerator Process(XRCpuImage image)
{
    _isBusy = true; // set flag to prevent multiple calls to Process

    var request = image.ConvertAsync(new XRCpuImage.ConversionParams
    {
        inputRect = new RectInt(0, 0, image.width, image.height),
        // downsample in half
        outputDimensions = new Vector2Int(image.width / 2, image.height / 2),
        outputFormat = TextureFormat.RGB24,
        transformation = XRCpuImage.Transformation.None
    });

    // wait for it to finish
    while (!request.status.IsDone())
    {
        yield return null;
    }

    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        Debug.Log("Image is done, but not ready.");
        request.Dispose();
        _isBusy = false; // set flag to allow new calls to Process
        yield break;
    }

    var rawData = request.GetData<byte>();

    if (_texture == null)
    {
        _texture = new Texture2D(
            request.conversionParams.outputDimensions.x,
            request.conversionParams.outputDimensions.y,
            request.conversionParams.outputFormat,
            false);
    }

    // To texture, to PNG, to Base64
    _texture.LoadRawTextureData(rawData);
    _texture.Apply();
    string _base64 = System.Convert.ToBase64String(_texture.EncodeToPNG());
    request.Dispose();

    PublishBase64(_base64);
    _isBusy = false; // set flag to allow new calls to Process
}
void UpdateRawImage(Texture2D texture, XRCpuImage cpuImage)
{
    // For display, we need to mirror about the vertical axis.
    var conversionParams = new XRCpuImage.ConversionParams(cpuImage, cpuImage.format.AsTextureFormat(),
        XRCpuImage.Transformation.MirrorY);

    // Get the Texture2D's underlying pixel buffer.
    var rawTextureData = texture.GetRawTextureData<byte>();

    // Make sure the destination buffer is large enough to hold the converted data (they should be the same size).
    Debug.Assert(
        rawTextureData.Length == cpuImage.GetConvertedDataSize(conversionParams.outputDimensions,
            conversionParams.outputFormat),
        "The Texture2D is not the same size as the converted data.");

    // Perform the conversion.
    cpuImage.Convert(conversionParams, rawTextureData);

    // "Apply" the new pixel data to the Texture2D.
    texture.Apply();
}
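A possible caller for UpdateRawImage, sketched under the assumption that the component holds a serialized ARCameraManager (_cameraManager), a Texture2D field (_texture), and a UI RawImage (_rawImage); allocating the texture with the CPU image's own dimensions and format keeps the raw buffer exactly the size the assert above expects.

void OnFrameReceived(ARCameraFrameEventArgs args)
{
    if (!_cameraManager.TryAcquireLatestCpuImage(out XRCpuImage cpuImage))
        return;

    using (cpuImage)
    {
        // Allocate once, matching the CPU image's size and format.
        if (_texture == null)
        {
            _texture = new Texture2D(cpuImage.width, cpuImage.height,
                cpuImage.format.AsTextureFormat(), false);
        }

        UpdateRawImage(_texture, cpuImage);
        _rawImage.texture = _texture; // e.g. a UnityEngine.UI.RawImage used for display
    }
}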
/// <summary>
/// Converts a new CPU image into byte buffers and caches to be accessed later.
/// </summary>
/// <param name="image">The new CPU image to process.</param>
private void OnImageAvailable(XRCpuImage image)
{
    if (_cameraBufferY == null || _cameraBufferU == null || _cameraBufferV == null)
    {
        _cameraWidth = image.width;
        _cameraHeight = image.height;
        _rowStrideY = image.GetPlane(0).rowStride;
        _rowStrideUV = image.GetPlane(1).rowStride;
        _pixelStrideUV = image.GetPlane(1).pixelStride;
        _cameraBufferY = new byte[image.GetPlane(0).data.Length];
        _cameraBufferU = new byte[image.GetPlane(1).data.Length];
        _cameraBufferV = new byte[image.GetPlane(2).data.Length];
    }

    image.GetPlane(0).data.CopyTo(_cameraBufferY);
    image.GetPlane(1).data.CopyTo(_cameraBufferU);
    image.GetPlane(2).data.CopyTo(_cameraBufferV);
}
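The cached planes can later be sampled with the stored strides; below is a minimal sketch, assuming the Android YUV_420_888 layout these buffers come from and BT.601 coefficients, of reading one pixel from the cached buffers and converting it to RGB. The method name is an assumption.

private Color32 GetCachedPixel(int x, int y)
{
    // Y is full resolution; U and V are subsampled 2x2 and addressed with their own strides.
    int yIndex  = y * _rowStrideY + x;
    int uvIndex = (y / 2) * _rowStrideUV + (x / 2) * _pixelStrideUV;

    float Y = _cameraBufferY[yIndex];
    float U = _cameraBufferU[uvIndex] - 128f;
    float V = _cameraBufferV[uvIndex] - 128f;

    byte r = (byte)Mathf.Clamp(Y + 1.402f * V, 0f, 255f);
    byte g = (byte)Mathf.Clamp(Y - 0.344f * U - 0.714f * V, 0f, 255f);
    byte b = (byte)Mathf.Clamp(Y + 1.772f * U, 0f, 255f);
    return new Color32(r, g, b, 255);
}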
public static void GetPlaneDataRGB(out byte[] pixels, XRCpuImage image)
{
    var conversionParams = new XRCpuImage.ConversionParams
    {
        inputRect = new RectInt(0, 0, image.width, image.height),
        outputDimensions = new Vector2Int(image.width, image.height),
        outputFormat = TextureFormat.RGB24,
        transformation = XRCpuImage.Transformation.None
    };

    int size = image.GetConvertedDataSize(conversionParams);
    pixels = new byte[size];

    // Pin the managed array so the conversion can write directly into it.
    GCHandle bufferHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
    image.Convert(conversionParams, bufferHandle.AddrOfPinnedObject(), pixels.Length);
    bufferHandle.Free();
}
/// <summary>
/// Determines whether a given
/// [`TextureFormat`](https://docs.unity3d.com/ScriptReference/TextureFormat.html) is supported for image
/// conversion.
/// </summary>
/// <param name="image">The <see cref="XRCpuImage"/> to convert.</param>
/// <param name="format">The [`TextureFormat`](https://docs.unity3d.com/ScriptReference/TextureFormat.html)
/// to test.</param>
/// <returns>Returns `true` if <paramref name="image"/> can be converted to <paramref name="format"/>.
/// Returns `false` otherwise.</returns>
public override bool FormatSupported(XRCpuImage image, TextureFormat format)
{
    switch (image.format)
    {
        case XRCpuImage.Format.IosYpCbCr420_8BiPlanarFullRange:
            return s_SupportedVideoConversionFormats.Contains(format);
        case XRCpuImage.Format.OneComponent8:
            return format == TextureFormat.R8 || format == TextureFormat.Alpha8;
        case XRCpuImage.Format.DepthFloat32:
            return format == TextureFormat.RFloat;
        default:
            return false;
    }
}
/// <summary>
/// Determines whether a given
/// [`TextureFormat`](https://docs.unity3d.com/ScriptReference/TextureFormat.html) is supported for image
/// conversion.
/// </summary>
/// <param name="image">The <see cref="XRCpuImage"/> to convert.</param>
/// <param name="format">The [`TextureFormat`](https://docs.unity3d.com/ScriptReference/TextureFormat.html)
/// to test.</param>
/// <returns>Returns `true` if <paramref name="image"/> can be converted to <paramref name="format"/>.
/// Returns `false` otherwise.</returns>
public override bool FormatSupported(XRCpuImage image, TextureFormat format) =>
    (image.format == XRCpuImage.Format.AndroidYuv420_888 || image.format == XRCpuImage.Format.DepthUint16)
    && s_SupportedVideoConversionFormats.Contains(format);
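The two provider overrides above (the iOS and Android flavors) back the public XRCpuImage.FormatSupported instance method, which application code can use to guard a conversion. A minimal sketch, assuming an RGBA32-first preference with an R8 fallback; the class name and the preference order are assumptions.

using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.ARSubsystems;

static class CpuImageConversionGuard
{
    // Convert to RGBA32 when the platform supports it, otherwise fall back to R8.
    // The caller owns the returned buffer and must Dispose() it.
    public static NativeArray<byte> ConvertPreferred(XRCpuImage cpuImage, Allocator allocator)
    {
        TextureFormat desired = TextureFormat.RGBA32;
        TextureFormat format = cpuImage.FormatSupported(desired) ? desired : TextureFormat.R8;

        var conversionParams = new XRCpuImage.ConversionParams(cpuImage, format);
        var buffer = new NativeArray<byte>(cpuImage.GetConvertedDataSize(conversionParams), allocator);
        cpuImage.Convert(conversionParams, buffer);
        return buffer;
    }
}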
private static bool HasEqualDimensions(Texture tex, XRCpuImage image)
{
    // Both width and height must match.
    return tex.width == image.width && tex.height == image.height;
}
IEnumerator ProcessImage(XRCpuImage image, Vector3 viewportScaling)
{
    // Create the async conversion request.
    XRCpuImage.ConversionParams conv_params = new XRCpuImage.ConversionParams
    {
        // Use the full image.
        inputRect = new RectInt(0, 0, image.width, image.height),
        // Keep the full resolution for the output.
        outputDimensions = new Vector2Int(image.width, image.height),
        // Color image format.
        outputFormat = TextureFormat.RGBA32,
        // Flip across the Y axis.
        transformation = XRCpuImage.Transformation.MirrorY
    };
    var request = image.ConvertAsync(conv_params);

    // Wait for the conversion to complete.
    while (!request.status.IsDone())
    {
        yield return null;
    }

    // Check status to see if the conversion completed successfully.
    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        // Something went wrong.
        Debug.LogErrorFormat("Request failed with status {0}", request.status);

        // Dispose even if there is an error.
        request.Dispose();
        yield break;
    }

    // Image data is ready. Let's apply it to a Texture2D.
    var rawData = request.GetData<byte>();

    // Create a texture if necessary.
    if (m_Texture == null)
    {
        m_Texture = new Texture2D(
            request.conversionParams.outputDimensions.x,
            request.conversionParams.outputDimensions.y,
            request.conversionParams.outputFormat,
            false);
    }

    // Copy the image data into the texture.
    m_Texture.LoadRawTextureData(rawData);
    m_Texture.Apply();

    Debug.Log("TEX: " + m_Texture.height + "h " + m_Texture.width + "w");
    Debug.Log("Screen: " + Screen.height + "h " + Screen.width + "w");

    Mat inputMat = new Mat(image.height, image.width, CvType.CV_8UC4);
    Mat outputMat = new Mat(1500, 1500, CvType.CV_8UC4);
    Utils.fastTexture2DToMat(m_Texture, inputMat);

    if (tex2d == null)
    {
        tex2d = new Texture2D(1500, 1500, conv_params.outputFormat, false);
    }

    Debug.Log("positionAnchor");
    Debug.Log(positionAnchor);
    Debug.Log("anchorRef");
    Debug.Log(anchorRef);

    // Project the anchor's world-space corners into screen space to build the source quad.
    int counter = 0;
    Point[] srcPointsVec = new Point[4];
    foreach (var point in anchorRef.getWorldPoints())
    {
        Vector3 screenPoint = mainCam.WorldToScreenPoint(point);
        srcPointsVec[counter] = new Point(screenPoint.y * viewportScaling.y / 3,
            100 - screenPoint.x * viewportScaling.x / 3);
        counter += 1;
    }

    MatOfPoint2f srcPoints = new MatOfPoint2f(new[]
    {
        srcPointsVec[0], srcPointsVec[1], srcPointsVec[2], srcPointsVec[3]
    });
    MatOfPoint2f dstPoints = new MatOfPoint2f(new[]
    {
        new Point(195 * 1.25, 0),
        new Point(0, 0),
        new Point(0, 280 * 1.25),
        new Point(195 * 1.25, 280 * 1.25),
    });

    // Warp the camera image so the anchor quad maps onto the destination rectangle.
    Mat H = Calib3d.findHomography(srcPoints, dstPoints);
    Imgproc.warpPerspective(inputMat, outputMat, H, new Size(1500, 1500));
    Utils.fastMatToTexture2D(outputMat, tex2d);

    if (websocket.State == WebSocketState.Open && canProcess)
    {
        websocket.Send(ImageConversion.EncodeToJPG(tex2d, 50));
        canProcess = false;
    }

    inputMat.Dispose();
    inputMat = null;
    outputMat.Dispose();
    outputMat = null;
    request.Dispose();
}
private IEnumerator ProcessImage(XRCpuImage image, XRCameraIntrinsics cameraIntrinsics, Action<bool> callback = null,
    bool inverse = false, bool autoCalibrate = false, bool force = false, bool showNotification = false)
{
    // Get ARCamera Transform Matrix
    ARCameraTransformMatrix = Matrix4x4.TRS(ARCamera.position, ARCamera.rotation, ARCamera.localScale);

    // Create the async conversion request.
    var request = image.ConvertAsync(new XRCpuImage.ConversionParams
    {
        // Use the full image.
        inputRect = new RectInt(0, 0, image.width, image.height),
        // Keep the full resolution for the output.
        outputDimensions = new Vector2Int(image.width, image.height),
        // Color image format.
        outputFormat = TextureFormat.RGBA32,
        // Flip across the Y axis.
        transformation = XRCpuImage.Transformation.MirrorY
    });

    // Wait for the conversion to complete.
    while (!request.status.IsDone())
    {
        yield return null;
    }

    // Check status to see if the conversion completed successfully.
    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        // Something went wrong.
        Debug.LogErrorFormat("Request failed with status {0}", request.status);

        // Dispose even if there is an error.
        request.Dispose();
        callback?.Invoke(false);
        yield break;
    }

    // Image data is ready. Let's apply it to a Texture2D.
    var rawData = request.GetData<byte>();

    // Create a texture if necessary.
    //Texture2D
    m_Texture = new Texture2D(
        request.conversionParams.outputDimensions.x,
        request.conversionParams.outputDimensions.y,
        request.conversionParams.outputFormat,
        false);

    // Copy the image data into the texture.
    m_Texture.LoadRawTextureData(rawData);
    m_Texture.Apply();

    // Need to dispose the request to delete resources associated
    // with the request, including the raw data.
    request.Dispose();

    string imageString = System.Text.Encoding.GetEncoding("iso-8859-1").GetString(m_Texture.EncodeToJPG());
    //System.IO.File.WriteAllBytes(Application.persistentDataPath + "/image" + imageNum + ".jpg", m_Texture.EncodeToJPG());
    //imageNum++;
    //Debug.Log("Image size: " + request.conversionParams.outputDimensions.x + " x " + request.conversionParams.outputDimensions.y);
    //Debug.Log("Camera Resolution: " + cameraConfiguration.Value);
    //Debug.Log("Camera width: " + cameraConfiguration.Value.width + " height: " + cameraConfiguration.Value.height + " framerate: " + cameraConfiguration.Value.framerate);

    CameraParameters cameraParams = new CameraParameters(
        cx: (decimal)cameraIntrinsics.principalPoint.x,
        cy: (decimal)cameraIntrinsics.principalPoint.y,
        distCoefs: new List<decimal>() { 0, 0, 0, 0 },
        fx: (decimal)cameraIntrinsics.focalLength.x,
        fy: (decimal)cameraIntrinsics.focalLength.y);
    //Debug.Log(cameraParams.ToString());

    if (inverse)
    {
        GetMarkerPosition(cameraParams, imageString, autoCalibrate: autoCalibrate, force: force,
            showNotification: showNotification);
    }
    else
    {
        //GetCameraPosition(cameraParams, imageString, autoCalibrate);
    }

    yield return new WaitWhile(() => markerDetectionState == MarkerDetectionState.Processing);

    if (markerDetectionState == MarkerDetectionState.Success)
    {
        callback?.Invoke(true);
    }
    else if (markerDetectionState == MarkerDetectionState.Failure)
    {
        callback?.Invoke(false);
    }

    //GetMarkerCornersPosition(cameraParams, imageString);
}
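A sketch of how a coroutine like ProcessImage might be started, assuming the component also holds an ARCameraManager reference (_cameraManager); the wrapper method names are assumptions, and the extra coroutine only exists to guarantee the native image is disposed once processing finishes.

public void CaptureAndDetectMarker(Action<bool> callback)
{
    if (!_cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
    {
        callback?.Invoke(false);
        return;
    }

    if (!_cameraManager.TryGetIntrinsics(out XRCameraIntrinsics intrinsics))
    {
        image.Dispose();
        callback?.Invoke(false);
        return;
    }

    StartCoroutine(ProcessImageAndDispose(image, intrinsics, callback));
}

private IEnumerator ProcessImageAndDispose(XRCpuImage image, XRCameraIntrinsics intrinsics, Action<bool> callback)
{
    using (image) // release the native image even if processing fails
    {
        // inverse: true takes the GetMarkerPosition path shown above.
        yield return ProcessImage(image, intrinsics, callback, inverse: true);
    }
}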