/// <summary>
/// Asynchronously converts an <see cref="XRCpuImage"/> to an RGBA32 texture (mirrored
/// across X), shows it on the debug material, and hands it to <paramref name="callback"/>.
/// </summary>
/// <param name="image">CPU-side camera image to convert; caller retains ownership.</param>
/// <param name="callback">Receives the converted texture on success; may be null.</param>
private IEnumerator ProcessImage(XRCpuImage image, Action<Texture2D> callback)
{
    var request = image.ConvertAsync(new XRCpuImage.ConversionParams()
    {
        inputRect = new RectInt(0, 0, image.width, image.height),
        outputDimensions = new Vector2Int(image.width, image.height),
        outputFormat = TextureFormat.RGBA32,
        transformation = XRCpuImage.Transformation.MirrorX
    });

    // Wait for the async conversion to finish.
    while (!request.status.IsDone())
    {
        yield return null;
    }

    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        // Dispose even on failure so the native conversion buffer is released.
        request.Dispose();
        yield break;
    }

    var rawData = request.GetData<byte>();

    // (Re)create the destination texture if missing or if the camera resolution changed.
    // Fix: the original never resized, and LoadRawTextureData throws when the raw buffer
    // size does not match the texture dimensions.
    int width = request.conversionParams.outputDimensions.x;
    int height = request.conversionParams.outputDimensions.y;
    if (_receivedTexture == null || _receivedTexture.width != width || _receivedTexture.height != height)
    {
        if (_receivedTexture != null)
        {
            Destroy(_receivedTexture);
        }
        _receivedTexture = new Texture2D(width, height, request.conversionParams.outputFormat, false);
    }

    _receivedTexture.LoadRawTextureData(rawData);
    _receivedTexture.Apply();

    // Convert to rgba texture.
    // NOTE(review): _receivedTexture is already RGBA32, so this GetPixels/SetPixels round
    // trip only duplicates the data; kept because other code may reference _rbgaTexture.
    if (_rbgaTexture == null || _rbgaTexture.width != width || _rbgaTexture.height != height)
    {
        if (_rbgaTexture != null)
        {
            Destroy(_rbgaTexture);
        }
        _rbgaTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
    }
    _rbgaTexture.SetPixels(_receivedTexture.GetPixels());
    _rbgaTexture.Apply();

    _debugMaterial.mainTexture = _rbgaTexture;

    // Fix: null-conditional invoke — the original threw NullReferenceException
    // when a null callback was passed.
    callback?.Invoke(_rbgaTexture);

    request.Dispose();
}
/// <summary>
/// Converts an <see cref="XRCpuImage"/> to a half-resolution RGB24 texture,
/// PNG-encodes it, and publishes it as a Base64 string.
/// Uses <c>_isBusy</c> as a reentrancy guard so only one conversion runs at a time.
/// </summary>
/// <param name="image">CPU-side camera image to convert; caller retains ownership.</param>
IEnumerator Process(XRCpuImage image)
{
    _isBusy = true; // set flag to prevent multiple calls to Process.
    var request = image.ConvertAsync(new XRCpuImage.ConversionParams
    {
        inputRect = new RectInt(0, 0, image.width, image.height),
        // Downsample in half.
        outputDimensions = new Vector2Int(image.width / 2, image.height / 2),
        outputFormat = TextureFormat.RGB24,
        transformation = XRCpuImage.Transformation.None
    });

    // Wait for the conversion to finish.
    while (!request.status.IsDone())
    {
        yield return null;
    }

    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        // Fix: dropped the pointless interpolation prefix on a literal with no holes.
        Debug.Log("image is done, but not ready.");
        request.Dispose();
        _isBusy = false; // set flag to allow new calls to Process
        yield break;
    }

    var rawData = request.GetData<byte>();

    // (Re)create the texture if missing or if the output resolution changed.
    // Fix: the original never resized, and LoadRawTextureData throws on a size mismatch.
    int width = request.conversionParams.outputDimensions.x;
    int height = request.conversionParams.outputDimensions.y;
    if (_texture == null || _texture.width != width || _texture.height != height)
    {
        if (_texture != null)
        {
            Destroy(_texture);
        }
        _texture = new Texture2D(width, height, request.conversionParams.outputFormat, false);
    }

    // To texture, to PNG, to Base64.
    _texture.LoadRawTextureData(rawData);
    _texture.Apply();
    string base64 = System.Convert.ToBase64String(_texture.EncodeToPNG());
    request.Dispose();
    PublishBase64(base64);
    _isBusy = false; // set flag to allow new calls to Process
}
/// <summary>
/// Converts the camera image to RGBA32, warps it to a rectified view of the tracked
/// anchor area via an OpenCV homography, and streams the result as JPEG over the websocket.
/// </summary>
/// <param name="image">CPU-side camera image to convert; caller retains ownership.</param>
/// <param name="viewportScaling">Per-axis scale from screen space to image space.</param>
IEnumerator ProcessImage(XRCpuImage image, Vector3 viewportScaling)
{
    // Create the async conversion request.
    XRCpuImage.ConversionParams conv_params = new XRCpuImage.ConversionParams
    {
        // Use the full image.
        inputRect = new RectInt(0, 0, image.width, image.height),
        // Full resolution (fix: the old "Downsample by 2" comment was stale —
        // outputDimensions equals the input size).
        outputDimensions = new Vector2Int(image.width, image.height),
        // Color image format.
        outputFormat = TextureFormat.RGBA32,
        // Flip across the Y axis.
        transformation = XRCpuImage.Transformation.MirrorY
    };
    var request = image.ConvertAsync(conv_params);

    // Wait for the conversion to complete.
    while (!request.status.IsDone())
    {
        yield return null;
    }

    // Check status to see if the conversion completed successfully.
    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        Debug.LogErrorFormat("Request failed with status {0}", request.status);
        // Dispose even if there is an error.
        request.Dispose();
        yield break;
    }

    // Image data is ready. Apply it to a Texture2D.
    var rawData = request.GetData<byte>();

    // Create a texture if necessary.
    if (m_Texture == null)
    {
        m_Texture = new Texture2D(
            request.conversionParams.outputDimensions.x,
            request.conversionParams.outputDimensions.y,
            request.conversionParams.outputFormat,
            false);
    }

    // Copy the image data into the texture.
    m_Texture.LoadRawTextureData(rawData);
    m_Texture.Apply();

    Debug.Log("TEX: " + m_Texture.height + "h " + m_Texture.width + "w");
    // Fix: copy-paste bug — the original logged the texture size twice instead of
    // the screen size.
    Debug.Log("Screen: " + Screen.height + "h " + Screen.width + "w");

    Mat inputMat = new Mat(image.height, image.width, CvType.CV_8UC4);
    Mat outputMat = new Mat(1500, 1500, CvType.CV_8UC4);
    Utils.fastTexture2DToMat(m_Texture, inputMat);

    if (tex2d == null)
    {
        tex2d = new Texture2D(1500, 1500, conv_params.outputFormat, false);
    }

    Debug.Log("positionAnchor");
    Debug.Log(positionAnchor);
    Debug.Log("anchorRef");
    Debug.Log(anchorRef);

    // Project the anchor's four world-space corners into image coordinates.
    // NOTE(review): the swapped axes, the /3 factor and the 100 offset look empirically
    // tuned for one device orientation — confirm before reusing on other hardware.
    int counter = 0;
    Point[] srcPointsVec = new Point[4];
    foreach (var point in anchorRef.getWorldPoints())
    {
        Vector3 screenPoint = mainCam.WorldToScreenPoint(point);
        srcPointsVec[counter] = new Point(
            screenPoint.y * viewportScaling.y / 3,
            100 - screenPoint.x * viewportScaling.x / 3);
        counter += 1;
    }

    MatOfPoint2f srcPoints = new MatOfPoint2f(new[] { srcPointsVec[0], srcPointsVec[1], srcPointsVec[2], srcPointsVec[3] });
    // Destination rectangle: a 195x280 area scaled by 1.25.
    MatOfPoint2f dstPoints = new MatOfPoint2f(new[]
    {
        new Point(195 * 1.25, 0),
        new Point(0, 0),
        new Point(0, 280 * 1.25),
        new Point(195 * 1.25, 280 * 1.25),
    });

    Mat H = Calib3d.findHomography(srcPoints, dstPoints);
    Imgproc.warpPerspective(inputMat, outputMat, H, new Size(1500, 1500));
    Utils.fastMatToTexture2D(outputMat, tex2d);

    if (websocket.State == WebSocketState.Open && canProcess)
    {
        websocket.Send(ImageConversion.EncodeToJPG(tex2d, 50));
        canProcess = false;
    }

    // Release native OpenCV memory.
    // Fix: H, srcPoints and dstPoints were leaked in the original — OpenCVForUnity Mats
    // wrap native allocations that must be disposed explicitly.
    srcPoints.Dispose();
    dstPoints.Dispose();
    H.Dispose();
    inputMat.Dispose();
    inputMat = null;
    outputMat.Dispose();
    outputMat = null;
    request.Dispose();
}
/// <summary>
/// Converts the camera image to RGBA32, JPEG-encodes it, and submits it (with the
/// camera intrinsics) to marker detection; reports the outcome via <paramref name="callback"/>.
/// </summary>
/// <param name="image">CPU-side camera image to convert; caller retains ownership.</param>
/// <param name="cameraIntrinsics">Focal length and principal point for the capture.</param>
/// <param name="callback">Invoked with true on detection success, false on failure/conversion error.</param>
/// <param name="inverse">When true, runs marker-position detection; otherwise no request is sent.</param>
/// <param name="autoCalibrate">Forwarded to GetMarkerPosition.</param>
/// <param name="force">Forwarded to GetMarkerPosition.</param>
/// <param name="showNotification">Forwarded to GetMarkerPosition.</param>
private IEnumerator ProcessImage(XRCpuImage image, XRCameraIntrinsics cameraIntrinsics, Action<bool> callback = null, bool inverse = false, bool autoCalibrate = false, bool force = false, bool showNotification = false)
{
    // Snapshot the AR camera pose for this frame.
    ARCameraTransformMatrix = Matrix4x4.TRS(ARCamera.position, ARCamera.rotation, ARCamera.localScale);

    // Create the async conversion request.
    var request = image.ConvertAsync(new XRCpuImage.ConversionParams
    {
        // Use the full image.
        inputRect = new RectInt(0, 0, image.width, image.height),
        // Full resolution (fix: the old "Downsample by 2" comment was stale).
        outputDimensions = new Vector2Int(image.width, image.height),
        // Color image format.
        outputFormat = TextureFormat.RGBA32,
        // Flip across the Y axis.
        transformation = XRCpuImage.Transformation.MirrorY
    });

    // Wait for the conversion to complete.
    while (!request.status.IsDone())
    {
        yield return null;
    }

    // Check status to see if the conversion completed successfully.
    if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
    {
        Debug.LogErrorFormat("Request failed with status {0}", request.status);
        // Dispose even if there is an error.
        request.Dispose();
        callback?.Invoke(false);
        yield break;
    }

    // Image data is ready. Apply it to a Texture2D.
    var rawData = request.GetData<byte>();

    // Fix: texture creation was commented out, so a null m_Texture threw
    // NullReferenceException. (Re)create it on demand, also when the resolution changes.
    int width = request.conversionParams.outputDimensions.x;
    int height = request.conversionParams.outputDimensions.y;
    if (m_Texture == null || m_Texture.width != width || m_Texture.height != height)
    {
        m_Texture = new Texture2D(width, height, request.conversionParams.outputFormat, false);
    }

    // Copy the image data into the texture.
    m_Texture.LoadRawTextureData(rawData);
    m_Texture.Apply();

    // Dispose frees the native buffer backing rawData — must happen only after
    // LoadRawTextureData has copied it.
    request.Dispose();

    // JPEG bytes smuggled through a string: ISO-8859-1 maps bytes 0-255 one-to-one to chars.
    string imageString = System.Text.Encoding.GetEncoding("iso-8859-1").GetString(m_Texture.EncodeToJPG());

    CameraParameters cameraParams = new CameraParameters(
        cx: (decimal) cameraIntrinsics.principalPoint.x,
        cy: (decimal) cameraIntrinsics.principalPoint.y,
        distCoefs: new List<decimal>() { 0, 0, 0, 0 }, // no lens distortion modelled
        fx: (decimal) cameraIntrinsics.focalLength.x,
        fy: (decimal) cameraIntrinsics.focalLength.y);

    if (inverse)
    {
        GetMarkerPosition(cameraParams, imageString, autoCalibrate: autoCalibrate, force: force, showNotification: showNotification);
    }
    // NOTE(review): when inverse is false no detection request is issued (the
    // GetCameraPosition path was commented out upstream), so the wait below observes
    // whatever state a previous run left behind — confirm this is intended.

    yield return new WaitWhile(() => markerDetectionState == MarkerDetectionState.Processing);

    if (markerDetectionState == MarkerDetectionState.Success)
    {
        callback?.Invoke(true);
    }
    else if (markerDetectionState == MarkerDetectionState.Failure)
    {
        callback?.Invoke(false);
    }
}