Пример #1
0
        /// <summary>
        /// Copies the luminance (Y) plane of <paramref name="image"/> into a managed,
        /// tightly-packed byte array (width * height, no row padding).
        /// </summary>
        /// <param name="pixels">Receives the packed Y-plane bytes.</param>
        /// <param name="image">The camera image to read; plane 0 is used as the Y plane.</param>
        public static void GetPlaneData(out byte[] pixels, XRCameraImage image)
        {
            XRCameraImagePlane plane = image.GetPlane(0);             // use the Y plane
            int width = image.width, height = image.height;

            pixels = new byte[width * height];

            if (width == plane.rowStride)
            {
                // No row padding: a single block copy suffices.
                plane.data.CopyTo(pixels);
            }
            else
            {
                // Rows are padded to rowStride; copy row by row, dropping the padding.
                unsafe
                {
                    ulong handle;
                    byte *srcPtr = (byte *)NativeArrayUnsafeUtility.GetUnsafePtr(plane.data);
                    byte *dstPtr = (byte *)UnsafeUtility.PinGCArrayAndGetDataAddress(pixels, out handle);
                    try
                    {
                        if (width > 0 && height > 0)
                        {
                            // dst stride = width (packed), src stride = rowStride, element size = width bytes, count = height rows.
                            UnsafeUtility.MemCpyStride(dstPtr, width, srcPtr, plane.rowStride, width, height);
                        }
                    }
                    finally
                    {
                        // Always unpin the managed array, even if the copy faults.
                        UnsafeUtility.ReleaseGCObject(handle);
                    }
                }
            }
        }
        /// <summary>
        /// Run pose estimation on an XRCameraImage (synchronized).
        /// </summary>
        /// <param name="image">The camera image to run prediction on.</param>
        /// <returns>The encoded pose message returned by the native pose manager.</returns>
        public static string ProcessPoseFromImage(XRCameraImage image)
        {
            // Extract the YUV planes and hand them to the native pose estimator.
            var methodParams = extractYUVFromImage(image);
            return poseManager.Call <string>("processPose", methodParams);
        }
Пример #3
0
        /// <summary>
        /// Attempt to get the latest camera image. This provides direct access to the raw pixel
        /// data, as well as utilities to convert to RGB and Grayscale formats.
        /// </summary>
        /// <remarks>
        /// The <c>XRCameraImage</c> must be disposed to avoid resource leaks.
        /// </remarks>
        /// <param name="cameraImage">A valid <c>XRCameraImage</c> if this method returns <c>true</c>.</param>
        /// <returns>
        /// <c>true</c> if the image was acquired. Otherwise, <c>false</c>.
        /// </returns>
        public bool TryGetLatestImage(out XRCameraImage cameraImage)
        {
            // Without a camera subsystem there is no image source to query.
            if (subsystem != null)
            {
                return subsystem.TryGetLatestImage(out cameraImage);
            }

            cameraImage = default(XRCameraImage);
            return false;
        }
Пример #4
0
        /// <summary>
        /// Converts the full camera image into a tightly-packed RGB24 byte array.
        /// </summary>
        /// <param name="pixels">Receives the converted RGB24 bytes.</param>
        /// <param name="image">The camera image to convert; caller retains ownership.</param>
        public static void GetPlaneDataRGB(out byte[] pixels, XRCameraImage image)
        {
            var conversionParams = new XRCameraImageConversionParams
            {
                inputRect        = new RectInt(0, 0, image.width, image.height),
                outputDimensions = new Vector2Int(image.width, image.height),
                outputFormat     = TextureFormat.RGB24,
                transformation   = CameraImageTransformation.None
            };

            // Size the buffer exactly as the converter requires for these parameters.
            int size = image.GetConvertedDataSize(conversionParams);

            pixels = new byte[size];
            GCHandle bufferHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
            try
            {
                // Convert writes directly into the pinned managed buffer.
                image.Convert(conversionParams, bufferHandle.AddrOfPinnedObject(), pixels.Length);
            }
            finally
            {
                // Free the pin even if Convert throws; otherwise the handle leaks.
                bufferHandle.Free();
            }
        }
        /// <summary>
        /// Requests the latest camera frame and starts an asynchronous conversion to an
        /// R8 (grayscale) image of ScapeImgWidth x ScapeImgHeight; ProcessImage is invoked
        /// with the result when the conversion completes.
        /// </summary>
        private void GetImageAsync()
        {
            // TryGetLatestImage always assigns its out parameter, so no
            // pre-initialization of currentXRImage is needed.
            if (cameraManager.TryGetLatestImage(out currentXRImage))
            {
                currentXRImage.ConvertAsync(new XRCameraImageConversionParams
                {
                    inputRect        = new RectInt(0, 0, currentXRImage.width, currentXRImage.height),
                    outputDimensions = new Vector2Int(ScapeImgWidth, ScapeImgHeight),
                    outputFormat     = TextureFormat.R8
                }, ProcessImage);

                // NOTE(review): assumes the async conversion keeps its own copy of the
                // source data, so the image can be disposed immediately — confirm against
                // the ARFoundation ConvertAsync contract.
                currentXRImage.Dispose();

                ScapeLogging.LogDebug("GetImageAsync() " + (Time.time - requestTime));
                measurementsRequested = false;
            }
        }
    /// <summary>
    /// Ensures _texture and _previewTexture match the camera image dimensions,
    /// destroying and recreating both when the size has changed.
    /// </summary>
    /// <param name="cameraImage">The camera image whose dimensions the textures must match.</param>
    private void RecreateTextureIfNeeded(XRCameraImage cameraImage)
    {
        int width  = cameraImage.width;
        int height = cameraImage.height;

        // Nothing to do while the existing texture already matches the frame size.
        bool sizeMatches = _texture != null && _texture.width == width && _texture.height == height;
        if (sizeMatches)
        {
            return;
        }

        if (_texture != null)
        {
            DestroyImmediate(_texture);
        }

        if (_previewTexture != null)
        {
            _previewTexture.Release();
        }

        // Recreate both targets at the new size and rebuild the preview geometry.
        _texture        = new Texture2D(width, height, TextureFormat.RGBA32, false);
        _previewTexture = new RenderTexture(_texture.width, _texture.height, 0, RenderTextureFormat.BGRA32);
        _previewTexture.Create();

        ResizePreviewPlane();
    }
        /// <summary>
        /// Copies the Y, U and V planes of the image into managed byte arrays and packs
        /// them, together with stride and size metadata, into the positional parameter
        /// list expected by the native pose-processing call.
        /// </summary>
        /// <param name="image">The camera image to extract planes from.</param>
        /// <returns>An 8-element array: Y bytes, U bytes, V bytes, Y row stride, UV row stride, UV pixel stride, width, height.</returns>
        private static object[] extractYUVFromImage(XRCameraImage image)
        {
            XRCameraImagePlane yPlane = image.GetPlane(0);
            XRCameraImagePlane uPlane = image.GetPlane(1);
            XRCameraImagePlane vPlane = image.GetPlane(2);

            // Copy each native plane buffer into a managed array.
            byte[] yBytes = new byte[yPlane.data.Length];
            byte[] uBytes = new byte[uPlane.data.Length];
            byte[] vBytes = new byte[vPlane.data.Length];

            NativeArray <byte> .Copy(yPlane.data, yBytes);

            NativeArray <byte> .Copy(uPlane.data, uBytes);

            NativeArray <byte> .Copy(vPlane.data, vBytes);

            // Order matters: the native side reads these positionally.
            return new object[]
            {
                yBytes,
                uBytes,
                vBytes,
                yPlane.rowStride,
                uPlane.rowStride,
                uPlane.pixelStride,
                image.width,
                image.height
            };
        }
 /// <summary>
 /// (Android only) Run pose estimation on an XRCameraImage (async)
 /// </summary>
 /// <param name="cameraImage">The camera image to run prediction on</param>
 public static void ProcessPoseFromImageAsync(XRCameraImage cameraImage)
 {
     // Thin delegate: all work happens in the Android-specific pose manager.
     FritzAndroidPoseManager.ProcessPoseFromImageAsync(cameraImage);
 }
        /// <summary>
        /// (Android only) Run pose estimation on an XRCameraImage (synchronized).
        /// </summary>
        /// <param name="cameraImage">The camera image to run prediction on.</param>
        /// <returns>The decoded list of poses found in the image.</returns>
        public static List <FritzPose> ProcessPoseFromImage(XRCameraImage cameraImage)
        {
            // The native call returns an encoded string; decode it into pose objects.
            var encoded = FritzAndroidPoseManager.ProcessPoseFromImage(cameraImage);

            return ProcessEncodedPoses(encoded);
        }
    /// <summary>
    /// Convert the camera image to the model input size, send it as a JPG to the
    /// configured TorchServe endpoint, and draw the resulting bounding boxes.
    /// </summary>
    /// <param name="image">The camera image to run object detection on.</param>
    private IEnumerator ProcessImage(XRCameraImage image)
    {
        // Kick off an async conversion of the full frame to RGBA32 at the model's
        // Width x Height, mirrored across Y; `using` disposes the request either way.
        using (var request = image.ConvertAsync(new XRCameraImageConversionParams
        {
            inputRect = new RectInt(0, 0, image.width, image.height),
            outputDimensions = new Vector2Int((int)Width, (int)Height),
            outputFormat = TextureFormat.RGBA32,
            transformation = CameraImageTransformation.MirrorY
        }))
        {
            // Poll once per frame until the conversion request completes.
            while (!request.status.IsDone())
            {
                yield return(null);
            }

            if (request.status != AsyncCameraImageConversionStatus.Ready)
            {
                Debug.LogErrorFormat("Image request failed with status {0}", request.status);
                yield break;
            }

            // Encode the raw RGBA buffer as JPG for the HTTP upload.
            _currentJpgBytes = ConvertBufferToJpg(request.GetData <byte>(), request.conversionParams);
        }

        // NOTE(review): constructing a new HttpClient per request can exhaust sockets
        // under load; consider a shared client or IHttpClientFactory.
        using (var client = new HttpClient {
            Timeout = TimeSpan.FromMilliseconds(2000)
        })
        {
            if (settingsManager.SettingsModel.activeEndpoint?.url == null)
            {
                Debug.LogWarning("Null Model Endpoint URL");
                yield break;
            }

            var content = new ByteArrayContent(_currentJpgBytes);
            content.Headers.Add("Content-Type", "image/jpg");

            var startTime = Time.realtimeSinceStartup;

            Task <HttpResponseMessage> webRequestTask;
            try
            {
                // Track in-flight requests; decremented after completion or on failure.
                _activeRequests += 1;
                webRequestTask   = client.PostAsync(new Uri(settingsManager.SettingsModel.activeEndpoint?.url), content);
            }
            catch (Exception e)
            {
                _activeRequests -= 1;

                Console.WriteLine(e);
                yield break;
            }

            // Coroutines cannot await; yield until the POST task finishes.
            yield return(Utils.WaitForTaskToComplete(webRequestTask));

            _activeRequests -= 1;

            Console.WriteLine("Round trip: " + (Time.realtimeSinceStartup - startTime));

            // NOTE(review): if the task faulted, accessing .Result here rethrows inside
            // the coroutine — confirm WaitForTaskToComplete guarantees non-faulted completion.
            if (!webRequestTask.Result.IsSuccessStatusCode)
            {
                Debug.LogErrorFormat("Error While Sending: {0}", webRequestTask.Result.ReasonPhrase);
                yield break;
            }

            var stringReadingTask = webRequestTask.Result.Content.ReadAsStringAsync();
            yield return(Utils.WaitForTaskToComplete(stringReadingTask));

            // Wrap output in top-level object for JsonUtility
            var text = "{\"objects\":" + stringReadingTask.Result + "}";

            // If JSON output does not have an array, the response was not a 200 OK
            if (!text.Contains("["))
            {
                Debug.LogErrorFormat("Prediction error: {0}\n", stringReadingTask.Result);
                yield break;
            }

            var rotation = RotationForScreenOrientation();
            if (!rotation.HasValue)
            {
                Debug.LogErrorFormat("Invalid screen orientation: {0}", _orientationObserver.ScreenOrientation);
                yield break;
            }

            // Keep only detections at or above the configured score threshold.
            _currentClassifications = JsonUtility.FromJson <JsonWrapper>(text).objects
                                      .FindAll(it => it.score >= settingsManager.SettingsModel.predictionScoreThreshold);

            // Map raw detections to display space: rotate each box to the current
            // screen orientation, then scale to the on-screen size.
            var classifications = _currentClassifications
                                  .ConvertAll(old => new ObjectClassification(old.label,
                                                                              old.label_id,
                                                                              old.box
                                                                              .RotatedBy(rotation.Value, new Vector2(Width, Height))
                                                                              .ScaledBy(ScaleFactor),
                                                                              old.score));

            boundingBoxManager.SetObjectClassifications(classifications);
        }
    }
Пример #11
0
    /// <summary>
    /// Asynchronously converts the camera image to a half-resolution RGB24 texture,
    /// then applies it to the AR instance's material and the preview UI.
    /// </summary>
    /// <param name="image">The camera image to convert; caller retains ownership.</param>
    IEnumerator ProcessImage(XRCameraImage image)
    {
        var conversionParams = new XRCameraImageConversionParams
        {
            inputRect = new RectInt(0, 0, image.width, image.height),             // full frame
            outputDimensions = new Vector2Int(image.width / 2, image.height / 2), // downsample by 2
            outputFormat = TextureFormat.RGB24,                                   // color image format
            transformation = CameraImageTransformation.MirrorY                    // flip across the Y axis
        };

        // Start the async conversion request.
        var request = image.ConvertAsync(conversionParams);

        // Spin (one frame at a time) until the conversion finishes.
        while (!request.status.IsDone())
        {
            yield return null;
        }

        if (request.status != AsyncCameraImageConversionStatus.Ready)
        {
            Debug.LogErrorFormat("Request failed with status {0}", request.status);

            // Release native resources even on failure.
            request.Dispose();
            yield break;
        }

        // Upload the converted bytes into a fresh Texture2D.
        var dims = request.conversionParams.outputDimensions;

        m_LastCameraTexture = new Texture2D(
            dims.x,
            dims.y,
            request.conversionParams.outputFormat,
            false);

        m_LastCameraTexture.LoadRawTextureData(request.GetData <byte>());
        m_LastCameraTexture.Apply();

        // The request owns the raw buffer; dispose now that the texture holds a copy.
        request.Dispose();

        // Enable the mesh, align it with the AR camera, and bind the texture.
        arInstance.gameObject.SetActive(true);
        arInstance.transform.position = ARCamera.transform.position;
        arInstance.transform.rotation = ARCamera.transform.rotation;
        mat.SetTexture("Albedo", m_LastCameraTexture);

        // Mirror the capture in the UI.
        raw.texture = m_LastCameraTexture;

        // TODO update size of image to match texture
    }
Пример #12
0
    /// <summary>
    /// Asynchronously converts the camera image to a 1/8-resolution RGB24 texture,
    /// rotates it 90 degrees anticlockwise, and sends the PNG bytes to image recognition.
    /// </summary>
    /// <param name="image">The camera image to process; caller retains ownership.</param>
    IEnumerator ProcessImage(XRCameraImage image)
    {
        // Create the async conversion request
        var request = image.ConvertAsync(new XRCameraImageConversionParams
        {
            // Use the full image
            inputRect = new RectInt(0, 0, image.width, image.height),

            // Downsample by 8 in each dimension
            outputDimensions = new Vector2Int(image.width / 8, image.height / 8),

            // Color image format
            outputFormat = TextureFormat.RGB24,

            // Flip across the Y axis
            transformation = CameraImageTransformation.MirrorY
        });

        // Wait for it to complete, yielding one frame per poll
        while (!request.status.IsDone())
        {
            yield return(null);
        }

        // Check status to see if it completed successfully.
        if (request.status != AsyncCameraImageConversionStatus.Ready)
        {
            // Something went wrong
            Debug.LogErrorFormat("Request failed with status {0}", request.status);

            // Dispose even if there is an error.
            request.Dispose();
            yield break;
        }

        // Image data is ready. Let's apply it to a Texture2D.
        var rawData = request.GetData <byte>();


        Texture2D texture = new Texture2D(
            request.conversionParams.outputDimensions.x,
            request.conversionParams.outputDimensions.y,
            request.conversionParams.outputFormat,
            false);

        // Copy the image data into the texture
        texture.LoadRawTextureData(rawData);
        texture.Apply();
        texture.Compress(false);

        // NOTE(review): if AnticlockwiseRotate90 returns a new texture, the
        // intermediate `texture` above is never destroyed and leaks native
        // memory — confirm the helper's behavior and Destroy(texture) if so.
        m_Texture = Common.Util.TextureUtil.AnticlockwiseRotate90(texture);

        // Log message reads "Image generated!" (kept verbatim; it is a runtime string).
        Debug.Log("生成图片!");

        byte[] bytes = m_Texture.EncodeToPNG();
        //SaveImage("temp", bytes);
        ImageRecognition(bytes);

        // Need to dispose the request to delete resources associated
        // with the request, including the raw data.
        request.Dispose();
    }
Пример #13
0
 public static void GetPlaneDataFast(ref IntPtr pixels, XRCameraImage image)
 {
     XRCameraImagePlane plane = image.GetPlane(0);               // use the Y plane
Пример #14
0
 public static void GetPlaneDataRGB(out byte[] pixels, XRCameraImage image)
 {
     var conversionParams = new XRCameraImageConversionParams
Пример #15
0
 public static void GetPlaneData(out byte[] pixels, XRCameraImage image)
 {
     XRCameraImagePlane plane = image.GetPlane(0);               // use the Y plane
 /// <summary>
 /// Run pose estimation on an XRCameraImage (async)
 /// </summary>
 /// <param name="image">The camera image to run prediction on</param>
 public static void ProcessPoseFromImageAsync(XRCameraImage image)
 {
     // Extract the YUV planes, then dispatch the asynchronous native call
     // (fire-and-forget: no return value is read here).
     object[] methodParams = extractYUVFromImage(image);
     poseManager.Call("processPoseAsync", methodParams);
 }