Example #1
0
 private void Update()
 {
     // WebCamTexture reports a tiny placeholder size until the device feed
     // is actually streaming; treat anything above 100px as a real frame.
     if (webCamTex.width <= 100)
     {
         return;
     }

     if (textureToSend == null)
     {
         // First live frame: allocate the OpenCV-side writable texture and
         // the staging texture that mirrors the webcam feed, then wait one
         // frame before sending any pixels.
         openCV.CreateWritableTexture(webCamTex.width, webCamTex.height);
         textureToSend = new Texture2D(webCamTex.width, webCamTex.height, openCV.sendFormat, false);
         return;
     }

     // Copy the current webcam frame into the staging texture and hand it
     // off for processing.
     textureToSend.SetPixels32(webCamTex.GetPixels32());
     textureToSend.Apply();
     openCV.ProcessImage(textureToSend);
 }
Example #2
0
File: ARCamFeed.cs  Project: weacw/Cloak
    /// <summary>
    /// Per-frame AR camera callback: grabs the latest CPU image, converts it
    /// (mirrored, optionally downsampled) into <c>textureToSend</c>, and hands
    /// it to OpenCV for processing.
    /// </summary>
    /// <param name="eventArgs">Frame event data (unused; the image is pulled from the manager).</param>
    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Nothing to do if no CPU-accessible camera image is ready yet.
        if (!arCameraManager.TryGetLatestImage(out XRCameraImage image))
        {
            return;
        }

        //figure out cam transform
        CameraImageTransformation camTransform = CameraImageTransformation.None;

        //assume portrait only for now: mirror X and rotate the preview -90 to compensate
        camTransform = CameraImageTransformation.MirrorX;
        camImageScreen.localEulerAngles = new Vector3(0, 0, -90);

        //downsample to save fps if needed (halve anything wider than 1280px)
        Vector2Int outputSize;

        if (image.width > 1280)
        {
            outputSize = new Vector2Int(image.width / 2, image.height / 2);
        }
        else
        {
            outputSize = new Vector2Int(image.width, image.height);
        }

        XRCameraImageConversionParams conversionParams = new XRCameraImageConversionParams {
            // Get the entire image
            inputRect = new RectInt(0, 0, image.width, image.height),

            // Downsample if needed
            outputDimensions = outputSize,

            // Choose RGB format
            outputFormat = openCV.sendFormat,

            transformation = camTransform
        };

        // See how many bytes we need to store the final image.
        int size = image.GetConvertedDataSize(conversionParams);

        // Allocate a buffer to store the image
        var buffer = new NativeArray<byte>(size, Allocator.Temp);

        try
        {
            try
            {
                // Extract the image data into our buffer.
                image.Convert(conversionParams, new IntPtr(buffer.GetUnsafePtr()), buffer.Length);
            }
            finally
            {
                // FIX: release the native image even if Convert throws;
                // leaking XRCameraImages exhausts ARFoundation's image pool.
                image.Dispose();
            }

            if (textureToSend == null)
            {
                textureToSend = new Texture2D(
                    conversionParams.outputDimensions.x,
                    conversionParams.outputDimensions.y,
                    conversionParams.outputFormat,
                    false);
            }

            textureToSend.LoadRawTextureData(buffer);
            textureToSend.Apply();

            if (!texturesCreated)
            {
                texturesCreated = true;
                //init textures here; buffer is still released by the finally below
                openCV.CreateWritableTexture(textureToSend.width, textureToSend.height);
                return;
            }

            //process the image
            openCV.ProcessImage(textureToSend);
        }
        finally
        {
            // FIX: the original skipped buffer.Dispose() on the early-return
            // path above (!texturesCreated), leaking the NativeArray every
            // frame until textures were created. Dispose on all paths.
            buffer.Dispose();
        }
    }