Code Example #1
        /// <summary>
        /// Display the raw video frame on the texture object.
        /// </summary>
        /// <param name="extras">Unused.</param>
        /// <param name="frameData">Contains raw frame bytes to manipulate.</param>
        /// <param name="frameMetadata">Unused.</param>
        public void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            YUVBuffer yBuffer = frameData.Y;

            if (_rawVideoTexture == null)
            {
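                // The texture is allocated at the stride width because rows may be
                // padded past the visible width; the material scale below crops to
                // Width / Stride and the -1.0f Y scale flips the image upright.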
                _rawVideoTexture                          = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.R8, false);
                _rawVideoTexture.filterMode               = FilterMode.Point;
                _screenRenderer.material.mainTexture      = _rawVideoTexture;
                _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            }

            ProcessImage(yBuffer.Data, _posterizationLevels);
            _rawVideoTexture.LoadRawTextureData(yBuffer.Data);

            // Debug.Log(yBuffer.Data.Length);
            // RgbNet.Shared.sendUdp(yBuffer.Data);

            Debug.Log("CurFrame: " + RgbNet.Shared.CurFrame);
            if (RgbNet.Shared.CurFrame % 60 == 0)
            {
                StartCoroutine(RgbNet.Shared.SendFrameChunks(yBuffer.Data));
            }
            RgbNet.Shared.CurFrame += 1;

            _rawVideoTexture.Apply();
        }
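
ProcessImage is not shown in this snippet. A minimal sketch of what a posterization pass over the 8-bit luma plane could look like, assuming _posterizationLevels is at least 2 (this helper is hypothetical, not taken from the original project):

        // Hypothetical helper: quantizes each luma byte in place to levels
        // evenly spaced values (e.g. levels = 4 maps bytes to 0, 85, 170, 255).
        private void ProcessImage(byte[] data, int levels)
        {
            int step = 255 / (levels - 1);
            for (int i = 0; i < data.Length; i++)
            {
                data[i] = (byte)(Mathf.RoundToInt(data[i] / (float)step) * step);
            }
        }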
Code Example #2
        /// <summary>
        /// Display the raw video frame on the texture object.
        /// </summary>
        /// <param name="extras">Unused.</param>
        /// <param name="frameData">Contains raw frame bytes to manipulate.</param>
        /// <param name="frameMetadata">Unused.</param>
        public void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            YUVBuffer yBuffer = frameData.Y;

            if (_rawVideoTexture == null)
            {
                _rawVideoTexture                          = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.R8, false);
                _rawVideoTexture.filterMode               = FilterMode.Point;
                _screenRenderer.material.mainTexture      = _rawVideoTexture;
                _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            }

            ProcessImage(yBuffer.Data, _posterizationLevels);
            _rawVideoTexture.LoadRawTextureData(yBuffer.Data);
            _rawVideoTexture.Apply();
        }
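
This handler's signature matches the raw video event of the 0.x-era Magic Leap Unity SDK; subscribing it presumably looks like the line below (an assumption about the surrounding setup code, which is not part of the snippet):

        // Assumed wiring for the 0.x Magic Leap Unity SDK raw-video event.
        MLCamera.OnRawVideoFrameAvailable += OnRawCaptureDataReceived;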
Code Example #3
        /// <summary>
        /// Converts the Y plane of [frameData] into a Unity Texture2D, looks up the camera pose
        /// for the frame's capture timestamp and stores it in [camera_pose], then invokes
        /// OnImageCaptured with the resulting texture.
        /// </summary>
        public void OnFrameCaptured(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            ulong     vcamtimestamp = extras.VcamTimestampUs;
            YUVBuffer yData         = frameData.Y;

            byte[] imageData = yData.Data;

            Texture2D texture = new Texture2D((int)yData.Stride, (int)yData.Height, TextureFormat.R8, false);

            texture.LoadRawTextureData(imageData);
            texture.Apply();

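            // VcamTimestampUs is in microseconds; multiply by 1000 to get the
            // nanosecond timestamp that GetFramePose expects.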
            MLCamera.GetFramePose(vcamtimestamp * 1000, out camera_pose);

            OnImageCaptured(texture);
        }
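
Note that this version allocates a fresh Texture2D for every captured frame; a caller capturing continuously may prefer to create the texture once and reuse it across frames, as Code Example #1 does with _rawVideoTexture.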
Code Example #4
File: MyVideoDriver.cs Project: zwiglm/NeoAxisEngine
        protected sealed override unsafe void OnBlit()
        {
            IntPtr buffer = videoBuffer.GetBufferForWriting(GetSize());

            if (buffer != IntPtr.Zero)
            {
                int    y_width;
                int    y_height;
                int    y_stride;
                int    uv_width;
                int    uv_height;
                int    uv_stride;
                IntPtr y;
                IntPtr u;
                IntPtr v;

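                // Fetch pointers and layout for the decoded planes: a full-resolution
                // Y plane plus half-resolution U and V planes (4:2:0 subsampling).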
                YUVBuffer.get_data(out y_width, out y_height, out y_stride, out uv_width, out uv_height,
                                   out uv_stride, out y, out u, out v);

                // Convert the 4:2:0 YCbCr frame to an RGB bitmap.
                _RendererAddition.YUVToRGBConverter_Convert(y_width, y_height, y_stride, uv_width, uv_height,
                                                            uv_stride, y, u, v, GetSize().X, buffer, videoBuffer.TextureFormat == PixelFormat.A8B8G8R8);
            }
        }
Code Example #5
 public int decode_YUVout(YUVBuffer yuv)
 {
     return theora_decode_YUVout(native, yuv.native);
 }
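
decode_YUVout is a thin wrapper over libtheora's legacy theora_decode_YUVout, which hands the decoder's current frame back through the caller's yuv_buffer and returns 0 on success. A typical decode loop with a binding like this one might look as follows, assuming the wrapper also mirrors theora_decode_packetin (the packet source and every name other than decode_YUVout are hypothetical):

 // Hypothetical decode loop: feed one Ogg packet, then read the decoded frame out.
 var yuv = new YUVBuffer();
 while (source.GetNextPacket(out OggPacket packet))  // hypothetical packet source
 {
     theora.decode_packetin(packet);  // assumed wrapper over theora_decode_packetin
     theora.decode_YUVout(yuv);       // retrieve the decoded 4:2:0 frame
 }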