        /// <summary>
        /// Display the raw video frame on the texture object.
        /// </summary>
        /// <param name="extras">Unused.</param>
        /// <param name="frameData">Contains raw frame bytes to manipulate.</param>
        /// <param name="frameMetadata">Unused.</param>
        public void OnRawCaptureDataReceived(MLCamera.ResultExtras extras, MLCamera.YUVFrameInfo frameData, MLCamera.FrameMetadata frameMetadata)
        {
            MLCamera.YUVBuffer yBuffer = frameData.Y;

            if (_rawVideoTexture == null)
            {
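                // The camera delivers row-padded planes: each row is Stride bytes,
                // of which only Width are image pixels. The texture is sized to
                // Stride, mainTextureScale crops the padding, and the -1.0f Y scale
                // flips the image vertically.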
                _rawVideoTexture                          = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.R8, false);
                _rawVideoTexture.filterMode               = FilterMode.Point;
                _screenRenderer.material.mainTexture      = _rawVideoTexture;
                _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            }

            // Posterize the luminance data in place (a hypothetical sketch of ProcessImage appears after PushYUVFrame below).
            ProcessImage(yBuffer.Data, _posterizationLevels);
            _rawVideoTexture.LoadRawTextureData(yBuffer.Data);
            _rawVideoTexture.Apply();
        }
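
        /// <summary>
        /// Package the Y, U, and V planes of a captured frame as WebRTC image planes
        /// and push the frame to the video sink.
        /// </summary>
        /// <param name="results">Capture results; supplies the request id and timestamp.</param>
        /// <param name="frameInfo">Contains the Y, U, and V plane buffers.</param>
        /// <param name="metadata">Unused.</param>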
        private void PushYUVFrame(MLCamera.ResultExtras results, MLCamera.YUVFrameInfo frameInfo, MLCamera.FrameMetadata metadata)
        {
            MLCamera.YUVBuffer buffer;
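            // imagePlanesBuffer.Get() appears to hand back a reusable ImagePlane array, avoiding a per-frame allocation.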
            MLWebRTC.VideoSink.Frame.ImagePlane[] imagePlaneArray = imagePlanesBuffer.Get();
            for (int i = 0; i < imagePlaneArray.Length; ++i)
            {
                switch (i)
                {
                case 0:
                {
                    buffer = frameInfo.Y;
                    break;
                }

                case 1:
                {
                    buffer = frameInfo.U;
                    break;
                }

                case 2:
                {
                    buffer = frameInfo.V;
                    break;
                }

                default:
                {
                    buffer = new MLCamera.YUVBuffer();
                    break;
                }
                }

                imagePlaneArray[i] = MLWebRTC.VideoSink.Frame.ImagePlane.Create(buffer.Width, buffer.Height, buffer.Stride, buffer.BytesPerPixel, buffer.Size, buffer.DataPtr);
            }

            MLWebRTC.VideoSink.Frame frame = MLWebRTC.VideoSink.Frame.Create((ulong)results.RequestId, results.VcamTimestampUs, imagePlaneArray, MLWebRTC.VideoSink.Frame.OutputFormat.YUV_420_888);

            // Fire-and-forget: the task is discarded deliberately so the capture callback does not await the async push.
            _ = this.PushFrameAsync(frame);
        }
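
        // ProcessImage is referenced above but not shown in this example. A minimal
        // sketch of what it might look like, assuming it simply posterizes the Y
        // plane in place with _posterizationLevels output levels (hypothetical
        // implementation, not the original author's):
        //
        // private void ProcessImage(byte[] data, int levels)
        // {
        //     float step = 255f / Mathf.Max(1, levels - 1);
        //     for (int i = 0; i < data.Length; ++i)
        //     {
        //         // Snap each luminance byte to the nearest of `levels` values.
        //         data[i] = (byte)(Mathf.RoundToInt(data[i] / step) * step);
        //     }
        // }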
Example #3
        /// <summary>
        /// Convert the raw YUV frame to RGB with OpenCV and display it on the texture object.
        /// </summary>
        /// <param name="extras">Unused.</param>
        /// <param name="frameData">Contains raw frame bytes to manipulate.</param>
        /// <param name="frameMetadata">Unused.</param>
        public void OnRawCaptureDataReceived(MLCamera.ResultExtras extras, MLCamera.YUVFrameInfo frameData, MLCamera.FrameMetadata frameMetadata)
        {
            // Enable OpenCVForUnity's debug mode so native OpenCV errors are surfaced during development.
            Utils.setDebugMode(true);

            //// Alternative 1: display only the Y plane as a grayscale image.
            //MLCamera.YUVBuffer yBuffer = frameData.Y;

            //if (yBufferMat == null) yBufferMat = new Mat((int)yBuffer.Height, (int)yBuffer.Stride, CvType.CV_8UC1);
            //yBufferMat.put(0, 0, yBuffer.Data);

            //Imgproc.putText(yBufferMat, "W:" + yBufferMat.width() + " H:" + yBufferMat.height() + " SO:" + Screen.orientation, new Point(5, yBufferMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            //if (_rawVideoTexture == null)
            //{
            //    _rawVideoTexture = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.R8, false);
            //    _rawVideoTexture.filterMode = FilterMode.Point;
            //    _screenRenderer.material.mainTexture = _rawVideoTexture;
            //    _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            //}

            //Utils.fastMatToTexture2D(yBufferMat, _rawVideoTexture, false);


            //// Alternative 2: upscale U and V to full resolution, merge the three
            //// planes, then convert. Superseded by the single-Mat I420 path below.
            //MLCamera.YUVBuffer yBuffer = frameData.Y;
            //MLCamera.YUVBuffer uBuffer = frameData.U;
            //MLCamera.YUVBuffer vBuffer = frameData.V;

            //if (yBufferMat == null) yBufferMat = new Mat((int)yBuffer.Height, (int)yBuffer.Stride, CvType.CV_8UC1);
            //yBufferMat.put(0, 0, yBuffer.Data);

            //if (uBufferMat == null) uBufferMat = new Mat((int)uBuffer.Height, (int)uBuffer.Stride, CvType.CV_8UC1);
            //uBufferMat.put(0, 0, uBuffer.Data);
            //if (uBufferResizedMat == null) uBufferResizedMat = new Mat();
            //Imgproc.resize(uBufferMat, uBufferResizedMat, new Size(yBuffer.Stride, yBuffer.Height), 0, 0, Imgproc.INTER_NEAREST);

            //if (vBufferMat == null) vBufferMat = new Mat((int)vBuffer.Height, (int)vBuffer.Stride, CvType.CV_8UC1);
            //vBufferMat.put(0, 0, vBuffer.Data);
            //if (vBufferResizedMat == null) vBufferResizedMat = new Mat();
            //Imgproc.resize(vBufferMat, vBufferResizedMat, new Size(yBuffer.Stride, yBuffer.Height), 0, 0, Imgproc.INTER_NEAREST);

            //List<Mat> mv = new List<Mat>();
            //mv.Add(yBufferMat);
            //mv.Add(uBufferResizedMat);
            //mv.Add(vBufferResizedMat);

            //if (yuvMat == null) yuvMat = new Mat();
            //OpenCVForUnity.CoreModule.Core.merge(mv, yuvMat);

            //if (rgbMat == null) rgbMat = new Mat();
            //Imgproc.cvtColor(yuvMat, rgbMat, Imgproc.COLOR_YUV2RGB);

            //Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);

            //if (_rawVideoTexture == null)
            //{
            //    _rawVideoTexture = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.RGB24, false);
            //    _rawVideoTexture.filterMode = FilterMode.Point;
            //    _screenRenderer.material.mainTexture = _rawVideoTexture;
            //    _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            //}

            //Utils.fastMatToTexture2D(rgbMat, _rawVideoTexture, false);


            // Active path: pack the Y, U, and V planes of the YUV420 frame into a single I420 Mat and convert to RGB in one call.
            MLCamera.YUVBuffer yBuffer = frameData.Y;
            MLCamera.YUVBuffer uBuffer = frameData.U;
            MLCamera.YUVBuffer vBuffer = frameData.V;

            int width  = (int)yBuffer.Stride;
            int height = (int)yBuffer.Height;
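
            // I420 layout: one 8-bit Mat of (height * 3/2) rows by width columns.
            // The Y plane fills the first `height` rows; the quarter-size U and V
            // planes pack into height/4 rows each, starting at rows `height` and
            // `height + height/4` respectively.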

            if (yuvMat == null)
            {
                yuvMat = new Mat(height + height / 2, width, CvType.CV_8UC1);
            }
            yuvMat.put(0, 0, yBuffer.Data);
            yuvMat.put(height, 0, uBuffer.Data);
            yuvMat.put(height + height / 4, 0, vBuffer.Data);

            if (rgbMat == null)
            {
                rgbMat = new Mat();
            }

            Imgproc.cvtColor(yuvMat, rgbMat, Imgproc.COLOR_YUV2RGB_I420);

            // Apply the example's OpenCV-based processing to the RGB frame (ProcessMat is not shown in this snippet).
            ProcessMat(rgbMat);

            Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);

            // Lazily create the display texture at the padded Stride width; mainTextureScale crops the row padding and the -1.0f Y scale flips the image vertically.
            if (_rawVideoTexture == null)
            {
                _rawVideoTexture                          = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.RGB24, false);
                _rawVideoTexture.filterMode               = FilterMode.Point;
                _screenRenderer.material.mainTexture      = _rawVideoTexture;
                _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            }

            Utils.fastMatToTexture2D(rgbMat, _rawVideoTexture, false);

            Utils.setDebugMode(false);
        }
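
        // Cleanup sketch: yuvMat and rgbMat are allocated once and reused across
        // frames; with OpenCVForUnity they should be released when the component is
        // destroyed, e.g. (assuming this component owns the Mats):
        //
        // private void OnDestroy()
        // {
        //     yuvMat?.Dispose();
        //     rgbMat?.Dispose();
        // }
        //
        // Hookup note (an assumption, not shown in the example): with the Lumin-era
        // MLCamera API, a callback with this signature is typically subscribed to the
        // raw YUV frame event, e.g.
        //     MLCamera.OnRawVideoFrameAvailableYUV += OnRawCaptureDataReceived;
        // and unsubscribed before the camera is disconnected.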