Example #1
        /// <summary>
        /// Display the raw video frame on the texture object.
        /// </summary>
        /// <param name="extras">Unused.</param>
        /// <param name="frameData">Contains raw frame bytes to manipulate.</param>
        /// <param name="frameMetadata">Unused.</param>
        public void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            YUVBuffer yBuffer = frameData.Y;

            if (_rawVideoTexture == null)
            {
                _rawVideoTexture                          = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.R8, false);
                _rawVideoTexture.filterMode               = FilterMode.Point;
                _screenRenderer.material.mainTexture      = _rawVideoTexture;
                _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            }

            ProcessImage(yBuffer.Data, _posterizationLevels);
            _rawVideoTexture.LoadRawTextureData(yBuffer.Data);

            // Debug.Log(yBuffer.Data.Length);
            // RgbNet.Shared.sendUdp(yBuffer.Data);

            Debug.Log("CurFrame: " + RgbNet.Shared.CurFrame);
            if (RgbNet.Shared.CurFrame % 60 == 0)
            {
                StartCoroutine(RgbNet.Shared.SendFrameChunks(yBuffer.Data));
            }
            RgbNet.Shared.CurFrame += 1;

            _rawVideoTexture.Apply();
        }
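
RgbNet.Shared.SendFrameChunks is not shown in this listing. A minimal sketch of such a coroutine, assuming a plain UdpClient, an arbitrary 1024-byte chunk size, and a placeholder endpoint (all assumptions; requires System, System.Collections, System.Net.Sockets, and UnityEngine):

        // Sketch only: the real RgbNet.Shared.SendFrameChunks is not in the
        // listing. Splits one frame into datagram-sized chunks and yields
        // between sends so a large frame does not stall a single Update tick.
        private readonly UdpClient _udpClient = new UdpClient();

        public IEnumerator SendFrameChunks(byte[] frame)
        {
            const int chunkSize = 1024;                    // assumed payload size
            for (int offset = 0; offset < frame.Length; offset += chunkSize)
            {
                int    count = Mathf.Min(chunkSize, frame.Length - offset);
                byte[] chunk = new byte[count];
                Buffer.BlockCopy(frame, offset, chunk, 0, count);
                _udpClient.Send(chunk, chunk.Length, "127.0.0.1", 9000); // assumed endpoint
                yield return null;                         // spread sends across frames
            }
        }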
        /// <summary>
        /// Handles the event for raw capture data recieved, and forwards it to any listeners.
        /// Sets the orientation of the framePoseTransform to the current frame pose.
        /// </summary>
        /// <param name="extras">Contains timestamp to use with GetFramePose, also forwarded to listeners.</param>
        /// <param name="frameData">Forwarded to listeners.</param>
        /// <param name="frameMetadata">Forwarded to listeners.</param>
        private void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            Matrix4x4 matrix = Matrix4x4.identity;
            // VcamTimestampUs is in microseconds; GetFramePose takes nanoseconds.
            MLResult  result = MLCamera.GetFramePose(extras.VcamTimestampUs * 1000, out matrix);

            _framePoseTransform.position = matrix.MultiplyPoint(Vector3.zero);
            _framePoseTransform.rotation = matrix.rotation;

            OnRawVideoDataReceived?.Invoke(extras, frameData, frameMetadata);
        }
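
OnRawVideoDataReceived, raised by the handler above, would be consumed by other components; a sketch of that wiring, where _rawVideoCapture and _videoDisplay are assumed references to this script and a display script such as the one in Example #1:

        // Sketch only: listener wiring for the forwarding event above
        // (the field names here are assumptions, not from the listing).
        private void OnEnable()
        {
            _rawVideoCapture.OnRawVideoDataReceived += _videoDisplay.OnRawCaptureDataReceived;
        }

        private void OnDisable()
        {
            _rawVideoCapture.OnRawVideoDataReceived -= _videoDisplay.OnRawCaptureDataReceived;
        }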
        /// <summary>
        /// Display the raw video frame on the texture object.
        /// </summary>
        /// <param name="extras">Unused.</param>
        /// <param name="frameData">Contains raw frame bytes to manipulate.</param>
        /// <param name="frameMetadata">Unused.</param>
        public void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            YUVBuffer yBuffer = frameData.Y;

            if (_rawVideoTexture == null)
            {
                _rawVideoTexture                          = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.R8, false);
                _rawVideoTexture.filterMode               = FilterMode.Point;
                _screenRenderer.material.mainTexture      = _rawVideoTexture;
                _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            }

            ProcessImage(yBuffer.Data, _posterizationLevels);
            _rawVideoTexture.LoadRawTextureData(yBuffer.Data);
            _rawVideoTexture.Apply();
        }
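
ProcessImage is called here (and in Example #1) but is not part of the listing; the call site suggests an in-place posterization pass over the Y plane. A minimal sketch under that assumption:

        // Sketch only: the real ProcessImage is not in the listing. Quantizes
        // each 8-bit luma sample in place to the given number of posterization
        // levels (assumed to be >= 2).
        private static void ProcessImage(byte[] data, int levels)
        {
            float step = 255f / (levels - 1);
            for (int i = 0; i < data.Length; i++)
            {
                // Snap the sample to its nearest posterization level.
                data[i] = (byte)(Mathf.RoundToInt(data[i] / step) * step);
            }
        }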
        /// <summary>
        /// Handles converting [frameData] from byte data into Unity Texture and gets frame pose, and
        /// stores into [camera_pose]. OnImageCaptured triggered by this function.
        /// </summary>
        public void OnFrameCaptured(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            ulong     vcamtimestamp = extras.VcamTimestampUs;
            YUVBuffer yData         = frameData.Y;

            byte[] imageData = yData.Data;

            Texture2D texture = new Texture2D((int)yData.Stride, (int)yData.Height, TextureFormat.R8, false);

            texture.LoadRawTextureData(imageData);
            texture.Apply();

            MLCamera.GetFramePose(vcamtimestamp * 1000, out camera_pose);

            OnImageCaptured(texture);
        }
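
The camera_pose field and OnImageCaptured member used above are not declared in the listing; assuming a pose matrix plus a texture callback (Action requires System), the backing declarations might be:

        // Sketch only: assumed declarations backing OnFrameCaptured above.
        private Matrix4x4 camera_pose = Matrix4x4.identity; // written by GetFramePose
        public event Action<Texture2D> OnImageCaptured;     // fired with the finished texture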

Example #5
        private void OnCaptureRawVideoFrameComplete(MLCameraResultExtras result_extras, YUVFrameInfo imageInfo, MLCameraFrameMetadata metadata)
        {
            Debug.LogFormat("OnCaptureRawVideoFrameComplete Entered");
            lock (_cameraLockObject)
            {
                _isCapturing = false;
            }

            ulong vcamtimestamp = result_extras.VcamTimestampUs;

            byte[] imageData = imageInfo.Y.Data;

            // Initialize to an 8x8 texture so there is no discrepancy
            // between uninitialized captures and the error texture.
            Texture2D texture = new Texture2D(8, 8);

            // LoadImage expects JPEG/PNG-encoded bytes.
            bool      status  = texture.LoadImage(imageData);

            // Only an unchanged 8x8 texture indicates a failed load.
            if (status && (texture.width != 8 || texture.height != 8))
            {
                OnImageReceivedEvent.Invoke(texture);
            }
        }
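
The listing does not show how OnCaptureRawVideoFrameComplete gets registered. Assuming the Lumin-era MLCamera.OnRawVideoFrameAvailable event, whose handler signature matches, the wiring might be:

        // Sketch only: the event name below is an assumption based on the
        // handler signature; consult the MLCamera API in use to confirm it.
        private void OnEnable()
        {
            MLCamera.OnRawVideoFrameAvailable += OnCaptureRawVideoFrameComplete;
        }

        private void OnDisable()
        {
            MLCamera.OnRawVideoFrameAvailable -= OnCaptureRawVideoFrameComplete;
        }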