Example #1
        /// <summary>
        /// Display the raw video frame on the texture object.
        /// </summary>
        /// <param name="extras">Unused.</param>
        /// <param name="frameData">Contains raw frame bytes to manipulate.</param>
        /// <param name="frameMetadata">Unused.</param>
        public void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            YUVBuffer yBuffer = frameData.Y;

            if (_rawVideoTexture == null)
            {
                _rawVideoTexture                          = new Texture2D((int)yBuffer.Stride, (int)yBuffer.Height, TextureFormat.R8, false);
                _rawVideoTexture.filterMode               = FilterMode.Point;
                _screenRenderer.material.mainTexture      = _rawVideoTexture;
                _screenRenderer.material.mainTextureScale = new Vector2(yBuffer.Width / (float)yBuffer.Stride, -1.0f);
            }

            ProcessImage(yBuffer.Data, _posterizationLevels);
            _rawVideoTexture.LoadRawTextureData(yBuffer.Data);

            // Debug.Log(yBuffer.Data.Length);
            // RgbNet.Shared.sendUdp(yBuffer.Data);

            Debug.Log("CurFrame: " + RgbNet.Shared.CurFrame);
            if (RgbNet.Shared.CurFrame % 60 == 0)
            {
                StartCoroutine(RgbNet.Shared.SendFrameChunks(yBuffer.Data));
            }
            RgbNet.Shared.CurFrame += 1;

            _rawVideoTexture.Apply();
        }
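Both ProcessImage and RgbNet.Shared.SendFrameChunks are referenced above but not shown. A minimal sketch of each, assuming ProcessImage posterizes the Y plane in place and SendFrameChunks streams the frame as fixed-size UDP datagrams (the chunk size and endpoint are placeholders, not values from the original):

        using System.Collections;
        using System.Net.Sockets;
        using UnityEngine;

        /// <summary>
        /// Quantizes each luminance byte down to [levels] distinct values.
        /// </summary>
        private static void ProcessImage(byte[] data, int levels)
        {
            if (levels < 2) return; // Nothing to posterize.

            float step = 255f / (levels - 1);
            for (int i = 0; i < data.Length; ++i)
            {
                // Snap each sample to its nearest posterization level.
                data[i] = (byte)(Mathf.Round(data[i] / step) * step);
            }
        }

        /// <summary>
        /// Sends one frame as a sequence of UDP datagrams, yielding between
        /// chunks so a large frame does not stall the main thread.
        /// </summary>
        public IEnumerator SendFrameChunks(byte[] frame)
        {
            const int chunkSize = 8192; // Placeholder; keep below the datagram limit.
            using (var udp = new UdpClient())
            {
                for (int offset = 0; offset < frame.Length; offset += chunkSize)
                {
                    int count = Mathf.Min(chunkSize, frame.Length - offset);
                    byte[] chunk = new byte[count];
                    System.Array.Copy(frame, offset, chunk, 0, count);
                    udp.Send(chunk, count, "127.0.0.1", 5065); // Placeholder endpoint.
                    yield return null;
                }
            }
        }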
Example #2
    private void Handle_OnCaptureCompleted(MLCameraResultExtras cameraResult, string data)
    {
        Debug.Log($"Handle_OnCaptureCompleted({cameraResult}, {data})");

        _debugText = $"Handle_OnCaptureCompleted({cameraResult}, {data})";
        _txtDebug.SetText(_debugText);

        CheckFilePathExist();
    }
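CheckFilePathExist is not shown; a plausible sketch, assuming it only verifies that the capture landed on disk before anything reads it back (the file name is a placeholder):

    private void CheckFilePathExist()
    {
        string path = System.IO.Path.Combine(Application.persistentDataPath, "capture.jpg"); // Placeholder path.
        Debug.Log(System.IO.File.Exists(path)
            ? $"Capture file found: {path}"
            : $"Capture file missing: {path}");
    }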
        /// <summary>
        /// Handles the event for raw capture data received and forwards it to any listeners.
        /// Sets the orientation of the framePoseTransform to the current frame pose.
        /// </summary>
        /// <param name="extras">Contains timestamp to use with GetFramePose, also forwarded to listeners.</param>
        /// <param name="frameData">Forwarded to listeners.</param>
        /// <param name="frameMetadata">Forwarded to listeners.</param>
        private void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            Matrix4x4 matrix = Matrix4x4.identity;
            MLResult  result = MLCamera.GetFramePose(extras.VcamTimestampUs * 1000, out matrix);

            // Only move the transform when the pose lookup succeeds; otherwise the
            // identity matrix would snap the frame pose to the origin.
            if (result.IsOk)
            {
                _framePoseTransform.position = matrix.MultiplyPoint(Vector3.zero);
                _framePoseTransform.rotation = matrix.rotation;
            }

            OnRawVideoDataReceived?.Invoke(extras, frameData, frameMetadata);
        }
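A listener such as the visualizer in Example #1 subscribes to the forwarded event; a minimal wiring sketch, assuming the forwarder above is exposed as a rawVideoCapture reference:

        void OnEnable()
        {
            // Receive each raw frame after the forwarder has updated the frame pose.
            rawVideoCapture.OnRawVideoDataReceived += OnRawCaptureDataReceived;
        }

        void OnDisable()
        {
            rawVideoCapture.OnRawVideoDataReceived -= OnRawCaptureDataReceived;
        }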
        /// <summary>
        /// Converts the Y plane of [frameData] into a Unity texture, stores the frame
        /// pose in [camera_pose], and raises OnImageCaptured with the new texture.
        /// </summary>
        public void OnFrameCaptured(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            ulong     vcamtimestamp = extras.VcamTimestampUs;
            YUVBuffer yData         = frameData.Y;

            byte[] imageData = yData.Data;

            Texture2D texture = new Texture2D((int)yData.Stride, (int)yData.Height, TextureFormat.R8, false);

            texture.LoadRawTextureData(imageData);
            texture.Apply();

            MLCamera.GetFramePose(vcamtimestamp * 1000, out camera_pose);

            OnImageCaptured(texture);
        }
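The snippet assumes an OnImageCaptured callback and a camera_pose field declared elsewhere; plausible shapes are sketched below. Note that the original invokes the event without a null check and allocates a fresh Texture2D every frame, rather than reusing one as the visualizer in Example #1 does.

        // Assumed declarations backing OnFrameCaptured above.
        public event System.Action<Texture2D> OnImageCaptured; // Safer to raise as OnImageCaptured?.Invoke(texture).
        private Matrix4x4 camera_pose;                         // Filled by MLCamera.GetFramePose.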
        private void OnCaptureRawVideoFrameComplete(MLCameraResultExtras result_extras, YUVFrameInfo imageInfo, MLCameraFrameMetadata metadata)
        {
            Debug.LogFormat("OnCaptureRawVideoFrameComplete Entered");
            lock (_cameraLockObject)
            {
                _isCapturing = false;
            }

            ulong vcamtimestamp = result_extras.VcamTimestampUs;

            byte[] imageData = imageInfo.Y.Data;

            // Initialize to an 8x8 texture so there is no discrepancy
            // between uninitialized captures and the error texture.
            Texture2D texture = new Texture2D(8, 8);
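            // Note: LoadImage decodes JPEG/PNG byte streams; raw Y-plane data as used in
            // the snippets above would instead need LoadRawTextureData with TextureFormat.R8.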
            bool      status  = texture.LoadImage(imageData);

            // Only forward the texture when decoding succeeded and the result is no
            // longer the 8x8 placeholder.
            if (status && !(texture.width == 8 && texture.height == 8))
            {
                OnImageReceivedEvent.Invoke(texture);
            }
        }
Example #7
    private void OnCaptureCompleted(MLCameraResultExtras extras, string extraString)
    {
        Matrix4x4 matrix4X4  = new Matrix4x4();
        MLResult  poseResult = MLCamera.GetFramePose(extras.VcamTimestampUs * 1000, out matrix4X4);
        Pose      pose       = new Pose
        {
            rotation    = matrix4X4.rotation,
            translation = new Vector3(matrix4X4.m03, matrix4X4.m13, matrix4X4.m23)
        };

        String info = String.Format("OnCaptureCompleted.\n Frame number: {0}, Frame time: {1}\nExtra String: {2}", extras.FrameNumber, extras.VcamTimestampUs, extraString);

        Debug.Log(info);
        Debug.Log(String.Format("========\n{4}\n{3}\n{0}\nrotation:{1}\ntranslation:{2}\n===========\n", matrix4X4, pose.rotation.ToString("f4"), pose.translation.ToString("f4"), poseResult, info));
        _text.text = String.Format("Frame#{0}, FrameTime: {1}\nRotation:{2}\nTranslation:{3}", extras.FrameNumber, extras.VcamTimestampUs / 1000, pose.rotation.ToString("f4"), pose.translation.ToString("f4"));
        _results.images.Add(new ImageInfo
        {
            fileName     = String.Format(@"image{0}.jpeg", _results.images.Count),
            timestampSec = ((double)extras.VcamTimestampUs) / 1000000.0d,
            poseStatus   = poseResult.ToString(),
            framePose    = pose,
            poseMatrix   = matrix4X4
        });

        lock (_cameraLockObject)
        {
            _isCapturing = false;
        }

        if (_autoCapture)
        {
            TriggerAsyncCapture();
        }
        else
        {
            GenerateJsonReport();
        }
    }
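GenerateJsonReport and the report types are not shown; a hypothetical sketch, assuming the types are simple serializable holders for the fields used above and the report is written with JsonUtility (the file location is a placeholder):

    using System;
    using System.Collections.Generic;
    using System.IO;
    using UnityEngine;

    [Serializable] public class Pose      { public Quaternion rotation; public Vector3 translation; }
    [Serializable] public class ImageInfo { public string fileName; public double timestampSec; public string poseStatus; public Pose framePose; public Matrix4x4 poseMatrix; }
    [Serializable] public class Results   { public List<ImageInfo> images = new List<ImageInfo>(); }

    private void GenerateJsonReport()
    {
        // Serialize everything captured so far and write it next to the images.
        string json = JsonUtility.ToJson(_results, true);
        string path = Path.Combine(Application.persistentDataPath, "capture_report.json"); // Placeholder location.
        File.WriteAllText(path, json);
        Debug.Log($"Capture report written to {path}");
    }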