        /// <summary>
        /// Handles the event for raw capture data received and forwards it to any listeners.
        /// Sets the position and rotation of the framePoseTransform to the current frame pose.
        /// </summary>
        /// <param name="extras">Contains the timestamp to use with GetFramePose; also forwarded to listeners.</param>
        /// <param name="frameData">Forwarded to listeners.</param>
        /// <param name="frameMetadata">Forwarded to listeners.</param>
        private void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            // The microsecond timestamp is converted to nanoseconds for GetFramePose.
            Matrix4x4 matrix = Matrix4x4.identity;
            MLResult  result = MLCamera.GetFramePose(extras.VcamTimestampUs * 1000, out matrix);

            // Apply the frame pose to the transform that tracks the capture camera.
            _framePoseTransform.position = matrix.MultiplyPoint(Vector3.zero);
            _framePoseTransform.rotation = matrix.rotation;

            OnRawVideoDataReceived?.Invoke(extras, frameData, frameMetadata);
        }
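For context, a handler with this signature is normally subscribed to the raw video event while capture is active. A minimal sketch, assuming the Lumin-era MLCamera.OnRawVideoFrameAvailable event; the handler body is taken from the example above and everything else is illustrative:

using UnityEngine;
using UnityEngine.XR.MagicLeap;

// Sketch only: the event name is assumed from the Lumin MLCamera API and should be
// verified against the SDK version in use. Assumes the camera has already been
// connected and capture started elsewhere.
public class RawVideoPoseTracker : MonoBehaviour
{
    [SerializeField]
    private Transform _framePoseTransform = null;

    public event System.Action<MLCameraResultExtras, YUVFrameInfo, MLCameraFrameMetadata> OnRawVideoDataReceived;

    private void OnEnable()
    {
        // Assumed event name; subscribe so each raw frame updates the pose transform.
        MLCamera.OnRawVideoFrameAvailable += OnRawCaptureDataReceived;
    }

    private void OnDisable()
    {
        MLCamera.OnRawVideoFrameAvailable -= OnRawCaptureDataReceived;
    }

    private void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
    {
        // Same body as the example above.
        Matrix4x4 matrix = Matrix4x4.identity;
        MLCamera.GetFramePose(extras.VcamTimestampUs * 1000, out matrix);

        _framePoseTransform.position = matrix.MultiplyPoint(Vector3.zero);
        _framePoseTransform.rotation = matrix.rotation;

        OnRawVideoDataReceived?.Invoke(extras, frameData, frameMetadata);
    }
}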
        /// <summary>
        /// Converts [frameData] from raw byte data into a Unity Texture2D, retrieves the frame pose
        /// and stores it in [camera_pose], then raises OnImageCaptured with the resulting texture.
        /// </summary>
        public void OnFrameCaptured(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
        {
            ulong     vcamtimestamp = extras.VcamTimestampUs;
            YUVBuffer yData         = frameData.Y;

            byte[] imageData = yData.Data;

            // Only the Y (luma) plane is uploaded, producing a grayscale R8 texture.
            // The buffer stride is used as the texture width, so rows may include right-side padding.
            Texture2D texture = new Texture2D((int)yData.Stride, (int)yData.Height, TextureFormat.R8, false);

            texture.LoadRawTextureData(imageData);
            texture.Apply();

            // Convert the microsecond timestamp to nanoseconds and store the frame pose in camera_pose.
            MLCamera.GetFramePose(vcamtimestamp * 1000, out camera_pose);

            OnImageCaptured(texture);
        }
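A listener for OnImageCaptured can display the grayscale texture directly. A minimal sketch; the RawImage field and the HandleImageCaptured hookup are illustrative and not part of the example above:

using UnityEngine;
using UnityEngine.UI;

// Illustrative consumer of the texture produced by OnFrameCaptured.
public class CapturePreview : MonoBehaviour
{
    [SerializeField]
    private RawImage _previewImage = null;

    // Wire this up to the OnImageCaptured callback of the capture component.
    public void HandleImageCaptured(Texture2D texture)
    {
        // The texture is R8 grayscale and as wide as the Y-plane stride,
        // so the right edge may contain padding bytes.
        _previewImage.texture = texture;
        _previewImage.SetNativeSize();
    }
}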
Example #3
    /// <summary>
    /// Handles the capture-completed event: looks up the frame pose for the captured image,
    /// logs and records it, then either triggers the next capture or generates the JSON report.
    /// </summary>
    private void OnCaptureCompleted(MLCameraResultExtras extras, string extraString)
    {
        // Convert the microsecond timestamp to nanoseconds and query the frame pose.
        Matrix4x4 matrix4X4  = new Matrix4x4();
        MLResult  poseResult = MLCamera.GetFramePose(extras.VcamTimestampUs * 1000, out matrix4X4);

        // Decompose the pose matrix into a rotation and a translation (its last column).
        Pose      pose       = new Pose
        {
            rotation    = matrix4X4.rotation,
            translation = new Vector3(matrix4X4.m03, matrix4X4.m13, matrix4X4.m23)
        };

        String info = String.Format("OnCaptureCompleted.\n Frame number: {0}, Frame time: {1}\nExtra String: {2}", extras.FrameNumber, extras.VcamTimestampUs, extraString);

        Debug.Log(info);
        Debug.Log(String.Format("========\n{4}\n{3}\n{0}\nrotation:{1}\ntranslation:{2}\n===========\n", matrix4X4, pose.rotation.ToString("f4"), pose.translation.ToString("f4"), poseResult, info));
        _text.text = String.Format("Frame#{0}, FrameTime: {1}\nRotation:{2}\nTranslation:{3}", extras.FrameNumber, extras.VcamTimestampUs / 1000, pose.rotation.ToString("f4"), pose.translation.ToString("f4"));
        _results.images.Add(new ImageInfo
        {
            fileName     = String.Format(@"image{0}.jpeg", _results.images.Count),
            timestampSec = ((double)extras.VcamTimestampUs) / 1000000.0d,
            poseStatus   = poseResult.ToString(),
            framePose    = pose,
            poseMatrix   = matrix4X4
        });

        lock (_cameraLockObject)
        {
            _isCapturing = false;
        }

        if (_autoCapture)
        {
            TriggerAsyncCapture();
        }
        else
        {
            GenerateJsonReport();
        }
    }
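The report types used above (ImageInfo, the _results container, and the Pose with rotation and translation fields) are not shown in this example. A minimal sketch of what they could look like, inferred only from the fields the snippet assigns; the type names ImagePose, CaptureResults, and CaptureReport.WriteJson are hypothetical:

using System;
using System.Collections.Generic;
using System.IO;
using UnityEngine;

// Sketch of the report data inferred from the usage above; the real project may differ.
[Serializable]
public class ImagePose            // corresponds to the Pose type used in the snippet
{
    public Quaternion rotation;
    public Vector3 translation;
}

[Serializable]
public class ImageInfo
{
    public string fileName;
    public double timestampSec;
    public string poseStatus;
    public ImagePose framePose;
    public Matrix4x4 poseMatrix;
}

[Serializable]
public class CaptureResults
{
    public List<ImageInfo> images = new List<ImageInfo>();
}

public static class CaptureReport
{
    // A GenerateJsonReport implementation could simply serialize the collected results.
    public static void WriteJson(CaptureResults results, string path)
    {
        File.WriteAllText(path, JsonUtility.ToJson(results, true));
    }
}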