Example #1
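The two methods in this example reference fields and helpers from the containing MonoBehaviour that are not shown here. Below is a minimal sketch of plausible declarations for the Unity-side fields, with names and types inferred from how they are used; the plugin-side members (_pvMediaFrameSourceGroup, _sensorType, _mediaFrameSourceGroupsStarted, DetectedArUcoMarker, CvUtils) come from the project's native WinRT plugin and are not reproduced.

        // Hypothetical declarations inferred from the methods below; the original
        // class may name, type, or initialize these differently. Assumes the usual
        // UnityEngine, System.Collections.Generic and System.Threading.Tasks usings.
        public GameObject markerGo;      // prefab instantiated for each detected marker
        public float markerSize = 0.08f; // marker edge length in meters (assumed value)
        public int skipFrames = 3;       // run detection every skipFrames-th frame

        private int _frameCount = 0;
        private readonly List<GameObject> _markerGOs = new List<GameObject>();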
        // Update is called once per frame
        async void Update()
        {
#if ENABLE_WINMD_SUPPORT
            _frameCount += 1;

            // Run marker detection every skipFrames-th frame
            if (_frameCount == skipFrames)
            {
                var detections = await Task.Run(() => _pvMediaFrameSourceGroup.DetectArUcoMarkers(_sensorType));

                // Update the game object pose with current detections
                UpdateArUcoDetections(detections);

                _frameCount = 0;
            }
#endif
        }
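
        // UpdateArUcoDetections is not included in this example. Below is a minimal,
        // illustrative sketch of what it might look like, assuming it mirrors the
        // marker handling in UpdateHoloLensMediaFrameSourceGroup further down; the
        // project's actual implementation may differ.
#if ENABLE_WINMD_SUPPORT
        void UpdateArUcoDetections(IList<DetectedArUcoMarker> detections)
        {
            // Destroy the game objects spawned for the previous detections
            foreach (var marker in _markerGOs)
            {
                Destroy(marker);
            }
            _markerGOs.Clear();

            foreach (var detectedMarker in detections)
            {
                // Convert the OpenCV pose to a Unity world-space pose
                // (same conversion as in UpdateHoloLensMediaFrameSourceGroup)
                Vector3 position = CvUtils.Vec3FromFloat3(detectedMarker.Position);
                position.y *= -1f;
                Quaternion rotation = CvUtils.RotationQuatFromRodrigues(CvUtils.Vec3FromFloat3(detectedMarker.Rotation));
                Matrix4x4 cameraToWorldUnity = CvUtils.Mat4x4FromFloat4x4(detectedMarker.CameraToWorldUnity);
                Matrix4x4 transformUnityWorld = cameraToWorldUnity * CvUtils.TransformInUnitySpace(position, rotation);

                // Spawn a marker game object at the detected world pose
                var thisGo = Instantiate(
                    markerGo,
                    CvUtils.GetVectorFromMatrix(transformUnityWorld),
                    CvUtils.GetQuatFromMatrix(transformUnityWorld));
                thisGo.transform.localScale = new Vector3(markerSize, markerSize, markerSize);
                _markerGOs.Add(thisGo);
            }
        }
#endif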
        // Detect ArUco markers from the latest frame of the HoloLens media
        // frame source group (synchronous path -- kept for reference, not needed
        // when Update() runs detection asynchronously as above)
        unsafe void UpdateHoloLensMediaFrameSourceGroup()
        {
#if ENABLE_WINMD_SUPPORT
            if (!_mediaFrameSourceGroupsStarted ||
                _pvMediaFrameSourceGroup == null)
            {
                return;
            }

            // Destroy all marker game object instances from prior frames,
            // otherwise new game objects will pile up on top of the marker
            if (_markerGOs.Count != 0)
            {
                foreach (var marker in _markerGOs)
                {
                    Destroy(marker);
                }
                _markerGOs.Clear();
            }

            // Get latest sensor frames
            // Photo video
            //SensorFrame latestPvCameraFrame =
            //    _pvMediaFrameSourceGroup.GetLatestSensorFrame(
            //    _sensorType);

            //if (latestPvCameraFrame == null)
            //    return;

            // Detect ArUco markers in the current frame. The OpenCV Rodrigues docs
            // below describe the rotation-vector convention used for the marker pose:
            // https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#void%20Rodrigues(InputArray%20src,%20OutputArray%20dst,%20OutputArray%20jacobian)
            IList<DetectedArUcoMarker> detectedArUcoMarkers =
                _pvMediaFrameSourceGroup.DetectArUcoMarkers(_sensorType);

            //detectedArUcoMarkers =
            //    _arUcoMarkerTracker.DetectArUcoMarkersInFrame(latestPvCameraFrame);

            // If any markers were detected, display them
            if (detectedArUcoMarkers.Count != 0)
            {
                foreach (var detectedMarker in detectedArUcoMarkers)
                {
                    // Get pose from OpenCV and format for Unity
                    Vector3 position = CvUtils.Vec3FromFloat3(detectedMarker.Position);
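                    // Flip Y: likely accounts for the handedness difference between
                    // OpenCV's camera coordinates and Unity's left-handed, Y-up frame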
                    position.y *= -1f;
                    Quaternion rotation             = CvUtils.RotationQuatFromRodrigues(CvUtils.Vec3FromFloat3(detectedMarker.Rotation));
                    Matrix4x4  cameraToWorldUnity   = CvUtils.Mat4x4FromFloat4x4(detectedMarker.CameraToWorldUnity);
                    Matrix4x4  transformUnityCamera = CvUtils.TransformInUnitySpace(position, rotation);

                    // Use camera to world transform to get world pose of marker
                    Matrix4x4 transformUnityWorld = cameraToWorldUnity * transformUnityCamera;

                    // Instantiate game object marker in world coordinates
                    var thisGo = Instantiate(
                        markerGo,
                        CvUtils.GetVectorFromMatrix(transformUnityWorld),
                        CvUtils.GetQuatFromMatrix(transformUnityWorld)) as GameObject;

                    // Scale the game object to the size of the markers
                    thisGo.transform.localScale = new Vector3(markerSize, markerSize, markerSize);
                    _markerGOs.Add(thisGo);
                }
            }

            // Remove viewing of frame for now. Getting memory leaks
            // from passing the SensorFrame class object across the
            // WinRT ABI...

            //// Convert the frame to be unity viewable
            //var pvFrame = SoftwareBitmap.Convert(
            //    latestPvCameraFrame.SoftwareBitmap,
            //    BitmapPixelFormat.Bgra8,
            //    BitmapAlphaMode.Ignore);

            //// Display the incoming pv frames as a texture.
            //// Set texture to the desired renderer
            //Destroy(_pvTexture);
            //_pvTexture = new Texture2D(
            //    pvFrame.PixelWidth,
            //    pvFrame.PixelHeight,
            //    TextureFormat.BGRA32, false);

            //// Get byte array, update unity material with texture (RGBA)
            //byte* inBytesPV = GetByteArrayFromSoftwareBitmap(pvFrame);
            //_pvTexture.LoadRawTextureData((IntPtr)inBytesPV, pvFrame.PixelWidth * pvFrame.PixelHeight * 4);
            //_pvTexture.Apply();
            //_pvMaterial.mainTexture = _pvTexture;

            //myText.text = "Began streaming sensor frames. Double tap to end streaming.";
#endif
        }
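
The CvUtils helpers used above belong to the same project and are not shown in this example. For context on the OpenCV Rodrigues link in the comments: OpenCV returns rotations as a Rodrigues vector whose direction is the rotation axis and whose magnitude is the rotation angle in radians. A conversion to a Unity quaternion could look roughly like the sketch below (illustrative only; the project's actual CvUtils.RotationQuatFromRodrigues and CvUtils.TransformInUnitySpace may also handle the coordinate-handedness flip and other details).

        // Illustrative sketches, not the project's CvUtils implementation.
        static Quaternion QuatFromRodriguesSketch(Vector3 rodrigues)
        {
            // The magnitude of the Rodrigues vector is the rotation angle in radians
            float angleRad = rodrigues.magnitude;
            if (angleRad < 1e-6f)
                return Quaternion.identity;

            // The direction of the vector is the rotation axis
            Vector3 axis = rodrigues / angleRad;
            return Quaternion.AngleAxis(angleRad * Mathf.Rad2Deg, axis);
        }

        // Pack a camera-space pose into a TRS matrix so it can be composed with the
        // camera-to-world matrix, as done in the detection loop above.
        static Matrix4x4 TransformInUnitySpaceSketch(Vector3 position, Quaternion rotation)
        {
            return Matrix4x4.TRS(position, rotation, Vector3.one);
        }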