Code example #1
(score: 0)
    /// <summary>
    /// Returns the latest photo/video camera frame as a SoftwareBitmap and
    /// refreshes the cached camera-to-world and projection matrices.
    /// Returns null when the webcam is not initialized, no frame is
    /// available, or the latest frame was already consumed.
    /// </summary>
    public SoftwareBitmap GetImage()
    {
        // Bail out until the webcam capture pipeline is up.
        if (!webcamInitialized)
        {
            return null;
        }

        // Pull the most recent photo/video frame from the sensor group.
        SensorFrame latestFrame =
            _holoLensMediaFrameSourceGroup.GetLatestSensorFrame(SensorType.PhotoVideo);

        // Skip duplicate frames: same timestamp means this frame was
        // already returned by a previous call.
        if (latestFrame == null || latestFrame.Timestamp == lastFrameTimestamp)
        {
            return null;
        }

        lastFrameTimestamp = latestFrame.Timestamp;

        // Copy FrameToOrigin into the Unity-style matrix. The rotation part
        // is transposed (M11/M21/M31 -> row 0, etc.) and the third row plus
        // the Z translation are negated -- presumably converting from the
        // right-handed Windows Perception space to Unity's left-handed
        // convention. TODO(review): confirm the handedness assumption.
        webcamToWorldMatrix.m00 = latestFrame.FrameToOrigin.M11;
        webcamToWorldMatrix.m01 = latestFrame.FrameToOrigin.M21;
        webcamToWorldMatrix.m02 = latestFrame.FrameToOrigin.M31;

        webcamToWorldMatrix.m10 = latestFrame.FrameToOrigin.M12;
        webcamToWorldMatrix.m11 = latestFrame.FrameToOrigin.M22;
        webcamToWorldMatrix.m12 = latestFrame.FrameToOrigin.M32;

        webcamToWorldMatrix.m20 = -latestFrame.FrameToOrigin.M13;
        webcamToWorldMatrix.m21 = -latestFrame.FrameToOrigin.M23;
        webcamToWorldMatrix.m22 = -latestFrame.FrameToOrigin.M33;

        webcamToWorldMatrix.m03 = latestFrame.FrameToOrigin.Translation.X;
        webcamToWorldMatrix.m13 = latestFrame.FrameToOrigin.Translation.Y;
        webcamToWorldMatrix.m23 = -latestFrame.FrameToOrigin.Translation.Z;
        webcamToWorldMatrix.m33 = 1;

        // Build the projection matrix once from the first frame's camera
        // intrinsics (focal length / principal point); the intrinsics are
        // assumed constant for the session.
        if (!imageInitialized)
        {
            height = latestFrame.SoftwareBitmap.PixelHeight;
            width  = latestFrame.SoftwareBitmap.PixelWidth;

            projectionMatrix     = new Matrix4x4();
            projectionMatrix.m00 = 2 * latestFrame.CameraIntrinsics.FocalLength.X / width;
            projectionMatrix.m11 = 2 * latestFrame.CameraIntrinsics.FocalLength.Y / height;
            projectionMatrix.m02 = -2 * (latestFrame.CameraIntrinsics.PrincipalPoint.X - width / 2) / width;
            projectionMatrix.m12 = 2 * (latestFrame.CameraIntrinsics.PrincipalPoint.Y - height / 2) / height;
            projectionMatrix.m22 = -1;
            projectionMatrix.m33 = -1;

            imageInitialized = true;
        }

        return latestFrame.SoftwareBitmap;
    }
Code example #2
(score: 0)
        // Grab the newest photo/video frame from the HoloLens media frame
        // source group, spawn a game object for each detected ArUco marker,
        // and render the frame into _pvMaterial's texture.
        unsafe void UpdateHoloLensMediaFrameSourceGroup()
        {
#if ENABLE_WINMD_SUPPORT
            if (!_mediaFrameSourceGroupsStarted ||
                _pvMediaFrameSourceGroup == null)
            {
                return;
            }

            // Destroy the marker instances spawned on prior frames so game
            // objects do not pile up on top of each marker.
            foreach (var marker in _markerGOs)
            {
                Destroy(marker);
            }
            // BUGFIX: also clear the list. Previously the destroyed
            // instances were kept, so the list grew without bound and
            // Destroy() was re-invoked on dead objects every frame.
            _markerGOs.Clear();

            // Latest photo/video sensor frame; nothing to do until the
            // sensor starts delivering frames.
            SensorFrame latestPvCameraFrame =
                _pvMediaFrameSourceGroup.GetLatestSensorFrame(
                    _sensorType);

            if (latestPvCameraFrame == null)
            {
                return;
            }

            // Detect ArUco markers in the current frame.
            // (Dropped a dead `new List<DetectedArUcoMarker>()` allocation
            // that was immediately overwritten by this call.)
            IList<DetectedArUcoMarker> detectedArUcoMarkers =
                _arUcoMarkerTracker.DetectArUcoMarkersInFrame(latestPvCameraFrame);

            // Instantiate a game object at each detected marker's pose.
            foreach (var detectedMarker in detectedArUcoMarkers)
            {
                // Convert the OpenCV pose to Unity conventions: flip Y and
                // turn the Rodrigues rotation vector into a quaternion.
                Vector3 position = CvUtils.Vec3FromFloat3(detectedMarker.Position);
                position.y *= -1f;
                Quaternion rotation = CvUtils.RotationQuatFromRodrigues(
                    CvUtils.Vec3FromFloat3(detectedMarker.Rotation));
                Matrix4x4 cameraToWorldUnity =
                    CvUtils.Mat4x4FromFloat4x4(detectedMarker.CameraToWorldUnity);
                Matrix4x4 transformUnityCamera =
                    CvUtils.TransformInUnitySpace(position, rotation);

                // Camera-space pose -> world-space pose.
                Matrix4x4 transformUnityWorld = cameraToWorldUnity * transformUnityCamera;

                // Instantiate the marker game object in world coordinates.
                var thisGo = Instantiate(
                    markerGo,
                    CvUtils.GetVectorFromMatrix(transformUnityWorld),
                    CvUtils.GetQuatFromMatrix(transformUnityWorld)) as GameObject;

                // Scale the game object to the physical marker size.
                thisGo.transform.localScale = new Vector3(markerSize, markerSize, markerSize);
                _markerGOs.Add(thisGo);
            }

            // Convert the frame to BGRA8 so Unity can consume it.
            // NOTE(review): SoftwareBitmap is IDisposable and this converted
            // copy is never disposed -- consider disposing it after Apply()
            // once it is confirmed LoadRawTextureData copies the buffer.
            var pvFrame = SoftwareBitmap.Convert(
                latestPvCameraFrame.SoftwareBitmap,
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Ignore);

            // Recreate the display texture each frame; destroy the old one
            // first so Texture2D instances do not leak.
            Destroy(_pvTexture);
            _pvTexture = new Texture2D(
                pvFrame.PixelWidth,
                pvFrame.PixelHeight,
                TextureFormat.BGRA32, false);

            // Upload the raw BGRA bytes (4 bytes per pixel) and bind the
            // texture to the target material.
            byte *inBytesPV = GetByteArrayFromSoftwareBitmap(pvFrame);
            _pvTexture.LoadRawTextureData((IntPtr)inBytesPV, pvFrame.PixelWidth * pvFrame.PixelHeight * 4);
            _pvTexture.Apply();
            _pvMaterial.mainTexture = _pvTexture;

            myText.text = "Began streaming sensor frames. Double tap to end streaming.";
#endif
        }
Code example #3
(score: 0)
        // Grab the newest photo/video and short-throw depth frames, map the
        // depth data onto the photo/video camera, and render the combined
        // frame into _pvDepthMaterial's texture.
        unsafe void UpdateHoloLensMediaFrameSourceGroup()
        {
#if ENABLE_WINMD_SUPPORT
            if (!_mediaFrameSourceGroupsStarted ||
                _pvMediaFrameSourceGroup == null ||
                _shortDepthMediaFrameSourceGroup == null)
            {
                return;
            }

            // Latest photo/video sensor frame.
            SensorFrame latestPvCameraFrame =
                _pvMediaFrameSourceGroup.GetLatestSensorFrame(
                    _sensorType);

            // Latest ToF short-throw depth frame.
            SensorFrame latestShortDepthCameraFrame =
                _shortDepthMediaFrameSourceGroup.GetLatestSensorFrame(
                    _sensorTypeResearch);

            // BUGFIX: both sensors can return null before they start
            // delivering frames. Previously a null depth frame on the first
            // call left _depthPvMapper null and MapDepthToPV below threw a
            // NullReferenceException; null frames also flowed straight into
            // MapDepthToPV. Bail out until both frames are available.
            if (latestPvCameraFrame == null || latestShortDepthCameraFrame == null)
            {
                return;
            }

            // Lazily initialize the depth->PV mapper so it can cache the
            // resulting depth transform.
            if (!_isDepthPvMapperInit)
            {
                _depthPvMapper = new DepthPvMapper(
                    latestShortDepthCameraFrame);
                _isDepthPvMapperInit = true;
            }

            // Map depth frames to the photo/video camera using the
            // configured from/to depth range and patch radius.
            SensorFrame latestPvDepthFrame = _depthPvMapper.MapDepthToPV(
                latestPvCameraFrame,
                latestShortDepthCameraFrame,
                depthRangeFrom,
                depthRangeTo,
                patchRadius);

            // Convert the mapped frame to BGRA8 so Unity can consume it.
            // NOTE(review): SoftwareBitmap is IDisposable and this converted
            // copy is never disposed -- consider disposing it after Apply()
            // once it is confirmed LoadRawTextureData copies the buffer.
            var pvDepthFrame = SoftwareBitmap.Convert(
                latestPvDepthFrame.SoftwareBitmap,
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Ignore);

            // Recreate the display texture each frame; destroy the old one
            // first so Texture2D instances do not leak.
            Destroy(_pvDepthTexture);
            _pvDepthTexture = new Texture2D(
                pvDepthFrame.PixelWidth,
                pvDepthFrame.PixelHeight,
                TextureFormat.BGRA32, false);

            // Upload the raw BGRA bytes (4 bytes per pixel) and bind the
            // texture to the target material.
            byte *inBytesPV = GetByteArrayFromSoftwareBitmap(pvDepthFrame);
            _pvDepthTexture.LoadRawTextureData((IntPtr)inBytesPV, pvDepthFrame.PixelWidth * pvDepthFrame.PixelHeight * 4);
            _pvDepthTexture.Apply();
            _pvDepthMaterial.mainTexture = _pvDepthTexture;

            myText.text = "Began streaming sensor frames. Double tap to end streaming.";
#endif
        }