示例#1
0
    /// <summary>
    /// Unity Monobehavior function: update texture depending on whether new frame arrives,
    /// trigger visualization of video frame rate information, initialize media texture when
    /// initialization of video pipeline is done. [internal use]
    /// </summary>
    private void Update()
    {
        // Clear the init flag once signaled; texture (re)creation is currently disabled.
        if (signalInitDone)
        {
            // mediaTexture = new Texture2D(controller.frameWidth, controller.frameHeight, TextureFormat.RGBA32, false);
            // mediaMaterial.mainTexture = mediaTexture;
            signalInitDone = false;
        }

        // Preview update is currently disabled.
        // if (videoPreview && previewPlane.activeSelf && mediaTexture != null && signalTrackingUpdated) {
        //     UpdateVideoPreview();
        // }

        // Refresh the video FPS read-out if a UI text box is wired up.
        // NOTE: removed the leftover per-frame Debug.Log scaffolding ("update 0".."3")
        // that flooded the console every frame.
        videoDeltaTime = ARUWPUtils.GetVideoDeltaTime();
        if (videoFPS != null)
        {
            videoFPS.text = string.Format("Video:   {0:0.0} ms ({1:0.} fps)", videoDeltaTime, 1000.0f / videoDeltaTime);
        }
        signalTrackingUpdated = false;
    }
示例#2
0
 /// <summary>
 /// Queries the native ARToolKit layer for the current transformation of this marker
 /// and refreshes visibility/confidence state.
 /// </summary>
 /// <returns>true when the marker is visible; false on error or when invisible</returns>
 public bool updateMarkerTracking()
 {
     // An id of -1 means the marker was never registered with the native tracker.
     if (id == -1)
     {
         Debug.Log(TAG + ": Transformation not valid for id -1");
         visible = false;
         return(false);
     }

     // Query failed: the marker is not visible this frame.
     if (!ARUWP.aruwpQueryMarkerTransformation(id, trans))
     {
         visible    = false;
         confidence = 0.0f;
         return(false);
     }

     transMatrix = ARUWPUtils.ConvertARUWPFloatArrayToMatrix4x4(trans);
     if (performMagicFunction)
     {
         MagicFunction();
     }
     // Multi markers do not report a square confidence value.
     if (type != MarkerType.multi)
     {
         confidence = ARUWP.aruwpGetMarkerOptionFloat(id, ARUWP.ARUWP_MARKER_OPTION_SQUARE_CONFIDENCE);
     }
     visible = true;
     return(true);
 }
示例#3
0
    /// <summary>
    /// Unity Monobehavior function: update texture depending on whether new frame arrives,
    /// trigger visualization of video frame rate information, initialize media texture when
    /// initialization of video pipeline is done. [internal use]
    /// </summary>
    private void Update()
    {
        // Lazily (re)create the preview texture once the pipeline reports init done.
        if (signalInitDone)
        {
            if (mediaMaterial != null)
            {
                mediaTexture = new Texture2D(controller.frameWidth, controller.frameHeight, TextureFormat.RGBA32, false);
                mediaMaterial.mainTexture = mediaTexture;
            }
            signalInitDone = false;
        }

        // Push the newest frame to the preview plane when everything is wired up.
        bool previewReady = videoPreview && previewPlane != null && mediaMaterial != null;
        if (previewReady && signalTrackingUpdated)
        {
            UpdateVideoPreview();
        }

        // Refresh the video FPS read-out if a UI text box is assigned.
        videoDeltaTime = ARUWPUtils.GetVideoDeltaTime();
        if (videoFPS != null)
        {
            videoFPS.text = string.Format("Video:   {0:0.0} ms ({1:0.} fps)", videoDeltaTime, 1000.0f / videoDeltaTime);
        }

        signalTrackingUpdated = false;
    }
示例#4
0
    /// <summary>
    /// Unity Monobehavior function: upload newly arrived frames to the preview texture,
    /// refresh the video frame rate display, and create the media texture once the
    /// video pipeline reports that initialization is done. [internal use]
    /// </summary>
    private void LateUpdate()
    {
        if (signalInitDone)
        {
            if (mediaMaterial != null)
            {
                // Since v0.3, feature grayscale is forced, hence the single-channel format.
                mediaTexture = new Texture2D(controller.frameWidth, controller.frameHeight, TextureFormat.Alpha8, false);
                mediaMaterial.mainTexture = mediaTexture;
            }
            signalInitDone = false;
        }

        // Upload the newest grayscale frame when preview is enabled and wired up.
        bool previewReady = videoPreview && previewPlane != null && mediaMaterial != null;
        if (signalTrackingUpdated && previewReady && frameData != null)
        {
            // Since v0.3, feature grayscale is forced; raw bytes map 1:1 onto Alpha8.
            mediaTexture.LoadRawTextureData(frameData);
            mediaTexture.Apply();
        }

        // Refresh the video FPS read-out if a UI text box is assigned.
        videoDeltaTime = ARUWPUtils.GetVideoDeltaTime();
        if (videoFPS != null)
        {
            videoFPS.text = string.Format("Video:   {0:0.0} ms ({1:0.} fps)", videoDeltaTime, 1000.0f / videoDeltaTime);
        }

        signalTrackingUpdated = false;
    }
示例#5
0
    /// <summary>
    /// The callback that is triggered when new video preview frame arrives. In this function,
    /// video frame is saved for Unity UI if videoPreview is enabled, tracking task is triggered
    /// in this function call, and video FPS is recorded. [internal use]
    /// </summary>
    /// <param name="sender">MediaFrameReader object</param>
    /// <param name="args">arguments not used here</param>
    private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        // Record one video tick for the FPS counter, whether or not a frame is acquired.
        ARUWPUtils.VideoTick();
        using (var frame = sender.TryAcquireLatestFrame()) {
            if (frame != null)
            {
                float[] cameraToWorldMatrixAsFloat;
                if (TryGetCameraToWorldMatrix(frame, out cameraToWorldMatrixAsFloat) == false)
                {
                    // Pose unavailable for this frame; drop the frame entirely.
                    return;
                }

                // Publish the pose for other threads; Exchange makes the swap atomic.
                Interlocked.Exchange(ref _cameraToWorldMatrix, cameraToWorldMatrixAsFloat);

                // Convert to RGBA32 and release the original bitmap immediately.
                var originalSoftwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;
                var softwareBitmap         = SoftwareBitmap.Convert(originalSoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);
                originalSoftwareBitmap?.Dispose();
                if (videoPreview)
                {
                    // NOTE(review): the SoftwareBitmap previously stored in _bitmap is
                    // replaced here without Dispose() — looks like a native-memory leak
                    // unless the preview consumer disposes it; confirm who owns _bitmap.
                    Interlocked.Exchange(ref _bitmap, softwareBitmap);
                    // The tracker gets its own copy so UI and tracking do not share a bitmap.
                    controller.ProcessFrameAsync(SoftwareBitmap.Copy(softwareBitmap));
                }
                else
                {
                    controller.ProcessFrameAsync(softwareBitmap);
                }
                signalTrackingUpdated = true;
            }
        }
    }
示例#6
0
 /// <summary>
 /// Unity Update: draws confidence feedback for single markers and applies the
 /// tracked pose to the target object when the marker is visible.
 /// </summary>
 void Update()
 {
     // visualization for single markers (multi markers expose no confidence value)
     if (type != MarkerType.multi)
     {
         if (confidenceColoBox != null)
         {
             confidenceColoBox.color = new Color(0, confidence, 0, 1);
         }
         if (confidenceTextBox != null)
         {
             confidenceTextBox.text = string.Format("{0:0.000}", confidence);
         }
     }

     // visualization for all marker types
     if (target == null || !visible)
     {
         return;
     }
     if (applyRotation)
     {
         target.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(transMatrix);
     }
     if (applyTranslation)
     {
         target.transform.localPosition = ARUWPUtils.PositionFromMatrix(transMatrix);
     }
 }
示例#7
0
 /// <summary>
 /// DetectDone function executes when the detection finishes, to update the information of all
 /// the markers in the scene. [internal use]
 /// </summary>
 /// <param name="locatableCameraToWorld">The locatable camera transformation at capture time</param>
 private void DetectDone(Matrix4x4 locatableCameraToWorld)
 {
     // Push the freshly computed pose to every registered marker.
     foreach (var marker in markers.Values)
     {
         marker.UpdateTrackingInfo(locatableCameraToWorld);
     }
     signalTrackingUpdated = true;
     // Record a tracking tick for the FPS statistics.
     ARUWPUtils.TrackTick();
 }
示例#8
0
 /// <summary>
 /// DetectDone function executes when the detection finishes, to update the information of all
 /// the markers in the scene. [internal use]
 /// </summary>
 private void DetectDone()
 {
     // Push the fresh tracking results to every registered marker.
     foreach (var marker in markers.Values)
     {
         marker.UpdateTrackingInfo();
     }
     signalTrackingUpdated = true;
     // Record a tracking tick for the FPS statistics.
     ARUWPUtils.TrackTick();
 }
 // Use this for initialization
 /// <summary>
 /// Unity Start: registers the native log callback, applies the configured log level,
 /// resets timing bookkeeping, then starts the webcam and the ARUWP pipeline.
 /// </summary>
 void Start()
 {
     // Hook native logging first so the startup of the pipeline below is logged too.
     ARUWPUtils.aruwpRegisterLogCallback(Log);
     ARUWPUtils.aruwpSetLogLevel((int)currentLogLevel);
     // Reset frame-time bookkeeping.
     lastTick  = Time.time;
     deltaTime = 0;
     startWebcam();
     startARUWP();
 }
示例#10
0
    /// <summary>
    /// This function is called by the associated ARUWPMarker.cs to update the pose of this
    /// virtual object. When smoothing is enabled, the new pose will be filtered with current
    /// pose using lerp. Big sudden change of 6-DOF pose will be prohibited. [public use]
    /// </summary>
    /// <param name="localToWorldMatrix">New target pose for this object</param>
    public void SetMatrix4x4(Matrix4x4 localToWorldMatrix)
    {
        Vector3    previousPosition = transform.localPosition;
        Quaternion previousRotation = transform.localRotation;
        Vector3    targetPosition   = ARUWPUtils.PositionFromMatrix(localToWorldMatrix);
        Quaternion targetRotation   = ARUWPUtils.QuaternionFromMatrix(localToWorldMatrix);

        if (!smoothing)
        {
            // No filtering requested: snap directly to the new pose.
            transform.localRotation = targetRotation;
            transform.localPosition = targetPosition;
        }
        else
        {
            // Distance/angle between the new pose and the currently displayed one.
            float positionDiff = Vector3.Distance(targetPosition, previousPosition);
            float rotationDiff = Quaternion.Angle(targetRotation, previousRotation);

            if (Mathf.Abs(positionDiff) < positionJumpThreshold && Mathf.Abs(rotationDiff) < rotationJumpThreshold)
            {
                // Small change: blend toward the new pose and discard any pending jump.
                transform.localRotation = Quaternion.Slerp(previousRotation, targetRotation, lerp);
                transform.localPosition = Vector3.Lerp(previousPosition, targetPosition, lerp);
                pendingPositionList.Clear();
                pendingRotationList.Clear();
            }
            else
            {
                // maybe there is a jump: buffer candidates until enough consecutive,
                // mutually consistent samples confirm the jump is real.
                pendingPositionList.Add(targetPosition);
                pendingRotationList.Add(targetRotation);
                bool confirmJump = true;
                if (pendingPositionList.Count > maxPendingList)
                {
                    // Check the most recent maxPendingList samples agree pairwise
                    // (diffs below the recover thresholds).
                    for (int i = 0; i < maxPendingList - 1; i++)
                    {
                        float tempPositionDiff = Vector3.Distance(pendingPositionList[pendingPositionList.Count - i - 1], pendingPositionList[pendingPositionList.Count - i - 2]);
                        float tempRotationDiff = Quaternion.Angle(pendingRotationList[pendingRotationList.Count - i - 1], pendingRotationList[pendingRotationList.Count - i - 2]);
                        if (Mathf.Abs(tempPositionDiff) > positionRecoverThreshold || Mathf.Abs(tempRotationDiff) > rotationRecoverThreshold)
                        {
                            confirmJump = false;
                            break;
                        }
                    }
                    if (confirmJump)
                    {
                        // Jump confirmed: snap to the new pose and reset the buffers.
                        transform.localRotation = targetRotation;
                        transform.localPosition = targetPosition;
                        pendingPositionList.Clear();
                        pendingRotationList.Clear();
                    }
                    // NOTE(review): when a jump is never confirmed the pending lists are
                    // never trimmed and grow without bound while poses stay jumpy —
                    // consider capping their size; confirm intended behavior.
                }
            }
        }
    }
示例#11
0
 /// <summary>
 /// Applies the latest robot TF poses to their visualization GameObjects.
 /// </summary>
 private void visualizeTFCoordinates()
 {
     // The visibility toggle is loop-invariant: bail out once instead of
     // re-testing it for every TF frame (hoisted out of the loop).
     if (!isShowingTFCoordinates)
     {
         return;
     }
     // not visualizing the LEFT GRIPPER frame (hence TOTAL_NUM_TF - 1)
     for (int i = 0; i < TCPManager.TCPPackageConstants.TOTAL_NUM_TF - 1; i++)
     {
         if (tf_coordinate_gameobject_list[i] != null)
         {
             ARUWPUtils.SetMatrix4x4ToGameObject(ref tf_coordinate_gameobject_list[i], robotPoseToHololensMatrices[i]);
         }
     }
 }
示例#12
0
    /// <summary>
    /// If we are having a different source of (grayscale) frames provide the frame and pose information, rather than setting up the camera in ARUWPVideo,
    /// the external class will call this function.
    /// </summary>
    /// <param name="cameraPoseMatrix">Locatable-camera-to-world transform at capture time</param>
    /// <param name="grayscaleFrameBytes">Raw grayscale pixels; assumed to fit into frameData — TODO confirm sizes match</param>
    unsafe public void OnFrameArrivedExternal(Matrix4x4 cameraPoseMatrix, byte[] grayscaleFrameBytes)
    {
        // Count a video tick for the FPS display.
        ARUWPUtils.VideoTick();

        latestLocatableCameraToWorld = cameraPoseMatrix;

        // Copy into the persistent buffer so the caller may reuse its array afterwards.
        grayscaleFrameBytes.CopyTo(frameData, 0);

        // Run tracking synchronously on the caller's thread (not the Unity thread).
        controller.ProcessFrameSync(frameData, latestLocatableCameraToWorld);

        signalTrackingUpdated = true;
    }
示例#13
0
    /// <summary>
    /// Unity Monobehavior function: update tracking information of the current marker.
    /// Shows the detection confidence and, when it clears the configured threshold,
    /// accepts the new marker pose. [internal use]
    /// </summary>
    private void Update()
    {
        if (signalTrackingUpdated && id != -1)
        {
            // Atomically take the latest snapshot written by the tracking thread.
            Interlocked.Exchange(ref latestTrackingInfo, _info);

            if (latestTrackingInfo.visible)
            {
                if (tracking_progress_text != null)
                {
                    tracking_progress_text.text = string.Format("Confidence:   {0:0.00}", latestTrackingInfo.confidence);
                }
                // Only accept the pose when the confidence clears the threshold.
                if (latestTrackingInfo.confidence > robot_tracked_confidence_threshold)
                {
                    // BUGFIX: guard the dereference — the original null-checked
                    // tracking_progress_text above but appended to it unconditionally
                    // here, throwing NullReferenceException when the text box is unset.
                    if (tracking_progress_text != null)
                    {
                        tracking_progress_text.text += "    Robot Tracked";
                    }
                    latestTransMatrix = ARUWPUtils.ConvertARUWPFloatArrayToMatrix4x4(latestTrackingInfo.trans);
                    tracking_updated  = true;
                }
            }
        }

        signalTrackingUpdated = false;
    }
示例#14
0
    // latestMarkerTransMatrix now contains the latest marker pose in static world frame
    private void processMarkerTrackingInfo()
    {
        // Pose of the marker expressed in the (moving) HoloLens camera frame.
        latestMarkerTransMatrix = aruwpMarker.GetMarkerTransformationMatrix();

        // Re-express the pose in the fixed world frame by routing it through a helper
        // GameObject: set its local pose, then read back its local-to-world matrix.
        // (The HoloLens moves, so its own frame is not fixed in the world.)
        Quaternion markerRotation = ARUWPUtils.QuaternionFromMatrix(latestMarkerTransMatrix);
        Vector3    markerPosition = ARUWPUtils.PositionFromMatrix(latestMarkerTransMatrix);
        dummyGameObject.transform.localRotation = markerRotation;
        dummyGameObject.transform.localPosition = markerPosition;
        latestMarkerTransMatrix = dummyGameObject.transform.localToWorldMatrix;
    }
示例#15
0
    /// <summary>
    /// Unity Monobehavior function. Initialization of video and tracking will be auto-started in the
    /// Update() loop. User customized functionalities can be place here. [internal use]
    /// </summary>
    private void Update()
    {
        // Kick off asynchronous initialization exactly once.
        if (!signalInitTriggered)
        {
            InitAsync();
            signalInitTriggered = true;
        }

        // Keyboard shortcuts (e.g. bluetooth keyboard): P toggles the video preview.
        if (Input.GetKeyDown(KeyCode.P))
        {
            if (videoManager.videoPreview)
            {
                videoManager.DisablePreview();
            }
            else
            {
                videoManager.EnablePreview();
            }
        }

        // V toggles tracking between running and paused.
        if (Input.GetKeyDown(KeyCode.V))
        {
            if (status == ARUWP.ARUWP_STATUS_RUNNING)
            {
                Pause();
            }
            else if (status == ARUWP.ARUWP_STATUS_CTRL_INITIALIZED)
            {
                Resume();
            }
        }

        // Refresh render/track FPS statistics and their optional UI read-outs.
        ARUWPUtils.RenderTick();
        renderDeltaTime = ARUWPUtils.GetRenderDeltaTime();
        trackDeltaTime  = ARUWPUtils.GetTrackDeltaTime();

        if (renderFPS != null)
        {
            renderFPS.text = string.Format("Render: {0:0.0} ms ({1:0.} fps)", renderDeltaTime, 1000.0f / renderDeltaTime);
        }
        if (trackFPS != null)
        {
            trackFPS.text = string.Format("Track:   {0:0.0} ms ({1:0.} fps)", trackDeltaTime, 1000.0f / trackDeltaTime);
        }
        signalTrackingUpdated = false;
    }
示例#16
0
    /// <summary>
    /// Unity Monobehavior function: update tracking information of the current marker.
    /// If magic function is enabled, then a distortion transformation is applied on top
    /// of the tracking result. [internal use]
    /// </summary>
    private void Update()
    {
        if (signalTrackingUpdated && id != -1)
        {
            // Atomically take the latest snapshot written by the tracking thread.
            Interlocked.Exchange(ref latestTrackingInfo, _info);
            if (latestTrackingInfo.visible)
            {
                latestTransMatrix = ARUWPUtils.ConvertARUWPFloatArrayToMatrix4x4(latestTrackingInfo.trans);

                if (performMagicFunction)
                {
                    MagicFunction();
                }

                if (target != null)
                {
                    if (!anchoredToWorld)
                    {
                        // Camera-relative placement: apply the pose directly.
                        if (applyRotation)
                        {
                            target.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(latestTransMatrix);
                        }
                        if (applyTranslation)
                        {
                            target.transform.localPosition = ARUWPUtils.PositionFromMatrix(latestTransMatrix);
                        }
                    }
                    else
                    {
                        // World-anchored placement: chain in the locatable-camera pose.
                        // Hoisted: the original computed this 4x4 product twice per frame.
                        Matrix4x4 worldMatrix = latestTrackingInfo.locatableCameraToWorld * latestTransMatrix;
                        if (applyRotation)
                        {
                            dummyGameObject.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(worldMatrix);
                        }
                        if (applyTranslation)
                        {
                            dummyGameObject.transform.localPosition = ARUWPUtils.PositionFromMatrix(worldMatrix);
                        }

                        latestPoseInWorldCoordinates = dummyGameObject.transform.localToWorldMatrix;
                        ARUWPUtils.SetMatrix4x4ToGameObject(ref target, latestPoseInWorldCoordinates);
                    }
                }
            }
        }

        signalTrackingUpdated = false;
    }
示例#17
0
    /// <summary>
    /// Unity Monobehavior function: update tracking information of the current marker,
    /// applying the calibration matrix on top of the raw tracking result. [internal use]
    /// </summary>
    private void LateUpdate()
    {
        // Early out when no new tracking result exists or the marker is unregistered.
        if (!signalTrackingUpdated || id == -1)
        {
            signalTrackingUpdated = false;
            return;
        }

        // Atomically take the snapshot produced by the tracking thread.
        Interlocked.Exchange(ref latestTrackingInfo, _info);
        if (latestTrackingInfo.visible)
        {
            latestTransMatrix = ARUWPUtils.ConvertARUWPFloatArrayToMatrix4x4(latestTrackingInfo.trans);

            // ARssistSi: calibration correction applied on top of the raw pose.
            latestTransMatrix = calibrationMatrix * latestTransMatrix;

            if (target != null)
            {
                if (anchoredToWorld)
                {
                    // World-anchored: compose with the locatable camera pose.
                    ARUWPUtils.SetMatrix4x4ToGameObject(ref target, latestTrackingInfo.locatableCameraToWorld * latestTransMatrix);
                }
                else
                {
                    // Camera-relative: apply the calibrated pose directly.
                    if (applyRotation)
                    {
                        target.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(latestTransMatrix);
                    }
                    if (applyTranslation)
                    {
                        target.transform.localPosition = ARUWPUtils.PositionFromMatrix(latestTransMatrix);
                    }
                }
            }
        }

        signalTrackingUpdated = false;
    }
示例#18
0
 /// <summary>
 /// Unity Update: renders confidence feedback for single markers and applies the
 /// tracked pose to the target, either camera-relative or anchored to the world.
 /// </summary>
 void Update()
 {
     // visualization for single markers (multi markers expose no confidence value)
     if (type != MarkerType.multi)
     {
         if (confidenceColoBox != null)
         {
             confidenceColoBox.color = new Color(0, confidence, 0, 1);
         }
         if (confidenceTextBox != null)
         {
             confidenceTextBox.text = string.Format("{0:0.000}", confidence);
         }
     }

     // visualization for all marker types
     if (target == null || !visible)
     {
         return;
     }

     if (!anchoredToWorld)
     {
         // Apply the camera-relative pose directly to the target.
         if (applyRotation)
         {
             target.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(transMatrix);
         }
         if (applyTranslation)
         {
             target.transform.localPosition = ARUWPUtils.PositionFromMatrix(transMatrix);
         }
     }
     else
     {
         // Route the pose through a helper object to express it in world coordinates.
         if (applyRotation)
         {
             dummyGameObject.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(transMatrix);
         }
         if (applyTranslation)
         {
             dummyGameObject.transform.localPosition = ARUWPUtils.PositionFromMatrix(transMatrix);
         }
         ARUWPUtils.SetMatrix4x4ToGameObject(ref target, dummyGameObject.transform.localToWorldMatrix);
     }
 }
示例#19
0
 /// <summary>
 /// The callback that is triggered when new video preview frame arrives. In this function,
 /// video frame is saved for Unity UI if videoPreview is enabled, tracking task is triggered
 /// in this function call, and video FPS is recorded. [internal use]
 /// </summary>
 /// <param name="sender">MediaFrameReader object</param>
 /// <param name="args">arguments not used here</param>
 private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
 {
     // Record one video tick for the FPS counter, whether or not a frame is acquired.
     ARUWPUtils.VideoTick();
     using (var frame = sender.TryAcquireLatestFrame()) {
         if (frame != null)
         {
             // Convert to RGBA32 for both the Unity preview texture and the tracker.
             var softwareBitmap = SoftwareBitmap.Convert(frame.VideoMediaFrame.SoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);
             if (videoPreview)
             {
                 // NOTE(review): the SoftwareBitmap previously stored in _bitmap is
                 // replaced here without Dispose() — looks like a native-memory leak
                 // unless the preview consumer disposes it; confirm who owns _bitmap.
                 Interlocked.Exchange(ref _bitmap, softwareBitmap);
                 // The tracker gets its own copy so UI and tracking do not share a bitmap.
                 controller.ProcessFrameAsync(SoftwareBitmap.Copy(softwareBitmap));
             }
             else
             {
                 controller.ProcessFrameAsync(softwareBitmap);
             }
             signalTrackingUpdated = true;
         }
     }
 }
示例#20
0
    /// <summary>
    /// Repositions the UI widgets relative to the robot's head and left-hand frames so
    /// they follow the robot in the HoloLens view.
    /// </summary>
    private void updateUILocations()
    {
        Vector3 headPos     = ARUWPUtils.PositionFromMatrix(robotPoseToHololensMatrices[TCPManager.TCPPackageConstants.HEAD]);
        Vector3 leftHandPos = ARUWPUtils.PositionFromMatrix(robotPoseToHololensMatrices[TCPManager.TCPPackageConstants.LEFT_HAND]);

        // Button row: anchored up-and-right of the head, spaced 0.1 m apart on x.
        Vector3 buttonPosBase = headPos + new Vector3(0.2f, 0.2f, 0);
        plannerToggleButton.transform.position = buttonPosBase;
        cameraToggleButton.transform.position  = buttonPosBase + new Vector3(0.1f, 0, 0);
        tfToggleButton.transform.position      = buttonPosBase + new Vector3(0.2f, 0, 0);

        // Robot camera feed floats above the head (+y) until the user opens it.
        if (!isShowingCameraView)
        {
            previewPlane.transform.parent.position = headPos + new Vector3(0, 0.5f, 0);
        }

        // Warning sign sits to the left of the gripper (-x) and slightly below (-y).
        warningSign.transform.position = leftHandPos + new Vector3(-0.15f, -0.1f, 0);

        // Gripper button sits to the right of the left gripper (+x) and higher (+y).
        gripperButton.transform.position = leftHandPos + new Vector3(0.25f, 0.2f, 0);

        // Planner canvas defaults in front of the robot (-z); once opened it may be
        // dragged, so stop forcing its position.
        if (!isShowingPlannerCanvas)
        {
            plannerCanvas.transform.position = headPos + new Vector3(0, 0, -0.3f);
        }

        if (!isShowingForceBar)
        {
            forceBarGraph.transform.position = leftHandPos + new Vector3(-0.1f, 0, 0);
        }
    }
示例#21
0
    /// <summary>
    /// Unity Monobehavior function: update tracking information of the current marker,
    /// applying the calibration matrix on top of the raw result before handing the
    /// world-frame pose to the target object. [internal use]
    /// </summary>
    private void LateUpdate()
    {
        if (signalTrackingUpdated && id != -1)
        {
            // Atomically take the snapshot written by the tracking thread.
            Interlocked.Exchange(ref latestTrackingInfo, _info);
            if (latestTrackingInfo.visible)
            {
                // Raw marker pose, corrected by the calibration matrix.
                Matrix4x4 calibrated = calibrationMatrix * ARUWPUtils.ConvertARUWPFloatArrayToMatrix4x4(latestTrackingInfo.trans);
                latestTransMatrix = calibrated;

                if (target != null)
                {
                    // Express the pose in world coordinates via the locatable camera.
                    target.SetMatrix4x4(latestTrackingInfo.locatableCameraToWorld * calibrated);
                }
            }
        }

        signalTrackingUpdated = false;
    }
示例#22
0
    /// <summary>
    /// The callback that is triggered when new video preview frame arrives. In this function,
    /// video frame is saved for Unity UI if videoPreview is enabled, tracking task is triggered
    /// in this function call, and video FPS is recorded. [internal use]
    /// </summary>
    /// <param name="sender">MediaFrameReader object</param>
    /// <param name="args">arguments not used here</param>
    unsafe private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        // Record one video tick for the FPS counter, whether or not a frame is acquired.
        ARUWPUtils.VideoTick();
        using (var frame = sender.TryAcquireLatestFrame()) {
            if (frame == null)
            {
                return;
            }

            // Resolve the locatable-camera pose; the path depends on HoloLens generation.
            float[] cameraToWorldMatrixAsFloat = null;
            if (HL == 1)
            {
                if (HL1TryGetCameraToWorldMatrix(frame, out cameraToWorldMatrixAsFloat) == false)
                {
                    Debug.Log(TAG + ": HL1TryGetCameraToWorldMatrix failed");
                    return;
                }
            }
            else if (HL == 2)
            {
                if (HL2TryGetCameraToWorldMatrix(frame, out cameraToWorldMatrixAsFloat) == false)
                {
                    Debug.Log(TAG + ": HL2TryGetCameraToWorldMatrix failed");
                    return;
                }
            }
            else
            {
                // BUGFIX: unsupported HL values previously fell through with a null
                // matrix, crashing in ConvertFloatArrayToMatrix4x4 with an NRE.
                Debug.Log(TAG + ": unsupported HoloLens generation " + HL);
                return;
            }
            latestLocatableCameraToWorld = ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);

            // Copy the raw pixels into the persistent frameData buffer.
            var originalSoftwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;
            using (var input = originalSoftwareBitmap.LockBuffer(BitmapBufferAccessMode.Read))
                using (var inputReference = input.CreateReference()) {
                    byte *inputBytes;
                    uint  inputCapacity;
                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputBytes, out inputCapacity);
                    Marshal.Copy((IntPtr)inputBytes, frameData, 0, frameData.Length);
                }
            // Process the frame in this thread (still different from Unity thread)
            controller.ProcessFrameSync(frameData, latestLocatableCameraToWorld);
            originalSoftwareBitmap?.Dispose();
            signalTrackingUpdated = true;
        }
    }
    /// <summary>
    /// Publishes the marker's world-frame pose at the configured publish rate.
    /// Skips silently until the next publish deadline; logs when the marker is not visible.
    /// </summary>
    /// <param name="currentTime">Capture timestamp used for the outgoing pose message</param>
    /// <param name="elapsedTimeInSeconds">Elapsed time used to schedule publishing</param>
    public void TryPublishing(TimeSpan currentTime, double elapsedTimeInSeconds)
    {
        if (elapsedTimeInSeconds >= nextPublishTime && marker != null)
        {
            // Advance the deadline by one period (fixed-rate schedule).
            nextPublishTime = nextPublishTime + publishPeriod;

            if (marker.GetMarkerVisibility())
            {
                Matrix4x4  latestPoseMatrix = marker.GetMarkerPoseInWorldCoordinateFrame();
                Quaternion currentRotation  = ARUWPUtils.QuaternionFromMatrix(latestPoseMatrix);
                Vector3    currentPosition  = ARUWPUtils.PositionFromMatrix(latestPoseMatrix);

// Send off the Unity thread; only available on UWP builds.
#if NETFX_CORE
                ThreadPool.RunAsync((MarkerPoseSendWork) => { SendPose(currentTime.Add(Timer.GetOffsetUTC()), currentRotation, currentPosition); });
#endif
            }
            else
            {
                Debug.Log("Marker not detected.");
            }
        }
    }
示例#24
0
    /// <summary>
    /// The callback that is triggered when a new video preview frame arrives.
    /// Ticks the video FPS counter, copies the frame pixels into frameData,
    /// runs the tracking task synchronously on this (non-Unity) thread, and
    /// optionally saves the frame as a picture. [internal use]
    /// </summary>
    /// <param name="sender">MediaFrameReader object</param>
    /// <param name="args">arguments not used here</param>
    unsafe private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        ARUWPUtils.VideoTick();
        using (var frame = sender.TryAcquireLatestFrame()) {
            if (frame == null)
            {
                return;
            }

            float[] cameraToWorldMatrixAsFloat;
            if (TryGetCameraToWorldMatrix(frame, out cameraToWorldMatrixAsFloat) == false)
            {
                return;
            }

            latestLocatableCameraToWorld = ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);

            // BUGFIX: own the bitmap with a using block so it is released even
            // if the buffer copy or the tracking call throws — previously it
            // was only disposed on the success path.
            using (var originalSoftwareBitmap = frame.VideoMediaFrame.SoftwareBitmap)
            {
                using (var input = originalSoftwareBitmap.LockBuffer(BitmapBufferAccessMode.Read))
                using (var inputReference = input.CreateReference())
                {
                    byte *inputBytes;
                    uint  inputCapacity;
                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputBytes, out inputCapacity);
                    Marshal.Copy((IntPtr)inputBytes, frameData, 0, frameData.Length);
                }

                // Process the frame in this thread (still different from Unity thread)
                controller.ProcessFrameSync(frameData, latestLocatableCameraToWorld);

                if (_requestingSavePicture)
                {
                    Debug.Log("Saving picture...");
                    // Convert to RGBA8 for the image encoder; SaveRequestedPicture
                    // takes ownership of the converted bitmap.
                    var softwareBitmap = SoftwareBitmap.Convert(originalSoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);
                    SaveRequestedPicture(softwareBitmap);
                    _requestingSavePicture = false;
                }
            }

            // Let the Unity thread know a fresh tracking result is available.
            signalTrackingUpdated = true;
        }
    }
    /// <summary>
    /// Unity Monobehavior function: update tracking information of the current marker,
    /// apply the calibration matrix on top of the raw tracking result, and auto-lock
    /// the marker pose once the frame-to-frame motion of the target drops below the
    /// controller's threshold. [internal use]
    /// </summary>
    private void LateUpdate()
    {
        if (signalTrackingUpdated && id != -1)
        {
            // Atomically take the latest info struct produced by the video thread.
            Interlocked.Exchange(ref latestTrackingInfo, _info);
            if (latestTrackingInfo.visible)
            {
                latestTransMatrix = ARUWPUtils.ConvertARUWPFloatArrayToMatrix4x4(latestTrackingInfo.trans);

                // apply the calibration matrix on top of the tracking results
                latestTransMatrix = calibrationMatrix * latestTransMatrix;

                if (target != null)
                {
                    target.SetMatrix4x4(latestTrackingInfo.locatableCameraToWorld * latestTransMatrix);
                }
            }

            // BUGFIX: the auto-lock logic below used to dereference target
            // unconditionally, throwing NullReferenceException when no target
            // is assigned; guard it the same way as the pose update above.
            if (target != null)
            {
                float distance = (target.transform.localPosition - lastPosition).magnitude;
                if (controller.autoLock && distance < controller.threshold)
                {
                    Debug.Log("ARUWPMarker: Locked marker at position " + lastPosition + " at threshold " + distance.ToString("F4"));
                    controller.Pause();
                    locked = true;
                    //Reset last tracked position for future realignment to avoid immediate locking if tracking is reinitialized
                    lastPosition = Vector3.zero;
                }
                else
                {
                    // BUGFIX: misplaced parenthesis in the original message
                    // ("…threshold)<value>): …") produced unbalanced output.
                    Debug.Log("ARUWPMarker: Found marker but tracking error exceeds threshold (" + controller.threshold.ToString("F4") + "): " + distance.ToString("F4"));
                    lastPosition = target.transform.localPosition;
                }
            }
        }

        // Consume the signal so the same frame is not processed twice.
        signalTrackingUpdated = false;
    }
示例#26
0
    /// <summary>
    /// Update the force visuals for the left gripper: resize the per-axis and
    /// total-force arrows, orient the total-force arrow along the combined force
    /// direction, and place the whole force visual next to the gripper pose.
    /// </summary>
    private void visualizeForce()
    {
        Vector3 force = _left_gripper_info.force;

        // The robot base frame xyz direction is not what we want, so the
        // y component is negated here and in the direction sum below.
        adjustForceArrowSize(force.x, AXIS_X);
        adjustForceArrowSize(-force.y, AXIS_Y);
        adjustForceArrowSize(force.z, AXIS_Z);
        adjustForceArrowSize(force.magnitude, AXIS_TOTAL);

        // Combine the per-axis arrow directions, weighted by the force components.
        Vector3 direction = left_gripper_force_target_x.transform.up * force.x
                            + left_gripper_force_target_y.transform.up * -force.y
                            + left_gripper_force_target_z.transform.up * force.z;
        left_gripper_force_target_total.transform.rotation = Quaternion.LookRotation(direction);

        // Prevent the force visual overlapping with the tf frame on the left
        // hand: offset down (-0.1 in y) and to the side (+0.07 in x).
        Vector3 gripperPosition = ARUWPUtils.PositionFromMatrix(robotPoseToHololensMatrices[TCPManager.TCPPackageConstants.LEFT_GRIPPER]);
        left_gripper_force_target.transform.localPosition = gripperPosition + new Vector3(0.07f, -0.1f, 0);
        left_gripper_force_target.transform.localRotation = _forceVisualOrientation;
    }
示例#27
0
    /// <summary>
    /// The callback that is triggered when a new video preview frame arrives.
    /// Ticks the video FPS counter, converts the frame to RGBA8, hands it to the
    /// Unity preview (when enabled) and to the asynchronous tracking task, and
    /// publishes the frame to the robot controller on the configured schedule.
    /// [internal use]
    /// </summary>
    /// <param name="sender">MediaFrameReader object</param>
    /// <param name="args">arguments not used here</param>
    private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        ARUWPUtils.VideoTick();
        using (var frame = sender.TryAcquireLatestFrame())
        {
            if (frame == null)
            {
                return;
            }

            float[] cameraToWorldMatrixAsFloat;
            if (TryGetCameraToWorldMatrix(frame, out cameraToWorldMatrixAsFloat) == false)
            {
                return;
            }

            Matrix4x4 latestLocatableCameraToWorld = ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);

            var originalSoftwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;
            var softwareBitmap         = SoftwareBitmap.Convert(originalSoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);
            originalSoftwareBitmap?.Dispose();

            // BUGFIX: the old if/else branches were identical apart from the
            // preview hand-off; the duplicated ProcessFrameAsync call is collapsed.
            if (videoPreview)
            {
                // Hand the RGBA frame to the UI; the preview consumer owns it now.
                Interlocked.Exchange(ref _bitmap, softwareBitmap);
            }
            controller.ProcessFrameAsync(SoftwareBitmap.Copy(softwareBitmap), latestLocatableCameraToWorld);
            signalTrackingUpdated = true;

            if (isPublishing)
            {
                TimeSpan sampledCurrentTime  = HololensRobotController.Utilities.Timer.SampleCurrentStopwatch();
                double   elapsedTotalSeconds = HololensRobotController.Utilities.Timer.GetElapsedTimeInSeconds(sampledCurrentTime);
                if (elapsedTotalSeconds >= nextPublishTime)
                {
                    // BUGFIX: the grayscale branch used to re-dispose
                    // originalSoftwareBitmap (already disposed above) and leaked
                    // the converted bitmap; it is now released after its pixels
                    // are copied into publishBuffer.
                    SoftwareBitmap publishedBitmap = Config.convertColorToGrayscale
                        ? SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Gray8)
                        : softwareBitmap;

                    IBuffer ibuffer = publishBuffer.AsBuffer();
                    publishedBitmap.CopyToBuffer(ibuffer);
                    OnFrameReadyToPublish(
                        new HololensRobotController.WindowsSensors.FrameEventArgs(
                            (uint)controller.frameHeight,
                            (uint)controller.frameWidth,
                            HololensRobotController.WindowsSensors.CameraHandler.GetCameraSourceEncoding(MediaFrameSourceKind.Color),
                            (uint)publishRowSize,
                            publishBuffer,
                            sampledCurrentTime + HololensRobotController.Utilities.Timer.GetOffsetUTC()));

                    if (publishedBitmap != softwareBitmap)
                    {
                        publishedBitmap.Dispose();
                    }

                    nextPublishTime = nextPublishTime + publishPeriod;
                }
            }

            // BUGFIX: when the preview is disabled nobody takes ownership of
            // softwareBitmap (ProcessFrameAsync receives a copy), so it must be
            // released here to avoid leaking one bitmap per frame.
            if (!videoPreview)
            {
                softwareBitmap.Dispose();
            }
        }
    }
示例#28
0
    /// <summary>
    /// Unity Monobehavior function: update tracking information of the current marker.
    /// If magic function is enabled, then a distortion transformation is applied on top
    /// of the tracking result. [internal use]
    /// </summary>
    private void Update()
    {
        // Only consume tracking data when the video thread signalled a new
        // result and this marker is registered with the native tracker.
        if (signalTrackingUpdated && id != -1)
        {
            // Atomically take the info struct shared with the video thread.
            Interlocked.Exchange(ref latestTrackingInfo, _info);
            if (latestTrackingInfo.visible)
            {
                latestTransMatrix = ARUWPUtils.ConvertARUWPFloatArrayToMatrix4x4(latestTrackingInfo.trans);

                //if (performMagicFunction) { MagicFunction(); }

                if (target != null)
                {
                    if (!anchoredToWorld)
                    {
                        // Pose the target directly from the camera-relative
                        // tracking matrix; rotation and translation can be
                        // applied independently via the two flags.
                        if (applyRotation)
                        {
                            target.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(latestTransMatrix);
                        }
                        if (applyTranslation)
                        {
                            target.transform.localPosition = ARUWPUtils.PositionFromMatrix(latestTransMatrix);
                        }

                        if (performCustomMagic)
                        {
                            // NOTE(review): this builds the position from target's x but
                            // dummyGameObject's y/z, while the anchored branch below uses
                            // dummyGameObject for all three components. Looks like a
                            // copy-paste slip — confirm whether target.localPosition.y/.z
                            // were intended here before changing it.
                            Matrix4x4 newPos = new Matrix4x4();
                            newPos.SetColumn(0, new Vector4(target.transform.localPosition.x, dummyGameObject.transform.localPosition.y, dummyGameObject.transform.localPosition.z, 1));
                            Matrix4x4 result = newMagic * newPos;
                            target.transform.localPosition = result.GetColumn(0);
                        }
                    }
                    else
                    {
                        // Anchored-to-world path: pose an intermediate dummy object in
                        // the camera frame first, then transfer its world pose to target.
                        if (applyRotation)
                        {
                            dummyGameObject.transform.localRotation = ARUWPUtils.QuaternionFromMatrix(latestTransMatrix);
                        }
                        if (applyTranslation)
                        {
                            dummyGameObject.transform.localPosition = ARUWPUtils.PositionFromMatrix(latestTransMatrix);
                        }

                        if (performCustomMagic)
                        {
                            // Apply the custom distortion correction (newMagic) to the
                            // dummy's position, expressed as the first matrix column.
                            Matrix4x4 newPos = new Matrix4x4();
                            newPos.SetColumn(0, new Vector4(dummyGameObject.transform.localPosition.x, dummyGameObject.transform.localPosition.y, dummyGameObject.transform.localPosition.z, 1));
                            Matrix4x4 result = newMagic * newPos;
                            dummyGameObject.transform.localPosition = result.GetColumn(0);
                        }

                        // Dummy Object is anchored to camera. And this line converts it to world coordinate.
                        ARUWPUtils.SetMatrix4x4ToGameObject(ref target, dummyGameObject.transform.localToWorldMatrix);
                        //if (performCustomMagic)
                        //{
                        //    Matrix4x4 newPos = new Matrix4x4();
                        //    newPos.SetColumn(0, new Vector4(target.transform.localPosition.x, target.transform.localPosition.y, target.transform.localPosition.z, 1));
                        //    Matrix4x4 result = newMagic * newPos;
                        //    target.transform.localPosition = result.GetColumn(0);
                        //}
                    }
                }
            }
        }

        // Consume the signal so the same frame is not processed twice.
        signalTrackingUpdated = false;
    }
示例#29
0
 /// <summary>
 /// Recompute the cached orientation used by the force visuals: the robot base
 /// rotation (expressed in HoloLens coordinates) pre-multiplied by the fixed
 /// adjustment rotation toVisualizeForce.
 /// </summary>
 private void calculateForceVisualOrientation()
 {
     Quaternion baseRotation = ARUWPUtils.QuaternionFromMatrix(robotPoseToHololensMatrices[TCPManager.TCPPackageConstants.BASE]);
     _forceVisualOrientation = toVisualizeForce * baseRotation;
 }
示例#30
0
 /// <summary>
 /// Pose the left-gripper visual from the latest robot-to-HoloLens transform.
 /// </summary>
 private void visualizeGripper()
 {
     Matrix4x4 gripperPose = robotPoseToHololensMatrices[TCPManager.TCPPackageConstants.LEFT_GRIPPER];
     ARUWPUtils.SetMatrix4x4ToGameObject(ref left_gripper_target, gripperPose);
 }