Example #1
    public void SubmitForm()
    {
        RoomName = GameObject.Find("Canvas/Panel/Text").GetComponent <Text> ().text;
        foreach (GameObject Reservetoggle in toggles)
        {
            Toggle m_Toggle = Reservetoggle.GetComponent <Toggle>();
            ReservationTime  = m_Toggle.name.Substring(1, 2);
            ReservationTime += ":00:00";
            if (m_Toggle.isOn)
            {
                //Debug.Log ("Reserve" + m_Toggle.name);
                ReservationStatus = "true";
            }
            else
            {
                //Debug.Log ("Not reserve" + m_Toggle.name);
                ReservationStatus = "false";
            }
            StartCoroutine("PostForm");
        }
        PopUpPanel = GameObject.Find("Canvas/PopUpPanel");
        PopUpPanel.transform.localScale      = new Vector3(0, 0, 0);
        EmptyGameObject.transform.localScale = new Vector3(1, 1, 1);
        GameObject Camera    = GameObject.Find("ARCamera");
        ARCamera   TakePhoto = (ARCamera)Camera.GetComponent(typeof(ARCamera));

        TakePhoto.OnStartClick();
    }
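The PostForm coroutine started inside the loop above is not part of this sample. A minimal sketch of what such a coroutine might look like using UnityWebRequest; the URL and form-field names are assumptions, not taken from the original project:

    // Hypothetical coroutine posting the reservation fields set in SubmitForm.
    // Requires: using System.Collections; using UnityEngine; using UnityEngine.Networking;
    private IEnumerator PostForm()
    {
        WWWForm form = new WWWForm();
        form.AddField("room", RoomName);               // fields set by SubmitForm
        form.AddField("time", ReservationTime);
        form.AddField("status", ReservationStatus);

        // "https://example.com/reserve" is a placeholder endpoint.
        using (UnityWebRequest request = UnityWebRequest.Post("https://example.com/reserve", form))
        {
            yield return request.SendWebRequest();
            if (request.result != UnityWebRequest.Result.Success)
            {
                Debug.LogError("PostForm failed: " + request.error);
            }
        }
    }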
Example #2
        public static string GetRecommendation(this ARCamera self)
        {
            string result = null;

            switch (self.TrackingState)
            {
            case ARTrackingState.Limited:
                switch (self.TrackingStateReason)
                {
                case ARTrackingStateReason.ExcessiveMotion:
                    result = "Try slowing down your movement, or reset the session.";
                    break;

                case ARTrackingStateReason.InsufficientFeatures:
                    result = "Try pointing at a flat surface, or reset the session.";
                    break;

                case ARTrackingStateReason.Relocalizing:
                    result = "Return to the location where you left off or try resetting the session.";
                    break;
                }
                break;
            }

            return(result);
        }
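The extension returns null when no guidance applies, so callers can show it only when present. A minimal sketch of a hypothetical call site from an ARSessionDelegate subclass; the delegate class and the statusLabel field are assumptions:

    // Hypothetical consumer of GetRecommendation; statusLabel is an assumed UILabel.
    public override void CameraDidChangeTrackingState(ARSession session, ARCamera camera)
    {
        var recommendation = camera.GetRecommendation();
        if (recommendation != null)
        {
            // UI updates must happen on the main thread.
            InvokeOnMainThread(() => statusLabel.Text = recommendation);
        }
    }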
Example #3
 private void Awake()
 {
     instance         = this;
     cameraTransform  = GetComponent <Transform>();
     attachedCamera   = GetComponent <Camera>();
     cameraBackground = GetComponent <ARCameraBackground>();
 }
Example #4
        /// <summary>
        /// Update the transform of the game board with the latest hit test result and camera
        /// </summary>
        private void UpdateTransform(ARHitTestResult hitTestResult, ARCamera camera)
        {
            var position = hitTestResult.WorldTransform.GetTranslation();

            // Average using several most recent positions.
            this.recentPositions.Add(position);
            this.recentPositions = new List <SCNVector3>(this.recentPositions.TakeLast(10));

            // Move to average of recent positions to avoid jitter.
            var average = this.recentPositions.Reduce(new SCNVector3(0f, 0f, 0f)) / (float)this.recentPositions.Count;

            this.Position = average;

            // Orient bounds to plane if possible
            if (hitTestResult.Anchor is ARPlaneAnchor planeAnchor)
            {
                this.OrientToPlane(planeAnchor, camera);
                this.ScaleToPlane(planeAnchor);
            }
            else
            {
                // Fall back to camera orientation
                this.OrientToCamera(camera);
                this.Scale = new SCNVector3(GameBoard.MinimumScale, GameBoard.MinimumScale, GameBoard.MinimumScale);
            }

            // Remove any animation duration if present
            SCNTransaction.AnimationDuration = 0;
        }
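Reduce and TakeLast above are helper extensions that are not shown in the sample. A minimal sketch of what they might look like, under the assumption that Reduce folds the vectors with addition; the original project's implementations may differ:

    // Assumed helper extensions used by UpdateTransform.
    // Requires: using System; using System.Collections.Generic; using System.Linq; using SceneKit;
    internal static class SCNVector3ListExtensions
    {
        // Return at most the last `count` elements of the list.
        public static IEnumerable<SCNVector3> TakeLast(this List<SCNVector3> source, int count)
        {
            return source.Skip(Math.Max(0, source.Count - count));
        }

        // Fold the list with vector addition, starting from `seed`.
        public static SCNVector3 Reduce(this List<SCNVector3> source, SCNVector3 seed)
        {
            var sum = seed;
            foreach (var v in source)
            {
                sum += v;
            }
            return sum;
        }
    }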
Example #5
        internal static string PresentationString(this ARCamera self)
        {
            var tracking = self.TrackingState;

            switch (tracking)
            {
            case ARTrackingState.NotAvailable: return("ARKit tracking UNAVAILABLE");

            case ARTrackingState.Normal: return("ARKit tracking NORMAL");

            case ARTrackingState.Limited:
                switch (self.TrackingStateReason)
                {
                case ARTrackingStateReason.ExcessiveMotion: return("ARKit tracking LIMITED : Excessive motion");

                case ARTrackingStateReason.InsufficientFeatures: return("ARKit tracking LIMITED : Low detail");

                case ARTrackingStateReason.Initializing: return("ARKit is initializing");

                case ARTrackingStateReason.Relocalizing: return("ARKit is relocalizing");
                }
                break;
            }
            // Can't actually get here
            return("");
        }
Example #6
 public override void CameraDidChangeTrackingState(ARSession session, ARCamera camera)
 {
     if (arkit.TryGetTarget(out var ap) && ap.Application.IsActive)
     {
         Urho.Application.InvokeOnMain(() => ap.OnCameraDidChangeTrackingState(camera));
     }
 }
Example #7
    public void StartLocalization()
    {
        GameObject Camera    = GameObject.Find("ARCamera");
        ARCamera   TakePhoto = (ARCamera)Camera.GetComponent(typeof(ARCamera));

        TakePhoto.OnStartClick();
    }
Example #8
        private void CameraDidChangeTrackingState(ARSession session, ARCamera camera)
        {
            var message = string.Empty;

            // Inform the user of their camera tracking state.
            switch (camera.TrackingState)
            {
            case ARTrackingState.NotAvailable:
                message = "Tracking unavailable";
                break;

            case ARTrackingState.Limited:
                switch (camera.TrackingStateReason)
                {
                case ARTrackingStateReason.ExcessiveMotion:
                    message = "Tracking limited - Too much camera movement";
                    break;

                case ARTrackingStateReason.InsufficientFeatures:
                    message = "Tracking limited - Not enough surface detail";
                    break;

                case ARTrackingStateReason.Initializing:
                    message = "Initializing AR Session";
                    break;
                }
                break;

            case ARTrackingState.Normal:
                message = "Tracking normal";
                break;
            }

            this.sessionInfoLabel.Text = message;
        }
Example #9
        public override void OnInspectorGUI()
        {
            serializedObject.Update();

            ARCamera go = target as ARCamera;

            EditorGUILayout.PropertyField(APIKey, new GUIContent("Developer API Key", "Enter your developer API Key"));

            UseThirdPartySDKProp.boolValue = EditorGUILayout.Foldout(UseThirdPartySDKProp.boolValue, new GUIContent("Use Third Party SDK", "Enable this to select Third Party SDK"));

            if (UseThirdPartySDKProp.boolValue)
            {
                EditorGUILayout.PropertyField(SDKToUseProp, new GUIContent("SDK To Use", "The Third party SDK to use"));
                EditorGUILayout.PropertyField(ThirdPartyGameObjectProp, new GUIContent("Third Party GameObject To Track", "Attach the GameObject that is tracked by the Third Party SDK to mimic its pose"));
                EditorGUILayout.PropertyField(EnableHybridTrackingProp, new GUIContent("Enable Hybrid Tracking", "Hybrid tracking makes use of the best of both Holoboard and Third Party SDKs wherever possible (Experimental)"));

                if (SDKToUseProp.enumValueIndex == (int)ThirdPartySDK.Vuforia)
                {
                    EditorGUILayout.LabelField("Make sure you add the \"VuforiaHoloboardTrackableEventHandler\" script to all");
                    EditorGUILayout.LabelField("your Vuforia Targets");

                    if (EnableHybridTrackingProp.boolValue)
                    {
                        AddVuforiaScript(go);
                    }
                    else
                    {
                        RemoveVuforiaScript(go);
                    }

                    Repaint();
                }
                else
                {
                    RemoveVuforiaScript(go);
                }
            }

            EditorGUILayout.Space();

            if (!UseThirdPartySDKProp.boolValue)
            {
                EditorGUILayout.PropertyField(recenterPoint, new GUIContent("Recenter Point", "The Transform to which the Camera will face when Recenter() is called"));
                EditorGUILayout.PropertyField(UseHeadTrackingProp, new GUIContent("Use Head Tracking", "Variable to keep track of head tracking"));
                RemoveVuforiaScript(go);
            }

            foldout_Cameras = EditorGUILayout.Foldout(foldout_Cameras, new GUIContent("Use Custom Stereo Camera Setup", "Attach custom Left And Right Camera (optional)"));

            if (foldout_Cameras)
            {
                EditorGUILayout.PropertyField(LeftCamProp, new GUIContent("Left Camera"));
                EditorGUILayout.PropertyField(LeftCamProp, new GUIContent("Right Camera"));
            }

            EditorGUILayout.PropertyField(eyeSeparationProp, new GUIContent("Eye Separation (m)", "Eye Separation or IPD"));

            serializedObject.ApplyModifiedProperties();
        }
Example #10
    private float lightEstimationSmoothVelocity; // Velocity state used when smoothing the light estimate

    void Start()
    {
        if (arCamera == null)
        {
            arCamera = GetComponent <ARCamera>();
        }
        Debug.Assert(arCamera, "An ARCamera is required");
    }
Example #11
    void Start()
    {
        skyboxMaterial = Resources.Load <Material>("Materials/ARSkybox");
        Debug.Assert(skyboxMaterial);

        arCamera = GetComponent <ARCamera>();
        RenderSettings.skybox = skyboxMaterial;
    }
Example #12
    public void StopLocalization()
    {
        GameObject Camera    = GameObject.Find("ARCamera");
        ARCamera   TakePhoto = (ARCamera)Camera.GetComponent(typeof(ARCamera));

        TakePhoto.OnStopClick();
        GameObject.Find("Canvas/Panel/Text").GetComponent <Text> ().text = "Take a photo for localization";
    }
Example #13
    public void Start()
    {
        arCamera   = ARCamera.Instance;
        controller = HoloboardController.Instance;

        ray          = arCamera.GetRayFromGaze();
        interactible = GetComponent <ARInteractible> ();
    }
Example #14
        public void OnDrawFrame(IGL10 gl)
        {
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mSession);
            }

            try {
                mSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame  arFrame  = mSession.Update();
                ARCamera arCamera = arFrame.Camera;

                // The size of the projection matrix is 4 * 4.
                float[] projectionMatrix = new float[16];

                arCamera.GetProjectionMatrix(projectionMatrix, PROJ_MATRIX_OFFSET, PROJ_MATRIX_NEAR, PROJ_MATRIX_FAR);
                mTextureDisplay.OnDrawFrame(arFrame);
                StringBuilder sb = new StringBuilder();
                UpdateMessageData(sb);
                mTextDisplay.OnDrawFrame(sb);

                // The size of ViewMatrix is 4 * 4.
                float[] viewMatrix = new float[16];
                arCamera.GetViewMatrix(viewMatrix, 0);
                var allTrackables = mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARPlane)));

                foreach (ARPlane plane in allTrackables)
                {
                    if (plane.Type != ARPlane.PlaneType.UnknownFacing &&
                        plane.TrackingState == ARTrackableTrackingState.Tracking)
                    {
                        HideLoadingMessage();
                        break;
                    }
                }
                mLabelDisplay.OnDrawFrame(allTrackables, arCamera.DisplayOrientedPose,
                                          projectionMatrix);
                HandleGestureEvent(arFrame, arCamera, projectionMatrix, viewMatrix);
                ARLightEstimate lightEstimate       = arFrame.LightEstimate;
                float           lightPixelIntensity = 1;
                if (lightEstimate.GetState() != ARLightEstimate.State.NotValid)
                {
                    lightPixelIntensity = lightEstimate.PixelIntensity;
                }
                DrawAllObjects(projectionMatrix, viewMatrix, lightPixelIntensity);
            } catch (ArDemoRuntimeException e) {
                Log.Info(TAG, "Exception on the ArDemoRuntimeException!");
            } catch (Exception t) {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Info(TAG, "Exception on the OpenGL thread: " + t.Message);
            }
        }
Example #15
        private void RemoveVuforiaScript(ARCamera go)
        {
            VuforiaTrackablesTracker node = go.GetComponent <VuforiaTrackablesTracker> ();

            if (node != null)
            {
                DestroyImmediate(node);
            }
        }
Example #16
    public override void OnInspectorGUI()
    {
        ARCamera arc = (ARCamera)target;

        if (arc == null)
        {
            return;
        }

        //
        // Stereo parameters.
        //
        EditorGUILayout.Separator();
        arc.Stereo = EditorGUILayout.Toggle("Part of a stereo pair", arc.Stereo);
        if (arc.Stereo)
        {
            arc.StereoEye = (ARCamera.ViewEye)EditorGUILayout.EnumPopup("Stereo eye:", arc.StereoEye);
        }

        //
        // Optical parameters.
        //
        EditorGUILayout.Separator();

        arc.Optical = EditorGUILayout.Toggle("Optical see-through mode.", arc.Optical);

        if (arc.Optical)
        {
            // Offer a popup with optical params file names.
            RefreshOpticalParamsFilenames();             // Update the list of available optical params from the resources dir
            if (OpticalParamsFilenames.Length > 0)
            {
                int    opticalParamsFilenameIndex = EditorGUILayout.Popup("Optical parameters file", arc.OpticalParamsFilenameIndex, OpticalParamsFilenames);
                string opticalParamsFilename      = OpticalParamsAssets[opticalParamsFilenameIndex].name;
                if (opticalParamsFilename != arc.OpticalParamsFilename)
                {
                    arc.OpticalParamsFilenameIndex = opticalParamsFilenameIndex;
                    arc.OpticalParamsFilename      = opticalParamsFilename;
                    arc.OpticalParamsFileContents  = OpticalParamsAssets[arc.OpticalParamsFilenameIndex].bytes;
                }
                arc.OpticalEyeLateralOffsetRight = EditorGUILayout.FloatField("Lateral offset right:", arc.OpticalEyeLateralOffsetRight);
                EditorGUILayout.HelpBox("Enter an amount by which this eye should be moved to the right, relative to the video camera lens. E.g. if this is the right eye, but you're using calibrated optical paramters for the left eye, enter 0.065 (65mm).", MessageType.Info);
            }
            else
            {
                arc.OpticalParamsFilenameIndex = 0;
                EditorGUILayout.LabelField("Optical parameters file", "No parameters files available");
                arc.OpticalParamsFilename     = "";
                arc.OpticalParamsFileContents = new byte[0];
            }
        }
        EditorGUILayout.Separator();
        arc.displayedGameObject = (GameObject)EditorGUILayout.ObjectField(arc.displayedGameObject, typeof(GameObject), true);

        EditorGUILayout.Separator();
        arc.mainCamera = (Camera)EditorGUILayout.ObjectField(arc.mainCamera, typeof(Camera), true);
    }
Example #17
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear the color buffer and notify the driver not to load the data of the previous frame.
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mSession);
            }

            try
            {
                mSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame  arFrame  = mSession.Update();
                ARCamera arCamera = arFrame.Camera;

                // The size of the projection matrix is 4 * 4.
                float[] projectionMatrix = new float[16];

                // Obtain the projection matrix through ARCamera.
                arCamera.GetProjectionMatrix(projectionMatrix, PROJECTION_MATRIX_OFFSET, PROJECTION_MATRIX_NEAR,
                                             PROJECTION_MATRIX_FAR);
                mTextureDisplay.OnDrawFrame(arFrame);
                ICollection hands = mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARHand)));
                if (hands.Count == 0)
                {
                    mTextDisplay.OnDrawFrame(null);
                    return;
                }
                foreach (ARHand hand in hands)
                {
                    // Update the hand recognition information to be displayed on the screen.
                    StringBuilder sb = new StringBuilder();
                    UpdateMessageData(sb, hand);

                    // Display hand recognition information on the screen.
                    mTextDisplay.OnDrawFrame(sb);
                }
                foreach (HandRelatedDisplay handRelatedDisplay in mHandRelatedDisplays)
                {
                    handRelatedDisplay.OnDrawFrame(hands, projectionMatrix);
                }
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Info(TAG, "Exception on the ArDemoRuntimeException!");
            }
            catch (Exception t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Info(TAG, "Exception on the OpenGL thread " + t.Message);
            }
        }
Example #18
        private void AddVuforiaScript(ARCamera go)
        {
            VuforiaTrackablesTracker node = go.GetComponent <VuforiaTrackablesTracker> ();

            if (node == null)
            {
                go.gameObject.AddComponent <VuforiaTrackablesTracker> ();
            }
        }
Example #19
        /// <summary>
        /// Update the face geometric data in the buffer.
        /// This method is called when FaceRenderManager's OnDrawFrame method is invoked.
        /// </summary>
        /// <param name="camera">ARCamera</param>
        /// <param name="face">ARFace</param>
        public void OnDrawFrame(ARCamera camera, ARFace face)
        {
            ARFaceGeometry faceGeometry = face.FaceGeometry;

            UpdateFaceGeometryData(faceGeometry);
            UpdateModelViewProjectionData(camera, face);
            DrawFaceGeometry();
            faceGeometry.Release();
        }
Example #20
        private void UpdateTrackingStateDisplay(ARCamera camera)
        {
            if (!_trackingColors.TryGetValue(camera.TrackingState, out var color))
            {
                color = UIColor.Gray;
            }

            UIView.Animate(.2, () => TrackingStatusIndicator.BackgroundColor = color);
        }
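The _trackingColors lookup is defined elsewhere in the class. A plausible initialization is sketched below; the specific color choices are assumptions:

    // Assumed mapping from tracking state to indicator color.
    // Requires: using System.Collections.Generic; using ARKit; using UIKit;
    private readonly Dictionary<ARTrackingState, UIColor> _trackingColors =
        new Dictionary<ARTrackingState, UIColor>
    {
        { ARTrackingState.Normal,       UIColor.Green  },
        { ARTrackingState.Limited,      UIColor.Yellow },
        { ARTrackingState.NotAvailable, UIColor.Red    },
    };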
Example #21
 private void Awake()
 {
     if (instance != null)
     {
         Destroy(gameObject);
         return;
     }
     instance = this;
     DontDestroyOnLoad(gameObject);
 }
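This Awake implements the standard Unity singleton guard: the first instance registers itself and persists across scene loads, and any later duplicate destroys itself. A minimal sketch of the surrounding class this implies; the class name is a placeholder, not the original:

    // Sketch of the implied singleton MonoBehaviour; "SessionManager" is an assumed name.
    using UnityEngine;

    public class SessionManager : MonoBehaviour
    {
        public static SessionManager instance { get; private set; }

        private void Awake()
        {
            if (instance != null)
            {
                Destroy(gameObject);           // a copy already exists in the scene
                return;
            }
            instance = this;
            DontDestroyOnLoad(gameObject);     // survive scene changes
        }
    }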
Example #22
        public override void CameraDidChangeTrackingState(ARSession session, ARCamera camera)
        {
            var state        = camera.TrackingState;
            var messageLabel = cameraView.MessageLabel;

            switch (state)
            {
            case ARTrackingState.NotAvailable:

                messageLabel.Text = "Non è possibile rilevare l'ambiente circostante";
                break;

            case ARTrackingState.Normal:
                messageLabel.Text = "Inquadra lentamente l'ambiente circostante";
                break;

            case ARTrackingState.Limited:
                var reason = camera.TrackingStateReason;

                messageLabel.Text = "Visuale limitata";
                switch (reason)
                {
                case ARTrackingStateReason.ExcessiveMotion:

                    messageLabel.Text = "Eccessivo Movimento";
                    break;

                case ARTrackingStateReason.Initializing:

                    messageLabel.Text = "Inizializzazione";
                    break;

                case ARTrackingStateReason.InsufficientFeatures:

                    messageLabel.Text = "Non vi è abbastanza luce o si sta puntado su una superfice riflettente";
                    break;

                case ARTrackingStateReason.None:
                    Console.WriteLine("None");
                    break;

                case ARTrackingStateReason.Relocalizing:

                    messageLabel.Text = "Relocalizing";
                    break;

                default:
                    break;
                }
                break;

            default:
                break;
            }
        }
Example #23
        // Appearance

        public void Update(SCNVector3 position, ARPlaneAnchor planeAnchor, ARCamera camera)
        {
            this.lastPosition = position;

            if (planeAnchor != null)
            {
                this.lastPositionOnPlane = position;
            }

            this.UpdateTransform(position, camera);
        }
Example #24
        public void CameraDidChangeTrackingState(ARSession session, ARCamera camera)
        {
            var frame = session.CurrentFrame;

            if (frame == null)
            {
                throw new Exception("ARSession should have an ARFrame");
            }

            this.UpdateSessionInfoLabel(camera);
            frame.Dispose();
        }
Example #25
        private void UpdateModelViewProjectionData(ARCamera camera, ARFace face)
        {
            // The size of the projection matrix is 4 * 4.
            float[] projectionMatrix = new float[16];
            camera.GetProjectionMatrix(projectionMatrix, 0, PROJECTION_MATRIX_NEAR, PROJECTION_MATRIX_FAR);
            ARPose facePose = face.Pose;

            // The size of viewMatrix is 4 * 4.
            float[] facePoseViewMatrix = new float[16];

            facePose.ToMatrix(facePoseViewMatrix, 0);
            Android.Opengl.Matrix.MultiplyMM(mModelViewProjections, 0, projectionMatrix, 0, facePoseViewMatrix, 0);
        }
Example #26
        public void OnDrawFrame(IGL10 gl)
        {
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mArSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mArSession);
            }

            try
            {
                mArSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame frame = mArSession.Update();
                mTextureDisplay.OnDrawFrame(frame);
                float fpsResult = DoFpsCalculate();

                System.Collections.ICollection faces = (System.Collections.ICollection)mArSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARFace)));

                if (faces.Count == 0)
                {
                    mTextDisplay.OnDrawFrame(null);
                    return;
                }
                Log.Debug(TAG, "Face number: " + faces.Count);
                ARCamera camera = frame.Camera;
                foreach (ARFace face in faces)
                {
                    if (face.TrackingState == ARTrackableTrackingState.Tracking)
                    {
                        mFaceGeometryDisplay.OnDrawFrame(camera, face);
                        StringBuilder sb = new StringBuilder();
                        UpdateMessageData(sb, fpsResult, face);
                        mTextDisplay.OnDrawFrame(sb);
                    }
                }
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Debug(TAG, "Exception on the ArDemoRuntimeException!");
            }
            catch (Throwable t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Debug(TAG, "Exception on the OpenGL thread", t);
            }
        }
Example #27
    public void OnEndDrag(PointerEventData eventData)
    {
        ARCamera arc = maincamera.GetComponent <ARCamera>();

        // Build the compass orientation from the quaternion components stored on the ARCamera.
        Quaternion compass = new Quaternion(arc.o[0], arc.o[1], arc.o[2], arc.o[3]);

        Skybox skybox = GameObject.Find("skybox_container").GetComponent <Skybox>();
        if (!skybox.isDistrict && !skybox.isInside)
        {
            arc.heading_correction = maincamera.transform.rotation.eulerAngles.y - compass.eulerAngles.y;
            //GameObject.Find("Main Camera").GetComponent<ARCamera>().heading_correction = GameObject.Find("Main Camera").transform.rotation.eulerAngles.y - GameObject.Find("Main Camera").GetComponent<ARCamera>().compassHeading;
            arc.camera_fixed = false;
        }
        dragging  = false;
        drag_from = new Vector3();
    }
Example #28
        public void SessionDelegate_CameraDidChangeTrackingState(ARSession session, ARCamera camera)
        {
            UserFeedback.ShowTrackingQualityInfo(camera.TrackingState, camera.TrackingStateReason, true);

            switch (camera.TrackingState)
            {
            case ARTrackingState.NotAvailable:
            case ARTrackingState.Limited:
                UserFeedback.EscalateFeedback(camera.TrackingState, camera.TrackingStateReason, 3);
                break;

            case ARTrackingState.Normal:
                UserFeedback.CancelScheduledMessage(MessageType.TrackingStateEscalation);
                break;
            }
        }
Example #29
    // Start is called before the first frame update
    void Start()
    {
        // Subscribe for events
        Controller.ObjectAddedEvent          += OnObjectAdded;
        Controller.PoseEstimatorCreatedEvent += OnPoseEstimatorCreated;
        Controller.PoseEstimationEvent       += OnPosesEstimeted;
        //Controller.Instance.CreateNewPoseEstimator()

        // test
        this.AddObject(System.IO.Path.Combine("Assets", "Resources", "Models", "squirrel_demo_low.obj"));
        ARCamera = Camera.main.GetComponent <ARCamera>();
        this.CreateNewPoseEstimator();
        this.EstimatePoses(false, true);
        this.ToggleTracking(0);
        this.EstimatePoses(false, false);
    }
Example #30
        private void UpdateTransform(SCNVector3 position, ARCamera camera)
        {
            // Add to the list of recent positions.
            this.recentPreviewNodePositions.Add(position);

            // Remove anything older than the last 8 positions.
            this.recentPreviewNodePositions.KeepLast(8);

            // Move to average of recent positions to avoid jitter.
            var average = this.recentPreviewNodePositions.GetAverage();

            if (average.HasValue)
            {
                this.Position = average.Value;
            }
        }
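KeepLast and GetAverage are helpers not shown in this sample. A minimal sketch under the assumption that they trim the list in place and average the vectors; the original implementations may differ:

    // Assumed helper extensions used by UpdateTransform.
    // Requires: using System.Collections.Generic; using SceneKit;
    internal static class PreviewNodeListExtensions
    {
        // Drop everything except the last `count` elements, in place.
        public static void KeepLast(this List<SCNVector3> list, int count)
        {
            if (list.Count > count)
            {
                list.RemoveRange(0, list.Count - count);
            }
        }

        // Average the vectors, or return null for an empty list.
        public static SCNVector3? GetAverage(this List<SCNVector3> list)
        {
            if (list.Count == 0)
            {
                return null;
            }
            var sum = new SCNVector3(0f, 0f, 0f);
            foreach (var v in list)
            {
                sum += v;
            }
            return sum / list.Count;
        }
    }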
Example #31
	// References globals ContentMode, ContentAlign, ContentRotate90, Screen.width, Screen.height.
	private Rect getViewport(int contentWidth, int contentHeight, bool stereo, ARCamera.ViewEye viewEye)
	{
		int backingWidth = Screen.width;
		int backingHeight = Screen.height;
		int left, bottom, w, h;

		if (stereo) {
			// Assume side-by-side or half side-by-side mode.
			w = backingWidth / 2;
			h = backingHeight;
			if (viewEye == ARCamera.ViewEye.Left) left = 0;
			else left = backingWidth / 2;
			bottom = 0;
		} else {
			if (ContentMode == ContentMode.Stretch) {
				w = backingWidth;
				h = backingHeight;
			} else {
				int contentWidthFinalOrientation = (ContentRotate90 ? contentHeight : contentWidth);
				int contentHeightFinalOrientation = (ContentRotate90 ? contentWidth : contentHeight);
				if (ContentMode == ContentMode.Fit || ContentMode == ContentMode.Fill) {
					float scaleRatioWidth, scaleRatioHeight, scaleRatio;
					scaleRatioWidth = (float)backingWidth / (float)contentWidthFinalOrientation;
					scaleRatioHeight = (float)backingHeight / (float)contentHeightFinalOrientation;
					if (ContentMode == ContentMode.Fill) scaleRatio = Math.Max(scaleRatioHeight, scaleRatioWidth);
					else scaleRatio = Math.Min(scaleRatioHeight, scaleRatioWidth);
					w = (int)((float)contentWidthFinalOrientation * scaleRatio);
					h = (int)((float)contentHeightFinalOrientation * scaleRatio);
				} else { // 1:1
					w = contentWidthFinalOrientation;
					h = contentHeightFinalOrientation;
				}
			}
			
			if (ContentAlign == ContentAlign.TopLeft
			    || ContentAlign == ContentAlign.Left
			    || ContentAlign == ContentAlign.BottomLeft) left = 0;
			else if (ContentAlign == ContentAlign.TopRight
			         || ContentAlign == ContentAlign.Right
			         || ContentAlign == ContentAlign.BottomRight) left = backingWidth - w;
			else left = (backingWidth - w) / 2;
			
			if (ContentAlign == ContentAlign.BottomLeft
			    || ContentAlign == ContentAlign.Bottom
			    || ContentAlign == ContentAlign.BottomRight) bottom = 0;
			else if (ContentAlign == ContentAlign.TopLeft
			         || ContentAlign == ContentAlign.Top
			         || ContentAlign == ContentAlign.TopRight) bottom = backingHeight - h;
			else bottom = (backingHeight - h) / 2;
		}

		//Log(LogTag + "For " + backingWidth + "x" + backingHeight + " screen, calculated viewport " + w + "x" + h + " at (" + left + ", " + bottom + ").");
		return new Rect(left, bottom, w, h);
	}
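A hypothetical call site for the method above, fitting a 1280x720 camera image in mono mode and applying the result to a Unity camera; the camera reference and dimensions are assumptions:

	// Hypothetical usage; renderCamera is an assumed UnityEngine.Camera field.
	Rect viewport = getViewport(1280, 720, false, ARCamera.ViewEye.Left);
	renderCamera.pixelRect = viewport;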