Example #1
        // Update is called once per frame
        void Update()
        {
            bool newFrame = false;

            if (IsAnimationPlaying && !IsRecording && (DateTime.Now - lastFrameTime).TotalSeconds < Globals.INV_FPS)
            {
                return;
            }
            else if (IsAnimationPlaying)
            {
                if (Mathf.FloorToInt((float)(DateTime.Now - lastFrameTime).TotalSeconds * Globals.FPS) > 0)
                {
                    // Use FPS to determine which frame to move to (NOTE: this might skip frames)
                    CurFrame = CurFrame + Mathf.FloorToInt((float)(DateTime.Now - lastFrameTime).TotalSeconds * Globals.FPS);

                    // Store ideal execution time for this frame
                    lastFrameTime = DateTime.Now.AddSeconds(-((DateTime.Now - lastFrameTime).TotalSeconds % Globals.INV_FPS));

                    newFrame = true;
                }

                // Automatically add more frames if needed
                if (controlledObject != null && CurFrame >= NumFrame)
                {
                    Debug.Log("Adding additional frames: " + NumFrame + " -> " + (CurFrame + 1));
                    NumFrame = CurFrame + 1;
                }

                CurFrame = CurFrame % NumFrame;
                foreach (var obj in objects)
                {
                    obj.EvaluateTransform(CurFrame);
                }
            }

            if (controlledObject != null)
            {
                switch (GestureManager.RecognitionState)
                {
                case GestureRecognizerState.Translating:
                    var curHandPosition = poseManager.GetHandTransform(OvrAvatar.HandType.Right, PoseManager.HandJoint.IndexTip).position;
                    var addVector       = (controlledObject.transform.parent == null) ?
                                          (curHandPosition - initialHandPosition) :
                                          controlledObject.transform.parent.InverseTransformVector(curHandPosition - initialHandPosition);
                    var position = initialObjectPose.transform.localPosition + addVector;

                    if (newFrame)
                    {
                        controlledObject.AddTranslationKey(position, CurFrame);
                    }
                    else
                    {
                        controlledObject.transform.localPosition = position;
                    }
                    break;

                case GestureRecognizerState.Scaling:
                    var curInterHandDistance = (
                        poseManager.GetHandTransform(OvrAvatar.HandType.Right, PoseManager.HandJoint.IndexTip).position -
                        poseManager.GetHandTransform(OvrAvatar.HandType.Left, PoseManager.HandJoint.IndexTip).position
                        ).magnitude;
                    var scale = initialObjectPose.transform.localScale * (curInterHandDistance / (initialInterHandDistance + float.Epsilon));

                    if (newFrame)
                    {
                        controlledObject.AddScaleKey(scale, CurFrame);
                    }
                    else
                    {
                        controlledObject.transform.localScale = scale;
                    }
                    break;

                case GestureRecognizerState.Rotating:
                    var rotation = ComputeCurrentRotation();
                    rotation = ((initialObjectPose.transform.parent == null) ?
                                Quaternion.identity :
                                Quaternion.Inverse(initialObjectPose.transform.parent.rotation)) * rotation;
                    if (newFrame)
                    {
                        controlledObject.AddRotationKey(rotation, CurFrame);
                    }
                    else
                    {
                        controlledObject.transform.localRotation = rotation;
                    }
                    break;
                }
                if (newFrame)
                {
                    controlledObject.EvaluateTransform(CurFrame);
                }
            }
            SetFrameCounterText();
        }
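
The playback timing above assumes Globals.FPS and Globals.INV_FPS are reciprocals of each other. A minimal sketch of how those constants might be declared (the names come from the code above; the concrete frame rate is only illustrative):

        // Hypothetical definition of the timing constants referenced in Update() above.
        public static class Globals
        {
            public const float FPS     = 30.0f;       // playback frames per second (illustrative value)
            public const float INV_FPS = 1.0f / FPS;  // seconds per playback frame
        }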
Example #2
        // Update is called once per frame
        void Update()
        {
            if (Input.GetKeyUp(KeyCode.P))
            {
                TimeManager.SwitchAnimationPlayState();
            }

            if (Input.GetKeyUp(KeyCode.R))
            {
                TimeManager.SwitchRecordingState();
            }

            switch (RecognitionState)
            {
            case GestureRecognizerState.Default:
                if (rightInteracting.Count > 0)
                {
                    SetRecognitionState(GestureRecognizerState.Recognizing);
                }
                else if (leftInteracting.Count > 0)
                {
                    SetRecognitionState(GestureRecognizerState.Recognizing);
                }
                else if (TimeManager.SelectedObject != null)
                {
                    SetRecognitionState(GestureRecognizerState.Recognizing);
                }
                break;

            case GestureRecognizerState.Recognizing:
                if (rightInteracting.Count == 0 && leftInteracting.Count == 0 && TimeManager.SelectedObject == null)
                {
                    SetRecognitionState(GestureRecognizerState.Default);
                }
                // Wait for a bit before we infer which gesture to execute
                // when the right hand is in a pinched position
                else if (rightInteracting.Count > 0 &&
                         rightPinchOn &&
                         (DateTime.Now - rightPinchOnTime).TotalSeconds > Globals.GESTURE_WAIT_TIME)
                {
                    // if left hand is also pinched onto an object, then execute scaling gesture
                    if (leftInteracting.Count > 0 && leftPinchOn)
                    {
                        SetRecognitionState(GestureRecognizerState.Scaling);
                        ProcessScalingGesture();
                    }
                    // otherwise, translate
                    else
                    {
                        SetRecognitionState(GestureRecognizerState.Translating);
                        ProcessTranslationGesture();
                    }
                }
                // Show the rotation axis
                else if (rightInteracting.Count > 0 &&
                         rightPointOn &&
                         (DateTime.Now - rightPointOnTime).TotalSeconds > Globals.GESTURE_WAIT_TIME)
                {
                    SetRecognitionState(GestureRecognizerState.ReadyToRotate);
                    TimeManager.KeepOldRotationAxis = false;
                    ProcessRotationAxisGesture();
                }
                // Can try to recognize an emission cone gesture if
                // 1. an emitter is selected,
                // 2. both hands have the same pose, and
                // 3. the hands satisfy the position constraints checked below
                //    (roughly opposite each other around the emitter, at comparable distances)
                else if (TimeManager.SelectedObject != null &&
                         TimeManager.SelectedObject.GetComponent <ParticleEmitter>() != null &&
                         PoseManager.GetHandShape(OvrAvatar.HandType.Right) == PoseManager.GetHandShape(OvrAvatar.HandType.Left) &&
                         PoseManager.GetHandShape(OvrAvatar.HandType.Right).HasFlag(PoseManager.HandShape.Fist))
                {
                    var selected = TimeManager.SelectedObject;
                    var leftPos  = poseManager.GetHandTransform(OvrAvatar.HandType.Left, Globals.EMISSION_GESTURE_JOINT).position;
                    var rightPos = poseManager.GetHandTransform(OvrAvatar.HandType.Right, Globals.EMISSION_GESTURE_JOINT).position;
                    leftPos  = selected.transform.InverseTransformPoint(leftPos);
                    rightPos = selected.transform.InverseTransformPoint(rightPos);
                    var leftPosProj  = new Vector2(leftPos.x, leftPos.y);
                    var rightPosProj = new Vector2(rightPos.x, rightPos.y);

                    if (Vector2.Dot(-leftPosProj.normalized, rightPosProj.normalized) > 0.6f &&
                        leftPos.sqrMagnitude / rightPos.sqrMagnitude <= 2.0f &&
                        rightPos.sqrMagnitude / leftPos.sqrMagnitude <= 2.0f &&
                        Mathf.Max(Mathf.Abs(leftPos.z), Mathf.Abs(rightPos.z)) <= 0.2f / selected.transform.lossyScale.z)
                    {
                        SetRecognitionState(GestureRecognizerState.DrawingEmissionCone);
                        ProcessEmissionConeGestureStart();
                    }
                }
                else if (rightInteracting.Count == 0 && rightAlmostPointOn)
                {
                    tapGestureStartTime = DateTime.Now;
                    SetRecognitionState(GestureRecognizerState.ReadyToSelect);
                }
                break;

            case GestureRecognizerState.Scaling:
                if (rightInteracting.Count == 0)
                {
                    TimeManager.StopTransforming(RecognitionState);
                    SetRecognitionState(GestureRecognizerState.Default);
                }
                else if (leftInteracting.Count == 0)
                {
                    TimeManager.StopTransforming(RecognitionState);
                    SetRecognitionState(GestureRecognizerState.Recognizing);
                }
                break;

            case GestureRecognizerState.Translating:
                if (rightInteracting.Count == 0)
                {
                    TimeManager.StopTransforming(RecognitionState);
                    SetRecognitionState(GestureRecognizerState.Default);
                }
                break;

            case GestureRecognizerState.Rotating:
                // Interaction ended: Go back to default state
                if (rightInteracting.Count == 0)
                {
                    TimeManager.StopTransforming(RecognitionState);
                    SetRecognitionState(GestureRecognizerState.Default);
                    ProcessRotationEnd();
                }
                // Current rotation ended, but the user might be taking a break
                // Keep showing the axis of rotation
                else if (!(leftPinchOn || leftPointOn))
                {
                    TimeManager.StopTransforming(RecognitionState);
                    SetRecognitionState(GestureRecognizerState.ReadyToRotate);
                    TimeManager.KeepOldRotationAxis = true;
                }
                break;

            case GestureRecognizerState.ReadyToRotate:
                // Interaction ended: Go back to default state
                if (rightInteracting.Count == 0)
                {
                    SetRecognitionState(GestureRecognizerState.Default);
                    ProcessRotationCancellation();
                }
                // Start computing the rotation deltas
                else if (leftPointOn || leftPinchOn)
                {
                    SetRecognitionState(GestureRecognizerState.Rotating);
                    ProcessRotationGesture();
                }
                // Try to recognize "Tap" gesture
                else if (!rightPointOn && rightAlmostPointOn)
                {
                    tapGestureStartTime = DateTime.Now;
                    SetRecognitionState(GestureRecognizerState.ReadyToSelect);
                    ProcessRotationCancellation();
                }
                // Otherwise, just recompute the selected object and the rotation axis
                else
                {
                    ProcessRotationAxisGesture();
                }
                break;

            case GestureRecognizerState.ReadyToSelect:
                // Pointing right finger again
                if (rightPointOn)
                {
                    // If the amount of time spent "almost-pointing" was small, execute selection command
                    if ((DateTime.Now - tapGestureStartTime).TotalSeconds < Globals.TAP_GESTURE_MAX_TIME)
                    {
                        Debug.Log("Selecting...");
                        ProcessTapGesture();
                        tapGestureStartTime = DateTime.MinValue;
                    }
                    // Else, ignore and try to go back to "Ready-to-rotate" state
                    else
                    {
                        if (rightInteracting.Count > 0)
                        {
                            SetRecognitionState(GestureRecognizerState.ReadyToRotate);
                            TimeManager.KeepOldRotationAxis = false;
                            ProcessRotationAxisGesture();
                        }
                        else
                        {
                            SetRecognitionState(GestureRecognizerState.Default);
                        }
                    }
                }
                break;

            case GestureRecognizerState.AfterSelectGesture:
                // Ignore all other hand pose changes until an object is selected
                if (rightInteracting.Count == 0)
                {
                    SetRecognitionState(GestureRecognizerState.Default);
                }
                break;

            case GestureRecognizerState.ReadyForConeGesture:
                // Hand exited the cone
                if (emitterReceivingConeGestures.EnableEmissionConeGestures == false)
                {
                    SetRecognitionState(GestureRecognizerState.Default);
                }
                // If right hand is in an active pose, start recording
                else if (rightPointOn || rightPinchOn || rightGrabOn)
                {
                    SetRecognitionState(GestureRecognizerState.RecognizingConeGesture);
                    emitterReceivingConeGestures.ConeGestureStarted();
                }
                break;

            case GestureRecognizerState.RecognizingConeGesture:
                if (!(rightPointOn || rightPinchOn || rightGrabOn))
                {
                    SetRecognitionState(GestureRecognizerState.ReadyForConeGesture);
                    emitterReceivingConeGestures.ConeGestureFinished();
                }
                break;

            case GestureRecognizerState.Instantiating:
                if (
                    (HandInstantiatingObject == OvrAvatar.HandType.Right &&
                     (rightAlmostPointOn || rightPointOn || rightGrabOn || rightPinchOn)) ||
                    (HandInstantiatingObject == OvrAvatar.HandType.Left &&
                     (leftAlmostPointOn || leftPointOn || leftGrabOn || leftPinchOn))
                    )
                {
                    var fingerPos = poseManager.GetHandTransform(HandInstantiatingObject, PoseManager.HandJoint.IndexTip).position;
                    ObjectBeingInstantiated.transform.position = fingerPos;
                }
                else
                {
                    // If the object is within the bounds of an emitter
                    // then set it to be the particle mesh
                    var             colliders     = Physics.OverlapSphere(poseManager.GetHandTransform(HandInstantiatingObject, PoseManager.HandJoint.IndexTip).position, 0.01f);
                    ParticleEmitter targetEmitter = null;
                    foreach (var collider in colliders)
                    {
                        if (collider.gameObject.GetInstanceID() != ObjectBeingInstantiated.gameObject.GetInstanceID() &&
                            collider.GetComponent <ParticleEmitter>() != null)
                        {
                            targetEmitter = collider.GetComponent <ParticleEmitter>();
                            break;
                        }
                    }

                    if (targetEmitter != null && ObjectInstantiator.ParticlePrefab != null)
                    {
                        targetEmitter.TryChangeParticleShapeFromDroppedObject(ObjectInstantiator);
                        Destroy(ObjectBeingInstantiated.gameObject);
                        SetRecognitionState(GestureRecognizerState.Default);
                    }
                    // Otherwise, instantiate it as an Animatable
                    else
                    {
                        ObjectBeingInstantiated.Init(ObjectInstantiator);
                        SetRecognitionState(GestureRecognizerState.Default);
                    }
                }
                break;
            }
        }
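
The state machine above references a fixed set of recognizer states. A sketch of the GestureRecognizerState enum, reconstructed from the states used in this method (the member order and descriptions are assumptions):

        // Hypothetical reconstruction of the recognizer states handled above.
        public enum GestureRecognizerState
        {
            Default,                 // no interaction in progress
            Recognizing,             // hands are interacting; gesture not yet determined
            Translating,             // right-hand pinch drags the selected object
            Scaling,                 // two-hand pinch scales by inter-hand distance
            ReadyToRotate,           // rotation axis is shown, waiting for the left hand
            Rotating,                // rotation deltas are being applied
            ReadyToSelect,           // waiting to confirm a "tap" selection
            AfterSelectGesture,      // entered after a select gesture; waits for the right hand to release
            DrawingEmissionCone,     // both hands define an emission cone
            ReadyForConeGesture,     // hand is inside an emitter's cone region
            RecognizingConeGesture,  // cone gesture in progress
            Instantiating            // a new object follows the instantiating hand
        }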
Example #3
        // Update is called once per frame
        void Update()
        {
            OVRInput.Update();

            leftTouchActive  = OVRInput.IsControllerConnected(OVRInput.Controller.LTouch);
            rightTouchActive = OVRInput.IsControllerConnected(OVRInput.Controller.RTouch);

            PoseManager.HandJoint joint = PoseManager.HandJoint.Max;

            if (Input.GetKeyUp(KeyCode.Alpha0))
            {
                joint = PoseManager.HandJoint.ThumbTip;
            }
            else if (Input.GetKeyUp(KeyCode.Alpha1))
            {
                joint = PoseManager.HandJoint.IndexTip;
            }
            else if (Input.GetKeyUp(KeyCode.Alpha2))
            {
                joint = PoseManager.HandJoint.MiddleTip;
            }
            else if (Input.GetKeyUp(KeyCode.Alpha3))
            {
                joint = PoseManager.HandJoint.RingTip;
            }
            else if (Input.GetKeyUp(KeyCode.Alpha4))
            {
                joint = PoseManager.HandJoint.PinkyTip;
            }
            else if (Input.GetKeyUp(KeyCode.Alpha5))
            {
                var thumb  = poseManager.GetHandFrameVector(OvrAvatar.HandType.Right, PoseManager.HandFrame.Thumb);
                var finger = poseManager.GetHandFrameVector(OvrAvatar.HandType.Right, PoseManager.HandFrame.Fingers);
                var palm   = poseManager.GetHandFrameVector(OvrAvatar.HandType.Right, PoseManager.HandFrame.Palm);

                var wrist = poseManager.GetHandTransform(OvrAvatar.HandType.Right, PoseManager.HandJoint.Wrist);

                var indexBase = poseManager.GetHandTransform(OvrAvatar.HandType.Right, PoseManager.HandJoint.IndexBase);
                var indexTip  = poseManager.GetHandTransform(OvrAvatar.HandType.Right, PoseManager.HandJoint.IndexTip);

                var indexFingerVec = wrist.InverseTransformVector(indexTip.position - indexBase.position).normalized;

                Debug.Log(indexFingerVec.ToString("F4"));
            }
            else if (Input.GetKeyUp(KeyCode.G))
            {
                Debug.Log(GestureManager.RecognitionState);
            }

            if (joint < PoseManager.HandJoint.Max)
            {
                LogJointState(joint, OvrAvatar.HandType.Right);
            }


            // Ctrl-S to save the current scene (plain S suffices in the editor build)
            if (Input.GetKeyUp(KeyCode.S)
#if UNITY_EDITOR
                )
#else
                &&
                (Input.GetKey(KeyCode.RightControl) || Input.GetKey(KeyCode.LeftControl)))
#endif
            { FileIOManager.TrySaveCurrentScene(); }
        }
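
The key-to-joint mapping above uses PoseManager.HandJoint.Max as a "nothing selected" sentinel and only logs when a smaller value was assigned. A sketch of such an enum, listing only the members that appear in these examples (order and completeness are assumptions):

        // Hypothetical layout of PoseManager.HandJoint; Max doubles as a "none" sentinel.
        public enum HandJoint
        {
            Wrist,
            ThumbTip,
            IndexBase,
            IndexTip,
            MiddleTip,
            RingTip,
            PinkyTip,
            Max   // one past the last valid joint
        }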