/// <summary>
/// Takes in a new raw value to determine the smoothed value
/// </summary>
/// <param name="newValue">The 'raw', up-to-date value we are tracking</param>
/// <param name="timeSlice">How much time has passed since the last update</param>
public void Update(float newValue, float timeSlice)
{
    // Automatically reset, if we have not done so initially
    if (m_CurrentSampleIndex == -1)
    {
        Reset(newValue);
        return;
    }

    if (timeSlice <= 0.0f)
    {
        return;
    }

    var currentOffset = newValue - m_LastValue;
    m_LastValue = newValue;

    // Add new data to the current sample
    m_Samples[m_CurrentSampleIndex].offset += currentOffset;
    m_Samples[m_CurrentSampleIndex].time += timeSlice;

    // Accumulate and generate our new smooth, predicted float values
    var combinedSample = new Sample();
    var sampleIndex = m_CurrentSampleIndex;
    while (combinedSample.time < k_Period)
    {
        var overTimeScalar = Mathf.Clamp01((k_Period - combinedSample.time) / m_Samples[sampleIndex].time);
        combinedSample.Accumulate(ref m_Samples[sampleIndex], overTimeScalar);
        sampleIndex = (sampleIndex + 1) % k_SampleLength;
    }

    var oldestValue = combinedSample.value;

    // Another accumulation step to weight the most recent values more strongly for prediction
    sampleIndex = m_CurrentSampleIndex;
    while (combinedSample.time < k_PredictedPeriod) // combinedSample's time is advanced by the Accumulate call below
    {
        var overTimeScalar = Mathf.Clamp01((k_PredictedPeriod - combinedSample.time) / m_Samples[sampleIndex].time);
        combinedSample.Accumulate(ref m_Samples[sampleIndex], overTimeScalar); // adjusts the combined sample's time and offset values
        sampleIndex = (sampleIndex + 1) % k_SampleLength;
    }

    // Our combo sample is ready to be used to generate smooth output
    speed = combinedSample.offset / combinedSample.time;
    predictedValue = oldestValue + speed * k_SamplePeriod;

    // If the current sample is full, clear out the oldest sample and make that the new current sample
    if (m_Samples[m_CurrentSampleIndex].time < k_SamplePeriod)
    {
        return;
    }

    m_Samples[m_CurrentSampleIndex].value = newValue;
    m_CurrentSampleIndex = (m_CurrentSampleIndex - 1 + k_SampleLength) % k_SampleLength;
    m_Samples[m_CurrentSampleIndex] = new Sample();
}
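// The method above relies on a Sample ring-buffer entry type that is not shown in this section.
// The struct below is a hypothetical sketch, inferred only from the members used above (offset,
// time, value, and a scaled Accumulate); the real definition in the source may differ.
struct Sample
{
    public float offset; // total change in the raw value recorded over this sample's lifetime
    public float time;   // total time this sample covers
    public float value;  // the raw value captured when this sample was retired; used for prediction

    // Folds a scaled portion of another sample into this one. The scalar is the fraction of the
    // other sample to take, so a sample that only partially overlaps the averaging window
    // contributes proportionally rather than fully.
    public void Accumulate(ref Sample other, float scalar)
    {
        offset += other.offset * scalar;
        time += other.time * scalar;
        value = other.value; // after the accumulation loop, this holds the oldest visited sample's value
    }
}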
/// <summary>
/// Takes in a new pose to determine new physics values
/// </summary>
/// <param name="newPosition">The up-to-date position of the physics tracker</param>
/// <param name="newRotation">The up-to-date orientation of the physics tracker</param>
/// <param name="timeSlice">How much time has passed since the last pose update</param>
public void Update(Vector3 newPosition, Quaternion newRotation, float timeSlice)
{
    // Automatically reset, if we have not done so initially
    if (m_CurrentSampleIndex == -1)
    {
        Reset(newPosition, newRotation, Vector3.zero, Vector3.zero);
        return;
    }

    if (timeSlice <= 0.0f)
    {
        return;
    }

    // First get single-frame offset data that we will then feed into our smoothing and prediction steps
    // We use different techniques that are well suited for direction and 'speed', and then recombine into velocity later
    var currentOffset = newPosition - m_LastOffsetPosition;
    var currentDistance = currentOffset.magnitude;
    m_LastOffsetPosition = newPosition;

    var activeDirection = newPosition - m_LastDirectionPosition;

    // We skip extremely small deltas and wait for more reliable changes in direction
    if (activeDirection.magnitude < k_MinOffset)
    {
        activeDirection = Direction;
    }
    else
    {
        activeDirection.Normalize();
        m_LastDirectionPosition = newPosition;
    }

    // Update angular data in the same fashion
    var rotationOffset = newRotation * Quaternion.Inverse(m_LastRotation);
    float currentAngle;
    var activeAxis = Vector3.zero;
    rotationOffset.ToAngleAxis(out currentAngle, out activeAxis);

    // Extremely small deltas make for a wildly unpredictable axis
    if (currentAngle < k_MinAngle)
    {
        currentAngle = 0.0f;
        activeAxis = AngularAxis;
    }
    else
    {
        m_LastRotation = newRotation;
    }

    // We let strong rotations have more of an effect on the axis of rotation than weak ones
    var axisDistance = 1.0f + (currentAngle / 90.0f);

    // Add new data to the current sample
    m_Samples[m_CurrentSampleIndex].distance += currentDistance;
    m_Samples[m_CurrentSampleIndex].offset += currentOffset;
    m_Samples[m_CurrentSampleIndex].angle += currentAngle;
    m_Samples[m_CurrentSampleIndex].time += timeSlice;

    // The axis can flip direction, which during this accumulation step can result in values getting small and unpredictable
    // We manually make sure the axes are always adding direction, instead of taking it away
    if (Vector3.Dot(activeAxis, m_Samples[m_CurrentSampleIndex].axisOffset) < 0)
    {
        m_Samples[m_CurrentSampleIndex].axisOffset += -activeAxis * axisDistance;
    }
    else
    {
        m_Samples[m_CurrentSampleIndex].axisOffset += activeAxis * axisDistance;
    }

    // Accumulate and generate our new smooth, predicted physics values
    var combinedSample = new Sample();
    var sampleIndex = m_CurrentSampleIndex;
    while (combinedSample.time < k_Period)
    {
        var overTimeScalar = Mathf.Clamp01((k_Period - combinedSample.time) / m_Samples[sampleIndex].time);
        combinedSample.Accumulate(ref m_Samples[sampleIndex], overTimeScalar, activeDirection, activeAxis);
        sampleIndex = (sampleIndex + 1) % k_SampleLength;
    }

    var oldestSpeed = combinedSample.speed;
    var oldestAngularSpeed = combinedSample.angularSpeed;

    // Another accumulation step to weight the most recent values more strongly for prediction
    sampleIndex = m_CurrentSampleIndex;
    while (combinedSample.time < k_PredictedPeriod)
    {
        var overTimeScalar = Mathf.Clamp01((k_PredictedPeriod - combinedSample.time) / m_Samples[sampleIndex].time);
        combinedSample.Accumulate(ref m_Samples[sampleIndex], overTimeScalar, activeDirection, activeAxis);
        sampleIndex = (sampleIndex + 1) % k_SampleLength;
    }

    // Our combo sample is ready to be used to generate physics output
    Speed = combinedSample.distance / k_PredictedPeriod;

    // Try to use the weighted combination of offsets
    if (combinedSample.offset.magnitude > k_MinLength)
    {
        Direction = combinedSample.offset.normalized;
    }
    else
    {
        var directionVsActive = Vector3.Dot(Direction, activeDirection);
        if (directionVsActive < 0.0f)
        {
            directionVsActive = -directionVsActive;
            Direction = -Direction;
        }
        Direction = Vector3.Lerp(activeDirection, Direction, directionVsActive).normalized;
    }
    Velocity = Direction * Speed;

    AngularSpeed = combinedSample.angle / k_PredictedPeriod;

    // Try to use the weighted combination of angles
    // We do one additional smoothing step here - this data is simply noisier than position
    if (combinedSample.axisOffset.magnitude > k_MinLength)
    {
        activeAxis = combinedSample.axisOffset.normalized;
    }

    var axisVsActive = Vector3.Dot(AngularAxis, activeAxis);
    if (axisVsActive < 0.0f)
    {
        axisVsActive = -axisVsActive;
        AngularAxis = -AngularAxis;
    }
    AngularAxis = Vector3.Lerp(activeAxis, AngularAxis, axisVsActive).normalized;
    AngularVelocity = AngularAxis * AngularSpeed * Mathf.Deg2Rad;

    // We compare the newest and oldest velocity samples to get the new acceleration
    var speedDelta = Speed - oldestSpeed;
    var angularSpeedDelta = AngularSpeed - oldestAngularSpeed;

    AccelerationStrength = speedDelta / k_Period;
    Acceleration = AccelerationStrength * Direction;

    AngularAccelerationStrength = angularSpeedDelta / k_Period;
    AngularAcceleration = AngularAxis * AngularAccelerationStrength * Mathf.Deg2Rad;

    // If the current sample is full, clear out the oldest sample and make that the new current sample
    if (m_Samples[m_CurrentSampleIndex].time < k_SamplePeriod)
    {
        return;
    }

    // We record the last speed values before we switch to a new sample, for acceleration sampling
    m_Samples[m_CurrentSampleIndex].speed = Speed;
    m_Samples[m_CurrentSampleIndex].angularSpeed = AngularSpeed;
    m_CurrentSampleIndex = ((m_CurrentSampleIndex - 1) + k_SampleLength) % k_SampleLength;
    m_Samples[m_CurrentSampleIndex] = new Sample();
}
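// A minimal usage sketch (hypothetical: 'PhysicsTracker' as the hosting class name and the
// surrounding MonoBehaviour are assumptions, but the Update signature and the Velocity /
// AngularVelocity outputs come from the code above). It feeds the tracker a transform's pose
// once per frame, then reads back the smoothed values.
using UnityEngine;

public class TrackedObjectExample : MonoBehaviour
{
    public Transform target; // the transform whose motion we want smoothed physics values for

    PhysicsTracker m_Tracker = new PhysicsTracker(); // assumed class name for the methods above

    void Update()
    {
        // Drive the tracker with the latest pose and this frame's time slice
        m_Tracker.Update(target.position, target.rotation, Time.deltaTime);

        // The smoothed outputs can then drive physics-style behavior,
        // e.g. giving a thrown object the tracked velocity on release
        Debug.Log($"Velocity: {m_Tracker.Velocity}  AngularVelocity: {m_Tracker.AngularVelocity}");
    }
}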