// Procedurally poses the pectoral bones and breast-shape channels so they
// respond to gravity in the chest bone's local frame.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    // Transform world "down" into the chest bone's local frame.
    var outputs = channelSystem.Evaluate(null, inputs);
    var chestTotalTransform = chestBone.GetChainedTransform(outputs);
    var worldToChestRotation = chestTotalTransform.RotationStage.Rotation;
    worldToChestRotation.Invert();
    var gravity = Vector3.Transform(Vector3.Down, worldToChestRotation);

    // Pitch both pectorals together from the vertical gravity component.
    float pitch = -5 - gravity.Y * 5;
    lPectoralBone.Rotation.X.SetValue(inputs, pitch);
    rPectoralBone.Rotation.X.SetValue(inputs, pitch);

    // Yaw each pectoral independently from the sideways gravity component.
    float sideways = gravity.X;
    lPectoralBone.Rotation.Y.SetValue(inputs, 5 * ExpandNegative(sideways));
    rPectoralBone.Rotation.Y.SetValue(inputs, 5 * ExpandPositive(sideways));

    // Facing up flattens; facing down hangs forward. Each is clamped to >= 0.
    flattenChannel.SetValue(inputs, Max(-gravity.Z, 0));
    hangForwardChannel.SetValue(inputs, Max(+gravity.Z, 0));
}
// Per-frame update for the whole figure group: sync with models, update frame
// state, propagate parent vertex deltas to children, then refresh rendering.
public void Update(DeviceContext context, FrameUpdateParameters updateParameters, ImageBasedLightingEnvironment lightingEnvironment) {
    // Model units appear to be centimeters; scale to world meters.
    modelToWorldTransform.Update(context, Matrix.Scaling(0.01f));

    foreach (var child in childFigures) {
        child.SyncWithModel();
    }
    parentFigure.SyncWithModel();

    // Children consume the parent's frame outputs.
    var parentOutputs = parentFigure.UpdateFrame(context, updateParameters, null);
    foreach (var child in childFigures) {
        child.UpdateFrame(context, updateParameters, parentOutputs);
    }

    // Parent writes vertex deltas; children read them back.
    parentFigure.UpdateVertexPositionsAndGetDeltas(context, parentDeltas.OutView);
    foreach (var child in childFigures) {
        child.UpdateVertexPositions(context, parentDeltas.InView);
    }

    parentFigure.Update(context, lightingEnvironment);
    foreach (var child in childFigures) {
        child.Update(context, lightingEnvironment);
    }
}
// Caches each tracked device's pose-validity flag for this frame.
public void Update(FrameUpdateParameters updateParameters) {
    uint deviceCount = OpenVR.k_unMaxTrackedDeviceCount;
    for (uint deviceIdx = 0; deviceIdx < deviceCount; deviceIdx += 1) {
        isPoseValid[deviceIdx] = updateParameters.GamePoses[deviceIdx].bPoseIsValid;
    }
}
// Headless benchmark loop: prepares frames at a fixed simulated 90 Hz timestep
// and prints the mean preparation time (ms) every 100 frames. Never returns.
private void Run(FramePreparer framePreparer) {
    const float deltaTime = 1 / 90f;
    float time = 0;

    var gamePoses = new TrackedDevicePose_t[OpenVR.k_unMaxTrackedDeviceCount];
    OpenVR.Compositor.GetLastPoses(gamePoses, gamePoses);

    // Fixed head position roughly at standing eye height, 1 m back.
    var headPosition = new Vector3(0, 1.5f, 1f);

    var stopwatch = Stopwatch.StartNew();
    int preparedFrameCount = 0;
    while (true) {
        time += deltaTime;
        var updateParameters = new FrameUpdateParameters(time, deltaTime, gamePoses, headPosition);
        framePreparer.PrepareFrame(updateParameters).Dispose();

        preparedFrameCount += 1;
        if (preparedFrameCount == 100) {
            Console.WriteLine(stopwatch.Elapsed.TotalMilliseconds / preparedFrameCount);
            preparedFrameCount = 0;
            stopwatch.Restart();
        }
    }
}
// Returns this device's IK goal for the frame, or null when not tracking.
// Starts tracking first so a fresh trigger press yields a goal immediately.
public InverseKinematicsGoal GetGoal(FrameUpdateParameters updateParameters, RigidBoneSystemInputs inputs, ControlVertexInfo[] previousFrameControlVertexInfos) {
    MaybeStartTracking(updateParameters, inputs, previousFrameControlVertexInfos);
    return MaybeContinueTracking(updateParameters);
}
// Advances the procedural eye-blink state machine one frame and writes the
// blended eyes-closed value into the channel inputs. Blinks alternate between
// a linear closing phase and an exponential-decay reopening phase.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    double elapsed = updateParameters.TimeDelta;
    if (blinking) {
        // Closing phase: advance the closed fraction at 1/CloseDuration per second.
        eyesClosedAmount += elapsed / CloseDuration;
        if (eyesClosedAmount < MaximumCloseAmount) {
            // Still closing: the whole frame was consumed by the close.
            elapsed = 0;
        } else {
            // Close finished mid-frame: carry the leftover time into the
            // reopening phase below.
            // NOTE(review): the overshoot is measured against 1 here, but the
            // threshold above compares against MaximumCloseAmount — confirm
            // MaximumCloseAmount is meant to be 1, otherwise these disagree.
            elapsed = (eyesClosedAmount - 1) * CloseDuration;
            eyesClosedAmount = 1;
            blinking = false;
            timeUntilNextBlink = GenerateTimeUntilNextBlink();
        }
    }
    if (!blinking) {
        // Reopening phase: exponential decay toward fully open, with
        // OpenHalflife as the half-life.
        eyesClosedAmount *= Math.Pow(0.5, elapsed / OpenHalflife);
        timeUntilNextBlink -= elapsed;
        if (timeUntilNextBlink < 0) {
            blinking = true;
        }
    }
    eyesClosedChannel.SetValue(inputs, MaximumCloseAmount * eyesClosedAmount);
}
// While tracking, produces an IK goal that pulls the grabbed bone point toward
// the controller's current pose. Stops tracking (and returns null) as soon as
// the controller leaves non-menu mode or the trigger is released.
private InverseKinematicsGoal MaybeContinueTracking(FrameUpdateParameters updateParameters) {
    if (!tracking) {
        return null;
    }

    bool stillHeld = stateTracker.NonMenuActive
        && stateTracker.IsPressed(EVRButtonId.k_EButton_SteamVR_Trigger);
    if (!stillHeld) {
        tracking = false;
        return null;
    }

    var pose = updateParameters.GamePoses[stateTracker.DeviceIdx];
    var controllerDq = DualQuaternion.FromMatrix(pose.mDeviceToAbsoluteTracking.Convert());

    // Controller translation appears to be meters; * 100 converts to figure units.
    return new InverseKinematicsGoal(
        sourceBone,
        boneRelativeSourcePosition,
        boneRelativeSourceOrientation,
        controllerDq.Translation * 100,
        controllerDq.Rotation);
}
// Demo goal provider: currently only the move-hand-down goal is active.
public List<InverseKinematicsGoal> GetGoals(FrameUpdateParameters updateParameters, RigidBoneSystemInputs inputs, ControlVertexInfo[] previousFrameControlVertexInfos) {
    var goals = new List<InverseKinematicsGoal> {
        MakeMoveHandDownGoal(inputs)
        //MakeKeepFootInPlaceGoal(inputs)
    };
    return goals;
}
// Composite animator: fans the frame update out to every child animator.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    foreach (var childAnimator in animators) {
        childAnimator.Update(updateParameters, inputs);
    }
}
// Begins IK tracking when the controller's trigger is pressed (outside menu
// mode): maps the controller position to the nearest bone and captures the
// grab point/orientation relative to that bone.
private void MaybeStartTracking(FrameUpdateParameters updateParameters, RigidBoneSystemInputs inputs, ControlVertexInfo[] previousFrameControlVertexInfos) {
    if (tracking) {
        // Already tracking a grab.
        return;
    }

    bool triggerHeld = stateTracker.NonMenuActive
        && stateTracker.IsPressed(EVRButtonId.k_EButton_SteamVR_Trigger);
    if (!triggerHeld) {
        return;
    }

    tracking = true;

    var pose = updateParameters.GamePoses[stateTracker.DeviceIdx];
    var controllerDq = DualQuaternion.FromMatrix(pose.mDeviceToAbsoluteTracking.Convert());

    // Controller translation appears to be meters; * 100 converts to figure units.
    var worldSourcePosition = controllerDq.Translation * 100;
    var worldSourceOrientation = controllerDq.Rotation;

    // Remember the grab relative to the bone so the goal follows the bone later.
    sourceBone = parentInstance.MapPositionToBone(worldSourcePosition, previousFrameControlVertexInfos);
    var inverseSourceBoneTotalTransform = sourceBone.GetChainedTransform(inputs).Invert();
    boneRelativeSourcePosition = inverseSourceBoneTotalTransform.Transform(worldSourcePosition) - sourceBone.CenterPoint;
    boneRelativeSourceOrientation = worldSourceOrientation.Chain(inverseSourceBoneTotalTransform.Rotation);
}
// Per-frame scene update: UI menu, controller render models, image-based
// lighting pre-pass, then scene geometry and the actor.
public void Update(DeviceContext context, FrameUpdateParameters updateParameters) {
    menu.Update(context);
    renderModelRenderer.Update(updateParameters);
    iblEnvironment.Predraw(context);
    floor.Update(context);
    actor.Update(context, updateParameters, iblEnvironment);
}
// Runs one IK solve from the initial pose (used as a repeatable trial).
private void Trial() {
    var candidateInputs = new RigidBoneSystemInputs(initialInputs);
    var updateParameters = new FrameUpdateParameters(0, 1 / 90f, null, Vector3.Zero);
    var goals = goalProvider.GetGoals(updateParameters, initialInputs, null);
    solver.Solve(rigidBoneSystem, goals, candidateInputs);
}
// Records the frame's update work into a command list on the deferred context.
// Consistency fix: the original aliased deferredContext as `context` but then
// used both names interchangeably; now the local alias is used throughout.
private CommandList UpdateAndRecordUpdateCommandList(FrameUpdateParameters updateParameters) {
    DeviceContext context = deferredContext;
    controllerManager.Update();
    scene.Update(context, updateParameters);
    passController.PrepareFrame(context, scene.ToneMappingSettings);
    // false: do not restore the deferred context's state after recording.
    return context.FinishCommandList(false);
}
// Starts asynchronous preparation of the NEXT frame.
private void KickoffFramePreparation() {
    // Use the companion-window camera when it is decoupled from the HMD.
    var headPosition = companionWindow.HasIndependentCamera
        ? companionWindow.CameraPosition
        : PlayerPositionUtils.GetHeadPosition(gamePoses);

    // One frame ahead: these parameters describe the frame being prepared.
    var updateParameters = new FrameUpdateParameters(
        timeKeeper.NextFrameTime,
        timeKeeper.TimeDelta,
        gamePoses,
        headPosition);
    asyncFramePreparer.StartPreparingFrame(updateParameters);
}
// Collects the active IK goal from each device tracker; trackers that are not
// currently grabbing return null and contribute nothing.
public List<InverseKinematicsGoal> GetGoals(FrameUpdateParameters updateParameters, RigidBoneSystemInputs inputs, ControlVertexInfo[] previousFrameControlVertexInfos) {
    var goals = new List<InverseKinematicsGoal>();
    foreach (var tracker in deviceTrackers) {
        var trackerGoal = tracker.GetGoal(updateParameters, inputs, previousFrameControlVertexInfos);
        if (trackerGoal == null) {
            continue;
        }
        goals.Add(trackerGoal);
    }
    return goals;
}
// Records this frame's update and draw command lists and bundles them, with
// the pass callbacks, into a PreparedFrame for later playback.
public IPreparedFrame PrepareFrame(FrameUpdateParameters updateParameters) {
    var updateCommandList = UpdateAndRecordUpdateCommandList(updateParameters);
    var drawCommandList = RecordDrawCommandList();
    return new PreparedFrame(
        DoPrework,
        updateCommandList,
        PrepareView,
        drawCommandList,
        passController.ResultTexture,
        DoDrawCompanionWindowUi,
        DoPostwork);
}
// Runs one frame of inverse kinematics: reads the base pose from the channel
// system, re-applies the deltas accumulated by previous solves, solves toward
// the provider's goals, then writes the result back into the channel inputs.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs channelInputs, ControlVertexInfo[] previousFrameControlVertexInfos) {
    var channelOutputs = channelSystem.Evaluate(null, channelInputs);
    boneSystem.Synchronize(channelOutputs);
    // Base pose from the channels, plus the IK deltas carried over from the
    // previous frame, gives the solver's starting pose.
    var baseInputs = boneSystem.ReadInputs(channelOutputs);
    var resultInputs = boneSystem.ApplyDeltas(baseInputs, poseDeltas);
    List<InverseKinematicsGoal> goals = goalProvider.GetGoals(updateParameters, resultInputs, previousFrameControlVertexInfos);
    // Solve mutates resultInputs in place toward the goals.
    solver.Solve(boneSystem, goals, resultInputs);
    // Persist this frame's adjustments so IK accumulates across frames.
    poseDeltas = boneSystem.CalculateDeltas(baseInputs, resultInputs);
    boneSystem.WriteInputs(channelInputs, channelOutputs, resultInputs);
}
// Aims both eyes at the forecast player head position when the behavior model
// has look-at enabled.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    // Feed the forecaster every frame so its prediction stays fresh even
    // while look-at is disabled.
    headPositionForecaster.Update(updateParameters.Time, updateParameters.HeadPosition);
    var predictedHeadPosition = headPositionForecaster.Forecast;

    if (!behaviorModel.LookAtPlayer) {
        return;
    }

    var outputs = channelSystem.Evaluate(null, inputs);
    var parentTotalTransform = eyeParentBone.GetChainedTransform(outputs);
    UpdateEye(outputs, parentTotalTransform, inputs, leftEyeBone, predictedHeadPosition);
    UpdateEye(outputs, parentTotalTransform, inputs, rightEyeBone, predictedHeadPosition);
}
// Positions the 3D audio source at the figure's head, expressed relative to
// the HMD so the spatializer hears it from the player's viewpoint.
private void Update3dAudioPosition(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    // Invert the HMD pose to get a world -> HMD transform.
    TrackedDevicePose_t hmdPose = updateParameters.GamePoses[OpenVR.k_unTrackedDeviceIndex_Hmd];
    Matrix worldToHmdTransform = hmdPose.mDeviceToAbsoluteTracking.Convert();
    worldToHmdTransform.Invert();

    var outputs = channelSystem.Evaluate(null, inputs);
    var headTotalTransform = headBone.GetChainedTransform(outputs);
    var headBindPoseCenter = headBone.CenterPoint.GetValue(outputs);

    // Figure space appears to be centimeters; / 100 converts to world meters.
    Vector3 headWorldPosition = headTotalTransform.Transform(headBindPoseCenter) / 100;
    phononStream.HeadRelativePosition = Vector3.TransformCoordinate(headWorldPosition, worldToHmdTransform);
}
// Drives speech playback: keeps the 3D audio position current, restarts the
// synthesizer when idle, and cross-fades between the current and next viseme.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    Update3dAudioPosition(updateParameters, inputs);

    currentTime = updateParameters.Time;

    // Kick off the next utterance once the synthesizer has gone idle.
    if (synth.State == SynthesizerState.Ready) {
        synth.SpeakAsync(Text);
    }

    // Cross-fade from the current viseme to the next over visemeDuration.
    float progress = MathUtil.Clamp((updateParameters.Time - visemeStartTime) / visemeDuration, 0, 1);
    Channel fadeOutChannel = visemeChannels[currentViseme];
    Channel fadeInChannel = visemeChannels[nextViseme];
    fadeOutChannel?.SetValue(inputs, 1 - progress);
    fadeInChannel?.SetValue(inputs, progress);
}
// Lets the player grab and drag the object with a gripped VR controller.
// While unattached, the first non-menu controller with the grip pressed
// claims the object; while attached, the object follows that controller
// until the grip is released.
public void Update(FrameUpdateParameters updateParameters) {
    if (trackedDeviceIdx == UnattachedSentinel) {
        for (uint deviceIdx = 0; deviceIdx < OpenVR.k_unMaxTrackedDeviceCount; ++deviceIdx) {
            ControllerStateTracker stateTracker = controllerManager.StateTrackers[deviceIdx];
            if (!stateTracker.NonMenuActive) {
                continue;
            }
            if (!stateTracker.IsPressed(EVRButtonId.k_EButton_Grip)) {
                continue;
            }

            trackedDeviceIdx = deviceIdx;

            // Capture the object's pose relative to the controller at grab time.
            TrackedDevicePose_t gamePose = updateParameters.GamePoses[deviceIdx];
            Matrix controllerToWorldTransform = gamePose.mDeviceToAbsoluteTracking.Convert();
            Matrix worldToControllerTransform = Matrix.Invert(controllerToWorldTransform);
            objectToControllerTransform = objectToWorldTransform * worldToControllerTransform;

            // Fix: stop scanning once attached, so a second gripped controller
            // in the same frame cannot overwrite the captured grab transform.
            break;
        }
    }

    if (trackedDeviceIdx != UnattachedSentinel) {
        ControllerStateTracker stateTracker = controllerManager.StateTrackers[trackedDeviceIdx];
        if (!stateTracker.NonMenuActive || !stateTracker.IsPressed(EVRButtonId.k_EButton_Grip)) {
            // Grip released or controller entered menu mode: detach.
            trackedDeviceIdx = UnattachedSentinel;
            return;
        }

        // Follow the controller, preserving the grab-time relative pose.
        TrackedDevicePose_t gamePose = updateParameters.GamePoses[trackedDeviceIdx];
        Matrix controllerToWorldTransform = gamePose.mDeviceToAbsoluteTracking.Convert();
        objectToWorldTransform = objectToControllerTransform * controllerToWorldTransform;
    }
}
// Turns the head so the figure's gaze tracks the forecast player head
// position, by computing a world-space rotation between the current and
// desired look directions and conjugating it into the neck's local frame.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    headPositionForecaster.Update(updateParameters.Time, updateParameters.HeadPosition);
    var forecastHeadPosition = headPositionForecaster.Forecast;
    var outputs = channelSystem.Evaluate(null, inputs);
    var neckTotalTransform = headBone.Parent.GetChainedTransform(outputs);
    // Gaze origin: midpoint between the two eye centers, in figure space.
    var figureEyeCenter = (leftEyeBone.CenterPoint.GetValue(outputs) + rightEyeBone.CenterPoint.GetValue(outputs)) / 2;
    var figureEyeWorldPosition = neckTotalTransform.Transform(figureEyeCenter);
    // Current look direction: transform a point one unit "backward" (RH
    // convention) of the eyes and take the direction from the eyes to it.
    var lookPointWorldPosition = neckTotalTransform.Transform(figureEyeCenter + Vector3.BackwardRH);
    var lookWorldDirection = Vector3.Normalize(lookPointWorldPosition - figureEyeWorldPosition);
    // Desired direction: toward the forecast head position (head position
    // appears to be in meters; * 100 converts to the figure's units).
    var targetLookWorldDirection = Vector3.Normalize(forecastHeadPosition * 100 - figureEyeWorldPosition);
    var worldRotationCorrection = QuaternionExtensions.RotateBetween(lookWorldDirection, targetLookWorldDirection);
    // Conjugate the world-space correction into the neck's local frame:
    // q_local = q_neck^-1 * q_world * q_neck.
    var targetLocalRotationCorrection = Quaternion.Invert(neckTotalTransform.RotationStage.Rotation) * worldRotationCorrection * neckTotalTransform.RotationStage.Rotation;
    headBone.SetEffectiveRotation(inputs, outputs, targetLocalRotationCorrection);
}
// Smoothly cross-fades between facial expressions, picking a new target
// expression each time the current transition completes.
public void Update(FrameUpdateParameters updateParameters, ChannelInputs inputs) {
    double elapsedSinceStart = updateParameters.Time - expressionStartTime;
    // A zero duration means "finished immediately" (forces a new expression).
    double expressionProgress = expressionDuration == 0
        ? Double.PositiveInfinity
        : elapsedSinceStart / expressionDuration;
    if (expressionProgress >= 1) {
        PrepareNextExpression(updateParameters.Time);
        expressionProgress = 0;
    }

    // Zero every expression channel, then blend just the two active ones.
    foreach (var expressionChannel in expressionChannels) {
        expressionChannel.SetValue(inputs, 0);
    }
    float blend = MathUtil.SmoothStep((float)expressionProgress);
    currentExpression.SetValue(inputs, 1 - blend);
    nextExpression.SetValue(inputs, blend);
}
// Builds the frame's channel inputs: shape inputs plus the model's own
// values, then the posed animation, IK, and procedural animation layers.
public ChannelInputs Update(ChannelInputs shapeInputs, FrameUpdateParameters updateParameters, ControlVertexInfo[] previousFrameControlVertexInfos) {
    // Start from the shape inputs and add the model's channel values on top.
    var inputs = new ChannelInputs(shapeInputs);
    for (int idx = 0; idx < inputs.RawValues.Length; ++idx) {
        inputs.RawValues[idx] += model.Inputs.RawValues[idx];
    }

    // Root placement comes from the drag handle.
    dragHandle.Update(updateParameters);
    var rootTransform = DualQuaternion.FromMatrix(dragHandle.Transform);

    var blendedPose = GetBlendedPose(updateParameters.Time);
    poser.Apply(inputs, blendedPose, rootTransform);

    ikAnimator.Update(updateParameters, inputs, previousFrameControlVertexInfos);
    proceduralAnimator.Update(updateParameters, inputs);

    return inputs;
}
// Computes this figure's frame outputs, or null when the figure is hidden.
// Visible children may override shape channels before animation is applied.
public FigureSystemOutputs UpdateFrame(DeviceContext context, FrameUpdateParameters updateParameters, FigureSystemOutputs parentOutputs) {
    if (!model.IsVisible) {
        return null;
    }

    var previousFrameResults = controlVertexProvider.GetPreviousFrameResults(context);

    // Start from this figure's own shape and layer in overrides from
    // visible children.
    var shapeInputs = new ChannelInputs(model.Shape.ChannelInputs);
    foreach (var child in children) {
        if (child.Model.IsVisible) {
            child.Model.Shape.ApplyOverrides(shapeInputs);
        }
    }

    var inputs = Animator == null
        ? shapeInputs
        : Animator.GetFrameInputs(shapeInputs, updateParameters, previousFrameResults);

    return controlVertexProvider.UpdateFrame(context, parentOutputs, inputs);
}
// Forwards the per-frame update to the actor's figure group.
public void Update(DeviceContext context, FrameUpdateParameters updateParameters, ImageBasedLightingEnvironment iblEnvironment)
    => figureGroup.Update(context, updateParameters, iblEnvironment);
// Thin forwarder: the actor's behavior owns frame-input generation.
public ChannelInputs GetFrameInputs(ChannelInputs shapeInputs, FrameUpdateParameters updateParameters, ControlVertexInfo[] previousFrameControlVertexInfos)
    => actor.Behavior.Update(shapeInputs, updateParameters, previousFrameControlVertexInfos);
// Publishes the next frame's parameters, clears any stale prepared frame,
// and signals the preparer to start working.
public void StartPreparingFrame(FrameUpdateParameters updateParameters) {
    this.updateParameters = updateParameters;
    preparedFrame = null;
    updateParametersReadySemaphore.Release();
}