/// <summary>
/// Skins an individual vertex: blends the four influencing bone matrices,
/// folds in the baked mesh transform, then transforms position and normal.
/// </summary>
public static void SkinVertex(
    Matrix[] bones,
    ref Microsoft.Xna.Framework.Vector3 position,
    ref Microsoft.Xna.Framework.Vector3 normal,
    ref Matrix bakedTransform,
    ref Microsoft.Xna.Framework.Vector4 blendIndices,
    ref Vector4 blendWeights,
    out Microsoft.Xna.Framework.Vector3 outPosition,
    out Microsoft.Xna.Framework.Vector3 outNormal)
{
    // Bone indices arrive packed in a Vector4; truncate each lane to an int.
    int i0 = (int)blendIndices.X;
    int i1 = (int)blendIndices.Y;
    int i2 = (int)blendIndices.Z;
    int i3 = (int)blendIndices.W;

    // Weighted blend of the four bone matrices, then bake in the mesh transform.
    Matrix skin;
    Blend4x3Matrix(ref bones[i0], ref bones[i1], ref bones[i2], ref bones[i3], ref blendWeights, out skin);
    Matrix.Multiply(ref skin, ref bakedTransform, out skin);

    // Support the 4 bone influences: full affine transform for the position,
    // rotation/scale-only transform for the normal.
    Microsoft.Xna.Framework.Vector3.Transform(ref position, ref skin, out outPosition);
    Microsoft.Xna.Framework.Vector3.TransformNormal(ref normal, ref skin, out outNormal);
}
// current edge detection threshold
// NOTE(review): the comment above appears to describe a field declared elsewhere
// (likely outlineThreshold) — confirm and move it next to that declaration.

/// <summary>
/// Creates a new cel-shading effect for the given screen, loading the
/// cel shader, its cel map (with a white-texture fallback) and the outline shader.
/// </summary>
public CelShadingEffect(IScreen screen)
    : base(screen)
{
    /* Set our light direction for the cel-shader */
    lightDirection = new Vector4 (0.0f, 0.0f, 1.0f, 1.0f);

    /* Load and initialize the cel-shader effect */
    celShader = screen.LoadEffect ("CelShader");
    RegisterEffect (celShader);
    celShader.Parameters ["LightDirection"].SetValue (lightDirection);
    celMap = screen.LoadTexture ("CelMap");
    // Fall back to a plain white texture when the cel map asset is missing.
    if (celMap == null) {
        celMap = ContentLoader.CreateTexture (screen.GraphicsDevice, Color.White);
    }
    celShader.Parameters ["Color"].SetValue (Color.Green.ToVector4 ());
    celShader.Parameters ["CelMap"].SetValue (celMap);

    /* Load and initialize the outline shader effect */
    outlineShader = screen.LoadEffect ("OutlineShader");
    RegisterEffect (outlineShader);
    outlineShader.Parameters ["Thickness"].SetValue (outlineThickness);
    outlineShader.Parameters ["Threshold"].SetValue (outlineThreshold);
    outlineShader.Parameters ["ScreenSize"].SetValue (new Vector2 (screen.Viewport.Bounds.Width, screen.Viewport.Bounds.Height));
}
/// <summary>
/// Creates a vertex with an explicit position, texture coordinate, hue and extra data.
/// </summary>
public VertexPositionTextureHueExtra(Vector3 position, Vector2 textureCoordinate, Color hue, Vector4 extra)
{
    Position = position;
    TextureCoordinate = textureCoordinate;
    Hue = hue;
    Extra = extra;
}
/// <summary>
/// Sets up the rain shader: selects the "Rain" technique, initializes the
/// light color to white and loads the raindrop texture.
/// </summary>
public RainShader()
    : base("Rain")
{
    Technique.CurrentTechnique = Technique.Techniques["Rain"];
    lightColor = Color.White.ToVector4();
    drops = ContentLoader.Load<Texture2D>(ContentType.TEXTURE, "drop1");
}
/// <summary>
/// Writes a Vector4 as four consecutive floats in X, Y, Z, W order.
/// The component order must match the corresponding reader.
/// </summary>
public static void Write(this ContentWriter contentWriter, Vector4 value)
{
    contentWriter.Write(value.X);
    contentWriter.Write(value.Y);
    contentWriter.Write(value.Z);
    contentWriter.Write(value.W);
}
/// <summary>
/// Reads one GND cube record from the stream: four corner heights followed by
/// the up/side/aside tile indices.
/// </summary>
/// <param name="bin">Reader positioned at the start of the cube record.</param>
/// <param name="version">File format version, forwarded to the base reader.</param>
public RoGndCubeData(BinaryReader bin, GenericFileFormatVersion version)
    : base(bin, version)
{
    // Read order mirrors the on-disk layout; do not reorder.
    Height = bin.ReadVector4();
    TileUp = bin.ReadInt32();
    TileSide = bin.ReadInt32();
    TileAside = bin.ReadInt32();
}
/// <summary>
/// Test scaffold for the AmbientLightColor property setter.
/// The property cannot be read back, so the test is inconclusive by design.
/// </summary>
public void AmbientLightColorTest()
{
    AwesomeEffect target = new AwesomeEffect(); // TODO: initialize with a suitable value
    Vector4 expected = new Vector4(); // TODO: initialize with a suitable value
    target.AmbientLightColor = expected;
    // (Message is German: "write-only properties cannot be verified".)
    Assert.Inconclusive("Lesegeschützte Eigenschaften können nicht überprüft werden.");
}
/// <summary>
/// Constructs this vector from an XNA Vector4, copying all four components.
/// </summary>
public Vector4(Microsoft.Xna.Framework.Vector4 v)
{
    x = v.X;
    y = v.Y;
    z = v.Z;
    w = v.W;
}
/// <summary>
/// Creates a vertex with position, UV coordinate, per-vertex data and hue.
/// </summary>
public VertexPositionTextureDataColor(Vector3 position, Vector2 uv, Vector4 data, Color hue)
{
    this.Position = position;
    this.UV = uv;
    this.Data = data;
    this.Hue = hue;
}
/// <summary>
/// Builds a vector-swizzling function from a spec like "y,-z,x".
/// Each comma-separated part names the source axis (with optional sign) that
/// feeds the corresponding output component. "null"/"none"/"x,y,z"/blank
/// yield the identity function.
/// </summary>
private static Func<Vector3, Vector3> Swizzler(string str)
{
    str = str.ToLowerInvariant();
    if (string.IsNullOrWhiteSpace(str) || str == "null" || str == "none" || str == "x,y,z")
        return a => a;

    var parts = str.Split(',');
    if (parts.Length != 3)
        throw new Exception(string.Format("Swizzle vector '{0}' has {1} elements; expected three", str, parts.Length));

    // vectors[i] becomes the basis column that output component i is taken from;
    // vectors[3] stays zero (no translation, and W is unused by Vector3.Transform's XYZ result).
    var vectors = new Vector4[4];
    for (int i = 0; i < 3; i++)
    {
        // A leading '-' negates the chosen axis.
        float p = parts[i].StartsWith("-") ? -1 : 1;
        if (parts[i].Contains("x")) vectors[i].X = p;
        else if (parts[i].Contains("y")) vectors[i].Y = p;
        else if (parts[i].Contains("z")) vectors[i].Z = p;
    }

    // Pack the basis vectors into a matrix so the swizzle is a single transform.
    var swizzle = new Matrix(
        vectors[0].X, vectors[1].X, vectors[2].X, vectors[3].X,
        vectors[0].Y, vectors[1].Y, vectors[2].Y, vectors[3].Y,
        vectors[0].Z, vectors[1].Z, vectors[2].Z, vectors[3].Z,
        vectors[0].W, vectors[1].W, vectors[2].W, vectors[3].W
    );
    return a => Vector3.Transform(a, swizzle);
}
/// <summary>
/// The traits' identity element must be neutral for Add on both sides.
/// </summary>
public void IdentityTest()
{
    var traits = Vector4Traits.Instance;
    var v = new Vector4(-1, -2, 3, 1);

    // v + identity == v == identity + v
    Assert.AreEqual(v, traits.Add(v, traits.Identity()));
    Assert.AreEqual(v, traits.Add(traits.Identity(), v));
}
/// <summary>
/// Sets up the cloud shader: selects the "Clouds" technique, initializes the
/// light color to white and loads the cloud-parts texture.
/// </summary>
public CloudShader()
    : base("Cloud")
{
    Technique.CurrentTechnique = Technique.Techniques["Clouds"];
    lightColor = Color.White.ToVector4();
    cloudsParts = ContentLoader.Load<Texture2D>(ContentType.TEXTURE, "clouds");
}
/// <summary>
/// Copies an XNA Vector4 into the engine's Vector4 type, component by component.
/// </summary>
internal static void ConvertVector4(ref XNA.Vector4 v, out Vector4 result)
{
    // Every field of the out parameter must be assigned before returning.
    result.X = v.X;
    result.Y = v.Y;
    result.Z = v.Z;
    result.W = v.W;
}
/// <summary>
/// Creates a vertex with a default white hue and zero extra data.
/// </summary>
public VertexPositionTextureHueExtra(Vector3 position, Vector2 textureCoordinate)
{
    Position = position;
    TextureCoordinate = textureCoordinate;
    Hue = Color.White;
    Extra = Vector4.Zero;
}
/// <summary>
/// CorrectSensorTilt applies camera tilt correction to the skeleton data.
/// </summary>
/// <param name="skeleton">The skeleton to correct</param>
/// <param name="floorPlane">The floor plane (consisting of up normal and sensor height) detected by skeleton tracking (if any).</param>
/// <param name="sensorElevationAngle">The tilt of the sensor as detected by Kinect.</param>
public static void CorrectSensorTilt(Skeleton skeleton, Tuple <float, float, float, float> floorPlane, int sensorElevationAngle)
{
    if (null == skeleton)
    {
        return;
    }

    // To correct the tilt of the skeleton due to a tilted camera, we have three possible up vectors:
    // one from any floor plane detected in Skeleton Tracking, one from the gravity normal produced by the 3D accelerometer,
    // and one from the tilt value sensed by the camera motor.
    // The raw accelerometer value is not currently available in the Kinect for Windows SDK, so instead we use the
    // the sensorElevationAngle, as the floor plane from skeletal tracking is typically only detected when the
    // camera is pointing down and sees the floor.
    // Note: SensorElevationAngle value varies around +/- 60 degrees.
    Vector3 floorNormal = Vector3.Up; // default value (has no tilt effect)

    // Assume camera base is level, and use the tilt of the Kinect motor.
    // Rotate an up vector by the negated elevation angle around the X axis
    // NOTE(review): the code passes the angle un-negated to CreateFromAxisAngle,
    // contrary to this comment — confirm the intended rotation direction.
    floorNormal = Vector3.Transform(
        floorNormal,
        Quaternion.CreateFromAxisAngle(new Vector3(1, 0, 0), MathHelper.ToRadians(sensorElevationAngle)));

    if (floorPlane != null)
    {
        Vector4 floorPlaneVec = new Vector4(floorPlane.Item1, floorPlane.Item2, floorPlane.Item3, floorPlane.Item4);
        // Prefer the detected floor plane when it is valid and the motor angle is
        // either zero or extreme (beyond +/- 50 degrees).
        if (floorPlaneVec.Length() > float.Epsilon && (sensorElevationAngle == 0 || Math.Abs(sensorElevationAngle) > 50))
        {
            // Use the floor plane for everything.
            floorNormal = new Vector3(floorPlaneVec.X, floorPlaneVec.Y, floorPlaneVec.Z);
        }
    }

    Array jointTypeValues = Enum.GetValues(typeof(JointType));

    // Running average of floor normal (smooths frame-to-frame jitter).
    averagedFloorNormal = (averagedFloorNormal * 0.9f) + (floorNormal * 0.1f);
    Quaternion rotationToRoomSpace = KinectHelper.GetShortestRotationBetweenVectors(Vector3.Up, averagedFloorNormal);

    Vector3 hipCenter = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.HipCenter].Position);

    // De-tilt: rotate every joint about the hip center into room space.
    foreach (JointType j in jointTypeValues)
    {
        Joint joint = skeleton.Joints[j];
        SkeletonPoint pt = joint.Position;
        Vector3 pos = KinectHelper.SkeletonPointToVector3(pt);

        // Move it back to the origin to rotate
        pos -= hipCenter;
        Vector3 rotatedVec = Vector3.Transform(pos, rotationToRoomSpace);
        rotatedVec += hipCenter;

        joint.Position = KinectHelper.Vector3ToSkeletonPoint(rotatedVec);
        skeleton.Joints[j] = joint;
    }
}
/// <summary>
/// CorrectSensorTilt applies camera tilt correction to the skeleton data.
/// NOTE(review): this method appears to be duplicated elsewhere in the project —
/// consider consolidating into a shared helper.
/// </summary>
/// <param name="skeleton">The skeleton to correct</param>
/// <param name="floorPlane">The floor plane (consisting of up normal and sensor height) detected by skeleton tracking (if any).</param>
/// <param name="sensorElevationAngle">The tilt of the sensor as detected by Kinect.</param>
public static void CorrectSensorTilt(Skeleton skeleton, Tuple<float, float, float, float> floorPlane, int sensorElevationAngle)
{
    if (null == skeleton)
    {
        return;
    }

    // To correct the tilt of the skeleton due to a tilted camera, we have three possible up vectors:
    // one from any floor plane detected in Skeleton Tracking, one from the gravity normal produced by the 3D accelerometer,
    // and one from the tilt value sensed by the camera motor.
    // The raw accelerometer value is not currently available in the Kinect for Windows SDK, so instead we use the
    // the sensorElevationAngle, as the floor plane from skeletal tracking is typically only detected when the
    // camera is pointing down and sees the floor.
    // Note: SensorElevationAngle value varies around +/- 60 degrees.
    Vector3 floorNormal = Vector3.Up; // default value (has no tilt effect)

    // Assume camera base is level, and use the tilt of the Kinect motor.
    // Rotate an up vector by the negated elevation angle around the X axis
    // NOTE(review): the code passes the angle un-negated to CreateFromAxisAngle,
    // contrary to this comment — confirm the intended rotation direction.
    floorNormal = Vector3.Transform(
        floorNormal,
        Quaternion.CreateFromAxisAngle(new Vector3(1, 0, 0), MathHelper.ToRadians(sensorElevationAngle)));

    if (floorPlane != null)
    {
        Vector4 floorPlaneVec = new Vector4(floorPlane.Item1, floorPlane.Item2, floorPlane.Item3, floorPlane.Item4);
        // Prefer the detected floor plane when it is valid and the motor angle is
        // either zero or extreme (beyond +/- 50 degrees).
        if (floorPlaneVec.Length() > float.Epsilon && (sensorElevationAngle == 0 || Math.Abs(sensorElevationAngle) > 50))
        {
            // Use the floor plane for everything.
            floorNormal = new Vector3(floorPlaneVec.X, floorPlaneVec.Y, floorPlaneVec.Z);
        }
    }

    Array jointTypeValues = Enum.GetValues(typeof(JointType));

    // Running average of floor normal (smooths frame-to-frame jitter).
    averagedFloorNormal = (averagedFloorNormal * 0.9f) + (floorNormal * 0.1f);
    Quaternion rotationToRoomSpace = KinectHelper.GetShortestRotationBetweenVectors(Vector3.Up, averagedFloorNormal);

    Vector3 hipCenter = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.HipCenter].Position);

    // De-tilt: rotate every joint about the hip center into room space.
    foreach (JointType j in jointTypeValues)
    {
        Joint joint = skeleton.Joints[j];
        SkeletonPoint pt = joint.Position;
        Vector3 pos = KinectHelper.SkeletonPointToVector3(pt);

        // Move it back to the origin to rotate
        pos -= hipCenter;
        Vector3 rotatedVec = Vector3.Transform(pos, rotationToRoomSpace);
        rotatedVec += hipCenter;

        joint.Position = KinectHelper.Vector3ToSkeletonPoint(rotatedVec);
        skeleton.Joints[j] = joint;
    }
}
public char[] color; // BGRA -- "A" seems to be ignored by the official client

/// <summary>
/// Reads one GND tile/surface record: two corner vectors, a texture index,
/// a lightmap index and a 4-channel BGRA color.
/// </summary>
/// <param name="bin">Reader positioned at the start of the tile record.</param>
/// <param name="version">File format version, forwarded to the base reader.</param>
public RoGndTileData(BinaryReader bin, GenericFileFormatVersion version)
    : base(bin, version)
{
    // Read order mirrors the on-disk layout; do not reorder.
    VectorWidth = bin.ReadVector4();
    VectorHeight = bin.ReadVector4();
    TextureIndex = bin.ReadUInt16();
    Lightmap = bin.ReadUInt16();
    // NOTE(review): ReadChars is encoding-dependent; ReadBytes(4) would be safer
    // for raw BGRA channel bytes — confirm against the file format.
    color = bin.ReadChars(4);
}
/// <summary>
/// Gets the value of the parameter as a vector4.
/// </summary>
/// <returns>The vector4 value.</returns>
public Vector4 GetValueVector4()
{
    XNA.Vector4 raw = _param.GetValueVector4();
    Vector4 converted;
    XNAHelper.ConvertVector4(ref raw, out converted);
    return converted;
}
/// <summary>
/// Constructs a new skinning data object by reading the animation texture and
/// the clip dictionary from the content pipeline stream.
/// </summary>
public InstancedSkinningData(ContentReader input)
{
    this.texture = input.ReadObject<Texture2D>();
    this.animations = input.ReadObject<IDictionary<string, InstancedAnimationClip>>();
    // NOTE(review): the texel data is fetched into a local array that is never
    // stored or used — possibly a leftover debug/warm-up read; confirm intent
    // before removing.
    Vector4[] data = new Vector4[this.texture.Width * this.texture.Height];
    this.texture.GetData<Vector4>(data);
}
/// <summary>
/// ConstrainSelfIntersection collides joints with the skeleton to keep the skeleton's hands and wrists from puncturing its body
/// A cylinder is created to represent the torso. Intersecting joints have their positions changed to push them outside the torso.
/// </summary>
/// <param name="skeleton">The skeleton.</param>
public static void Constrain(Skeleton skeleton)
{
    if (null == skeleton)
    {
        return;
    }

    // Tuning constants for the torso collision cylinder.
    const float ShoulderExtend = 0.5f;       // extends the cylinder above the shoulder center
    const float HipExtend = 6.0f;            // extends the cylinder below the hip center
    const float CollisionTolerance = 1.01f;  // pushes joints slightly beyond the cylinder surface
    const float RadiusMultiplier = 1.3f;     // increase for bulky avatars

    if (skeleton.Joints[JointType.ShoulderCenter].TrackingState != JointTrackingState.NotTracked &&
        skeleton.Joints[JointType.HipCenter].TrackingState != JointTrackingState.NotTracked)
    {
        Vector3 shoulderDiffLeft = KinectHelper.VectorBetween(skeleton, JointType.ShoulderCenter, JointType.ShoulderLeft);
        Vector3 shoulderDiffRight = KinectHelper.VectorBetween(skeleton, JointType.ShoulderCenter, JointType.ShoulderRight);
        float shoulderLengthLeft = shoulderDiffLeft.Length();
        float shoulderLengthRight = shoulderDiffRight.Length();

        // The distance between shoulders is averaged for the radius
        float cylinderRadius = (shoulderLengthLeft + shoulderLengthRight) * 0.5f;

        // Calculate the shoulder center and the hip center. Extend them up and down respectively.
        Vector3 shoulderCenter = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.ShoulderCenter].Position);
        Vector3 hipCenter = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.HipCenter].Position);
        Vector3 hipShoulder = hipCenter - shoulderCenter;
        hipShoulder.Normalize();
        shoulderCenter = shoulderCenter - (hipShoulder * (ShoulderExtend * cylinderRadius));
        hipCenter = hipCenter + (hipShoulder * (HipExtend * cylinderRadius));

        // Optionally increase radius to account for bulky avatars
        cylinderRadius *= RadiusMultiplier;

        // joints to collide
        JointType[] collisionIndices = { JointType.WristLeft, JointType.HandLeft, JointType.WristRight, JointType.HandRight };

        foreach (JointType j in collisionIndices)
        {
            Vector3 collisionJoint = KinectHelper.SkeletonPointToVector3(skeleton.Joints[j].Position);
            // XYZ carries the outward normal from the torso axis; W carries the distance
            // (as consumed below).
            Microsoft.Xna.Framework.Vector4 distanceNormal = KinectHelper.DistanceToLineSegment(shoulderCenter, hipCenter, collisionJoint);
            Vector3 normal = new Vector3(distanceNormal.X, distanceNormal.Y, distanceNormal.Z);

            // if distance is within the cylinder then push the joint out and away from the cylinder
            if (distanceNormal.W < cylinderRadius)
            {
                collisionJoint += normal * ((cylinderRadius - distanceNormal.W) * CollisionTolerance);

                Joint joint = skeleton.Joints[j];
                joint.Position = KinectHelper.Vector3ToSkeletonPoint(collisionJoint);
                skeleton.Joints[j] = joint;
            }
        }
    }
}
/// <summary>
/// Rebuilds the quad's four corner vertices. uv.XY is the top-left texture
/// corner and uv.ZW the bottom-right.
/// </summary>
private void Resize(Vector3 position, Vector2 area, Vector4 uv, Vector4 data, Color hue)
{
    // Precompute the three derived corners from the top-left position.
    Vector3 topRight = position + new Vector3(area.X, 0, 0);
    Vector3 bottomLeft = position + new Vector3(0, area.Y, 0);
    Vector3 bottomRight = position + new Vector3(area, 0);

    Vertices = new[]
    {
        new VertexPositionTextureDataColor(position, new Vector2(uv.X, uv.Y), data, hue),   // top left
        new VertexPositionTextureDataColor(topRight, new Vector2(uv.Z, uv.Y), data, hue),   // top right
        new VertexPositionTextureDataColor(bottomLeft, new Vector2(uv.X, uv.W), data, hue), // bottom left
        new VertexPositionTextureDataColor(bottomRight, new Vector2(uv.Z, uv.W), data, hue) // bottom right
    };
}
/// <summary>
/// Interpolate must hit the endpoints at t=0 and t=1 and blend linearly in between.
/// </summary>
public void InterpolationTest()
{
    var traits = Vector4Traits.Instance;
    var start = new Vector4(1, 2, 3, 1);
    var end = new Vector4(-4, 5, -6, 5);

    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)start, (Vector4F)traits.Interpolate(start, end, 0.0f)));
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)end, (Vector4F)traits.Interpolate(start, end, 1.0f)));

    // t = 0.75 is a 25/75 blend of start and end.
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)(0.25f * start + 0.75f * end), (Vector4F)traits.Interpolate(start, end, 0.75f)));
}
/// <summary>
/// Creates a leaf attached to the given parent branch/bone.
/// </summary>
public TreeLeaf(int parentIndex, Vector4 color, float rotation, Vector2 size, int boneIndex, float axisOffset)
{
    this.ParentIndex = parentIndex;
    this.Color = color;
    this.Rotation = rotation;
    this.Size = size;
    this.BoneIndex = boneIndex;
    this.AxisOffset = axisOffset;
}
/// <summary>
/// Creates a state manager whose current and goal states both start as the
/// given draw state.
/// </summary>
public StateManager(Game game, DrawState state)
    : base(game)
{
    _stateChangeSecondQueue = new Queue<float>();
    _stateQueue = new Queue<DrawState>();
    _goalState = state;
    CurrentState = state;
    // Seed the working color from the initial state's color.
    _currentColorAvoidSmallNumber = state.Color.ToVector4();
}
/// <summary>
/// Queues a textured quad for drawing with the given effect and texture.
/// Always returns true.
/// </summary>
public bool DrawSprite(EffectState effect, Texture2D texture, Vector3 position, Vector2 area, Vector4 uv, Color hue, Vector4 data)
{
    List<VertexPositionTextureDataColor> vertexList = GetVLForThisEffectAndTexture(effect, texture);

    // Nudge the sprite forward in depth so successively queued sprites layer correctly.
    position += Depth.NextZ;

    PreTransformedQuad quad = new PreTransformedQuad(position, area, uv, hue, data);
    foreach (VertexPositionTextureDataColor vertex in quad.Vertices)
    {
        vertexList.Add(vertex);
    }

    return true;
}
/// <summary>
/// Gets the value of the annotation as a vector4.
/// </summary>
/// <returns>The vector4 value.</returns>
public Vector4 GetValueVector4()
{
    XNA.Vector4 raw = _annotation.GetValueVector4();

    // Copy the XNA value into the engine's Vector4 field by field.
    Vector4 converted;
    converted.X = raw.X;
    converted.Y = raw.Y;
    converted.Z = raw.Z;
    converted.W = raw.W;
    return converted;
}
/// <summary>
/// Initializes a new instance of the <see cref="VertexPositionNormalBlendable"/> struct.
/// </summary>
/// <param name="position">The position of the vertex.</param>
/// <param name="normal">The normal of the vertex.</param>
/// <param name="boneWeight">The bone weightings that apply to the vertex.</param>
/// <param name="boneIndices">The bone IDs that apply to the vertex.</param>
public VertexPositionNormalBlendable(Vector3 position, Vector3 normal, Vector4 boneWeight, Byte4 boneIndices)
{
    Position = position;
    Normal = normal;
    BoneWeights = boneWeight;
    BoneIndices = boneIndices;
}
/// <summary>
/// Starts a timed color fade from one color to another, invoking the callback
/// when the fade completes.
/// </summary>
/// <param name="from">Starting color.</param>
/// <param name="to">Target color.</param>
/// <param name="duration">Fade duration in seconds.</param>
/// <param name="onFinished">Invoked once the fade finishes.</param>
public static void Fade(Color from, Color to, float duration, Action onFinished)
{
    fading = true;
    fadeSecsLeft = duration;
    FadeManager.duration = duration;
    // NOTE(review): Color channels are bytes (0-255), so these Vector4s are in
    // 0-255 range rather than 0-1 — confirm consumers expect that scale.
    FadeManager.from = new Vector4(from.R, from.G, from.B, from.A);
    FadeManager.to = new Vector4(to.R, to.G, to.B, to.A);
    color = FadeManager.from;
    FadeManager.onFinished = onFinished;
}
/// <summary>
/// Gets the value of the annotation as a quaternion.
/// </summary>
/// <returns>The quaternion value.</returns>
public Quaternion GetValueQuaternion()
{
    XNA.Vector4 raw = _annotation.GetValueVector4();

    // Reinterpret the vector components as quaternion X/Y/Z/W.
    Quaternion result;
    result.X = raw.X;
    result.Y = raw.Y;
    result.Z = raw.Z;
    result.W = raw.W;
    return result;
}
/// <summary>
/// Multiply must behave as repeated addition, including negative factors.
/// </summary>
public void MultiplyTest()
{
    var traits = Vector4Traits.Instance;
    var v = new Vector4(-1, -2, 3, 1);

    // Trivial cases: factor 0 and 1.
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)Vector4.Zero, (Vector4F)traits.Multiply(v, 0)));
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)v, (Vector4F)traits.Multiply(v, 1)));

    // Positive factors act as repeated addition.
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)(v + v), (Vector4F)traits.Multiply(v, 2)));
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)(v + v + v), (Vector4F)traits.Multiply(v, 3)));

    // Negative factors act as repeated subtraction.
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)(-v), (Vector4F)traits.Multiply(v, -1)));
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)(-v - v), (Vector4F)traits.Multiply(v, -2)));
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)(-v - v - v), (Vector4F)traits.Multiply(v, -3)));
}
/// <summary>
/// Projects a clip-space position into depth-image pixel coordinates and
/// returns the corresponding linear index into the depth frame's pixel array.
/// </summary>
/// <param name="position">Clip-space position (W carries the perspective divisor).</param>
/// <param name="depth">Depth stream supplying the frame dimensions.</param>
/// <returns>Row-major index into the depth frame, clamped to the frame bounds.</returns>
private int CalculateIndex(Microsoft.Xna.Framework.Vector4 position, DepthImageStream depth)
{
    // Perspective divide; the negation flips clip space into image space.
    float x = -position.X / position.W, y = -position.Y / position.W;

    // Map from [-1, 1] normalized coordinates to pixel coordinates.
    x = (x + 1) * depth.FrameWidth / 2;
    y = (y + 1) * depth.FrameHeight / 2;

    int xCoord = (int)MathHelper.Clamp(x, 0, depth.FrameWidth - 1);
    int yCoord = (int)MathHelper.Clamp(y, 0, depth.FrameHeight - 1);

    // BUG FIX: the row stride of a row-major frame buffer is FrameWidth, not
    // FrameHeight; using the height mis-addressed every row on non-square frames.
    return (yCoord * depth.FrameWidth) + xCoord;
}
/// <summary>
/// Verifies Vector4.Dot for both the by-value and the by-ref overload.
/// </summary>
public void Dot()
{
    var left = new Vector4(1, 2, 3, 4);
    var right = new Vector4(0.5f, 1.1f, -3.8f, 1.2f);
    var expectedResult = -3.89999962f;

    Assert.AreEqual(expectedResult, Vector4.Dot(left, right));

    // The ref overload must produce the identical value.
    float refResult;
    Vector4.Dot(ref left, ref right, out refResult);
    Assert.AreEqual(expectedResult, refResult);
}
/// <summary>
/// Reads a Vector4 (four 32-bit floats, 16 bytes) from the stream.
/// Returns Vector4.Zero when fewer than 16 bytes remain, instead of throwing.
/// </summary>
/// <param name="bin">Reader positioned at the vector's first float.</param>
/// <returns>The vector read, or Vector4.Zero when the stream is too short.</returns>
public static Vector4 ReadVector4(this BinaryReader bin)
{
    // BUG FIX: a Vector4 is 4 floats = 16 bytes; the previous guard checked for
    // 12 bytes (a Vector3's size), so a truncated stream could still throw
    // EndOfStreamException on the fourth ReadSingle.
    const int SizeInBytes = 4 * sizeof(float);
    if ((bin.BaseStream.Length - bin.BaseStream.Position) < SizeInBytes)
    {
        return Vector4.Zero;
    }

    return new Vector4(
        bin.ReadSingle(),
        bin.ReadSingle(),
        bin.ReadSingle(),
        bin.ReadSingle()
    );
}
/// <summary>
/// Draws all active bullets, then draws this entity tinted by the given RGBA color.
/// </summary>
/// <param name="VM">View matrix passed through to each draw call.</param>
/// <param name="color">XYZ = RGB tint, W = alpha.</param>
public void Draw(Matrix VM, Microsoft.Xna.Framework.Vector4 color)
{
    // Draw every bullet first.
    if (balas.Count > 0)
    {
        for (int i = 0; i < balas.Count; i++)
        {
            // NOTE(review): ElementAt on each iteration is O(n) for non-indexable
            // collections; an indexer or foreach would avoid O(n^2) — confirm balas' type.
            balas.ElementAt(i).Draw(VM);
        }
    }

    // W carries the alpha; XYZ carry the RGB tint.
    base.Alpha(color.W);
    base.Draw(VM, new Vector3(color.X, color.Y, color.Z), 1);
}
/// <summary>
/// IAnimationValueTraits&lt;T&gt; is used in a from-by animation to add a relative
/// offset to the start value; Add and Inverse must therefore be consistent.
/// </summary>
public void FromByTest()
{
    var traits = Vector4Traits.Instance;
    var from = new Vector4(-1, -2, 3, 1);
    var by = new Vector4(4, -5, 6, 1);

    var to = traits.Add(from, by);
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)(by + from), (Vector4F)to));

    // Adding the inverse must undo the offset from either side.
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)from, (Vector4F)traits.Add(to, traits.Inverse(by))));
    Assert.IsTrue(Vector4F.AreNumericallyEqual((Vector4F)by, (Vector4F)traits.Add(traits.Inverse(from), to)));
}
/// <summary>
/// Writes a Vector4 into the vertex buffer slot described by the element,
/// converting to the element's packed format first when required.
/// </summary>
/// <exception cref="ArgumentException">The element's format is not supported.</exception>
public void Encode(VertexElement element, XNAV4 value)
{
    var dstVertex = _Vertex.Slice(element.Offset);

    switch (element.VertexElementFormat)
    {
        case VertexElementFormat.Vector4:
            // No conversion needed; write the raw 16 bytes.
            System.Runtime.InteropServices.MemoryMarshal.Write(dstVertex, ref value);
            return;

        case VertexElementFormat.Byte4:
        {
            var packed = new Byte4(value);
            System.Runtime.InteropServices.MemoryMarshal.Write(dstVertex, ref packed);
            return;
        }

        case VertexElementFormat.Color:
        {
            var packed = new Color(value);
            System.Runtime.InteropServices.MemoryMarshal.Write(dstVertex, ref packed);
            return;
        }

        case VertexElementFormat.Short4:
        {
            var packed = new Short4(value);
            System.Runtime.InteropServices.MemoryMarshal.Write(dstVertex, ref packed);
            return;
        }

        case VertexElementFormat.NormalizedShort4:
        {
            var packed = new NormalizedShort4(value);
            System.Runtime.InteropServices.MemoryMarshal.Write(dstVertex, ref packed);
            return;
        }

        case VertexElementFormat.HalfVector4:
        {
            var packed = new HalfVector4(value);
            System.Runtime.InteropServices.MemoryMarshal.Write(dstVertex, ref packed);
            return;
        }

        default:
            throw new ArgumentException(nameof(element));
    }
}
/// <summary>
/// Software-renders a point cloud into a 640x480 RGBA buffer, coloring each
/// covered pixel with a depth-based false-color ramp.
/// NOTE(review): 'eye' and 'forward' are currently unused — confirm intent.
/// </summary>
public void Render(PointCloud pcl, Matrix worldToClip, Vector3 eye, Vector3 forward, Vector4[] outRGBA)
{
    // Far-plane sentinel; pixels that keep this value receive no color.
    float MAX_DEPTH = 10.0f;

    // Clear the depth buffer and the output image.
    for (int i = 0; i < depthBuffer.Length; i++)
    {
        depthBuffer[i] = MAX_DEPTH;
        outRGBA[i] = new Vector4();
    }

    // First pass: project each point and keep the nearest depth per pixel.
    foreach (Vector4 p in pcl.Points())
    {
        Vector3 worldPos = new Vector3(p.X, p.Y, p.Z);
        Vector4 clipPos = Vector4.Transform(new Vector4(worldPos.X, worldPos.Y, worldPos.Z, 1.0f), worldToClip);
        float depth = clipPos.W;

        // TODO: Generalize (hard-coded 640x480 viewport with a flipped Y axis)
        int screenX = (int)(clipPos.X / clipPos.W * 320.0f + 320.0f);
        int screenY = 480 - (int)(clipPos.Y / clipPos.W * 240.0f + 240.0f);

        // TODO: Generalize
        if (screenX < 0 || screenX >= 640 || screenY < 0 || screenY >= 480)
            continue;

        // TODO: Generalize
        int idx = 640 * screenY + screenX;
        if (depth < depthBuffer[idx])
            depthBuffer[idx] = depth;
    }

    // Second pass: map each covered pixel's depth onto an RGB ramp; alpha = 1.
    for (int i = 0; i < depthBuffer.Length; i++)
    {
        if (depthBuffer[i] < MAX_DEPTH)
        {
            // Normalize depth over the constants' implied working range (0.4 .. 4.0).
            float depth_norm = (depthBuffer[i] - 0.4f) / 3.6f;
            float r = MathHelper.Min(1.0f, (MathHelper.Max(0.5f, depth_norm) - 0.5f) * 6.0f);
            float g = MathHelper.Min(1.0f, depth_norm * 3.0f) - MathHelper.Min(1.0f, MathHelper.Max(0.0f, depth_norm - 0.666f) * 3.0f);
            float b = MathHelper.Max(0.0f, 1.0f - MathHelper.Max(0.0f, depth_norm - 0.333f) * 6.0f);
            outRGBA[i] = new Vector4(r, g, b, 1.0f);
        }
    }
}
/// <summary>
/// Copies a sub-range of the Vector4 array into the effect parameter,
/// reinterpreting each element as the XNA Vector4 memory layout.
/// </summary>
/// <param name="values">Source array.</param>
/// <param name="offset">Index of the first element to copy.</param>
/// <param name="count">Number of elements to copy.</param>
public unsafe void Set(Vector4[] values, int offset, int count)
{
    // NOTE(review): offset/count are not range-checked; out-of-range values would
    // read past the pinned array — confirm callers validate.
    var data = new X.Vector4[count];
    fixed(Vector4 *ptr = values)
    fixed(X.Vector4 * ptr2 = data)
    {
        var ptrOffset = ptr + offset;
        var ptrOffset2 = ptr2;
        // Raw reinterpret-copy: assumes both Vector4 types share an identical
        // 16-byte XYZW layout — TODO confirm.
        for (int i = 0; i != count; ++i)
        {
            *ptrOffset2++ = *(X.Vector4 *)(ptrOffset++);
        }
    }
    parameter.SetValue(data);
}
/// <summary>
/// Copies the entire Vector4 array into the effect parameter,
/// reinterpreting each element as the XNA Vector4 memory layout.
/// </summary>
/// <param name="values">Source array; every element is copied.</param>
public unsafe void Set(Vector4[] values)
{
    var data = new X.Vector4[values.Length];
    fixed(Vector4 *ptr = values)
    fixed(X.Vector4 * ptr2 = data)
    {
        var ptrOffset = ptr;
        var ptrOffset2 = ptr2;
        // Raw reinterpret-copy: assumes both Vector4 types share an identical
        // 16-byte XYZW layout — TODO confirm.
        for (int i = 0; i != values.Length; ++i)
        {
            *ptrOffset2++ = *(X.Vector4 *)(ptrOffset++);
        }
    }
    parameter.SetValue(data);
}
/// <summary>
/// Verifies Vector4.Hermite against known values and checks that the ref
/// overload matches the by-value overload.
/// </summary>
public void Hermite()
{
    var tangent1 = new Vector4(1.40625f, 1.40625f, 0.2f, 0.92f);
    var tangent2 = new Vector4(2.662375f, 2.26537514f, 10.0f, 2f);
    var point1 = new Vector4(1, 2, 3, 4);
    var point2 = new Vector4(-1.3f, 0.1f, 30.0f, 365.20f);
    var amount = 2.234f;

    var byValue = Vector4.Hermite(point1, tangent1, point2, tangent2, amount);
    var expected = new Vector4(39.0311f, 34.65557f, -132.5473f, -2626.85938f);
    Assert.That(expected, Is.EqualTo(byValue).Using(Vector4Comparer.Epsilon));

    // The by-ref overload must produce exactly the same result.
    Vector4 byRef;
    Vector4.Hermite(ref point1, ref tangent1, ref point2, ref tangent2, amount, out byRef);
    Assert.That(byValue, Is.EqualTo(byRef).Using(Vector4Comparer.Epsilon));
}
/// <summary>
/// Skins an individual vertex: blends the four influencing bone matrices and
/// transforms position and normal by the blended result.
/// </summary>
public static void SetVertex(
    Matrix[] bones,
    ref Vector3 position,
    ref Vector3 normal,
    ref Vector4 blendIndices,
    ref Vector4 blendWeights,
    out Vector3 outPosition,
    out Vector3 outNormal)
{
    // Bone indices arrive packed in a Vector4; truncate each lane to an int.
    int i0 = (int)blendIndices.X;
    int i1 = (int)blendIndices.Y;
    int i2 = (int)blendIndices.Z;
    int i3 = (int)blendIndices.W;

    // Weighted blend of the four bone matrices.
    Matrix skin;
    Blend4x3Matrix(ref bones[i0], ref bones[i1], ref bones[i2], ref bones[i3], ref blendWeights, out skin);

    // Support the 4 bone influences: full affine transform for the position,
    // rotation/scale-only transform for the normal.
    Vector3.Transform(ref position, ref skin, out outPosition);
    Vector3.TransformNormal(ref normal, ref skin, out outNormal);
}
/// <summary>
/// Creates a model vertex; any attribute may be null/absent for meshes that
/// do not carry that channel.
/// </summary>
/// <param name="position">Vertex position, or null when the mesh has none.</param>
/// <param name="normal">Vertex normal, or null.</param>
/// <param name="tangent">Tangent vector, or null.</param>
/// <param name="bitangent">Bitangent vector, or null.</param>
/// <param name="colors">Per-vertex color sets.</param>
/// <param name="texCoordsUV">2D texture coordinate sets.</param>
/// <param name="texCoordsUVW">3D texture coordinate sets.</param>
/// <param name="boneIndicies">Bone index quad (parameter keeps its historical spelling for caller compatibility).</param>
/// <param name="boneWeights">Bone weight quad, or null.</param>
public ModelVertex( Vector3? position, Vector3? normal, Vector3? tangent, Vector3? bitangent, Color[] colors, Vector2[] texCoordsUV, Vector3[] texCoordsUVW, Byte4? boneIndicies, Vector4? boneWeights)
{
    Position = position;
    Normal = normal;
    Tangent = tangent;
    BiTangent = bitangent;
    Colors = colors;
    TexCoordsUV = texCoordsUV;
    TexCoordsUVW = texCoordsUVW;
    BoneIndices = boneIndicies;
    BoneWeights = boneWeights;
}
/// <summary>
/// Wraps a compiled mesh effect, caching its parameter handles once and sizing
/// the per-cascade shadow arrays.
/// NOTE(review): 'graphicsDevice' is currently unused — confirm intent.
/// </summary>
public MeshEffect(GraphicsDevice graphicsDevice, Effect innerEffect)
{
    _innerEffect = innerEffect;

    // Cache the string-keyed parameter lookups once up front.
    _cameraPosWSParameter = _innerEffect.Parameters["CameraPosWS"];
    _shadowMatrixParameter = _innerEffect.Parameters["ShadowMatrix"];
    _cascadeSplitsParameter = _innerEffect.Parameters["CascadeSplits"];
    _cascadeOffsetsParameter = _innerEffect.Parameters["CascadeOffsets"];
    _cascadeScalesParameter = _innerEffect.Parameters["CascadeScales"];
    _biasParameter = _innerEffect.Parameters["Bias"];
    _offsetScaleParameter = _innerEffect.Parameters["OffsetScale"];
    _lightDirectionParameter = _innerEffect.Parameters["LightDirection"];
    _lightColorParameter = _innerEffect.Parameters["LightColor"];
    _diffuseColorParameter = _innerEffect.Parameters["DiffuseColor"];
    _worldParameter = _innerEffect.Parameters["World"];
    _viewProjectionParameter = _innerEffect.Parameters["ViewProjection"];
    _shadowMapParameter = _innerEffect.Parameters["ShadowMap"];

    // One entry per shadow cascade.
    CascadeSplits = new float[MeshRenderer.NumCascades];
    CascadeOffsets = new Vector4[MeshRenderer.NumCascades];
    CascadeScales = new Vector4[MeshRenderer.NumCascades];
}
/// <summary>
/// Update the IR distance sensor: every SCAN_INTERVAL seconds, casts rays on a
/// horizontal and a vertical plane and records the shortest impact distance.
/// </summary>
/// <param name="update">Per-frame timing/state from the simulation engine.</param>
public override void Update(simengine.FrameUpdate update)
{
    base.Update(update);
    _elapsedSinceLastScan += (float)update.ElapsedTime;
    _appTime = (float)update.ApplicationTime;

    // only retrieve raycast results every SCAN_INTERVAL.
    // For entities that are compute intenisve, you should consider giving them
    // their own task queue so they dont flood a shared queue
    if ((_elapsedSinceLastScan > SCAN_INTERVAL) && (_raycastProperties != null))
    {
        _elapsedSinceLastScan = 0;

        // The default pose has the IR sensor looking toward the back of the robot. Rotate
        // it by 180 degrees.
        _raycastProperties.OriginPose.Orientation = simengine.TypeConversion.FromXNA(
            simengine.TypeConversion.ToXNA(Parent.State.Pose.Orientation) * simengine.TypeConversion.ToXNA(State.Pose.Orientation));
        _raycastProperties.OriginPose.Position = simengine.TypeConversion.FromXNA(
            xna.Vector3.Transform(simengine.TypeConversion.ToXNA(State.Pose.Position), Parent.World));

        // Rebuild this entity's world matrix from its local pose...
        xna.Matrix orientation = xna.Matrix.CreateFromQuaternion(simengine.TypeConversion.ToXNA(State.Pose.Orientation));
        World = xna.Matrix.Multiply(orientation, xna.Matrix.CreateTranslation(simengine.TypeConversion.ToXNA(State.Pose.Position)));

        // This entity is relative to its parent
        World = xna.Matrix.Multiply(World, Parent.World);

        // cast rays on a horizontal plane and again on a vertical plane
        _raycastResultsPort = PhysicsEngine.Raycast2D(_raycastProperties);
        _raycastResultsPort.Test(out _lastResults);

        if (_lastResults != null)
        {
            simcommon.RaycastResult verticalResults;

            // rotate the plane by 90 degrees
            _raycastProperties.OriginPose.Orientation = simengine.TypeConversion.FromXNA(simengine.TypeConversion.ToXNA(_raycastProperties.OriginPose.Orientation) * xna.Quaternion.CreateFromAxisAngle(new xna.Vector3(0, 0, 1), (float)Math.PI / 2f));
            _raycastResultsPort = PhysicsEngine.Raycast2D(_raycastProperties);
            _raycastResultsPort.Test(out verticalResults);

            // combine the results of the second raycast with the first
            if (verticalResults != null)
            {
                foreach (simcommon.RaycastImpactPoint impact in verticalResults.ImpactPoints)
                {
                    _lastResults.ImpactPoints.Add(impact);
                }
            }

            // find the shortest distance to an impact point
            float minRange = MaximumRange * MaximumRange;
            xna.Vector4 origin = new xna.Vector4(simengine.TypeConversion.ToXNA(_raycastProperties.OriginPose.Position), 1);
            foreach (simcommon.RaycastImpactPoint impact in _lastResults.ImpactPoints)
            {
                // Compare squared distances to avoid a sqrt per impact point.
                xna.Vector3 impactVector = new xna.Vector3(
                    impact.Position.X - origin.X,
                    impact.Position.Y - origin.Y,
                    impact.Position.Z - origin.Z);
                float impactDistanceSquared = impactVector.LengthSquared();
                if (impactDistanceSquared < minRange)
                {
                    minRange = impactDistanceSquared;
                }
            }

            _distance = (float)Math.Sqrt(minRange);
        }
    }
}
/// <summary>
/// Converts a MonoGame Vector4 to the Xenko Vector4 type, copying all four components.
/// </summary>
public static Vector4 ToXenko(this MonoVector4 vector)
{
    return new Vector4(vector.X, vector.Y, vector.Z, vector.W);
}
/// <summary>
/// CorrectSkeletonOffsetFromFloor moves the skeleton to the floor.
/// If no floor found in Skeletal Tracking, we can try and use the foot position
/// but this can be very noisy, which causes the skeleton to bounce up and down.
/// Note: Using the foot positions will reduce the visual effect of jumping when
/// an avateer jumps, as we perform a running average.
/// </summary>
/// <param name="skeleton">The skeleton to correct.</param>
/// <param name="floorPlane">The floor plane (consisting of up normal and sensor height) detected by skeleton tracking (if any).</param>
/// <param name="avatarHipCenterHeight">The height of the avatar Hip Center joint.</param>
public void CorrectSkeletonOffsetFromFloor(Skeleton skeleton, Tuple<float, float, float, float> floorPlane, float avatarHipCenterHeight)
{
    // Nothing to correct unless we have a fully tracked skeleton.
    if (skeleton == null || skeleton.TrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    Vector4 floorPlaneVec = Vector4.Zero;
    bool haveFloor = false;

    if (null != floorPlane)
    {
        // Pack the tuple into a vector; a (near-)zero-length plane means "no floor detected".
        floorPlaneVec = new Vector4(floorPlane.Item1, floorPlane.Item2, floorPlane.Item3, floorPlane.Item4);
        haveFloor = floorPlaneVec.Length() > float.Epsilon;
    }

    // If there's no floor found, try to use the lower foot position, if visible.
    Vector3 hipCenterPosition = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.HipCenter].Position);
    bool haveLeftFoot = KinectHelper.IsTrackedOrInferred(skeleton, JointType.FootLeft);
    bool haveLeftAnkle = KinectHelper.IsTracked(skeleton, JointType.AnkleLeft);
    bool haveRightFoot = KinectHelper.IsTrackedOrInferred(skeleton, JointType.FootRight);
    bool haveRightAnkle = KinectHelper.IsTracked(skeleton, JointType.AnkleRight);

    if (haveLeftFoot || haveLeftAnkle || haveRightFoot || haveRightAnkle)
    {
        // As this runs after de-tilt of the skeleton, so the floor-camera offset will
        // be the foot to camera 0 height in meters as the foot is at the floor plane.
        // Jumping is enabled to some extent due to the running average, but will appear reduced in height.
        Vector3 leftFootPosition = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.FootLeft].Position);
        Vector3 rightFootPosition = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.FootRight].Position);
        Vector3 leftAnklePosition = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.AnkleLeft].Position);
        Vector3 rightAnklePosition = KinectHelper.SkeletonPointToVector3(skeleton.Joints[JointType.AnkleRight].Position);

        // Average the foot and ankle if we have it; otherwise fall back to whichever is tracked.
        float leftFootAverage = (haveLeftFoot && haveLeftAnkle) ? (leftFootPosition.Y + leftAnklePosition.Y) * 0.5f : haveLeftFoot ? leftFootPosition.Y : leftAnklePosition.Y;
        float rightFootAverage = (haveRightFoot && haveRightAnkle) ? (rightFootPosition.Y + rightAnklePosition.Y) * 0.5f : haveRightFoot ? rightFootPosition.Y : rightAnklePosition.Y;

        // We assume the lowest foot is placed on the floor
        float lowestFootPosition = 0;
        if ((haveLeftFoot || haveLeftAnkle) && (haveRightFoot || haveRightAnkle))
        {
            // Negate, as we are looking for the camera height above the floor plane
            lowestFootPosition = Math.Min(leftFootAverage, rightFootAverage);
        }
        else if (haveLeftFoot || haveLeftAnkle)
        {
            lowestFootPosition = leftFootAverage;
        }
        else
        {
            lowestFootPosition = rightFootAverage;
        }

        // Running average of floor position (90% previous, 10% new) smooths per-frame noise.
        this.averageFloorOffset = (this.averageFloorOffset * 0.9f) + (lowestFootPosition * 0.1f);
    }
    else if (haveFloor)
    {
        // Get the detected height of the camera off the floor in meters.
        if (0.0f == this.averageFloorOffset)
        {
            // If it's the initial frame of detection, just set the floor plane directly.
            // NOTE(review): 0.0f appears to act as the "not yet initialized" sentinel here — confirm.
            this.averageFloorOffset = -floorPlaneVec.W;
        }
        else
        {
            // Running average of floor position
            this.averageFloorOffset = (this.averageFloorOffset * 0.9f) + (-floorPlaneVec.W * 0.1f);
        }
    }
    else
    {
        // No floor plane and no feet/ankles tracked: just set the avatar offset directly
        // from the hip center and the known avatar hip height.
        this.averageFloorOffset = hipCenterPosition.Y - avatarHipCenterHeight;
    }

    Array jointTypeValues = Enum.GetValues(typeof(JointType));

    // Move to the floor: shift every joint down by the estimated camera-to-floor offset.
    foreach (JointType j in jointTypeValues)
    {
        Joint joint = skeleton.Joints[j];
        SkeletonPoint pt = joint.Position;
        pt.Y = pt.Y - this.averageFloorOffset;
        joint.Position = pt;
        skeleton.Joints[j] = joint;
    }
}
/// <summary>
/// Sets the named shader parameter from an XNA vector by forwarding its
/// four components to the scalar overload.
/// </summary>
public void SetVector(string name, XNA.Vector4 vector) =>
    SetVector(name, vector.X, vector.Y, vector.Z, vector.W);
/// <summary>
/// Draws an ImGui editing widget for a single field/property and writes any user
/// edit back through <paramref name="info"/>. The widget is chosen from the member's
/// runtime type; lists recurse per element, reference types support selection and
/// drag-drop assignment, and unhandled value types fall back to a read-only view.
/// </summary>
/// <param name="info">Reflection wrapper used to read and write the member's value.</param>
/// <param name="entityComponent">Owning object; used for widget ID scoping and MIDI assignment.</param>
/// <param name="showMidi">When true, offers MIDI knob/button assignment for supported types.</param>
static unsafe void SubmitFieldPropertyInspector(FieldPropertyListInfo info, object entityComponent, bool showMidi = true)
{
    // Scope all widget IDs to this member so identical labels elsewhere don't collide.
    ImGui.PushID(GetIdString(info, entityComponent));

    // Optional range attribute turns numeric drag/input widgets into bounded sliders.
    EditorHelper.RangeAttribute rangeAttribute = null;
    if (info.MemberInfo != null)
    {
        rangeAttribute = CustomAttributeExtensions.GetCustomAttribute<EditorHelper.RangeAttribute>(info.MemberInfo, true);
    }

    var infoType = info.FieldPropertyType;
    if (infoType == typeof(string))
    {
        string val = (string)info.GetValue();
        if (val == null)
        {
            // ImGui.InputText needs a non-null buffer.
            val = string.Empty;
        }
        if (ImGui.InputText(info.Name, ref val, 1000))
        {
            info.SetValue(val);
        }
    }
    else if (infoType == typeof(bool))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Button);
        }
        bool val = (bool)info.GetValue();
        if (ImGui.Checkbox(info.Name, ref val))
        {
            info.SetValue(val);
        }
    }
    else if (infoType == typeof(float))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        float val = (float)info.GetValue();
        bool result;
        // Either range flavor (float or int bounds) produces a slider; otherwise a free drag.
        if (rangeAttribute != null && (rangeAttribute.RangeType == EditorHelper.RangeAttribute.RangeTypeEnum.Float || rangeAttribute.RangeType == EditorHelper.RangeAttribute.RangeTypeEnum.Int))
        {
            if (rangeAttribute.RangeType == EditorHelper.RangeAttribute.RangeTypeEnum.Float)
            {
                result = ImGui.SliderFloat(info.Name, ref val, rangeAttribute.MinFloat, rangeAttribute.MaxFloat);
            }
            else
            {
                result = ImGui.SliderFloat(info.Name, ref val, rangeAttribute.MinInt, rangeAttribute.MaxInt);
            }
        }
        else
        {
            result = ImGui.DragFloat(info.Name, ref val, 0.1f);
        }
        if (result)
        {
            info.SetValue(val);
        }
    }
    else if (infoType == typeof(Vector2))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        Vector2 val = (Vector2)info.GetValue();
        if (ImGui.DragFloat2(info.Name, ref val))
        {
            info.SetValue(val);
        }
    }
    else if (infoType == typeof(Vector3))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        Vector3 val = (Vector3)info.GetValue();
        if (ImGui.DragFloat3(info.Name, ref val))
        {
            info.SetValue(val);
        }
    }
    else if (infoType == typeof(Vector4))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        Vector4 val = (Vector4)info.GetValue();
        if (ImGui.DragFloat4(info.Name, ref val))
        {
            info.SetValue(val);
        }
    }
    else if (infoType == typeof(Xna.Vector2))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        // XNA vectors are edited through a System.Numerics copy, since ImGui only
        // understands the latter; copy the components back on change.
        Xna.Vector2 xnaVal = (Xna.Vector2)info.GetValue();
        Vector2 val = new Vector2(xnaVal.X, xnaVal.Y);
        if (ImGui.DragFloat2(info.Name, ref val))
        {
            xnaVal.X = val.X;
            xnaVal.Y = val.Y;
            info.SetValue(xnaVal);
        }
    }
    else if (infoType == typeof(Xna.Vector3))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        Xna.Vector3 xnaVal = (Xna.Vector3)info.GetValue();
        Vector3 val = new Vector3(xnaVal.X, xnaVal.Y, xnaVal.Z);
        if (ImGui.DragFloat3(info.Name, ref val))
        {
            xnaVal.X = val.X;
            xnaVal.Y = val.Y;
            xnaVal.Z = val.Z;
            info.SetValue(xnaVal);
        }
    }
    else if (infoType == typeof(Xna.Vector4))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        Xna.Vector4 xnaVal = (Xna.Vector4)info.GetValue();
        Vector4 val = new Vector4(xnaVal.X, xnaVal.Y, xnaVal.Z, xnaVal.W);
        if (ImGui.DragFloat4(info.Name, ref val))
        {
            xnaVal.X = val.X;
            xnaVal.Y = val.Y;
            xnaVal.Z = val.Z;
            xnaVal.W = val.W;
            info.SetValue(xnaVal);
        }
    }
    else if (infoType == typeof(int))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        int val = (int)info.GetValue();
        bool result;
        if (rangeAttribute != null && rangeAttribute.RangeType == EditorHelper.RangeAttribute.RangeTypeEnum.Int)
        {
            result = ImGui.SliderInt(info.Name, ref val, rangeAttribute.MinInt, rangeAttribute.MaxInt);
        }
        else
        {
            result = ImGui.InputInt(info.Name, ref val);
        }
        if (result)
        {
            info.SetValue(val);
        }
    }
    else if (infoType == typeof(uint))
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Knob);
        }
        // Edited via a signed int because ImGui has no unsigned widget;
        // negative entries are clamped to zero before writing back.
        // NOTE(review): values above int.MaxValue would wrap in this cast — confirm acceptable.
        int val = (int)((uint)info.GetValue());
        bool result;
        if (rangeAttribute != null && rangeAttribute.RangeType == EditorHelper.RangeAttribute.RangeTypeEnum.Int)
        {
            result = ImGui.SliderInt(info.Name, ref val, rangeAttribute.MinInt, rangeAttribute.MaxInt);
        }
        else
        {
            result = ImGui.InputInt(info.Name, ref val);
        }
        if (result)
        {
            if (val < 0)
            {
                val = 0;
            }
            info.SetValue((uint)val);
        }
    }
    else if (infoType.IsEnum)
    {
        if (showMidi)
        {
            SubmitMidiAssignment(entityComponent, info, MidiState.MidiControlDescriptionType.Button);
        }
        // Present enum values as a combo; find the current value's index by name.
        var val = info.GetValue();
        var enumNames = infoType.GetEnumNames();
        int currentIndex = 0;
        for (int i = 0; i < enumNames.Length; i++)
        {
            if (enumNames[i] == val.ToString())
            {
                currentIndex = i;
            }
        }
        if (ImGui.Combo(info.Name, ref currentIndex, enumNames, enumNames.Length))
        {
            info.SetValue(infoType.GetEnumValues().GetValue(currentIndex));
        }
    }
    else if (typeof(IList).IsAssignableFrom(infoType))
    {
        var listthing = info.GetValue();
        IList list = listthing as IList;

        // Header with add/remove buttons, then one recursive inspector per element.
        ImGui.Text($"{info.Name} List ({list.Count} items)");
        ImGui.SameLine();
        if (ImGui.Button("-"))
        {
            if (list.Count > 0)
            {
                list.RemoveAt(list.Count - 1);
            }
        }
        ImGui.SameLine();
        if (ImGui.Button("+"))
        {
            // NOTE(review): assumes the list is a generic List<T>; a non-generic IList
            // (e.g. an array) would make GetGenericArguments().First() throw — confirm.
            Type listItemType = list.GetType().GetGenericArguments().First();
            if (listItemType.IsValueType)
            {
                list.Add(Activator.CreateInstance(listItemType));
            }
            else
            {
                list.Add(null);
            }
        }
        ImGui.Indent();
        for (int i = 0; i < list.Count; i++)
        {
            FieldPropertyListInfo itemInfo = new FieldPropertyListInfo(list, i);
            SubmitFieldPropertyInspector(itemInfo, list);
        }
        ImGui.Unindent();
    }
    else if (!infoType.IsValueType)
    {
        // Reference types: show a label; components/entities are selectable, and any
        // compatible dragged object can be dropped onto the member to assign it.
        string valText;
        var value = info.GetValue();
        if (value != null)
        {
            valText = value.ToString();
        }
        else
        {
            valText = "null";
        }
        string label = $"{info.Name}: {valText}";
        if (typeof(Component).IsAssignableFrom(infoType) || typeof(Entity).IsAssignableFrom(infoType))
        {
            if (ImGui.Selectable(label, false))
            {
                SelectedEntityComponent = value;
                scrollEntitiesView = true;
                scrollSceneGraphView = true;
            }
        }
        else
        {
            ImGui.Text(label);
        }
        if (draggedObject != null && infoType.IsAssignableFrom(draggedObject.GetType()))
        {
            if (ImGui.BeginDragDropTarget())
            {
                var payload = ImGui.AcceptDragDropPayload(PAYLOAD_STRING);
                if (payload.NativePtr != null) // Only when this is non-null does it mean that we've released the drag
                {
                    info.SetValue(draggedObject);
                    draggedObject = null;
                }
                ImGui.EndDragDropTarget();
            }
        }
    }
    else
    {
        // Unhandled value type: show it read-only.
        SubmitReadonlyFieldPropertyInspector(info);
    }
    ImGui.PopID();
    SubmitHelpMarker(info);
}
/// <summary>
/// Converts an XNA vector to its System.Numerics equivalent by copying all four components.
/// </summary>
public static System.Numerics.Vector4 ToCS(this Microsoft.Xna.Framework.Vector4 vector) =>
    new System.Numerics.Vector4(vector.X, vector.Y, vector.Z, vector.W);
// Set a Vector4 parameter on the wrapped effect by name.
public void SetParameter(String parameterName, Microsoft.Xna.Framework.Vector4 value)
{
    var parameter = this.Effect.Parameters[parameterName];
    parameter.SetValue(value);
}