/// <summary>
/// Transforms a direction vector through the given transform by treating it
/// as a point. Returns the input vector unchanged when the transform is null
/// or when TryTransform reports failure.
/// </summary>
public static Vector3D Transform3DVector(Transform3D transform, Vector3D vector)
{
    var asPoint = new Point3D(vector.X, vector.Y, vector.Z);
    Point3D transformed;

    if (transform == null || !transform.TryTransform(asPoint, out transformed))
    {
        return vector;
    }

    return new Vector3D(transformed.X, transformed.Y, transformed.Z);
}
// Transform that moves the world to a camera coordinate system
// where the camera is at the origin looking down the negative z
// axis and y is up.
//
// NOTE: We consider camera.Transform to be part of the view matrix.
//
internal static Matrix3D CreateViewMatrix(Transform3D transform, ref Point3D position, ref Vector3D lookDirection, ref Vector3D upDirection)
{
    // Right-handed camera basis: the camera looks down -z, so z points
    // opposite the look direction.
    Vector3D zaxis = -lookDirection;
    zaxis.Normalize();

    // x = up x z; y = z x x. y needs no normalization because z and x are
    // already unit length and orthogonal.
    Vector3D xaxis = Vector3D.CrossProduct(upDirection, zaxis);
    xaxis.Normalize();

    Vector3D yaxis = Vector3D.CrossProduct(zaxis, xaxis);

    // Translation terms: negated projection of the camera position onto
    // each camera axis (moves the camera to the origin).
    Vector3D positionVec = (Vector3D) position;
    double cx = -Vector3D.DotProduct(xaxis, positionVec);
    double cy = -Vector3D.DotProduct(yaxis, positionVec);
    double cz = -Vector3D.DotProduct(zaxis, positionVec);

    // Row-vector convention: the camera axes form the matrix columns.
    Matrix3D viewMatrix = new Matrix3D(
        xaxis.X, yaxis.X, zaxis.X, 0,
        xaxis.Y, yaxis.Y, zaxis.Y, 0,
        xaxis.Z, yaxis.Z, zaxis.Z, 0,
        cx, cy, cz, 1);

    // camera.Transform is considered part of the view matrix (see NOTE
    // above), so its inverse is folded in front of the basis change.
    PrependInverseTransform(transform, ref viewMatrix);

    return viewMatrix;
}
/// <summary>
/// Pushes the matrix of the given model transform onto the model transform
/// stack. Null and identity transforms are ignored.
/// </summary>
internal void PushModelTransform(Transform3D transform)
{
    if (transform == null || transform == Transform3D.Identity)
    {
        return;
    }

    _modelTransformStack.Push(transform.Value);
}
/// <summary>
/// Builds a new matrix transform that applies rotations of x, y and z
/// degrees about the transformed side, up and look axes respectively,
/// pivoting at <paramref name="center"/>, composed onto the given matrix
/// transform. Axes locked by <paramref name="type"/> keep their original
/// (untransformed) orientation.
/// </summary>
/// <returns>
/// The combined transform, or null when <paramref name="transform"/> is not
/// a MatrixTransform3D or the composition fails.
/// </returns>
public static MatrixTransform3D Rotate3D(Transform3D transform, double x, double y, double z, Point3D center, Vector3D up, Vector3D look, RotationType type)
{
    // Bring the rotation axes and the pivot into the transformed frame,
    // except for axes that are locked to their original orientation.
    if (type != RotationType.LockAxisY)
    {
        up = transform.Transform(up);
    }

    if (type != RotationType.LockAxisZ)
    {
        look = transform.Transform(look);
    }

    center = transform.Transform(center);

    Vector3D axisX = Vector3D.CrossProduct(up, look);

    Matrix3D matrix = new Matrix3D();
    matrix.RotateAt(new Quaternion(axisX, x), center);
    matrix.RotateAt(new Quaternion(up, y), center);
    matrix.RotateAt(new Quaternion(look, z), center);

    // FIX: the original unguarded `as` cast dereferenced null for any
    // Transform3D that is not a MatrixTransform3D, producing a
    // NullReferenceException that was silently swallowed by the catch.
    // Return null explicitly instead.
    var mOriginTransform = transform as MatrixTransform3D;
    if (mOriginTransform == null)
    {
        return null;
    }

    try
    {
        return new MatrixTransform3D(Matrix3D.Multiply(mOriginTransform.Matrix, matrix));
    }
    catch (Exception err)
    {
        // Preserve the original best-effort contract: log and return null.
        Exceptions.LogOnly(err);
        return null;
    }
}
//Given a 3d object, it will apply the transform to it and return the new object.
//The object's own current transformation is left untouched: the original is
//wrapped in a new group whose Transform is the one supplied.
public static Model3DGroup applyTransform(Model3DGroup obj, Transform3D transform)
{
    var wrapper = new Model3DGroup { Transform = transform };
    wrapper.Children.Add(obj);
    return wrapper;
}
/// <summary>
/// Builds a rotation matrix applying x, y and z degree rotations about the
/// (optionally transformed) side, up and look axes, pivoting at center.
/// The pitch (x) is adjusted so the transformed up vector does not swing
/// past 90 degrees from the world Y axis.
/// </summary>
public static Matrix3D CalcRotationMatrix(double x, double y, double z, Point3D center, Vector3D up, Vector3D look, Transform3D transform, RotationType type)
{
    Vector3D realup = transform.Transform(up);
    // Axes locked by `type` keep their original (untransformed) orientation.
    if (type != RotationType.LockAxisY)
    {
        up = realup;
    }
    if (type != RotationType.LockAxisZ)
    {
        look = transform.Transform(look);
    }
    center = transform.Transform(center);
    Vector3D axisX = Vector3D.CrossProduct(up, look);
    Matrix3D matrix = new Matrix3D();
    // Pitch limiting: if the new up angle (current angle from world Y plus
    // the requested pitch) reaches 90 degrees, reduce x.
    // NOTE(review): `x = 90 - ang` makes the final angle 90 - x rather than
    // exactly 90 (that would be `x -= ang - 90`); confirm whether the
    // under-shoot is intentional.
    double ang = AngleBetween(realup, YAxis) + x;
    if (ang >= 90)
    {
        x = 90 - ang;
    }
    matrix.RotateAt(new Quaternion(axisX, x), center);
    matrix.RotateAt(new Quaternion(up, y), center);
    matrix.RotateAt(new Quaternion(look, z), center);
    return matrix;
}
/// <summary>
/// Translates the given model away from the expand origin by the Expansion
/// factor. The model's original transform is cached on first use so that
/// repeated calls do not compound the translation.
/// </summary>
private void Expand(GeometryModel3D model, Transform3D transformation)
{
    // Cache the model's pristine transform. TryGetValue performs a single
    // lookup instead of the original ContainsKey + indexer pair.
    Transform3D ot;
    if (!originalTransforms.TryGetValue(model, out ot))
    {
        ot = model.Transform;
        originalTransforms.Add(model, ot);
    }

    var mesh = model.Geometry as MeshGeometry3D;
    if (mesh == null)
    {
        return;
    }

    Transform3D totalTransform = Transform3DHelper.CombineTransform(transformation, ot);

    // Bounding box of the transformed vertices actually referenced by the
    // triangle list (unreferenced positions are ignored).
    var bounds = new Rect3D();
    foreach (int i in mesh.TriangleIndices)
    {
        bounds.Union(totalTransform.Transform(mesh.Positions[i]));
    }

    // Move the model so its bounds corner sits Expansion times farther
    // from the expand origin, then compose that onto the original transform.
    Point3D p = bounds.Location;
    Vector3D d = p - actualExpandOrigin;
    d *= Expansion;
    Point3D p2 = actualExpandOrigin + d;
    var t = new TranslateTransform3D(p2 - p);
    model.Transform = Transform3DHelper.CombineTransform(ot, t);
}
/// <summary>
/// Combines two transforms.
/// Null values are treated like the Identity transform (the original threw
/// when given null, because Transform3DGroup.Children rejects null children).
/// </summary>
/// <param name="t1">
/// The first transform.
/// </param>
/// <param name="t2">
/// The second transform.
/// </param>
/// <returns>
/// The combined transform.
/// </returns>
public static Transform3D CombineTransform(Transform3D t1, Transform3D t2)
{
    // Degenerate cases: avoid allocating a group when one side is missing.
    if (t1 == null)
    {
        return t2 ?? Transform3D.Identity;
    }

    if (t2 == null)
    {
        return t1;
    }

    var g = new Transform3DGroup();
    g.Children.Add(t1);
    g.Children.Add(t2);
    return g;
}
/// <summary>
/// Prepends the inverse of the given transform to the view matrix.
/// Null and identity transforms are no-ops.
/// </summary>
internal static void PrependInverseTransform(Transform3D transform, ref Matrix3D viewMatrix)
{
    if (transform == null || transform == Transform3D.Identity)
    {
        return;
    }

    PrependInverseTransform(transform.Value, ref viewMatrix);
}
/// <summary>
/// Initializes the three branch transforms T1..T3. Each combines a rotation
/// about the X axis, a uniform scale and a translation along Z, applied in
/// that order.
/// </summary>
public Plant()
{
    var axisX = new Vector3D(1, 0, 0);

    T1 = new Transform3DGroup
    {
        Children =
        {
            new RotateTransform3D(new AxisAngleRotation3D(axisX, 80)),
            new ScaleTransform3D(0.5, 0.5, 0.5),
            new TranslateTransform3D(0, 0, 0.5),
        }
    };

    T2 = new Transform3DGroup
    {
        Children =
        {
            new RotateTransform3D(new AxisAngleRotation3D(axisX, -70)),
            new ScaleTransform3D(0.3, 0.3, 0.3),
            new TranslateTransform3D(0, 0, 0.7),
        }
    };

    T3 = new Transform3DGroup
    {
        Children =
        {
            new RotateTransform3D(new AxisAngleRotation3D(axisX, -10)),
            new ScaleTransform3D(0.8, 0.8, 0.8),
            new TranslateTransform3D(0, 0, 1.0),
        }
    };
}
/// <summary>
/// Exports the light.
/// </summary>
/// <param name="light">
/// The light.
/// </param>
/// <param name="inheritedTransform">
/// The inherited transform.
/// </param>
/// <remarks>
/// Currently only delegates to the base implementation; POV-Ray-specific
/// light output is not yet implemented.
/// </remarks>
protected override void ExportLight(Light light, Transform3D inheritedTransform)
{
    base.ExportLight(light, inheritedTransform);

    // todo...
    // http://www.povray.org/documentation/view/3.6.1/34/
}
// Sets up the demo scene: effects manager, render technique, camera, two
// loaded meshes with side-by-side translations, and a flat box as the floor.
public MainViewModel()
{
    EffectsManager = new DefaultEffectsManager();
    RenderTechnique = EffectsManager[DefaultRenderTechniqueNames.Blinn];

    // titles
    this.Title = "Post Processing Effects Demo";
    this.SubTitle = "WPF & SharpDX";

    // ----------------------------------------------
    // camera setup
    this.Camera = new PerspectiveCamera
    {
        Position = new Point3D(0, -30, 0),
        LookDirection = new Vector3D(0, 30, 0),
        UpDirection = new Vector3D(0, 0, 1)
    };

    // Only the first geometry of each loaded file is used.
    // NOTE(review): "suzanne.obj" is loaded through Load3ds despite the
    // .obj extension — confirm the loader handles both formats.
    var m1 = Load3ds("suzanne.obj").Select(x => x.Geometry).ToArray();
    MeshModel1 = m1[0];
    var m2 = Load3ds("skeleton.3ds").Select(x => x.Geometry).ToArray();
    MeshModel2 = m2[0];

    // Place the two models on opposite sides of the origin.
    Model1Transform = new Media3D.TranslateTransform3D(new Vector3D(7, 0, 0));
    Model2Transform = new Media3D.TranslateTransform3D(new Vector3D(-5, 0, 0));

    // Thin 15x15 box below the models acts as the floor.
    var builder = new MeshBuilder();
    builder.AddBox(new Vector3(0, 0, -5), 15, 15, 0.2);
    FloorModel = builder.ToMesh();
}
// http://blogs.msdn.com/cfs-file.ashx/__key/CommunityServer-Components-PostAttachments/00-04-01-86-12/SceneSortingHelper_2E00_cs
/// <summary>
/// Sort Modelgroups in Farthest to Closest order, to enable transparency
/// Should be applied whenever the scene is significantly re-oriented
/// </summary>
public static void AlphaSort(Point3D cameraPosition, Model3DCollection models, Transform3D worldTransform)
{
    // FIX: materialize the ordering with ToList() BEFORE clearing the
    // collection. OrderBy is lazily evaluated, so the original code
    // enumerated the already-cleared collection and silently dropped
    // every model.
    // NOTE(review): OrderBy ascending by distance sorts closest-first,
    // which contradicts the "Farthest to Closest" summary — confirm the
    // intended direction before flipping it.
    var sortedList = models
        .OrderBy(model => Point3D.Subtract(cameraPosition, worldTransform.Transform(model.Bounds.Location)).Length)
        .ToList();

    models.Clear();
    foreach (var model in sortedList)
    {
        models.Add(model);
    }
}
//------------------------------------------------------
//
//  Public Methods
//
//------------------------------------------------------

//------------------------------------------------------
//
//  Public Properties
//
//------------------------------------------------------

//------------------------------------------------------
//
//  Public Events
//
//------------------------------------------------------

//------------------------------------------------------
//
//  Internal Methods
//
//------------------------------------------------------

/// <summary>
/// Pushes the matrix of the given visual transform onto the visual
/// transform stack. Null and identity transforms are ignored.
/// Must be called before any model transforms are pushed.
/// </summary>
internal void PushVisualTransform(Transform3D transform)
{
    // FIX: corrected the typo ("pusing") in the assert message.
    Debug.Assert(!HasModelTransformMatrix, "ModelTransform stack should be empty when pushing a visual transform");

    if (transform != null && transform != Transform3D.Identity)
    {
        _visualTransformStack.Push(transform.Value);
    }
}
/// <summary>
/// Writes the model as a new OBJ object/group with a freshly numbered
/// material, then exports its mesh with the combined transform.
/// NOTE(review): a non-mesh Geometry passes null to ExportMesh — confirm
/// the callee tolerates that.
/// </summary>
protected override void ExportModel(GeometryModel3D model, Transform3D transform)
{
    string objectLine = String.Format("o object{0}", objectNo++);
    writer.WriteLine(objectLine);

    string groupLine = String.Format("g group{0}", groupNo++);
    writer.WriteLine(groupLine);

    string matName = String.Format("mat{0}", matNo++);
    writer.WriteLine(String.Format("usemtl {0}", matName));
    ExportMaterial(matName, model.Material, model.BackMaterial);

    var mesh = model.Geometry as MeshGeometry3D;
    var combined = Transform3DHelper.CombineTransform(transform, model.Transform);
    ExportMesh(mesh, combined);
}
/// <summary>
/// Exports the model.
/// </summary>
/// <param name="model">
/// The model.
/// </param>
/// <param name="inheritedTransform">
/// The inherited transform.
/// </param>
/// <remarks>
/// Stub: non-mesh geometry is skipped and mesh export is not yet
/// implemented.
/// </remarks>
protected override void ExportModel(GeometryModel3D model, Transform3D inheritedTransform)
{
    var mesh = model.Geometry as MeshGeometry3D;
    if (mesh == null)
    {
        return;
    }

    // todo
}
/// <summary>
/// Returns a new transform that additionally rotates the transformed look
/// vector onto <paramref name="dir"/>, pivoting at <paramref name="center"/>.
/// Assumes look and dir are unit vectors — TODO confirm at the call sites.
/// </summary>
/// <returns>
/// The combined transform, or null when <paramref name="transform"/> is not
/// a MatrixTransform3D.
/// </returns>
public static MatrixTransform3D Rotate3D(Transform3D transform, Vector3D look, Vector3D dir, Point3D center)
{
    Matrix3D m = new Matrix3D();
    Vector3D realook = transform.Transform(look);
    Vector3D axis = Vector3D.CrossProduct(realook, dir);

    // Clamp into Acos' domain: rounding can push the dot product slightly
    // outside [-1, 1], which would yield NaN.
    double dot = Math.Max(-1.0, Math.Min(1.0, Vector3D.DotProduct(realook, dir)));

    // FIX: Math.Acos returns radians, but WPF's Quaternion(axis, angle)
    // expects the angle in degrees — the original rotated by the wrong
    // amount (off by a factor of 180/pi).
    double angle = Math.Acos(dot) * 180.0 / Math.PI;

    m.RotateAt(new Quaternion(axis, angle), center);

    // FIX: guard the cast; the original dereferenced null for any
    // transform that is not a MatrixTransform3D.
    var rlt = transform as MatrixTransform3D;
    if (rlt == null)
    {
        return null;
    }

    return new MatrixTransform3D(Matrix3D.Multiply(rlt.Matrix, m));
}
/// <summary>
/// Returns a copy of the camera whose position, look direction and up
/// direction have been mapped through the given transform. The projection
/// settings (field of view, near/far planes) are copied unchanged.
/// </summary>
public static PerspectiveCamera GetTransformedPerspectiveCamera(PerspectiveCamera camera, Transform3D transform)
{
    var result = new PerspectiveCamera();
    result.Position = transform.Transform(camera.Position);
    result.LookDirection = transform.Transform(camera.LookDirection);
    result.UpDirection = transform.Transform(camera.UpDirection);
    result.FieldOfView = camera.FieldOfView;
    result.NearPlaneDistance = camera.NearPlaneDistance;
    result.FarPlaneDistance = camera.FarPlaneDistance;
    return result;
}
/// <summary>
/// Computes the contour curves where the contour plane cuts the model's
/// mesh and adds each non-empty contour to the second viewport as a green
/// tube. NOTE(review): the transform parameter is currently unused.
/// </summary>
private void AddContours(GeometryModel3D model, Transform3D transform)
{
    var planePosition = ContourPlane.Position;
    var planeNormal = ContourPlane.Normal;

    var segments = MeshGeometryHelper
        .GetContourSegments(model.Geometry as MeshGeometry3D, planePosition, planeNormal)
        .ToList();

    foreach (var contour in MeshGeometryHelper.CombineSegments(segments, 1e-6).ToList())
    {
        if (contour.Count == 0)
        {
            continue;
        }

        var tube = new TubeVisual3D
        {
            Diameter = 0.03,
            Path = new Point3DCollection(contour),
            Fill = Brushes.Green,
        };
        view2.Children.Add(tube);
    }
}
/// <summary>
/// Exports the model.
/// </summary>
/// <param name="model">
/// The model.
/// </param>
/// <param name="inheritedTransform">
/// The inherited transform.
/// </param>
/// <remarks>
/// Writes the mesh as a VRML Shape with an IndexedFaceSet. Material and
/// transform output are hard-coded/not implemented yet (see todos).
/// Positions are written untransformed.
/// </remarks>
protected override void ExportModel(GeometryModel3D model, Transform3D inheritedTransform)
{
    var mesh = model.Geometry as MeshGeometry3D;
    if (mesh == null)
    {
        return;
    }

    // writer.WriteLine("Transform {");
    // todo: add transform from model.Transform and inheritedTransform
    this.writer.WriteLine("Shape {");
    this.writer.WriteLine(" appearance Appearance {");

    // todo: set material properties from model.Material
    this.writer.WriteLine(" material Material {");
    this.writer.WriteLine(" diffuseColor 0.8 0.8 0.2");
    this.writer.WriteLine(" specularColor 0.5 0.5 0.5");
    this.writer.WriteLine(" }");
    this.writer.WriteLine(" }"); // Appearance

    this.writer.WriteLine(" geometry IndexedFaceSet {");
    this.writer.WriteLine(" coord Coordinate {");
    this.writer.WriteLine(" point [");

    // One "x y z," line per vertex, invariant culture for valid VRML.
    foreach (var pt in mesh.Positions)
    {
        this.writer.WriteLine(string.Format(CultureInfo.InvariantCulture, "{0} {1} {2},", pt.X, pt.Y, pt.Z));
    }

    this.writer.WriteLine(" ]");
    this.writer.WriteLine(" }");
    this.writer.WriteLine(" coordIndex [");

    // One triangle (three indices) per line.
    for (int i = 0; i < mesh.TriangleIndices.Count; i += 3)
    {
        this.writer.WriteLine(
            string.Format(
                CultureInfo.InvariantCulture,
                "{0} {1} {2},",
                mesh.TriangleIndices[i],
                mesh.TriangleIndices[i + 1],
                mesh.TriangleIndices[i + 2]));
    }

    this.writer.WriteLine(" ]");
    this.writer.WriteLine(" }"); // IndexedFaceSet
    this.writer.WriteLine("}"); // Shape

    // writer.WriteLine("}"); // Transform
}
/// <summary>
/// Combines two transforms.
/// Null Values are treated like the Identity transform.
/// </summary>
/// <param name="t1">
/// The first transform.
/// </param>
/// <param name="t2">
/// The second transform.
/// </param>
/// <returns>
/// The combined transform group.
/// </returns>
public static Transform3D CombineTransform(Transform3D t1, Transform3D t2)
{
    // With one side missing there is nothing to combine: return the other
    // side (or Identity when both are null).
    if (t1 == null)
    {
        return t2 ?? Transform3D.Identity;
    }

    if (t2 == null)
    {
        return t1;
    }

    var g = new Transform3DGroup();
    g.Children.Add(t1);
    g.Children.Add(t2);
    return g;
}
/// <summary>
/// Builds a rotation matrix of x, y and z degrees about the transformed
/// side axis (up x look), up axis and look axis respectively, pivoting at
/// center.
/// </summary>
public static Matrix3D CalculateRotationMatrix(double x, double y, double z, Point3D center, Vector3D up, Vector3D look, Transform3D transform)
{
    // Map the axes and the pivot into the transformed frame.
    up = transform.Transform(up);
    look = transform.Transform(look);
    center = transform.Transform(center);

    // Axis perpendicular to up and look (sibling helpers call this axisX).
    Vector3D sideAxis = Vector3D.CrossProduct(up, look);

    var matrix = new Matrix3D();
    matrix.RotateAt(new Quaternion(sideAxis, x), center);
    matrix.RotateAt(new Quaternion(up, y), center);
    matrix.RotateAt(new Quaternion(look, z), center);
    return matrix;
}
// Writes the mesh to the OBJ writer: transformed "v" lines for positions,
// "vt" lines for texture coordinates, "vn" lines for normals, then "f"
// lines for each triangle. Looks like decompiler output (numN locals).
// The three dictionaries map local (per-mesh) indices to the running
// file-wide OBJ indices (vertexIndex/textureIndex/normalIndex fields) —
// presumably those counters start at 1, as OBJ indices are 1-based; verify.
public void ExportMesh(MeshGeometry3D m, Transform3D t)
{
    // local position index -> file-wide vertex index
    Dictionary<int, int> dictionary = new Dictionary<int, int>();
    // local texcoord index -> file-wide texture index (only valid entries)
    Dictionary<int, int> dictionary2 = new Dictionary<int, int>();
    // local normal index -> file-wide normal index (only valid entries)
    Dictionary<int, int> dictionary3 = new Dictionary<int, int>();
    int num = 0;

    // Positions: every vertex is transformed and written.
    foreach (Point3D pointd in m.Positions)
    {
        dictionary.Add(num++, this.vertexIndex++);
        Point3D pointd2 = t.Transform(pointd);
        this.writer.WriteLine(string.Format(CultureInfo.InvariantCulture, "v {0} {1} {2}", new object[] { pointd2.X, pointd2.Y, pointd2.Z }));
    }

    num = 0;
    // Texture coordinates: entries with infinite components are treated as
    // "undefined" sentinels and skipped (the local index still advances so
    // later lookups miss, omitting vt for that vertex).
    foreach (Point point in m.TextureCoordinates)
    {
        if (!(double.IsNegativeInfinity(point.X) || double.IsPositiveInfinity(point.Y)))
        {
            dictionary2.Add(num++, this.textureIndex++);
            this.writer.WriteLine(string.Format(CultureInfo.InvariantCulture, "vt {0} {1}", new object[] { point.X, point.Y }));
        }
        else
        {
            num++;
        }
    }

    num = 0;
    // Normals: same sentinel-skipping scheme as texture coordinates.
    foreach (Vector3D vectord in m.Normals)
    {
        if (!(double.IsNegativeInfinity(vectord.X) || double.IsPositiveInfinity(vectord.Y)))
        {
            dictionary3.Add(num++, this.normalIndex++);
            this.writer.WriteLine(string.Format(CultureInfo.InvariantCulture, "vn {0} {1} {2}", new object[] { vectord.X, vectord.Y, vectord.Z }));
        }
        else
        {
            num++;
        }
    }

    // Faces: "f v/vt/vn ..." — the vt and vn parts are omitted for indices
    // whose entries were skipped above.
    for (int i = 0; i < m.TriangleIndices.Count; i += 3)
    {
        int num6;
        int key = m.TriangleIndices[i];
        int num4 = m.TriangleIndices[i + 1];
        int num5 = m.TriangleIndices[i + 2];
        this.writer.WriteLine("f {0}/{1}{2} {3}/{4}{5} {6}/{7}{8}", new object[] { dictionary[key], dictionary2.ContainsKey(key) ? (num6 = dictionary2[key]).ToString() : string.Empty, dictionary3.ContainsKey(key) ? ("/" + (num6 = dictionary3[key]).ToString()) : string.Empty, dictionary[num4], dictionary2.ContainsKey(num4) ? (num6 = dictionary2[num4]).ToString() : string.Empty, dictionary3.ContainsKey(num4) ? ("/" + (num6 = dictionary3[num4]).ToString()) : string.Empty, dictionary[num5], dictionary2.ContainsKey(num5) ? (num6 = dictionary2[num5]).ToString() : string.Empty, dictionary3.ContainsKey(num5) ? ("/" + (num6 = dictionary3[num5]).ToString()) : string.Empty });
    }
}
// Sets up the demo scene: effects manager, camera, two loaded meshes, a
// floor box, a unit sphere, and an RGB axis-line model (X red, Y green,
// Z blue).
public MainViewModel()
{
    EffectsManager = new DefaultEffectsManager();

    // titles
    this.Title = "Post Processing Effects Demo";
    this.SubTitle = "WPF & SharpDX";

    // ----------------------------------------------
    // camera setup
    this.Camera = new PerspectiveCamera
    {
        Position = new Point3D(0, -30, 0),
        LookDirection = new Vector3D(0, 30, 0),
        UpDirection = new Vector3D(0, 0, 1)
    };

    // Only the first geometry of each loaded file is used.
    var m1 = Load3ds("suzanne.obj").Select(x => x.Geometry).ToArray();
    MeshModel1 = m1[0];
    var m2 = Load3ds("skeleton.3ds").Select(x => x.Geometry).ToArray();
    MeshModel2 = m2[0];

    // Place the two models on opposite sides of the origin.
    Model1Transform = new Media3D.TranslateTransform3D(new Vector3D(7, 0, 0));
    Model2Transform = new Media3D.TranslateTransform3D(new Vector3D(-5, 0, 0));

    // Thin 15x15 box below the models acts as the floor.
    var builder = new MeshBuilder();
    builder.AddBox(new Vector3(0, 0, -5), 15, 15, 0.2);
    FloorModel = builder.ToMesh();

    builder = new MeshBuilder();
    builder.AddSphere(new Vector3(0, 0, 0), 1);
    MeshModel3 = builder.ToMesh();

    // Coordinate axes: three 5-unit lines from the origin; two colors per
    // line (one per endpoint) — X red, Y green, Z blue.
    var lineBuilder = new LineBuilder();
    lineBuilder.AddLine(Vector3.Zero, Vector3.UnitX * 5);
    lineBuilder.AddLine(Vector3.Zero, Vector3.UnitY * 5);
    lineBuilder.AddLine(Vector3.Zero, Vector3.UnitZ * 5);
    LineModel = lineBuilder.ToLineGeometry3D();
    LineModel.Colors = new Color4Collection()
    {
        new Color4(1, 0, 0, 1),
        new Color4(1, 0, 0, 1),
        new Color4(0, 1, 0, 1),
        new Color4(0, 1, 0, 1),
        new Color4(0, 0, 1, 1),
        new Color4(0, 0, 1, 1),
    };
}
// Initializes the viewmodel for the given viewport: background brushes,
// a fixed perspective camera, the SharpDX render/effects managers, and an
// empty model collection with an identity (zero) translation.
public UcAlphaViewModel(Viewport3DX viewport, ServoMovedEventHandler servoMovedEventHandler = null)
{
    this.viewport = viewport;
    // Optional callback; may be null.
    this.servoMovedEventHandler = servoMovedEventHandler;

    Background = new LinearGradientBrush(Colors.Black, Colors.DarkBlue, 90);
    BackgroundColor = new SharpDX.Color4(0, 0, 0, 0);

    // Hard-coded initial camera pose.
    this.Camera = new PerspectiveCamera
    {
        LookDirection = new Media3D.Vector3D(-5.059, 1.949, -23.856),
        UpDirection = new Media3D.Vector3D(0.067, 0.946, 0.319),
        Position = new Media3D.Point3D(3.648, 5.954, 23.353)
    };

    RenderTechniquesManager = new DefaultRenderTechniquesManager();
    EffectsManager = new DefaultEffectsManager(RenderTechniquesManager);

    // Start with no offset and no loaded geometry.
    this.ModelTransform = new Media3D.TranslateTransform3D(0, 0, 0);
    this.ModelGeometry = new Element3DCollection();
}
/// <summary>
/// Builds a tube mesh from a stack of rings: an end cap, the ring-to-ring
/// walls, then the far end cap. curZ advances by each ring's distance from
/// the previous ring. Only TubeRingRegularPolygon rings are allowed in the
/// middle of the tube.
/// </summary>
public static MeshGeometry3D Build(int numSides, List<TubeRingBase> rings, bool softSides, bool shouldCenterZ, Transform3D transform = null)
{
    if (transform == null)
    {
        transform = Transform3D.Identity;
    }

    // Do some validation/prep work
    double height, curZ;
    Initialize(out height, out curZ, numSides, rings, shouldCenterZ);

    MeshGeometry3D retVal = new MeshGeometry3D();

    int pointOffset = 0;

    // This is used when softSides is true. This allows for a way to have a common normal between one ring's bottom and the next ring's top
    double[] rotateAnglesForPerp = null;

    // First end cap (nextRing lets the cap blend its normals with the wall).
    TubeRingBase nextRing = rings.Count > 1 ? rings[1] : null;
    EndCap(ref pointOffset, ref rotateAnglesForPerp, retVal, numSides, null, rings[0], nextRing, transform, true, curZ, softSides);

    // Walls between consecutive rings, advancing curZ as we go.
    for (int cntr = 0; cntr < rings.Count - 1; cntr++)
    {
        if (cntr > 0 && cntr < rings.Count - 1 && !(rings[cntr] is TubeRingRegularPolygon))
        {
            throw new ArgumentException("Only rings are allowed in the middle of the tube");
        }

        Middle(ref pointOffset, ref rotateAnglesForPerp, retVal, transform, numSides, rings[cntr], rings[cntr + 1], curZ, softSides);

        curZ += rings[cntr + 1].DistFromPrevRing;
    }

    // Final end cap.
    TubeRingBase prevRing = rings.Count > 1 ? rings[rings.Count - 2] : null;
    EndCap(ref pointOffset, ref rotateAnglesForPerp, retVal, numSides, prevRing, rings[rings.Count - 1], null, transform, false, curZ, softSides);

    // Exit Function
    //retVal.Freeze();
    return retVal;
}
/// <summary>
/// Writes the model's mesh as a POV-Ray mesh2 block (vertex_vectors and
/// face_indices). Material and transform output are not implemented yet
/// (see todos). Positions are written untransformed.
/// </summary>
protected override void ExportModel(GeometryModel3D model, Transform3D inheritedTransform)
{
    var mesh = model.Geometry as MeshGeometry3D;
    if (mesh == null)
        return;

    // http://www.povray.org/documentation/view/3.6.1/293/
    // todo: create textures/material properties from model.Material
    writer.WriteLine("mesh2 {");

    writer.WriteLine(" vertex_vectors");
    writer.WriteLine(" {");
    writer.WriteLine(" " + mesh.Positions.Count + ",");

    // One "x y z," line per vertex, invariant culture for valid POV-Ray.
    foreach (var pt in mesh.Positions)
    {
        writer.WriteLine(String.Format(CultureInfo.InvariantCulture, " {0} {1} {2},", pt.X, pt.Y, pt.Z));
    }

    writer.WriteLine(" }");

    writer.WriteLine(" face_indices");
    writer.WriteLine(" {");
    writer.WriteLine(" " + mesh.TriangleIndices.Count / 3 + ",");

    // One triangle (three indices) per line.
    for (int i = 0; i < mesh.TriangleIndices.Count; i += 3)
    {
        writer.WriteLine(String.Format(CultureInfo.InvariantCulture, " {0} {1} {2},", mesh.TriangleIndices[i], mesh.TriangleIndices[i + 1], mesh.TriangleIndices[i + 2]));
    }

    writer.WriteLine(" }");

    // todo: add transform from model.Transform and inheritedTransform
    // http://www.povray.org/documentation/view/3.6.1/49/
    writer.WriteLine("}"); // mesh2
}
/// <summary>
/// Volumes are calculated across axis where they are whole numbers (rounded to 0 decimal places).
/// </summary>
/// <param name="modelFile">Path of the model to load.</param>
/// <param name="scaleMultiplyierX">Scale factor applied along X before voxelization.</param>
/// <param name="scaleMultiplyierY">Scale factor applied along Y before voxelization.</param>
/// <param name="scaleMultiplyierZ">Scale factor applied along Z before voxelization.</param>
/// <param name="transform">Optional transform applied to bounds and triangle vertices; may be null.</param>
/// <param name="traceType">Thin/thick tracing and whether smoothing passes run.</param>
/// <param name="resetProgress">Optional callback receiving (0, total work units).</param>
/// <param name="incrementProgress">Optional per-triangle progress callback.</param>
/// <returns>A jagged [x][y][z] voxel grid covering the model's integer-aligned bounds.</returns>
public static CubeType[][][] ReadModelVolmetic(string modelFile, double scaleMultiplyierX, double scaleMultiplyierY, double scaleMultiplyierZ, Transform3D transform, ModelTraceVoxel traceType, Action<double, double> resetProgress, Action incrementProgress)
{
    var model = MeshHelper.Load(modelFile, ignoreErrors: true);

    // How far to check in from the proposed Volumetric edge.
    // This number is just made up, but small enough that it still represents the corner edge of the Volumetric space.
    // But still large enough that it isn't the exact corner.
    const double offset = 0.00000456f;

    // NOTE(review): the scale is only applied when ALL three multipliers
    // differ from 1 — confirm that mixed (e.g. X=2, Y=1) scaling being
    // skipped is intentional.
    if (scaleMultiplyierX > 0 && scaleMultiplyierY > 0 && scaleMultiplyierZ > 0 && scaleMultiplyierX != 1.0f && scaleMultiplyierY != 1.0f && scaleMultiplyierZ != 1.0f)
    {
        model.TransformScale(scaleMultiplyierX, scaleMultiplyierY, scaleMultiplyierZ);
    }

    var tbounds = model.Bounds;
    if (transform != null)
        tbounds = transform.TransformBounds(tbounds);

    // Expand the transformed bounds outward to whole-number voxel edges.
    var xMin = (int)Math.Floor(tbounds.X);
    var yMin = (int)Math.Floor(tbounds.Y);
    var zMin = (int)Math.Floor(tbounds.Z);

    var xMax = (int)Math.Ceiling(tbounds.X + tbounds.SizeX);
    var yMax = (int)Math.Ceiling(tbounds.Y + tbounds.SizeY);
    var zMax = (int)Math.Ceiling(tbounds.Z + tbounds.SizeZ);

    var xCount = xMax - xMin;
    var yCount = yMax - yMin;
    var zCount = zMax - zMin;

    var ccubic = ArrayHelper.Create<CubeType>(xCount, yCount, zCount);

    // Total work = one unit per triangle, plus three grid sweeps when a
    // smoothing pass will run.
    if (resetProgress != null)
    {
        double count = (from GeometryModel3D gm in model.Children select gm.Geometry as MeshGeometry3D).Aggregate<MeshGeometry3D, double>(0, (current, g) => current + (g.TriangleIndices.Count / 3));
        if (traceType == ModelTraceVoxel.ThinSmoothed || traceType == ModelTraceVoxel.ThickSmoothedUp)
        {
            count += (xCount * yCount * zCount * 3);
        }
        resetProgress.Invoke(0, count);
    }

    #region basic ray trace of every individual triangle.

    foreach (var model3D in model.Children)
    {
        var gm = (GeometryModel3D)model3D;
        var g = gm.Geometry as MeshGeometry3D;
        var materials = gm.Material as MaterialGroup;
        // NOTE(review): `color` is extracted but never used in this method.
        System.Windows.Media.Color color = Colors.Transparent;

        if (materials != null)
        {
            var material = materials.Children.OfType<DiffuseMaterial>().FirstOrDefault();
            // NOTE(review): "material != null" is tested twice; the second
            // check was probably meant to be "material.Brush != null".
            if (material != null && material != null && material.Brush is SolidColorBrush)
            {
                color = ((SolidColorBrush)material.Brush).Color;
            }
        }

        for (var t = 0; t < g.TriangleIndices.Count; t += 3)
        {
            if (incrementProgress != null)
            {
                incrementProgress.Invoke();
            }

            var p1 = g.Positions[g.TriangleIndices[t]];
            var p2 = g.Positions[g.TriangleIndices[t + 1]];
            var p3 = g.Positions[g.TriangleIndices[t + 2]];

            if (transform != null)
            {
                p1 = transform.Transform(p1);
                p2 = transform.Transform(p2);
                p3 = transform.Transform(p3);
            }

            // Integer-aligned cell range touched by this triangle.
            var minBound = MeshHelper.Min(p1, p2, p3).Floor();
            var maxBound = MeshHelper.Max(p1, p2, p3).Ceiling();

            Point3D[] rays;

            // X-axis rays: one pair of ray endpoints per (y, z) cell.
            for (var y = minBound.Y; y < maxBound.Y; y++)
            {
                for (var z = minBound.Z; z < maxBound.Z; z++)
                {
                    if (traceType == ModelTraceVoxel.Thin || traceType == ModelTraceVoxel.ThinSmoothed)
                    {
                        rays = new Point3D[] // 1 point ray trace in the center.
                        {
                            new Point3D(xMin, y + 0.5 + offset, z + 0.5 + offset), new Point3D(xMax, y + 0.5 + offset, z + 0.5 + offset)
                        };
                    }
                    else
                    {
                        rays = new Point3D[] // 4 point ray trace within each corner of the expected Volumetric cube.
                        {
                            new Point3D(xMin, y + offset, z + offset), new Point3D(xMax, y + offset, z + offset),
                            new Point3D(xMin, y + 1 - offset, z + offset), new Point3D(xMax, y + 1 - offset, z + offset),
                            new Point3D(xMin, y + offset, z + 1 - offset), new Point3D(xMax, y + offset, z + 1 - offset),
                            new Point3D(xMin, y + 1 - offset, z + 1 - offset), new Point3D(xMax, y + 1 - offset, z + 1 - offset)
                        };
                    }

                    Point3D intersect;
                    int normal;
                    if (MeshHelper.RayIntersetTriangleRound(p1, p2, p3, rays, out intersect, out normal))
                    {
                        ccubic[(int)Math.Floor(intersect.X) - xMin][(int)Math.Floor(intersect.Y) - yMin][(int)Math.Floor(intersect.Z) - zMin] = CubeType.Cube;
                    }
                }
            }

            // Y-axis rays: one pair of ray endpoints per (x, z) cell.
            for (var x = minBound.X; x < maxBound.X; x++)
            {
                for (var z = minBound.Z; z < maxBound.Z; z++)
                {
                    if (traceType == ModelTraceVoxel.Thin || traceType == ModelTraceVoxel.ThinSmoothed)
                    {
                        rays = new Point3D[] // 1 point ray trace in the center.
                        {
                            new Point3D(x + 0.5 + offset, yMin, z + 0.5 + offset), new Point3D(x + 0.5 + offset, yMax, z + 0.5 + offset)
                        };
                    }
                    else
                    {
                        rays = new Point3D[] // 4 point ray trace within each corner of the expected Volumetric cube.
                        {
                            new Point3D(x + offset, yMin, z + offset), new Point3D(x + offset, yMax, z + offset),
                            new Point3D(x + 1 - offset, yMin, z + offset), new Point3D(x + 1 - offset, yMax, z + offset),
                            new Point3D(x + offset, yMin, z + 1 - offset), new Point3D(x + offset, yMax, z + 1 - offset),
                            new Point3D(x + 1 - offset, yMin, z + 1 - offset), new Point3D(x + 1 - offset, yMax, z + 1 - offset)
                        };
                    }

                    Point3D intersect;
                    int normal;
                    if (MeshHelper.RayIntersetTriangleRound(p1, p2, p3, rays, out intersect, out normal))
                    {
                        ccubic[(int)Math.Floor(intersect.X) - xMin][(int)Math.Floor(intersect.Y) - yMin][(int)Math.Floor(intersect.Z) - zMin] = CubeType.Cube;
                    }
                }
            }

            // Z-axis rays: one pair of ray endpoints per (x, y) cell.
            for (var x = minBound.X; x < maxBound.X; x++)
            {
                for (var y = minBound.Y; y < maxBound.Y; y++)
                {
                    if (traceType == ModelTraceVoxel.Thin || traceType == ModelTraceVoxel.ThinSmoothed)
                    {
                        rays = new Point3D[] // 1 point ray trace in the center.
                        {
                            new Point3D(x + 0.5 + offset, y + 0.5 + offset, zMin), new Point3D(x + 0.5 + offset, y + 0.5 + offset, zMax),
                        };
                    }
                    else
                    {
                        rays = new Point3D[] // 4 point ray trace within each corner of the expected Volumetric cube.
                        {
                            new Point3D(x + offset, y + offset, zMin), new Point3D(x + offset, y + offset, zMax),
                            new Point3D(x + 1 - offset, y + offset, zMin), new Point3D(x + 1 - offset, y + offset, zMax),
                            new Point3D(x + offset, y + 1 - offset, zMin), new Point3D(x + offset, y + 1 - offset, zMax),
                            new Point3D(x + 1 - offset, y + 1 - offset, zMin), new Point3D(x + 1 - offset, y + 1 - offset, zMax)
                        };
                    }

                    Point3D intersect;
                    int normal;
                    if (MeshHelper.RayIntersetTriangleRound(p1, p2, p3, rays, out intersect, out normal))
                    {
                        ccubic[(int)Math.Floor(intersect.X) - xMin][(int)Math.Floor(intersect.Y) - yMin][(int)Math.Floor(intersect.Z) - zMin] = CubeType.Cube;
                    }
                }
            }
        }
    }

    #endregion

    CrawlExterior(ccubic);

    // Smoothing passes replace suitable cube neighbors with slope/corner parts.
    if (traceType == ModelTraceVoxel.ThinSmoothed || traceType == ModelTraceVoxel.ThickSmoothedUp)
    {
        CalculateAddedInverseCorners(ccubic, incrementProgress);
        CalculateAddedSlopes(ccubic, incrementProgress);
        CalculateAddedCorners(ccubic, incrementProgress);
    }

    //if (traceType == ModelTraceVoxel.ThickSmoothedDown)
    //{
    //    CalculateSubtractedCorners(ccubic);
    //    CalculateSubtractedSlopes(ccubic);
    //    CalculateSubtractedInverseCorners(ccubic);
    //}

    return ccubic;
}
/// <summary>
/// Convenience overload that applies the same scale multiplier along all
/// three axes. Delegates to the full overload.
/// </summary>
public static CubeType[][][] ReadModelVolmetic(string modelFile, double scaleMultiplyier, Transform3D transform, ModelTraceVoxel traceType, Action<double, double> resetProgress, Action incrementProgress)
{
    return ReadModelVolmetic(
        modelFile,
        scaleMultiplyier,
        scaleMultiplyier,
        scaleMultiplyier,
        transform,
        traceType,
        resetProgress,
        incrementProgress);
}
/// <summary>
/// Convenience overload that applies a uniform scale and reports no
/// progress. Delegates to the full overload with null callbacks.
/// </summary>
public static CubeType[][][] ReadModelVolmetic(string modelFile, double scaleMultiplyier, Transform3D transform, ModelTraceVoxel traceType)
{
    return ReadModelVolmetic(
        modelFile,
        scaleMultiplyier,
        scaleMultiplyier,
        scaleMultiplyier,
        transform,
        traceType,
        null,
        null);
}
// Explicit ModelNode implementation: applies the given transform. A null
// argument leaves the current Transform unchanged.
void ModelNode.SetTT(Transform3D a_TT)
{
    if (a_TT == null)
    {
        return;
    }

    Transform = a_TT;
}
// Returns a new axis aligned bounding box that contains the old
// bounding box post the given transformation.
internal static Rect3D ComputeTransformedAxisAlignedBoundingBox(/* IN */ ref Rect3D originalBox, Transform3D transform)
{
    // Null and identity transforms cannot change the box.
    if (transform == null || transform == Transform3D.Identity)
    {
        return originalBox;
    }

    // Delegate to the matrix-based overload.
    Matrix3D matrix = transform.Value;

    return ComputeTransformedAxisAlignedBoundingBox(ref originalBox, ref matrix);
}
/// <summary>
/// Gets the distance from the camera for the specified visual.
/// </summary>
/// <param name="c">
/// The visual.
/// </param>
/// <param name="cameraPos">
/// The camera position.
/// </param>
/// <param name="transform">
/// The total transform of the visual.
/// </param>
/// <returns>
/// The camera distance.
/// </returns>
private double GetCameraDistance(Visual3D c, Point3D cameraPos, Transform3D transform)
{
    var bounds = Visual3DHelper.FindBounds(c, transform);

    switch (this.Method)
    {
        case SortingMethod.BoundingBoxCenter:
            // NOTE(review): this branch returns the SQUARED distance to the box
            // midpoint while the other branches return true distances —
            // behavior preserved from the original implementation.
            var mid = new Point3D(
                bounds.X + (bounds.SizeX * 0.5),
                bounds.Y + (bounds.SizeY * 0.5),
                bounds.Z + (bounds.SizeZ * 0.5));
            return (mid - cameraPos).LengthSquared;

        case SortingMethod.BoundingBoxCorners:
            // Smallest distance from the camera to any of the 8 box corners.
            // Bits 0, 1 and 2 of the counter select min/max along X, Y and Z.
            double nearest = double.MaxValue;
            for (int corner = 0; corner < 8; corner++)
            {
                var candidate = new Point3D(
                    bounds.X + ((corner & 1) != 0 ? bounds.SizeX : 0),
                    bounds.Y + ((corner & 2) != 0 ? bounds.SizeY : 0),
                    bounds.Z + ((corner & 4) != 0 ? bounds.SizeZ : 0));
                nearest = Math.Min(nearest, cameraPos.DistanceTo(candidate));
            }

            return nearest;

        default:
            // Fall back to the distance from the camera to the bounding sphere.
            var boundingSphere = BoundingSphere.CreateFromRect3D(bounds);
            return boundingSphere.DistanceFrom(cameraPos);
    }
}
// Constructs the world-space pick ray that corresponds to a 2D viewport
// point, and the HitTestProjectionMatrix that maps world space back into
// viewport coordinates for hit-test distance computations.
internal override RayHitTestParameters RayFromViewportPoint(Point p, Size viewSize, Rect3D boundingRect, out double distanceAdjustment)
{
    // The camera may be animating. Take a snapshot of the current value
    // and get the property values we need. (Window OS #992662)
    Point3D position = Position;
    Vector3D lookDirection = LookDirection;
    Vector3D upDirection = UpDirection;
    Transform3D transform = Transform;
    double zn = NearPlaneDistance;
    double zf = FarPlaneDistance;
    double fov = M3DUtil.DegreesToRadians(FieldOfView);

    //
    //  Compute rayParameters
    //

    // Find the point on the projection plane in post-projective space where
    // the viewport maps to a 2x2 square from (-1,1)-(1,-1).
    Point np = M3DUtil.GetNormalizedPoint(p, viewSize);

    // Note: h and w are 1/2 of the inverse of the width/height ratios:
    //
    //  h = 1/(heightDepthRatio) * (1/2)
    //  w = 1/(widthDepthRatio) * (1/2)
    //
    // Computation for h is a bit different than what you will find in
    // D3DXMatrixPerspectiveFovRH because we have a horizontal rather
    // than vertical FoV.
    double aspectRatio = M3DUtil.GetAspectRatio(viewSize);
    double halfWidthDepthRatio = Math.Tan(fov / 2);
    double h = aspectRatio / halfWidthDepthRatio;
    double w = 1 / halfWidthDepthRatio;

    // To get from projective space to camera space we apply the
    // width/height ratios to find our normalized point at 1 unit
    // in front of the camera. (1 is convenient, but has no other
    // special significance.) See note above about the construction
    // of w and h.
    Vector3D rayDirection = new Vector3D(np.X / w, np.Y / h, -1);

    // Apply the inverse of the view matrix to our rayDirection vector
    // to convert it from camera to world space.
    //
    // NOTE: Because our construction of the ray assumes that the
    //       viewMatrix translates the position to the origin we pass
    //       null for the Camera.Transform below and account for it
    //       later.
    Matrix3D viewMatrix = CreateViewMatrix(/* transform = */ null, ref position, ref lookDirection, ref upDirection);
    Matrix3D invView = viewMatrix;
    invView.Invert();
    invView.MultiplyVector(ref rayDirection);

    // Now we have the ray direction; next we need the origin. The camera's
    // position would work except that we would intersect geometry between
    // the camera plane and the near plane so instead we must find the
    // point on the projection plane where the ray (position, rayDirection)
    // intersect (Windows OS #1005064):
    //
    //                     | _.>       p  = camera position
    //                rd _+"           ld = camera look direction
    //                .-"  |ro         pp = projection plane
    //             _.-"    |           rd = ray direction
    //          p +"-------+--->       ro = desired ray origin on pp
    //               ld    |
    //                     pp
    //
    // Above we constructed the direction such that its length projects to
    // 1 unit on the lookDirection vector.
    //
    //             rd _.>
    //             .-"                 rd = unnormalized rayDirection
    //          _.-"                   ld = normalized lookDirection (length = 1)
    //          -"--------->
    //               ld
    //
    // So to find the desired rayOrigin on the projection plane we simply do:
    Point3D rayOrigin = position + zn * rayDirection;
    rayDirection.Normalize();

    // Account for the Camera.Transform we ignored during ray construction above.
    if (transform != null && transform != Transform3D.Identity)
    {
        Matrix3D m = transform.Value;
        m.MultiplyPoint(ref rayOrigin);
        m.MultiplyVector(ref rayDirection);
        PrependInverseTransform(m, ref viewMatrix);
    }

    RayHitTestParameters rayParameters = new RayHitTestParameters(rayOrigin, rayDirection);

    //
    //  Compute HitTestProjectionMatrix
    //

    Matrix3D projectionMatrix = GetProjectionMatrix(aspectRatio, zn, zf);

    // The projectionMatrix takes camera-space 3D points into normalized clip
    // space.

    // The viewportMatrix will take normalized clip space into
    // viewport coordinates, with an additional 2D translation
    // to put the ray at the rayOrigin.
    Matrix3D viewportMatrix = new Matrix3D();
    viewportMatrix.TranslatePrepend(new Vector3D(-p.X, viewSize.Height - p.Y, 0));
    viewportMatrix.ScalePrepend(new Vector3D(viewSize.Width / 2, -viewSize.Height / 2, 1));
    viewportMatrix.TranslatePrepend(new Vector3D(1, 1, 0));

    // First world-to-camera, then the camera's projection, then normalized
    // clip space to viewport.
    rayParameters.HitTestProjectionMatrix =
        viewMatrix *
        projectionMatrix *
        viewportMatrix;

    //
    // Perspective camera doesn't allow negative NearPlanes, so there's
    // not much point in adjusting the ray origin. Hence, the
    // distanceAdjustment remains 0.
    //
    distanceAdjustment = 0.0;

    return (rayParameters);
}
// Marshals this perspective camera's current property values (and the
// resource handles of any property animations) to the composition thread
// by packing and sending a MILCMD_PERSPECTIVECAMERA packet on the channel.
internal override void UpdateResource(DUCE.Channel channel, bool skipOnChannelCheck)
{
    // If we're told we can skip the channel check, then we must be on channel
    Debug.Assert(!skipOnChannelCheck || _duceResource.IsOnChannel(channel));

    if (skipOnChannelCheck || _duceResource.IsOnChannel(channel))
    {
        base.UpdateResource(channel, skipOnChannelCheck);

        // Read values of properties into local variables
        Transform3D vTransform = Transform;

        // Obtain handles for properties that implement DUCE.IResource.
        // A null or identity transform is marshaled as the null handle so the
        // composition side can skip the transform entirely.
        DUCE.ResourceHandle hTransform;
        if (vTransform == null ||
            Object.ReferenceEquals(vTransform, Transform3D.Identity)
            )
        {
            hTransform = DUCE.ResourceHandle.Null;
        }
        else
        {
            hTransform = ((DUCE.IResource)vTransform).GetHandle(channel);
        }

        // Obtain handles for animated properties
        DUCE.ResourceHandle hNearPlaneDistanceAnimations = GetAnimationResourceHandle(NearPlaneDistanceProperty, channel);
        DUCE.ResourceHandle hFarPlaneDistanceAnimations = GetAnimationResourceHandle(FarPlaneDistanceProperty, channel);
        DUCE.ResourceHandle hPositionAnimations = GetAnimationResourceHandle(PositionProperty, channel);
        DUCE.ResourceHandle hLookDirectionAnimations = GetAnimationResourceHandle(LookDirectionProperty, channel);
        DUCE.ResourceHandle hUpDirectionAnimations = GetAnimationResourceHandle(UpDirectionProperty, channel);
        DUCE.ResourceHandle hFieldOfViewAnimations = GetAnimationResourceHandle(FieldOfViewProperty, channel);

        // Pack & send command packet
        DUCE.MILCMD_PERSPECTIVECAMERA data;
        unsafe
        {
            data.Type = MILCMD.MilCmdPerspectiveCamera;
            data.Handle = _duceResource.GetHandle(channel);
            data.htransform = hTransform;
            // For each animatable property below: the static value is packed
            // only when the property is NOT animated (handle is null);
            // otherwise the animation resource supplies the value on the
            // composition side.
            if (hNearPlaneDistanceAnimations.IsNull)
            {
                data.nearPlaneDistance = NearPlaneDistance;
            }
            data.hNearPlaneDistanceAnimations = hNearPlaneDistanceAnimations;
            if (hFarPlaneDistanceAnimations.IsNull)
            {
                data.farPlaneDistance = FarPlaneDistance;
            }
            data.hFarPlaneDistanceAnimations = hFarPlaneDistanceAnimations;
            if (hPositionAnimations.IsNull)
            {
                data.position = CompositionResourceManager.Point3DToMilPoint3F(Position);
            }
            data.hPositionAnimations = hPositionAnimations;
            if (hLookDirectionAnimations.IsNull)
            {
                data.lookDirection = CompositionResourceManager.Vector3DToMilPoint3F(LookDirection);
            }
            data.hLookDirectionAnimations = hLookDirectionAnimations;
            if (hUpDirectionAnimations.IsNull)
            {
                data.upDirection = CompositionResourceManager.Vector3DToMilPoint3F(UpDirection);
            }
            data.hUpDirectionAnimations = hUpDirectionAnimations;
            if (hFieldOfViewAnimations.IsNull)
            {
                data.fieldOfView = FieldOfView;
            }
            data.hFieldOfViewAnimations = hFieldOfViewAnimations;

            // Send packed command structure
            channel.SendCommand(
                (byte *)&data,
                sizeof(DUCE.MILCMD_PERSPECTIVECAMERA));
        }
    }
}
// Marshals this spotlight's current property values (and the resource
// handles of any property animations) to the composition thread by packing
// and sending a MILCMD_SPOTLIGHT packet on the channel.
internal override void UpdateResource(DUCE.Channel channel, bool skipOnChannelCheck)
{
    // If we're told we can skip the channel check, then we must be on channel
    Debug.Assert(!skipOnChannelCheck || _duceResource.IsOnChannel(channel));

    if (skipOnChannelCheck || _duceResource.IsOnChannel(channel))
    {
        base.UpdateResource(channel, skipOnChannelCheck);

        // Read values of properties into local variables
        Transform3D vTransform = Transform;

        // Obtain handles for properties that implement DUCE.IResource.
        // A null or identity transform is marshaled as the null handle so the
        // composition side can skip the transform entirely.
        DUCE.ResourceHandle hTransform;
        if (vTransform == null ||
            Object.ReferenceEquals(vTransform, Transform3D.Identity)
            )
        {
            hTransform = DUCE.ResourceHandle.Null;
        }
        else
        {
            hTransform = ((DUCE.IResource)vTransform).GetHandle(channel);
        }

        // Obtain handles for animated properties
        DUCE.ResourceHandle hColorAnimations = GetAnimationResourceHandle(ColorProperty, channel);
        DUCE.ResourceHandle hPositionAnimations = GetAnimationResourceHandle(PositionProperty, channel);
        DUCE.ResourceHandle hRangeAnimations = GetAnimationResourceHandle(RangeProperty, channel);
        DUCE.ResourceHandle hConstantAttenuationAnimations = GetAnimationResourceHandle(ConstantAttenuationProperty, channel);
        DUCE.ResourceHandle hLinearAttenuationAnimations = GetAnimationResourceHandle(LinearAttenuationProperty, channel);
        DUCE.ResourceHandle hQuadraticAttenuationAnimations = GetAnimationResourceHandle(QuadraticAttenuationProperty, channel);
        DUCE.ResourceHandle hDirectionAnimations = GetAnimationResourceHandle(DirectionProperty, channel);
        DUCE.ResourceHandle hOuterConeAngleAnimations = GetAnimationResourceHandle(OuterConeAngleProperty, channel);
        DUCE.ResourceHandle hInnerConeAngleAnimations = GetAnimationResourceHandle(InnerConeAngleProperty, channel);

        // Pack & send command packet
        DUCE.MILCMD_SPOTLIGHT data;
        unsafe
        {
            data.Type = MILCMD.MilCmdSpotLight;
            data.Handle = _duceResource.GetHandle(channel);
            data.htransform = hTransform;
            // For each animatable property below: the static value is packed
            // only when the property is NOT animated (handle is null);
            // otherwise the animation resource supplies the value on the
            // composition side.
            if (hColorAnimations.IsNull)
            {
                data.color = CompositionResourceManager.ColorToMilColorF(Color);
            }
            data.hColorAnimations = hColorAnimations;
            if (hPositionAnimations.IsNull)
            {
                data.position = CompositionResourceManager.Point3DToMilPoint3F(Position);
            }
            data.hPositionAnimations = hPositionAnimations;
            if (hRangeAnimations.IsNull)
            {
                data.range = Range;
            }
            data.hRangeAnimations = hRangeAnimations;
            if (hConstantAttenuationAnimations.IsNull)
            {
                data.constantAttenuation = ConstantAttenuation;
            }
            data.hConstantAttenuationAnimations = hConstantAttenuationAnimations;
            if (hLinearAttenuationAnimations.IsNull)
            {
                data.linearAttenuation = LinearAttenuation;
            }
            data.hLinearAttenuationAnimations = hLinearAttenuationAnimations;
            if (hQuadraticAttenuationAnimations.IsNull)
            {
                data.quadraticAttenuation = QuadraticAttenuation;
            }
            data.hQuadraticAttenuationAnimations = hQuadraticAttenuationAnimations;
            if (hDirectionAnimations.IsNull)
            {
                data.direction = CompositionResourceManager.Vector3DToMilPoint3F(Direction);
            }
            data.hDirectionAnimations = hDirectionAnimations;
            if (hOuterConeAngleAnimations.IsNull)
            {
                data.outerConeAngle = OuterConeAngle;
            }
            data.hOuterConeAngleAnimations = hOuterConeAngleAnimations;
            if (hInnerConeAngleAnimations.IsNull)
            {
                data.innerConeAngle = InnerConeAngle;
            }
            data.hInnerConeAngleAnimations = hInnerConeAngleAnimations;

            // Send packed command structure
            channel.SendCommand(
                (byte *)&data,
                sizeof(DUCE.MILCMD_SPOTLIGHT));
        }
    }
}
/// <summary>
/// Exports the specified model as X3D Transform/Shape/IndexedFaceSet markup
/// on the instance's <c>writer</c>.
/// </summary>
/// <param name="model">The geometry model to export.</param>
/// <param name="inheritedTransform">
/// The inherited transform. Not applied here (kept for the override
/// contract); the emitted Transform element carries no attributes.
/// </param>
protected override void ExportModel(System.Windows.Media.Media3D.GeometryModel3D model, System.Windows.Media.Media3D.Transform3D inheritedTransform)
{
    // FIX: the original dereferenced the result of the 'as' cast without a
    // null check, throwing NullReferenceException for non-mesh geometry.
    // Only MeshGeometry3D can be expressed as an IndexedFaceSet, so skip
    // anything else.
    var mesh = model.Geometry as MeshGeometry3D;
    if (mesh == null)
    {
        return;
    }

    // Space-separated triangle indices, e.g. "0 1 2 ".
    var indices = new StringBuilder();
    foreach (int i in mesh.TriangleIndices)
    {
        // Append(int) avoids the intermediate string allocated by (i + " ").
        indices.Append(i).Append(' ');
    }

    // Space-separated vertex coordinates; invariant culture keeps the
    // decimal separator a '.' regardless of the UI locale.
    var points = new StringBuilder();
    foreach (var pt in mesh.Positions)
    {
        points.AppendFormat(CultureInfo.InvariantCulture, "{0} {1} {2} ", pt.X, pt.Y, pt.Z);
    }

    writer.WriteStartElement("Transform");
    writer.WriteStartElement("Shape");

    writer.WriteStartElement("IndexedFaceSet");
    writer.WriteAttributeString("coordIndex", indices.ToString());
    writer.WriteStartElement("Coordinate");
    writer.WriteAttributeString("point", points.ToString());
    writer.WriteEndElement(); // Coordinate
    writer.WriteEndElement(); // IndexedFaceSet

    writer.WriteStartElement("Appearance");
    writer.WriteStartElement("Material");
    writer.WriteAttributeString("diffuseColor", "0.8 0.8 0.2");
    writer.WriteAttributeString("specularColor", "0.5 0.5 0.5");
    writer.WriteEndElement(); // Material
    writer.WriteEndElement(); // Appearance

    writer.WriteEndElement(); // Shape
    writer.WriteEndElement(); // Transform
}