/// <summary>
/// Converts an Assimp mesh into an XNA <c>MeshContent</c> with a single
/// geometry batch, copying positions, bone weights, normals and all UV sets.
/// </summary>
/// <param name="sceneMesh">Source Assimp mesh.</param>
/// <returns>The converted mesh content.</returns>
private MeshContent CreateMesh(Mesh sceneMesh)
{
    var mesh = new MeshContent { Name = sceneMesh.Name };

    // Position vertices are shared at the mesh level.
    foreach (var vert in sceneMesh.Vertices)
    {
        mesh.Positions.Add(new Vector3(vert.X, vert.Y, vert.Z));
    }

    var geom = new GeometryContent { Material = _materials[sceneMesh.MaterialIndex] };

    // Geometry vertices reference 1:1 with the MeshContent parent,
    // no indirection is necessary.
    geom.Vertices.AddRange(Enumerable.Range(0, sceneMesh.VertexCount));
    geom.Indices.AddRange(sceneMesh.GetIndices());

    if (sceneMesh.HasBones)
    {
        // Build one BoneWeightCollection per *vertex* (the Weights channel is
        // per-vertex data, so it must stay aligned 1:1 with geom.Vertices —
        // the previous code iterated geom.Indices.Count and dropped empty
        // collections, which breaks that alignment). A single pass over the
        // bones is also O(bones * weights) instead of the old
        // O(indices * bones * weights) rescan.
        var xnaWeights = new List<BoneWeightCollection>(sceneMesh.VertexCount);
        for (var i = 0; i < sceneMesh.VertexCount; i++)
        {
            xnaWeights.Add(new BoneWeightCollection());
        }

        for (var boneIndex = 0; boneIndex < sceneMesh.BoneCount; boneIndex++)
        {
            var bone = sceneMesh.Bones[boneIndex];
            foreach (var weight in bone.VertexWeights)
            {
                // Guard against out-of-range vertex ids in malformed imports.
                if (weight.VertexID >= 0 && weight.VertexID < xnaWeights.Count)
                {
                    xnaWeights[weight.VertexID].Add(new BoneWeight(bone.Name, weight.Weight));
                }
            }
        }

        geom.Vertices.Channels.Add(VertexChannelNames.Weights(0), xnaWeights);
    }

    // Individual channels go here.
    if (sceneMesh.HasNormals)
    {
        geom.Vertices.Channels.Add(VertexChannelNames.Normal(), ToXna(sceneMesh.Normals));
    }

    for (var i = 0; i < sceneMesh.TextureCoordinateChannelCount; i++)
    {
        geom.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(i),
                                   ToXnaTexCoord(sceneMesh.TextureCoordinateChannels[i]));
    }

    mesh.Geometry.Add(geom);
    return mesh;
}
/// <summary>
/// Builds a race-track model from a TrackDefinition: key points are expanded
/// into a ribbon of per-lane vertex groups, triangulated, and converted via
/// the standard ModelProcessor. Starting positions are attached as the
/// model's Tag.
/// </summary>
public override ModelContent Process(TrackDefinition input, ContentProcessorContext context)
{
    // Track shape parameters. NOTE(review): hard-coded here rather than read
    // from the TrackDefinition — confirm whether they should be configurable.
    int lanes = 3;
    float laneThickness = 0.9f;
    float interLaneSpace = 3f;
    float trackThickness = 1f;
    float laneWidth = 0.5f;

    MeshBuilder builder = MeshBuilder.StartMesh("terrain");

    // Extend the key points to vertices.
    List <Vector3> trackPoints = GenerateTrackPoints(input.verts);
    List <Vector3> trackVerts = GenerateTrackVertices(trackPoints, trackThickness, lanes, laneThickness, laneWidth, interLaneSpace);
    List <Matrix> startPositions = GenerateStartingPositions(trackPoints, lanes, laneWidth, interLaneSpace);

    // Add vertices to the mesh, in the order GenerateTrackVertices produced
    // them — the index constants below depend on that layout.
    foreach (Vector3 v in trackVerts)
    {
        builder.CreatePosition(v);
    }

    // Create a material. TODO: point it at our terrain texture.
    BasicMaterialContent material = new BasicMaterialContent();
    String texturePath = Path.Combine(Path.GetDirectoryName(input.filename), "tex.jpg");
    material.Texture = new ExternalReference <TextureContent>(texturePath);
    material.SpecularColor = Color.Black.ToVector3();
    material.SpecularPower = 0;
    builder.SetMaterial(material);

    int texCoordId = builder.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));

    // Each cross-section ("part") of the track contributes 4 vertices per
    // lane plus 4 for the track outline, i.e. 4 * (lanes + 1) vertices.
    int PART_OFFSET = 4 * (lanes + 1);

    // Index offsets within one cross-section (near face) and the next one
    // along the track (far face):
    int INT = 0;                 // near face, inner side, top point
    int INB = 1;                 // near face, inner side, bottom point
    int ONT = 2;                 // near face, outer side, top point
    int ONB = 3;                 // near face, outer side, bottom point
    int IFT = INT + PART_OFFSET; // far face, inner side, top point
    int IFB = INB + PART_OFFSET; // far face, inner side, bottom point
    int OFT = ONT + PART_OFFSET; // far face, outer side, top point
    int OFB = ONB + PART_OFFSET; // far face, outer side, bottom point

    // Set up the triangles, one track segment (pair of adjacent
    // cross-sections) at a time.
    for (int i = 0; i < trackVerts.Count - PART_OFFSET; i += PART_OFFSET)
    {
        // Side triangles. <= lanes because there are n lanes plus one more
        // vertex group for the entire track outline.
        for (int j = 0; j <= lanes; j++)
        {
            // Each lane has 4 points, so step 4 per lane within the section.
            int offset = i + 4 * j;

            // Inner side wall of this lane/outline.
            builder.AddTriangle(offset, INT, IFT, INB, texCoordId);
            builder.AddTriangle(offset, INB, IFT, IFB, texCoordId);

            // Outer side wall.
            builder.AddTriangle(offset, ONT, ONB, OFT, texCoordId);
            builder.AddTriangle(offset, ONB, OFB, OFT, texCoordId);
        }

        // Bottom face spans from the first group's inner-bottom to the last
        // group's outer-bottom (hence the + PART_OFFSET - 4 adjustment).
        builder.AddTriangle(i, INB, OFB + PART_OFFSET - 4, ONB + PART_OFFSET - 4, texCoordId);
        builder.AddTriangle(i, IFB, OFB + PART_OFFSET - 4, INB, texCoordId);

        // Top faces: alternate between recessed "groove" strips (bottom
        // points, untextured) and raised lane surfaces (top points, with
        // near-full-range UVs).
        bool laneGroove = false;
        for (int j = 0; j < PART_OFFSET - 2; j += 2)
        {
            int offset = i + j;
            if (laneGroove)
            {
                builder.AddTriangle(offset, INB, ONB, IFB, texCoordId);
                builder.AddTriangle(offset, ONB, OFB, IFB, texCoordId);
            }
            else
            {
                builder.AddTriangle(offset, INT, ONT, IFT, texCoordId, 0.01f, 0.01f, 0.01f, 0.99f, 0.99f, 0.99f);
                builder.AddTriangle(offset, ONT, OFT, IFT, texCoordId, 0.01f, 0.01f, 0.01f, 0.99f, 0.99f, 0.99f);
            }
            // Alternate groove and non-groove.
            laneGroove = !laneGroove;
        }
    }

    MeshContent terrainMesh = builder.FinishMesh();

    // Chain to the standard ModelProcessor and stash the grid of starting
    // positions on the model's Tag for the game to pick up at load time.
    ModelContent mc = context.Convert <MeshContent, ModelContent>(terrainMesh, "ModelProcessor");
    mc.Tag = startPositions;
    return(mc);
}
/// <summary>
/// Converts one MeshContent into a ModelMeshContent, packing every geometry
/// batch into a single shared vertex buffer and a single shared index buffer.
/// Optionally generates normals and tangent frames first.
/// </summary>
/// <param name="mesh">Source mesh whose geometry batches become mesh parts.</param>
/// <param name="parent">Bone this mesh is attached to in the model skeleton.</param>
/// <param name="context">Pipeline context, used for logging.</param>
/// <returns>The converted mesh with one part per geometry batch.</returns>
private ModelMeshContent ProcessMesh(MeshContent mesh, ModelBoneContent parent, ContentProcessorContext context)
{
    var parts = new List <ModelMeshPartContent>();
    // One buffer pair shared by all parts; parts reference sub-ranges of it.
    var vertexBuffer = new VertexBufferContent();
    var indexBuffer = new IndexCollection();

    if (GenerateTangentFrames)
    {
        context.Logger.LogMessage("Generating tangent frames.");
        foreach (GeometryContent geom in mesh.Geometry)
        {
            // Tangent-frame calculation needs normals, so compute them first
            // if the geometry doesn't already carry a normal channel.
            if (!geom.Vertices.Channels.Contains(VertexChannelNames.Normal(0)))
            {
                MeshHelper.CalculateNormals(geom, true);
            }

            if (!geom.Vertices.Channels.Contains(VertexChannelNames.Tangent(0)) ||
                !geom.Vertices.Channels.Contains(VertexChannelNames.Binormal(0)))
            {
                MeshHelper.CalculateTangentFrames(geom,
                                                  VertexChannelNames.TextureCoordinate(0),
                                                  VertexChannelNames.Tangent(0),
                                                  VertexChannelNames.Binormal(0));
            }
        }
    }

    var startVertex = 0;
    foreach (var geometry in mesh.Geometry)
    {
        var vertices = geometry.Vertices;
        var vertexCount = vertices.VertexCount;
        ModelMeshPartContent partContent;
        if (vertexCount == 0)
        {
            // Empty geometry still gets a (buffer-less) part so the part list
            // stays parallel to mesh.Geometry.
            partContent = new ModelMeshPartContent();
        }
        else
        {
            // Append this geometry's raw vertex bytes at the current end of
            // the shared buffer (offset = current length, stride 1 = bytes).
            var geomBuffer = geometry.Vertices.CreateVertexBuffer();
            vertexBuffer.Write(vertexBuffer.VertexData.Length, 1, geomBuffer.VertexData);

            // Append indices, remembering where this part's range begins.
            var startIndex = indexBuffer.Count;
            indexBuffer.AddRange(geometry.Indices);

            partContent = new ModelMeshPartContent(vertexBuffer, indexBuffer, startVertex, vertexCount, startIndex, geometry.Indices.Count / 3);

            // Geoms are supposed to all have the same decl, so just steal one of these.
            vertexBuffer.VertexDeclaration = geomBuffer.VertexDeclaration;

            startVertex += vertexCount;
        }

        partContent.Material = geometry.Material;
        parts.Add(partContent);
    }

    // Bounding sphere over the mesh's shared position list; empty meshes get
    // a default (zero) sphere.
    var bounds = new BoundingSphere();
    if (mesh.Positions.Count > 0)
    {
        bounds = BoundingSphere.CreateFromPoints(mesh.Positions);
    }

    return(new ModelMeshContent(mesh.Name, mesh, parent, bounds, parts));
}
/// <summary>
/// Appends a single billboard sprite to the output geometry: one shared
/// position, four vertices, per-vertex normals/UVs, a per-billboard random
/// value, and the two triangles that render the quad.
/// </summary>
/// <param name="mesh">Mesh receiving the shared position.</param>
/// <param name="geometry">Geometry receiving vertices and indices.</param>
/// <param name="position">Center point of the billboard.</param>
/// <param name="normal">Normal of the ground beneath the billboard.</param>
private void GenerateBillboard(MeshContent mesh, GeometryContent geometry,
                               Vector3 position, Vector3 normal)
{
    VertexContent vertices = geometry.Vertices;
    VertexChannelCollection channels = vertices.Channels;

    // One shared position for all four corners: the billboard is expanded
    // into a camera-facing, wind-swaying quad by the vertex shader at
    // runtime, so only the center point needs to be stored here.
    int positionIndex = mesh.Positions.Count;
    mesh.Positions.Add(position);

    // Four vertices, all referencing that single position.
    int firstVertex = vertices.PositionIndices.Count;
    for (int corner = 0; corner < 4; corner++)
    {
        vertices.Add(positionIndex);
    }

    // A "real" normal for a flat sprite faking a 3D object is somewhat
    // arbitrary; copying the ground normal lets lighting of the vegetation
    // follow the brightness of the landscape underneath it.
    VertexChannel <Vector3> normals =
        channels.Get <Vector3>(VertexChannelNames.Normal());
    for (int corner = 0; corner < 4; corner++)
    {
        normals[firstVertex + corner] = normal;
    }

    // Texture coordinates map the quad corners onto the full sprite image.
    VertexChannel <Vector2> texCoords =
        channels.Get <Vector2>(VertexChannelNames.TextureCoordinate(0));
    texCoords[firstVertex + 0] = new Vector2(0, 0);
    texCoords[firstVertex + 1] = new Vector2(1, 0);
    texCoords[firstVertex + 2] = new Vector2(1, 1);
    texCoords[firstVertex + 3] = new Vector2(0, 1);

    // A single random value in [-1, 1], replicated to every corner: the
    // shader uses it to vary each billboard's size and wind response.
    float randomValue = (float)random.NextDouble() * 2 - 1;
    VertexChannel <float> randomValues =
        channels.Get <float>(VertexChannelNames.TextureCoordinate(1));
    for (int corner = 0; corner < 4; corner++)
    {
        randomValues[firstVertex + corner] = randomValue;
    }

    // Finally, the two triangles that make up the quad.
    geometry.Indices.Add(firstVertex + 0);
    geometry.Indices.Add(firstVertex + 1);
    geometry.Indices.Add(firstVertex + 2);

    geometry.Indices.Add(firstVertex + 0);
    geometry.Indices.Add(firstVertex + 2);
    geometry.Indices.Add(firstVertex + 3);
}
/// <summary>
/// Recursively walks the node tree and, for every mesh geometry that has a
/// first texture-coordinate channel but no second one, duplicates channel 0
/// into channel 1 (so dual-texture effects always have both).
/// </summary>
private void GenerateDualTextureChannelData(NodeContent node)
{
    if (node is MeshContent mesh)
    {
        foreach (var geometry in mesh.Geometry)
        {
            var channels = geometry.Vertices.Channels;
            bool hasFirstUv = channels.Contains(VertexChannelNames.TextureCoordinate(0));
            bool hasSecondUv = channels.Contains(VertexChannelNames.TextureCoordinate(1));

            // Mirror UV set 0 into UV set 1 when only the first exists.
            if (hasFirstUv && !hasSecondUv)
            {
                channels.Add <Vector2>(
                    VertexChannelNames.TextureCoordinate(1),
                    channels.Get <Vector2>(VertexChannelNames.TextureCoordinate(0)));
            }
        }
    }

    // Descend into the rest of the scene hierarchy.
    foreach (NodeContent child in node.Children)
    {
        GenerateDualTextureChannelData(child);
    }
}
/// <summary>
/// Generates skydome geometry for an input sky texture: a cylinder of
/// vertices capped by an apex vertex above and below, plus the converted
/// sky texture.
/// </summary>
public override SkyContent Process(Texture2DContent input, ContentProcessorContext context)
{
    MeshBuilder builder = MeshBuilder.StartMesh("sky");

    // Lay down two rings of vertices, one around the top edge of the
    // cylinder and one around the bottom.
    List <int> topVertices = new List <int>();
    List <int> bottomVertices = new List <int>();

    for (int segment = 0; segment < cylinderSegments; segment++)
    {
        float angle = MathHelper.TwoPi * segment / cylinderSegments;

        float x = (float)Math.Cos(angle) * cylinderSize;
        float z = (float)Math.Sin(angle) * cylinderSize;

        topVertices.Add(builder.CreatePosition(x, cylinderSize, z));
        bottomVertices.Add(builder.CreatePosition(x, -cylinderSize, z));
    }

    // Two apex vertices close the dome off above and below.
    int topCenterVertex = builder.CreatePosition(0, cylinderSize * 2, 0);
    int bottomCenterVertex = builder.CreatePosition(0, -cylinderSize * 2, 0);

    // Texture coordinates live in their own vertex channel.
    int texCoordId = builder.CreateVertexChannel <Vector2>(
                         VertexChannelNames.TextureCoordinate(0));

    builder.SetMaterial(new BasicMaterialContent());

    // Emit triangles one cylinder segment at a time.
    for (int segment = 0; segment < cylinderSegments; segment++)
    {
        int next = (segment + 1) % cylinderSegments;

        // Horizontal texture coordinates for the two edges of this segment.
        float u1 = (float)segment / (float)cylinderSegments;
        float u2 = (float)(segment + 1) / (float)cylinderSegments;

        // Side wall: two triangles forming one quad of the cylinder.
        AddVertex(builder, topVertices[segment], texCoordId, u1, texCoordTop);
        AddVertex(builder, topVertices[next], texCoordId, u2, texCoordTop);
        AddVertex(builder, bottomVertices[segment], texCoordId, u1, texCoordBottom);

        AddVertex(builder, topVertices[next], texCoordId, u2, texCoordTop);
        AddVertex(builder, bottomVertices[next], texCoordId, u2, texCoordBottom);
        AddVertex(builder, bottomVertices[segment], texCoordId, u1, texCoordBottom);

        // Cap triangle fanning inward to the top apex above this segment.
        AddVertex(builder, topCenterVertex, texCoordId, u1, 0);
        AddVertex(builder, topVertices[next], texCoordId, u2, texCoordTop);
        AddVertex(builder, topVertices[segment], texCoordId, u1, texCoordTop);

        // Cap triangle fanning inward to the bottom apex below this segment.
        AddVertex(builder, bottomCenterVertex, texCoordId, u1, 1);
        AddVertex(builder, bottomVertices[segment], texCoordId, u1, texCoordBottom);
        AddVertex(builder, bottomVertices[next], texCoordId, u2, texCoordBottom);
    }

    SkyContent sky = new SkyContent();

    // Chain the generated mesh through the standard ModelProcessor.
    MeshContent skyMesh = builder.FinishMesh();
    sky.Model = context.Convert <MeshContent, ModelContent>(skyMesh, "ModelProcessor");

    // Convert the sky texture with the plain TextureProcessor rather than
    // the default ModelTextureProcessor: the latter applies DXT compression,
    // which usually looks poor on sky imagery. (This could also be done with
    // a custom ModelProcessor that routes its textures through the default
    // TextureProcessor; this direct approach is simpler.)
    sky.Texture = context.Convert <TextureContent, TextureContent>(input, "TextureProcessor");

    return sky;
}
/// <summary>
/// Builds a double-ended projectile mesh: a ring of four points around the
/// X axis (vertices 1..4) joined to a nose vertex at each end (0 = left at
/// X1, 5 = right at X2). Eight triangles total, all sharing the same three
/// corner texture coordinates.
/// </summary>
/// <param name="p">Projectile definition supplying textures and dimensions.</param>
/// <returns>The finished mesh node.</returns>
private NodeContent BuildMesh(ProjectileXML p)
{
    MeshBuilder mb = MeshBuilder.StartMesh("projectile");

    BasicMaterialContent material = new BasicMaterialContent();
    material.Textures.Add(LightPrePassProcessor.DiffuseMapKey,
                          new ExternalReference <TextureContent>(p.diffuse_texture));
    material.Textures.Add(LightPrePassProcessor.EmissiveMapKey,
                          new ExternalReference <TextureContent>(p.emissive_texture));
    mb.SetMaterial(material);

    // Create data channels.
    int channel_texCoord0 = mb.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));

    // Every triangle reuses the same three corner UVs.
    Vector2 t1 = new Vector2(0, 0);
    Vector2 t2 = new Vector2(1, 1);
    Vector2 t3 = new Vector2(1, 0);

    float X1 = p.leftX;
    float X2 = p.rightX;
    float R = p.radius;

    // Vertex layout: 0 = left nose, 1..4 = ring (+Z, +Y, -Z, -Y), 5 = right nose.
    mb.CreatePosition(new Vector3(X1, 0, 0));
    mb.CreatePosition(new Vector3(0, 0, R));
    mb.CreatePosition(new Vector3(0, R, 0));
    mb.CreatePosition(new Vector3(0, 0, -R));
    mb.CreatePosition(new Vector3(0, -R, 0));
    mb.CreatePosition(new Vector3(X2, 0, 0));

    // Emits one triangle (a, b, c), assigning t1/t2/t3 to its corners.
    // Replaces the previous eight copy-pasted Set/Add blocks.
    Action<int, int, int> addTriangle = (a, b, c) =>
    {
        mb.SetVertexChannelData(channel_texCoord0, t1);
        mb.AddTriangleVertex(a);
        mb.SetVertexChannelData(channel_texCoord0, t2);
        mb.AddTriangleVertex(b);
        mb.SetVertexChannelData(channel_texCoord0, t3);
        mb.AddTriangleVertex(c);
    };

    // Four triangles fanning from the left nose around the ring.
    addTriangle(0, 1, 2);
    addTriangle(0, 2, 3);
    addTriangle(0, 3, 4);
    addTriangle(0, 4, 1);

    // Four triangles fanning from the right nose, wound the opposite way so
    // the faces point outward on that side.
    addTriangle(5, 2, 1);
    addTriangle(5, 3, 2);
    addTriangle(5, 4, 3);
    addTriangle(5, 1, 4);

    return mb.FinishMesh();
}
/// <summary>
/// Generates a terrain mesh from an input heightfield texture: one vertex
/// per texel, two triangles per cell, converted via the standard
/// ModelProcessor with heightmap metadata attached to the model's Tag.
/// </summary>
public override ModelContent Process(Texture2DContent input, ContentProcessorContext context)
{
    MeshBuilder builder = MeshBuilder.StartMesh("terrain");

    // Work with float pixels so height values are easy to read off.
    input.ConvertBitmapType(typeof(PixelBitmapContent <float>));

    PixelBitmapContent <float> heightfield =
        (PixelBitmapContent <float>)input.Mipmaps[0];

    // One vertex per heightfield texel, laid out so the terrain is centered
    // around x=0, z=0.
    for (int row = 0; row < heightfield.Height; row++)
    {
        for (int col = 0; col < heightfield.Width; col++)
        {
            Vector3 position;

            position.X = terrainScale * (col - ((heightfield.Width - 1) / 2.0f));
            position.Z = terrainScale * (row - ((heightfield.Height - 1) / 2.0f));
            position.Y = (heightfield.GetPixel(col, row) - 1) * terrainBumpiness;

            builder.CreatePosition(position);
        }
    }

    // Material pointing at the terrain texture, resolved relative to the
    // source heightfield file.
    BasicMaterialContent material = new BasicMaterialContent();
    material.SpecularColor = new Vector3(.4f, .4f, .4f);

    string directory = Path.GetDirectoryName(input.Identity.SourceFilename);
    string texture = Path.Combine(directory, terrainTexture);
    material.Texture = new ExternalReference <TextureContent>(texture);
    builder.SetMaterial(material);

    // Texture coordinates get their own vertex channel.
    int texCoordId = builder.CreateVertexChannel <Vector2>(
                         VertexChannelNames.TextureCoordinate(0));

    // Two triangles per heightfield cell.
    for (int row = 0; row < heightfield.Height - 1; row++)
    {
        for (int col = 0; col < heightfield.Width - 1; col++)
        {
            AddVertex(builder, texCoordId, heightfield.Width, col, row);
            AddVertex(builder, texCoordId, heightfield.Width, col + 1, row);
            AddVertex(builder, texCoordId, heightfield.Width, col + 1, row + 1);

            AddVertex(builder, texCoordId, heightfield.Width, col, row);
            AddVertex(builder, texCoordId, heightfield.Width, col + 1, row + 1);
            AddVertex(builder, texCoordId, heightfield.Width, col, row + 1);
        }
    }

    // Chain to the ModelProcessor so it can convert the mesh just generated.
    MeshContent terrainMesh = builder.FinishMesh();
    ModelContent model = context.Convert <MeshContent, ModelContent>(terrainMesh, "ModelProcessor");

    // Attach heightmap information to the finished model's Tag so the game
    // can query terrain height at runtime.
    model.Tag = new HeightMapInfoContent(heightfield, terrainScale, terrainBumpiness);

    return model;
}
/// <summary>
/// Converts one MeshContent into this pipeline's MeshData: bakes the node
/// transform, optionally generates tangent frames (when a normal map is
/// found on any material), processes vertex channels, and extracts
/// per-geometry material and buffer data.
/// </summary>
/// <param name="mesh">Source mesh; modified in place (transformed, optimized).</param>
/// <param name="context">Pipeline context (target platform, texture building).</param>
/// <param name="rootPath">Root used to resolve texture references.</param>
/// <param name="processedContent">Cache of already-built content, keyed by name.</param>
/// <param name="skeletonData">Skeleton to associate with each geometry batch.</param>
/// <param name="animations">Animations to associate with the mesh.</param>
/// <param name="geometryCount">Running geometry id counter, shared across meshes.</param>
MeshData ProcessMesh(MeshContent mesh, ContentProcessorContext context, string rootPath, Dictionary <string, object> processedContent, SkeletonData skeletonData, AnimationData[] animations, ref int geometryCount)
{
    // Bake the node's absolute transform into the vertex data.
    MeshHelper.TransformScene(mesh, mesh.AbsoluteTransform);

    // Texture keys conventionally used for normal maps by various exporters.
    string[] normalMapNames = new string[] { "Bump0", "Bump", "NormalMap", "Normalmap", "Normals", "BumpMap" };

    MeshHelper.OptimizeForCache(mesh);

    // If any geometry's material carries a normal map, we need tangent
    // frames. NOTE(review): generateTangents is an instance field, so once
    // set true it appears to stay true for subsequent meshes — confirm
    // whether that is intended.
    foreach (GeometryContent geom in mesh.Geometry)
    {
        if (geom.Material != null)
        {
            string map = MaterialTexture(geom.Material, rootPath, null, null, normalMapNames);
            if (map != null && map.Length > 0)
            {
                generateTangents = true;
            }
        }
    }

    if (generateTangents)
    {
        MeshHelper.CalculateNormals(mesh, false);

        // Only compute the channels that are actually missing; passing null
        // for a channel name skips generating it.
        bool hasNoTangent = !GeometryContainsChannel(mesh, VertexChannelNames.Tangent(0));
        bool hasNoBinorm = !GeometryContainsChannel(mesh, VertexChannelNames.Binormal(0));
        if (hasNoTangent || hasNoBinorm)
        {
            string tangentChannelName = hasNoTangent ? VertexChannelNames.Tangent(0) : null;
            string binormalChannelName = hasNoBinorm ? VertexChannelNames.Binormal(0) : null;
            MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0), tangentChannelName, binormalChannelName);
        }
    }

    if (swapWinding)
    {
        MeshHelper.SwapWindingOrder(mesh);
    }

    List <GeometryData> geometry = new List <GeometryData>();

    // Bone indices are flattened from the skeleton (when present) so vertex
    // channels can reference bones by index.
    BoneContent skeleton = MeshHelper.FindSkeleton(mesh);
    Dictionary <string, int> boneIndices = null;
    if (skeleton != null)
    {
        boneIndices = FlattenSkeleton(skeleton);
    }

    foreach (GeometryContent geom in mesh.Geometry)
    {
        this.ProcessVertexChannels(geom, context, rootPath, boneIndices, null);
        MeshHelper.MergeDuplicateVertices(geom);

        // Pull material properties with defaults matching BasicEffect-style
        // conventions; vertex color is only enabled if the geometry actually
        // has a color channel.
        MaterialData material = new MaterialData(
            MaterialValue <float>("Alpha", geom.Material, 1),
            MaterialValue <float>("SpecularPower", geom.Material, 24),
            MaterialValue <Vector3>("DiffuseColor", geom.Material, Vector3.One),
            MaterialValue <Vector3>("EmissiveColor", geom.Material, Vector3.Zero),
            MaterialValue <Vector3>("SpecularColor", geom.Material, Vector3.Zero),
            MaterialTexture(geom.Material, rootPath, context, processedContent, "Texture"),
            MaterialTexture(geom.Material, rootPath, context, processedContent, normalMapNames),
            MaterialValue <bool>("VertexColorEnabled", geom.Material, true) && geom.Vertices.Channels.Contains(VertexChannelNames.Color(0)));

        // Build platform-specific vertex data and copy out the index list.
        VertexBufferContent vb;
        VertexElement[] ve;
        geom.Vertices.CreateVertexBuffer(out vb, out ve, context.TargetPlatform);
        int[] indices = new int[geom.Indices.Count];
        geom.Indices.CopyTo(indices, 0);

        // The final flag marks big-endian (Xbox 360) data layout.
        geometry.Add(new GeometryData(geometryCount++, geom.Name, ve, vb.VertexData, indices, material, skeletonData, animations, context.TargetPlatform == TargetPlatform.Xbox360));
    }

    return(new MeshData(mesh.Name, geometry.ToArray(), animations));
}
/// <summary>
/// Breaks the input mesh into separate un-indexed triangles, then attaches
/// per-triangle center and random rotational-velocity channels used by the
/// shatter effect. Recurses into child nodes.
/// </summary>
/// <param name="input">Input MeshContent node.</param>
/// <returns>Broken MeshContent</returns>
private MeshContent ProcessMesh(NodeContent input)
{
    MeshBuilder builder = MeshBuilder.StartMesh("model");
    MeshContent mesh = input as MeshContent;

    // Normals/UVs gathered in index order during the first pass, consumed in
    // the same order during the second pass.
    List <Vector3> normalList = new List <Vector3>();
    List <Vector2> texCoordList = new List <Vector2>();

    // Non-mesh nodes still get an (empty) finished mesh so children can be
    // re-parented onto it below.
    if (mesh != null)
    {
        int normalChannel = builder.CreateVertexChannel <Vector3>(
                                VertexChannelNames.Normal());
        int texChannel = builder.CreateVertexChannel <Vector2>(
                             VertexChannelNames.TextureCoordinate(0));

        // Pass 1: copy the positions over. We traverse the indices and grab
        // the indexed position, duplicating positions in the new mesh and
        // thereby reversing the compacting effect of index buffers.
        foreach (GeometryContent geometry in mesh.Geometry)
        {
            IndirectPositionCollection positions = geometry.Vertices.Positions;

            VertexChannel <Vector3> normals = geometry.Vertices.Channels.Get <Vector3>(
                                                  VertexChannelNames.Normal());
            VertexChannel <Vector2> texCoords = geometry.Vertices.Channels.Get <Vector2>(
                                                    VertexChannelNames.TextureCoordinate(0));

            foreach (int i in geometry.Indices)
            {
                builder.CreatePosition(positions[i]);

                // Save the normals and texture coordinates for addition to
                // the mesh later.
                normalList.Add(normals[i]);
                texCoordList.Add(texCoords[i]);
            }
        }

        // Pass 2: emit the triangles. `index` runs sequentially across all
        // geometries, producing the trivial index buffer 0,1,2,3,4,5,...
        int index = 0;
        foreach (GeometryContent geometry in mesh.Geometry)
        {
            // Save the material to the new mesh.
            builder.SetMaterial(geometry.Material);

            for (int i = 0; i < geometry.Indices.Count; i++)
            {
                // Set the normal for the current vertex.
                builder.SetVertexChannelData(normalChannel, normalList[index]);
                // Set the texture coordinates for the current vertex.
                builder.SetVertexChannelData(texChannel, texCoordList[index]);
                builder.AddTriangleVertex(index);
                index++;
            }
        }
    }

    MeshContent finalMesh = builder.FinishMesh();

    // Copy the transform over from the source mesh to retain parent/child
    // relative transforms.
    finalMesh.Transform = input.Transform;

    // Compute the center of every triangle; centers are needed so triangles
    // can be rotated around them as the model shatters.
    foreach (GeometryContent geometry in finalMesh.Geometry)
    {
        Vector3[] triangleCenters = new Vector3[geometry.Indices.Count / 3];

        // Holds the first two vertices of the triangle currently being
        // accumulated; the third arrives when i % 3 == 2.
        Vector3[] trianglePoints = new Vector3[2];
        IndirectPositionCollection positions = geometry.Vertices.Positions;

        for (int i = 0; i < positions.Count; i++)
        {
            Vector3 position = positions[i];
            if (i % 3 == 2)
            {
                // Third vertex: calculate the center of the triangle.
                triangleCenters[i / 3] = (trianglePoints[0] + trianglePoints[1] + position) / 3;
            }
            else
            {
                trianglePoints[i % 3] = position;
            }
        }

        // Add two new channels to the MeshContent:
        // triangleCenterChannel: stores the center of the triangle each
        // vertex belongs to.
        // rotationalVelocityChannel: randomly generated x,y,z rotational
        // angles used to spin the triangles as they shatter from the model.
        geometry.Vertices.Channels.Add <Vector3>(
            triangleCenterChannel,
            new ReplicateTriangleDataToEachVertex <Vector3>(triangleCenters));

        geometry.Vertices.Channels.Add <Vector3>(
            rotationalVelocityChannel,
            new ReplicateTriangleDataToEachVertex <Vector3>(
                new RandomVectorEnumerable(triangleCenters.Length)));
    }

    // Recurse: shattered children are re-parented under the new mesh.
    foreach (NodeContent child in input.Children)
    {
        finalMesh.Children.Add(ProcessMesh(child));
    }

    return(finalMesh);
}
/// <summary>
/// Converts an Assimp mesh into an XNA MeshContent via MeshBuilder, carrying
/// across positions, UVs or vertex colors, normals, tangent frames and bone
/// weights where the source mesh provides them.
/// </summary>
/// <param name="aiMesh">Source Assimp mesh (assumed triangulated).</param>
/// <returns>The finished MeshContent.</returns>
/// <exception cref="Exception">Thrown when the mesh has no positions.</exception>
private MeshContent ExtractMesh(aiMesh aiMesh)
{
    // Name the builder after the mesh when possible, else after the file.
    if (!String.IsNullOrEmpty(aiMesh.mName.Data))
    {
        log("modelname " + aiMesh.mName.Data);
        meshBuilder = MeshBuilder.StartMesh(aiMesh.mName.Data);
    }
    else
    {
        meshBuilder = MeshBuilder.StartMesh(Path.GetFileNameWithoutExtension(filename));
    }

    if (!aiMesh.HasPositions())
    {
        throw new Exception("Model does not have Position");
    }

    // Create the vertex channels this mesh needs. UV channel 0 and vertex
    // color channel 0 are mutually exclusive here, matching the per-vertex
    // writes in the face loop below.
    if (aiMesh.HasTextureCoords(0))
    {
        textureCoordinateDataIndex = meshBuilder.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));
    }
    else if (aiMesh.HasVertexColors(0))
    {
        colorCoordinateDataIndex = meshBuilder.CreateVertexChannel <Vector4>(VertexChannelNames.Color(0));
    }

    if (aiMesh.HasNormals())
    {
        normalDataIndex = meshBuilder.CreateVertexChannel <Vector3>(VertexChannelNames.Normal());
    }

    if (aiMesh.HasTangentsAndBitangents())
    {
        tangentDataIndex = meshBuilder.CreateVertexChannel <Vector3>(VertexChannelNames.Tangent(0));
        binormalDataIndex = meshBuilder.CreateVertexChannel <Vector3>(VertexChannelNames.Binormal(0));
    }

    if (aiMesh.HasBones())
    {
        boneDataIndex = meshBuilder.CreateVertexChannel <BoneWeightCollection>(VertexChannelNames.Weights(0));
    }

    // All positions must be created before any triangles are added.
    for (int j = 0; j < aiMesh.mNumVertices; j++)
    {
        meshBuilder.CreatePosition(aiMesh.mVertices[j].x, aiMesh.mVertices[j].y, aiMesh.mVertices[j].z);
    }

    meshBuilder.SetMaterial(GetMaterial(aiMesh));

    var numFaces = (int)aiMesh.mNumFaces;
    var aiFaces = aiMesh.mFaces;

    // Walk every (triangulated) face, writing per-vertex channel data before
    // emitting each triangle vertex.
    for (int k = 0; k < numFaces; ++k)
    {
        var aiIndices = aiFaces[k].mIndices;
        for (int j = 0; j < 3; ++j)
        {
            int index = (int)aiIndices[j];

            if (aiMesh.HasTextureCoords(0))
            {
                meshBuilder.SetVertexChannelData(textureCoordinateDataIndex,
                    new Vector2(aiMesh.mTextureCoords[0][index].x, aiMesh.mTextureCoords[0][index].y));
            }
            else if (aiMesh.HasVertexColors(0))
            {
                meshBuilder.SetVertexChannelData(colorCoordinateDataIndex,
                    new Vector4(aiMesh.mColors[0][index].r, aiMesh.mColors[0][index].g, aiMesh.mColors[0][index].b, aiMesh.mColors[0][index].a));
            }

            if (aiMesh.HasNormals())
            {
                meshBuilder.SetVertexChannelData(normalDataIndex,
                    new Vector3(aiMesh.mNormals[index].x, aiMesh.mNormals[index].y, aiMesh.mNormals[index].z));
            }

            if (aiMesh.HasTangentsAndBitangents())
            {
                meshBuilder.SetVertexChannelData(tangentDataIndex,
                    new Vector3(aiMesh.mTangents[index].x, aiMesh.mTangents[index].y, aiMesh.mTangents[index].z));
                meshBuilder.SetVertexChannelData(binormalDataIndex,
                    new Vector3(aiMesh.mBitangents[index].x, aiMesh.mBitangents[index].y, aiMesh.mBitangents[index].z));
            }

            if (aiMesh.HasBones())
            {
                // wbone: vertex index -> (bone name -> weight), built elsewhere;
                // vertices with no entry get an empty weight collection.
                BoneWeightCollection boneWeights = new BoneWeightCollection();
                if (wbone.ContainsKey(index))
                {
                    foreach (var item in wbone[index])
                    {
                        boneWeights.Add(new BoneWeight(item.Key, item.Value));
                    }
                }
                meshBuilder.SetVertexChannelData(boneDataIndex, boneWeights);
            }

            meshBuilder.AddTriangleVertex(index);
        }
    }

    return meshBuilder.FinishMesh();
}
public void OnlyUseOnce()
{
    // Build a minimal mesh so the builder reaches its finished state.
    var builder = MeshBuilder.StartMesh("Test");
    var material = new BasicMaterialContent();
    builder.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));
    builder.CreatePosition(0f, 0f, 0f);

    var firstResult = builder.FinishMesh();

    // After FinishMesh: material/channel-data setters are still tolerated,
    // and finishing again hands back the very same mesh instance.
    Assert.DoesNotThrow(() => builder.SetMaterial(material));
    Assert.DoesNotThrow(() => builder.SetVertexChannelData(0, Vector2.Zero));
    Assert.AreSame(firstResult, builder.FinishMesh());

    // ...but any attempt to add new geometry or channels must throw.
    Assert.Throws <InvalidOperationException>(() => builder.CreatePosition(1f, 2f, 3f));
    Assert.Throws <InvalidOperationException>(() => builder.AddTriangleVertex(0));
    Assert.Throws <InvalidOperationException>(() => builder.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(1)));
}
/// <summary>
/// Imports an H3D file into a NodeContent tree: textures, materials (with a
/// generated HLSL effect), geometry with per-vertex channels and bone weights,
/// the skeleton, and any skeletal/material animations found alongside the file.
/// </summary>
public override NodeContent Import(string filename, ContentImporterContext context)
{
    context.Logger.LogMessage("Importing H3D file: {0}", filename);
    _identity = new ContentIdentity(filename, GetType().Name);
    _rootNode = new NodeContent() { Identity = _identity, Name = "RootNode" };

    var scene = FormatIdentifier.IdentifyAndOpen(filename);
    var model = scene.Models[0];

    // If the scene carries no textures, look for a sibling "<name>@Textures.<ext>" file.
    if (!scene.Textures.Any())
    {
        var path = Path.Combine(Path.GetDirectoryName(filename), $"{Path.GetFileNameWithoutExtension(filename)}@Textures{Path.GetExtension(filename)}");
        if (File.Exists(path))
        {
            context.Logger.LogMessage($"Found texture file {path}. Loading data...");
            scene.Merge(FormatIdentifier.IdentifyAndOpen(path, model.Skeleton));
        }
        else
        {
            context.Logger.LogMessage($"Couldn't find texture file {path}!");
        }
    }

    // Textures: one Texture2DContent per scene texture, keyed by name.
    var textures = new Dictionary <string, Texture2DContent>();
    foreach (var texture in scene.Textures)
    {
        var bitmapContent = new PixelBitmapContent <Color>(texture.Width, texture.Height) { Identity = _identity, Name = texture.Name };
        bitmapContent.SetPixelData(texture.ToRGBA());
        var textureContent = new Texture2DContent() { Identity = _identity, Name = texture.Name };
        textureContent.Faces[0].Add(bitmapContent);
        textures.Add(textureContent.Name, textureContent);
    }

    // Materials: copy H3D material parameters and generate the shader code.
    var materials = new Dictionary <string, H3DMaterialContent>();
    foreach (var material in model.Materials)
    {
#if DEBUG
        // Debug-only: generate both shader flavours so they can be inspected.
        var hlslCode = new HLSLShaderGenerator(material.MaterialParams) { BoneCount = model.Skeleton.Count }.GetShader();
        var glslCode = new GLSLFragmentShaderGenerator(material.MaterialParams).GetFragShader();
#endif
        var materialContent = new H3DMaterialContent()
        {
            Identity = _identity,
            Name = material.Name,
            Effect = new EffectContent
            {
                Identity = _identity,
                Name = "H3DEffect",
                EffectCode = new HLSLShaderGenerator(material.MaterialParams) { BoneCount = model.Skeleton.Count }.GetShader()
            },
            Material = material.Name,
            FaceCulling = (H3DFaceCulling?)material.MaterialParams.FaceCulling,
            EmissionColor = material.MaterialParams.EmissionColor.ToXNA(),
            AmbientColor = material.MaterialParams.AmbientColor.ToXNA(),
            DiffuseColor = material.MaterialParams.DiffuseColor.ToXNA(),
            Specular0Color = material.MaterialParams.Specular0Color.ToXNA(),
            Specular1Color = material.MaterialParams.Specular1Color.ToXNA(),
            Constant0Color = material.MaterialParams.Constant0Color.ToXNA(),
            Constant1Color = material.MaterialParams.Constant1Color.ToXNA(),
            Constant2Color = material.MaterialParams.Constant2Color.ToXNA(),
            Constant3Color = material.MaterialParams.Constant3Color.ToXNA(),
            Constant4Color = material.MaterialParams.Constant4Color.ToXNA(),
            Constant5Color = material.MaterialParams.Constant5Color.ToXNA(),
            BlendColor = material.MaterialParams.BlendColor.ToXNA(),
            DepthBufferRead = material.MaterialParams.DepthBufferRead,
            DepthBufferWrite = material.MaterialParams.DepthBufferWrite,
            StencilBufferRead = material.MaterialParams.StencilBufferRead,
            StencilBufferWrite = material.MaterialParams.StencilBufferWrite,
        };

        // Only the enabled texture stages (0..2) are copied into TextureList.
        var texCount = 0;
        if (material.EnabledTextures[0]) { texCount++; }
        if (material.EnabledTextures[1]) { texCount++; }
        if (material.EnabledTextures[2]) { texCount++; }
        materialContent.TextureList = new Texture2DContent[texCount];
        if (material.EnabledTextures[0]) { materialContent.TextureList[0] = textures[material.Texture0Name]; }
        if (material.EnabledTextures[1]) { materialContent.TextureList[1] = textures[material.Texture1Name]; }
        if (material.EnabledTextures[2]) { materialContent.TextureList[2] = textures[material.Texture2Name]; }
        // NOTE(review): if only stage 1 or 2 is enabled, TextureList is indexed by stage
        // while it was sized by the enabled count — confirm stages are always contiguous.
        materialContent.TextureSamplerSettings = material.TextureMappers.Select(tm => new TextureSamplerSettings()
        {
            WrapU = tm.WrapU.ToXNAWrap(),
            WrapV = tm.WrapV.ToXNAWrap(),
            MagFilter = (TextureSamplerSettings.TextureMagFilter)tm.MagFilter,
            MinFilter = (TextureSamplerSettings.TextureMinFilter)tm.MinFilter
        }).ToArray();
        materials.Add(material.Name, materialContent);
    }

    // Geometry: one MeshContent + GeometryContent per (non-silhouette) H3D mesh.
    var meshes = new List <MeshContent>();
    for (var i = 0; i < model.Meshes.Count; i++)
    {
        var modelMesh = model.Meshes[i];
        if (modelMesh.Type == H3DMeshType.Silhouette) { continue; }
        var mesh = new MeshContent()
        {
            Identity = _identity,
            Name = $"{model.Materials[modelMesh.MaterialIndex].Name}_node{i}",
        };
        var geometry = new GeometryContent
        {
            Identity = _identity,
            Material = materials[model.Materials[modelMesh.MaterialIndex].Name]
        };
        var vertices = GetWorldSpaceVertices(model.Skeleton, modelMesh);
        var baseVertex = mesh.Positions.Count;
        foreach (var vertex in vertices) { mesh.Positions.Add(vertex.Position.ToVector3()); }
        // Geometry vertices reference the mesh positions 1:1.
        geometry.Vertices.AddRange(Enumerable.Range(baseVertex, vertices.Length));
        foreach (var attribute in modelMesh.Attributes)
        {
            // Bone indices/weights are handled separately below.
            if (attribute.Name >= PICAAttributeName.BoneIndex) { continue; }
            switch (attribute.Name)
            {
                case PICAAttributeName.Position: break; // Already added
                case PICAAttributeName.Normal:
                    geometry.Vertices.Channels.Add(VertexChannelNames.Normal(0), vertices.Select(vertex => vertex.Normal.ToVector3()));
                    break;
                case PICAAttributeName.Tangent:
                    geometry.Vertices.Channels.Add(VertexChannelNames.Tangent(0), vertices.Select(vertex => vertex.Tangent.ToVector3()));
                    break;
                case PICAAttributeName.Color:
                    geometry.Vertices.Channels.Add(VertexChannelNames.Color(0), vertices.Select(vertex => vertex.Color.ToColor()));
                    break;
                case PICAAttributeName.TexCoord0:
                    geometry.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(0), vertices.Select(vertex => vertex.TexCoord0.ToVector2().ToUV()));
                    break;
                case PICAAttributeName.TexCoord1:
                    geometry.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(1), vertices.Select(vertex => vertex.TexCoord1.ToVector2().ToUV()));
                    break;
                case PICAAttributeName.TexCoord2:
                    geometry.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(2), vertices.Select(vertex => vertex.TexCoord2.ToVector2().ToUV()));
                    break;
            }
        }

        // Skinning: build one BoneWeightCollection per vertex, submesh by submesh.
        var vertexOffset = 0;
        var xnaWeights = new List <BoneWeightCollection>();
        foreach (var modelSubMesh in modelMesh.SubMeshes)
        {
            geometry.Indices.AddRange(modelSubMesh.Indices.Select(index => (int)index));
            // Submeshes index a contiguous vertex range; MaxIndex bounds this submesh.
            var vertexCount = modelSubMesh.MaxIndex + 1 - vertexOffset;
            var subMeshVertices = vertices.Skip(vertexOffset).Take(vertexCount).ToList();
            if (modelSubMesh.Skinning == H3DSubMeshSkinning.Smooth)
            {
                foreach (var vertex in subMeshVertices)
                {
                    var list = new BoneWeightCollection();
                    for (var index = 0; index < 4; index++)
                    {
                        var bIndex = vertex.Indices[index];
                        var weight = vertex.Weights[index];
                        // A zero weight terminates the (weight-sorted) influence list.
                        if (weight == 0) { break; }
                        // Remap the submesh-local bone index into the skeleton; fall back to bone 0.
                        if (bIndex < modelSubMesh.BoneIndicesCount && bIndex > -1) { bIndex = modelSubMesh.BoneIndices[bIndex]; }
                        else { bIndex = 0; }
                        list.Add(new BoneWeight(model.Skeleton[bIndex].Name, weight));
                    }
                    xnaWeights.Add(list);
                }
            }
            else
            {
                // Rigid skinning: a single bone per vertex.
                // NOTE(review): this branch iterates ALL `vertices` (not `subMeshVertices`)
                // and adds weights with value 0 — both look suspicious compared to the
                // smooth branch; confirm against a rigid-skinned model.
                foreach (var vertex in vertices)
                {
                    var bIndex = vertex.Indices[0];
                    if (bIndex < modelSubMesh.BoneIndices.Length && bIndex > -1) { bIndex = modelSubMesh.BoneIndices[bIndex]; }
                    else { bIndex = 0; }
                    xnaWeights.Add(new BoneWeightCollection() { new BoneWeight(model.Skeleton[bIndex].Name, 0) });
                }
            }
            vertexOffset += vertexCount;
        }
        geometry.Vertices.Channels.Add(VertexChannelNames.Weights(0), xnaWeights);
        mesh.Geometry.Add(geometry);
        meshes.Add(mesh);
    }
    foreach (var mesh in meshes) { _rootNode.Children.Add(mesh); }

    // Skeleton.
    var rootBone = ImportBones(model);
    _rootNode.Children.Add(rootBone);

    // If no skeletal animations came with the scene, try "<name>@Animations.<ext>".
    if (!scene.SkeletalAnimations.Any())
    {
        var path = Path.Combine(Path.GetDirectoryName(filename), $"{Path.GetFileNameWithoutExtension(filename)}@Animations{Path.GetExtension(filename)}");
        if (File.Exists(path))
        {
            context.Logger.LogMessage($"Found animation file {path}. Loading data...");
            scene.Merge(FormatIdentifier.IdentifyAndOpen(path, model.Skeleton));
        }
        else
        {
            context.Logger.LogMessage($"Couldn't find animation file {path}!");
        }
    }
    foreach (var animation in ImportSkeletalAnimations(scene)) { rootBone.Animations.Add(animation.Name, animation); }
    foreach (var animation in ImportMaterialAnimations(scene)) { _rootNode.Children.Add(animation); }
    return (_rootNode);
}
/// <summary>
/// Checks every geometry of the mesh that uses a local (embedded) material and
/// throws InvalidContentException when the vertex data does not satisfy that
/// material's requirements (texture coordinates, second UV set, bone weights).
/// Geometry bound to an external material file is skipped.
/// </summary>
private void ValidateMesh(MeshContent mesh)
{
    foreach (var geometry in mesh.Geometry)
    {
        // External material defined in an XML file overrides the local one,
        // so there is nothing to validate for this geometry.
        if (GetExternalMaterial(mesh, geometry) != null)
            continue;

        // Local material included in the model.
        var localMaterial = geometry.Material;
        var vertexChannels = geometry.Vertices.Channels;

        // A textured material requires at least one UV set.
        bool usesTexture = localMaterial != null && localMaterial.Textures.ContainsKey("Texture");
        if (usesTexture && !vertexChannels.Contains(VertexChannelNames.TextureCoordinate(0)))
        {
            string message = String.Format(
                CultureInfo.InvariantCulture,
                "Model \"{0}\" has texture but no texture coordinates.",
                geometry.Parent.Name);
            throw new InvalidContentException(message, geometry.Identity);
        }

        // DualTextureEffect requires a second UV set.
        if (localMaterial is DualTextureMaterialContent
            && !vertexChannels.Contains(VertexChannelNames.TextureCoordinate(1)))
        {
            string message = String.Format(
                CultureInfo.InvariantCulture,
                "Model \"{0}\" uses DualTextureEffect but has only one set of texture coordinates.",
                geometry.Parent.Name);
            throw new InvalidContentException(message, geometry.Identity);
        }

        // Mesh skinning requires bone weights: either a "Weights0" channel
        // (BoneWeightCollection) or the "BlendIndices0"/"BlendWeight0" pair.
        if (localMaterial is SkinnedMaterialContent
            && !vertexChannels.Contains(VertexChannelNames.Weights()))
        {
            var blendIndicesName = VertexChannelNames.EncodeName(VertexElementUsage.BlendIndices, 0);
            var blendWeightsName = VertexChannelNames.EncodeName(VertexElementUsage.BlendWeight, 0);
            if (!vertexChannels.Contains(blendIndicesName) || !vertexChannels.Contains(blendWeightsName))
            {
                string message = String.Format(
                    CultureInfo.InvariantCulture,
                    "Model \"{0}\" uses mesh skinning but vertices do not have bone weights.",
                    geometry.Parent.Name);
                throw new InvalidContentException(message, geometry.Identity);
            }
        }
    }
}
/// <summary>
/// Builds a minimal skinned one-triangle model (positions, UVs, bone weights,
/// one bone, one empty animation) and runs it through ModelProcessor with
/// DefaultEffect = SkinnedEffect.
/// </summary>
public void DefaultEffectTest()
{
    NodeContent input;
    {
        input = new NodeContent();

        // One triangle worth of positions.
        var mesh = new MeshContent() { Name = "Mesh1" };
        mesh.Positions.Add(new Vector3(0, 0, 0));
        mesh.Positions.Add(new Vector3(1, 0, 0));
        mesh.Positions.Add(new Vector3(1, 1, 1));

        // Geometry references the three positions 1:1.
        var geom = new GeometryContent();
        for (var i = 0; i < 3; i++)
        {
            geom.Vertices.Add(i);
        }
        for (var i = 0; i < 3; i++)
        {
            geom.Indices.Add(i);
        }
        geom.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(0), new[]
        {
            new Vector2(0, 0),
            new Vector2(1, 0),
            new Vector2(1, 1),
        });

        // Every vertex shares the same single-bone weight collection.
        var weights = new BoneWeightCollection { new BoneWeight("bone1", 0.5f) };
        geom.Vertices.Channels.Add(VertexChannelNames.Weights(0), new[] { weights, weights, weights });
        mesh.Geometry.Add(geom);
        input.Children.Add(mesh);

        // One bone plus an (empty) animation makes the content skinned.
        var bone1 = new BoneContent { Name = "bone1", Transform = Matrix.CreateTranslation(0, 1, 0) };
        input.Children.Add(bone1);
        var anim = new AnimationContent() { Name = "anim1", Duration = TimeSpan.Zero };
        input.Animations.Add(anim.Name, anim);
    }

    var processorContext = new ProcessorContext(TargetPlatform.Windows, "dummy.xnb");
    var processor = new ModelProcessor
    {
        DefaultEffect = MaterialProcessorDefaultEffect.SkinnedEffect,
    };
    var output = processor.Process(input, processorContext);

    // TODO: Not sure why, but XNA always returns a BasicMaterialContent
    // even when we specify SkinnedEffect as the default. We need to fix
    // the test first before we can enable the assert here.
    //Assert.IsInstanceOf(typeof(SkinnedMaterialContent), output.Meshes[0].MeshParts[0].Material);
}
/// <summary>
/// The importer's entry point.
/// Called by the framework when importing a game asset.
/// </summary>
/// <param name="filename">Name of a game asset file.</param>
/// <param name="context">
/// Contains information for importing a game asset, such as a logger interface.
/// </param>
/// <returns>Resulting game asset.</returns>
public override NodeContent Import(string filename, ContentImporterContext context)
{
    NodeContent rootNode = new NodeContent
    {
        Identity = new ContentIdentity(filename),
        Name = Path.GetFileNameWithoutExtension(filename)
    };
    try
    {
        // Import file using Meshellator.
        Scene scene = MeshellatorLoader.ImportFromFile(filename);

        // Create materials.
        //System.Diagnostics.Debugger.Launch();
        Dictionary <Material, MaterialContent> materials = GetMaterials(scene);

        // Convert Meshellator scene to XNA mesh.
        foreach (Mesh mesh in scene.Meshes)
        {
            MeshContent meshContent = new MeshContent { Name = mesh.Name };
            foreach (Point3D position in mesh.Positions)
            {
                meshContent.Positions.Add(ConvertPoint3D(position));
            }

            // Meshes without a material fall back to a neutral grey BasicMaterial.
            MaterialContent material = (mesh.Material != null)
                ? materials[mesh.Material]
                : new BasicMaterialContent { DiffuseColor = new Vector3(0.5f), VertexColorEnabled = false };

            GeometryContent geometryContent = new GeometryContent { Material = material };
            meshContent.Geometry.Add(geometryContent);
            geometryContent.Indices.AddRange(mesh.Indices);
            // Geometry vertices reference the mesh positions 1:1.
            for (int i = 0; i < mesh.Positions.Count; ++i)
            {
                geometryContent.Vertices.Add(i);
            }

            // Texture coordinates: pad with Vector2.Zero when the source mesh
            // has fewer UVs than positions.
            List <Vector2> textureCoordinates = new List <Vector2>();
            for (int i = 0; i < mesh.Positions.Count; ++i)
            {
                Vector2 textureCoordinate = (i < mesh.TextureCoordinates.Count)
                    ? ConvertTextureCoordinate(mesh.TextureCoordinates[i])
                    : Vector2.Zero;
                textureCoordinates.Add(textureCoordinate);
            }
            geometryContent.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(0), textureCoordinates);

            // Normals.
            // NOTE(review): unlike the UVs above, normals are NOT padded — assumes
            // mesh.Normals has exactly one entry per position; confirm upstream.
            List <Vector3> normals = new List <Vector3>();
            foreach (Vector3D normal in mesh.Normals)
            {
                normals.Add(ConvertVector3D(normal));
            }
            geometryContent.Vertices.Channels.Add(VertexChannelNames.Normal(), normals);

            // Finish the mesh and set the transform.
            if (SwapWindingOrder)
            {
                MeshHelper.SwapWindingOrder(meshContent);
            }
            meshContent.Transform = ConvertTransform(mesh.Transform);

            // Add the mesh to the model.
            rootNode.Children.Add(meshContent);
        }
        return (rootNode);
    }
    catch (InvalidContentException)
    {
        // InvalidContentExceptions do not need further processing.
        throw;
    }
    catch (Exception e)
    {
        // Wrap exception with content identity (includes line number).
        throw new InvalidContentException(
            "Unable to parse file. Exception:\n" + e.ToString(),
            rootNode.Identity,
            e);
    }
}
} // Process

#endregion

#region Process Vertex Channel

/// <summary>
/// Processes geometry content vertex channels at the specified index.
/// When the geometry carries more than position + normal + one UV set, the
/// channel data is compressed (packed formats) or discarded (unused channels).
/// </summary>
protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
{
    VertexChannelCollection channels = geometry.Vertices.Channels;

    // If the model has only position and normals a UV channel is added.
    // http://xnafinalengine.codeplex.com/wikipage?title=Compressed%20Vertex%20Data
    if (channels.Count == 1 && channels.Contains(VertexChannelNames.Normal()))
    {
        channels.Add<Vector2>(VertexChannelNames.TextureCoordinate(0), null);
    }

    // If the model has position, normal and UV then the data is packed on
    // 32-byte aligned vertex data and no compression is performed.
    if (channels.Count == 2 && channels.Contains(VertexChannelNames.Normal()) &&
        channels.Contains(VertexChannelNames.TextureCoordinate(0)))
    {
        // No compressed vertex data.
        base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
    }
    else // Otherwise the data is compressed.
    {
        string name = channels[vertexChannelIndex].Name;
        if (name == VertexChannelNames.Normal())
        {
            channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
        }
        else if (name == VertexChannelNames.TextureCoordinate(0))
        {
            // Clamping UVs into [-1, 1] (to use NormalizedShort2) is not safe here:
            // the distance between vertices can surpass one UV unit, and since the
            // binormals are kept the UVs are not re-normalized either.
            // Half precision keeps the values intact enough.
            channels.ConvertChannelContent<HalfVector2>(vertexChannelIndex);
        }
        else if (IsExtraTextureCoordinateChannel(name) || name == VertexChannelNames.Color(0))
        {
            // Secondary UV sets (1..7) and vertex colors are not used: discard them.
            channels.Remove(name);
        }
        else if (name == VertexChannelNames.Tangent(0))
        {
            channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
        }
        else if (name == VertexChannelNames.Binormal(0))
        {
            channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
            // If the binormal were removed then the position, the normal,
            // the tangent and one texture coordinate could be fetched in one
            // single block of 32 bytes. Still, it measured faster to just pass
            // the value. At least on the test made.
            //channels.Remove(VertexChannelNames.Binormal(0));
        }
        else
        {
            base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
        }
    }
} // ProcessVertexChannel

/// <summary>
/// True when the channel name is one of the secondary texture coordinate
/// channels (TextureCoordinate1 .. TextureCoordinate7).
/// </summary>
private static bool IsExtraTextureCoordinateChannel(string channelName)
{
    for (int i = 1; i <= 7; i++)
    {
        if (channelName == VertexChannelNames.TextureCoordinate(i))
            return true;
    }
    return false;
}
/// <summary>
/// Generates skydome geometry for an input sky texture: a cylinder of
/// <c>cylinderSegments</c> side quads, closed with triangle fans toward a top
/// and a bottom apex, textured by the (DXT1-compressed) input texture.
/// </summary>
public override SkyContent Process(Texture2DContent input, ContentProcessorContext context)
{
    MeshBuilder builder = MeshBuilder.StartMesh("sky");

    // Create two rings of vertices around the top and bottom of the cylinder.
    List <int> topVertices = new List <int>();
    List <int> bottomVertices = new List <int>();
    for (int i = 0; i < cylinderSegments; i++)
    {
        float angle = MathHelper.TwoPi * i / cylinderSegments;
        float x = (float)Math.Cos(angle) * cylinderSize;
        float z = (float)Math.Sin(angle) * cylinderSize;
        // Rings sit at +/- 5/12 of the cylinder size; the apexes close the rest.
        topVertices.Add(builder.CreatePosition(x, cylinderSize * 5 / 12, z));
        bottomVertices.Add(builder.CreatePosition(x, -cylinderSize * 5 / 12, z));
    }

    // Create two center vertices, used for closing the top and bottom.
    int topCenterVertex = builder.CreatePosition(0, cylinderSize, 0);
    int bottomCenterVertex = builder.CreatePosition(0, -cylinderSize, 0);

    // Create a vertex channel for holding texture coordinates.
    int texCoordId = builder.CreateVertexChannel <Vector2>(
        VertexChannelNames.TextureCoordinate(0));

    // Create the individual triangles that make up our skydome.
    for (int i = 0; i < cylinderSegments; i++)
    {
        int j = (i + 1) % cylinderSegments; // wrap around on the last segment

        // Calculate texture coordinates for this segment of the cylinder.
        float u1 = (float)i / (float)cylinderSegments;
        float u2 = (float)(i + 1) / (float)cylinderSegments;

        // Two triangles form a quad, one side segment of the cylinder.
        AddVertex(builder, topVertices[i], texCoordId, u1, texCoordTop);
        AddVertex(builder, topVertices[j], texCoordId, u2, texCoordTop);
        AddVertex(builder, bottomVertices[i], texCoordId, u1, texCoordBottom);

        AddVertex(builder, topVertices[j], texCoordId, u2, texCoordTop);
        AddVertex(builder, bottomVertices[j], texCoordId, u2, texCoordBottom);
        AddVertex(builder, bottomVertices[i], texCoordId, u1, texCoordBottom);

        // Triangle fanning inward to fill the top above this segment.
        AddVertex(builder, topCenterVertex, texCoordId, u1, 0);
        AddVertex(builder, topVertices[j], texCoordId, u2, texCoordTop);
        AddVertex(builder, topVertices[i], texCoordId, u1, texCoordTop);

        // Triangle fanning inward to fill the bottom below this segment.
        AddVertex(builder, bottomCenterVertex, texCoordId, u1, 1);
        AddVertex(builder, bottomVertices[i], texCoordId, u1, texCoordBottom);
        AddVertex(builder, bottomVertices[j], texCoordId, u2, texCoordBottom);
    }

    // Create the output object.
    SkyContent sky = new SkyContent();

    // Chain to the ModelProcessor so it can convert the mesh we just generated.
    MeshContent skyMesh = builder.FinishMesh();
    sky.Model = context.Convert <MeshContent, ModelContent>(skyMesh, "ModelProcessor");

    // Compress the sky texture and attach it to the output.
    input.ConvertBitmapType(typeof(Dxt1BitmapContent));
    sky.Texture = input;
    return (sky);
}
/// <summary>
/// Recursively calls MeshHelper.CalculateTangentFrames for every MeshContent
/// object in the NodeContent scene, and ensures each geometry's material has
/// diffuse, normal, specular and glow texture entries, falling back to the
/// "null_*.tga" placeholders when a map is missing. This function could be
/// changed to add more per vertex data as needed.
/// </summary>
/// <param name="input">A node in the scene. The function should be called
/// with the root of the scene.</param>
/// <param name="context">Pipeline context, used for logging.</param>
/// <param name="inputName">Asset name, forwarded unchanged to recursive calls.</param>
private void PreprocessSceneHierarchy(NodeContent input, ContentProcessorContext context, string inputName)
{
    MeshContent mesh = input as MeshContent;
    if (mesh != null)
    {
        // Tangent frames are needed for normal mapping.
        MeshHelper.CalculateTangentFrames(mesh,
            VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0),
            VertexChannelNames.Binormal(0));

        foreach (GeometryContent geometry in mesh.Geometry)
        {
            var textures = geometry.Material.Textures;

            // Diffuse map: fall back to the placeholder, otherwise try to derive
            // the sibling maps from "ship*_c.tga"-style color map names.
            if (false == textures.ContainsKey(TextureMapKey))
            {
                textures.Add(TextureMapKey, new ExternalReference <TextureContent>("null_color.tga"));
            }
            else
            {
                context.Logger.LogImportantMessage("has: " + textures[TextureMapKey].Filename);
                string fileName = Path.GetFileName(textures[TextureMapKey].Filename);
                if (fileName != null && fileName.StartsWith("ship") && fileName.EndsWith("_c.tga"))
                {
                    InsertMissedMapTextures(textures, fileName.Substring(0, fileName.Length - "_c.tga".Length), context);
                }
            }

            // The remaining maps all follow the same "placeholder if absent" rule.
            EnsureTextureOrDefault(textures, NormalMapKey, "null_normal.tga", context);
            EnsureTextureOrDefault(textures, SpecularMapKey, "null_specular.tga", context);
            EnsureTextureOrDefault(textures, GlowMapKey, "null_glow.tga", context);
        }
    }
    foreach (NodeContent child in input.Children)
    {
        PreprocessSceneHierarchy(child, context, inputName);
    }
}

/// <summary>
/// Adds a placeholder texture reference under <paramref name="key"/> when the
/// material does not already define one; otherwise logs the existing filename.
/// </summary>
private static void EnsureTextureOrDefault(TextureReferenceDictionary textures, string key, string placeholderFileName, ContentProcessorContext context)
{
    if (false == textures.ContainsKey(key))
    {
        textures.Add(key, new ExternalReference <TextureContent>(placeholderFileName));
    }
    else
    {
        context.Logger.LogImportantMessage("has: " + textures[key].Filename);
    }
}
/// <summary>
/// The main Process method converts an intermediate format content pipeline
/// NodeContent tree to a ModelContent object with embedded animation data
/// (bind pose, inverse bind pose, bone hierarchy and animation clips in Tag).
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    ValidateMesh(input, context, null);

    // Generate Tangents/Normals for shader.
    // NOTE(review): tangent frames are only computed when the root node itself
    // is a MeshContent; child meshes are not visited here — confirm intended.
    MeshContent mesh = input as MeshContent;
    if (mesh != null)
    {
        MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0), VertexChannelNames.Binormal(0));
    }

    // Find the skeleton.
    BoneContent skeleton = MeshHelper.FindSkeleton(input);
    if (skeleton == null)
    {
        throw new InvalidContentException("Input skeleton not found.");
    }

    // We don't want to have to worry about different parts of the model being
    // in different local coordinate systems, so let's just bake everything.
    FlattenTransforms(input, skeleton);

    // Read the bind pose and skeleton hierarchy data.
    IList <BoneContent> bones = MeshHelper.FlattenSkeleton(skeleton);
    if (bones.Count > SkinnedEffect.MaxBones)
    {
        throw new InvalidContentException(string.Format(
            "Skeleton has {0} bones, but the maximum supported is {1}.",
            bones.Count, SkinnedEffect.MaxBones));
    }

    List <Matrix> bindPose = new List <Matrix>();
    List <Matrix> inverseBindPose = new List <Matrix>();
    List <int> skeletonHierarchy = new List <int>();
    foreach (BoneContent bone in bones)
    {
        bindPose.Add(bone.Transform);
        inverseBindPose.Add(Matrix.Invert(bone.AbsoluteTransform));
        // Parent's index within the flattened bone list (IndexOf yields -1 for the root).
        skeletonHierarchy.Add(bones.IndexOf(bone.Parent as BoneContent));
    }

    // Convert animation data to our runtime format.
    Dictionary <string, AnimationClip> animationClips;
    animationClips = ProcessAnimations(skeleton.Animations, bones);

    // Chain to the base ModelProcessor class so it can convert the model data.
    ModelContent model = base.Process(input, context);

    // Store our custom animation data in the Tag property of the model.
    model.Tag = new SkinningData(animationClips, bindPose, inverseBindPose, skeletonHierarchy);
    return (model);
}
/// <summary>
/// Imports an OGRE XML mesh plus its linked skeleton file ("SkeletonLink.xml"
/// next to the mesh) into a NodeContent tree: one bone hierarchy and one
/// skinned mesh per submesh.
/// </summary>
public override NodeContent Import(string filename, ContentImporterContext context)
{
    var content = new NodeContent();

    // Deserialize the mesh XML.
    var reader = XmlReader.Create(filename);
    var xmlMesh = (XmlMesh) new XmlSerializer(typeof(XmlMesh)).Deserialize(reader);
    reader.Close();

    // The mesh references its skeleton by name; load "<SkeletonLink>.xml" from the same folder.
    reader = XmlReader.Create(Path.Combine(Path.GetDirectoryName(filename), xmlMesh.SkeletonLink.Name + ".xml"));
    var xmlSkeleton = (XmlSkeleton) new XmlSerializer(typeof(XmlSkeleton)).Deserialize(reader);
    reader.Close();

    context.Logger.LogImportantMessage("Bones: " + xmlSkeleton.Bones.Length.ToString());

    // Build one BoneContent per skeleton bone, keyed by name.
    var bones = new Dictionary <string, BoneContent>();
    foreach (var xmlBone in xmlSkeleton.Bones)
    {
        context.Logger.LogImportantMessage("{0}", "-- " + xmlBone.Name + ": " + xmlBone.Position.AsVector3().ToString() + ", " + xmlBone.Rotation.Angle.ToString() + "/" + xmlBone.Rotation.Axis.AsVector3().ToString());
        var boneContent = new BoneContent()
        {
            Name = xmlBone.Name,
            // Axis/angle rotation first, then translation.
            Transform = Matrix.CreateFromAxisAngle(xmlBone.Rotation.Axis.AsVector3(), xmlBone.Rotation.Angle) * Matrix.CreateTranslation(xmlBone.Position.AsVector3())
        };
        bones.Add(xmlBone.Name, boneContent);
    }
    // Wire up the parent/child relationships.
    foreach (var boneParent in xmlSkeleton.BoneParents)
    {
        var parent = bones[boneParent.Parent];
        var bone = bones[boneParent.Bone];
        parent.Children.Add(bone);
    }
    // Exactly one bone is expected to remain parentless; it becomes the skeleton root.
    var rootBone = bones.Single(x => x.Value.Parent == null);
    content.Children.Add(rootBone.Value);

    context.Logger.LogImportantMessage("Submeshes: " + xmlMesh.SubMeshes.Length.ToString());
    //context.AddDependency(Path.GetFullPath("HUM_M.MATERIAL"));
    //var materialFile = File.ReadAllText("HUM_M.MATERIAL");
    ////context.Logger.LogImportantMessage("{0}", materialFile);
    foreach (var xmlSubMesh in xmlMesh.SubMeshes)
    {
        context.Logger.LogImportantMessage("Submesh: " + xmlSubMesh.Material);
        context.Logger.LogImportantMessage("-- Faces: " + xmlSubMesh.Faces.Length.ToString());
        if (xmlSubMesh.UseSharedGeometry)
        {
            context.Logger.LogImportantMessage("-- Uses Shared Geometry");
        }
        else
        {
            context.Logger.LogImportantMessage("-- Vertexbuffers: " + xmlSubMesh.Geometry.VertexBuffers.Length.ToString());
            context.Logger.LogImportantMessage("-- Vertices (0): " + xmlSubMesh.Geometry.VertexBuffers[0].Vertices.Length.ToString());
            context.Logger.LogImportantMessage("-- Vertices (1): " + xmlSubMesh.Geometry.VertexBuffers[1].Vertices.Length.ToString());
        }

        // One MeshBuilder per submesh, named after its material.
        var builder = MeshBuilder.StartMesh(xmlSubMesh.Material);
        // NOTE(review): the texture is hard-coded to one asset; per-material
        // selection was prototyped below and left disabled.
        //if (xmlSubMesh.Material == "Hum_M/Chest")
        //    builder.SetMaterial(new SkinnedMaterialContent { Texture = new ExternalReference<TextureContent>("TL2_ARMORTEST_CHEST.png") });
        //else if (xmlSubMesh.Material == "Hum_M/MidLeg")
        //    builder.SetMaterial(new SkinnedMaterialContent { Texture = new ExternalReference<TextureContent>("TL2_ARMORTEST_PANTS.png") });
        //else
        builder.SetMaterial(new SkinnedMaterialContent { Texture = new ExternalReference <TextureContent>("Fiend\\FIEND.dds") });

        var normalChannel = builder.CreateVertexChannel <Vector3>(VertexChannelNames.Normal());
        var uvChannel = builder.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));
        var weightsChannel = builder.CreateVertexChannel <BoneWeightCollection>(VertexChannelNames.Weights());

        // Submeshes may carry their own geometry or reference the mesh's shared geometry.
        var geometry = xmlSubMesh.Geometry;
        if (xmlSubMesh.UseSharedGeometry)
        {
            geometry = xmlMesh.SharedGeometry;
        }
        // Positions come from vertex buffer 0.
        foreach (var vertex in geometry.VertexBuffers[0].Vertices)
        {
            builder.CreatePosition(vertex.Position.AsVector3());
        }
        // One triangle per face; per-vertex channel data is filled in by the helper.
        foreach (var face in xmlSubMesh.Faces)
        {
            AddTriangleVertex(builder, xmlMesh, xmlSubMesh, xmlSkeleton, face.Vertex1, normalChannel, uvChannel, weightsChannel);
            AddTriangleVertex(builder, xmlMesh, xmlSubMesh, xmlSkeleton, face.Vertex2, normalChannel, uvChannel, weightsChannel);
            AddTriangleVertex(builder, xmlMesh, xmlSubMesh, xmlSkeleton, face.Vertex3, normalChannel, uvChannel, weightsChannel);
        }
        content.Children.Add(builder.FinishMesh());
    }
    return (content);
}
/// <summary>
/// Imports a model via Assimp: materials (diffuse texture only), meshes with
/// normals and UV channels, and the node hierarchy as BoneContent children.
/// </summary>
public override NodeContent Import(string filename, ContentImporterContext context)
{
    var identity = new ContentIdentity(filename, GetType().Name);

    // Route Assimp's log output through the content pipeline logger.
    var importer = new AssimpImporter();
    importer.AttachLogStream(new LogStream((msg, userData) => context.Logger.LogMessage(msg)));
    var scene = importer.ImportFile(filename,
        PostProcessSteps.FlipUVs | // So far appears necessary
        PostProcessSteps.JoinIdenticalVertices |
        PostProcessSteps.Triangulate |
        PostProcessSteps.SortByPrimitiveType |
        PostProcessSteps.FindInvalidData
        );

    var rootNode = new NodeContent
    {
        Name = scene.RootNode.Name,
        Identity = identity,
        Transform = ToXna(scene.RootNode.Transform)
    };

    // TODO: Materials
    // NOTE(review): GetTexture is called unconditionally — confirm behavior for
    // materials without a diffuse texture slot.
    var materials = new List <MaterialContent>();
    foreach (var sceneMaterial in scene.Materials)
    {
        var diffuse = sceneMaterial.GetTexture(TextureType.Diffuse, 0);
        materials.Add(new BasicMaterialContent()
        {
            Name = sceneMaterial.Name,
            Identity = identity,
            Texture = new ExternalReference <TextureContent>(diffuse.FilePath, identity)
        });
    }

    // Meshes
    var meshes = new Dictionary <Mesh, MeshContent>();
    foreach (var sceneMesh in scene.Meshes)
    {
        if (!sceneMesh.HasVertices)
        {
            continue;
        }
        var mesh = new MeshContent { Name = sceneMesh.Name };
        // Position vertices are shared at the mesh level
        foreach (var vert in sceneMesh.Vertices)
        {
            mesh.Positions.Add(new Vector3(vert.X, vert.Y, vert.Z));
        }
        var geom = new GeometryContent
        {
            Name = string.Empty,
            //Material = materials[sceneMesh.MaterialIndex]
        };
        // Geometry vertices reference 1:1 with the MeshContent parent,
        // no indirection is necessary.
        geom.Vertices.Positions.AddRange(mesh.Positions);
        geom.Vertices.AddRange(Enumerable.Range(0, sceneMesh.VertexCount));
        geom.Indices.AddRange(sceneMesh.GetIntIndices());

        // Individual channels go here
        if (sceneMesh.HasNormals)
        {
            geom.Vertices.Channels.Add(VertexChannelNames.Normal(), ToXna(sceneMesh.Normals));
        }
        for (var i = 0; i < sceneMesh.TextureCoordsChannelCount; i++)
        {
            geom.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(i), ToXnaVector2(sceneMesh.GetTextureCoords(i)));
        }
        mesh.Geometry.Add(geom);
        rootNode.Children.Add(mesh);
        meshes.Add(sceneMesh, mesh);
    }

    // Bones
    var bones = new Dictionary <Node, BoneContent>();
    var hierarchyNodes = scene.RootNode.Children.SelectDeep(n => n.Children).ToList();
    foreach (var node in hierarchyNodes)
    {
        var bone = new BoneContent
        {
            Name = node.Name,
            // NOTE(review): bone transforms are transposed here while the root
            // transform above is not — verify this asymmetry is intentional.
            Transform = Matrix.Transpose(ToXna(node.Transform))
        };
        if (node.Parent == scene.RootNode)
        {
            rootNode.Children.Add(bone);
        }
        else
        {
            var parent = bones[node.Parent];
            parent.Children.Add(bone);
        }

        // Copy the bone's name to the MeshContent - this appears to be
        // the way it comes out of XNA's FBXImporter.
        foreach (var meshIndex in node.MeshIndices)
        {
            meshes[scene.Meshes[meshIndex]].Name = node.Name;
        }
        bones.Add(node, bone);
    }
    return (rootNode);
}
/// <summary>
/// Builds the wall mesh for a level section bitmap: every wall tile that
/// borders a floor tile gets a vertical quad, and each quad is textured with a
/// random quarter of the wall texture atlas.
/// </summary>
/// <param name="bitmap">Section map; each pixel is one tile.</param>
/// <returns>The finished wall mesh.</returns>
private NodeContent buildWallMesh(PixelBitmapContent <Color> bitmap)
{
    MeshBuilder mb = MeshBuilder.StartMesh("wall");

    // Create a material, and point it at the world section texture.
    BasicMaterialContent material = new BasicMaterialContent();
    material.Textures.Add(LightPrePassProcessor.DiffuseMapKey, new ExternalReference <TextureContent>(walltexturefile));
    mb.SetMaterial(material);
    int channel_texCoord0 = mb.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));

    // Derive the grid size from the bitmap instead of hard-coding 32x32;
    // the mesh stays centered on the origin. For a 32x32 bitmap the generated
    // positions are identical to the previous hard-coded version (half = 16).
    int gridWidth = bitmap.Width;
    int gridHeight = bitmap.Height;
    float halfWidth = gridWidth / 2f;
    float halfHeight = gridHeight / 2f;

    // First create vertex data: one quad per wall face that is exposed to a floor tile.
    int quadcount = 0;
    for (int y = 0; y < gridHeight; y++)
    {
        for (int x = 0; x < gridWidth; x++)
        {
            if (!IsWallTile(bitmap, x, y))
                continue;

            // A face is emitted only where this wall tile borders a floor tile.
            bool leftWall = IsFloorTileSafe(bitmap, x - 1, y);
            bool rightWall = IsFloorTileSafe(bitmap, x + 1, y);
            bool backWall = IsFloorTileSafe(bitmap, x, y - 1);
            bool frontWall = IsFloorTileSafe(bitmap, x, y + 1);

            if (leftWall)
            {
                quadcount += AddQuadVertexPositions(mb,
                    new Vector3(-halfWidth + x, WallHeight, -halfHeight + y),
                    new Vector3(-halfWidth + x, 0.0f, -halfHeight + y + 1.0f));
            }
            if (rightWall)
            {
                quadcount += AddQuadVertexPositions(mb,
                    new Vector3(-halfWidth + x + 1.0f, WallHeight, -halfHeight + y + 1.0f),
                    new Vector3(-halfWidth + x + 1.0f, 0.0f, -halfHeight + y));
            }
            if (frontWall)
            {
                quadcount += AddQuadVertexPositions(mb,
                    new Vector3(-halfWidth + x, WallHeight, -halfHeight + y + 1.0f),
                    new Vector3(-halfWidth + x + 1.0f, 0.0f, -halfHeight + y + 1.0f));
            }
            if (backWall)
            {
                quadcount += AddQuadVertexPositions(mb,
                    new Vector3(-halfWidth + x + 1.0f, WallHeight, -halfHeight + y),
                    new Vector3(-halfWidth + x, 0.0f, -halfHeight + y));
            }
        }
    }

    // Assign each quad a random quarter of the texture atlas. The RNG is
    // seeded so repeated content builds are deterministic (the previous
    // unseeded Random produced a different mesh on every build).
    Random r = new Random(12345);
    for (int q = 0; q < quadcount; q++)
    {
        Vector2[] tex = quarters[r.Next(4)];
        AddTriangleVertices(mb, q, channel_texCoord0, tex);
    }
    return (mb.FinishMesh());
}
/// <summary>
/// Assigns <paramref name="material"/> to every geometry in
/// <paramref name="geometryCollection"/>, after processing each geometry's
/// vertex channels and validating that the geometry satisfies the material's
/// requirements (texture-coordinate channels, vertex weights).
/// </summary>
/// <param name="material">The material to assign; a default material is created when null.</param>
/// <param name="geometryCollection">The geometries that use this material.</param>
/// <param name="context">The processor context (used by ProcessVertexChannel).</param>
/// <exception cref="InvalidContentException">
/// Thrown when required texture coordinates or vertex weights are missing.
/// </exception>
protected virtual void ProcessGeometryUsingMaterial(MaterialContent material, IEnumerable <GeometryContent> geometryCollection, ContentProcessorContext context)
{
    // If we don't get a material then assign a default one.
    if (material == null)
    {
        material = MaterialProcessor.CreateDefaultMaterial(DefaultEffect);
    }

    // Derive the requirements implied by the concrete material type:
    // how many texture-coordinate channels must exist, and whether
    // per-vertex bone weights are mandatory (skinned meshes only).
    int textureChannels;
    bool vertexWeights = false;

    if (material is DualTextureMaterialContent)
    {
        textureChannels = 2;
    }
    else if (material is SkinnedMaterialContent)
    {
        textureChannels = 1;
        vertexWeights = true;
    }
    else if (material is EnvironmentMapMaterialContent)
    {
        textureChannels = 1;
    }
    else if (material is AlphaTestMaterialContent)
    {
        textureChannels = 1;
    }
    else
    {
        // Just check for a "Texture" which should cover custom Effects
        // and BasicEffect which can have an optional texture.
        textureChannels = material.Textures.ContainsKey("Texture") ? 1 : 0;
    }

    // By default we must set the vertex color property
    // to match XNA behavior.
    material.OpaqueData["VertexColorEnabled"] = false;

    // If we run into a geometry that requires vertex
    // color we need a separate material for it (a clone with
    // VertexColorEnabled flipped on), since materials are shared.
    var colorMaterial = material.Clone();
    colorMaterial.OpaqueData["VertexColorEnabled"] = true;

    foreach (var geometry in geometryCollection)
    {
        // Process the geometry. Note: channels are processed by index and
        // ProcessVertexChannel may convert or replace channel contents, so
        // the Count is re-read each iteration.
        for (var i = 0; i < geometry.Vertices.Channels.Count; i++)
        {
            ProcessVertexChannel(geometry, i, context);
        }

        // Verify we have the right number of texture coords.
        for (var i = 0; i < textureChannels; i++)
        {
            if (!geometry.Vertices.Channels.Contains(VertexChannelNames.TextureCoordinate(i)))
            {
                throw new InvalidContentException(
                    string.Format("The mesh \"{0}\", using {1}, contains geometry that is missing texture coordinates for channel {2}.",
                    geometry.Parent.Name,
                    MaterialProcessor.GetDefaultEffect(material),
                    i),
                    _identity);
            }
        }

        // Do we need to enable vertex color?
        if (geometry.Vertices.Channels.Contains(VertexChannelNames.Color(0)))
        {
            geometry.Material = colorMaterial;
        }
        else
        {
            geometry.Material = material;
        }

        // Do we need vertex weights? Skinned materials cannot render
        // geometry that has no blend-weight channel at all.
        if (vertexWeights)
        {
            var weightsName = VertexChannelNames.EncodeName(VertexElementUsage.BlendWeight, 0);
            if (!geometry.Vertices.Channels.Contains(weightsName))
            {
                throw new InvalidContentException(
                    string.Format("The skinned mesh \"{0}\" contains geometry without any vertex weights.",
                    geometry.Parent.Name),
                    _identity);
            }
        }
    }
}
/// <summary>
/// Emits the 12 triangles (6 faces x 2 triangles) of a cube into the builder,
/// mapping each face onto its own cell of a texture atlas laid out as
/// 4 columns x 2 rows (Front, Right, Back, Left on row 0; Top, Bottom on row 1).
/// Assumes vertex positions with indices 0-7 were already created on the
/// builder by the caller; the trailing comments record the position each
/// index is assumed to hold -- TODO confirm against the caller.
/// </summary>
private void AddVerticesInformation(ref MeshBuilder builder)
{
    // texture locations (atlas cells):
    // F,R,B,L
    // U,D

    //Front
    Vector2 fi = new Vector2(0, 0); //cell 0, row 0
    //Right
    Vector2 ri = new Vector2(1, 0); //cell 1, row 0
    //Back
    Vector2 bi = new Vector2(2, 0); //cell 2, row 0
    //Left
    Vector2 li = new Vector2(3, 0); //cell 3, row 0
    //Upward (Top)
    Vector2 ui = new Vector2(0, 1); //cell 0, row 1
    //Downward (Bottom)
    Vector2 di = new Vector2(1, 1); //cell 1, row 1

    int texCoordChannel = builder.CreateVertexChannel <Vector2> (VertexChannelNames.TextureCoordinate(0));

    //---- front plane: first column, first row
    //bottom triangle of front plane
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, fi));
    builder.AddTriangleVertex(4); //-1,1,-1
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, fi));
    builder.AddTriangleVertex(5); //1,-1,-1
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, fi));
    builder.AddTriangleVertex(6); //-1,-1,-1

    //top triangle of front plane
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, fi));
    builder.AddTriangleVertex(4); //-1,1,-1
    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, fi));
    builder.AddTriangleVertex(7); //1,1,-1
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, fi));
    builder.AddTriangleVertex(5); //1,-1,-1

    //---- right plane
    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, ri));
    builder.AddTriangleVertex(3);
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, ri));
    builder.AddTriangleVertex(1);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, ri));
    builder.AddTriangleVertex(5);

    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, ri));
    builder.AddTriangleVertex(3);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, ri));
    builder.AddTriangleVertex(5);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, ri));
    builder.AddTriangleVertex(7);

    //---- back plane: 3rd column, first row
    //bottom triangle of back plane
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, bi)); //1,1
    builder.AddTriangleVertex(2); //-1,-1,1
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, bi)); //0,1
    builder.AddTriangleVertex(1); //1,-1,1
    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, bi)); //1,0
    builder.AddTriangleVertex(0); //-1,1,1

    //top triangle of back plane
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, bi)); //0,1
    builder.AddTriangleVertex(1); //1,-1,1
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, bi)); //0,0
    builder.AddTriangleVertex(3); //1,1,1
    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, bi)); //1,0
    builder.AddTriangleVertex(0); //-1,1,1

    //---- left plane
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, li));
    builder.AddTriangleVertex(6);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, li));
    builder.AddTriangleVertex(2);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, li));
    builder.AddTriangleVertex(0);

    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, li));
    builder.AddTriangleVertex(4);
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, li));
    builder.AddTriangleVertex(6);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, li));
    builder.AddTriangleVertex(0);

    //---- upward (top) plane
    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, ui));
    builder.AddTriangleVertex(3);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, ui));
    builder.AddTriangleVertex(4);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, ui));
    builder.AddTriangleVertex(0);

    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, ui));
    builder.AddTriangleVertex(3);
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, ui));
    builder.AddTriangleVertex(7);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, ui));
    builder.AddTriangleVertex(4);

    //---- downward (bottom) plane
    builder.SetVertexChannelData(texCoordChannel, UV(1, 0, di));
    builder.AddTriangleVertex(2);
    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, di));
    builder.AddTriangleVertex(6);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, di));
    builder.AddTriangleVertex(1);

    builder.SetVertexChannelData(texCoordChannel, UV(1, 1, di));
    builder.AddTriangleVertex(6);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 1, di));
    builder.AddTriangleVertex(5);
    builder.SetVertexChannelData(texCoordChannel, UV(0, 0, di));
    builder.AddTriangleVertex(1);
}
/// <summary>
/// Converts an Assimp mesh into a GeometryContent attached to the given
/// MeshContent. Positions are appended to the parent's shared position pool;
/// the geometry references them starting at the current pool size.
/// </summary>
/// <param name="mesh">Parent mesh whose Positions list receives this geometry's vertices.</param>
/// <param name="aiMesh">The Assimp source mesh.</param>
/// <returns>The populated geometry (vertices, indices, weights, normals, UVs, colors).</returns>
private GeometryContent CreateGeometry(MeshContent mesh, Mesh aiMesh)
{
    var geom = new GeometryContent { Material = _materials[aiMesh.MaterialIndex] };

    // Vertices — positions are pooled on the parent MeshContent, so this
    // geometry's vertex indices start at the current pool size.
    var baseVertex = mesh.Positions.Count;
    foreach (var vert in aiMesh.Vertices)
    {
        mesh.Positions.Add(ToXna(vert));
    }
    geom.Vertices.AddRange(Enumerable.Range(baseVertex, aiMesh.VertexCount));
    geom.Indices.AddRange(aiMesh.GetIndices());

    if (aiMesh.HasBones)
    {
        // BUG FIX: the weights channel must contain exactly one
        // BoneWeightCollection per vertex, aligned with geom.Vertices.
        // The previous code iterated over geom.Indices (per index, not per
        // vertex), matched weight.VertexID against index *positions*, and
        // skipped vertices without weights — misaligning the channel.
        // A single pass over each bone's weight list is also
        // O(total weights) instead of O(vertices * bones * weights).
        var xnaWeights = new BoneWeightCollection[aiMesh.VertexCount];
        for (var i = 0; i < xnaWeights.Length; i++)
        {
            xnaWeights[i] = new BoneWeightCollection();
        }

        foreach (var bone in aiMesh.Bones)
        {
            foreach (var weight in bone.VertexWeights)
            {
                xnaWeights[weight.VertexID].Add(new BoneWeight(bone.Name, weight.Weight));
            }
        }

        geom.Vertices.Channels.Add(VertexChannelNames.Weights(0), xnaWeights);
    }

    // Individual channels go here
    if (aiMesh.HasNormals)
    {
        geom.Vertices.Channels.Add(VertexChannelNames.Normal(), ToXna(aiMesh.Normals));
    }

    for (var i = 0; i < aiMesh.TextureCoordinateChannelCount; i++)
    {
        geom.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(i),
            ToXnaTexCoord(aiMesh.TextureCoordinateChannels[i]));
    }

    for (var i = 0; i < aiMesh.VertexColorChannelCount; i++)
    {
        geom.Vertices.Channels.Add(VertexChannelNames.Color(i),
            ToXnaColors(aiMesh.VertexColorChannels[i]));
    }

    return geom;
}
} // Process

#endregion

#region Process Vertex Channel

/// <summary>
/// Processes geometry content vertex channels at the specified index,
/// compressing channel data to compact packed vertex formats:
/// normals/tangents/binormals to NormalizedShort4, the primary texture
/// coordinate to HalfVector2. Secondary texture coordinates (channels 1-7)
/// and vertex color are removed entirely.
/// </summary>
protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
{
    // Compressed Vertex Data
    VertexChannelCollection channels = geometry.Vertices.Channels;
    string name = channels[vertexChannelIndex].Name;

    if (name == VertexChannelNames.Normal())
    {
        channels.ConvertChannelContent <NormalizedShort4>(vertexChannelIndex);
    }
    else if (name == VertexChannelNames.TextureCoordinate(0))
    {
        // If the resource has texture coordinates outside the range [-1, 1] the values will be clamped.
        channels.ConvertChannelContent <HalfVector2>(vertexChannelIndex);
    }
    else if (IsSecondaryTextureCoordinate(name))
    {
        // Texture coordinate channels 1-7 are not used by the runtime; drop them.
        channels.Remove(name);
    }
    else if (name == VertexChannelNames.Color(0))
    {
        channels.Remove(name);
    }
    else if (name == VertexChannelNames.Tangent(0) || name == VertexChannelNames.Binormal(0))
    {
        // No need to get rid of the binormal data because the model will use more than 32 bytes per vertex.
        // We can actually try to align the data to 64 bytes per vertex.
        channels.ConvertChannelContent <NormalizedShort4>(vertexChannelIndex);
    }
    else
    {
        // Blend indices, blend weights and everything else.
        // Don't use "BlendWeight0" as a name, nor weights0. Both names don't work.
        base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
        channels.ConvertChannelContent <Byte4>("BlendIndices0");
        channels.ConvertChannelContent <NormalizedShort4>(VertexChannelNames.EncodeName(VertexElementUsage.BlendWeight, 0));
    }
} // ProcessVertexChannel

/// <summary>
/// True when <paramref name="name"/> is one of the secondary texture
/// coordinate channels (1 through 7) that this processor discards.
/// Replaces eight copy-pasted else-if branches with one check.
/// </summary>
private static bool IsSecondaryTextureCoordinate(string name)
{
    for (int i = 1; i < 8; i++)
    {
        if (name == VertexChannelNames.TextureCoordinate(i))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Reads a Blender mesh object from a parsed .blend file: its name,
/// materials, object transform (loc/rot/size) and vertex/normal data, then
/// builds the renderable mesh via the version-appropriate loader
/// (MPoly/MLoop for Blender 2.66+, MFace for 2.49-2.61).
/// </summary>
/// <param name="mesh">The populated "Mesh" structure from the .blend file.</param>
/// <param name="file">The containing Blender file (for pointer resolution).</param>
internal Mesh(PopulatedStructure mesh, BlenderFile file)
{
    FileBlock materialArray;
    int pointerSize = mesh["mat"].Size;

    // Blender id names carry a two-character type prefix and are
    // NUL-padded; strip both to get the plain object name.
    Name = new string(mesh["id.name"].GetValueAsCharArray()).Split('\0')[0].Substring(2);

    // "mat" points at an array of material pointers. A null pointer or a
    // block whose size is not a whole number of pointers means no materials.
    ulong mat = mesh["mat"].GetValueAsPointer();
    if (mat == 0 || (materialArray = file.GetBlockByAddress(mat)).Size % pointerSize != 0)
    {
        Materials = new Material[0];
    }
    else
    {
        // BUG FIX: the element count is Size / pointerSize. The original
        // used '%', which is always 0 here because of the check above,
        // so no materials were ever loaded.
        int count = materialArray.Size / pointerSize;
        Materials = new Material[count];
        for (int i = 0; i < count; i++)
        {
            // BUG FIX: index each pointer at 'i * pointerSize'. The original
            // read offset 'count * pointerSize' (one past the end of the
            // array) on every iteration.
            Materials[i] = Material.GetOrCreateMaterial(
                file,
                file.GetStructuresByAddress(
                    pointerSize == 4 ?
                        BitConverter.ToUInt32(materialArray.Data, i * pointerSize) :
                        BitConverter.ToUInt64(materialArray.Data, i * pointerSize)
                )[0]
            );
        }
    }

    // Object transform as stored on the mesh.
    float[] vectorTemp = mesh["loc"].GetValueAsFloatArray();
    Location = new Vector3(vectorTemp[0], vectorTemp[1], vectorTemp[2]);
    vectorTemp = mesh["rot"].GetValueAsFloatArray();
    Rotation = new Vector3(vectorTemp[0], vectorTemp[1], vectorTemp[2]);
    vectorTemp = mesh["size"].GetValueAsFloatArray();
    Size = new Vector3(vectorTemp[0], vectorTemp[1], vectorTemp[2]);

    MeshBuilder primordialMesh = MeshBuilder.StartMesh(Name);

    // both structures use the same vertex structure
    List <Vector3> verts = new List <Vector3>();
    List <short[]> unconvertedNormals = new List <short[]>();
    foreach (PopulatedStructure s in file.GetStructuresByAddress(mesh["mvert"].GetValueAsPointer()))
    {
        float[] vector = s["co"].GetValueAsFloatArray();
        unconvertedNormals.Add(s["no"].GetValueAsShortArray());
        verts.Add(new Vector3(vector[0], vector[1], vector[2]));
    }
    List <Vector3> normals = convertNormals(unconvertedNormals);

    VertexPositionNormalTexture[] vertices;
    BasicMaterialContent bmc;
    // todo: not yet sure which format versions of Blender between 2.62 and 2.65 use.
    // BUG FIX: parse the version with the invariant culture so "2.66" does not
    // fail to parse in locales that use ',' as the decimal separator.
    if (float.Parse(file.VersionNumber, System.Globalization.CultureInfo.InvariantCulture) >= 2.66f) // uses edges, loops, and polys (Blender 2.66+)
    {
        vertices = loadNewModel(file, mesh, verts, normals, out bmc);
    }
    else // uses MFace (Blender 2.49-2.61)
    {
        vertices = loadOldModel(file, mesh, verts, normals, out bmc);
    }

    // Build the final mesh: positions first, then per-vertex UV and normal
    // channels, emitting vertices in order as triangle-list entries.
    MeshBuilder mb = MeshBuilder.StartMesh(Name);
    foreach (VertexPositionNormalTexture v in vertices)
    {
        mb.CreatePosition(v.Position);
    }
    int uvChannel = mb.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));
    int normalChannel = mb.CreateVertexChannel <Vector3>(VertexChannelNames.Normal());
    int j = 0;
    foreach (VertexPositionNormalTexture v in vertices)
    {
        mb.SetVertexChannelData(uvChannel, v.TextureCoordinate);
        mb.SetVertexChannelData(normalChannel, v.Normal);
        mb.AddTriangleVertex(j++);
    }
}
/// <summary>
/// Imports a model file via Assimp and converts the scene into the XNA
/// content DOM: a root NodeContent holding a bone hierarchy (with any
/// animations attached to the skeleton root) and one MeshContent per scene
/// mesh, each with normals, texture coordinates and normalized bone weights.
/// </summary>
/// <param name="filename">Path of the model file to import.</param>
/// <param name="context">Importer context used for logging.</param>
/// <returns>The root node of the imported content tree.</returns>
/// <exception cref="InvalidContentException">
/// Thrown on degenerate meshes, non-triangular faces or unweighted vertices.
/// </exception>
public override NodeContent Import(string filename, ContentImporterContext context)
{
    ContentIdentity identity = new ContentIdentity(filename, GetType().Name);

    // Clamp bone influences per vertex to what the runtime skinning supports.
    const int MAX_BONE_WEIGHTS = 4;
    VertexBoneWeightLimitConfig boneConfig = new VertexBoneWeightLimitConfig(MAX_BONE_WEIGHTS);

    AssimpImporter importer = new AssimpImporter();
    importer.SetConfig(boneConfig);
    importer.AttachLogStream(new LogStream((msg, userData) => context.Logger.LogMessage(msg)));

    Scene scene = importer.ImportFile(filename,
                                      PostProcessSteps.FlipUVs |
                                      PostProcessSteps.JoinIdenticalVertices |
                                      PostProcessSteps.Triangulate |
                                      PostProcessSteps.SortByPrimitiveType |
                                      PostProcessSteps.FindInvalidData |
                                      PostProcessSteps.LimitBoneWeights |
                                      PostProcessSteps.FixInFacingNormals);

    // Root node. Assimp matrices are transposed relative to XNA's layout.
    NodeContent rootNode = new NodeContent
    {
        Name = scene.RootNode.Name,
        Identity = identity,
        Transform = Matrix.Transpose(ToXna(scene.RootNode.Transform))
    };

    // Materials
    MaterialContent[] materials = new MaterialContent[scene.MaterialCount];
    for (int m = 0; m < scene.MaterialCount; m++)
    {
        materials[m] = new BasicMaterialContent();
        materials[m].Identity = identity;
        // For some reason, there is all kinds of nasty junk in this string:
        materials[m].Name = CleanInput(scene.Materials[m].Name);
        for (int t = 0; t < scene.Materials[m].GetTextureCount(TextureType.Diffuse); t++)
        {
            TextureSlot diffuseMap = scene.Materials[m].GetTexture(TextureType.Diffuse, t);
            if (!String.IsNullOrEmpty(diffuseMap.FilePath))
            {
                // First diffuse texture is keyed "Texture", extras "Texture1", "Texture2", ...
                materials[m].Textures.Add("Texture" + (t > 0 ? t.ToString() : ""),
                    new ExternalReference <TextureContent>(diffuseMap.FilePath, identity));
            }
        }
    }

    // Bones
    // We find 'mesh container' nodes with the best names for those meshes while looking for the bones,
    // and will need them later when we create the MeshContents. I have a feeling that this won't work
    // in general, and may need to be made more robust.
    Dictionary <Mesh, string> meshNames = new Dictionary <Mesh, string>();
    Dictionary <Node, BoneContent> nodeToBoneMap = new Dictionary <Node, BoneContent>();
    BoneContent skeleton = null; // The root bone for the model.
    List <Node> hierarchyNodes = scene.RootNode.Children.SelectDeep(n => n.Children).ToList();
    foreach (Node node in hierarchyNodes)
    {
        BoneContent bone = new BoneContent
        {
            Name = node.Name,
            Transform = Matrix.Transpose(ToXna(node.Transform))
        };

        if (node.MeshIndices != null)
        {
            // This node is a 'mesh container' instead of a bone, so we only care about extracting the name of the mesh.
            foreach (int meshIndex in node.MeshIndices)
            {
                if (!meshNames.ContainsKey(scene.Meshes[meshIndex]))
                {
                    meshNames.Add(scene.Meshes[meshIndex], node.Name);
                }
            }
        }
        else if (node.Parent == scene.RootNode)
        {
            if (skeleton == null)
            {
                // This will be our skeleton so put the animations here:
                if (scene.HasAnimations)
                {
                    foreach (Animation assimpAnim in scene.Animations)
                    {
                        if (assimpAnim.HasNodeAnimations)
                        {
                            AnimationContent newAnim = new AnimationContent();
                            newAnim.Identity = identity;
                            // BUG FIX: Assimp reports TicksPerSecond == 0 when the
                            // source file does not specify a rate; fall back to the
                            // conventional 25 ticks/sec instead of dividing by zero
                            // (which would make Duration infinite/invalid).
                            double ticksPerSecond = assimpAnim.TicksPerSecond > 0 ? assimpAnim.TicksPerSecond : 25.0;
                            newAnim.Duration = TimeSpan.FromSeconds(assimpAnim.DurationInTicks / ticksPerSecond);
                            newAnim.Name = assimpAnim.Name;

                            foreach (NodeAnimationChannel nac in assimpAnim.NodeAnimationChannels)
                            {
                                Node animatedNode = hierarchyNodes.Find(n => n.Name == nac.NodeName);
                                AnimationChannel newChan = BuildAnimtionChannel(animatedNode, nac);
                                newAnim.Channels.Add(nac.NodeName, newChan);
                            }

                            if (String.IsNullOrEmpty(assimpAnim.Name))
                            {
                                bone.Animations.Add("SkelematorNoAnimationName", newAnim);
                            }
                            else
                            {
                                bone.Animations.Add(assimpAnim.Name, newAnim);
                            }
                        }
                    }
                }
                rootNode.Children.Add(bone);
                skeleton = bone;
            }
            else
            {
                context.Logger.LogWarning(null, identity, "Found multiple skeletons in the model, throwing extras away...");
            }
        }
        else
        {
            BoneContent parent = nodeToBoneMap[node.Parent];
            parent.Children.Add(bone);
        }
        nodeToBoneMap.Add(node, bone);
    }

    // Meshes
    Dictionary <Mesh, MeshContent> meshes = new Dictionary <Mesh, MeshContent>();
    foreach (Mesh sceneMesh in scene.Meshes)
    {
        // See comment about meshNames at the beginning of the bone section.
        MeshBuilder mb = MeshBuilder.StartMesh(meshNames[sceneMesh]);
        mb.SwapWindingOrder = true; // Appears to require this...

        int positionIndex = -1;
        for (int v = 0; v < sceneMesh.VertexCount; v++)
        {
            Vector3D vert = sceneMesh.Vertices[v];
            // CreatePosition should just return a 0-based index of the newly added vertex.
            positionIndex = mb.CreatePosition(new Vector3(vert.X, vert.Y, vert.Z));
            if (positionIndex != v)
            {
                throw new InvalidContentException("Something unexpected happened while building a MeshContent from the Assimp scene mesh's vertices. The scene mesh may contains duplicate vertices.");
            }
        }
        // Fewer than three vertices cannot form a single triangle.
        if (positionIndex + 1 < 3)
        {
            throw new InvalidContentException("There were not enough vertices in the Assimp scene mesh.");
        }

        // Create vertex channels
        int normalVertexChannelIndex = mb.CreateVertexChannel <Vector3>(VertexChannelNames.Normal());
        int[] texCoordVertexChannelIndex = new int[sceneMesh.TextureCoordsChannelCount];
        for (int x = 0; x < sceneMesh.TextureCoordsChannelCount; x++)
        {
            texCoordVertexChannelIndex[x] = mb.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(x));
        }
        int boneWeightVertexChannelIndex = -1;
        if (sceneMesh.HasBones)
        {
            boneWeightVertexChannelIndex = mb.CreateVertexChannel <BoneWeightCollection>(VertexChannelNames.Weights());
        }

        // Prepare vertex channel data: one weight collection per vertex.
        BoneWeightCollection[] boneWeightData = null;
        if (sceneMesh.HasBones)
        {
            boneWeightData = new BoneWeightCollection[sceneMesh.VertexCount];
            for (int v = 0; v < sceneMesh.VertexCount; v++)
            {
                boneWeightData[v] = new BoneWeightCollection();
            }
            foreach (Bone sceneMeshBone in sceneMesh.Bones)
            {
                // We have to assume that the bone's name matches up with a node, and therefore one of our BoneContents.
                foreach (VertexWeight sceneMeshBoneWeight in sceneMeshBone.VertexWeights)
                {
                    boneWeightData[sceneMeshBoneWeight.VertexID].Add(new BoneWeight(sceneMeshBone.Name, sceneMeshBoneWeight.Weight));
                }
            }
            for (int v = 0; v < sceneMesh.VertexCount; v++)
            {
                if (boneWeightData[v].Count <= 0)
                {
                    throw new InvalidContentException("Encountered vertices without bone weights.");
                }
                boneWeightData[v].NormalizeWeights();
            }
        }

        // Set the per-geometry data
        mb.SetMaterial(materials[sceneMesh.MaterialIndex]);
        mb.SetOpaqueData(new OpaqueDataDictionary());

        // Add each vertex of every (triangulated) face.
        for (int f = 0; f < sceneMesh.FaceCount; f++)
        {
            if (sceneMesh.Faces[f].IndexCount != 3)
            {
                throw new InvalidContentException("Only triangular faces allowed.");
            }
            for (int t = 0; t < 3; t++)
            {
                mb.SetVertexChannelData(normalVertexChannelIndex, ToXna(sceneMesh.Normals[sceneMesh.Faces[f].Indices[t]]));
                for (int x = 0; x < sceneMesh.TextureCoordsChannelCount; x++)
                {
                    mb.SetVertexChannelData(texCoordVertexChannelIndex[x], ToXnaVector2((sceneMesh.GetTextureCoords(x))[sceneMesh.Faces[f].Indices[t]]));
                }
                if (sceneMesh.HasBones)
                {
                    mb.SetVertexChannelData(boneWeightVertexChannelIndex, boneWeightData[sceneMesh.Faces[f].Indices[t]]);
                }
                mb.AddTriangleVertex((int)(sceneMesh.Faces[f].Indices[t]));
            }
        }

        MeshContent mesh = mb.FinishMesh();
        rootNode.Children.Add(mesh);
        meshes.Add(sceneMesh, mesh);
    }

    return rootNode;
}