/// <summary>
/// Maps a vertex attribute usage to the corresponding XNA vertex channel name.
/// </summary>
/// <param name="attr">The vertex attribute whose usage is translated.</param>
/// <returns>The XNA channel name, or null when the usage has no XNA equivalent.</returns>
public static string GetXNAName(VertexAttribute attr)
{
    switch (attr.usage)
    {
        case COLOR:
            return VertexChannelNames.Color(0);
        case NORMAL:
            return VertexChannelNames.Normal();
        case TEX_COORD:
            // Texture coordinates and weights keep their original attribute index.
            return VertexChannelNames.TextureCoordinate(attr.attrIndex);
        case BONE_WEIGHT:
            return VertexChannelNames.Weights(attr.attrIndex);
        case TANGENT:
            return VertexChannelNames.Tangent(0);
        case BINORMAL:
            return VertexChannelNames.Binormal(0);
        default:
            return null;
    }
}
// Generates the tangent frames for all meshes. Note: for the tangent generation, a
// texture coordinate channel is needed. Unfortunately the arena model doesn't seem
// to be consistent in the used texture channels. We therefore have to find the
// correct channel by analysing all the used channel indices for all geometry
// batches in each mesh.
private void GenerateTangents(NodeContent input, ContentProcessorContext context)
{
    MeshContent mesh = input as MeshContent;

    // -1 = no texture channel seen yet; -2 = inconsistent indices (error marker).
    int channel = -1;

    // find the index of the texture channel (sometimes 0, sometimes 1)
    if (mesh != null)
    {
        // loop through all geometry batches
        foreach (GeometryContent geometryBatch in mesh.Geometry)
        {
            // check the index of the texture channel
            foreach (VertexChannel vertexChannel in geometryBatch.Vertices.Channels)
            {
                // is this a texture channel?
                if (!vertexChannel.Name.Contains("Texture"))
                {
                    continue;
                }

                // extract index (last letter, convert it to int); guard against
                // unexpected channel names whose last character is not a digit
                char c = vertexChannel.Name[vertexChannel.Name.Length - 1];
                if (!char.IsDigit(c))
                {
                    continue;
                }

                int curChannel = c - '0';
                if (channel == -1)
                {
                    // first time we see a texture channel for this mesh: store index
                    channel = curChannel;
                }
                else if (channel != curChannel)
                {
                    // we have already seen a texture channel for this mesh, but with a
                    // different index => signal error
                    channel = -2;
                }
            }
        }

        if (channel == -2)
        {
            // Previously this error condition was silently swallowed; make broken
            // content visible at build time instead.
            context.Logger.LogWarning(null, mesh.Identity,
                "Mesh '{0}' uses inconsistent texture coordinate channel indices; " +
                "tangent frames were not generated.", mesh.Name);
        }
        else if (channel >= 0)
        {
            // have found a valid texture channel => compute tangent frames
            MeshHelper.CalculateTangentFrames(mesh,
                VertexChannelNames.TextureCoordinate(channel),
                VertexChannelNames.Tangent(0),
                VertexChannelNames.Binormal(0));
        }
    }

    // recurse to all children
    foreach (NodeContent child in input.Children)
    {
        GenerateTangents(child, context);
    }
}
/// <summary>
/// Adds tangent frames to the geometry of the given mesh wherever the model,
/// mesh or submesh description requests them.
/// </summary>
/// <param name="mesh">The mesh whose geometry is augmented.</param>
/// <param name="modelDescription">Optional model-level settings (may be null).</param>
/// <param name="meshDescription">Optional mesh-level settings (may be null).</param>
private void AddTangentFrames(MeshContent mesh, ModelDescription modelDescription, MeshDescription meshDescription)
{
    string texCoordName = VertexChannelNames.TextureCoordinate(0);
    string tangentName = VertexChannelNames.Tangent(0);
    string binormalName = VertexChannelNames.Binormal(0);

    bool normalsDone = false;
    for (int i = 0; i < mesh.Geometry.Count; i++)
    {
        var geometry = mesh.Geometry[i];

        // Tangent frames may be requested at the submesh, mesh or model level.
        var submeshDescription = (meshDescription != null) ? meshDescription.GetSubmeshDescription(i) : null;
        bool wantTangents =
            (submeshDescription != null && submeshDescription.GenerateTangentFrames)
            || (meshDescription != null && meshDescription.GenerateTangentFrames)
            || (modelDescription != null && modelDescription.GenerateTangentFrames);
        if (!wantTangents)
            continue;

        // Normals are a prerequisite; calculate them only once per mesh.
        if (!normalsDone)
        {
            CalculateNormals(mesh, false);
            normalsDone = true;
        }

        var channels = geometry.Vertices.Channels;
        bool needTangents = !channels.Contains(tangentName);
        bool needBinormals = !channels.Contains(binormalName);
        if (!needTangents && !needBinormals)
            continue;

        // Texture coordinates are required for calculating tangent frames.
        if (!channels.Contains(texCoordName))
        {
            _context.Logger.LogWarning(
                null, mesh.Identity,
                "Texture coordinates missing in mesh '{0}', submesh {1}. Texture coordinates are required " +
                "for calculating tangent frames.",
                mesh.Name, i);
            channels.Add<Vector2>(texCoordName, null);
        }

        // Only generate the channels that are actually missing.
        CalculateTangentFrames(
            geometry,
            texCoordName,
            needTangents ? tangentName : null,
            needBinormals ? binormalName : null);
    }
}
/// <summary>
/// Recursively processes a node from the input data tree.
/// </summary>
/// <param name="node">The node to process; children are processed recursively.</param>
private void ProcessNode(NodeContent node)
{
    // Meshes need extra preparation before their geometry is processed.
    var mesh = node as MeshContent;
    if (mesh != null)
    {
        MeshHelper.OptimizeForCache(mesh);

        // Ensure a texture coordinate channel exists (zero-filled if absent).
        var texCoord0 = VertexChannelNames.TextureCoordinate(0);
        foreach (var geometry in mesh.Geometry)
        {
            if (!geometry.Vertices.Channels.Contains(texCoord0))
                geometry.Vertices.Channels.Add<Vector2>(texCoord0, null);
        }

        // Calculate tangent frames for normal mapping, generating only the
        // channels that are not already present.
        bool hasTangents = GeometryContainsChannel(mesh.Geometry, VertexChannelNames.Tangent(0));
        bool hasBinormals = GeometryContainsChannel(mesh.Geometry, VertexChannelNames.Binormal(0));
        if (!hasTangents || !hasBinormals)
        {
            string tangentName = hasTangents ? null : VertexChannelNames.Tangent(0);
            string binormalName = hasBinormals ? null : VertexChannelNames.Binormal(0);
            MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0),
                tangentName, binormalName);
        }

        // Process all the geometry in the mesh.
        foreach (GeometryContent geometry in mesh.Geometry)
            ProcessGeometry(geometry, outputModel);
    }

    // Recurse over any child nodes.
    foreach (NodeContent child in node.Children)
        ProcessNode(child);
}
/// <summary>
/// Recursively adds calculated tangent frames (tangents only, no binormals)
/// to all meshes in the scene graph.
/// </summary>
/// <param name="input">Node to start from; call with the scene root.</param>
/// <param name="context">Processor context (passed through to children).</param>
void CalculateTangentFrames(NodeContent input, ContentProcessorContext context)
{
    var inputMesh = input as MeshContent;
    if (inputMesh != null)
    {
        // Binormal channel name is null: only tangents are generated.
        MeshHelper.CalculateTangentFrames(
            inputMesh,
            VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0),
            null);
    }

    foreach (NodeContent childNode in input.Children)
        CalculateTangentFrames(childNode, context);
}
/// <summary>
/// Recursively calls MeshHelper.CalculateTangentFrames for every MeshContent
/// object in the NodeContent scene and resolves the shader/textures for it.
/// This function could be changed to add more per-vertex data as needed.
/// </summary>
/// <param name="input">A node in the scene. Call with the root of the scene.</param>
private void PreprocessSceneHierarchy(NodeContent input)
{
    var mesh = input as MeshContent;
    if (mesh != null)
    {
        // Full tangent frame: tangents and binormals.
        MeshHelper.CalculateTangentFrames(
            mesh,
            VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0),
            VertexChannelNames.Binormal(0));

        LookUpShaderAndAddToTextures(mesh);
    }

    foreach (NodeContent child in input.Children)
        PreprocessSceneHierarchy(child);
}
/// <summary>
/// Generate tangents helper method. X files do not have tangents
/// exported, so we have to generate them ourselves.
/// </summary>
/// <param name="input">Input node; children are processed recursively.</param>
/// <param name="context">Context for logging.</param>
private void GenerateTangents(NodeContent input, ContentProcessorContext context)
{
    var mesh = input as MeshContent;
    if (mesh != null)
    {
        // Generate tangents for the mesh. We don't want binormals,
        // so null is passed in for the last parameter.
        MeshHelper.CalculateTangentFrames(
            mesh,
            VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0),
            null);
    }

    // Walk through all children.
    foreach (NodeContent child in input.Children)
        GenerateTangents(child, context);
}
/// <summary>
/// Adds tangent frames to the input mesh (when the root node is a mesh) and
/// then delegates the actual model conversion to the base ModelProcessor.
/// </summary>
/// <param name="input">Root node of the imported scene.</param>
/// <param name="context">Processor context.</param>
/// <returns>The processed model content.</returns>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    var mesh = input as MeshContent;
    if (mesh != null)
    {
        MeshHelper.CalculateTangentFrames(
            mesh,
            VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0),
            VertexChannelNames.Binormal(0));
    }

    // Use base ModelProcessor class to do the actual model processing.
    return base.Process(input, context);
}
/// <summary>
/// Optionally bakes the mesh's absolute transform into its vertex positions
/// and optionally generates tangent frames, depending on processor settings.
/// </summary>
/// <param name="mesh">The mesh to process in place.</param>
/// <param name="context">Processor context (currently unused here).</param>
protected virtual void ProcessMesh(MeshContent mesh, ContentProcessorContext context)
{
    if (bakeMeshTransform)
    {
        // Bake the node's absolute transform into every vertex position.
        Matrix vertexTransform = mesh.AbsoluteTransform;
        for (int i = 0; i < mesh.Positions.Count; i++)
            mesh.Positions[i] = Vector3.Transform(mesh.Positions[i], vertexTransform);
    }

    if (generateTangentFrame)
    {
        // Full tangent frame: tangents and binormals.
        MeshHelper.CalculateTangentFrames(
            mesh,
            VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0),
            VertexChannelNames.Binormal(0));
    }
}
/// <summary>
/// Converts a MeshContent into a ModelMeshContent, packing all geometry batches
/// into one shared vertex buffer and one shared index buffer.
/// </summary>
/// <param name="mesh">Source mesh.</param>
/// <param name="parent">Bone the resulting model mesh is attached to.</param>
/// <param name="context">Processor context used for logging.</param>
/// <returns>The assembled model mesh content.</returns>
private ModelMeshContent ProcessMesh(MeshContent mesh, ModelBoneContent parent, ContentProcessorContext context)
{
    var parts = new List<ModelMeshPartContent>();
    var vertexBuffer = new VertexBufferContent();
    var indexBuffer = new IndexCollection();
    if (GenerateTangentFrames)
    {
        context.Logger.LogMessage("Generating tangent frames.");
        foreach (GeometryContent geom in mesh.Geometry)
        {
            // Normals are a prerequisite for tangent-frame calculation.
            if (!geom.Vertices.Channels.Contains(VertexChannelNames.Normal(0)))
            {
                MeshHelper.CalculateNormals(geom, true);
            }
            // Only calculate when tangents or binormals are actually missing.
            if (!geom.Vertices.Channels.Contains(VertexChannelNames.Tangent(0)) ||
                !geom.Vertices.Channels.Contains(VertexChannelNames.Binormal(0)))
            {
                MeshHelper.CalculateTangentFrames(geom, VertexChannelNames.TextureCoordinate(0),
                    VertexChannelNames.Tangent(0), VertexChannelNames.Binormal(0));
            }
        }
    }
    var startVertex = 0;
    foreach (var geometry in mesh.Geometry)
    {
        var vertices = geometry.Vertices;
        var vertexCount = vertices.VertexCount;
        ModelMeshPartContent partContent;
        if (vertexCount == 0)
        {
            // Empty geometry still produces a part (material is attached below).
            partContent = new ModelMeshPartContent();
        }
        else
        {
            // Append this geometry's vertex data at the end of the shared buffer;
            // startVertex/startIndex record where this part begins.
            var geomBuffer = geometry.Vertices.CreateVertexBuffer();
            vertexBuffer.Write(vertexBuffer.VertexData.Length, 1, geomBuffer.VertexData);
            var startIndex = indexBuffer.Count;
            indexBuffer.AddRange(geometry.Indices);
            // Indices.Count / 3 — primitive count assumes a triangle list.
            partContent = new ModelMeshPartContent(vertexBuffer, indexBuffer, startVertex,
                vertexCount, startIndex, geometry.Indices.Count / 3);

            // Geoms are supposed to all have the same decl, so just steal one of these
            vertexBuffer.VertexDeclaration = geomBuffer.VertexDeclaration;
            startVertex += vertexCount;
        }
        partContent.Material = geometry.Material;
        parts.Add(partContent);
    }
    // Bounding sphere is left default (empty) when the mesh has no positions.
    var bounds = new BoundingSphere();
    if (mesh.Positions.Count > 0)
    {
        bounds = BoundingSphere.CreateFromPoints(mesh.Positions);
    }
    return (new ModelMeshContent(mesh.Name, mesh, parent, bounds, parts));
}
} // Process

#endregion

#region Process Vertex Channel

/// <summary>
/// Processes geometry content vertex channels at the specified index,
/// compressing or removing channels to shrink the per-vertex footprint.
/// </summary>
/// <param name="geometry">Geometry whose channel is processed.</param>
/// <param name="vertexChannelIndex">Index of the channel in the collection.</param>
/// <param name="context">Processor context.</param>
protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
{
    // Compressed Vertex Data
    VertexChannelCollection channels = geometry.Vertices.Channels;
    string name = channels[vertexChannelIndex].Name;

    if (name == VertexChannelNames.Normal())
    {
        channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
        return;
    }
    if (name == VertexChannelNames.TextureCoordinate(0))
    {
        // If the resource has texture coordinates outside the range [-1, 1] the values will be clamped.
        channels.ConvertChannelContent<HalfVector2>(vertexChannelIndex);
        return;
    }
    // Secondary texture coordinate sets (1..7) are not used; drop them.
    // (Replaces seven copy-pasted else-if branches with identical behavior.)
    for (int usageIndex = 1; usageIndex <= 7; usageIndex++)
    {
        if (name == VertexChannelNames.TextureCoordinate(usageIndex))
        {
            channels.Remove(name);
            return;
        }
    }
    if (name == VertexChannelNames.Color(0))
    {
        channels.Remove(VertexChannelNames.Color(0));
        return;
    }
    if (name == VertexChannelNames.Tangent(0))
    {
        channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
        return;
    }
    if (name == VertexChannelNames.Binormal(0))
    {
        // No need to get rid of the binormal data because the model will use more than 32 bytes per vertex.
        // We can actually try to align the data to 64 bytes per vertex.
        channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
        return;
    }

    // Blend indices, blend weights and everything else.
    // Don't use "BlendWeight0" as a name, nor weights0. Both names don't work.
    base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
    channels.ConvertChannelContent<Byte4>("BlendIndices0");
    channels.ConvertChannelContent<NormalizedShort4>(VertexChannelNames.EncodeName(VertexElementUsage.BlendWeight, 0));
} // ProcessVertexChannel
/// <summary>
/// Generates tangents and binormals for the model data at the given node,
/// then recurses into all children.
/// </summary>
/// <param name="content">The node to process.</param>
private void GenerateNTBData(NodeContent content)
{
    var mesh = content as MeshContent;
    if (mesh != null)
    {
        MeshHelper.CalculateTangentFrames(
            mesh,
            VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0),
            VertexChannelNames.Binormal(0));
    }

    foreach (NodeContent child in content.Children)
        GenerateNTBData(child);
}
/// <summary>
/// Builds a MeshContent from an Assimp mesh: registers the needed vertex
/// channels, adds positions, then fills per-vertex channel data while emitting
/// triangle indices.
/// </summary>
/// <param name="aiMesh">The Assimp mesh to convert.</param>
/// <returns>The finished MeshContent.</returns>
private MeshContent ExtractMesh(aiMesh aiMesh)
{
    // Prefer the mesh's own name; fall back to the source file name.
    if (!String.IsNullOrEmpty(aiMesh.mName.Data))
    {
        log("modelname " + aiMesh.mName.Data);
        meshBuilder = MeshBuilder.StartMesh(aiMesh.mName.Data);
    }
    else
    {
        meshBuilder = MeshBuilder.StartMesh(Path.GetFileNameWithoutExtension(filename));
    }
    // Positions are mandatory. (NOTE(review): message typo "MOdel" is preserved —
    // it is a runtime string.)
    if (!aiMesh.HasPositions())
    {
        throw new Exception("MOdel does not have Position");
    }
    // Add additional vertex channels for texture coordinates and normals.
    // NOTE(review): colors are only registered when texture coordinates are
    // absent (else-if) — a mesh with both loses its colors; confirm intended.
    if (aiMesh.HasTextureCoords(0))
    {
        textureCoordinateDataIndex = meshBuilder.CreateVertexChannel<Vector2>(VertexChannelNames.TextureCoordinate(0));
    }
    else if (aiMesh.HasVertexColors(0))
    {
        colorCoordinateDataIndex = meshBuilder.CreateVertexChannel<Vector4>(VertexChannelNames.Color(0));
    }
    if (aiMesh.HasNormals())
    {
        normalDataIndex = meshBuilder.CreateVertexChannel<Vector3>(VertexChannelNames.Normal());
    }
    if (aiMesh.HasTangentsAndBitangents())
    {
        tangentDataIndex = meshBuilder.CreateVertexChannel<Vector3>(VertexChannelNames.Tangent(0));
        binormalDataIndex = meshBuilder.CreateVertexChannel<Vector3>(VertexChannelNames.Binormal(0));
    }
    if (aiMesh.HasBones())
    {
        boneDataIndex = meshBuilder.CreateVertexChannel<BoneWeightCollection>(VertexChannelNames.Weights(0));
    }
    var numFaces = (int)aiMesh.mNumFaces;
    // NOTE(review): numVertices, aiPositions, aiNormals, aiTextureCoords and
    // dxIndices below are assigned but never read — candidates for removal.
    var numVertices = (int)aiMesh.mNumVertices;
    var aiPositions = aiMesh.mVertices;
    var aiNormals = aiMesh.mNormals;
    var aiTextureCoordsAll = aiMesh.mTextureCoords;
    var aiTextureCoords = (aiTextureCoordsAll != null) ? aiTextureCoordsAll[0] : null;
    // Register every vertex position with the builder.
    for (int j = 0; j < aiMesh.mNumVertices; j++)
    {
        meshBuilder.CreatePosition(aiMesh.mVertices[j].x, aiMesh.mVertices[j].y, aiMesh.mVertices[j].z);
    }
    meshBuilder.SetMaterial(GetMaterial(aiMesh));
    var aiFaces = aiMesh.mFaces;
    var dxIndices = new uint[numFaces * 3];
    // Emit triangles: for each face corner, set the channel data for the
    // referenced vertex, then add the triangle vertex (faces assumed triangulated).
    for (int k = 0; k < numFaces; ++k)
    {
        var aiFace = aiFaces[k];
        var aiIndices = aiFace.mIndices;
        for (int j = 0; j < 3; ++j)
        {
            int index = (int)aiIndices[j];
            if (aiMesh.HasTextureCoords(0))
            {
                meshBuilder.SetVertexChannelData(textureCoordinateDataIndex,
                    new Vector2(aiMesh.mTextureCoords[0][index].x, aiMesh.mTextureCoords[0][index].y));
            }
            else if (aiMesh.HasVertexColors(0))
            {
                meshBuilder.SetVertexChannelData(colorCoordinateDataIndex,
                    new Vector4(aiMesh.mColors[0][index].r, aiMesh.mColors[0][index].g,
                        aiMesh.mColors[0][index].b, aiMesh.mColors[0][index].a));
            }
            if (aiMesh.HasNormals())
            {
                meshBuilder.SetVertexChannelData(normalDataIndex,
                    new Vector3(aiMesh.mNormals[index].x, aiMesh.mNormals[index].y, aiMesh.mNormals[index].z));
            }
            if (aiMesh.HasTangentsAndBitangents())
            {
                meshBuilder.SetVertexChannelData(tangentDataIndex,
                    new Vector3(aiMesh.mTangents[index].x, aiMesh.mTangents[index].y, aiMesh.mTangents[index].z));
                meshBuilder.SetVertexChannelData(binormalDataIndex,
                    new Vector3(aiMesh.mBitangents[index].x, aiMesh.mBitangents[index].y, aiMesh.mBitangents[index].z));
            }
            if (aiMesh.HasBones())
            {
                // Collect this vertex's bone weights from the wbone lookup
                // (presumably vertex index -> bone name/weight map — verify against caller).
                BoneWeightCollection BoneWeightCollection = new BoneWeightCollection();
                if (wbone.ContainsKey(index))
                {
                    foreach (var item in wbone[index])
                    {
                        BoneWeightCollection.Add(new BoneWeight(item.Key, item.Value));
                    }
                }
                meshBuilder.SetVertexChannelData(boneDataIndex, BoneWeightCollection);
            }
            meshBuilder.AddTriangleVertex(index);
        }
    }
    MeshContent meshContent = meshBuilder.FinishMesh();
    return (meshContent);
}
/// <summary>
/// Recursively calls MeshHelper.CalculateTangentFrames for every MeshContent
/// object in the NodeContent scene, and fills in placeholder textures for any
/// missing color/normal/specular/glow map slots. This function could be
/// changed to add more per vertex data as needed.
/// </summary>
/// <param name="input">A node in the scene. The function should be called
/// with the root of the scene.</param>
/// <param name="context">Processor context used for logging.</param>
/// <param name="inputName">Original asset name (passed through to recursion).</param>
private void PreprocessSceneHierarchy(NodeContent input, ContentProcessorContext context, string inputName)
{
    MeshContent mesh = input as MeshContent;
    if (mesh != null)
    {
        MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0), VertexChannelNames.Binormal(0));
        foreach (GeometryContent geometry in mesh.Geometry)
        {
            // Color map: add a placeholder when missing; otherwise, for ship
            // color maps ("ship*_c.tga"), derive and insert the companion maps.
            if (false == geometry.Material.Textures.ContainsKey(TextureMapKey))
            {
                geometry.Material.Textures.Add(TextureMapKey, new
                    ExternalReference<TextureContent>("null_color.tga"));
            }
            else
            {
                context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[TextureMapKey].Filename);
                string fileName = Path.GetFileName(geometry.Material.Textures[TextureMapKey].Filename);
                if (fileName != null && fileName.StartsWith("ship") && fileName.EndsWith("_c.tga"))
                {
                    // Strip the "_c.tga" suffix to get the base name shared by
                    // the normal/specular/glow variants.
                    InsertMissedMapTextures(geometry.Material.Textures,
                        fileName.Substring(0, fileName.Length - "_c.tga".Length), context);
                }
            }
            // Normal map placeholder.
            if (false == geometry.Material.Textures.ContainsKey(NormalMapKey))
            {
                geometry.Material.Textures.Add(NormalMapKey, new
                    ExternalReference<TextureContent>("null_normal.tga"));
            }
            else
            {
                context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[NormalMapKey].Filename);
            }
            // Specular map placeholder.
            if (false == geometry.Material.Textures.ContainsKey(SpecularMapKey))
            {
                geometry.Material.Textures.Add(SpecularMapKey, new
                    ExternalReference<TextureContent>("null_specular.tga"));
            }
            else
            {
                context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[SpecularMapKey].Filename);
            }
            // Glow map placeholder.
            if (false == geometry.Material.Textures.ContainsKey(GlowMapKey))
            {
                geometry.Material.Textures.Add(GlowMapKey, new
                    ExternalReference<TextureContent>("null_glow.tga"));
            }
            else
            {
                context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[GlowMapKey].Filename);
            }
        }
    }
    foreach (NodeContent child in input.Children)
    {
        PreprocessSceneHierarchy(child, context, inputName);
    }
}
} // Process

#endregion

#region Process Vertex Channel

/// <summary>
/// Processes geometry content vertex channels at the specified index.
/// Simple position/normal/UV layouts are passed through uncompressed; all
/// other layouts have their channels compressed or stripped.
/// </summary>
/// <param name="geometry">Geometry whose channel is processed.</param>
/// <param name="vertexChannelIndex">Index of the channel in the collection.</param>
/// <param name="context">Processor context.</param>
protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
{
    VertexChannelCollection channels = geometry.Vertices.Channels;
    // If the model has only position and normals a UV channel is added.
    // http://xnafinalengine.codeplex.com/wikipage?title=Compressed%20Vertex%20Data
    // NOTE(review): this method runs once per channel, and the channel count
    // changes as channels are added/removed — the Count checks below depend on
    // when in that sequence this call happens; confirm intended.
    if (channels.Count == 1 && channels.Contains(VertexChannelNames.Normal()))
    {
        channels.Add<Vector2>(VertexChannelNames.TextureCoordinate(0), null);
    }
    // If the model has position, normal and UV then the data is packed on 32 bytes aligned vertex data.
    if (channels.Count == 2 && channels.Contains(VertexChannelNames.Normal()) && channels.Contains(VertexChannelNames.TextureCoordinate(0)))
    {
        // No compressed Vertex Data
        base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
    }
    else // If not then the data is compressed.
    {
        string name = channels[vertexChannelIndex].Name;
        if (name == VertexChannelNames.Normal())
        {
            channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
        }
        else if (name == VertexChannelNames.TextureCoordinate(0))
        {
            // Clamp values.
            /*for (int i = 0; i < channels[vertexChannelIndex].Count; i++) { Vector2 uv = (Vector2)channels[vertexChannelIndex][i]; if (uv.X < 0) uv.X *= -1; if (uv.Y < 0) uv.Y *= -1; Vector2 uvCampled = new Vector2(uv.X - (float)Math.Truncate(uv.X), uv.Y - (float)Math.Truncate(uv.Y)); channels[vertexChannelIndex][i] = uvCampled; } // If the resource has texture coordinates outside the range [-1, 1] the values will be clamped. channels.ConvertChannelContent<NormalizedShort2>(vertexChannelIndex);*/
            // Sometimes you can't just clamp values, because the distance between vertices surpass 1 uv unit.
            // And given that I am not removing the binormals I won't normalize the UVs.
            channels.ConvertChannelContent<HalfVector2>(vertexChannelIndex);
        }
        // Secondary texture coordinate sets and vertex colors are dropped.
        else if (name == VertexChannelNames.TextureCoordinate(1))
            channels.Remove(VertexChannelNames.TextureCoordinate(1));
        else if (name == VertexChannelNames.TextureCoordinate(2))
            channels.Remove(VertexChannelNames.TextureCoordinate(2));
        else if (name == VertexChannelNames.TextureCoordinate(3))
            channels.Remove(VertexChannelNames.TextureCoordinate(3));
        else if (name == VertexChannelNames.TextureCoordinate(4))
            channels.Remove(VertexChannelNames.TextureCoordinate(4));
        else if (name == VertexChannelNames.TextureCoordinate(5))
            channels.Remove(VertexChannelNames.TextureCoordinate(5));
        else if (name == VertexChannelNames.TextureCoordinate(6))
            channels.Remove(VertexChannelNames.TextureCoordinate(6));
        else if (name == VertexChannelNames.TextureCoordinate(7))
            channels.Remove(VertexChannelNames.TextureCoordinate(7));
        else if (name == VertexChannelNames.Color(0))
            channels.Remove(VertexChannelNames.Color(0));
        else if (name == VertexChannelNames.Tangent(0))
        {
            channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
        }
        else if (name == VertexChannelNames.Binormal(0))
        {
            channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
            // If the binormal is removed then the position, the normal,
            // the tangent and one texture coordinate can be fetched in one single block of 32 bytes.
            // Still, it is more fast to just pass the value. At least on the test I made.
            //channels.Remove(VertexChannelNames.Binormal(0));
        }
        else
        {
            // Blend indices, blend weights and everything else.
            base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
        }
    }
} // ProcessVertexChannel
/// <summary>
/// Flattens a MeshContent into runtime MeshData: bakes the absolute transform,
/// optionally generates tangent frames (when a normal map is referenced),
/// processes vertex channels and extracts per-geometry material data.
/// </summary>
/// <param name="mesh">Source mesh (modified in place).</param>
/// <param name="context">Processor context.</param>
/// <param name="rootPath">Root path used to resolve texture references.</param>
/// <param name="processedContent">Cache of already-built external content.</param>
/// <param name="skeletonData">Skeleton attached to each resulting geometry.</param>
/// <param name="animations">Animations attached to each resulting geometry.</param>
/// <param name="geometryCount">Running geometry id counter, incremented per geometry.</param>
/// <returns>The assembled MeshData.</returns>
MeshData ProcessMesh(MeshContent mesh, ContentProcessorContext context, string rootPath, Dictionary<string, object> processedContent, SkeletonData skeletonData, AnimationData[] animations, ref int geometryCount)
{
    // Bake the node's absolute transform into the mesh data.
    MeshHelper.TransformScene(mesh, mesh.AbsoluteTransform);
    // Material texture keys that are treated as normal maps.
    string[] normalMapNames = new string[] { "Bump0", "Bump", "NormalMap", "Normalmap", "Normals", "BumpMap" };
    MeshHelper.OptimizeForCache(mesh);
    // Tangents are only needed when some geometry references a normal map.
    // NOTE(review): generateTangents appears to be a field and is never reset
    // here, so once set it stays true for subsequent meshes — confirm intended.
    foreach (GeometryContent geom in mesh.Geometry)
    {
        if (geom.Material != null)
        {
            string map = MaterialTexture(geom.Material, rootPath, null, null, normalMapNames);
            if (map != null && map.Length > 0)
            {
                generateTangents = true;
            }
        }
    }
    if (generateTangents)
    {
        MeshHelper.CalculateNormals(mesh, false);
        bool hasNoTangent = !GeometryContainsChannel(mesh, VertexChannelNames.Tangent(0));
        bool hasNoBinorm = !GeometryContainsChannel(mesh, VertexChannelNames.Binormal(0));
        if (hasNoTangent || hasNoBinorm)
        {
            // Only generate the channels that are actually missing.
            string tangentChannelName = hasNoTangent ? VertexChannelNames.Tangent(0) : null;
            string binormalChannelName = hasNoBinorm ?
                VertexChannelNames.Binormal(0) : null;
            MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0),
                tangentChannelName, binormalChannelName);
        }
    }
    if (swapWinding)
    {
        MeshHelper.SwapWindingOrder(mesh);
    }
    List<GeometryData> geometry = new List<GeometryData>();
    BoneContent skeleton = MeshHelper.FindSkeleton(mesh);
    Dictionary<string, int> boneIndices = null;
    if (skeleton != null)
    {
        boneIndices = FlattenSkeleton(skeleton);
    }
    foreach (GeometryContent geom in mesh.Geometry)
    {
        this.ProcessVertexChannels(geom, context, rootPath, boneIndices, null);
        MeshHelper.MergeDuplicateVertices(geom);
        // Pull material parameters with defaults for anything not authored.
        MaterialData material = new MaterialData(
            MaterialValue<float>("Alpha", geom.Material, 1),
            MaterialValue<float>("SpecularPower", geom.Material, 24),
            MaterialValue<Vector3>("DiffuseColor", geom.Material, Vector3.One),
            MaterialValue<Vector3>("EmissiveColor", geom.Material, Vector3.Zero),
            MaterialValue<Vector3>("SpecularColor", geom.Material, Vector3.Zero),
            MaterialTexture(geom.Material, rootPath, context, processedContent, "Texture"),
            MaterialTexture(geom.Material, rootPath, context, processedContent, normalMapNames),
            // Vertex colors only count when the channel actually exists.
            MaterialValue<bool>("VertexColorEnabled", geom.Material, true) && geom.Vertices.Channels.Contains(VertexChannelNames.Color(0)));
        VertexBufferContent vb;
        VertexElement[] ve;
        geom.Vertices.CreateVertexBuffer(out vb, out ve, context.TargetPlatform);
        int[] indices = new int[geom.Indices.Count];
        geom.Indices.CopyTo(indices, 0);
        geometry.Add(new GeometryData(geometryCount++, geom.Name, ve, vb.VertexData, indices,
            material, skeletonData, animations, context.TargetPlatform == TargetPlatform.Xbox360));
    }
    return (new MeshData(mesh.Name, geometry.ToArray(), animations));
}
/// <summary>
/// Processes a skinned model: validates the mesh, generates tangent frames,
/// flattens transforms against the skeleton, extracts bind pose / hierarchy
/// data and stores the resulting SkinningData in the model's Tag dictionary.
/// </summary>
/// <param name="input">Root node of the imported scene.</param>
/// <param name="context">Processor context.</param>
/// <returns>The processed model content with skinning data attached.</returns>
/// <exception cref="InvalidContentException">
/// Thrown when no skeleton is found or the bone count exceeds SkinnedEffect.MaxBones.
/// </exception>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    ValidateMesh(input, context, null);
    // Generate Tangents/Normals for the shader.
    // NOTE(review): only the root node is checked for being a mesh here; child
    // meshes are not given tangent frames — confirm intended.
    MeshContent mesh = input as MeshContent;
    if (mesh != null)
    {
        MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0),
            VertexChannelNames.Tangent(0), VertexChannelNames.Binormal(0));
    }
    // Find the skeleton.
    BoneContent skeleton = MeshHelper.FindSkeleton(input);
    if (skeleton == null)
    {
        throw new InvalidContentException("Input skeleton not found.");
    }
    // We don't want to have to worry about different parts of the model being
    // in different local coordinate systems, so let's just bake everything.
    FlattenTransforms(input, skeleton);
    // Read the bind pose and skeleton hierarchy data.
    IList<BoneContent> bones = MeshHelper.FlattenSkeleton(skeleton);
    if (bones.Count > SkinnedEffect.MaxBones)
    {
        throw new InvalidContentException(string.Format(
            "Skeleton has {0} bones, but the maximum supported is {1}.",
            bones.Count, SkinnedEffect.MaxBones));
    }
    List<Matrix> bindPose = new List<Matrix>();
    List<Matrix> inverseBindPose = new List<Matrix>();
    List<int> skeletonHierarchy = new List<int>();
    foreach (BoneContent bone in bones)
    {
        bindPose.Add(bone.Transform);
        inverseBindPose.Add(Matrix.Invert(bone.AbsoluteTransform));
        // Parent index within the flattened list; -1 for the root (IndexOf(null)).
        skeletonHierarchy.Add(bones.IndexOf(bone.Parent as BoneContent));
    }
    // Chain to the base BShiftModelProcessor class so it can convert the model data.
    ModelContent model = base.Process(input, context);
    // Convert animation data to our runtime format.
    Dictionary<string, AnimationClip> animationClips;
    animationClips = ProcessAnimations(skeleton.Animations, bones);
    // NOTE(review): assumes the base processor set model.Tag to a
    // Dictionary<string, object> — verify against base class behavior.
    ((Dictionary<string, object>)model.Tag).Add("SkinningData",
        new SkinningData(animationClips, bindPose, inverseBindPose, skeletonHierarchy));
    return (model);
}
/// <summary>
/// Imports an H3D file into a NodeContent tree: loads textures and materials,
/// converts meshes (with skinning weights) into geometry, imports the bone
/// hierarchy and attaches skeletal/material animations. Sibling
/// "@Textures"/"@Animations" files are merged in when present.
/// </summary>
/// <param name="filename">Path of the H3D file to import.</param>
/// <param name="context">Importer context used for logging.</param>
/// <returns>The root node of the imported scene.</returns>
public override NodeContent Import(string filename, ContentImporterContext context)
{
    context.Logger.LogMessage("Importing H3D file: {0}", filename);
    _identity = new ContentIdentity(filename, GetType().Name);
    _rootNode = new NodeContent() { Identity = _identity, Name = "RootNode" };
    var scene = FormatIdentifier.IdentifyAndOpen(filename);
    // Only the first model of the scene is imported.
    var model = scene.Models[0];
    // When the file itself carries no textures, look for a sibling
    // "<name>@Textures.<ext>" file and merge it in.
    if (!scene.Textures.Any())
    {
        var path = Path.Combine(Path.GetDirectoryName(filename),
            $"{Path.GetFileNameWithoutExtension(filename)}@Textures{Path.GetExtension(filename)}");
        if (File.Exists(path))
        {
            context.Logger.LogMessage($"Found texture file {path}. Loading data...");
            scene.Merge(FormatIdentifier.IdentifyAndOpen(path, model.Skeleton));
        }
        else
        {
            context.Logger.LogMessage($"Couldn't find texture file {path}!");
        }
    }
    // Textures: convert each scene texture into a Texture2DContent keyed by name.
    var textures = new Dictionary<string, Texture2DContent>();
    foreach (var texture in scene.Textures)
    {
        var bitmapContent = new PixelBitmapContent<Color>(texture.Width, texture.Height)
        {
            Identity = _identity,
            Name = texture.Name
        };
        bitmapContent.SetPixelData(texture.ToRGBA());
        var textureContent = new Texture2DContent()
        {
            Identity = _identity,
            Name = texture.Name
        };
        textureContent.Faces[0].Add(bitmapContent);
        textures.Add(textureContent.Name, textureContent);
    }
    // Materials: build an H3DMaterialContent (including generated shader code)
    // for every material of the model.
    var materials = new Dictionary<string, H3DMaterialContent>();
    foreach (var material in model.Materials)
    {
#if DEBUG
        // Debug-only shader dumps; results are not used further.
        var hlslCode = new HLSLShaderGenerator(material.MaterialParams) { BoneCount = model.Skeleton.Count }.GetShader();
        var glslCode = new GLSLFragmentShaderGenerator(material.MaterialParams).GetFragShader();
#endif
        var materialContent = new H3DMaterialContent()
        {
            Identity = _identity,
            Name = material.Name,
            Effect = new EffectContent
            {
                Identity = _identity,
                Name = "H3DEffect",
                EffectCode = new HLSLShaderGenerator(material.MaterialParams) { BoneCount = model.Skeleton.Count }.GetShader()
            },
            Material = material.Name,
            FaceCulling = (H3DFaceCulling?)material.MaterialParams.FaceCulling,
            EmissionColor = material.MaterialParams.EmissionColor.ToXNA(),
            AmbientColor = material.MaterialParams.AmbientColor.ToXNA(),
            DiffuseColor = material.MaterialParams.DiffuseColor.ToXNA(),
            Specular0Color = material.MaterialParams.Specular0Color.ToXNA(),
            Specular1Color = material.MaterialParams.Specular1Color.ToXNA(),
            Constant0Color = material.MaterialParams.Constant0Color.ToXNA(),
            Constant1Color = material.MaterialParams.Constant1Color.ToXNA(),
            Constant2Color = material.MaterialParams.Constant2Color.ToXNA(),
            Constant3Color = material.MaterialParams.Constant3Color.ToXNA(),
            Constant4Color = material.MaterialParams.Constant4Color.ToXNA(),
            Constant5Color = material.MaterialParams.Constant5Color.ToXNA(),
            BlendColor = material.MaterialParams.BlendColor.ToXNA(),
            DepthBufferRead = material.MaterialParams.DepthBufferRead,
            DepthBufferWrite = material.MaterialParams.DepthBufferWrite,
            StencilBufferRead = material.MaterialParams.StencilBufferRead,
            StencilBufferWrite = material.MaterialParams.StencilBufferWrite,
        };
        // NOTE(review): the array is sized by the number of enabled textures but
        // filled at fixed indices 0..2 — if a lower slot is disabled while a
        // higher one is enabled, the assignment below indexes past the end.
        var texCount = 0;
        if (material.EnabledTextures[0]) { texCount++; }
        if (material.EnabledTextures[1]) { texCount++; }
        if (material.EnabledTextures[2]) { texCount++; }
        materialContent.TextureList = new Texture2DContent[texCount];
        if (material.EnabledTextures[0]) { materialContent.TextureList[0] = textures[material.Texture0Name]; }
        if (material.EnabledTextures[1]) { materialContent.TextureList[1] = textures[material.Texture1Name]; }
        if (material.EnabledTextures[2]) { materialContent.TextureList[2] = textures[material.Texture2Name]; }
        materialContent.TextureSamplerSettings = material.TextureMappers.Select(tm => new TextureSamplerSettings()
        {
            WrapU = tm.WrapU.ToXNAWrap(),
            WrapV = tm.WrapV.ToXNAWrap(),
            MagFilter = (TextureSamplerSettings.TextureMagFilter)tm.MagFilter,
            MinFilter = (TextureSamplerSettings.TextureMinFilter)tm.MinFilter
        }).ToArray();
        materials.Add(material.Name, materialContent);
    }
    // Geometry: convert every non-silhouette mesh into a MeshContent with one
    // GeometryContent carrying positions, vertex channels and indices.
    var meshes = new List<MeshContent>();
    for (var i = 0; i < model.Meshes.Count; i++)
    {
        var modelMesh = model.Meshes[i];
        if (modelMesh.Type == H3DMeshType.Silhouette)
        {
            continue;
        }
        var mesh = new MeshContent()
        {
            Identity = _identity,
            Name = $"{model.Materials[modelMesh.MaterialIndex].Name}_node{i}",
        };
        var geometry = new GeometryContent
        {
            Identity = _identity,
            Material = materials[model.Materials[modelMesh.MaterialIndex].Name]
        };
        var vertices = GetWorldSpaceVertices(model.Skeleton, modelMesh);
        var baseVertex = mesh.Positions.Count;
        foreach (var vertex in vertices)
        {
            mesh.Positions.Add(vertex.Position.ToVector3());
        }
        geometry.Vertices.AddRange(Enumerable.Range(baseVertex, vertices.Length));
        // Copy the per-vertex attributes the content pipeline understands;
        // bone indices/weights are handled separately below.
        foreach (var attribute in modelMesh.Attributes)
        {
            if (attribute.Name >= PICAAttributeName.BoneIndex)
            {
                continue;
            }
            switch (attribute.Name)
            {
                case PICAAttributeName.Position: break; // Already added
                case PICAAttributeName.Normal:
                    geometry.Vertices.Channels.Add(VertexChannelNames.Normal(0),
                        vertices.Select(vertex => vertex.Normal.ToVector3()));
                    break;
                case PICAAttributeName.Tangent:
                    geometry.Vertices.Channels.Add(VertexChannelNames.Tangent(0),
                        vertices.Select(vertex => vertex.Tangent.ToVector3()));
                    break;
                case PICAAttributeName.Color:
                    geometry.Vertices.Channels.Add(VertexChannelNames.Color(0),
                        vertices.Select(vertex => vertex.Color.ToColor()));
                    break;
                case PICAAttributeName.TexCoord0:
                    geometry.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(0),
                        vertices.Select(vertex => vertex.TexCoord0.ToVector2().ToUV()));
                    break;
                case PICAAttributeName.TexCoord1:
                    geometry.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(1),
                        vertices.Select(vertex => vertex.TexCoord1.ToVector2().ToUV()));
                    break;
                case PICAAttributeName.TexCoord2:
                    geometry.Vertices.Channels.Add(VertexChannelNames.TextureCoordinate(2),
                        vertices.Select(vertex => vertex.TexCoord2.ToVector2().ToUV()));
                    break;
            }
        }
        // Build skinning weights per submesh. Submesh-local bone indices are
        // remapped through BoneIndices to skeleton indices.
        var vertexOffset = 0;
        var xnaWeights = new List<BoneWeightCollection>();
        foreach (var modelSubMesh in modelMesh.SubMeshes)
        {
            geometry.Indices.AddRange(modelSubMesh.Indices.Select(index => (int)index));
            var vertexCount = modelSubMesh.MaxIndex + 1 - vertexOffset;
            var subMeshVertices = vertices.Skip(vertexOffset).Take(vertexCount).ToList();
            if (modelSubMesh.Skinning == H3DSubMeshSkinning.Smooth)
            {
                // Smooth skinning: up to 4 weighted bones per vertex; a zero
                // weight terminates the list.
                foreach (var vertex in subMeshVertices)
                {
                    var list = new BoneWeightCollection();
                    for (var index = 0; index < 4; index++)
                    {
                        var bIndex = vertex.Indices[index];
                        var weight = vertex.Weights[index];
                        if (weight == 0)
                        {
                            break;
                        }
                        if (bIndex < modelSubMesh.BoneIndicesCount && bIndex > -1)
                        {
                            bIndex = modelSubMesh.BoneIndices[bIndex];
                        }
                        else
                        {
                            bIndex = 0;
                        }
                        list.Add(new BoneWeight(model.Skeleton[bIndex].Name, weight));
                    }
                    xnaWeights.Add(list);
                }
            }
            else
            {
                // Rigid skinning: single bone per vertex.
                // NOTE(review): this branch iterates ALL mesh vertices (not
                // subMeshVertices) and adds weight 0 instead of 1 — both look
                // suspicious; confirm against the runtime's expectations.
                foreach (var vertex in vertices)
                {
                    var bIndex = vertex.Indices[0];
                    if (bIndex < modelSubMesh.BoneIndices.Length && bIndex > -1)
                    {
                        bIndex = modelSubMesh.BoneIndices[bIndex];
                    }
                    else
                    {
                        bIndex = 0;
                    }
                    xnaWeights.Add(new BoneWeightCollection() { new BoneWeight(model.Skeleton[bIndex].Name, 0) });
                }
            }
            vertexOffset += vertexCount;
        }
        geometry.Vertices.Channels.Add(VertexChannelNames.Weights(0), xnaWeights);
        mesh.Geometry.Add(geometry);
        meshes.Add(mesh);
    }
    foreach (var mesh in meshes)
    {
        _rootNode.Children.Add(mesh);
    }
    // Bones and animations.
    var rootBone = ImportBones(model);
    _rootNode.Children.Add(rootBone);
    // When the file carries no skeletal animations, look for a sibling
    // "<name>@Animations.<ext>" file and merge it in.
    if (!scene.SkeletalAnimations.Any())
    {
        var path = Path.Combine(Path.GetDirectoryName(filename),
            $"{Path.GetFileNameWithoutExtension(filename)}@Animations{Path.GetExtension(filename)}");
        if (File.Exists(path))
        {
            context.Logger.LogMessage($"Found animation file {path}. Loading data...");
            scene.Merge(FormatIdentifier.IdentifyAndOpen(path, model.Skeleton));
        }
        else
        {
            context.Logger.LogMessage($"Couldn't find animation file {path}!");
        }
    }
    foreach (var animation in ImportSkeletalAnimations(scene))
    {
        rootBone.Animations.Add(animation.Name, animation);
    }
    foreach (var animation in ImportMaterialAnimations(scene))
    {
        _rootNode.Children.Add(animation);
    }
    return (_rootNode);
}