Example #1
        public static string GetXNAName(VertexAttribute attr)
        {
            switch (attr.usage)
            {
            case COLOR:
                return(VertexChannelNames.Color(0));

            case NORMAL:
                return(VertexChannelNames.Normal());

            case TEX_COORD:
                return(VertexChannelNames.TextureCoordinate(attr.attrIndex));

            case BONE_WEIGHT:
                return(VertexChannelNames.Weights(attr.attrIndex));

            case TANGENT:
                return(VertexChannelNames.Tangent(0));

            case BINORMAL:
                return(VertexChannelNames.Binormal(0));
            }

            return(null);
        }
        // Generates the tangent frames for all meshes. Note: a texture coordinate channel is
        // needed for the tangent-frame generation. Unfortunately the arena model doesn't seem to
        // be consistent in which texture channel it uses, so we have to find the correct channel
        // by analysing the channel indices used by all geometry batches in each mesh.
        private void GenerateTangents(NodeContent input, ContentProcessorContext context)
        {
            MeshContent mesh    = input as MeshContent;
            int         channel = -1;

            // find the index of the texture channel (sometimes 0, sometimes 1)
            if (mesh != null)
            {
                // loop through all geometry batches
                foreach (GeometryContent geometryBatch in mesh.Geometry)
                {
                    // check the index of the texture channel
                    foreach (VertexChannel vertexChannel in geometryBatch.Vertices.Channels)
                    {
                        // is this a texture channel
                        if (vertexChannel.Name.Contains("Texture"))
                        {
                            // extract the channel index (last character, converted to an int)
                            char c          = vertexChannel.Name[vertexChannel.Name.Length - 1];
                            int  curChannel = (int)(c - '0');


                            if (channel == -1)
                            {
                                // first time we see a texture channel for this mesh: store index
                                channel = curChannel;
                            }
                            else if (channel != curChannel)
                            {
                                // we have already seen a texture channel for this mesh, but with a
                                // different index => signal error
                                channel = -2;
                            }
                        }
                    }
                }

                // have we found a valid texture channel?
                if (channel >= 0)
                {
                    // compute tangent frames
                    MeshHelper.CalculateTangentFrames(mesh,
                                                      VertexChannelNames.TextureCoordinate(channel),
                                                      VertexChannelNames.Tangent(0),
                                                      VertexChannelNames.Binormal(0));
                }
            }

            // recurse to all children
            foreach (NodeContent child in input.Children)
            {
                GenerateTangents(child, context);
            }
        }
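Parsing the trailing character of the channel name only works for single-digit usage indices. VertexChannelNames.DecodeUsageIndex can recover the index directly; a minimal sketch of the same channel search using it (the helper name below is illustrative, not part of the original processor):

        // A minimal sketch, assuming VertexChannelNames.DecodeUsageIndex is available in the
        // targeted pipeline version; it replaces the "last character" parsing shown above.
        private static int FindTextureChannelIndex(MeshContent mesh)
        {
            int channel = -1;
            foreach (GeometryContent geometry in mesh.Geometry)
            {
                foreach (VertexChannel vertexChannel in geometry.Vertices.Channels)
                {
                    if (!vertexChannel.Name.Contains("Texture"))
                        continue;

                    int curChannel = VertexChannelNames.DecodeUsageIndex(vertexChannel.Name);
                    if (channel == -1)
                        channel = curChannel;   // first texture channel seen: store its index
                    else if (channel != curChannel)
                        return -2;              // inconsistent indices => signal error
                }
            }
            return channel;
        }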
Example #3
        private void AddTangentFrames(MeshContent mesh, ModelDescription modelDescription, MeshDescription meshDescription)
        {
            string textureCoordinateChannelName = VertexChannelNames.TextureCoordinate(0);
            string tangentChannelName           = VertexChannelNames.Tangent(0);
            string binormalChannelName          = VertexChannelNames.Binormal(0);

            bool normalsCalculated = false;

            for (int i = 0; i < mesh.Geometry.Count; i++)
            {
                var geometry = mesh.Geometry[i];

                // Check whether tangent frames are required.
                var submeshDescription = (meshDescription != null) ? meshDescription.GetSubmeshDescription(i) : null;
                if ((submeshDescription != null && submeshDescription.GenerateTangentFrames) ||
                    (meshDescription != null && meshDescription.GenerateTangentFrames) ||
                    (modelDescription != null && modelDescription.GenerateTangentFrames))
                {
                    // Ensure that normals are set.
                    if (!normalsCalculated)
                    {
                        CalculateNormals(mesh, false);
                        normalsCalculated = true;
                    }

                    var  channels         = geometry.Vertices.Channels;
                    bool tangentsMissing  = !channels.Contains(tangentChannelName);
                    bool binormalsMissing = !channels.Contains(binormalChannelName);
                    if (tangentsMissing || binormalsMissing)
                    {
                        // Texture coordinates are required for calculating tangent frames.
                        if (!channels.Contains(textureCoordinateChannelName))
                        {
                            _context.Logger.LogWarning(
                                null, mesh.Identity,
                                "Texture coordinates missing in mesh '{0}', submesh {1}. Texture coordinates are required " +
                                "for calculating tangent frames.",
                                mesh.Name, i);

                            channels.Add <Vector2>(textureCoordinateChannelName, null);
                        }

                        CalculateTangentFrames(
                            geometry,
                            textureCoordinateChannelName,
                            tangentsMissing ? tangentChannelName : null,
                            binormalsMissing ? binormalChannelName : null);
                    }
                }
            }
        }
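The CalculateNormals call above is a helper of the surrounding processor and is not part of this listing; a plausible minimal sketch, assuming it forwards to MeshHelper and only touches geometry that is missing normals:

        // A minimal sketch of the CalculateNormals helper used above (assumption: it forwards
        // to MeshHelper.CalculateNormals and skips geometry that already has normals).
        private static void CalculateNormals(MeshContent mesh, bool overwriteExistingNormals)
        {
            string normalChannelName = VertexChannelNames.Normal();
            foreach (GeometryContent geometry in mesh.Geometry)
            {
                if (overwriteExistingNormals || !geometry.Vertices.Channels.Contains(normalChannelName))
                    MeshHelper.CalculateNormals(geometry, overwriteExistingNormals);
            }
        }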
Example #4
        /// <summary>
        /// Recursively processes a node from the input data tree.
        /// </summary>
        private void ProcessNode(NodeContent node)
        {
            // Is this node in fact a mesh?
            MeshContent mesh = node as MeshContent;

            if (mesh != null)
            {
                MeshHelper.OptimizeForCache(mesh);

                // create a texture coordinate channel (set 0) if none is present
                var texCoord0 = VertexChannelNames.TextureCoordinate(0);
                foreach (var item in mesh.Geometry)
                {
                    if (!item.Vertices.Channels.Contains(texCoord0))
                    {
                        item.Vertices.Channels.Add <Vector2>(texCoord0, null);
                    }
                }

                // calculate tangent frames for normal mapping
                var hasTangents  = GeometryContainsChannel(mesh.Geometry, VertexChannelNames.Tangent(0));
                var hasBinormals = GeometryContainsChannel(mesh.Geometry, VertexChannelNames.Binormal(0));
                if (!hasTangents || !hasBinormals)
                {
                    var tangentName  = hasTangents ? null : VertexChannelNames.Tangent(0);
                    var binormalName = hasBinormals ? null : VertexChannelNames.Binormal(0);
                    MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0), tangentName, binormalName);
                }

                //var outputMesh = new MyreMeshContent();
                //outputMesh.Parent = mesh.Parent;
                //outputMesh.BoundingSphere = BoundingSphere.CreateFromPoints(mesh.Positions);

                // Process all the geometry in the mesh.
                foreach (GeometryContent geometry in mesh.Geometry)
                {
                    ProcessGeometry(geometry, outputModel);
                }

                //outputModel.AddMesh(outputMesh);
            }

            // Recurse over any child nodes.
            foreach (NodeContent child in node.Children)
            {
                ProcessNode(child);
            }
        }
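        // Note: GeometryContainsChannel is a helper of this processor that is not shown in the
        // listing. A plausible minimal sketch, with the signature inferred from the call sites
        // above and "any geometry batch carries the channel" semantics assumed:
        private static bool GeometryContainsChannel(IEnumerable<GeometryContent> geometry, string channelName)
        {
            foreach (GeometryContent item in geometry)
            {
                if (item.Vertices.Channels.Contains(channelName))
                    return true;
            }
            return false;
        }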
        /// <summary>
        /// Recursively calls MeshHelper.CalculateTangentFrames for every MeshContent
        /// object in the NodeContent scene. This function could be changed to add
        /// more per vertex data as needed.
        /// </summary>
        /// <param name="input">A node in the scene.  The function should be called
        /// with the root of the scene.</param>
        private void PreprocessSceneHierarchy(NodeContent input)
        {
            MeshContent mesh = input as MeshContent;

            if (mesh != null)
            {
                MeshHelper.CalculateTangentFrames(mesh,
                                                  VertexChannelNames.TextureCoordinate(0),
                                                  VertexChannelNames.Tangent(0),
                                                  VertexChannelNames.Binormal(0));

                LookUpShaderAndAddToTextures(mesh);
            }

            foreach (NodeContent child in input.Children)
            {
                PreprocessSceneHierarchy(child);
            }
        }
Example #6
        public override ModelContent Process(NodeContent input,
                                             ContentProcessorContext context)
        {
            MeshContent mesh = input as MeshContent;

            if (mesh != null)
            {
                MeshHelper.CalculateTangentFrames(mesh,
                                                  VertexChannelNames.TextureCoordinate(0),
                                                  VertexChannelNames.Tangent(0),
                                                  VertexChannelNames.Binormal(0));
            }



            // Use base ModelProcessor class to do the actual model processing
            ModelContent model = base.Process(input, context);

            return(model);
        }
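For the content pipeline to offer a processor like this one, the class is typically decorated with the ContentProcessor attribute; an illustrative sketch (the class name and display name below are made up):

        // Illustrative only: how such a processor is usually exposed to the content pipeline.
        [ContentProcessor(DisplayName = "Model Processor - Tangent Frames")]
        public class TangentFrameModelProcessor : ModelProcessor
        {
            // Process override as shown above.
        }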
Example #7
        protected virtual void ProcessMesh(MeshContent mesh, ContentProcessorContext context)
        {
            if (bakeMeshTransform)
            {
                Matrix vertexTransform = mesh.AbsoluteTransform;

                // Transform the position of all the vertices
                for (int i = 0; i < mesh.Positions.Count; i++)
                {
                    mesh.Positions[i] = Vector3.Transform(mesh.Positions[i], vertexTransform);
                }
            }

            if (generateTangentFrame)
            {
                MeshHelper.CalculateTangentFrames(mesh,
                                                  VertexChannelNames.TextureCoordinate(0),
                                                  VertexChannelNames.Tangent(0),
                                                  VertexChannelNames.Binormal(0));
            }
        }
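The bakeMeshTransform branch above only rewrites positions. MeshHelper.TransformScene, used the same way in a later example in this list, bakes a transform into the mesh content as a whole; a minimal alternative sketch for that branch:

            // Alternative sketch: bake the absolute transform into the mesh content
            // instead of transforming the positions by hand.
            if (bakeMeshTransform)
            {
                MeshHelper.TransformScene(mesh, mesh.AbsoluteTransform);
            }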
Example #8
        public override ModelContent Process(NodeContent input,
                                             ContentProcessorContext context)
        {
            ValidateMesh(input, context, null);

            //Generate Tangents/Normals for shader
            MeshContent mesh = input as MeshContent;

            if (mesh != null)
            {
                MeshHelper.CalculateTangentFrames(mesh,
                                                  VertexChannelNames.TextureCoordinate(0),
                                                  VertexChannelNames.Tangent(0),
                                                  VertexChannelNames.Binormal(0));
            }

            // Find the skeleton.
            BoneContent skeleton = MeshHelper.FindSkeleton(input);

            if (skeleton == null)
            {
                throw new InvalidContentException("Input skeleton not found.");
            }

            // We don't want to have to worry about different parts of the model being
            // in different local coordinate systems, so let's just bake everything.
            FlattenTransforms(input, skeleton);

            // Read the bind pose and skeleton hierarchy data.
            IList <BoneContent> bones = MeshHelper.FlattenSkeleton(skeleton);

            if (bones.Count > SkinnedEffect.MaxBones)
            {
                throw new InvalidContentException(string.Format(
                                                      "Skeleton has {0} bones, but the maximum supported is {1}.",
                                                      bones.Count, SkinnedEffect.MaxBones));
            }

            List <Matrix> bindPose          = new List <Matrix>();
            List <Matrix> inverseBindPose   = new List <Matrix>();
            List <int>    skeletonHierarchy = new List <int>();

            foreach (BoneContent bone in bones)
            {
                bindPose.Add(bone.Transform);
                inverseBindPose.Add(Matrix.Invert(bone.AbsoluteTransform));
                skeletonHierarchy.Add(bones.IndexOf(bone.Parent as BoneContent));
            }


            // Chain to the base BShiftModelProcessor class so it can convert the model data.
            ModelContent model = base.Process(input, context);

            // Convert animation data to our runtime format.
            Dictionary <string, AnimationClip> animationClips;

            animationClips = ProcessAnimations(skeleton.Animations, bones);

            ((Dictionary <string, object>)model.Tag).Add("SkinningData", new SkinningData(animationClips, bindPose,
                                                                                          inverseBindPose, skeletonHierarchy));

            return(model);
        }
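At runtime the skinning data stored in model.Tag is read back with the matching casts; a minimal sketch, assuming the Tag dictionary was populated exactly as shown above:

            // Minimal runtime sketch: retrieve the skinning data written by the processor above.
            var tagData      = (Dictionary<string, object>)model.Tag;
            var skinningData = (SkinningData)tagData["SkinningData"];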
Example #9
        private ModelMeshContent ProcessMesh(MeshContent mesh, ModelBoneContent parent, ContentProcessorContext context)
        {
            var parts        = new List <ModelMeshPartContent>();
            var vertexBuffer = new VertexBufferContent();
            var indexBuffer  = new IndexCollection();

            if (GenerateTangentFrames)
            {
                context.Logger.LogMessage("Generating tangent frames.");
                foreach (GeometryContent geom in mesh.Geometry)
                {
                    if (!geom.Vertices.Channels.Contains(VertexChannelNames.Normal(0)))
                    {
                        MeshHelper.CalculateNormals(geom, true);
                    }

                    if (!geom.Vertices.Channels.Contains(VertexChannelNames.Tangent(0)) ||
                        !geom.Vertices.Channels.Contains(VertexChannelNames.Binormal(0)))
                    {
                        MeshHelper.CalculateTangentFrames(geom, VertexChannelNames.TextureCoordinate(0), VertexChannelNames.Tangent(0),
                                                          VertexChannelNames.Binormal(0));
                    }
                }
            }

            var startVertex = 0;

            foreach (var geometry in mesh.Geometry)
            {
                var vertices    = geometry.Vertices;
                var vertexCount = vertices.VertexCount;
                ModelMeshPartContent partContent;
                if (vertexCount == 0)
                {
                    partContent = new ModelMeshPartContent();
                }
                else
                {
                    var geomBuffer = geometry.Vertices.CreateVertexBuffer();
                    vertexBuffer.Write(vertexBuffer.VertexData.Length, 1, geomBuffer.VertexData);

                    var startIndex = indexBuffer.Count;
                    indexBuffer.AddRange(geometry.Indices);

                    partContent = new ModelMeshPartContent(vertexBuffer, indexBuffer, startVertex, vertexCount, startIndex, geometry.Indices.Count / 3);

                    // Geoms are supposed to all have the same decl, so just steal one of these
                    vertexBuffer.VertexDeclaration = geomBuffer.VertexDeclaration;

                    startVertex += vertexCount;
                }

                partContent.Material = geometry.Material;
                parts.Add(partContent);
            }

            var bounds = new BoundingSphere();

            if (mesh.Positions.Count > 0)
            {
                bounds = BoundingSphere.CreateFromPoints(mesh.Positions);
            }

            return(new ModelMeshContent(mesh.Name, mesh, parent, bounds, parts));
        }
Example #10
        } // Process

        #endregion

        #region Process Vertex Channel

        /// <summary>
        /// Processes geometry content vertex channels at the specified index.
        /// </summary>
        protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
        {
            // Compressed Vertex Data
            VertexChannelCollection channels = geometry.Vertices.Channels;
            string name = channels[vertexChannelIndex].Name;

            if (name == VertexChannelNames.Normal())
            {
                channels.ConvertChannelContent <NormalizedShort4>(vertexChannelIndex);
            }
            else if (name == VertexChannelNames.TextureCoordinate(0))
            {
                // Texture coordinates are converted to half precision; unlike a normalized format, values outside [-1, 1] are preserved.
                channels.ConvertChannelContent <HalfVector2>(vertexChannelIndex);
            }
            else if (name == VertexChannelNames.TextureCoordinate(1))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(1));
            }
            else if (name == VertexChannelNames.TextureCoordinate(2))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(2));
            }
            else if (name == VertexChannelNames.TextureCoordinate(3))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(3));
            }
            else if (name == VertexChannelNames.TextureCoordinate(4))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(4));
            }
            else if (name == VertexChannelNames.TextureCoordinate(5))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(5));
            }
            else if (name == VertexChannelNames.TextureCoordinate(6))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(6));
            }
            else if (name == VertexChannelNames.TextureCoordinate(7))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(7));
            }
            else if (name == VertexChannelNames.Color(0))
            {
                channels.Remove(VertexChannelNames.Color(0));
            }
            else if (name == VertexChannelNames.Tangent(0))
            {
                channels.ConvertChannelContent <NormalizedShort4>(vertexChannelIndex);
            }
            else if (name == VertexChannelNames.Binormal(0))
            {
                // No need to get rid of the binormal data because the model will use more than 32 bytes per vertex.
                // We can actually try to align the data to 64 bytes per vertex.
                channels.ConvertChannelContent <NormalizedShort4>(vertexChannelIndex);
            }
            else
            {
                // Blend indices, blend weights and everything else.
                // Don't use "BlendWeight0" or "weights0" as the channel name; neither works.
                base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
                channels.ConvertChannelContent <Byte4>("BlendIndices0");
                channels.ConvertChannelContent <NormalizedShort4>(VertexChannelNames.EncodeName(VertexElementUsage.BlendWeight, 0));
            }
        } // ProcessVertexChannel
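The chain of TextureCoordinate(1) through TextureCoordinate(7) branches above removes every texture coordinate set other than set 0 and could be collapsed into a loop without changing behaviour; a minimal sketch of that part of the method:

            // Equivalent sketch for the TextureCoordinate(1)..TextureCoordinate(7) branches:
            // remove every texture coordinate set other than set 0.
            for (int usageIndex = 1; usageIndex <= 7; usageIndex++)
            {
                string texCoordName = VertexChannelNames.TextureCoordinate(usageIndex);
                if (name == texCoordName)
                {
                    channels.Remove(texCoordName);
                    return;
                }
            }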
Example #11
        /// <summary>
        /// Generate tangents and binormals for the model data at the given node.  Recursively generates for children.
        /// </summary>
        /// <param name="content">The node to process.</param>
        private void GenerateNTBData(NodeContent content)
        {
            MeshContent mesh = content as MeshContent;

            if (mesh != null)
            {
                MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0), VertexChannelNames.Tangent(0), VertexChannelNames.Binormal(0));
            }

            foreach (NodeContent child in content.Children)
            {
                GenerateNTBData(child);
            }
        }
Example #12
        // Build a SubmeshInfo for each GeometryContent.
        private SubmeshInfo[] BuildSubmeshInfos(MeshContent mesh, List <MeshContent> inputMorphs)
        {
            bool hasMorphTargets = (inputMorphs != null && inputMorphs.Count > 0);

            // A lookup table that maps each material to its index.
            // The key is the name of the XML file (string) or the local material (MaterialContent).
            var materialLookupTable = new Dictionary <object, int>();

            int numberOfSubmeshes = mesh.Geometry.Count;
            var submeshInfos      = new SubmeshInfo[numberOfSubmeshes];

            for (int i = 0; i < numberOfSubmeshes; i++)
            {
                var geometry = mesh.Geometry[i];

                // Build morph targets for current submesh.
                List <DRMorphTargetContent> morphTargets = null;
                if (hasMorphTargets)
                {
                    morphTargets = BuildMorphTargets(geometry, inputMorphs, i);
                    if (morphTargets != null && morphTargets.Count > 0)
                    {
                        // When morph targets are used, remove the "BINORMAL" channel. (Otherwise,
                        // the number of vertex attributes would exceed the limit. Binormals need
                        // to be reconstructed from normal and tangent in the vertex shader.)
                        string binormalName          = VertexChannelNames.Binormal(0);
                        bool   containsTangentFrames = geometry.Vertices.Channels.Remove(binormalName);

                        if (containsTangentFrames)
                        {
                            // A submesh cannot use vertex colors and tangents at the same time.
                            // This would also exceed the vertex attribute limit.
                            string colorName = VertexChannelNames.Color(0);
                            if (geometry.Vertices.Channels.Contains(colorName))
                            {
                                geometry.Vertices.Channels.Remove(colorName);
                            }
                        }
                    }
                }

                var submeshInfo = new SubmeshInfo
                {
                    Geometry      = geometry,
                    OriginalIndex = i,
                    VertexBuffer  = geometry.Vertices.CreateVertexBuffer(),
                    MorphTargets  = morphTargets
                };
                submeshInfo.VertexBufferIndex = GetVertexBufferIndex(submeshInfo.VertexBuffer.VertexDeclaration);

                // Get material file or local material.
                object material = (object)GetExternalMaterial(mesh, geometry) ?? geometry.Material;
                if (material == null)
                {
                    var message = string.Format(CultureInfo.InvariantCulture, "Mesh \"{0}\" does not have a material.", mesh);
                    throw new InvalidContentException(message, mesh.Identity);
                }

                int materialIndex;
                if (!materialLookupTable.TryGetValue(material, out materialIndex))
                {
                    materialIndex = materialLookupTable.Count;
                    materialLookupTable.Add(material, materialIndex);
                }

                submeshInfo.MaterialIndex = materialIndex;
                submeshInfo.Material      = material;

                submeshInfos[i] = submeshInfo;
            }

            return(submeshInfos);
        }
Example #13
        /// <summary>
        /// Recursively calls MeshHelper.CalculateTangentFrames for every MeshContent
        /// object in the NodeContent scene. This function could be changed to add
        /// more per vertex data as needed.
        /// </summary>
        /// <param name="input">A node in the scene. The function should be called
        /// with the root of the scene.</param>
        private void PreprocessSceneHierarchy(NodeContent input,
                                              ContentProcessorContext context, string inputName)
        {
            MeshContent mesh = input as MeshContent;

            if (mesh != null)
            {
                MeshHelper.CalculateTangentFrames(mesh,
                                                  VertexChannelNames.TextureCoordinate(0),
                                                  VertexChannelNames.Tangent(0),
                                                  VertexChannelNames.Binormal(0));

                foreach (GeometryContent geometry in mesh.Geometry)
                {
                    if (false == geometry.Material.Textures.ContainsKey(TextureMapKey))
                    {
                        geometry.Material.Textures.Add(TextureMapKey,
                                                       new ExternalReference <TextureContent>(
                                                           "null_color.tga"));
                    }
                    else
                    {
                        context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[TextureMapKey].Filename);
                        string fileName = Path.GetFileName(geometry.Material.Textures[TextureMapKey].Filename);
                        if (fileName != null && fileName.StartsWith("ship") && fileName.EndsWith("_c.tga"))
                        {
                            InsertMissedMapTextures(geometry.Material.Textures,
                                                    fileName.Substring(0, fileName.Length - "_c.tga".Length), context);
                        }
                    }

                    if (false == geometry.Material.Textures.ContainsKey(NormalMapKey))
                    {
                        geometry.Material.Textures.Add(NormalMapKey,
                                                       new ExternalReference <TextureContent>(
                                                           "null_normal.tga"));
                    }
                    else
                    {
                        context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[NormalMapKey].Filename);
                    }

                    if (false == geometry.Material.Textures.ContainsKey(SpecularMapKey))
                    {
                        geometry.Material.Textures.Add(SpecularMapKey,
                                                       new ExternalReference <TextureContent>(
                                                           "null_specular.tga"));
                    }
                    else
                    {
                        context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[SpecularMapKey].Filename);
                    }

                    if (false == geometry.Material.Textures.ContainsKey(GlowMapKey))
                    {
                        geometry.Material.Textures.Add(GlowMapKey,
                                                       new ExternalReference <TextureContent>(
                                                           "null_glow.tga"));
                    }
                    else
                    {
                        context.Logger.LogImportantMessage("has: " + geometry.Material.Textures[GlowMapKey].Filename);
                    }
                }
            }

            foreach (NodeContent child in input.Children)
            {
                PreprocessSceneHierarchy(child, context, inputName);
            }
        }
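        // Note: InsertMissedMapTextures is not shown in this listing. A speculative sketch based
        // on the call above; the "_n"/"_s"/"_g" suffix convention is an assumption, only the
        // texture keys and the base-name argument come from the calling code.
        private void InsertMissedMapTextures(TextureReferenceDictionary textures, string baseName,
                                             ContentProcessorContext context)
        {
            if (!textures.ContainsKey(NormalMapKey))
                textures.Add(NormalMapKey, new ExternalReference<TextureContent>(baseName + "_n.tga"));
            if (!textures.ContainsKey(SpecularMapKey))
                textures.Add(SpecularMapKey, new ExternalReference<TextureContent>(baseName + "_s.tga"));
            if (!textures.ContainsKey(GlowMapKey))
                textures.Add(GlowMapKey, new ExternalReference<TextureContent>(baseName + "_g.tga"));
        }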
        } // Process

        #endregion

        #region Process Vertex Channel

        /// <summary>
        /// Processes geometry content vertex channels at the specified index.
        /// </summary>
        protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
        {
            VertexChannelCollection channels = geometry.Vertices.Channels;

            // If the model has only position and normals a UV channel is added.
            // http://xnafinalengine.codeplex.com/wikipage?title=Compressed%20Vertex%20Data
            if (channels.Count == 1 && channels.Contains(VertexChannelNames.Normal()))
            {
                channels.Add<Vector2>(VertexChannelNames.TextureCoordinate(0), null);
            }

            // If the model has position, normal and UV then the data is packed into 32-byte aligned vertex data.
            if (channels.Count == 2 && channels.Contains(VertexChannelNames.Normal()) && channels.Contains(VertexChannelNames.TextureCoordinate(0)))
            {
                // No compressed Vertex Data
                base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
            }
            else // If not then the data is compressed.
            {
                string name = channels[vertexChannelIndex].Name;

                if (name == VertexChannelNames.Normal())
                {
                    channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
                }
                else if (name == VertexChannelNames.TextureCoordinate(0))
                {
                    // Clamp values.
                    /*for (int i = 0; i < channels[vertexChannelIndex].Count; i++)
                    {
                        Vector2 uv = (Vector2)channels[vertexChannelIndex][i];
                        if (uv.X < 0) 
                            uv.X *= -1;
                        if (uv.Y < 0) 
                            uv.Y *= -1;
                        Vector2 uvClamped = new Vector2(uv.X - (float)Math.Truncate(uv.X), uv.Y - (float)Math.Truncate(uv.Y));
                        channels[vertexChannelIndex][i] = uvClamped;
                    }
                    // If the resource has texture coordinates outside the range [-1, 1] the values will be clamped.
                    channels.ConvertChannelContent<NormalizedShort2>(vertexChannelIndex);*/
                    // Sometimes you can't just clamp the values, because the distance between vertices surpasses one UV unit.
                    // And given that I am not removing the binormals, I won't normalize the UVs.
                    channels.ConvertChannelContent<HalfVector2>(vertexChannelIndex);
                }
                else if (name == VertexChannelNames.TextureCoordinate(1))
                    channels.Remove(VertexChannelNames.TextureCoordinate(1));
                else if (name == VertexChannelNames.TextureCoordinate(2))
                    channels.Remove(VertexChannelNames.TextureCoordinate(2));
                else if (name == VertexChannelNames.TextureCoordinate(3))
                    channels.Remove(VertexChannelNames.TextureCoordinate(3));
                else if (name == VertexChannelNames.TextureCoordinate(4))
                    channels.Remove(VertexChannelNames.TextureCoordinate(4));
                else if (name == VertexChannelNames.TextureCoordinate(5))
                    channels.Remove(VertexChannelNames.TextureCoordinate(5));
                else if (name == VertexChannelNames.TextureCoordinate(6))
                    channels.Remove(VertexChannelNames.TextureCoordinate(6));
                else if (name == VertexChannelNames.TextureCoordinate(7))
                    channels.Remove(VertexChannelNames.TextureCoordinate(7));
                else if (name == VertexChannelNames.Color(0))
                    channels.Remove(VertexChannelNames.Color(0));
                else if (name == VertexChannelNames.Tangent(0))
                {
                    channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
                }
                else if (name == VertexChannelNames.Binormal(0))
                {
                    channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
                    // If the binormal is removed then the position, the normal,
                    // the tangent and one texture coordinate can be fetched in a single 32-byte block.
                    // Still, it is faster to just pass the value, at least in the tests I made.
                    //channels.Remove(VertexChannelNames.Binormal(0));
                }
                else
                {
                    base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
                }
            }
        } // ProcessVertexChannel
Example #15
        MeshData ProcessMesh(MeshContent mesh, ContentProcessorContext context, string rootPath, Dictionary <string, object> processedContent, SkeletonData skeletonData, AnimationData[] animations, ref int geometryCount)
        {
            MeshHelper.TransformScene(mesh, mesh.AbsoluteTransform);

            string[] normalMapNames = new string[] { "Bump0", "Bump", "NormalMap", "Normalmap", "Normals", "BumpMap" };
            MeshHelper.OptimizeForCache(mesh);

            foreach (GeometryContent geom in mesh.Geometry)
            {
                if (geom.Material != null)
                {
                    string map = MaterialTexture(geom.Material, rootPath, null, null, normalMapNames);
                    if (map != null && map.Length > 0)
                    {
                        generateTangents = true;
                    }
                }
            }

            if (generateTangents)
            {
                MeshHelper.CalculateNormals(mesh, false);

                bool hasNoTangent = !GeometryContainsChannel(mesh, VertexChannelNames.Tangent(0));
                bool hasNoBinorm  = !GeometryContainsChannel(mesh, VertexChannelNames.Binormal(0));
                if (hasNoTangent || hasNoBinorm)
                {
                    string tangentChannelName  = hasNoTangent ? VertexChannelNames.Tangent(0) : null;
                    string binormalChannelName = hasNoBinorm ? VertexChannelNames.Binormal(0) : null;
                    MeshHelper.CalculateTangentFrames(mesh, VertexChannelNames.TextureCoordinate(0), tangentChannelName, binormalChannelName);
                }
            }
            if (swapWinding)
            {
                MeshHelper.SwapWindingOrder(mesh);
            }

            List <GeometryData> geometry = new List <GeometryData>();

            BoneContent skeleton = MeshHelper.FindSkeleton(mesh);
            Dictionary <string, int> boneIndices = null;

            if (skeleton != null)
            {
                boneIndices = FlattenSkeleton(skeleton);
            }

            foreach (GeometryContent geom in mesh.Geometry)
            {
                this.ProcessVertexChannels(geom, context, rootPath, boneIndices, null);
                MeshHelper.MergeDuplicateVertices(geom);

                MaterialData material = new MaterialData(
                    MaterialValue <float>("Alpha", geom.Material, 1),
                    MaterialValue <float>("SpecularPower", geom.Material, 24),
                    MaterialValue <Vector3>("DiffuseColor", geom.Material, Vector3.One),
                    MaterialValue <Vector3>("EmissiveColor", geom.Material, Vector3.Zero),
                    MaterialValue <Vector3>("SpecularColor", geom.Material, Vector3.Zero),
                    MaterialTexture(geom.Material, rootPath, context, processedContent, "Texture"),
                    MaterialTexture(geom.Material, rootPath, context, processedContent, normalMapNames),
                    MaterialValue <bool>("VertexColorEnabled", geom.Material, true) && geom.Vertices.Channels.Contains(VertexChannelNames.Color(0)));

                VertexBufferContent vb;
                VertexElement[]     ve;
                geom.Vertices.CreateVertexBuffer(out vb, out ve, context.TargetPlatform);

                int[] indices = new int[geom.Indices.Count];
                geom.Indices.CopyTo(indices, 0);

                geometry.Add(new GeometryData(geometryCount++, geom.Name, ve, vb.VertexData, indices, material, skeletonData, animations, context.TargetPlatform == TargetPlatform.Xbox360));
            }

            return(new MeshData(mesh.Name, geometry.ToArray(), animations));
        }
        private MeshContent ExtractMesh(aiMesh aiMesh)
        {
            if (!String.IsNullOrEmpty(aiMesh.mName.Data))
            {
                log("modelname " + aiMesh.mName.Data);
                meshBuilder = MeshBuilder.StartMesh(aiMesh.mName.Data);
            }
            else
            {
                meshBuilder = MeshBuilder.StartMesh(Path.GetFileNameWithoutExtension(filename));
            }

            if (!aiMesh.HasPositions())
            {
                throw new Exception("Model does not have positions");
            }

            // Add additional vertex channels for texture coordinates and normals
            if (aiMesh.HasTextureCoords(0))
            {
                textureCoordinateDataIndex = meshBuilder.CreateVertexChannel <Vector2>(VertexChannelNames.TextureCoordinate(0));
            }
            else if (aiMesh.HasVertexColors(0))
            {
                colorCoordinateDataIndex = meshBuilder.CreateVertexChannel <Vector4>(VertexChannelNames.Color(0));
            }
            if (aiMesh.HasNormals())
            {
                normalDataIndex = meshBuilder.CreateVertexChannel <Vector3>(VertexChannelNames.Normal());
            }
            if (aiMesh.HasTangentsAndBitangents())
            {
                tangentDataIndex  = meshBuilder.CreateVertexChannel <Vector3>(VertexChannelNames.Tangent(0));
                binormalDataIndex = meshBuilder.CreateVertexChannel <Vector3>(VertexChannelNames.Binormal(0));
            }
            if (aiMesh.HasBones())
            {
                boneDataIndex = meshBuilder.CreateVertexChannel <BoneWeightCollection>(VertexChannelNames.Weights(0));
            }

            var numFaces           = (int)aiMesh.mNumFaces;
            var numVertices        = (int)aiMesh.mNumVertices;
            var aiPositions        = aiMesh.mVertices;
            var aiNormals          = aiMesh.mNormals;
            var aiTextureCoordsAll = aiMesh.mTextureCoords;
            var aiTextureCoords    = (aiTextureCoordsAll != null) ? aiTextureCoordsAll[0] : null;

            for (int j = 0; j < aiMesh.mNumVertices; j++)
            {
                meshBuilder.CreatePosition(aiMesh.mVertices[j].x, aiMesh.mVertices[j].y, aiMesh.mVertices[j].z);
            }

            meshBuilder.SetMaterial(GetMaterial(aiMesh));

            var aiFaces   = aiMesh.mFaces;
            var dxIndices = new uint[numFaces * 3];

            for (int k = 0; k < numFaces; ++k)
            {
                var aiFace    = aiFaces[k];
                var aiIndices = aiFace.mIndices;
                for (int j = 0; j < 3; ++j)
                {
                    int index = (int)aiIndices[j];
                    if (aiMesh.HasTextureCoords(0))
                    {
                        meshBuilder.SetVertexChannelData(textureCoordinateDataIndex, new Vector2(aiMesh.mTextureCoords[0][index].x, aiMesh.mTextureCoords[0][index].y));
                    }
                    else if (aiMesh.HasVertexColors(0))
                    {
                        meshBuilder.SetVertexChannelData(colorCoordinateDataIndex, new Vector4(aiMesh.mColors[0][index].r, aiMesh.mColors[0][index].g, aiMesh.mColors[0][index].b, aiMesh.mColors[0][index].a));
                    }
                    if (aiMesh.HasNormals())
                    {
                        meshBuilder.SetVertexChannelData(normalDataIndex, new Vector3(aiMesh.mNormals[index].x, aiMesh.mNormals[index].y, aiMesh.mNormals[index].z));
                    }

                    if (aiMesh.HasTangentsAndBitangents())
                    {
                        meshBuilder.SetVertexChannelData(tangentDataIndex, new Vector3(aiMesh.mTangents[index].x, aiMesh.mTangents[index].y, aiMesh.mTangents[index].z));
                        meshBuilder.SetVertexChannelData(binormalDataIndex, new Vector3(aiMesh.mBitangents[index].x, aiMesh.mBitangents[index].y, aiMesh.mBitangents[index].z));
                    }
                    if (aiMesh.HasBones())
                    {
                        BoneWeightCollection BoneWeightCollection = new BoneWeightCollection();
                        if (wbone.ContainsKey(index))
                        {
                            foreach (var item in wbone[index])
                            {
                                BoneWeightCollection.Add(new BoneWeight(item.Key, item.Value));
                            }
                        }
                        meshBuilder.SetVertexChannelData(boneDataIndex, BoneWeightCollection);
                    }

                    meshBuilder.AddTriangleVertex(index);
                }
            }

            MeshContent meshContent = meshBuilder.FinishMesh();

            return(meshContent);
        }
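ExtractMesh would typically be driven from a custom ContentImporter; an illustrative sketch (the importer override, the scene object and its mMeshes collection are assumptions based on the Assimp-style binding used above):

        // Illustrative only: driving ExtractMesh from a custom importer.
        public override NodeContent Import(string filename, ContentImporterContext context)
        {
            this.filename = filename;   // used above as a fallback mesh name
            var root = new NodeContent
            {
                Name     = Path.GetFileNameWithoutExtension(filename),
                Identity = new ContentIdentity(filename)
            };
            foreach (aiMesh aiMesh in scene.mMeshes)   // assumption: scene holds the imported meshes
            {
                root.Children.Add(ExtractMesh(aiMesh));
            }
            return root;
        }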