Example #1
        private static void ProcessColorChannel(GeometryContent geometry, int vertexChannelIndex)
        {
            VertexChannelCollection channels = geometry.Vertices.Channels;

            try
            {
                channels.ConvertChannelContent<Color>(vertexChannelIndex);
            }
            catch (NotSupportedException)
            {
                throw new InvalidCastException("Unable to convert the mesh's embedded color channel to the Color format.");
            }
        }
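
A hypothetical call site (not shown in the example): this helper would most likely be invoked from an overridden ProcessVertexChannel whenever a color channel is found. Everything in the sketch below except ProcessColorChannel is an assumption based on the standard XNA ModelProcessor API.

        // Hypothetical sketch: route color channels through the helper above and
        // let the base ModelProcessor handle every other channel.
        protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
        {
            string name = geometry.Vertices.Channels[vertexChannelIndex].Name;

            if (name == VertexChannelNames.Color(0))
                ProcessColorChannel(geometry, vertexChannelIndex);
            else
                base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
        }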
Example #2
        } // Process

        #endregion

        #region Process Vertex Channel

        /// <summary>
        /// Processes geometry content vertex channels at the specified index.
        /// </summary>
        protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
        {
            // Compressed Vertex Data
            VertexChannelCollection channels = geometry.Vertices.Channels;
            string name = channels[vertexChannelIndex].Name;

            if (name == VertexChannelNames.Normal())
            {
                channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
            }
            else if (name == VertexChannelNames.TextureCoordinate(0))
            {
                // Texture coordinates are stored as 16-bit half floats (HalfVector2),
                // so values outside the [-1, 1] range are preserved, only with less precision.
                channels.ConvertChannelContent<HalfVector2>(vertexChannelIndex);
            }
            else if (name == VertexChannelNames.TextureCoordinate(1))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(1));
            }
            else if (name == VertexChannelNames.TextureCoordinate(2))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(2));
            }
            else if (name == VertexChannelNames.TextureCoordinate(3))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(3));
            }
            else if (name == VertexChannelNames.TextureCoordinate(4))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(4));
            }
            else if (name == VertexChannelNames.TextureCoordinate(5))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(5));
            }
            else if (name == VertexChannelNames.TextureCoordinate(6))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(6));
            }
            else if (name == VertexChannelNames.TextureCoordinate(7))
            {
                channels.Remove(VertexChannelNames.TextureCoordinate(7));
            }
            else if (name == VertexChannelNames.Color(0))
            {
                channels.Remove(VertexChannelNames.Color(0));
            }
            else if (name == VertexChannelNames.Tangent(0))
            {
                channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
            }
            else if (name == VertexChannelNames.Binormal(0))
            {
                // There is no need to get rid of the binormal data because the model already uses more than 32 bytes per vertex.
                // We can instead try to align the data to 64 bytes per vertex.
                channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
            }
            else
            {
                // Blend indices, blend weights and everything else.
                // Don't use "BlendWeight0" or "weights0" as the channel name; neither works.
                base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
                channels.ConvertChannelContent<Byte4>("BlendIndices0");
                channels.ConvertChannelContent<NormalizedShort4>(VertexChannelNames.EncodeName(VertexElementUsage.BlendWeight, 0));
            }
        } // ProcessVertexChannel
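
These overrides presumably live in a subclass of the stock ModelProcessor that is registered with the content pipeline. The skeleton below is only an illustrative guess: the class name and display name are invented, while the ContentProcessor attribute and ModelProcessor base class are the standard XNA 4.0 pipeline API.

using Microsoft.Xna.Framework.Content.Pipeline;
using Microsoft.Xna.Framework.Content.Pipeline.Graphics;
using Microsoft.Xna.Framework.Content.Pipeline.Processors;

// Hypothetical skeleton; only the ProcessVertexChannel override shown in the examples is real.
[ContentProcessor(DisplayName = "Compressed Model Processor (hypothetical)")]
public class CompressedModelProcessor : ModelProcessor
{
    protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
    {
        // Channel conversions such as those in the examples above would go here.
        base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
    }
}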
Example #3
        } // Process

        #endregion

        #region Process Vertex Channel

        /// <summary>
        /// Processes geometry content vertex channels at the specified index.
        /// </summary>
        protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
        {
            VertexChannelCollection channels = geometry.Vertices.Channels;

            // If the model has only position and normals, a UV channel is added.
            // http://xnafinalengine.codeplex.com/wikipage?title=Compressed%20Vertex%20Data
            if (channels.Count == 1 && channels.Contains(VertexChannelNames.Normal()))
            {
                channels.Add<Vector2>(VertexChannelNames.TextureCoordinate(0), null);
            }

            // If the model has position, normal and UV, then the data is packed into 32-byte aligned vertex data.
            if (channels.Count == 2 && channels.Contains(VertexChannelNames.Normal()) && channels.Contains(VertexChannelNames.TextureCoordinate(0)))
            {
                // No compressed Vertex Data
                base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
            }
            else // If not then the data is compressed.
            {
                string name = channels[vertexChannelIndex].Name;

                if (name == VertexChannelNames.Normal())
                {
                    channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
                }
                else if (name == VertexChannelNames.TextureCoordinate(0))
                {
                    // Clamp values.
                    /*for (int i = 0; i < channels[vertexChannelIndex].Count; i++)
                    {
                        Vector2 uv = (Vector2)channels[vertexChannelIndex][i];
                        if (uv.X < 0) 
                            uv.X *= -1;
                        if (uv.Y < 0) 
                            uv.Y *= -1;
                        Vector2 uvClamped = new Vector2(uv.X - (float)Math.Truncate(uv.X), uv.Y - (float)Math.Truncate(uv.Y));
                        channels[vertexChannelIndex][i] = uvClamped;
                    }
                    // If the resource has texture coordinates outside the range [-1, 1] the values will be clamped.
                    channels.ConvertChannelContent<NormalizedShort2>(vertexChannelIndex);*/
                    // Sometimes you can't just clamp the values, because the distance between vertices can surpass one UV unit.
                    // And given that I am not removing the binormals, I won't normalize the UVs.
                    channels.ConvertChannelContent<HalfVector2>(vertexChannelIndex);
                }
                else if (name == VertexChannelNames.TextureCoordinate(1))
                    channels.Remove(VertexChannelNames.TextureCoordinate(1));
                else if (name == VertexChannelNames.TextureCoordinate(2))
                    channels.Remove(VertexChannelNames.TextureCoordinate(2));
                else if (name == VertexChannelNames.TextureCoordinate(3))
                    channels.Remove(VertexChannelNames.TextureCoordinate(3));
                else if (name == VertexChannelNames.TextureCoordinate(4))
                    channels.Remove(VertexChannelNames.TextureCoordinate(4));
                else if (name == VertexChannelNames.TextureCoordinate(5))
                    channels.Remove(VertexChannelNames.TextureCoordinate(5));
                else if (name == VertexChannelNames.TextureCoordinate(6))
                    channels.Remove(VertexChannelNames.TextureCoordinate(6));
                else if (name == VertexChannelNames.TextureCoordinate(7))
                    channels.Remove(VertexChannelNames.TextureCoordinate(7));
                else if (name == VertexChannelNames.Color(0))
                    channels.Remove(VertexChannelNames.Color(0));
                else if (name == VertexChannelNames.Tangent(0))
                {
                    channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
                }
                else if (name == VertexChannelNames.Binormal(0))
                {
                    channels.ConvertChannelContent<NormalizedShort4>(vertexChannelIndex);
                    // If the binormal is removed, then the position, the normal,
                    // the tangent and one texture coordinate can be fetched in a single 32-byte block.
                    // Still, it is faster to just pass the value along, at least in the tests I made.
                    //channels.Remove(VertexChannelNames.Binormal(0));
                }
                else
                {
                    base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
                }
            }
        } // ProcessVertexChannel
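
For reference, an illustrative guess at the runtime vertex layout these conversions produce; this declaration is not part of the examples, the offsets simply follow from the element formats chosen above. Vector3 position, NormalizedShort4 normal, HalfVector2 texture coordinate and NormalizedShort4 tangent and binormal add up to 40 bytes per vertex, or 32 bytes if the binormal is dropped, matching the comments in the code.

using Microsoft.Xna.Framework.Graphics;

// Illustrative sketch only: a vertex declaration matching the compressed channel formats.
// 12 (Vector3) + 8 (NormalizedShort4) + 4 (HalfVector2) + 8 + 8 = 40 bytes per vertex.
public static class CompressedVertexLayout
{
    public static readonly VertexDeclaration Declaration = new VertexDeclaration(
        new VertexElement(0,  VertexElementFormat.Vector3,          VertexElementUsage.Position,          0),
        new VertexElement(12, VertexElementFormat.NormalizedShort4, VertexElementUsage.Normal,            0),
        new VertexElement(20, VertexElementFormat.HalfVector2,      VertexElementUsage.TextureCoordinate, 0),
        new VertexElement(24, VertexElementFormat.NormalizedShort4, VertexElementUsage.Tangent,           0),
        new VertexElement(32, VertexElementFormat.NormalizedShort4, VertexElementUsage.Binormal,          0));
}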