/// <summary>
/// Converts encoded normals (components packed into the 0..1 range) to the
/// runtime-ready NormalizedByte4 format (-1..1 range) and generates mipmaps.
/// </summary>
/// <param name="input">Texture whose bitmaps contain encoded normals.</param>
/// <param name="context">Pipeline context (required by the base signature; not used here).</param>
/// <returns>A Texture2DContent holding the expanded normals with a mipmap chain.</returns>
public override TextureContent Process(TextureContent input, ContentProcessorContext context)
{
    // Convert to Vector4 format so we know exactly what pixel layout we're
    // reading below.
    input.ConvertBitmapType(typeof(PixelBitmapContent<Vector4>));

    Texture2DContent output = new Texture2DContent();

    // Expand the encoded normals: values ranging from 0 to 1 are expanded to
    // range from -1 to 1. In almost all cases the input normal map is a
    // Texture2DContent with a single face, but to be safe we convert every face.
    // NOTE(review): output is a Texture2DContent, which exposes only one face;
    // if input ever has more than one face (e.g. a cube map),
    // output.Faces[faceIndex] would be out of range for faceIndex > 0 — confirm
    // callers only feed 2D textures through this processor.
    int faceIndex = 0;
    foreach (MipmapChain mipmapChain in input.Faces)
    {
        foreach (PixelBitmapContent<Vector4> bitmap in mipmapChain)
        {
            // Build the runtime-ready bitmap directly.
            PixelBitmapContent<NormalizedByte4> normalizedBitmap =
                new PixelBitmapContent<NormalizedByte4>(bitmap.Width, bitmap.Height);

            for (int x = 0; x < bitmap.Width; x++)
            {
                for (int y = 0; y < bitmap.Height; y++)
                {
                    // Map [0, 1] -> [-1, 1] per component.
                    Vector4 encoded = 2 * bitmap.GetPixel(x, y) - Vector4.One;
                    normalizedBitmap.SetPixel(x, y, new NormalizedByte4(encoded));
                }
            }

            // Fix: add the converted bitmap directly, instead of first adding
            // the source bitmap and then overwriting that slot in place (the
            // original add-then-replace served no purpose and kept both
            // bitmaps alive).
            output.Faces[faceIndex].Add(normalizedBitmap);
        }

        faceIndex++;
    }

    // false: keep any mip levels already present, only generate missing ones.
    output.GenerateMipmaps(false);
    return output;
}
/// <summary>
/// Processes a cube-map texture: for each face, the top-level bitmap is run
/// through Decode2, a full mipmap chain is generated from the decoded data,
/// and each resulting level is optionally re-encoded with Encode2 before
/// being written into the output cube map.
/// </summary>
/// <param name="input">The source cube-map content.</param>
/// <param name="context">Pipeline context (required by the base signature; not used here).</param>
/// <returns>The processed cube-map content with per-face mipmap chains.</returns>
public override TextureCubeContent Process(TextureCubeContent input, ContentProcessorContext context)
{
    TextureCubeContent result = new TextureCubeContent();
    result.Name = input.Name;
    result.Identity = input.Identity;

    int faceIndex = 0;
    foreach (var face in input.Faces)
    {
        // Only the top mip level of each face is used as the source.
        PixelBitmapContent<Color> sourceBitmap = (PixelBitmapContent<Color>)face[0];

        // Stage the decoded bitmap in an intermediate 2D texture so the
        // pipeline can generate (true: overwrite) its mipmap chain for us.
        Texture2DContent intermediate = new Texture2DContent();
        intermediate.Mipmaps.Add(Decode2(sourceBitmap));
        intermediate.GenerateMipmaps(true);

        MipmapChain outputChain = new MipmapChain();
        foreach (var mipmap in intermediate.Mipmaps)
        {
            // NOTE(review): Decode2 evidently produces Rgba1010102 bitmaps
            // (this cast would fail otherwise) — the original comment claimed
            // SurfaceFormat.Color; confirm against Decode2's implementation.
            PixelBitmapContent<Rgba1010102> level = (PixelBitmapContent<Rgba1010102>)mipmap;

            // When EncodeAfter is set, each generated level is passed through
            // Encode2 before being stored; otherwise it is stored as-is.
            outputChain.Add(EncodeAfter ? Encode2(level) : level);
        }

        result.Faces[faceIndex++] = outputChain;
    }

    return result;
}
/// <summary>
/// Applies the outline filter to the top mip level of the given texture
/// (converting the texture to SurfaceFormat.Color first if needed), then
/// regenerates the mipmap chain from the modified level-0 data.
/// </summary>
/// <param name="tex">The texture to outline in place.</param>
/// <returns>
/// true if the outline was applied; false if the bitmap format could not be
/// determined.
/// </returns>
public bool AddOutline(Texture2DContent tex)
{
    SurfaceFormat fmt;
    if (!tex.Mipmaps[0].TryGetFormat(out fmt))
    {
        // Unknown surface format — nothing we can safely do.
        return false;
    }

    if (fmt != SurfaceFormat.Color)
    {
        context_.Logger.LogImportantMessage("Converting from format {0} to Color.", fmt.ToString());
        tex.ConvertBitmapType(typeof(PixelBitmapContent<Color>));
    }

    // Read level 0 after any conversion; the byte[] overload mutates the
    // buffer in place and reports how many pixels it touched.
    var topLevel = tex.Mipmaps[0];
    byte[] pixels = topLevel.GetPixelData();
    int touched = AddOutline(pixels, topLevel.Width, topLevel.Height);
    topLevel.SetPixelData(pixels);

    context_.Logger.LogMessage(
        "Converting bitmap {0}x{1} touches {2} pixels.",
        topLevel.Width, topLevel.Height, touched);

    // true: overwrite the existing mip levels so they reflect the outlined
    // level 0.
    tex.GenerateMipmaps(true);
    return true;
}