/// <summary>
/// Processes the model, forcing tangent-frame generation, baking transforms
/// (merged fully for non-skinned models) and attaching mesh metadata to Tag.
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }

    // Tangent frames are always needed: rendering uses tangent-space normal mapping.
    GenerateTangentFrames = true;

    // Bake the root transform into the scene geometry.
    MeshHelper.TransformScene(input, input.Transform);
    input.Transform = Matrix.Identity;

    // Skinned models must keep their node hierarchy; everything else is merged.
    if (!_isSkinned)
        MergeTransforms(input);

    ModelContent builtModel = base.Process(input, context);

    // Gather some information that will be useful at run time.
    MeshMetadata meshMetadata = new MeshMetadata();
    BoundingBox bounds = new BoundingBox();
    meshMetadata.BoundingBox = ComputeBoundingBox(input, ref bounds, meshMetadata);

    // The runtime reads this back from Model.Tag.
    builtModel.Tag = meshMetadata;
    return builtModel;
}
/// <summary>
/// Converts a material to one bound to the custom texturing effect, copying
/// the diffuse texture across when present.
/// </summary>
/// <exception cref="InvalidContentException">
/// Thrown when the input is not a BasicMaterialContent.
/// </exception>
public override MaterialContent Process(MaterialContent input, ContentProcessorContext context)
{
    // Create a new effect material and point it at our custom effect file.
    EffectMaterialContent customMaterial = new EffectMaterialContent();
    string effectFile = Path.GetFullPath("Effects\\PreEffects\\TexturingEffect.fx");
    customMaterial.Effect = new ExternalReference<EffectContent>(effectFile);

    // BUG FIX: the previous hard cast threw an uninformative InvalidCastException
    // for non-basic materials; report a proper content error instead.
    BasicMaterialContent basicMaterial = input as BasicMaterialContent;
    if (basicMaterial == null)
    {
        throw new InvalidContentException(string.Format(
            "This processor only supports BasicMaterialContent, but the input mesh uses {0}.",
            input.GetType()), input.Identity);
    }

    // Copy texture data across from the original material.
    if (basicMaterial.Texture != null)
    {
        customMaterial.Textures.Add("DefaultTexture", basicMaterial.Texture);
        customMaterial.OpaqueData.Add("TextureEnabled", true);
    }

    // Chain to the base material processor.
    return base.Process(customMaterial, context);
}
/// <summary>
/// Swaps the incoming material for an EffectMaterialContent bound to the
/// custom effect declared in the material data file, carrying the diffuse
/// texture and any extra effect parameters across.
/// </summary>
protected override MaterialContent ConvertMaterial(MaterialContent material, ContentProcessorContext context)
{
    MaterialData materialData = incomingMaterials.Single(m => m.Name == material.Name);

    EffectMaterialContent effectMaterial = new EffectMaterialContent();
    effectMaterial.Effect = new ExternalReference<EffectContent>(Path.Combine(contentPath, materialData.CustomEffect));
    effectMaterial.Name = material.Name;
    effectMaterial.Identity = material.Identity;

    // Only the diffuse "Texture" slot can be mapped automatically.
    foreach (KeyValuePair<String, ExternalReference<TextureContent>> texture in material.Textures)
    {
        if (texture.Key == "Texture")
        {
            effectMaterial.Textures.Add(texture.Key, texture.Value);
        }
        else
        {
            context.Logger.LogWarning(null, material.Identity,
                "There were some other textures referenced by the model, but we can't properly assign them to the correct effect parameter.");
        }
    }

    // Forward the extra parameters declared in the material data file.
    foreach (EffectParam param in materialData.EffectParams)
    {
        if (param.Category == EffectParamCategory.OpaqueData)
        {
            effectMaterial.OpaqueData.Add(param.Name, param.Value);
        }
        else if (param.Category == EffectParamCategory.Texture)
        {
            effectMaterial.Textures.Add(param.Name, new ExternalReference<TextureContent>((string)param.Value));
        }
    }

    return base.ConvertMaterial(effectMaterial, context);
}
/// <summary>
/// Creates a new material bound to the configured custom effect file, copying
/// all textures and opaque data from the incoming material.
/// </summary>
/// <exception cref="ArgumentException">Thrown when no custom effect is configured.</exception>
public override MaterialContent Process(MaterialContent input, ContentProcessorContext context)
{
    if (string.IsNullOrEmpty(customEffect))
        throw new ArgumentException("Custom Effect not set to an effect file");

    // Point a fresh material at the custom effect file.
    var replacement = new EffectMaterialContent();
    replacement.Effect = new ExternalReference<EffectContent>(Path.GetFullPath(customEffect));

    // Carry every texture across to the new material.
    foreach (var texture in input.Textures)
        replacement.Textures.Add(texture.Key, texture.Value);

    // Carry every opaque-data entry across as well.
    foreach (var entry in input.OpaqueData)
        replacement.OpaqueData.Add(entry.Key, entry.Value);

    // Let the base material processor finish the remaining processing.
    return base.Process(replacement, context);
}
/// <summary>
/// Builds the model, then stores the world-space center of each mesh's bounding
/// sphere in the model's Tag (optionally flattened to ground level).
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    ModelContent model = base.Process(input, context);

    // Results will be stored in this collection.
    List<Vector3> points = new List<Vector3>();

    // Loop through each mesh, collecting the center of its bounding sphere.
    foreach (ModelMeshContent mesh in model.Meshes)
    {
        // Transform the center by the mesh's parent bone matrix; without this
        // every center would be reported in untransformed local space.
        // BUG FIX: the old code declared a 'transform' local behind a
        // meaningless struct-vs-null check and then never used it.
        var p = Vector3.Transform(mesh.BoundingSphere.Center, mesh.ParentBone.Transform);

        // Optionally flatten the point onto the ground plane.
        if (PreservePointHeight)
            points.Add(p);
        else
            points.Add(new Vector3(p.X, 0, p.Z));
    }

    // Additional data is always stored in the Tag property of the object.
    model.Tag = points;
    return model;
}
/// <summary>
/// Converts a NodeContent tree to a ModelContent, then attaches per-mesh-part
/// MaterialInfo (handling flags and render state) read from the material data XML.
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    contentPath = Environment.CurrentDirectory;

    // Load the material descriptions that accompany this model.
    using (XmlReader reader = XmlReader.Create(MaterialDataFilePath))
    {
        incomingMaterials = IntermediateSerializer.Deserialize<List<MaterialData>>(reader, null);
    }

    // Rebuild this asset whenever the material data file changes.
    context.AddDependency(Path.Combine(Environment.CurrentDirectory, MaterialDataFilePath));

    // Chain to the base ModelProcessor class so it can convert the model data.
    ModelContent model = base.Process(input, context);

    // Stash each material's flags in the corresponding mesh part's Tag.
    foreach (ModelMeshContent mesh in model.Meshes)
    {
        foreach (ModelMeshPartContent part in mesh.MeshParts)
        {
            MaterialData data = incomingMaterials.Single(m => m.Name == part.Material.Name);
            MaterialInfo info = new MaterialInfo();
            info.HandlingFlags = data.HandlingFlags;
            info.RenderState = data.RenderState;
            part.Tag = info;
        }
    }

    return model;
}
/// <summary>
/// Iterates all of the tile sets and builds external references to the textures. Useful if you want to just
/// load the resulting map and not have to also load up textures. The external reference is stored on the
/// TileSet's Texture field so make sure you serialize that if you call this method.
/// </summary>
public static void BuildTileSetTextures(MapContent input, ContentProcessorContext context, string textureRoot = "")
{
    foreach (var tileSet in input.TileSets)
    {
        // Get the real path to the image.
        string path = Path.Combine(textureRoot, tileSet.Image);

        // The asset name is the entire path, minus extension, after the content directory.
        // BUG FIX: the extension index must come from the combined path, not from
        // tileSet.Image alone — a non-empty textureRoot shifted the cut point.
        string asset = string.Empty;
        if (path.StartsWith(Directory.GetCurrentDirectory()))
            asset = path.Remove(path.LastIndexOf('.')).Substring(Directory.GetCurrentDirectory().Length + 1);
        else
            asset = Path.GetFileNameWithoutExtension(path);

        // Build parameters for the texture processor.
        OpaqueDataDictionary data = new OpaqueDataDictionary();
        data.Add("GenerateMipmaps", false);
        data.Add("ResizeToPowerOfTwo", false);
        data.Add("TextureFormat", TextureProcessorOutputFormat.Color);
        data.Add("ColorKeyEnabled", tileSet.ColorKey.HasValue);
        data.Add("ColorKeyColor", tileSet.ColorKey.HasValue ? tileSet.ColorKey.Value : Microsoft.Xna.Framework.Color.Magenta);

        // Build the texture as an external reference.
        tileSet.Texture = context.BuildAsset<Texture2DContent, Texture2DContent>(
            new ExternalReference<Texture2DContent>(path), null, data, null, asset);
    }
}
/// <summary>
/// Converts an intermediate NodeContent tree to a ModelContent object,
/// embedding the extracted skinning data in the model's Tag.
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    // Extract the skinning information before the base processor converts the tree.
    SkinningData skinningData = SkinningHelpers.GetSkinningData(input, context, SkinnedEffect.MaxBones);

    ModelContent model = base.Process(input, context);
    model.Tag = skinningData;
    return model;
}
/// <summary>
/// Parses shader compiler output line by line, logging warnings and returning
/// an exception for the first error (or a generic InvalidContentException when
/// no line could be parsed).
/// </summary>
private Exception ProcessErrorsAndWarnings(string errorsAndWarnings, EffectContent input, ContentProcessorContext context)
{
    // Split the output into individual lines.
    var errors = errorsAndWarnings.Split('\n');

    // Process each line, extracting the location and message information.
    for (var i = 0; i < errors.Length; i++)
    {
        // BUG FIX: after splitting on '\n', a blank line is "" or "\r", so the
        // old StartsWith(Environment.NewLine) test never matched — and the
        // 'break' it guarded would have abandoned all remaining lines anyway.
        if (string.IsNullOrWhiteSpace(errors[i]))
            continue;

        // Find the "(line,column)" location marker.
        var openIndex = errors[i].IndexOf('(');
        var closeIndex = errors[i].IndexOf(')');

        // Can't process the message if it has no line counter.
        if (openIndex == -1 || closeIndex == -1)
            continue;

        // Find the error number (e.g. "X1234"), then move forward into the message.
        var errorIndex = errors[i].IndexOf('X', closeIndex);
        if (errorIndex < 0)
            return new InvalidContentException(errors[i], input.Identity);

        // Trim out the data we need to feed the logger.
        var fileName = errors[i].Remove(openIndex);
        var lineAndColumn = errors[i].Substring(openIndex + 1, closeIndex - openIndex - 1);
        var description = errors[i].Substring(errorIndex);

        // When the file name is not present, the error can be found in the root file.
        if (string.IsNullOrEmpty(fileName))
            fileName = input.Identity.SourceFilename;

        // Resolve relative include paths against the root effect file.
        var fileInfo = new FileInfo(fileName);
        if (!fileInfo.Exists)
        {
            var parentFile = new FileInfo(input.Identity.SourceFilename);
            fileInfo = new FileInfo(Path.Combine(parentFile.Directory.FullName, fileName));
        }
        fileName = fileInfo.FullName;

        // Construct the temporary content identity and file the error or warning.
        var identity = new ContentIdentity(fileName, input.Identity.SourceTool, lineAndColumn);
        if (errors[i].Contains("warning"))
        {
            description = "A warning was generated when compiling the effect.\n" + description;
            context.Logger.LogWarning(string.Empty, identity, description, string.Empty);
        }
        else if (errors[i].Contains("error"))
        {
            description = "Unable to compile the effect.\n" + description;
            return new InvalidContentException(description, identity);
        }
    }

    // If no exceptions were created in the above loop, generate a generic one here.
    return new InvalidContentException(errorsAndWarnings, input.Identity);
}
/// <summary>
/// Builds the model, then records its vertex positions and a bounding sphere
/// in a tag dictionary so the runtime can query them without re-walking the mesh.
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    // Chain to the base ModelProcessor class.
    ModelContent model = base.Process(input, context);

    // Look up the input vertex positions.
    FindVertices(input);

    // The Tag holds a string->object dictionary. Built-in types (strings,
    // Vector3, BoundingSphere, dictionaries, arrays) serialize without a custom
    // ContentTypeWriter/ContentTypeReader, and a dictionary lets processors
    // combine many kinds of information under the single Tag value.
    Dictionary<string, object> tagData = new Dictionary<string, object>();
    model.Tag = tagData;

    // Store vertex information in the tag data, as an array of Vector3.
    tagData.Add("Vertices", vertices.ToArray());

    // Also store a custom bounding sphere.
    tagData.Add("BoundingSphere", BoundingSphere.CreateFromPoints(vertices));

    return model;
}
/// <summary>
/// Builds a cube map from a texture whose single face holds six horizontally
/// tiled images laid out X+, X-, Y+, Y-, Z+, Z-. An input that is already a
/// cube map is passed straight to the base processor.
/// </summary>
private TextureContent GenerateCubemap(TextureContent input, ContentProcessorContext context)
{
    // BUG FIX: a plain 2D texture exposes only one face collection, so indexing
    // Faces[1] unconditionally could throw. Guard the face count first.
    if (input.Faces.Count > 1 && input.Faces[1].Count != 0)
    {
        // It's already a cubemap.
        return base.Process(input, context);
    }

    TextureCubeContent cubeContent = new TextureCubeContent();

    // Convert the input data to Color format, for ease of processing.
    input.ConvertBitmapType(typeof(PixelBitmapContent<Color>));

    int height = input.Faces[0][0].Height;
    int width = input.Faces[0][0].Width / 6;

    // Split the image into 6 pieces, laid out X+, X-, Y+, Y-, Z+, Z-.
    cubeContent.Faces[(int)CubeMapFace.PositiveX] = CreateFace(input.Faces[0][0], width, height, 0);
    cubeContent.Faces[(int)CubeMapFace.NegativeX] = CreateFace(input.Faces[0][0], width, height, width * 1);
    cubeContent.Faces[(int)CubeMapFace.PositiveY] = CreateFace(input.Faces[0][0], width, height, width * 2);
    cubeContent.Faces[(int)CubeMapFace.NegativeY] = CreateFace(input.Faces[0][0], width, height, width * 3);
    cubeContent.Faces[(int)CubeMapFace.PositiveZ] = CreateFace(input.Faces[0][0], width, height, width * 4);
    cubeContent.Faces[(int)CubeMapFace.NegativeZ] = CreateFace(input.Faces[0][0], width, height, width * 5);

    // Calculate mipmap data.
    cubeContent.GenerateMipmaps(true);

    // Compress the cubemap into DXT1 format.
    cubeContent.ConvertBitmapType(typeof(Dxt1BitmapContent));
    return cubeContent;
}
/// <summary>
/// Runs the base texture processing, then (optionally) flattens and blurs the
/// image before converting it into a normal map in place.
/// </summary>
public override TextureContent Process(TextureContent input, ContentProcessorContext context)
{
    logger = context.Logger;

    logger.LogMessage("sending texture to base TextureProcessor for initial processing");
    var textureContent = base.Process(input, context);

    var bmp = (PixelBitmapContent<Color>)textureContent.Faces[0][0];
    var destData = bmp.getData();

    // Optionally collapse the image to a flat two-color heightmap first.
    if (flattenImage)
    {
        logger.LogMessage("flattening image");
        destData = TextureUtils.createFlatHeightmap(destData, opaqueColor, transparentColor);
    }

    // Optionally blur before edge detection to reduce noise in the normals.
    if (blurType != BlurType.None)
    {
        // FIX: the message previously read "width blurDeviation" (typo for "with").
        logger.LogMessage("blurring image with blurDeviation: {0}", blurDeviation);
        if (blurType == BlurType.Color)
            destData = TextureUtils.createBlurredTexture(destData, bmp.Width, bmp.Height, (double)blurDeviation);
        else
            destData = TextureUtils.createBlurredGrayscaleTexture(destData, bmp.Width, bmp.Height, (double)blurDeviation);
    }

    logger.LogMessage("generating normal map with {0}", edgeDetectionFilter);
    destData = TextureUtils.createNormalMap(destData, edgeDetectionFilter, bmp.Width, bmp.Height, normalStrength, invertX, invertY);

    // Write the processed pixels back into the bitmap in place.
    bmp.setData(destData);

    return textureContent;
}
/// <summary>
/// Replaces the incoming material with one bound to the Phong effect, copying
/// the basic material's lighting parameters into the effect's opaque data.
/// </summary>
/// <exception cref="InvalidContentException">Thrown for unrecognized material types.</exception>
protected override MaterialContent ConvertMaterial(MaterialContent material, ContentProcessorContext context)
{
    var myMaterial = new EffectMaterialContent();
    var effectPath = Path.GetFullPath("Effect/Phong.fx");
    myMaterial.Effect = new ExternalReference<EffectContent>(effectPath);

    // Pass the material name to the effect.
    // (TODO: this does not actually reach the effect yet.)
    Console.WriteLine("Material name : " + material.Name);
    myMaterial.Effect.Name = material.Name;

    if (material is BasicMaterialContent)
    {
        // Copy the basic lighting parameters into the effect's opaque data.
        var basicMaterial = (BasicMaterialContent)material;
        myMaterial.OpaqueData.Add("DiffuseColor", basicMaterial.DiffuseColor);
        myMaterial.OpaqueData.Add("Alpha", basicMaterial.Alpha);
        myMaterial.OpaqueData.Add("EmissiveColor", basicMaterial.EmissiveColor);
        myMaterial.OpaqueData.Add("SpecularColor", basicMaterial.SpecularColor);
        myMaterial.OpaqueData.Add("SpecularPower", 4.0f);
    }
    else if (!(material is EffectMaterialContent))
    {
        // BUG FIX: the old effect-material branch declared an unused local, and
        // the fallback threw the overly generic System.Exception. Effect
        // materials are passed through without extra parameters.
        throw new InvalidContentException("unknown material");
    }

    return base.ConvertMaterial(myMaterial, context);
}
/// <summary>
/// Builds the terrain model: derives the number of LOD levels from the patch
/// size, then constructs one PatchContent per (patchSize - 1) cell block of
/// the height map.
/// </summary>
public TerrainModelContent Build(ContentProcessorContext context)
{
    // Derive the number of detail levels from the patch size by repeated
    // halving (effectively log2 of (patchSize - 1) * 2).
    // TODO: confirm the intended relationship is exactly log2.
    int numLevels = 0;
    for (int n = (_patchSize - 1) * 2; n != 1; n /= 2)
        numLevels++;

    int numPatchesX = (_heightMap.Width - 1) / (_patchSize - 1);
    int numPatchesY = (_heightMap.Height - 1) / (_patchSize - 1);

    // Build every patch in the grid.
    PatchContent[,] patches = new PatchContent[numPatchesX, numPatchesY];
    for (int y = 0; y < numPatchesY; y++)
    {
        for (int x = 0; x < numPatchesX; x++)
        {
            var builder = new PatchContentBuilder(_patchSize, x, y, _heightMap, numLevels, _detailTextureTiling, _horizontalScale);
            patches[x, y] = builder.Build();
        }
    }

    return new TerrainModelContent
    {
        NumPatchesX = numPatchesX,
        NumPatchesY = numPatchesY,
        Patches = patches,
        HeightMap = _heightMap,
        Material = _material
    };
}
/// <summary>
/// Builds the model, then attaches the processed animation clips to its Tag.
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    model = base.Process(input, context);

    // Extract the animations and expose them at run time via Tag.
    AnimationClips clips = ProcessAnimations(model, input, context);
    model.Tag = clips;

    return model;
}
/// <summary>
/// Changes all the materials to use our skinned model effect.
/// </summary>
/// <exception cref="InvalidContentException">
/// Thrown when the input is not a BasicMaterialContent.
/// </exception>
protected override MaterialContent ConvertMaterial(MaterialContent material, ContentProcessorContext context)
{
    // Only basic materials can be converted; anything else is a content error.
    var basicMaterial = material as BasicMaterialContent;
    if (basicMaterial == null)
    {
        throw new InvalidContentException(string.Format(
            "InstancedSkinnedModelProcessor only supports BasicMaterialContent, " +
            "but input mesh uses {0}.", material.GetType()));
    }

    // Bind the new material to our skinned instancing effect.
    var effectMaterial = new EffectMaterialContent();
    string effectPath = Path.GetFullPath("SkinnedModelInstancing.fx");
    effectMaterial.Effect = new ExternalReference<EffectContent>(effectPath);

    // Carry the diffuse texture across, if there is one.
    if (basicMaterial.Texture != null)
        effectMaterial.Textures.Add("Texture", basicMaterial.Texture);

    // Chain to the base ModelProcessor converter.
    return base.ConvertMaterial(effectMaterial, context);
}
/// <summary>
/// Builds and loads a fragment asset, resolving the file name relative to the
/// given content identity when one is supplied.
/// </summary>
protected FragmentContent LoadFragmentContent(ContentProcessorContext context, string fileName, ContentIdentity relativeToContent = null)
{
    ExternalReference<FragmentContent> reference;
    if (relativeToContent != null)
        reference = new ExternalReference<FragmentContent>(fileName, relativeToContent);
    else
        reference = new ExternalReference<FragmentContent>(fileName);

    return context.BuildAndLoadAsset<FragmentContent, FragmentContent>(reference, null);
}
/// <summary>
/// Rebinds the material to the configured effect, forwarding every texture and
/// this processor's texture-related parameters to the standard MaterialProcessor.
/// </summary>
protected override MaterialContent ConvertMaterial(MaterialContent material, ContentProcessorContext context)
{
    var customMaterial = new EffectMaterialContent();
    customMaterial.Effect = new ExternalReference<EffectContent>(effectPath);

    // Carry every texture from the source material across.
    foreach (var texture in material.Textures)
        customMaterial.Textures.Add(texture.Key, texture.Value);

    // Forward this processor's texture settings to the standard processor.
    var parameters = new OpaqueDataDictionary();
    parameters["ColorKeyColor"] = ColorKeyColor;
    parameters["ColorKeyEnabled"] = ColorKeyEnabled;
    parameters["TextureFormat"] = TextureFormat;
    parameters["GenerateMipmaps"] = GenerateMipmaps;
    parameters["ResizeTexturesToPowerOfTwo"] = ResizeTexturesToPowerOfTwo;

    return context.Convert<MaterialContent, MaterialContent>(
        customMaterial, typeof(MaterialProcessor).Name, parameters);
}
/// <summary>
/// Imports the asset named in the model description (.drmdl file) and merges
/// the resulting scene into this node.
/// </summary>
/// <param name="context">Contains any required custom process parameters.</param>
/// <exception cref="ArgumentNullException">Thrown when context is null.</exception>
/// <exception cref="InvalidContentException">Thrown when no file name is configured.</exception>
public void Import(ContentProcessorContext context)
{
    if (context == null)
        throw new ArgumentNullException("context");
    if (string.IsNullOrWhiteSpace(ModelDescription.FileName))
        throw new InvalidContentException("The attribute 'File' is not set in the model description (.drmdl file).", Identity);

    // Build the referenced model file with the importer named in the description.
    var fileName = ContentHelper.FindFile(ModelDescription.FileName, Identity);
    var asset = new ExternalReference<NodeContent>(fileName);
    var node = context.BuildAndLoadAsset<NodeContent, NodeContent>(asset, null, null, ModelDescription.Importer);

    // BuildAndLoadAsset does not return root node in MonoGame.
    while (node.Parent != null)
        node = node.Parent;

    if (node.GetType() == typeof(NodeContent))
    {
        // Root node is exactly NodeContent: absorb its content and children.
        Name = node.Name;
        Transform = node.Transform;
        Animations.AddRange(node.Animations);
        OpaqueData.AddRange(node.OpaqueData);

        // Detach the children before re-parenting them under this node.
        var children = node.Children.ToArray();
        node.Children.Clear();
        Children.AddRange(children);
    }
    else
    {
        // Root node is a derived type: keep it intact as a child.
        Children.Add(node);
    }
}
/// <summary>
/// Builds the given animation file and merges the animations found on its
/// skeleton root into the target dictionary, replacing clips that already
/// exist under the same name.
/// </summary>
private static void MergeAnimation(string animationFilePath, AnimationContentDictionary animationDictionary, ContentIdentity contentIdentity, ContentProcessorContext context)
{
    // Use content pipeline to build the asset.
    NodeContent mergeModel = context.BuildAndLoadAsset<NodeContent, NodeContent>(
        new ExternalReference<NodeContent>(animationFilePath), null);

    // Find the skeleton.
    BoneContent mergeRoot = MeshHelper.FindSkeleton(mergeModel);
    if (mergeRoot == null)
    {
        context.Logger.LogWarning(null, contentIdentity,
            "Animation model file '{0}' has no root bone. Cannot merge animations.", animationFilePath);
        return;
    }

    // Merge all animations of the skeleton root node.
    foreach (string animationName in mergeRoot.Animations.Keys)
    {
        if (animationDictionary.ContainsKey(animationName))
        {
            // An existing clip with this name gets overwritten.
            context.Logger.LogWarning(null, contentIdentity,
                "Replacing animation '{0}' from '{1}' with merged animation.", animationName, animationFilePath);
            animationDictionary[animationName] = mergeRoot.Animations[animationName];
        }
        else
        {
            context.Logger.LogImportantMessage(
                "Merging animation '{0}' from '{1}'.", animationName, animationFilePath);
            animationDictionary.Add(animationName, mergeRoot.Animations[animationName]);
        }
    }
}
/// <summary>
/// Processes a skinned model: validates and flattens the skeleton, captures the
/// bind pose, inverse bind pose, hierarchy and bone-name lookup, and stores
/// them in the model's Tag as a SkinningDataStorage.
/// </summary>
/// <exception cref="InvalidContentException">
/// Thrown when no skeleton is found or the bone count exceeds SkinnedEffect.MaxBones.
/// </exception>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    ValidateMesh(input, context, null);

    BoneContent skeleton = MeshHelper.FindSkeleton(input);
    if (skeleton == null)
        throw new InvalidContentException("Input skeleton not found.");

    // Bake everything into a single coordinate system.
    FlattenTransforms(input, skeleton);

    // Read bind pose and skeleton hierarchy data.
    IList<BoneContent> bones = MeshHelper.FlattenSkeleton(skeleton);
    if (bones.Count > SkinnedEffect.MaxBones)
    {
        throw new InvalidContentException(string.Format("Skeleton has {0} bones, but the max is {1}.",
            bones.Count, SkinnedEffect.MaxBones));
    }

    List<Matrix> bindPose = new List<Matrix>();
    List<Matrix> inverseBindPose = new List<Matrix>();
    List<int> skeletonHierarchy = new List<int>();
    Dictionary<string, int> boneIndices = new Dictionary<string, int>();

    foreach (BoneContent bone in bones)
    {
        bindPose.Add(bone.Transform);
        inverseBindPose.Add(Matrix.Invert(bone.AbsoluteTransform));
        // Parent index in the flattened bone list (-1 for the root).
        skeletonHierarchy.Add(bones.IndexOf(bone.Parent as BoneContent));
        boneIndices.Add(bone.Name, boneIndices.Count);
    }

    ModelContent model = base.Process(input, context);
    model.Tag = new SkinningDataStorage(bindPose, inverseBindPose, skeletonHierarchy, boneIndices);
    return model;
}
/// <summary>
/// Generates the additional ExEn output on Windows targets before running the
/// standard font texture processing.
/// </summary>
public override SpriteFontContent Process(Texture2DContent input, ContentProcessorContext context)
{
    // Only the Windows build produces the extra ExEn artifacts.
    if (context.TargetPlatform == TargetPlatform.Windows)
        CreateExEnOutput(input, context);

    return base.Process(input, context);
}
/// <summary>
/// Extends the font with every character listed in the kana and kanji list
/// files before running the standard font description processing.
/// </summary>
public override SpriteFontContent Process(FontDescription input, ContentProcessorContext context)
{
    // The two lists were previously handled by duplicated blocks;
    // a shared helper keeps the logic in one place.
    AddCharactersFromFile(input, context, "kana_list.txt");
    AddCharactersFromFile(input, context, "kanji_list.txt");

    return base.Process(input, context);
}

/// <summary>
/// Reads a UTF-8 character list file and adds each character to the font,
/// registering the file as a build dependency so the font rebuilds on change.
/// </summary>
private static void AddCharactersFromFile(FontDescription input, ContentProcessorContext context, string fileName)
{
    // Load the list file and register it with the build.
    string fullPath = Path.GetFullPath(fileName);
    context.AddDependency(fullPath);
    string letters = File.ReadAllText(fullPath, System.Text.Encoding.UTF8);

    // Add every character found in the file to the font.
    foreach (char c in letters)
    {
        input.Characters.Add(c);
    }
}
/// <summary>
/// Premultiplies the alpha channel into the RGB channels of every mipmap of
/// every face, then hands the texture to the base processor.
/// </summary>
public override TextureContent Process(TextureContent input, ContentProcessorContext context)
{
    // Work in 32-bit Color so the channels are directly addressable.
    input.ConvertBitmapType(typeof(PixelBitmapContent<Color>));

    foreach (MipmapChain mipChain in input.Faces)
    {
        foreach (PixelBitmapContent<Color> bitmap in mipChain)
        {
            for (int row = 0; row < bitmap.Height; row++)
            {
                for (int col = 0; col < bitmap.Width; col++)
                {
                    // Scale each color channel by alpha (premultiplied alpha).
                    Color pixel = bitmap.GetPixel(col, row);
                    pixel.R = (byte)(pixel.R * pixel.A / 255);
                    pixel.G = (byte)(pixel.G * pixel.A / 255);
                    pixel.B = (byte)(pixel.B * pixel.A / 255);
                    bitmap.SetPixel(col, row, pixel);
                }
            }
        }
    }

    return base.Process(input, context);
}
/// <summary>
/// Splits the mesh into separate triangles before handing it to the base
/// model processor.
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    // Break the mesh up so every triangle stands alone.
    NodeContent separated = ProcessMesh(input);
    return base.Process(separated, context);
}
/// <summary>
/// Rewrites each texture's file name according to the configured texture path
/// settings, then forwards the texture to ProcessTexture.
/// </summary>
private void ProcessTextures(MaterialContent input, SkinnedModelMaterialContent skinnedModelMaterial, ContentProcessorContext context)
{
    foreach (string key in input.Textures.Keys)
    {
        ExternalReference<TextureContent> texture = input.Textures[key];

        // When a texture path override is configured, redirect the file name.
        if (!String.IsNullOrEmpty(texturePath))
        {
            string basePath;
            if (texturePathType == PathType.Relative)
            {
                // Resolve the override relative to the source asset's folder.
                string sourceAssetPath = Path.GetDirectoryName(input.Identity.SourceFilename);
                basePath = Path.GetFullPath(Path.Combine(sourceAssetPath, texturePath));
            }
            else
            {
                basePath = texturePath;
            }

            texture.Filename = Path.Combine(basePath, Path.GetFileName(texture.Filename));
        }

        ProcessTexture(key, texture, skinnedModelMaterial, context);
    }
}
/// <summary>
/// Stores the current selected technique (from the material's opaque data,
/// an Int32) into the mesh name of each node owning geometry that uses it,
/// recursing through all children.
/// </summary>
private void StoreEffectTechniqueInMeshName(NodeContent input, ContentProcessorContext context)
{
    var mesh = input as MeshContent;
    if (mesh != null)
    {
        foreach (GeometryContent geometry in mesh.Geometry)
        {
            var effectMaterial = geometry.Material as EffectMaterialContent;
            if (effectMaterial != null && effectMaterial.OpaqueData.ContainsKey("technique"))
            {
                // Encode the technique index into the node name.
                input.Name = input.Name + effectMaterial.OpaqueData["technique"];
            }
        }
    }

    // Recurse into all child nodes.
    foreach (NodeContent child in input.Children)
        StoreEffectTechniqueInMeshName(child, context);
}
/// <summary>
/// Compiles the effect in optimized mode, forwarding any "Defines" build
/// parameter to the compiler.
/// </summary>
public override CompiledEffectContent Process(Microsoft.Xna.Framework.Content.Pipeline.Graphics.EffectContent input, ContentProcessorContext context)
{
    // Always ship optimized shaders.
    this.DebugMode = EffectProcessorDebugMode.Optimize;

    // Pass through any preprocessor defines supplied by the build.
    if (context.Parameters.ContainsKey("Defines"))
        this.Defines = context.Parameters["Defines"].ToString();

    return base.Process(input, context);
}
/// <summary>
/// Compiles an effect for a MonoGame platform using the MGFX tool chain, or
/// falls back to the stock XNA effect processor for non-MonoGame targets.
/// </summary>
public override CompiledEffectContent Process(EffectContent input, ContentProcessorContext context)
{
    //System.Diagnostics.Debugger.Launch();

    // If this isn't a MonoGame platform then do the default processing.
    var platform = ContentHelper.GetMonoGamePlatform();
    if (platform == MonoGamePlatform.None)
        return base.Process(input, context);

    var options = new Options();
    options.SourceFile = input.Identity.SourceFilename;
    options.Profile = platform == MonoGamePlatform.Windows8 ? ShaderProfile.DirectX_11 : ShaderProfile.OpenGL;
    options.Debug = DebugMode == EffectProcessorDebugMode.Debug;
    options.OutputFile = context.OutputFilename;

    // Parse the MGFX file expanding includes, macros, and returning the techniques.
    ShaderInfo shaderInfo;
    try
    {
        shaderInfo = ShaderInfo.FromFile(options.SourceFile, options);
        foreach (var dep in shaderInfo.Dependencies)
            context.AddDependency(dep);
    }
    catch (Exception ex)
    {
        // TODO: Extract good line numbers from mgfx parser!
        throw new InvalidContentException(ex.Message, input.Identity, ex);
    }

    // Create the effect object.
    EffectObject effect = null;
    var shaderErrorsAndWarnings = string.Empty;
    try
    {
        effect = EffectObject.CompileEffect(shaderInfo, out shaderErrorsAndWarnings);
    }
    catch (ShaderCompilerException)
    {
        throw ProcessErrorsAndWarnings(shaderErrorsAndWarnings, input, context);
    }

    // Write out the effect to a runtime format.
    CompiledEffectContent result;
    try
    {
        using (var stream = new MemoryStream())
        {
            using (var writer = new BinaryWriter(stream))
                effect.Write(writer, options);

            // BUG FIX: GetBuffer() returns the whole backing array, including
            // unused trailing capacity; ToArray() returns exactly the bytes written.
            result = new CompiledEffectContent(stream.ToArray());
        }
    }
    catch (Exception ex)
    {
        throw new InvalidContentException("Failed to serialize the effect!", input.Identity, ex);
    }

    return result;
}
/// <summary>
/// Converts an intermediate NodeContent tree to a ModelContent object,
/// attaching the bone hierarchy indices and all animation clips via the Tag.
/// </summary>
public override ModelContent Process(NodeContent input, ContentProcessorContext context)
{
    ValidateMesh(input, context, null);

    // Chain to the base ModelProcessor class so it can convert the model data.
    ModelContent model = base.Process(input, context);

    // Record each bone's parent index (-1 for the root).
    List<int> boneHierarchy = new List<int>();
    foreach (ModelBoneContent bone in model.Bones)
        boneHierarchy.Add(model.Bones.IndexOf(bone.Parent as ModelBoneContent));

    // Animation clips inside the object (mesh).
    Dictionary<string, ModelAnimationClip> animationClips = new Dictionary<string, ModelAnimationClip>();

    // Animation clips at the root of the object.
    Dictionary<string, ModelAnimationClip> rootClips = new Dictionary<string, ModelAnimationClip>();

    // Process the animations.
    ProcessAnimations(input, model, animationClips, rootClips);

    // Store the data for the model.
    model.Tag = new ModelData(animationClips, rootClips, null, null, boneHierarchy);

    return model;
}
/// <summary>
/// Converts a MeshContent into a ModelMeshContent, packing all geometry into a
/// single shared vertex/index buffer and computing a bounding sphere from the
/// mesh positions.
/// </summary>
private ModelMeshContent ProcessMesh(MeshContent mesh, ModelBoneContent parent, ContentProcessorContext context)
{
    var meshParts = new List<ModelMeshPartContent>();
    var sharedVertexBuffer = new VertexBufferContent();
    var sharedIndexBuffer = new IndexCollection();
    var baseVertex = 0;

    foreach (var geometry in mesh.Geometry)
    {
        var vertexCount = geometry.Vertices.VertexCount;

        ModelMeshPartContent part;
        if (vertexCount == 0)
        {
            // Nothing to draw for this geometry; keep an empty part so the
            // material assignment below still happens.
            part = new ModelMeshPartContent();
        }
        else
        {
            // Append this geometry's vertices to the shared buffer.
            var geometryBuffer = geometry.Vertices.CreateVertexBuffer();
            sharedVertexBuffer.Write(sharedVertexBuffer.VertexData.Length, 1, geometryBuffer.VertexData);

            // Append the indices, remembering where this part's range begins.
            var firstIndex = sharedIndexBuffer.Count;
            sharedIndexBuffer.AddRange(geometry.Indices);

            part = new ModelMeshPartContent(sharedVertexBuffer, sharedIndexBuffer, baseVertex,
                vertexCount, firstIndex, geometry.Indices.Count / 3);

            // Geoms are supposed to all have the same decl, so just steal one of these.
            sharedVertexBuffer.VertexDeclaration = geometryBuffer.VertexDeclaration;

            baseVertex += vertexCount;
        }

        part.Material = geometry.Material;
        meshParts.Add(part);
    }

    // Bound the whole mesh; an empty mesh keeps a default (zero) sphere.
    var bounds = new BoundingSphere();
    if (mesh.Positions.Count > 0)
        bounds = BoundingSphere.CreateFromPoints(mesh.Positions);

    return new ModelMeshContent(mesh.Name, mesh, parent, bounds, meshParts);
}
/// <summary>
/// Recursively converts a NodeContent hierarchy into ModelBoneContent objects,
/// collecting the bones and meshes into the supplied flat lists.
/// </summary>
private ModelBoneContent ProcessNode(NodeContent node, ModelBoneContent parent, List<ModelBoneContent> boneList, List<ModelMeshContent> meshList, ContentProcessorContext context)
{
    // The bone index is its position in the flattened bone list.
    var bone = new ModelBoneContent(node.Name, boneList.Count, node.Transform, parent);
    boneList.Add(bone);

    // Nodes carrying mesh data also produce a mesh entry.
    var meshNode = node as MeshContent;
    if (meshNode != null)
        meshList.Add(ProcessMesh(meshNode, bone, context));

    // Convert children depth-first, preserving their order.
    var childBones = new List<ModelBoneContent>();
    foreach (var child in node.Children)
        childBones.Add(ProcessNode(child, bone, boneList, meshList, context));

    bone.Children = new ModelBoneContentCollection(childBones);
    return bone;
}
/// <summary>
/// Builds a companion output file name for the asset being built: the asset's
/// base name plus the given suffix-and-extension, placed in the output directory
/// derived from the content identity.
/// </summary>
internal static string AssetOutputFilename(ContentIdentity identity, ContentProcessorContext context, string suffixDotExtension)
{
    string baseName = Path.GetFileNameWithoutExtension(context.OutputFilename);
    return Path.Combine(GetOutputDirectory(identity), baseName + suffixDotExtension);
}
/// <summary>
/// Processes a parsed BMFont file, either embedding the page textures directly
/// in the xnb or recording external texture names and page origins for later loading.
/// </summary>
public override BitmapFontProcessorResult Process(BitmapFontFile bitmapFontFile, ContentProcessorContext context)
{
    try
    {
        context.Logger.LogMessage("Processing BMFont, Kerning pairs: {0}", bitmapFontFile.kernings.Count);

        var result = new BitmapFontProcessorResult(bitmapFontFile);
        result.packTexturesIntoXnb = packTexturesIntoXnb;

        foreach (var fontPage in bitmapFontFile.pages)
        {
            // Locate the page texture relative to the font file so it can be embedded.
            var imageFile = Path.Combine(Path.GetDirectoryName(bitmapFontFile.file), fontPage.file);
            context.Logger.LogMessage("looking for image file: {0}", imageFile);

            if (!File.Exists(imageFile))
            {
                throw new Exception(string.Format("Could not locate font atlas file {0} relative to folder {1}", fontPage.file, Directory.GetCurrentDirectory()));
            }

            if (packTexturesIntoXnb)
            {
                // Build the texture and embed its top-level bitmap in the result.
                context.Logger.LogMessage("Found texture: {0}. Packing into xnb", imageFile);
                var textureReference = new ExternalReference<TextureContent>(imageFile);
                var texture = context.BuildAndLoadAsset<TextureContent, TextureContent>(textureReference, "TextureProcessor");

                var textureContent = new Texture2DContent();
                textureContent.Mipmaps.Add(texture.Faces[0][0]);

                if (compressTextures)
                    textureContent.ConvertBitmapType(typeof(Dxt5BitmapContent));

                result.textures.Add(textureContent);
            }
            else
            {
                // Record the content-relative texture name; the texture itself is
                // expected to be built as a separate asset.
                var textureFilename = PathHelper.makeRelativePath(Directory.GetCurrentDirectory(), imageFile).Replace("Content/", string.Empty);
                textureFilename = Path.ChangeExtension(textureFilename, null);
                context.Logger.LogMessage("Not writing texture but it is expected to exist and be processed: {0}", textureFilename);

                result.textureNames.Add(textureFilename);
                result.textureOrigins.Add(new Vector2(fontPage.x, fontPage.y));
            }
        }

        return result;
    }
    catch (Exception ex)
    {
        // Surface the failure in the build log, then let the pipeline handle it.
        context.Logger.LogMessage("Error {0}", ex);
        throw;
    }
}
/// <summary>
/// The main Process method converts an intermediate format content pipeline
/// NodeContent tree to a ModelContent object with embedded animation data.
/// </summary>
public override InstancedSkinnedModelContent Process(NodeContent input, ContentProcessorContext context)
{
    ValidateMesh(input, context, null);

    // Find the skeleton.
    BoneContent skeleton = MeshHelper.FindSkeleton(input);

    if (skeleton == null)
    {
        throw new InvalidContentException("Input skeleton not found.");
    }

    // Bake the transforms so everything is in the same coordinate system
    FlattenTransforms(input, skeleton);

    // This is a helper function that wasn't in the SkinnedModelSample,
    // but was implemented here. Go through and remove meshes that don't
    // have bone weights.
    RemoveInvalidGeometry(input, context);

    // Read the bind pose and skeleton hierarchy data.
    IList <BoneContent> bones = MeshHelper.FlattenSkeleton(skeleton);

    // Collect bone information: bind pose, inverse bind pose, and each bone's
    // parent index within the flattened skeleton list.
    foreach (BoneContent bone in bones)
    {
        bindPose.Add(bone.Transform);
        inverseBindPose.Add(Matrix.Invert(bone.AbsoluteTransform));
        skeletonHierarchy.Add(bones.IndexOf(bone.Parent as BoneContent));
    }

    // We're going to keep a list of all the rows of animation matrices.
    // We'll eventually turn this list into a texture.
    List <Matrix[]> keyFrameMatrices = new List <Matrix[]>();

    // Get a list of animation clips, and at the same time, populate our keyFrameMatrices list
    Dictionary <string, InstancedAnimationClip> animationClips;
    animationClips = ProcessAnimations(skeleton.Animations, bones, keyFrameMatrices);

    // Create a content object that will hold the animation texture data
    PixelBitmapContent <Vector4> animationContent = GetEncodedTexture(keyFrameMatrices, bones.Count);

    // Create a texture 2D content object, and populate it with our data
    TextureContent animationTexture = new Texture2DContent();
    animationTexture.Faces[0].Add(animationContent);

    // We're going to create an instance of the original ModelProcessor,
    // with our minor material modification. The reason we do this is that
    // the ModelProcessor does the heavy lifting for us of doing some BoneWeight
    // stuff, and we don't want to reimplement that when ModelProcessor does
    // such a fine job!
    ModelProcessor processor = new ModifiedModelProcessor();

    // Chain to the base ModelProcessor class so it can convert the model data.
    ModelContent model = processor.Process(input, context);

    // Bundle the model with its animation clips and encoded animation texture.
    InstancedSkinningDataContent data = new InstancedSkinningDataContent(animationClips, animationTexture);
    InstancedSkinnedModelContent instancedModel = new InstancedSkinnedModelContent(model, data);
    return(instancedModel);
}
/// <summary>
/// Builds a LevelIndexContent for a project: every object pool, tile pool and
/// level is staged to an intermediate XML file under "build\" and compiled as
/// a separate nested asset; the returned index records per-level asset names,
/// dependencies and custom properties.
/// </summary>
public override LevelIndexContent Process(Project input, ContentProcessorContext context)
{
    // Staging folder for the intermediate XML files written below.
    if (!Directory.Exists("build"))
    {
        Directory.CreateDirectory("build");
    }

    // Asset key: output filename minus extension, relative to the output directory.
    string asset = context.OutputFilename.Remove(context.OutputFilename.LastIndexOf('.')).Substring(context.OutputDirectory.Length);

    // Stage and build Object Pools
    Dictionary <ObjectPool, string> objectPoolAssetIndex = new Dictionary <ObjectPool, string>();

    int id = 0;
    foreach (ObjectPool pool in input.ObjectPoolManager.Pools)
    {
        string asset_reg = asset + "_objectpool_" + id;
        string build_reg = "build\\" + asset_reg + ".tlo";

        // Wrap the single pool in a throwaway Project so it can be serialized
        // through the library XML proxy.
        Project poolContent = new Project();
        poolContent.ObjectPoolManager.Pools.Add(pool);

        using (FileStream fs = File.Create(build_reg))
        {
            XmlWriter writer = XmlTextWriter.Create(fs);
            //XmlSerializer ser = new XmlSerializer(typeof(ProjectXmlProxy));
            //ser.Serialize(writer, Project.ToXmlProxy(poolContent));
            XmlSerializer ser = new XmlSerializer(typeof(LibraryX));
            ser.Serialize(writer, Library.ToXProxy(poolContent.DefaultLibrary));
            writer.Close();
        }

        // Compile the staged file as a nested asset via the Tlo importer/processor pair.
        OpaqueDataDictionary data = new OpaqueDataDictionary();
        data.Add("ProjectKey", asset);
        data.Add("ObjectPoolKey", pool.Name);
        data.Add("ObjectPoolId", id);

        context.BuildAsset <ObjectRegistryContent, ObjectRegistryContent>(
            new ExternalReference <ObjectRegistryContent>(build_reg), "TloProcessor", data, "TloImporter", asset_reg);

        objectPoolAssetIndex[pool] = asset_reg;
        id++;
    }

    // Stage and build Tile Pools
    Dictionary <TilePool, string> tilesetAssetIndex = new Dictionary <TilePool, string>();

    id = 0;
    foreach (TilePool pool in input.TilePoolManager.Pools)
    {
        string asset_reg = asset + "_tileset_map_" + id;
        string build_reg = "build\\" + asset_reg + ".tlr";

        using (FileStream fs = File.Create(build_reg))
        {
            XmlWriter writer = XmlTextWriter.Create(fs);
            //input.WriteXmlTilesets(writer);
            XmlSerializer ser = new XmlSerializer(typeof(LibraryX.TilePoolX));
            ser.Serialize(writer, TilePool.ToXProxy(pool));
            writer.Close();
        }

        OpaqueDataDictionary data = new OpaqueDataDictionary();
        data.Add("ProjectKey", asset);
        data.Add("TilesetKey", pool.Name);
        data.Add("TilesetId", id);

        context.BuildAsset <TileRegistryContent, TileRegistryContent>(
            new ExternalReference <TileRegistryContent>(build_reg), "TlrProcessor", data, "TlrImporter", asset_reg);

        tilesetAssetIndex[pool] = asset_reg;
        id++;
    }

    // Stage and build levels
    LevelIndexContent content = new LevelIndexContent();

    id = 0;
    foreach (Level level in input.Levels)
    {
        string asset_level = asset + "_level_" + id;
        string build_level = "build\\" + asset_level + ".tlv";

        // Resolve which tile-pool assets this level depends on.
        List <TilePool> pools = TilePoolsByLevel(level);
        List <string> poolAssets = new List <string>();
        foreach (TilePool pool in pools)
        {
            poolAssets.Add(tilesetAssetIndex[pool]);
        }

        // Resolve which object-pool assets this level depends on.
        List <ObjectPool> objectPools = ObjectPoolsByLevel(level);
        List <string> objectPoolAssets = new List <string>();
        foreach (ObjectPool pool in objectPools)
        {
            objectPoolAssets.Add(objectPoolAssetIndex[pool]);
        }

        using (FileStream fs = File.Create(build_level))
        {
            XmlWriter writer = XmlTextWriter.Create(fs);
            XmlSerializer ser = new XmlSerializer(typeof(LevelX));
            ser.Serialize(writer, Level.ToXProxy(level));
            //input.WriteXml(writer);
            writer.Close();
        }

        // Dependency asset lists are passed to the level processor as
        // semicolon-joined strings.
        OpaqueDataDictionary data = new OpaqueDataDictionary();
        data.Add("ProjectKey", asset);
        data.Add("LevelKey", level.Name);
        data.Add("TilesetAssets", string.Join(";", poolAssets));
        data.Add("ObjectPoolAssets", string.Join(";", objectPoolAssets));

        context.BuildAsset <LevelContent, LevelContent>(
            new ExternalReference <LevelContent>(build_level), "TlvProcessor", data, "TlvImporter", asset_level);

        // Index entry carries the asset name plus the level's custom properties.
        LevelIndexEntry entry = new LevelIndexEntry(id, level.Name);
        entry.Asset = asset_level;

        foreach (Property prop in level.PropertyManager.CustomProperties)
        {
            entry.Properties.Add(prop);
        }

        content.Levels.Add(entry);
        id++;
    }

    return(content);
}
/// <summary>
/// Converts a background image into a BackgroundContent: the image is resized
/// to power-of-two dimensions, reoriented to a top-down layout, and split into
/// TILE_SIZE x TILE_SIZE tile textures.
/// </summary>
/// <param name="aInput">Background description (source file path and scroll speed).</param>
/// <param name="aContext">Processor context.</param>
/// <returns>The tiled background content.</returns>
public override BackgroundContent Process(BackgroundXMLData aInput, ContentProcessorContext aContext)
{
    BackgroundContent output = new BackgroundContent();

    FreeImageAPI.FIBITMAP b = FreeImageAPI.FreeImage.LoadEx(aInput.file);
    FreeImageAPI.BITMAPINFO info = FreeImageAPI.FreeImage.GetInfoEx(b);

    // Resize to power of two
    int newWidth = RoundUpPowerOf2(info.bmiHeader.biWidth);
    int newHeight = RoundUpPowerOf2(info.bmiHeader.biHeight);
    b = FreeImageAPI.FreeImage.Rescale(b, newWidth, newHeight, FreeImageAPI.FREE_IMAGE_FILTER.FILTER_BICUBIC);

    // Rotate 180° then mirror horizontally — net effect is a vertical flip,
    // converting the bitmap's row order for GetPixelColor below.
    b = FreeImageAPI.FreeImage.RotateClassic(b, 180);
    FreeImageAPI.FreeImage.FlipHorizontal(b);

    // Number of tiles in each direction (image dimensions are already
    // powers of two, so the division is exact when above TILE_SIZE).
    int nbW = 1, nbH = 1;
    if (newWidth > TILE_SIZE)
    {
        nbW = newWidth / TILE_SIZE;
    }
    if (newHeight > TILE_SIZE)
    {
        nbH = newHeight / TILE_SIZE;
    }

    output.Width = newWidth;
    output.Height = newHeight;

    // Tiles are TILE_SIZE square unless the whole image is smaller.
    int tileW = Math.Min(TILE_SIZE, newWidth);
    int tileH = Math.Min(TILE_SIZE, newHeight);

    BackgroundTileContent[][] tilesContent = new BackgroundTileContent[nbW][];
    for (int i = 0; i < nbW; i++)
    {
        tilesContent[i] = new BackgroundTileContent[nbH];
        for (int j = 0; j < nbH; j++)
        {
            BackgroundTileContent tileContent = new BackgroundTileContent();
            // FIX: tile origins and copy bounds previously mixed the magic
            // number 512 with TILE_SIZE; use TILE_SIZE consistently.
            tileContent.X = i * TILE_SIZE;
            tileContent.Y = j * TILE_SIZE;
            tileContent.Tex = new Texture2DContent();

            BitmapContent tile = new PixelBitmapContent <Color>(tileW, tileH);

            // Copy this tile's pixels out of the source image.
            for (int k = 0; k < tileW; k++)
            {
                for (int l = 0; l < tileH; l++)
                {
                    FreeImageAPI.RGBQUAD rgb;
                    FreeImageAPI.FreeImage.GetPixelColor(b, (uint)(tileContent.X + k), (uint)(tileContent.Y + l), out rgb);
                    ((PixelBitmapContent <Color>)tile).SetPixel(k, l, new Color(rgb.rgbRed, rgb.rgbGreen, rgb.rgbBlue));
                }
            }

            tileContent.Tex.Mipmaps.Add(tile);
            tilesContent[i][j] = tileContent;
        }
    }

    output.TilesContent = tilesContent;
    output.Speed = aInput.speed;
    return(output);
}
/// <summary>
/// Converts raw SVG-derived body templates into a BodyContainer of physics
/// body templates: closed paths become convex-partitioned polygon fixtures,
/// open paths become chain (3+ vertices) or edge (2 vertices) fixtures.
/// </summary>
public override BodyContainer Process(List <RawBodyTemplate> input, ContentProcessorContext context)
{
    // NOTE(review): the guard rejects values below 1 but the message says
    // "greater than zero" — confirm the intended lower bound.
    if (ScaleFactor < 1)
    {
        throw new Exception("Pixel to meter ratio must be greater than zero.");
    }
    if (BezierIterations < 1)
    {
        throw new Exception("Cubic bézier iterations must be greater than zero.");
    }

    // Uniform scale applied to every fixture path (pixel -> simulation units).
    Matrix matScale = Matrix.CreateScale(_scaleFactor, _scaleFactor, 1f);
    SVGPathParser parser = new SVGPathParser(_bezierIterations);
    BodyContainer bodies = new BodyContainer();

    foreach (RawBodyTemplate rawBody in input)
    {
        // Skip the importer's synthetic container body.
        if (rawBody.Name == "importer_default_path_container")
        {
            continue;
        }

        BodyTemplate currentBody = new BodyTemplate();
        currentBody.Mass = rawBody.Mass;
        currentBody.BodyType = rawBody.BodyType;

        foreach (RawFixtureTemplate rawFixture in rawBody.Fixtures)
        {
            // Tessellate the SVG path, applying its own transform plus the global scale.
            List <Polygon> paths = parser.ParseSVGPath(rawFixture.Path, rawFixture.Transformation * matScale);

            for (int i = 0; i < paths.Count; i++)
            {
                if (paths[i].Closed)
                {
                    // Closed path: split into convex pieces, one polygon fixture per piece.
                    List <Vertices> partition = Triangulate.ConvexPartition(paths[i].Vertices, TriangulationAlgorithm.Bayazit);
                    foreach (Vertices v in partition)
                    {
                        currentBody.Fixtures.Add(new FixtureTemplate()
                        {
                            Shape = new PolygonShape(v, rawFixture.Density),
                            Restitution = rawFixture.Restitution,
                            Friction = rawFixture.Friction,
                            Name = rawFixture.Name
                        });
                    }
                }
                else
                {
                    // Open path: a chain shape for 3+ vertices, a single edge for exactly 2.
                    Shape shape;
                    if (paths[i].Vertices.Count > 2)
                    {
                        shape = new ChainShape(paths[i].Vertices);
                    }
                    else
                    {
                        shape = new EdgeShape(paths[i].Vertices[0], paths[i].Vertices[1]);
                    }
                    currentBody.Fixtures.Add(new FixtureTemplate()
                    {
                        Shape = shape,
                        Restitution = rawFixture.Restitution,
                        Friction = rawFixture.Friction,
                        Name = rawFixture.Name
                    });
                }
            }
        }

        // Only keep bodies that produced at least one fixture.
        if (currentBody.Fixtures.Count > 0)
        {
            bodies[rawBody.Name] = currentBody;
            currentBody = null;
        }
    }

    return(bodies);
}
/// <summary>
/// Deserializes raw Ogmo level JSON text into an <c>OgmoMap</c>.
/// </summary>
public override OgmoMap Process(TInput input, ContentProcessorContext context)
{
    var map = Newtonsoft.Json.JsonConvert.DeserializeObject <OgmoMap>(input);
    return map;
}
/// <summary>
/// iOS-specific song processing: converts the input audio to an MP3 beside the
/// built content (or copies it when it already is one) and constructs the
/// SongContent result via reflection (its constructor is non-public).
/// Falls back to the base processor for every other platform.
/// </summary>
public override SongContent Process(AudioContent input, ContentProcessorContext context)
{
    // Fallback if we aren't building for iOS.
    var platform = ContentHelper.GetMonoGamePlatform();
    if (platform != MonoGamePlatform.iOS)
    {
        return(base.Process(input, context));
    }

    //TODO: If quality isn't best and it's a .wma, don't compress to MP3. Leave it as a .wav instead
    string outputFilename = Path.ChangeExtension(context.OutputFilename, "mp3");
    string directoryName = Path.GetDirectoryName(outputFilename);
    if (!Directory.Exists(directoryName))
    {
        Directory.CreateDirectory(directoryName);
    }

    var inputFilename = Path.GetFullPath(input.FileName);

    // XNA's songprocessor converts the bitrate on the input file based
    // on its conversion quality.
    //http://blogs.msdn.com/b/etayrien/archive/2008/09/22/audio-input-and-output-formats.aspx
    int desiredOutputBitRate = 0;
    switch (this.Quality)
    {
    case ConversionQuality.Low:
        desiredOutputBitRate = 96000;
        break;

    case ConversionQuality.Medium:
        desiredOutputBitRate = 128000;
        break;

    case ConversionQuality.Best:
        desiredOutputBitRate = 192000;
        break;
    }

    switch (input.FileType)
    {
    case AudioFileType.Mp3:
        // File was already an .mp3. Don't do lossy compression twice.
        // BUG FIX: previously a null FileStream was created for this case and
        // Close() was called on it unconditionally, throwing NullReferenceException.
        File.Copy(inputFilename, outputFilename, true);
        break;

    case AudioFileType.Wav:
    case AudioFileType.Wma:
        // Encode to mp3; 'using' guarantees the stream is closed even on failure.
        using (var outputStream = new FileStream(outputFilename, FileMode.Create))
        {
            AudioConverter.ConvertFile(inputFilename, outputStream, AudioFileType.Mp3,
                                       desiredOutputBitRate,
                                       input.Format.BitsPerSample,
                                       input.Format.ChannelCount);
        }
        break;

    default:
        // Preserve the original behavior for unrecognized types: an empty
        // output file was created and closed without conversion.
        using (new FileStream(outputFilename, FileMode.Create))
        {
        }
        break;
    }

    context.AddOutputFile(outputFilename);

    // SongContent is a sealed class with no public constructor; build it via reflection.
    var type = typeof(SongContent);
    ConstructorInfo c = type.GetConstructor(BindingFlags.NonPublic | BindingFlags.Instance,
                                            null, new Type[] { typeof(string), typeof(int) }, null);
    var outputSongContent = (SongContent)c.Invoke(new Object[] { Path.GetFileName(outputFilename), (int)input.Duration.TotalMilliseconds });

    return(outputSongContent);
}
/// <summary>
/// Identity processor: hands the string array back unchanged.
/// </summary>
public override string[] Process(string[] input, ContentProcessorContext context)
{
    // No transformation is required for this content type.
    return input;
}
/// <summary>
/// Parses shader compiler output line by line. Recognized diagnostics of the
/// form "file(line,col) : message" are logged as warnings (or collected when
/// the build failed); unrecognized lines are logged/collected verbatim. When
/// <paramref name="buildFailed"/> is set, throws an InvalidContentException
/// carrying all collected text and the best-known source identity.
/// </summary>
private static void ProcessErrorsAndWarnings(bool buildFailed, string shaderErrorsAndWarnings, EffectContent input, ContentProcessorContext context)
{
    // Split the errors and warnings into individual lines.
    var errorsAndWarningArray = shaderErrorsAndWarnings.Split(new[] { "\n", "\r", Environment.NewLine },
                                                             StringSplitOptions.RemoveEmptyEntries);

    // Matches "file(line,col-col) : message".
    // Capture groups: 1 = file name, 2 = line/column span, 5 = message
    // (groups 3 and 4 are nested fragments of the line/column span).
    var errorOrWarning = new Regex(@"(.*)\(([0-9]*(,[0-9]+(-[0-9]+)?)?)\)\s*:\s*(.*)", RegexOptions.Compiled);

    ContentIdentity identity = null;
    var allErrorsAndWarnings = string.Empty;

    // Process all the lines.
    for (var i = 0; i < errorsAndWarningArray.Length; i++)
    {
        var match = errorOrWarning.Match(errorsAndWarningArray[i]);

        // BUG FIX: this previously also required "match.Groups.Count != 4", but
        // GroupCollection.Count is determined by the pattern (5 groups + group 0
        // = 6) regardless of match success, so every line — including valid
        // diagnostics — fell into the "unrecognized" branch.
        if (!match.Success)
        {
            // Just log anything we don't recognize as a warning.
            if (buildFailed)
            {
                allErrorsAndWarnings += errorsAndWarningArray[i] + Environment.NewLine;
            }
            else
            {
                context.Logger.LogWarning(string.Empty, input.Identity, errorsAndWarningArray[i]);
            }

            continue;
        }

        var fileName = match.Groups[1].Value;
        var lineAndColumn = match.Groups[2].Value;
        // BUG FIX: the message is capture group 5, not 3 (group 3 is the nested
        // ",col" fragment of the line/column span).
        var message = match.Groups[5].Value;

        // Try to ensure a good file name for the error message.
        if (string.IsNullOrEmpty(fileName))
        {
            fileName = input.Identity.SourceFilename;
        }
        else if (!File.Exists(fileName))
        {
            // Relative include paths are resolved against the source file's folder.
            var folder = Path.GetDirectoryName(input.Identity.SourceFilename);
            fileName = Path.Combine(folder, fileName);
        }

        // If we got an exception then we'll be throwing an exception
        // below, so just gather the lines to throw later.
        if (buildFailed)
        {
            if (identity == null)
            {
                // First recognized diagnostic defines the identity used for the throw.
                identity = new ContentIdentity(fileName, input.Identity.SourceTool, lineAndColumn);
                allErrorsAndWarnings = errorsAndWarningArray[i] + Environment.NewLine;
            }
            else
            {
                allErrorsAndWarnings += errorsAndWarningArray[i] + Environment.NewLine;
            }
        }
        else
        {
            identity = new ContentIdentity(fileName, input.Identity.SourceTool, lineAndColumn);
            context.Logger.LogWarning(string.Empty, identity, message, string.Empty);
        }
    }

    if (buildFailed)
    {
        throw new InvalidContentException(allErrorsAndWarnings, identity ?? input.Identity);
    }
}
/// <summary>
/// Processes each vertex channel of the model and, as a side effect, records
/// every triangle of the geometry under its parent mesh's name.
/// </summary>
/// <param name="geometry">Geometry information</param>
/// <param name="vertexChannelIndex">Channel index</param>
/// <param name="context">Processor context</param>
protected override void ProcessVertexChannel(GeometryContent geometry, int vertexChannelIndex, ContentProcessorContext context)
{
    // Run the base model processor's channel handling first.
    base.ProcessVertexChannel(geometry, vertexChannelIndex, context);

    // Extract every triangle of the model (indices come in groups of three).
    List <Triangle> primitives = new List <Triangle>();
    for (int i = 0; i < (geometry.Indices.Count - 2); i += 3)
    {
        // Fetch the three vertex positions.
        Vector3 vertex1 = geometry.Vertices.Positions[geometry.Indices[i]];
        Vector3 vertex2 = geometry.Vertices.Positions[geometry.Indices[i + 1]];
        Vector3 vertex3 = geometry.Vertices.Positions[geometry.Indices[i + 2]];

        // Build the triangle they form.
        Triangle triangle = new Triangle(vertex1, vertex2, vertex3);

        // Add it to the triangle list.
        primitives.Add(triangle);
    }

    // Register the triangle list under the parent mesh's name.
    this.m_PrimitiveInfo.AddTriangles(geometry.Parent.Name, primitives.ToArray());
}
/// <summary>
/// Placeholder processor: ignores the input and yields the output type's default value.
/// </summary>
public override TOutput Process(TInput input, ContentProcessorContext context)
{
    TOutput result = default(TOutput);
    return result;
}
/// <summary>
/// Processes a Particle Designer (.pex) emitter config. If it carries an
/// embedded texture (base64-encoded gzip'd tiff), the tiff is extracted,
/// round-tripped through a temp file into the TextureImporter, and run through
/// the TextureProcessor (no mipmaps, Color format).
/// </summary>
public override ParticleDesignerProcessorResult Process(ParticleDesignerContent input, ContentProcessorContext context)
{
    logger = context.Logger;
    var result = new ParticleDesignerProcessorResult();

    // check for an embedded tiff texture
    if (input.emitterConfig.texture.data != null)
    {
        context.Logger.LogMessage("pex file has an embedded tiff. Extracting now.");

        // The embedded data is base64 text wrapping a gzip stream;
        // inflate it into memory in 4 KB chunks.
        using (var memoryStream = new MemoryStream(Convert.FromBase64String(input.emitterConfig.texture.data), writable: false))
        {
            using (var stream = new GZipStream(memoryStream, CompressionMode.Decompress))
            {
                const int size = 4096;
                byte[] buffer = new byte[size];
                using (var memory = new MemoryStream())
                {
                    int count = 0;
                    do
                    {
                        count = stream.Read(buffer, 0, size);
                        if (count > 0)
                        {
                            memory.Write(buffer, 0, count);
                        }
                    } while(count > 0);

                    result.textureTiffData = memory.ToArray();
                }
            }
        }

        // TextureImporter works on files, so round-trip the tiff through a temp file.
        var tempFile = Path.Combine(Path.GetTempPath(), "tempParticleTexture.tif");
        File.WriteAllBytes(tempFile, result.textureTiffData);
        context.Logger.LogMessage("writing tiff to temp file: {0}", tempFile);

        context.Logger.LogMessage("running TextureImportor on tiff");
        var textureImporter = new TextureImporter();
        result.texture = textureImporter.Import(tempFile, input.context) as Texture2DContent;
        result.texture.Name = input.emitterConfig.texture.name;

        context.Logger.LogMessage("deleting temp file");
        File.Delete(tempFile);

        // process the imported texture: no mipmaps, plain Color output.
        context.Logger.LogMessage("processing TextureContent");
        var textureProcessor = new TextureProcessor
        {
            GenerateMipmaps = false,
            TextureFormat = TextureProcessorOutputFormat.Color
        };
        result.texture = (Texture2DContent)textureProcessor.Process(result.texture, context);
        context.Logger.LogMessage("TextureContent processed");
    }

    result.particleEmitterConfig = input.emitterConfig;
    return(result);
}
/// <summary>
/// Packs up to four splat-layer weight textures into the R, G, B and A channels
/// of a single control texture. Channel weights are clamped so they never sum
/// above 1, with later channels taking priority (A, then B, then G, then R).
/// When no red-layer texture exists and GenerateBaseLayer is set, the red
/// channel defaults to 1.
/// </summary>
public override Texture2DContent Process(Splatter splatter, ContentProcessorContext context)
{
    var input = splatter.Layers.Select(layer => layer != null ? layer.Filename : null).ToArray();

    if (input.Length > 4)
    {
        context.Logger.LogWarning(null, null, "SplatterTextureProcessor supports at most 4 textures. Additional textures will be discarded");
    }

    int width = 0;
    int height = 0;

    // REFACTOR: the original had four copy-pasted load blocks, one per channel.
    // One grayscale bitmap per output channel (R=0, G=1, B=2, A=3); entries
    // stay null when the corresponding layer is missing or unnamed.
    var channels = new PixelBitmapContent <float>[4];
    int count = Math.Min(4, input.Length);
    for (int i = 0; i < count; i++)
    {
        if (string.IsNullOrEmpty(input[i]))
        {
            continue;
        }

        Texture2DContent texture = context.BuildAndLoadAsset <TextureContent, Texture2DContent>(
            new ExternalReference <TextureContent>(input[i]), null);
        texture.ConvertBitmapType(typeof(PixelBitmapContent <float>));
        channels[i] = (PixelBitmapContent <float>)texture.Mipmaps[0];

        // Matches the original: the last texture loaded dictates the output size.
        width = channels[i].Width;
        height = channels[i].Height;
    }

    // In case no texture is specified at all.
    width = Math.Max(1, width);
    height = Math.Max(1, height);

    PixelBitmapContent <Vector4> bitmap = new PixelBitmapContent <Vector4>(width, height);
    for (int y = 0; y < height; ++y)
    {
        for (int x = 0; x < width; ++x)
        {
            var color = new Vector4(
                channels[0] != null ? channels[0].GetPixel(x, y) : GenerateBaseLayer ? 1 : 0,
                channels[1] != null ? channels[1].GetPixel(x, y) : 0,
                channels[2] != null ? channels[2].GetPixel(x, y) : 0,
                channels[3] != null ? channels[3].GetPixel(x, y) : 0);

            // Clamp so the four weights sum to at most 1; higher channels win.
            color.Z = Math.Min(color.Z, 1 - color.W);
            color.Y = Math.Min(color.Y, 1 - color.W - color.Z);
            color.X = Math.Min(color.X, 1 - color.W - color.Z - color.Y);
            bitmap.SetPixel(x, y, color);
        }
    }

    Texture2DContent result = new Texture2DContent();
    result.Mipmaps = new MipmapChain(bitmap);
    result.ConvertBitmapType(typeof(PixelBitmapContent <Color>));
    return(result);
}
/// <summary>
/// For image-collection tilesets (ones with no single source image), packs
/// every per-tile image into one atlas, saves it as a PNG next to the map,
/// and rewires the tileset and each tile's source rectangle to the atlas.
/// </summary>
static void setTilesetTextureIfNecessary(TmxTileset tileset, ContentProcessorContext context)
{
    // Standard tilesets already reference a single image; nothing to do.
    if (tileset.image != null)
    {
        return;
    }

    tileset.isStandardTileset = false;

    // Collect the distinct per-tile image paths.
    var imagePaths = new List <string>();
    foreach (var tile in tileset.tiles)
    {
        if (tile.image != null && !imagePaths.Contains(tile.image.source))
        {
            imagePaths.Add(tile.image.source);
        }
    }

    context.Logger.LogMessage("\n\t --- need to pack images: {0}\n", imagePaths.Count);
    var sourceSprites = new List <BitmapContent>();

    // Loop over each input sprite filename
    foreach (var inputFilename in imagePaths)
    {
        // Store the name of this sprite.
        var spriteName = Path.GetFileName(inputFilename);
        var absolutePath = PathHelper.getAbsolutePath(inputFilename, tileset.mapFolder);
        context.Logger.LogMessage("Adding texture: {0}", spriteName);

        // Load the sprite texture into memory.
        var textureReference = new ExternalReference <TextureContent>(absolutePath);
        var texture = context.BuildAndLoadAsset <TextureContent, TextureContent>(textureReference, "TextureProcessor");
        sourceSprites.Add(texture.Faces[0][0]);
    }

    // spriteRectangles ends up parallel to sourceSprites/imagePaths.
    var spriteRectangles = new List <Rectangle>();

    // pack all the sprites into a single large texture.
    var packedSprites = TextureAtlasPacker.packSprites(sourceSprites, spriteRectangles, false, context);
    context.Logger.LogMessage("packed: {0}", packedSprites);

    // save out a PNG with our atlas: copy the packed bitmap pixel-by-pixel
    // into a GDI+ bitmap so System.Drawing can encode it.
    var bm = new System.Drawing.Bitmap(packedSprites.Width, packedSprites.Height);
    for (var x = 0; x < packedSprites.Width; x++)
    {
        for (var y = 0; y < packedSprites.Height; y++)
        {
            var col = packedSprites.GetPixel(x, y);
            var color = System.Drawing.Color.FromArgb(col.A, col.R, col.G, col.B);
            bm.SetPixel(x, y, color);
        }
    }

    var atlasFilename = tileset.name + "-atlas.png";
    bm.Save(Path.Combine(tileset.mapFolder, atlasFilename), System.Drawing.Imaging.ImageFormat.Png);
    context.Logger.LogImportantMessage("\n-- generated atlas {0}. Make sure you add it to the Pipeline tool!", atlasFilename);

    // set the new atlas as our tileset source image
    tileset.image = new TmxImage();
    tileset.image.source = atlasFilename;

    // last step: set the new atlas info and source rectangle for each tile
    foreach (var tile in tileset.tiles)
    {
        if (tile.image == null)
        {
            continue;
        }

        // Look up the packed rectangle by the tile's original image path.
        tile.sourceRect = spriteRectangles[imagePaths.IndexOf(tile.image.source)];
    }
}
/// <summary>
/// Sound effect processing is not supported by this processor.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override SoundEffectContent Process(AudioContent input, ContentProcessorContext context)
{
    throw new NotImplementedException();
}
/// <summary>
/// Extension hook for material conversion; this default implementation hands
/// the material back untouched. Subclasses override it to substitute custom
/// effect materials.
/// </summary>
protected virtual MaterialContent ConvertMaterial(MaterialContent material, ContentProcessorContext context)
{
    // No conversion is applied at this level.
    return material;
}
/// <summary>
/// Rasterizes a font's character map into a texture atlas with GDI+ and fills
/// in the glyph source rectangles on the SpriteFontContent. Throws when the
/// requested font is not installed on the build machine.
/// </summary>
public override SpriteFontContent Process(SpriteFontContent input, ContentProcessorContext context)
{
    var font = new Font(input.FontName, input.FontSize);

    // Make sure that this font is installed on the system.
    // Creating a font object with a font that's not contained will default to MS Sans Serif:
    // http://msdn.microsoft.com/en-us/library/zet4c3fa.aspx
    if (font.FontFamily.Name == "Microsoft Sans Serif" && input.FontName != "Microsoft Sans Serif")
    {
        throw new Exception(string.Format("Font {0} is not installed on this computer.", input.FontName));
    }

    var estimatedSurfaceArea = 0;
    var largestHeight = 0;
    var widthsAndHeights = new List <Point>();

    // Measure each character so we can estimate the output texture size.
    // FIX: the scratch bitmap is now disposed (it previously leaked).
    using (var bmp = new Bitmap((int)(font.Size * 1.5), (int)(font.Size * 1.5)))
    using (var temp = System.Drawing.Graphics.FromImage(bmp))
    {
        // Calculate and save the size of each character
        foreach (var ch in input.CharacterMap)
        {
            var charSize = temp.MeasureString(ch.ToString(), font);
            var width = (int)charSize.Width;
            var height = (int)charSize.Height;

            estimatedSurfaceArea += width;
            largestHeight = Math.Max(largestHeight, height);
            widthsAndHeights.Add(new Point(width, height));
        }

        // TODO: Using the largest height will give us some empty space
        // This can be optimized to pack a smaller texture if necessary
        estimatedSurfaceArea *= largestHeight;
    }

    // calculate the best height and width for our output texture.
    // TODO: GetMonoGamePlatform()
    var texBounds = calculateOutputTextureBounds(estimatedSurfaceArea, context.BuildConfiguration.ToUpper().Contains("IOS"));

    // Create our texture
    var outputBitmap = new Bitmap(texBounds.X, texBounds.Y);
    using (var g = System.Drawing.Graphics.FromImage(outputBitmap))
    {
        g.FillRectangle(Brushes.Magenta, new System.Drawing.Rectangle(0, 0, outputBitmap.Width, outputBitmap.Height));

        int x = 0;
        int y = 0;

        // Draw each glyph into the image.
        for (int i = 0; i < input.CharacterMap.Count; i++)
        {
            // BUG FIX: the original indexed Glyphs/CharacterMap with the pixel
            // cursor 'x' instead of the loop index, and read glyph sizes from
            // the Glyphs list it was still populating. Use the sizes measured above.
            var size = widthsAndHeights[i];

            input.Glyphs.Add(new Microsoft.Xna.Framework.Rectangle(x, y, size.X, size.Y));
            g.DrawString(input.CharacterMap[i].ToString(), font, Brushes.White, new PointF(x, y));

            x += size.X;
            if (x >= texBounds.X)
            {
                x = 0;
                y += largestHeight;
            }
        }

        using (var ms = new MemoryStream())
        {
            outputBitmap.Save(ms, System.Drawing.Imaging.ImageFormat.MemoryBmp);

            // BUG FIX: Save leaves the stream positioned at its end; without
            // rewinding, Read returned 0 bytes and texData stayed all zeros.
            ms.Position = 0;
            var texData = new byte[ms.Length];
            ms.Read(texData, 0, (int)ms.Length);

            // NOTE(review): this feeds the raw BMP container bytes (header
            // included) into SetPixelData — confirm the expected pixel layout.
            var bitmapContent = (BitmapContent)Activator.CreateInstance(typeof(PixelBitmapContent <Microsoft.Xna.Framework.Color>),
                                                                        new object[] { outputBitmap.Width, outputBitmap.Height });
            bitmapContent.SetPixelData(texData);
            input.Texture.Faces[0].Add(bitmapContent);

            var tp = new TextureProcessor();
            tp.Process(input.Texture, context);
        }
    }

    return(input);
}
/// <summary>
/// Pass-through processor: the importer already produced runtime-ready
/// GIF animation content, so no further work is needed.
/// </summary>
public override GifAnimationContent Process(GifAnimationContent input, ContentProcessorContext context)
{
    // Nothing to transform; forward the imported content as-is.
    return input;
}
/// <summary>
/// Basic texture processing: optional color-key replacement, resize to
/// power-of-two dimensions, and alpha premultiplication, all performed on the
/// content's backing GDI+ bitmap. Mipmap generation and output formats other
/// than Color/NoChange are not implemented.
/// </summary>
public override TextureContent Process(TextureContent input, ContentProcessorContext context)
{
    if (ColorKeyEnabled)
    {
        // Replace every pixel matching the color key with fully-transparent black.
        var replaceColor = System.Drawing.Color.FromArgb(0);
        for (var x = 0; x < input._bitmap.Width; x++)
        {
            for (var y = 0; y < input._bitmap.Height; y++)
            {
                var col = input._bitmap.GetPixel(x, y);

                if (col.ColorsEqual(ColorKeyColor))
                {
                    input._bitmap.SetPixel(x, y, replaceColor);
                }
            }
        }
    }

    var face = input.Faces[0][0];
    if (ResizeToPowerOfTwo)
    {
        // Only resize when at least one dimension is not already a power of two.
        if (!GraphicsUtil.IsPowerOfTwo(face.Width) || !GraphicsUtil.IsPowerOfTwo(face.Height))
        {
            input.Resize(GraphicsUtil.GetNextPowerOfTwo(face.Width), GraphicsUtil.GetNextPowerOfTwo(face.Height));
        }
    }

    if (PremultiplyAlpha)
    {
        for (var x = 0; x < input._bitmap.Width; x++)
        {
            for (var y = 0; y < input._bitmap.Height; y++)
            {
                // Convert through XNA's premultiply helper, then back to a GDI+ color.
                var oldCol = input._bitmap.GetPixel(x, y);
                var preMultipliedColor = Color.FromNonPremultiplied(oldCol.R, oldCol.G, oldCol.B, oldCol.A);
                input._bitmap.SetPixel(x, y, System.Drawing.Color.FromArgb(preMultipliedColor.A, preMultipliedColor.R,
                                                                           preMultipliedColor.G, preMultipliedColor.B));
            }
        }
    }

    if (GenerateMipmaps)
    {
        throw new NotImplementedException();
    }

    // TODO: Set all mip level data
    // Push the (possibly modified) bitmap back into the top mip level.
    input.Faces[0][0].SetPixelData(input._bitmap.GetData());

    if (TextureFormat == TextureProcessorOutputFormat.NoChange)
    {
        return(input);
    }

    // Only plain Color output is supported beyond NoChange.
    if (TextureFormat != TextureProcessorOutputFormat.Color)
    {
        throw new NotImplementedException();
    }

    return(input);
}
/// <summary>
/// Converts mesh content to a <see cref="Shape"/>.
/// </summary>
/// <param name="input">The root node content.</param>
/// <param name="context">Context for the specified processor.</param>
/// <returns>The <see cref="Shape"/>.</returns>
public override Shape Process(NodeContent input, ContentProcessorContext context)
{
    // ----- Apply Scale factor.
    if (Scale != 1f)
    {
        // The user has set a scale. Use MeshHelper to apply the scale to the whole model.
        Matrix transform = Matrix.CreateScale(Scale);
        MeshHelper.TransformScene(input, transform);
    }

    // ----- Convert Mesh to Shapes
    // The input node is usually a tree of nodes. We need to collect all MeshContent nodes
    // in the tree. The DigitalRune Helper library provides a TreeHelper that can be used
    // to traverse trees using LINQ.
    // The following returns an IEnumerable that contains all nodes of the tree.
    IEnumerable <NodeContent> nodes = TreeHelper.GetSubtree(input, n => n.Children);

    // We only need nodes of type MeshContent.
    IEnumerable <MeshContent> meshes = nodes.OfType <MeshContent>();

    // For each MeshContent we extract one shape and its pose (position and orientation).
    List <Pose> poses = new List <Pose>();
    List <Shape> shapes = new List <Shape>();

    foreach (var mesh in meshes)
    {
        // Skip empty meshes.
        if (mesh.Positions.Count == 0)
        {
            continue;
        }

        Pose pose = Pose.Identity;
        Shape shape = null;

        // The meshes in the imported file must follow a naming convention. The end of the name
        // of each mesh must be "Box", "Sphere" or "Convex" to tell us which kind of collision
        // shape we must create.
        if (mesh.Name.EndsWith("Box"))
        {
            LoadBox(mesh, out pose, out shape);
        }
        else if (mesh.Name.EndsWith("Sphere"))
        {
            LoadSphere(mesh, out pose, out shape);
        }
        else if (mesh.Name.EndsWith("Convex"))
        {
            LoadConvex(mesh, out pose, out shape);
        }

        if (shape != null)
        {
            poses.Add(pose);
            shapes.Add(shape);
        }
    }

    // The CollisionShapeProcessor exports a single shape.
    Shape collisionShape;
    if (shapes.Count == 0)
    {
        // We did not find any collision shapes. --> Return a dummy shape.
        collisionShape = Shape.Empty;
    }
    else if (shapes.Count == 1)
    {
        // We have found 1 shape.
        if (poses[0].HasRotation || poses[0].HasTranslation)
        {
            // The shape is not centered in origin of the model space or it is rotated,
            // therefore we create a TransformedShape that applies the transformation.
            collisionShape = new TransformedShape(new GeometricObject(shapes[0], poses[0]));
        }
        else
        {
            // Use the shape directly, there is no translation or rotation we have to apply.
            collisionShape = shapes[0];
        }
    }
    else
    {
        // We have found several collision shapes. --> Combine all shapes into one CompositeShape.
        CompositeShape compositeShape = new CompositeShape();
        for (int i = 0; i < shapes.Count; i++)
        {
            compositeShape.Children.Add(new GeometricObject(shapes[i], poses[i]));
        }

        // If the composite shape has many children, the performance is improved if the composite
        // shape uses a spatial partition.
        //compositeShape.Partition = new CompressedAabbTree();

        collisionShape = compositeShape;
    }

    return(collisionShape);
}
/// <summary>
/// Builds a <see cref="StitchedEffectSymbol"/> from parsed stitched-effect content:
/// normalizes literal fragment references into identifiers, loads every fragment,
/// and assembles the technique/pass symbol tree.
/// </summary>
/// <param name="stitchedEffectContent">The parsed stitched-effect content.</param>
/// <param name="context">Context for the specified processor.</param>
/// <returns>The assembled stitched-effect symbol.</returns>
public static StitchedEffectSymbol BuildStitchedEffect(
    StitchedEffectContent stitchedEffectContent,
    ContentProcessorContext context)
{
    // Ensure there is always a fragment-declaration block to add entries to below.
    // FIX: the assignment statement was missing its terminating semicolon — a stray ';'
    // sat after the if-block's closing brace instead, which does not compile.
    if (stitchedEffectContent.StitchedEffectNode.Fragments == null)
    {
        stitchedEffectContent.StitchedEffectNode.Fragments = new FragmentBlockNode
        {
            FragmentDeclarations = new Dictionary<string, FragmentSource>()
        };
    }

    // If fragments inside technique passes were declared with literal strings, replace them
    // with identifiers, so that the rest of the code can treat them all as identifiers.
    // This should really be done as a separate pass.
    int autoIndex = 0;
    foreach (TechniqueNode techniqueNode in stitchedEffectContent.StitchedEffectNode.Techniques.Techniques)
    {
        foreach (TechniquePassNode passNode in techniqueNode.Passes)
        {
            for (int i = 0; i < passNode.Fragments.Count; ++i)
            {
                if (passNode.Fragments[i].Type == TokenType.Literal)
                {
                    // Synthesize a unique identifier for the literal fragment source and
                    // register the source under that name.
                    string autoName = "_auto_" + autoIndex++;
                    stitchedEffectContent.StitchedEffectNode.Fragments.FragmentDeclarations.Add(
                        autoName,
                        StitchedEffectParser.GetFragmentSource(
                            ((StringToken)passNode.Fragments[i]).Value,
                            stitchedEffectContent.Identity));
                    passNode.Fragments[i] = new IdentifierToken(
                        autoName,
                        passNode.Fragments[i].SourcePath,
                        passNode.Fragments[i].Position);
                }
            }
        }
    }

    //System.Diagnostics.Debugger.Launch();

    // Load fragments.
    Dictionary<string, FragmentContent> fragmentDictionary =
        stitchedEffectContent.StitchedEffectNode.Fragments.FragmentDeclarations.ToDictionary(
            fd => fd.Key,
            fd => fd.Value.LoadFragmentContent(context));

    // Load into intermediate objects which keep track of each fragment's unique name.
    // The enumeration index appended to the fragment node's name makes each stitched
    // fragment's name unique even when the same fragment appears more than once.
    Dictionary<string, StitchedFragmentSymbol> stitchedFragmentDictionary = fragmentDictionary
        .Select((f, i) => new KeyValuePair<string, StitchedFragmentSymbol>(
            f.Key,
            new StitchedFragmentSymbol(f.Value.FragmentNode.Name + i, f.Value.FragmentNode)))
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);

    // Load into techniques. By this point every pass fragment is an IdentifierToken
    // (literals were rewritten above), so the cast is safe.
    List<TechniqueSymbol> techniques = stitchedEffectContent.StitchedEffectNode.Techniques.Techniques
        .Select(tn => new TechniqueSymbol
        {
            Name = tn.Name,
            Passes = tn.Passes.Select(tpn => new TechniquePassSymbol
            {
                Name = tpn.Name,
                Fragments = tpn.Fragments
                    .Select(t => stitchedFragmentDictionary[((IdentifierToken)t).Identifier])
                    .ToList()
            }).ToList()
        }).ToList();

    return new StitchedEffectSymbol
    {
        StitchedFragments = stitchedFragmentDictionary.Values.ToList(),
        Techniques = techniques
    };
}
} // Closes the enclosing type, whose declaration begins before this excerpt.
/// <summary>
/// Processes a single vertex channel. Channels whose names appear in
/// AcceptableVertexChannelNames are handed to the base processor; any other
/// channel is stripped from the geometry so it does not end up in the built model.
/// </summary>
/// <param name="geometry">The geometry content whose channel is being processed.</param>
/// <param name="vertexChannelIndex">Index of the vertex channel to process.</param>
/// <param name="context">Context for the specified processor.</param>
protected override void ProcessVertexChannel(GeometryContent geometry,
    int vertexChannelIndex, ContentProcessorContext context)
{
    String channelName = geometry.Vertices.Channels[vertexChannelIndex].Name;

    // Unrecognized channel --> remove it and stop; nothing further to process.
    if (!AcceptableVertexChannelNames.Contains(channelName))
    {
        geometry.Vertices.Channels.Remove(channelName);
        return;
    }

    base.ProcessVertexChannel(geometry, vertexChannelIndex, context);
}
/// <summary>
/// Generates a terrain mesh from an input heightfield texture.
/// </summary>
/// <param name="input">The heightfield texture to convert.</param>
/// <param name="context">Context for the specified processor.</param>
/// <returns>The generated terrain model.</returns>
public override ModelContent Process(Texture2DContent input, ContentProcessorContext context)
{
    MeshBuilder builder = MeshBuilder.StartMesh("terrain");

    // Convert the input texture to float format, for ease of processing.
    input.ConvertBitmapType(typeof(PixelBitmapContent<float>));

    PixelBitmapContent<float> heightfield = (PixelBitmapContent<float>)input.Mipmaps[0];
    int width = heightfield.Width;
    int height = heightfield.Height;

    // Create one vertex per heightfield texel, centered around the origin.
    // Note: width / 2 and height / 2 deliberately use integer division.
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            Vector3 position;
            position.X = (x - width / 2) * terrainScale;
            position.Z = (y - height / 2) * terrainScale;
            position.Y = (heightfield.GetPixel(x, y) - 1) * terrainBumpiness;

            builder.CreatePosition(position);
        }
    }

    // Create a material, and point it at our terrain texture (resolved relative to
    // the directory of the source heightfield).
    BasicMaterialContent material = new BasicMaterialContent();
    string directory = Path.GetDirectoryName(input.Identity.SourceFilename);
    material.Texture = new ExternalReference<TextureContent>(Path.Combine(directory, terrainTexture));
    builder.SetMaterial(material);

    // Create a vertex channel for holding texture coordinates.
    int texCoordId = builder.CreateVertexChannel<Vector2>(
        VertexChannelNames.TextureCoordinate(0));

    // Emit two triangles per heightfield cell (each quad is split along a diagonal).
    for (int y = 0; y < height - 1; y++)
    {
        for (int x = 0; x < width - 1; x++)
        {
            // First triangle of the quad.
            AddVertex(builder, texCoordId, width, x, y);
            AddVertex(builder, texCoordId, width, x + 1, y);
            AddVertex(builder, texCoordId, width, x + 1, y + 1);

            // Second triangle of the quad.
            AddVertex(builder, texCoordId, width, x, y);
            AddVertex(builder, texCoordId, width, x + 1, y + 1);
            AddVertex(builder, texCoordId, width, x, y + 1);
        }
    }

    // Chain to the ModelProcessor so it can convert the mesh we just generated.
    MeshContent terrainMesh = builder.FinishMesh();
    return context.Convert<MeshContent, ModelContent>(terrainMesh, "ModelProcessor");
}
/// <summary>
/// Normalizes the tile set's image source to a bare asset name (directory and file
/// extension stripped) and wraps the content in a processor result.
/// </summary>
/// <param name="input">The imported tile set content.</param>
/// <param name="context">Context for the specified processor.</param>
/// <returns>The processed tile set result.</returns>
public override TiledTileSetProcessorResult Process(TiledTileSetContent input, ContentProcessorContext context)
{
    // FIX: the original code wrapped this assignment in
    // "foreach (var item in input.Image.Source)", which enumerates the CHARACTERS of
    // the Source string while reassigning it inside the loop — the same idempotent
    // assignment executed once per character. It only needs to happen once.
    input.Image.Source = Path.GetFileNameWithoutExtension(input.Image.Source);

    return new TiledTileSetProcessorResult(input, context.Logger);
}
/// <summary>
/// Writes the ExEn font output: a standard-resolution texture/metrics pair, then a
/// double-size ("@2x") pair for Retina displays.
/// </summary>
/// <param name="spriteFontContent">The already-processed standard-resolution font content.</param>
/// <param name="input">The font description; mutated to 2x size for the retina pass.</param>
/// <param name="context">Context for the specified processor.</param>
private void CreateExEnOutput(NuclexSpriteFontContent spriteFontContent,
    FontDescription input, ContentProcessorContext context)
{
    ExEnFontWriter.CreateOutputDirectory(input.Identity);

    // Standard-resolution texture and metrics.
    ExEnFontWriter.WriteTexture(spriteFontContent, true, context,
        ExEnFontWriter.AssetOutputFilename(input.Identity, context, "-exenfont.png"));
    ExEnFontWriter.WriteMetrics(spriteFontContent, context,
        ExEnFontWriter.AssetOutputFilename(input.Identity, context, "-exenfont.exenfont"));

    // Retina Display: re-process the font at double size and write the "@2x" variants.
    // FIX/NOTE(review): the original suffix literals here were corrupted/redacted
    // ("*****@*****.**"). They have been reconstructed from the non-retina suffixes
    // above plus the Apple "@2x" asset-naming convention — confirm these names against
    // whatever loads the ExEn font files at runtime.
    input.BecomeAt2x();
    NuclexSpriteFontContent spriteFontContentAt2x = base.Process(input, context);
    ExEnFontWriter.WriteTexture(spriteFontContentAt2x, true, context,
        ExEnFontWriter.AssetOutputFilename(input.Identity, context, "-exenfont@2x.png"));
    ExEnFontWriter.WriteMetrics(spriteFontContentAt2x, context,
        ExEnFontWriter.AssetOutputFilename(input.Identity, context, "-exenfont@2x.exenfont"));
}
/// <summary>
/// Identity processor: passes the input string through unchanged.
/// </summary>
/// <param name="input">The string to process.</param>
/// <param name="context">Context for the specified processor (unused).</param>
/// <returns>The input string, unmodified.</returns>
public override String Process(String input, ContentProcessorContext context)
{
    return input;
}
/// <summary>
/// Processes the input object and returns the modified data.
/// </summary>
/// <param name="input">The imported content to process.</param>
/// <param name="context">Context for the specified processor.</param>
/// <returns>Never returns; this processor is a stub.</returns>
/// <exception cref="NotImplementedException">Always thrown — not yet implemented.</exception>
public override TOutput Process(TInput input, ContentProcessorContext context)
{
    // Intentionally unimplemented template stub: fill in the conversion logic here.
    throw new NotImplementedException();
}