/// <summary>
/// Bakes a plain-text voxel file into the binary baked format.
/// Input: one voxel per line as "x y z rrggbb" (coords decimal shorts, color hex);
/// lines starting with '#' are comments. Output: an int32 voxel count header,
/// then per voxel: packed float color, x, y, z (int16), and an int16 pad.
/// </summary>
public bool Bake(PipelineBakeContext context)
{
	var path = context.Depends["source"];
	var lines = File.ReadAllLines(path);

	// Collect the voxel lines once instead of duplicating the filter across two
	// passes. Blank/whitespace lines are skipped too — previously they were
	// counted and then crashed the parser below.
	var voxelLines = new System.Collections.Generic.List<string>();
	foreach (var line in lines)
	{
		if (line.StartsWith("#") || string.IsNullOrWhiteSpace(line))
		{
			continue;
		}
		voxelLines.Add(line);
	}

	// Header: number of voxel records that follow.
	context.BakedWriter.Write(voxelLines.Count);

	foreach (var line in voxelLines)
	{
		//TODO - make tokenizing framework
		// RemoveEmptyEntries tolerates runs of spaces between tokens.
		var parts = line.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

		// Invariant culture: this is machine-readable data, not UI text.
		var x = short.Parse(parts[0], System.Globalization.CultureInfo.InvariantCulture);
		var y = short.Parse(parts[1], System.Globalization.CultureInfo.InvariantCulture);
		var z = short.Parse(parts[2], System.Globalization.CultureInfo.InvariantCulture);
		var ucolor = uint.Parse(parts[3], System.Globalization.NumberStyles.HexNumber, System.Globalization.CultureInfo.InvariantCulture);
		var fcolor = PackUintColor(ucolor);

		context.BakedWriter.Write(fcolor);
		context.BakedWriter.Write(x);
		context.BakedWriter.Write(y);
		context.BakedWriter.Write(z);
		context.BakedWriter.Write((short)0); //pad to 8-byte record alignment
	}

	return (true);
}
/// <summary>
/// Loads content, preferring the on-disk baked artifact; when it is missing or
/// stale relative to its dependencies, re-bakes into memory, optionally dumps
/// the result to disk, and loads from the in-memory bake.
/// Returns true when the content ends up loaded (or, in oven/dump mode, when
/// the baked output is up to date).
/// </summary>
public bool Load(ContentLoadContext context)
{
	bool loaded = false;

	//try loading the baked content
	PipelineLoadBakedContext loadBakedContext = new PipelineLoadBakedContext();
	loadBakedContext.ContentPath = name;
	var fpBaked = Path.Combine(directoryOwner.BakedContentDiskRoot, name);
	var bakeable = content.Manager.PipelineConnector.GetPipeline(content);
	var bakedLoader = content as IBakedLoader;

	//what is this hot garbage?? I would like a special context for preparing which has more limited information
	// NOTE(review): bakeContext has no local declaration here, so it is presumably
	// a field on this class — confirm; if nothing else reads it, make it a local.
	bakeContext = new PipelineBakeContext()
	{
		PipelineConnector = content.Manager.PipelineConnector,
		Content = content,
		RawContentDiskRoot = directoryOwner.RawContentDiskRoot,
		ContentPath = name,
		RawContentDiskPath = Path.Combine(directoryOwner.RawContentDiskRoot, name),
		ForOven = directoryOwner.Manager.DumpBakedContent,
		Attributes = content.attributes ?? new object[0]
	};

	// Let the pipeline declare its dependencies into the context.
	bakeable.Prepare(bakeContext);

	//TODO - dont even do this unless we support hot loading or arent bruted or something, I dont know
	bool resolvedDependencies = bakeContext.ResolveDependencies(name);

	//analyze whether we need to bake
	bool satisfied = true;

	//if the output doesnt exist, we do need to bake
	var fiTo = new FileInfo(fpBaked);
	if (!fiTo.Exists)
	{
		satisfied = false;
	}

	//check timestamps - we may need to bake if any dependency is newer than the output
	if (satisfied)
	{
		foreach (var dep in bakeContext.resolvedDependencies.Values)
		{
			var fiFrom = new FileInfo(dep);
			var fiFromTime = fiFrom.LastWriteTimeUtc;
			if (fiTo.LastWriteTimeUtc < fiFromTime)
			{
				satisfied = false;
				break;
			}
		}
	}

	//TODO: if we're the oven AND we're doing a clean operation, force to be unsatisfied
	if (satisfied)
	{
		if (directoryOwner.Manager.DumpBakedContent)
		{
			//it's already there; nothing to do
			//i GUESS this is what we can return
			return (true);
		}

		//TODO: engine special functions to open files and report existence without two operations
		using (var fs = new FileStream(fpBaked, FileMode.Open, FileAccess.Read, FileShare.None))
		{
			loadBakedContext.BakedReader = new BinaryReader(fs);
			loaded = bakedLoader.LoadBaked(loadBakedContext);
		}
	}

	//if it loaded, we're done
	if (loaded)
	{
		return (true);
	}

	//if it didnt load, we need to load it raw
	//that consists actually of baking it, and if that succeeded, loading that as baked

	//if we couldnt resolve dependencies, we can't bake
	if (!resolvedDependencies)
	{
		return (false);
	}

	// Bake into memory first; disk dumping is a separate, optional step below.
	var msBake = new MemoryStream();
	var bwBake = new BinaryWriter(msBake);
	bakeContext.BakedWriter = bwBake;
	bool baked = bakeable.Bake(bakeContext);

	//if it didnt bake, we definitely can't load it
	if (!baked)
	{
		return (false);
	}

	bwBake.Flush();
	msBake.Position = 0;

	//dump newly baked content (depending on configuration)
	// NOTE(review): this checks content.Manager.DumpBakedContent while the
	// early-out above checks directoryOwner.Manager.DumpBakedContent — confirm
	// these are the same flag, otherwise the two paths can disagree.
	if (content.Manager.DumpBakedContent)
	{
		Directory.CreateDirectory(Path.GetDirectoryName(fpBaked));
		using (var fs = new FileStream(fpBaked, FileMode.Create, FileAccess.Write, FileShare.None))
			fs.Write(msBake.GetBuffer(), 0, (int)msBake.Length);

		//unclear what default timestamp should be in case there's no inputs
		//probably need to set it equal to the build time of the executing assembly - pipe that in from the oven?
		// Stamp the output with the newest dependency time so the staleness
		// check above stays consistent across runs.
		DateTime timestamp = DateTime.MinValue;
		foreach (var dep in bakeContext.resolvedDependencies.Values)
		{
			var fiFrom = new FileInfo(dep);
			DateTime nextTs = fiFrom.LastWriteTimeUtc;
			if (nextTs > timestamp)
			{
				timestamp = nextTs;
			}
		}
		if (timestamp != DateTime.MinValue)
		{
			fiTo.LastWriteTimeUtc = timestamp;
		}

		//no actual loading to do in this case
		return (true);
	}

	//now, load the baked content straight from the in-memory stream
	var bakedReader = new BinaryReader(msBake);
	loadBakedContext.BakedReader = bakedReader;
	loaded = bakedLoader.LoadBaked(loadBakedContext);

	//TODO: add sophisticated diagnostics log system and report this as a FAILED load
	//well, whether or not it worked, that's all we can do
	return (loaded);
}
/// <summary>
/// Declares this content's single raw dependency: the ".txt" source file
/// alongside its raw content path, registered under the key "source".
/// </summary>
public void Prepare(PipelineBakeContext context)
{
	var sourcePath = context.RawContentDiskPath + ".txt";
	context.Depend("source", sourcePath);
}
/// <summary>
/// Bakes a config file: parses the "source" dependency as INI (case-insensitive
/// keys), matches global keys against the ConfigFile's reflected fields, and
/// writes a count followed by the serialized field entries to the baked output.
/// Only String, Int32, and Float field types are handled; keys absent from the
/// INI, or values that fail to parse, are silently skipped.
/// </summary>
public bool Bake(PipelineBakeContext context) { var configFile = context.Content as ConfigFile; var zoowriter = context.BakedWriter; var path = context.Depends["source"]; var iniConfig = new IniParser.Model.Configuration.IniParserConfiguration() { CaseInsensitive = true }; var iniParser = new IniParser.Parser.IniDataParser(iniConfig); FileIniDataParser fileIniData = new FileIniDataParser(iniParser); IniData parsedData = fileIniData.ReadFile(path); //foreach (var key in parsedData.Global)
// Entries are buffered in a MemoryStream so the count can be written first.
var ms = new MemoryStream(); var bw = new BinaryWriter(ms); int count = 0; foreach (var mfi in configFile.reflectionData.Values) { string val; bool has = parsedData.TryGetKey(mfi.FieldInfo.Name, out val); if (!has) { continue; } switch (mfi.Type) { case ConfigFile.FieldType.String:
// NOTE(review): unlike Int32/Float below, this case never writes `val` to `bw`
// after DumpValue, yet still increments count — a reader expecting a value per
// entry would desync. Confirm against the baked-config reader.
// NOTE(review): SetValue(this, ...) targets the pipeline object, not configFile —
// looks wrong; verify the intended target.
DumpValue(bw, mfi); mfi.FieldInfo.SetValue(this, val); count++; break; case ConfigFile.FieldType.Int32: { int temp; if (int.TryParse(val, out temp)) { count++; DumpValue(bw, mfi); bw.Write(temp); } break; } case ConfigFile.FieldType.Float: { float temp; if (float.TryParse(val, out temp)) { count++; DumpValue(bw, mfi); bw.Write(temp); } break; } } } bw.Flush();
// Header: number of entries, then the buffered entry payload.
zoowriter.Write(count); zoowriter.Write(ms.ToArray()); return(true); }
/// <summary>
/// Bakes an art asset: loads the source image, optionally alpha-trims it,
/// pads to power-of-two dimensions, applies an optional target texture format,
/// then emits the texture (via the pipeline connector) followed by art
/// metadata: logical width/height, trim offset (ox, oy), and UV sub-rect.
/// </summary>
public unsafe bool Bake(PipelineBakeContext context)
{
	Console.WriteLine("Art.Bake: " + context.ContentPath);

	float umin = 0;
	float vmin = 0;
	float umax = 1;
	float vmax = 1;

	var path = context.Depends["source"];
	var imageBuffer = ImageLoading.LoadImage(path);

	//TODO: we can only handle certain input formats here (no compressed formats)
	//but we can output any format (after we crop it)
	int width = imageBuffer.Width;
	int height = imageBuffer.Height;
	int physwidth = width;
	int physheight = height;
	int ox = 0, oy = 0;

	//NOTE: EVERYTHING BELOW IS EXPERIMENTAL. ITS A TOTAL MESS
	//TODO - apply art-specific operations (cropping, etc.)
	//TODO - make controllable
	//TODO - handle errors

	// Alpha-trim only when the image can be converted to an alpha-processable format.
	var conversionResult = imageBuffer.ConvertToAlphaProcessableFormat(false);
	bool doTrim = true;
	if (conversionResult.ConversionResult == ConversionResult.Error_InputFormatHasNoAlpha)
	{
		doTrim = false;
	}
	if (doTrim)
	{
		//accept the converted image
		imageBuffer = conversionResult.ResultImage;
		var alphaTrimResult = imageBuffer.AlphaTrim();
		imageBuffer = alphaTrimResult.ResultImage;
		ox = alphaTrimResult.x;
		oy = alphaTrimResult.y;
		physwidth = alphaTrimResult.Width;
		physheight = alphaTrimResult.Height;
	}

	// Pad up to power-of-two dimensions (down-right expansion keeps the
	// trimmed content anchored at the origin).
	bool doPadPow2 = true;
	if (!imageBuffer.IsAlphaProcessableFormat())
	{
		doPadPow2 = false;
	}
	if (doPadPow2)
	{
		int widthRound = PipelineMath.TextureUptoPow2(physwidth);
		int heightRound = PipelineMath.TextureUptoPow2(physheight);
		if (widthRound != physwidth || heightRound != physheight)
		{
			imageBuffer = imageBuffer.ExpandDownRight(widthRound, heightRound);
		}
	}

	// Optional per-asset target format from attributes.
	var fmtAttribute = context.Attributes.FirstOrDefault(a => a is TextureFormatAttribute);
	if (fmtAttribute != null)
	{
		var toFormat = ((TextureFormatAttribute)fmtAttribute).Format;
		ImageConversionContext imageContext = new ImageConversionContext();
		imageContext.From = imageBuffer;
		imageContext.NewAlpha = 0xFF;
		imageContext.ToFormat = toFormat;
		ImageLoading.Convert(imageContext);
		imageBuffer = imageContext.Output;
	}

	// BUGFIX: these were int/int divisions, which truncated the UVs to 0 or 1
	// before the assignment to float. Cast to float for a real fractional UV.
	// NOTE(review): umin/vmin stay 0 while umax/vmax include ox/oy — confirm
	// whether the trim offset belongs in the UV rect, given the trimmed image
	// is anchored at the origin after ExpandDownRight.
	umax = (ox + physwidth) / (float)imageBuffer.Width;
	vmax = (oy + physheight) / (float)imageBuffer.Height;

	//the texture goes first...
	var textureBakingContext = new PipelineConnector_TextureBaking()
	{
		Image = imageBuffer,
		Writer = context.BakedWriter
	};
	context.PipelineConnector.BakeTexture(textureBakingContext);

	//..then art-specific stuff
	context.BakedWriter.Write(width);
	context.BakedWriter.Write(height);
	context.BakedWriter.Write(ox);
	context.BakedWriter.Write(oy);
	context.BakedWriter.Write(umin);
	context.BakedWriter.Write(vmin);
	context.BakedWriter.Write(umax);
	context.BakedWriter.Write(vmax);

	return (true);
}
/// <summary>
/// No dependencies are declared here.
/// NOTE(review): the corresponding Bake reads context.Depends["source"], so
/// that dependency is presumably registered by default or elsewhere — confirm.
/// </summary>
public void Prepare(PipelineBakeContext context) { }