// This can write to the same data, because it always gets the information it needs before back-writing
// (the write pointer never catches up to the read pointer within a chunk, so every sample is read
// before the position it occupies is overwritten).
/// <summary>
/// Delta-encodes 16-bit PCM audio into RLAD chunks, compressing the sample buffer in place.
/// Each chunk covers 8 frames; chunk 0 always stores full samples (the delta baseline), later
/// chunks store per-sample differences at the smallest of three size types that fits the chunk's
/// largest difference. Adjacent chunks of the same size type are then merged into runs of up to 64.
/// </summary>
/// <param name="raw">The source audio; its data buffer is reused as the compression output.</param>
/// <param name="stats">If true, collects chunk statistics and reports them through <paramref name="logger"/>.</param>
/// <param name="logger">Receives the statistics output (only used when <paramref name="stats"/> is true).</param>
/// <returns>The encoded audio object, built from the (now compressed) data buffer.</returns>
public unsafe static RLADAudio Encode(RawAudio raw, bool stats, PipelineLogger logger)
{
    // Get the pointers and the chunk sizes (src and dst alias the same buffer - see note above)
    short *srcPtr = (short *)raw.Data.ToPointer();
    byte * dstPtr = (byte *)raw.Data.ToPointer();
    uint realSize = raw.FrameCount - (raw.FrameCount % 8); // Frames rounded down to whole chunks; tail frames are dropped
    uint chunkCount = realSize / 8; // 8 frames per chunk

    // The chunks (first-pass, one per 8 frames; compacted into runs below)
    Chunk[] chunks = new Chunk[chunkCount];

    // Used to track the final data length
    byte *dstStart = dstPtr;

    // Generate all of the initial chunks
    Stopwatch timer = Stopwatch.StartNew();
    if (raw.Stereo)
    {
        // Tracks the running full sample value for each channel (samples are interleaved L,R);
        // seeded from the last frame of the first chunk
        short c1f = srcPtr[14], c2f = srcPtr[15];

        // The first chunk is always full 16-bit samples
        chunks[0].Type = FULL_TYPE;
        chunks[0].Extra = 0;
        srcPtr += 16; // 8 frames * 2 channels
        dstPtr += 32; // 16 samples * 2 bytes

        // Temp array to hold the differences for each chunk
        int *diffs = stackalloc int[16];

        // Loop over each chunk and assign the chunk values on a first order (ignore runs)
        uint dstLen = 0; // Bytes emitted for the current chunk - assign this before each loop end
        for (uint ci = 1; ci < chunkCount; ++ci, srcPtr += 16, dstPtr += dstLen)
        {
            // Generate the differences (each vs. the previous sample of the same channel),
            // and find the max absolute difference at the same time
            int maxdiff = 0;
            for (uint si = 0; si < 16; si += 2)
            {
                int d1 = diffs[si] = srcPtr[si] - c1f;
                int d2 = diffs[si + 1] = srcPtr[si + 1] - c2f;
                c1f = srcPtr[si];
                c2f = srcPtr[si + 1];
                int md = Math.Max(Math.Abs(d1), Math.Abs(d2));
                if (md > maxdiff)
                {
                    maxdiff = md;
                }
            }

            // Get the size type (smallest type whose range holds every difference in the chunk)
            int stype = (maxdiff <= SMALL_DIFF) ? SMALL_TYPE : (maxdiff <= MED_DIFF) ? MED_TYPE : FULL_TYPE;

            // Write the input to the output at the selected size
            if (stype == FULL_TYPE)
            {
                // Full chunks keep the raw 16-bit samples
                Buffer.MemoryCopy(srcPtr, dstPtr, 32, 32);
                dstLen = 32;
            }
            else if (stype == SMALL_TYPE)
            {
                // Small chunks store each difference as a signed byte
                sbyte *dp2 = (sbyte *)dstPtr;
                for (uint di = 0; di < 16; ++di)
                {
                    dp2[di] = (sbyte)diffs[di];
                }
                dstLen = 16;
            }
            else // MED_TYPE: four 12-bit differences packed into each 6 bytes
            {
                for (uint di = 0, dsti = 0; di < 16; di += 4, dsti += 6)
                {
                    // Create the 12-bit differences (low 11 bits of magnitude + sign flag at 0x800)
                    int c1d1 = (diffs[di] & 0x7FF) | ((diffs[di] < 0) ? 0x800 : 0);
                    int c2d1 = (diffs[di + 1] & 0x7FF) | ((diffs[di + 1] < 0) ? 0x800 : 0);
                    int c1d2 = (diffs[di + 2] & 0x7FF) | ((diffs[di + 2] < 0) ? 0x800 : 0);
                    int c2d2 = (diffs[di + 3] & 0x7FF) | ((diffs[di + 3] < 0) ? 0x800 : 0);

                    // Write the packed values as two overlapping 32-bit stores (24 bits used each;
                    // the second store rewrites the zero top byte of the first).
                    // NOTE(review): on the last iteration the second store touches one byte past the
                    // 24-byte chunk output - rewritten by the next chunk, but verify the final chunk
                    // stays inside the buffer.
                    *((int *)(dstPtr + dsti)) = c1d1 | (c2d1 << 12);
                    *((int *)(dstPtr + dsti + 3)) = c1d2 | (c2d2 << 12);
                }
                dstLen = 24;
            }

            // Save the chunk info
            chunks[ci].Type = (ushort)stype;
            chunks[ci].Extra = 0;
        }
    }
    else // Mono
    {
        // Tracks the running full sample value; seeded from the last sample of the first chunk
        short c1f = srcPtr[7];

        // The first chunk is always full 16-bit samples
        chunks[0].Type = FULL_TYPE;
        chunks[0].Extra = 0;
        srcPtr += 8;  // 8 frames, 1 channel
        dstPtr += 16; // 8 samples * 2 bytes

        // Temp array to hold the differences for each chunk
        int *diffs = stackalloc int[8];

        // Loop over each chunk and assign the chunk values on a first order (ignore runs)
        uint dstLen = 0; // Bytes emitted for the current chunk - assign this before each loop end
        for (uint ci = 1; ci < chunkCount; ++ci, srcPtr += 8, dstPtr += dstLen)
        {
            // Generate the differences (each vs. the previous sample), and find the max difference
            // at the same time
            int maxdiff = 0;
            for (uint si = 0; si < 8; si += 2)
            {
                int d1 = diffs[si] = srcPtr[si] - c1f;
                int d2 = diffs[si + 1] = srcPtr[si + 1] - srcPtr[si];
                c1f = srcPtr[si + 1];
                int md = Math.Max(Math.Abs(d1), Math.Abs(d2));
                if (md > maxdiff)
                {
                    maxdiff = md;
                }
            }

            // Get the size type (smallest type whose range holds every difference in the chunk)
            int stype = (maxdiff <= SMALL_DIFF) ? SMALL_TYPE : (maxdiff <= MED_DIFF) ? MED_TYPE : FULL_TYPE;

            // Write the input to the output at the selected size
            if (stype == FULL_TYPE)
            {
                // Full chunks keep the raw 16-bit samples
                Buffer.MemoryCopy(srcPtr, dstPtr, 16, 16);
                dstLen = 16;
            }
            else if (stype == SMALL_TYPE)
            {
                // Small chunks store each difference as a signed byte
                sbyte *dp2 = (sbyte *)dstPtr;
                for (uint di = 0; di < 8; ++di)
                {
                    dp2[di] = (sbyte)diffs[di];
                }
                dstLen = 8;
            }
            else // MED_TYPE: four 12-bit differences packed into each 6 bytes
            {
                for (uint di = 0, dsti = 0; di < 8; di += 4, dsti += 6)
                {
                    // Create the 12-bit differences (low 11 bits of magnitude + sign flag at 0x800)
                    int d1 = (diffs[di] & 0x7FF) | ((diffs[di] < 0) ? 0x800 : 0);
                    int d2 = (diffs[di + 1] & 0x7FF) | ((diffs[di + 1] < 0) ? 0x800 : 0);
                    int d3 = (diffs[di + 2] & 0x7FF) | ((diffs[di + 2] < 0) ? 0x800 : 0);
                    int d4 = (diffs[di + 3] & 0x7FF) | ((diffs[di + 3] < 0) ? 0x800 : 0);

                    // Write the packed values as two overlapping 32-bit stores (24 bits used each;
                    // see the NOTE in the stereo branch about the trailing byte)
                    *((int *)(dstPtr + dsti)) = d1 | (d2 << 12);
                    *((int *)(dstPtr + dsti + 3)) = d3 | (d4 << 12);
                }
                dstLen = 12;
            }

            // Save the chunk info
            chunks[ci].Type = (ushort)stype;
            chunks[ci].Extra = 0;
        }
    }

    // Stats values, indexed by size type:
    //   stc = run count, stl = total chunks in runs, stmin = runs of length 1, stmax = runs of length 64
    uint *stc = stackalloc uint[3] { 0, 0, 0 };
    uint *stl = stackalloc uint[3] { 0, 0, 0 };
    uint *stmin = stackalloc uint[3] { 0, 0, 0 };
    uint *stmax = stackalloc uint[3] { 0, 0, 0 };

    // Compactify the chunks by combining adjacent ones of the same size type (runs capped at 64;
    // the run length is stored as count - 1 in Extra)
    uint wi = 0, // The chunk index to write
         ri = 0; // The chunk index to read
    while (ri < chunkCount)
    {
        ushort stype = chunks[ri].Type;
        uint rem = Math.Min(chunkCount - ri, 64);
        uint count = 1;
        while ((count < rem) && (chunks[ri + count].Type == stype))
        {
            ++count;
        }
        chunks[wi].Type = stype;
        chunks[wi++].Extra = (ushort)(count - 1);
        ri += count;
        if (stats)
        {
            stc[stype] += 1;
            stl[stype] += count;
            if (count == 1)
            {
                stmin[stype] += 1;
            }
            else if (count == 64)
            {
                stmax[stype] += 1;
            }
        }
    }

    // Report stats (dataLen = total compressed bytes written)
    uint dataLen = (uint)(dstPtr - dstStart);
    if (stats)
    {
        logger.Stats($"Chunk Size Types: " +
            $"S={stc[0]} ({stc[0]*100/(float)wi:0.000}%) " +
            $"M={stc[1]} ({stc[1]*100/(float)wi:0.000}%) " +
            $"F={stc[2]} ({stc[2]*100/(float)wi:0.000}%)");
        logger.Stats($"Average Chunk Run Lengths: " +
            $"S={stl[0]/(float)stc[0]:0.00} " +
            $"M={stl[1]/(float)stc[1]:0.00} " +
            $"F={stl[2]/(float)stc[2]:0.00} " +
            $"Overall={(stl[0] + stl[1] + stl[2]) / (float)wi:0.00}");
        logger.Stats($"Chunk Extrema (min/max): " +
            $"S={stmin[0]}/{stmax[0]} ({stmin[0]*100/(float)stc[0]:0.00}%/{stmax[0]*100/(float)stc[0]:0.00}%) " +
            $"M={stmin[1]}/{stmax[1]} ({stmin[1]*100/(float)stc[1]:0.00}%/{stmax[1]*100/(float)stc[1]:0.00}%) " +
            $"F={stmin[2]}/{stmax[2]} ({stmin[2]*100/(float)stc[2]:0.00}%/{stmax[2]*100/(float)stc[2]:0.00}%)");
        // Original (uncompressed) size in bytes: frames * 2 bytes per sample * channel count
        float startSize = realSize * 2 * (raw.Stereo ? 2 : 1);
        logger.Stats($"Compression Stats: Ratio={dataLen/startSize:0.0000} Speed={realSize/timer.Elapsed.TotalSeconds/1024/1024:0.00} MB/s");
    }

    // Return the object (chunkCount * 8 = frame count actually encoded; wi = compacted chunk count)
    return(new RLADAudio(raw, raw.TakeData(), chunkCount * 8, dataLen, chunks, wi));
}
// Creates a builder with a fresh pipeline logger and an empty source list.
internal RuleSourceBuilder()
{
    _Logger = new PipelineLogger();
    _Source = new List<Source>();
}
// The function that runs on the thread
/// <summary>
/// Worker loop for a build thread. Pulls build items from the task manager until the queue is
/// exhausted or a stop is requested, running each item through the import -> process -> write
/// pipeline and reporting progress and failures through the engine logger.
/// </summary>
/// <param name="rebuild">If true, the cache check is skipped and every item is rebuilt.</param>
private void _thread_func(bool rebuild)
{
    Stopwatch _timer = new Stopwatch();
    PipelineLogger _logger = new PipelineLogger(Engine);
    Results.Reset();

    // Create the content stream (reused for every item built by this thread)
    var cStream = new ContentStream();

    // Iterate over the tasks
    while (!Manager.ShouldStop && Manager.GetTaskItem(out BuildEvent current))
    {
        // Report start
        Engine.Logger.ItemStart(current);
        _timer.Restart();
        _logger.UseEvent(current);
        Results.UseItem(current);

        // Check the source file exists
        if (current.InputTime == BuildEvent.ERROR_TIME)
        {
            Engine.Logger.ItemFailed(current, "Could not find the source file for the item");
            continue;
        }

        // Check for the requested importer and processor, lazily caching instances on first use
        if (!_importers.ContainsKey(current.ImporterName))
        {
            if (Engine.StageCache.Importers.ContainsKey(current.ImporterName))
            {
                _importers.Add(current.ImporterName, new ImporterInstance(Engine.StageCache.Importers[current.ImporterName]));
            }
            else
            {
                Engine.Logger.ItemFailed(current, "The item requested an importer type that does not exist");
                continue;
            }
        }
        if (!_processors.ContainsKey(current.ProcessorName))
        {
            if (Engine.StageCache.Processors.ContainsKey(current.ProcessorName))
            {
                _processors.Add(current.ProcessorName, new ProcessorInstance(Engine.StageCache.Processors[current.ProcessorName]));
            }
            else
            {
                Engine.Logger.ItemFailed(current, "The item requested a processor type that does not exist");
                continue;
            }
        }
        var importer = _importers[current.ImporterName];
        var processor = _processors[current.ProcessorName];

        // Validate stage compatibility (the processor must accept the importer's output type)
        if (!processor.Type.InputType.IsAssignableFrom(importer.Type.OutputType))
        {
            Engine.Logger.ItemFailed(current, "The item specified incompatible stages");
            continue;
        }

        // Resolve the compression policy; when the policy is Never, compress stays false
        bool compress = (processor.Policy == CompressionPolicy.Always) ||
                        (processor.Policy == CompressionPolicy.ReleaseOnly && Engine.IsRelease) ||
                        (processor.Policy == CompressionPolicy.Default && Engine.Compress);

        // Compare the current and cached build events to see if we can skip the build
        // If we are forcing a rebuild we have to build so we can skip the check
        if (!rebuild)
        {
            var cached = BuildEvent.FromCacheFile(Engine, current.Item);
            if (!current.NeedsBuild(cached, processor, compress))
            {
                Engine.Logger.ItemSkipped(current);
                Results.PassItem(cached.UCSize, true);
                if (compress)
                {
                    Results.UpdatePreviousItem(current.RealSize); // Update with the real (compressed) size of the data
                }
                continue;
            }
        }

        // Early stop check
        if (Manager.ShouldStop)
        {
            Engine.Logger.ItemFailed(current, "The build process was stopped while the item was being built");
            break;
        }

        // Run the importer
        FileStream importStream = null;
        FileInfo importInfo = null;
        try
        {
            importInfo = new FileInfo(current.Paths.SourcePath);
            importStream = importInfo.Open(FileMode.Open, FileAccess.Read, FileShare.Read);
        }
        catch (Exception e)
        {
            Engine.Logger.ItemFailed(current, $"The item source file could not be opened, {e.Message}");
            continue;
        }
        object importedData = null;
        try
        {
            _logger.UpdateStageName(current.ImporterName);
            ImporterContext ctx = new ImporterContext(this, _logger, importInfo);
            importedData = importer.Instance.Import(importStream, ctx);
            if (importedData == null)
            {
                Engine.Logger.ItemFailed(current, "The importer for the item did not produce any data");
                continue;
            }

            // Save the dependencies to the event
            if (ctx.Dependencies.Count > 0)
            {
                foreach (var ed in ctx.Dependencies)
                {
                    current.ExternalDependencies.Add((ed, File.GetLastWriteTimeUtc(ed)));
                }
            }
        }
        catch (Exception e)
        {
            _report_stage_exception(current, "importer", e);
            continue;
        }
        finally
        {
            importStream.Dispose();
        }

        // Early stop check
        if (Manager.ShouldStop)
        {
            Engine.Logger.ItemFailed(current, "The build process was stopped while the item was being built");
            break;
        }

        // Run the processor
        Engine.Logger.ItemContinue(current, BuildLogger.ContinueStage.Processing);
        object processedData = null;
        try
        {
            _logger.UpdateStageName(current.ProcessorName);
            ProcessorContext ctx = new ProcessorContext(this, _logger, importInfo);
            processor.UpdateFields(Engine, current);
            processedData = processor.Instance.Process(importedData, ctx);
            if (processedData == null)
            {
                Engine.Logger.ItemFailed(current, "The processor for the item did not produce any data");
                continue;
            }
        }
        catch (Exception e)
        {
            _report_stage_exception(current, "processor", e);
            continue;
        }

        // Early stop check
        if (Manager.ShouldStop)
        {
            Engine.Logger.ItemFailed(current, "The build process was stopped while the item was being built");
            break;
        }

        // Delete the output and cache files so the item can be rebuilt cleanly
        try
        {
            if (File.Exists(current.Paths.OutputPath))
            {
                File.Delete(current.Paths.OutputPath);
            }
            if (File.Exists(current.CachePath))
            {
                File.Delete(current.CachePath);
            }
        }
        catch
        {
            Engine.Logger.ItemFailed(current, "Could not delete the output file to rebuild the item");
            continue;
        }

        // Run the writer
        Engine.Logger.ItemContinue(current, BuildLogger.ContinueStage.Writing);
        try
        {
            _logger.UpdateStageName(processor.Type.WriterType.Name);
            // Resetting the stream finalizes the previous item's output and reports its real size
            uint lastRealSize = cStream.Reset(current.Paths.OutputPath, compress);
            if (lastRealSize != 0)
            {
                Results.UpdatePreviousItem(lastRealSize);
            }
            WriterContext ctx = new WriterContext(this, _logger, importInfo);
            processor.WriterInstance.Write(processedData, cStream, ctx);
            cStream.Flush();
        }
        catch (Exception e)
        {
            _report_stage_exception(current, "writer", e);
            continue;
        }

        // Save the cache
        current.SaveCache(Engine, cStream.OutputSize, compress);

        // Report end
        Engine.Logger.ItemFinished(current, _timer.Elapsed);
        Results.PassItem(cStream.OutputSize, false);
    }

    // Wait for the final output to be complete
    uint realsize = cStream.Reset(null, false);
    if (realsize != 0)
    {
        Results.UpdatePreviousItem(realsize);
    }
    Results.UseItem(null); // In the case that the last item fails
}

// Reports an unhandled exception from a pipeline stage (importer/processor/writer), logging the
// first stack frame location and any inner exception. Extracted from three identical catch blocks.
private void _report_stage_exception(BuildEvent current, string stage, Exception e)
{
    // NOTE(review): IndexOf returns -1 when " at " is absent; the Substring(pos + 4) then starts at
    // offset 3 - preserved from the original behavior, confirm StackTrace always has a frame here
    int pos = e.StackTrace.IndexOf(" at ");
    string loc = e.StackTrace.Substring(pos + 4).Split('\n')[0];
    Engine.Logger.ItemFailed(current, $"Unhandled exception in {stage}, {e.Message} ({e.GetType().Name})\n Source: {loc}");
    if (e.InnerException != null)
    {
        Engine.Logger.ItemFailed(current, $"Inner Exception ({e.InnerException.GetType().Name}): {e.InnerException.Message}");
    }
}