public static void unpack_aaf(JAIInitSection[] data, BeBinaryReader br, string projDir, string initName)
{
    var sectList = new List<JAIInitSection>(data);
    try
    {
        Directory.CreateDirectory(projDir);
        Directory.CreateDirectory(projDir + "/include");
    }
    catch (Exception e)
    {
        cmdarg.assert("Could not create project directory: {0}", e.Message);
    }

    var pFile = new jAAFProjectFile();
    pFile.originalFile = initName;
    pFile.projectName = projDir;

    var banks = sectList.FindAll(p => p.type == JAIInitSectionType.IBNK);
    var waves = sectList.FindAll(p => p.type == JAIInitSectionType.WSYS);
    pFile.banks = new jAAFIncludeRecord[banks.Count];
    pFile.waves = new jAAFIncludeRecord[waves.Count];

    int bankIdx = 0;
    int waveIdx = 0;
    Console.WriteLine("Unpack AAF");
    Console.WriteLine("Unpacking clusters first...");

    // Dump every IBNK section into the include folder and record it under the 'banks' cluster.
    for (int i = 0; i < banks.Count; i++)
    {
        var cSect = banks[i];
        var size = aaf_GetSectionSize(cSect, br);
        br.BaseStream.Position = cSect.start;
        var sectionData = br.ReadBytes(size);
        var extension = util.GetFileExtension(cSect);
        File.WriteAllBytes($"{projDir}/include/{bankIdx}{extension}", sectionData);
        Console.WriteLine($"->\tWrote {bankIdx}{extension}");
        pFile.banks[i] = new jAAFIncludeRecord()
        {
            hash = cSect.raw_header,
            path = $"include/{bankIdx}{extension}",
            type = cSect.type.ToString(),
            uid = cSect.number,
            flags = cSect.flags,
        };
        bankIdx++;
    }

    // Same for every WSYS section, recorded under the 'waves' cluster.
    for (int i = 0; i < waves.Count; i++)
    {
        var cSect = waves[i];
        var size = aaf_GetSectionSize(cSect, br);
        br.BaseStream.Position = cSect.start;
        var sectionData = br.ReadBytes(size);
        var extension = util.GetFileExtension(cSect);
        File.WriteAllBytes($"{projDir}/include/{waveIdx}{extension}", sectionData);
        Console.WriteLine($"->\tWrote {waveIdx}{extension}");
        pFile.waves[i] = new jAAFIncludeRecord()
        {
            hash = cSect.raw_header,
            path = $"include/{waveIdx}{extension}",
            type = cSect.type.ToString(),
            uid = cSect.number,
            flags = cSect.flags,
        };
        waveIdx++;
    }

    Console.WriteLine("Unpacking sections...");
    var wavCluster = false;
    var instCluster = false;
    // One record per non-cluster section, plus two marker records for the bank and wave clusters.
    pFile.includes = new jAAFIncludeRecord[data.Length - (waves.Count + banks.Count) + 2];
    var fileIndex = 0;
    var sectCounter = 0;

    for (int i = 0; i < data.Length; i++)
    {
        var cSect = data[i];
        if (cSect.type == JAIInitSectionType.IBNK || cSect.type == JAIInitSectionType.WSYS)
        {
            // IBNK / WSYS sections were already extracted above; emit a single cluster marker
            // record the first time each type is seen, then skip the rest.
            switch (cSect.type)
            {
                case JAIInitSectionType.IBNK:
                    if (instCluster == true) { continue; }
                    pFile.includes[sectCounter] = new jAAFIncludeRecord()
                    {
                        hash = cSect.number,
                        path = "This is a marker for the 'banks' cluster.",
                        type = "INSTRUMENT_CLUSTER",
                        uid = fileIndex,
                        flags = 0xFFFFFFF,
                    };
                    sectCounter++;
                    instCluster = true;
                    continue;
                case JAIInitSectionType.WSYS:
                    if (wavCluster == true) { continue; }
                    pFile.includes[sectCounter] = new jAAFIncludeRecord()
                    {
                        hash = cSect.number,
                        path = "This is a marker for the 'waves' cluster.",
                        type = "WAVE_CLUSTER",
                        uid = fileIndex,
                        flags = 0x7777777,
                    };
                    sectCounter++;
                    wavCluster = true;
                    continue;
            }
        }

        // Section sizes come from aaf_GetSectionSize because the BAA header omits them for many section types.
        var size = aaf_GetSectionSize(cSect, br);
        br.BaseStream.Position = cSect.start;
        var sectionData = br.ReadBytes(size);
        var extension = util.GetFileExtension(cSect);
        File.WriteAllBytes($"{projDir}/include/{fileIndex}{extension}", sectionData);
        Console.WriteLine($"->\tWrote {fileIndex}{extension}");
        pFile.includes[sectCounter] = new jAAFIncludeRecord()
        {
            hash = cSect.raw_header,
            path = $"include/{fileIndex}{extension}",
            type = cSect.type.ToString(),
            uid = cSect.number,
            flags = cSect.flags,
        };
        sectCounter++;
        fileIndex++;
    }

    var convertedData = JsonConvert.SerializeObject(pFile, Formatting.Indented);
    File.WriteAllText($"{projDir}/project.json", convertedData);
    Console.WriteLine("Done.");
}
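
// Possible refactor, sketched here but not wired in: the three extract-and-record loops in
// unpack_aaf above differ only in the output index and the record they fill, so they could
// share a helper like the one below. It only uses types and helpers already present in this
// file (JAIInitSection, jAAFIncludeRecord, aaf_GetSectionSize, util.GetFileExtension); the
// helper name itself is made up for the sketch.
private static jAAFIncludeRecord dumpSectionToInclude(JAIInitSection cSect, BeBinaryReader br, string projDir, int fileIndex)
{
    var size = aaf_GetSectionSize(cSect, br); // Section length, computed the same way unpack_aaf does.
    br.BaseStream.Position = cSect.start;
    var sectionData = br.ReadBytes(size);
    var extension = util.GetFileExtension(cSect);
    File.WriteAllBytes($"{projDir}/include/{fileIndex}{extension}", sectionData);
    Console.WriteLine($"->\tWrote {fileIndex}{extension}");
    return new jAAFIncludeRecord()
    {
        hash = cSect.raw_header,
        path = $"include/{fileIndex}{extension}",
        type = cSect.type.ToString(),
        uid = cSect.number,
        flags = cSect.flags,
    };
}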
public static void pack_aaf(string projectDir, jAAFProjectFile project, string fileName)
{
    if (fileName == null)
    {
        fileName = project.originalFile;
    }
    // FileMode.Create truncates any existing file, so a repack never leaves stale bytes behind
    // (File.OpenWrite would reuse the old file without truncating it).
    var blockStrm = new FileStream(fileName, FileMode.Create, FileAccess.Write);
    var blockWrite = new BeBinaryWriter(blockStrm);

    Console.WriteLine("Prewriting AAF header data");
    // First pass: reserve space for every section header so the real offsets and sizes can be
    // patched in once the section data has been laid out.
    for (int i = 0; i < project.includes.Length; i++)
    {
        var w = project.includes[i];
        var sz = aaf_GetSectionHeaderInfo(w);
        switch (w.type)
        {
            case "INSTRUMENT_CLUSTER":
                Console.WriteLine("INST CLUSTER.");
                blockWrite.Write(2); // Cluster type marker.
                for (int k = 0; k < project.banks.Length; k++)
                {
                    blockWrite.Write((int)0x01); // Offset placeholder
                    blockWrite.Write((int)0x02); // Size placeholder
                    blockWrite.Write((int)0x03); // Flags placeholder
                }
                blockWrite.Write((int)0x00); // Cluster end indicator.
                break;
            case "WAVE_CLUSTER":
                blockWrite.Write(3); // Cluster type marker.
                for (int k = 0; k < project.waves.Length; k++)
                {
                    blockWrite.Write((int)0x04); // Offset placeholder
                    blockWrite.Write((int)0x05); // Size placeholder
                    blockWrite.Write((int)0x06); // Flags placeholder
                }
                blockWrite.Write((int)0x00); // Cluster end indicator.
                break;
            default:
                blockWrite.Write(w.hash);
                for (int k = 0; k < sz.size; k++)
                {
                    blockWrite.Write((int)0x00); // Header placeholder.
                }
                break;
        }
    }

    // Pad the header block out to a 32-byte boundary.
    while ((blockWrite.BaseStream.Position % 32) != 0)
    {
        blockWrite.Write((byte)0x00);
    }
    blockWrite.Flush();

    var head_anchor = 0L;                             // Where the next header entry gets patched.
    var tail_anchor = blockWrite.BaseStream.Position; // Where the next section body gets appended.
    Console.WriteLine($"Header ends at 0x{tail_anchor:X}");
    Console.WriteLine($"-> Project has {project.includes.Length} includes.");
    Console.WriteLine("-> Building project...");

    // Second pass: append each include's data at the tail anchor, then jump back to the head
    // anchor and fill in its header entry.
    for (int i = 0; i < project.includes.Length; i++)
    {
        var dep = project.includes[i]; // Load include.
        switch (dep.type)
        {
            case "INSTRUMENT_CLUSTER":
                blockWrite.BaseStream.Position = head_anchor;
                blockWrite.Write((int)2); // Cluster type marker.
                head_anchor = blockWrite.BaseStream.Position;
                blockWrite.BaseStream.Position = tail_anchor;
                for (int k = 0; k < project.banks.Length; k++)
                {
                    var bankDep = project.banks[k];
                    var bankData = File.ReadAllBytes($"{projectDir}/{bankDep.path}"); // Read include data.
                    Console.WriteLine($"->\t{projectDir}/{bankDep.path}\t(bnk)L:0x{bankData.Length:X} added.");
                    var startPos = tail_anchor; // Bank data starts at the current tail.
                    blockWrite.Write(bankData); // Sprawl data into the file.
                    while ((blockWrite.BaseStream.Position % 32) != 0)
                    {
                        blockWrite.Write((byte)0x00); // Pad to a 32-byte boundary.
                    }
                    var dataSize = bankData.Length;               // Unpadded size goes into the header.
                    tail_anchor = blockWrite.BaseStream.Position; // Advance the tail past the padded data.
                    blockWrite.BaseStream.Position = head_anchor; // Jump to the header entry.
                    blockWrite.Write((int)startPos);
                    blockWrite.Write((int)dataSize);
                    blockWrite.Write((int)bankDep.flags);
                    head_anchor = blockWrite.BaseStream.Position; // Update head anchor.
                    blockWrite.BaseStream.Position = tail_anchor; // Reseek to tail anchor.
                }
                blockWrite.BaseStream.Position = head_anchor;
                blockWrite.Write((int)0x00); // Cluster end indicator.
                head_anchor = blockWrite.BaseStream.Position;
                blockWrite.BaseStream.Position = tail_anchor;
                continue;
            case "WAVE_CLUSTER":
                blockWrite.BaseStream.Position = head_anchor;
                blockWrite.Write((int)3); // Cluster type marker.
                head_anchor = blockWrite.BaseStream.Position;
                blockWrite.BaseStream.Position = tail_anchor;
                for (int k = 0; k < project.waves.Length; k++)
                {
                    var waveDep = project.waves[k];
                    var waveData = File.ReadAllBytes($"{projectDir}/{waveDep.path}"); // Read include data.
                    Console.WriteLine($"->\t{projectDir}/{waveDep.path}\t(wsy)L:0x{waveData.Length:X} added.");
                    var startPos = tail_anchor; // Wave data starts at the current tail.
                    blockWrite.Write(waveData); // Sprawl data into the file.
                    while ((blockWrite.BaseStream.Position % 32) != 0)
                    {
                        blockWrite.Write((byte)0x00); // Pad to a 32-byte boundary.
                    }
                    var dataSize = waveData.Length;               // Unpadded size goes into the header.
                    tail_anchor = blockWrite.BaseStream.Position; // Advance the tail past the padded data.
                    blockWrite.BaseStream.Position = head_anchor; // Jump to the header entry.
                    blockWrite.Write((int)startPos);
                    blockWrite.Write((int)dataSize);
                    blockWrite.Write((int)waveDep.flags);
                    head_anchor = blockWrite.BaseStream.Position; // Update head anchor.
                    blockWrite.BaseStream.Position = tail_anchor; // Reseek to tail anchor.
                }
                blockWrite.BaseStream.Position = head_anchor;
                blockWrite.Write((int)0x00); // Cluster end indicator.
                head_anchor = blockWrite.BaseStream.Position;
                blockWrite.BaseStream.Position = tail_anchor;
                continue;
        }

        var data = File.ReadAllBytes($"{projectDir}/{dep.path}"); // Read include data.
        Console.WriteLine($"->\t{projectDir}/{dep.path}\tL:0x{data.Length:X} added.");
        var sPos = tail_anchor; // Section data starts at the current tail.
        blockWrite.Write(data); // Sprawl data into the file.
        while ((blockWrite.BaseStream.Position & 0xF) != 0)
        {
            blockWrite.Write((byte)0x00); // Pad to a 16-byte boundary.
        }
        tail_anchor = blockWrite.BaseStream.Position;                  // Advance the tail past the padded data.
        blockWrite.BaseStream.Position = head_anchor;                  // Jump to the header entry.
        aaf_PackSection((int)sPos, (int)data.Length, dep, blockWrite); // Write the section header.
        head_anchor = blockWrite.BaseStream.Position;                  // Update head anchor.
        blockWrite.BaseStream.Position = tail_anchor;                  // Reseek to tail anchor, repeat for the next include.
    }

    Console.WriteLine($"-> Flushing into {fileName}");
    blockWrite.Flush();
    blockStrm.Flush();
    blockWrite.Close();
    blockStrm.Close();
    Console.WriteLine("Done.");
}
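
// Sketch of a shared alignment helper (not called above): pack_aaf pads cluster entries to a
// 32-byte boundary and plain sections to a 16-byte boundary, always with zero bytes, so the
// repeated while-loops could collapse into padToAlignment(blockWrite, 32) or
// padToAlignment(blockWrite, 16). The helper name is made up for this sketch.
private static void padToAlignment(BeBinaryWriter bw, long alignment)
{
    while ((bw.BaseStream.Position % alignment) != 0)
    {
        bw.Write((byte)0x00); // Zero-fill up to the next aligned offset.
    }
}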