public unsafe bool Save(Project originalProject, int songId, int loopCount, string ffmpegExecutable, string filename, int resX, int resY, bool halfFrameRate, int channelMask, int audioBitRate, int videoBitRate, int pianoRollZoom)
{
    if (channelMask == 0 || loopCount < 1)
        return false;

    Log.LogMessage(LogSeverity.Info, "Detecting FFmpeg...");

    if (!DetectFFmpeg(ffmpegExecutable))
        return false;

    videoResX = resX;
    videoResY = resY;

    var project = originalProject.DeepClone();
    var song = project.GetSong(songId);

    ExtendSongForLooping(song, loopCount);

    Log.LogMessage(LogSeverity.Info, "Initializing channels...");

    var frameRateNumerator = song.Project.PalMode ? 5000773 : 6009883;
    if (halfFrameRate)
        frameRateNumerator /= 2;
    var frameRate = frameRateNumerator.ToString() + "/100000";

    var numChannels = Utils.NumberOfSetBits(channelMask);
    var channelResXFloat = videoResX / (float)numChannels;
    var channelResX = videoResY;
    var channelResY = (int)channelResXFloat;

    var longestChannelName = 0.0f;

    var channelGraphics = new RenderGraphics(channelResX, channelResY);
    var videoGraphics = new RenderGraphics(videoResX, videoResY);

    var theme = RenderTheme.CreateResourcesForGraphics(videoGraphics);
    var bmpWatermark = videoGraphics.CreateBitmapFromResource("VideoWatermark");

    // Generate WAV data for each individual channel for the oscilloscope.
    var channelStates = new List<VideoChannelState>();
    List<short[]> channelsWavData = new List<short[]>();
    var maxAbsSample = 0;

    for (int i = 0, channelIndex = 0; i < song.Channels.Length; i++)
    {
        if ((channelMask & (1 << i)) == 0)
            continue;

        var pattern = song.Channels[i].PatternInstances[0];

        var state = new VideoChannelState();

        state.videoChannelIndex = channelIndex;
        state.songChannelIndex = i;
        state.channel = song.Channels[i];
        state.patternIndex = 0;
        state.channelText = state.channel.Name + (state.channel.IsExpansionChannel ? $" ({song.Project.ExpansionAudioShortName})" : "");
        state.wav = new WavPlayer(SampleRate, 1, 1 << i).GetSongSamples(song, song.Project.PalMode, -1);

        channelStates.Add(state);
        channelIndex++;

        // Find maximum absolute value to rescale the waveform.
        foreach (short s in state.wav)
            maxAbsSample = Math.Max(maxAbsSample, Math.Abs(s));

        // Measure the longest text.
        longestChannelName = Math.Max(longestChannelName, channelGraphics.MeasureString(state.channelText, ThemeBase.FontBigUnscaled));
    }

    // Tweak some cosmetic stuff that depends on resolution.
    var smallChannelText = longestChannelName + 32 + ChannelIconTextSpacing > channelResY * 0.8f;
    var bmpSuffix = smallChannelText ? "" : "@2x";
    var font = smallChannelText ? ThemeBase.FontMediumUnscaled : ThemeBase.FontBigUnscaled;
    var textOffsetY = smallChannelText ? 1 : 4;
    var pianoRollScaleX = Math.Max(0.6f, resY / 1080.0f);
    var pianoRollScaleY = channelResY < VeryThinNoteThreshold ? 0.5f : (channelResY < ThinNoteThreshold ? 0.667f : 1.0f);
    var channelLineWidth = channelResY < ThinNoteThreshold ? 3 : 5;

    foreach (var s in channelStates)
        s.bmp = videoGraphics.CreateBitmapFromResource(ChannelType.Icons[s.channel.Type] + bmpSuffix);

    // Generate the metadata for the video so we know what's happening at every frame.
    var metadata = new VideoMetadataPlayer(SampleRate, 1).GetVideoMetadata(song, song.Project.PalMode, -1);

    var oscScale = maxAbsSample != 0 ? short.MaxValue / (float)maxAbsSample : 1.0f;
    var oscLookback = (metadata[1].wavOffset - metadata[0].wavOffset) / 2;

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    var dummyControl = new DummyGLControl();
    dummyControl.Move(0, 0, videoResX, videoResY);
#endif

    // Setup piano roll and images.
    var pianoRoll = new PianoRoll();
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    pianoRoll.Move(0, 0, channelResX, channelResY);
#else
    pianoRoll.Width = channelResX;
    pianoRoll.Height = channelResY;
#endif

    pianoRoll.StartVideoRecording(channelGraphics, song, pianoRollZoom, pianoRollScaleX, pianoRollScaleY, out var noteSizeY);

    // Build the scrolling data.
    var numVisibleNotes = (int)Math.Floor(channelResY / (float)noteSizeY);
    ComputeChannelsScroll(metadata, channelMask, numVisibleNotes);

    if (song.UsesFamiTrackerTempo)
        SmoothFamiTrackerTempo(metadata);

    var videoImage = new byte[videoResY * videoResX * 4];
    var channelImage = new byte[channelResY * channelResX * 4];
    var oscilloscope = new float[channelResY, 2];

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    var badAlpha = DetectBadOpenGLAlpha(dummyControl, videoGraphics, videoImage);
#endif

    // Start ffmpeg with pipe input.
    var tempFolder = Utils.GetTemporaryDiretory();
    var tempVideoFile = Path.Combine(tempFolder, "temp.h264");
    var tempAudioFile = Path.Combine(tempFolder, "temp.wav");

    try
    {
        Log.LogMessage(LogSeverity.Info, "Exporting audio...");

        // Save audio to temporary file.
        WaveFile.Save(song, tempAudioFile, SampleRate, 1, -1, channelMask);

        var process = LaunchFFmpeg(ffmpegExecutable, $"-y -f rawvideo -pix_fmt argb -s {videoResX}x{videoResY} -r {frameRate} -i - -i \"{tempAudioFile}\" -c:v h264 -pix_fmt yuv420p -b:v {videoBitRate}K -c:a aac -b:a {audioBitRate}k \"{filename}\"", true, false);

        // Generate each of the video frames.
        using (var stream = new BinaryWriter(process.StandardInput.BaseStream))
        {
            for (int f = 0; f < metadata.Length; f++)
            {
                if (Log.ShouldAbortOperation)
                    break;

                if ((f % 100) == 0)
                    Log.LogMessage(LogSeverity.Info, $"Rendering frame {f} / {metadata.Length}");

                Log.ReportProgress(f / (float)(metadata.Length - 1));

                if (halfFrameRate && (f & 1) != 0)
                    continue;

                var frame = metadata[f];

                // Render the full screen overlay.
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                videoGraphics.BeginDraw(dummyControl, videoResY);
#else
                videoGraphics.BeginDraw();
#endif
                videoGraphics.Clear(Color.FromArgb(0, 0, 0, 0));

                foreach (var s in channelStates)
                {
                    int channelPosX0 = (int)Math.Round((s.videoChannelIndex + 0) * channelResXFloat);
                    int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);

                    var channelNameSizeX = videoGraphics.MeasureString(s.channelText, font);
                    var channelIconPosX = channelPosX0 + channelResY / 2 - (channelNameSizeX + s.bmp.Size.Width + ChannelIconTextSpacing) / 2;

                    videoGraphics.FillRectangle(channelIconPosX, ChannelIconPosY, channelIconPosX + s.bmp.Size.Width, ChannelIconPosY + s.bmp.Size.Height, theme.DarkGreyLineBrush2);
                    videoGraphics.DrawBitmap(s.bmp, channelIconPosX, ChannelIconPosY);
                    videoGraphics.DrawText(s.channelText, font, channelIconPosX + s.bmp.Size.Width + ChannelIconTextSpacing, ChannelIconPosY + textOffsetY, theme.LightGreyFillBrush1);

                    if (s.videoChannelIndex > 0)
                        videoGraphics.DrawLine(channelPosX0, 0, channelPosX0, videoResY, theme.BlackBrush, channelLineWidth);

                    var oscMinY = (int)(ChannelIconPosY + s.bmp.Size.Height + 10);
                    var oscMaxY = (int)(oscMinY + 100.0f * (resY / 1080.0f));

                    GenerateOscilloscope(s.wav, frame.wavOffset, (int)Math.Round(SampleRate * OscilloscopeWindowSize), oscLookback, oscScale, channelPosX0 + 10, oscMinY, channelPosX1 - 10, oscMaxY, oscilloscope);

                    videoGraphics.AntiAliasing = true;
                    videoGraphics.DrawLine(oscilloscope, theme.LightGreyFillBrush1);
                    videoGraphics.AntiAliasing = false;
                }

                videoGraphics.DrawBitmap(bmpWatermark, videoResX - bmpWatermark.Size.Width, videoResY - bmpWatermark.Size.Height);
                videoGraphics.EndDraw();
                videoGraphics.GetBitmap(videoImage);

                // Render the piano roll for each channel.
                foreach (var s in channelStates)
                {
                    s.volume = frame.channelVolumes[s.songChannelIndex];
                    s.note = frame.channelNotes[s.songChannelIndex];

                    var color = Color.Transparent;

                    if (s.note.IsMusical)
                    {
                        if (s.channel.Type == ChannelType.Dpcm)
                        {
                            var mapping = project.GetDPCMMapping(s.note.Value);
                            if (mapping != null && mapping.Sample != null)
                                color = mapping.Sample.Color;
                        }
                        else
                        {
                            color = Color.FromArgb(128 + s.volume * 127 / 15, s.note.Instrument != null ? s.note.Instrument.Color : ThemeBase.DarkGreyFillColor2);
                        }
                    }

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                    channelGraphics.BeginDraw(pianoRoll, channelResY);
#else
                    channelGraphics.BeginDraw();
#endif
                    pianoRoll.RenderVideoFrame(channelGraphics, Channel.ChannelTypeToIndex(s.channel.Type), frame.playPattern, frame.playNote, frame.scroll[s.songChannelIndex], s.note.Value, color);
                    channelGraphics.EndDraw();
                    channelGraphics.GetBitmap(channelImage);

                    // Composite the channel image with the full screen video overlay on the CPU.
                    int channelPosX = (int)Math.Round(s.videoChannelIndex * channelResXFloat);
                    int channelPosY = 0;

                    for (int y = 0; y < channelResY; y++)
                    {
                        for (int x = 0; x < channelResX; x++)
                        {
                            int videoIdx = (channelPosY + x) * videoResX * 4 + (channelPosX + y) * 4;
                            int channelIdx = (channelResY - y - 1) * channelResX * 4 + (channelResX - x - 1) * 4;

                            byte videoA = videoImage[videoIdx + 3];
                            byte gradientA = (byte)(x < 255 ? 255 - x : 0); // Doing the gradient on CPU to look the same on GL/D2D.

                            byte channelR = channelImage[channelIdx + 0];
                            byte channelG = channelImage[channelIdx + 1];
                            byte channelB = channelImage[channelIdx + 2];

                            if (videoA != 0 || gradientA != 0)
                            {
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                                // Fix bad sRGB alpha.
                                if (badAlpha)
                                    videoA = SRGBToLinear[videoA];
#endif
                                videoA = Math.Max(videoA, gradientA);

                                int videoR = videoImage[videoIdx + 0];
                                int videoG = videoImage[videoIdx + 1];
                                int videoB = videoImage[videoIdx + 2];

                                // Integer alpha blend.
                                // Note that alpha is pre-multiplied, so if we multiply again, the image will look aliased.
                                channelR = (byte)((channelR * (255 - videoA) + videoR * 255 /*videoA*/) >> 8);
                                channelG = (byte)((channelG * (255 - videoA) + videoG * 255 /*videoA*/) >> 8);
                                channelB = (byte)((channelB * (255 - videoA) + videoB * 255 /*videoA*/) >> 8);
                            }

                            // We byteswap here to match what ffmpeg expects.
                            videoImage[videoIdx + 3] = channelR;
                            videoImage[videoIdx + 2] = channelG;
                            videoImage[videoIdx + 1] = channelB;
                            videoImage[videoIdx + 0] = 255;

                            // To export images to debug.
                            //videoImage[videoIdx + 0] = channelR;
                            //videoImage[videoIdx + 1] = channelG;
                            //videoImage[videoIdx + 2] = channelB;
                            //videoImage[videoIdx + 3] = 255;
                        }
                    }

                    var prevChannelEndPosX = (int)Math.Round((s.videoChannelIndex - 1) * channelResXFloat) + channelResY;

                    // HACK: Since we round the channel positions, we can end up with columns of pixels that aren't byteswapped.
                    if (s.videoChannelIndex > 0 && channelPosX != prevChannelEndPosX)
                    {
                        for (int y = 0; y < videoResY; y++)
                        {
                            int videoIdx = y * videoResX * 4 + (channelPosX - 1) * 4;

                            byte videoR = videoImage[videoIdx + 0];
                            byte videoG = videoImage[videoIdx + 1];
                            byte videoB = videoImage[videoIdx + 2];

                            videoImage[videoIdx + 3] = videoR;
                            videoImage[videoIdx + 2] = videoG;
                            videoImage[videoIdx + 1] = videoB;
                            videoImage[videoIdx + 0] = 255;
                        }
                    }
                }

                stream.Write(videoImage);

                // Dump debug images.
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                //var pb = new Gdk.Pixbuf(channelImage, true, 8, channelResX, channelResY, channelResX * 4);
                //pb.Save($"/home/mat/Downloads/channel.png", "png");
                //var pb = new Gdk.Pixbuf(videoImage, true, 8, videoResX, videoResY, videoResX * 4);
                //pb.Save($"/home/mat/Downloads/frame_{f:D4}.png", "png");
#else
                //fixed (byte* vp = &videoImage[0])
                //{
                //    var b = new System.Drawing.Bitmap(videoResX, videoResY, videoResX * 4, System.Drawing.Imaging.PixelFormat.Format32bppArgb, new IntPtr(vp));
                //    b.Save($"d:\\dump\\pr\\frame_{f:D4}.png");
                //}
#endif
            }
        }

        process.WaitForExit();
        process.Dispose();
        process = null;

        File.Delete(tempAudioFile);
    }
    catch (Exception e)
    {
        Log.LogMessage(LogSeverity.Error, "Error exporting video.");
        Log.LogMessage(LogSeverity.Error, e.Message);
    }
    finally
    {
        pianoRoll.EndVideoRecording();
        foreach (var c in channelStates)
            c.bmp.Dispose();
        theme.Terminate();
        bmpWatermark.Dispose();
        channelGraphics.Dispose();
        videoGraphics.Dispose();
    }

    return true;
}
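
// Imports a FamiTracker text export (the "# FamiTracker text export" format) into a new
// FamiStudio project: song metadata, macros, DPCM samples and key mappings, 2A03 instruments,
// then track orders and pattern rows (notes, volumes and the B/D/F effects).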
public static Project Load(string filename)
{
    var project = new Project();

    var envelopes = new Dictionary<int, Envelope>[Envelope.Max]
    {
        new Dictionary<int, Envelope>(),
        new Dictionary<int, Envelope>(),
        new Dictionary<int, Envelope>()
    };
    var duties = new Dictionary<int, int>();
    var instruments = new Dictionary<int, Instrument>();
    var dpcms = new Dictionary<int, DPCMSample>();
    var columns = new int[5] { 1, 1, 1, 1, 1 };
    var noteLookup = new Dictionary<string, int>
    {
        ["A-"] = 9,
        ["A#"] = 10,
        ["B-"] = 11,
        ["C-"] = 0,
        ["C#"] = 1,
        ["D-"] = 2,
        ["D#"] = 3,
        ["E-"] = 4,
        ["F-"] = 5,
        ["F#"] = 6,
        ["G-"] = 7,
        ["G#"] = 8
    };

    DPCMSample currentDpcm = null;
    int dpcmWriteIdx = 0;
    Song song = null;
    string patternName = "";

    var lines = File.ReadAllLines(filename);

    for (int i = 0; i < lines.Length; i++)
    {
        var line = lines[i].Trim();

        if (line.StartsWith("TITLE"))
        {
            project.Name = line.Substring(5).Trim(' ', '"');
        }
        else if (line.StartsWith("AUTHOR"))
        {
            project.Author = line.Substring(6).Trim(' ', '"');
        }
        else if (line.StartsWith("COPYRIGHT"))
        {
            project.Copyright = line.Substring(9).Trim(' ', '"');
        }
        else if (line.StartsWith("MACRO"))
        {
            var halves = line.Substring(5).Split(':');
            var param = halves[0].Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
            var curve = halves[1].Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

            var type = int.Parse(param[0]);
            var idx  = int.Parse(param[1]);
            var loop = int.Parse(param[2]);
            var rel  = int.Parse(param[3]);

            if (type < 3)
            {
                var env = new Envelope();
                env.Length = curve.Length;
                env.Loop = loop;
                env.Release = type == Envelope.Volume ? rel : -1;

                for (int j = 0; j < curve.Length; j++)
                    env.Values[j] = sbyte.Parse(curve[j]);

                if (type == 2)
                    env.ConvertToAbsolute();

                envelopes[type][idx] = env;
            }
            else if (type == 4)
            {
                duties[idx] = int.Parse(curve[0]);
            }
        }
        else if (line.StartsWith("DPCMDEF"))
        {
            var param = SplitStringKeepQuotes(line.Substring(7));
            var name = param[2];
            var j = 2;

            while (!project.IsDPCMSampleNameUnique(name))
                name = param[2] + "-" + j++;

            currentDpcm = project.CreateDPCMSample(name, new byte[int.Parse(param[1])]);
            dpcms[int.Parse(param[0])] = currentDpcm;
            dpcmWriteIdx = 0;
        }
        else if (line.StartsWith("DPCM"))
        {
            var param = line.Substring(6).Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
            foreach (var s in param)
                currentDpcm.Data[dpcmWriteIdx++] = Convert.ToByte(s, 16);
        }
        else if (line.StartsWith("KEYDPCM"))
        {
            var param = line.Substring(7).Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
            if (param[0] == "0")
            {
                int octave   = int.Parse(param[1]);
                int semitone = int.Parse(param[2]);
                int note     = octave * 12 + semitone + 1;

                if (project.NoteSupportsDPCM(note))
                {
                    int dpcm  = int.Parse(param[3]);
                    int pitch = int.Parse(param[4]);
                    int loop  = int.Parse(param[5]);

                    project.MapDPCMSample(note, dpcms[dpcm], pitch, loop != 0);
                }
            }
        }
        else if (line.StartsWith("INST2A03"))
        {
            var param = SplitStringKeepQuotes(line.Substring(8));

            int idx = int.Parse(param[0]);
            int vol = int.Parse(param[1]);
            int arp = int.Parse(param[2]);
            int pit = int.Parse(param[3]);
            int dut = int.Parse(param[5]);
            var name = param[6];
            var j = 2;

            if (!project.IsInstrumentNameUnique(name))
                name = param[6] + "-" + j++;

            var instrument = project.CreateInstrument(name);

            if (vol >= 0) instrument.Envelopes[0] = envelopes[0][vol].Clone();
            if (arp >= 0) instrument.Envelopes[1] = envelopes[1][arp].Clone();
            if (pit >= 0) instrument.Envelopes[2] = envelopes[2][pit].Clone();
            if (dut >= 0) instrument.DutyCycle = duties[dut];

            instruments[idx] = instrument;
        }
        else if (line.StartsWith("TRACK"))
        {
            var param = SplitStringKeepQuotes(line.Substring(5));

            song = project.CreateSong(param[3]);
            song.Length = 0;
            song.PatternLength = int.Parse(param[0]);
            song.Speed = int.Parse(param[1]);
            song.Tempo = int.Parse(param[2]);
        }
        else if (line.StartsWith("COLUMNS"))
        {
            var param = line.Substring(7).Split(new[] { ' ', ':' }, StringSplitOptions.RemoveEmptyEntries);
            for (int j = 0; j < 5; j++)
                columns[j] = int.Parse(param[j]);
        }
        else if (line.StartsWith("ORDER"))
        {
            var orderIdx = Convert.ToInt32(line.Substring(6, 2), 16);
            var values = line.Substring(5).Split(':')[1].Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
            var order = new int[5];

            for (int j = 0; j < 5; j++)
            {
                int patternIdx = Convert.ToInt32(values[j], 16);
                var name = values[j];
                var pattern = song.Channels[j].GetPattern(name);

                if (pattern == null)
                    pattern = song.Channels[j].CreatePattern(name);

                song.Channels[j].PatternInstances[orderIdx] = pattern;
            }

            song.Length++;
        }
        else if (line.StartsWith("PATTERN"))
        {
            patternName = line.Substring(8);
        }
        else if (line.StartsWith("ROW"))
        {
            var channels = line.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries);
            var rowIdx = Convert.ToInt32(channels[0].Substring(4, 2), 16);

            for (int j = 1; j <= 5; j++)
            {
                var noteData = channels[j].Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
                var pattern = song.Channels[j - 1].GetPattern(patternName);

                if (pattern == null)
                    continue;

                // Note
                if (noteData[0] == "---")
                {
                    pattern.Notes[rowIdx].Value = Note.NoteStop;
                }
                else if (noteData[0] == "===")
                {
                    pattern.Notes[rowIdx].Value = Note.NoteRelease;
                }
                else if (noteData[0] != "...")
                {
                    int famitoneNote;

                    if (j == 4)
                    {
                        famitoneNote = (Convert.ToInt32(noteData[0].Substring(0, 1), 16) + 31) + 1;
                    }
                    else
                    {
                        int semitone = noteLookup[noteData[0].Substring(0, 2)];
                        int octave = noteData[0][2] - '0';

                        famitoneNote = octave * 12 + semitone + 1;
                    }

                    if (famitoneNote >= Note.NoteMin && famitoneNote <= Note.NoteMax)
                    {
                        pattern.Notes[rowIdx].Value = (byte)famitoneNote;
                        pattern.Notes[rowIdx].Instrument = j == 5 ? null : instruments[Convert.ToInt32(noteData[1], 16)];
                    }
                    else
                    {
                        // Note outside of range.
                    }
                }

                // Volume
                if (noteData[2] != ".")
                    pattern.Notes[rowIdx].Volume = Convert.ToByte(noteData[2], 16);

                // Read FX.
                for (int k = 0; k < columns[j - 1]; k++)
                {
                    string fx = noteData[3 + k];

                    switch (fx[0])
                    {
                        case 'B': // Jump
                            pattern.Notes[rowIdx].Effect = Note.EffectJump;
                            break;
                        case 'D': // Skip
                            pattern.Notes[rowIdx].Effect = Note.EffectSkip;
                            break;
                        case 'F': // Tempo
                            pattern.Notes[rowIdx].Effect = Note.EffectSpeed;
                            break;
                        default:
                            continue;
                    }

                    pattern.Notes[rowIdx].EffectParam = Convert.ToByte(fx.Substring(1), 16);
                }
            }
        }
    }

    foreach (var s in project.Songs)
    {
        s.RemoveEmptyPatterns();

        foreach (var c in s.Channels)
            c.ColorizePatterns();
    }

    return project;
}
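
// Exports the selected songs of a project to the FamiTracker 0.4.2 text format:
// header and global settings, merged macros, DPCM samples and key mappings,
// 2A03 instruments, then one TRACK block per song with its ORDER list and PATTERN/ROW data.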
public static bool Save(Project originalProject, string filename, int[] songIds)
{
    var project = originalProject.Clone();
    project.RemoveAllSongsBut(songIds);

    ConvertPitchEnvelopes(project);
    var envelopes = MergeIdenticalEnvelopes(project);

    var lines = new List<string>();

    lines.Add("# FamiTracker text export 0.4.2");
    lines.Add("");

    lines.Add("# Song information");
    lines.Add("TITLE \"" + project.Name + "\"");
    lines.Add("AUTHOR \"" + project.Author + "\"");
    lines.Add("COPYRIGHT \"" + project.Copyright + "\"");
    lines.Add("");

    lines.Add("# Global settings");
    lines.Add("MACHINE 0");
    lines.Add("FRAMERATE 0");
    lines.Add("EXPANSION 0");
    lines.Add("VIBRATO 1");
    lines.Add("SPLIT 21");
    lines.Add("");

    lines.Add("# Macros");
    for (int i = 0; i < Envelope.Max; i++)
    {
        var envArray = envelopes[i];
        for (int j = 0; j < envArray.Length; j++)
        {
            var env = envArray[j];
            lines.Add($"MACRO{i,8} {j,4} {env.Loop,4} {env.Release,4} 0 : {string.Join(" ", env.Values.Take(env.Length))}");
        }
    }

    lines.Add($"MACRO{4,8} {0,4} {-1} -1 0 : 0");
    lines.Add($"MACRO{4,8} {1,4} {-1} -1 0 : 1");
    lines.Add($"MACRO{4,8} {2,4} {-1} -1 0 : 2");
    lines.Add($"MACRO{4,8} {3,4} {-1} -1 0 : 3");
    lines.Add("");

    if (project.UsesSamples)
    {
        lines.Add("# DPCM samples");
        for (int i = 0; i < project.Samples.Count; i++)
        {
            var sample = project.Samples[i];
            lines.Add($"DPCMDEF{i,4}{sample.Data.Length,6} \"{sample.Name}\"");
            lines.Add($"DPCM : {String.Join(" ", sample.Data.Select(x => $"{x:X2}"))}");
        }
        lines.Add("");
    }

    lines.Add("# Instruments");
    for (int i = 0; i < project.Instruments.Count; i++)
    {
        var instrument = project.Instruments[i];

        int volEnvIdx = instrument.Envelopes[Envelope.Volume].Length   > 0 ? Array.IndexOf(envelopes[Envelope.Volume],   instrument.Envelopes[Envelope.Volume])   : -1;
        int arpEnvIdx = instrument.Envelopes[Envelope.Arpeggio].Length > 0 ? Array.IndexOf(envelopes[Envelope.Arpeggio], instrument.Envelopes[Envelope.Arpeggio]) : -1;
        int pitEnvIdx = instrument.Envelopes[Envelope.Pitch].Length    > 0 ? Array.IndexOf(envelopes[Envelope.Pitch],    instrument.Envelopes[Envelope.Pitch])    : -1;

        lines.Add($"INST2A03{i,4}{volEnvIdx,6}{arpEnvIdx,4}{pitEnvIdx,4}{-1,4}{instrument.DutyCycle,4} \"{instrument.Name}\"");
    }

    if (project.UsesSamples)
    {
        lines.Add($"INST2A03{project.Instruments.Count,4}{-1,6}{-1,4}{-1,4}{-1,4}{-1,4} \"DPCM\"");

        for (int i = 0; i < project.SamplesMapping.Length; i++)
        {
            var mapping = project.SamplesMapping[i];

            if (mapping != null && mapping.Sample != null)
            {
                int note = i + Note.DPCMNoteMin;
                var octave = (note - 1) / 12;
                var semitone = (note - 1) % 12;
                var idx = project.Samples.IndexOf(mapping.Sample);
                var loop = mapping.Loop ? 1 : 0;

                lines.Add($"KEYDPCM{project.Instruments.Count,4}{octave,4}{semitone,4}{idx,6}{mapping.Pitch,4}{loop,4}{0,6}{-1,4}");
            }
        }
    }

    lines.Add("");
    lines.Add("# Tracks");

    for (int i = 0; i < project.Songs.Count; i++)
    {
        var song = project.Songs[i];

        CreateMissingPatterns(song);

        lines.Add($"TRACK{song.PatternLength,4}{song.Speed,4}{song.Tempo,4} \"{song.Name}\"");
        lines.Add($"COLUMNS : 1 1 1 1 1");
        lines.Add("");

        for (int j = 0; j < song.Length; j++)
        {
            var line = $"ORDER {j:X2} :";

            for (int k = 0; k < Channel.Count; k++)
                line += $" {song.Channels[k].Patterns.IndexOf(song.Channels[k].PatternInstances[j]):X2}";

            lines.Add(line);
        }
        lines.Add("");

        int maxPatternCount = -1;
        foreach (var channel in song.Channels)
            maxPatternCount = Math.Max(maxPatternCount, channel.Patterns.Count);

        for (int j = 0; j < maxPatternCount; j++)
        {
            lines.Add($"PATTERN {j:X2}");

            for (int k = 0; k < song.PatternLength; k++)
            {
                var line = $"ROW {k:X2}";

                for (int l = 0; l < Channel.Count; l++)
                {
                    var channel = song.Channels[l];

                    if (j >= channel.Patterns.Count)
                    {
                        line += " : ... .. . ...";
                    }
                    else
                    {
                        var pattern = channel.Patterns[j];
                        var note = pattern.Notes[k];

                        var noteString = GetFamiTrackerNoteName(l, note);
                        var volumeString = note.HasVolume ? note.Volume.ToString("X") : ".";
                        var instrumentString = note.IsValid && !note.IsStop ? (note.Instrument == null ? project.Instruments.Count : project.Instruments.IndexOf(note.Instrument)).ToString("X2") : "..";
                        var effectString = "...";

                        switch (note.Effect)
                        {
                            case Note.EffectJump:  effectString = $"B{note.EffectParam:X2}"; break;
                            case Note.EffectSkip:  effectString = $"D{note.EffectParam:X2}"; break;
                            case Note.EffectSpeed: effectString = $"F{note.EffectParam:X2}"; break;
                        }

                        line += $" : {noteString} {instrumentString} {volumeString} {effectString}";
                    }
                }

                lines.Add(line);
            }
            lines.Add("");
        }
    }

    File.WriteAllLines(filename, lines);

    return true;
}
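
// Imports a binary FamiTracker module (.ftm). The file is a header followed by named
// blocks; we first build a table of contents, then read the blocks we support in a fixed order.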
public Project Load(string filename)
{
    var idx = 0;

    bytes = File.ReadAllBytes(filename);

    var id = Encoding.ASCII.GetString(bytes, idx, FileHeaderId.Length);
    idx += FileHeaderId.Length;

    if (id != FileHeaderId)
    {
        Log.LogMessage(LogSeverity.Error, "Invalid FTM file ID.");
        return null;
    }

    var version = BitConverter.ToUInt32(bytes, idx);
    idx += sizeof(uint);

    if (version < MinVersion || version > MaxVersion)
    {
        Log.LogMessage(LogSeverity.Error, "Unsupported file version. Only FTM versions 0.4.4 to 0.4.6 are supported.");
        return null;
    }

    var blockToc = new Dictionary<string, BlockInfo>();

    while (bytes[idx + 0] != 'E' || bytes[idx + 1] != 'N' || bytes[idx + 2] != 'D')
    {
        var blockId = Encoding.ASCII.GetString(bytes, idx, BlockNameLength).TrimEnd('\0');
        idx += BlockNameLength;
        var blockVer = BitConverter.ToInt32(bytes, idx);
        idx += sizeof(uint);
        var blockSize = BitConverter.ToInt32(bytes, idx);
        idx += sizeof(uint);

        blockToc[blockId] = new BlockInfo() { offset = idx, version = blockVer, size = blockSize };

        idx += blockSize;
    }

    // We read blocks in a specific order to minimize the amount of bookkeeping we need to do.
    var blocksToRead = new Dictionary<string, ReadBlockDelegate>
    {
        { "PARAMS",         ReadParams        },
        { "INFO",           ReadInfo          },
        { "HEADER",         ReadHeader        },
        { "DPCM SAMPLES",   ReadDpcmSamples   },
        { "SEQUENCES",      ReadSequences     },
        { "SEQUENCES_VRC6", ReadSequencesVrc6 },
        { "SEQUENCES_N163", ReadSequencesN163 },
        { "INSTRUMENTS",    ReadInstruments   },
        { "FRAMES",         ReadFrames        },
        { "PATTERNS",       ReadPatterns      },
    };

    project = new Project();
    project.TempoMode = TempoType.FamiTracker;

    foreach (var kv in blocksToRead)
    {
        var blockName = kv.Key;
        var blockFunc = kv.Value;

        if (blockToc.TryGetValue(blockName, out var info))
        {
            blockSize = info.size;
            blockVersion = info.version;

            if (!blockFunc(info.offset))
                return null;
        }
    }

    FinishImport();

    return project;
}
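
// A minimal usage sketch (not part of the original source) showing how the text importer
// and the video exporter above could be chained. The enclosing class names are not shown
// in this section, so "FamitrackerTextFile" and "VideoFile" are assumptions used purely for
// illustration, as is Song.Id; substitute the actual types and accessors from the codebase.
public static class ExportExample
{
    public static void ConvertTextToVideo(string inputTxt, string ffmpegPath, string outputMp4)
    {
        // Import a FamiTracker text export into a project (assumed class name).
        var project = FamitrackerTextFile.Load(inputTxt);
        if (project == null)
            return;

        // Render the first song to MP4: 1080p, one loop, full frame rate, all five 2A03
        // channels (mask 0x1F), 192 kbps audio, 8000 Kbps video, arbitrary piano roll zoom.
        new VideoFile().Save(project, project.Songs[0].Id, 1, ffmpegPath, outputMp4,
                             1920, 1080, false, 0x1F, 192, 8000, 3);
    }
}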