// Advances this channel's playback state to the note at (patternIdx, noteIdx)
// of the given song, triggering new notes and setting up slides as needed.
public void Advance(Song song, int patternIdx, int noteIdx)
{
    // Locate this channel's pattern; an empty pattern slot means nothing to do.
    var chan = song.GetChannelByType(channelType);
    var inst = chan.PatternInstances[patternIdx];
    if (inst == null)
    {
        return;
    }

    var advNote = inst.Notes[noteIdx];

    if (!advNote.IsValid)
    {
        // Not a playable note, but a standalone volume change still applies.
        if (advNote.HasVolume)
        {
            note.Volume = advNote.Volume;
        }
        return;
    }

    // A new valid note cancels any slide in progress.
    slideStep = 0;

    if (advNote.IsSlideNote)
    {
        // Compute the per-frame slide parameters; on success, play the slide
        // target value so the pitch ramps from the slide offset toward it.
        var table = NesApu.GetNoteTableForChannelType(chan.Type, false);
        if (chan.ComputeSlideNoteParams(patternIdx, noteIdx, table, out slidePitch, out slideStep, out _))
        {
            advNote.Value = (byte)advNote.SlideNoteTarget;
        }
    }

    PlayNote(advNote);
}
// Basic channel state: NTSC-only note table, no PAL or N163 support.
public ChannelState(int apu, int type)
{
    // Identify which APU instance and which channel this state drives.
    channelType = type;
    apuIdx = apu;

    // This constructor has no region flag, so always use the NTSC table.
    noteTable = NesApu.GetNoteTableForChannelType(channelType, false);

    // Start stopped, but at maximum volume so the first note plays as authored.
    note.Value = Note.NoteStop;
    note.Volume = Note.VolumeMax;
}
// Channel state with region (PAL/NTSC) and N163 channel-count awareness.
public ChannelState(int apu, int type, bool pal, int numN163Channels = 1)
{
    apuIdx = apu;
    channelType = type;

    // Pitch limit and note table both depend on the channel type; the note
    // table additionally depends on the region and the N163 channel count.
    maximumPeriod = NesApu.GetPitchLimitForChannelType(type);
    noteTable = NesApu.GetNoteTableForChannelType(type, pal, numN163Channels);

    // Start stopped, full volume, no fine-pitch offset.
    note.Value = Note.NoteStop;
    note.Volume = Note.VolumeMax;
    note.FinePitch = 0;

    // Some expansions use extra precision bits for pitch/slide computations.
    Channel.GetShiftsForType(type, numN163Channels, out pitchShift, out slideShift);
}
// Channel state owned by a player, with region and N163 channel-count awareness.
public ChannelState(IPlayerInterface play, int apu, int type, bool pal, int numN163Channels = 1)
{
    player = play;
    apuIdx = apu;
    channelType = type;
    palPlayback = pal;

    // Pitch limit and note table depend on channel type, region and N163 count.
    maximumPeriod = NesApu.GetPitchLimitForChannelType(channelType);
    noteTable = NesApu.GetNoteTableForChannelType(channelType, pal, numN163Channels);

    // Start stopped, full volume, no fine-pitch offset.
    note.Value = Note.NoteStop;
    // Fix: initialize the volume like the other ChannelState constructors do.
    // Without this, the channel starts at volume 0 and stays silent until the
    // song contains an explicit volume effect.
    note.Volume = Note.VolumeMax;
    note.FinePitch = 0;

    // Some expansions use extra precision bits for pitch/slide computations.
    Channel.GetShiftsForType(type, numN163Channels, out pitchShift, out slideShift);
}
// Reads the current NSF emulation state for one channel and, when something
// observable changed, writes the corresponding note/effect values into the song
// at pattern 'p', row 'n'. 'state' carries the last-written values so we only
// emit changes. Returns true when a musical note was written on this row.
private bool UpdateChannel(int p, int n, Channel channel, ChannelState state)
{
    var project = channel.Song.Project;
    var channelIdx = Channel.ChannelTypeToIndex(channel.Type);
    var hasNote = false;

    if (channel.Type == ChannelType.Dpcm)
    {
        // DPCM: capture the sample currently being played, import it into the
        // project (re-using an identical one if already present) and map it.
        var len = NsfGetState(nsf, channel.Type, STATE_DPCMSAMPLELENGTH, 0);

        if (len > 0)
        {
            // Subtracting one here is not correct. But it is a fact that a lot of games
            // seemed to favor tight sample packing and did not care about playing one
            // extra sample of garbage.
            if (!preserveDpcmPadding)
            {
                Debug.Assert((len & 0xf) == 1);
                len--;
                Debug.Assert((len & 0xf) == 0);
            }

            // Pull the raw DMC bytes out of the emulator one at a time.
            var sampleData = new byte[len];
            for (int i = 0; i < len; i++)
            {
                sampleData[i] = (byte)NsfGetState(nsf, channel.Type, STATE_DPCMSAMPLEDATA, i);
            }

            var sample = project.FindMatchingSample(sampleData);
            if (sample == null)
            {
                sample = project.CreateDPCMSampleFromDmcData($"Sample {project.Samples.Count + 1}", sampleData);
            }

            var loop = NsfGetState(nsf, channel.Type, STATE_DPCMLOOP, 0) != 0;
            var pitch = NsfGetState(nsf, channel.Type, STATE_DPCMPITCH, 0);

            // Find an existing key mapping for this sample/pitch/loop combo, or
            // assign it to the first free DPCM key.
            var note = project.FindDPCMSampleMapping(sample, pitch, loop);
            if (note == -1)
            {
                for (int i = Note.DPCMNoteMin + 1; i <= Note.DPCMNoteMax; i++)
                {
                    if (project.GetDPCMMapping(i) == null)
                    {
                        note = i;
                        project.MapDPCMSample(i, sample, pitch, loop);
                        break;
                    }
                }
            }

            if (note != -1)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).Value = (byte)note;
                hasNote = true;
            }
        }
    }
    else
    {
        var period = NsfGetState(nsf, channel.Type, STATE_PERIOD, 0);
        var volume = NsfGetState(nsf, channel.Type, STATE_VOLUME, 0);
        var duty = NsfGetState(nsf, channel.Type, STATE_DUTYCYCLE, 0);
        var force = false;
        var stop = false;
        var release = false;
        var octave = -1;

        // VRC6 has a much larger volume range (6-bit) than our volume (4-bit).
        if (channel.Type == ChannelType.Vrc6Saw)
        {
            volume >>= 2;
        }
        else if (channel.Type == ChannelType.FdsWave)
        {
            volume = Math.Min(Note.VolumeMax, volume >> 1);
        }
        else if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
        {
            // VRC7 volume register is attenuation, so invert it.
            volume = 15 - volume;
        }

        // Per-channel-type feature flags controlling what we track below.
        var hasTrigger = true;
        var hasPeriod = true;
        var hasOctave = channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6;
        var hasVolume = channel.Type != ChannelType.Triangle;
        var hasPitch = channel.Type != ChannelType.Noise;
        var hasDuty = channel.Type == ChannelType.Square1 || channel.Type == ChannelType.Square2 || channel.Type == ChannelType.Noise || channel.Type == ChannelType.Vrc6Square1 || channel.Type == ChannelType.Vrc6Square2 || channel.Type == ChannelType.Mmc5Square1 || channel.Type == ChannelType.Mmc5Square2;

        if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
        {
            // VRC7 exposes explicit trigger/sustain state, map it to our
            // Triggered/Released/Stopped states.
            var trigger = NsfGetState(nsf, channel.Type, STATE_VRC7TRIGGER, 0) != 0;
            var sustain = NsfGetState(nsf, channel.Type, STATE_VRC7SUSTAIN, 0) != 0;

            var triggerState = trigger ? ChannelState.Triggered : (sustain ? ChannelState.Released : ChannelState.Stopped);

            if (triggerState != state.trigger)
            {
                stop = triggerState == ChannelState.Stopped;
                release = triggerState == ChannelState.Released;
                force |= true;
                state.trigger = triggerState;
            }

            octave = NsfGetState(nsf, channel.Type, STATE_VRC7OCTAVE, 0);
        }
        else
        {
            if (hasTrigger)
            {
                // Without an explicit trigger, infer it from volume/period.
                var trigger = volume != 0 && (channel.Type == ChannelType.Noise || period != 0) ? ChannelState.Triggered : ChannelState.Stopped;

                if (trigger != state.trigger)
                {
                    stop = trigger == ChannelState.Stopped;
                    force |= true;
                    state.trigger = trigger;
                }
            }
        }

        if (hasVolume)
        {
            // Only write a volume effect when it actually changed.
            if (state.volume != volume && volume != 0)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).Volume = (byte)volume;
                state.volume = volume;
            }
        }

        Instrument instrument = null;

        if (hasDuty)
        {
            instrument = GetDutyInstrument(channel, duty);
        }
        else if (channel.Type == ChannelType.FdsWave)
        {
            // FDS: capture the wavetable and modulation table to build an instrument.
            var wavEnv = new sbyte[64];
            var modEnv = new sbyte[32];

            for (int i = 0; i < 64; i++)
            {
                wavEnv[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_FDSWAVETABLE, i);
            }
            for (int i = 0; i < 32; i++)
            {
                modEnv[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONTABLE, i);
            }

            Envelope.ConvertFdsModulationToAbsolute(modEnv);

            var masterVolume = (byte)NsfGetState(nsf, channel.Type, STATE_FDSMASTERVOLUME, 0);

            instrument = GetFdsInstrument(wavEnv, modEnv, masterVolume);

            int modDepth = NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONDEPTH, 0);
            int modSpeed = NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONSPEED, 0);

            // Emit FDS mod depth/speed effects only when they change.
            if (state.fdsModDepth != modDepth)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FdsModDepth = (byte)modDepth;
                state.fdsModDepth = modDepth;
            }
            if (state.fdsModSpeed != modSpeed)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FdsModSpeed = (ushort)modSpeed;
                state.fdsModSpeed = modSpeed;
            }
        }
        else if (channel.Type >= ChannelType.N163Wave1 && channel.Type <= ChannelType.N163Wave8)
        {
            // N163: capture the channel's slice of wave RAM.
            var wavePos = (byte)NsfGetState(nsf, channel.Type, STATE_N163WAVEPOS, 0);
            var waveLen = (byte)NsfGetState(nsf, channel.Type, STATE_N163WAVESIZE, 0);

            if (waveLen > 0)
            {
                var waveData = new sbyte[waveLen];
                for (int i = 0; i < waveLen; i++)
                {
                    waveData[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_N163WAVE, wavePos + i);
                }

                instrument = GetN163Instrument(waveData, wavePos);
            }

            // N163 periods have 2 extra precision bits compared to our tables.
            period >>= 2;
        }
        else if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
        {
            var patch = (byte)NsfGetState(nsf, channel.Type, STATE_VRC7PATCH, 0);
            var regs = new byte[8];

            // Patch 0 is the custom patch; read its 8 registers.
            if (patch == 0)
            {
                for (int i = 0; i < 8; i++)
                {
                    regs[i] = (byte)NsfGetState(nsf, channel.Type, STATE_VRC7PATCHREG, i);
                }
            }

            instrument = GetVrc7Instrument(patch, regs);
        }
        else if (channel.Type >= ChannelType.S5BSquare1 && channel.Type <= ChannelType.S5BSquare3)
        {
            instrument = GetS5BInstrument();
        }
        else
        {
            instrument = GetDutyInstrument(channel, 0);
        }

        // Only consider writing a note when period/octave/instrument changed,
        // or when a trigger/stop/release transition forces it.
        if ((hasPeriod && state.period != period) || (hasOctave && state.octave != octave) || (instrument != state.instrument) || force)
        {
            var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, project.PalMode, project.ExpansionNumChannels);
            var note = release ? Note.NoteRelease : (stop ? Note.NoteStop : state.note);
            var finePitch = 0;

            if (!stop && !release && state.trigger != ChannelState.Stopped)
            {
                if (channel.Type == ChannelType.Noise)
                {
                    // Noise "period" is the 4-bit mode/rate; map it above the musical range.
                    note = (period ^ 0x0f) + 32;
                }
                else
                {
                    note = (byte)GetBestMatchingNote(period, noteTable, out finePitch);
                }

                if (hasOctave)
                {
                    // VRC7 reports note-in-octave + octave; fold into a linear note index.
                    while (note > 12)
                    {
                        note -= 12;
                        octave++;
                    }
                    note += octave * 12;
                    period *= (1 << octave);
                    finePitch = period - noteTable[note];
                }
            }

            // Non-musical notes (stop/release/out-of-range) carry no instrument.
            if (note < Note.MusicalNoteMin || note > Note.MusicalNoteMax)
            {
                instrument = null;
            }

            if ((state.note != note) || (state.instrument != instrument && instrument != null) || force)
            {
                var pattern = GetOrCreatePattern(channel, p);
                var newNote = pattern.GetOrCreateNoteAt(n);
                newNote.Value = (byte)note;
                newNote.Instrument = instrument;
                state.note = note;
                state.octave = octave;
                if (instrument != null)
                {
                    state.instrument = instrument;
                }
                hasNote = note != 0;
            }

            if (hasPitch && !stop)
            {
                Channel.GetShiftsForType(channel.Type, project.ExpansionNumChannels, out int pitchShift, out _);

                // We scale all pitches changes (slides, fine pitch, pitch envelopes) for
                // some channels with HUGE pitch values (N163, VRC7).
                finePitch >>= pitchShift;

                var pitch = (sbyte)Utils.Clamp(finePitch, Note.FinePitchMin, Note.FinePitchMax);

                if (pitch != state.pitch)
                {
                    var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FinePitch = pitch;
                    state.pitch = pitch;
                }
            }

            state.period = period;
        }
    }

    return(hasNote);
}
// Converts FamiTracker portamento (1xx/2xx/3xx) and note-slide (Qxy/Rxy)
// effects into FamiStudio slide notes. 'patternFxData' holds the raw effect
// columns per pattern, indexed by [row, effectColumn].
private void CreateSlideNotes(Song s, Dictionary <Pattern, RowFxData[, ]> patternFxData)
{
    var processedPatterns = new HashSet <Pattern>();

    // Convert slide notes + portamento to our format.
    foreach (var c in s.Channels)
    {
        if (!c.SupportsSlideNotes)
        {
            continue;
        }

        var songSpeed = s.FamitrackerSpeed;
        var lastNoteInstrument = (Instrument)null;
        var lastNoteArpeggio = (Arpeggio)null;
        var lastNoteValue = (byte)Note.NoteInvalid;
        var portamentoSpeed = 0;
        var slideSpeed = 0;
        var slideShift = c.IsN163WaveChannel ? 2 : 0;
        var slideSign = c.IsN163WaveChannel || c.IsFdsWaveChannel || c.IsVrc7FmChannel ? -1 : 1; // Inverted channels.

        for (int p = 0; p < s.Length; p++)
        {
            var pattern = c.PatternInstances[p];

            if (pattern == null)
            {
                continue;
            }

            var patternLen = s.GetPatternLength(p);

            for (var it = pattern.GetDenseNoteIterator(0, patternLen); !it.Done; it.Next())
            {
                var location = new NoteLocation(p, it.CurrentTime);
                var note = it.CurrentNote;

                // Look for speed changes.
                s.ApplySpeedEffectAt(location, ref songSpeed);

                // Each shared pattern instance only needs conversion once, but
                // we still iterate it above to keep songSpeed tracking correct.
                if (!patternFxData.ContainsKey(pattern) || processedPatterns.Contains(pattern))
                {
                    continue;
                }

                var fxData = patternFxData[pattern];
                var slideTarget = 0;

                for (int i = 0; i < fxData.GetLength(1); i++)
                {
                    var fx = fxData[location.NoteIndex, i];

                    if (fx.param != 0)
                    {
                        // When the effect it turned on, we need to add a note.
                        if ((fx.fx == Effect_PortaUp || fx.fx == Effect_PortaDown || fx.fx == Effect_SlideUp || fx.fx == Effect_SlideDown) && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax && (note == null || !note.IsValid))
                        {
                            if (note == null)
                            {
                                note = pattern.GetOrCreateNoteAt(location.NoteIndex);
                                it.Resync();
                            }

                            // Re-assert the previous note without an attack so the
                            // slide has something to slide from.
                            note.Value = lastNoteValue;
                            note.Instrument = lastNoteInstrument;
                            note.Arpeggio = lastNoteArpeggio;
                            note.HasAttack = false;
                        }
                    }

                    if (fx.fx == Effect_PortaUp)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (-fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_PortaDown)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_Portamento)
                    {
                        portamentoSpeed = fx.param;
                    }
                    if (fx.fx == Effect_SlideUp && note != null && note.IsMusical)
                    {
                        slideTarget = Utils.Clamp(note.Value + (fx.param & 0xf), Note.MusicalNoteMin, Note.MusicalNoteMax);
                        slideSpeed = (-((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                    if (fx.fx == Effect_SlideDown && note != null && note.IsMusical)
                    {
                        slideTarget = Utils.Clamp(note.Value - (fx.param & 0xf), Note.MusicalNoteMin, Note.MusicalNoteMax);
                        slideSpeed = (((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                }

                // Create a slide note.
                if (note != null && !note.IsSlideNote)
                {
                    if (note.IsMusical)
                    {
                        var slideSource = note.Value;
                        var noteTable = NesApu.GetNoteTableForChannelType(c.Type, s.Project.PalMode, s.Project.ExpansionNumChannels);
                        var pitchLimit = NesApu.GetPitchLimitForChannelType(c.Type);

                        // If we have a new note with auto-portamento enabled, we need to
                        // swap the notes since our slide notes work backward compared to
                        // FamiTracker.
                        if (portamentoSpeed != 0)
                        {
                            // Ignore notes with no attack since we created them to handle a previous slide.
                            if (note.HasAttack && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax)
                            {
                                slideSpeed = portamentoSpeed;
                                slideTarget = note.Value;
                                slideSource = lastNoteValue;
                                note.Value = lastNoteValue;
                            }
                        }

                        // Our implementation of VRC7 pitches is quite different from FamiTracker.
                        // Compensate for larger pitches in higher octaves by shifting. We cant shift by
                        // a large amount because the period is 9-bit and FamiTracker is restricted to
                        // this for slides (octave never changes).
                        var octaveSlideShift = c.IsVrc7FmChannel && note.Value >= 12 ? 1 : 0;

                        // 3xx/Qxy/Rxy : We know which note we are sliding to and the speed, but we
                        // don't know how many frames it will take to get there.
                        if (slideTarget != 0)
                        {
                            // Advance in the song until we have the correct number of frames.
                            var numFrames = Math.Max(1, Math.Abs((noteTable[slideSource] - noteTable[slideTarget]) / (slideSpeed << octaveSlideShift)));
                            note.SlideNoteTarget = (byte)slideTarget;

                            // TODO: Here we consider if the start note has a delay, but ignore the end note. It might have one too.
                            var nextLocation = location;
                            s.AdvanceNumberOfFrames(ref nextLocation, numFrames, note.HasNoteDelay ? -note.NoteDelay : 0, songSpeed, s.Project.PalMode);

                            // Still to see if there is a note between the current one and the
                            // next note, this could append if you add a note before the slide
                            // is supposed to finish.
                            if (FindNextSlideEffect(c, location, out var nextLocation2, patternFxData))
                            {
                                nextLocation = NoteLocation.Min(nextLocation, nextLocation2);

                                // If the slide is interrupted by another slide effect, we will not reach
                                // the final target, but rather some intermediate note. Let's do our best
                                // to interpolate and figure out the best note.
                                var numFramesUntilNextSlide = s.CountFramesBetween(location, nextLocation, songSpeed, s.Project.PalMode);
                                var ratio = Utils.Clamp(numFramesUntilNextSlide / numFrames, 0.0f, 1.0f);
                                var intermediatePitch = (int)Math.Round(Utils.Lerp(noteTable[slideSource], noteTable[slideTarget], ratio));

                                slideTarget = FindBestMatchingNote(noteTable, intermediatePitch, Math.Sign(slideSpeed));
                                note.SlideNoteTarget = (byte)slideTarget;
                            }

                            if (nextLocation.PatternIndex < s.Length)
                            {
                                // Add an extra note with no attack to stop the slide.
                                var nextPattern = c.PatternInstances[nextLocation.PatternIndex];
                                if (!nextPattern.Notes.TryGetValue(nextLocation.NoteIndex, out var nextNote) || !nextNote.IsValid)
                                {
                                    nextNote = nextPattern.GetOrCreateNoteAt(nextLocation.NoteIndex);
                                    nextNote.Instrument = note.Instrument;
                                    nextNote.Value = (byte)slideTarget;
                                    nextNote.HasAttack = false;
                                    it.Resync();
                                }
                                else if (nextNote != null && nextNote.IsRelease)
                                {
                                    Log.LogMessage(LogSeverity.Warning, $"A slide note ends on a release note. This is currently unsupported and will require manual correction. {GetPatternString(nextPattern, nextLocation.NoteIndex)}");
                                }
                            }

                            // 3xx, Qxx and Rxx stops when its done.
                            slideSpeed = 0;
                        }
                        // 1xx/2xy : We know the speed at which we are sliding, but need to figure out what makes it stop.
                        else if (slideSpeed != 0 && FindNextSlideEffect(c, location, out var nextLocation, patternFxData))
                        {
                            // See how many frames until the slide stops.
                            var numFrames = (int)Math.Round(s.CountFramesBetween(location, nextLocation, songSpeed, s.Project.PalMode));

                            // TODO: Here we consider if the start note has a delay, but ignore the end note. It might have one too.
                            numFrames = Math.Max(1, numFrames - (note.HasNoteDelay ? note.NoteDelay : 0));

                            // Compute the pitch delta and find the closest target note.
                            var newNotePitch = Utils.Clamp(noteTable[slideSource] + numFrames * (slideSpeed << octaveSlideShift), 0, pitchLimit);
                            var newNote = FindBestMatchingNote(noteTable, newNotePitch, Math.Sign(slideSpeed));

                            note.SlideNoteTarget = (byte)newNote;

                            // If the FX was turned off, we need to add an extra note.
                            var nextPattern = c.PatternInstances[nextLocation.PatternIndex];
                            if (!nextPattern.Notes.TryGetValue(nextLocation.NoteIndex, out var nextNote) || !nextNote.IsValid)
                            {
                                nextNote = nextPattern.GetOrCreateNoteAt(nextLocation.NoteIndex);
                                nextNote.Instrument = note.Instrument;
                                nextNote.Value = (byte)newNote;
                                nextNote.HasAttack = false;
                                it.Resync();
                            }
                            else if (nextNote != null && nextNote.IsRelease)
                            {
                                Log.LogMessage(LogSeverity.Warning, $"A slide note ends on a release note. This is currently unsupported and will require manual correction. {GetPatternString(nextPattern, nextLocation.NoteIndex)}");
                            }
                        }
                    }
                }

                // Track the last note played so porta effects can re-assert it.
                if (note != null && (note.IsMusical || note.IsStop))
                {
                    lastNoteValue = note.IsSlideNote ? note.SlideNoteTarget : note.Value;
                    lastNoteInstrument = note.Instrument;
                    lastNoteArpeggio = note.Arpeggio;
                }
            }

            processedPatterns.Add(pattern);
        }
    }
}
// Converts FamiTracker portamento (1xx/2xx/3xx) and note-slide (Qxy/Rxy)
// effects into FamiStudio slide notes. 'patternFxData' holds the raw effect
// columns per pattern, indexed by [row, effectColumn].
private void CreateSlideNotes(Song s, Dictionary <Pattern, RowFxData[, ]> patternFxData)
{
    var processedPatterns = new HashSet <Pattern>();

    // Convert slide notes + portamento to our format.
    foreach (var c in s.Channels)
    {
        if (!c.SupportsSlideNotes)
        {
            continue;
        }

        var songSpeed = s.FamitrackerSpeed;
        var lastNoteInstrument = (Instrument)null;
        var lastNoteValue = (byte)Note.NoteInvalid;
        var portamentoSpeed = 0;
        var slideSpeed = 0;
        var slideShift = c.IsN163WaveChannel ? 2 : 0;
        var slideSign = c.IsN163WaveChannel || c.IsFdsWaveChannel ? -1 : 1; // Inverted channels.

        for (int p = 0; p < s.Length; p++)
        {
            var pattern = c.PatternInstances[p];

            if (pattern == null || !patternFxData.ContainsKey(pattern) || processedPatterns.Contains(pattern))
            {
                continue;
            }

            processedPatterns.Add(pattern);

            var fxData = patternFxData[pattern];
            var patternLen = s.GetPatternLength(p);

            for (var it = pattern.GetNoteIterator(0, patternLen); !it.Done; it.Next())
            {
                var n = it.CurrentTime;
                var note = it.CurrentNote;

                // Look for speed changes.
                foreach (var c2 in s.Channels)
                {
                    var pattern2 = c2.PatternInstances[p];

                    if (pattern2 != null && pattern2.Notes.TryGetValue(n, out var note2) && note2.HasSpeed)
                    {
                        songSpeed = note2.Speed;
                    }
                }

                var slideTarget = 0;

                for (int i = 0; i < fxData.GetLength(1); i++)
                {
                    var fx = fxData[n, i];

                    if (fx.param != 0)
                    {
                        // When the effect it turned on, we need to add a note.
                        if ((fx.fx == Effect_PortaUp || fx.fx == Effect_PortaDown || fx.fx == Effect_SlideUp || fx.fx == Effect_SlideDown) && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax && (note == null || !note.IsValid))
                        {
                            if (note == null)
                            {
                                note = pattern.GetOrCreateNoteAt(n);
                                it.Resync();
                            }

                            note.Value = lastNoteValue;
                            note.Instrument = lastNoteInstrument;
                            note.HasAttack = false;
                        }
                    }

                    if (fx.fx == Effect_PortaUp)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (-fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_PortaDown)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_Portamento)
                    {
                        portamentoSpeed = fx.param;
                    }
                    // Fix: guard against a null or non-musical note before reading
                    // note.Value, otherwise a Qxy/Rxy effect sitting on an empty
                    // row throws a NullReferenceException.
                    if (fx.fx == Effect_SlideUp && note != null && note.IsMusical)
                    {
                        slideTarget = note.Value + (fx.param & 0xf);
                        slideSpeed = (-((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                    if (fx.fx == Effect_SlideDown && note != null && note.IsMusical)
                    {
                        slideTarget = note.Value - (fx.param & 0xf);
                        slideSpeed = (((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                }

                // Create a slide note.
                if (note != null && !note.IsSlideNote)
                {
                    if (note.IsMusical)
                    {
                        var slideSource = note.Value;
                        var noteTable = NesApu.GetNoteTableForChannelType(c.Type, false, s.Project.ExpansionNumChannels);
                        var pitchLimit = NesApu.GetPitchLimitForChannelType(c.Type);

                        // If we have a new note with auto-portamento enabled, we need to
                        // swap the notes since our slide notes work backward compared to
                        // FamiTracker.
                        if (portamentoSpeed != 0)
                        {
                            // Ignore notes with no attack since we created them to handle a previous slide.
                            if (note.HasAttack && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax)
                            {
                                slideSpeed = portamentoSpeed;
                                slideTarget = note.Value;
                                slideSource = lastNoteValue;
                                note.Value = lastNoteValue;
                            }
                        }

                        // Our implementation of VRC7 pitches is quite different from FamiTracker.
                        // Compensate for larger pitches in higher octaves by shifting. We cant shift by
                        // a large amount because the period is 9-bit and FamiTracker is restricted to
                        // this for slides (octave never changes).
                        var octaveSlideShift = c.IsVrc7FmChannel && note.Value >= 12 ? 1 : 0;

                        if (slideTarget != 0)
                        {
                            // TODO: We assume a tempo of 150 here. This is wrong.
                            var numFrames = Math.Max(1, Math.Abs((noteTable[slideSource] - noteTable[slideTarget]) / ((slideSpeed << octaveSlideShift) * songSpeed)));
                            note.SlideNoteTarget = (byte)slideTarget;

                            // Find where the slide would end in pattern/row coordinates.
                            var nn = n + numFrames;
                            var np = p;
                            while (nn >= s.GetPatternLength(np))
                            {
                                nn -= s.GetPatternLength(np);
                                np++;
                            }
                            if (np >= s.Length)
                            {
                                np = s.Length;
                                nn = 0;
                            }

                            // Still to see if there is a note between the current one and the
                            // next note, this could append if you add a note before the slide
                            // is supposed to finish.
                            if (FindNextNoteForSlide(c, p, n, out var np2, out var nn2, patternFxData))
                            {
                                if (np2 < np)
                                {
                                    np = np2;
                                    nn = nn2;
                                }
                                else if (np2 == np)
                                {
                                    nn = Math.Min(nn, nn2);
                                }
                            }

                            if (np < s.Length)
                            {
                                // Add an extra note with no attack to stop the slide.
                                var nextPattern = c.PatternInstances[np];
                                if (!nextPattern.Notes.TryGetValue(nn, out var nextNote) || !nextNote.IsValid)
                                {
                                    nextNote = nextPattern.GetOrCreateNoteAt(nn);
                                    nextNote.Instrument = note.Instrument;
                                    nextNote.Value = (byte)slideTarget;
                                    nextNote.HasAttack = false;
                                    it.Resync();
                                }
                            }

                            // 3xx, Qxx and Rxx stops when its done.
                            slideSpeed = 0;
                        }
                        // Find the next note that would stop the slide or change the FX settings.
                        else if (slideSpeed != 0 && FindNextNoteForSlide(c, p, n, out var np, out var nn, patternFxData))
                        {
                            // Compute the pitch delta and find the closest target note.
                            var numFrames = (s.GetPatternStartNote(np, nn) - s.GetPatternStartNote(p, n)) * songSpeed; // TODO: PAL.
                            var newNotePitch = Utils.Clamp(noteTable[slideSource] + numFrames * (slideSpeed << octaveSlideShift), 0, pitchLimit);
                            var newNote = FindBestMatchingNote(noteTable, newNotePitch, Math.Sign(slideSpeed));

                            note.SlideNoteTarget = (byte)newNote;

                            // If the FX was turned off, we need to add an extra note.
                            var nextPattern = c.PatternInstances[np];
                            if (!nextPattern.Notes.TryGetValue(nn, out var nextNote) || !nextNote.IsValid)
                            {
                                nextNote = nextPattern.GetOrCreateNoteAt(nn);
                                nextNote.Instrument = note.Instrument;
                                nextNote.Value = (byte)newNote;
                                nextNote.HasAttack = false;
                                it.Resync();
                            }
                        }
                    }
                }

                // Track the last note played so porta effects can re-assert it.
                if (note != null && (note.IsMusical || note.IsStop))
                {
                    lastNoteValue = note.IsSlideNote ? note.SlideNoteTarget : note.Value;
                    lastNoteInstrument = note.Instrument;
                }
            }
        }
    }
}
// Emits the sound-engine byte stream for one song: one stream per channel, with
// identical packed patterns de-duplicated into shared 'ref' blocks. Returns the
// total size in bytes, or -1 when the packed-pattern limit is exceeded. When
// 'test' is true nothing is appended to 'lines'; only the size is computed.
private int OutputSong(Song song, int songIdx, int speedChannel, int factor, bool test)
{
    var packedPatternBuffers = new List <List <string> >(globalPacketPatternBuffers);
    var size = 0;
    var emptyPattern = new Pattern(-1, song, 0, "");
    var emptyNote = new Note(Note.NoteInvalid);

    for (int c = 0; c < song.Channels.Length; c++)
    {
        if (!test)
        {
            lines.Add($"\n{ll}song{songIdx}ch{c}:");
        }

        var channel = song.Channels[c];
        var currentSpeed = song.FamitrackerSpeed;
        var isSpeedChannel = c == speedChannel;
        var instrument = (Instrument)null;
        var previousNoteLength = song.NoteLength;

        // Only the designated speed channel carries tempo/speed opcodes ($fb).
        if (isSpeedChannel && project.UsesFamiTrackerTempo)
        {
            if (!test)
            {
                lines.Add($"\t{db} $fb, ${song.FamitrackerSpeed:x2}");
            }
            size += 2;
        }

        for (int p = 0; p < song.Length; p++)
        {
            var prevNoteValue = Note.NoteInvalid;
            var pattern = channel.PatternInstances[p] == null ? emptyPattern : channel.PatternInstances[p];
            var patternBuffer = new List <string>();

            if (p == song.LoopPoint)
            {
                if (!test)
                {
                    lines.Add($"{ll}song{songIdx}ch{c}loop:");
                }

                // Clear stored instrument to force a reset. We might be looping
                // to a section where the instrument was set from a previous pattern.
                instrument = null;
            }

            // FamiStudio tempo on non-NTSC machines: emit a tempo envelope
            // change when the note length changes (or at the loop point).
            if (isSpeedChannel && project.UsesFamiStudioTempo && machine != MachineType.NTSC)
            {
                var noteLength = song.GetPatternNoteLength(p);

                if (noteLength != previousNoteLength || (p == song.LoopPoint && p != 0))
                {
                    if (!test)
                    {
                        patternBuffer.Add($"$fb");
                        patternBuffer.Add($"{lo}({ll}tempo_env{noteLength})");
                        patternBuffer.Add($"{hi}({ll}tempo_env{noteLength})");
                        previousNoteLength = noteLength;
                    }

                    size += 3;
                }
            }

            var patternLength = song.GetPatternLength(p);
            var numValidNotes = patternLength;

            // Note: the iterator is advanced manually (it.Next()) because empty
            // rows and the note/empty/note encoding consume extra iterations.
            for (var it = pattern.GetNoteIterator(0, patternLength); !it.Done;)
            {
                var time = it.CurrentTime;
                var note = it.CurrentNote;

                if (note == null)
                {
                    note = emptyNote;
                }

                if (isSpeedChannel && song.UsesFamiTrackerTempo)
                {
                    var speed = FindEffectParam(song, p, time, Note.EffectSpeed);
                    if (speed >= 0)
                    {
                        currentSpeed = speed;
                        patternBuffer.Add($"${0xfb:x2}");
                        patternBuffer.Add($"${(byte)speed:x2}");
                    }
                }

                it.Next();

                // Effect opcodes always precede the note byte itself.
                if (note.HasVolume)
                {
                    patternBuffer.Add($"${(byte)(0x70 | note.Volume):x2}");
                }
                if (note.HasFinePitch)
                {
                    patternBuffer.Add($"${0x65:x2}");
                    patternBuffer.Add($"${note.FinePitch:x2}");
                }
                if (note.HasVibrato)
                {
                    patternBuffer.Add($"${0x63:x2}");
                    patternBuffer.Add($"{lo}({vibratoEnvelopeNames[note.RawVibrato]})");
                    patternBuffer.Add($"{hi}({vibratoEnvelopeNames[note.RawVibrato]})");

                    // Vibrato 0 means "clear the pitch override" ($64).
                    if (note.RawVibrato == 0)
                    {
                        patternBuffer.Add($"${0x64:x2}");
                    }
                }
                if (note.HasFdsModSpeed)
                {
                    patternBuffer.Add($"${0x66:x2}");
                    patternBuffer.Add($"${(note.FdsModSpeed >> 0) & 0xff:x2}");
                    patternBuffer.Add($"${(note.FdsModSpeed >> 8) & 0xff:x2}");
                }
                if (note.HasFdsModDepth)
                {
                    patternBuffer.Add($"${0x67:x2}");
                    patternBuffer.Add($"${note.FdsModDepth:x2}");
                }

                if (note.IsValid)
                {
                    // Instrument change.
                    if (note.IsMusical)
                    {
                        if (note.Instrument != instrument)
                        {
                            int idx = instrumentIndices[note.Instrument];
                            patternBuffer.Add($"${(byte)(0x80 | (idx << 1)):x2}");
                            instrument = note.Instrument;
                        }
                        else if (!note.HasAttack)
                        {
                            // TODO: Remove note entirely after a slide that matches the next note with no attack.
                            patternBuffer.Add($"${0x62:x2}");
                        }
                    }

                    int numNotes = 0;

                    if (kernel != FamiToneKernel.FamiStudio)
                    {
                        // Note -> Empty -> Note special encoding.
                        if (time < patternLength - 2)
                        {
                            pattern.Notes.TryGetValue(time + 1, out var nextNote1);
                            pattern.Notes.TryGetValue(time + 2, out var nextNote2);

                            var valid1 = (nextNote1 != null && nextNote1.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 1, Note.EffectSpeed) >= 0);
                            var valid2 = (nextNote2 != null && nextNote2.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 2, Note.EffectSpeed) >= 0);

                            if (!valid1 && valid2)
                            {
                                it.Next();
                                numValidNotes--;
                                numNotes = 1;
                            }
                        }
                    }

                    if (note.IsSlideNote)
                    {
                        // Compute the slide step for both regions, then pick per machine.
                        var noteTableNtsc = NesApu.GetNoteTableForChannelType(channel.Type, false, song.Project.ExpansionNumChannels);
                        var noteTablePal = NesApu.GetNoteTableForChannelType(channel.Type, true, song.Project.ExpansionNumChannels);

                        var found = true;
                        found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTableNtsc, out _, out int stepSizeNtsc, out _);
                        found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTablePal, out _, out int stepSizePal, out _);

                        if (song.Project.UsesExpansionAudio || machine == MachineType.NTSC)
                        {
                            stepSizePal = stepSizeNtsc;
                        }
                        else if (machine == MachineType.PAL)
                        {
                            stepSizeNtsc = stepSizePal;
                        }

                        if (found)
                        {
                            // Take the (signed) maximum of both notes so that we are garantee to reach our note.
                            var stepSize = Math.Max(Math.Abs(stepSizeNtsc), Math.Abs(stepSizePal)) * Math.Sign(stepSizeNtsc);
                            patternBuffer.Add($"${0x61:x2}");
                            patternBuffer.Add($"${(byte)stepSize:x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.Value):x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.SlideNoteTarget):x2}");
                            continue;
                        }
                    }

                    patternBuffer.Add($"${EncodeNoteValue(c, note.Value, numNotes):x2}");
                    prevNoteValue = note.Value;
                }
                else
                {
                    // Run-length encode consecutive empty rows into one opcode.
                    int numEmptyNotes = 0;

                    while (!it.Done)
                    {
                        time = it.CurrentTime;
                        note = it.CurrentNote;

                        if (note == null)
                        {
                            note = emptyNote;
                        }

                        // Stop the run at the repeat cap or at any row with content.
                        if (numEmptyNotes >= maxRepeatCount || note.IsValid || note.HasVolume || note.HasVibrato || note.HasFinePitch || note.HasFdsModSpeed || note.HasFdsModDepth || (isSpeedChannel && FindEffectParam(song, p, time, Note.EffectSpeed) >= 0))
                        {
                            break;
                        }

                        numEmptyNotes++;
                        it.Next();
                    }

                    numValidNotes -= numEmptyNotes;
                    patternBuffer.Add($"${(byte)(0x81 | (numEmptyNotes << 1)):x2}");
                }
            }

            // De-duplicate: identical packed patterns become a $ff reference.
            int matchingPatternIdx = -1;

            if (patternBuffer.Count > 0)
            {
                // Only bother looking for a match if the pattern is big enough
                // for a reference (4 bytes) to actually save space.
                if (patternBuffer.Count > 4)
                {
                    for (int j = 0; j < packedPatternBuffers.Count; j++)
                    {
                        if (packedPatternBuffers[j].SequenceEqual(patternBuffer))
                        {
                            matchingPatternIdx = j;
                            break;
                        }
                    }
                }

                if (matchingPatternIdx < 0)
                {
                    if (packedPatternBuffers.Count > MaxPackedPatterns)
                    {
                        return(-1); // TODO: Error.
                    }

                    packedPatternBuffers.Add(patternBuffer);

                    size += patternBuffer.Count;

                    if (!test)
                    {
                        lines.Add($"{ll}ref{packedPatternBuffers.Count - 1}:");
                        lines.Add($"\t{db} {String.Join(",", patternBuffer)}");
                    }
                }
                else
                {
                    if (!test)
                    {
                        lines.Add($"\t{db} $ff,${numValidNotes:x2}");
                        lines.Add($"\t{dw} {ll}ref{matchingPatternIdx}");
                    }

                    size += 4;
                }
            }
        }

        // End-of-channel: $fd opcode + 2-byte loop address.
        if (!test)
        {
            lines.Add($"\t{db} $fd");
            lines.Add($"\t{dw} {ll}song{songIdx}ch{c}loop");
        }

        size += 3;
    }

    // Only commit the shared pattern table when actually emitting.
    if (!test)
    {
        globalPacketPatternBuffers = packedPatternBuffers;
    }

    return(size);
}
public static bool Save(Project originalProject, string filename, int[] songIds) { var project = originalProject.Clone(); project.RemoveAllSongsBut(songIds); ConvertPitchEnvelopes(project); var envelopes = MergeIdenticalEnvelopes(project); var lines = new List <string>(); lines.Add("# FamiTracker text export 0.4.2"); lines.Add(""); lines.Add("# Song information"); lines.Add("TITLE \"" + project.Name + "\""); lines.Add("AUTHOR \"" + project.Author + "\""); lines.Add("COPYRIGHT \"" + project.Copyright + "\""); lines.Add(""); lines.Add("# Global settings"); lines.Add("MACHINE 0"); lines.Add("FRAMERATE 0"); lines.Add("EXPANSION " + project.ExpansionAudio); lines.Add("VIBRATO 1"); lines.Add("SPLIT 21"); lines.Add(""); lines.Add("# Macros"); for (int i = 0; i < Envelope.Max; i++) { var envArray = envelopes[Project.ExpansionNone, i]; for (int j = 0; j < envArray.Length; j++) { var env = envArray[j]; lines.Add($"MACRO{i,8} {j,4} {env.Loop,4} {(env.Release >= 0 ? env.Release - 1 : -1),4} 0 : {string.Join(" ", env.Values.Take(env.Length))}"); } } lines.Add($"MACRO{4,8} {0,4} {-1} -1 0 : 0"); lines.Add($"MACRO{4,8} {1,4} {-1} -1 0 : 1"); lines.Add($"MACRO{4,8} {2,4} {-1} -1 0 : 2"); lines.Add($"MACRO{4,8} {3,4} {-1} -1 0 : 3"); if (project.ExpansionAudio == Project.ExpansionVrc6) { for (int i = 0; i < Envelope.Max; i++) { var envArray = envelopes[Project.ExpansionVrc6, i]; for (int j = 0; j < envArray.Length; j++) { var env = envArray[j]; lines.Add($"MACROVRC6{i,8} {j,4} {env.Loop,4} {(env.Release >= 0 ? 
env.Release - 1 : -1),4} 0 : {string.Join(" ", env.Values.Take(env.Length))}"); } } lines.Add($"MACROVRC6{4,8} {0,4} {-1} -1 0 : 0"); lines.Add($"MACROVRC6{4,8} {1,4} {-1} -1 0 : 1"); lines.Add($"MACROVRC6{4,8} {2,4} {-1} -1 0 : 2"); lines.Add($"MACROVRC6{4,8} {3,4} {-1} -1 0 : 3"); lines.Add($"MACROVRC6{4,8} {4,4} {-1} -1 0 : 4"); lines.Add($"MACROVRC6{4,8} {5,4} {-1} -1 0 : 5"); lines.Add($"MACROVRC6{4,8} {6,4} {-1} -1 0 : 6"); lines.Add($"MACROVRC6{4,8} {7,4} {-1} -1 0 : 7"); } lines.Add(""); if (project.UsesSamples) { lines.Add("# DPCM samples"); for (int i = 0; i < project.Samples.Count; i++) { var sample = project.Samples[i]; lines.Add($"DPCMDEF{i,4}{sample.Data.Length,6} \"{sample.Name}\""); lines.Add($"DPCM : {String.Join(" ", sample.Data.Select(x => $"{x:X2}"))}"); } lines.Add(""); } lines.Add("# Instruments"); for (int i = 0; i < project.Instruments.Count; i++) { var instrument = project.Instruments[i]; var expIdx = instrument.IsExpansionInstrument ? 1 : 0; int volEnvIdx = instrument.Envelopes[Envelope.Volume].Length > 0 ? Array.IndexOf(envelopes[expIdx, Envelope.Volume], instrument.Envelopes[Envelope.Volume]) : -1; int arpEnvIdx = instrument.Envelopes[Envelope.Arpeggio].Length > 0 ? Array.IndexOf(envelopes[expIdx, Envelope.Arpeggio], instrument.Envelopes[Envelope.Arpeggio]) : -1; int pitEnvIdx = instrument.Envelopes[Envelope.Pitch].Length > 0 ? 
Array.IndexOf(envelopes[expIdx, Envelope.Pitch], instrument.Envelopes[Envelope.Pitch]) : -1; if (instrument.ExpansionType == Project.ExpansionNone) { lines.Add($"INST2A03{i,4}{volEnvIdx,6}{arpEnvIdx,4}{pitEnvIdx,4}{-1,4}{instrument.DutyCycle,4} \"{instrument.Name}\""); } else if (instrument.ExpansionType == Project.ExpansionVrc6) { lines.Add($"INSTVRC6{i,4}{volEnvIdx,6}{arpEnvIdx,4}{pitEnvIdx,4}{-1,4}{instrument.DutyCycle,4} \"{instrument.Name}\""); } } if (project.UsesSamples) { lines.Add($"INST2A03{project.Instruments.Count,4}{-1,6}{-1,4}{-1,4}{-1,4}{-1,4} \"DPCM\""); for (int i = 0; i < project.SamplesMapping.Length; i++) { var mapping = project.SamplesMapping[i]; if (mapping != null && mapping.Sample != null) { int note = i + Note.DPCMNoteMin; var octave = (note - 1) / 12; var semitone = (note - 1) % 12; var idx = project.Samples.IndexOf(mapping.Sample); var loop = mapping.Loop ? 1 : 0; lines.Add($"KEYDPCM{project.Instruments.Count,4}{octave,4}{semitone,4}{idx,6}{mapping.Pitch,4}{loop,4}{0,6}{-1,4}"); } } } lines.Add(""); lines.Add("# Tracks"); for (int i = 0; i < project.Songs.Count; i++) { var song = project.Songs[i]; song.CleanupUnusedPatterns(); CreateMissingPatterns(song); // Find all the places where we need to turn of 1xx/2xx/3xx after we are done. 
//var portamentoTransitions = new Dictionary<Pattern, List<int>>(); //var slideTransitions = new Dictionary<Pattern, List<int>>(); //FindSlideNoteTransitions(song, portamentoTransitions, slideTransitions); lines.Add($"TRACK{song.PatternLength,4}{song.Speed,4}{song.Tempo,4} \"{song.Name}\""); lines.Add($"COLUMNS : {string.Join(" ", Enumerable.Repeat(3, song.Channels.Length))}"); lines.Add(""); for (int j = 0; j < song.Length; j++) { var line = $"ORDER {j:X2} :"; for (int k = 0; k < song.Channels.Length; k++) { line += $" {song.Channels[k].Patterns.IndexOf(song.Channels[k].PatternInstances[j]):X2}"; } lines.Add(line); } lines.Add(""); int maxPatternCount = -1; foreach (var channel in song.Channels) { maxPatternCount = Math.Max(maxPatternCount, channel.Patterns.Count); } var patternRows = new Dictionary <Pattern, List <string> >(); for (int c = 0; c < song.Channels.Length; c++) { var channel = song.Channels[c]; var prevNoteValue = Note.NoteInvalid; var prevSlideEffect = '\0'; for (int p = 0; p < song.Length; p++) { var pattern = channel.PatternInstances[p]; if (patternRows.ContainsKey(pattern)) { continue; } var patternLines = new List <string>(); for (int n = 0; n < song.PatternLength; n++) { var note = pattern.Notes[n]; var noteString = GetFamiTrackerNoteName(c, note); var volumeString = note.HasVolume ? note.Volume.ToString("X") : "."; var instrumentString = note.IsValid && !note.IsStop ? (note.Instrument == null ? project.Instruments.Count : project.Instruments.IndexOf(note.Instrument)).ToString("X2") : ".."; var effectString = ""; var noAttack = !note.HasAttack && prevNoteValue == note.Value && (prevSlideEffect == '\0' || prevSlideEffect == 'Q' || prevSlideEffect == '3'); if (note.IsSlideNote && note.IsMusical) { // TODO: PAL. 
var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, false); channel.ComputeSlideNoteParams(p, n, noteTable, out _, out int stepSize, out _); var absNoteDelta = Math.Abs(note.Value - note.SlideNoteTarget); // See if we can use Qxy/Rxy (slide up/down y semitones, at speed x), this is preferable. if (absNoteDelta < 16) { if (prevSlideEffect == '1' || prevSlideEffect == '2' || prevSlideEffect == '3') { effectString += $" {prevSlideEffect}00"; } // FamiTracker use 2x + 1, find the number that is just above our speed. var speed = 0; for (int x = 14; x >= 0; x--) { if ((2 * x + 1) < Math.Abs(stepSize / 2.0f)) { speed = x + 1; break; } } if (note.SlideNoteTarget > note.Value) { effectString += $" Q{speed:X1}{absNoteDelta:X1}"; } else { effectString += $" R{speed:X1}{absNoteDelta:X1}"; } prevSlideEffect = 'Q'; } else { // We have one bit of fraction. FramiTracker does not. var ceilStepSize = Utils.SignedCeil(stepSize / 2.0f); // If the previous note matched too, we can use 3xx (auto-portamento). if (prevNoteValue == note.Value) { if (prevSlideEffect == '1' || prevSlideEffect == '2') { effectString += $" 100"; } noteString = GetFamiTrackerNoteName(c, new Note(note.SlideNoteTarget)); effectString += $" 3{Math.Abs(ceilStepSize):X2}"; prevSlideEffect = '3'; noAttack = false; // Need to force attack when starting auto-portamento unfortunately. } else { // We have one bit of fraction. FramiTracker does not. 
var floorStepSize = Utils.SignedFloor(stepSize / 2.0f); if (prevSlideEffect == '3') { effectString += $" 300"; } if (stepSize > 0) { effectString += $" 2{ floorStepSize:X2}"; prevSlideEffect = '2'; } else if (stepSize < 0) { effectString += $" 1{-floorStepSize:X2}"; prevSlideEffect = '1'; } } } } else if ((note.IsMusical || note.IsStop) && prevSlideEffect != '\0') { if (prevSlideEffect == '1' || prevSlideEffect == '2' || prevSlideEffect == '3') { effectString += $" {prevSlideEffect}00"; } prevSlideEffect = '\0'; } if (note.HasJump) { effectString += $" B{note.Jump:X2}"; } if (note.HasSkip) { effectString += $" D{note.Skip:X2}"; } if (note.HasSpeed) { effectString += $" F{note.Speed:X2}"; } if (note.HasVibrato) { effectString += $" 4{VibratoSpeedExportLookup[note.VibratoSpeed]:X1}{note.VibratoDepth:X1}"; } while (effectString.Length < 12) { effectString += " ..."; } if (noAttack) { noteString = "..."; instrumentString = ".."; } var line = $" : {noteString} {instrumentString} {volumeString}{effectString}"; if (note.IsMusical || note.IsStop) { prevNoteValue = note.IsSlideNote ? note.SlideNoteTarget : note.Value; } patternLines.Add(line); } patternRows[pattern] = patternLines; } } for (int j = 0; j < maxPatternCount; j++) { lines.Add($"PATTERN {j:X2}"); for (int p = 0; p < song.PatternLength; p++) { var line = $"ROW {p:X2}"; for (int c = 0; c < song.Channels.Length; c++) { var channel = song.Channels[c]; if (j >= channel.Patterns.Count) { line += " : ... .. . ... ... ..."; } else { line += patternRows[channel.Patterns[j]][p]; } } lines.Add(line); } lines.Add(""); } } File.WriteAllLines(filename, lines); return(true); }
// Converts raw FamiTracker slide effects (1xx/2xx = pitch slide up/down, 3xx = portamento,
// Qxy/Rxy = note slide up/down) into FamiStudio slide notes, patching the song in-place.
// s: song whose channel patterns are modified.
// patternFxData: per-pattern raw FamiTracker effect columns, indexed [row, effectColumn].
private static void CreateSlideNotes(Song s, Dictionary<Pattern, RowFxData[,]> patternFxData)
{
    // Convert slide notes + portamento to our format.
    foreach (var c in s.Channels)
    {
        if (!c.SupportsSlideNotes)
        {
            continue;
        }

        var lastNoteInstrument = (Instrument)null;
        var lastNoteValue = (byte)Note.NoteInvalid;
        var portamentoSpeed = 0; // Last 3xx parameter seen; 0 means portamento is off.

        for (int p = 0; p < s.Length; p++)
        {
            var pattern = c.PatternInstances[p];
            var fxData = patternFxData[pattern];

            for (int n = 0; n < s.PatternLength; n++)
            {
                var note = pattern.Notes[n];
                var slideSpeed = 0;  // Signed step from 1xx/2xx/Qxy/Rxy (negative = upward slide here).
                var slideTarget = 0; // Absolute target note value from Qxy/Rxy; 0 = no target.

                // Scan every effect column of this row and accumulate slide state.
                for (int i = 0; i < fxData.GetLength(1); i++)
                {
                    var fx = fxData[n, i];

                    if (fx.param != 0)
                    {
                        // When the effect it turned on, we need to add a note.
                        if ((fx.fx == '1' || fx.fx == '2' || fx.fx == 'Q' || fx.fx == 'R') &&
                            lastNoteValue >= Note.MusicalNoteMin &&
                            lastNoteValue <= Note.MusicalNoteMax &&
                            !note.IsValid)
                        {
                            pattern.Notes[n].Value = lastNoteValue;
                            pattern.Notes[n].Instrument = lastNoteInstrument;
                            pattern.Notes[n].HasAttack = false;
                            note = pattern.Notes[n];
                        }

                        if (fx.fx == '1') { slideSpeed = -fx.param; }
                        if (fx.fx == '2') { slideSpeed = fx.param; }
                        if (fx.fx == '3') { portamentoSpeed = fx.param; }
                        if (fx.fx == 'Q')
                        {
                            // Qxy: slide up 'y' semitones; speed derived from 'x' as 2x+1.
                            slideTarget = note.Value + (fx.param & 0xf);
                            slideSpeed = -((fx.param >> 4) * 2 + 1);
                        }
                        if (fx.fx == 'R')
                        {
                            // Rxy: slide down 'y' semitones.
                            slideTarget = note.Value - (fx.param & 0xf);
                            slideSpeed = ((fx.param >> 4) * 2 + 1);
                        }
                    }
                    else if (fx.fx == '3')
                    {
                        // A 300 effect turns portamento off.
                        portamentoSpeed = 0;
                    }
                }

                // Create a slide note.
                if (!note.IsSlideNote)
                {
                    if (note.IsMusical)
                    {
                        // pal = false: NTSC tables are assumed here.
                        var noteTable = NesApu.GetNoteTableForChannelType(c.Type, false);
                        var pitchLimit = NesApu.GetPitchLimitForChannelType(c.Type);

                        // If we have a new note with auto-portamento enabled, we need to
                        // swap the notes since our slide notes work backward compared to
                        // FamiTracker.
                        if (portamentoSpeed != 0)
                        {
                            if (lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax)
                            {
                                pattern.Notes[n].SlideNoteTarget = pattern.Notes[n].Value;
                                pattern.Notes[n].Value = lastNoteValue;
                            }
                        }
                        else if (slideTarget != 0)
                        {
                            // Qxy/Rxy case: estimate how many rows the slide takes to reach the target.
                            var numFrames = Math.Abs((noteTable[note.Value] - noteTable[slideTarget]) / (slideSpeed * s.Speed));
                            pattern.Notes[n].SlideNoteTarget = (byte)slideTarget;

                            // Convert the row offset into a (pattern np, row nn) position.
                            var nn = n + numFrames;
                            var np = p;
                            while (nn >= s.PatternLength)
                            {
                                nn -= s.PatternLength;
                                np++;
                            }
                            if (np >= s.Length)
                            {
                                // NOTE(review): clamps np to s.Length (one past the last played
                                // pattern index); presumably PatternInstances is sized to tolerate
                                // this — confirm against Channel's declaration.
                                np = s.Length;
                                nn = 0;
                            }

                            // Still to see if there is a note between the current one and the
                            // next note, this could append if you add a note before the slide
                            // is supposed to finish.
                            if (FindNextNoteForSlide(c, p, n, out var np2, out var nn2, patternFxData))
                            {
                                if (np2 < np)
                                {
                                    np = np2;
                                    nn = nn2;
                                }
                                else if (np2 == np)
                                {
                                    nn = Math.Min(nn, nn2);
                                }
                            }

                            // Add an extra note with no attack to stop the slide.
                            if (!c.PatternInstances[np].Notes[nn].IsValid)
                            {
                                c.PatternInstances[np].Notes[nn].Instrument = note.Instrument;
                                c.PatternInstances[np].Notes[nn].Value = (byte)slideTarget;
                                c.PatternInstances[np].Notes[nn].HasAttack = false;
                            }
                        }
                        // Find the next note that would stop the slide or change the FX settings.
                        else if (slideSpeed != 0 && FindNextNoteForSlide(c, p, n, out var np, out var nn, patternFxData))
                        {
                            // Compute the pitch delta and find the closest target note.
                            var numFrames = ((np * s.PatternLength + nn) - (p * s.PatternLength + n)) * s.Speed; // TODO: PAL.
                            var newNotePitch = Utils.Clamp(noteTable[note.Value] + numFrames * slideSpeed, 0, pitchLimit);
                            var newNote = FindBestMatchingNote(noteTable, newNotePitch, Math.Sign(slideSpeed));

                            pattern.Notes[n].SlideNoteTarget = (byte)newNote;

                            // If the FX was turned off, we need to add an extra note.
                            if (!c.PatternInstances[np].Notes[nn].IsMusical &&
                                !c.PatternInstances[np].Notes[nn].IsStop)
                            {
                                c.PatternInstances[np].Notes[nn].Instrument = note.Instrument;
                                c.PatternInstances[np].Notes[nn].Value = (byte)newNote;
                                c.PatternInstances[np].Notes[nn].HasAttack = false;
                            }
                        }
                    }
                }

                // Remember the last musical note so effect-only rows can synthesize a note above.
                if (note.IsMusical || note.IsStop)
                {
                    lastNoteValue = note.Value;
                    lastNoteInstrument = note.Instrument;
                }
            }
        }
    }
}
// Exports the project (restricted to the given songs) as a FamiTracker text file,
// format version 0.4.2.
// originalProject: the project to export; a deep clone is modified, the original is untouched.
// filename: destination path, written with File.WriteAllLines.
// songIds: ids of the songs to keep in the export.
// Returns true (this implementation has no failure path).
public bool Save(Project originalProject, string filename, int[] songIds)
{
    var project = originalProject.DeepClone();
    project.RemoveAllSongsBut(songIds);

    // FamiTracker only understands speed/tempo, so convert FamiStudio tempo first.
    if (project.UsesFamiStudioTempo)
    {
        project.ConvertToFamiTrackerTempo(false);
    }

    ConvertPitchEnvelopes(project);
    var envelopes = MergeIdenticalEnvelopes(project);

    var lines = new List<string>();

    // Header + global settings.
    lines.Add("# FamiTracker text export 0.4.2");
    lines.Add("");
    lines.Add("# Song information");
    lines.Add("TITLE \"" + project.Name + "\"");
    lines.Add("AUTHOR \"" + project.Author + "\"");
    lines.Add("COPYRIGHT \"" + project.Copyright + "\"");
    lines.Add("");
    lines.Add("# Song comment");
    lines.Add("COMMENT \"\"");
    lines.Add("# Global settings");
    lines.Add("MACHINE 0");
    lines.Add("FRAMERATE 0");
    // EXPANSION is a bit mask in the text format; our expansion ids are 1-based.
    lines.Add("EXPANSION " + (project.ExpansionAudio != Project.ExpansionNone ? (1 << (project.ExpansionAudio - 1)) : 0));
    lines.Add("VIBRATO 1");
    lines.Add("SPLIT 32");
    lines.Add("");

    var realNumExpansionChannels = project.ExpansionNumChannels;

    if (project.ExpansionAudio == Project.ExpansionN163)
    {
        lines.Add("# Namco 163 global settings");
        lines.Add($"N163CHANNELS {project.ExpansionNumChannels}");
        lines.Add("");

        // The text format always export all 8 channels, even if there are less.
        project.SetExpansionAudio(Project.ExpansionN163, 8);
    }

    // Macros (envelopes). First the 2A03 ones, then the expansion ones.
    lines.Add("# Macros");
    for (int i = 0; i < Envelope.RegularCount; i++)
    {
        var envArray = envelopes[Project.ExpansionNone, i];
        for (int j = 0; j < envArray.Length; j++)
        {
            var env = envArray[j];
            lines.Add($"MACRO{ReverseEnvelopeTypeLookup[i],8} {j,4} {env.Loop,4} {(env.Release >= 0 ? env.Release - 1 : -1),4} 0 : {string.Join(" ", env.Values.Take(env.Length))}");
        }
    }

    if (project.ExpansionAudio == Project.ExpansionVrc6 ||
        project.ExpansionAudio == Project.ExpansionN163)
    {
        var suffix = project.ExpansionAudio == Project.ExpansionVrc6 ? "VRC6" : "N163";

        for (int i = 0; i < Envelope.RegularCount; i++)
        {
            var envArray = envelopes[1, i];
            for (int j = 0; j < envArray.Length; j++)
            {
                var env = envArray[j];
                lines.Add($"MACRO{suffix}{i,8} {j,4} {env.Loop,4} {(env.Release >= 0 ? env.Release - 1 : -1),4} 0 : {string.Join(" ", env.Values.Take(env.Length))}");
            }
        }
    }
    lines.Add("");

    // DPCM sample data blocks.
    if (project.UsesSamples)
    {
        lines.Add("# DPCM samples");
        for (int i = 0; i < project.Samples.Count; i++)
        {
            var sample = project.Samples[i];
            lines.Add($"DPCMDEF{i,4}{sample.Data.Length,6} \"{sample.Name}\"");
            lines.Add($"DPCM : {String.Join(" ", sample.Data.Select(x => $"{x:X2}"))}");
        }
        lines.Add("");
    }

    // Instruments. Envelope indices reference the merged macro arrays written above.
    lines.Add("# Instruments");
    for (int i = 0; i < project.Instruments.Count; i++)
    {
        var instrument = project.Instruments[i];

        var volEnv = instrument.Envelopes[Envelope.Volume];
        var arpEnv = instrument.Envelopes[Envelope.Arpeggio];
        var pitEnv = instrument.Envelopes[Envelope.Pitch];
        var dutEnv = instrument.Envelopes[Envelope.DutyCycle];

        var expIdx = instrument.IsExpansionInstrument ? 1 : 0;
        int volEnvIdx = volEnv != null && volEnv.Length > 0 ? Array.IndexOf(envelopes[expIdx, Envelope.Volume],    instrument.Envelopes[Envelope.Volume])    : -1;
        int arpEnvIdx = arpEnv != null && arpEnv.Length > 0 ? Array.IndexOf(envelopes[expIdx, Envelope.Arpeggio],  instrument.Envelopes[Envelope.Arpeggio])  : -1;
        int pitEnvIdx = pitEnv != null && pitEnv.Length > 0 ? Array.IndexOf(envelopes[expIdx, Envelope.Pitch],     instrument.Envelopes[Envelope.Pitch])     : -1;
        int dutEnvIdx = dutEnv != null && dutEnv.Length > 0 ? Array.IndexOf(envelopes[expIdx, Envelope.DutyCycle], instrument.Envelopes[Envelope.DutyCycle]) : -1;

        if (instrument.ExpansionType == Project.ExpansionNone)
        {
            lines.Add($"INST2A03{i,4}{volEnvIdx,6}{arpEnvIdx,4}{pitEnvIdx,4}{-1,4}{dutEnvIdx,4} \"{instrument.Name}\"");
        }
        else if (instrument.ExpansionType == Project.ExpansionVrc6)
        {
            lines.Add($"INSTVRC6{i,4}{volEnvIdx,6}{arpEnvIdx,4}{pitEnvIdx,4}{-1,4}{dutEnvIdx,4} \"{instrument.Name}\"");
        }
        else if (instrument.ExpansionType == Project.ExpansionVrc7)
        {
            lines.Add($"INSTVRC7{i,4}{instrument.Vrc7Patch,4} {String.Join(" ", instrument.Vrc7PatchRegs.Select(x => $"{x:X2}"))} \"{instrument.Name}\"");
        }
        else if (instrument.ExpansionType == Project.ExpansionN163)
        {
            lines.Add($"INSTN163{i,4}{volEnvIdx,6}{arpEnvIdx,4}{pitEnvIdx,4}{-1,4}{dutEnvIdx,4}{instrument.N163WaveSize,4}{instrument.N163WavePos,4}{1,4} \"{instrument.Name}\"");

            var wavEnv = instrument.Envelopes[Envelope.N163Waveform];
            lines.Add($"N163WAVE{i,4}{0,6} : {string.Join(" ", wavEnv.Values.Take(wavEnv.Length))}");
        }
        else if (instrument.ExpansionType == Project.ExpansionFds)
        {
            lines.Add($"INSTFDS{i,5}{1,6}{instrument.FdsModSpeed,4}{instrument.FdsModDepth,4}{instrument.FdsModDelay,4} \"{instrument.Name}\"");

            var wavEnv = instrument.Envelopes[Envelope.FdsWaveform];
            lines.Add($"FDSWAVE{i,5} : {string.Join(" ", wavEnv.Values.Take(wavEnv.Length))}");
            var modEnv = instrument.Envelopes[Envelope.FdsModulation].BuildFdsModulationTable();
            lines.Add($"FDSMOD{i,6} : {string.Join(" ", modEnv.Take(modEnv.Length))}");

            // FDS instruments carry their own private macros (not merged with the global ones).
            for (int j = 0; j <= Envelope.Pitch; j++)
            {
                var env = instrument.Envelopes[j];
                if (!env.IsEmpty)
                {
                    lines.Add($"FDSMACRO{i,4} {j,5} {env.Loop,4} {(env.Release >= 0 ? env.Release - 1 : -1),4} 0 : {string.Join(" ", env.Values.Take(env.Length))}");
                }
            }
        }
    }

    // DPCM mappings go on a dedicated pseudo-instrument appended after the real ones.
    if (project.UsesSamples)
    {
        lines.Add($"INST2A03{project.Instruments.Count,4}{-1,6}{-1,4}{-1,4}{-1,4}{-1,4} \"DPCM\"");

        for (int i = 0; i < project.SamplesMapping.Length; i++)
        {
            var mapping = project.SamplesMapping[i];

            if (mapping != null && mapping.Sample != null)
            {
                int note     = i + Note.DPCMNoteMin;
                var octave   = (note - 1) / 12;
                var semitone = (note - 1) % 12;
                var idx      = project.Samples.IndexOf(mapping.Sample);
                var loop     = mapping.Loop ? 1 : 0;

                lines.Add($"KEYDPCM{project.Instruments.Count,4}{octave,4}{semitone,4}{idx,6}{mapping.Pitch,4}{loop,4}{0,6}{-1,4}");
            }
        }
    }
    lines.Add("");

    // Tracks: one TRACK section per song, with ORDER list then PATTERN bodies.
    lines.Add("# Tracks");
    for (int i = 0; i < project.Songs.Count; i++)
    {
        var song = project.Songs[i];

        // Normalize the song so every position has a pattern of uniform length.
        TruncateLongPatterns(song);
        CreateMissingPatterns(song);
        song.CleanupUnusedPatterns();
        song.DuplicateInstancesWithDifferentLengths();

        lines.Add($"TRACK{song.PatternLength,4}{song.FamitrackerSpeed,4}{song.FamitrackerTempo,4} \"{song.Name}\"");
        lines.Add($"COLUMNS : {string.Join(" ", Enumerable.Repeat(3, song.Channels.Length))}");
        lines.Add("");

        for (int j = 0; j < song.Length; j++)
        {
            var line = $"ORDER {j:X2} :";
            for (int k = 0; k < song.Channels.Length; k++)
            {
                line += $" {song.Channels[k].Patterns.IndexOf(song.Channels[k].PatternInstances[j]):X2}";
            }
            lines.Add(line);
        }
        lines.Add("");

        int maxPatternCount = -1;
        foreach (var channel in song.Channels)
        {
            maxPatternCount = Math.Max(maxPatternCount, channel.Patterns.Count);
        }

        // Render each unique pattern once; rows are cached per pattern.
        var patternRows = new Dictionary<Pattern, List<string>>();

        for (int c = 0; c < song.Channels.Length; c++)
        {
            var channel = song.Channels[c];

            // Slide/portamento emission is stateful across rows, tracked with these.
            var prevNoteValue   = Note.NoteInvalid;
            var prevInstrument  = (Instrument)null;
            var prevSlideEffect = Effect_None;

            for (int p = 0; p < song.Length; p++)
            {
                var pattern = channel.PatternInstances[p];
                var patternLen = song.GetPatternLength(p);

                if (patternRows.ContainsKey(pattern))
                {
                    continue;
                }

                var patternLines = new List<string>();

                for (var it = pattern.GetNoteIterator(0, song.PatternLength); !it.Done; it.Next())
                {
                    var time = it.CurrentTime;
                    var note = it.CurrentNote;

                    // Keeps the code a lot simpler.
                    if (note == null)
                    {
                        note = Note.EmptyNote;
                    }

                    var line = " : ... .. . ... ... ...";

                    var noteString = GetFamiTrackerNoteName(c, note);
                    var volumeString = note.HasVolume ? note.Volume.ToString("X") : ".";
                    var instrumentString = note.IsValid && !note.IsStop ? (note.Instrument == null ? project.Instruments.Count : project.Instruments.IndexOf(note.Instrument)).ToString("X2") : "..";
                    var effectString = "";
                    var noAttack = !note.HasAttack && prevNoteValue == note.Value && (prevSlideEffect == Effect_None || prevSlideEffect == Effect_SlideUp || prevSlideEffect == Effect_Portamento);

                    if (note.IsSlideNote && note.IsMusical)
                    {
                        var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, false, realNumExpansionChannels);

                        var noteValue   = note.Value;
                        var slideTarget = note.SlideNoteTarget;

                        // FamiTracker only has 12-pitches and doesnt change the octave when doing
                        // slides. This helps make the slides more compatible, but its not great.
                        if (channel.IsVrc7FmChannel)
                        {
                            while (noteValue >= 12 && slideTarget >= 12)
                            {
                                noteValue   -= 12;
                                slideTarget -= 12;
                            }
                        }

                        // TODO: We use the initial FamiTracker speed here, this is wrong, it might have changed. Also we assume NTSC here.
                        var stepSizeFloat = channel.ComputeRawSlideNoteParams(noteValue, slideTarget, p, time, song.FamitrackerSpeed, Song.NativeTempoNTSC, noteTable);

                        if (channel.IsN163WaveChannel)
                        {
                            stepSizeFloat /= 4.0f;
                        }

                        // Undo any kind of shifting we had done. This will kill the 1-bit of fraction we have on most channel.
                        var absNoteDelta = Math.Abs(note.Value - note.SlideNoteTarget);

                        // See if we can use Qxy/Rxy (slide up/down y semitones, at speed x), this is preferable.
                        if (absNoteDelta < 16)
                        {
                            // Stop any pending continuous slide before switching to Qxy/Rxy.
                            if (prevSlideEffect == Effect_PortaUp || prevSlideEffect == Effect_PortaDown || prevSlideEffect == Effect_Portamento)
                            {
                                effectString += $" {EffectToTextLookup[prevSlideEffect]}00";
                            }

                            // FamiTracker use 2x + 1, find the number that is just above our speed.
                            var speed = 0;
                            for (int x = 14; x >= 0; x--)
                            {
                                if ((2 * x + 1) < Math.Abs(stepSizeFloat))
                                {
                                    speed = x + 1;
                                    break;
                                }
                            }

                            if (note.SlideNoteTarget > note.Value)
                                effectString += $" Q{speed:X1}{absNoteDelta:X1}";
                            else
                                effectString += $" R{speed:X1}{absNoteDelta:X1}";

                            prevSlideEffect = Effect_SlideUp;
                        }
                        else
                        {
                            // We have one bit of fraction. FramiTracker does not.
                            var ceilStepSize = Utils.SignedCeil(stepSizeFloat);

                            // If the previous note matched too, we can use 3xx (auto-portamento).
                            // Avoid using portamento on instrument with relative pitch envelopes, their previous pitch isnt reliable.
                            if (prevNoteValue == note.Value && (prevInstrument == null || prevInstrument.Envelopes[Envelope.Pitch].IsEmpty || !prevInstrument.Envelopes[Envelope.Pitch].Relative))
                            {
                                if (prevSlideEffect == Effect_PortaUp || prevSlideEffect == Effect_PortaDown)
                                {
                                    effectString += $" 100";
                                }

                                noteString = GetFamiTrackerNoteName(c, new Note(note.SlideNoteTarget));
                                effectString += $" 3{Math.Min(0xff, Math.Abs(ceilStepSize)):X2}";
                                prevSlideEffect = Effect_Portamento;
                                noAttack = false; // Need to force attack when starting auto-portamento unfortunately.
                            }
                            else
                            {
                                // Inverted channels.
                                if (channel.IsFdsWaveChannel || channel.IsN163WaveChannel)
                                {
                                    stepSizeFloat = -stepSizeFloat;
                                }

                                var absFloorStepSize = Math.Abs(Utils.SignedFloor(stepSizeFloat));

                                if (prevSlideEffect == Effect_Portamento)
                                {
                                    effectString += $" 300";
                                }

                                if (note.SlideNoteTarget > note.Value)
                                {
                                    effectString += $" 1{Math.Min(0xff, absFloorStepSize):X2}";
                                    prevSlideEffect = Effect_PortaUp;
                                }
                                else if (note.SlideNoteTarget < note.Value)
                                {
                                    effectString += $" 2{Math.Min(0xff, absFloorStepSize):X2}";
                                    prevSlideEffect = Effect_PortaDown;
                                }
                            }
                        }
                    }
                    else if ((note.IsMusical || note.IsStop) && prevSlideEffect != Effect_None)
                    {
                        // A new (non-slide) note cancels whatever slide effect was running.
                        if (prevSlideEffect == Effect_PortaUp || prevSlideEffect == Effect_PortaDown || prevSlideEffect == Effect_Portamento)
                        {
                            effectString += $" {EffectToTextLookup[prevSlideEffect]}00";
                        }

                        prevSlideEffect = Effect_None;
                    }

                    // On the last row: emit the loop (Bxx) or the skip (D00) for short patterns.
                    if (time == patternLen - 1)
                    {
                        if (p == song.Length - 1 && song.LoopPoint >= 0)
                            effectString += $" B{song.LoopPoint:X2}";
                        else if (patternLen != song.PatternLength)
                            effectString += $" D00";
                    }

                    if (note.HasSpeed)       effectString += $" F{note.Speed:X2}";
                    if (note.HasVibrato)     effectString += $" 4{VibratoSpeedExportLookup[note.VibratoSpeed]:X1}{note.VibratoDepth:X1}";
                    if (note.HasFinePitch)   effectString += $" P{(byte)(-note.FinePitch + 0x80):X2}";
                    if (note.HasFdsModDepth) effectString += $" H{note.FdsModDepth:X2}";
                    if (note.HasFdsModSpeed)
                    {
                        effectString += $" I{(note.FdsModSpeed >> 8) & 0xff:X2}";
                        effectString += $" J{(note.FdsModSpeed >> 0) & 0xff:X2}";
                    }

                    // Pad to the 3 effect columns declared in COLUMNS.
                    while (effectString.Length < 12)
                        effectString += " ...";

                    if (noAttack)
                    {
                        noteString = "...";
                        instrumentString = "..";
                    }

                    line = $" : {noteString} {instrumentString} {volumeString}{effectString}";

                    if (note.IsMusical || note.IsStop)
                    {
                        prevNoteValue = note.IsSlideNote ? note.SlideNoteTarget : note.Value;
                        if (note.IsMusical)
                            prevInstrument = note.Instrument;
                    }

                    patternLines.Add(line);
                }

                patternRows[pattern] = patternLines;
            }
        }

        // Write the PATTERN blocks, one ROW line per row across all channels.
        for (int j = 0; j < maxPatternCount; j++)
        {
            lines.Add($"PATTERN {j:X2}");

            for (int p = 0; p < song.PatternLength; p++)
            {
                var line = $"ROW {p:X2}";
                for (int c = 0; c < song.Channels.Length; c++)
                {
                    var channel = song.Channels[c];

                    if (j >= channel.Patterns.Count)
                        line += " : ... .. . ... ... ...";
                    else
                        line += patternRows[channel.Patterns[j]][p];
                }
                lines.Add(line);
            }

            lines.Add("");
        }
    }

    lines.Add("# End of export");

    File.WriteAllLines(filename, lines);

    return(true);
}
// Emits the packed FamiTone2/FamiStudio assembly data for one song, channel by channel,
// reusing previously-packed pattern buffers when identical.
// song: song to output.
// songIdx: index used to build the assembly labels (songNchM, loop labels, etc.).
// speedChannel: channel index that also carries the speed ($fb) commands.
// factor: split factor used when the song's patterns were subdivided earlier.
// test: when true, nothing is appended to 'lines'; only the size is computed.
// Returns the data size in bytes, or -1 when too many unique packed patterns exist.
private int OutputSong(Song song, int songIdx, int speedChannel, int factor, bool test)
{
    // Work on a copy so a test/failed run does not pollute the global pattern cache.
    var packedPatternBuffers = new List<List<string>>(globalPacketPatternBuffers);
    var size = 0;
    var loopPoint = Math.Max(0, FindEffectParam(song, Note.EffectJump)) * factor;
    var emptyPattern = new Pattern(-1, song, 0, "");

    for (int c = 0; c < song.Channels.Length; c++)
    {
        if (!test)
        {
            lines.Add($"\n{ll}song{songIdx}ch{c}:");
        }

        var channel = song.Channels[c];
        var isSpeedChannel = c == speedChannel;
        var instrument = (Instrument)null;

        // The speed channel starts with an explicit speed command.
        if (isSpeedChannel)
        {
            if (!test)
            {
                lines.Add($"\t{db} $fb, ${song.Speed:x2}");
            }
            size += 2;
        }

        var isSkipping = false;

        for (int p = 0; p < song.Length; p++)
        {
            var pattern = channel.PatternInstances[p] == null ? emptyPattern : channel.PatternInstances[p];
            var patternBuffer = new List<string>();

            // If we had split the song and we find a skip to the next
            // pattern, we need to ignore the extra patterns we generated.
            if (isSkipping && (p % factor) != 0)
            {
                continue;
            }

            if (!test && p == loopPoint)
            {
                lines.Add($"{ll}song{songIdx}ch{c}loop:");
            }

            var i = 0;
            var patternLength = FindEffectPosition(song, p, Note.EffectSkip);
            var jumpFound = false;

            // A skip (Dxx) or jump (Bxx) effect shortens the effective pattern length.
            if (patternLength >= 0)
            {
                patternLength++;
                isSkipping = true;
            }
            else
            {
                isSkipping = false;

                patternLength = FindEffectPosition(song, p, Note.EffectJump);
                if (patternLength >= 0)
                {
                    patternLength++;
                    jumpFound = true;
                }
                else
                {
                    patternLength = song.PatternLength;
                }
            }

            var numValidNotes = patternLength;

            while (i < patternLength)
            {
                var note = pattern.Notes[i];

                if (isSpeedChannel)
                {
                    var speed = FindEffectParam(song, p, i, Note.EffectSpeed);
                    if (speed >= 0)
                    {
                        patternBuffer.Add($"${0xfb:x2}");
                        patternBuffer.Add($"${(byte)speed:x2}");
                    }
                }

                i++;

                if (note.HasVolume)
                {
                    patternBuffer.Add($"${(byte)(0x70 | note.Volume):x2}");
                }

                if (note.HasVibrato)
                {
                    patternBuffer.Add($"${0x63:x2}");
                    patternBuffer.Add($"{lo}({vibratoEnvelopeNames[note.Vibrato]})");
                    patternBuffer.Add($"{hi}({vibratoEnvelopeNames[note.Vibrato]})");

                    // Vibrato 0 also emits a "reset vibrato" opcode.
                    if (note.Vibrato == 0)
                    {
                        patternBuffer.Add($"${0x64:x2}");
                    }
                }

                if (note.IsValid)
                {
                    // Instrument change.
                    if (note.IsMusical)
                    {
                        if (note.Instrument != instrument)
                        {
                            int idx = project.Instruments.IndexOf(note.Instrument);
                            patternBuffer.Add($"${(byte)(0x80 | (idx << 1)):x2}");
                            instrument = note.Instrument;
                        }
                        else if (!note.HasAttack)
                        {
                            // TODO: Remove note entirely after a slide that matches the next note with no attack.
                            patternBuffer.Add($"${0x62:x2}");
                        }
                    }

                    int numNotes = 0;

                    if (kernel == FamiToneKernel.FamiTone2)
                    {
                        // Note -> Empty -> Note special encoding.
                        if (i < patternLength - 1)
                        {
                            var nextNote1 = pattern.Notes[i + 0];
                            var nextNote2 = pattern.Notes[i + 1];

                            var valid1 = nextNote1.IsValid || (isSpeedChannel && FindEffectParam(song, p, i + 0, Note.EffectSpeed) >= 0);
                            var valid2 = nextNote2.IsValid || (isSpeedChannel && FindEffectParam(song, p, i + 1, Note.EffectSpeed) >= 0);

                            if (!valid1 && valid2)
                            {
                                i++;
                                numValidNotes--;
                                numNotes = 1;
                            }
                        }
                    }

                    if (note.IsSlideNote)
                    {
                        var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, false);

                        // i was already advanced, so the note is at i - 1.
                        if (channel.ComputeSlideNoteParams(p, i - 1, noteTable, out _, out int stepSize, out _))
                        {
                            patternBuffer.Add($"${0x61:x2}");
                            patternBuffer.Add($"${(byte)stepSize:x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.Value):x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.SlideNoteTarget):x2}");
                            continue;
                        }
                    }

                    patternBuffer.Add($"${EncodeNoteValue(c, note.Value, numNotes):x2}");
                }
                else
                {
                    // Coalesce consecutive empty rows into a single repeat opcode.
                    int numEmptyNotes = 0;

                    while (i < patternLength)
                    {
                        var emptyNote = pattern.Notes[i];

                        if (numEmptyNotes >= maxRepeatCount ||
                            emptyNote.IsValid ||
                            emptyNote.HasVolume ||
                            emptyNote.HasVibrato ||
                            (isSpeedChannel && FindEffectParam(song, p, i, Note.EffectSpeed) >= 0))
                        {
                            break;
                        }

                        i++;
                        numEmptyNotes++;
                    }

                    numValidNotes -= numEmptyNotes;
                    patternBuffer.Add($"${(byte)(0x81 | (numEmptyNotes << 1)):x2}");
                }
            }

            int matchingPatternIdx = -1;

            if (patternBuffer.Count > 0)
            {
                // Only bother deduplicating buffers big enough to amortize the
                // 4-byte reference ($ff + count + 2-byte address).
                if (patternBuffer.Count > 4)
                {
                    for (int j = 0; j < packedPatternBuffers.Count; j++)
                    {
                        if (packedPatternBuffers[j].SequenceEqual(patternBuffer))
                        {
                            matchingPatternIdx = j;
                            break;
                        }
                    }
                }

                if (matchingPatternIdx < 0)
                {
                    if (packedPatternBuffers.Count > MaxPackedPatterns)
                    {
                        return -1; // TODO: Error.
                    }

                    packedPatternBuffers.Add(patternBuffer);

                    size += patternBuffer.Count;

                    if (!test)
                    {
                        lines.Add($"{ll}ref{packedPatternBuffers.Count - 1}:");
                        lines.Add($"\t{db} {String.Join(",", patternBuffer)}");
                    }
                }
                else
                {
                    if (!test)
                    {
                        lines.Add($"\t{db} $ff,${numValidNotes:x2}");
                        lines.Add($"\t{dw} {ll}ref{matchingPatternIdx}");
                    }

                    size += 4;
                }
            }

            if (jumpFound)
            {
                break;
            }
        }

        // Terminate the channel stream and point it back at its loop label.
        if (!test)
        {
            lines.Add($"\t{db} $fd");
            lines.Add($"\t{dw} {ll}song{songIdx}ch{c}loop");
        }

        size += 3;
    }

    if (!test)
    {
        globalPacketPatternBuffers = packedPatternBuffers;
    }

    return size;
}