public void AddProperties()
{
    firstPropIdx = props.PropertyCount;

    if (song.UsesFamiTrackerTempo)
    {
        if (patternIdx < 0)
        {
            famitrackerTempoPropIdx = props.AddNumericUpDown("Tempo :", song.FamitrackerTempo, 32, 255, TempoTooltip); // 0
            famitrackerSpeedPropIdx = props.AddNumericUpDown("Speed :", song.FamitrackerSpeed, 1, 31, SpeedTooltip); // 1
        }

        var notesPerBeat    = patternIdx < 0 ? song.BeatLength    : song.GetPatternBeatLength(patternIdx);
        var notesPerPattern = patternIdx < 0 ? song.PatternLength : song.GetPatternLength(patternIdx);
        var bpm = Song.ComputeFamiTrackerBPM(song.Project.PalMode, song.FamitrackerSpeed, song.FamitrackerTempo, notesPerBeat);

        notesPerBeatPropIdx    = props.AddNumericUpDown("Notes per Beat :", notesPerBeat, 1, 256, NotesPerBeatTooltip); // 2
        notesPerPatternPropIdx = props.AddNumericUpDown("Notes per Pattern :", notesPerPattern, 1, Pattern.MaxLength, NotesPerPatternTooltip); // 3
        bpmLabelPropIdx        = props.AddLabel("BPM :", bpm.ToString("n1"), false, BPMTooltip); // 4

        props.ShowWarnings = true;

        UpdateWarnings();
    }
    else
    {
        var noteLength      = (patternIdx < 0 ? song.NoteLength    : song.GetPatternNoteLength(patternIdx));
        var notesPerBeat    = (patternIdx < 0 ? song.BeatLength    : song.GetPatternBeatLength(patternIdx));
        var notesPerPattern = (patternIdx < 0 ? song.PatternLength : song.GetPatternLength(patternIdx));
        var groove          = (patternIdx < 0 ? song.Groove        : song.GetPatternGroove(patternIdx));

        tempoList = FamiStudioTempoUtils.GetAvailableTempos(song.Project.PalMode, notesPerBeat / noteLength);
        var tempoIndex = FamiStudioTempoUtils.FindTempoFromGroove(tempoList, groove);
        Debug.Assert(tempoIndex >= 0);
        tempoStrings = tempoList.Select(t => t.bpm.ToString("n1") + (t.groove.Length == 1 ? " *" : "")).ToArray();

        var grooveList = FamiStudioTempoUtils.GetAvailableGrooves(tempoList[tempoIndex].groove);
        var grooveIndex = Array.FindIndex(grooveList, g => Utils.CompareArrays(g, groove) == 0);
        Debug.Assert(grooveIndex >= 0);
        grooveStrings = grooveList.Select(g => string.Join("-", g)).ToArray();

        famistudioBpmPropIdx   = props.AddDropDownList("BPM : ", tempoStrings, tempoStrings[tempoIndex], BPMTooltip); // 0
        notesPerBeatPropIdx    = props.AddNumericUpDown("Notes per Beat : ", notesPerBeat / noteLength, 1, 256, NotesPerBeatTooltip); // 1
        notesPerPatternPropIdx = props.AddNumericUpDown("Notes per Pattern : ", notesPerPattern / noteLength, 1, Pattern.MaxLength / noteLength, NotesPerPatternTooltip); // 2
        framesPerNotePropIdx   = props.AddLabel("Frames per Note :", noteLength.ToString(), false, FramesPerNoteTooltip); // 3

        props.ShowWarnings = true;
        props.BeginAdvancedProperties();
        groovePropIdx    = props.AddDropDownList("Groove : ", grooveStrings, grooveStrings[grooveIndex], GrooveTooltip); // 4
        groovePadPropIdx = props.AddDropDownList("Groove Padding : ", GroovePaddingType.Names, GroovePaddingType.Names[song.GroovePaddingMode], GroovePaddingTooltip); // 5

        originalNoteLength   = noteLength;
        originalNotesPerBeat = notesPerBeat;

        UpdateWarnings();
    }
}
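// The BPM label above comes from Song.ComputeFamiTrackerBPM. As a rough sketch of the
// relationship (an assumption based on the usual FamiTracker convention, not the actual
// implementation, and ignoring the PAL adjustment the real method receives): rows advance
// at roughly tempo / (2.5 * speed) rows per second, so with notesPerBeat rows per beat the
// displayed value is approximately tempo * 24 / (speed * notesPerBeat).
static double ApproximateFamiTrackerBPM(int tempo, int speed, int notesPerBeat)
{
    // e.g. tempo 150, speed 6, 4 notes per beat -> 150.0 BPM.
    return tempo * 24.0 / (speed * notesPerBeat);
}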
protected void ApplyHaltEffect(Song s, Dictionary<Pattern, RowFxData[,]> patternFxData)
{
    // Find the first Cxx effect and truncate the song.
    for (int p = 0; p < s.Length; p++)
    {
        for (int c = 0; c < s.Channels.Length; c++)
        {
            var pattern = s.Channels[c].PatternInstances[p];
            var patternLength = s.GetPatternLength(p);

            // Empty pattern slots are null and must be skipped before the dictionary lookup.
            if (pattern != null && patternFxData.TryGetValue(pattern, out var fxData))
            {
                for (int i = 0; i < fxData.GetLength(0) && i < patternLength; i++)
                {
                    for (int j = 0; j < fxData.GetLength(1); j++)
                    {
                        var fx = fxData[i, j];

                        if (fx.fx == Effect_Halt)
                        {
                            if (s.PatternHasCustomSettings(p))
                                s.GetPatternCustomSettings(p).patternLength = i + 1;
                            else
                                s.SetPatternCustomSettings(p, i + 1, s.BeatLength);

                            s.SetLength(p + 1);
                            s.SetLoopPoint(-1);
                            return;
                        }
                    }
                }
            }
        }
    }
}
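// Layout assumed by the loops above: the importer keeps one RowFxData[rows, effectColumns]
// grid per FamiTracker pattern, so fxData[i, j] is effect column j on row i. A small
// hypothetical helper (not part of the importer) using the same indexing:
static bool RowHasEffect(RowFxData[,] fxData, int row, int effect)
{
    for (int col = 0; col < fxData.GetLength(1); col++)
    {
        if (fxData[row, col].fx == effect)
            return true;
    }

    return false;
}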
public bool AdvanceSong(int songLength, LoopMode loopMode)
{
    bool advancedPattern = false;
    bool forceResetTempo = false;

    if (++playNote >= song.GetPatternLength(playPattern))
    {
        playNote = 0;

        if (loopMode != LoopMode.Pattern)
        {
            playPattern++;
            advancedPattern = true;
            forceResetTempo = playPattern == song.LoopPoint;
        }
    }

    if (playPattern >= songLength)
    {
        loopCount++;

        if (maxLoopCount > 0 && loopCount >= maxLoopCount)
        {
            return false;
        }

        if (loopMode == LoopMode.LoopPoint) // This loop mode is actually unused.
        {
            if (song.LoopPoint >= 0)
            {
                playPattern = song.LoopPoint;
                playNote = 0;
                advancedPattern = true;
                forceResetTempo = true;
            }
            else
            {
                return false;
            }
        }
        else if (loopMode == LoopMode.Song)
        {
            playPattern = Math.Max(0, song.LoopPoint);
            playNote = 0;
            advancedPattern = true;
            forceResetTempo = true;
        }
        else if (loopMode == LoopMode.None)
        {
            return false;
        }
    }

    if (advancedPattern)
    {
        ResetFamiStudioTempo(forceResetTempo);
    }

    return true;
}
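// A minimal usage sketch (hypothetical driver, not the actual player loop): each time the
// tempo logic decides a new row is due, AdvanceSong moves the play position forward and
// returns false when playback should stop (end of song, or the loop-count limit is hit).
// OnNewRow, StopPlayback and PlayCurrentRow are assumed names for illustration only.
void OnNewRow()
{
    if (!AdvanceSong(song.Length, LoopMode.Song))
        StopPlayback();   // hypothetical
    else
        PlayCurrentRow(); // hypothetical
}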
private void CreateArpeggios(Song s, Dictionary<Pattern, RowFxData[,]> patternFxData)
{
    var processedPatterns = new HashSet<Pattern>();

    foreach (var c in s.Channels)
    {
        if (!c.SupportsArpeggios)
            continue;

        var lastNoteInstrument = (Instrument)null;
        var lastNoteArpeggio = (Arpeggio)null;
        var lastNoteValue = (byte)Note.NoteInvalid;

        for (int p = 0; p < s.Length; p++)
        {
            var pattern = c.PatternInstances[p];

            if (pattern == null || !patternFxData.ContainsKey(pattern) || processedPatterns.Contains(pattern))
                continue;

            processedPatterns.Add(pattern);

            var fxData = patternFxData[pattern];
            var patternLen = s.GetPatternLength(p);

            for (var it = pattern.GetDenseNoteIterator(0, patternLen); !it.Done; it.Next())
            {
                var n = it.CurrentTime;
                var note = it.CurrentNote;

                for (int i = 0; i < fxData.GetLength(1); i++)
                {
                    var fx = fxData[n, i];

                    if (fx.fx == Effect_Arpeggio)
                    {
                        if (note == null)
                        {
                            note = pattern.GetOrCreateNoteAt(n);
                            note.Value = lastNoteValue;
                            note.Instrument = lastNoteInstrument;
                            note.HasAttack = false;
                            it.Resync();
                        }

                        note.Arpeggio = GetOrCreateArpeggio(fx.param);
                    }
                }

                if (note != null && note.IsValid)
                {
                    lastNoteValue = note.Value;
                    lastNoteInstrument = note.Instrument;
                    lastNoteArpeggio = note.Arpeggio;
                }
            }
        }
    }
}
private int FindEffectPosition(Song song, int patternIdx, int effect)
{
    for (int i = 0; i < song.GetPatternLength(patternIdx); i++)
    {
        var fx = FindEffectParam(song, patternIdx, i, effect);
        if (fx >= 0)
        {
            return i;
        }
    }

    return -1;
}
public bool AdvanceSong(int songLength, LoopMode loopMode)
{
    bool resetTempo = false;

    if (++playNote >= song.GetPatternLength(playPattern))
    {
        playNote = 0;

        if (loopMode != LoopMode.Pattern)
        {
            playPattern++;
            resetTempo = playPattern == song.LoopPoint;
        }
    }

    if (playPattern >= songLength)
    {
        if (loopMode == LoopMode.LoopPoint)
        {
            if (song.LoopPoint >= 0)
            {
                playPattern = song.LoopPoint;
                playNote = 0;
                resetTempo = true;
            }
            else
            {
                return false;
            }
        }
        else if (loopMode == LoopMode.Song)
        {
            playPattern = Math.Max(0, song.LoopPoint);
            playNote = 0;
            resetTempo = true;
        }
        else if (loopMode == LoopMode.None)
        {
            return false;
        }
    }

    if (resetTempo)
    {
        ResetFamiStudioTempo(resetTempo);
    }

    return true;
}
private int FindEffectParam(Song song, int effect)
{
    for (int p = 0; p < song.Length; p++)
    {
        for (int i = 0; i < song.GetPatternLength(p); i++)
        {
            int fx = FindEffectParam(song, p, i, effect);
            if (fx >= 0)
            {
                return fx;
            }
        }
    }

    return -1;
}
private void TruncateLongPatterns(Song song)
{
    if (song.PatternLength > 256)
    {
        song.SetDefaultPatternLength(256);
    }

    // FamiTracker can only shorten patterns using skips. We allow patterns to be
    // longer than the default, so we will truncate those.
    for (int i = 0; i < song.Length; i++)
    {
        if (song.GetPatternLength(i) > song.PatternLength)
        {
            song.ClearPatternCustomSettings(i);
        }
    }

    song.DeleteNotesPastMaxInstanceLength();
}
public void DuplicateInstancesWithDifferentLengths()
{
    var instanceLengthMap = new Dictionary<Pattern, int>();

    for (int p = 0; p < song.Length; p++)
    {
        var pattern = patternInstances[p];
        var patternLen = song.GetPatternLength(p);

        if (pattern != null)
        {
            if (instanceLengthMap.TryGetValue(pattern, out var prevLength))
            {
                if (patternLen != prevLength)
                {
                    pattern = pattern.ShallowClone();
                    patternInstances[p] = pattern;
                }
            }

            instanceLengthMap[pattern] = patternLen;
        }
    }
}
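// Hedged usage sketch: this is assumed to be called per channel before an operation that
// trims notes past the instance length, so that a Pattern shared between instances of
// different lengths gets split into its own copy first. The caller shown here is
// hypothetical; only the method itself appears above.
foreach (var channel in song.Channels)
    channel.DuplicateInstancesWithDifferentLengths();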
private void CreateSlideNotes(Song s, Dictionary<Pattern, RowFxData[,]> patternFxData)
{
    var processedPatterns = new HashSet<Pattern>();

    // Convert slide notes + portamento to our format.
    foreach (var c in s.Channels)
    {
        if (!c.SupportsSlideNotes)
        {
            continue;
        }

        var songSpeed = s.FamitrackerSpeed;
        var lastNoteInstrument = (Instrument)null;
        var lastNoteArpeggio = (Arpeggio)null;
        var lastNoteValue = (byte)Note.NoteInvalid;
        var portamentoSpeed = 0;
        var slideSpeed = 0;
        var slideShift = c.IsN163WaveChannel ? 2 : 0;
        var slideSign = c.IsN163WaveChannel || c.IsFdsWaveChannel || c.IsVrc7FmChannel ? -1 : 1; // Inverted channels.

        for (int p = 0; p < s.Length; p++)
        {
            var pattern = c.PatternInstances[p];

            if (pattern == null)
            {
                continue;
            }

            var patternLen = s.GetPatternLength(p);

            for (var it = pattern.GetDenseNoteIterator(0, patternLen); !it.Done; it.Next())
            {
                var location = new NoteLocation(p, it.CurrentTime);
                var note = it.CurrentNote;

                // Look for speed changes.
                s.ApplySpeedEffectAt(location, ref songSpeed);

                if (!patternFxData.ContainsKey(pattern) || processedPatterns.Contains(pattern))
                {
                    continue;
                }

                var fxData = patternFxData[pattern];
                var slideTarget = 0;

                for (int i = 0; i < fxData.GetLength(1); i++)
                {
                    var fx = fxData[location.NoteIndex, i];

                    if (fx.param != 0)
                    {
                        // When the effect is turned on, we need to add a note.
                        if ((fx.fx == Effect_PortaUp ||
                             fx.fx == Effect_PortaDown ||
                             fx.fx == Effect_SlideUp ||
                             fx.fx == Effect_SlideDown) &&
                            lastNoteValue >= Note.MusicalNoteMin &&
                            lastNoteValue <= Note.MusicalNoteMax && (note == null || !note.IsValid))
                        {
                            if (note == null)
                            {
                                note = pattern.GetOrCreateNoteAt(location.NoteIndex);
                                it.Resync();
                            }

                            note.Value = lastNoteValue;
                            note.Instrument = lastNoteInstrument;
                            note.Arpeggio = lastNoteArpeggio;
                            note.HasAttack = false;
                        }
                    }

                    if (fx.fx == Effect_PortaUp)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (-fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_PortaDown)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_Portamento)
                    {
                        portamentoSpeed = fx.param;
                    }
                    if (fx.fx == Effect_SlideUp && note != null && note.IsMusical)
                    {
                        slideTarget = Utils.Clamp(note.Value + (fx.param & 0xf), Note.MusicalNoteMin, Note.MusicalNoteMax);
                        slideSpeed = (-((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                    if (fx.fx == Effect_SlideDown && note != null && note.IsMusical)
                    {
                        slideTarget = Utils.Clamp(note.Value - (fx.param & 0xf), Note.MusicalNoteMin, Note.MusicalNoteMax);
                        slideSpeed = (((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                }

                // Create a slide note.
                if (note != null && !note.IsSlideNote)
                {
                    if (note.IsMusical)
                    {
                        var slideSource = note.Value;
                        var noteTable = NesApu.GetNoteTableForChannelType(c.Type, s.Project.PalMode, s.Project.ExpansionNumChannels);
                        var pitchLimit = NesApu.GetPitchLimitForChannelType(c.Type);

                        // If we have a new note with auto-portamento enabled, we need to
                        // swap the notes since our slide notes work backward compared to
                        // FamiTracker.
                        if (portamentoSpeed != 0)
                        {
                            // Ignore notes with no attack since we created them to handle a previous slide.
                            if (note.HasAttack && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax)
                            {
                                slideSpeed = portamentoSpeed;
                                slideTarget = note.Value;
                                slideSource = lastNoteValue;
                                note.Value = lastNoteValue;
                            }
                        }

                        // Our implementation of VRC7 pitches is quite different from FamiTracker.
                        // Compensate for larger pitches in higher octaves by shifting. We can't shift by
                        // a large amount because the period is 9-bit and FamiTracker is restricted to
                        // this for slides (octave never changes).
                        var octaveSlideShift = c.IsVrc7FmChannel && note.Value >= 12 ? 1 : 0;

                        // 3xx/Qxy/Rxy : We know which note we are sliding to and the speed, but we
                        // don't know how many frames it will take to get there.
                        if (slideTarget != 0)
                        {
                            // Advance in the song until we have the correct number of frames.
                            var numFrames = Math.Max(1, Math.Abs((noteTable[slideSource] - noteTable[slideTarget]) / (slideSpeed << octaveSlideShift)));
                            note.SlideNoteTarget = (byte)slideTarget;

                            // TODO: Here we consider if the start note has a delay, but ignore the end note. It might have one too.
                            var nextLocation = location;
                            s.AdvanceNumberOfFrames(ref nextLocation, numFrames, note.HasNoteDelay ? -note.NoteDelay : 0, songSpeed, s.Project.PalMode);

                            // We still need to check if there is a note between the current one and the
                            // next one; this could happen if you add a note before the slide is supposed
                            // to finish.
                            if (FindNextSlideEffect(c, location, out var nextLocation2, patternFxData))
                            {
                                nextLocation = NoteLocation.Min(nextLocation, nextLocation2);

                                // If the slide is interrupted by another slide effect, we will not reach
                                // the final target, but rather some intermediate note. Let's do our best
                                // to interpolate and figure out the best note.
                                var numFramesUntilNextSlide = s.CountFramesBetween(location, nextLocation, songSpeed, s.Project.PalMode);
                                var ratio = Utils.Clamp(numFramesUntilNextSlide / numFrames, 0.0f, 1.0f);
                                var intermediatePitch = (int)Math.Round(Utils.Lerp(noteTable[slideSource], noteTable[slideTarget], ratio));

                                slideTarget = FindBestMatchingNote(noteTable, intermediatePitch, Math.Sign(slideSpeed));
                                note.SlideNoteTarget = (byte)slideTarget;
                            }

                            if (nextLocation.PatternIndex < s.Length)
                            {
                                // Add an extra note with no attack to stop the slide.
                                var nextPattern = c.PatternInstances[nextLocation.PatternIndex];

                                if (!nextPattern.Notes.TryGetValue(nextLocation.NoteIndex, out var nextNote) || !nextNote.IsValid)
                                {
                                    nextNote = nextPattern.GetOrCreateNoteAt(nextLocation.NoteIndex);
                                    nextNote.Instrument = note.Instrument;
                                    nextNote.Value = (byte)slideTarget;
                                    nextNote.HasAttack = false;
                                    it.Resync();
                                }
                                else if (nextNote != null && nextNote.IsRelease)
                                {
                                    Log.LogMessage(LogSeverity.Warning, $"A slide note ends on a release note. This is currently unsupported and will require manual correction. {GetPatternString(nextPattern, nextLocation.NoteIndex)}");
                                }
                            }

                            // 3xx, Qxx and Rxx stop when they are done.
                            slideSpeed = 0;
                        }
                        // 1xx/2xy : We know the speed at which we are sliding, but need to figure out what makes it stop.
                        else if (slideSpeed != 0 && FindNextSlideEffect(c, location, out var nextLocation, patternFxData))
                        {
                            // See how many frames until the slide stops.
                            var numFrames = (int)Math.Round(s.CountFramesBetween(location, nextLocation, songSpeed, s.Project.PalMode));

                            // TODO: Here we consider if the start note has a delay, but ignore the end note. It might have one too.
                            numFrames = Math.Max(1, numFrames - (note.HasNoteDelay ? note.NoteDelay : 0));

                            // Compute the pitch delta and find the closest target note.
                            var newNotePitch = Utils.Clamp(noteTable[slideSource] + numFrames * (slideSpeed << octaveSlideShift), 0, pitchLimit);
                            var newNote = FindBestMatchingNote(noteTable, newNotePitch, Math.Sign(slideSpeed));

                            note.SlideNoteTarget = (byte)newNote;

                            // If the FX was turned off, we need to add an extra note.
                            var nextPattern = c.PatternInstances[nextLocation.PatternIndex];

                            if (!nextPattern.Notes.TryGetValue(nextLocation.NoteIndex, out var nextNote) || !nextNote.IsValid)
                            {
                                nextNote = nextPattern.GetOrCreateNoteAt(nextLocation.NoteIndex);
                                nextNote.Instrument = note.Instrument;
                                nextNote.Value = (byte)newNote;
                                nextNote.HasAttack = false;
                                it.Resync();
                            }
                            else if (nextNote != null && nextNote.IsRelease)
                            {
                                Log.LogMessage(LogSeverity.Warning, $"A slide note ends on a release note. This is currently unsupported and will require manual correction. {GetPatternString(nextPattern, nextLocation.NoteIndex)}");
                            }
                        }
                    }
                }

                if (note != null && (note.IsMusical || note.IsStop))
                {
                    lastNoteValue = note.IsSlideNote ? note.SlideNoteTarget : note.Value;
                    lastNoteInstrument = note.Instrument;
                    lastNoteArpeggio = note.Arpeggio;
                }
            }

            processedPatterns.Add(pattern);
        }
    }
}
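// A minimal sketch of the frame-count estimate used for 3xx/Qxy/Rxy above (hypothetical
// helper, illustrative values only): with a source period of 428, a target period of 380
// and an effective step of 4 pitch units per frame, the slide needs |428 - 380| / 4 = 12
// frames, and the no-attack stop note is placed that many frames later in the song.
static int EstimateSlideFrames(int sourcePitch, int targetPitch, int stepPerFrame)
{
    // Mirrors Math.Max(1, Math.Abs((noteTable[src] - noteTable[dst]) / step)) above.
    return Math.Max(1, Math.Abs((sourcePitch - targetPitch) / stepPerFrame));
}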
private void CreateSlideNotes(Song s, Dictionary<Pattern, RowFxData[,]> patternFxData)
{
    var processedPatterns = new HashSet<Pattern>();

    // Convert slide notes + portamento to our format.
    foreach (var c in s.Channels)
    {
        if (!c.SupportsSlideNotes)
        {
            continue;
        }

        var songSpeed = s.FamitrackerSpeed;
        var lastNoteInstrument = (Instrument)null;
        var lastNoteValue = (byte)Note.NoteInvalid;
        var portamentoSpeed = 0;
        var slideSpeed = 0;
        var slideShift = c.IsN163WaveChannel ? 2 : 0;
        var slideSign = c.IsN163WaveChannel || c.IsFdsWaveChannel ? -1 : 1; // Inverted channels.

        for (int p = 0; p < s.Length; p++)
        {
            var pattern = c.PatternInstances[p];

            if (pattern == null || !patternFxData.ContainsKey(pattern) || processedPatterns.Contains(pattern))
            {
                continue;
            }

            processedPatterns.Add(pattern);

            var fxData = patternFxData[pattern];
            var patternLen = s.GetPatternLength(p);

            for (var it = pattern.GetNoteIterator(0, patternLen); !it.Done; it.Next())
            {
                var n = it.CurrentTime;
                var note = it.CurrentNote;

                // Look for speed changes.
                foreach (var c2 in s.Channels)
                {
                    var pattern2 = c2.PatternInstances[p];

                    if (pattern2 != null && pattern2.Notes.TryGetValue(n, out var note2) && note2.HasSpeed)
                    {
                        songSpeed = note2.Speed;
                    }
                }

                var slideTarget = 0;

                for (int i = 0; i < fxData.GetLength(1); i++)
                {
                    var fx = fxData[n, i];

                    if (fx.param != 0)
                    {
                        // When the effect is turned on, we need to add a note.
                        if ((fx.fx == Effect_PortaUp ||
                             fx.fx == Effect_PortaDown ||
                             fx.fx == Effect_SlideUp ||
                             fx.fx == Effect_SlideDown) &&
                            lastNoteValue >= Note.MusicalNoteMin &&
                            lastNoteValue <= Note.MusicalNoteMax && (note == null || !note.IsValid))
                        {
                            if (note == null)
                            {
                                note = pattern.GetOrCreateNoteAt(n);
                                it.Resync();
                            }

                            note.Value = lastNoteValue;
                            note.Instrument = lastNoteInstrument;
                            note.HasAttack = false;
                        }
                    }

                    if (fx.fx == Effect_PortaUp)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (-fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_PortaDown)
                    {
                        // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                        if (slideTarget == 0)
                        {
                            slideSpeed = (fx.param * slideSign) << slideShift;
                        }
                    }
                    if (fx.fx == Effect_Portamento)
                    {
                        portamentoSpeed = fx.param;
                    }
                    if (fx.fx == Effect_SlideUp && note != null) // Null check so a Qxy on a row with no note cannot crash.
                    {
                        slideTarget = note.Value + (fx.param & 0xf);
                        slideSpeed = (-((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                    if (fx.fx == Effect_SlideDown && note != null) // Same null check as above.
                    {
                        slideTarget = note.Value - (fx.param & 0xf);
                        slideSpeed = (((fx.param >> 4) * 2 + 1)) << slideShift;
                    }
                }

                // Create a slide note.
                if (note != null && !note.IsSlideNote)
                {
                    if (note.IsMusical)
                    {
                        var slideSource = note.Value;
                        var noteTable = NesApu.GetNoteTableForChannelType(c.Type, false, s.Project.ExpansionNumChannels);
                        var pitchLimit = NesApu.GetPitchLimitForChannelType(c.Type);

                        // If we have a new note with auto-portamento enabled, we need to
                        // swap the notes since our slide notes work backward compared to
                        // FamiTracker.
                        if (portamentoSpeed != 0)
                        {
                            // Ignore notes with no attack since we created them to handle a previous slide.
                            if (note.HasAttack && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax)
                            {
                                slideSpeed = portamentoSpeed;
                                slideTarget = note.Value;
                                slideSource = lastNoteValue;
                                note.Value = lastNoteValue;
                            }
                        }

                        // Our implementation of VRC7 pitches is quite different from FamiTracker.
                        // Compensate for larger pitches in higher octaves by shifting. We can't shift by
                        // a large amount because the period is 9-bit and FamiTracker is restricted to
                        // this for slides (octave never changes).
                        var octaveSlideShift = c.IsVrc7FmChannel && note.Value >= 12 ? 1 : 0;

                        if (slideTarget != 0)
                        {
                            // TODO: We assume a tempo of 150 here. This is wrong.
                            var numFrames = Math.Max(1, Math.Abs((noteTable[slideSource] - noteTable[slideTarget]) / ((slideSpeed << octaveSlideShift) * songSpeed)));
                            note.SlideNoteTarget = (byte)slideTarget;

                            var nn = n + numFrames;
                            var np = p;

                            while (nn >= s.GetPatternLength(np))
                            {
                                nn -= s.GetPatternLength(np);
                                np++;
                            }

                            if (np >= s.Length)
                            {
                                np = s.Length;
                                nn = 0;
                            }

                            // We still need to check if there is a note between the current one and the
                            // next one; this could happen if you add a note before the slide is supposed
                            // to finish.
                            if (FindNextNoteForSlide(c, p, n, out var np2, out var nn2, patternFxData))
                            {
                                if (np2 < np)
                                {
                                    np = np2;
                                    nn = nn2;
                                }
                                else if (np2 == np)
                                {
                                    nn = Math.Min(nn, nn2);
                                }
                            }

                            if (np < s.Length)
                            {
                                // Add an extra note with no attack to stop the slide.
                                var nextPattern = c.PatternInstances[np];

                                if (!nextPattern.Notes.TryGetValue(nn, out var nextNote) || !nextNote.IsValid)
                                {
                                    nextNote = nextPattern.GetOrCreateNoteAt(nn);
                                    nextNote.Instrument = note.Instrument;
                                    nextNote.Value = (byte)slideTarget;
                                    nextNote.HasAttack = false;
                                    it.Resync();
                                }
                            }

                            // 3xx, Qxx and Rxx stop when they are done.
                            slideSpeed = 0;
                        }
                        // Find the next note that would stop the slide or change the FX settings.
                        else if (slideSpeed != 0 && FindNextNoteForSlide(c, p, n, out var np, out var nn, patternFxData))
                        {
                            // Compute the pitch delta and find the closest target note.
                            var numFrames = (s.GetPatternStartNote(np, nn) - s.GetPatternStartNote(p, n)) * songSpeed; // TODO: PAL.
                            var newNotePitch = Utils.Clamp(noteTable[slideSource] + numFrames * (slideSpeed << octaveSlideShift), 0, pitchLimit);
                            var newNote = FindBestMatchingNote(noteTable, newNotePitch, Math.Sign(slideSpeed));

                            note.SlideNoteTarget = (byte)newNote;

                            // If the FX was turned off, we need to add an extra note.
                            var nextPattern = c.PatternInstances[np];

                            if (!nextPattern.Notes.TryGetValue(nn, out var nextNote) || !nextNote.IsValid)
                            {
                                nextNote = nextPattern.GetOrCreateNoteAt(nn);
                                nextNote.Instrument = note.Instrument;
                                nextNote.Value = (byte)newNote;
                                nextNote.HasAttack = false;
                                it.Resync();
                            }
                        }
                    }
                }

                if (note != null && (note.IsMusical || note.IsStop))
                {
                    lastNoteValue = note.IsSlideNote ? note.SlideNoteTarget : note.Value;
                    lastNoteInstrument = note.Instrument;
                }
            }
        }
    }
}
private int OutputSong(Song song, int songIdx, int speedChannel, int factor, bool test)
{
    var packedPatternBuffers = new List<List<string>>(globalPacketPatternBuffers);
    var size = 0;
    var emptyPattern = new Pattern(-1, song, 0, "");
    var emptyNote = new Note(Note.NoteInvalid);

    for (int c = 0; c < song.Channels.Length; c++)
    {
        if (!test)
        {
            lines.Add($"\n{ll}song{songIdx}ch{c}:");
        }

        var channel = song.Channels[c];
        var currentSpeed = song.FamitrackerSpeed;
        var isSpeedChannel = c == speedChannel;
        var instrument = (Instrument)null;
        var previousNoteLength = song.NoteLength;

        if (isSpeedChannel && project.UsesFamiTrackerTempo)
        {
            if (!test)
            {
                lines.Add($"\t{db} $fb, ${song.FamitrackerSpeed:x2}");
            }
            size += 2;
        }

        for (int p = 0; p < song.Length; p++)
        {
            var prevNoteValue = Note.NoteInvalid;
            var pattern = channel.PatternInstances[p] == null ? emptyPattern : channel.PatternInstances[p];
            var patternBuffer = new List<string>();

            if (p == song.LoopPoint)
            {
                if (!test)
                {
                    lines.Add($"{ll}song{songIdx}ch{c}loop:");
                }

                // Clear stored instrument to force a reset. We might be looping
                // to a section where the instrument was set from a previous pattern.
                instrument = null;
            }

            if (isSpeedChannel && project.UsesFamiStudioTempo && machine != MachineType.NTSC)
            {
                var noteLength = song.GetPatternNoteLength(p);

                if (noteLength != previousNoteLength || (p == song.LoopPoint && p != 0))
                {
                    if (!test)
                    {
                        patternBuffer.Add($"$fb");
                        patternBuffer.Add($"{lo}({ll}tempo_env{noteLength})");
                        patternBuffer.Add($"{hi}({ll}tempo_env{noteLength})");
                        previousNoteLength = noteLength;
                    }

                    size += 3;
                }
            }

            var patternLength = song.GetPatternLength(p);
            var numValidNotes = patternLength;

            for (var it = pattern.GetNoteIterator(0, patternLength); !it.Done;)
            {
                var time = it.CurrentTime;
                var note = it.CurrentNote;

                if (note == null)
                {
                    note = emptyNote;
                }

                if (isSpeedChannel && song.UsesFamiTrackerTempo)
                {
                    var speed = FindEffectParam(song, p, time, Note.EffectSpeed);
                    if (speed >= 0)
                    {
                        currentSpeed = speed;
                        patternBuffer.Add($"${0xfb:x2}");
                        patternBuffer.Add($"${(byte)speed:x2}");
                    }
                }

                it.Next();

                if (note.HasVolume)
                {
                    patternBuffer.Add($"${(byte)(0x70 | note.Volume):x2}");
                }

                if (note.HasFinePitch)
                {
                    patternBuffer.Add($"${0x65:x2}");
                    patternBuffer.Add($"${note.FinePitch:x2}");
                }

                if (note.HasVibrato)
                {
                    patternBuffer.Add($"${0x63:x2}");
                    patternBuffer.Add($"{lo}({vibratoEnvelopeNames[note.RawVibrato]})");
                    patternBuffer.Add($"{hi}({vibratoEnvelopeNames[note.RawVibrato]})");

                    if (note.RawVibrato == 0)
                    {
                        patternBuffer.Add($"${0x64:x2}");
                    }
                }

                if (note.HasFdsModSpeed)
                {
                    patternBuffer.Add($"${0x66:x2}");
                    patternBuffer.Add($"${(note.FdsModSpeed >> 0) & 0xff:x2}");
                    patternBuffer.Add($"${(note.FdsModSpeed >> 8) & 0xff:x2}");
                }

                if (note.HasFdsModDepth)
                {
                    patternBuffer.Add($"${0x67:x2}");
                    patternBuffer.Add($"${note.FdsModDepth:x2}");
                }

                if (note.IsValid)
                {
                    // Instrument change.
                    if (note.IsMusical)
                    {
                        if (note.Instrument != instrument)
                        {
                            int idx = instrumentIndices[note.Instrument];
                            patternBuffer.Add($"${(byte)(0x80 | (idx << 1)):x2}");
                            instrument = note.Instrument;
                        }
                        else if (!note.HasAttack)
                        {
                            // TODO: Remove note entirely after a slide that matches the next note with no attack.
                            patternBuffer.Add($"${0x62:x2}");
                        }
                    }

                    int numNotes = 0;

                    if (kernel != FamiToneKernel.FamiStudio)
                    {
                        // Note -> Empty -> Note special encoding.
                        if (time < patternLength - 2)
                        {
                            pattern.Notes.TryGetValue(time + 1, out var nextNote1);
                            pattern.Notes.TryGetValue(time + 2, out var nextNote2);

                            var valid1 = (nextNote1 != null && nextNote1.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 1, Note.EffectSpeed) >= 0);
                            var valid2 = (nextNote2 != null && nextNote2.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 2, Note.EffectSpeed) >= 0);

                            if (!valid1 && valid2)
                            {
                                it.Next();
                                numValidNotes--;
                                numNotes = 1;
                            }
                        }
                    }

                    if (note.IsSlideNote)
                    {
                        var noteTableNtsc = NesApu.GetNoteTableForChannelType(channel.Type, false, song.Project.ExpansionNumChannels);
                        var noteTablePal  = NesApu.GetNoteTableForChannelType(channel.Type, true,  song.Project.ExpansionNumChannels);

                        var found = true;
                        found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTableNtsc, out _, out int stepSizeNtsc, out _);
                        found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTablePal,  out _, out int stepSizePal,  out _);

                        if (song.Project.UsesExpansionAudio || machine == MachineType.NTSC)
                        {
                            stepSizePal = stepSizeNtsc;
                        }
                        else if (machine == MachineType.PAL)
                        {
                            stepSizeNtsc = stepSizePal;
                        }

                        if (found)
                        {
                            // Take the (signed) maximum of both step sizes so that we are guaranteed to reach our note.
                            var stepSize = Math.Max(Math.Abs(stepSizeNtsc), Math.Abs(stepSizePal)) * Math.Sign(stepSizeNtsc);

                            patternBuffer.Add($"${0x61:x2}");
                            patternBuffer.Add($"${(byte)stepSize:x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.Value):x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.SlideNoteTarget):x2}");
                            continue;
                        }
                    }

                    patternBuffer.Add($"${EncodeNoteValue(c, note.Value, numNotes):x2}");
                    prevNoteValue = note.Value;
                }
                else
                {
                    int numEmptyNotes = 0;

                    while (!it.Done)
                    {
                        time = it.CurrentTime;
                        note = it.CurrentNote;

                        if (note == null)
                        {
                            note = emptyNote;
                        }

                        if (numEmptyNotes >= maxRepeatCount ||
                            note.IsValid ||
                            note.HasVolume ||
                            note.HasVibrato ||
                            note.HasFinePitch ||
                            note.HasFdsModSpeed ||
                            note.HasFdsModDepth ||
                            (isSpeedChannel && FindEffectParam(song, p, time, Note.EffectSpeed) >= 0))
                        {
                            break;
                        }

                        numEmptyNotes++;
                        it.Next();
                    }

                    numValidNotes -= numEmptyNotes;
                    patternBuffer.Add($"${(byte)(0x81 | (numEmptyNotes << 1)):x2}");
                }
            }

            int matchingPatternIdx = -1;

            if (patternBuffer.Count > 0)
            {
                if (patternBuffer.Count > 4)
                {
                    for (int j = 0; j < packedPatternBuffers.Count; j++)
                    {
                        if (packedPatternBuffers[j].SequenceEqual(patternBuffer))
                        {
                            matchingPatternIdx = j;
                            break;
                        }
                    }
                }

                if (matchingPatternIdx < 0)
                {
                    if (packedPatternBuffers.Count > MaxPackedPatterns)
                    {
                        return -1; // TODO: Error.
                    }

                    packedPatternBuffers.Add(patternBuffer);

                    size += patternBuffer.Count;

                    if (!test)
                    {
                        lines.Add($"{ll}ref{packedPatternBuffers.Count - 1}:");
                        lines.Add($"\t{db} {String.Join(",", patternBuffer)}");
                    }
                }
                else
                {
                    if (!test)
                    {
                        lines.Add($"\t{db} $ff,${numValidNotes:x2}");
                        lines.Add($"\t{dw} {ll}ref{matchingPatternIdx}");
                    }

                    size += 4;
                }
            }
        }

        if (!test)
        {
            lines.Add($"\t{db} $fd");
            lines.Add($"\t{dw} {ll}song{songIdx}ch{c}loop");
        }

        size += 3;
    }

    if (!test)
    {
        globalPacketPatternBuffers = packedPatternBuffers;
    }

    return size;
}
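// Byte-cost reasoning behind the "patternBuffer.Count > 4" check above (a sketch of one
// reading of the code, not exporter API): referencing an already-packed pattern costs
// 4 bytes ($ff opcode, note count, 16-bit pointer), so looking for a duplicate only saves
// space when the packed pattern data itself is longer than 4 bytes.
static bool WorthReferencing(int packedByteCount)
{
    const int referenceCostInBytes = 4; // $ff + count + 2-byte pointer.
    return packedByteCount > referenceCostInBytes;
}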
protected bool AdvanceSong(int songLength, LoopMode loopMode)
{
    bool advancedPattern = false;

    if (++playLocation.NoteIndex >= song.GetPatternLength(playLocation.PatternIndex))
    {
        playLocation.NoteIndex = 0;

        if (loopMode != LoopMode.Pattern)
        {
            playLocation.PatternIndex++;
            advancedPattern = true;
        }
        else
        {
            // Make sure the selection is valid; it is updated on another thread, so this
            // could be sketchy.
            var minPatternIdx = minSelectedPattern;
            var maxPatternIdx = maxSelectedPattern;

            if (minPatternIdx >= 0 && maxPatternIdx >= 0 && maxPatternIdx >= minPatternIdx && minPatternIdx < song.Length)
            {
                if (playLocation.PatternIndex + 1 > maxPatternIdx)
                {
                    playLocation.PatternIndex = minPatternIdx;
                }
                else
                {
                    playLocation.PatternIndex++;
                    advancedPattern = true;
                }
            }
        }
    }

    if (playLocation.PatternIndex >= songLength)
    {
        loopCount++;

        if (maxLoopCount > 0 && loopCount >= maxLoopCount)
        {
            return false;
        }

        if (loopMode == LoopMode.LoopPoint) // This loop mode is actually unused.
        {
            if (song.LoopPoint >= 0)
            {
                playLocation.PatternIndex = song.LoopPoint;
                playLocation.NoteIndex = 0;
                advancedPattern = true;
            }
            else
            {
                return false;
            }
        }
        else if (loopMode == LoopMode.Song)
        {
            playLocation.PatternIndex = Math.Max(0, song.LoopPoint);
            playLocation.NoteIndex = 0;
            advancedPattern = true;
        }
        else if (loopMode == LoopMode.None)
        {
            return false;
        }
    }

    if (advancedPattern)
    {
        ResetFamiStudioTempo();
    }

    UpdateBeat();

    return true;
}