// Reads one FamiTracker "PATTERNS" block starting at byte offset 'idx' and spanning
// 'blockSize' bytes. Each entry is: song index, channel index, pattern index, item
// count (all int32), followed by 'items' rows of (row# int32, note, octave,
// instrument, volume bytes) plus (fxCount + 1) pairs of effect/param bytes.
// Populates the matching FamiStudio patterns and stashes the raw effect columns in
// patternFxData for later processing. Always returns true (no failure path here).
private bool ReadPatterns(int idx)
{
    var maxIdx = idx + blockSize;

    while (idx < maxIdx)
    {
        // Entry header: which song/channel/pattern this data belongs to, and row count.
        var songIdx = BitConverter.ToInt32(bytes, idx); idx += sizeof(int);
        var chanIdx = BitConverter.ToInt32(bytes, idx); idx += sizeof(int);
        var patIdx  = BitConverter.ToInt32(bytes, idx); idx += sizeof(int);
        var items   = BitConverter.ToInt32(bytes, idx); idx += sizeof(int);

        var song    = project.Songs[songIdx];
        var channel = song.Channels[chanIdx];
        // Patterns were created earlier under their hex name (e.g. "0A").
        var pattern = channel.GetPattern($"{patIdx:X2}");

        // Famitracker can have patterns that arent actually used in the song.
        // Skip with a dummy pattern.
        if (pattern == null)
        {
            pattern = new Pattern();
        }

        // One extra column beyond the per-channel effect column count (column 0 always exists).
        var fxCount = songEffectColumnCount[song][chanIdx];
        var fxData  = new RowFxData[song.PatternLength, fxCount + 1];
        patternFxData[pattern] = fxData;

        for (int i = 0; i < items; i++)
        {
            var n          = BitConverter.ToInt32(bytes, idx); idx += sizeof(int); // row number
            var note       = bytes[idx++];
            var octave     = bytes[idx++];
            var instrument = bytes[idx++];
            var volume     = bytes[idx++];

            // This happens when some patterns are longer than the song pattern length.
            // The TNMT song from FamiTracker has this.
            if (n < song.PatternLength)
            {
                // Volume 16 means "no volume set" in FamiTracker; anything else is 0-15.
                if (volume != 16)
                {
                    pattern.GetOrCreateNoteAt(n).Volume = (byte)(volume & 0x0f);
                }

                // Old-format FDS files store the wave channel two octaves low; compensate.
                // NOTE(review): the octave < 6 guard presumably avoids overflowing the note
                // range after the +2 shift — confirm against FamiTracker's format docs.
                if (blockVersion < 5 && project.ExpansionAudio == Project.ExpansionFds && channel.Type == Channel.FdsWave && octave < 6 && octave != 0)
                {
                    octave += 2;
                }

                if (note != 0 && octave != 0)
                {
                    switch (note)
                    {
                        case 13: // FamiTracker note release
                            pattern.GetOrCreateNoteAt(n).Value = Note.NoteRelease;
                            break;
                        case 14: // FamiTracker note stop (cut)
                            pattern.GetOrCreateNoteAt(n).Value = Note.NoteStop;
                            break;
                        default:
                            // DPCM instruments are handled elsewhere; skip assignment there.
                            if (instrument < MaxInstruments && channel.Type != Channel.Dpcm)
                            {
                                pattern.GetOrCreateNoteAt(n).Instrument = instruments[instrument];
                            }
                            if (channel.Type == Channel.Noise)
                            {
                                // Noise pitches are remapped (+15) into FamiStudio's noise range.
                                pattern.GetOrCreateNoteAt(n).Value = (byte)(octave * 12 + note + 15);
                            }
                            else
                            {
                                pattern.GetOrCreateNoteAt(n).Value = (byte)(octave * 12 + note);
                            }
                            break;
                    }
                }
            }

            // Effect columns always follow the note data and must be consumed even for
            // out-of-range rows, to keep the stream position correct.
            for (int j = 0; j < fxCount + 1; ++j)
            {
                RowFxData fx;
                fx.fx    = bytes[idx++];
                fx.param = bytes[idx++];

                // See comment above.
                if (n < song.PatternLength)
                {
                    fxData[n, j] = fx;
                }

                ApplySimpleEffects(fx, pattern, n, patternLengths);
            }
        }
    }

    return(true);
}
public void Serialize(ref Pattern pattern, Channel channel) { int patternId = pattern == null ? -1 : pattern.Id; Serialize(ref patternId); }
// Emits the packed note/effect byte stream for one song (FamiStudio-kernel export
// path). For each channel, every pattern is encoded into 'patternBuffer' (opcodes
// like $fb=speed/tempo, $70|vol, $61=slide, $81|n=empty-note run, etc.), then
// either emitted as a new "ref" block or replaced by a $ff back-reference when an
// identical packed buffer already exists. Returns the total byte size, or -1 when
// MaxPackedPatterns is exceeded. When 'test' is true, nothing is written to
// 'lines' — only the size is computed.
// NOTE(review): 'factor' is unused in this variant; it presumably exists to keep
// the signature parallel with the FamiTone2 variant — confirm with callers.
private int OutputSong(Song song, int songIdx, int speedChannel, int factor, bool test)
{
    // Work on a copy so a failed/test run doesn't pollute the global dedup cache.
    var packedPatternBuffers = new List <List <string> >(globalPacketPatternBuffers);
    var size = 0;
    var emptyPattern = new Pattern(-1, song, 0, "");
    var emptyNote = new Note(Note.NoteInvalid);

    for (int c = 0; c < song.Channels.Length; c++)
    {
        if (!test)
        {
            lines.Add($"\n{ll}song{songIdx}ch{c}:");
        }

        var channel            = song.Channels[c];
        var currentSpeed       = song.FamitrackerSpeed;
        var isSpeedChannel     = c == speedChannel; // only one channel carries tempo/speed opcodes
        var instrument         = (Instrument)null;
        var previousNoteLength = song.NoteLength;

        // Initial FamiTracker speed opcode at the start of the speed channel.
        if (isSpeedChannel && project.UsesFamiTrackerTempo)
        {
            if (!test)
            {
                lines.Add($"\t{db} $fb, ${song.FamitrackerSpeed:x2}");
            }
            size += 2;
        }

        for (int p = 0; p < song.Length; p++)
        {
            var prevNoteValue = Note.NoteInvalid;
            var pattern       = channel.PatternInstances[p] == null ? emptyPattern : channel.PatternInstances[p];
            var patternBuffer = new List <string>();

            if (p == song.LoopPoint)
            {
                if (!test)
                {
                    lines.Add($"{ll}song{songIdx}ch{c}loop:");
                }
                // Clear stored instrument to force a reset. We might be looping
                // to a section where the instrument was set from a previous pattern.
                instrument = null;
            }

            // FamiStudio tempo: emit a tempo-envelope pointer whenever the note length
            // changes, or unconditionally at a non-zero loop point.
            if (isSpeedChannel && project.UsesFamiStudioTempo)
            {
                var noteLength = song.GetPatternNoteLength(p);
                if (noteLength != previousNoteLength || (p == song.LoopPoint && p != 0))
                {
                    if (!test)
                    {
                        patternBuffer.Add($"$fb");
                        patternBuffer.Add($"{lo}({ll}tempo_env{noteLength})");
                        patternBuffer.Add($"{hi}({ll}tempo_env{noteLength})");
                        previousNoteLength = noteLength;
                    }
                    size += 3;
                }
            }

            var patternLength = song.GetPatternLength(p);
            var numValidNotes = patternLength;

            // Iterator is advanced manually (it.Next()) because some encodings consume
            // more than one row per loop iteration.
            for (var it = pattern.GetNoteIterator(0, patternLength); !it.Done;)
            {
                var time = it.CurrentTime;
                var note = it.CurrentNote;

                if (note == null)
                {
                    note = emptyNote;
                }

                if (isSpeedChannel && song.UsesFamiTrackerTempo)
                {
                    var speed = FindEffectParam(song, p, time, Note.EffectSpeed);
                    if (speed >= 0)
                    {
                        currentSpeed = speed;
                        patternBuffer.Add($"${0xfb:x2}");
                        patternBuffer.Add($"${(byte)speed:x2}");
                    }
                }

                it.Next();

                // Per-note effect opcodes precede the note value itself.
                if (note.HasVolume)
                {
                    patternBuffer.Add($"${(byte)(0x70 | note.Volume):x2}");
                }
                if (note.HasFinePitch)
                {
                    patternBuffer.Add($"${0x65:x2}");
                    patternBuffer.Add($"${note.FinePitch:x2}");
                }
                if (note.HasVibrato)
                {
                    patternBuffer.Add($"${0x63:x2}");
                    patternBuffer.Add($"{lo}({vibratoEnvelopeNames[note.RawVibrato]})");
                    patternBuffer.Add($"{hi}({vibratoEnvelopeNames[note.RawVibrato]})");
                    // Vibrato 0 needs an explicit "vibrato off" opcode after the envelope.
                    if (note.RawVibrato == 0)
                    {
                        patternBuffer.Add($"${0x64:x2}");
                    }
                }
                if (note.HasFdsModSpeed)
                {
                    patternBuffer.Add($"${0x66:x2}");
                    patternBuffer.Add($"${(note.FdsModSpeed >> 0) & 0xff:x2}"); // low byte
                    patternBuffer.Add($"${(note.FdsModSpeed >> 8) & 0xff:x2}"); // high byte
                }
                if (note.HasFdsModDepth)
                {
                    patternBuffer.Add($"${0x67:x2}");
                    patternBuffer.Add($"${note.FdsModDepth:x2}");
                }

                if (note.IsValid)
                {
                    // Instrument change.
                    if (note.IsMusical)
                    {
                        if (note.Instrument != instrument)
                        {
                            int idx = instrumentIndices[note.Instrument];
                            patternBuffer.Add($"${(byte)(0x80 | (idx << 1)):x2}");
                            instrument = note.Instrument;
                        }
                        else if (!note.HasAttack)
                        {
                            // TODO: Remove note entirely after a slide that matches the next note with no attack.
                            patternBuffer.Add($"${0x62:x2}");
                        }
                    }

                    int numNotes = 0;

                    if (kernel != FamiToneKernel.FamiStudio)
                    {
                        // Note -> Empty -> Note special encoding.
                        if (time < patternLength - 2)
                        {
                            pattern.Notes.TryGetValue(time + 1, out var nextNote1);
                            pattern.Notes.TryGetValue(time + 2, out var nextNote2);

                            var valid1 = (nextNote1 != null && nextNote1.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 1, Note.EffectSpeed) >= 0);
                            var valid2 = (nextNote2 != null && nextNote2.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 2, Note.EffectSpeed) >= 0);

                            if (!valid1 && valid2)
                            {
                                it.Next(); // swallow the empty row; it is folded into this opcode
                                numValidNotes--;
                                numNotes = 1;
                            }
                        }
                    }

                    if (note.IsSlideNote)
                    {
                        // Compute the slide step for both NTSC and PAL, then pick per machine.
                        var noteTableNtsc = NesApu.GetNoteTableForChannelType(channel.Type, false, song.Project.ExpansionNumChannels);
                        var noteTablePal  = NesApu.GetNoteTableForChannelType(channel.Type, true,  song.Project.ExpansionNumChannels);

                        var found = true;
                        found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTableNtsc, out _, out int stepSizeNtsc, out _);
                        found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTablePal,  out _, out int stepSizePal,  out _);

                        // Expansion audio is NTSC-only; otherwise force both step sizes to the target machine.
                        if (song.Project.UsesExpansionAudio || machine == MachineType.NTSC)
                        {
                            stepSizePal = stepSizeNtsc;
                        }
                        else if (machine == MachineType.PAL)
                        {
                            stepSizeNtsc = stepSizePal;
                        }

                        if (found)
                        {
                            // Take the (signed) maximum of both notes so that we are garantee to reach our note.
                            var stepSize = Math.Max(Math.Abs(stepSizeNtsc), Math.Abs(stepSizePal)) * Math.Sign(stepSizeNtsc);
                            patternBuffer.Add($"${0x61:x2}");
                            patternBuffer.Add($"${(byte)stepSize:x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.Value):x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.SlideNoteTarget):x2}");
                            continue; // slide opcode replaces the plain note value
                        }
                    }

                    patternBuffer.Add($"${EncodeNoteValue(c, note.Value, numNotes):x2}");
                    prevNoteValue = note.Value;
                }
                else
                {
                    // Run-length encode consecutive empty rows (up to maxRepeatCount) as one
                    // $81|count opcode. Rows carrying any effect break the run.
                    int numEmptyNotes = 0;

                    while (!it.Done)
                    {
                        time = it.CurrentTime;
                        note = it.CurrentNote;

                        if (note == null)
                        {
                            note = emptyNote;
                        }

                        if (numEmptyNotes >= maxRepeatCount ||
                            note.IsValid ||
                            note.HasVolume ||
                            note.HasVibrato ||
                            note.HasFinePitch ||
                            note.HasFdsModSpeed ||
                            note.HasFdsModDepth ||
                            (isSpeedChannel && FindEffectParam(song, p, time, Note.EffectSpeed) >= 0))
                        {
                            break;
                        }

                        numEmptyNotes++;
                        it.Next();
                    }

                    numValidNotes -= numEmptyNotes;
                    patternBuffer.Add($"${(byte)(0x81 | (numEmptyNotes << 1)):x2}");
                }
            }

            // Pattern dedup: reuse an identical previously-packed buffer via a $ff reference.
            // Buffers of 4 bytes or less are cheaper inline than a 4-byte reference.
            int matchingPatternIdx = -1;

            if (patternBuffer.Count > 0)
            {
                if (patternBuffer.Count > 4)
                {
                    for (int j = 0; j < packedPatternBuffers.Count; j++)
                    {
                        if (packedPatternBuffers[j].SequenceEqual(patternBuffer))
                        {
                            matchingPatternIdx = j;
                            break;
                        }
                    }
                }

                if (matchingPatternIdx < 0)
                {
                    if (packedPatternBuffers.Count > MaxPackedPatterns)
                    {
                        return(-1); // TODO: Error.
                    }

                    packedPatternBuffers.Add(patternBuffer);
                    size += patternBuffer.Count;

                    if (!test)
                    {
                        lines.Add($"{ll}ref{packedPatternBuffers.Count - 1}:");
                        lines.Add($"\t{db} {String.Join(",", patternBuffer)}");
                    }
                }
                else
                {
                    if (!test)
                    {
                        lines.Add($"\t{db} $ff,${numValidNotes:x2}");
                        lines.Add($"\t{dw} {ll}ref{matchingPatternIdx}");
                    }
                    size += 4; // $ff + count + 2-byte address
                }
            }
        }

        // End of channel: $fd + 2-byte loop address.
        if (!test)
        {
            lines.Add($"\t{db} $fd");
            lines.Add($"\t{dw} {ll}song{songIdx}ch{c}loop");
        }
        size += 3;
    }

    // Only commit the dedup cache on a real (non-test) pass.
    if (!test)
    {
        globalPacketPatternBuffers = packedPatternBuffers;
    }

    return(size);
}
// Emits the packed note/effect byte stream for one song (FamiTone2 export path).
// Unlike the FamiStudio-kernel variant, the loop point comes from a Jump effect,
// pattern length can be shortened by Skip/Jump effects, and 'factor' accounts for
// songs that were split into multiple shorter patterns. Returns total byte size,
// or -1 when MaxPackedPatterns is exceeded. When 'test' is true, nothing is
// written to 'lines' — only the size is computed.
private int OutputSong(Song song, int songIdx, int speedChannel, int factor, bool test)
{
    // Work on a copy so a failed/test run doesn't pollute the global dedup cache.
    var packedPatternBuffers = new List <List <string> >(globalPacketPatternBuffers);
    var size = 0;
    // FamiTone2 has no native loop point; derive it from the first Jump effect
    // (scaled by the split factor), defaulting to pattern 0.
    var loopPoint = Math.Max(0, FindEffectParam(song, Note.EffectJump)) * factor;
    var emptyPattern = new Pattern(-1, song, 0, "");

    for (int c = 0; c < song.Channels.Length; c++)
    {
        if (!test)
        {
            lines.Add($"\n{ll}song{songIdx}ch{c}:");
        }

        var channel        = song.Channels[c];
        var isSpeedChannel = c == speedChannel; // only one channel carries speed opcodes
        var instrument     = (Instrument)null;

        if (isSpeedChannel)
        {
            if (!test)
            {
                lines.Add($"\t{db} $fb, ${song.Speed:x2}");
            }
            size += 2;
        }

        var isSkipping = false;

        for (int p = 0; p < song.Length; p++)
        {
            var prevNoteValue = Note.NoteInvalid;
            var pattern       = channel.PatternInstances[p] == null ? emptyPattern : channel.PatternInstances[p];
            var patternBuffer = new List <string>();

            // If we had split the song and we find a skip to the next
            // pattern, we need to ignore the extra patterns we generated.
            if (isSkipping && (p % factor) != 0)
            {
                continue;
            }

            if (!test && p == loopPoint)
            {
                lines.Add($"{ll}song{songIdx}ch{c}loop:");
            }

            var i = 0;
            // A Skip effect truncates the pattern at that row; a Jump does the same
            // and additionally ends the whole channel stream after this pattern.
            var patternLength = FindEffectPosition(song, p, Note.EffectSkip);
            var jumpFound     = false;

            if (patternLength >= 0)
            {
                patternLength++;
                isSkipping = true;
            }
            else
            {
                isSkipping = false;

                patternLength = FindEffectPosition(song, p, Note.EffectJump);
                if (patternLength >= 0)
                {
                    patternLength++;
                    jumpFound = true;
                }
                else
                {
                    patternLength = song.PatternLength;
                }
            }

            var numValidNotes = patternLength;

            while (i < patternLength)
            {
                var note = pattern.Notes[i];

                if (isSpeedChannel)
                {
                    var speed = FindEffectParam(song, p, i, Note.EffectSpeed);
                    if (speed >= 0)
                    {
                        patternBuffer.Add($"${0xfb:x2}");
                        patternBuffer.Add($"${(byte)speed:x2}");
                    }
                }

                i++;

                // Per-note effect opcodes precede the note value itself.
                if (note.HasVolume)
                {
                    patternBuffer.Add($"${(byte)(0x70 | note.Volume):x2}");
                }
                if (note.HasVibrato)
                {
                    patternBuffer.Add($"${0x63:x2}");
                    patternBuffer.Add($"{lo}({vibratoEnvelopeNames[note.Vibrato]})");
                    patternBuffer.Add($"{hi}({vibratoEnvelopeNames[note.Vibrato]})");
                    // Vibrato 0 needs an explicit "vibrato off" opcode after the envelope.
                    if (note.Vibrato == 0)
                    {
                        patternBuffer.Add($"${0x64:x2}");
                    }
                }

                if (note.IsValid)
                {
                    // Instrument change.
                    if (note.IsMusical)
                    {
                        if (note.Instrument != instrument)
                        {
                            int idx = project.Instruments.IndexOf(note.Instrument);
                            patternBuffer.Add($"${(byte)(0x80 | (idx << 1)):x2}");
                            instrument = note.Instrument;
                        }
                        else if (!note.HasAttack)
                        {
                            // TODO: Remove note entirely after a slide that matches the next note with no attack.
                            patternBuffer.Add($"${0x62:x2}");
                        }
                    }

                    int numNotes = 0;

                    if (kernel == FamiToneKernel.FamiTone2)
                    {
                        // Note -> Empty -> Note special encoding.
                        // ('i' was already incremented, so [i + 0] / [i + 1] are the next two rows.)
                        if (i < patternLength - 1)
                        {
                            var nextNote1 = pattern.Notes[i + 0];
                            var nextNote2 = pattern.Notes[i + 1];

                            var valid1 = nextNote1.IsValid || (isSpeedChannel && FindEffectParam(song, p, i + 0, Note.EffectSpeed) >= 0);
                            var valid2 = nextNote2.IsValid || (isSpeedChannel && FindEffectParam(song, p, i + 1, Note.EffectSpeed) >= 0);

                            if (!valid1 && valid2)
                            {
                                i++; // swallow the empty row; it is folded into this opcode
                                numValidNotes--;
                                numNotes = 1;
                            }
                        }
                    }

                    if (note.IsSlideNote)
                    {
                        // FamiTone2 path only computes the NTSC slide step.
                        var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, false);

                        if (channel.ComputeSlideNoteParams(p, i - 1, noteTable, out _, out int stepSize, out _))
                        {
                            patternBuffer.Add($"${0x61:x2}");
                            patternBuffer.Add($"${(byte)stepSize:x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.Value):x2}");
                            patternBuffer.Add($"${EncodeNoteValue(c, note.SlideNoteTarget):x2}");
                            continue; // slide opcode replaces the plain note value
                        }
                    }

                    patternBuffer.Add($"${EncodeNoteValue(c, note.Value, numNotes):x2}");
                    prevNoteValue = note.Value;
                }
                else
                {
                    // Run-length encode consecutive empty rows (up to maxRepeatCount) as one
                    // $81|count opcode. Rows carrying any effect break the run.
                    int numEmptyNotes = 0;

                    while (i < patternLength)
                    {
                        var emptyNote = pattern.Notes[i];

                        if (numEmptyNotes >= maxRepeatCount ||
                            emptyNote.IsValid ||
                            emptyNote.HasVolume ||
                            emptyNote.HasVibrato ||
                            (isSpeedChannel && FindEffectParam(song, p, i, Note.EffectSpeed) >= 0))
                        {
                            break;
                        }

                        i++;
                        numEmptyNotes++;
                    }

                    numValidNotes -= numEmptyNotes;
                    patternBuffer.Add($"${(byte)(0x81 | (numEmptyNotes << 1)):x2}");
                }
            }

            // Pattern dedup: reuse an identical previously-packed buffer via a $ff reference.
            // Buffers of 4 bytes or less are cheaper inline than a 4-byte reference.
            int matchingPatternIdx = -1;

            if (patternBuffer.Count > 0)
            {
                if (patternBuffer.Count > 4)
                {
                    for (int j = 0; j < packedPatternBuffers.Count; j++)
                    {
                        if (packedPatternBuffers[j].SequenceEqual(patternBuffer))
                        {
                            matchingPatternIdx = j;
                            break;
                        }
                    }
                }

                if (matchingPatternIdx < 0)
                {
                    if (packedPatternBuffers.Count > MaxPackedPatterns)
                    {
                        return(-1); // TODO: Error.
                    }

                    packedPatternBuffers.Add(patternBuffer);
                    size += patternBuffer.Count;

                    if (!test)
                    {
                        lines.Add($"{ll}ref{packedPatternBuffers.Count - 1}:");
                        lines.Add($"\t{db} {String.Join(",", patternBuffer)}");
                    }
                }
                else
                {
                    if (!test)
                    {
                        lines.Add($"\t{db} $ff,${numValidNotes:x2}");
                        lines.Add($"\t{dw} {ll}ref{matchingPatternIdx}");
                    }
                    size += 4; // $ff + count + 2-byte address
                }
            }

            // A Jump effect ends the channel's stream here.
            if (jumpFound)
            {
                break;
            }
        }

        // End of channel: $fd + 2-byte loop address.
        if (!test)
        {
            lines.Add($"\t{db} $fd");
            lines.Add($"\t{dw} {ll}song{songIdx}ch{c}loop");
        }
        size += 3;
    }

    // Only commit the dedup cache on a real (non-test) pass.
    if (!test)
    {
        globalPacketPatternBuffers = packedPatternBuffers;
    }

    return(size);
}
protected override void OnMouseUp(MouseEventArgs e) { base.OnMouseUp(e); if (captureOperation != CaptureOperation.None) { if (captureOperation == CaptureOperation.ClickPattern) { if (GetPatternForCoord(e.X, e.Y, out int channelIdx, out int patternIdx)) { minSelectedChannelIdx = channelIdx; maxSelectedChannelIdx = channelIdx; minSelectedPatternIdx = patternIdx; maxSelectedPatternIdx = patternIdx; ConditionalInvalidate(); } else { ClearSelection(); } } else if (captureOperation == CaptureOperation.DragSelection) { bool copy = ModifierKeys.HasFlag(Keys.Control); int centerX = e.X - selectionDragAnchorX + PatternSizeX / 2; int basePatternIdx = (centerX - trackNameSizeX + scrollX) / PatternSizeX; Pattern[,] tmpPatterns = new Pattern[maxSelectedChannelIdx - minSelectedChannelIdx + 1, maxSelectedPatternIdx - minSelectedPatternIdx + 1]; App.UndoRedoManager.BeginTransaction(TransactionScope.Song, Song.Id); for (int i = minSelectedChannelIdx; i <= maxSelectedChannelIdx; i++) { for (int j = minSelectedPatternIdx; j <= maxSelectedPatternIdx; j++) { tmpPatterns[i - minSelectedChannelIdx, j - minSelectedPatternIdx] = Song.Channels[i].PatternInstances[j]; if (!copy) { Song.Channels[i].PatternInstances[j] = null; } } } for (int i = minSelectedChannelIdx; i <= maxSelectedChannelIdx; i++) { for (int j = minSelectedPatternIdx; j <= maxSelectedPatternIdx; j++) { Song.Channels[i].PatternInstances[j + basePatternIdx - minSelectedPatternIdx] = tmpPatterns[i - minSelectedChannelIdx, j - minSelectedPatternIdx]; } } App.UndoRedoManager.EndTransaction(); ClearSelection(); ConditionalInvalidate(); } Capture = false; captureOperation = CaptureOperation.None; }
private unsafe RenderBitmap GetPatternBitmapFromCache(RenderGraphics g, Pattern p) { int patternSizeX = Song.PatternLength - 1; int patternSizeY = trackSizeY - patternHeaderSizeY - 1; RenderBitmap bmp; if (patternBitmapCache.TryGetValue(p.Id, out bmp)) { if (bmp.Size.Width == patternSizeX) { return(bmp); } else { patternBitmapCache.Remove(p.Id); bmp.Dispose(); bmp = null; } } uint[] data = new uint[patternSizeX * patternSizeY]; Note minNote; Note maxNote; if (p.GetMinMaxNote(out minNote, out maxNote)) { if (maxNote.Value == minNote.Value) { minNote.Value = (byte)(minNote.Value - 5); maxNote.Value = (byte)(maxNote.Value + 5); } else { minNote.Value = (byte)(minNote.Value - 2); maxNote.Value = (byte)(maxNote.Value + 2); } Note lastValid = new Note { Value = Note.NoteInvalid }; for (int i = 0; i < Song.PatternLength - 1; i++) // TODO: We always skip the last note. { var n = p.Notes[i]; if (n.IsValid && !n.IsStop) { lastValid = p.Notes[i]; } if (lastValid.IsValid) { float scaleY = (patternSizeY - noteSizeY) / (float)patternSizeY; int x = i; int y = Math.Min((int)Math.Round((lastValid.Value - minNote.Value) / (float)(maxNote.Value - minNote.Value) * scaleY * patternSizeY), patternSizeY - noteSizeY); var instrument = lastValid.Instrument; var color = instrument == null ? ThemeBase.LightGreyFillColor1 : instrument.Color; for (int j = 0; j < noteSizeY; j++) { data[(patternSizeY - 1 - (y + j)) * patternSizeX + x] = (uint)color.ToArgb(); } } //if (n.HasEffect) //{ // for (int y = 0; y < patternSizeY; y++) // { // data[y * patternSizeX + i] = 0xff000000; // } //} } } bmp = g.CreateBitmap(patternSizeX, patternSizeY, data); patternBitmapCache[p.Id] = bmp; return(bmp); }
public void NotifyPatternChange(Pattern pattern) { patternBitmapCache.Remove(pattern.Id); }