/// <summary>
/// Builds a sequence of notes and rests from the notes contained in the
/// specified <see cref="TrackChunk"/>, using the given rest separation policy.
/// </summary>
/// <param name="trackChunk">Track chunk whose notes are combined with rests.</param>
/// <param name="restSeparationPolicy">Policy controlling how rests are separated.</param>
/// <returns>Notes of the chunk interleaved with the rests between them.</returns>
public static IEnumerable<ILengthedObject> GetNotesAndRests(this TrackChunk trackChunk, RestSeparationPolicy restSeparationPolicy)
{
    ThrowIfArgument.IsNull(nameof(trackChunk), trackChunk);
    ThrowIfArgument.IsInvalidEnumValue(nameof(restSeparationPolicy), restSeparationPolicy);

    // Delegate to the notes-collection overload.
    var notes = trackChunk.GetNotes();
    return notes.GetNotesAndRests(restSeparationPolicy);
}
/// <summary>
/// Secondary thread to play the notes of a MIDI track.
/// </summary>
/// <param name="track">The track to play; must be a <see cref="TrackChunk"/>
/// (typed as <c>object</c> so it can be passed through a thread-start delegate).</param>
public void PlayTrack(object track)
{
    Utils.Pprint($"Playback of the track has started! KeyPress Mode: {(bUsingkeybdevent ? "keybd_event" : "SendInput")}\n", ConsoleColor.Green);

    // Reset playback timing state before the first note.
    dNoteOriginalStartTime = Utils.GetTime();
    dNoteStartOffset = -1;
    lastUtcTimeSincePause = 0;

    TrackChunk trackChunk = (TrackChunk)track;
    // GetNotes() already yields the notes; the original's extra .ToList() was a
    // redundant intermediate copy.
    var noteQueue = new Queue<Note>(trackChunk.GetNotes());

    bIsPlaying = true;
    bHasStartedPlaying = true;

    while (noteQueue.Any())
    {
        if (!bIsPlaying)
        {
            // BUGFIX: the original loop spun at 100% CPU while paused
            // (empty iterations until bIsPlaying became true again).
            // Yield the thread briefly instead of busy-waiting.
            System.Threading.Thread.Sleep(10);
            continue;
        }

        var note = noteQueue.Dequeue();
        var noteTime = (double)note.Time / Division;
        var noteName = Utils.ConvertToKBNote(note.NoteNumber);

        if (!bDisableNoteEvents)
        {
            Utils.PrintNote(note, noteTime);
        }

        // Remember the offset of the very first note so subsequent waits are
        // relative to it.
        if (dNoteStartOffset == -1)
        {
            dNoteStartOffset = noteTime;
        }

        NotePressSleep(noteTime); // block until this note is due
        Utils.PressKeys(noteName, bUsingkeybdevent ? Utils.KeyPressMode.KBDEVENT : Utils.KeyPressMode.SENDINPUT);
    }

    bIsPlaying = false;
    bHasStartedPlaying = false;
    Utils.Pprint("Playback of the track was finished!\n", ConsoleColor.Green);

    // Restore the UI controls on the UI thread. Cache the form lookup once
    // instead of re-running the same LINQ query five times as the original did.
    var mainForm = Application.OpenForms.OfType<MidiToVPianoMain>().FirstOrDefault();
    if (mainForm != null)
    {
        mainForm.btnPlay.Invoke(new MethodInvoker(() =>
        {
            mainForm.btnPlay.Text = "Play";
            mainForm.rBSendInput.Enabled = true;
            mainForm.rBkeybdevent.Enabled = true;
            mainForm.tBTempo.Enabled = true;
        }));
    }
}
/// <summary>
/// Constructs a MidiTrack from a DryWetMidi track chunk, parsing its timed
/// events and notes against the supplied tempo map.
/// </summary>
/// <param name="track">Source track chunk.</param>
/// <param name="tempoMap">Tempo map used to interpret event/note times.</param>
public MidiTrack(TrackChunk track, TempoMap tempoMap)
{
    // Start from empty per-track state.
    this.trackName = string.Empty;
    this.instrument = new MidiInstrument(0, 0);
    this.notes = new List<MidiNote>();
    this.pitchBends = new List<MidiPitchBend>();
    this.timeSignatures = new List<MidiTimeSignature>();

    parseEvents(track.GetTimedEvents(), tempoMap);
    parseNotes(track.GetNotes(), tempoMap);

    // Fall back to a standard 4/4 signature when the MIDI file supplies none.
    if (this.timeSignatures.Count == 0)
    {
        this.timeSignatures.Add(new MidiTimeSignature(0, "4/4", " 24 clocks/click", " 8 /32nd/beat", tempoMap));
    }
}
/// <summary>
/// Verifies that note detection with the given settings yields the expected
/// notes both via GetNotes and via GetObjects, for the requested container type.
/// </summary>
private void GetNotes_DetectionSettings_EventsCollection(
    ContainerType containerType,
    NoteDetectionSettings settings,
    ICollection<MidiEvent> midiEvents,
    ICollection<Note> expectedNotes)
{
    switch (containerType)
    {
        case ContainerType.EventsCollection:
        {
            var eventsCollection = new EventsCollection();
            eventsCollection.AddRange(midiEvents);

            // Check the dedicated notes API first, then the generic objects API.
            MidiAsserts.AreEqual(expectedNotes, eventsCollection.GetNotes(settings), "Notes are invalid.");
            MidiAsserts.AreEqual(
                expectedNotes,
                eventsCollection.GetObjects(ObjectType.Note, new ObjectDetectionSettings { NoteDetectionSettings = settings }),
                "Notes are invalid from GetObjects.");
            break;
        }
        case ContainerType.TrackChunk:
        {
            var trackChunk = new TrackChunk(midiEvents);

            MidiAsserts.AreEqual(expectedNotes, trackChunk.GetNotes(settings), "Notes are invalid.");
            MidiAsserts.AreEqual(
                expectedNotes,
                trackChunk.GetObjects(ObjectType.Note, new ObjectDetectionSettings { NoteDetectionSettings = settings }),
                "Notes are invalid from GetObjects.");
            break;
        }
        case ContainerType.TrackChunks:
        case ContainerType.File:
        {
            // Both multi-chunk cases are handled by the track-chunks helper;
            // the flag selects whether to wrap the chunks in a MidiFile.
            GetNotes_DetectionSettings_TrackChunks(
                containerType == ContainerType.File,
                settings,
                new[] { midiEvents },
                expectedNotes);
            break;
        }
    }
}
/// <summary>
/// Reads per-track configuration groups encoded in the track's title text.
/// The title is split on '|' into groups and each group on ';' into fields;
/// the first field selects either a bmp 2.x style config ("vst:&lt;tone&gt;…" or
/// "lyrics") or a bmp 1.x style config (instrument name with optional octave
/// modifier, e.g. "harp+1"). Tracks with no notes and no lyric events, or whose
/// title contains "ignore", produce no configurations.
/// </summary>
/// <param name="trackChunk">Track chunk whose title and contents are inspected.</param>
/// <param name="trackNumber">Index of the track, used for config targeting and logging.</param>
/// <param name="song">Song passed through to ParseAdditionalOptions.</param>
/// <returns>Map of group index to parsed configuration; a default classic
/// (harp) config at key 0 when no group parsed successfully.</returns>
internal static Dictionary<long, ConfigContainer> ReadConfigs(this TrackChunk trackChunk, int trackNumber, BmpSong song)
{
    var configContainers = new Dictionary<long, ConfigContainer>();

    // Nothing playable on this track: no notes and no lyric meta events.
    if (trackChunk.GetNotes().Count == 0 && trackChunk.GetTimedEvents().All(x => x.Event.EventType != MidiEventType.Lyric))
    {
        BmpLog.I(BmpLog.Source.Transmogrify, "Skipping track " + trackNumber + " as it contains no notes and contains no lyric events.");
        return (configContainers);
    }

    // Normalized track title: first SequenceTrackName event, spaces stripped, lower-cased.
    var trackName = (trackChunk.Events.OfType<SequenceTrackNameEvent>().FirstOrDefault()?.Text ?? "").Replace(" ", "").ToLower();
    if (trackName.Contains("ignore"))
    {
        BmpLog.I(BmpLog.Source.Transmogrify, "Skipping track " + trackNumber + " as the track title contains \"Ignore\"");
        return (configContainers);
    }

    // Groups are separated by '|'; the regex captures a leading alphanumeric
    // token and an optional signed single-digit modifier (e.g. "piano-2").
    var groups = trackName.Split('|');
    var modifier = new Regex(@"^([A-Za-z0-9]+)([-+]\d)?");

    for (var groupCounter = 0; groupCounter < groups.Length; groupCounter++)
    {
        var configContainer = new ConfigContainer();
        var fields = groups[groupCounter].Split(';');
        if (fields.Length == 0)
        {
            continue;
        }

        // bmp 2.x style group name
        if (fields[0].StartsWith("vst:") || fields[0].Equals("lyrics"))
        {
            var subfields = fields[0].Split(':');
            switch (subfields[0])
            {
                case "vst" when subfields.Length < 2:
                    // "vst" with no tone after the colon.
                    BmpLog.W(BmpLog.Source.Transmogrify, "Skipping VST on track " + trackNumber + " due to the configuration not specifying a tone.");
                    continue;

                case "vst":
                    // Assignment expression: the container's ProcessorConfig and the
                    // local strongly-typed reference point at the same object.
                    var manualToneConfig = (VSTProcessorConfig)(configContainer.ProcessorConfig = new VSTProcessorConfig { Track = trackNumber });
                    manualToneConfig.InstrumentTone = InstrumentTone.Parse(subfields[1]);
                    if (manualToneConfig.InstrumentTone.Equals(InstrumentTone.None))
                    {
                        BmpLog.W(BmpLog.Source.Transmogrify, "Skipping VST on track " + trackNumber + " due to the configuration specifying an invalid tone.");
                        continue;
                    }

                    // Optional third subfield: comma-separated per-tone octave
                    // settings of the form "<toneIndex><octaveModifier>".
                    if (subfields.Length > 2)
                    {
                        var shifts = subfields[2].Split(',');
                        foreach (var shift in shifts)
                        {
                            var toneIndexAndOctaveRange = modifier.Match(shift);
                            if (!toneIndexAndOctaveRange.Success)
                            {
                                BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid VST octave setting \"" + shift + "\" on track " + trackNumber);
                                continue;
                            }
                            if (!toneIndexAndOctaveRange.Groups[1].Success)
                            {
                                BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid VST octave setting \"" + shift + "\" on track " + trackNumber + " because \"" + toneIndexAndOctaveRange.Groups[1].Value + "\" is not a valid tone number");
                                continue;
                            }
                            // Tone index must be an integer in [0, 4].
                            if (!int.TryParse(toneIndexAndOctaveRange.Groups[1].Value, out var toneIndex) || toneIndex < 0 || toneIndex > 4)
                            {
                                BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid VST octave setting \"" + shift + "\" on track " + trackNumber + " because \"" + toneIndexAndOctaveRange.Groups[1].Value + "\" is not a valid tone number");
                                continue;
                            }
                            // Default octave range; override with the parsed modifier
                            // when present and valid.
                            var octaveRange = OctaveRange.C3toC6;
                            if (toneIndexAndOctaveRange.Groups[2].Success)
                            {
                                octaveRange = OctaveRange.Parse(toneIndexAndOctaveRange.Groups[2].Value);
                            }
                            if (octaveRange.Equals(OctaveRange.Invalid))
                            {
                                octaveRange = OctaveRange.C3toC6;
                            }
                            manualToneConfig.OctaveRanges[toneIndex] = octaveRange;
                        }
                    }
                    ParseAdditionalOptions(trackNumber, manualToneConfig, song, fields);
                    BmpLog.I(BmpLog.Source.Transmogrify, "Found VST Config Group with on track " + manualToneConfig.Track + " ;bards=" + manualToneConfig.PlayerCount + ";include=" + string.Join(",", manualToneConfig.IncludedTracks));
                    configContainers.Add(groupCounter, configContainer);
                    continue;

                case "lyrics":
                    var lyricConfig = (LyricProcessorConfig)(configContainer.ProcessorConfig = new LyricProcessorConfig { Track = trackNumber });
                    ParseAdditionalOptions(trackNumber, lyricConfig, song, fields);
                    BmpLog.I(BmpLog.Source.Transmogrify, "Found Lyric Config on track " + lyricConfig.Track + " ;bards=" + lyricConfig.PlayerCount + ";include=" + string.Join(",", lyricConfig.IncludedTracks));
                    configContainers.Add(groupCounter, configContainer);
                    continue;
            }
        }
        // bmp 1.x style group name
        else
        {
            var classicConfig = (ClassicProcessorConfig)(configContainer.ProcessorConfig = new ClassicProcessorConfig { Track = trackNumber });
            // First field is "<instrument>[±octave]", e.g. "harp+1".
            var instrumentAndOctaveRange = modifier.Match(fields[0]);
            if (!instrumentAndOctaveRange.Success)
            {
                continue; // Invalid Instrument name.
            }
            if (instrumentAndOctaveRange.Groups[1].Success)
            {
                classicConfig.Instrument = Instrument.Parse(instrumentAndOctaveRange.Groups[1].Value);
            }
            if (classicConfig.Instrument.Equals(Instrument.None))
            {
                continue; // Invalid Instrument name.
            }
            if (instrumentAndOctaveRange.Groups[2].Success)
            {
                classicConfig.OctaveRange = OctaveRange.Parse(instrumentAndOctaveRange.Groups[2].Value);
            }
            // Unparseable octave modifiers fall back to the default range.
            if (classicConfig.OctaveRange.Equals(OctaveRange.Invalid))
            {
                classicConfig.OctaveRange = OctaveRange.C3toC6;
            }
            ParseAdditionalOptions(trackNumber, classicConfig, song, fields);
            BmpLog.I(BmpLog.Source.Transmogrify, "Found Classic Config Instrument " + classicConfig.Instrument.Name + " OctaveRange " + classicConfig.OctaveRange.Name + " on track " + classicConfig.Track + " ;bards=" + classicConfig.PlayerCount + ";include=" + string.Join(",", classicConfig.IncludedTracks));
            configContainers.Add(groupCounter, configContainer);
        }
    }

    // No group parsed successfully and the track was not ignored: fall back
    // to a single default classic (harp) configuration.
    if (configContainers.Count == 0)
    {
        BmpLog.I(BmpLog.Source.Transmogrify, "Found 0 configurations on track " + trackNumber + ", and the keyword \"Ignore\" is not in the track title. Adding a default harp.");
        configContainers.Add(0, new ConfigContainer { ProcessorConfig = new ClassicProcessorConfig { Track = trackNumber } });
    }
    return (configContainers);
}
public static MemoryStream ScrubFile(string filePath) { MidiFile midiFile; IEnumerable <TrackChunk> originalTrackChunks; TempoMap tempoMap; MidiFile newMidiFile; ConcurrentDictionary <int, TrackChunk> newTrackChunks; try { string md5 = CalculateMD5(filePath); if (lastMD5.Equals(md5) && lastFile != null) { var oldfile = new MemoryStream(); lastFile.Write(oldfile, MidiFileFormat.MultiTrack, new WritingSettings { CompressionPolicy = CompressionPolicy.NoCompression }); oldfile.Flush(); oldfile.Position = 0; return(oldfile); } if (Path.GetExtension(filePath).ToLower().Equals(".mmsong")) { var mmSongStream = MMSong.Open(filePath).GetMidiFile(false, true); lastFile = MidiFile.Read(mmSongStream); lastMD5 = md5; mmSongStream.Position = 0; return(mmSongStream); } midiFile = MidiFile.Read(filePath, new ReadingSettings { ReaderSettings = new ReaderSettings { ReadFromMemory = true }, InvalidChunkSizePolicy = InvalidChunkSizePolicy.Ignore, InvalidMetaEventParameterValuePolicy = InvalidMetaEventParameterValuePolicy.SnapToLimits, InvalidChannelEventParameterValuePolicy = InvalidChannelEventParameterValuePolicy.SnapToLimits, InvalidSystemCommonEventParameterValuePolicy = InvalidSystemCommonEventParameterValuePolicy.SnapToLimits, MissedEndOfTrackPolicy = MissedEndOfTrackPolicy.Ignore, NotEnoughBytesPolicy = NotEnoughBytesPolicy.Ignore, UnexpectedTrackChunksCountPolicy = UnexpectedTrackChunksCountPolicy.Ignore, UnknownChannelEventPolicy = UnknownChannelEventPolicy.SkipStatusByteAndOneDataByte, UnknownChunkIdPolicy = UnknownChunkIdPolicy.ReadAsUnknownChunk }); #region Require if (midiFile == null) { throw new ArgumentNullException(); } else { try { if (midiFile.Chunks.Count < 1) { throw new NotSupportedException(); } MidiFileFormat fileFormat = midiFile.OriginalFormat; if (fileFormat == MidiFileFormat.MultiSequence) { throw new NotSupportedException(); } } catch (Exception exception) when(exception is UnknownFileFormatException || exception is InvalidOperationException) { throw 
exception; } } #endregion var trackZeroName = midiFile.GetTrackChunks().First().Events.OfType <SequenceTrackNameEvent>().FirstOrDefault()?.Text; if (!string.IsNullOrEmpty(trackZeroName) && (trackZeroName.ToLower().Contains("mogamp") || trackZeroName.ToLower().Contains("mognotate"))) { var notateConfig = NotateConfig.GenerateConfigFromMidiFile(filePath); var mmSongStream = notateConfig.Transmogrify().GetMidiFile(false, true); lastFile = MidiFile.Read(mmSongStream); lastMD5 = md5; mmSongStream.Position = 0; return(mmSongStream); } Console.WriteLine("Scrubbing " + filePath); var loaderWatch = Stopwatch.StartNew(); originalTrackChunks = midiFile.GetTrackChunks(); tempoMap = midiFile.GetTempoMap(); newTrackChunks = new ConcurrentDictionary <int, TrackChunk>(); long firstNote = originalTrackChunks.GetNotes().First().GetTimedNoteOnEvent().TimeAs <MetricTimeSpan>(tempoMap).TotalMicroseconds / 1000; TrackChunk allTracks = new TrackChunk(); allTracks.AddNotes(originalTrackChunks.GetNotes()); midiFile.Chunks.Add(allTracks); originalTrackChunks = midiFile.GetTrackChunks(); Parallel.ForEach(originalTrackChunks.Where(x => x.GetNotes().Count() > 0), (originalChunk, loopState, index) => { var watch = Stopwatch.StartNew(); int noteVelocity = int.Parse(index.ToString()) + 1; Dictionary <int, Dictionary <long, Note> > allNoteEvents = new Dictionary <int, Dictionary <long, Note> >(); for (int i = 0; i < 127; i++) { allNoteEvents.Add(i, new Dictionary <long, Note>()); } foreach (Note note in originalChunk.GetNotes()) { long noteOnMS = 0; long noteOffMS = 0; try { noteOnMS = 5000 + (note.GetTimedNoteOnEvent().TimeAs <MetricTimeSpan>(tempoMap).TotalMicroseconds / 1000) - firstNote; noteOffMS = 5000 + (note.GetTimedNoteOffEvent().TimeAs <MetricTimeSpan>(tempoMap).TotalMicroseconds / 1000) - firstNote; } catch (Exception) { continue; } int noteNumber = note.NoteNumber; Note newNote = new Note(noteNumber: (SevenBitNumber)noteNumber, time: noteOnMS, length: noteOffMS - noteOnMS ) { Channel = 
(FourBitNumber)0, Velocity = (SevenBitNumber)noteVelocity, OffVelocity = (SevenBitNumber)noteVelocity }; if (allNoteEvents[noteNumber].ContainsKey(noteOnMS)) { Note previousNote = allNoteEvents[noteNumber][noteOnMS]; if (previousNote.Length < note.Length) { allNoteEvents[noteNumber][noteOnMS] = newNote; } } else { allNoteEvents[noteNumber].Add(noteOnMS, newNote); } } watch.Stop(); Debug.WriteLine("step 1: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); TrackChunk newChunk = new TrackChunk(); for (int i = 0; i < 127; i++) { long lastNoteTimeStamp = -1; foreach (var noteEvent in allNoteEvents[i]) { if (lastNoteTimeStamp >= 0 && allNoteEvents[i][lastNoteTimeStamp].Length + lastNoteTimeStamp >= noteEvent.Key) { allNoteEvents[i][lastNoteTimeStamp].Length = allNoteEvents[i][lastNoteTimeStamp].Length - (allNoteEvents[i][lastNoteTimeStamp].Length + lastNoteTimeStamp + 1 - noteEvent.Key); } lastNoteTimeStamp = noteEvent.Key; } } newChunk.AddNotes(allNoteEvents.SelectMany(s => s.Value).Select(s => s.Value).ToArray()); allNoteEvents = null; watch.Stop(); Debug.WriteLine("step 2: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); Note[] notesToFix = newChunk.GetNotes().Reverse().ToArray(); for (int i = 1; i < notesToFix.Count(); i++) { int noteNum = notesToFix[i].NoteNumber; long time = (notesToFix[i].GetTimedNoteOnEvent().Time); long dur = notesToFix[i].Length; int velocity = notesToFix[i].Velocity; long lowestParent = notesToFix[0].GetTimedNoteOnEvent().Time; for (int k = i - 1; k >= 0; k--) { long lastOn = notesToFix[k].GetTimedNoteOnEvent().Time; if (lastOn < lowestParent) { lowestParent = lastOn; } } if (lowestParent <= time + 50) { time = lowestParent - 50; if (time < 0) { continue; } notesToFix[i].Time = time; dur = 25; notesToFix[i].Length = dur; } } watch.Stop(); Debug.WriteLine("step 3: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); notesToFix = 
notesToFix.Reverse().ToArray(); List <Note> fixedNotes = new List <Note>(); for (int j = 0; j < notesToFix.Count(); j++) { var noteNum = notesToFix[j].NoteNumber; var time = notesToFix[j].Time; var dur = notesToFix[j].Length; var channel = notesToFix[j].Channel; var velocity = notesToFix[j].Velocity; if (j + 1 < notesToFix.Count()) { if (notesToFix[j + 1].Time <= notesToFix[j].Time + notesToFix[j].Length + 25) { dur = notesToFix[j + 1].Time - notesToFix[j].Time - 25; dur = dur < 25 ? 1 : dur; } } fixedNotes.Add(new Note(noteNum, dur, time) { Channel = channel, Velocity = velocity, OffVelocity = velocity }); } notesToFix = null; watch.Stop(); Debug.WriteLine("step 4: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); int octaveShift = 0; string trackName = originalChunk.Events.OfType <SequenceTrackNameEvent>().FirstOrDefault()?.Text; if (trackName == null) { trackName = ""; } trackName = trackName.ToLower().Trim().Replace(" ", String.Empty); Regex rex = new Regex(@"^([A-Za-z]+)([-+]\d)?"); if (rex.Match(trackName) is Match match) { if (!string.IsNullOrEmpty(match.Groups[1].Value)) { trackName = match.Groups[1].Value; if (!string.IsNullOrEmpty(match.Groups[2].Value)) { if (int.TryParse(match.Groups[2].Value, out int os)) { octaveShift = os; } } (bool success, string parsedTrackName) = TrackNameToEnumInstrumentName(trackName); if (success) { trackName = parsedTrackName; } else { (success, parsedTrackName) = TrackNameToStringInstrumentName(trackName); if (success) { trackName = parsedTrackName; } else { var originalInstrument = originalChunk.Events.OfType <ProgramChangeEvent>().FirstOrDefault()?.ProgramNumber; if (!(originalInstrument is null) && originalInstrument.Equals(typeof(SevenBitNumber))) { (success, parsedTrackName) = ProgramToStringInstrumentName((SevenBitNumber)originalInstrument); } if (success) { trackName = parsedTrackName; } } }
public static Sequence ScrubFile(string filePath) { MidiFile midiFile; IEnumerable <TrackChunk> originalTrackChunks; TempoMap tempoMap; MidiFile newMidiFile; ConcurrentDictionary <int, TrackChunk> newTrackChunks; Sequence sequence = null; try { midiFile = MidiFile.Read(filePath, new ReadingSettings { ReaderSettings = new ReaderSettings { ReadFromMemory = true } }); bool explode = false; #region Require if (midiFile == null) { throw new ArgumentNullException(); } else { try { if (midiFile.Chunks.Count < 1) { throw new NotSupportedException(); } MidiFileFormat fileFormat = midiFile.OriginalFormat; if (fileFormat == MidiFileFormat.MultiSequence) { throw new NotSupportedException(); } else if (fileFormat == MidiFileFormat.SingleTrack) { explode = true; } } catch (Exception exception) when(exception is UnknownFileFormatException || exception is InvalidOperationException) { throw exception; } } #endregion Console.WriteLine("Scrubbing " + filePath); var loaderWatch = Stopwatch.StartNew(); if (explode || midiFile.Chunks.Count == 1) { originalTrackChunks = midiFile.GetTrackChunks().First().Explode(); } else { originalTrackChunks = midiFile.GetTrackChunks(); } tempoMap = midiFile.GetTempoMap(); newTrackChunks = new ConcurrentDictionary <int, TrackChunk>(); long firstNote = originalTrackChunks.GetNotes().First().GetTimedNoteOnEvent().TimeAs <MetricTimeSpan>(tempoMap).TotalMicroseconds / 1000; Parallel.ForEach(originalTrackChunks.Where(x => x.GetNotes().Count() > 0), (originalChunk, loopState, index) => { var watch = Stopwatch.StartNew(); int noteVelocity = int.Parse(index.ToString()) + 1; Dictionary <int, Dictionary <long, Note> > allNoteEvents = new Dictionary <int, Dictionary <long, Note> >(); for (int i = 0; i < 127; i++) { allNoteEvents.Add(i, new Dictionary <long, Note>()); } // Fill the track dictionary and remove duplicate notes foreach (Note note in originalChunk.GetNotes()) { long noteOnMS = 0; long noteOffMS = 0; try { noteOnMS = note.GetTimedNoteOnEvent().TimeAs 
<MetricTimeSpan>(tempoMap).TotalMicroseconds / 1000 - firstNote; noteOffMS = note.GetTimedNoteOffEvent().TimeAs <MetricTimeSpan>(tempoMap).TotalMicroseconds / 1000 - firstNote; } catch (Exception) { continue; } // malformed note, most common is a note on missing a note off. int noteNumber = note.NoteNumber; Note newNote = new Note(noteNumber: (SevenBitNumber)noteNumber, time: noteOnMS, length: noteOffMS - noteOnMS ) { Channel = (FourBitNumber)0, Velocity = (SevenBitNumber)noteVelocity, OffVelocity = (SevenBitNumber)noteVelocity }; if (allNoteEvents[noteNumber].ContainsKey(noteOnMS)) { Note previousNote = allNoteEvents[noteNumber][noteOnMS]; if (previousNote.Length < note.Length) { allNoteEvents[noteNumber][noteOnMS] = newNote; // keep the longest of all duplicates } } else { allNoteEvents[noteNumber].Add(noteOnMS, newNote); } } watch.Stop(); Debug.WriteLine("step 1: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); // Merge all the dictionaries into one collection TrackChunk newChunk = new TrackChunk(); for (int i = 0; i < 127; i++) { long lastNoteTimeStamp = -1; foreach (var noteEvent in allNoteEvents[i]) { if (lastNoteTimeStamp >= 0 && allNoteEvents[i][lastNoteTimeStamp].Length + lastNoteTimeStamp >= noteEvent.Key) { allNoteEvents[i][lastNoteTimeStamp].Length = allNoteEvents[i][lastNoteTimeStamp].Length - (allNoteEvents[i][lastNoteTimeStamp].Length + lastNoteTimeStamp + 1 - noteEvent.Key); } lastNoteTimeStamp = noteEvent.Key; } } newChunk.AddNotes(allNoteEvents.SelectMany(s => s.Value).Select(s => s.Value).ToArray()); allNoteEvents = null; watch.Stop(); Debug.WriteLine("step 2: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); // auto arpeggiate Note[] notesToFix = newChunk.GetNotes().Reverse().ToArray(); for (int i = 1; i < notesToFix.Count(); i++) { int noteNum = notesToFix[i].NoteNumber; long time = (notesToFix[i].GetTimedNoteOnEvent().Time); long dur = notesToFix[i].Length; int velocity = 
notesToFix[i].Velocity; long lowestParent = notesToFix[0].GetTimedNoteOnEvent().Time; for (int k = i - 1; k >= 0; k--) { long lastOn = notesToFix[k].GetTimedNoteOnEvent().Time; if (lastOn < lowestParent) { lowestParent = lastOn; } } if (lowestParent <= time + 50) { time = lowestParent - 50; if (time < 0) { continue; } notesToFix[i].Time = time; dur = 25; notesToFix[i].Length = dur; } } watch.Stop(); Debug.WriteLine("step 3: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); notesToFix = notesToFix.Reverse().ToArray(); List <Note> fixedNotes = new List <Note>(); for (int j = 0; j < notesToFix.Count(); j++) { var noteNum = notesToFix[j].NoteNumber; var time = notesToFix[j].Time; var dur = notesToFix[j].Length; var channel = notesToFix[j].Channel; var velocity = notesToFix[j].Velocity; if (j + 1 < notesToFix.Count()) { if (notesToFix[j + 1].Time <= notesToFix[j].Time + notesToFix[j].Length + 25) { dur = notesToFix[j + 1].Time - notesToFix[j].Time - 25; dur = dur < 25 ? 
1 : dur; } } fixedNotes.Add(new Note(noteNum, dur, time) { Channel = channel, Velocity = velocity, OffVelocity = velocity }); } notesToFix = null; watch.Stop(); Debug.WriteLine("step 4: " + noteVelocity + ": " + watch.ElapsedMilliseconds); watch = Stopwatch.StartNew(); // Discover the instrument name from the track title, and from program changes if that fails int octaveShift = 0; string trackName = originalChunk.Events.OfType <SequenceTrackNameEvent>().FirstOrDefault()?.Text; if (trackName == null) { trackName = ""; } trackName = trackName.ToLower().Trim().Replace(" ", String.Empty); Regex rex = new Regex(@"^([A-Za-z]+)([-+]\d)?"); if (rex.Match(trackName) is Match match) { if (!string.IsNullOrEmpty(match.Groups[1].Value)) { trackName = match.Groups[1].Value; if (!string.IsNullOrEmpty(match.Groups[2].Value)) { if (int.TryParse(match.Groups[2].Value, out int os)) { octaveShift = os; } } (bool success, string parsedTrackName) = TrackNameToEnumInstrumentName(trackName); if (success) { trackName = parsedTrackName; } else { (success, parsedTrackName) = TrackNameToStringInstrumentName(trackName); if (success) { trackName = parsedTrackName; } else { var originalInstrument = originalChunk.Events.OfType <ProgramChangeEvent>().FirstOrDefault()?.ProgramNumber; if (!(originalInstrument is null) && originalInstrument.Equals(typeof(SevenBitNumber))) { (success, parsedTrackName) = ProgramToStringInstrumentName((SevenBitNumber)originalInstrument); } if (success) { trackName = parsedTrackName; } } }
/// <summary> /// /// </summary> /// <param name="trackChunk"></param> /// <param name="trackNumber"></param> /// <param name="song"></param> /// <returns></returns> internal static Dictionary <long, ConfigContainer> ReadConfigs(this TrackChunk trackChunk, int trackNumber, BmpSong song) { var configContainers = new Dictionary <long, ConfigContainer>(); if (trackChunk.GetNotes().Count == 0 && trackChunk.GetTimedEvents().All(x => x.Event.EventType != MidiEventType.Lyric)) { BmpLog.I(BmpLog.Source.Transmogrify, "Skipping track " + trackNumber + " as it contains no notes and contains no lyric events."); return(configContainers); } var trackName = (trackChunk.Events.OfType <SequenceTrackNameEvent>().FirstOrDefault()?.Text ?? "").Replace(" ", "").ToLower(); if (trackName.Contains("ignore")) { BmpLog.I(BmpLog.Source.Transmogrify, "Skipping track " + trackNumber + " as the track title contains \"Ignore\""); return(configContainers); } var groups = trackName.Split('|'); var modifier = new Regex(@"^([A-Za-z0-9]+)([-+]\d)?"); for (var groupCounter = 0; groupCounter < groups.Length; groupCounter++) { var configContainer = new ConfigContainer(); var fields = groups[groupCounter].Split(';'); if (fields.Length == 0) { continue; } // bmp 2.x style group name if (fields[0].StartsWith("manualtone:") || fields[0].StartsWith("notetone:") || fields[0].StartsWith("autotone:") || fields[0].StartsWith("drumtone:") || fields[0].Equals("drumtone") || fields[0].StartsWith("octavetone") || fields[0].Equals("lyric")) { var subfields = fields[0].Split(':'); switch (subfields[0]) { case "manualtone" when subfields.Length < 2: BmpLog.W(BmpLog.Source.Transmogrify, "Skipping ManualTone on track " + trackNumber + " due to the configuration not specifying a tone."); continue; case "manualtone": var manualToneConfig = (ManualToneProcessorConfig)(configContainer.ProcessorConfig = new ManualToneProcessorConfig { Track = trackNumber }); manualToneConfig.InstrumentTone = 
InstrumentTone.Parse(subfields[1]); if (manualToneConfig.InstrumentTone.Equals(InstrumentTone.None)) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping ManualTone on track " + trackNumber + " due to the configuration specifying an invalid tone."); continue; } if (subfields.Length > 2) { var shifts = subfields[2].Split(','); foreach (var shift in shifts) { var toneIndexAndOctaveRange = modifier.Match(shift); if (!toneIndexAndOctaveRange.Success) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid ManualTone octave setting \"" + shift + "\" on track " + trackNumber); continue; } if (!toneIndexAndOctaveRange.Groups[1].Success) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid ManualTone octave setting \"" + shift + "\" on track " + trackNumber + " because \"" + toneIndexAndOctaveRange.Groups[1].Value + "\" is not a valid tone number"); continue; } if (!int.TryParse(toneIndexAndOctaveRange.Groups[1].Value, out var toneIndex) || toneIndex < 0 || toneIndex > 4) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid ManualTone octave setting \"" + shift + "\" on track " + trackNumber + " because \"" + toneIndexAndOctaveRange.Groups[1].Value + "\" is not a valid tone number"); continue; } var octaveRange = OctaveRange.C3toC6; if (toneIndexAndOctaveRange.Groups[2].Success) { octaveRange = OctaveRange.Parse(toneIndexAndOctaveRange.Groups[2].Value); } if (octaveRange.Equals(OctaveRange.Invalid)) { octaveRange = OctaveRange.C3toC6; } manualToneConfig.OctaveRanges[toneIndex] = octaveRange; } } ParseAdditionalOptions(trackNumber, manualToneConfig, song, fields); BmpLog.I(BmpLog.Source.Transmogrify, "Found ManualTone Config Group with on track " + manualToneConfig.Track + " ;bards=" + manualToneConfig.PlayerCount + ";include=" + string.Join(",", manualToneConfig.IncludedTracks)); configContainers.Add(groupCounter, configContainer); continue; case "notetone" when subfields.Length < 2: BmpLog.W(BmpLog.Source.Transmogrify, "Skipping NoteTone on track " + trackNumber + " 
due to the configuration not specifying a tone."); continue; case "notetone": { var noteToneConfig = (NoteToneProcessorConfig)(configContainer.ProcessorConfig = new NoteToneProcessorConfig { Track = trackNumber }); noteToneConfig.InstrumentTone = InstrumentTone.Parse(subfields[1]); if (noteToneConfig.InstrumentTone.Equals(InstrumentTone.None)) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping NoteTone on track " + trackNumber + " due to the configuration specifying an invalid tone."); continue; } var noteToneSubConfigurations = 0; if (subfields.Length > 2) { subfields = subfields.Skip(2).ToArray(); foreach (var mapping in subfields) { var split = mapping.Split(','); if (split.Length != 3) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid NoteTone mapping \"" + mapping + "\" on track " + trackNumber); continue; } if (!int.TryParse(split[0], out var sourceNote) || sourceNote > 120 || sourceNote < 12) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid NoteTone mapping \"" + mapping + "\" on track " + trackNumber + " because source note \"" + split[0] + "\" is more then 120 or less then 12"); continue; } if (!int.TryParse(split[1], out var toneIndex) || toneIndex < -1 || toneIndex > 4 || (toneIndex == 0 && noteToneConfig.InstrumentTone.Tone0.Equals(Instrument.None)) || (toneIndex == 1 && noteToneConfig.InstrumentTone.Tone1.Equals(Instrument.None)) || (toneIndex == 2 && noteToneConfig.InstrumentTone.Tone2.Equals(Instrument.None)) || (toneIndex == 3 && noteToneConfig.InstrumentTone.Tone3.Equals(Instrument.None)) || (toneIndex == 4 && noteToneConfig.InstrumentTone.Tone4.Equals(Instrument.None))) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid NoteTone mapping \"" + mapping + "\" on track " + trackNumber + " because \"" + split[1] + "\" is not a valid tone number for Tone " + noteToneConfig.InstrumentTone.Name); continue; } if (!int.TryParse(split[2], out var destinationNote) || destinationNote < -1 || destinationNote > 36) { 
// NOTE(review): this span is the tail of a larger per-track configuration parser whose
// signature and opening (the enclosing foreach/switch and the locals `configContainer`,
// `configContainers`, `groupCounter`, `trackNumber`, `fields`, `subfields`, `modifier`,
// `song`) sit above this chunk and are not visible here. Code is left byte-identical;
// only comments are added.
// NOTE(review): several log strings say "more then"/"less then" (should be "than").
// They are runtime strings, so they are deliberately left unchanged in this pass.
//
// Below: end of the "notetone" case — destination-note range check (-1..36), recording
// the mapping into noteToneConfig.Mapper, rejecting the group when zero valid mappings
// were parsed, then registering the config container. Next, the "octavetone" case
// begins: a `when` guard rejects configs with no tone subfield; otherwise the tone is
// parsed (InstrumentTone.None is rejected) and each "src,tone,dst" mapping is split on
// ',' with the source octave validated to the range 0..8.
BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid NoteTone mapping \"" + mapping + "\" on track " + trackNumber + " because destination note \"" + split[2] + "\" is more then 36 or less then -1"); continue; } noteToneConfig.Mapper[sourceNote] = (toneIndex, destinationNote); noteToneSubConfigurations++; } } if (noteToneSubConfigurations == 0) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping NoteTone on track " + trackNumber + " because no mappings are specified."); continue; } ParseAdditionalOptions(trackNumber, noteToneConfig, song, fields); BmpLog.I(BmpLog.Source.Transmogrify, "Found NoteTone Config Group " + noteToneConfig.InstrumentTone.Name + " with " + noteToneSubConfigurations + " mappings on track " + noteToneConfig.Track + " ;bards=" + noteToneConfig.PlayerCount + ";include=" + string.Join(",", noteToneConfig.IncludedTracks)); configContainers.Add(groupCounter, configContainer); continue; } case "octavetone" when subfields.Length < 2: BmpLog.W(BmpLog.Source.Transmogrify, "Skipping OctaveTone on track " + trackNumber + " due to the configuration not specifying a tone."); continue; case "octavetone": var octaveToneConfig = (OctaveToneProcessorConfig)(configContainer.ProcessorConfig = new OctaveToneProcessorConfig { Track = trackNumber }); octaveToneConfig.InstrumentTone = InstrumentTone.Parse(subfields[1]); if (octaveToneConfig.InstrumentTone.Equals(InstrumentTone.None)) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping OctaveTone on track " + trackNumber + " due to the configuration specifying an invalid tone."); continue; } var octaveToneSubConfigurations = 0; if (subfields.Length > 2) { subfields = subfields.Skip(2).ToArray(); foreach (var mapping in subfields) { var split = mapping.Split(','); if (split.Length != 3) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid OctaveTone mapping \"" + mapping + "\" on track " + trackNumber); continue; } if (!int.TryParse(split[0], out var sourceOctave) || sourceOctave > 8 || sourceOctave < 0) { 
// "octavetone" continued: the tone index must parse, be in -1..4, AND the matching
// Tone0..Tone4 slot of the parsed InstrumentTone must not be Instrument.None; the
// destination octave must be in -1..3. Valid mappings go into
// octaveToneConfig.Mapper[sourceOctave]; a group with zero valid mappings is skipped.
// (The final configContainers.Add(...) call is split across the next source line.)
BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid OctaveTone mapping \"" + mapping + "\" on track " + trackNumber + " because source octave \"" + split[0] + "\" is more then 8 or less then 0"); continue; } if (!int.TryParse(split[1], out var toneIndex) || toneIndex < -1 || toneIndex > 4 || (toneIndex == 0 && octaveToneConfig.InstrumentTone.Tone0.Equals(Instrument.None)) || (toneIndex == 1 && octaveToneConfig.InstrumentTone.Tone1.Equals(Instrument.None)) || (toneIndex == 2 && octaveToneConfig.InstrumentTone.Tone2.Equals(Instrument.None)) || (toneIndex == 3 && octaveToneConfig.InstrumentTone.Tone3.Equals(Instrument.None)) || (toneIndex == 4 && octaveToneConfig.InstrumentTone.Tone4.Equals(Instrument.None))) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid OctaveTone mapping \"" + mapping + "\" on track " + trackNumber + " because \"" + split[1] + "\" is not a valid tone number for Tone " + octaveToneConfig.InstrumentTone.Name); continue; } if (!int.TryParse(split[2], out var destinationOctave) || destinationOctave < -1 || destinationOctave > 3) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid OctaveTone mapping \"" + mapping + "\" on track " + trackNumber + " because destination octave \"" + split[2] + "\" is more then 3 or less then -1"); continue; } octaveToneConfig.Mapper[sourceOctave] = (toneIndex, destinationOctave); octaveToneSubConfigurations++; } } if (octaveToneSubConfigurations == 0) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping OctaveTone on track " + trackNumber + " because no mappings are specified."); continue; } ParseAdditionalOptions(trackNumber, octaveToneConfig, song, fields); BmpLog.I(BmpLog.Source.Transmogrify, "Found OctaveTone Config Group " + octaveToneConfig.InstrumentTone.Name + " with " + octaveToneSubConfigurations + " mappings on track " + octaveToneConfig.Track + " ;bards=" + octaveToneConfig.PlayerCount + ";include=" + string.Join(",", octaveToneConfig.IncludedTracks)); configContainers.Add(groupCounter, 
// "autotone" case: a `when` guard requires a group subfield; `modifier` (a Regex
// declared above this chunk — presumably a "name + optional octave-range" pattern,
// TODO confirm against its definition) extracts an AutoToneInstrumentGroup and an
// optional AutoToneOctaveRange, with the range defaulting to C2toC7 when missing or
// invalid. Then the "drumtone" case starts (its `= 0` initializer continues on the
// next source line).
configContainer); continue; case "autotone" when subfields.Length < 2: BmpLog.W(BmpLog.Source.Transmogrify, "Skipping AutoTone on track " + trackNumber + " due to the configuration not specifying an autotone group."); continue; case "autotone": { var autoToneConfig = (AutoToneProcessorConfig)(configContainer.ProcessorConfig = new AutoToneProcessorConfig { Track = trackNumber }); var instrumentAndOctaveRange = modifier.Match(subfields[1]); if (!instrumentAndOctaveRange.Success) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping AutoTone on track " + trackNumber + " due to the configuration specifying an invalid autotone group."); continue; } if (instrumentAndOctaveRange.Groups[1].Success) { autoToneConfig.AutoToneInstrumentGroup = AutoToneInstrumentGroup.Parse(instrumentAndOctaveRange.Groups[1].Value); } if (autoToneConfig.AutoToneInstrumentGroup.Equals(AutoToneInstrumentGroup.Invalid)) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping AutoTone on track " + trackNumber + " due to the configuration specifying an invalid autotone group."); continue; } if (instrumentAndOctaveRange.Groups[2].Success) { autoToneConfig.AutoToneOctaveRange = AutoToneOctaveRange.Parse(instrumentAndOctaveRange.Groups[2].Value); } if (autoToneConfig.AutoToneOctaveRange.Equals(AutoToneOctaveRange.Invalid)) { autoToneConfig.AutoToneOctaveRange = AutoToneOctaveRange.C2toC7; } ParseAdditionalOptions(trackNumber, autoToneConfig, song, fields); BmpLog.I(BmpLog.Source.Transmogrify, "Found AutoTone Config Group " + autoToneConfig.AutoToneInstrumentGroup.Name + " OctaveRange " + autoToneConfig.AutoToneOctaveRange.Name + " on track " + autoToneConfig.Track + " ;bards=" + autoToneConfig.PlayerCount + ";include=" + string.Join(",", autoToneConfig.IncludedTracks)); configContainers.Add(groupCounter, configContainer); continue; } case "drumtone": var drumToneConfig = (DrumToneProcessorConfig)(configContainer.ProcessorConfig = new DrumToneProcessorConfig { Track = trackNumber }); var drumToneSubConfigurations 
// "drumtone" mappings: source note must be 27..87, the tone index is validated
// against the fixed InstrumentTone.Drums tone slots (not a user-chosen tone), and the
// destination note must be -1..36. Unlike NoteTone/OctaveTone, zero valid mappings is
// NOT rejected here — the log calls them "override mappings", so an empty set is
// acceptable for drums.
= 0; if (subfields.Length > 1) { subfields = subfields.Skip(1).ToArray(); foreach (var mapping in subfields) { var split = mapping.Split(','); if (split.Length != 3) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid DrumTone mapping \"" + mapping + "\" on track " + trackNumber); continue; } if (!int.TryParse(split[0], out var sourceNote) || sourceNote > 87 || sourceNote < 27) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid DrumTone mapping \"" + mapping + "\" on track " + trackNumber + " because source note \"" + split[0] + "\" is more then 87 or less then 27"); continue; } if (!int.TryParse(split[1], out var toneIndex) || toneIndex < -1 || toneIndex > 4 || (toneIndex == 0 && InstrumentTone.Drums.Tone0.Equals(Instrument.None)) || (toneIndex == 1 && InstrumentTone.Drums.Tone1.Equals(Instrument.None)) || (toneIndex == 2 && InstrumentTone.Drums.Tone2.Equals(Instrument.None)) || (toneIndex == 3 && InstrumentTone.Drums.Tone3.Equals(Instrument.None)) || (toneIndex == 4 && InstrumentTone.Drums.Tone4.Equals(Instrument.None))) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid DrumTone mapping \"" + mapping + "\" on track " + trackNumber + " because \"" + split[1] + "\" is not a valid tone number for Tone " + InstrumentTone.Drums.Name); continue; } if (!int.TryParse(split[2], out var destinationNote) || destinationNote < -1 || destinationNote > 36) { BmpLog.W(BmpLog.Source.Transmogrify, "Skipping invalid DrumTone mapping \"" + mapping + "\" on track " + trackNumber + " because destination note \"" + split[2] + "\" is more then 36 or less then -1"); continue; } drumToneConfig.Mapper[sourceNote] = (toneIndex, destinationNote); drumToneSubConfigurations++; } } ParseAdditionalOptions(trackNumber, drumToneConfig, song, fields); BmpLog.I(BmpLog.Source.Transmogrify, "Found DrumTone Config Group with " + drumToneSubConfigurations + " override mappings on track " + drumToneConfig.Track + " ;bards=" + drumToneConfig.PlayerCount + ";include=" + string.Join(",", 
// "lyric" case: no sub-options beyond the shared ParseAdditionalOptions. After the
// switch, the else-branch handles bmp 1.x style group names: fields[0] is run through
// the same `modifier` regex into an Instrument (+ optional OctaveRange, defaulting to
// C3toC6); an unparseable or None instrument silently skips the group (no log, per
// the inline "// Invalid Instrument name." comments). Finally, when zero
// configurations were collected, a default AutoTone config is added. NOTE(review):
// the log string literal below appears to be split across the next source line break
// — likely a chunking artifact; verify the original file compiles as-is. No comment
// is inserted at that boundary to avoid landing inside the literal.
drumToneConfig.IncludedTracks)); configContainers.Add(groupCounter, configContainer); continue; case "lyric": var lyricConfig = (LyricProcessorConfig)(configContainer.ProcessorConfig = new LyricProcessorConfig { Track = trackNumber }); ParseAdditionalOptions(trackNumber, lyricConfig, song, fields); BmpLog.I(BmpLog.Source.Transmogrify, "Found Lyric Config on track " + lyricConfig.Track + " ;bards=" + lyricConfig.PlayerCount + ";include=" + string.Join(",", lyricConfig.IncludedTracks)); configContainers.Add(groupCounter, configContainer); continue; } } // bmp 1.x style group name else { var classicConfig = (ClassicProcessorConfig)(configContainer.ProcessorConfig = new ClassicProcessorConfig { Track = trackNumber }); var instrumentAndOctaveRange = modifier.Match(fields[0]); if (!instrumentAndOctaveRange.Success) { continue; // Invalid Instrument name. } if (instrumentAndOctaveRange.Groups[1].Success) { classicConfig.Instrument = Instrument.Parse(instrumentAndOctaveRange.Groups[1].Value); } if (classicConfig.Instrument.Equals(Instrument.None)) { continue; // Invalid Instrument name. } if (instrumentAndOctaveRange.Groups[2].Success) { classicConfig.OctaveRange = OctaveRange.Parse(instrumentAndOctaveRange.Groups[2].Value); } if (classicConfig.OctaveRange.Equals(OctaveRange.Invalid)) { classicConfig.OctaveRange = OctaveRange.C3toC6; } ParseAdditionalOptions(trackNumber, classicConfig, song, fields); BmpLog.I(BmpLog.Source.Transmogrify, "Found Classic Config Instrument " + classicConfig.Instrument.Name + " OctaveRange " + classicConfig.OctaveRange.Name + " on track " + classicConfig.Track + " ;bards=" + classicConfig.PlayerCount + ";include=" + string.Join(",", classicConfig.IncludedTracks)); configContainers.Add(groupCounter, configContainer); } } if (configContainers.Count == 0) { BmpLog.I(BmpLog.Source.Transmogrify, "Found 0 configurations on track " + trackNumber + ", and the keyword \"Ignore\" is not in the track title. 
Adding a default AutoTone."); configContainers.Add(0, new ConfigContainer { ProcessorConfig = new AutoToneProcessorConfig { Track = trackNumber } }); } return(configContainers); }