/// <summary>
/// Dispatches a MIDI event to the virtual handler matching its message type.
/// SystemRealtime messages are intentionally not dispatched here.
/// </summary>
/// <param name="message">The MIDI event to dispatch; must not be null.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="message"/> is null.
/// </exception>
public void Dispatch(MidiEvent message)
{
    #region Require

    if (message == null)
    {
        // nameof keeps the parameter name refactor-safe.
        throw new ArgumentNullException(nameof(message));
    }

    #endregion

    switch (message.Type)
    {
        case MessageType.Channel:
            OnChannelMessageDispatched(message);
            break;

        case MessageType.SystemExclusive:
            OnSysExMessageDispatched(message);
            break;

        case MessageType.Meta:
            OnMetaMessageDispatched(message);
            break;

        case MessageType.SystemCommon:
            OnSysCommonMessageDispatched(message);
            break;

        case MessageType.SystemRealtime:
            // Realtime messages are deliberately ignored by this dispatcher.
            break;
    }
}
/// <summary>
/// Raises the <c>ChannelMessageDispatched</c> event with the given MIDI event.
/// </summary>
/// <param name="e">The MIDI event to pass to subscribers.</param>
protected virtual void OnChannelMessageDispatched(MidiEvent e)
{
    // Null-conditional invoke is the thread-safe equivalent of the
    // copy-to-local-then-check pattern.
    ChannelMessageDispatched?.Invoke(this, e);
}
/// <summary>
/// Raises the <c>PlayEvent</c> callback with the current state, score and events.
/// </summary>
/// <param name="state">Current playback state.</param>
/// <param name="score">Current score value.</param>
/// <param name="ev">The MIDI events associated with this tick.</param>
protected virtual void OnPlaying(State state, int score, MidiEvent[] ev)
{
    PlayEvent?.Invoke(state, score, ev);
}
/// <summary>
/// Raises the <c>LearningEvent</c> callback.
/// (Name keeps the original spelling "OnLearing" — presumably "OnLearning";
/// renaming would break overriders, so it is left as-is.)
/// </summary>
/// <param name="right">Whether the played note was correct.</param>
/// <param name="ev">The MIDI event that was evaluated.</param>
protected virtual void OnLearing(Boolean right, MidiEvent ev)
{
    LearningEvent?.Invoke(right, ev);
}
/// <summary>
/// Forwards state-bearing channel events (control change, program change,
/// channel pressure, pitch bend) to the channel-message handler so device
/// state can be "chased" up to the current position. All other event types
/// are ignored.
/// </summary>
/// <param name="ev">The MIDI event to examine.</param>
public void Chase(MidiEvent ev)
{
    bool isStateEvent =
        ev.EventFlag == MidiFile.EventControlChange ||
        ev.EventFlag == MidiFile.EventProgramChange ||
        ev.EventFlag == MidiFile.EventChannelPressure ||
        ev.EventFlag == MidiFile.EventPitchBend;

    if (isStateEvent)
    {
        OnChannelMessageDispatched(ev);
    }
}
/// <summary>
/// Applies a single MIDI event to the software synthesizer: channel events
/// (program change, note on/off, pitch bend, controllers) update synth state;
/// otherwise the event is treated as a meta event (only Tempo is handled).
/// </summary>
/// <param name="midiEvent">The event to process.</param>
public void ProcessMidiEvent(MidiEvent midiEvent)
{
    if (midiEvent.midiChannelEvent != MidiHelper.MidiChannelEvent.None)
    {
        switch (midiEvent.midiChannelEvent)
        {
            case MidiHelper.MidiChannelEvent.Program_Change:
                // Channel 9 is the percussion channel, validated against the
                // drum bank instead of the melodic instrument bank.
                if (midiEvent.channel != 9)
                {
                    if (midiEvent.parameter1 < synth.SoundBank.InstrumentCount)
                    {
                        currentPrograms[midiEvent.channel] = midiEvent.parameter1;
                    }
                }
                else //its the drum channel
                {
                    if (midiEvent.parameter1 < synth.SoundBank.DrumCount)
                    {
                        currentPrograms[midiEvent.channel] = midiEvent.parameter1;
                    }
                }
                break;
            case MidiHelper.MidiChannelEvent.Note_On:
                // Blocked channels are silenced entirely (early return skips
                // any later events in this call, which is fine: one event per call).
                if (blockList.Contains(midiEvent.channel))
                {
                    return;
                }
                if (this.NoteOnEvent != null)
                {
                    this.NoteOnEvent(midiEvent.channel, midiEvent.parameter1, midiEvent.parameter2);
                }
                // NOTE(review): eunjinset appears to be a global transpose
                // offset added to the note number — confirm intended semantics.
                synth.NoteOn(midiEvent.channel, midiEvent.parameter1 + this.eunjinset, midiEvent.parameter2, currentPrograms[midiEvent.channel]);
                break;
            case MidiHelper.MidiChannelEvent.Note_Off:
                if (this.NoteOffEvent != null)
                {
                    this.NoteOffEvent(midiEvent.channel, midiEvent.parameter1);
                }
                synth.NoteOff(midiEvent.channel, midiEvent.parameter1);
                break;
            case MidiHelper.MidiChannelEvent.Pitch_Bend:
                //Store PitchBend as the # of semitones higher or lower
                synth.TunePositions[midiEvent.channel] = (double)midiEvent.Parameters[1] * PitchWheelSemitoneRange;
                break;
            case MidiHelper.MidiChannelEvent.Controller:
                switch (midiEvent.GetControllerType())
                {
                    case MidiHelper.ControllerType.AllNotesOff:
                        synth.NoteOffAll(true);
                        break;
                    case MidiHelper.ControllerType.MainVolume:
                        // Normalize CC value 0..127 to 0..1.
                        synth.VolPositions[midiEvent.channel] = midiEvent.parameter2 / 127.0f;
                        break;
                    case MidiHelper.ControllerType.Pan:
                        // Pan is centered at 64; the +63 extreme is clamped to
                        // exactly 1.0 because 63/64 would not reach full right.
                        synth.PanPositions[midiEvent.channel] = (midiEvent.parameter2 - 64) == 63 ?
                            1.00f : (midiEvent.parameter2 - 64) / 64.0f;
                        break;
                    case MidiHelper.ControllerType.ResetControllers:
                        ResetControllers();
                        break;
                    default:
                        break;
                }
                break;
            default:
                break;
        }
    }
    else
    {
        switch (midiEvent.midiMetaEvent)
        {
            case MidiHelper.MidiMetaEvent.Tempo:
                // Tempo meta event stores microseconds per quarter note;
                // convert to BPM.
                _MidiFile.BeatsPerMinute = MidiHelper.MicroSecondsPerMinute / System.Convert.ToUInt32(midiEvent.Parameters[0]);
                break;
            default:
                break;
        }
    }
}
// ------------------------------------------------------------------------------------------------------
/// <summary>
/// Handles an incoming MIDI event from the monitored input: logs it, and if it
/// is a control-change message (status 0xB0) routes the CC value to whichever
/// target(s) the UI's numeric controls are mapped to. Note that several
/// mappings can fire for the same CC number if controls share a value.
/// On any exception, monitoring is stopped.
/// </summary>
/// <param name="ev">The incoming MIDI event.</param>
private void _onMidiEvent(MidiEvent ev)
{
    try
    {
        string msg = _midiEventToLog(ev);
        _appendText(msg + Environment.NewLine);
        // Strip the channel nibble; 0xB0 == control change.
        int shortType = ev.Status & 0xF0;
        // AllData[1] is the controller number, AllData[2] the controller value.
        int cc = ev.AllData[1];
        if ((shortType == 0xb0))
        {
            if (cc == nudBrightness.Value) { EEGLogMain.sendBrightnessOsc(ev.AllData[2]); }
            // EEG band offsets: CC value 0..127 is rescaled to 0..100 percent.
            if (cc == this.nudOffsetAlphaL.Value) { _sendChannelOffsetChanged("AlphaL", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetAlphaH.Value) { _sendChannelOffsetChanged("AlphaH", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetBetaL.Value) { _sendChannelOffsetChanged("BetaL", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetBetaH.Value) { _sendChannelOffsetChanged("BetaH", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetGammaL.Value) { _sendChannelOffsetChanged("GammaL", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetGammaH.Value) { _sendChannelOffsetChanged("GammaH", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetMeditation.Value) { _sendChannelOffsetChanged("Meditation", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetAttention.Value) { _sendChannelOffsetChanged("Attention", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetTheta.Value) { _sendChannelOffsetChanged("Theta", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            if (cc == this.nudOffsetDelta.Value) { _sendChannelOffsetChanged("Delta", (int)(100 * ev.AllData[2] * 1.0f / 127)); }
            // FX and mixer parameters forward the raw 0..127 CC value.
            if (cc == this.nudFx1Param1.Value) { _sendMidiCC("FX1/Param1", ev.AllData[2]); }
            if (cc == this.nudFx1Param2.Value) { _sendMidiCC("FX1/Param2", ev.AllData[2]); }
            if (cc == this.nudFx1Param3.Value) { _sendMidiCC("FX1/Param3", ev.AllData[2]); }
            if (cc == this.nudFx2Param1.Value) { _sendMidiCC("FX2/Param1", ev.AllData[2]); }
            if (cc == this.nudFx2Param2.Value) { _sendMidiCC("FX2/Param2", ev.AllData[2]); }
            if (cc == this.nudFx2Param3.Value) { _sendMidiCC("FX2/Param3", ev.AllData[2]); }
            // NOTE(review): FX3 only maps Param1/Param2 here — confirm Param3
            // is intentionally absent.
            if (cc == this.nudFx3Param1.Value) { _sendMidiCC("FX3/Param1", ev.AllData[2]); }
            if (cc == this.nudFx3Param2.Value) { _sendMidiCC("FX3/Param2", ev.AllData[2]); }
            if (cc == this.nudAttentionVolume.Value) { _sendMidiCC("Mixer/AttentionLevel", ev.AllData[2]); }
            if (cc == this.nudMeditationVolume.Value) { _sendMidiCC("Mixer/MeditationLevel", ev.AllData[2]); }
            if (cc == this.nudMasterVolume.Value) { _sendMidiCC("Mixer/MasterLevel", ev.AllData[2]); }
        }
    }
    catch (Exception)
    {
        // Any failure (device gone, UI disposed, ...) aborts monitoring.
        _stopMonitoring();
    }
}
/// <summary>
/// Test: imports "For Whom the Bell Tolls", groups NoteOn events of the first
/// guitar track into chords (notes sharing the same start time), and asserts
/// the contents of the first two chords. A DeltaTime &gt; 0 starts a new chord;
/// NoteOn with Velocity 0 marks a note-off "pause" group that is skipped.
/// </summary>
public void CheckTheFirstChord()
{
    var sequence = MidiSequence.Import(MIDI_for_whom_the_bell_tolls);
    // Track index 2 is the first guitar track in this file.
    var guitar1Track = sequence.GetTracks()[2];
    var listOfChords = new List <List <NoteOn> >();
    List <NoteOn> chordNotes = null;
    bool isPause = false;
    foreach (MidiEvent midiEvent in guitar1Track.Events)
    {
        if (midiEvent is NoteOn)
        {
            var note = (NoteOn)midiEvent;
            if (note.DeltaTime > 0)
            {
                // A nonzero delta starts a new simultaneous group; flush the
                // previous chord if it collected any sounding notes.
                if ((chordNotes != null) && (chordNotes.Count > 0))
                {
                    listOfChords.Add(chordNotes);
                }
                chordNotes = new List <NoteOn>();
                isPause = (note.Velocity == 0); //used for the next notes of the acord (same time)
            }
            // NOTE(review): if the very first NoteOn had DeltaTime == 0,
            // chordNotes would still be null here — relies on the test file's
            // first note having a positive delta.
            if ((note.Velocity > 0) && (!isPause))
            {
                chordNotes.Add(note);
            }
        }
    }
    //start on 1680 from the beggining
    {
        var chord = listOfChords[0];
        Assert.AreEqual(3, chord.Count);
        Assert.AreEqual(1680, chord[0].DeltaTime);
        Assert.AreEqual(100, chord[0].Velocity);
        Assert.AreEqual("C#4", MidiEvent.GetNoteName(chord[0].Note));
        Assert.AreEqual("F#4", MidiEvent.GetNoteName(chord[1].Note));
        Assert.AreEqual("F#3", MidiEvent.GetNoteName(chord[2].Note));
    }
    //start on 40 after the last event
    {
        var chord = listOfChords[1];
        Assert.AreEqual(3, chord.Count);
        Assert.AreEqual(40, chord[0].DeltaTime);
        Assert.AreEqual(100, chord[0].Velocity);
        Assert.AreEqual("C#4", MidiEvent.GetNoteName(chord[0].Note));
        Assert.AreEqual("F#4", MidiEvent.GetNoteName(chord[1].Note));
        Assert.AreEqual("F#3", MidiEvent.GetNoteName(chord[2].Note));
    }
    //and so on...
    /*
     * track.Events.Add(new NoteOn(1680, 1, "C#4", 100));
     * track.Events.Add(new NoteOn(0, 1, "F#4", 100));
     * track.Events.Add(new NoteOn(0, 1, "F#3", 100));
     * track.Events.Add(new NoteOn(40, 1, "F#3", 0));
     * track.Events.Add(new NoteOn(0, 1, "F#4", 0));
     * track.Events.Add(new NoteOn(0, 1, "C#4", 0));
     * track.Events.Add(new NoteOn(0, 1, "C#4", 100));
     * track.Events.Add(new NoteOn(0, 1, "F#4", 100));
     * track.Events.Add(new NoteOn(0, 1, "F#3", 100));
     * track.Events.Add(new NoteOn(40, 1, "F#3", 0));
     * track.Events.Add(new NoteOn(0, 1, "F#4", 0));
     * track.Events.Add(new NoteOn(0, 1, "C#4", 0));
     * track.Events.Add(new NoteOn(40, 1, "C#4", 100));
     * track.Events.Add(new NoteOn(0, 1, "F#4", 100));
     * track.Events.Add(new NoteOn(0, 1, "F#3", 100));
     */
}
/// <summary>
/// Background read loop for an ALSA raw-MIDI input: polls the device's file
/// descriptors, reads available bytes, parses them into MidiMessages, and
/// raises <c>MidiInputReceived</c> for each complete message. Runs until
/// <c>stop</c> is set, the device hangs up, or an unrecoverable error occurs.
/// </summary>
private void ReadThreadStart()
{
    byte[] buf = new byte[512];
    // Holds leftover bytes of a message that was only partially read.
    byte[] overflowBuf = null;

    // Compute number of pollfd structures required to poll
    int numPollStructs = AlsaNativeMethods.Snd_rawmidi_poll_descriptors_count(handle);

    // Allocate space for the poll structures
    IntPtr pollAreaPtr = Marshal.AllocHGlobal(numPollStructs * AlsaNativeMethods.POLLFDSZ);
    int st = AlsaNativeMethods.Snd_rawmidi_poll_descriptors(handle, pollAreaPtr, (uint)numPollStructs);

    while (!stop)
    {
        // 200 ms timeout keeps the loop responsive to the stop flag.
        st = AlsaNativeMethods.Poll(pollAreaPtr, numPollStructs, 200);
        if (st < 0)
        {
            var errno = Marshal.GetLastWin32Error();
            if (errno == 4 /* EINTR */)
            {
                continue;
            }
            Console.WriteLine("MidiIn: Cannot poll - errno = " + errno);
            break;
        }
        if (st == 0)
        {
            // Timeout, nothing ready.
            continue;
        }

        // We got something - scan the poll struct
        ushort revent = 0;
        st = AlsaNativeMethods.Snd_rawmidi_poll_descriptors_revents(
            handle, pollAreaPtr, (uint)numPollStructs, ref revent);
        if (st < 0)
        {
            Console.WriteLine("MidiIn: Cannot parse poll - " + AlsaUtils.StrError(st));
            break;
        }
        AlsaNativeMethods.EPoll evt = (AlsaNativeMethods.EPoll)revent;
        if (evt.HasFlag(AlsaNativeMethods.EPoll.POLLERR) ||
            evt.HasFlag(AlsaNativeMethods.EPoll.POLLHUP))
        {
            // Device error or hang-up: terminate the read loop.
            break;
        }
        if (evt.HasFlag(AlsaNativeMethods.EPoll.POLLIN))
        {
            // We have something to read - read it
            int nRead = (int)AlsaNativeMethods.Snd_rawmidi_read(handle, buf, (uint)buf.Length);
            if (nRead < 0)
            {
                int errno = Marshal.GetLastWin32Error();
                if (errno == 4 /* EINTR */)
                {
                    continue;
                }
                Console.WriteLine("MidiIn: Cannot parse poll - " + AlsaUtils.StrError(st));
                break;
            }

            // Combine with overflow buffer if we had an overflow last time
            if (overflowBuf != null)
            {
                var newBuf = new byte[buf.Length + overflowBuf.Length];
                Array.Copy(overflowBuf, 0, newBuf, 0, overflowBuf.Length);
                Array.Copy(buf, 0, newBuf, overflowBuf.Length, buf.Length);
                buf = newBuf;
                overflowBuf = null;
                // NOTE(review): nRead is not increased by overflowBuf.Length
                // here, so the previously-buffered bytes may never be parsed —
                // confirm against ParseBuffer's contract.
            }

            // Translate the input to midi message
            int nParsed = 0;
            while (true)
            {
                st = ParseBuffer(buf, nParsed, nRead, out MidiMessage midiMessage);
                if (st < 0)
                {
                    // buffer contains an incomplete message
                    // NOTE(review): there is no break/continue after saving the
                    // overflow, so the loop re-enters ParseBuffer with a fresh
                    // empty buf and the old nRead — looks like a missing
                    // `break;` here; verify before relying on this path.
                    overflowBuf = buf;
                    buf = new byte[overflowBuf.Length];
                }
                else if (st == 0)
                {
                    // Message not supported - drop everything.
                    break;
                }
                else // st > 0
                {
                    // Send message
                    var midiEvent = new MidiEvent(midiMessage, 0);
                    var midiEventArg = new WindowsMultiMedia.WindowsMidiEventArgs(midiEvent, IntPtr.Zero, IntPtr.Zero);
                    MidiInputReceived?.Invoke(this, midiEventArg);

                    // Update counters
                    nParsed += st;
                    nRead -= st;
                    if (nRead <= 0)
                    {
                        // Normal end
                        break;
                    }
                }
            }
        }
    }
    Marshal.FreeHGlobal(pollAreaPtr);
}
/// <inheritdoc />
public void AddBend(int track, int tick, byte channel, int value)
{
    // NOTE(review): the `track` parameter is unused here; the event is added
    // directly to _midiFile (interface-mandated signature).
    var command = MakeCommand((byte)MidiEventType.PitchBend, channel);
    var message = new MidiEvent(tick, command, 0, FixValue(value));
    _midiFile.AddEvent(message);
}
/// <summary>
/// Resolves and sends the playback event for a note, honoring the optional
/// NoteCallback which may substitute or suppress the note. Tracks active notes
/// in <c>_activeNotesMetadata</c>. Returns false only when
/// <paramref name="noteMetadata"/> is null; <paramref name="note"/> is set to
/// the (possibly effective) note when an event was sent, otherwise null.
/// </summary>
private bool TryPlayNoteEvent(NotePlaybackEventMetadata noteMetadata, MidiEvent midiEvent, bool isNoteOnEvent, TimeSpan time, out Note note)
{
    note = null;
    if (noteMetadata == null)
    {
        return(false);
    }

    var notePlaybackData = noteMetadata.NotePlaybackData;
    // Snapshot the callback to avoid a race with unsubscription.
    var noteCallback = NoteCallback;
    if (noteCallback != null && midiEvent is NoteOnEvent)
    {
        // The callback may replace the playback data for this note.
        notePlaybackData = noteCallback(noteMetadata.RawNotePlaybackData, noteMetadata.RawNote.Time, noteMetadata.RawNote.Length, time);
        noteMetadata.SetCustomNotePlaybackData(notePlaybackData);
    }

    note = noteMetadata.RawNote;
    if (noteMetadata.IsCustomNotePlaybackDataSet)
    {
        if (notePlaybackData == null || !notePlaybackData.PlayNote)
        {
            // Note suppressed by custom data: nothing will be sent.
            midiEvent = null;
        }
        else
        {
            note = noteMetadata.GetEffectiveNote();
            // NOTE(review): this branch keys on `midiEvent is NoteOnEvent`
            // while the fallback below keys on the `isNoteOnEvent` flag —
            // confirm the two are always consistent for callers.
            midiEvent = midiEvent is NoteOnEvent ? (MidiEvent)notePlaybackData.GetNoteOnEvent() : notePlaybackData.GetNoteOffEvent();
        }
    }
    else if (midiEvent == null)
    {
        midiEvent = isNoteOnEvent ? (MidiEvent)notePlaybackData.GetNoteOnEvent() : notePlaybackData.GetNoteOffEvent();
    }

    if (midiEvent != null)
    {
        SendEvent(midiEvent);
        if (midiEvent is NoteOnEvent)
        {
            _activeNotesMetadata.TryAdd(noteMetadata, 0);
        }
        else
        {
            byte value;
            _activeNotesMetadata.TryRemove(noteMetadata, out value);
        }
    }
    else
    {
        // Nothing was sent; report no note to the caller.
        note = null;
    }

    return(true);
}
/// <summary>
/// Handles tick events generated by the tick generator. Plays every MIDI
/// event whose remaining tick count has reached zero (several events may
/// share one tick), then decrements the current event's countdown by one.
/// Raises <c>EndOfTrackReached</c> when the enumerator is exhausted.
/// </summary>
/// <param name="sender">
/// The tick generator responsible for the tick event.
/// </param>
/// <param name="e">
/// Information about the event.
/// </param>
private void TickHandler(object sender, EventArgs e)
{
    // Guard.
    if(endReached)
        return;

    // While it is time to play the current MIDI event.
    while(currentEvent.Ticks == 0)
    {
        // Visit the MIDI message (visitor pattern dispatch).
        currentEvent.Message.Accept(this);

        //JB
        if (TrackEvent!=null)
            TrackEvent(this, currentEvent);

        // Move to the next MIDI event in the track.
        endReached = !enumerator.MoveNext();

        // If the end of the track has been reached, raise event.
        if(endReached)
        {
            if(EndOfTrackReached != null)
                EndOfTrackReached(this, EventArgs.Empty);

            return;
        }
        // Else the end of the track has not been reached, get next
        // MIDI event.
        else
        {
            currentEvent = (MidiEvent)enumerator.Current;
        }
    }

    // Move the current event forward in time by one tick.
    currentEvent.Ticks--;
}
/// <summary>
/// Seeks the specified position in the track. Walks events from the start,
/// accumulating their tick offsets and feeding each passed message to a
/// MidiChaser so that controller/program state is correct at the target
/// position; finally sets the current event's remaining ticks to the
/// distance left to the target.
/// </summary>
/// <param name="position">
/// The position in ticks to seek.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="position"/> is negative.
/// </exception>
public void Seek(int position)
{
    // Enforce preconditions.
    if(position < 0)
        throw new ArgumentOutOfRangeException("position", position,
            "Position out of range.");

    // Restart enumeration from the first event in the track.
    enumerator = trk.GetEnumerator();
    enumerator.MoveNext();
    currentEvent = (MidiEvent)enumerator.Current;
    endReached = false;

    // Tick accumulator.
    int ticks = currentEvent.Ticks;

    // Create MIDI chaser to chase MIDI messages so that the sequence
    // is updated correctly at the specified position.
    MidiChaser chaser = new MidiChaser(this._midiSender, tickGen);

    if(ticks <= position)
    {
        // Add first message.
        chaser.Add(currentEvent.Message);
    }

    // While the position being sought has not been reached.
    while(ticks < position)
    {
        // Move to the next event in the track.
        endReached = !enumerator.MoveNext();

        // If the position being sought lies beyond the end of the
        // track, trigger event to notify listeners and return.
        if(endReached)
        {
            if(EndOfTrackReached != null)
                EndOfTrackReached(this, EventArgs.Empty);

            // Chase MIDI messages so that the sequence sounds correctly from
            // the specified position.
            chaser.Chase();

            return;
        }
        // Else the position has not yet been reached.
        else
        {
            // Get the current MIDI event.
            currentEvent = (MidiEvent)enumerator.Current;

            // Accumulate ticks.
            ticks += currentEvent.Ticks;

            // If we haven't gone beyond the specified position.
            if(ticks <= position)
            {
                // Add message to chaser.
                chaser.Add(currentEvent.Message);
            }
        }
    }

    // Initialize the current MIDI event ticks to the number of ticks
    // remaining until the specified position is reached.
    currentEvent.Ticks = ticks - position;

    // Chase MIDI messages so that the sequence sounds correctly from
    // the specified position.
    chaser.Chase();
}
/// <summary>
/// Advances the enumerator to the next MIDI event in the track.
/// </summary>
/// <returns>
/// <b>true</b> if the end of the track has not yet been reached;
/// otherwise, <b>false</b>.
/// </returns>
/// <exception cref="InvalidOperationException">
/// Thrown if the track was modified after this enumerator was created.
/// </exception>
public bool MoveNext()
{
    // Fail fast if the underlying track changed since enumeration began.
    if(version != owner.Version)
        throw new InvalidOperationException(
            "The track was modified after the enumerator was created.");

    eventIndex++;

    // Past the last event: enumeration is finished.
    if(eventIndex >= owner.Count)
    {
        return false;
    }

    currentEvent = owner[eventIndex];
    return true;
}
/// <summary>
/// Initializes a new instance of the Track class, seeded with the mandatory
/// end-of-track meta message so the track is always well-terminated.
/// </summary>
public Track()
{
    midiEvents.Add(
        new MidiEvent(new MetaMessage(MetaType.EndOfTrack, 0), 0));
}
/// <summary>
/// Inserts a MidiEvent into the Track at the specified index.
/// </summary>
/// <param name="index">
/// The zero-based index at which <i>e</i> should be inserted.
/// </param>
/// <param name="e">
/// The MidiEvent to insert.
/// </param>
/// <exception cref="InvalidOperationException">
/// Thrown if the track is currently locked.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown if index is less than zero or greater than or equal to
/// Count.
/// </exception>
public void Insert(int index, MidiEvent e)
{
    // Enforce preconditions.
    if(IsLocked())
    {
        throw new InvalidOperationException(
            "Cannot modify track. It is currently locked");
    }
    // index == Count is rejected: appending must go through Add(), which
    // preserves the trailing end-of-track message.
    else if(index < 0 || index >= Count)
    {
        throw new ArgumentOutOfRangeException(nameof(index), index,
            "Index into track out of range.");
    }

    midiEvents.Insert(index, e);

    // Bump version so live enumerators detect the modification.
    version++;
}
/// <summary>
/// Add a Midi event to the end of the track.
/// </summary>
/// <param name="e">
/// The Midi event to add to the track.
/// </param>
/// <exception cref="InvalidOperationException">
/// Thrown if the track is currently locked.
/// </exception>
public void Add(MidiEvent e)
{
    if(IsLocked())
    {
        throw new InvalidOperationException(
            "Cannot modify track. It is currently locked");
    }

    // Inserting the next MIDI event before the last event ensures
    // that the track ends with an end of track message.
    int insertionPoint = Count - 1;
    midiEvents.Insert(insertionPoint, e);

    // Invalidate outstanding enumerators.
    version++;
}
/// <summary>
/// Marshals an array of MIDI events into native VstMidiEvent structures and
/// dispatches them to the hosted VST effect via effProcessEvents. All native
/// memory is owned by a MemoryManager and released in the finally block.
/// Exceptions are logged, never propagated.
/// </summary>
/// <param name="events">The MIDI events to deliver to the plugin.</param>
public virtual void send( MidiEvent[] events )
{
    unsafe
    {
        MemoryManager mman = null;
        try
        {
            mman = new MemoryManager();
            int nEvents = events.Length;
            // NOTE(review): the header allocation uses sizeof(VstEvent) rather
            // than sizeof(VstEvents); it happens to leave room for the pointer
            // array but verify against the VstEvents layout.
            VstEvents* pVSTEvents = (VstEvents*)mman.malloc( sizeof( VstEvent ) + nEvents * sizeof( VstEvent* ) ).ToPointer();
            pVSTEvents->numEvents = 0;
            pVSTEvents->reserved = (VstIntPtr)0;
            for ( int i = 0; i < nEvents; i++ )
            {
                MidiEvent pProcessEvent = events[i];
                //byte event_code = (byte)pProcessEvent.firstByte;
                VstEvent* pVSTEvent = (VstEvent*)0;
                VstMidiEvent* pMidiEvent;
                // Allocate enough room for the struct plus the variable-length
                // midi data (status byte + data bytes).
                pMidiEvent = (VstMidiEvent*)mman.malloc( (int)(sizeof( VstMidiEvent ) + (pProcessEvent.data.Length + 1) * sizeof( byte )) ).ToPointer();
                pMidiEvent->byteSize = sizeof( VstMidiEvent );
                pMidiEvent->deltaFrames = 0;
                pMidiEvent->detune = 0;
                pMidiEvent->flags = 1;
                pMidiEvent->noteLength = 0;
                pMidiEvent->noteOffset = 0;
                pMidiEvent->noteOffVelocity = 0;
                pMidiEvent->reserved1 = 0;
                pMidiEvent->reserved2 = 0;
                pMidiEvent->type = VstEventTypes.kVstMidiType;
                // midiData[0] carries the status byte, followed by the data bytes.
                pMidiEvent->midiData[0] = (byte)(0xff & pProcessEvent.firstByte);
                for ( int j = 0; j < pProcessEvent.data.Length; j++ )
                {
                    pMidiEvent->midiData[j + 1] = (byte)(0xff & pProcessEvent.data[j]);
                }
                // NOTE(review): pointer is truncated through an int cast —
                // unsafe on 64-bit processes; confirm this code only runs in
                // a 32-bit host.
                pVSTEvents->events[pVSTEvents->numEvents++] = (int)(VstEvent*)pMidiEvent;
            }
            aEffect.Dispatch( AEffectXOpcodes.effProcessEvents, 0, 0, new IntPtr( pVSTEvents ), 0 );
        }
        catch ( Exception ex )
        {
            serr.println( "vstidrv#send; ex=" + ex );
        }
        finally
        {
            if ( mman != null )
            {
                try
                {
                    mman.dispose();
                }
                catch ( Exception ex2 )
                {
                    serr.println( "vstidrv#send; ex2=" + ex2 );
                }
            }
        }
    }
}
/// <summary>
/// Packs a short (channel) MIDI event into a single little-endian integer:
/// status in the low byte, then the two data bytes.
/// </summary>
/// <param name="midiEvent">The event to pack.</param>
/// <returns>The packed message word.</returns>
private int PackShortEvent(MidiEvent midiEvent)
{
    byte[] data = _midiEventToBytesConverter.Convert(midiEvent, ChannelEventBufferSize);
    // Bytes occupy disjoint bit ranges, so OR-ing equals the original sum.
    return data[0] | (data[1] << 8) | (data[2] << 16);
}
/** Create a new Midi tempo meta event (at time zero) carrying the given
 *  tempo value in microseconds per quarter note. */
private static MidiEvent CreateTempoEvent(int tempo)
{
    return new MidiEvent
    {
        DeltaTime = 0,
        StartTime = 0,
        HasEventflag = true,
        EventFlag = MetaEvent,
        Metaevent = MetaEventTempo,
        Metalength = 3,   // tempo payload is always 3 bytes
        Tempo = tempo,
    };
}
/// <summary>
/// Receives a batch of 3-byte event payloads (<paramref name="src"/>) with
/// their clocks (<paramref name="deltaFrames"/>) for one track. Track 0 is the
/// tempo track: its payloads rebuild the global tempo list with cumulative
/// seconds. All tracks have their events converted to MidiEvent objects and
/// stored in <c>s_track_events</c>. Returns TRUE. Thread-safe via
/// <c>locker</c>.
/// </summary>
public int sendEvent(byte[] src, int[] deltaFrames /*, int numEvents*/, int targetTrack)
{
    lock ( locker )
    {
        int count;
        int numEvents = deltaFrames.Length;
        if (targetTrack == 0)
        {
            // Rebuild the tempo list from scratch.
            if (g_tempoList == null)
            {
                g_tempoList = new Vector <TempoInfo>();
            }
            else
            {
                g_tempoList.clear();
            }
            if (numEvents <= 0)
            {
                // No tempo events: fall back to a single default-tempo entry.
                g_numTempoList = 1;
                TempoInfo ti = new TempoInfo();
                ti.Clock = 0;
                ti.Tempo = DEF_TEMPO;
                ti.TotalSec = 0.0;
                g_tempoList.add(ti);
            }
            else
            {
                // If the first change is not at clock 0, prepend a default
                // tempo entry covering the start of the song.
                if (deltaFrames[0] == 0)
                {
                    g_numTempoList = numEvents;
                }
                else
                {
                    g_numTempoList = numEvents + 1;
                    TempoInfo ti = new TempoInfo();
                    ti.Clock = 0;
                    ti.Tempo = DEF_TEMPO;
                    ti.TotalSec = 0.0;
                    g_tempoList.add(ti);
                }
                int prev_tempo = DEF_TEMPO;
                int prev_clock = 0;
                double total = 0.0;
                count = -3;
                for (int i = 0; i < numEvents; i++)
                {
                    count += 3;
                    // Each tempo payload is 3 big-endian bytes.
                    int tempo = (int)(src[count + 2] | (src[count + 1] << 8) | (src[count] << 16));
                    // Accumulate elapsed seconds up to this tempo change.
                    total += (deltaFrames[i] - prev_clock) * (double)prev_tempo / (1000.0 * TIME_FORMAT);
                    TempoInfo ti = new TempoInfo();
                    ti.Clock = deltaFrames[i];
                    ti.Tempo = tempo;
                    ti.TotalSec = total;
                    g_tempoList.add(ti);
                    prev_tempo = tempo;
                    prev_clock = deltaFrames[i];
                }
            }
        }
        // Store the given event information into s_track_events.
        count = -3;
        int pPrev = 0;
        s_track_events.get(targetTrack).clear();
#if VOCALO_DRIVER_PRINT_EVENTS
        sout.println("VocaloidDriver#SendEvent");
        byte msb = 0x0;
        byte lsb = 0x0;
#endif
        for (int i = 0; i < numEvents; i++)
        {
            count += 3;
            MidiEvent pEvent = new MidiEvent();
            //pEvent = &(new MIDI_EVENT());
            //pEvent->pNext = NULL;
            pEvent.clock = (uint)deltaFrames[i];
            //pEvent.dwOffset = 0;
            if (targetTrack == 0)
            {
                // Wrap as a standard tempo meta event: FF 51 03 tt tt tt.
                pEvent.firstByte = 0xff;
                pEvent.data = new int[5];
                pEvent.data[0] = 0x51;
                pEvent.data[1] = 0x03;
                pEvent.data[2] = src[count];
                pEvent.data[3] = src[count + 1];
                pEvent.data[4] = src[count + 2];
            }
            else
            {
#if VOCALO_DRIVER_PRINT_EVENTS
                // Debug trace of NRPN MSB/LSB/data pairs.
                if (src[count + 1] == 0x63)
                {
                    msb = src[count + 2];
                }
                else if (src[count + 1] == 0x62)
                {
                    lsb = src[count + 2];
                }
                else
                {
                    String str = (src[count + 1] == 0x06) ? ("0x" + PortUtil.toHexString(src[count + 2], 2)) : "    ";
                    str += (src[count + 1] == 0x26) ? (" 0x" + PortUtil.toHexString(src[count + 2], 2)) : "";
                    int nrpn = msb << 8 | lsb;
                    sout.println("VocaloidDriver#SendEvent; NRPN: 0x" + PortUtil.toHexString(nrpn, 4) + " " + str);
                }
#endif
                // Regular channel event: status byte followed by two data bytes.
                pEvent.firstByte = src[count];
                pEvent.data = new int[3];
                pEvent.data[0] = src[count + 1];
                pEvent.data[1] = src[count + 2];
                pEvent.data[2] = 0x00;
            }
            s_track_events.get(targetTrack).add(pEvent);
        }
    }
    return(TRUE);
}
/** Search the events for a ControlChange event with the same
 * channel and control number. If a matching event is found,
 * update the control value. Else, add a new ControlChange event.
 */
private static void UpdateControlChange(List<MidiEvent> newevents, MidiEvent changeEvent)
{
    foreach (MidiEvent existing in newevents)
    {
        bool sameController =
            existing.EventFlag == changeEvent.EventFlag &&
            existing.Channel == changeEvent.Channel &&
            existing.ControlNum == changeEvent.ControlNum;

        if (sameController)
        {
            // Overwrite in place rather than appending a duplicate.
            existing.ControlValue = changeEvent.ControlValue;
            return;
        }
    }

    // No matching controller found: keep the new event.
    newevents.Add(changeEvent);
}
/// <inheritdoc />
public void AddProgramChange(int track, int tick, byte channel, byte program)
{
    // NOTE(review): the `track` parameter is unused here; the event is added
    // directly to _midiFile (interface-mandated signature).
    var command = MakeCommand((byte)MidiEventType.ProgramChange, channel);
    var message = new MidiEvent(tick, command, FixValue(program), 0);
    _midiFile.AddEvent(message);
}
/** Parse a single Midi track into a list of MidiEvents.
 * Entering this function, the file offset should be at the start of
 * the MTrk header.  Upon exiting, the file offset should be at the
 * start of the next MTrk header.
 *
 * Implements MIDI "running status": if the next byte is below 0x80 the
 * previous event flag is reused. Truncated files are tolerated — parsing
 * simply returns the events read so far.
 */
private List<MidiEvent> ReadTrack(MidiFileReader file)
{
    List<MidiEvent> result = new List<MidiEvent>(20);
    int starttime = 0;
    string id = file.ReadAscii(4);
    if (id != "MTrk")
    {
        throw new MidiFileException("Bad MTrk header", file.GetOffset() - 4);
    }
    int tracklen = file.ReadInt();
    int trackend = tracklen + file.GetOffset();

    // Running-status event flag, persists across events.
    int eventflag = 0;

    while (file.GetOffset() < trackend)
    {

        // If the midi file is truncated here, we can still recover.
        // Just return what we've parsed so far.
        int startoffset, deltatime;
        byte peekevent;
        try
        {
            startoffset = file.GetOffset();
            deltatime = file.ReadVarlen();
            starttime += deltatime;
            peekevent = file.Peek();
        }
        catch (MidiFileException e)
        {
            return result;
        }

        MidiEvent mevent = new MidiEvent();
        result.Add(mevent);
        mevent.DeltaTime = deltatime;
        mevent.StartTime = starttime;

        // A byte >= 0x80 is a new status byte; otherwise running status applies.
        if (peekevent >= EventNoteOff)
        {
            mevent.HasEventflag = true;
            eventflag = file.ReadByte();
        }

        // Console.WriteLine("offset {0}: event {1} {2} start {3} delta {4}",
        //                   startoffset, eventflag, EventName(eventflag),
        //                   starttime, mevent.DeltaTime);

        // Channel voice messages: the low nibble of the status byte is the channel.
        if (eventflag >= EventNoteOn && eventflag < EventNoteOn + 16)
        {
            mevent.EventFlag = EventNoteOn;
            mevent.Channel = (byte)(eventflag - EventNoteOn);
            mevent.Notenumber = file.ReadByte();
            mevent.Velocity = file.ReadByte();
        }
        else if (eventflag >= EventNoteOff && eventflag < EventNoteOff + 16)
        {
            mevent.EventFlag = EventNoteOff;
            mevent.Channel = (byte)(eventflag - EventNoteOff);
            mevent.Notenumber = file.ReadByte();
            mevent.Velocity = file.ReadByte();
        }
        else if (eventflag >= EventKeyPressure && eventflag < EventKeyPressure + 16)
        {
            mevent.EventFlag = EventKeyPressure;
            mevent.Channel = (byte)(eventflag - EventKeyPressure);
            mevent.Notenumber = file.ReadByte();
            mevent.KeyPressure = file.ReadByte();
        }
        else if (eventflag >= EventControlChange && eventflag < EventControlChange + 16)
        {
            mevent.EventFlag = EventControlChange;
            mevent.Channel = (byte)(eventflag - EventControlChange);
            mevent.ControlNum = file.ReadByte();
            mevent.ControlValue = file.ReadByte();
        }
        else if (eventflag >= EventProgramChange && eventflag < EventProgramChange + 16)
        {
            mevent.EventFlag = EventProgramChange;
            mevent.Channel = (byte)(eventflag - EventProgramChange);
            mevent.Instrument = file.ReadByte();
        }
        else if (eventflag >= EventChannelPressure && eventflag < EventChannelPressure + 16)
        {
            mevent.EventFlag = EventChannelPressure;
            mevent.Channel = (byte)(eventflag - EventChannelPressure);
            mevent.ChanPressure = file.ReadByte();
        }
        else if (eventflag >= EventPitchBend && eventflag < EventPitchBend + 16)
        {
            mevent.EventFlag = EventPitchBend;
            mevent.Channel = (byte)(eventflag - EventPitchBend);
            mevent.PitchBend = file.ReadShort();
        }
        else if (eventflag == SysexEvent1)
        {
            mevent.EventFlag = SysexEvent1;
            mevent.Metalength = file.ReadVarlen();
            mevent.Value = file.ReadBytes(mevent.Metalength);
        }
        else if (eventflag == SysexEvent2)
        {
            mevent.EventFlag = SysexEvent2;
            mevent.Metalength = file.ReadVarlen();
            mevent.Value = file.ReadBytes(mevent.Metalength);
        }
        else if (eventflag == MetaEvent)
        {
            mevent.EventFlag = MetaEvent;
            mevent.Metaevent = file.ReadByte();
            mevent.Metalength = file.ReadVarlen();
            mevent.Value = file.ReadBytes(mevent.Metalength);
            if (mevent.Metaevent == MetaEventTimeSignature)
            {
                if (mevent.Metalength < 2)
                {
                    // Malformed time signature: default to 0/4 rather than
                    // failing the whole parse.
                    // throw new MidiFileException(
                    //  "Meta Event Time Signature len == " + mevent.Metalength  +
                    //  " != 4", file.GetOffset());
                    mevent.Numerator = (byte)0;
                    mevent.Denominator = (byte)4;
                }
                // NOTE(review): the next two branches are identical — the
                // 2..3-byte case and the >=4-byte case read the same fields;
                // presumably harmless, but confirm before collapsing.
                else if (mevent.Metalength >= 2 && mevent.Metalength < 4)
                {
                    mevent.Numerator = (byte)mevent.Value[0];
                    // Denominator is stored as a power of two.
                    mevent.Denominator = (byte)System.Math.Pow(2, mevent.Value[1]);
                }
                else
                {
                    mevent.Numerator = (byte)mevent.Value[0];
                    mevent.Denominator = (byte)System.Math.Pow(2, mevent.Value[1]);
                }
            }
            else if (mevent.Metaevent == MetaEventTempo)
            {
                if (mevent.Metalength != 3)
                {
                    throw new MidiFileException(
                        "Meta Event Tempo len == " + mevent.Metalength +
                        " != 3", file.GetOffset());
                }
                // Tempo is a 3-byte big-endian value (microseconds per quarter).
                mevent.Tempo = ( (mevent.Value[0] << 16) | (mevent.Value[1] << 8) | mevent.Value[2]);
            }
            else if (mevent.Metaevent == MetaEventEndOfTrack)
            {
                /* break;  */
            }
        }
        else
        {
            throw new MidiFileException("Unknown event " + mevent.EventFlag,
                file.GetOffset()-1);
        }
    }

    return result;
}
/// <remarks>
/// This function is not intended to provide complete correctness of MIDI parsing.
/// For now the goal is to correctly parse "note start" and "note end" events and correctly delimit all events.
/// </remarks>
/// <summary>
/// Reads one MIDI event starting at <paramref name="i"/>, advancing the index
/// past its data bytes. Maintains per-sender running status so data-only
/// continuation messages can be attributed to the right event type.
/// </summary>
/// <param name="data">Raw incoming byte stream.</param>
/// <param name="senderId">Identifies the source device for running-status tracking.</param>
/// <param name="i">Read position; advanced past the event on return.</param>
/// <param name="eventType">The resolved status byte.</param>
/// <param name="key">First data byte (0 when the event has none).</param>
/// <param name="velocity">Second data byte (0 when the event has fewer than two).</param>
/// <exception cref="InvalidDataException">
/// Thrown when a continuation byte arrives with no stored running status.
/// </exception>
private void readEvent(byte[] data, string senderId, ref int i, out byte eventType, out byte key, out byte velocity)
{
    byte statusType = data[i++];

    // continuation messages:
    // need running status to be interpreted correctly
    if (statusType <= 0x7F)
    {
        // TryGetValue avoids the ContainsKey + indexer double lookup.
        if (!runningStatus.TryGetValue(senderId, out eventType))
        {
            throw new InvalidDataException($"Received running status of sender {senderId}, but no event type was stored");
        }

        key = statusType;
        velocity = data[i++];
        return;
    }

    // real-time messages:
    // 0 additional data bytes always, do not reset running status
    if (statusType >= 0xF8)
    {
        eventType = statusType;
        key = velocity = 0;
        return;
    }

    // system common messages:
    // variable number of additional data bytes, reset running status
    if (statusType >= 0xF0)
    {
        eventType = statusType;

        // system exclusive message
        // vendor-specific, terminated by 0xF7
        // ignoring their whole contents for now since we can't do anything with them anyway
        if (statusType == 0xF0)
        {
            while (data[i - 1] != 0xF7)
            {
                i++;
            }

            key = velocity = 0;
        }
        // other common system messages
        // fixed size given by MidiEvent.FixedDataSize
        else
        {
            int dataSize = MidiEvent.FixedDataSize(statusType);
            key = dataSize >= 1 ? data[i++] : (byte)0;
            velocity = dataSize == 2 ? data[i++] : (byte)0;
        }

        runningStatus.Remove(senderId);
        return;
    }

    // channel messages
    // fixed size (varying per event type), set running status
    eventType = statusType;
    int channelDataSize = MidiEvent.FixedDataSize(statusType);
    key = channelDataSize >= 1 ? data[i++] : (byte)0;
    velocity = channelDataSize == 2 ? data[i++] : (byte)0;
    runningStatus[senderId] = eventType;
}
/// <summary>
/// Raises the <c>SysRealtimeMessageDispatched</c> event with the given MIDI event.
/// </summary>
/// <param name="e">The system realtime event to pass to subscribers.</param>
protected virtual void OnSysRealtimeMessageDispatched(MidiEvent e)
{
    // Null-conditional invoke replaces the copy-to-local pattern.
    SysRealtimeMessageDispatched?.Invoke(this, e);
}
// ------------------------------------------------------------------------------------------------------
/// <summary>
/// Returns a short human-readable description of a channel (short) MIDI
/// message based on its status type nibble. Unrecognized types yield
/// "unknown/invalid".
/// </summary>
/// <param name="ev">The MIDI event to describe.</param>
private static string _getShortMessageDescription(MidiEvent ev)
{
    // Mask off the channel nibble to get the message type.
    var shortType = ev.Status & 0xF0;
    switch (shortType)
    {
        case 0x80:
            return "NoteOff " + ev.AllData[1] + " Velo=" + ev.AllData[2];
        case 0x90:
            return "NoteOn " + ev.AllData[1] + " Velo=" + ev.AllData[2];
        case 0xA0:
            // (Label spelling "Polly" kept — it is part of the emitted text.)
            return "PollyAftertouch " + ev.AllData[1] + " Val=" + ev.AllData[2];
        case 0xB0:
            return "CC " + ev.AllData[1] + " Val=" + ev.AllData[2];
        case 0xC0:
            return "ProgramChange";
        default:
            return "unknown/invalid";
    }
}
/// <summary>
/// Core softsynth event dispatcher: converts one MIDI event on a channel into
/// tone start/stop, percussion playback (channel 9 via DX sound handles),
/// program/control changes (pan, volume, expression, RPN pitch-bend range /
/// transpose / fine tune, loop marker, portamento), pitch bend, and
/// end-of-track handling (stop or loop the clock when all channels ended).
/// </summary>
/// <param name="me">The MIDI event to process.</param>
/// <param name="channel">Target channel; null events are ignored.</param>
/// <param name="miditick">Current tick, or -1 for real-time (non-sequenced) input.</param>
/// <param name="cmc">The driving MIDI clock (stopped/rewound at end of track).</param>
public void SendEvent(MidiEvent me, byte? channel, int miditick, ref MidiClock cmc)
{
    if (channel == null) return;
    var m = _insts[Channels[channel.GetValueOrDefault()].Inst];
    // No wave assigned and not the percussion channel: nothing to play.
    if ((m.Wave == null) && (channel.GetValueOrDefault() != 9)) return;
    // note on
    if (miditick != -1)
    {
        // Sequenced path: NoteEvent carries an explicit gate length.
        var @event = me as NoteEvent;
        if (@event != null)
        {
            //Tone a;
            var ne = @event;
            // Melodic range check; channel 9 is percussion (handled below).
            if ((ne.Note < 120) && (ne.Note > 11) && (ne.Channel.Value != 9))
            {
                // NOTE(review): when Velocity == 0 and the note is not in
                // Tones, the indexer below will throw — confirm input data
                // guarantees the note is present.
                if (ne.Velocity == 0) Tones[(int) channel][ne.Note].Stop();
                if (Tones[(int) channel].ContainsKey(ne.Note))
                {
                    // Retrigger: kill the existing tone for this note first.
                    Tones[(int) channel][ne.Note].Abort();
                    Tones[(int) channel].Remove(ne.Note);
                }
                BTone[ne.Channel.Value] = NowTone;
                var t = new Tone(Pitchnames[ne.Note % 12], (ne.Note - 12) / 12, m.Wave, new Envelope(m.A, m.D, m.S, m.R), 255, 0, ne.Velocity, m.Noiseoption);
                LastTone[(byte) channel] = t;
                Tones[(int) channel].Add(ne.Note, t);
                Tones[(int) channel][ne.Note].StartPlay(miditick, ne.Gate);
                _tonequeue.Enqueue(t);
                NowTone = t;
                Portamenttick = 0;
                Bms = DX.GetNowCount();
            }
            else
            {
                // Percussion: play the pre-loaded sample for this note.
                DX.PlaySoundMem(HPercs[ne.Note], DX.DX_PLAYTYPE_BACK);
                foreach (var handle in HPercs)
                {
                    if (handle == 0) continue;
                    DX.ChangePanSoundMem((Channels[9].Panpot - 64) * 4, HPercs[ne.Note]);
                    // Scale sample volume by channel volume, expression,
                    // velocity and master volume.
                    DX.ChangeVolumeSoundMem(
                        (int) (255 * (Channels[9].Volume / 127.0) * (Channels[9].Expression / 127.0) * (ne.Velocity / 127.0) * (Volume / 100f)), HPercs[ne.Note]);
                }
            }
        }
    }
    else
    {
        // Real-time path: discrete NoteOn/NoteOff events, no gate.
        var onEvent = me as NoteOnEvent;
        if (onEvent != null)
        {
            //Tone a;
            var ne = onEvent;
            if ((ne.Note < 120) && (ne.Note > 11) && (ne.Channel.Value != 9))
            {
                if (ne.Velocity == 0) Tones[(int) channel][ne.Note].Stop();
                if (Tones[(int) channel].ContainsKey(ne.Note))
                {
                    Tones[(int) channel][ne.Note].Abort();
                    Tones[(int) channel].Remove(ne.Note);
                }
                var t = new Tone(Pitchnames[ne.Note % 12], (ne.Note - 12) / 12, m.Wave, new Envelope(m.A, m.D, m.S, m.R), 255, 0, ne.Velocity, m.Noiseoption);
                LastTone[(byte) channel] = t;
                Tones[(int) channel].Add(ne.Note, t);
                // -1/-1 means play until an explicit NoteOff arrives.
                Tones[(int) channel][ne.Note].StartPlay(-1, -1);
                _tonequeue.Enqueue(t);
            }
            else
            {
                DX.PlaySoundMem(HPercs[ne.Note], DX.DX_PLAYTYPE_BACK);
                foreach (var handle in HPercs)
                {
                    if (handle == 0) continue;
                    DX.ChangePanSoundMem((Channels[9].Panpot - 64) * 4, HPercs[ne.Note]);
                    DX.ChangeVolumeSoundMem(
                        (int) (255 * (Channels[9].Volume / 127.0) * (Channels[9].Expression / 127.0) * (ne.Velocity / 127.0)), HPercs[ne.Note]);
                }
            }
        }
        // note off
        var @event = me as NoteOffEvent;
        if (@event != null)
        {
            var noe = @event;
            Tones[(int) channel][noe.Note].Stop();
        }
    }
    var programEvent = me as ProgramEvent;
    if (programEvent != null) Channels[(int) channel].Inst = programEvent.Value;
    var controlEvent = me as ControlEvent;
    if (controlEvent != null)
    {
        var ce = controlEvent;
        switch (ce.Number)
        {
            case 10: Channels[(int) channel].Panpot = ce.Value; break;
            case 7: Channels[(int) channel].Volume = ce.Value; break;
            case 11: Channels[(int) channel].Expression = ce.Value; break;
            // RPN select (CC 101/100) followed by data entry (CC 6/38).
            case 101: _rpns[0] = ce; break;
            case 100: _rpns[1] = ce; break;
            case 6:
                _rpns[2] = ce;
                if (_rpns[1] == null) break;
                switch (_rpns[1].Value)
                {
                    // RPN 0: pitch-bend range; RPN 2: coarse transpose.
                    case 0: Channels[(int) channel].BendRange = new Rpn(_rpns[2].Value); break;
                    case 2: Channels[(int) channel].NoteShift = new Rpn((short) (_rpns[2].Value - 64)); break;
                }
                break;
            case 38:
                _rpns[3] = ce;
                if ((_rpns[1] == null) || (_rpns[2] == null)) break;
                // RPN 1: fine tune from the 14-bit MSB/LSB data pair.
                if (_rpns[1].Value == 1) Channels[(int) channel].Tweak = new Rpn((short) (_rpns[2].Value * 128 + _rpns[3].Value - 8192));
                break;
            // CC 111: loop-point marker (RPG Maker convention).
            case 111: Loop = ce.Tick; break;
            case 65: Channels[(int) channel].Portament = ce.Value; break;
            case 5: Channels[(int) channel].PortamentTime = ce.Value; break;
        }
    }
    if (me is PitchEvent)
    {
        // Reassemble the 14-bit bend value, centered at 8192.
        var native = me.ToNativeEvent();
        var pitchdata = native[2] * 128 + native[1] - 8192;
        Channels[(int) channel].Pitchbend = pitchdata;
        //Console.WriteLine("Decimal: " + pe.Value + "Hexa: " + pe.Value.ToString("X2") + "Binary: " + Convert.ToString(2, pe.Value));
    }
    if ((cmc != null) && me is MidiEndOfTrack)
    {
        Channels[me.Channel.GetValueOrDefault()].End = true;
        // When every channel has ended, either stop the clock or jump back
        // to the loop point set via CC 111.
        var allend = true;
        foreach (var c in Channels)
            if (!c.End)
            {
                allend = false;
                break;
            }
        if (allend)
            if (Loop == -1) cmc.Stop();
            else cmc.TickCount = Loop;
    }
}
/// <summary>
/// Renders the whole track through the AquesTone VSTi driver: primes the driver with a
/// dummy note, then walks the track's note events, sending queued MIDI events at each
/// gate-time and forwarding synthesized samples via waveIncoming() until total_samples
/// are produced or cancellation is requested.
/// </summary>
/// <param name="total_samples">Total number of samples to render.</param>
/// <param name="state">Worker state used for progress reporting, cancellation, and completion.</param>
public void begin( long total_samples, WorkerState state ) {
    var mDriver = getDriver();
#if DEBUG
    sout.println( "AquesToneRenderingRunner#begin; (mDriver==null)=" + (mDriver == null) );
    String file = System.IO.Path.Combine( System.Windows.Forms.Application.StartupPath, "AquesToneWaveGenerator.txt" );
    log = new System.IO.StreamWriter( file );
    log.AutoFlush = true;
#endif
    // Bail out (but still report completion) when the driver is unavailable.
    if ( mDriver == null ) {
#if DEBUG
        log.WriteLine( "mDriver==null" );
        log.Close();
#endif
        exitBegin();
        state.reportComplete();
        return;
    }
#if DEBUG
    sout.println( "AquesToneRenderingRunner#begin; mDriver.loaded=" + mDriver.loaded );
#endif
    if ( !mDriver.loaded ) {
#if DEBUG
        log.WriteLine( "mDriver.loaded=" + mDriver.loaded );
        log.Close();
#endif
        exitBegin();
        state.reportComplete();
        return;
    }
    mRunning = true;
    //mAbortRequired = false;
    mTotalSamples = total_samples;
#if DEBUG
    sout.println( "AquesToneWaveGenerator#begin; mTotalSamples=" + mTotalSamples );
    log.WriteLine( "mTotalSamples=" + mTotalSamples );
    log.WriteLine( "mTrimRemain=" + mTrimRemain );
#endif
    VsqTrack track = mVsq.Track.get( mTrack );
    // Process in buffers of 1/10th of a second.
    int BUFLEN = mSampleRate / 10;
    double[] left = new double[BUFLEN];
    double[] right = new double[BUFLEN];
    long saProcessed = 0;
    int saRemain = 0;
    int lastClock = 0; // the last gate-time (clock) that has been processed
    // Play a dummy note first
    // (to avoid the noise that appears at the very beginning, and so that playback starts
    //  from silence when a previous rendering was stopped part-way through)
    mDriver.resetAllParameters();
    mDriver.process( left, right, BUFLEN );
    MidiEvent f_noteon = new MidiEvent();
    f_noteon.firstByte = 0x90;
    f_noteon.data = new int[] { 0x40, 0x40 };
    f_noteon.clock = 0;
    mDriver.send( new MidiEvent[] { f_noteon } );
    mDriver.process( left, right, BUFLEN );
    MidiEvent f_noteoff = new MidiEvent();
    f_noteoff.firstByte = 0x80;
    f_noteoff.data = new int[] { 0x40, 0x7F };
    mDriver.send( new MidiEvent[] { f_noteoff } );
    // Let the dummy note's tail flush through the driver.
    for ( int i = 0; i < 3; i++ ) {
        mDriver.process( left, right, BUFLEN );
    }
#if DEBUG
    log.WriteLine( "pre-process done" );
    log.WriteLine( "-----------------------------------------------------" );
    VsqTrack vsq_track = mVsq.Track.get( mTrack );
    for ( Iterator<VsqEvent> itr = vsq_track.getNoteEventIterator(); itr.hasNext(); ) {
        VsqEvent item = itr.next();
        log.WriteLine( "c" + item.Clock + "; " + item.ID.LyricHandle.L0.Phrase );
    }
#endif
    // Set the parameter values as of the rendering start position.
    for ( Iterator<VsqEvent> itr = track.getNoteEventIterator(); itr.hasNext(); ) {
        VsqEvent item = itr.next();
#if DEBUG
        sout.println( "AquesToneWaveGenerator#begin; item.Clock=" + item.Clock );
        log.WriteLine( "*********************************************************" );
        log.WriteLine( "item.Clock=" + item.Clock );
#endif
        long saNoteStart = (long)(mVsq.getSecFromClock( item.Clock ) * mSampleRate);
        long saNoteEnd = (long)(mVsq.getSecFromClock( item.Clock + item.ID.getLength() ) * mSampleRate);
#if DEBUG
        log.WriteLine( "saNoteStart=" + saNoteStart + "; saNoteEnd=" + saNoteEnd );
#endif
        // Collect MIDI events from the last processed clock up to the end of this note.
        EventQueueSequence list = generateMidiEvent( mVsq, mTrack, lastClock, item.Clock + item.ID.getLength() );
        lastClock = item.Clock + item.ID.Length + 1;
        for ( Iterator<Integer> itr2 = list.keyIterator(); itr2.hasNext(); ) {
            // First synthesize everything up to just before this clock.
            Integer clock = itr2.next();
#if DEBUG
            log.WriteLine( "-------------------------------------------------------" );
            sout.println( "AquesToneWaveGenerator#begin; clock=" + clock );
#endif
            long saStart = (long)(mVsq.getSecFromClock( clock ) * mSampleRate);
            saRemain = (int)(saStart - saProcessed);
#if DEBUG
            log.WriteLine( "saStart=" + saStart );
            log.WriteLine( "saRemain=" + saRemain );
#endif
            while ( saRemain > 0 ) {
                if ( state.isCancelRequested() ) {
                    goto heaven;
                }
                int len = saRemain > BUFLEN ? BUFLEN : saRemain;
                mDriver.process( left, right, len );
                waveIncoming( left, right, len );
                saRemain -= len;
                saProcessed += len;
                state.reportProgress( saProcessed );
                //mTotalAppend += len; <- accounted for inside waveIncoming
            }
            // Send the MIDI events queued at this clock.
            MidiEventQueue queue = list.get( clock );
            // Note-offs first.
            boolean noteoff_send = false;
            if ( queue.noteoff.size() > 0 ) {
#if DEBUG
                for ( int i = 0; i < queue.noteoff.size(); i++ ) {
                    String str = "";
                    MidiEvent itemi = queue.noteoff.get( i );
                    str += "0x" + PortUtil.toHexString( itemi.firstByte, 2 ) + " ";
                    for ( int j = 0; j < itemi.data.Length; j++ ) {
                        str += "0x" + PortUtil.toHexString( itemi.data[j], 2 ) + " ";
                    }
                    sout.println( typeof( AquesToneWaveGenerator ) + "#begin; noteoff; " + str );
                }
#endif
                mDriver.send( queue.noteoff.toArray( new MidiEvent[] { } ) );
                noteoff_send = true;
            }
            // Then parameter changes.
            if ( queue.param.size() > 0 ) {
                for ( Iterator<ParameterEvent> itr3 = queue.param.iterator(); itr3.hasNext(); ) {
                    ParameterEvent pe = itr3.next();
#if DEBUG
                    sout.println( typeof( AquesToneWaveGenerator ) + "#begin; param; index=" + pe.index + "; value=" + pe.value );
#endif
                    mDriver.setParameter( pe.index, pe.value );
                }
            }
            // Then note-ons.
            if ( queue.noteon.size() > 0 ) {
                // If pitch-bend is also specified at the same gate-time, it apparently
                // is not reflected unless it is sent together with the note-on!
                if ( queue.pit.size() > 0 ) {
                    queue.noteon.addAll( queue.pit );
                    queue.pit.clear();
                }
#if DEBUG
                for ( int i = 0; i < queue.noteon.size(); i++ ) {
                    String str = "";
                    MidiEvent itemi = queue.noteon.get( i );
                    str += "0x" + PortUtil.toHexString( itemi.firstByte, 2 ) + " ";
                    for ( int j = 0; j < itemi.data.Length; j++ ) {
                        str += "0x" + PortUtil.toHexString( itemi.data[j], 2 ) + " ";
                    }
                    sout.println( typeof( AquesToneWaveGenerator ) + "#begin; noteon; " + str );
                }
#endif
                mDriver.send( queue.noteon.toArray( new MidiEvent[] { } ) );
            }
            // PIT (pitch-bend) events not already merged into the note-on batch above.
            if ( queue.pit.size() > 0 && !noteoff_send ) {
#if DEBUG
                for ( int i = 0; i < queue.pit.size(); i++ ) {
                    String str = "";
                    MidiEvent itemi = queue.pit.get( i );
                    str += "0x" + PortUtil.toHexString( itemi.firstByte, 2 ) + " ";
                    for ( int j = 0; j < itemi.data.Length; j++ ) {
                        str += "0x" + PortUtil.toHexString( itemi.data[j], 2 ) + " ";
                    }
                    sout.println( typeof( AquesToneWaveGenerator ) + "#begin; pit; " + str );
                }
#endif
                mDriver.send( queue.pit.toArray( new MidiEvent[] { } ) );
            }
            if ( mDriver.getUi( mMainWindow ) != null ) {
                mDriver.getUi( mMainWindow ).invalidateUi();
            }
        }
    }
    // If we fell short of totalSamples, render the remainder.
    saRemain = (int)(mTotalSamples - mTotalAppend);
#if DEBUG
    sout.println( "AquesToneRenderingRunner#run; totalSamples=" + mTotalSamples + "; mTotalAppend=" + mTotalAppend + "; saRemain=" + saRemain );
#endif
    while ( saRemain > 0 ) {
        if ( state.isCancelRequested() ) {
            goto heaven;
        }
        int len = saRemain > BUFLEN ? BUFLEN : saRemain;
        mDriver.process( left, right, len );
        waveIncoming( left, right, len );
        saRemain -= len;
        saProcessed += len;
        state.reportProgress( saProcessed );
        //mTotalAppend += len;
    }
// Jump target for cancellation: clean up and report completion.
heaven:
#if DEBUG
    log.Close();
#endif
    exitBegin();
    state.reportComplete();
}
/// <summary>Creates a CodeDOM object-creation expression that reconstructs the given voice event.</summary>
/// <param name="ev">The event to create.</param>
/// <returns>The object creation expression for the event, or null when the event type is not recognized.</returns>
private static CodeObjectCreateExpression CreateVoiceEvent(MidiEvent ev)
{
    // Every voice-event constructor takes the delta-time as its first argument.
    CodeExpression delta = new CodePrimitiveExpression(ev.DeltaTime);

    // NOTE ON
    if (ev is NoteOn noteOn)
    {
        return new CodeObjectCreateExpression(
            typeof(NoteOn),
            delta,
            new CodePrimitiveExpression(noteOn.Channel),
            new CodePrimitiveExpression(MidiEvent.GetNoteName(noteOn.Note)),
            new CodePrimitiveExpression(noteOn.Velocity));
    }

    // NOTE OFF
    if (ev is NoteOff noteOff)
    {
        return new CodeObjectCreateExpression(
            typeof(NoteOff),
            delta,
            new CodePrimitiveExpression(noteOff.Channel),
            new CodePrimitiveExpression(MidiEvent.GetNoteName(noteOff.Note)),
            new CodePrimitiveExpression(noteOff.Velocity));
    }

    // AFTERTOUCH
    if (ev is Aftertouch aftertouch)
    {
        return new CodeObjectCreateExpression(
            typeof(Aftertouch),
            delta,
            new CodePrimitiveExpression(aftertouch.Channel),
            new CodePrimitiveExpression(MidiEvent.GetNoteName(aftertouch.Note)),
            new CodePrimitiveExpression(aftertouch.Pressure));
    }

    // PROGRAM CHANGE (program number rendered as a GeneralMidiInstruments cast)
    if (ev is ProgramChange programChange)
    {
        return new CodeObjectCreateExpression(
            typeof(ProgramChange),
            delta,
            new CodePrimitiveExpression(programChange.Channel),
            new CodeCastExpression(typeof(GeneralMidiInstruments), new CodePrimitiveExpression(programChange.Number)));
    }

    // CONTROLLER (controller number rendered as a Controllers cast)
    if (ev is Controller controller)
    {
        return new CodeObjectCreateExpression(
            typeof(Controller),
            delta,
            new CodePrimitiveExpression(controller.Channel),
            new CodeCastExpression(typeof(Controllers), new CodePrimitiveExpression(controller.Number)),
            new CodePrimitiveExpression(controller.Value));
    }

    // CHANNEL PRESSURE
    if (ev is ChannelPressure channelPressure)
    {
        return new CodeObjectCreateExpression(
            typeof(ChannelPressure),
            delta,
            new CodePrimitiveExpression(channelPressure.Channel),
            new CodePrimitiveExpression(channelPressure.Pressure));
    }

    // PITCH WHEEL
    if (ev is PitchWheel pitchWheel)
    {
        return new CodeObjectCreateExpression(
            typeof(PitchWheel),
            delta,
            new CodePrimitiveExpression(pitchWheel.Channel),
            new CodePrimitiveExpression(pitchWheel.UpperBits),
            new CodePrimitiveExpression(pitchWheel.LowerBits));
    }

    // Unrecognized event type.
    return null;
}
private MidiEvent createNoteOffEvent( int clock, int note ) { var result = new MidiEvent(); result.clock = clock; result.firstByte = 0x80; result.data = new int[] { note, 0x40 }; return result; }
public override void OnFinishEventReading(MidiEvent midiEvent, long absoluteTime) { BadHandledCount++; }
/// <summary>Creates a CodeDOM object-creation expression that reconstructs the given meta event.</summary>
/// <param name="ev">The event to create.</param>
/// <returns>The object creation expression for the event, or null when the event type is not recognized.</returns>
private static CodeObjectCreateExpression CreateMetaEvent(MidiEvent ev)
{
    // Every meta-event constructor takes the delta-time as its first argument.
    CodeExpression delta = new CodePrimitiveExpression(ev.DeltaTime);

    // SEQUENCE NUMBER
    if (ev is SequenceNumber sequenceNumber)
    {
        return new CodeObjectCreateExpression(typeof(SequenceNumber), delta, new CodePrimitiveExpression(sequenceNumber.Number));
    }

    // TEXT
    if (ev is Text textEvent)
    {
        return new CodeObjectCreateExpression(typeof(Text), delta, new CodePrimitiveExpression(textEvent.Text));
    }

    // COPYRIGHT
    if (ev is Copyright copyright)
    {
        return new CodeObjectCreateExpression(typeof(Copyright), delta, new CodePrimitiveExpression(copyright.Text));
    }

    // SEQUENCE TRACK NAME
    if (ev is SequenceTrackName trackName)
    {
        return new CodeObjectCreateExpression(typeof(SequenceTrackName), delta, new CodePrimitiveExpression(trackName.Text));
    }

    // INSTRUMENT
    if (ev is Instrument instrument)
    {
        return new CodeObjectCreateExpression(typeof(Instrument), delta, new CodePrimitiveExpression(instrument.Text));
    }

    // LYRIC
    if (ev is Lyric lyric)
    {
        return new CodeObjectCreateExpression(typeof(Lyric), delta, new CodePrimitiveExpression(lyric.Text));
    }

    // MARKER
    if (ev is Marker marker)
    {
        return new CodeObjectCreateExpression(typeof(Marker), delta, new CodePrimitiveExpression(marker.Text));
    }

    // CUE POINT
    if (ev is CuePoint cuePoint)
    {
        return new CodeObjectCreateExpression(typeof(CuePoint), delta, new CodePrimitiveExpression(cuePoint.Text));
    }

    // PROGRAM NAME
    if (ev is ProgramName programName)
    {
        return new CodeObjectCreateExpression(typeof(ProgramName), delta, new CodePrimitiveExpression(programName.Text));
    }

    // DEVICE NAME
    if (ev is DeviceName deviceName)
    {
        return new CodeObjectCreateExpression(typeof(DeviceName), delta, new CodePrimitiveExpression(deviceName.Text));
    }

    // CHANNEL PREFIX
    if (ev is ChannelPrefix channelPrefix)
    {
        return new CodeObjectCreateExpression(typeof(ChannelPrefix), delta, new CodePrimitiveExpression(channelPrefix.Prefix));
    }

    // MIDI PORT
    if (ev is MidiPort midiPort)
    {
        return new CodeObjectCreateExpression(typeof(MidiPort), delta, new CodePrimitiveExpression(midiPort.Port));
    }

    // END OF TRACK (delta-time is the only constructor argument)
    if (ev is EndOfTrack)
    {
        return new CodeObjectCreateExpression(typeof(EndOfTrack), delta);
    }

    // TEMPO
    if (ev is Tempo tempo)
    {
        return new CodeObjectCreateExpression(typeof(Tempo), delta, new CodePrimitiveExpression(tempo.Value));
    }

    // SMPTE OFFSET
    if (ev is SMPTEOffset smpteOffset)
    {
        return new CodeObjectCreateExpression(
            typeof(SMPTEOffset),
            delta,
            new CodePrimitiveExpression(smpteOffset.Hours),
            new CodePrimitiveExpression(smpteOffset.Minutes),
            new CodePrimitiveExpression(smpteOffset.Seconds),
            new CodePrimitiveExpression(smpteOffset.Frames),
            new CodePrimitiveExpression(smpteOffset.FractionalFrames));
    }

    // TIME SIGNATURE
    if (ev is TimeSignature timeSignature)
    {
        return new CodeObjectCreateExpression(
            typeof(TimeSignature),
            delta,
            new CodePrimitiveExpression(timeSignature.Numerator),
            new CodePrimitiveExpression(timeSignature.Denominator),
            new CodePrimitiveExpression(timeSignature.MidiClocksPerClick),
            new CodePrimitiveExpression(timeSignature.NumberOfNotated32nds));
    }

    // KEY SIGNATURE (key and tonality rendered as enum casts over their byte values)
    if (ev is KeySignature keySignature)
    {
        return new CodeObjectCreateExpression(
            typeof(KeySignature),
            delta,
            new CodeCastExpression(typeof(Key), new CodePrimitiveExpression((byte)keySignature.Key)),
            new CodeCastExpression(typeof(Tonality), new CodePrimitiveExpression((byte)keySignature.Tonality)));
    }

    // PROPRIETARY
    if (ev is Proprietary proprietary)
    {
        return new CodeObjectCreateExpression(typeof(Proprietary), delta, CreateDataArray(proprietary.Data));
    }

    // UNKNOWN META EVENT
    if (ev is UnknownMetaMidiEvent unknownMeta)
    {
        return new CodeObjectCreateExpression(
            typeof(UnknownMetaMidiEvent),
            delta,
            new CodePrimitiveExpression(unknownMeta.MetaEventID),
            CreateDataArray(unknownMeta.Data));
    }

    // Unrecognized event type.
    return null;
}
/// <summary> /// Note On のための MIDI イベント列を作成する /// </summary> /// <param name="note">ノート番号</param> /// <param name="dynamics">Dynamics</param> /// <param name="phrase">歌詞</param> /// <returns>Note On のための MIDI イベント列</returns> public MidiEvent[] createNoteOnEvent( int note, int dynamics, String phrase ) { var matcher = new SyllableMatcher(); var syllable = matcher.find( phrase ); if ( syllableMap.ContainsKey( syllable ) ) { var position = syllableMap[syllable]; int lineIndex = position.lineIndex; int columnIndex = position.columnIndex; var result = new List<MidiEvent>(); { MidiEvent moveLine = new MidiEvent(); moveLine.firstByte = 0xB0; moveLine.data = new[] { 0x31, lineIndex }; result.Add( moveLine ); } for ( int i = 1; i <= columnIndex; ++i ) { { MidiEvent dummyNoteOn = new MidiEvent(); dummyNoteOn.firstByte = 0x90; dummyNoteOn.data = new int[] { note, 0x40 }; result.Add( dummyNoteOn ); } { MidiEvent dummyNoteOff = new MidiEvent(); dummyNoteOff.firstByte = 0x80; dummyNoteOff.data = new int[] { note, 0x40 }; result.Add( dummyNoteOff ); } } { MidiEvent noteOn = new MidiEvent(); noteOn.firstByte = 0x90; noteOn.data = new int[] { note, dynamics }; result.Add( noteOn ); } return result.ToArray(); } else { return new MidiEvent[] { }; } }
/// <summary>
/// Reads the data for the next track from the Midi file.
/// </summary>
/// <param name="trackNum">
/// The track number.
/// </param>
/// <remarks>
/// Consumes the track's length header and then reads delta-time/event pairs from
/// <c>binReader</c> until an end-of-track meta event is found. Channel messages use
/// MIDI running status: a data byte in status position reuses the last status byte.
/// </remarks>
private void ReadNextTrack(int trackNum)
{
    int status = 0;
    // Last explicit channel-message status byte, reused for running-status events.
    int runningStatus = 0;

    // Read length of track.
    // NOTE(review): the length bytes are read but discarded — parsing relies solely
    // on finding the end-of-track meta event.
    binReader.ReadBytes(LengthByteCount);

    // Continue reading Midi events until the end of the track.
    while(true)
    {
        // Next Midi message in track.
        // NOTE(review): msg stays null if a status byte matches none of the
        // channel/meta/sysex categories; the MidiEvent below would then wrap null.
        IMidiMessage msg = null;

        // Ticks for next Midi event.
        int ticks = ReadVariableLengthQuantity();

        // Read status byte for the next Midi message.
        status = binReader.ReadByte();

        // If this is a status byte.
        if((status & StatusFlag) == StatusFlag)
        {
            // If the next Midi message is a channel message.
            if(ChannelMessage.IsChannelMessage(status))
            {
                // Read channel message from the Midi file.
                msg = ReadChannelMessage(status);

                // Update running status.
                runningStatus = status;
            }
            // Else if the next Midi message is a meta message.
            else if(MetaMessage.IsMetaMessage(status))
            {
                // Read the type of meta message.
                MetaType mType = (MetaType)binReader.ReadByte();

                // If this is the end of the track.
                if(mType == MetaType.EndOfTrack)
                {
                    // Make sure end of track message has the same
                    // ticks value as the end of track message in the
                    // file.
                    tracks[trackNum].Slide(tracks[trackNum].Count - 1, ticks);

                    // Break out of loop - finished with this track.
                    break;
                }

                // Read the length of the meta message data.
                int length = ReadVariableLengthQuantity();

                // Read the meta message data.
                byte[] data = binReader.ReadBytes(length);

                // Create meta message.
                msg = new MetaMessage(mType, data);
            }
            // Else if the next Midi message is a system exclusive
            // message.
            else if(SysExMessage.IsSysExMessage(status))
            {
                // The type of system exclusive message.
                SysExType type = (SysExType)status;

                // Read the length of the system exclusive data.
                int length = ReadVariableLengthQuantity();

                // Read the system exclusive data.
                byte[] data = binReader.ReadBytes(length);

                // Create system exclusive message.
                msg = new SysExMessage(type, data);
            }
        }
        // Assumes running status.
        else
        {
            // Create channel message; the byte just read is the first data byte.
            msg = ReadChannelMessage(runningStatus, status);
        }

        // Create the next Midi event and store it in the specified
        // track.
        MidiEvent e = new MidiEvent(msg, ticks);
        tracks[trackNum].Add(e);
    }
}
/// <summary> /// Called by Midi to add an event to the MidiTrack. /// </summary> /// <param name="evt">The event to add</param> public void AddEvent(MidiEvent evt) { _events.Add(evt); }
private void SendShortEvent(MidiEvent midiEvent) { var message = PackShortEvent(midiEvent); ProcessMmResult(MidiOutWinApi.midiOutShortMsg(_handle, (uint)message)); }
internal MidiEventPlayedEventArgs(MidiEvent midiEvent) { Event = midiEvent; }
private void OnEventSent(MidiEvent midiEvent) { EventSent?.Invoke(this, new MidiEventSentEventArgs(midiEvent)); }
/// <summary>Parse a voice event from the data stream.</summary>
/// <param name="track">The track that owns this event.</param>
/// <param name="deltaTime">The previously parsed delta-time for this event.</param>
/// <param name="messageType">The previously parsed type of message we're expecting to find.</param>
/// <param name="channel">The previously parsed channel for this message.</param>
/// <param name="data">The data stream from which to read the event information.</param>
/// <param name="pos">The position of the start of the event information; advanced past the event's data bytes.</param>
/// <returns>The parsed voice MIDI event.</returns>
/// <exception cref="MidiParserException">Any failure is wrapped with the current stream position.</exception>
private static MidiEvent ParseVoiceEvent(MidiTrack track, long deltaTime, byte messageType, byte channel, byte[] data, ref long pos)
{
    try
    {
        MidiEvent tempEvent = null;

        // Create the correct voice event based on its message id/type.
        // Each case consumes the number of data bytes defined for that message (1 or 2).
        switch (messageType)
        {
            // NOTE OFF
            case OffNoteVoiceMidiEvent.CategoryId:
                tempEvent = new OffNoteVoiceMidiEvent(track, deltaTime, channel, data[pos], data[pos + 1]);
                // Only the note events record their channel on the owning track.
                track.Channel = channel;
                pos += 2;
                break;

            // NOTE ON
            case OnNoteVoiceMidiEvent.CategoryId:
                tempEvent = new OnNoteVoiceMidiEvent(track, deltaTime, channel, data[pos], data[pos + 1]);
                track.Channel = channel;
                pos += 2;
                break;

            // AFTERTOUCH
            case AftertouchNoteVoiceMidiEvent.CategoryId:
                tempEvent = new AftertouchNoteVoiceMidiEvent(track, deltaTime, channel, data[pos], data[pos + 1]);
                track.Channel = channel;
                pos += 2;
                break;

            // CONTROLLER
            case ControllerVoiceMidiEvent.CategoryId:
                tempEvent = new ControllerVoiceMidiEvent(track, deltaTime, channel, data[pos], data[pos + 1]);
                pos += 2;
                break;

            // PROGRAM CHANGE
            case ProgramChangeVoiceMidiEvent.CategoryId:
                tempEvent = new ProgramChangeVoiceMidiEvent(track, deltaTime, channel, data[pos]);
                pos += 1;
                break;

            // CHANNEL PRESSURE
            case ChannelPressureVoiceMidiEvent.CategoryId:
                tempEvent = new ChannelPressureVoiceMidiEvent(track, deltaTime, channel, data[pos]);
                pos += 1;
                break;

            // PITCH WHEEL
            case PitchWheelVoiceMidiEvent.CategoryId:
                // Combine the two data bytes, then split back into the upper/lower
                // 7-bit halves expected by the event's constructor.
                int position = ((data[pos] << 8) | data[pos + 1]);
                byte upper, lower;
                MidiEvent.Split14BitsToBytes(position, out upper, out lower);
                tempEvent = new PitchWheelVoiceMidiEvent(track, deltaTime, channel, upper, lower);
                pos += 2;
                break;

            // UH OH!
            default:
                Validate.ThrowOutOfRange("messageType", messageType, 0x8, 0xE);
                break;
        }

        // Return the newly parsed event
        return(tempEvent);
    }
    // Something bad happened; wrap it in a parser exception
    catch (Exception exc)
    {
        throw new MidiParserException("Unable to parse voice MIDI event.", exc, pos);
    }
}
private void OnEventPlayed(MidiEvent midiEvent) { EventPlayed?.Invoke(this, new MidiEventPlayedEventArgs(midiEvent)); }
private void OnEventReceived(MidiEvent midiEvent, int milliseconds) { EventReceived?.Invoke(this, new MidiEventReceivedEventArgs(midiEvent, _startTime.AddMilliseconds(milliseconds))); }
/// <summary>
/// Renders the queued VOCALOID MIDI events through the VSTi plug-in and streams the
/// resulting audio to the supplied runner, until all samples have been produced or
/// cancellation is requested.
/// </summary>
/// <param name="total_samples">Total number of samples to render.</param>
/// <param name="mode_infinite">When true, keeps feeding silent buffers after rendering until cancelled.</param>
/// <param name="sample_rate">Output sampling rate; also used as the VST processing block size.</param>
/// <param name="runner">The RenderingRunner object that is driving this driver.</param>
/// <param name="state">Worker state used for cancellation checks.</param>
/// <returns>Always 1.</returns>
public int startRendering(long total_samples, boolean mode_infinite, int sample_rate, IWaveIncoming runner, WorkerState state)
{
#if DEBUG
    sout.println("VocaloidDriver#startRendering; entry; total_samples=" + total_samples + "; sample_rate=" + sample_rate);
#endif
    lock ( locker ) {
        rendering = true;
        //g_cancelRequired = false;
        g_progress = 0.0;
        sampleRate = sample_rate;
        // Merge the two event tracks into a single clock-ordered list.
        Vector<MidiEvent> lpEvents = merge_events(s_track_events.get(0), s_track_events.get(1));
        int current_count = -1;
        MidiEvent current = new MidiEvent();// = lpEvents;
        MemoryManager mman = null;
        float * left_ch;
        float * right_ch;
        float ** out_buffer;
        try {
            // All native buffers are owned by mman and released in the finally block.
            mman = new MemoryManager();
            left_ch = (float *)mman.malloc(sizeof(float) * sampleRate).ToPointer();
            right_ch = (float *)mman.malloc(sizeof(float) * sampleRate).ToPointer();
            out_buffer = (float **)mman.malloc(sizeof(float *) * 2).ToPointer();
            out_buffer[0] = left_ch;
            out_buffer[1] = right_ch;
            double[] buffer_l = new double[sampleRate];
            double[] buffer_r = new double[sampleRate];
#if TEST
            org.kbinani.debug.push_log(" calling initial dispatch...");
#endif
#if DEBUG
            sout.println("VocaloidDriver#startRendering; sampleRate=" + sampleRate);
#endif
            aEffect.Dispatch(AEffectOpcodes.effSetSampleRate, 0, 0, IntPtr.Zero, (float)sampleRate);
            aEffect.Dispatch(AEffectOpcodes.effMainsChanged, 0, 1, IntPtr.Zero, 0);
            // Here we treat block size == sampling rate (one-second blocks).
            aEffect.Dispatch(AEffectOpcodes.effSetBlockSize, 0, sampleRate, IntPtr.Zero, 0);
            // If a previous rendering was stopped part-way through, the audio processed
            // here may not be silent — flush it out before starting.
            for (int i = 0; i < 3; i++) {
                aEffect.ProcessReplacing(IntPtr.Zero, new IntPtr(out_buffer), sampleRate);
            }
#if TEST
            org.kbinani.debug.push_log(" ...done");
#endif
            int delay = 0;      // NOTE(review): never modified after this — confirm intended
            int duration = 0;
            int dwNow = 0;
            int dwPrev = 0;
            int dwDelta;
            int dwDelay = 0;
            int dwDeltaDelay = 0;
            int addr_msb = 0, addr_lsb = 0;
            int data_msb = 0, data_lsb = 0;
            int total_processed = 0;
            int total_processed2 = 0;
#if TEST
            org.kbinani.debug.push_log(" getting dwDelay...");
#endif
            // Scan the control track for the first NRPN (0x63/0x62 address, 0x06/0x26 data)
            // at address 0x50 0x01, which — presumably per the VOCALOID NRPN map — carries
            // the synthesis delay in samples/frames (TODO confirm units).
            dwDelay = 0;
            Vector<MidiEvent> list = s_track_events.get(1);
            int list_size = list.size();
            for (int i = 0; i < list_size; i++) {
                MidiEvent work = list.get(i);
                if ((work.firstByte & 0xf0) == 0xb0) {
                    switch (work.data[0]) {
                        case 0x63:
                            addr_msb = work.data[1];
                            addr_lsb = 0;
                            break;
                        case 0x62:
                            addr_lsb = work.data[1];
                            break;
                        case 0x06:
                            data_msb = work.data[1];
                            break;
                        case 0x26:
                            data_lsb = work.data[1];
                            if (addr_msb == 0x50 && addr_lsb == 0x01) {
                                dwDelay = (data_msb & 0xff) << 7 | (data_lsb & 0x7f);
                            }
                            break;
                    }
                }
                if (dwDelay > 0) {
                    break;
                }
            }
#if TEST
            org.kbinani.debug.push_log(" ...done; dwDelay=" + dwDelay);
#endif
            // Main loop: for each distinct clock, render audio up to that clock, then
            // deliver the clock's MIDI events to the plug-in.
            while (!state.isCancelRequested()) {
                int process_event_count = current_count;
                int nEvents = 0;
#if TEST
                org.kbinani.debug.push_log("lpEvents.Count=" + lpEvents.size());
#endif
                if (current_count < 0) {
                    current_count = 0;
                    current = lpEvents.get(current_count);
                    process_event_count = current_count;
                }
                // Count the events that fall on the current clock (dwNow).
                while (current.clock == dwNow) {
                    // Fetch the duration
                    if ((current.firstByte & 0xf0) == 0xb0) {
                        switch (current.data[0]) {
                            case 0x63:
                                addr_msb = current.data[1];
                                addr_lsb = 0;
                                break;
                            case 0x62:
                                addr_lsb = current.data[1];
                                break;
                            case 0x06:
                                data_msb = current.data[1];
                                break;
                            case 0x26:
                                data_lsb = current.data[1];
                                // Note Duration in millisec
                                if (addr_msb == 0x50 && addr_lsb == 0x4) {
                                    duration = data_msb << 7 | data_lsb;
                                }
                                break;
                        }
                    }
                    nEvents++;
                    if (current_count + 1 < lpEvents.size()) {
                        current_count++;
                        current = lpEvents.get(current_count);
                    } else {
                        break;
                    }
                }
                if (current_count + 1 >= lpEvents.size()) {
                    break;
                }
                // Samples still owed up to the current clock's wall-time.
                double msNow = msec_from_clock(dwNow);
                dwDelta = (int)(msNow / 1000.0 * sampleRate) - total_processed;
#if TEST
                org.kbinani.debug.push_log("dwNow=" + dwNow);
                org.kbinani.debug.push_log("dwPrev=" + dwPrev);
                org.kbinani.debug.push_log("dwDelta=" + dwDelta);
#endif
                // Build the VstEvents batch for this clock (sysex/meta bytes are skipped).
                VstEvents *pVSTEvents = (VstEvents *)mman.malloc(sizeof(VstEvent) + nEvents * sizeof(VstEvent *)).ToPointer();
                pVSTEvents->numEvents = 0;
                pVSTEvents->reserved = (VstIntPtr)0;
                for (int i = 0; i < nEvents; i++) {
                    MidiEvent pProcessEvent = lpEvents.get(process_event_count);
                    int event_code = pProcessEvent.firstByte;
                    VstEvent * pVSTEvent = (VstEvent *)0;
                    VstMidiEvent *pMidiEvent;
                    switch (event_code) {
                        case 0xf0:
                        case 0xf7:
                        case 0xff:
                            // System-exclusive / meta events are not forwarded.
                            break;
                        default:
                            pMidiEvent = (VstMidiEvent *)mman.malloc((int)(sizeof(VstMidiEvent) + (pProcessEvent.data.Length + 1) * sizeof(byte))).ToPointer();
                            pMidiEvent->byteSize = sizeof(VstMidiEvent);
                            pMidiEvent->deltaFrames = dwDelta;
                            pMidiEvent->detune = 0;
                            pMidiEvent->flags = 1;
                            pMidiEvent->noteLength = 0;
                            pMidiEvent->noteOffset = 0;
                            pMidiEvent->noteOffVelocity = 0;
                            pMidiEvent->reserved1 = 0;
                            pMidiEvent->reserved2 = 0;
                            pMidiEvent->type = VstEventTypes.kVstMidiType;
                            pMidiEvent->midiData[0] = (byte)(0xff & pProcessEvent.firstByte);
                            for (int j = 0; j < pProcessEvent.data.Length; j++) {
                                pMidiEvent->midiData[j + 1] = (byte)(0xff & pProcessEvent.data[j]);
                            }
                            pVSTEvents->events[pVSTEvents->numEvents++] = (int)(VstEvent *)pMidiEvent;
                            break;
                    }
                    process_event_count++;
                    //pProcessEvent = lpEvents[process_event_count];
                }
#if TEST
                org.kbinani.debug.push_log("calling Dispatch with effProcessEvents...");
#endif
                aEffect.Dispatch(AEffectXOpcodes.effProcessEvents, 0, 0, new IntPtr(pVSTEvents), 0);
#if TEST
                org.kbinani.debug.push_log("...done");
#endif
                // Render the owed samples, discarding the first dwDelay frames
                // (the plug-in's latency) before forwarding audio to the runner.
                while (dwDelta > 0 && !state.isCancelRequested()) {
                    int dwFrames = dwDelta > sampleRate ? sampleRate : dwDelta;
#if TEST
                    org.kbinani.debug.push_log("calling ProcessReplacing...");
#endif
                    aEffect.ProcessReplacing(IntPtr.Zero, new IntPtr(out_buffer), dwFrames);
#if TEST
                    org.kbinani.debug.push_log("...done");
#endif
                    int iOffset = dwDelay - dwDeltaDelay;
                    if (iOffset > (int)dwFrames) {
                        iOffset = (int)dwFrames;
                    }
                    if (iOffset == 0) {
                        for (int i = 0; i < (int)dwFrames; i++) {
                            buffer_l[i] = out_buffer[0][i];
                            buffer_r[i] = out_buffer[1][i];
                        }
                        total_processed2 += dwFrames;
                        runner.waveIncomingImpl(buffer_l, buffer_r, dwFrames, state);
                    } else {
                        // Still inside the latency window: swallow this block.
                        dwDeltaDelay += iOffset;
                    }
                    dwDelta -= dwFrames;
                    total_processed += dwFrames;
                }
                dwPrev = dwNow;
                dwNow = (int)current.clock;
                g_progress = total_processed / (double)total_samples * 100.0;
            }
            // Tail: render the remaining samples after the last event.
            double msLast = msec_from_clock(dwNow); // NOTE(review): unused — confirm intended
            dwDelta = (int)(sampleRate * ((double)duration + (double)delay) / 1000.0 + dwDeltaDelay);
            if (total_samples - total_processed2 > dwDelta) {
                dwDelta = (int)total_samples - total_processed2;
            }
            while (dwDelta > 0 && !state.isCancelRequested()) {
                int dwFrames = dwDelta > sampleRate ? sampleRate : dwDelta;
#if TEST
                org.kbinani.debug.push_log("calling ProcessReplacing...");
#endif
                aEffect.ProcessReplacing(IntPtr.Zero, new IntPtr(out_buffer), dwFrames);
#if TEST
                org.kbinani.debug.push_log("...done");
#endif
                for (int i = 0; i < (int)dwFrames; i++) {
                    buffer_l[i] = out_buffer[0][i];
                    buffer_r[i] = out_buffer[1][i];
                }
                total_processed2 += dwFrames;
                runner.waveIncomingImpl(buffer_l, buffer_r, dwFrames, state);
                dwDelta -= dwFrames;
                total_processed += dwFrames;
            }
#if TEST
            sout.println("vstidrv::StartRendering; total_processed=" + total_processed);
#endif
            // Infinite mode: keep feeding silence until cancellation is requested.
            if (mode_infinite) {
                for (int i = 0; i < sampleRate; i++) {
                    buffer_l[i] = 0.0;
                    buffer_r[i] = 0.0;
                }
                while (!state.isCancelRequested()) {
                    total_processed2 += sampleRate;
                    runner.waveIncomingImpl(buffer_l, buffer_r, sampleRate, state);
                }
            }
            aEffect.Dispatch(AEffectOpcodes.effMainsChanged, 0, 0, IntPtr.Zero, 0);
            lpEvents.clear();
#if DEBUG
            sout.println("VocaloidDriver#startRendering; done; total_processed=" + total_processed + "; total_processed2=" + total_processed2);
#endif
        } catch (Exception ex) {
            serr.println("VocaloidDriver#startRendering; ex=" + ex);
        } finally {
            // Release all native buffers allocated through the memory manager.
            if (mman != null) {
                try {
                    mman.dispose();
                } catch (Exception ex2) {
                    serr.println("VocaloidDriver#startRendering; ex2=" + ex2);
                }
            }
        }
        // Reset driver state so the next rendering starts clean.
        rendering = false;
        g_saProcessed = 0;
        for (int i = 0; i < s_track_events.size(); i++) {
            s_track_events.get(i).clear();
        }
        g_tempoList.clear();
        //g_cancelRequired = false;
    }
    return(1);
}
// Token: 0x0600095A RID: 2394 RVA: 0x0002250B File Offset: 0x0002090B public static MIDINotesMapper.MIDINote FromMidiEvent(MidiEvent evt) { return(MIDINotesMapper.FromNoteIndex(evt.noteIndex)); }
/// <inheritdoc /> public void AddControlChange(int track, int tick, byte channel, byte controller, byte value) { var message = new MidiEvent(tick, MakeCommand((byte)MidiEventType.Controller, channel), FixValue(controller), FixValue(value)); _midiFile.AddEvent(message); }
/// <summary> /// Initializes a new instance of the <see cref="TimedEvent"/> with the /// specified MIDI event. /// </summary> /// <param name="midiEvent">An event to wrap into <see cref="TimedEvent"/>.</param> public TimedEvent(MidiEvent midiEvent) { ThrowIfArgument.IsNull(nameof(midiEvent), midiEvent); Event = midiEvent; }
/// <summary> /// Adds the given midi event a the correct time position into the file. /// </summary> /// <param name="e"></param> public void AddEvent(MidiEvent e) => Events.Add(e);
/// <summary> /// Initializes a new instance of the <see cref="TimedEvent"/> with the /// specified MIDI event and absolute time. /// </summary> /// <param name="midiEvent">An event to wrap into <see cref="TimedEvent"/>.</param> /// <param name="time">Absolute time of an event in units defined by the time division of a MIDI file.</param> public TimedEvent(MidiEvent midiEvent, long time) : this(midiEvent) { Time = time; }
//*************************************************************** // PUBLIC UTILITIES //*************************************************************** //--------------------------------------------------------------- // returns list of drumset instrument names //--------------------------------------------------------------- /* ## TO DO * function getDrumset(){ * return array( * 35=>'Acoustic Bass Drum', * 36=>'Bass Drum 1', * 37=>'Side Stick', * 38=>'Acoustic Snare', * 39=>'Hand Clap', * 40=>'Electric Snare', * 41=>'Low Floor Tom', * 42=>'Closed Hi-Hat', * 43=>'High Floor Tom', * 44=>'Pedal Hi-Hat', * 45=>'Low Tom', * 46=>'Open Hi-Hat', * 47=>'Low Mid Tom', * 48=>'High Mid Tom', * 49=>'Crash Cymbal 1', * 50=>'High Tom', * 51=>'Ride Cymbal 1', * 52=>'Chinese Cymbal', * 53=>'Ride Bell', * 54=>'Tambourine', * 55=>'Splash Cymbal', * 56=>'Cowbell', * 57=>'Crash Cymbal 2', * 58=>'Vibraslap', * 59=>'Ride Cymbal 2', * 60=>'High Bongo', * 61=>'Low Bongo', * 62=>'Mute High Conga', * 63=>'Open High Conga', * 64=>'Low Conga', * 65=>'High Timbale', * 66=>'Low Timbale', * //35..66 * 67=>'High Agogo', * 68=>'Low Agogo', * 69=>'Cabase', * 70=>'Maracas', * 71=>'Short Whistle', * 72=>'Long Whistle', * 73=>'Short Guiro', * 74=>'Long Guiro', * 75=>'Claves', * 76=>'High Wood Block', * 77=>'Low Wood Block', * 78=>'Mute Cuica', * 79=>'Open Cuica', * 80=>'Mute Triangle', * 81=>'Open Triangle'); * } */ //--------------------------------------------------------------- // returns list of standard drum kit names //--------------------------------------------------------------- /* ## TO DO * function getDrumkitList(){ * return array( * 1 => 'Dry', * 9 => 'Room', * 19 => 'Power', * 25 => 'Electronic', * 33 => 'Jazz', * 41 => 'Brush', * 57 => 'SFX', * 128 => 'Default' * ); * } */ //--------------------------------------------------------------- // returns list of note names //--------------------------------------------------------------- /* * function getNoteList(){ * //note 69 (A6) = A440 * 
//note 60 (C6) = Middle C
 * return array(
 *   //Do    Re     Mi    Fa     So    La     Ti
 *   'C0', 'Cs0', 'D0', 'Ds0', 'E0', 'F0', 'Fs0', 'G0', 'Gs0', 'A0', 'As0', 'B0',
 *   'C1', 'Cs1', 'D1', 'Ds1', 'E1', 'F1', 'Fs1', 'G1', 'Gs1', 'A1', 'As1', 'B1',
 *   'C2', 'Cs2', 'D2', 'Ds2', 'E2', 'F2', 'Fs2', 'G2', 'Gs2', 'A2', 'As2', 'B2',
 *   'C3', 'Cs3', 'D3', 'Ds3', 'E3', 'F3', 'Fs3', 'G3', 'Gs3', 'A3', 'As3', 'B3',
 *   'C4', 'Cs4', 'D4', 'Ds4', 'E4', 'F4', 'Fs4', 'G4', 'Gs4', 'A4', 'As4', 'B4',
 *   'C5', 'Cs5', 'D5', 'Ds5', 'E5', 'F5', 'Fs5', 'G5', 'Gs5', 'A5', 'As5', 'B5',
 *   'C6', 'Cs6', 'D6', 'Ds6', 'E6', 'F6', 'Fs6', 'G6', 'Gs6', 'A6', 'As6', 'B6',
 *   'C7', 'Cs7', 'D7', 'Ds7', 'E7', 'F7', 'Fs7', 'G7', 'Gs7', 'A7', 'As7', 'B7',
 *   'C8', 'Cs8', 'D8', 'Ds8', 'E8', 'F8', 'Fs8', 'G8', 'Gs8', 'A8', 'As8', 'B8',
 *   'C9', 'Cs9', 'D9', 'Ds9', 'E9', 'F9', 'Fs9', 'G9', 'Gs9', 'A9', 'As9', 'B9',
 *   'C10','Cs10','D10','Ds10','E10','F10','Fs10','G10');
 * }
 */
#endregion

/// <summary>
/// Parses the raw bytes of one MTrk chunk into a <see cref="MidiTrack"/>.
/// Walks the byte stream event by event, accumulating delta-times into an
/// absolute tick counter and a tempo-weighted duration.
/// </summary>
/// <param name="data">Raw track chunk data (without the MTrk chunk header).</param>
/// <param name="trackNumber">Zero-based index of the track within the file.</param>
/// <returns>The populated track, including its tick length and duration.</returns>
/// <exception cref="InvalidDataException">A variable-length field points past the end of the chunk.</exception>
private MidiTrack parseTrack(byte[] data, int trackNumber)
{
    MidiTrack track = new MidiTrack();
    int trackLen = data.Length;
    int position = 0;
    long currentTicks = 0;
    int currentDelta;
    byte eventType;
    int eventTypeHigh;
    int eventTypeLow;
    byte meta;
    int num;
    int len;
    byte tmp;
    byte c;
    string txt;
    MidiEvent evt;
    int currentTempo = 0;
    long lastTempoTicks = -1; // tick position of the most recent tempo event; -1 = none seen yet

    while (position < trackLen)
    {
        // Every event is preceded by a variable-length delta-time
        currentDelta = readVarLen(ref data, ref position);
        currentTicks += currentDelta;
        eventType = data[position];
        eventTypeHigh = (eventType >> 4);                // status nibble
        eventTypeLow = (eventType - eventTypeHigh * 16); // channel nibble
        switch (eventTypeHigh)
        {
            case MidiEvents.EVT_PROGRAM_CHANGE: //PrCh = ProgramChange
                evt = new MidiEvent(currentTicks, eventTypeHigh, eventTypeLow + 1, data[position + 1]);
                evt.Description = " PrCh ch=" + evt.Channel + " p=" + evt.Param0;
                track.Add(evt);
                position += 2;
                break;
            case MidiEvents.EVT_NOTE_ON: //On
                evt = new MidiEvent(currentTicks, eventTypeHigh, eventTypeLow + 1, data[position + 1], data[position + 2]);
                evt.Description = " On ch=" + evt.Channel + " n=" + evt.Param0 + " v=" + evt.Param1;
                track.Add(evt);
                position += 3;
                break;
            case MidiEvents.EVT_NOTE_OFF: //Off
                evt = new MidiEvent(currentTicks, eventTypeHigh, eventTypeLow + 1, data[position + 1], data[position + 2]);
                evt.Description = " Off ch=" + evt.Channel + " n=" + evt.Param0 + " v=" + evt.Param1;
                track.Add(evt);
                position += 3;
                break;
            case MidiEvents.EVT_POLY_PRESSURE: //PoPr = PolyPressure
                evt = new MidiEvent(currentTicks, eventTypeHigh, eventTypeLow + 1, data[position + 1], data[position + 2]);
                evt.Description = " PoPr ch=" + evt.Channel + " n=" + evt.Param0 + " v=" + evt.Param1;
                track.Add(evt);
                position += 3;
                break;
            case MidiEvents.EVT_CONTROLLER_CHANGE: //Par = ControllerChange
                evt = new MidiEvent(currentTicks, eventTypeHigh, eventTypeLow + 1, data[position + 1], data[position + 2]);
                evt.Description = " Par ch=" + evt.Channel + " c=" + evt.Param0 + " v=" + evt.Param1;
                track.Add(evt);
                position += 3;
                break;
            case MidiEvents.EVT_CHANNEL_PRESSURE: //ChPr = ChannelPressure
                evt = new MidiEvent(currentTicks, eventTypeHigh, eventTypeLow + 1, data[position + 1]);
                evt.Description = " ChPr ch=" + evt.Channel + " v=" + evt.Param0;
                track.Add(evt);
                position += 2;
                break;
            case MidiEvents.EVT_PITCH_BEND: //Pb = PitchBend
                // 14-bit value: LSB first, then MSB, 7 significant bits each
                evt = new MidiEvent(currentTicks, eventTypeHigh, eventTypeLow + 1, (data[position + 1] & 0x7F) | ((data[position + 2] & 0x7F) << 7));
                evt.Description = " Pb ch=" + evt.Channel + " v=" + evt.Param0;
                track.Add(evt);
                position += 3;
                break;
            default:
                switch (eventType)
                {
                    case 0xFF: // Meta
                        meta = data[position + 1];
                        switch (meta)
                        {
                            case MidiEvents.META_SEQUENCE_NUM: // sequence_number
                                tmp = data[position + 2];
                                if (tmp == 0x00)
                                {
                                    // zero-length form: the sequence number is the track order
                                    num = trackNumber;
                                    position += 3;
                                }
                                else
                                {
                                    num = 1;
                                    position += 5;
                                }
                                evt = new MidiEvent(currentTicks, meta, -1, num);
                                evt.isMetaEvent = true;
                                evt.Description = " Seqnr " + evt.Param0;
                                track.Add(evt);
                                break;
                            case MidiEvents.META_TEXT: // Meta Text
                            case MidiEvents.META_COPYRIGHT: // Meta Copyright
                            case MidiEvents.META_TRACK_NAME: // Meta TrackName (sequence name)
                            case MidiEvents.META_INSTRUMENT_NAME: // Meta InstrumentName
                            case MidiEvents.META_LYRICS: // Meta Lyrics
                            case MidiEvents.META_MARKER: // Meta Marker
                            case MidiEvents.META_CUE: // Meta Cue
                                string[] texttypes = new string[7] { "Text", "Copyright", "TrkName", "InstrName", "Lyric", "Marker", "Cue" };
                                string textType = texttypes[meta - 1]; // META_TEXT..META_CUE are contiguous, starting at 1
                                position += 2;
                                len = readVarLen(ref data, ref position);
                                if ((len + position) > trackLen)
                                {
                                    throw new InvalidDataException("Meta " + textType + " has corrupt variable length field (" + len + ") [track: " + trackNumber + " dt: " + currentDelta + "]");
                                }
                                txt = Encoding.ASCII.GetString(data, position, len);
                                if (MidiEvents.META_TEXT == meta || MidiEvents.META_TRACK_NAME == meta || MidiEvents.META_MARKER == meta)
                                {
                                    comment.Append(txt).Append(Settings.InternalValueSeparator);
                                }
                                else if (MidiEvents.META_COPYRIGHT == meta)
                                {
                                    tagData.IntegrateValue(TagData.TAG_FIELD_COPYRIGHT, txt);
                                }
                                evt = new MidiEvent(currentTicks, meta, -1, meta - 1);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta " + textType + " \"" + txt + "\"";
                                track.Add(evt);
                                position += len;
                                break;
                            case MidiEvents.META_CHANNEL_PREFIX: // ChannelPrefix
                                evt = new MidiEvent(currentTicks, meta, -1, data[position + 3]);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta ChannelPrefix " + evt.Param0;
                                track.Add(evt);
                                position += 4;
                                break;
                            case MidiEvents.META_CHANNEL_PREFIX_PORT: // ChannelPrefixOrPort
                                evt = new MidiEvent(currentTicks, meta, -1, data[position + 3]);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta ChannelPrefixOrPort " + evt.Param0;
                                track.Add(evt);
                                position += 4;
                                break;
                            case MidiEvents.META_TRACK_END: // Meta TrkEnd
                                evt = new MidiEvent(currentTicks, meta, -1, -1);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta TrkEnd";
                                track.Add(evt);
                                track.Ticks = currentTicks;
                                if (lastTempoTicks > -1) // there has been at least one tempo change in the track
                                {
                                    track.Duration += (currentTicks - lastTempoTicks) * currentTempo;
                                }
                                else
                                {
                                    track.Duration = currentTicks * this.tempo;
                                }
                                return (track); //ignore rest
                            case MidiEvents.META_TEMPO: // Tempo
                                // Adds (ticks since last tempo event)*current tempo to track duration
                                if (lastTempoTicks > -1)
                                {
                                    track.Duration += (currentTicks - lastTempoTicks) * currentTempo;
                                }
                                lastTempoTicks = currentTicks;
                                currentTempo = data[position + 3] * 0x010000 + data[position + 4] * 0x0100 + data[position + 5];
                                if (0 == currentTempo)
                                {
                                    currentTempo = DEFAULT_TEMPO;
                                }
                                evt = new MidiEvent(currentTicks, meta, -1, currentTempo);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta Tempo " + evt.Param0 + " (duration :" + track.Duration + ")";
                                track.Add(evt);
                                // Sets song tempo as last tempo event of 1st track
                                // according to some MIDI files convention
                                if (0 == trackNumber /* && 0 == this.tempo*/)
                                {
                                    this.tempo = currentTempo;
                                    this.tempoMsgNum = track.events.Count - 1;
                                }
                                position += 6;
                                break;
                            case MidiEvents.META_SMPTE_OFFSET: // SMPTE offset
                                byte h = data[position + 3];
                                byte m = data[position + 4];
                                byte s = data[position + 5];
                                byte f = data[position + 6];
                                byte fh = data[position + 7];
                                // TODO : store the arguments in a solid structure within MidiEvent
                                evt = new MidiEvent(currentTicks, meta, -1, -1);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta SMPTE " + h + " " + m + " " + s + " " + f + " " + fh;
                                track.Add(evt);
                                position += 8;
                                break;
                            case MidiEvents.META_TIME_SIGNATURE: // TimeSig
                                byte z = data[position + 3];
                                // FIX: the denominator is stored as a power of two (2^n).
                                // The previous code used "2 ^ n", which is XOR in C#,
                                // not exponentiation (e.g. it yielded 6 for n=4 instead of 16).
                                int t = 1 << data[position + 4];
                                byte mc = data[position + 5];
                                c = data[position + 6];
                                // TODO : store the arguments in a solid structure within MidiEvent
                                evt = new MidiEvent(currentTicks, meta, -1, -1);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta TimeSig " + z + "/" + t + " " + mc + " " + c;
                                track.Add(evt);
                                position += 7;
                                break;
                            case MidiEvents.META_KEY_SIGNATURE: // KeySig
                                evt = new MidiEvent(currentTicks, meta, -1, data[position + 3], data[position + 4]);
                                evt.isMetaEvent = true;
                                evt.Description = " Meta KeySig vz=" + evt.Param0 + " " + (evt.Param1 == 0 ? "major" : "minor");
                                track.Add(evt);
                                position += 5;
                                break;
                            case MidiEvents.META_SEQUENCER_DATA: // Sequencer specific data
                                position += 2;
                                len = readVarLen(ref data, ref position);
                                if ((len + position) > trackLen)
                                {
                                    throw new InvalidDataException("SeqSpec has corrupt variable length field (" + len + ") [track: " + trackNumber + " dt: " + currentDelta + "]");
                                }
                                position -= 3; // rewind so the +3 offsets below line up with the payload
                                {
                                    evt = new MidiEvent(currentTicks, meta, -1, currentTempo);
                                    evt.isMetaEvent = true;
                                    evt.Description = " Meta SeqSpec";
                                    track.Add(evt);
                                }
                                position += len + 3;
                                break;
                            default: // "unknown" Meta-Events
                                byte metacode = data[position + 1];
                                position += 2;
                                len = readVarLen(ref data, ref position);
                                if ((len + position) > trackLen)
                                {
                                    throw new InvalidDataException("Meta " + metacode + " has corrupt variable length field (" + len + ") [track: " + trackNumber + " dt: " + currentDelta + "]");
                                }
                                position -= 3; // rewind so the +3 offsets below line up with the payload
                                {
                                    String str = Encoding.ASCII.GetString(data, position + 3, len);
                                    evt = new MidiEvent(currentTicks, meta, -1, currentTempo);
                                    evt.isMetaEvent = true;
                                    evt.Description = " Meta 0x" + metacode + " " + str;
                                    track.Add(evt);
                                }
                                position += len + 3;
                                break;
                        } // switch meta
                        break; // End Meta
                    case MidiEvents.EVT_SYSEX: // SysEx
                        position += 1;
                        len = readVarLen(ref data, ref position);
                        if ((len + position) > trackLen)
                        {
                            throw new InvalidDataException("SysEx has corrupt variable length field (" + len + ") [track: " + trackNumber + " dt: " + currentDelta + " p: " + position + "]");
                        }
                        {
                            evt = new MidiEvent(currentTicks, eventTypeHigh, -1, currentTempo);
                            evt.isMetaEvent = true;
                            evt.Description = " SysEx";
                            track.Add(evt);
                        }
                        position += len;
                        break;
                    default: // Repetition of last event? (running status)
                        if ((track.LastEvent.Type == MidiEvents.EVT_NOTE_ON) || (track.LastEvent.Type == MidiEvents.EVT_NOTE_OFF))
                        {
                            evt = new MidiEvent(currentTicks, track.LastEvent.Type, track.LastEvent.Channel, data[position], data[position + 1]);
                            evt.Description = " " + (track.LastEvent.Type == MidiEvents.EVT_NOTE_ON ? "On" : "Off") + " ch=" + evt.Channel + " n=" + evt.Param0 + " v=" + evt.Param1;
                            track.Add(evt);
                            position += 2;
                        }
                        else if (track.LastEvent.Type == MidiEvents.EVT_PROGRAM_CHANGE)
                        {
                            evt = new MidiEvent(currentTicks, track.LastEvent.Type, track.LastEvent.Channel, data[position]);
                            evt.Description = " PrCh ch=" + evt.Channel + " p=" + evt.Param0;
                            track.Add(evt);
                            position += 1;
                        }
                        else if (track.LastEvent.Type == MidiEvents.EVT_POLY_PRESSURE)
                        {
                            // FIX: read the two data bytes at the current position.
                            // The previous code read data[position + 1] / data[position + 2]
                            // while only advancing by 2, unlike every other two-byte
                            // running-status case above (off-by-one).
                            evt = new MidiEvent(currentTicks, track.LastEvent.Type, track.LastEvent.Channel, data[position], data[position + 1]);
                            evt.Description = " PoPr ch=" + evt.Channel + " n=" + evt.Param0 + " v=" + evt.Param1;
                            track.Add(evt);
                            position += 2;
                        }
                        else if (track.LastEvent.Type == MidiEvents.EVT_CHANNEL_PRESSURE)
                        {
                            evt = new MidiEvent(currentTicks, track.LastEvent.Type, track.LastEvent.Channel, data[position]);
                            evt.Description = " ChPr ch=" + evt.Channel + " v=" + evt.Param0;
                            track.Add(evt);
                            position += 1;
                        }
                        else if (track.LastEvent.Type == MidiEvents.EVT_CONTROLLER_CHANGE)
                        {
                            evt = new MidiEvent(currentTicks, track.LastEvent.Type, track.LastEvent.Channel, data[position], data[position + 1]);
                            evt.Description = " Par ch=" + evt.Channel + " c=" + evt.Param0 + " v=" + evt.Param1;
                            track.Add(evt);
                            position += 2;
                        }
                        else if (track.LastEvent.Type == MidiEvents.EVT_PITCH_BEND)
                        {
                            evt = new MidiEvent(currentTicks, track.LastEvent.Type, track.LastEvent.Channel, (data[position] & 0x7F) | ((data[position + 1] & 0x7F) << 7));
                            evt.Description = " Pb ch=" + evt.Channel + " v=" + evt.Param0;
                            track.Add(evt);
                            position += 2;
                        }
                        //default:
                        //    // MM: ToDo: Repetition of SysEx and META-events? with <last>??
                        //    _err("unknown repetition: $last");
                        break;
                } // eventType
                break;
        } // $high
    } // while p < trackLen
    track.Ticks = currentTicks;
    if (lastTempoTicks > -1)
    {
        track.Duration += (currentTicks - lastTempoTicks) * currentTempo;
    }
    else
    {
        track.Duration = currentTicks * this.tempo;
    }
    return (track);
}
/// <summary>
/// Builds the per-clock queues of MIDI and synth-parameter events needed to
/// render one track of a VSQ sequence over the clock range
/// [clock_start, clock_end]: singer changes, note on/off, pitch bend (with
/// vibrato expansion), pitch-bend sensitivity, and control curves.
/// </summary>
/// <param name="vsq">Source sequence.</param>
/// <param name="track">Index of the track to render.</param>
/// <param name="clock_start">First clock (inclusive) of the range.</param>
/// <param name="clock_end">Last clock (inclusive) of the range.</param>
/// <returns>Map from gate-time clock to the event queue to send at that clock.</returns>
private TreeMap<Integer, MidiEventQueue> generateMidiEvent( VsqFileEx vsq, int track, int clock_start, int clock_end )
{
    TreeMap<Integer, MidiEventQueue> list = new TreeMap<Integer, MidiEventQueue>();
    VsqTrack t = vsq.Track.get( track );
    // Singer changes
    for ( Iterator<VsqEvent> itr = t.getSingerEventIterator(); itr.hasNext(); ) {
        VsqEvent item = itr.next();
        if ( clock_start <= item.Clock && item.Clock <= clock_end ) {
            if ( item.ID.IconHandle == null ) {
                continue;
            }
            int program = item.ID.IconHandle.Program;
            if ( 0 > program || program >= AquesToneDriver.SINGERS.Length ) {
                program = 0; // unknown program -> fall back to the first singer
            }
            ParameterEvent singer = new ParameterEvent();
            singer.index = mDriver.phontParameterIndex;
            singer.value = program + 0.01f;
            if ( !list.containsKey( item.Clock ) ) {
                list.put( item.Clock, new MidiEventQueue() );
            }
            MidiEventQueue queue = list.get( item.Clock );
            if ( queue.param == null ) {
                queue.param = new Vector<ParameterEvent>();
            }
            queue.param.add( singer );
        } else if ( clock_end < item.Clock ) {
            break; // events are clock-ordered; nothing further can match
        }
    }
    // Note on / off
    // Gate-time ranges for which PIT has already been emitted; used to avoid
    // sending the note-head pitch bend twice.
    Vector<Point> pit_send = new Vector<Point>();
    VsqBPList pit = t.getCurve( "pit" );
    VsqBPList pbs = t.getCurve( "pbs" );
    VsqBPList dyn = t.getCurve( "dyn" );
    VsqBPList bre = t.getCurve( "bre" );
    VsqBPList cle = t.getCurve( "cle" );
    VsqBPList por = t.getCurve( "por" );
    for ( Iterator<VsqEvent> itr = t.getNoteEventIterator(); itr.hasNext(); ) {
        VsqEvent item = itr.next();
        int endclock = item.Clock + item.ID.getLength();
        boolean contains_start = clock_start <= item.Clock && item.Clock <= clock_end;
        boolean contains_end = clock_start <= endclock && endclock <= clock_end;
        if ( contains_start || contains_end ) {
            if ( contains_start ) {
                #region contains_start
                // The note-on gate time lies inside the range: create the note-on MIDI events
                String lyric = item.ID.LyricHandle.L0.Phrase;
                String katakana = KanaDeRomanization.hiragana2katakana( KanaDeRomanization.Attach( lyric ) );
                int index = -1;
                for ( int i = 0; i < AquesToneDriver.PHONES.Length; i++ ) {
                    if ( katakana.Equals( AquesToneDriver.PHONES[i] ) ) {
                        index = i;
                        break;
                    }
                }
                if ( index >= 0 ) {
                    if ( !list.containsKey( item.Clock ) ) {
                        list.put( item.Clock, new MidiEventQueue() );
                    }
                    MidiEventQueue queue = list.get( item.Clock );
                    if ( queue.noteon == null ) {
                        queue.noteon = new Vector<MidiEvent>();
                    }
                    // Send the command that moves the phoneme table to row 'index'
                    MidiEvent moveline = new MidiEvent();
                    moveline.firstByte = 0xb0;
                    moveline.data = new[] { 0x0a, index };
                    MidiEvent noteon = new MidiEvent();
                    noteon.firstByte = 0x90;
                    noteon.data = new int[] { item.ID.Note, item.ID.Dynamics };
                    Vector<MidiEvent> add = Arrays.asList( new MidiEvent[] { moveline, noteon } );
                    queue.noteon.addAll( add );
                    pit_send.add( new Point( item.Clock, item.Clock ) );
                }
                /* parameters applied at the note head */
                // Release
                MidiEventQueue q = null;
                if ( !list.containsKey( item.Clock ) ) {
                    q = new MidiEventQueue();
                    // FIX: register the freshly created queue. Previously it was never
                    // put into 'list', so the note-head parameter events built below
                    // were silently dropped whenever the phoneme lookup above failed.
                    list.put( item.Clock, q );
                } else {
                    q = list.get( item.Clock );
                }
                if ( q.param == null ) {
                    q.param = new Vector<ParameterEvent>();
                }
                String strRelease = VsqFileEx.getEventTag( item, VsqFileEx.TAG_VSQEVENT_AQUESTONE_RELEASE );
                int release = 64;
                try {
                    release = str.toi( strRelease );
                } catch ( Exception ex ) {
                    Logger.write( typeof( AquesToneWaveGenerator ) + ".generateMidiEvent; ex=" + ex + "\n" );
                    release = 64; // tag missing/corrupt -> keep the default release
                }
                ParameterEvent pe = new ParameterEvent();
                pe.index = mDriver.releaseParameterIndex;
                pe.value = release / 127.0f;
                q.param.add( pe );
                // dyn
                int dynAtStart = dyn.getValue( item.Clock );
                ParameterEvent peDyn = new ParameterEvent();
                peDyn.index = mDriver.volumeParameterIndex;
                peDyn.value = (float)(dynAtStart - dyn.getMinimum()) / (float)(dyn.getMaximum() - dyn.getMinimum());
                q.param.add( peDyn );
                // bre
                int breAtStart = bre.getValue( item.Clock );
                ParameterEvent peBre = new ParameterEvent();
                peBre.index = mDriver.haskyParameterIndex;
                peBre.value = (float)(breAtStart - bre.getMinimum()) / (float)(bre.getMaximum() - bre.getMinimum());
                q.param.add( peBre );
                // cle
                int cleAtStart = cle.getValue( item.Clock );
                ParameterEvent peCle = new ParameterEvent();
                peCle.index = mDriver.resonancParameterIndex;
                peCle.value = (float)(cleAtStart - cle.getMinimum()) / (float)(cle.getMaximum() - cle.getMinimum());
                q.param.add( peCle );
                // por
                int porAtStart = por.getValue( item.Clock );
                ParameterEvent pePor = new ParameterEvent();
                pePor.index = mDriver.portaTimeParameterIndex;
                pePor.value = (float)(porAtStart - por.getMinimum()) / (float)(por.getMaximum() - por.getMinimum());
                q.param.add( pePor );
                #endregion
            }
            // Vibrato.
            // When a vibrato exists, PBS is adjusted automatically below.
            if ( item.ID.VibratoHandle == null ) {
                if ( contains_start ) {
                    // Force the PIT / PBS at the note head
                    int notehead_pit = pit.getValue( item.Clock );
                    MidiEvent pit0 = getPitMidiEvent( notehead_pit );
                    if ( !list.containsKey( item.Clock ) ) {
                        list.put( item.Clock, new MidiEventQueue() );
                    }
                    MidiEventQueue queue = list.get( item.Clock );
                    if ( queue.pit == null ) {
                        queue.pit = new Vector<MidiEvent>();
                    } else {
                        queue.pit.clear();
                    }
                    queue.pit.add( pit0 );
                    int notehead_pbs = pbs.getValue( item.Clock );
                    ParameterEvent pe = new ParameterEvent();
                    pe.index = mDriver.bendLblParameterIndex;
                    pe.value = notehead_pbs / 13.0f;
                    if ( queue.param == null ) {
                        queue.param = new Vector<ParameterEvent>();
                    }
                    queue.param.add( pe );
                }
            } else {
                int delta_clock = 5; // clock interval at which the pitch is sampled
                int tempo = 120;
                double sec_start_act = vsq.getSecFromClock( item.Clock );
                double sec_end_act = vsq.getSecFromClock( item.Clock + item.ID.getLength() ); // NOTE(review): currently unused
                double delta_sec = delta_clock / (8.0 * tempo); // time interval at which the pitch is sampled
                float pitmax = 0.0f;
                int st = item.Clock;
                if ( st < clock_start ) {
                    st = clock_start;
                }
                int end = item.Clock + item.ID.getLength();
                if ( clock_end < end ) {
                    end = clock_end;
                }
                pit_send.add( new Point( st, end ) );
                // Pitch from the note head until the vibrato starts
                double sec_vibstart = vsq.getSecFromClock( item.Clock + item.ID.VibratoDelay );
                int pit_count = (int)((sec_vibstart - sec_start_act) / delta_sec);
                TreeMap<Integer, Float> pit_change = new TreeMap<Integer, Float>();
                for ( int i = 0; i < pit_count; i++ ) {
                    double gtime = sec_start_act + delta_sec * i;
                    int clock = (int)vsq.getClockFromSec( gtime );
                    float pvalue = (float)t.getPitchAt( clock );
                    pitmax = Math.Max( pitmax, Math.Abs( pvalue ) );
                    pit_change.put( clock, pvalue );
                }
                // Pitch across the vibrato section
                Vector<PointD> ret = new Vector<PointD>();
                Iterator<PointD> itr2 = new VibratoPointIteratorBySec(
                    vsq,
                    item.ID.VibratoHandle.getRateBP(), item.ID.VibratoHandle.getStartRate(),
                    item.ID.VibratoHandle.getDepthBP(), item.ID.VibratoHandle.getStartDepth(),
                    item.Clock + item.ID.VibratoDelay, item.ID.getLength() - item.ID.VibratoDelay,
                    (float)delta_sec );
                for ( ; itr2.hasNext(); ) {
                    PointD p = itr2.next();
                    float gtime = (float)p.getX();
                    int clock = (int)vsq.getClockFromSec( gtime );
                    float pvalue = (float)(t.getPitchAt( clock ) + p.getY() * 100.0);
                    pitmax = Math.Max( pitmax, Math.Abs( pvalue ) );
                    pit_change.put( clock, pvalue );
                }
                // PBS required to realize the maximum pitch bend
                int required_pbs = (int)Math.Ceiling( pitmax / 100.0 );
#if DEBUG
                sout.println( "AquesToneRenderingRunner#generateMidiEvent; required_pbs=" + required_pbs );
#endif
                if ( required_pbs > 13 ) {
                    required_pbs = 13;
                }
                if ( !list.containsKey( item.Clock ) ) {
                    list.put( item.Clock, new MidiEventQueue() );
                }
                MidiEventQueue queue = list.get( item.Clock );
                ParameterEvent pe = new ParameterEvent();
                pe.index = mDriver.bendLblParameterIndex;
                pe.value = required_pbs / 13.0f;
                if ( queue.param == null ) {
                    queue.param = new Vector<ParameterEvent>();
                }
                queue.param.add( pe );
                // Queue the PIT changes in clock order
                for ( Iterator<Integer> itr3 = pit_change.keySet().iterator(); itr3.hasNext(); ) {
                    Integer clock = itr3.next();
                    if ( clock_start <= clock && clock <= clock_end ) {
                        float pvalue = pit_change.get( clock );
                        int pit_value = (int)(8192.0 / (double)required_pbs * pvalue / 100.0);
                        if ( !list.containsKey( clock ) ) {
                            list.put( clock, new MidiEventQueue() );
                        }
                        MidiEventQueue q = list.get( clock );
                        MidiEvent me = getPitMidiEvent( pit_value );
                        if ( q.pit == null ) {
                            q.pit = new Vector<MidiEvent>();
                        } else {
                            q.pit.clear();
                        }
                        q.pit.add( me );
                    } else if ( clock_end < clock ) {
                        break;
                    }
                }
            }
            //pit_send.add( pit_send_p );
            // Create the note-off MIDI event
            if ( contains_end ) {
                MidiEvent noteoff = new MidiEvent();
                noteoff.firstByte = 0x80;
                noteoff.data = new int[] { item.ID.Note, 0x40 }; // note-off velocity
                Vector<MidiEvent> a_noteoff = Arrays.asList( new MidiEvent[] { noteoff } );
                if ( !list.containsKey( endclock ) ) {
                    list.put( endclock, new MidiEventQueue() );
                }
                MidiEventQueue q = list.get( endclock );
                if ( q.noteoff == null ) {
                    q.noteoff = new Vector<MidiEvent>();
                }
                q.noteoff.addAll( a_noteoff );
                pit_send.add( new Point( endclock, endclock ) ); // needed to suppress sending PIT here
            }
        }
        if ( clock_end < item.Clock ) {
            break;
        }
    }
    // Pitch bend sensitivity.
    // Sending it via RPN does not work well, so the parameter is poked directly.
    if ( pbs != null ) {
        int keycount = pbs.size();
        for ( int i = 0; i < keycount; i++ ) {
            int clock = pbs.getKeyClock( i );
            if ( clock_start <= clock && clock <= clock_end ) {
                int value = pbs.getElementA( i );
                ParameterEvent pbse = new ParameterEvent();
                pbse.index = mDriver.bendLblParameterIndex;
                pbse.value = value / 13.0f;
                MidiEventQueue queue = null;
                if ( list.containsKey( clock ) ) {
                    queue = list.get( clock );
                } else {
                    queue = new MidiEventQueue();
                }
                if ( queue.param == null ) {
                    queue.param = new Vector<ParameterEvent>();
                }
                queue.param.add( pbse );
                list.put( clock, queue );
            } else if ( clock_end < clock ) {
                break;
            }
        }
    }
    // Pitch bend
    if ( pit != null ) {
        int keycount = pit.size();
        for ( int i = 0; i < keycount; i++ ) {
            int clock = pit.getKeyClock( i );
            if ( clock_start <= clock && clock <= clock_end ) {
                boolean contains = false;
                for ( Iterator<Point> itr = pit_send.iterator(); itr.hasNext(); ) {
                    Point p = itr.next();
                    if ( p.x <= clock && clock <= p.y ) {
                        contains = true;
                        break;
                    }
                }
                if ( contains ) {
                    continue; // PIT at this clock was already emitted by the note handling above
                }
                int value = pit.getElementA( i );
                MidiEvent pbs0 = getPitMidiEvent( value );
                MidiEventQueue queue = null;
                if ( list.containsKey( clock ) ) {
                    queue = list.get( clock );
                } else {
                    queue = new MidiEventQueue();
                }
                if ( queue.pit == null ) {
                    queue.pit = new Vector<MidiEvent>();
                } else {
                    queue.pit.clear();
                }
                queue.pit.add( pbs0 );
                list.put( clock, queue );
            } else if ( clock_end < clock ) {
                break;
            }
        }
    }
    appendParameterEvents( list, dyn, mDriver.volumeParameterIndex, clock_start, clock_end );
    appendParameterEvents( list, bre, mDriver.haskyParameterIndex, clock_start, clock_end );
    appendParameterEvents( list, cle, mDriver.resonancParameterIndex, clock_start, clock_end );
    appendParameterEvents( list, por, mDriver.portaTimeParameterIndex, clock_start, clock_end );
    return list;
}
/// <summary>
/// Writes the given MIDI event to the underlying output.
/// </summary>
/// <param name="e">The MIDI event to write.</param>
public abstract void Write(MidiEvent e);
/// <summary>
/// Builds a pitch-bend MIDI event (status byte 0xE0) from a signed bend amount.
/// A value of 0 maps to the 14-bit center point (0x2000).
/// </summary>
/// <param name="pitch_bend">Signed pitch-bend amount to encode.</param>
/// <returns>A MidiEvent whose data holds the LSB and MSB of the 14-bit value.</returns>
private static MidiEvent getPitMidiEvent( int pitch_bend ) {
    // Bias into the unsigned 14-bit range, then split into two 7-bit data bytes.
    int biased = (pitch_bend + 0x2000) & 0x3fff;
    int high = (biased >> 7) & 0xff;
    int low = (biased - (high << 7)) & 0xff;
    MidiEvent result = new MidiEvent();
    result.firstByte = 0xE0;
    result.data = new int[] { low, high };
    return result;
}
/// <summary>
/// Initializes a new instance of the event-args class, capturing the received
/// MIDI event and the time it was received.
/// </summary>
/// <param name="midiEvent">The MIDI event that was received.</param>
/// <param name="time">Timestamp associated with the event.</param>
internal MidiEventReceivedEventArgs(MidiEvent midiEvent, DateTime time)
{
    Event = midiEvent;
    Time = time;
}
/// <summary>Parses a byte array into a track's worth of events.</summary>
/// <param name="data">The data to be parsed.</param>
/// <returns>The track containing the parsed events.</returns>
/// <exception cref="MidiParserException">
/// Thrown on malformed input (bad running status, unexpected SysEx
/// continuation, unknown status byte); all other exceptions are wrapped.
/// </exception>
public static MidiTrack ParseToTrack(byte [] data)
{
    long pos = 0; // current position in data
    bool running = false; // whether we're in running status
    int status = 0; // the current status byte
    bool sysExContinue = false; // whether we're in a multi-segment system exclusive message
    byte [] sysExData = null; // system exclusive data up to this point from a multi-segment message
    try
    {
        // Create the new track
        MidiTrack track = new MidiTrack();
        // Process all bytes, turning them into events
        while (pos < data.Length)
        {
            // Read in the delta time
            long deltaTime = ReadVariableLength(data, ref pos);
            // Get the next character
            byte nextValue = data[pos];
            // Are we continuing a sys ex?  If so, the next value better be 0x7F
            if (sysExContinue && (nextValue != 0x7f))
            {
                throw new MidiParserException("Expected to find a system exclusive continue byte.", pos);
            }
            // Are we in running status? Determine whether we're running and
            // what the current status byte is.
            if ((nextValue & 0x80) == 0)
            {
                // We're now in running status... if the last status was 0, uh oh!
                if (status == 0)
                {
                    throw new MidiParserException("Status byte required for running status.", pos);
                }
                // Keep the last iteration's status byte, and now we're in running mode
                running = true;
            }
            else
            {
                // Not running, so store the current status byte and mark running as false
                status = nextValue;
                running = false;
            }
            // Grab the 4-bit identifier
            byte messageType = (byte)((status >> 4) & 0xF);
            MidiEvent tempEvent = null;
            // Handle voice events (0x8..0xE cover note, pressure, controller,
            // program, and pitch-wheel messages)
            if (messageType >= 0x8 && messageType <= 0xE)
            {
                if (!running)
                {
                    pos++; // if we're running, we don't advance; if we're not running, we do
                }
                byte channel = (byte)(status & 0xF); // grab the channel from the status byte
                tempEvent = ParseVoiceEvent(deltaTime, messageType, channel, data, ref pos);
            }
            // Handle meta events
            else if (status == 0xFF)
            {
                pos++;
                byte eventType = data[pos];
                pos++;
                tempEvent = ParseMetaEvent(deltaTime, eventType, data, ref pos);
            }
            // Handle system exclusive events
            else if (status == 0xF0)
            {
                pos++;
                long length = ReadVariableLength(data, ref pos); // figure out how much data to read
                // If this is single-segment message, process the whole thing
                if (data[pos + length - 1] == 0xF7)
                {
                    // Trailing 0xF7 terminator is excluded from the stored payload
                    sysExData = new byte[length - 1];
                    Array.Copy(data, (int)pos, sysExData, 0, (int)length - 1);
                    tempEvent = new SystemExclusiveMidiEvent(deltaTime, sysExData);
                }
                // It's multi-segment, so add the new data to the previously aquired data
                else
                {
                    // Add to previously aquired sys ex data
                    int oldLength = (sysExData == null ? 0 : sysExData.Length);
                    byte [] newSysExData = new byte[oldLength + length];
                    if (sysExData != null)
                    {
                        sysExData.CopyTo(newSysExData, 0);
                    }
                    Array.Copy(data, (int)pos, newSysExData, oldLength, (int)length);
                    sysExData = newSysExData;
                    sysExContinue = true;
                }
                pos += length;
            }
            // Handle system exclusive continuations
            else if (status == 0xF7)
            {
                // A stray 0xF7 with no pending multi-segment message starts fresh
                if (!sysExContinue)
                {
                    sysExData = null;
                }
                // Figure out how much data there is
                pos++;
                long length = ReadVariableLength(data, ref pos);
                // Add to previously aquired sys ex data
                int oldLength = (sysExData == null ? 0 : sysExData.Length);
                byte [] newSysExData = new byte[oldLength + length];
                if (sysExData != null)
                {
                    sysExData.CopyTo(newSysExData, 0);
                }
                Array.Copy(data, (int)pos, newSysExData, oldLength, (int)length);
                sysExData = newSysExData;
                // Make it a system message if necessary (i.e. if we find an end marker)
                if (data[pos + length - 1] == 0xF7)
                {
                    tempEvent = new SystemExclusiveMidiEvent(deltaTime, sysExData);
                    sysExData = null;
                    sysExContinue = false;
                }
                // NOTE(review): unlike the 0xF0 branch, this branch never advances
                // pos past the 'length' payload bytes it just copied — that looks
                // like it would re-read the payload as events on the next loop
                // iteration. Confirm against a multi-segment SysEx test file.
            }
            // Nothing we know about
            else
            {
                throw new MidiParserException("Invalid status byte found.", pos);
            }
            // Add the newly parsed event if we got one
            if (tempEvent != null)
            {
                track.Events.Add(tempEvent);
            }
        }
        // Return the newly populated track
        return(track);
    }
    // Let MidiParserExceptions through
    catch (MidiParserException)
    {
        throw;
    }
    // Wrap all other exceptions in MidiParserExceptions
    catch (Exception exc)
    {
        throw new MidiParserException("Failed to parse MIDI file.", exc, pos);
    }
}
// ------------------------------------------------------------------------------------------------------
/// <summary>
/// Formats a MIDI event as a pipe-separated log line: for short messages the
/// columns are status, status type nibble, 1-based channel, and the two data
/// bytes (all upper-case hex), followed by a text description; anything else
/// is logged as SYSEX.
/// </summary>
private static string _midiEventToLog(MidiEvent ev)
{
    if (ev.MidiEventType != EMidiEventType.Short)
    {
        return ev.Hex + "| SYSEX";
    }
    string[] columns = new string[]
    {
        ev.Status.ToString("X2").ToUpper(),
        (ev.Status & 0xF0).ToString("X2").ToUpper(),
        ((ev.Status & 0x0F) + 1).ToString("X2").ToUpper(),
        ev.AllData[1].ToString("X2").ToUpper(),
        ev.AllData[2].ToString("X2").ToUpper(),
        _getShortMessageDescription(ev)
    };
    return ev.Hex + "| " + string.Join(" | ", columns);
}
/// <summary>Parse a meta MIDI event from the data stream.</summary>
/// <param name="deltaTime">The previously parsed delta-time for this event.</param>
/// <param name="eventType">The previously parsed type of message we're expecting to find.</param>
/// <param name="data">The data stream from which to read the event information.</param>
/// <param name="pos">The position of the start of the event information.</param>
/// <returns>The parsed meta MIDI event.</returns>
/// <exception cref="MidiParserException">Any failure while reading is wrapped and rethrown.</exception>
private static MidiEvent ParseMetaEvent(long deltaTime, byte eventType, byte [] data, ref long pos)
{
    try
    {
        MidiEvent tempEvent = null;
        // Create the correct meta event based on its meta event id/type
        switch (eventType)
        {
            // Sequence number
            case 0x00:
                // NOTE(review): this assumes the length byte is always 0x02;
                // a zero-length sequence-number event (meaning "use track
                // order") would be misparsed — confirm against the SMF spec.
                pos++; // skip past the 0x02
                int number = ((data[pos] << 8) | data[pos + 1]);
                tempEvent = new SequenceNumber(deltaTime, number);
                pos += 2; // skip read values
                break;
            // Text events (copyright, lyrics, etc)
            case 0x01:
                tempEvent = new Text(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x02:
                tempEvent = new Copyright(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x03:
                tempEvent = new SequenceTrackName(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x04:
                tempEvent = new Instrument(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x05:
                tempEvent = new Lyric(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x06:
                tempEvent = new Marker(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x07:
                tempEvent = new CuePoint(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x08:
                tempEvent = new ProgramName(deltaTime, ReadASCIIText(data, ref pos));
                break;
            case 0x09:
                tempEvent = new DeviceName(deltaTime, ReadASCIIText(data, ref pos));
                break;
            // Channel prefix
            case 0x20:
                pos++; // skip 0x1
                tempEvent = new ChannelPrefix(deltaTime, data[pos]);
                pos++; // skip read value
                break;
            // Port number
            case 0x21:
                pos++; // skip 0x1
                tempEvent = new MidiPort(deltaTime, data[pos]);
                pos++; // skip read value
                break;
            // End of track
            case 0x2F:
                pos++; // skip 0x0
                tempEvent = new EndOfTrack(deltaTime);
                break;
            // Tempo
            case 0x51:
                pos++; // skip 0x3
                // 24-bit big-endian microseconds-per-quarter-note
                int tempo = ((data[pos] << 16) | data[pos + 1] << 8 | data[pos + 2]);
                tempEvent = new Tempo(deltaTime, tempo);
                pos += 3;
                break;
            // SMPTE offset
            case 0x54:
                pos++; // skip 0x5
                tempEvent = new SMPTEOffset(deltaTime, data[pos], data[pos + 1], data[pos + 2], data[pos + 3], data[pos + 4]);
                pos += 5;
                break;
            // Time signature
            case 0x58:
                pos++; // skip past 0x4
                tempEvent = new TimeSignature(deltaTime, data[pos], data[pos + 1], data[pos + 2], data[pos + 3]);
                pos += 4;
                break;
            // Key signature
            case 0x59:
                pos++; // skip past 0x2
                tempEvent = new KeySignature(deltaTime, (Key)data[pos], (Tonality)data[pos + 1]);
                pos += 2;
                break;
            // Proprietary
            case 0x7F:
                // Read in the variable length and that much data, then store it
                long length = ReadVariableLength(data, ref pos);
                byte [] propData = new byte[length];
                Array.Copy(data, (int)pos, propData, 0, (int)length);
                tempEvent = new Proprietary(deltaTime, propData);
                pos += length;
                break;
            // An unknown meta event!
            default:
                // Read in the variable length and that much data, then store it
                // ('length' is shared with the 0x7F section above — C# switch
                // sections share one declaration scope)
                length = ReadVariableLength(data, ref pos);
                byte [] unknownData = new byte[length];
                Array.Copy(data, (int)pos, unknownData, 0, (int)length);
                tempEvent = new UnknownMetaMidiEvent(deltaTime, eventType, unknownData);
                pos += length;
                break;
        }
        return(tempEvent);
    }
    // Something bad happened; wrap it in a parser exception
    catch (Exception exc)
    {
        throw new MidiParserException("Unable to parse meta MIDI event.", exc, pos);
    }
}
/// <summary>
/// Writes the next MIDI event to the stream.
/// </summary>
/// <param name="e">
/// The next MIDI event to write.
/// </param>
public void WriteNextEvent(MidiEvent e)
{
    // The delta-time is emitted first as a variable-length quantity; the
    // message then writes itself by accepting this writer as a visitor.
    WriteVariableLengthQuantity(e.Ticks);
    e.Message.Accept(this);
}
/// <summary>Parse a voice event from the data stream.</summary>
/// <param name="deltaTime">The previously parsed delta-time for this event.</param>
/// <param name="messageType">The previously parsed type of message we're expecting to find.</param>
/// <param name="channel">The previously parsed channel for this message.</param>
/// <param name="data">The data stream from which to read the event information.</param>
/// <param name="pos">
/// The position of the start of the event information; advanced past the data
/// bytes as they are consumed.
/// </param>
/// <returns>The parsed voice MIDI event.</returns>
/// <exception cref="MidiParserException">
/// Thrown (wrapping the original exception and the current position) when
/// anything goes wrong while reading the event, including an unrecognized
/// <paramref name="messageType"/>.
/// </exception>
private static MidiEvent ParseVoiceEvent(long deltaTime, byte messageType, byte channel, byte [] data, ref long pos)
{
    try
    {
        MidiEvent tempEvent = null;

        // Create the correct voice event based on its message id/type.
        // Two-data-byte messages advance pos by 2; one-data-byte messages by 1.
        switch (messageType)
        {
            // NOTE OFF: note number, release velocity
            case 0x8:
                tempEvent = new NoteOff(deltaTime, channel, data[pos], data[pos + 1]);
                pos += 2;
                break;

            // NOTE ON: note number, velocity
            case 0x9:
                tempEvent = new NoteOn(deltaTime, channel, data[pos], data[pos + 1]);
                pos += 2;
                break;

            // AFTERTOUCH: note number, pressure
            case 0xA:
                tempEvent = new Aftertouch(deltaTime, channel, data[pos], data[pos + 1]);
                pos += 2;
                break;

            // CONTROLLER: controller number, value
            case 0xB:
                tempEvent = new Controller(deltaTime, channel, data[pos], data[pos + 1]);
                pos += 2;
                break;

            // PROGRAM CHANGE: program number
            case 0xC:
                tempEvent = new ProgramChange(deltaTime, channel, data[pos]);
                pos += 1;
                break;

            // CHANNEL PRESSURE: pressure value
            case 0xD:
                tempEvent = new ChannelPressure(deltaTime, channel, data[pos]);
                pos += 1;
                break;

            // PITCH WHEEL: two data bytes recombined and re-split into upper/lower
            case 0xE:
                // NOTE(review): SMF pitch-wheel data bytes are stored LSB first,
                // but this combines them as (first << 8) | second (an 8-bit shift
                // of 7-bit bytes) before Split14BitsToBytes. Whether that matches
                // PitchWheel's expected upper/lower semantics can't be confirmed
                // from here — verify against Split14BitsToBytes and PitchWheel.
                int position = ((data[pos] << 8) | data[pos + 1]);
                byte upper, lower;
                MidiEvent.Split14BitsToBytes(position, out upper, out lower);
                tempEvent = new PitchWheel(deltaTime, channel, upper, lower);
                pos += 2;
                break;

            // UH OH!
            default:
                throw new ArgumentOutOfRangeException("messageType", messageType, "Not a voice message.");
        }

        // Return the newly parsed event
        return(tempEvent);
    }
    // Something bad happened; wrap it in a parser exception
    catch (Exception exc)
    {
        throw new MidiParserException("Unable to parse voice MIDI event.", exc, pos);
    }
}
/// <summary>
/// Handles a track event by routing the event's message back to the
/// <see cref="TrackPlayer"/> that raised it.
/// </summary>
/// <param name="sender">The TrackPlayer that raised the event.</param>
/// <param name="anEvent">The MIDI event whose message is dispatched.</param>
private void tp_TrackEvent(object sender, MidiEvent anEvent)
{
    // Visitor-style double dispatch: the message calls back into the player.
    TrackPlayer player = (TrackPlayer)sender;
    anEvent.Message.Accept(player);
}
/// <summary>
/// Appends pitch-bend and pitch-bend-sensitivity events to the event queue.
/// </summary>
/// <param name="track">The track whose PBS and PIT curves are read.</param>
/// <param name="sequence">The event queue sequence the generated MIDI events are added to.</param>
private void appendPitchEvent( VsqTrack track, EventQueueSequence sequence )
{
    // Maps the values stored in the PBS curve to the pbs values actually
    // sent to AquesTone2. Entries beyond 12 are clamped to 127.
    const int maxPitchBendSensitivity = 23;
    int[] map = new int[maxPitchBendSensitivity + 1] {
        0, 5, 15, 35, 44, 54, 64, 74, 84, 93, 103, 113,
        127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
    };

    // Pitch-bend sensitivity: emitted as an RPN (0x0000) select followed by
    // a data-entry MSB, all as control-change (0xB0) messages.
    var pbs = track.MetaText.PBS;
    for ( int i = 0; i < pbs.size(); ++i ) {
        var clock = pbs.getKeyClock( i );
        {
            // RPN MSB = 0x00
            var e = new MidiEvent();
            e.firstByte = 0xB0;
            e.data = new int[] { 0x65, 0x00 };
            e.clock = clock;
            sequence.get( clock ).pit.add( e );
        }
        {
            // RPN LSB = 0x00
            var e = new MidiEvent();
            e.firstByte = 0xB0;
            e.data = new int[] { 0x64, 0x00 };
            e.clock = clock;
            sequence.get( clock ).pit.add( e );
        }
        {
            // RPN data entry MSB: clamp the curve value to the supported
            // range, then translate it through the AquesTone2 map.
            var e = new MidiEvent();
            e.firstByte = 0xB0;
            int value = Math.Max( 0, Math.Min( maxPitchBendSensitivity, pbs.getElementA( i ) ) );
            e.data = new int[] { 0x06, map[value] };
            e.clock = clock;
            sequence.get( clock ).pit.add( e );
        }
    }

    // Pitch bend: emitted as 0xE0 messages, data bytes LSB first.
    var pit = track.MetaText.PIT;
    for ( int i = 0; i < pit.size(); ++i ) {
        var clock = pit.getKeyClock( i );
        var e = new MidiEvent();
        e.firstByte = 0xE0;
        // PIT values are centered on 0; shift into the 0..16383 MIDI range.
        var value = pit.getElementA( i ) + 8192;
        // The emitted byte order {low 7 bits, high 7 bits} is unchanged from
        // the original, which merely had the `msb`/`lsb` names swapped.
        var lsb = 0x7F & value;
        var msb = 0x7F & (value >> 7);
        e.data = new int[] { lsb, msb };
        // BUGFIX: e.clock was never assigned in this loop, unlike the PBS
        // loop above which sets it on every event; without it the event
        // carries no timestamp even though it is queued at `clock`.
        e.clock = clock;
        sequence.get( clock ).pit.add( e );
    }
}