// Instrument-preview player loop: drains queued notes, keeps at most one channel
// audible, and auto-stops a released note after the configured timeout.
// Runs until stopEvent is signaled; frameEvent paces one iteration per audio frame.
unsafe void PlayerThread(object o)
{
    var lastNoteWasRelease = false;
    // UtcNow, not Now: the release timer measures elapsed time, and the local
    // wall clock can jump (DST, NTP) making the timeout fire early/late.
    var lastReleaseTime = DateTime.UtcNow;
    var activeChannel = -1;
    var waitEvents = new WaitHandle[] { stopEvent, frameEvent };

    NesApu.InitAndReset(apuIndex, sampleRate, palMode, expansionAudio, numExpansionChannels, dmcCallback);

    // Start with every channel muted; only the channel of the last queued note plays.
    for (int i = 0; i < channelStates.Length; i++)
        NesApu.EnableChannel(apuIndex, i, 0);

    while (true)
    {
        // Index 0 = stopEvent (shut down), index 1 = frameEvent (emulate one frame).
        int idx = WaitHandle.WaitAny(waitEvents);
        if (idx == 0)
            break;

        if (!noteQueue.IsEmpty)
        {
            // Only the most recently queued note matters; discard the rest.
            PlayerNote lastNote = new PlayerNote();
            while (noteQueue.TryDequeue(out PlayerNote note))
                lastNote = note;

            activeChannel = lastNote.channel;
            if (activeChannel >= 0)
            {
                channelStates[activeChannel].PlayNote(lastNote.note);

                if (lastNote.note.IsRelease)
                {
                    lastNoteWasRelease = true;
                    lastReleaseTime = DateTime.UtcNow;
                }
                else
                {
                    lastNoteWasRelease = false;
                }
            }

            // Enable exactly the active channel; mute all others.
            for (int i = 0; i < channelStates.Length; i++)
                NesApu.EnableChannel(apuIndex, i, i == activeChannel ? 1 : 0);
        }

        // Auto-stop: once a released note has rung for InstrumentStopTime seconds,
        // silence its channel. A negative InstrumentStopTime disables the feature.
        if (lastNoteWasRelease &&
            activeChannel >= 0 &&
            Settings.InstrumentStopTime >= 0 &&
            DateTime.UtcNow.Subtract(lastReleaseTime).TotalSeconds >= Settings.InstrumentStopTime)
        {
            NesApu.EnableChannel(apuIndex, activeChannel, 0);
            activeChannel = -1;
        }

        if (activeChannel >= 0)
        {
            channelStates[activeChannel].UpdateEnvelopes();
            channelStates[activeChannel].UpdateAPU();

            // Publish the active channel's envelope positions (read by the UI thread).
            for (int i = 0; i < Envelope.Count; i++)
                envelopeFrames[i] = channelStates[activeChannel].GetEnvelopeFrame(i);
        }
        else
        {
            // No active channel: reset published envelope frames and clear all notes.
            for (int i = 0; i < Envelope.Count; i++)
                envelopeFrames[i] = 0;
            foreach (var channel in channelStates)
                channel.ClearNote();
        }

        EndFrame();
    }

    // Shut down audio and drop any samples still queued for playback.
    audioStream.Stop();
    while (sampleQueue.TryDequeue(out _))
        ;
}
// Song playback loop: optionally fast-seeks (no audio) to the requested start
// frame, then advances the song one frame per frameEvent tick, updating channel
// envelopes and APU registers and queueing the rendered samples.
unsafe void PlayerThread(object o)
{
    var startInfo = (SongPlayerStartInfo)o;
    var song = startInfo.song;
    var channels = PlayerBase.CreateChannelStates(song.Project, apuIndex);

    var advance = true;      // true = process notes/effects for the current position this frame.
    var tempoCounter = 0;
    var playPattern = 0;
    var playNote = 0;
    var jumpPattern = -1;    // -1 = no pending jump effect.
    var jumpNote = -1;
    var speed = song.Speed;

    playPosition = startInfo.frame;

    NesApu.InitAndReset(apuIndex, SampleRate, GetNesApuExpansionAudio(song.Project), dmcCallback);

    // Seek: emulate frames as fast as possible (audio suppressed) until we
    // reach the requested start frame.
    if (startInfo.frame != 0)
    {
        NesApu.StartSeeking(apuIndex);
#if DEBUG
        NesApu.seeking = true;
#endif

        while (playPattern * song.PatternLength + playNote < startInfo.frame)
        {
            var dummyAdvance = false;
            if (!AdvanceTempo(song, speed, LoopMode.None, ref tempoCounter, ref playPattern, ref playNote, ref jumpPattern, ref jumpNote, ref dummyAdvance))
                break;

            foreach (var channel in channels)
            {
                channel.Advance(song, playPattern, playNote);
                channel.ProcessEffects(song, playPattern, playNote, ref jumpPattern, ref jumpNote, ref speed);
                channel.UpdateEnvelopes();
                channel.UpdateAPU();
            }
        }

        NesApu.StopSeeking(apuIndex);
#if DEBUG
        NesApu.seeking = false;
#endif

        // Discard any jump effect picked up while seeking so playback starts clean.
        jumpPattern = -1;
        jumpNote = -1;
    }

    var waitEvents = new WaitHandle[] { stopEvent, frameEvent };

    while (true)
    {
        // Index 0 = stopEvent (shut down), index 1 = frameEvent (render one frame).
        int idx = WaitHandle.WaitAny(waitEvents);
        if (idx == 0)
            break;

        // !advance is to handle first frame.
        if (!advance && !AdvanceTempo(song, speed, loopMode, ref tempoCounter, ref playPattern, ref playNote, ref jumpPattern, ref jumpNote, ref advance))
            break;

        // Advance to next note.
        if (advance)
        {
            playPosition = playPattern * song.PatternLength + playNote;
            foreach (var channel in channels)
            {
                channel.Advance(song, playPattern, playNote);
                channel.ProcessEffects(song, playPattern, playNote, ref jumpPattern, ref jumpNote, ref speed);
            }
            advance = false;
        }

        // Update envelopes + APU registers.
        foreach (var channel in channels)
        {
            channel.UpdateEnvelopes();
            channel.UpdateAPU();
        }

        // Mute. Pass a strict 0/1 rather than the raw masked bit (which can be
        // any power of two), matching the convention used by the other
        // EnableChannel call sites in this file.
        for (int i = 0; i < channels.Length; i++)
            NesApu.EnableChannel(apuIndex, i, (channelMask & (1 << i)) != 0 ? 1 : 0);

        EndFrameAndQueueSamples();
    }

    // Shut down audio and drop any samples still queued for playback.
    audioStream.Stop();
    while (sampleQueue.TryDequeue(out _))
        ;
}