Example #1
        public bool BeginPlaySong(Song s, bool pal, int startNote, IRegisterListener listener = null)
        {
            song                   = s;
            famitrackerTempo       = song.UsesFamiTrackerTempo;
            famitrackerSpeed       = song.FamitrackerSpeed;
            famitrackerNativeTempo = pal ? Song.NativeTempoPAL : Song.NativeTempoNTSC;
            palPlayback            = pal;
            playPosition           = startNote;
            playPattern            = 0;
            playNote               = 0;
            tempoCounter           = 0;
            ResetFamiStudioTempo(true);
            channelStates = CreateChannelStates(song.Project, apuIndex, song.Project.ExpansionNumChannels, palPlayback, listener);

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, GetNesApuExpansionAudio(song.Project), song.Project.ExpansionNumChannels, dmcCallback);

            UpdateChannelsMuting();

            //Debug.WriteLine($"START SEEKING!!");

            if (startNote != 0)
            {
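                // Fast-forward with the APU in seek mode: emulate frames until the song position reaches the requested start note.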
                NesApu.StartSeeking(apuIndex);

                while (song.GetPatternStartNote(playPattern) + playNote < startNote)
                {
                    //Debug.WriteLine($"Seek Frame {song.GetPatternStartNote(playPattern) + playNote}!");

                    int numFramesToRun = UpdateTempoEnvelope();

                    for (int i = 0; i < numFramesToRun; i++)
                    {
                        //Debug.WriteLine($"  Seeking Frame {song.GetPatternStartNote(playPattern) + playNote}!");

                        AdvanceChannels();
                        UpdateChannelsEnvelopesAndAPU();

                        if (!AdvanceSong(song.Length, LoopMode.None))
                        {
                            return false;
                        }
                    }
                }

                NesApu.StopSeeking(apuIndex);
            }

            AdvanceChannels();
            UpdateChannelsEnvelopesAndAPU();
            EndFrame();

            playPosition = song.GetPatternStartNote(playPattern) + playNote;

            return true;
        }
Example #2
        public bool BeginPlaySong(Song s, bool pal, int startNote)
        {
            song                    = s;
            famitrackerTempo        = song.UsesFamiTrackerTempo;
            famitrackerSpeed        = song.FamitrackerSpeed;
            palPlayback             = pal;
            playPosition            = startNote;
            playLocation            = new NoteLocation(0, 0);
            frameNumber             = 0;
            famitrackerTempoCounter = 0;
            channelStates           = CreateChannelStates(this, song.Project, apuIndex, song.Project.ExpansionNumN163Channels, palPlayback);

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, tndMode, song.Project.ExpansionAudioMask, song.Project.ExpansionNumN163Channels, dmcCallback);

            ResetFamiStudioTempo();
            UpdateChannelsMuting();

            //Debug.WriteLine($"START SEEKING!!");

            if (startNote != 0)
            {
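                // Seek path: process the first frame, then run whole frames in seek mode until just before the requested start note.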
                seeking = true;
                NesApu.StartSeeking(apuIndex);

                AdvanceChannels();
                UpdateChannels();
                UpdateTempo();

                while (playLocation.ToAbsoluteNoteIndex(song) < startNote - 1)
                {
                    if (!PlaySongFrameInternal(true))
                    {
                        break;
                    }
                }

                NesApu.StopSeeking(apuIndex);
                seeking = false;
            }
            else
            {
                AdvanceChannels();
                UpdateChannels();
                UpdateTempo();
            }

            playPosition = playLocation.ToAbsoluteNoteIndex(song);
            UpdateBeat();

            EndFrame();

            return true;
        }
Example #3
        public bool BeginPlaySong(Song s, bool pal, int startNote)
        {
            song                   = s;
            famitrackerTempo       = song.UsesFamiTrackerTempo;
            famitrackerSpeed       = song.FamitrackerSpeed;
            famitrackerNativeTempo = pal ? Song.NativeTempoPAL : Song.NativeTempoNTSC;
            palMode                = pal;
            playPosition           = startNote;
            playPattern            = 0;
            playNote               = 0;
            tempoCounter           = 0;
            firstFrame             = true;
            ResetFamiStudioTempo(true);
            channelStates = CreateChannelStates(song.Project, apuIndex, song.Project.ExpansionNumChannels, palMode);

            NesApu.InitAndReset(apuIndex, SampleRate, palMode, GetNesApuExpansionAudio(song.Project), song.Project.ExpansionNumChannels, dmcCallback);

            if (startNote != 0)
            {
                NesApu.StartSeeking(apuIndex);
#if DEBUG
                NesApu.seeking = true;
#endif

                while (song.GetPatternStartNote(playPattern) + playNote < startNote)
                {
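                    // While seeking, advance each channel by one note, apply its effects, and update envelopes/APU registers.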
                    foreach (var channel in channelStates)
                    {
                        channel.Advance(song, playPattern, playNote, famitrackerSpeed, famitrackerNativeTempo);
                        channel.ProcessEffects(song, playPattern, playNote, ref famitrackerSpeed);
                        channel.UpdateEnvelopes();
                        channel.UpdateAPU();
                    }

                    if (!AdvanceSong(song.Length, LoopMode.None))
                    {
                        return false;
                    }

                    //Debug.WriteLine($"Seeking Frame {song.GetPatternStartNote(playPattern) + playNote}!");

                    UpdateFrameSkip();
                }

                NesApu.StopSeeking(apuIndex);
#if DEBUG
                NesApu.seeking = false;
#endif
            }

            return true;
        }
Example #4
        public bool BeginPlaySong(Song s, bool pal, int startNote)
        {
            song                    = s;
            famitrackerTempo        = song.UsesFamiTrackerTempo;
            famitrackerSpeed        = song.FamitrackerSpeed;
            palPlayback             = pal;
            playPosition            = startNote;
            playPattern             = 0;
            playNote                = 0;
            frameNumber             = 0;
            famitrackerTempoCounter = 0;
            channelStates           = CreateChannelStates(this, song.Project, apuIndex, song.Project.ExpansionNumChannels, palPlayback);

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, GetNesApuExpansionAudio(song.Project.ExpansionAudio), song.Project.ExpansionNumChannels, dmcCallback);

            ResetFamiStudioTempo(true);
            UpdateChannelsMuting();

            //Debug.WriteLine($"START SEEKING!!");

            if (startNote != 0)
            {
                NesApu.StartSeeking(apuIndex);

                AdvanceChannels();
                UpdateChannels();
                UpdateFamitrackerTempo(famitrackerSpeed, song.FamitrackerTempo);

                while (song.GetPatternStartNote(playPattern) + playNote < startNote)
                {
                    if (!PlaySongFrameInternal(true))
                    {
                        break;
                    }
                }

                NesApu.StopSeeking(apuIndex);
            }
            else
            {
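                // Not seeking: just process the very first frame of the song.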
                AdvanceChannels();
                UpdateChannels();
                UpdateFamitrackerTempo(famitrackerSpeed, song.FamitrackerTempo);
            }

            EndFrame();

            playPosition = song.GetPatternStartNote(playPattern) + playNote;

            return true;
        }
Example #5
        unsafe void PlayerThread(object o)
        {
            var lastNoteWasRelease = false;
            var lastReleaseTime    = DateTime.Now;

            var activeChannel = -1;
            var waitEvents    = new WaitHandle[] { stopEvent, bufferSemaphore };

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, GetNesApuExpansionAudio(expansionAudio), numExpansionChannels, dmcCallback);
            for (int i = 0; i < channelStates.Length; i++)
            {
                NesApu.EnableChannel(apuIndex, i, 0);
            }

            while (true)
            {
                int idx = WaitHandle.WaitAny(waitEvents);

                if (idx == 0)
                {
                    break;
                }

                if (!noteQueue.IsEmpty)
                {
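                    // Drain the queue, keeping only the most recently enqueued note.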
                    PlayerNote lastNote = new PlayerNote();
                    while (noteQueue.TryDequeue(out PlayerNote note))
                    {
                        lastNote = note;
                    }

                    activeChannel = lastNote.channel;
                    if (activeChannel >= 0)
                    {
                        if (lastNote.note.IsMusical)
                        {
                            channelStates[activeChannel].ForceInstrumentReload();
                        }

                        channelStates[activeChannel].PlayNote(lastNote.note);

                        if (lastNote.note.IsRelease)
                        {
                            lastNoteWasRelease = true;
                            lastReleaseTime    = DateTime.Now;
                        }
                        else
                        {
                            lastNoteWasRelease = false;
                        }
                    }

                    for (int i = 0; i < channelStates.Length; i++)
                    {
                        NesApu.EnableChannel(apuIndex, i, i == activeChannel ? 1 : 0);
                    }
                }

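                // Silence the active channel once a released note has been held longer than the configured instrument stop time.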
                if (lastNoteWasRelease &&
                    activeChannel >= 0 &&
                    Settings.InstrumentStopTime >= 0 &&
                    DateTime.Now.Subtract(lastReleaseTime).TotalSeconds >= Settings.InstrumentStopTime)
                {
                    NesApu.EnableChannel(apuIndex, activeChannel, 0);
                    activeChannel = -1;
                }

                if (activeChannel >= 0)
                {
                    channelStates[activeChannel].Update();

                    for (int i = 0; i < EnvelopeType.Count; i++)
                    {
                        envelopeFrames[i] = channelStates[activeChannel].GetEnvelopeFrame(i);
                    }
                }
                else
                {
                    for (int i = 0; i < EnvelopeType.Count; i++)
                    {
                        envelopeFrames[i] = 0;
                    }
                    foreach (var channel in channelStates)
                    {
                        channel.ClearNote();
                    }
                }

                EndFrame();
            }

            audioStream.Stop();
            while (sampleQueue.TryDequeue(out _))
            {
                ;
            }
        }
Example #6
        public unsafe static void Save(Song song, string filename, int sampleRate)
        {
            var advance           = true;
            var tempoCounter      = 0;
            var playPattern       = 0;
            var playNote          = 0;
            var jumpPattern       = -1;
            var jumpNote          = -1;
            var speed             = song.Speed;
            var wavBytes          = new List<byte>();
            var apuIndex          = NesApu.APU_WAV_EXPORT;
            var dmcCallback       = new NesApu.DmcReadDelegate(NesApu.DmcReadCallback);
            var dmcCallbackHandle = GCHandle.Alloc(dmcCallback); // Needed since callback can be collected.
            var channels          = PlayerBase.CreateChannelStates(song.Project, apuIndex);

            NesApu.InitAndReset(apuIndex, sampleRate, PlayerBase.GetNesApuExpansionAudio(song.Project), dmcCallback);
            for (int i = 0; i < channels.Length; i++)
            {
                NesApu.EnableChannel(apuIndex, i, 1);
            }

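            // Emulate the song frame by frame, accumulating the generated samples until the end of the song.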
            while (true)
            {
                // Advance to next note.
                if (advance)
                {
                    foreach (var channel in channels)
                    {
                        channel.ProcessEffects(song, playPattern, playNote, ref jumpPattern, ref jumpNote, ref speed, false);
                    }

                    foreach (var channel in channels)
                    {
                        channel.Advance(song, playPattern, playNote);
                    }

                    advance = false;
                }

                // Update envelopes + APU registers.
                foreach (var channel in channels)
                {
                    channel.UpdateEnvelopes();
                    channel.UpdateAPU();
                }

                NesApu.EndFrame(apuIndex);

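                // Copy this frame's 16-bit mono samples out of the APU into the WAV buffer.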
                int    numTotalSamples = NesApu.SamplesAvailable(apuIndex);
                byte[] samples         = new byte[numTotalSamples * 2];

                fixed (byte* ptr = &samples[0])
                {
                    NesApu.ReadSamples(apuIndex, new IntPtr(ptr), numTotalSamples);
                }

                wavBytes.AddRange(samples);

                if (!PlayerBase.AdvanceTempo(song, speed, LoopMode.None, ref tempoCounter, ref playPattern, ref playNote, ref jumpPattern, ref jumpNote, ref advance))
                {
                    break;
                }
            }

            using (var file = new FileStream(filename, FileMode.Create))
            {
                var header = new WaveHeader();

                // RIFF WAVE Header
                header.chunkId[0] = (byte)'R';
                header.chunkId[1] = (byte)'I';
                header.chunkId[2] = (byte)'F';
                header.chunkId[3] = (byte)'F';
                header.format[0]  = (byte)'W';
                header.format[1]  = (byte)'A';
                header.format[2]  = (byte)'V';
                header.format[3]  = (byte)'E';

                // Format subchunk
                header.subChunk1Id[0] = (byte)'f';
                header.subChunk1Id[1] = (byte)'m';
                header.subChunk1Id[2] = (byte)'t';
                header.subChunk1Id[3] = (byte)' ';
                header.audioFormat    = 1;          // 1 = PCM
                header.numChannels    = 1;          // 1 for mono, 2 for stereo
                header.sampleRate     = sampleRate; // e.g. 44100 Hz, CD-quality audio
                header.bitsPerSample  = 16;         // 16-bit samples
                header.byteRate       = header.sampleRate * header.numChannels * header.bitsPerSample / 8;
                header.blockAlign     = (short)(header.numChannels * header.bitsPerSample / 8);

                // Data subchunk
                header.subChunk2Id[0] = (byte)'d';
                header.subChunk2Id[1] = (byte)'a';
                header.subChunk2Id[2] = (byte)'t';
                header.subChunk2Id[3] = (byte)'a';

                // All sizes for later:
                // chunkSize = 4 + (8 + subChunk1Size) + (8 + subChunk2Size)
                // subChunk1Size is constant; I'm using 16 and staying with PCM
                // subChunk2Size = nSamples * nChannels * bitsPerSample/8
                // Whenever a sample is added:
                //    chunkSize += (nChannels * bitsPerSample/8)
                //    subChunk2Size += (nChannels * bitsPerSample/8)
                header.subChunk1Size = 16;
                header.subChunk2Size = wavBytes.Count;
                header.chunkSize     = 4 + (8 + header.subChunk1Size) + (8 + header.subChunk2Size);

                var headerBytes = new byte[sizeof(WaveHeader)];
                Marshal.Copy(new IntPtr(&header), headerBytes, 0, headerBytes.Length);
                file.Write(headerBytes, 0, headerBytes.Length);
                file.Write(wavBytes.ToArray(), 0, wavBytes.Count);
            }

            dmcCallbackHandle.Free();
        }
Example #7
        unsafe void PlayerThread(object o)
        {
#if !DEBUG
            try
            {
#endif
            activeChannel = -1;

            var lastNoteWasRelease = false;
            var lastReleaseTime    = DateTime.Now;

            var waitEvents = new WaitHandle[] { stopEvent, bufferSemaphore };

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, NesApu.TND_MODE_SINGLE, expansionMask, numN163Channels, dmcCallback);
            for (int i = 0; i < channelStates.Length; i++)
            {
                EnableChannelType(channelStates[i].InnerChannelType, false);
            }

            while (true)
            {
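                // Block until either the stop event (index 0) or the buffer semaphore is signaled.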
                int idx = WaitHandle.WaitAny(waitEvents);

                if (idx == 0)
                {
                    break;
                }

                if (!noteQueue.IsEmpty)
                {
                    PlayerNote lastNote = new PlayerNote();
                    while (noteQueue.TryDequeue(out PlayerNote note))
                    {
                        lastNote = note;
                    }

                    activeChannel = lastNote.channel;
                    if (activeChannel >= 0)
                    {
                        if (lastNote.note.IsMusical)
                        {
                            channelStates[activeChannel].ForceInstrumentReload();
                        }

                        channelStates[activeChannel].PlayNote(lastNote.note);

                        if (lastNote.note.IsRelease)
                        {
                            lastNoteWasRelease = true;
                            lastReleaseTime    = DateTime.Now;
                        }
                        else
                        {
                            lastNoteWasRelease = false;
                        }
                    }

                    for (int i = 0; i < channelStates.Length; i++)
                    {
                        EnableChannelType(channelStates[i].InnerChannelType, i == activeChannel);
                    }
                }

                if (lastNoteWasRelease &&
                    activeChannel >= 0 &&
                    Settings.InstrumentStopTime >= 0 &&
                    DateTime.Now.Subtract(lastReleaseTime).TotalSeconds >= Settings.InstrumentStopTime)
                {
                    EnableChannelType(channelStates[activeChannel].InnerChannelType, false);
                    activeChannel = -1;
                }

                if (activeChannel >= 0)
                {
                    var channel = channelStates[activeChannel];
                    channel.Update();

                    for (int i = 0; i < EnvelopeType.Count; i++)
                    {
                        envelopeFrames[i] = channel.GetEnvelopeFrame(i);
                    }

                    playingNote = channel.CurrentNote != null && channel.CurrentNote.IsMusical ? channel.CurrentNote.Value : Note.NoteInvalid;
                }
                else
                {
                    for (int i = 0; i < EnvelopeType.Count; i++)
                    {
                        envelopeFrames[i] = 0;
                    }
                    foreach (var channel in channelStates)
                    {
                        channel.ClearNote();
                    }

                    playingNote = Note.NoteInvalid;
                }

                EndFrame();
            }

            audioStream.Stop();
            while (sampleQueue.TryDequeue(out _))
            {
                ;
            }

#if !DEBUG
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.Message);
                if (Debugger.IsAttached)
                {
                    Debugger.Break();
                }
            }
#endif
        }
Example #8
        unsafe void PlayerThread(object o)
        {
            var startInfo = (SongPlayerStartInfo)o;
            var song      = startInfo.song;

            var channels = PlayerBase.CreateChannelStates(song.Project, apuIndex);

            var advance      = true;
            var tempoCounter = 0;
            var playPattern  = 0;
            var playNote     = 0;
            var jumpPattern  = -1;
            var jumpNote     = -1;
            var speed        = song.Speed;

            playPosition = startInfo.frame;

            NesApu.InitAndReset(apuIndex, SampleRate, GetNesApuExpansionAudio(song.Project), dmcCallback);

            if (startInfo.frame != 0)
            {
                NesApu.StartSeeking(apuIndex);
#if DEBUG
                NesApu.seeking = true;
#endif

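                // Seek: advance the tempo and channels with the APU in seek mode until the requested frame is reached.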
                while (playPattern * song.PatternLength + playNote < startInfo.frame)
                {
                    var dummyAdvance = false;
                    if (!AdvanceTempo(song, speed, LoopMode.None, ref tempoCounter, ref playPattern, ref playNote, ref jumpPattern, ref jumpNote, ref dummyAdvance))
                    {
                        break;
                    }

                    foreach (var channel in channels)
                    {
                        channel.Advance(song, playPattern, playNote);
                        channel.ProcessEffects(song, playPattern, playNote, ref jumpPattern, ref jumpNote, ref speed);
                        channel.UpdateEnvelopes();
                        channel.UpdateAPU();
                    }
                }

                NesApu.StopSeeking(apuIndex);
#if DEBUG
                NesApu.seeking = false;
#endif

                jumpPattern = -1;
                jumpNote    = -1;
            }

            var waitEvents = new WaitHandle[] { stopEvent, frameEvent };

            while (true)
            {
                int idx = WaitHandle.WaitAny(waitEvents);

                if (idx == 0)
                {
                    break;
                }

                // !advance skips the tempo update on the first frame (advance starts true, so the first note plays immediately).
                if (!advance && !AdvanceTempo(song, speed, loopMode, ref tempoCounter, ref playPattern, ref playNote, ref jumpPattern, ref jumpNote, ref advance))
                {
                    break;
                }

                // Advance to next note.
                if (advance)
                {
                    playPosition = playPattern * song.PatternLength + playNote;

                    foreach (var channel in channels)
                    {
                        channel.Advance(song, playPattern, playNote);
                        channel.ProcessEffects(song, playPattern, playNote, ref jumpPattern, ref jumpNote, ref speed);
                    }

                    advance = false;
                }

                // Update envelopes + APU registers.
                foreach (var channel in channels)
                {
                    channel.UpdateEnvelopes();
                    channel.UpdateAPU();
                }

                // Mute.
                for (int i = 0; i < channels.Length; i++)
                {
                    NesApu.EnableChannel(apuIndex, i, (channelMask & (1 << i)));
                }

                EndFrameAndQueueSamples();
            }

            audioStream.Stop();
            while (sampleQueue.TryDequeue(out _))
            {
                ;
            }
        }