Example #1
 protected void UpdateChannelsMuting()
 {
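     // Enable or mute each channel according to its bit in channelMask.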
     for (int i = 0; i < channelStates.Length; i++)
     {
         NesApu.EnableChannel(apuIndex, i, (channelMask & (1 << i)));
     }
 }
Example #2
        public void Advance(Song song, int patternIdx, int noteIdx)
        {
            var channel = song.GetChannelByType(channelType);
            var pattern = channel.PatternInstances[patternIdx];

            if (pattern == null)
            {
                return;
            }

            var newNote = pattern.Notes[noteIdx];

            if (newNote.IsValid)
            {
                slideStep = 0;

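                // For slide notes, compute the pitch step from the note table and retarget the note value to the slide target.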
                if (newNote.IsSlideNote)
                {
                    var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, false);

                    if (channel.ComputeSlideNoteParams(patternIdx, noteIdx, noteTable, out slidePitch, out slideStep, out _))
                    {
                        newNote.Value = (byte)newNote.SlideNoteTarget;
                    }
                }

                PlayNote(newNote);
            }
            else if (newNote.HasVolume)
            {
                note.Volume = newNote.Volume;
            }
        }
Example #3
        protected unsafe void EndFrameAndQueueSamples()
        {
            NesApu.NesApuEndFrame(apuIndex);

            int numTotalSamples = NesApu.NesApuSamplesAvailable(apuIndex);

            short[] samples = new short[numTotalSamples];

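            // Pin the managed sample array so a raw pointer can be passed to the native APU code.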
            fixed(short *ptr = &samples[0])
            {
                NesApu.NesApuReadSamples(apuIndex, new IntPtr(ptr), numTotalSamples);
            }

            sampleQueue.Enqueue(samples);

            // Wait until we have queued as many frames as there are XAudio buffers before
            // starting the audio thread; otherwise we risk starving on the first frame.
            if (!xaudio2Stream.IsStarted)
            {
                if (sampleQueue.Count == NumAudioBuffers)
                {
                    xaudio2Stream.Start();
                }
                else
                {
                    frameEvent.Set();
                }
            }
        }
Example #4
        static unsafe void Main(string[] args)
        {
#if FAMISTUDIO_WINDOWS
            try
            {
                // This is only supported in Windows 8.1+.
                SetProcessDpiAwareness(1 /*Process_System_DPI_Aware*/);
            }
            catch { }
#endif

            Settings.Load();
            Cursors.Initialize();
            RenderTheme.Initialize();
            PlatformUtils.Initialize();
            ClipboardUtils.Initialize();
            FamiStudioTempoUtils.Initialize();
            NesApu.InitializeNoteTables();

#if FAMISTUDIO_WINDOWS
            PerformanceCounter.Initialize();
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
#endif

            var famiStudio = new FamiStudio(args.Length > 0 ? args[0] : null);
            famiStudio.Run();

            Settings.Save();
        }
Example #5
        protected void EnableChannelType(int channelType, bool enable)
        {
            var exp = ChannelType.GetExpansionTypeForChannelType(channelType);
            var idx = ChannelType.GetExpansionChannelIndexForChannelType(channelType);

            NesApu.EnableChannel(apuIndex, exp, idx, enable ? 1 : 0);
        }
Example #6
        static unsafe void Main(string[] args)
        {
#if FAMISTUDIO_WINDOWS
            try
            {
                // This is only supported in Windows 8.1+.
                SetProcessDpiAwareness(1 /*Process_System_DPI_Aware*/);
            }
            catch { }

            if (!PlatformUtils.IsVS2015RuntimeInstalled())
            {
                if (MessageBox.Show("You seem to be missing the VS 2015 C++ Runtime which is required to run FamiStudio, would you like to visit the FamiStudio website for instruction on how to install it?", "Missing Component", MessageBoxButtons.YesNo) == DialogResult.Yes)
                {
                    Utils.OpenUrl("https://famistudio.org/doc/#windows");
                }

                return;
            }

            if (!XAudio2Stream.TryDetectXAudio2())
            {
                if (MessageBox.Show("You seem to be missing parts of DirectX which is required to run FamiStudio, would you like to visit the FamiStudio website for instruction on how to install it?", "Missing Component", MessageBoxButtons.YesNo) == DialogResult.Yes)
                {
                    Utils.OpenUrl("https://famistudio.org/doc/#windows");
                }

                return;
            }
#endif

            Settings.Load();
            PlatformUtils.Initialize();
            RenderTheme.Initialize();
            NesApu.InitializeNoteTables();

#if FAMISTUDIO_WINDOWS
            WinUtils.Initialize();
            PerformanceCounter.Initialize();
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
#elif FAMISTUDIO_LINUX
            LinuxUtils.SetProcessName("FamiStudio");
#endif

            var cli = new CommandLineInterface(args);

            if (!cli.Run())
            {
                var famiStudio = new FamiStudio(args.Length > 0 ? args[0] : null);
                famiStudio.Run();
            }

            Settings.Save();

#if FAMISTUDIO_LINUX
            // We sometimes get stuck here on Linux, let's abort.
            Environment.Exit(0);
#endif
        }
Example #7
 public ChannelState(int apu, int type)
 {
     apuIdx      = apu;
     channelType = type;
     noteTable   = NesApu.GetNoteTableForChannelType(channelType, false);
     note.Value  = Note.NoteStop;
     note.Volume = Note.VolumeMax;
 }
Example #8
        protected void WriteRegister(int reg, int data)
        {
            NesApu.WriteRegister(apuIdx, reg, data);

            if (registerListener != null)
            {
                registerListener.WriteRegister(apuIdx, reg, data);
            }
        }
Example #9
        public bool BeginPlaySong(Song s, bool pal, int startNote, IRegisterListener listener = null)
        {
            song                   = s;
            famitrackerTempo       = song.UsesFamiTrackerTempo;
            famitrackerSpeed       = song.FamitrackerSpeed;
            famitrackerNativeTempo = pal ? Song.NativeTempoPAL : Song.NativeTempoNTSC;
            palPlayback            = pal;
            playPosition           = startNote;
            playPattern            = 0;
            playNote               = 0;
            tempoCounter           = 0;
            ResetFamiStudioTempo(true);
            channelStates = CreateChannelStates(song.Project, apuIndex, song.Project.ExpansionNumChannels, palPlayback, listener);

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, GetNesApuExpansionAudio(song.Project), song.Project.ExpansionNumChannels, dmcCallback);

            UpdateChannelsMuting();

            //Debug.WriteLine($"START SEEKING!!");

            if (startNote != 0)
            {
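                // Seek: advance the song frame by frame (channels, envelopes, APU) until we reach the requested start note.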
                NesApu.StartSeeking(apuIndex);

                while (song.GetPatternStartNote(playPattern) + playNote < startNote)
                {
                    //Debug.WriteLine($"Seek Frame {song.GetPatternStartNote(playPattern) + playNote}!");

                    int numFramesToRun = UpdateTempoEnvelope();

                    for (int i = 0; i < numFramesToRun; i++)
                    {
                        //Debug.WriteLine($"  Seeking Frame {song.GetPatternStartNote(playPattern) + playNote}!");

                        AdvanceChannels();
                        UpdateChannelsEnvelopesAndAPU();

                        if (!AdvanceSong(song.Length, LoopMode.None))
                        {
                            return(false);
                        }
                    }
                }

                NesApu.StopSeeking(apuIndex);
            }

            AdvanceChannels();
            UpdateChannelsEnvelopesAndAPU();
            EndFrame();

            playPosition = song.GetPatternStartNote(playPattern) + playNote;

            return(true);
        }
Example #10
 public ChannelState(int apu, int type, bool pal, int numN163Channels = 1)
 {
     apuIdx         = apu;
     channelType    = type;
     maximumPeriod  = NesApu.GetPitchLimitForChannelType(channelType);
     noteTable      = NesApu.GetNoteTableForChannelType(channelType, pal, numN163Channels);
     note.Value     = Note.NoteStop;
     note.Volume    = Note.VolumeMax;
     note.FinePitch = 0;
     Channel.GetShiftsForType(type, numN163Channels, out pitchShift, out slideShift);
 }
Example #11
 public void StopSeeking()
 {
     seeking = false;
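     // Flush the shadow (cached) register values to the real APU now that seeking is over.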
     for (int i = 0; i < shadowRegisters.Length; i++)
     {
         if (shadowRegisters[i] >= 0)
         {
             NesApu.NesApuWriteRegister(apuIdx, 0x4000 + i, shadowRegisters[i]);
         }
     }
 }
Example #12
 public ChannelState(IPlayerInterface play, int apu, int type, bool pal, int numN163Channels = 1)
 {
     player         = play;
     apuIdx         = apu;
     channelType    = type;
     palPlayback    = pal;
     maximumPeriod  = NesApu.GetPitchLimitForChannelType(channelType);
     noteTable      = NesApu.GetNoteTableForChannelType(channelType, pal, numN163Channels);
     note.Value     = Note.NoteStop;
     note.FinePitch = 0;
     Channel.GetShiftsForType(type, numN163Channels, out pitchShift, out slideShift);
 }
Example #13
        public bool BeginPlaySong(Song s, bool pal, int startNote)
        {
            song                    = s;
            famitrackerTempo        = song.UsesFamiTrackerTempo;
            famitrackerSpeed        = song.FamitrackerSpeed;
            palPlayback             = pal;
            playPosition            = startNote;
            playLocation            = new NoteLocation(0, 0);
            frameNumber             = 0;
            famitrackerTempoCounter = 0;
            channelStates           = CreateChannelStates(this, song.Project, apuIndex, song.Project.ExpansionNumN163Channels, palPlayback);

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, tndMode, song.Project.ExpansionAudioMask, song.Project.ExpansionNumN163Channels, dmcCallback);

            ResetFamiStudioTempo();
            UpdateChannelsMuting();

            //Debug.WriteLine($"START SEEKING!!");

            if (startNote != 0)
            {
                seeking = true;
                NesApu.StartSeeking(apuIndex);

                AdvanceChannels();
                UpdateChannels();
                UpdateTempo();

                while (playLocation.ToAbsoluteNoteIndex(song) < startNote - 1)
                {
                    if (!PlaySongFrameInternal(true))
                    {
                        break;
                    }
                }

                NesApu.StopSeeking(apuIndex);
                seeking = false;
            }
            else
            {
                AdvanceChannels();
                UpdateChannels();
                UpdateTempo();
            }

            playPosition = playLocation.ToAbsoluteNoteIndex(song);
            UpdateBeat();

            EndFrame();

            return(true);
        }
Example #14
        public bool BeginPlaySong(Song s, bool pal, int startNote)
        {
            song                   = s;
            famitrackerTempo       = song.UsesFamiTrackerTempo;
            famitrackerSpeed       = song.FamitrackerSpeed;
            famitrackerNativeTempo = pal ? Song.NativeTempoPAL : Song.NativeTempoNTSC;
            palMode                = pal;
            playPosition           = startNote;
            playPattern            = 0;
            playNote               = 0;
            tempoCounter           = 0;
            firstFrame             = true;
            ResetFamiStudioTempo(true);
            channelStates = CreateChannelStates(song.Project, apuIndex, song.Project.ExpansionNumChannels, palMode);

            NesApu.InitAndReset(apuIndex, SampleRate, palMode, GetNesApuExpansionAudio(song.Project), song.Project.ExpansionNumChannels, dmcCallback);

            if (startNote != 0)
            {
                NesApu.StartSeeking(apuIndex);
#if DEBUG
                NesApu.seeking = true;
#endif

                while (song.GetPatternStartNote(playPattern) + playNote < startNote)
                {
                    foreach (var channel in channelStates)
                    {
                        channel.Advance(song, playPattern, playNote, famitrackerSpeed, famitrackerNativeTempo);
                        channel.ProcessEffects(song, playPattern, playNote, ref famitrackerSpeed);
                        channel.UpdateEnvelopes();
                        channel.UpdateAPU();
                    }

                    if (!AdvanceSong(song.Length, LoopMode.None))
                    {
                        return(false);
                    }

                    //Debug.WriteLine($"Seeking Frame {song.GetPatternStartNote(playPattern) + playNote}!");

                    UpdateFrameSkip();
                }

                NesApu.StopSeeking(apuIndex);
#if DEBUG
                NesApu.seeking = false;
#endif
            }

            return(true);
        }
Example #15
        public bool BeginPlaySong(Song s, bool pal, int startNote)
        {
            song                    = s;
            famitrackerTempo        = song.UsesFamiTrackerTempo;
            famitrackerSpeed        = song.FamitrackerSpeed;
            palPlayback             = pal;
            playPosition            = startNote;
            playPattern             = 0;
            playNote                = 0;
            frameNumber             = 0;
            famitrackerTempoCounter = 0;
            channelStates           = CreateChannelStates(this, song.Project, apuIndex, song.Project.ExpansionNumChannels, palPlayback);

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, GetNesApuExpansionAudio(song.Project.ExpansionAudio), song.Project.ExpansionNumChannels, dmcCallback);

            ResetFamiStudioTempo(true);
            UpdateChannelsMuting();

            //Debug.WriteLine($"START SEEKING!!");

            if (startNote != 0)
            {
                NesApu.StartSeeking(apuIndex);

                AdvanceChannels();
                UpdateChannels();
                UpdateFamitrackerTempo(famitrackerSpeed, song.FamitrackerTempo);

                while (song.GetPatternStartNote(playPattern) + playNote < startNote)
                {
                    if (!PlaySongFrameInternal(true))
                    {
                        break;
                    }
                }

                NesApu.StopSeeking(apuIndex);
            }
            else
            {
                AdvanceChannels();
                UpdateChannels();
                UpdateFamitrackerTempo(famitrackerSpeed, song.FamitrackerTempo);
            }

            EndFrame();

            playPosition = song.GetPatternStartNote(playPattern) + playNote;

            return(true);
        }
Example #16
 public static bool IsVS2015RuntimeInstalled()
 {
     try
     {
         // Super ghetto way of detecting if the runtime is installed: simply call
         // any function that will cause a C++ DLL to be loaded.
         NesApu.GetAudioExpansion(0);
         return(true);
     }
     catch
     {
         return(false);
     }
 }
Example #17
        protected virtual unsafe short[] EndFrame()
        {
            NesApu.EndFrame(apuIndex);

            int numTotalSamples = NesApu.SamplesAvailable(apuIndex);

            short[] samples = new short[numTotalSamples];

            fixed(short *ptr = &samples[0])
            {
                NesApu.ReadSamples(apuIndex, new IntPtr(ptr), numTotalSamples);
            }

            return(samples);
        }
Example #18
 protected void WriteApuRegister(int register, int data)
 {
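     // While seeking, register writes are cached in shadowRegisters and flushed once seeking stops.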
     if (seeking)
     {
         int idx = register - 0x4000;
         // Not caching DPCM register for now.
         if (idx < shadowRegisters.Length)
         {
             shadowRegisters[idx] = data;
         }
     }
     else
     {
         NesApu.NesApuWriteRegister(apuIdx, register, data);
     }
 }
Example #19
        static unsafe void Main(string[] args)
        {
#if FAMISTUDIO_WINDOWS
            try
            {
                // This is only supported in Windows 8.1+.
                SetProcessDpiAwareness(1 /*Process_System_DPI_Aware*/);
            }
            catch { }
#endif

            Settings.Load();
            RenderTheme.Initialize();
            PlatformUtils.Initialize();
            Cursors.Initialize();
            FamiStudioTempoUtils.Initialize();
            NesApu.InitializeNoteTables();

#if FAMISTUDIO_WINDOWS
            WinUtils.Initialize();
            PerformanceCounter.Initialize();
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
#endif
#if FAMISTUDIO_LINUX
            LinuxUtils.SetProcessName("FamiStudio");
#endif

            var cli = new CommandLineInterface(args);

            if (!cli.Run())
            {
                var famiStudio = new FamiStudio(args.Length > 0 ? args[0] : null);
                famiStudio.Run();
            }

            Settings.Save();

#if FAMISTUDIO_LINUX
            // We sometimes get stuck here on Linux, let's abort.
            Environment.Exit(0);
#endif
        }
Example #20
        public bool PlaySongFrame()
        {
            do
            {
                if (firstFrame || UpdateTempo(famitrackerSpeed, song.FamitrackerTempo))
                {
                    // Advance to next note.
                    if (!firstFrame && !AdvanceSong(song.Length, loopMode))
                    {
                        return(false);
                    }

                    foreach (var channel in channelStates)
                    {
                        channel.Advance(song, playPattern, playNote, famitrackerSpeed, famitrackerNativeTempo);
                        channel.ProcessEffects(song, playPattern, playNote, ref famitrackerSpeed);
                    }

                    playPosition = song.GetPatternStartNote(playPattern) + playNote;
                }

                // Debug.WriteLine($"Running Frame {playPosition}!");

                // Update envelopes + APU registers.
                foreach (var channel in channelStates)
                {
                    channel.UpdateEnvelopes();
                    channel.UpdateAPU();
                }
            }while (!firstFrame && UpdateFrameSkip());

            firstFrame = false;

            // Mute/unmute each channel based on its bit in the channel mask.
            for (int i = 0; i < channelStates.Length; i++)
            {
                NesApu.EnableChannel(apuIndex, i, (channelMask & (1 << i)));
            }

            EndFrame();

            return(true);
        }
Example #21
        private void WriteN163Register(int reg, int data)
        {
            // HACK: There are conflicts between the N163 and S5B registers; an N163 addr write
            // can be interpreted as an S5B data write. To prevent this, we select a dummy register
            // for S5B so that the write is discarded.
            //
            // N163:
            //   f800-ffff (addr)
            //   4800-4fff (data)
            // S5B:
            //   c000-e000 (addr)
            //   f000-ffff (data)

            if ((NesApu.GetAudioExpansions(apuIdx) & NesApu.APU_EXPANSION_MASK_SUNSOFT) != 0)
            {
                WriteRegister(NesApu.S5B_ADDR, NesApu.S5B_REG_IO_A);
            }

            WriteRegister(NesApu.N163_ADDR, reg);
            WriteRegister(NesApu.N163_DATA, data);
        }
Example #22
        private int OutputSong(Song song, int songIdx, int speedChannel, int factor, bool test)
        {
            var packedPatternBuffers = new List<List<string>>(globalPacketPatternBuffers);
            var size         = 0;
            var emptyPattern = new Pattern(-1, song, 0, "");
            var emptyNote    = new Note(Note.NoteInvalid);

            for (int c = 0; c < song.Channels.Length; c++)
            {
                if (!test)
                {
                    lines.Add($"\n{ll}song{songIdx}ch{c}:");
                }

                var channel            = song.Channels[c];
                var currentSpeed       = song.FamitrackerSpeed;
                var isSpeedChannel     = c == speedChannel;
                var instrument         = (Instrument)null;
                var previousNoteLength = song.NoteLength;

                if (isSpeedChannel && project.UsesFamiTrackerTempo)
                {
                    if (!test)
                    {
                        lines.Add($"\t{db} $fb, ${song.FamitrackerSpeed:x2}");
                    }
                    size += 2;
                }

                for (int p = 0; p < song.Length; p++)
                {
                    var prevNoteValue = Note.NoteInvalid;
                    var pattern       = channel.PatternInstances[p] == null ? emptyPattern : channel.PatternInstances[p];
                    var patternBuffer = new List<string>();

                    if (p == song.LoopPoint)
                    {
                        if (!test)
                        {
                            lines.Add($"{ll}song{songIdx}ch{c}loop:");
                        }

                        // Clear stored instrument to force a reset. We might be looping
                        // to a section where the instrument was set from a previous pattern.
                        instrument = null;
                    }

                    if (isSpeedChannel && project.UsesFamiStudioTempo && machine != MachineType.NTSC)
                    {
                        var noteLength = song.GetPatternNoteLength(p);

                        if (noteLength != previousNoteLength || (p == song.LoopPoint && p != 0))
                        {
                            if (!test)
                            {
                                patternBuffer.Add($"$fb");
                                patternBuffer.Add($"{lo}({ll}tempo_env{noteLength})");
                                patternBuffer.Add($"{hi}({ll}tempo_env{noteLength})");
                                previousNoteLength = noteLength;
                            }

                            size += 3;
                        }
                    }

                    var patternLength = song.GetPatternLength(p);
                    var numValidNotes = patternLength;

                    for (var it = pattern.GetNoteIterator(0, patternLength); !it.Done;)
                    {
                        var time = it.CurrentTime;
                        var note = it.CurrentNote;

                        if (note == null)
                        {
                            note = emptyNote;
                        }

                        if (isSpeedChannel && song.UsesFamiTrackerTempo)
                        {
                            var speed = FindEffectParam(song, p, time, Note.EffectSpeed);
                            if (speed >= 0)
                            {
                                currentSpeed = speed;
                                patternBuffer.Add($"${0xfb:x2}");
                                patternBuffer.Add($"${(byte)speed:x2}");
                            }
                        }

                        it.Next();

                        if (note.HasVolume)
                        {
                            patternBuffer.Add($"${(byte)(0x70 | note.Volume):x2}");
                        }

                        if (note.HasFinePitch)
                        {
                            patternBuffer.Add($"${0x65:x2}");
                            patternBuffer.Add($"${note.FinePitch:x2}");
                        }

                        if (note.HasVibrato)
                        {
                            patternBuffer.Add($"${0x63:x2}");
                            patternBuffer.Add($"{lo}({vibratoEnvelopeNames[note.RawVibrato]})");
                            patternBuffer.Add($"{hi}({vibratoEnvelopeNames[note.RawVibrato]})");

                            if (note.RawVibrato == 0)
                            {
                                patternBuffer.Add($"${0x64:x2}");
                            }
                        }

                        if (note.HasFdsModSpeed)
                        {
                            patternBuffer.Add($"${0x66:x2}");
                            patternBuffer.Add($"${(note.FdsModSpeed >> 0) & 0xff:x2}");
                            patternBuffer.Add($"${(note.FdsModSpeed >> 8) & 0xff:x2}");
                        }

                        if (note.HasFdsModDepth)
                        {
                            patternBuffer.Add($"${0x67:x2}");
                            patternBuffer.Add($"${note.FdsModDepth:x2}");
                        }

                        if (note.IsValid)
                        {
                            // Instrument change.
                            if (note.IsMusical)
                            {
                                if (note.Instrument != instrument)
                                {
                                    int idx = instrumentIndices[note.Instrument];
                                    patternBuffer.Add($"${(byte)(0x80 | (idx << 1)):x2}");
                                    instrument = note.Instrument;
                                }
                                else if (!note.HasAttack)
                                {
                                    // TODO: Remove note entirely after a slide that matches the next note with no attack.
                                    patternBuffer.Add($"${0x62:x2}");
                                }
                            }

                            int numNotes = 0;

                            if (kernel != FamiToneKernel.FamiStudio)
                            {
                                // Note -> Empty -> Note special encoding.
                                if (time < patternLength - 2)
                                {
                                    pattern.Notes.TryGetValue(time + 1, out var nextNote1);
                                    pattern.Notes.TryGetValue(time + 2, out var nextNote2);

                                    var valid1 = (nextNote1 != null && nextNote1.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 1, Note.EffectSpeed) >= 0);
                                    var valid2 = (nextNote2 != null && nextNote2.IsValid) || (isSpeedChannel && FindEffectParam(song, p, time + 2, Note.EffectSpeed) >= 0);

                                    if (!valid1 && valid2)
                                    {
                                        it.Next();
                                        numValidNotes--;
                                        numNotes = 1;
                                    }
                                }
                            }

                            if (note.IsSlideNote)
                            {
                                var noteTableNtsc = NesApu.GetNoteTableForChannelType(channel.Type, false, song.Project.ExpansionNumChannels);
                                var noteTablePal  = NesApu.GetNoteTableForChannelType(channel.Type, true, song.Project.ExpansionNumChannels);

                                var found = true;
                                found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTableNtsc, out _, out int stepSizeNtsc, out _);
                                found &= channel.ComputeSlideNoteParams(note, p, time, currentSpeed, Song.NativeTempoNTSC, noteTablePal, out _, out int stepSizePal, out _);

                                if (song.Project.UsesExpansionAudio || machine == MachineType.NTSC)
                                {
                                    stepSizePal = stepSizeNtsc;
                                }
                                else if (machine == MachineType.PAL)
                                {
                                    stepSizeNtsc = stepSizePal;
                                }

                                if (found)
                                {
                                    // Take the (signed) maximum of the NTSC and PAL step sizes so that we are guaranteed to reach our note.
                                    var stepSize = Math.Max(Math.Abs(stepSizeNtsc), Math.Abs(stepSizePal)) * Math.Sign(stepSizeNtsc);
                                    patternBuffer.Add($"${0x61:x2}");
                                    patternBuffer.Add($"${(byte)stepSize:x2}");
                                    patternBuffer.Add($"${EncodeNoteValue(c, note.Value):x2}");
                                    patternBuffer.Add($"${EncodeNoteValue(c, note.SlideNoteTarget):x2}");
                                    continue;
                                }
                            }

                            patternBuffer.Add($"${EncodeNoteValue(c, note.Value, numNotes):x2}");
                            prevNoteValue = note.Value;
                        }
                        else
                        {
                            int numEmptyNotes = 0;

                            while (!it.Done)
                            {
                                time = it.CurrentTime;
                                note = it.CurrentNote;

                                if (note == null)
                                {
                                    note = emptyNote;
                                }

                                if (numEmptyNotes >= maxRepeatCount ||
                                    note.IsValid ||
                                    note.HasVolume ||
                                    note.HasVibrato ||
                                    note.HasFinePitch ||
                                    note.HasFdsModSpeed ||
                                    note.HasFdsModDepth ||
                                    (isSpeedChannel && FindEffectParam(song, p, time, Note.EffectSpeed) >= 0))
                                {
                                    break;
                                }

                                numEmptyNotes++;
                                it.Next();
                            }

                            numValidNotes -= numEmptyNotes;
                            patternBuffer.Add($"${(byte)(0x81 | (numEmptyNotes << 1)):x2}");
                        }
                    }

                    int matchingPatternIdx = -1;

                    if (patternBuffer.Count > 0)
                    {
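                        // Try to reuse an identical, previously-packed pattern; a reference costs 4 bytes, so only search for buffers larger than that.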
                        if (patternBuffer.Count > 4)
                        {
                            for (int j = 0; j < packedPatternBuffers.Count; j++)
                            {
                                if (packedPatternBuffers[j].SequenceEqual(patternBuffer))
                                {
                                    matchingPatternIdx = j;
                                    break;
                                }
                            }
                        }

                        if (matchingPatternIdx < 0)
                        {
                            if (packedPatternBuffers.Count > MaxPackedPatterns)
                            {
                                return(-1); // TODO: Error.
                            }
                            packedPatternBuffers.Add(patternBuffer);

                            size += patternBuffer.Count;

                            if (!test)
                            {
                                lines.Add($"{ll}ref{packedPatternBuffers.Count - 1}:");
                                lines.Add($"\t{db} {String.Join(",", patternBuffer)}");
                            }
                        }
                        else
                        {
                            if (!test)
                            {
                                lines.Add($"\t{db} $ff,${numValidNotes:x2}");
                                lines.Add($"\t{dw} {ll}ref{matchingPatternIdx}");
                            }

                            size += 4;
                        }
                    }
                }

                if (!test)
                {
                    lines.Add($"\t{db} $fd");
                    lines.Add($"\t{dw} {ll}song{songIdx}ch{c}loop");
                }

                size += 3;
            }

            if (!test)
            {
                globalPacketPatternBuffers = packedPatternBuffers;
            }

            return(size);
        }
Example #23
        private bool UpdateChannel(int p, int n, Channel channel, ChannelState state)
        {
            var project    = channel.Song.Project;
            var channelIdx = Channel.ChannelTypeToIndex(channel.Type);
            var hasNote    = false;

            if (channel.Type == ChannelType.Dpcm)
            {
                var len = NsfGetState(nsf, channel.Type, STATE_DPCMSAMPLELENGTH, 0);

                if (len > 0)
                {
                    // Subtracting one here is not correct. But it is a fact that a lot of games
                    // seemed to favor tight sample packing and did not care about playing one
                    // extra sample of garbage.
                    if (!preserveDpcmPadding)
                    {
                        Debug.Assert((len & 0xf) == 1);
                        len--;
                        Debug.Assert((len & 0xf) == 0);
                    }

                    var sampleData = new byte[len];
                    for (int i = 0; i < len; i++)
                    {
                        sampleData[i] = (byte)NsfGetState(nsf, channel.Type, STATE_DPCMSAMPLEDATA, i);
                    }

                    var sample = project.FindMatchingSample(sampleData);
                    if (sample == null)
                    {
                        sample = project.CreateDPCMSampleFromDmcData($"Sample {project.Samples.Count + 1}", sampleData);
                    }

                    var loop  = NsfGetState(nsf, channel.Type, STATE_DPCMLOOP, 0) != 0;
                    var pitch = NsfGetState(nsf, channel.Type, STATE_DPCMPITCH, 0);

                    var note = project.FindDPCMSampleMapping(sample, pitch, loop);
                    if (note == -1)
                    {
                        for (int i = Note.DPCMNoteMin + 1; i <= Note.DPCMNoteMax; i++)
                        {
                            if (project.GetDPCMMapping(i) == null)
                            {
                                note = i;
                                project.MapDPCMSample(i, sample, pitch, loop);
                                break;
                            }
                        }
                    }

                    if (note != -1)
                    {
                        var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).Value = (byte)note;
                        hasNote = true;
                    }
                }
            }
            else
            {
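                // Non-DPCM channels: read the current period, volume and duty cycle from the emulated NSF state.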
                var period  = NsfGetState(nsf, channel.Type, STATE_PERIOD, 0);
                var volume  = NsfGetState(nsf, channel.Type, STATE_VOLUME, 0);
                var duty    = NsfGetState(nsf, channel.Type, STATE_DUTYCYCLE, 0);
                var force   = false;
                var stop    = false;
                var release = false;
                var octave  = -1;

                // VRC6 has a much larger volume range (6-bit) than our volume (4-bit).
                if (channel.Type == ChannelType.Vrc6Saw)
                {
                    volume >>= 2;
                }
                else if (channel.Type == ChannelType.FdsWave)
                {
                    volume = Math.Min(Note.VolumeMax, volume >> 1);
                }
                else if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
                {
                    volume = 15 - volume;
                }

                var hasTrigger = true;
                var hasPeriod  = true;
                var hasOctave  = channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6;
                var hasVolume  = channel.Type != ChannelType.Triangle;
                var hasPitch   = channel.Type != ChannelType.Noise;
                var hasDuty    = channel.Type == ChannelType.Square1 || channel.Type == ChannelType.Square2 || channel.Type == ChannelType.Noise || channel.Type == ChannelType.Vrc6Square1 || channel.Type == ChannelType.Vrc6Square2 || channel.Type == ChannelType.Mmc5Square1 || channel.Type == ChannelType.Mmc5Square2;

                if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
                {
                    var trigger      = NsfGetState(nsf, channel.Type, STATE_VRC7TRIGGER, 0) != 0;
                    var sustain      = NsfGetState(nsf, channel.Type, STATE_VRC7SUSTAIN, 0) != 0;
                    var triggerState = trigger ? ChannelState.Triggered : (sustain ? ChannelState.Released : ChannelState.Stopped);

                    if (triggerState != state.trigger)
                    {
                        stop          = triggerState == ChannelState.Stopped;
                        release       = triggerState == ChannelState.Released;
                        force        |= true;
                        state.trigger = triggerState;
                    }

                    octave = NsfGetState(nsf, channel.Type, STATE_VRC7OCTAVE, 0);
                }
                else
                {
                    if (hasTrigger)
                    {
                        var trigger = volume != 0 && (channel.Type == ChannelType.Noise || period != 0) ? ChannelState.Triggered : ChannelState.Stopped;

                        if (trigger != state.trigger)
                        {
                            stop          = trigger == ChannelState.Stopped;
                            force        |= true;
                            state.trigger = trigger;
                        }
                    }
                }

                if (hasVolume)
                {
                    if (state.volume != volume && volume != 0)
                    {
                        var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).Volume = (byte)volume;
                        state.volume = volume;
                    }
                }

                Instrument instrument = null;

                if (hasDuty)
                {
                    instrument = GetDutyInstrument(channel, duty);
                }
                else if (channel.Type == ChannelType.FdsWave)
                {
                    var wavEnv = new sbyte[64];
                    var modEnv = new sbyte[32];

                    for (int i = 0; i < 64; i++)
                    {
                        wavEnv[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_FDSWAVETABLE, i);
                    }
                    for (int i = 0; i < 32; i++)
                    {
                        modEnv[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONTABLE, i);
                    }

                    Envelope.ConvertFdsModulationToAbsolute(modEnv);

                    var masterVolume = (byte)NsfGetState(nsf, channel.Type, STATE_FDSMASTERVOLUME, 0);

                    instrument = GetFdsInstrument(wavEnv, modEnv, masterVolume);

                    int modDepth = NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONDEPTH, 0);
                    int modSpeed = NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONSPEED, 0);

                    if (state.fdsModDepth != modDepth)
                    {
                        var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FdsModDepth = (byte)modDepth;
                        state.fdsModDepth = modDepth;
                    }

                    if (state.fdsModSpeed != modSpeed)
                    {
                        var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FdsModSpeed = (ushort)modSpeed;
                        state.fdsModSpeed = modSpeed;
                    }
                }
                else if (channel.Type >= ChannelType.N163Wave1 &&
                         channel.Type <= ChannelType.N163Wave8)
                {
                    var wavePos = (byte)NsfGetState(nsf, channel.Type, STATE_N163WAVEPOS, 0);
                    var waveLen = (byte)NsfGetState(nsf, channel.Type, STATE_N163WAVESIZE, 0);

                    if (waveLen > 0)
                    {
                        var waveData = new sbyte[waveLen];
                        for (int i = 0; i < waveLen; i++)
                        {
                            waveData[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_N163WAVE, wavePos + i);
                        }

                        instrument = GetN163Instrument(waveData, wavePos);
                    }

                    period >>= 2;
                }
                else if (channel.Type >= ChannelType.Vrc7Fm1 &&
                         channel.Type <= ChannelType.Vrc7Fm6)
                {
                    var patch = (byte)NsfGetState(nsf, channel.Type, STATE_VRC7PATCH, 0);
                    var regs  = new byte[8];

                    if (patch == 0)
                    {
                        for (int i = 0; i < 8; i++)
                        {
                            regs[i] = (byte)NsfGetState(nsf, channel.Type, STATE_VRC7PATCHREG, i);
                        }
                    }

                    instrument = GetVrc7Instrument(patch, regs);
                }
                else if (channel.Type >= ChannelType.S5BSquare1 && channel.Type <= ChannelType.S5BSquare3)
                {
                    instrument = GetS5BInstrument();
                }
                else
                {
                    instrument = GetDutyInstrument(channel, 0);
                }

                if ((hasPeriod && state.period != period) || (hasOctave && state.octave != octave) || (instrument != state.instrument) || force)
                {
                    var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, project.PalMode, project.ExpansionNumChannels);
                    var note      = release ? Note.NoteRelease : (stop ? Note.NoteStop : state.note);
                    var finePitch = 0;

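                    // If the channel is still playing, map the raw period back to the closest musical note (and fine pitch) using the note table.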
                    if (!stop && !release && state.trigger != ChannelState.Stopped)
                    {
                        if (channel.Type == ChannelType.Noise)
                        {
                            note = (period ^ 0x0f) + 32;
                        }
                        else
                        {
                            note = (byte)GetBestMatchingNote(period, noteTable, out finePitch);
                        }

                        if (hasOctave)
                        {
                            while (note > 12)
                            {
                                note -= 12;
                                octave++;
                            }
                            note     += octave * 12;
                            period   *= (1 << octave);
                            finePitch = period - noteTable[note];
                        }
                    }

                    if (note < Note.MusicalNoteMin || note > Note.MusicalNoteMax)
                    {
                        instrument = null;
                    }

                    if ((state.note != note) || (state.instrument != instrument && instrument != null) || force)
                    {
                        var pattern = GetOrCreatePattern(channel, p);
                        var newNote = pattern.GetOrCreateNoteAt(n);
                        newNote.Value      = (byte)note;
                        newNote.Instrument = instrument;
                        state.note         = note;
                        state.octave       = octave;
                        if (instrument != null)
                        {
                            state.instrument = instrument;
                        }
                        hasNote = note != 0;
                    }

                    if (hasPitch && !stop)
                    {
                        Channel.GetShiftsForType(channel.Type, project.ExpansionNumChannels, out int pitchShift, out _);

                        // We scale all pitch changes (slides, fine pitch, pitch envelopes) for
                        // some channels with HUGE pitch values (N163, VRC7).
                        finePitch >>= pitchShift;

                        var pitch = (sbyte)Utils.Clamp(finePitch, Note.FinePitchMin, Note.FinePitchMax);

                        if (pitch != state.pitch)
                        {
                            var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FinePitch = pitch;
                            state.pitch = pitch;
                        }
                    }

                    state.period = period;
                }
            }

            return(hasNote);
        }
Example #24
 protected void WriteRegister(int reg, int data)
 {
     NesApu.WriteRegister(apuIdx, reg, data);
     player.NotifyRegisterWrite(apuIdx, reg, data);
 }
Example #25
        private void CreateSlideNotes(Song s, Dictionary<Pattern, RowFxData[,]> patternFxData)
        {
            var processedPatterns = new HashSet<Pattern>();

            // Convert slide notes + portamento to our format.
            foreach (var c in s.Channels)
            {
                if (!c.SupportsSlideNotes)
                {
                    continue;
                }

                var songSpeed          = s.FamitrackerSpeed;
                var lastNoteInstrument = (Instrument)null;
                var lastNoteArpeggio   = (Arpeggio)null;
                var lastNoteValue      = (byte)Note.NoteInvalid;
                var portamentoSpeed    = 0;
                var slideSpeed         = 0;
                var slideShift         = c.IsN163WaveChannel ? 2 : 0;
                var slideSign          = c.IsN163WaveChannel || c.IsFdsWaveChannel || c.IsVrc7FmChannel ? -1 : 1; // Inverted channels.

                for (int p = 0; p < s.Length; p++)
                {
                    var pattern = c.PatternInstances[p];

                    if (pattern == null)
                    {
                        continue;
                    }

                    var patternLen = s.GetPatternLength(p);

                    for (var it = pattern.GetDenseNoteIterator(0, patternLen); !it.Done; it.Next())
                    {
                        var location = new NoteLocation(p, it.CurrentTime);
                        var note     = it.CurrentNote;

                        // Look for speed changes.
                        s.ApplySpeedEffectAt(location, ref songSpeed);

                        if (!patternFxData.ContainsKey(pattern) || processedPatterns.Contains(pattern))
                        {
                            continue;
                        }

                        var fxData      = patternFxData[pattern];
                        var slideTarget = 0;

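                        // Scan every effect column on this row for portamento/slide effects.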
                        for (int i = 0; i < fxData.GetLength(1); i++)
                        {
                            var fx = fxData[location.NoteIndex, i];

                            if (fx.param != 0)
                            {
                                // When the effect is turned on, we need to add a note.
                                if ((fx.fx == Effect_PortaUp ||
                                     fx.fx == Effect_PortaDown ||
                                     fx.fx == Effect_SlideUp ||
                                     fx.fx == Effect_SlideDown) &&
                                    lastNoteValue >= Note.MusicalNoteMin &&
                                    lastNoteValue <= Note.MusicalNoteMax && (note == null || !note.IsValid))
                                {
                                    if (note == null)
                                    {
                                        note = pattern.GetOrCreateNoteAt(location.NoteIndex);
                                        it.Resync();
                                    }

                                    note.Value      = lastNoteValue;
                                    note.Instrument = lastNoteInstrument;
                                    note.Arpeggio   = lastNoteArpeggio;
                                    note.HasAttack  = false;
                                }
                            }

                            if (fx.fx == Effect_PortaUp)
                            {
                                // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                                if (slideTarget == 0)
                                {
                                    slideSpeed = (-fx.param * slideSign) << slideShift;
                                }
                            }
                            if (fx.fx == Effect_PortaDown)
                            {
                                // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                                if (slideTarget == 0)
                                {
                                    slideSpeed = (fx.param * slideSign) << slideShift;
                                }
                            }
                            if (fx.fx == Effect_Portamento)
                            {
                                portamentoSpeed = fx.param;
                            }
                            if (fx.fx == Effect_SlideUp && note != null && note.IsMusical)
                            {
                                slideTarget = Utils.Clamp(note.Value + (fx.param & 0xf), Note.MusicalNoteMin, Note.MusicalNoteMax);
                                slideSpeed  = (-((fx.param >> 4) * 2 + 1)) << slideShift;
                            }
                            if (fx.fx == Effect_SlideDown && note != null && note.IsMusical)
                            {
                                slideTarget = Utils.Clamp(note.Value - (fx.param & 0xf), Note.MusicalNoteMin, Note.MusicalNoteMax);
                                slideSpeed  = (((fx.param >> 4) * 2 + 1)) << slideShift;
                            }
                        }

                        // Create a slide note.
                        if (note != null && !note.IsSlideNote)
                        {
                            if (note.IsMusical)
                            {
                                var slideSource = note.Value;
                                var noteTable   = NesApu.GetNoteTableForChannelType(c.Type, s.Project.PalMode, s.Project.ExpansionNumChannels);
                                var pitchLimit  = NesApu.GetPitchLimitForChannelType(c.Type);

                                // If we have a new note with auto-portamento enabled, we need to
                                // swap the notes since our slide notes work backward compared to
                                // FamiTracker.
                                if (portamentoSpeed != 0)
                                {
                                    // Ignore notes with no attack since we created them to handle a previous slide.
                                    if (note.HasAttack && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax)
                                    {
                                        slideSpeed  = portamentoSpeed;
                                        slideTarget = note.Value;
                                        slideSource = lastNoteValue;
                                        note.Value  = lastNoteValue;
                                    }
                                }

                                // Our implementation of VRC7 pitches is quite different from FamiTracker.
                                // Compensate for larger pitches in higher octaves by shifting. We can't shift by
                                // a large amount because the period is 9-bit and FamiTracker is restricted to
                                // this for slides (octave never changes).
                                var octaveSlideShift = c.IsVrc7FmChannel && note.Value >= 12 ? 1 : 0;

                                // 3xx/Qxy/Rxy : We know which note we are sliding to and the speed, but we
                                //               don't know how many frames it will take to get there.
                                if (slideTarget != 0)
                                {
                                    // Advance in the song until we have the correct number of frames.
                                    var numFrames = Math.Max(1, Math.Abs((noteTable[slideSource] - noteTable[slideTarget]) / (slideSpeed << octaveSlideShift)));
                                    note.SlideNoteTarget = (byte)slideTarget;

                                    // TODO: Here we consider if the start note has a delay, but ignore the end note. It might have one too.
                                    var nextLocation = location;
                                    s.AdvanceNumberOfFrames(ref nextLocation, numFrames, note.HasNoteDelay ? -note.NoteDelay : 0, songSpeed, s.Project.PalMode);

                                    // Still need to check if there is a note between the current one and the
                                    // next note; this could happen if you add a note before the slide
                                    // is supposed to finish.
                                    if (FindNextSlideEffect(c, location, out var nextLocation2, patternFxData))
                                    {
                                        nextLocation = NoteLocation.Min(nextLocation, nextLocation2);

                                        // If the slide is interrupted by another slide effect, we will not reach
                                        // the final target, but rather some intermediate note. Let's do our best
                                        // to interpolate and figure out the best note.
                                        var numFramesUntilNextSlide = s.CountFramesBetween(location, nextLocation, songSpeed, s.Project.PalMode);
                                        var ratio             = Utils.Clamp(numFramesUntilNextSlide / numFrames, 0.0f, 1.0f);
                                        var intermediatePitch = (int)Math.Round(Utils.Lerp(noteTable[slideSource], noteTable[slideTarget], ratio));

                                        slideTarget          = FindBestMatchingNote(noteTable, intermediatePitch, Math.Sign(slideSpeed));
                                        note.SlideNoteTarget = (byte)slideTarget;
                                    }

                                    if (nextLocation.PatternIndex < s.Length)
                                    {
                                        // Add an extra note with no attack to stop the slide.
                                        var nextPattern = c.PatternInstances[nextLocation.PatternIndex];
                                        if (!nextPattern.Notes.TryGetValue(nextLocation.NoteIndex, out var nextNote) || !nextNote.IsValid)
                                        {
                                            nextNote            = nextPattern.GetOrCreateNoteAt(nextLocation.NoteIndex);
                                            nextNote.Instrument = note.Instrument;
                                            nextNote.Value      = (byte)slideTarget;
                                            nextNote.HasAttack  = false;
                                            it.Resync();
                                        }
                                        else if (nextNote != null && nextNote.IsRelease)
                                        {
                                            Log.LogMessage(LogSeverity.Warning, $"A slide note ends on a release note. This is currently unsupported and will require manual correction. {GetPatternString(nextPattern, nextLocation.NoteIndex)}");
                                        }
                                    }

                                    // 3xx, Qxx and Rxx stop when they are done.
                                    slideSpeed = 0;
                                }

                                // 1xx/2xx : We know the speed at which we are sliding, but need to figure out what makes it stop.
                                else if (slideSpeed != 0 && FindNextSlideEffect(c, location, out var nextLocation, patternFxData))
                                {
                                    // See how many frames until the slide stops.
                                    var numFrames = (int)Math.Round(s.CountFramesBetween(location, nextLocation, songSpeed, s.Project.PalMode));

                                    // TODO: Here we consider if the start note has a delay, but ignore the end note. It might have one too.
                                    numFrames = Math.Max(1, numFrames - (note.HasNoteDelay ? note.NoteDelay : 0));

                                    // Compute the pitch delta and find the closest target note.
                                    var newNotePitch = Utils.Clamp(noteTable[slideSource] + numFrames * (slideSpeed << octaveSlideShift), 0, pitchLimit);
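                                    // Illustrative numbers (not from a real note table): with noteTable[slideSource] = 428,
                                    // numFrames = 12, slideSpeed = -2 and octaveSlideShift = 0, this gives
                                    // clamp(428 + 12 * -2, 0, pitchLimit) = 404, and FindBestMatchingNote picks the
                                    // closest note for that pitch.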
                                    var newNote      = FindBestMatchingNote(noteTable, newNotePitch, Math.Sign(slideSpeed));

                                    note.SlideNoteTarget = (byte)newNote;

                                    // If the FX was turned off, we need to add an extra note.
                                    var nextPattern = c.PatternInstances[nextLocation.PatternIndex];
                                    if (!nextPattern.Notes.TryGetValue(nextLocation.NoteIndex, out var nextNote) || !nextNote.IsValid)
                                    {
                                        nextNote            = nextPattern.GetOrCreateNoteAt(nextLocation.NoteIndex);
                                        nextNote.Instrument = note.Instrument;
                                        nextNote.Value      = (byte)newNote;
                                        nextNote.HasAttack  = false;
                                        it.Resync();
                                    }
                                    else if (nextNote != null && nextNote.IsRelease)
                                    {
                                        Log.LogMessage(LogSeverity.Warning, $"A slide note ends on a release note. This is currently unsupported and will require manual correction. {GetPatternString(nextPattern, nextLocation.NoteIndex)}");
                                    }
                                }
                            }
                        }

                        if (note != null && (note.IsMusical || note.IsStop))
                        {
                            lastNoteValue      = note.IsSlideNote ? note.SlideNoteTarget : note.Value;
                            lastNoteInstrument = note.Instrument;
                            lastNoteArpeggio   = note.Arpeggio;
                        }
                    }

                    processedPatterns.Add(pattern);
                }
            }
        }
Exemplo n.º 26
0
        public unsafe static void Save(Song song, string filename, int sampleRate)
        {
            var advance      = true;
            var tempoCounter = 0;
            var playPattern  = 0;
            var playNote     = 0;
            var speed        = song.Speed;
            var wavBytes     = new List <byte>();
            var apuIndex     = NesApu.APU_WAV_EXPORT;
            var dmcCallback  = new NesApu.DmcReadDelegate(NesApu.DmcReadCallback);

            NesApu.NesApuInit(apuIndex, sampleRate, dmcCallback);
            NesApu.Reset(apuIndex);

            var channels = new ChannelState[5]
            {
                new SquareChannelState(apuIndex, 0),
                new SquareChannelState(apuIndex, 1),
                new TriangleChannelState(apuIndex, 2),
                new NoiseChannelState(apuIndex, 3),
                new DPCMChannelState(apuIndex, 4)
            };

            for (int i = 0; i < 5; i++)
            {
                NesApu.NesApuEnableChannel(apuIndex, i, 1);
            }

            while (true)
            {
                // Advance to next note.
                if (advance)
                {
                    foreach (var channel in channels)
                    {
                        channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed, false);
                    }

                    foreach (var channel in channels)
                    {
                        channel.Advance(song, playPattern, playNote);
                    }

                    advance = false;
                }

                // Update envelopes + APU registers.
                foreach (var channel in channels)
                {
                    channel.UpdateEnvelopes();
                    channel.UpdateAPU();
                }

                NesApu.NesApuEndFrame(apuIndex);

                int    numTotalSamples = NesApu.NesApuSamplesAvailable(apuIndex);
                byte[] samples         = new byte[numTotalSamples * 2];

                fixed(byte *ptr = &samples[0])
                {
                    NesApu.NesApuReadSamples(apuIndex, new IntPtr(ptr), numTotalSamples);
                }

                wavBytes.AddRange(samples);

                int dummy1 = 0;
                if (!PlayerBase.AdvanceTempo(song, speed, LoopMode.None, ref tempoCounter, ref playPattern, ref playNote, ref dummy1, ref advance))
                {
                    break;
                }
            }

            using (var file = new FileStream(filename, FileMode.Create))
            {
                var header = new WaveHeader();

                // RIFF WAVE Header
                header.chunkId[0] = (byte)'R';
                header.chunkId[1] = (byte)'I';
                header.chunkId[2] = (byte)'F';
                header.chunkId[3] = (byte)'F';
                header.format[0]  = (byte)'W';
                header.format[1]  = (byte)'A';
                header.format[2]  = (byte)'V';
                header.format[3]  = (byte)'E';

                // Format subchunk
                header.subChunk1Id[0] = (byte)'f';
                header.subChunk1Id[1] = (byte)'m';
                header.subChunk1Id[2] = (byte)'t';
                header.subChunk1Id[3] = (byte)' ';
                header.audioFormat    = 1;          // PCM
                header.numChannels    = 1;          // 1 for mono, 2 for stereo
                header.sampleRate     = sampleRate; // e.g. 44100 Hz, CD-quality audio
                header.bitsPerSample  = 16;
                header.byteRate       = header.sampleRate * header.numChannels * header.bitsPerSample / 8;
                header.blockAlign     = (short)(header.numChannels * header.bitsPerSample / 8);

                // Data subchunk
                header.subChunk2Id[0] = (byte)'d';
                header.subChunk2Id[1] = (byte)'a';
                header.subChunk2Id[2] = (byte)'t';
                header.subChunk2Id[3] = (byte)'a';

                // All sizes for later:
                // chunkSize = 4 + (8 + subChunk1Size) + (8 + subChunk2Size)
                // subChunk1Size is constant; we use 16 and stay with PCM
                // subChunk2Size = nSamples * nChannels * bitsPerSample/8
                // Whenever a sample is added:
                //    chunkSize += (nChannels * bitsPerSample/8)
                //    subChunk2Size += (nChannels * bitsPerSample/8)
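                // Worked example (illustrative): one second of 16-bit mono audio at 44100 Hz gives
                // byteRate = 44100 * 1 * 16/8 = 88200 bytes/s, blockAlign = 2,
                // subChunk2Size = 44100 * 1 * 2 = 88200 and chunkSize = 4 + (8 + 16) + (8 + 88200) = 88236.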
                header.subChunk1Size = 16;
                header.subChunk2Size = wavBytes.Count;
                header.chunkSize     = 4 + (8 + header.subChunk1Size) + (8 + header.subChunk2Size);

                var headerBytes = new byte[sizeof(WaveHeader)];
                Marshal.Copy(new IntPtr(&header), headerBytes, 0, headerBytes.Length);
                file.Write(headerBytes, 0, headerBytes.Length);
                file.Write(wavBytes.ToArray(), 0, wavBytes.Count);
            }
        }
Exemplo n.º 27
0
        unsafe void PlayerThread(object o)
        {
            var lastNoteWasRelease = false;
            var lastReleaseTime    = DateTime.Now;

            var activeChannel = -1;
            var waitEvents    = new WaitHandle[] { stopEvent, bufferSemaphore };

            NesApu.InitAndReset(apuIndex, sampleRate, palPlayback, GetNesApuExpansionAudio(expansionAudio), numExpansionChannels, dmcCallback);
            for (int i = 0; i < channelStates.Length; i++)
            {
                NesApu.EnableChannel(apuIndex, i, 0);
            }

            while (true)
            {
                int idx = WaitHandle.WaitAny(waitEvents);

                if (idx == 0)
                {
                    break;
                }

                if (!noteQueue.IsEmpty)
                {
                    PlayerNote lastNote = new PlayerNote();
                    while (noteQueue.TryDequeue(out PlayerNote note))
                    {
                        lastNote = note;
                    }

                    activeChannel = lastNote.channel;
                    if (activeChannel >= 0)
                    {
                        if (lastNote.note.IsMusical)
                        {
                            channelStates[activeChannel].ForceInstrumentReload();
                        }

                        channelStates[activeChannel].PlayNote(lastNote.note);

                        if (lastNote.note.IsRelease)
                        {
                            lastNoteWasRelease = true;
                            lastReleaseTime    = DateTime.Now;
                        }
                        else
                        {
                            lastNoteWasRelease = false;
                        }
                    }

                    for (int i = 0; i < channelStates.Length; i++)
                    {
                        NesApu.EnableChannel(apuIndex, i, i == activeChannel ? 1 : 0);
                    }
                }

                if (lastNoteWasRelease &&
                    activeChannel >= 0 &&
                    Settings.InstrumentStopTime >= 0 &&
                    DateTime.Now.Subtract(lastReleaseTime).TotalSeconds >= Settings.InstrumentStopTime)
                {
                    NesApu.EnableChannel(apuIndex, activeChannel, 0);
                    activeChannel = -1;
                }

                if (activeChannel >= 0)
                {
                    channelStates[activeChannel].Update();

                    for (int i = 0; i < EnvelopeType.Count; i++)
                    {
                        envelopeFrames[i] = channelStates[activeChannel].GetEnvelopeFrame(i);
                    }
                }
                else
                {
                    for (int i = 0; i < EnvelopeType.Count; i++)
                    {
                        envelopeFrames[i] = 0;
                    }
                    foreach (var channel in channelStates)
                    {
                        channel.ClearNote();
                    }
                }

                EndFrame();
            }

            audioStream.Stop();
            while (sampleQueue.TryDequeue(out _))
            {
                ;
            }
        }
Exemplo n.º 28
0
        private void CreateSlideNotes(Song s, Dictionary <Pattern, RowFxData[, ]> patternFxData)
        {
            var processedPatterns = new HashSet <Pattern>();

            // Convert slide notes + portamento to our format.
            foreach (var c in s.Channels)
            {
                if (!c.SupportsSlideNotes)
                {
                    continue;
                }

                var songSpeed          = s.FamitrackerSpeed;
                var lastNoteInstrument = (Instrument)null;
                var lastNoteValue      = (byte)Note.NoteInvalid;
                var portamentoSpeed    = 0;
                var slideSpeed         = 0;
                var slideShift         = c.IsN163WaveChannel ? 2 : 0;
                var slideSign          = c.IsN163WaveChannel || c.IsFdsWaveChannel ? -1 : 1; // Inverted channels.

                for (int p = 0; p < s.Length; p++)
                {
                    var pattern = c.PatternInstances[p];

                    if (pattern == null || !patternFxData.ContainsKey(pattern) || processedPatterns.Contains(pattern))
                    {
                        continue;
                    }

                    processedPatterns.Add(pattern);

                    var fxData     = patternFxData[pattern];
                    var patternLen = s.GetPatternLength(p);

                    for (var it = pattern.GetNoteIterator(0, patternLen); !it.Done; it.Next())
                    {
                        var n    = it.CurrentTime;
                        var note = it.CurrentNote;

                        // Look for speed changes.
                        foreach (var c2 in s.Channels)
                        {
                            var pattern2 = c2.PatternInstances[p];

                            if (pattern2 != null && pattern2.Notes.TryGetValue(n, out var note2) && note2.HasSpeed)
                            {
                                songSpeed = note2.Speed;
                            }
                        }

                        var slideTarget = 0;

                        for (int i = 0; i < fxData.GetLength(1); i++)
                        {
                            var fx = fxData[n, i];

                            if (fx.param != 0)
                            {
                                // When the effect is turned on, we need to add a note.
                                if ((fx.fx == Effect_PortaUp ||
                                     fx.fx == Effect_PortaDown ||
                                     fx.fx == Effect_SlideUp ||
                                     fx.fx == Effect_SlideDown) &&
                                    lastNoteValue >= Note.MusicalNoteMin &&
                                    lastNoteValue <= Note.MusicalNoteMax && (note == null || !note.IsValid))
                                {
                                    if (note == null)
                                    {
                                        note = pattern.GetOrCreateNoteAt(n);
                                        it.Resync();
                                    }

                                    note.Value      = lastNoteValue;
                                    note.Instrument = lastNoteInstrument;
                                    note.HasAttack  = false;
                                }
                            }

                            if (fx.fx == Effect_PortaUp)
                            {
                                // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                                if (slideTarget == 0)
                                {
                                    slideSpeed = (-fx.param * slideSign) << slideShift;
                                }
                            }
                            if (fx.fx == Effect_PortaDown)
                            {
                                // If we have a Qxx/Rxx on the same row as a 1xx/2xx, things get weird.
                                if (slideTarget == 0)
                                {
                                    slideSpeed = (fx.param * slideSign) << slideShift;
                                }
                            }
                            if (fx.fx == Effect_Portamento)
                            {
                                portamentoSpeed = fx.param;
                            }
                            if (fx.fx == Effect_SlideUp)
                            {
                                slideTarget = note.Value + (fx.param & 0xf);
                                slideSpeed  = (-((fx.param >> 4) * 2 + 1)) << slideShift;
                            }
                            if (fx.fx == Effect_SlideDown)
                            {
                                slideTarget = note.Value - (fx.param & 0xf);
                                slideSpeed  = (((fx.param >> 4) * 2 + 1)) << slideShift;
                            }
                        }

                        // Create a slide note.
                        if (note != null && !note.IsSlideNote)
                        {
                            if (note.IsMusical)
                            {
                                var slideSource = note.Value;
                                var noteTable   = NesApu.GetNoteTableForChannelType(c.Type, false, s.Project.ExpansionNumChannels);
                                var pitchLimit  = NesApu.GetPitchLimitForChannelType(c.Type);

                                // If we have a new note with auto-portamento enabled, we need to
                                // swap the notes since our slide notes work backward compared to
                                // FamiTracker.
                                if (portamentoSpeed != 0)
                                {
                                    // Ignore notes with no attack since we created them to handle a previous slide.
                                    if (note.HasAttack && lastNoteValue >= Note.MusicalNoteMin && lastNoteValue <= Note.MusicalNoteMax)
                                    {
                                        slideSpeed  = portamentoSpeed;
                                        slideTarget = note.Value;
                                        slideSource = lastNoteValue;
                                        note.Value  = lastNoteValue;
                                    }
                                }
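                                // Illustrative conversion: if the previous note was C-3 and the current row has E-3
                                // with 3xx, the converted note keeps the value C-3 and gets SlideNoteTarget = E-3,
                                // since our slide notes start at the written value and glide toward the target.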

                                // Our implementation of VRC7 pitches is quite different from FamiTracker.
                                // Compensate for larger pitches in higher octaves by shifting. We can't shift by
                                // a large amount because the period is 9-bit and FamiTracker is restricted to
                                // this for slides (octave never changes).
                                var octaveSlideShift = c.IsVrc7FmChannel && note.Value >= 12 ? 1 : 0;

                                if (slideTarget != 0)
                                {
                                    // TODO: We assume a tempo of 150 here. This is wrong.
                                    var numFrames = Math.Max(1, Math.Abs((noteTable[slideSource] - noteTable[slideTarget]) / ((slideSpeed << octaveSlideShift) * songSpeed)));
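                                    // Illustrative arithmetic (hypothetical values): with noteTable[slideSource] = 428,
                                    // noteTable[slideTarget] = 380, slideSpeed = -2, octaveSlideShift = 0 and
                                    // songSpeed = 6, numFrames = max(1, |(428 - 380) / (-2 * 6)|) = 4 rows.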
                                    note.SlideNoteTarget = (byte)slideTarget;

                                    var nn = n + numFrames;
                                    var np = p;
                                    while (nn >= s.GetPatternLength(np))
                                    {
                                        nn -= s.GetPatternLength(np);
                                        np++;
                                    }
                                    if (np >= s.Length)
                                    {
                                        np = s.Length;
                                        nn = 0;
                                    }

                                    // We still need to check whether there is a note between the current one and
                                    // the next one; this can happen if a note is added before the slide
                                    // is supposed to finish.
                                    if (FindNextNoteForSlide(c, p, n, out var np2, out var nn2, patternFxData))
                                    {
                                        if (np2 < np)
                                        {
                                            np = np2;
                                            nn = nn2;
                                        }
                                        else if (np2 == np)
                                        {
                                            nn = Math.Min(nn, nn2);
                                        }
                                    }

                                    if (np < s.Length)
                                    {
                                        // Add an extra note with no attack to stop the slide.
                                        var nextPattern = c.PatternInstances[np];
                                        if (!nextPattern.Notes.TryGetValue(nn, out var nextNote) || !nextNote.IsValid)
                                        {
                                            nextNote            = nextPattern.GetOrCreateNoteAt(nn);
                                            nextNote.Instrument = note.Instrument;
                                            nextNote.Value      = (byte)slideTarget;
                                            nextNote.HasAttack  = false;
                                            it.Resync();
                                        }
                                    }

                                    // 3xx, Qxx and Rxx stop when they are done.
                                    slideSpeed = 0;
                                }
                                // Find the next note that would stop the slide or change the FX settings.
                                else if (slideSpeed != 0 && FindNextNoteForSlide(c, p, n, out var np, out var nn, patternFxData))
                                {
                                    // Compute the pitch delta and find the closest target note.
                                    var numFrames = (s.GetPatternStartNote(np, nn) - s.GetPatternStartNote(p, n)) * songSpeed;
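                                    // Illustrative: if the next slide-related effect lands 8 rows later and
                                    // songSpeed = 6, numFrames = 8 * 6 = 48 frames (row distance scaled by speed).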

                                    // TODO: PAL.
                                    var newNotePitch = Utils.Clamp(noteTable[slideSource] + numFrames * (slideSpeed << octaveSlideShift), 0, pitchLimit);
                                    var newNote      = FindBestMatchingNote(noteTable, newNotePitch, Math.Sign(slideSpeed));

                                    note.SlideNoteTarget = (byte)newNote;

                                    // If the FX was turned off, we need to add an extra note.
                                    var nextPattern = c.PatternInstances[np];
                                    if (!nextPattern.Notes.TryGetValue(nn, out var nextNote) || !nextNote.IsValid)
                                    {
                                        nextNote            = nextPattern.GetOrCreateNoteAt(nn);
                                        nextNote.Instrument = note.Instrument;
                                        nextNote.Value      = (byte)newNote;
                                        nextNote.HasAttack  = false;
                                        it.Resync();
                                    }
                                }
                            }
                        }

                        if (note != null && (note.IsMusical || note.IsStop))
                        {
                            lastNoteValue      = note.IsSlideNote ? note.SlideNoteTarget : note.Value;
                            lastNoteInstrument = note.Instrument;
                        }
                    }
                }
            }
        }
Exemplo n.º 29
0
 public virtual void Initialize()
 {
     dmcCallback = new NesApu.DmcReadDelegate(DmcReadCallback);
     NesApu.NesApuInit(apuIndex, SampleRate, dmcCallback);
     xaudio2Stream = new XAudio2Stream(SampleRate, 16, 1, BufferSize, NumAudioBuffers, AudioBufferFillCallback);
 }
Exemplo n.º 30
0
        unsafe void PlayerThread(object o)
        {
            var channels = new ChannelState[5]
            {
                new SquareChannelState(apuIndex, 0),
                new SquareChannelState(apuIndex, 1),
                new TriangleChannelState(apuIndex, 2),
                new NoiseChannelState(apuIndex, 3),
                new DPCMChannelState(apuIndex, 4)
            };

            var startInfo = (SongPlayerStartInfo)o;
            var song      = startInfo.song;

            bool advance      = true;
            int  tempoCounter = 0;
            int  playPattern  = 0;
            int  playNote     = 0;
            int  speed        = song.Speed;

            NesApu.Reset(apuIndex);

            if (startInfo.frame != 0)
            {
                foreach (var channel in channels)
                {
                    channel.StartSeeking();
                }

                while (playPattern * song.PatternLength + playNote != startInfo.frame)
                {
                    foreach (var channel in channels)
                    {
                        channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed);
                    }

                    foreach (var channel in channels)
                    {
                        channel.Advance(song, playPattern, playNote);
                        channel.UpdateEnvelopes();
                        channel.UpdateAPU();
                    }

                    int  dummy1 = 0;
                    bool dummy2 = false;
                    if (!AdvanceTempo(song, speed, LoopMode.None, ref tempoCounter, ref playPattern, ref playNote, ref dummy1, ref dummy2))
                    {
                        break;
                    }
                }

                foreach (var channel in channels)
                {
                    channel.StopSeeking();
                }
            }

            var waitEvents = new WaitHandle[] { stopEvent, frameEvent };

            while (true)
            {
                int idx = WaitHandle.WaitAny(waitEvents);

                if (idx == 0)
                {
                    break;
                }

                // Advance to next note.
                if (advance)
                {
                    // We process the effects first since one channel may have a
                    // skip/jump effect, and we need to handle that before advancing
                    // the song.
                    foreach (var channel in channels)
                    {
                        channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed);
                    }

                    foreach (var channel in channels)
                    {
                        channel.Advance(song, playPattern, playNote);
                    }

                    advance = false;
                }

                // Update envelopes + APU registers.
                foreach (var channel in channels)
                {
                    channel.UpdateEnvelopes();
                    channel.UpdateAPU();
                }

                // Mute.
                for (int i = 0; i < 5; i++)
                {
                    NesApu.NesApuEnableChannel(apuIndex, i, (channelMask & (1 << i)));
                }

                EndFrameAndQueueSamples();

                if (!AdvanceTempo(song, speed, loopMode, ref tempoCounter, ref playPattern, ref playNote, ref playFrame, ref advance))
                {
                    break;
                }
            }

            audioStream.Stop();
            while (sampleQueue.TryDequeue(out _))
            {
                ;
            }
        }