// Builds one ChannelState per active channel in the project, in ascending
// channel-type order. The returned array has exactly
// project.GetActiveChannelCount() entries.
public ChannelState[] CreateChannelStates(Project project, int apuIdx, int expNumChannels, bool pal)
{
    var states = new ChannelState[project.GetActiveChannelCount()];
    var stateIndex = 0;

    for (int channelType = 0; channelType < Channel.Count; channelType++)
    {
        // Skip channels the project does not use (e.g. disabled expansions).
        if (!project.IsChannelActive(channelType))
            continue;

        states[stateIndex] = CreateChannelState(apuIdx, channelType, expNumChannels, pal);
        stateIndex++;
    }

    return states;
}
// Builds one ChannelState per active channel in the project, in ascending
// channel-type order. The 'player' argument is accepted for interface symmetry
// but is not consulted in this implementation.
protected ChannelState[] CreateChannelStates(IPlayerInterface player, Project project, int apuIdx, int expNumChannels, bool pal)
{
    var states = new ChannelState[project.GetActiveChannelCount()];
    var stateIndex = 0;

    for (int channelType = 0; channelType < ChannelType.Count; channelType++)
    {
        // Skip channels the project does not use (e.g. disabled expansions).
        if (!project.IsChannelActive(channelType))
            continue;

        states[stateIndex++] = CreateChannelState(apuIdx, channelType, expNumChannels, pal);
    }

    return states;
}
// Builds one ChannelState per active channel in the project, optionally
// attaching the given register listener to each created state.
public ChannelState[] CreateChannelStates(Project project, int apuIdx, int expNumChannels, bool pal, IRegisterListener listener)
{
    var states = new ChannelState[project.GetActiveChannelCount()];
    var stateIndex = 0;

    for (int channelType = 0; channelType < Channel.Count; channelType++)
    {
        // Skip channels the project does not use (e.g. disabled expansions).
        if (!project.IsChannelActive(channelType))
            continue;

        var state = CreateChannelState(apuIdx, channelType, expNumChannels, pal);

        if (listener != null)
            state.SetRegisterListener(listener);

        states[stateIndex++] = state;
    }

    return states;
}
// Imports an NSF file into a new FamiStudio project by emulating the tune for
// 'duration' seconds and reconstructing notes, volumes and instruments from
// the emulated APU state. Returns null when the file cannot be opened, the
// song index is out of range, or the NSF uses an unsupported expansion combo.
public Project Load(string filename, int songIndex, int duration, int patternLength, int startFrame, bool removeIntroSilence, bool reverseDpcm, bool preserveDpcmPad)
{
    nsf = NsfOpen(filename);

    if (nsf == null)
        return null;

    // Song indices are zero-based (see "Song {songIndex + 1}" below), so the
    // last valid index is trackCount - 1. The previous '>' comparison wrongly
    // accepted songIndex == trackCount. Also close the native handle on this
    // early-out path (it was leaked before).
    var trackCount = NsfGetTrackCount(nsf);
    if (songIndex < 0 || songIndex >= trackCount)
    {
        NsfClose(nsf);
        return null;
    }

    preserveDpcmPadding = preserveDpcmPad;

    // PAL machines tick at 50 FPS, NTSC at 60; one emulated frame per tick.
    var palSource = (NsfIsPal(nsf) & 1) == 1;
    var numFrames = duration * (palSource ? 50 : 60);

    project = new Project();
    project.Name = Marshal.PtrToStringAnsi(NsfGetTitle(nsf));
    project.Author = Marshal.PtrToStringAnsi(NsfGetArtist(nsf));
    project.Copyright = Marshal.PtrToStringAnsi(NsfGetCopyright(nsf));
    project.PalMode = palSource;

    // Map the NSF expansion flag to our project expansion setting.
    switch (NsfGetExpansion(nsf))
    {
        case EXTSOUND_VRC6: project.SetExpansionAudio(ExpansionType.Vrc6); break;
        case EXTSOUND_VRC7: project.SetExpansionAudio(ExpansionType.Vrc7); break;
        case EXTSOUND_FDS:  project.SetExpansionAudio(ExpansionType.Fds);  break;
        case EXTSOUND_MMC5: project.SetExpansionAudio(ExpansionType.Mmc5); break;
        case EXTSOUND_N163: project.SetExpansionAudio(ExpansionType.N163, GetNumNamcoChannels(filename, songIndex, numFrames)); break;
        case EXTSOUND_S5B:  project.SetExpansionAudio(ExpansionType.S5B);  break;
        case 0: break;
        default:
            NsfClose(nsf); // Unsupported expansion combination.
            return null;
    }

    var songName = Marshal.PtrToStringAnsi(NsfGetTrackName(nsf, songIndex));

    song = project.CreateSong(string.IsNullOrEmpty(songName) ? $"Song {songIndex + 1}" : songName);
    channelStates = new ChannelState[song.Channels.Length];

    NsfSetTrack(nsf, songIndex);

    song.ResizeNotes(1, false);
    song.SetDefaultPatternLength(patternLength);

    for (int i = 0; i < song.Channels.Length; i++)
        channelStates[i] = new ChannelState();

    var foundFirstNote = !removeIntroSilence;

    var p = 0;
    var n = 0;
    var f = startFrame;

    for (int i = 0; i < numFrames; i++)
    {
        p = f / song.PatternLength;
        n = f % song.PatternLength;

        if (p >= Song.MaxLength - 1)
            break;

        // Run the emulator until the NSF PLAY routine has actually been called.
        var playCalled = 0;
        do
        {
            playCalled = NsfRunFrame(nsf);
        }
        while (playCalled == 0);

        for (int c = 0; c < song.Channels.Length; c++)
            foundFirstNote |= UpdateChannel(p, n, song.Channels[c], channelStates[c]);

        if (foundFirstNote)
        {
            f++;
        }
        else
        {
            // Reset everything until we find our first note.
            project.DeleteAllInstrument();
            project.DeleteAllSamples();
            for (int c = 0; c < song.Channels.Length; c++)
                channelStates[c] = new ChannelState();
        }
    }

    song.SetLength(p + 1);

    NsfClose(nsf);

    // Re-quantize the imported frames into sensible note lengths.
    var factors = Utils.GetFactors(song.PatternLength, Song.MaxNoteLength);
    if (factors.Length > 0)
    {
        var noteLen = factors[0];

        // Look for a factor that generates a note length < 10 and gives a pattern length that is a multiple of 16.
        foreach (var factor in factors)
        {
            if (factor <= 10)
            {
                noteLen = factor;
                if (((song.PatternLength / noteLen) % 16) == 0)
                    break;
            }
        }

        song.ResizeNotes(noteLen, false);
    }
    else
    {
        song.ResizeNotes(1, false);
    }

    song.SetSensibleBeatLength();
    song.DeleteEmptyPatterns();
    song.UpdatePatternStartNotes();
    project.DeleteUnusedInstruments();
    project.UpdateAllLastValidNotesAndVolume();

    foreach (var sample in project.Samples)
        sample.ReverseBits = reverseDpcm;

    return project;
}
// Reads the emulated APU/expansion state for one channel at song position
// (pattern p, note n) and converts any changes into FamiStudio notes, volumes,
// instruments and effects. Returns true when a musical note was written
// (used by the caller to detect the first audible frame).
private bool UpdateChannel(int p, int n, Channel channel, ChannelState state)
{
    var project = channel.Song.Project;
    var channelIdx = Channel.ChannelTypeToIndex(channel.Type); // NOTE(review): unused in this method.
    var hasNote = false;

    if (channel.Type == ChannelType.Dpcm)
    {
        var len = NsfGetState(nsf, channel.Type, STATE_DPCMSAMPLELENGTH, 0);

        if (len > 0)
        {
            // Subtracting one here is not correct. But it is a fact that a lot of games
            // seemed to favor tight sample packing and did not care about playing one
            // extra sample of garbage.
            if (!preserveDpcmPadding)
            {
                Debug.Assert((len & 0xf) == 1);
                len--;
                Debug.Assert((len & 0xf) == 0);
            }

            // Pull the raw DMC bytes out of the emulator, one byte at a time.
            var sampleData = new byte[len];
            for (int i = 0; i < len; i++)
            {
                sampleData[i] = (byte)NsfGetState(nsf, channel.Type, STATE_DPCMSAMPLEDATA, i);
            }

            // Reuse an identical sample if we already captured one earlier.
            var sample = project.FindMatchingSample(sampleData);
            if (sample == null)
            {
                sample = project.CreateDPCMSampleFromDmcData($"Sample {project.Samples.Count + 1}", sampleData);
            }

            var loop = NsfGetState(nsf, channel.Type, STATE_DPCMLOOP, 0) != 0;
            var pitch = NsfGetState(nsf, channel.Type, STATE_DPCMPITCH, 0);

            // Find an existing keyboard mapping for this sample/pitch/loop combo,
            // or map it to the first free DPCM note.
            var note = project.FindDPCMSampleMapping(sample, pitch, loop);
            if (note == -1)
            {
                for (int i = Note.DPCMNoteMin + 1; i <= Note.DPCMNoteMax; i++)
                {
                    if (project.GetDPCMMapping(i) == null)
                    {
                        note = i;
                        project.MapDPCMSample(i, sample, pitch, loop);
                        break;
                    }
                }
            }

            // note stays -1 when every DPCM key is already mapped; the sample is then dropped.
            if (note != -1)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).Value = (byte)note;
                hasNote = true;
            }
        }
    }
    else
    {
        var period = NsfGetState(nsf, channel.Type, STATE_PERIOD, 0);
        var volume = NsfGetState(nsf, channel.Type, STATE_VOLUME, 0);
        var duty = NsfGetState(nsf, channel.Type, STATE_DUTYCYCLE, 0);
        var force = false;
        var stop = false;
        var release = false;
        var octave = -1;

        // VRC6 has a much larger volume range (6-bit) than our volume (4-bit).
        if (channel.Type == ChannelType.Vrc6Saw)
        {
            volume >>= 2;
        }
        else if (channel.Type == ChannelType.FdsWave)
        {
            // FDS volume is also wider than ours; halve and clamp.
            volume = Math.Min(Note.VolumeMax, volume >> 1);
        }
        else if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
        {
            // VRC7 register value is inverted relative to our volume scale.
            volume = 15 - volume;
        }

        // Per-channel-type capabilities, used to decide which state to track below.
        var hasTrigger = true;
        var hasPeriod = true;
        var hasOctave = channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6;
        var hasVolume = channel.Type != ChannelType.Triangle;
        var hasPitch = channel.Type != ChannelType.Noise;
        var hasDuty = channel.Type == ChannelType.Square1 || channel.Type == ChannelType.Square2 || channel.Type == ChannelType.Noise || channel.Type == ChannelType.Vrc6Square1 || channel.Type == ChannelType.Vrc6Square2 || channel.Type == ChannelType.Mmc5Square1 || channel.Type == ChannelType.Mmc5Square2;

        if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
        {
            // VRC7 exposes explicit trigger/sustain bits; map them to our
            // triggered/released/stopped trigger states.
            var trigger = NsfGetState(nsf, channel.Type, STATE_VRC7TRIGGER, 0) != 0;
            var sustain = NsfGetState(nsf, channel.Type, STATE_VRC7SUSTAIN, 0) != 0;

            var triggerState = trigger ? ChannelState.Triggered : (sustain ? ChannelState.Released : ChannelState.Stopped);

            if (triggerState != state.trigger)
            {
                stop = triggerState == ChannelState.Stopped;
                release = triggerState == ChannelState.Released;
                force |= true; // Any trigger change forces a note to be (re)written.
                state.trigger = triggerState;
            }

            octave = NsfGetState(nsf, channel.Type, STATE_VRC7OCTAVE, 0);
        }
        else
        {
            if (hasTrigger)
            {
                // Non-VRC7 channels have no trigger bit: consider the channel playing
                // when it has volume and (except noise) a non-zero period.
                var trigger = volume != 0 && (channel.Type == ChannelType.Noise || period != 0) ?
                              ChannelState.Triggered : ChannelState.Stopped;

                if (trigger != state.trigger)
                {
                    stop = trigger == ChannelState.Stopped;
                    force |= true;
                    state.trigger = trigger;
                }
            }
        }

        if (hasVolume)
        {
            // Only emit a volume effect when it changed and is audible.
            if (state.volume != volume && volume != 0)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).Volume = (byte)volume;
                state.volume = volume;
            }
        }

        // Derive an instrument from the current chip state.
        Instrument instrument = null;

        if (hasDuty)
        {
            instrument = GetDutyInstrument(channel, duty);
        }
        else if (channel.Type == ChannelType.FdsWave)
        {
            // Capture the FDS wavetable + modulation table to build the instrument.
            var wavEnv = new sbyte[64];
            var modEnv = new sbyte[32];

            for (int i = 0; i < 64; i++)
            {
                wavEnv[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_FDSWAVETABLE, i);
            }
            for (int i = 0; i < 32; i++)
            {
                modEnv[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONTABLE, i);
            }

            Envelope.ConvertFdsModulationToAbsolute(modEnv);

            var masterVolume = (byte)NsfGetState(nsf, channel.Type, STATE_FDSMASTERVOLUME, 0);

            instrument = GetFdsInstrument(wavEnv, modEnv, masterVolume);

            int modDepth = NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONDEPTH, 0);
            int modSpeed = NsfGetState(nsf, channel.Type, STATE_FDSMODULATIONSPEED, 0);

            // Emit FDS modulation effects only when the values change.
            if (state.fdsModDepth != modDepth)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FdsModDepth = (byte)modDepth;
                state.fdsModDepth = modDepth;
            }

            if (state.fdsModSpeed != modSpeed)
            {
                var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FdsModSpeed = (ushort)modSpeed;
                state.fdsModSpeed = modSpeed;
            }
        }
        else if (channel.Type >= ChannelType.N163Wave1 && channel.Type <= ChannelType.N163Wave8)
        {
            // Capture this channel's slice of the N163 wave RAM.
            var wavePos = (byte)NsfGetState(nsf, channel.Type, STATE_N163WAVEPOS, 0);
            var waveLen = (byte)NsfGetState(nsf, channel.Type, STATE_N163WAVESIZE, 0);

            if (waveLen > 0)
            {
                var waveData = new sbyte[waveLen];
                for (int i = 0; i < waveLen; i++)
                {
                    waveData[i] = (sbyte)NsfGetState(nsf, channel.Type, STATE_N163WAVE, wavePos + i);
                }

                instrument = GetN163Instrument(waveData, wavePos);
            }

            // Drop 2 low bits of the N163 period before matching against our
            // note table — presumably extra register precision; TODO confirm.
            period >>= 2;
        }
        else if (channel.Type >= ChannelType.Vrc7Fm1 && channel.Type <= ChannelType.Vrc7Fm6)
        {
            var patch = (byte)NsfGetState(nsf, channel.Type, STATE_VRC7PATCH, 0);
            var regs = new byte[8];

            // Patch 0 is the only case where the custom patch registers are read.
            if (patch == 0)
            {
                for (int i = 0; i < 8; i++)
                {
                    regs[i] = (byte)NsfGetState(nsf, channel.Type, STATE_VRC7PATCHREG, i);
                }
            }

            instrument = GetVrc7Instrument(patch, regs);
        }
        else if (channel.Type >= ChannelType.S5BSquare1 && channel.Type <= ChannelType.S5BSquare3)
        {
            instrument = GetS5BInstrument();
        }
        else
        {
            instrument = GetDutyInstrument(channel, 0);
        }

        // Only write a note when something audible changed (period/octave/instrument)
        // or when a trigger transition forced it.
        if ((hasPeriod && state.period != period) || (hasOctave && state.octave != octave) || (instrument != state.instrument) || force)
        {
            var noteTable = NesApu.GetNoteTableForChannelType(channel.Type, project.PalMode, project.ExpansionNumChannels);
            var note = release ? Note.NoteRelease : (stop ? Note.NoteStop : state.note);
            var finePitch = 0;

            if (!stop && !release && state.trigger != ChannelState.Stopped)
            {
                if (channel.Type == ChannelType.Noise)
                {
                    note = (period ^ 0x0f) + 32;
                }
                else
                {
                    note = (byte)GetBestMatchingNote(period, noteTable, out finePitch);
                }

                if (hasOctave)
                {
                    // Fold the matched note into one octave, then re-apply the
                    // chip-reported octave and recompute the fine pitch.
                    while (note > 12)
                    {
                        note -= 12;
                        octave++;
                    }
                    note += octave * 12;
                    period *= (1 << octave);
                    finePitch = period - noteTable[note];
                }
            }

            // Out-of-range notes (stop/release markers included) carry no instrument.
            if (note < Note.MusicalNoteMin || note > Note.MusicalNoteMax)
            {
                instrument = null;
            }

            if ((state.note != note) || (state.instrument != instrument && instrument != null) || force)
            {
                var pattern = GetOrCreatePattern(channel, p);
                var newNote = pattern.GetOrCreateNoteAt(n);
                newNote.Value = (byte)note;
                newNote.Instrument = instrument;
                state.note = note;
                state.octave = octave;
                if (instrument != null)
                {
                    state.instrument = instrument;
                }
                hasNote = note != 0;
            }

            if (hasPitch && !stop)
            {
                Channel.GetShiftsForType(channel.Type, project.ExpansionNumChannels, out int pitchShift, out _);

                // We scale all pitches changes (slides, fine pitch, pitch envelopes) for
                // some channels with HUGE pitch values (N163, VRC7).
                finePitch >>= pitchShift;

                var pitch = (sbyte)Utils.Clamp(finePitch, Note.FinePitchMin, Note.FinePitchMax);

                if (pitch != state.pitch)
                {
                    var pattern = GetOrCreatePattern(channel, p).GetOrCreateNoteAt(n).FinePitch = pitch;
                    state.pitch = pitch;
                }
            }

            state.period = period;
        }
    }

    return (hasNote);
}
// Renders a song offline through the NES APU emulator and writes the result to
// a mono, 16-bit PCM WAV file at the requested sample rate. Plays the song once
// (LoopMode.None) and accumulates every emitted sample before writing the file.
public unsafe static void Save(Song song, string filename, int sampleRate)
{
    var advance = true;
    var tempoCounter = 0;
    var playPattern = 0;
    var playNote = 0;
    var speed = song.Speed;
    var wavBytes = new List<byte>();

    var apuIndex = NesApu.APU_WAV_EXPORT;
    var dmcCallback = new NesApu.DmcReadDelegate(NesApu.DmcReadCallback);

    NesApu.NesApuInit(apuIndex, sampleRate, dmcCallback);
    NesApu.Reset(apuIndex);

    var channels = new ChannelState[5]
    {
        new SquareChannelState(apuIndex, 0),
        new SquareChannelState(apuIndex, 1),
        new TriangleChannelState(apuIndex, 2),
        new NoiseChannelState(apuIndex, 3),
        new DPCMChannelState(apuIndex, 4)
    };

    for (int i = 0; i < 5; i++)
        NesApu.NesApuEnableChannel(apuIndex, i, 1);

    while (true)
    {
        // Advance to next note.
        if (advance)
        {
            // Effects (skip/jump/speed) are processed for every channel before
            // any channel advances, since they can move the play position.
            foreach (var channel in channels)
                channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed, false);

            foreach (var channel in channels)
                channel.Advance(song, playPattern, playNote);

            advance = false;
        }

        // Update envelopes + APU registers.
        foreach (var channel in channels)
        {
            channel.UpdateEnvelopes();
            channel.UpdateAPU();
        }

        NesApu.NesApuEndFrame(apuIndex);

        int numTotalSamples = NesApu.NesApuSamplesAvailable(apuIndex);

        // Guard against an empty frame: indexing samples[0] on a zero-length
        // array would throw inside the fixed statement.
        if (numTotalSamples > 0)
        {
            // 2 bytes per 16-bit sample.
            byte[] samples = new byte[numTotalSamples * 2];

            fixed (byte* ptr = &samples[0])
            {
                NesApu.NesApuReadSamples(apuIndex, new IntPtr(ptr), numTotalSamples);
            }

            wavBytes.AddRange(samples);
        }

        int dummy1 = 0;
        if (!PlayerBase.AdvanceTempo(song, speed, LoopMode.None, ref tempoCounter, ref playPattern, ref playNote, ref dummy1, ref advance))
            break;
    }

    using (var file = new FileStream(filename, FileMode.Create))
    {
        var header = new WaveHeader();

        // RIFF WAVE Header
        header.chunkId[0] = (byte)'R';
        header.chunkId[1] = (byte)'I';
        header.chunkId[2] = (byte)'F';
        header.chunkId[3] = (byte)'F';
        header.format[0] = (byte)'W';
        header.format[1] = (byte)'A';
        header.format[2] = (byte)'V';
        header.format[3] = (byte)'E';

        // Format subchunk
        header.subChunk1Id[0] = (byte)'f';
        header.subChunk1Id[1] = (byte)'m';
        header.subChunk1Id[2] = (byte)'t';
        header.subChunk1Id[3] = (byte)' ';
        header.audioFormat = 1;          // 1 = PCM.
        header.numChannels = 1;          // 1 for mono, 2 for stereo.
        header.sampleRate = sampleRate;  // ie 44100 hertz, cd quality audio.
        header.bitsPerSample = 16;
        header.byteRate = header.sampleRate * header.numChannels * header.bitsPerSample / 8;
        header.blockAlign = (short)(header.numChannels * header.bitsPerSample / 8);

        // Data subchunk
        header.subChunk2Id[0] = (byte)'d';
        header.subChunk2Id[1] = (byte)'a';
        header.subChunk2Id[2] = (byte)'t';
        header.subChunk2Id[3] = (byte)'a';

        // Sizes:
        //   chunkSize     = 4 + (8 + subChunk1Size) + (8 + subChunk2Size)
        //   subChunk1Size = 16 (constant; this writer always emits PCM)
        //   subChunk2Size = numSamples * numChannels * bitsPerSample / 8
        header.subChunk1Size = 16;
        header.subChunk2Size = wavBytes.Count;
        header.chunkSize = 4 + (8 + header.subChunk1Size) + (8 + header.subChunk2Size);

        var headerBytes = new byte[sizeof(WaveHeader)];
        Marshal.Copy(new IntPtr(&header), headerBytes, 0, headerBytes.Length);
        file.Write(headerBytes, 0, headerBytes.Length);
        file.Write(wavBytes.ToArray(), 0, wavBytes.Count);
    }
}
// Song playback thread. Optionally seeks (silently) to the requested start
// frame, then renders one audio frame per frameEvent tick until stopEvent is
// signaled or AdvanceTempo reports the end of the song.
unsafe void PlayerThread(object o)
{
    var channels = new ChannelState[5]
    {
        new SquareChannelState(apuIndex, 0),
        new SquareChannelState(apuIndex, 1),
        new TriangleChannelState(apuIndex, 2),
        new NoiseChannelState(apuIndex, 3),
        new DPCMChannelState(apuIndex, 4)
    };

    var startInfo = (SongPlayerStartInfo)o;
    var song = startInfo.song;

    bool advance = true;
    int tempoCounter = 0;
    int playPattern = 0;
    int playNote = 0;
    int speed = song.Speed;

    NesApu.Reset(apuIndex);

    // Seek phase: run the sequencer without producing audio until we reach the
    // requested start frame (or hit the end of the song).
    if (startInfo.frame != 0)
    {
        foreach (var channel in channels)
        {
            channel.StartSeeking();
        }

        while (playPattern * song.PatternLength + playNote != startInfo.frame)
        {
            foreach (var channel in channels)
            {
                channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed);
            }

            foreach (var channel in channels)
            {
                channel.Advance(song, playPattern, playNote);
                channel.UpdateEnvelopes();
                channel.UpdateAPU();
            }

            int dummy1 = 0;
            bool dummy2 = false;

            if (!AdvanceTempo(song, speed, LoopMode.None, ref tempoCounter, ref playPattern, ref playNote, ref dummy1, ref dummy2))
            {
                break;
            }
        }

        foreach (var channel in channels)
        {
            channel.StopSeeking();
        }
    }

    var waitEvents = new WaitHandle[] { stopEvent, frameEvent };

    while (true)
    {
        // Index 0 is the stop event; any other index is a frame tick.
        int idx = WaitHandle.WaitAny(waitEvents);

        if (idx == 0)
        {
            break;
        }

        // Advance to next note.
        if (advance)
        {
            // We process the effects before since one channel may have
            // a skip/jump and we need to process that first before advancing
            // the song.
            foreach (var channel in channels)
            {
                channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed);
            }

            foreach (var channel in channels)
            {
                channel.Advance(song, playPattern, playNote);
            }

            advance = false;
        }

        // Update envelopes + APU registers.
        foreach (var channel in channels)
        {
            channel.UpdateEnvelopes();
            channel.UpdateAPU();
        }

        // Mute channels that are masked out by the UI.
        for (int i = 0; i < 5; i++)
        {
            NesApu.NesApuEnableChannel(apuIndex, i, (channelMask & (1 << i)));
        }

        EndFrameAndQueueSamples();

        if (!AdvanceTempo(song, speed, loopMode, ref tempoCounter, ref playPattern, ref playNote, ref playFrame, ref advance))
        {
            break;
        }
    }

    audioStream.Stop();

    // Drain any queued audio so the next playback starts clean.
    while (sampleQueue.TryDequeue(out _))
    {
        ;
    }
}
// Song playback thread (revision with inline tempo/speed logic). Optionally
// seeks (silently) to the requested start frame, then renders one audio frame
// per frameEvent tick until stopEvent fires or the song ends (LoopMode.None).
unsafe void PlayerThread(object o)
{
    var channels = new ChannelState[5]
    {
        new SquareChannelState(apuIndex, 0),
        new SquareChannelState(apuIndex, 1),
        new TriangleChannelState(apuIndex, 2),
        new NoiseChannelState(apuIndex, 3),
        new DPCMChannelState(apuIndex, 4)
    };

    var startInfo = (SongPlayerStartInfo)o;
    var song = startInfo.song;

    bool advance = true;
    int tempoCounter = 0;
    int playPattern = 0;
    int playNote = 0;
    int speed = song.Speed;

    NesApu.Reset(apuIndex);

    // Seek phase: run the sequencer without producing audio until we reach the
    // requested start frame.
    if (startInfo.frame != 0)
    {
        foreach (var channel in channels)
            channel.StartSeeking();

        while (playPattern * song.PatternLength + playNote != startInfo.frame)
        {
            // Effects (skip/jump/speed) must be processed for ALL channels before
            // any channel advances: a jump on one channel moves playPattern/playNote
            // and the remaining channels must advance to the NEW position. The
            // previous code interleaved both steps in a single loop, so channels
            // processed before the jump advanced to a stale position.
            foreach (var channel in channels)
                channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed);

            foreach (var channel in channels)
            {
                channel.Advance(song, playPattern, playNote);
                channel.UpdateEnvelopes();
                channel.UpdateAPU();
            }

            // Tempo/speed logic.
            tempoCounter += song.Tempo * 256 / 150; // NTSC

            if ((tempoCounter >> 8) == speed)
            {
                tempoCounter -= (speed << 8);

                if (++playNote == song.PatternLength)
                {
                    playNote = 0;
                    if (++playPattern == song.Length)
                        playPattern = 0;
                }
            }
        }

        foreach (var channel in channels)
            channel.StopSeeking();
    }

    var waitEvents = new WaitHandle[] { stopEvent, frameEvent };

    while (true)
    {
        // Index 0 is the stop event; any other index is a frame tick.
        int idx = WaitHandle.WaitAny(waitEvents);

        if (idx == 0)
            break;

        // Advance to next note.
        if (advance)
        {
            // Same ordering requirement as the seek loop: effects first for all
            // channels, then advance all channels.
            foreach (var channel in channels)
                channel.ProcessEffects(song, ref playPattern, ref playNote, ref speed);

            foreach (var channel in channels)
                channel.Advance(song, playPattern, playNote);

            advance = false;
        }

        // Update envelopes + APU registers.
        foreach (var channel in channels)
        {
            channel.UpdateEnvelopes();
            channel.UpdateAPU();
        }

        // Mute channels that are masked out by the UI.
        for (int i = 0; i < 5; i++)
        {
            NesApu.NesApuEnableChannel(apuIndex, i, (channelMask & (1 << i)));
        }

        EndFrameAndQueueSamples();

        // Tempo/speed logic.
        tempoCounter += song.Tempo * 256 / 150; // NTSC

        if ((tempoCounter >> 8) == speed)
        {
            tempoCounter -= (speed << 8);

            if (++playNote == song.PatternLength)
            {
                playNote = 0;

                if (loopMode != LoopMode.Pattern)
                {
                    if (++playPattern == song.Length)
                    {
                        // LoopMode.None: stop at the end of the song.
                        if (loopMode == LoopMode.None)
                            break;

                        playPattern = 0;
                    }
                }
            }

            playFrame = playPattern * song.PatternLength + playNote;
            advance = true;
        }
    }

    xaudio2Stream.Stop();

    // Drain any queued audio so the next playback starts clean.
    while (sampleQueue.TryDequeue(out _)) { ; }
}
// Instrument-preview thread: plays the most recently queued note on a single
// channel, one audio frame per frameEvent tick, and publishes per-envelope
// frame indices (envelopeFrames) for UI feedback. Exits when stopEvent fires.
unsafe void PlayerThread(object o)
{
    var channels = new ChannelState[5]
    {
        new SquareChannelState(apuIndex, 0),
        new SquareChannelState(apuIndex, 1),
        new TriangleChannelState(apuIndex, 2),
        new NoiseChannelState(apuIndex, 3),
        new DPCMChannelState(apuIndex, 4)
    };

    var activeChannel = -1;
    var waitEvents = new WaitHandle[] { stopEvent, frameEvent };

    NesApu.Reset(apuIndex);

    // Start with every channel muted; only the active preview channel gets enabled.
    for (int i = 0; i < 5; i++)
    {
        NesApu.NesApuEnableChannel(apuIndex, i, 0);
    }

    while (true)
    {
        // Index 0 is the stop event; any other index is a frame tick.
        int idx = WaitHandle.WaitAny(waitEvents);

        if (idx == 0)
        {
            break;
        }

        if (!noteQueue.IsEmpty)
        {
            // Coalesce pending requests: only the most recently queued note plays.
            PlayerNote lastNote = new PlayerNote();
            while (noteQueue.TryDequeue(out PlayerNote note))
            {
                lastNote = note;
            }

            // A negative channel means "no active channel" (stop preview).
            activeChannel = lastNote.channel;
            if (activeChannel >= 0)
            {
                channels[activeChannel].PlayNote(lastNote.note);
            }

            // Enable only the active channel in the APU.
            for (int i = 0; i < 5; i++)
            {
                NesApu.NesApuEnableChannel(apuIndex, i, i == activeChannel ? 1 : 0);
            }
        }

        if (activeChannel >= 0)
        {
            channels[activeChannel].UpdateEnvelopes();
            channels[activeChannel].UpdateAPU();

            // Publish the current frame of each envelope type for the UI.
            for (int i = 0; i < Envelope.Max; i++)
            {
                envelopeFrames[i] = channels[activeChannel].GetEnvelopeFrame(i);
            }
        }
        else
        {
            // No active channel: clear the published envelope frames and all notes.
            for (int i = 0; i < Envelope.Max; i++)
            {
                envelopeFrames[i] = 0;
            }
            foreach (var channel in channels)
            {
                channel.ClearNote();
            }
        }

        EndFrameAndQueueSamples();
    }

    xaudio2Stream.Stop();

    // Drain any queued audio so the next playback starts clean.
    while (sampleQueue.TryDequeue(out _))
    {
        ;
    }
}
// NSF import (older revision, pre-ExpansionType enum). Emulates the tune for
// 'duration' seconds and reconstructs notes/instruments from the APU state.
// Returns null when the file cannot be opened or the expansion is unsupported.
// NOTE(review): songIndex is not validated against the NSF track count here,
// and project.PalMode is never set even though NsfIsPal() is queried — confirm
// against newer revisions of this importer.
public Project Load(string filename, int songIndex, int duration, int patternLength, int startFrame, bool removeIntroSilence)
{
    nsf = NsfOpen(filename);

    if (nsf == null)
    {
        return (null);
    }

    // PAL tunes tick at 50 FPS, NTSC at 60; one emulated frame per tick.
    var numFrames = duration * (NsfIsPal(nsf) != 0 ? 50 : 60);

    project = new Project();
    project.Name = Marshal.PtrToStringAnsi(NsfGetTitle(nsf));
    project.Author = Marshal.PtrToStringAnsi(NsfGetArtist(nsf));
    project.Copyright = Marshal.PtrToStringAnsi(NsfGetCopyright(nsf));

    // Map the NSF expansion flag to our project expansion setting.
    switch (NsfGetExpansion(nsf))
    {
        case EXTSOUND_VRC6: project.SetExpansionAudio(Project.ExpansionVrc6); break;
        case EXTSOUND_VRC7: project.SetExpansionAudio(Project.ExpansionVrc7); break;
        case EXTSOUND_FDS: project.SetExpansionAudio(Project.ExpansionFds); break;
        case EXTSOUND_MMC5: project.SetExpansionAudio(Project.ExpansionMmc5); break;
        case EXTSOUND_N163: project.SetExpansionAudio(Project.ExpansionN163, GetNumNamcoChannels(filename, songIndex, numFrames)); break;
        case EXTSOUND_S5B: project.SetExpansionAudio(Project.ExpansionS5B); break;
        case 0: break;
        default:
            NsfClose(nsf); // Unsupported expansion combination.
            return (null);
    }

    var songName = Marshal.PtrToStringAnsi(NsfGetTrackName(nsf, songIndex));

    song = project.CreateSong(string.IsNullOrEmpty(songName) ? $"Song {songIndex + 1}" : songName);
    channelStates = new ChannelState[song.Channels.Length];

    NsfSetTrack(nsf, songIndex);

    song.ResizeNotes(1, false);
    song.SetDefaultPatternLength(patternLength);

    for (int i = 0; i < song.Channels.Length; i++)
    {
        channelStates[i] = new ChannelState();
    }

    var foundFirstNote = !removeIntroSilence;

    var p = 0;
    var n = 0;
    var f = startFrame;

    for (int i = 0; i < numFrames; i++)
    {
        p = f / song.PatternLength;
        n = f % song.PatternLength;

        if (p >= Song.MaxLength - 1)
        {
            break;
        }

        // Run the emulator until the NSF PLAY routine has actually been called.
        var playCalled = 0;
        do
        {
            playCalled = NsfRunFrame(nsf);
        }while (playCalled == 0);

        for (int c = 0; c < song.Channels.Length; c++)
        {
            foundFirstNote |= UpdateChannel(p, n, song.Channels[c], channelStates[c]);
        }

        if (foundFirstNote)
        {
            f++;
        }
        else
        {
            // Reset everything until we find our first note.
            project.DeleteAllInstrument();
            project.DeleteAllSamples();
            for (int c = 0; c < song.Channels.Length; c++)
            {
                channelStates[c] = new ChannelState();
            }
        }
    }

    song.SetLength(p + 1);

    NsfClose(nsf);

    // Re-quantize: use the smallest factor of the pattern length as note length.
    var factors = Utils.GetFactors(song.PatternLength, Song.MaxNoteLength);
    if (factors.Length > 0 && factors[0] <= Song.MaxNoteLength)
    {
        song.ResizeNotes(factors[0], false);
    }
    else
    {
        song.ResizeNotes(1, false);
    }

    song.SetSensibleBarLength();
    song.DeleteEmptyPatterns();
    song.UpdatePatternStartNotes();
    project.DeleteUnusedInstruments();
    project.UpdateAllLastValidNotesAndVolume();

    return (project);
}
// Imports an NSF/NSF2 file into a new FamiStudio project by emulating the tune
// for 'duration' seconds and reconstructing notes, volumes and instruments
// from the emulated APU state. Returns null on open failure, invalid song
// index, unsupported expansion chips, or when the NSF stops calling PLAY.
public Project Load(string filename, int songIndex, int duration, int patternLength, int startFrame, bool removeIntroSilence, bool reverseDpcm, bool preserveDpcmPad)
{
    nsf = NsfOpen(filename);

    if (nsf == IntPtr.Zero)
    {
        Log.LogMessage(LogSeverity.Error, "Error opening NSF. File may be corrupted or may be a NSF2 using advanced features such as IRQ which are not supported at the moment.");
        return null;
    }

    // Song indices are zero-based (see "Song {songIndex + 1}" below), so the
    // last valid index is trackCount - 1. The previous '>' comparison wrongly
    // accepted songIndex == trackCount. Also close the native handle on this
    // early-out path (it was leaked before).
    var trackCount = NsfGetTrackCount(nsf);
    if (songIndex < 0 || songIndex >= trackCount)
    {
        NsfClose(nsf);
        return null;
    }

    preserveDpcmPadding = preserveDpcmPad;

    // PAL machines tick at 50 FPS, NTSC at 60; one emulated frame per tick.
    var palSource = (NsfIsPal(nsf) & 1) == 1;
    var numFrames = duration * (palSource ? 50 : 60);

    project = new Project();
    project.Name = Marshal.PtrToStringAnsi(NsfGetTitle(nsf));
    project.Author = Marshal.PtrToStringAnsi(NsfGetArtist(nsf));
    project.Copyright = Marshal.PtrToStringAnsi(NsfGetCopyright(nsf));
    project.PalMode = palSource;

    // Our expansion mask is the same as NSF.
    var expansionMask = NsfGetExpansion(nsf);

    // The 2 upper bits of the mask need to be zero, we dont support these.
    if (expansionMask != (expansionMask & ExpansionType.AllMask))
    {
        Log.LogMessage(LogSeverity.Error, "NSF uses unknown or unsupported expansion chips, aborting.");
        NsfClose(nsf);
        return null;
    }

    var numN163Channels = (expansionMask & ExpansionType.N163Mask) != 0 ? GetNumNamcoChannels(filename, songIndex, numFrames) : 1;
    project.SetExpansionAudioMask(expansionMask, numN163Channels);

    var songName = Marshal.PtrToStringAnsi(NsfGetTrackName(nsf, songIndex));

    song = project.CreateSong(string.IsNullOrEmpty(songName) ? $"Song {songIndex + 1}" : songName);
    channelStates = new ChannelState[song.Channels.Length];

    NsfSetTrack(nsf, songIndex);

    song.ChangeFamiStudioTempoGroove(new[] { 1 }, false);
    song.SetDefaultPatternLength(patternLength);

    for (int i = 0; i < song.Channels.Length; i++)
        channelStates[i] = new ChannelState();

    var foundFirstNote = !removeIntroSilence;

    var p = 0;
    var n = 0;
    var f = startFrame;

    for (int i = 0; i < numFrames; i++)
    {
        p = f / song.PatternLength;
        n = f % song.PatternLength;

        if (p >= Song.MaxLength - 1)
            break;

        // Run the emulator until PLAY is actually called; bail out when the NSF
        // never calls it (broken or unsupported rip).
        var playCalled = 0;
        var waitFrameCount = 0;
        do
        {
            playCalled = NsfRunFrame(nsf);

            if (++waitFrameCount == 1000)
            {
                Log.LogMessage(LogSeverity.Error, "NSF did not call PLAY after 1000 frames, aborting.");
                NsfClose(nsf);
                return null;
            }
        }
        while (playCalled == 0);

        for (int c = 0; c < song.Channels.Length; c++)
            foundFirstNote |= UpdateChannel(p, n, song.Channels[c], channelStates[c]);

        if (foundFirstNote)
        {
            f++;
        }
        else
        {
            // Reset everything until we find our first note.
            project.DeleteAllInstruments();
            project.DeleteAllSamples();
            for (int c = 0; c < song.Channels.Length; c++)
                channelStates[c] = new ChannelState();
        }
    }

    song.SetLength(p + 1);

    NsfClose(nsf);

    // Re-quantize the imported frames into sensible note lengths.
    var factors = Utils.GetFactors(song.PatternLength, FamiStudioTempoUtils.MaxNoteLength);
    if (factors.Length > 0)
    {
        var noteLen = factors[0];

        // Look for a factor that generates a note length < 10 and gives a pattern length that is a multiple of 16.
        foreach (var factor in factors)
        {
            if (factor <= 10)
            {
                noteLen = factor;
                if (((song.PatternLength / noteLen) % 16) == 0)
                    break;
            }
        }

        song.ChangeFamiStudioTempoGroove(new[] { noteLen }, false);
    }
    else
    {
        song.ChangeFamiStudioTempoGroove(new[] { 1 }, false);
    }

    song.SetSensibleBeatLength();
    song.ConvertToCompoundNotes();
    song.DeleteEmptyPatterns();
    song.UpdatePatternStartNotes();
    song.InvalidateCumulativePatternCache();
    project.DeleteUnusedInstruments();

    foreach (var sample in project.Samples)
        sample.ReverseBits = reverseDpcm;

    return project;
}