// Transposes an encoded note value up or down by "direction" semitones, carrying into the
// neighbouring octave when the semitone index wraps. Empty cells (negative values) and
// NoteOff entries are returned unchanged.
int TransposeNote(int direction, int data)
{
    if (data < 0) {
        return data;
    }

    int note = (int)VirtualKeyboard.GetNote(data) - 1;
    if (note + 1 == (int)VirtualKeyboard.Note.NoteOff) {
        return data;
    }

    int octave = VirtualKeyboard.GetOctave(data);
    int offset = note + direction;

    // Carry into the next/previous octave when the offset leaves the 0-11 range.
    if (offset > 11) {
        octave++;
    }
    if (offset < 0) {
        octave--;
        offset += 12;
    }

    offset = System.Math.Abs(offset) % 12 + 1;
    return VirtualKeyboard.EncodeNoteInfo(offset, octave);
}
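// Hypothetical usage sketch (not part of the original source): the note column of a pattern
// cell, as read elsewhere via col.data[line, 0], could be shifted up one semitone like this.
// "col" and "line" are illustrative names; empty cells and NoteOff entries pass through unchanged.
//
//     col.data[line, 0] = TransposeNote(1, col.data[line, 0]);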
// Called on each IRQ tick. Advances the song position once enough ticks have elapsed for the
// current playback speed, applies pattern data and effects to each channel's instrument state,
// and then updates the PSG for every channel.
public void OnIrqCallback()
{
    if (!m_IsPlaying) {
        return;
    }

    m_Counter++;
    if (m_Counter >= playbackSpeed) {
        if (m_IsStopping) {
            m_IsPlaying = false;
            m_IsStopping = true;
            return;
        }

        m_Counter = 0;
        int patternBreakTarget = -1;

        for (int i = 0; i < data.channels; i++) {
            m_PlayingPattern = m_CurrentPattern;

            SongData.ColumnEntry col = data.GetPatternColumn(m_CurrentPattern, i);
            if (col == null) {
                //m_Instruments[i].note = VirtualKeyboard.Note.NoteOff;
                //psg.SetAttenuation(i, 0);
                continue;
            }

            if (mute[i]) {
                m_Instruments[i].note = VirtualKeyboard.Note.NoteOff;
            } else {
                // Column layout: 0 = note, 1 = instrument, 2 = volume, 3 = effect, 4 = effect parameter.
                int volume = col.data[m_CurrentLine, 2];
                if (volume >= 0) {
                    m_Instruments[i].relativeVolume = volume;
                }

                if (col.data[m_CurrentLine, 0] > 0) {
                    VirtualKeyboard.Note note = VirtualKeyboard.GetNote(col.data[m_CurrentLine, 0]);
                    if (note == VirtualKeyboard.Note.NoteOff) {
                        m_Instruments[i].note = VirtualKeyboard.Note.NoteOff;
                        psg.SetAttenuation(i, 0);
                    } else if (col.data[m_CurrentLine, 1] < instruments.presets.Length) {
                        // New note: remember the previous instrument state and load the preset.
                        m_PrevInstruments[i] = m_Instruments[i];
                        m_Instruments[i] = instruments.presets[col.data[m_CurrentLine, 1]];
                        m_Instruments[i].relativeVolume = volume >= 0 ? volume : 0xF;
                        m_Instruments[i].note = note;
                        m_Instruments[i].noteOffset = data.transposeTable[m_CurrentPattern][i];
                        m_Instruments[i].octave = VirtualKeyboard.GetOctave(col.data[m_CurrentLine, 0]);
                    }
                }
            }

            int fxVal = col.data[m_CurrentLine, 4];
            if (fxVal >= 0) {
                switch (col.data[m_CurrentLine, 3]) {
                    case 0x00: // arpeggio
                        if (fxVal == 0) {
                            m_Instruments[i].arpeggio = new int[0];
                        } else {
                            int hiArp, loArp;
                            SplitByte(fxVal, out hiArp, out loArp);
                            m_Instruments[i].arpeggio = new int[3] { 0, loArp, hiArp };
                            m_Instruments[i].arpLoopPoint = 3;
                        }
                        break;

                    case 0x01: // portamento up
                        m_Instruments[i].portamentoSpeed = fxVal;
                        break;

                    case 0x02: // portamento down
                        m_Instruments[i].portamentoSpeed = -fxVal;
                        break;

                    case 0x03: // automatic portamento from the previous note
                        if (m_Instruments[i].samplePlayback) {
                            m_Instruments[i].pulseWidth = m_PrevInstruments[i].pulseWidth;
                        }
                        m_Instruments[i].SetAutoPortamento(m_PrevInstruments[i], fxVal);
                        break;

                    case 0x04: // vibrato
                        int speed, depth;
                        SplitByte(fxVal, out depth, out speed);
                        m_Instruments[i].vibratoDepth = depth;
                        m_Instruments[i].vibratoSpeed = speed;
                        break;

                    case 0x08: // stereo panning
                        int left, right;
                        SplitByte(fxVal, out right, out left);
                        psg.SetStereo(i, left != 0, right != 0);
                        break;

                    case 0x0B: // loop point
                        break;

                    case 0x0D: // pattern break
                        if (m_CurrentPattern >= data.numPatterns - 1) {
                            break;
                        }
                        if (fxVal < data.patternLength) {
                            m_PatternBreakTarget = fxVal;
                        } else {
                            m_PatternBreakTarget = 0;
                        }
                        patternBreakTarget = m_PatternBreakTarget;
                        break;

                    case 0x0F: // set playback speed
                        playbackSpeed = fxVal;
                        break;

                    case 0x10: // note delay
                        m_Instruments[i].noteDelay = fxVal;
                        break;

                    case 0x20: // noise mode / feedback
                        int mode, fb;
                        SplitByte(fxVal, out mode, out fb);
                        int val = (mode << 1) | fb;
                        m_Instruments[i].noiseMode = new int[] { val };
                        break;

                    case 0x40: // pulse-width pan speed
                        m_Instruments[i].pulseWidthPanSpeed = fxVal;
                        break;

                    case 0xFF: // direct PSG register write
                        psg.PSGDirectWrite(fxVal);
                        break;
                }
            }
        }

        m_MoveLine++;
        if (patternBreakTarget >= 0) {
            // A 0x0D effect was hit: jump to the given line of the next pattern.
            m_CurrentLine = patternBreakTarget;
            m_CurrentPattern++;
        } else {
            m_CurrentLine++;
            if (m_CurrentLine >= data.patternLength) {
                m_CurrentLine = 0;
                m_CurrentPattern++;
                if (m_CurrentPattern >= data.numPatterns) {
                    m_CurrentPattern = m_PatternLoop;
                    if (m_Loops >= 0) {
                        m_IsStopping = m_Loops == 0;
                        m_Loops--;
                    }
                }
            }
        }
    }

    // Drive the PSG every tick. While a note delay is active the previous instrument keeps
    // playing; muted channels have their stored level forced to zero.
    for (int i = 0; i < data.channels; i++) {
        if (m_Instruments[i].noteDelay > 0) {
            m_Instruments[i].noteDelay--;
            m_PrevInstruments[i].UpdatePSG(psg, i);
            m_ChnAttenuation[i] = m_PrevInstruments[i].GetCurrentVol();
        } else {
            m_Instruments[i].UpdatePSG(psg, i);
            m_ChnAttenuation[i] = m_Instruments[i].GetCurrentVol();
        }

        if (mute[i]) {
            m_ChnAttenuation[i] = 0;
        }
    }
}
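// SplitByte is used throughout OnIrqCallback but is not defined in this excerpt. Judging from
// its call sites (case 0x00 receives hiArp before loArp), it appears to split an effect
// parameter byte into its two nibbles, high nibble first. A minimal sketch under that
// assumption, kept commented out so it does not clash with the project's own definition:
//
//     static void SplitByte(int value, out int high, out int low)
//     {
//         high = (value >> 4) & 0xF; // upper nibble of the xy effect parameter
//         low  = value & 0xF;        // lower nibble of the xy effect parameter
//     }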