/// <summary>
/// Decode DSP-ADPCM data.
/// </summary>
/// <param name="src">DSP-ADPCM source.</param>
/// <param name="dst">Destination array of samples.</param>
/// <param name="cxt">DSP-ADPCM context.</param>
/// <param name="samples">Number of samples.</param>
public static void Decode(byte[] src, ref Int16[] dst, ref DspAdpcmInfo cxt, UInt32 samples) {

    //Each DSP-ADPCM frame is 8 bytes long: 1 header byte and 7 sample bytes (2 nibbles each, so 14 samples per frame).

    //Set initial values.
    short hist1 = cxt.yn1;
    short hist2 = cxt.yn2;
    int dstIndex = 0;
    int srcIndex = 0;

    //Until all samples are decoded.
    while (dstIndex < samples) {

        //Get the header.
        byte header = src[srcIndex++];

        //Get the scale and coefficient index.
        UInt16 scale = (UInt16)(1 << (header & 0xF));
        byte coef_index = (byte)(header >> 4);
        short coef1 = cxt.coefs[coef_index][0];
        short coef2 = cxt.coefs[coef_index][1];

        //7 sample bytes per frame.
        for (UInt32 b = 0; b < 7; b++) {

            //Get byte.
            byte byt = src[srcIndex++];

            //2 samples per byte; the high nibble is decoded first.
            for (UInt32 s = 0; s < 2; s++) {

                sbyte adpcm_nibble = ((s == 0) ? GetHighNibble(byt) : GetLowNibble(byt));

                //Predict the sample from the two previous samples, then clamp to 16 bits.
                short sample = Clamp16((((adpcm_nibble * scale) << 11) + 1024 + ((coef1 * hist1) + (coef2 * hist2))) >> 11);

                //Shift the history and store the sample.
                hist2 = hist1;
                hist1 = sample;
                dst[dstIndex++] = sample;

                if (dstIndex >= samples) { break; }
            }

            if (dstIndex >= samples) { break; }
        }
    }
}
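// Usage sketch (not part of the original source): decode every channel of raw
// DSP-ADPCM data into PCM16 using Decode above. The per-channel byte arrays,
// contexts, and sample count are assumed to come from a loaded file; the
// helper name and parameters are hypothetical.
public static Int16[][] DecodeChannels(byte[][] dspAdpcmChannels, DspAdpcmInfo[] contexts, UInt32 numSamples) {

    Int16[][] pcm = new Int16[dspAdpcmChannels.Length][];
    for (int i = 0; i < dspAdpcmChannels.Length; i++) {

        //One destination buffer per channel; Decode fills it with numSamples samples.
        pcm[i] = new Int16[numSamples];
        Decode(dspAdpcmChannels[i], ref pcm[i], ref contexts[i], numSamples);
    }
    return pcm;
}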
/// <summary>
/// Encodes the samples.
/// </summary>
/// <returns>The encoded DSP-ADPCM data.</returns>
/// <param name="samples">Samples.</param>
/// <param name="info">The DSP-ADPCM context produced for the samples.</param>
/// <param name="loopStart">The loop start sample.</param>
public static byte[] EncodeSamples(short[] samples, out DspAdpcmInfo info, uint loopStart) {

    //Encode data.
    short[] coeffs = GcAdpcmCoefficients.CalculateCoefficients(samples);
    byte[] dspAdpcm = GcAdpcmEncoder.Encode(samples, coeffs);

    //Copy the 16 coefficients into 8 predictor pairs.
    info = new DspAdpcmInfo();
    info.coefs = new short[8][];
    for (int i = 0; i < 8; i++) {
        info.coefs[i] = new short[] { coeffs[i * 2], coeffs[i * 2 + 1] };
    }

    //Loop history samples.
    if (loopStart > 0) { info.loop_yn1 = samples[loopStart - 1]; }
    if (loopStart > 1) { info.loop_yn2 = samples[loopStart - 2]; }

    return dspAdpcm;
}
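// Usage sketch (not part of the original source): encode each channel of a
// PCM16 stream with EncodeSamples above and collect the per-channel contexts.
// The helper name is hypothetical; a non-looping stream would pass 0 for loopStart.
public static byte[][] EncodeChannels(short[][] pcm16Channels, uint loopStart, out DspAdpcmInfo[] contexts) {

    byte[][] encoded = new byte[pcm16Channels.Length][];
    contexts = new DspAdpcmInfo[pcm16Channels.Length];
    for (int i = 0; i < pcm16Channels.Length; i++) {

        //Each channel gets its own coefficients and DSP-ADPCM context.
        encoded[i] = EncodeSamples(pcm16Channels[i], out contexts[i], loopStart);
    }
    return encoded;
}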
/// <summary>
/// Load a wave file.
/// </summary>
/// <param name="b">The byte array.</param>
public void Load(byte[] b) {

    //Read file.
    MemoryStream src = new MemoryStream(b);
    BinaryDataReader br = new BinaryDataReader(src);

    //Get byte order.
    br.ByteOrder = ByteOrder = Syroot.BinaryData.ByteOrder.BigEndian;
    br.Position = 4;
    if (br.ReadUInt16() == CitraFileLoader.ByteOrder.LittleEndian) {
        br.ByteOrder = ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;
    }

    //Get version.
    ushort version = br.ReadUInt16();
    Major = (byte)((version & 0xFF00) >> 8);
    Minor = (byte)(version & 0xFF);

    //Get number of channels.
    br.Position = 0x0E;
    ushort numChannels = br.ReadUInt16();
    ChannelPans = new ChannelPan[numChannels];

    //Get codec. Reference: vgmstream/src/meta/bwav.c.
    br.Position = 0x10;
    Codic = br.ReadUInt16();

    //Get info from the first channel.
    br.Position = 0x12;
    ChannelPans[0] = (ChannelPan)br.ReadUInt16();
    SampleRate = br.ReadUInt32();
    NumSamples = br.ReadUInt32();
    br.ReadUInt32();
    DspAdpcmInfo = new DspAdpcmInfo[numChannels];
    DspAdpcmInfo[0] = new DspAdpcmInfo();
    DspAdpcmInfo[0].coefs = new short[8][];
    for (int j = 0; j < 8; j++) {
        DspAdpcmInfo[0].coefs[j] = br.ReadInt16s(2);
    }

    //Start offsets.
    uint[] startOffsets = new uint[numChannels];
    startOffsets[0] = br.ReadUInt32();
    br.Position += 4;

    //Loop info.
    Loops = br.ReadUInt32() > 0;
    LoopEndSample = br.ReadUInt32();
    LoopStartSample = br.ReadUInt32();

    //More DSP info.
    DspAdpcmInfo[0].pred_scale = DspAdpcmInfo[0].loop_pred_scale = br.ReadUInt16();
    DspAdpcmInfo[0].yn1 = DspAdpcmInfo[0].loop_yn1 = br.ReadInt16();
    DspAdpcmInfo[0].yn2 = DspAdpcmInfo[0].loop_yn2 = br.ReadInt16();

    //Read the remaining channels (pan, start offset, and DSP info).
    for (int i = 1; i < numChannels; i++) {

        //Get channel pan.
        br.Position = i * 0x4C + 0x10 + 0x2;
        ChannelPans[i] = (ChannelPan)br.ReadUInt16();

        //Start offset.
        br.Position = i * 0x4C + 0x10 + 0x30;
        startOffsets[i] = br.ReadUInt32();

        //Get DSP info.
        br.Position = i * 0x4C + 0x10 + 0x10;
        DspAdpcmInfo[i] = new DspAdpcmInfo();
        DspAdpcmInfo[i].coefs = new short[8][];
        for (int j = 0; j < 8; j++) {
            DspAdpcmInfo[i].coefs[j] = br.ReadInt16s(2);
        }
        br.Position += 20;
        DspAdpcmInfo[i].pred_scale = DspAdpcmInfo[i].loop_pred_scale = br.ReadUInt16();
        DspAdpcmInfo[i].yn1 = DspAdpcmInfo[i].loop_yn1 = br.ReadInt16();
        DspAdpcmInfo[i].yn2 = DspAdpcmInfo[i].loop_yn2 = br.ReadInt16();
    }

    //Read the wave data.
    Data = new SoundNStreamDataBlock(br, startOffsets);

    //Clean up.
    try { br.Dispose(); } catch { }
    try { src.Dispose(); } catch { }
}
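// Usage sketch (not part of the original source): decode the loaded DSP-ADPCM
// channels back to PCM16 with the Decode routine shown earlier. This assumes
// the data block exposes one raw byte array per channel as Data.dspAdpcm (as
// the stream writer does elsewhere) and that Decode lives in a class called
// DspAdpcmDecoder; both of those names are assumptions.
public Int16[][] ToPcm16() {

    Int16[][] pcm = new Int16[Data.dspAdpcm.Length][];
    for (int i = 0; i < Data.dspAdpcm.Length; i++) {

        //One buffer per channel, decoded with that channel's context.
        pcm[i] = new Int16[NumSamples];
        DspAdpcmDecoder.Decode(Data.dspAdpcm[i], ref pcm[i], ref DspAdpcmInfo[i], NumSamples);
    }
    return pcm;
}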
/// <summary>
/// Create a standard b_stm.
/// </summary>
/// <param name="sampleRate">The sample rate.</param>
/// <param name="numSamples">Number of samples.</param>
/// <param name="samples">Pcm8[][] or Pcm16[][] audio samples.</param>
/// <param name="encoding">Always 0 if samples is Pcm8[][]; must be 1 or 2 if samples is Pcm16[][].</param>
/// <param name="vMajor">Major version of the file.</param>
/// <param name="vMinor">Minor version of the file.</param>
/// <param name="vRevision">Revision of the file.</param>
/// <returns>The new stream.</returns>
public static b_stm CreateStream(UInt32 sampleRate, UInt32 numSamples, object samples, byte encoding, byte vMajor, byte vMinor, byte vRevision) {

    //Create the stream and its blocks.
    b_stm s = new b_stm();
    s.fileHeader = new FileHeader("FSTM", ByteOrder.BigEndian, vMajor, vMinor, vRevision, 0, new List<SizedReference>());
    s.info = new b_stm.InfoBlock();
    s.info.tracks = new List<b_stm.TrackInfo>();
    s.info.channels = new List<b_stm.ChannelInfo>();

    //Stream info.
    s.info.streamSoundInfo = new b_stm.StreamSoundInfo();
    s.info.streamSoundInfo.encoding = encoding;
    s.info.streamSoundInfo.sampleCount = numSamples;
    s.info.streamSoundInfo.sampleRate = sampleRate;

    //Channels.
    switch (encoding) {

        case EncodingTypes.PCM8:
            s.data = new SoundNStreamDataBlock(EncoderFactory.Pcm8ToSignedPcm8(samples as byte[][]), encoding);
            for (int i = 0; i < (samples as byte[][]).Length; i++) {
                s.info.channels.Add(new b_stm.ChannelInfo());
            }
            break;

        case EncodingTypes.PCM16:
            s.data = new SoundNStreamDataBlock(samples, encoding);
            for (int i = 0; i < (samples as Int16[][]).Length; i++) {
                s.info.channels.Add(new b_stm.ChannelInfo());
            }
            break;

        case EncodingTypes.DSP_ADPCM:
            s.data = new SoundNStreamDataBlock(EncoderFactory.Pcm16ToDspAdpcmSTM(samples as Int16[][], s), encoding);
            s.Update(ByteOrder.BigEndian);

            //Get DSP-ADPCM info.
            DspAdpcmInfo[] context = new DspAdpcmInfo[s.data.dspAdpcm.Length];
            int cCount = 0;
            foreach (var channel in s.info.channels) {
                context[cCount] = channel.dspAdpcmInfo;
                cCount++;
            }

            //Create SEEK block.
            s.seek = new SoundNStreamSeekBlock(s.data.dspAdpcm, s.info.streamSoundInfo.sampleCount, context);
            break;
    }

    //Tracks: one track per pair of channels.
    for (int i = 0; i < s.info.channels.Count; i += 2) {
        s.info.tracks.Add(new b_stm.TrackInfo() {
            volume = 0x64,
            pan = 0x40,
            span = 0x0,
            surroundMode = 0,
            globalChannelIndexTable = new Table<byte>() { entries = new List<byte>() { (byte)i } }
        });
        if (i + 1 != s.info.channels.Count) {
            s.info.tracks[s.info.tracks.Count - 1].globalChannelIndexTable.entries.Add((byte)(i + 1));
        }
    }

    s.Update(ByteOrder.BigEndian);
    return s;
}
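// Usage sketch (not part of the original source): build a DSP-ADPCM encoded
// stream from PCM16 channel data with CreateStream above. The helper name and
// version numbers are illustrative; every channel is assumed to hold the same
// number of samples.
public static b_stm CreateDspAdpcmStream(Int16[][] pcm16Channels, UInt32 sampleRate, byte vMajor, byte vMinor, byte vRevision) {

    //The sample count comes from the first channel; CreateStream encodes the
    //PCM16 input to DSP-ADPCM because of the encoding value passed here.
    UInt32 numSamples = (UInt32)pcm16Channels[0].Length;
    return CreateStream(sampleRate, numSamples, pcm16Channels, EncodingTypes.DSP_ADPCM, vMajor, vMinor, vRevision);
}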