Code Example #1
        /// <summary>
        /// Create a wave from a stream.
        /// </summary>
        /// <param name="s">The stream.</param>
        /// <param name="version">Version of the file.</param>
        /// <returns></returns>
        public static b_wav CreateWave(b_stm s, byte vMajor, byte vMinor, byte vRevision)
        {
            b_wav b = new b_wav();

            b.fileHeader = new FileHeader("FWAV", ByteOrder.BigEndian, vMajor, vMinor, vRevision, 0, new List <SizedReference>());
            b.data       = s.data;

            b.info                   = new b_wav.InfoBlock();
            b.info.sampleRate        = s.info.streamSoundInfo.sampleRate;
            b.info.originalLoopStart = s.info.streamSoundInfo.originalLoopStart;
            b.info.loopStart         = s.info.streamSoundInfo.loopStart;
            b.info.loopEnd           = s.info.streamSoundInfo.sampleCount;
            b.info.isLoop            = s.info.streamSoundInfo.isLoop;
            b.info.encoding          = s.info.streamSoundInfo.encoding;

            b.info.channelInfo = new List <b_wav.InfoBlock.ChannelInfo>();
            foreach (b_stm.ChannelInfo c in s.info.channels)
            {
                b_wav.InfoBlock.ChannelInfo i = new b_wav.InfoBlock.ChannelInfo();
                i.dspAdpcmInfo = c.dspAdpcmInfo;
                b.info.channelInfo.Add(i);
            }

            b.Update(ByteOrder.BigEndian);

            return(b);
        }
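
A minimal usage sketch for the factory method above. The enclosing class name "WaveFactory", the way the source stream is parsed, and the version bytes are all assumptions for illustration; none of them appear in the example itself.

        // Hypothetical usage: convert an already-parsed FSTM stream into an FWAV wave.
        b_stm stream = /* parsed elsewhere */ null;
        b_wav wave   = WaveFactory.CreateWave(stream, 0, 3, 0);   // vMajor, vMinor, vRevision are illustrative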
Code Example #2
        /// <summary>
        /// Creates a prefetch stream.
        /// </summary>
        /// <returns>The prefetch stream.</returns>
        /// <param name="s">S.</param>
        /// <param name="version">Version.</param>
        public static b_stp CreatePrefetchStream(b_stm s, byte vMajor, byte vMinor, byte vRevision)
        {
            //New prefetch data.
            b_stp p = new b_stp();

            //Change version, and update info.
            p.fileHeader = new FileHeader("FSTP", ByteOrder.BigEndian, vMajor, vMinor, vRevision, 0, new System.Collections.Generic.List <SizedReference>());
            p.info       = s.info;

            //Make prefetch data.
            p.pdat = new b_stp.PdatBlock();
            p.pdat.prefetchData         = new Table <b_stp.PdatBlock.PrefetchData>();
            p.pdat.prefetchData.entries = new System.Collections.Generic.List <b_stp.PdatBlock.PrefetchData>();
            p.pdat.prefetchData.entries.Add(new b_stp.PdatBlock.PrefetchData()
            {
                startFrame   = 0,
                prefetchSize = 0xA000 * (uint)p.info.streamSoundInfo.channelCount
            });

            //Audio data.
            p.pdat.samples    = new byte[1][][];
            p.pdat.samples[0] = s.data.dspAdpcm;

            //Return data.
            return(p);
        }
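
The single prefetch entry above starts at frame 0 and reserves 0xA000 bytes of DSP-ADPCM data per channel. A minimal sketch of the call, assuming the enclosing class is StreamFactory (not shown here) and that "stream" is an already-parsed b_stm:

        // Hypothetical usage: derive an FSTP prefetch file from an existing FSTM stream.
        b_stp prefetch = StreamFactory.CreatePrefetchStream(stream, 0, 3, 0);   // version bytes are illustrative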
Code Example #3
File: SoundStream.cs  Project: nnn1590/Audinfo
        /// <summary>
        /// Create a looping b_stm.
        /// </summary>
        /// <param name="sampleRate">The sample rate.</param>
        /// <param name="numSamples">Number of samples.</param>
        /// <param name="samples">Pcm8[][] or Pcm16[][] audio samples.</param>
        /// <param name="encoding">If samples is Pcm8[][] always 0. Must be 1 or 2 for if samples is Pcm16[][].</param>
        /// <param name="version">The version of the file.</param>
        /// <param name="loopStart">Loop starting point.</param>
        /// <returns></returns>
        public static b_stm CreateStream(UInt32 sampleRate, UInt32 numSamples, object samples, byte encoding, byte vMajor, byte vMinor, byte vRevision, UInt32 loopStart)
        {
            b_stm s = CreateStream(sampleRate, numSamples, samples, encoding, vMajor, vMinor, vRevision);

            s.info.streamSoundInfo.loopStart = loopStart;
            s.info.streamSoundInfo.isLoop    = true;
            return(s);
        }
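
This overload defers to the non-looping CreateStream (Code Example #8) and then marks the loop region. A hedged sketch with synthetic PCM16 input, using the StreamFactory class referenced in Code Example #9; the values are illustrative only:

        // Hypothetical usage: one second of silent PCM16 audio, looping from sample 0.
        short[][] pcm16 = new short[1][] { new short[32000] };
        b_stm looped = StreamFactory.CreateStream(32000, 32000, pcm16, EncodingTypes.PCM16, 0, 3, 0, 0);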
Code Example #4
 /// <summary>
 /// Convert pcm16 audio to dsp adpcm with a seek block, and update channel info.
 /// </summary>
 /// <param name="pcm16">Pcm 16 audio.</param>
 /// <returns></returns>
 public static byte[][] Pcm16ToDspAdpcmSTM(Int16[][] pcm16, b_stm s)
 {
     byte[][] data = new byte[pcm16.Length][];
     s.info.channels = new List <b_stm.ChannelInfo>();
     for (int i = 0; i < data.Length; i++)
     {
         s.info.channels.Add(new b_stm.ChannelInfo());
         data[i] = DspAdpcmEncoder.EncodeSamples(pcm16[i], out s.info.channels[i].dspAdpcmInfo, s.info.streamSoundInfo.loopStart);
     }
     return(data);
 }
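
EncodeSamples writes the generated DspAdpcmInfo back into the freshly added channel entries, so the b_stm passed in ends up with channel info that matches the returned data. A minimal sketch; "stream" stands for an existing b_stm whose streamSoundInfo is already populated:

        // Hypothetical usage: encode two channels of PCM16 into per-channel DSP-ADPCM blocks.
        short[][] pcm16 = new short[2][] { new short[32000], new short[32000] };
        byte[][] adpcm = EncoderFactory.Pcm16ToDspAdpcmSTM(pcm16, stream);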
Code Example #5
        /// <summary>
        /// Make a RIFF Wave from a b_stm.
        /// </summary>
        /// <param name="b">The b_stm.</param>
        /// <returns>The RIFF Wave.</returns>
        public static RiffWave CreateRiffWave(b_stm b)
        {
            RiffWave r = new RiffWave();
            UInt16   bytesPerSample = 2;

            if (b.info.streamSoundInfo.encoding == EncodingTypes.PCM8)
            {
                bytesPerSample = 1;
            }

            //Non-looping.
            if (!b.info.streamSoundInfo.isLoop)
            {
                r = CreateRiffWave(b.info.streamSoundInfo.sampleRate, bytesPerSample, b.data.GetDataSTM(b.info.streamSoundInfo, b.info));
            }

            //Looping.
            else
            {
                r = CreateRiffWave(b.info.streamSoundInfo.sampleRate, bytesPerSample, b.data.GetDataSTM(b.info.streamSoundInfo, b.info), b.info.streamSoundInfo.loopStart, b.info.streamSoundInfo.sampleCount);
            }

            return(r);
        }
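
The method uses one byte per sample for PCM8 streams and two otherwise, pulling the actual samples through GetDataSTM. A minimal sketch; the enclosing class name "RiffFactory" is an assumption, as is the already-parsed b_stm:

        // Hypothetical usage: export a parsed FSTM to a RIFF wave object.
        RiffWave riff = RiffFactory.CreateRiffWave(stream);   // "RiffFactory" is not shown in the example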
Code Example #6
        /// <summary>
        /// New binary wave.
        /// </summary>
        /// <param name="s">The stream.</param>
        public BinaryWave(b_stm s)
        {
            //Set data.
            ByteOrder  = Syroot.BinaryData.ByteOrder.LittleEndian;
            Major      = 0;
            Minor      = 1;
            SampleRate = s.info.streamSoundInfo.sampleRate;
            NumSamples = s.info.streamSoundInfo.sampleCount;
            switch (s.info.streamSoundInfo.encoding)
            {
            case 0:
                s.data.dspAdpcm = EncoderFactory.Pcm16ToDspAdpcmSTM(EncoderFactory.SignedPcm8ToPcm16(s.data.pcm8), s);
                break;

            case 1:
                s.data.dspAdpcm = EncoderFactory.Pcm16ToDspAdpcmSTM(s.data.pcm16, s);
                break;
            }
            DspAdpcmInfo = new DspAdpcmInfo[s.info.channels.Count];
            for (int i = 0; i < DspAdpcmInfo.Length; i++)
            {
                DspAdpcmInfo[i] = s.info.channels[i].dspAdpcmInfo;
            }
            Loops           = s.info.streamSoundInfo.isLoop;
            LoopStartSample = s.info.streamSoundInfo.loopStart;
            LoopEndSample   = s.info.streamSoundInfo.sampleCount;
            Data            = s.data;

            //Do channel pans.
            ChannelPans = new ChannelPan[s.info.channels.Count];
            for (int i = 0; i < s.info.channels.Count; i++)
            {
                if (i == s.info.channels.Count - 1)
                {
                    ChannelPans[i] = ChannelPan.Middle;
                }
                else if (i % 2 == 0)
                {
                    ChannelPans[i]     = ChannelPan.Left;
                    ChannelPans[i + 1] = ChannelPan.Right;
                    i++;
                }
            }
            if (s.info.tracks != null)
            {
                foreach (var t in s.info.tracks)
                {
                    if (t.globalChannelIndexTable.count > 0)
                    {
                        if (t.globalChannelIndexTable.count > 1)
                        {
                            ChannelPans[t.globalChannelIndexTable.entries[0]] = ChannelPan.Left;
                            ChannelPans[t.globalChannelIndexTable.entries[1]] = ChannelPan.Right;
                        }
                        else
                        {
                            ChannelPans[t.globalChannelIndexTable.entries[0]] = ChannelPan.Middle;
                        }
                    }
                }
            }
        }
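
The constructor re-encodes PCM8/PCM16 data to DSP-ADPCM, pans the channels in left/right pairs with a trailing odd channel centered, and then lets any track channel index tables override that default panning. A minimal sketch, assuming "stream" is an already-parsed b_stm:

        // Hypothetical usage: build a little-endian binary wave from an existing FSTM stream.
        BinaryWave bwav = new BinaryWave(stream);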
Code Example #7
File: SoundStream.cs  Project: nnn1590/Audinfo
        /// <summary>
        /// Create a new b_stm from a RIFF Wave.
        /// </summary>
        /// <param name="r">The RIFF Wave.</param>
        /// <param name="encode">Whether or not to encode PCM16 data.</param>
        /// <param name="version">Version of the file.</param>
        /// <returns></returns>
        public static b_stm CreateStream(RiffWave r, bool encode, byte vMajor, byte vMinor, byte vRevision)
        {
            b_stm s     = new b_stm();
            bool  loops = false;

            if (r.smpl != null)
            {
                loops = true;
                if (r.smpl.numLoops < 1)
                {
                    loops = false;
                }
            }
            if (r.fmt.bitsPerSample == RiffWave.FmtChunk.BitsPerSample.PCM8)
            {
                List <byte[]> pcm8      = new List <byte[]>();
                int           count     = 0;
                UInt32        endSample = (UInt32)r.data.channels[0].pcm8.Count();
                if (loops)
                {
                    endSample = r.smpl.loops[0].endSample;
                }
                foreach (RiffWave.DataChunk.DataSamples d in r.data.channels)
                {
                    if (count <= endSample)
                    {
                        pcm8.Add(d.pcm8.ToArray());
                    }
                    count += 1;
                }
                if (!loops)
                {
                    s = CreateStream(r.fmt.sampleRate, endSample, pcm8.ToArray(), EncodingTypes.PCM8, vMajor, vMinor, vRevision);
                }
                else
                {
                    s = CreateStream(r.fmt.sampleRate, endSample, pcm8.ToArray(), EncodingTypes.PCM8, vMajor, vMinor, vRevision, r.smpl.loops[0].startSample);
                }
            }
            else
            {
                List <Int16[]> pcm16      = new List <Int16[]>();
                int            count2     = 0;
                UInt32         endSample2 = (UInt32)r.data.channels[0].pcm16.Count();
                if (loops)
                {
                    endSample2 = r.smpl.loops[0].endSample;
                }
                foreach (RiffWave.DataChunk.DataSamples d in r.data.channels)
                {
                    if (count2 <= endSample2)
                    {
                        pcm16.Add(d.pcm16.ToArray());
                    }
                    count2 += 1;
                }
                byte encoding = EncodingTypes.PCM16;
                if (encode)
                {
                    encoding = EncodingTypes.DSP_ADPCM;
                }
                if (!loops)
                {
                    s = CreateStream(r.fmt.sampleRate, (UInt32)r.data.channels[0].pcm16.Count(), pcm16.ToArray(), encoding, vMajor, vMinor, vRevision);
                }
                else
                {
                    s = CreateStream(r.fmt.sampleRate, (UInt32)r.data.channels[0].pcm16.Count(), pcm16.ToArray(), encoding, vMajor, vMinor, vRevision, r.smpl.loops[0].startSample);
                }
            }

            return(s);
        }
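
A minimal sketch of the import direction, assuming these overloads live in the StreamFactory class referenced in Code Example #9; how the RiffWave itself gets parsed is not shown above, so that step is left as a placeholder:

        // Hypothetical usage: convert a parsed RIFF wave into an FSTM, encoding PCM16 to DSP-ADPCM.
        RiffWave riff = /* parsed elsewhere */ null;
        b_stm stream = StreamFactory.CreateStream(riff, true, 0, 3, 0);   // version bytes are illustrative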
Code Example #8
File: SoundStream.cs  Project: nnn1590/Audinfo
        /// <summary>
        /// Create a standard b_stm.
        /// </summary>
        /// <param name="sampleRate">The sample rate.</param>
        /// <param name="numSamples">Number of samples.</param>
        /// <param name="samples">Pcm8[][] or Pcm16[][] audio samples.</param>
        /// <param name="encoding">If samples is Pcm8[][] always 0. Must be 1 or 2 for if samples is Pcm16[][].</param>
        /// <param name="version">The version of the file.</param>
        /// <returns></returns>
        public static b_stm CreateStream(UInt32 sampleRate, UInt32 numSamples, object samples, byte encoding, byte vMajor, byte vMinor, byte vRevision)
        {
            b_stm s = new b_stm();

            s.fileHeader = new FileHeader("FSTM", ByteOrder.BigEndian, vMajor, vMinor, vRevision, 0, new List <SizedReference>());

            s.info = new b_stm.InfoBlock();
            s.info.streamSoundInfo = new b_stm.StreamSoundInfo();
            s.info.tracks          = new List <b_stm.TrackInfo>();
            s.info.channels        = new List <b_stm.ChannelInfo>();

            //Stream info.
            s.info.streamSoundInfo             = new b_stm.StreamSoundInfo();
            s.info.streamSoundInfo.encoding    = encoding;
            s.info.streamSoundInfo.sampleCount = numSamples;
            s.info.streamSoundInfo.sampleRate  = sampleRate;

            //Channels.
            switch (encoding)
            {
            case EncodingTypes.PCM8:
                s.data = new SoundNStreamDataBlock(EncoderFactory.Pcm8ToSignedPcm8(samples as byte[][]), encoding);
                for (int i = 0; i < (samples as byte[][]).Length; i++)
                {
                    s.info.channels.Add(new b_stm.ChannelInfo());
                }
                break;

            case EncodingTypes.PCM16:
                s.data = new SoundNStreamDataBlock(samples, encoding);
                for (int i = 0; i < (samples as Int16[][]).Length; i++)
                {
                    s.info.channels.Add(new b_stm.ChannelInfo());
                }
                break;

            case EncodingTypes.DSP_ADPCM:
                s.data = new SoundNStreamDataBlock(EncoderFactory.Pcm16ToDspAdpcmSTM(samples as Int16[][], s), encoding);
                s.Update(ByteOrder.BigEndian);

                //Get DSP-ADPCM info.
                DspAdpcmInfo[] context = new DspAdpcmInfo[s.data.dspAdpcm.Length];
                int            cCount  = 0;
                foreach (var channel in s.info.channels)
                {
                    context[cCount] = channel.dspAdpcmInfo;
                    cCount++;
                }

                //Create SEEK block.
                s.seek = new SoundNStreamSeekBlock(s.data.dspAdpcm, s.info.streamSoundInfo.sampleCount, context);

                break;
            }

            //Tracks.
            for (int i = 0; i < s.info.channels.Count; i += 2)
            {
                s.info.tracks.Add(new b_stm.TrackInfo()
                {
                    volume = 0x64, pan = 0x40, span = 0x0, surroundMode = 0, globalChannelIndexTable = new Table <byte>()
                    {
                        entries = new List <byte>()
                        {
                            (byte)i
                        }
                    }
                });
                if (i + 1 != s.info.channels.Count)
                {
                    s.info.tracks[s.info.tracks.Count - 1].globalChannelIndexTable.entries.Add((byte)(i + 1));
                }
            }

            s.Update(ByteOrder.BigEndian);

            return(s);
        }
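
This is the core builder: it fills in the stream and channel info, stores or encodes the sample data according to the requested encoding (building a SEEK block in the DSP-ADPCM case), and assigns the channels to stereo tracks. A worked sketch, assuming the enclosing class is the StreamFactory referenced in Code Example #9; all values are illustrative:

        // Hypothetical usage: a one-second 440 Hz stereo tone encoded to DSP-ADPCM.
        int sampleRate = 32000;
        short[][] pcm16 = new short[2][] { new short[sampleRate], new short[sampleRate] };
        for (int i = 0; i < sampleRate; i++)
        {
            short v = (short)(System.Math.Sin(2 * System.Math.PI * 440 * i / sampleRate) * 16000);
            pcm16[0][i] = v;
            pcm16[1][i] = v;
        }
        b_stm tone = StreamFactory.CreateStream((uint)sampleRate, (uint)sampleRate, pcm16, EncodingTypes.DSP_ADPCM, 0, 3, 0);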
Code Example #9
File: SoundStream.cs  Project: nnn1590/Audinfo
        /// <summary>
        /// Create a stream from a FISP.
        /// </summary>
        /// <param name="f"></param>
        /// <returns></returns>
        public static b_stm CreateStream(FISP f, byte vMajor, byte vMinor, byte vRevision)
        {
            //New stream.
            b_stm s = new b_stm();

            //Trim the fat from each loop.
            object channels = new short[f.data.data.Count()][];

            for (int i = 0; i < (channels as short[][]).Length; i++)
            {
                List <short> l = new List <short>();

                for (int j = 0; j < f.stream.loopEnd; j++)
                {
                    l.Add(f.data.data[i][j]);
                }

                (channels as short[][])[i] = l.ToArray();
            }

            //PCM8 conversion.
            if (f.stream.encoding == EncodingTypes.PCM8)
            {
                channels = EncoderFactory.Pcm16ToPcm8(channels as short[][]);
            }

            //If looped.
            if (f.stream.isLoop)
            {
                s = StreamFactory.CreateStream(f.stream.sampleRate, f.stream.loopEnd, channels, f.stream.encoding, vMajor, vMinor, vRevision, f.stream.loopStart);
            }

            //Not looped.
            else
            {
                s = StreamFactory.CreateStream(f.stream.sampleRate, f.stream.loopEnd, channels, f.stream.encoding, vMajor, vMinor, vRevision);
            }

            //Make tracks.
            s.info.tracks = new List <b_stm.TrackInfo>();
            foreach (FISP.TrackInfo i in f.tracks)
            {
                b_stm.TrackInfo t = new b_stm.TrackInfo();
                t.globalChannelIndexTable         = new Table <byte>();
                t.globalChannelIndexTable.count   = (uint)i.channels.Count();
                t.globalChannelIndexTable.entries = i.channels;
                t.pan          = i.pan;
                t.span         = i.span;
                t.surroundMode = i.surroundMode;
                t.volume       = i.volume;
                s.info.tracks.Add(t);
            }

            //Nullify.
            if (f.tracks.Count() <= 0)
            {
                s.info.tracks = null;
            }

            //Make regions. EXPERIMENTAL! Yell at me if this doesn't work.
            s.region = null;
            if (f.regions.Count > 0)
            {
                s.region         = new SoundNStreamRegionBlock();
                s.region.regions = new SoundNStreamRegionBlock.RegionInfo[f.regions.Count];
                int index = 0;
                foreach (FISP.RegionInfo i in f.regions)
                {
                    SoundNStreamRegionBlock.RegionInfo r = new SoundNStreamRegionBlock.RegionInfo();
                    r.start    = i.start;
                    r.end      = i.end;
                    r.loopInfo = new SoundNStreamRegionBlock.RegionInfo.DspAdpcmLoopInfo[s.info.channels.Count];
                    if (f.stream.encoding >= EncodingTypes.DSP_ADPCM)
                    {
                        for (int j = 0; j < s.info.channels.Count; j++)
                        {
                            short h1 = 0;
                            short h2 = 0;
                            if (r.start >= 1)
                            {
                                h1 = f.data.data[j][r.start - 1];
                            }
                            if (r.start >= 2)
                            {
                                h2 = f.data.data[j][r.start - 2];
                            }

                            r.loopInfo[j] = new SoundNStreamRegionBlock.RegionInfo.DspAdpcmLoopInfo()
                            {
                                loopPredScale = s.info.channels[j].dspAdpcmInfo.loop_pred_scale,
                                loopYn1       = h1,
                                loopYn2       = h2
                            };
                        }
                    }
                    s.region.regions[index] = r;

                    index++;
                }
            }

            //Set info.
            s.info.streamSoundInfo.originalLoopStart = f.stream.originalLoopStart;
            s.info.streamSoundInfo.originalLoopEnd   = f.stream.originalLoopEnd;
            s.info.streamSoundInfo.secretInfo        = f.stream.secretInfo;

            return(s);
        }
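
A minimal sketch of the FISP path, assuming this overload also lives in the StreamFactory class it calls internally; how the FISP itself is produced is not shown, so that instance is left as a placeholder:

        // Hypothetical usage: rebuild an FSTM stream from a FISP interchange object.
        FISP fisp = /* produced elsewhere */ null;
        b_stm rebuilt = StreamFactory.CreateStream(fisp, 0, 3, 0);   // version bytes are illustrative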