Example #1
        public override int Open(string filename)
        {
            bool r = Bass.BASS_Init(0, 48000, BASSInit.BASS_DEVICE_DEFAULT, IntPtr.Zero);

            stream_in = Bass.BASS_StreamCreateFile(filename, 0, 0, BASSFlag.BASS_DEFAULT | BASSFlag.BASS_STREAM_DECODE);
            // BASS_DEFAULT: 16-bit samples (a stereo sample frame is 4 bytes)
            if (stream_in == 0)
            {
                BASSError be = Bass.BASS_ErrorGetCode();
                Trace.TraceError("Cmp3ogg: StreamCreateFile error: " + be.ToString());
                return(-1);
            }
            nTotalPCMSize = Bass.BASS_ChannelGetLength(stream_in);

            #region [ Getting WAVEFORMATEX info ]
            var chinfo = Bass.BASS_ChannelGetInfo(stream_in);
            wfx = new CWin32.WAVEFORMATEX(
                (ushort)1,                                              // wFormatTag
                (ushort)chinfo.chans,                                   // nChannels
                (uint)chinfo.freq,                                      // nSamplesPerSec
                (uint)(chinfo.freq * 2 * chinfo.chans),                 // nAvgBytesPerSec
                (ushort)(2 * chinfo.chans),                             // nBlockAlign
                16,                                                     // wBitsPerSample
                0                                                       // cbSize
                );
            #endregion

            //string fn = Path.GetFileName(filename);
            //Trace.TraceInformation("filename=" + fn + ", size=(decode): " + wavdata.Length + ", channelgetlength=" + _TotalPCMSize2 + ", " + _TotalPCMSize) ;

            return(0);
        }
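
The stream above is created with BASS_STREAM_DECODE, so its PCM data has to be pulled manually. Below is a minimal sketch of that step, assuming the same Bass.NET bindings and the stream_in field set in Open(); the helper itself is not part of the original class.

        // Hypothetical helper (not in the original source): drains the decoding stream
        // into a 16-bit PCM byte array using BASS_ChannelGetData.
        private byte[] DecodeToPcm()
        {
            long total  = Bass.BASS_ChannelGetLength(stream_in);    // decoded length in bytes
            var  pcm    = new byte[total];
            int  offset = 0;

            while (offset < pcm.Length)
            {
                // Decode in 64 KB slices; BASS returns the number of bytes produced, or -1 on end/error.
                int request = (int)Math.Min(65536, pcm.Length - offset);
                var slice   = new byte[request];
                int got     = Bass.BASS_ChannelGetData(stream_in, slice, request);
                if (got <= 0)
                {
                    break;
                }
                Buffer.BlockCopy(slice, 0, pcm, offset, got);
                offset += got;
            }
            return pcm;
        }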
Example #2
        public override int GetFormat(int nHandle, ref CWin32.WAVEFORMATEX wfx)
        {
            var chinfo = Bass.BASS_ChannelGetInfo(stream_in);

            _wfx.wFormatTag      = 1;                                           // 1 == WAVE_FORMAT_PCM
            _wfx.nChannels       = (ushort)chinfo.chans;
            _wfx.nSamplesPerSec  = (uint)chinfo.freq;
            _wfx.nAvgBytesPerSec = (uint)(chinfo.freq * 2 * chinfo.chans);      // sample rate * block align
            _wfx.nBlockAlign     = (ushort)(2 * chinfo.chans);                  // 16bit (2 bytes) * mono/stereo
            _wfx.wBitsPerSample  = 16;
            _wfx.cbSize          = 0;                                           // no extra format bytes for plain PCM

            //Debug.WriteLine("**WAVEFORMATEX** in Cmp3.cs from stream info");
            //Debug.WriteLine("wFormatTag=      " + 1);
            //Debug.WriteLine("nChannels =      " + chinfo.chans.ToString("X4"));
            //Debug.WriteLine("nSamplesPerSec=  " + chinfo.freq.ToString("X8"));
            //Debug.WriteLine("nAvgBytesPerSec= " + (chinfo.freq * 4).ToString("X8"));
            //Debug.WriteLine("nBlockAlign=     " + (2 * chinfo.chans).ToString("X4"));
            //Debug.WriteLine("wBitsPerSample=  " + (16).ToString("X4"));

            wfx = _wfx;

            #region [ Debug info ]
            //Debug.WriteLine("**WAVEFORMATEX** in Cmp3.cs from binary array");
            //Debug.WriteLine("wFormatTag=      " + wfx.wFormatTag.ToString("X4"));
            //Debug.WriteLine("nChannels =      " + wfx.nChannels.ToString("X4"));
            //Debug.WriteLine("nSamplesPerSec=  " + wfx.nSamplesPerSec.ToString("X8"));
            //Debug.WriteLine("nAvgBytesPerSec= " + wfx.nAvgBytesPerSec.ToString("X8"));
            //Debug.WriteLine("nBlockAlign=     " + wfx.nBlockAlign.ToString("X4"));
            //Debug.WriteLine("wBitsPerSample=  " + wfx.wBitsPerSample.ToString("X4"));
            //Debug.WriteLine("cbSize=          " + wfx.cbSize.ToString("X4"));
            #endregion

            return(0);
        }
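
The literal 2's in GetFormat() are simply the 16-bit sample size in bytes. The sketch below derives the same fields from explicit channel / rate / bit-depth values, assuming the CWin32.WAVEFORMATEX type used throughout these examples; the helper name is made up for illustration.

        // Illustrative helper (hypothetical, not in the original source).
        private static CWin32.WAVEFORMATEX BuildPcmFormat(int channels, int sampleRate, int bitsPerSample)
        {
            var fmt = new CWin32.WAVEFORMATEX();
            fmt.wFormatTag      = 1;                                            // WAVE_FORMAT_PCM
            fmt.nChannels       = (ushort)channels;
            fmt.nSamplesPerSec  = (uint)sampleRate;
            fmt.nBlockAlign     = (ushort)(channels * bitsPerSample / 8);       // bytes per sample frame
            fmt.nAvgBytesPerSec = (uint)(sampleRate * channels * bitsPerSample / 8);
            fmt.wBitsPerSample  = (ushort)bitsPerSample;
            fmt.cbSize          = 0;
            return fmt;
        }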
Example #3
        public override int Open(string filename)
        {
            this._filename = filename;
            bjxa           = new bjxa.Decoder();

            #region [ Reading XA headers, then store it ]
            fs     = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);           // The FileShare flag is needed so the file is not left locked after Close()
            format = bjxa.ReadHeader(fs);
            //string xaid = Encoding.ASCII.GetString( xah.id );

            #region [ Debug info ]
            //Debug.WriteLine( "**XAHEADER**" );
            //Debug.WriteLine( "id=             " + xaheader.id.ToString( "X8" ) );
            //Debug.WriteLine( "nDataLen=       " + xaheader.nDataLen.ToString( "X8" ) );
            //Debug.WriteLine( "nSamples=       " + xaheader.nSamples.ToString( "X8" ) );
            //Debug.WriteLine( "nSamplesPerSec= " + xaheader.nSamplesPerSec.ToString( "X4" ) );
            //Debug.WriteLine( "nBits=          " + xaheader.nBits.ToString( "X2" ) );
            //Debug.WriteLine( "nChannels=      " + xaheader.nChannels.ToString( "X2" ) );
            //Debug.WriteLine( "nLoopPtr=       " + xaheader.nLoopPtr.ToString( "X8" ) );
            //Debug.WriteLine( "befL[0]=        " + xaheader.befL[ 0 ].ToString( "X4" ) );
            //Debug.WriteLine( "befL[1]=        " + xaheader.befL[ 1 ].ToString( "X4" ) );
            //Debug.WriteLine( "befR[0]=        " + xaheader.befR[ 0 ].ToString( "X4" ) );
            //Debug.WriteLine( "befR[1]=        " + xaheader.befR[ 1 ].ToString( "X4" ) );
            #endregion
            #endregion


            #region [ Getting WAVEFORMATEX info ]
            wfx = new CWin32.WAVEFORMATEX(
                (ushort)format.WaveFormatPcm,                       // wFormatTag
                (ushort)format.Channels,                            // nChannels
                format.SamplesRate,                                 // nSamplesPerSec
                format.WaveByteRate,                                // nAvgBytesPerSec
                (ushort)format.WaveBlockAlign,                      // nBlockAlign
                (ushort)format.SampleBits,                          // wBitsPerSample
                0                                                   // cbSize
                );
            #endregion

            #region [ Debug info ]
            //Debug.WriteLine( "**WAVEFORMATEX**" );
            //Debug.WriteLine( "wFormatTag=      " + waveformatex.wFormatTag.ToString( "X4" ) );
            //Debug.WriteLine( "nChannels =      " + waveformatex.nChannels.ToString( "X4" ) );
            //Debug.WriteLine( "nSamplesPerSec=  " + waveformatex.nSamplesPerSec.ToString( "X8" ) );
            //Debug.WriteLine( "nAvgBytesPerSec= " + waveformatex.nAvgBytesPerSec.ToString( "X8" ) );
            //Debug.WriteLine( "nBlockAlign=     " + waveformatex.nBlockAlign.ToString( "X4" ) );
            //Debug.WriteLine( "wBitsPerSample=  " + waveformatex.wBitsPerSample.ToString( "X4" ) );
            //Debug.WriteLine( "cbSize=          " + waveformatex.cbSize.ToString( "X4" ) );
            #endregion

            nTotalPCMSize = (uint)format.DataLengthPcm;

            return(0);
        }
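
Open() above only reads the XA header; decoding the payload is a separate step. The sketch below shows what that step might look like, reusing the Decode() signature and pcm buffer sizing that appear commented out in SaveWav() (Example #5). srcBuf and pcmbuf are assumed to be fields of the same class, and the exact bjxa API should be verified against the library.

        // Hypothetical continuation (assumptions noted above): read the XA payload that
        // follows the header from the already-open FileStream, then decode it to 16-bit PCM.
        private void DecodePayload(string filename)
        {
            srcBuf = new byte[fs.Length - fs.Position];
            if (fs.Read(srcBuf, 0, srcBuf.Length) != srcBuf.Length)
            {
                throw new Exception("Failed to load xa data: " + Path.GetFileName(filename));
            }

            pcmbuf = new short[format.Blocks * format.BlockSizePcm];
            bjxa.Decode(srcBuf, pcmbuf, out long pcmLength);        // signature taken from the commented-out call in SaveWav()
        }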
Example #4
        public override int GetFormat(int nHandle, ref CWin32.WAVEFORMATEX wfx)
        {
            #region [ Manual copy of the WAVEFORMATEX struct ]
            wfx.nAvgBytesPerSec = waveformatex.nAvgBytesPerSec;
            wfx.wBitsPerSample  = waveformatex.wBitsPerSample;
            wfx.nBlockAlign     = waveformatex.nBlockAlign;
            wfx.nChannels       = waveformatex.nChannels;
            wfx.wFormatTag      = waveformatex.wFormatTag;
            wfx.nSamplesPerSec  = waveformatex.nSamplesPerSec;
            wfx.cbSize          = waveformatex.cbSize;

            #endregion

            return(0);
        }
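
If CWin32.WAVEFORMATEX is a plain struct (a value type), the field-by-field copy above can be collapsed into a single assignment, since assigning a struct copies all of its fields at once. This is an observation about C# value-type semantics, not something the original code does:

        public override int GetFormat(int nHandle, ref CWin32.WAVEFORMATEX wfx)
        {
            wfx = waveformatex;     // copies every field, including cbSize, in one statement
            return(0);
        }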
Example #5
        private void SaveWav(string filename)
        {
            long _TotalPCMSize = (uint)format.DataLengthPcm;

            CWin32.WAVEFORMATEX _wfx = wfx;

            string outfile = Path.GetFileName(filename);
            var    fs2     = new FileStream(outfile + ".wav", FileMode.Create);
            var    st      = new BinaryWriter(fs2);

            st.Write(new byte[] { 0x52, 0x49, 0x46, 0x46 }, 0, 4); // 'RIFF'
            st.Write((int)_TotalPCMSize + 44 - 8);                 // file size - 8 bytes (PCM data + 44-byte header - 8)
            st.Write(new byte[] { 0x57, 0x41, 0x56, 0x45 }, 0, 4); // 'WAVE'
            st.Write(new byte[] { 0x66, 0x6D, 0x74, 0x20 }, 0, 4); // 'fmt '
            st.Write(new byte[] { 0x10, 0x00, 0x00, 0x00 }, 0, 4); // chunk size 16bytes
            st.Write(new byte[] { 0x01, 0x00 }, 0, 2);             // formatTag 0001 PCM
            st.Write((short)_wfx.nChannels);                       // channels
            st.Write((int)_wfx.nSamplesPerSec);                    // samples per sec
            st.Write((int)_wfx.nAvgBytesPerSec);                   // avg bytes per sec
            st.Write((short)_wfx.nBlockAlign);                     // blockalign = 16bit * mono/stereo
            st.Write((short)_wfx.wBitsPerSample);                  // bitspersample = 16bits

            st.Write(new byte[] { 0x64, 0x61, 0x74, 0x61 }, 0, 4); // 'data'
            st.Write((int)_TotalPCMSize);                          // datasize


            //var pcmbuf = new short[format.Blocks * format.BlockSizePcm];
            //if (fs.Read(srcBuf, 0, srcBuf.Length) != srcBuf.Length)
            //{
            //	string s = Path.GetFileName(filename);
            //	throw new Exception($"Failed to load xa data: {s}");
            //}
            //int ret = bjxa.Decode(srcBuf, pcmbuf, out long pcmBufLength);

            int shortsize    = (int)(format.Blocks * format.BlockSizePcm);
            var pcmbuf_bytes = new byte[shortsize * 2];

            for (int i = 0; i < shortsize; i++)
            {
                var b = BitConverter.GetBytes(pcmbuf[i]);
                pcmbuf_bytes[i * 2]     = b[0];
                pcmbuf_bytes[i * 2 + 1] = b[1];
            }
            st.Write(pcmbuf_bytes);
            Trace.TraceInformation($"wrote ({outfile}.wav) " + fs2.Length);
            st.Dispose();
            fs2.Dispose();
        }
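
The per-sample loop at the end of SaveWav() can also be done as one bulk copy. A small sketch, assuming pcmbuf is the short[] holding the decoded samples (the same assumption the loop makes):

            // Equivalent bulk conversion: Buffer.BlockCopy copies the raw little-endian
            // sample bytes in a single call instead of splitting each short by hand.
            var pcmbuf_bytes = new byte[pcmbuf.Length * sizeof(short)];
            Buffer.BlockCopy(pcmbuf, 0, pcmbuf_bytes, 0, pcmbuf_bytes.Length);
            st.Write(pcmbuf_bytes);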
Example #6
 public abstract int GetFormat(int nHandle, ref CWin32.WAVEFORMATEX wfx);
Example #7
 public override int GetFormat(int nHandle, ref CWin32.WAVEFORMATEX wfx)
 {
     return(oggGetFormat(nHandle, ref wfx));
 }
Example #8
 private static extern int oggGetFormat(int nHandle, ref CWin32.WAVEFORMATEX wfx);
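
An extern declaration like this only compiles as part of a DllImport binding. The general shape is sketched below; the native DLL name is a placeholder because it is not shown in the source.

 // Sketch of the P/Invoke binding this extern belongs to. "SOME_NATIVE_DECODER.dll" is a
 // placeholder, not the real module name.
 [DllImport("SOME_NATIVE_DECODER.dll")]
 private static extern int oggGetFormat(int nHandle, ref CWin32.WAVEFORMATEX wfx);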
Example #9
        public override int Open(string filename)
        {
            this.filename = filename;

            #region [ Reading the XA header and XA data ]
            xaheader = new XAHEADER();
            using (FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))                       // Without the FileShare flag, the file may stay locked even after Close()
            {
                using (BinaryReader br = new BinaryReader(fs))
                {
                    xaheader.id             = br.ReadUInt32();
                    xaheader.nDataLen       = br.ReadUInt32();
                    xaheader.nSamples       = br.ReadUInt32();
                    xaheader.nSamplesPerSec = br.ReadUInt16();
                    xaheader.nBits          = br.ReadByte();
                    xaheader.nChannels      = br.ReadByte();
                    xaheader.nLoopPtr       = br.ReadUInt32();

                    xaheader.befL = new short[2];
                    xaheader.befR = new short[2];
                    xaheader.pad  = new byte[4];

                    xaheader.befL[0] = br.ReadInt16();
                    xaheader.befL[1] = br.ReadInt16();
                    xaheader.befR[0] = br.ReadInt16();
                    xaheader.befR[1] = br.ReadInt16();
                    xaheader.pad     = br.ReadBytes(4);

                    srcBuf = br.ReadBytes((int)xaheader.nDataLen);
                }
            }
            //string xaid = Encoding.ASCII.GetString( xah.id );
            #region [ Debug info ]
            //Debug.WriteLine( "**XAHEADER**" );
            //Debug.WriteLine( "id=             " + xaheader.id.ToString( "X8" ) );
            //Debug.WriteLine( "nDataLen=       " + xaheader.nDataLen.ToString( "X8" ) );
            //Debug.WriteLine( "nSamples=       " + xaheader.nSamples.ToString( "X8" ) );
            //Debug.WriteLine( "nSamplesPerSec= " + xaheader.nSamplesPerSec.ToString( "X4" ) );
            //Debug.WriteLine( "nBits=          " + xaheader.nBits.ToString( "X2" ) );
            //Debug.WriteLine( "nChannels=      " + xaheader.nChannels.ToString( "X2" ) );
            //Debug.WriteLine( "nLoopPtr=       " + xaheader.nLoopPtr.ToString( "X8" ) );
            //Debug.WriteLine( "befL[0]=        " + xaheader.befL[ 0 ].ToString( "X4" ) );
            //Debug.WriteLine( "befL[1]=        " + xaheader.befL[ 1 ].ToString( "X4" ) );
            //Debug.WriteLine( "befR[0]=        " + xaheader.befR[ 0 ].ToString( "X4" ) );
            //Debug.WriteLine( "befR[1]=        " + xaheader.befR[ 1 ].ToString( "X4" ) );
            #endregion
            #endregion

            IntPtr hxas;

            #region [ Getting WAVEFORMATEX info ]
            waveformatex = new CWin32.WAVEFORMATEX();
            hxas         = xaDecodeOpen(ref xaheader, out waveformatex);
            if (hxas == IntPtr.Zero)
            {
                Trace.TraceError("Error: xa: Open(): xaDecodeOpen(): " + Path.GetFileName(filename));
                return(-1);
            }

            #region [ Debug info ]
            //Debug.WriteLine( "**WAVEFORMATEX**" );
            //Debug.WriteLine( "wFormatTag=      " + waveformatex.wFormatTag.ToString( "X4" ) );
            //Debug.WriteLine( "nChannels =      " + waveformatex.nChannels.ToString( "X4" ) );
            //Debug.WriteLine( "nSamplesPerSec=  " + waveformatex.nSamplesPerSec.ToString( "X8" ) );
            //Debug.WriteLine( "nAvgBytesPerSec= " + waveformatex.nAvgBytesPerSec.ToString( "X8" ) );
            //Debug.WriteLine( "nBlockAlign=     " + waveformatex.nBlockAlign.ToString( "X4" ) );
            //Debug.WriteLine( "wBitsPerSample=  " + waveformatex.wBitsPerSample.ToString( "X4" ) );
            //Debug.WriteLine( "cbSize=          " + waveformatex.cbSize.ToString( "X4" ) );
            #endregion
            #endregion

            this.nHandle = (int)hxas;
            return((int)hxas);
        }
Example #10
        //#region [ IDisposable implementation ]
        ////-----------------
        //private bool bDispose完了済み = false;
        //public void Dispose()
        //{
        //    if ( !this.bDispose完了済み )
        //    {
        //        if ( srcBuf != null )
        //        {
        //            srcBuf = null;
        //        }
        //        if ( dstBuf != null )
        //        {
        //            dstBuf = null;
        //        }

        //        if ( this.nHandle >= 0 )
        //        {
        //            this.Close( this.nHandle );
        //            this.nHandle = -1;
        //        }
        //        this.bDispose完了済み = true;
        //    }
        //}
        ////-----------------
        //#endregion

#if false
        /// <summary>
        /// Reads an xa file and decodes it to wav
        /// </summary>
        /// <param name="filename">xa file name</param>
        /// <param name="wavBuf">buffer that receives the decoded wav data</param>
        /// <returns></returns>
        public bool Decode(string filename, out byte[] wavBuf)
        {
            // Debug.WriteLine( "xa: Decode: " + Path.GetFileName( filename ) );

            #region [ Reading the XA header and XA data ]
            xaheader = new XAHEADER();
            byte[] xaBuf;
            using (FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))                       // Without the FileShare flag, the file may stay locked even after Close()
            {
                using (BinaryReader br = new BinaryReader(fs))
                {
                    xaheader.id             = br.ReadUInt32();
                    xaheader.nDataLen       = br.ReadUInt32();
                    xaheader.nSamples       = br.ReadUInt32();
                    xaheader.nSamplesPerSec = br.ReadUInt16();
                    xaheader.nBits          = br.ReadByte();
                    xaheader.nChannels      = br.ReadByte();
                    xaheader.nLoopPtr       = br.ReadUInt32();

                    xaheader.befL = new short[2];
                    xaheader.befR = new short[2];
                    xaheader.pad  = new byte[4];

                    xaheader.befL[0] = br.ReadInt16();
                    xaheader.befL[1] = br.ReadInt16();
                    xaheader.befR[0] = br.ReadInt16();
                    xaheader.befR[1] = br.ReadInt16();
                    xaheader.pad     = br.ReadBytes(4);

                    xaBuf = br.ReadBytes((int)xaheader.nDataLen);
                }
            }
            //string xaid = Encoding.ASCII.GetString( xah.id );
            #region [ Debug info ]
            //Debug.WriteLine( "**XAHEADER**" );
            //Debug.WriteLine( "id=             " + xaheader.id.ToString( "X8" ) );
            //Debug.WriteLine( "nDataLen=       " + xaheader.nDataLen.ToString( "X8" ) );
            //Debug.WriteLine( "nSamples=       " + xaheader.nSamples.ToString( "X8" ) );
            //Debug.WriteLine( "nSamplesPerSec= " + xaheader.nSamplesPerSec.ToString( "X4" ) );
            //Debug.WriteLine( "nBits=          " + xaheader.nBits.ToString( "X2" ) );
            //Debug.WriteLine( "nChannels=      " + xaheader.nChannels.ToString( "X2" ) );
            //Debug.WriteLine( "nLoopPtr=       " + xaheader.nLoopPtr.ToString( "X8" ) );
            //Debug.WriteLine( "befL[0]=        " + xaheader.befL[ 0 ].ToString( "X4" ) );
            //Debug.WriteLine( "befL[1]=        " + xaheader.befL[ 1 ].ToString( "X4" ) );
            //Debug.WriteLine( "befR[0]=        " + xaheader.befR[ 0 ].ToString( "X4" ) );
            //Debug.WriteLine( "befR[1]=        " + xaheader.befR[ 1 ].ToString( "X4" ) );
            #endregion
            #endregion

            object lockobj = new object();
            lock ( lockobj )                    // just in case this is not thread-safe (note: locking a freshly created local object does not actually synchronize anything)
            {
                #region [ Getting WAVEFORMATEX info ]
                waveformatex = new CWin32.WAVEFORMATEX();
                IntPtr hxas = xaDecodeOpen(ref xaheader, out waveformatex);
                if (hxas == IntPtr.Zero)
                {
                    Trace.TraceError("Error: xaDecodeOpen(): " + Path.GetFileName(filename));
                    wavBuf = null;
                    return(false);
                }

                #region [ Debug info ]
                //Debug.WriteLine( "**WAVEFORMATEX**" );
                //Debug.WriteLine( "wFormatTag=      " + waveformatex.wFormatTag.ToString( "X4" ) );
                //Debug.WriteLine( "nChannels =      " + waveformatex.nChannels.ToString( "X4" ) );
                //Debug.WriteLine( "nSamplesPerSec=  " + waveformatex.nSamplesPerSec.ToString( "X8" ) );
                //Debug.WriteLine( "nAvgBytesPerSec= " + waveformatex.nAvgBytesPerSec.ToString( "X8" ) );
                //Debug.WriteLine( "nBlockAlign=     " + waveformatex.nBlockAlign.ToString( "X4" ) );
                //Debug.WriteLine( "wBitsPerSample=  " + waveformatex.wBitsPerSample.ToString( "X4" ) );
                //Debug.WriteLine( "cbSize=          " + waveformatex.cbSize.ToString( "X4" ) );
                #endregion
                #endregion

                #region [ Getting the decoded data length ]
                uint dlen;
                xaDecodeSize(hxas, xaheader.nDataLen, out dlen);
                #region [ Debug info ]
                //Debug.WriteLine( "**INTERNAL VALUE**" );
                //Debug.WriteLine( "dlen=          " + dlen );
                #endregion
                #endregion

                #region [ Decoding the xa data ]
                wavBuf         = new byte[dlen];
                xastreamheader = new XASTREAMHEADER();

                unsafe
                {
                    fixed(byte *pXaBuf = xaBuf, pWavBuf = wavBuf)
                    {
                        xastreamheader.pSrc     = pXaBuf;
                        xastreamheader.nSrcLen  = xaheader.nDataLen;
                        xastreamheader.nSrcUsed = 0;
                        xastreamheader.pDst     = pWavBuf;
                        xastreamheader.nDstLen  = dlen;
                        xastreamheader.nDstUsed = 0;
                        bool b = xaDecodeConvert(hxas, ref xastreamheader);

                        if (!b)
                        {
                            Trace.TraceError("Error: xaDecodeConvert(): " + Path.GetFileName(filename));
                            wavBuf = null;
                            return(false);
                        }
                    }
                }
                #region [ Debug info ]
                //Debug.WriteLine( "**XASTREAMHEADER**" );
                //Debug.WriteLine( "nSrcLen=  " + xastreamheader.nSrcLen );
                //Debug.WriteLine( "nSrcUsed= " + xastreamheader.nSrcUsed );
                //Debug.WriteLine( "nDstLen=  " + xastreamheader.nDstLen );
                //Debug.WriteLine( "nDstUsed= " + xastreamheader.nDstUsed );
                #endregion
                #endregion

                #region [ Closing the xa decoder ]
                bool bb = xaDecodeClose(hxas);
                if (!bb)
                {
                    Trace.TraceError("Error: xaDecodeClose(): " + Path.GetFileName(filename));
                }
                #endregion
            }

            return(true);
        }
Example #11
        public static int AudioDecode(string filename, out byte[] buffer,
                                      out int nPCMデータの先頭インデックス, out int totalPCMSize, out CWin32.WAVEFORMATEX wfx, bool enablechunk)
        {
            if (!File.Exists(filename))
            {
                throw new FileNotFoundException(filename + " not found...");
            }

            AVFormatContext *format_context = null;

            if (ffmpeg.avformat_open_input(&format_context, filename, null, null) != 0)
            {
                throw new FileLoadException("avformat_open_input failed\n");
            }

            // get stream info
            if (ffmpeg.avformat_find_stream_info(format_context, null) < 0)
            {
                throw new FileLoadException("avformat_find_stream_info failed\n");
            }

            // find audio stream
            AVStream *audio_stream = null;

            for (int i = 0; i < (int)format_context->nb_streams; i++)
            {
                if (format_context->streams[i]->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    audio_stream = format_context->streams[i];
                    break;
                }
            }
            if (audio_stream == null)
            {
                throw new FileLoadException("No audio stream ...\n");
            }

            // find decoder
            AVCodec *codec = ffmpeg.avcodec_find_decoder(audio_stream->codecpar->codec_id);

            if (codec == null)
            {
                throw new NotSupportedException("No supported decoder ...\n");
            }

            // alloc codec context
            AVCodecContext *codec_context = ffmpeg.avcodec_alloc_context3(codec);

            if (codec_context == null)
            {
                throw new OutOfMemoryException("avcodec_alloc_context3 failed\n");
            }

            // open codec
            if (ffmpeg.avcodec_parameters_to_context(codec_context, audio_stream->codecpar) < 0)
            {
                Trace.WriteLine("avcodec_parameters_to_context failed\n");
            }
            if (ffmpeg.avcodec_open2(codec_context, codec, null) != 0)
            {
                Trace.WriteLine("avcodec_open2 failed\n");
            }

            // allocate frame and packet
            AVFrame *   frame       = ffmpeg.av_frame_alloc();
            AVPacket *  packet      = ffmpeg.av_packet_alloc();
            SwrContext *swr         = null;
            byte *      swr_buf     = null;
            int         swr_buf_len = 0;
            int         ret;
            int         nFrame  = 0;
            int         nSample = 0;
            int         pos     = 0;

            using (MemoryStream ms = new MemoryStream())
            {
                using (BinaryWriter bw = new BinaryWriter(ms))
                {
                    if (enablechunk)
                    {
                        bw.Write(new byte[] { 0x52, 0x49, 0x46, 0x46 });                                   // 'RIFF'
                        bw.Write((UInt32)0);                                                               // file size - 8 bytes; unknown at this point, overwritten later
                        bw.Write(new byte[] { 0x57, 0x41, 0x56, 0x45 });                                   // 'WAVE'
                        bw.Write(new byte[] { 0x66, 0x6D, 0x74, 0x20 });                                   // 'fmt '
                        bw.Write((UInt32)(16));                                                            // fmt chunk size in bytes
                        bw.Write((UInt16)1);                                                               // format tag (1 = linear PCM)
                        bw.Write((UInt16)codec_context->channels);                                         // number of channels
                        bw.Write((UInt32)codec_context->sample_rate);                                      // sampling rate
                        bw.Write((UInt32)(16 / 8 * codec_context->channels * codec_context->sample_rate)); // average bytes per second
                        bw.Write((UInt16)(codec_context->channels * 16 / 8));                              // block align
                        bw.Write((UInt16)16);                                                              // bits per sample
                        bw.Write(new byte[] { 0x64, 0x61, 0x74, 0x61 });                                   // 'data'
                        pos = (int)ms.Position;
                        bw.Write((UInt32)0);                                                               // data chunk size; overwritten later
                    }
                    while (true)
                    {
                        if (ffmpeg.av_read_frame(format_context, packet) < 0)
                        {
                            Trace.TraceError("av_read_frame eof or error.\n");
                            break;                             // eof or error
                        }
                        if (packet->stream_index == audio_stream->index)
                        {
                            if (ffmpeg.avcodec_send_packet(codec_context, packet) < 0)
                            {
                                Trace.TraceError("avcodec_send_packet error\n");
                            }
                            if ((ret = ffmpeg.avcodec_receive_frame(codec_context, frame)) < 0)
                            {
                                if (ret != ffmpeg.AVERROR(ffmpeg.EAGAIN))
                                {
                                    Trace.TraceError("avcodec_receive_frame error.\n");
                                    break;
                                }
                            }
                            else
                            {
                                nFrame++;
                                nSample += frame->nb_samples;

                                if ((AVSampleFormat)frame->format != AVSampleFormat.AV_SAMPLE_FMT_S16)
                                {
                                    if (swr == null)
                                    {
                                        swr = ffmpeg.swr_alloc();
                                        if (swr == null)
                                        {
                                            Trace.TraceError("swr_alloc error.\n");
                                            break;
                                        }
                                        ffmpeg.av_opt_set_int(swr, "in_channel_layout", (long)frame->channel_layout, 0);
                                        ffmpeg.av_opt_set_int(swr, "out_channel_layout", (long)frame->channel_layout, 0);
                                        ffmpeg.av_opt_set_int(swr, "in_sample_rate", frame->sample_rate, 0);
                                        ffmpeg.av_opt_set_int(swr, "out_sample_rate", frame->sample_rate, 0);
                                        ffmpeg.av_opt_set_sample_fmt(swr, "in_sample_fmt", (AVSampleFormat)frame->format, 0);
                                        ffmpeg.av_opt_set_sample_fmt(swr, "out_sample_fmt", AVSampleFormat.AV_SAMPLE_FMT_S16, 0);
                                        ret = ffmpeg.swr_init(swr);
                                        if (ret < 0)
                                        {
                                            Trace.TraceError("swr_init error.\n");
                                            break;
                                        }
                                        swr_buf_len = ffmpeg.av_samples_get_buffer_size(null, frame->channels, frame->sample_rate, AVSampleFormat.AV_SAMPLE_FMT_S16, 1);
                                        swr_buf     = (byte *)ffmpeg.av_malloc((ulong)swr_buf_len);
                                    }

                                    ret = ffmpeg.swr_convert(swr, &swr_buf, frame->nb_samples, frame->extended_data, frame->nb_samples);
                                    for (int index = 0; index < frame->nb_samples * (16 / 8) * frame->channels; index++)
                                    {
                                        bw.Write(swr_buf[index]);
                                    }
                                }
                                else
                                {
                                    for (int index = 0; index < frame->nb_samples * (16 / 8) * frame->channels; index++)
                                    {
                                        bw.Write((*(frame->extended_data))[index]);
                                    }
                                }
                            }
                        }
                        ffmpeg.av_packet_unref(packet);
                    }
                    if (enablechunk)
                    {
                        bw.Seek(4, SeekOrigin.Begin);
                        bw.Write((UInt32)(ms.Length - 8));                                          // file size - 8 bytes

                        bw.Seek(pos, SeekOrigin.Begin);
                        bw.Write((UInt32)(ms.Length - (pos + 4)));                                  // data chunk size in bytes
                    }

                    bw.Close();

                    Debug.Print("Frames=" + nFrame + "\n" + "Samples=" + nSample);
                    buffer = ms.ToArray();
                }
            }

            nPCMデータの先頭インデックス = pos;
            totalPCMSize     = buffer.Length;
            wfx = new CWin32.WAVEFORMATEX(1, (ushort)codec_context->channels, (uint)codec_context->sample_rate, (uint)(16 / 8 * codec_context->channels * codec_context->sample_rate), (ushort)(codec_context->channels * 16 / 8), 16);

            if (swr != null)
            {
                ffmpeg.swr_free(&swr);
            }
            ffmpeg.av_free((void *)swr_buf);
            ffmpeg.av_packet_free(&packet);
            ffmpeg.av_frame_free(&frame);
            ffmpeg.avcodec_free_context(&codec_context);
            ffmpeg.avformat_close_input(&format_context);

            return(0);
        }
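
A possible caller for AudioDecode(), assuming it lives in the same class and is called with enablechunk == true, so the returned buffer is already a complete RIFF/WAVE image; the method name and file handling below are illustrative only.

        // Hypothetical usage sketch, not part of the original source.
        public static void DecodeToWavFile(string srcPath, string dstPath)
        {
            byte[] wav;
            int    dataOffset;          // index where the data chunk size field starts
            int    totalSize;
            CWin32.WAVEFORMATEX fmt;

            AudioDecode(srcPath, out wav, out dataOffset, out totalSize, out fmt, true);

            File.WriteAllBytes(dstPath, wav);   // buffer already contains the RIFF header
            Trace.TraceInformation($"decoded {srcPath}: {fmt.nSamplesPerSec} Hz, {fmt.nChannels} ch, {wav.Length} bytes");
        }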