Example #1
 /// <summary>
 /// Tries to find the encoding media type with the closest bitrate to that specified
 /// </summary>
 /// <param name="audioSubtype">Audio subtype, a value from AudioSubtypes</param>
 /// <param name="inputFormat">Your encoder input format (used to check sample rate and channel count)</param>
 /// <param name="desiredBitRate">Your desired bitrate</param>
 /// <returns>The closest media type, or null if none available</returns>
 public static MF.MediaType SelectMediaType(Guid audioSubtype, SharpDX.Multimedia.WaveFormat inputFormat, int desiredBitRate)
 {
     return(GetOutputMediaTypes(audioSubtype)
            .Where(mt => mt.Get(MF.MediaTypeAttributeKeys.AudioSamplesPerSecond) == inputFormat.SampleRate &&
                   mt.Get(MF.MediaTypeAttributeKeys.AudioNumChannels) == inputFormat.Channels)
            .Select(mt => new { MediaType = mt, Delta = Math.Abs(desiredBitRate - mt.Get(MF.MediaTypeAttributeKeys.AudioAvgBytesPerSecond) * 8) })
            .OrderBy(mt => mt.Delta)
            .Select(mt => mt.MediaType)
            .FirstOrDefault());
 }
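A minimal usage sketch for the method above, assuming it lives alongside GetOutputMediaTypes in the same class and that MF aliases SharpDX.MediaFoundation; the AAC subtype field and the 192 kbps target are illustrative assumptions, not taken from the original code:

 // Pick the encoder media type closest to 192 kbps for 44.1 kHz, 16-bit stereo input.
 var inputFormat = new SharpDX.Multimedia.WaveFormat(44100, 16, 2);
 var subtype = SharpDX.MediaFoundation.AudioFormatGuids.Aac; // assumed AAC subtype GUID; any AudioSubtypes value works here
 var mediaType = SelectMediaType(subtype, inputFormat, 192000);
 if (mediaType == null)
 {
     // No encoder media type matches this sample rate / channel count combination.
     throw new InvalidOperationException("No suitable output media type found.");
 }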
Example #2
 /// <summary>
 /// <p>Queries if a specific output format is supported for a given input format.</p>
 /// </summary>
 /// <param name = "inputFormatRef"><dd> <p>Input format. </p> </dd></param>
 /// <param name = "requestedOutputFormatRef"><dd> <p>Output format to check for being supported.</p> </dd></param>
 /// <param name = "supportedOutputFormatOut"><dd> <p>If not <c>null</c> and the output format is not supported for the given input format, <em>ppSupportedOutputFormat</em> returns a reference to the closest output format that is supported. Use <strong>XAPOFree</strong> to free the returned structure. </p> </dd></param>
 /// <returns><p>Returns <see cref = "SharpDX.Result.Ok"/> if the format pair is supported. Returns XAPO_E_FORMAT_UNSUPPORTED if the format pair is not supported.</p></returns>
 /// <remarks>
 /// <p>The <strong>IXAPO::IsInputFormatSupported</strong> and <strong>IsOutputFormatSupported</strong> methods allow an XAPO to indicate which audio formats it is capable of processing. If a requested format is not supported, the XAPO should return the closest format that it does support. The closest format should be determined based on frame rate, bit depth, and channel count, in that order of importance. The behavior of <strong>IsOutputFormatSupported</strong> is allowed to change, based on the internal state of the XAPO, but its behavior should remain constant between calls to the <strong>IXAPO::LockForProcess</strong> and <strong>IXAPO::UnlockForProcess</strong> methods.</p>
 /// </remarks>
 /// <doc-id>microsoft.directx_sdk.ixapo.ixapo.isoutputformatsupported</doc-id>
 /// <unmanaged>HRESULT IXAPO::IsOutputFormatSupported([In] const WAVEFORMATEX* pInputFormat,[In] const WAVEFORMATEX* pRequestedOutputFormat,[Out, Optional] WAVEFORMATEX** ppSupportedOutputFormat)</unmanaged>
 /// <unmanaged-short>IXAPO::IsOutputFormatSupported</unmanaged-short>
 internal unsafe SharpDX.Result IsOutputFormatSupported_(ref SharpDX.Multimedia.WaveFormat inputFormatRef, ref SharpDX.Multimedia.WaveFormat requestedOutputFormatRef, out SharpDX.Multimedia.WaveFormat supportedOutputFormatOut)
 {
     SharpDX.Multimedia.WaveFormat.__Native inputFormatRef_           = default(SharpDX.Multimedia.WaveFormat.__Native);
     SharpDX.Multimedia.WaveFormat.__Native requestedOutputFormatRef_ = default(SharpDX.Multimedia.WaveFormat.__Native);
     SharpDX.Multimedia.WaveFormat.__Native supportedOutputFormatOut_ = default(SharpDX.Multimedia.WaveFormat.__Native);
     supportedOutputFormatOut = default(SharpDX.Multimedia.WaveFormat);
     SharpDX.Result __result__;
     inputFormatRef.__MarshalTo(ref inputFormatRef_);
     requestedOutputFormatRef.__MarshalTo(ref requestedOutputFormatRef_);
     __result__ = SharpDX.XAudio2.LocalInterop.CalliStdCallint(this._nativePointer, &inputFormatRef_, &requestedOutputFormatRef_, &supportedOutputFormatOut_, (*(void ***)this._nativePointer)[5]);
     supportedOutputFormatOut.__MarshalFrom(ref supportedOutputFormatOut_);
     inputFormatRef.__MarshalFree(ref inputFormatRef_);
     requestedOutputFormatRef.__MarshalFree(ref requestedOutputFormatRef_);
     return(__result__);
 }
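The wrapper above only marshals the two WaveFormat structures and forwards the native HRESULT; the caller still decides what to do with the result. A rough usage sketch, assuming `effect` is an instance of the surrounding interop type and treating any non-Ok result as "pair not supported":

 var input     = new SharpDX.Multimedia.WaveFormat(48000, 16, 2);
 var requested = new SharpDX.Multimedia.WaveFormat(44100, 16, 2);
 SharpDX.Multimedia.WaveFormat closest;

 SharpDX.Result result = effect.IsOutputFormatSupported_(ref input, ref requested, out closest);
 if (result != SharpDX.Result.Ok)
 {
     // The requested pair is unsupported; fall back to the closest format the XAPO reports.
     requested = closest;
 }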
Example #3
        /// <summary>
        /// Load sound data. wavBytes must not contain a WAV header.
        /// </summary>
        /// <param name="wavBytes">Raw sample data, without a WAV header</param>
        /// <param name="sampleRate">Sample rate in Hz</param>
        /// <param name="bitsPerSample">Bits per sample</param>
        /// <param name="channelCount">Number of channels</param>
        /// <returns>Returns the duration.</returns>
        public decimal Load(byte[] wavBytes, int sampleRate, int bitsPerSample, int channelCount)
        {
            var format = new SharpDX.Multimedia.WaveFormat(sampleRate, bitsPerSample, channelCount);
            // Create and set the buffer description.
            var desc = new SoundBufferDescription();

            desc.Format = format;
            desc.Flags  =
                // Play sound even if application loses focus.
                BufferFlags.GlobalFocus |
                // This has to be true to use effects.
                BufferFlags.ControlEffects;
            desc.BufferBytes = wavBytes.Length;
            // Create and set the buffer for playing the sound.
            ApplicationBuffer = new SecondarySoundBuffer(ApplicationDevice, desc);
            ApplicationBuffer.Write(wavBytes, 0, LockFlags.None);
            var duration = AudioHelper.GetDuration(wavBytes.Length, sampleRate, bitsPerSample, channelCount);

            return(duration);
        }
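AudioHelper.GetDuration is not shown here; for raw PCM the duration follows from the byte count and format alone, so an equivalent helper might look roughly like this (a sketch only, assuming uncompressed PCM and a duration expressed in seconds):

 public static decimal GetDuration(int byteCount, int sampleRate, int bitsPerSample, int channelCount)
 {
     // Bytes consumed per second of playback: rate * channels * bytes per sample.
     int bytesPerSecond = sampleRate * channelCount * (bitsPerSample / 8);
     return (decimal)byteCount / bytesPerSecond;
 }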
        public WaveBank(AudioEngine audioEngine, string nonStreamingWaveBankFilename)
        {
            //XWB PARSING
            //Adapted from MonoXNA
            //Originally adapted from Luigi Auriemma's unxwb
			
            WaveBankHeader wavebankheader;
            WaveBankData wavebankdata;
            WaveBankEntry wavebankentry;

            wavebankdata.EntryNameElementSize = 0;
            wavebankdata.CompactFormat = 0;
            wavebankdata.Alignment = 0;

            wavebankentry.Format = 0;
            wavebankentry.PlayRegion.Length = 0;
            wavebankentry.PlayRegion.Offset = 0;

            int wavebank_offset = 0;

#if WINRT
			const char notSeparator = '/';
			const char separator = '\\';
#else
            const char notSeparator = '\\';
            var separator = Path.DirectorySeparatorChar;
#endif
			// Check for windows-style directory separator character
			nonStreamingWaveBankFilename = nonStreamingWaveBankFilename.Replace(notSeparator, separator);

#if !ANDROID
            BinaryReader reader = new BinaryReader(TitleContainer.OpenStream(nonStreamingWaveBankFilename));
#else 
			Stream stream = Game.Activity.Assets.Open(nonStreamingWaveBankFilename);
			MemoryStream ms = new MemoryStream();
			stream.CopyTo( ms );
			stream.Close();
			ms.Position = 0;
			BinaryReader reader = new BinaryReader(ms);
#endif
			reader.ReadBytes(4);

            wavebankheader.Version = reader.ReadInt32();

            int last_segment = 4;
            //if (wavebankheader.Version == 1) goto WAVEBANKDATA;
            if (wavebankheader.Version <= 3) last_segment = 3;
            if (wavebankheader.Version >= 42) reader.ReadInt32();    // skip HeaderVersion

            wavebankheader.Segments = new Segment[5];

            for (int i = 0; i <= last_segment; i++)
            {
                wavebankheader.Segments[i].Offset = reader.ReadInt32();
                wavebankheader.Segments[i].Length = reader.ReadInt32();
            }

            reader.BaseStream.Seek(wavebankheader.Segments[0].Offset, SeekOrigin.Begin);

            //WAVEBANKDATA:

            wavebankdata.Flags = reader.ReadInt32();
            wavebankdata.EntryCount = reader.ReadInt32();

            if ((wavebankheader.Version == 2) || (wavebankheader.Version == 3))
            {
                wavebankdata.BankName = System.Text.Encoding.UTF8.GetString(reader.ReadBytes(16),0,16).Replace("\0", "");
            }
            else
            {
                wavebankdata.BankName = System.Text.Encoding.UTF8.GetString(reader.ReadBytes(64),0,64).Replace("\0", "");
            }

            BankName = wavebankdata.BankName;

            if (wavebankheader.Version == 1)
            {
                //wavebank_offset = (int)ftell(fd) - file_offset;
                wavebankdata.EntryMetaDataElementSize = 20;
            }
            else
            {
                wavebankdata.EntryMetaDataElementSize = reader.ReadInt32();
                wavebankdata.EntryNameElementSize = reader.ReadInt32();
                wavebankdata.Alignment = reader.ReadInt32();
                wavebank_offset = wavebankheader.Segments[1].Offset; //METADATASEGMENT
            }

            int compact_format;

            if ((wavebankdata.Flags & Flag_Compact) != 0)
            {
                compact_format = reader.ReadInt32();
            }

            int playregion_offset = wavebankheader.Segments[last_segment].Offset;
            if (playregion_offset == 0)
            {
                playregion_offset =
                    wavebank_offset +
                    (wavebankdata.EntryCount * wavebankdata.EntryMetaDataElementSize);
            }
            
            int segidx_entry_name = 2;
            if (wavebankheader.Version >= 42) segidx_entry_name = 3;
            
            int waveentry_offset = wavebankheader.Segments[segidx_entry_name].Offset;
            if ((wavebankheader.Segments[segidx_entry_name].Offset != 0) &&
                (wavebankheader.Segments[segidx_entry_name].Length != 0))
            {
                if (wavebankdata.EntryNameElementSize == -1) wavebankdata.EntryNameElementSize = 0;
                byte[] entry_name = new byte[wavebankdata.EntryNameElementSize + 1];
                entry_name[wavebankdata.EntryNameElementSize] = 0;
            }

            sounds = new SoundEffectInstance[wavebankdata.EntryCount];

            for (int current_entry = 0; current_entry < wavebankdata.EntryCount; current_entry++)
            {
                reader.BaseStream.Seek(wavebank_offset, SeekOrigin.Begin);
                //SHOWFILEOFF;

                //memset(&wavebankentry, 0, sizeof(wavebankentry));
				wavebankentry.LoopRegion.Length = 0;
				wavebankentry.LoopRegion.Offset = 0;

                if ((wavebankdata.Flags & Flag_Compact) != 0)
                {
                    int len = reader.ReadInt32();
                    wavebankentry.Format = wavebankdata.CompactFormat;
                    wavebankentry.PlayRegion.Offset = (len & ((1 << 21) - 1)) * wavebankdata.Alignment;
                    wavebankentry.PlayRegion.Length = (len >> 21) & ((1 << 11) - 1);

                    // workaround because I don't know how to handle the deviation length
                    reader.BaseStream.Seek(wavebank_offset + wavebankdata.EntryMetaDataElementSize, SeekOrigin.Begin);

                    //MYFSEEK(wavebank_offset + wavebankdata.dwEntryMetaDataElementSize); // seek to the next
                    if (current_entry == (wavebankdata.EntryCount - 1))
                    {              // the last track
                        len = wavebankheader.Segments[last_segment].Length;
                    }
                    else
                    {
                        len = ((reader.ReadInt32() & ((1 << 21) - 1)) * wavebankdata.Alignment);
                    }
                    wavebankentry.PlayRegion.Length =
                        len -                               // next offset
                        wavebankentry.PlayRegion.Offset;  // current offset
                    goto wavebank_handle;
                }

                if (wavebankheader.Version == 1)
                {
                    wavebankentry.Format = reader.ReadInt32();
                    wavebankentry.PlayRegion.Offset = reader.ReadInt32();
                    wavebankentry.PlayRegion.Length = reader.ReadInt32();
                    wavebankentry.LoopRegion.Offset = reader.ReadInt32();
                    wavebankentry.LoopRegion.Length = reader.ReadInt32();
                }
                else
                {
                    if (wavebankdata.EntryMetaDataElementSize >= 4) wavebankentry.FlagsAndDuration = reader.ReadInt32();
                    if (wavebankdata.EntryMetaDataElementSize >= 8) wavebankentry.Format = reader.ReadInt32();
                    if (wavebankdata.EntryMetaDataElementSize >= 12) wavebankentry.PlayRegion.Offset = reader.ReadInt32();
                    if (wavebankdata.EntryMetaDataElementSize >= 16) wavebankentry.PlayRegion.Length = reader.ReadInt32();
                    if (wavebankdata.EntryMetaDataElementSize >= 20) wavebankentry.LoopRegion.Offset = reader.ReadInt32();
                    if (wavebankdata.EntryMetaDataElementSize >= 24) wavebankentry.LoopRegion.Length = reader.ReadInt32();
                }

                if (wavebankdata.EntryMetaDataElementSize < 24)
                {                              // work-around
                    if (wavebankentry.PlayRegion.Length != 0)
                    {
                        wavebankentry.PlayRegion.Length = wavebankheader.Segments[last_segment].Length;
                    }

                }// else if(wavebankdata.EntryMetaDataElementSize > sizeof(WaveBankEntry)) {    // skip unused fields
            //   MYFSEEK(wavebank_offset + wavebankdata.EntryMetaDataElementSize);
            //}

                wavebank_handle:
                wavebank_offset += wavebankdata.EntryMetaDataElementSize;
                wavebankentry.PlayRegion.Offset += playregion_offset;
                
                // Parse WAVEBANKMINIWAVEFORMAT
                
                int codec;
                int chans;
                int rate;
                int align;
                int bits;

                if (wavebankheader.Version == 1)
                {         // I'm not 100% sure if the following is correct
                    // version 1:
                    // 1 00000000 000101011000100010 0 001 0
                    // | |         |                 | |   |
                    // | |         |                 | |   wFormatTag
                    // | |         |                 | nChannels
                    // | |         |                 ???
                    // | |         nSamplesPerSec
                    // | wBlockAlign
                    // wBitsPerSample

                    codec = (wavebankentry.Format) & ((1 << 1) - 1);
                    chans = (wavebankentry.Format >> (1)) & ((1 << 3) - 1);
                    rate = (wavebankentry.Format >> (1 + 3 + 1)) & ((1 << 18) - 1);
                    align = (wavebankentry.Format >> (1 + 3 + 1 + 18)) & ((1 << 8) - 1);
                    bits = (wavebankentry.Format >> (1 + 3 + 1 + 18 + 8)) & ((1 << 1) - 1);

                    /*} else if(wavebankheader.dwVersion == 23) { // I'm not 100% sure if the following is correct
                        // version 23:
                        // 1000000000 001011101110000000 001 1
                        // | |        |                  |   |
                        // | |        |                  |   ???
                        // | |        |                  nChannels?
                        // | |        nSamplesPerSec
                        // | ???
                        // !!!UNKNOWN FORMAT!!!

                        //codec = -1;
                        //chans = (wavebankentry.Format >>  1) & ((1 <<  3) - 1);
                        //rate  = (wavebankentry.Format >>  4) & ((1 << 18) - 1);
                        //bits  = (wavebankentry.Format >> 31) & ((1 <<  1) - 1);
                        codec = (wavebankentry.Format                    ) & ((1 <<  1) - 1);
                        chans = (wavebankentry.Format >> (1)             ) & ((1 <<  3) - 1);
                        rate  = (wavebankentry.Format >> (1 + 3)         ) & ((1 << 18) - 1);
                        align = (wavebankentry.Format >> (1 + 3 + 18)    ) & ((1 <<  9) - 1);
                        bits  = (wavebankentry.Format >> (1 + 3 + 18 + 9)) & ((1 <<  1) - 1); */

                }
                else
                {
                    // 0 00000000 000111110100000000 010 01
                    // | |        |                  |   |
                    // | |        |                  |   wFormatTag
                    // | |        |                  nChannels
                    // | |        nSamplesPerSec
                    // | wBlockAlign
                    // wBitsPerSample

                    codec = (wavebankentry.Format) & ((1 << 2) - 1);
                    chans = (wavebankentry.Format >> (2)) & ((1 << 3) - 1);
                    rate = (wavebankentry.Format >> (2 + 3)) & ((1 << 18) - 1);
                    align = (wavebankentry.Format >> (2 + 3 + 18)) & ((1 << 8) - 1);
                    bits = (wavebankentry.Format >> (2 + 3 + 18 + 8)) & ((1 << 1) - 1);
                }
                
                reader.BaseStream.Seek(wavebankentry.PlayRegion.Offset, SeekOrigin.Begin);
                byte[] audiodata = reader.ReadBytes(wavebankentry.PlayRegion.Length);
                
                if (codec == MiniFormatTag_PCM) {
                    
                    //write PCM data into a wav
#if DIRECTX
					SharpDX.Multimedia.WaveFormat waveFormat = new SharpDX.Multimedia.WaveFormat(rate, chans);
					sounds[current_entry] = new SoundEffect(waveFormat, audiodata, 0, audiodata.Length, wavebankentry.LoopRegion.Offset, wavebankentry.LoopRegion.Length).CreateInstance();
#else
					sounds[current_entry] = new SoundEffectInstance(audiodata, rate, chans);
#endif                    
                } else if (codec == MiniForamtTag_WMA) { //WMA or xWMA (or XMA2)
                    byte[] wmaSig = {0x30, 0x26, 0xb2, 0x75, 0x8e, 0x66, 0xcf, 0x11, 0xa6, 0xd9, 0x0, 0xaa, 0x0, 0x62, 0xce, 0x6c};
                    
                    bool isWma = true;
                    for (int i=0; i<wmaSig.Length; i++) {
                        if (wmaSig[i] != audiodata[i]) {
                            isWma = false;
                            break;
                        }
                    }
                    
                    //Let's support m4a data as well for convenience
                    byte[][] m4aSigs = new byte[][] {
                        new byte[] {0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41, 0x20, 0x00, 0x00, 0x02, 0x00},
                        new byte[] {0x00, 0x00, 0x00, 0x20, 0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41, 0x20, 0x00, 0x00, 0x00, 0x00}
                    };
                    
                    bool isM4a = false;
                    for (int i=0; i<m4aSigs.Length; i++) {    
                        byte[] sig = m4aSigs[i];
                        bool matches = true;
                        for (int j=0; j<sig.Length; j++) {
                            if (sig[j] != audiodata[j]) {
                                matches = false;
                                break;
                            }
                        }
                        if (matches) {
                            isM4a = true;
                            break;
                        }
                    }
                    
                    if (isWma || isM4a) {
                        //WMA data can sometimes be played directly
#if DIRECTX
                        throw new NotImplementedException();
#elif !WINRT
                        //hack - NSSound can't play non-wav from data, we have to give a filename
                        string filename = Path.GetTempFileName();
                        if (isWma) {
                            filename = filename.Replace(".tmp", ".wma");
                        } else if (isM4a) {
                            filename = filename.Replace(".tmp", ".m4a");
                        }
                        using (var audioFile = File.Create(filename))
                            audioFile.Write(audiodata, 0, audiodata.Length);
                        
                        sounds[current_entry] = new SoundEffect(filename).CreateInstance();
#else
						throw new NotImplementedException();
#endif
                    } else {
                        //An xWMA or XMA2 file. Can't be played atm :(
                        throw new NotImplementedException();
                    }
#if !DIRECTX
                /* DirectX platforms can use XAudio2 to stream MSADPCM natively.
                 * This code is cross-platform, but the problem is that it just
                 * decodes ALL of the wavedata here. For XAudio2 in particular,
                 * this is probably ludicrous.
                 *
                 * You need to write a DIRECTX ADPCM reader that just loads this
                 * into the SoundEffect. No decoding should be necessary.
                 * -flibit
                 */
                } else if (codec == MiniFormatTag_ADPCM) {
                    using (MemoryStream dataStream = new MemoryStream(audiodata)) {
                        using (BinaryReader source = new BinaryReader(dataStream)) {
                            sounds[current_entry] = new SoundEffectInstance(
                                MSADPCMToPCM.MSADPCM_TO_PCM(source, (short) chans, (short) align),
                                rate,
                                chans
                            );
                        }
                    }
#endif
                } else {
                    throw new NotImplementedException();
                }
                
            }
			
			audioEngine.Wavebanks[BankName] = this;
        }
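The non-version-1 branch above unpacks the whole WAVEBANKMINIWAVEFORMAT from one 32-bit value: 2 bits of format tag, 3 bits of channel count, 18 bits of sample rate, 8 bits of block align and 1 bit of sample width. A small standalone sketch of the same unpacking, handy for checking a single entry by hand (the method name is illustrative):

 static void DumpMiniWaveFormat(int format)
 {
     int codec = format & 0x3;              // 2 bits: wFormatTag, compared against the MiniFormatTag_* constants above
     int chans = (format >> 2) & 0x7;       // 3 bits: nChannels
     int rate  = (format >> 5) & 0x3FFFF;   // 18 bits: nSamplesPerSec
     int align = (format >> 23) & 0xFF;     // 8 bits: wBlockAlign
     int bits  = (format >> 31) & 0x1;      // 1 bit: wBitsPerSample flag
     Console.WriteLine("codec={0} channels={1} rate={2} align={3} bitsFlag={4}",
         codec, chans, rate, align, bits);
 }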
Example #5
        public WaveBank(AudioEngine audioEngine, string nonStreamingWaveBankFilename)
        {
            //XWB PARSING
            //Adapted from MonoXNA
            //Originally adapted from Luigi Auriemma's unxwb

            WaveBankHeader wavebankheader;
            WaveBankData   wavebankdata;
            WaveBankEntry  wavebankentry;

            wavebankdata.EntryNameElementSize = 0;
            wavebankdata.CompactFormat        = 0;
            wavebankdata.Alignment            = 0;
            wavebankdata.BuildTime            = 0;

            wavebankentry.Format            = 0;
            wavebankentry.PlayRegion.Length = 0;
            wavebankentry.PlayRegion.Offset = 0;

            int wavebank_offset = 0;

#if WINRT
            const char notSeparator = '/';
            const char separator    = '\\';
#else
            const char notSeparator = '\\';
            var        separator    = Path.DirectorySeparatorChar;
#endif
            // Check for windows-style directory separator character
            nonStreamingWaveBankFilename = nonStreamingWaveBankFilename.Replace(notSeparator, separator);

#if !ANDROID
            BinaryReader reader = new BinaryReader(TitleContainer.OpenStream(nonStreamingWaveBankFilename));
#else
            Stream       stream = Game.Activity.Assets.Open(nonStreamingWaveBankFilename);
            MemoryStream ms     = new MemoryStream();
            stream.CopyTo(ms);
            stream.Close();
            ms.Position = 0;
            BinaryReader reader = new BinaryReader(ms);
#endif
            reader.ReadBytes(4);

            wavebankheader.Version = reader.ReadInt32();

            int last_segment = 4;
            //if (wavebankheader.Version == 1) goto WAVEBANKDATA;
            if (wavebankheader.Version <= 3)
            {
                last_segment = 3;
            }
            if (wavebankheader.Version >= 42)
            {
                reader.ReadInt32();                                  // skip HeaderVersion
            }
            wavebankheader.Segments = new Segment[5];

            for (int i = 0; i <= last_segment; i++)
            {
                wavebankheader.Segments[i].Offset = reader.ReadInt32();
                wavebankheader.Segments[i].Length = reader.ReadInt32();
            }

            reader.BaseStream.Seek(wavebankheader.Segments[0].Offset, SeekOrigin.Begin);

            //WAVEBANKDATA:

            wavebankdata.Flags      = reader.ReadInt32();
            wavebankdata.EntryCount = reader.ReadInt32();

            if ((wavebankheader.Version == 2) || (wavebankheader.Version == 3))
            {
                wavebankdata.BankName = System.Text.Encoding.UTF8.GetString(reader.ReadBytes(16), 0, 16).Replace("\0", "");
            }
            else
            {
                wavebankdata.BankName = System.Text.Encoding.UTF8.GetString(reader.ReadBytes(64), 0, 64).Replace("\0", "");
            }

            BankName = wavebankdata.BankName;

            if (wavebankheader.Version == 1)
            {
                //wavebank_offset = (int)ftell(fd) - file_offset;
                wavebankdata.EntryMetaDataElementSize = 20;
            }
            else
            {
                wavebankdata.EntryMetaDataElementSize = reader.ReadInt32();
                wavebankdata.EntryNameElementSize     = reader.ReadInt32();
                wavebankdata.Alignment = reader.ReadInt32();
                wavebank_offset        = wavebankheader.Segments[1].Offset; //METADATASEGMENT
            }

            if ((wavebankdata.Flags & Flag_Compact) != 0)
            {
                reader.ReadInt32(); // compact_format
            }

            int playregion_offset = wavebankheader.Segments[last_segment].Offset;
            if (playregion_offset == 0)
            {
                playregion_offset =
                    wavebank_offset +
                    (wavebankdata.EntryCount * wavebankdata.EntryMetaDataElementSize);
            }

            int segidx_entry_name = 2;
            if (wavebankheader.Version >= 42)
            {
                segidx_entry_name = 3;
            }

            if ((wavebankheader.Segments[segidx_entry_name].Offset != 0) &&
                (wavebankheader.Segments[segidx_entry_name].Length != 0))
            {
                if (wavebankdata.EntryNameElementSize == -1)
                {
                    wavebankdata.EntryNameElementSize = 0;
                }
                byte[] entry_name = new byte[wavebankdata.EntryNameElementSize + 1];
                entry_name[wavebankdata.EntryNameElementSize] = 0;
            }

            sounds = new SoundEffectInstance[wavebankdata.EntryCount];

            for (int current_entry = 0; current_entry < wavebankdata.EntryCount; current_entry++)
            {
                reader.BaseStream.Seek(wavebank_offset, SeekOrigin.Begin);
                //SHOWFILEOFF;

                //memset(&wavebankentry, 0, sizeof(wavebankentry));
                wavebankentry.LoopRegion.Length = 0;
                wavebankentry.LoopRegion.Offset = 0;

                if ((wavebankdata.Flags & Flag_Compact) != 0)
                {
                    int len = reader.ReadInt32();
                    wavebankentry.Format            = wavebankdata.CompactFormat;
                    wavebankentry.PlayRegion.Offset = (len & ((1 << 21) - 1)) * wavebankdata.Alignment;
                    wavebankentry.PlayRegion.Length = (len >> 21) & ((1 << 11) - 1);

                    // workaround because I don't know how to handle the deviation length
                    reader.BaseStream.Seek(wavebank_offset + wavebankdata.EntryMetaDataElementSize, SeekOrigin.Begin);

                    //MYFSEEK(wavebank_offset + wavebankdata.dwEntryMetaDataElementSize); // seek to the next
                    if (current_entry == (wavebankdata.EntryCount - 1))
                    {              // the last track
                        len = wavebankheader.Segments[last_segment].Length;
                    }
                    else
                    {
                        len = ((reader.ReadInt32() & ((1 << 21) - 1)) * wavebankdata.Alignment);
                    }
                    wavebankentry.PlayRegion.Length =
                        len -                             // next offset
                        wavebankentry.PlayRegion.Offset;  // current offset
                    goto wavebank_handle;
                }

                if (wavebankheader.Version == 1)
                {
                    wavebankentry.Format            = reader.ReadInt32();
                    wavebankentry.PlayRegion.Offset = reader.ReadInt32();
                    wavebankentry.PlayRegion.Length = reader.ReadInt32();
                    wavebankentry.LoopRegion.Offset = reader.ReadInt32();
                    wavebankentry.LoopRegion.Length = reader.ReadInt32();
                }
                else
                {
                    if (wavebankdata.EntryMetaDataElementSize >= 4)
                    {
                        wavebankentry.FlagsAndDuration = reader.ReadInt32();
                    }
                    if (wavebankdata.EntryMetaDataElementSize >= 8)
                    {
                        wavebankentry.Format = reader.ReadInt32();
                    }
                    if (wavebankdata.EntryMetaDataElementSize >= 12)
                    {
                        wavebankentry.PlayRegion.Offset = reader.ReadInt32();
                    }
                    if (wavebankdata.EntryMetaDataElementSize >= 16)
                    {
                        wavebankentry.PlayRegion.Length = reader.ReadInt32();
                    }
                    if (wavebankdata.EntryMetaDataElementSize >= 20)
                    {
                        wavebankentry.LoopRegion.Offset = reader.ReadInt32();
                    }
                    if (wavebankdata.EntryMetaDataElementSize >= 24)
                    {
                        wavebankentry.LoopRegion.Length = reader.ReadInt32();
                    }
                }

                if (wavebankdata.EntryMetaDataElementSize < 24)
                {                              // work-around
                    if (wavebankentry.PlayRegion.Length != 0)
                    {
                        wavebankentry.PlayRegion.Length = wavebankheader.Segments[last_segment].Length;
                    }
                }// else if(wavebankdata.EntryMetaDataElementSize > sizeof(WaveBankEntry)) {    // skip unused fields
                //   MYFSEEK(wavebank_offset + wavebankdata.EntryMetaDataElementSize);
                //}

wavebank_handle:
                wavebank_offset += wavebankdata.EntryMetaDataElementSize;
                wavebankentry.PlayRegion.Offset += playregion_offset;

                // Parse WAVEBANKMINIWAVEFORMAT

                int codec;
                int chans;
                int rate;
                int align;
                //int bits;

                if (wavebankheader.Version == 1)
                {         // I'm not 100% sure if the following is correct
                    // version 1:
                    // 1 00000000 000101011000100010 0 001 0
                    // | |         |                 | |   |
                    // | |         |                 | |   wFormatTag
                    // | |         |                 | nChannels
                    // | |         |                 ???
                    // | |         nSamplesPerSec
                    // | wBlockAlign
                    // wBitsPerSample

                    codec = (wavebankentry.Format) & ((1 << 1) - 1);
                    chans = (wavebankentry.Format >> (1)) & ((1 << 3) - 1);
                    rate  = (wavebankentry.Format >> (1 + 3 + 1)) & ((1 << 18) - 1);
                    align = (wavebankentry.Format >> (1 + 3 + 1 + 18)) & ((1 << 8) - 1);
                    //bits = (wavebankentry.Format >> (1 + 3 + 1 + 18 + 8)) & ((1 << 1) - 1);

                    /*} else if(wavebankheader.dwVersion == 23) { // I'm not 100% sure if the following is correct
                     *  // version 23:
                     *  // 1000000000 001011101110000000 001 1
                     *  // | |        |                  |   |
                     *  // | |        |                  |   ???
                     *  // | |        |                  nChannels?
                     *  // | |        nSamplesPerSec
                     *  // | ???
                     *  // !!!UNKNOWN FORMAT!!!
                     *
                     *  //codec = -1;
                     *  //chans = (wavebankentry.Format >>  1) & ((1 <<  3) - 1);
                     *  //rate  = (wavebankentry.Format >>  4) & ((1 << 18) - 1);
                     *  //bits  = (wavebankentry.Format >> 31) & ((1 <<  1) - 1);
                     *  codec = (wavebankentry.Format                    ) & ((1 <<  1) - 1);
                     *  chans = (wavebankentry.Format >> (1)             ) & ((1 <<  3) - 1);
                     *  rate  = (wavebankentry.Format >> (1 + 3)         ) & ((1 << 18) - 1);
                     *  align = (wavebankentry.Format >> (1 + 3 + 18)    ) & ((1 <<  9) - 1);
                     *  bits  = (wavebankentry.Format >> (1 + 3 + 18 + 9)) & ((1 <<  1) - 1); */
                }
                else
                {
                    // 0 00000000 000111110100000000 010 01
                    // | |        |                  |   |
                    // | |        |                  |   wFormatTag
                    // | |        |                  nChannels
                    // | |        nSamplesPerSec
                    // | wBlockAlign
                    // wBitsPerSample

                    codec = (wavebankentry.Format) & ((1 << 2) - 1);
                    chans = (wavebankentry.Format >> (2)) & ((1 << 3) - 1);
                    rate  = (wavebankentry.Format >> (2 + 3)) & ((1 << 18) - 1);
                    align = (wavebankentry.Format >> (2 + 3 + 18)) & ((1 << 8) - 1);
                    //bits = (wavebankentry.Format >> (2 + 3 + 18 + 8)) & ((1 << 1) - 1);
                }

                reader.BaseStream.Seek(wavebankentry.PlayRegion.Offset, SeekOrigin.Begin);
                byte[] audiodata = reader.ReadBytes(wavebankentry.PlayRegion.Length);

                if (codec == MiniFormatTag_PCM)
                {
                    //write PCM data into a wav
#if DIRECTX
                    SharpDX.Multimedia.WaveFormat waveFormat = new SharpDX.Multimedia.WaveFormat(rate, chans);
                    sounds[current_entry] = new SoundEffect(waveFormat, audiodata, 0, audiodata.Length, wavebankentry.LoopRegion.Offset, wavebankentry.LoopRegion.Length).CreateInstance();
#else
                    sounds[current_entry] = new SoundEffectInstance(audiodata, rate, chans);
#endif
                }
                else if (codec == MiniForamtTag_WMA)     //WMA or xWMA (or XMA2)
                {
                    byte[] wmaSig = { 0x30, 0x26, 0xb2, 0x75, 0x8e, 0x66, 0xcf, 0x11, 0xa6, 0xd9, 0x0, 0xaa, 0x0, 0x62, 0xce, 0x6c };

                    bool isWma = true;
                    for (int i = 0; i < wmaSig.Length; i++)
                    {
                        if (wmaSig[i] != audiodata[i])
                        {
                            isWma = false;
                            break;
                        }
                    }

                    //Let's support m4a data as well for convenience
                    byte[][] m4aSigs = new byte[][] {
                        new byte[] { 0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41, 0x20, 0x00, 0x00, 0x02, 0x00 },
                        new byte[] { 0x00, 0x00, 0x00, 0x20, 0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41, 0x20, 0x00, 0x00, 0x00, 0x00 }
                    };

                    bool isM4a = false;
                    for (int i = 0; i < m4aSigs.Length; i++)
                    {
                        byte[] sig     = m4aSigs[i];
                        bool   matches = true;
                        for (int j = 0; j < sig.Length; j++)
                        {
                            if (sig[j] != audiodata[j])
                            {
                                matches = false;
                                break;
                            }
                        }
                        if (matches)
                        {
                            isM4a = true;
                            break;
                        }
                    }

                    if (isWma || isM4a)
                    {
                        //WMA data can sometimes be played directly
#if DIRECTX
                        throw new NotImplementedException();
#elif !WINRT
                        //hack - NSSound can't play non-wav from data, we have to give a filename
                        string filename = Path.GetTempFileName();
                        if (isWma)
                        {
                            filename = filename.Replace(".tmp", ".wma");
                        }
                        else if (isM4a)
                        {
                            filename = filename.Replace(".tmp", ".m4a");
                        }
                        using (var audioFile = File.Create(filename))
                            audioFile.Write(audiodata, 0, audiodata.Length);

                        sounds[current_entry] = new SoundEffect(filename).CreateInstance();
#else
                        throw new NotImplementedException();
#endif
                    }
                    else
                    {
                        //An xWMA or XMA2 file. Can't be played atm :(
                        throw new NotImplementedException();
                    }
#if !DIRECTX
                    /* DirectX platforms can use XAudio2 to stream MSADPCM natively.
                     * This code is cross-platform, but the problem is that it just
                     * decodes ALL of the wavedata here. For XAudio2 in particular,
                     * this is probably ludicrous.
                     *
                     * You need to write a DIRECTX ADPCM reader that just loads this
                     * into the SoundEffect. No decoding should be necessary.
                     * -flibit
                     */
                }
                else if (codec == MiniFormatTag_ADPCM)
                {
                    using (MemoryStream dataStream = new MemoryStream(audiodata)) {
                        using (BinaryReader source = new BinaryReader(dataStream)) {
                            sounds[current_entry] = new SoundEffectInstance(
                                MSADPCMToPCM.MSADPCM_TO_PCM(source, (short)chans, (short)align),
                                rate,
                                chans
                                );
                        }
                    }
#endif
                }
                else
                {
                    throw new NotImplementedException();
                }
            }

            audioEngine.Wavebanks[BankName] = this;
        }
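The compact-bank branch above packs each entry into a single 32-bit value: the low 21 bits hold the play-region offset in units of the bank alignment, and the upper 11 bits carry a length field that this port works around by deriving the length from the next entry's offset instead. A small sketch of that decoding, as an illustration only:

 // Decode one compact wave-bank entry; `alignment` is assumed to come from wavebankdata.Alignment.
 static void DecodeCompactEntry(int entry, int alignment)
 {
     int offsetBytes = (entry & ((1 << 21) - 1)) * alignment;  // low 21 bits, scaled by the alignment
     int lengthField = (entry >> 21) & ((1 << 11) - 1);        // upper 11 bits ("deviation" length)
     Console.WriteLine("offset={0} lengthField={1}", offsetBytes, lengthField);
 }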
Example #6
        protected internal override SoundEffect Read(ContentReader input, SoundEffect existingInstance)
        {
            // XNB format for SoundEffect...
            //
            // Byte [format size]	Format	WAVEFORMATEX structure
            // UInt32	Data size
            // Byte [data size]	Data	Audio waveform data
            // Int32	Loop start	In bytes (start must be format block aligned)
            // Int32	Loop length	In bytes (length must be format block aligned)
            // Int32	Duration	In milliseconds

            // WAVEFORMATEX structure...
            //
            //typedef struct {
            //  WORD  wFormatTag;       // byte[0]  +2
            //  WORD  nChannels;        // byte[2]  +2
            //  DWORD nSamplesPerSec;   // byte[4]  +4
            //  DWORD nAvgBytesPerSec;  // byte[8]  +4
            //  WORD  nBlockAlign;      // byte[12] +2
            //  WORD  wBitsPerSample;   // byte[14] +2
            //  WORD  cbSize;           // byte[16] +2
            //} WAVEFORMATEX;

            byte[] header     = input.ReadBytes(input.ReadInt32());
            byte[] data       = input.ReadBytes(input.ReadInt32());
            int    loopStart  = input.ReadInt32();
            int    loopLength = input.ReadInt32();
            int    num        = input.ReadInt32();

#if DIRECTX
            var count      = data.Length;
            var format     = (int)BitConverter.ToUInt16(header, 0);
            var sampleRate = (int)BitConverter.ToUInt16(header, 4);
            var channels   = BitConverter.ToUInt16(header, 2);
            //var avgBPS = (int)BitConverter.ToUInt16(header, 8);
            var blockAlignment = (int)BitConverter.ToUInt16(header, 12);
            //var bps = (int)BitConverter.ToUInt16(header, 14);

            SharpDX.Multimedia.WaveFormat waveFormat;
            if (format == 1)
            {
                waveFormat = new SharpDX.Multimedia.WaveFormat(sampleRate, channels);
            }
            else if (format == 2)
            {
                waveFormat = new SharpDX.Multimedia.WaveFormatAdpcm(sampleRate, channels, blockAlignment);
            }
            else
            {
                throw new NotSupportedException("Unsupported wave format!");
            }

            return(new SoundEffect(waveFormat, data, 0, count, loopStart, loopLength)
            {
                Name = input.AssetName,
            });
#else
            if (header[0] == 2 && header[1] == 0)
            {
                // We've found MSADPCM data! Let's decode it here.
                using (MemoryStream origDataStream = new MemoryStream(data))
                {
                    using (BinaryReader reader = new BinaryReader(origDataStream))
                    {
                        byte[] newData = MSADPCMToPCM.MSADPCM_TO_PCM(
                            reader,
                            header[2],
                            (short)((header[12] / header[2]) - 22)
                            );
                        data = newData;
                    }
                }

                // This is PCM data now!
                header[0] = 1;
            }

            int sampleRate = (
                (header[4]) +
                (header[5] << 8) +
                (header[6] << 16) +
                (header[7] << 24)
                );

            return(new SoundEffect(
                       input.AssetName,
                       data,
                       sampleRate,
                       (header[2] == 2) ? AudioChannels.Stereo : AudioChannels.Mono,
                       loopStart,
                       loopLength
                       ));
#endif
        }
        protected internal override SoundEffect Read(ContentReader input, SoundEffect existingInstance)
        {
            // XNB format for SoundEffect...
            //
            // Byte [format size]	Format	WAVEFORMATEX structure
            // UInt32	Data size
            // Byte [data size]	Data	Audio waveform data
            // Int32	Loop start	In bytes (start must be format block aligned)
            // Int32	Loop length	In bytes (length must be format block aligned)
            // Int32	Duration	In milliseconds

            // WAVEFORMATEX structure...
            //
            //typedef struct {
            //  WORD  wFormatTag;       // byte[0]  +2
            //  WORD  nChannels;        // byte[2]  +2
            //  DWORD nSamplesPerSec;   // byte[4]  +4
            //  DWORD nAvgBytesPerSec;  // byte[8]  +4
            //  WORD  nBlockAlign;      // byte[12] +2
            //  WORD  wBitsPerSample;   // byte[14] +2
            //  WORD  cbSize;           // byte[16] +2
            //} WAVEFORMATEX;

            byte[] header     = input.ReadBytes(input.ReadInt32());
            byte[] data       = input.ReadBytes(input.ReadInt32());
            int    loopStart  = input.ReadInt32();
            int    loopLength = input.ReadInt32();
            int    num        = input.ReadInt32();

#if WINRT
            var count      = data.Length;
            var format     = (int)BitConverter.ToUInt16(header, 0);
            var sampleRate = (int)BitConverter.ToUInt16(header, 4);
            var channels   = BitConverter.ToUInt16(header, 2);
            //var avgBPS = (int)BitConverter.ToUInt16(header, 8);
            var blockAlignment = (int)BitConverter.ToUInt16(header, 12);
            //var bps = (int)BitConverter.ToUInt16(header, 14);

            SharpDX.Multimedia.WaveFormat waveFormat;
            if (format == 1)
            {
                waveFormat = new SharpDX.Multimedia.WaveFormat(sampleRate, channels);
            }
            else if (format == 2)
            {
                waveFormat = new SharpDX.Multimedia.WaveFormatAdpcm(sampleRate, channels, blockAlignment);
            }
            else
            {
                throw new NotImplementedException("Unsupported wave format!");
            }

            return(new SoundEffect(waveFormat, data, 0, count, loopStart, loopLength)
            {
                Name = input.AssetName,
            });
#else
            byte[] soundData = null;
            // Proper use of "using" correctly disposes of BinaryWriter which in turn disposes the underlying stream
            MemoryStream mStream = new MemoryStream(20 + header.Length + 8 + data.Length);
            using (BinaryWriter writer = new BinaryWriter(mStream))
            {
                writer.Write("RIFF".ToCharArray());
                writer.Write((int)(20 + header.Length + data.Length));
                writer.Write("WAVE".ToCharArray());

                //header can be written as-is
                writer.Write("fmt ".ToCharArray());
                writer.Write(header.Length);
                writer.Write(header);

                writer.Write("data".ToCharArray());
                writer.Write((int)data.Length);
                writer.Write(data);

                // Copy the data to an array before disposing the stream
                soundData = mStream.ToArray();
            }
            if (soundData == null)
            {
                throw new ContentLoadException("Failed to load SoundEffect");
            }
            return(new SoundEffect(input.AssetName, soundData));
#endif
        }
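Both #if branches above read the raw WAVEFORMATEX bytes at fixed offsets. As a reading aid, a small diagnostic sketch that names those offsets (matching the struct layout in the comment) could look like the following; it is illustrative only and reads the full DWORD for the sample rate:

 static void DumpWaveFormatEx(byte[] header)
 {
     int formatTag     = BitConverter.ToUInt16(header, 0);       // wFormatTag (1 = PCM, 2 = MS-ADPCM)
     int channels      = BitConverter.ToUInt16(header, 2);       // nChannels
     int sampleRate    = (int)BitConverter.ToUInt32(header, 4);  // nSamplesPerSec
     int blockAlign    = BitConverter.ToUInt16(header, 12);      // nBlockAlign
     int bitsPerSample = BitConverter.ToUInt16(header, 14);      // wBitsPerSample
     Console.WriteLine("tag={0} channels={1} rate={2} align={3} bits={4}",
         formatTag, channels, sampleRate, blockAlign, bitsPerSample);
 }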