Example #1
 void ISpEngineSite.LoadResource(string uri, ref string mediaType, out IStream stream)
 {
     mediaType = null;
     try
     {
         // Load the resource and peek at its wave header to work out the mime type.
         Stream       stream2    = _site.LoadResource(new Uri(uri, UriKind.RelativeOrAbsolute), mediaType);
         BinaryReader br         = new BinaryReader(stream2);
         byte[]       waveFormat = AudioBase.GetWaveFormat(br);
         mediaType = null;
         if (waveFormat != null)
         {
             WAVEFORMATEX wAVEFORMATEX = WAVEFORMATEX.ToWaveHeader(waveFormat);
             WaveFormatId wFormatTag   = (WaveFormatId)wAVEFORMATEX.wFormatTag;
             // PCM, A-law and mu-law content is reported as plain wave audio.
             if (wFormatTag == WaveFormatId.Pcm || wFormatTag == WaveFormatId.Alaw || wFormatTag == WaveFormatId.Mulaw)
             {
                 mediaType = "audio/x-wav";
             }
         }
         // Rewind so the engine reads the stream from the beginning.
         stream2.Position = 0L;
         stream           = new SpStreamWrapper(stream2);
     }
     catch
     {
         stream = null;
     }
 }
Example #2
        /// <summary>
        /// Load a file either from a local network or from the Internet.
        /// </summary>
        void ISpEngineSite.LoadResource(string uri, ref string mediaType, out IStream stream)
        {
            mediaType = null;
#pragma warning disable 56518 // BinaryReader can't be disposed because underlying stream still in use.
            try
            {
                // Get the mime type
                Stream       localStream = _site.LoadResource(new Uri(uri, UriKind.RelativeOrAbsolute), mediaType);
                BinaryReader reader      = new(localStream);
                byte[]       waveFormat  = System.Speech.Internal.Synthesis.AudioBase.GetWaveFormat(reader);
                mediaType = null;
                if (waveFormat != null)
                {
                    WAVEFORMATEX hdr = WAVEFORMATEX.ToWaveHeader(waveFormat);
                    switch ((WaveFormatId)hdr.wFormatTag)
                    {
                    case WaveFormatId.Alaw:
                    case WaveFormatId.Mulaw:
                    case WaveFormatId.Pcm:
                        mediaType = "audio/x-wav";
                        break;
                    }
                }
                localStream.Position = 0;
                stream = new SpStreamWrapper(localStream);
            }
            catch
            {
                stream = null;
            }
#pragma warning restore 56518
        }
Example #3
 internal override void Begin(byte[] wfx)
 {
     if (_deviceOpen)
     {
         throw new InvalidOperationException();
     }
     // Cache the byte rate and block alignment from the wave format blob.
     WAVEFORMATEX.AvgBytesPerSec(wfx, out _nAvgBytesPerSec, out _blockAlign);
     MMSYSERR mMSYSERR;

     lock (_noWriteOutLock)
     {
         // 196608u == 0x00030000 (CALLBACK_FUNCTION): route waveOut notifications to _delegate.
         mMSYSERR = SafeNativeMethods.waveOutOpen(ref _hwo, _curDevice, wfx, _delegate, IntPtr.Zero, 196608u);
         if (_fPaused && mMSYSERR == MMSYSERR.NOERROR)
         {
             mMSYSERR = SafeNativeMethods.waveOutPause(_hwo);
         }
         _aborted    = false;
         _deviceOpen = true;
     }
     if (mMSYSERR != MMSYSERR.NOERROR)
     {
         throw new AudioException(mMSYSERR);
     }
     _bytesWritten = 0;
     // Nothing queued yet, so signal the "queue empty" event.
     _evt.Set();
 }
Example #4
        /// <summary>
        /// Create an instance of AudioFileOut.
        /// </summary>
        internal AudioFileOut(Stream stream, SpeechAudioFormatInfo formatInfo, bool headerInfo, IAsyncDispatch asyncDispatch)
        {
            _asyncDispatch       = asyncDispatch;
            _stream              = stream;
            _startStreamPosition = _stream.Position;
            _hasHeader           = headerInfo;

            _wfxOut = new WAVEFORMATEX();
            // if we have a formatInfo object, format conversion may be necessary
            if (formatInfo != null)
            {
                // Build the Wave format from the formatInfo
                _wfxOut.wFormatTag     = (short)formatInfo.EncodingFormat;
                _wfxOut.wBitsPerSample = (short)formatInfo.BitsPerSample;
                _wfxOut.nSamplesPerSec = formatInfo.SamplesPerSecond;
                _wfxOut.nChannels      = (short)formatInfo.ChannelCount;
            }
            else
            {
                // Set the default values
                _wfxOut = WAVEFORMATEX.Default;
            }
            _wfxOut.nBlockAlign     = (short)(_wfxOut.nChannels * _wfxOut.wBitsPerSample / 8);
            _wfxOut.nAvgBytesPerSec = _wfxOut.wBitsPerSample * _wfxOut.nSamplesPerSec * _wfxOut.nChannels / 8;
        }
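
Note: the two derived fields above follow the standard PCM arithmetic, nBlockAlign = nChannels × wBitsPerSample / 8 and nAvgBytesPerSec = nBlockAlign × nSamplesPerSec. A minimal standalone sketch of that calculation (the 16-bit, 22,050 Hz mono values here are illustrative, not taken from the constructor):

    using System;

    // Illustrative format values: 16-bit mono PCM at 22,050 samples per second.
    short nChannels = 1, wBitsPerSample = 16;
    int   nSamplesPerSec = 22050;

    short nBlockAlign     = (short)(nChannels * wBitsPerSample / 8);           // 2 bytes per sample frame
    int   nAvgBytesPerSec = wBitsPerSample * nSamplesPerSec * nChannels / 8;   // 44,100 bytes per second

    Console.WriteLine($"nBlockAlign={nBlockAlign}, nAvgBytesPerSec={nAvgBytesPerSec}");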
Example #5
        internal byte[] WaveFormat(byte[] targetWaveFormat)
        {
            // Get the Wave header if it has not been set by the user
            if (targetWaveFormat == null && _waveFormat == null)
            {
                // The registry values contain a default rate
                if (VoiceInfo.SupportedAudioFormats.Count > 0)
                {
                    // Create the array of bytes containing the format
                    targetWaveFormat = VoiceInfo.SupportedAudioFormats[0].WaveFormat;
                }
            }

            // No input specified and we already got the default
            if (targetWaveFormat == null && _waveFormat != null)
            {
                return(_waveFormat);
            }

            // New waveFormat provided?
            if (_waveFormat == null || !Array.Equals(targetWaveFormat, _waveFormat))
            {
                IntPtr   waveFormat   = IntPtr.Zero;
                GCHandle targetFormat = new();

                if (targetWaveFormat != null)
                {
                    targetFormat = GCHandle.Alloc(targetWaveFormat, GCHandleType.Pinned);
                }
                try
                {
                    waveFormat = _engine.GetOutputFormat(targetWaveFormat != null ? targetFormat.AddrOfPinnedObject() : IntPtr.Zero);
                }
                finally
                {
                    if (targetWaveFormat != null)
                    {
                        targetFormat.Free();
                    }
                }

                if (waveFormat != IntPtr.Zero)
                {
                    _waveFormat = WAVEFORMATEX.ToBytes(waveFormat);

                    // Free the buffer
                    Marshal.FreeCoTaskMem(waveFormat);
                }
                else
                {
                    _waveFormat = WAVEFORMATEX.Default.ToBytes();
                }
            }
            return(_waveFormat);
        }
Example #6
 internal override void Begin(byte[] wfx)
 {
     if (_deviceOpen)
     {
         throw new InvalidOperationException();
     }
     // Set up the PCM conversion between the engine format and the output format.
     _wfxIn        = WAVEFORMATEX.ToWaveHeader(wfx);
     _doConversion = _pcmConverter.PrepareConverter(ref _wfxIn, ref _wfxOut);
     // On the first write, emit a wave header with a zero-length data chunk.
     if (_totalByteWrittens == 0 && _hasHeader)
     {
         AudioBase.WriteWaveHeader(_stream, _wfxOut, _startStreamPosition, 0);
     }
     _bytesWritten = 0;
     _aborted      = false;
     _deviceOpen   = true;
 }
Example #7
        internal static void WriteWaveHeader(Stream stream, WAVEFORMATEX waveEx, long position, int cData)
        {
            RIFFHDR  riff    = new(0);
            BLOCKHDR block   = new(0);
            DATAHDR  dataHdr = new(0);

            int cRiff    = Marshal.SizeOf<RIFFHDR>();
            int cBlock   = Marshal.SizeOf<BLOCKHDR>();
            int cWaveEx  = waveEx.Length; // Marshal.SizeOf (waveEx); // The CLR automatically pads the waveEx structure to a dword boundary. Force 16.
            int cDataHdr = Marshal.SizeOf<DATAHDR>();

            int total = cRiff + cBlock + cWaveEx + cDataHdr;

            using (MemoryStream memStream = new())
            {
                BinaryWriter bw = new(memStream);
                try
                {
                    // Write the RIFF section
                    riff._len = total + cData - 8 /* - cRiff*/; // for the "WAVE" 4 characters
                    bw.Write(riff._id);
                    bw.Write(riff._len);
                    bw.Write(riff._type);

                    // Write the wave header section
                    block._len = cWaveEx;
                    bw.Write(block._id);
                    bw.Write(block._len);

                    // Write the FormatEx structure
                    bw.Write(waveEx.ToBytes());
                    //bw.Write (waveEx.cbSize);

                    // Write the data section
                    dataHdr._len = cData;
                    bw.Write(dataHdr._id);
                    bw.Write(dataHdr._len);

                    stream.Seek(position, SeekOrigin.Begin);
                    stream.Write(memStream.GetBuffer(), 0, (int)memStream.Length);
                }
                finally
                {
                    ((IDisposable)bw).Dispose();
                }
            }
        }
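
Note: with the usual 16-byte format chunk, the sizes above add up to the canonical 44-byte WAV header, and the subtraction of 8 keeps the leading "RIFF" tag and its 4-byte length field out of the declared chunk length. A small sketch of that arithmetic (constants restated here for illustration, not read from the library):

    using System;

    // Header sizes, assuming the 16-byte WAVEFORMATEX used above (cbSize == 0).
    const int cRiff    = 12; // "RIFF" + 4-byte length + "WAVE"
    const int cBlock   = 8;  // "fmt " + 4-byte length
    const int cWaveEx  = 16; // format chunk payload
    const int cDataHdr = 8;  // "data" + 4-byte length

    int cData   = 0;                                   // no samples written yet
    int total   = cRiff + cBlock + cWaveEx + cDataHdr; // 44-byte header
    int riffLen = total + cData - 8;                   // 36: everything after "RIFF" and its length field

    Console.WriteLine($"header bytes = {total}, RIFF chunk length = {riffLen}");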
Example #8
        internal static WAVEFORMATEX ToWaveHeader(byte[] waveHeader)
        {
            // Pin the managed buffer and read the WAVEFORMATEX fields at their native offsets.
            GCHandle     gCHandle = GCHandle.Alloc(waveHeader, GCHandleType.Pinned);
            IntPtr       ptr      = gCHandle.AddrOfPinnedObject();
            WAVEFORMATEX result   = default(WAVEFORMATEX);

            result.wFormatTag      = Marshal.ReadInt16(ptr);
            result.nChannels       = Marshal.ReadInt16(ptr, 2);
            result.nSamplesPerSec  = Marshal.ReadInt32(ptr, 4);
            result.nAvgBytesPerSec = Marshal.ReadInt32(ptr, 8);
            result.nBlockAlign     = Marshal.ReadInt16(ptr, 12);
            result.wBitsPerSample  = Marshal.ReadInt16(ptr, 14);
            result.cbSize          = Marshal.ReadInt16(ptr, 16);
            // Only plain 16-byte formats (no extra codec bytes) are accepted.
            if (result.cbSize != 0)
            {
                throw new InvalidOperationException();
            }
            gCHandle.Free();
            return(result);
        }
Example #9
 internal byte[] WaveFormat(byte[] targetWaveFormat)
 {
     if (targetWaveFormat == null && _waveFormat == null && VoiceInfo.SupportedAudioFormats.Count > 0)
     {
         targetWaveFormat = VoiceInfo.SupportedAudioFormats[0].WaveFormat;
     }
     if (targetWaveFormat == null && _waveFormat != null)
     {
         return(_waveFormat);
     }
     if (_waveFormat == null || !object.Equals(targetWaveFormat, _waveFormat))
     {
         IntPtr   intPtr   = IntPtr.Zero;
         GCHandle gCHandle = default(GCHandle);
         if (targetWaveFormat != null)
         {
             gCHandle = GCHandle.Alloc(targetWaveFormat, GCHandleType.Pinned);
         }
         try
         {
             intPtr = _engine.GetOutputFormat((targetWaveFormat != null) ? gCHandle.AddrOfPinnedObject() : IntPtr.Zero);
         }
         finally
         {
             if (targetWaveFormat != null)
             {
                 gCHandle.Free();
             }
         }
         if (intPtr != IntPtr.Zero)
         {
             _waveFormat = WAVEFORMATEX.ToBytes(intPtr);
             Marshal.FreeCoTaskMem(intPtr);
         }
         else
         {
             _waveFormat = WAVEFORMATEX.Default.ToBytes();
         }
     }
     return(_waveFormat);
 }
Example #10
 internal AudioFileOut(Stream stream, SpeechAudioFormatInfo formatInfo, bool headerInfo, IAsyncDispatch asyncDispatch)
 {
     _asyncDispatch       = asyncDispatch;
     _stream              = stream;
     _startStreamPosition = _stream.Position;
     _hasHeader           = headerInfo;
     _wfxOut              = default(WAVEFORMATEX);
     if (formatInfo != null)
     {
         _wfxOut.wFormatTag     = (short)formatInfo.EncodingFormat;
         _wfxOut.wBitsPerSample = (short)formatInfo.BitsPerSample;
         _wfxOut.nSamplesPerSec = formatInfo.SamplesPerSecond;
         _wfxOut.nChannels      = (short)formatInfo.ChannelCount;
     }
     else
     {
         _wfxOut = WAVEFORMATEX.Default;
     }
     _wfxOut.nBlockAlign     = (short)(_wfxOut.nChannels * _wfxOut.wBitsPerSample / 8);
     _wfxOut.nAvgBytesPerSec = _wfxOut.wBitsPerSample * _wfxOut.nSamplesPerSec * _wfxOut.nChannels / 8;
 }
Example #11
        /// <summary>
        /// Begin to play
        /// </summary>
        internal override void Begin(byte[] wfx)
        {
            if (_deviceOpen)
            {
                System.Diagnostics.Debug.Assert(false);
                throw new InvalidOperationException();
            }

            // Get the audio format if conversion is needed
            _wfxIn        = WAVEFORMATEX.ToWaveHeader(wfx);
            _doConversion = _pcmConverter.PrepareConverter(ref _wfxIn, ref _wfxOut);

            if (_totalByteWrittens == 0 && _hasHeader)
            {
                WriteWaveHeader(_stream, _wfxOut, _startStreamPosition, 0);
            }

            _bytesWritten = 0;

            // set the flags
            _aborted    = false;
            _deviceOpen = true;
        }
Example #12
        /// <summary>
        /// Begin to play
        /// </summary>
        internal override void Begin(byte[] wfx)
        {
            if (_deviceOpen)
            {
                System.Diagnostics.Debug.Assert(false);
                throw new InvalidOperationException();
            }

            // Get the alignments values
            WAVEFORMATEX.AvgBytesPerSec(wfx, out _nAvgBytesPerSec, out _blockAlign);

            MMSYSERR result;

            lock (_noWriteOutLock)
            {
                result = SafeNativeMethods.waveOutOpen(ref _hwo, _curDevice, wfx, _delegate, IntPtr.Zero, SafeNativeMethods.CALLBACK_FUNCTION);

                if (_fPaused && result == MMSYSERR.NOERROR)
                {
                    result = SafeNativeMethods.waveOutPause(_hwo);
                }
                // set the flags
                _aborted    = false;
                _deviceOpen = true;
            }

            if (result != MMSYSERR.NOERROR)
            {
                throw new AudioException(result);
            }

            // Reset the counter for the number of bytes written so far
            _bytesWritten = 0;

            // Nothing in the queue
            _evt.Set();
        }
Example #13
        internal static void WriteWaveHeader(Stream stream, WAVEFORMATEX waveEx, long position, int cData)
        {
            RIFFHDR  rIFFHDR  = new RIFFHDR(0);
            BLOCKHDR bLOCKHDR = new BLOCKHDR(0);
            DATAHDR  dATAHDR  = new DATAHDR(0);
            int      num      = Marshal.SizeOf((object)rIFFHDR);
            int      num2     = Marshal.SizeOf((object)bLOCKHDR);
            int      length   = waveEx.Length;
            int      num3     = Marshal.SizeOf((object)dATAHDR);
            int      num4     = num + num2 + length + num3;

            using (MemoryStream memoryStream = new MemoryStream())
            {
                BinaryWriter binaryWriter = new BinaryWriter(memoryStream);
                try
                {
                    rIFFHDR._len = num4 + cData - 8;
                    binaryWriter.Write(rIFFHDR._id);
                    binaryWriter.Write(rIFFHDR._len);
                    binaryWriter.Write(rIFFHDR._type);
                    bLOCKHDR._len = length;
                    binaryWriter.Write(bLOCKHDR._id);
                    binaryWriter.Write(bLOCKHDR._len);
                    binaryWriter.Write(waveEx.ToBytes());
                    dATAHDR._len = cData;
                    binaryWriter.Write(dATAHDR._id);
                    binaryWriter.Write(dATAHDR._len);
                    stream.Seek(position, SeekOrigin.Begin);
                    stream.Write(memoryStream.GetBuffer(), 0, (int)memoryStream.Length);
                }
                finally
                {
                    ((IDisposable)binaryWriter).Dispose();
                }
            }
        }
Example #14
 internal void PlayWaveFile(AudioData audio)
 {
     try
     {
         if (!string.IsNullOrEmpty(audio._mimeType))
         {
             // Headerless audio: assume 8 kHz, 8-bit mono and pick the codec from the mime type.
             WAVEFORMATEX wAVEFORMATEX = default(WAVEFORMATEX);
             wAVEFORMATEX.nChannels       = 1;
             wAVEFORMATEX.nSamplesPerSec  = 8000;
             wAVEFORMATEX.nAvgBytesPerSec = 8000;
             wAVEFORMATEX.nBlockAlign     = 1;
             wAVEFORMATEX.wBitsPerSample  = 8;
             wAVEFORMATEX.cbSize          = 0;
             string mimeType = audio._mimeType;
             if (!(mimeType == "audio/basic"))
             {
                 if (!(mimeType == "audio/x-alaw-basic"))
                 {
                     throw new FormatException(SR.Get(SRID.UnknownMimeFormat));
                 }
                 wAVEFORMATEX.wFormatTag = 6; // WAVE_FORMAT_ALAW
             }
             else
             {
                 wAVEFORMATEX.wFormatTag = 7; // WAVE_FORMAT_MULAW
             }
             Begin(wAVEFORMATEX.ToBytes());
             try
             {
                 byte[] array = new byte[(int)audio._stream.Length];
                 audio._stream.Read(array, 0, array.Length);
                 Play(array);
             }
             finally
             {
                 WaitUntilDone();
                 End();
             }
         }
         else
         {
             BinaryReader binaryReader = new BinaryReader(audio._stream);
             try
             {
                 byte[] waveFormat = GetWaveFormat(binaryReader);
                 if (waveFormat == null)
                 {
                     throw new FormatException(SR.Get(SRID.NotValidAudioFile, audio._uri.ToString()));
                 }
                 Begin(waveFormat);
                 try
                 {
                     // Walk the RIFF chunks: play "data" chunks and skip everything else.
                     while (true)
                     {
                         DATAHDR dATAHDR = default(DATAHDR);
                         if (audio._stream.Position + 8 >= audio._stream.Length)
                         {
                             break;
                         }
                         dATAHDR._id  = binaryReader.ReadUInt32();
                         dATAHDR._len = binaryReader.ReadInt32();
                         if (dATAHDR._id == 1635017060) // 0x61746164 == "data"
                         {
                             byte[] buffer = Helpers.ReadStreamToByteArray(audio._stream, dATAHDR._len);
                             Play(buffer);
                         }
                         else
                         {
                             audio._stream.Seek(dATAHDR._len, SeekOrigin.Current);
                         }
                     }
                 }
                 finally
                 {
                     WaitUntilDone();
                     End();
                 }
             }
             finally
             {
                 ((IDisposable)binaryReader).Dispose();
             }
         }
     }
     finally
     {
         audio.Dispose();
     }
 }
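
Note: the magic values in the decompiled branches above are the standard ones: format tags 6 and 7 are the registered A-law and mu-law codes (matching "audio/x-alaw-basic" and "audio/basic"), and 1635017060 is the little-endian reading of the "data" chunk tag. A quick standalone check of the chunk id (assumes a little-endian platform, as the RIFF reader above does):

    using System;
    using System.Text;

    // "data" interpreted as a little-endian 32-bit integer.
    uint dataFourCc = BitConverter.ToUInt32(Encoding.ASCII.GetBytes("data"), 0);
    Console.WriteLine($"{dataFourCc} (0x{dataFourCc:X8})"); // 1635017060 (0x61746164)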