Example #1
        /// <summary>
        /// Creates a WaveStream to resample using the DMO Resampler
        /// </summary>
        /// <param name="inputProvider">Input provider</param>
        /// <param name="outputFormat">Desired output format</param>
        public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
        {
            this.inputProvider = inputProvider;
            this.inputStream = inputProvider as WaveStream;
            this.outputFormat = outputFormat;
            this.dmoResampler = new DmoResampler();
            if (!dmoResampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
            {
                throw new ArgumentException("Unsupported Input Stream format", "inputStream");
            }

            dmoResampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
            if (!dmoResampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
            {
                throw new ArgumentException("Unsupported Output Stream format", "outputStream");
            }

            dmoResampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
            if (inputStream != null)
            {
                position = InputToOutputPosition(inputStream.Position);
            }
            this.inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
            this.outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
        }
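A minimal usage sketch for this constructor (the file names and the 48 kHz stereo float target format are illustrative assumptions; NAudio's WaveFileReader and WaveFileWriter are used for the I/O):
        // Hypothetical example: resample a WAV file to 48 kHz IEEE float stereo
        using (var reader = new WaveFileReader("input.wav")) // assumed input path
        using (var resampled = new ResamplerDmoStream(reader, WaveFormat.CreateIeeeFloatWaveFormat(48000, 2)))
        {
            WaveFileWriter.CreateWaveFile("output.wav", resampled); // assumed output path
        }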
 /// <summary>
 /// Constructs a new MediaFoundationTransform wrapper
 /// Will read one second at a time
 /// </summary>
 /// <param name="sourceProvider">The source provider for input data to the transform</param>
 /// <param name="outputFormat">The desired output format</param>
 public MediaFoundationTransform(IWaveProvider sourceProvider, WaveFormat outputFormat)
 {
     this.outputWaveFormat = outputFormat;
     this.sourceProvider = sourceProvider;
     sourceBuffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond];
     outputBuffer = new byte[outputWaveFormat.AverageBytesPerSecond + outputWaveFormat.BlockAlign]; // we will grow this buffer if needed, but try to make something big enough
 }
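To make the one-second buffer sizing above concrete, here is a small sketch of what AverageBytesPerSecond works out to (the 44.1 kHz stereo IEEE float format is an illustrative assumption):
 // Illustrative: one second of 44.1 kHz stereo IEEE float audio
 var fmt = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
 // 44100 frames * 2 channels * 4 bytes = 352800 bytes, the size of sourceBuffer for such a source
 Console.WriteLine(fmt.AverageBytesPerSecond); // 352800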
Example #3
 /// <summary>
 /// Creates a new 32 bit WaveMixerStream
 /// </summary>
 public WaveMixerStream32()
 {
     AutoStop = true;
     waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
     bytesPerSample = 4;
     inputStreams = new List<WaveStream>();
     inputsLock = new object();
 }
 /// <summary>
 /// Creates a new MixingSampleProvider, with no inputs, but a specified WaveFormat
 /// </summary>
 /// <param name="waveFormat">The WaveFormat of this mixer. All inputs must be in this format</param>
 public MixingSampleProvider(WaveFormat waveFormat)
 {
     if (waveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
     {
         throw new ArgumentException("Mixer wave format must be IEEE float");
     }
     this.sources = new List<ISampleProvider>();
     this.waveFormat = waveFormat;
 }
 /// <summary>
 /// Initializes a new instance of MonoToStereoSampleProvider
 /// </summary>
 /// <param name="source">Source sample provider</param>
 public MonoToStereoSampleProvider(ISampleProvider source)
 {
     if (source.WaveFormat.Channels != 1)
     {
         throw new ArgumentException("Source must be mono");
     }
     this.source = source;
     this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(source.WaveFormat.SampleRate, 2);
 }
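A hedged sketch combining the two providers above: a hypothetical mono source is widened to stereo and added to the mixer, which requires all inputs to match its WaveFormat:
 // Hypothetical wiring: monoSource is some 44.1 kHz mono ISampleProvider
 var mixer = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));
 mixer.AddMixerInput(new MonoToStereoSampleProvider(monoSource)); // now 44.1 kHz stereo float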
Example #6
 /// <summary>
 /// Creates a new ACM stream to convert one format to another, using a 
 /// specified driver identifier and wave filter
 /// </summary>
 /// <param name="driverId">the driver identifier</param>
 /// <param name="sourceFormat">the source format</param>
 /// <param name="waveFilter">the wave filter</param>
 public AcmStream(IntPtr driverId, WaveFormat sourceFormat, WaveFilter waveFilter)
 {
     int sourceBufferSize = Math.Max(16384, sourceFormat.AverageBytesPerSecond);
     this.sourceFormat = sourceFormat;
     sourceBufferSize -= (sourceBufferSize % sourceFormat.BlockAlign);
     MmException.Try(AcmInterop.acmDriverOpen(out driverHandle, driverId, 0), "acmDriverOpen");
     MmException.Try(AcmInterop.acmStreamOpen(out streamHandle, driverHandle,
                   sourceFormat, sourceFormat, waveFilter, IntPtr.Zero, IntPtr.Zero, AcmStreamOpenFlags.NonRealTime), "acmStreamOpen");
     streamHeader = new AcmStreamHeader(streamHandle, sourceBufferSize, SourceToDest(sourceBufferSize));
 }
        /// <summary>
        /// Constructs a new resampler
        /// </summary>
        /// <param name="source">Source to resample</param>
        /// <param name="newSampleRate">Desired output sample rate</param>
        public WdlResamplingSampleProvider(ISampleProvider source, int newSampleRate)
        {
            channels = source.WaveFormat.Channels;
            outFormat = WaveFormat.CreateIeeeFloatWaveFormat(newSampleRate, channels);
            this.source = source;

            resampler = new WdlResampler();
            resampler.SetMode(true, 2, false); // interpolate, 2 filter stages, no sinc
            resampler.SetFilterParms();
            resampler.SetFeedMode(false); // output driven
            resampler.SetRates(source.WaveFormat.SampleRate, newSampleRate);
        }
        /// <summary>
        /// Creates a new Wave16toFloatProvider
        /// </summary>
        /// <param name="sourceProvider">the source provider</param>
        public Wave16ToFloatProvider(IWaveProvider sourceProvider)
        {
            if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
                throw new ArgumentException("Only PCM supported");
            if (sourceProvider.WaveFormat.BitsPerSample != 16)
                throw new ArgumentException("Only 16 bit audio supported");

            waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceProvider.WaveFormat.SampleRate, sourceProvider.WaveFormat.Channels);

            this.sourceProvider = sourceProvider;
            this.volume = 1.0f;
        }
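As a rough sketch of the per-sample conversion this provider performs in Read (the 1/32768 scaling is an assumption based on the usual 16 bit PCM normalization):
            // Assumed conversion: scale 16 bit PCM into [-1.0, 1.0) and apply volume
            float volume = 1.0f;
            short pcmSample = 16384;
            float floatSample = (pcmSample / 32768f) * volume; // 0.5f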
Example #9
        /// <summary>
        /// Creates a new Wave32To16Stream
        /// </summary>
        /// <param name="sourceStream">the source stream</param>
        public Wave32To16Stream(WaveStream sourceStream)
        {
            if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
                throw new ArgumentException("Only 32 bit Floating point supported");
            if (sourceStream.WaveFormat.BitsPerSample != 32)
                throw new ArgumentException("Only 32 bit Floating point supported");

            waveFormat = new WaveFormat(sourceStream.WaveFormat.SampleRate, 16, sourceStream.WaveFormat.Channels);
            this.volume = 1.0f;
            this.sourceStream = sourceStream;
            length = sourceStream.Length / 2;
            position = sourceStream.Position / 2;
        }
        /// <summary>
        /// Creates a multiplexing wave provider, allowing re-patching of input channels to different
        /// output channels
        /// </summary>
        /// <param name="inputs">Input wave providers. Must all be of the same format, but can have any number of channels</param>
        /// <param name="numberOfOutputChannels">Desired number of output channels.</param>
        public MultiplexingWaveProvider(IEnumerable<IWaveProvider> inputs, int numberOfOutputChannels)
        {
            this.inputs = new List<IWaveProvider>(inputs);
            this.outputChannelCount = numberOfOutputChannels;

            if (this.inputs.Count == 0)
            {
                throw new ArgumentException("You must provide at least one input");
            }
            if (numberOfOutputChannels < 1)
            {
                throw new ArgumentException("You must provide at least one output");
            }
            foreach (var input in this.inputs)
            {
                if (this.waveFormat == null)
                {
                    if (input.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
                    {
                        this.waveFormat = new WaveFormat(input.WaveFormat.SampleRate, input.WaveFormat.BitsPerSample, numberOfOutputChannels);
                    }
                    else if (input.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
                    {
                        this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(input.WaveFormat.SampleRate, numberOfOutputChannels);
                    }
                    else
                    {
                        throw new ArgumentException("Only PCM and 32 bit float are supported");
                    }
                }
                else
                {
                    if (input.WaveFormat.BitsPerSample != this.waveFormat.BitsPerSample)
                    {
                        throw new ArgumentException("All inputs must have the same bit depth");
                    }
                    if (input.WaveFormat.SampleRate != this.waveFormat.SampleRate)
                    {
                        throw new ArgumentException("All inputs must have the same sample rate");
                    }
                }
                inputChannelCount += input.WaveFormat.Channels;
            }
            this.bytesPerSample = this.waveFormat.BitsPerSample / 8;

            mappings = new List<int>();
            for (int n = 0; n < outputChannelCount; n++)
            {
                mappings.Add(n % inputChannelCount);
            }
        }
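For example, two hypothetical stereo inputs give four input channels, so with two output channels the default mappings list is { 0, 1 } (output n is fed from input channel n % 4). Re-patching is done with ConnectInputToOutput, e.g. swapping left and right:
            // Hypothetical: input1 and input2 are stereo IWaveProviders of the same format
            var multiplexer = new MultiplexingWaveProvider(new[] { input1, input2 }, 2);
            multiplexer.ConnectInputToOutput(1, 0); // input channel 1 -> output channel 0
            multiplexer.ConnectInputToOutput(0, 1); // input channel 0 -> output channel 1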
Example #11
 /// <summary>
 /// Creates a new ACM frame decompressor
 /// </summary>
 /// <param name="sourceFormat">The MP3 source format</param>
 public AcmMp3FrameDecompressor(WaveFormat sourceFormat)
 {
     this.pcmFormat = AcmStream.SuggestPcmFormat(sourceFormat);
     try
     {
         conversionStream = new AcmStream(sourceFormat, pcmFormat);
     }
     catch (Exception)
     {
         disposed = true;
         GC.SuppressFinalize(this);
         throw;
     }
 }
 /// <summary>
 /// Creates a media type from a WaveFormat
 /// </summary>
 public static IMFMediaType CreateMediaTypeFromWaveFormat(WaveFormat waveFormat)
 {
     var mediaType = CreateMediaType();
     try
     {
         MediaFoundationInterop.MFInitMediaTypeFromWaveFormatEx(mediaType, waveFormat, Marshal.SizeOf(waveFormat));
     }
     catch (Exception)
     {
         Marshal.ReleaseComObject(mediaType);
         throw;
     }
     return mediaType;
 }
Example #13
        /// <summary>
        /// WaveFileWriter that actually writes to a stream
        /// </summary>
        /// <param name="outStream">Stream to be written to</param>
        /// <param name="format">Wave format to use</param>
        public WaveFileWriter(Stream outStream, WaveFormat format)
        {
            this.outStream = outStream;
            this.format = format;
            this.writer = new BinaryWriter(outStream, System.Text.Encoding.UTF8);
            this.writer.Write(System.Text.Encoding.UTF8.GetBytes("RIFF"));
            this.writer.Write((int)0); // placeholder
            this.writer.Write(System.Text.Encoding.UTF8.GetBytes("WAVE"));

            this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fmt "));
            format.Serialize(this.writer);

            CreateFactChunk();
            WriteDataChunkHeader();
        }
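A short usage sketch (the output path, format, and data buffer are illustrative assumptions); disposing the writer is what fills in the RIFF and data chunk length placeholders written above:
            // Hypothetical: write captured PCM bytes out as a 44.1 kHz 16 bit stereo WAV
            using (var writer = new WaveFileWriter(File.Create("out.wav"), new WaveFormat(44100, 16, 2)))
            {
                writer.Write(buffer, 0, bytesRecorded); // buffer and bytesRecorded assumed to exist
            }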
Example #14
 /// <summary>
 /// Prepares a Wave input device for recording
 /// </summary>
 public WaveIn(WaveCallbackInfo callbackInfo)
 {
     syncContext = SynchronizationContext.Current;
     if ((callbackInfo.Strategy == WaveCallbackStrategy.NewWindow || callbackInfo.Strategy == WaveCallbackStrategy.ExistingWindow) &&
         syncContext == null)
     {
         throw new InvalidOperationException("Use WaveInEvent to record on a background thread");
     }
     DeviceNumber = 0;
     WaveFormat = new WaveFormat(8000, 16, 1);
     BufferMilliseconds = 100;
     NumberOfBuffers = 3;
     callback = Callback;
     this.callbackInfo = callbackInfo;
     callbackInfo.Connect(callback);
 }
        /// <summary>
        /// Creates the Media Foundation Resampler, allowing modifying of sample rate, bit depth and channel count
        /// </summary>
        /// <param name="sourceProvider">Source provider, must be PCM</param>
        /// <param name="outputFormat">Output format, must also be PCM</param>
        public MediaFoundationResampler(IWaveProvider sourceProvider, WaveFormat outputFormat)
            : base(sourceProvider, outputFormat)
        {
            if (!IsPcmOrIeeeFloat(sourceProvider.WaveFormat))
                throw new ArgumentException("Input must be PCM or IEEE float", "sourceProvider");
            if (!IsPcmOrIeeeFloat(outputFormat))
                throw new ArgumentException("Output must be PCM or IEEE float", "outputFormat");
            MediaFoundationApi.Startup();
            ResamplerQuality = 60; // maximum quality

            // n.b. we create the resampler COM object on demand in the Read method
            // to avoid threading issues, but we construct one here anyway so that
            // an exception is thrown now if the resampler does not exist on the system
            var comObject = CreateResamplerComObject();
            FreeComObject(comObject);
        }
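A hedged end-to-end sketch for this resampler (the MP3 path and the 16 kHz mono target are illustrative assumptions); AudioFileReader produces IEEE float, which satisfies the input check above:
            // Hypothetical: downsample an MP3 to 16 kHz mono 16 bit PCM
            using (var reader = new AudioFileReader("speech.mp3"))
            using (var resampler = new MediaFoundationResampler(reader, new WaveFormat(16000, 16, 1)))
            {
                WaveFileWriter.CreateWaveFile("speech16k.wav", resampler);
            }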
Example #16
        /// <summary>
        /// Creates a MemoryReader that loads the entire audio file into memory
        /// </summary>
        /// <param name="audioFileName">Path to the audio file to read</param>
        public MemoryReader(string audioFileName)
        {
            using (var audioFileReader = new AudioFileReader(audioFileName))
            {
                // TODO: could add resampling in here if required
                this.waveFormat = audioFileReader.WaveFormat;
                this.length = audioFileReader.Length;

                var wholeFile = new List<float>((int)(audioFileReader.Length / 4));
                var readBuffer = new float[audioFileReader.WaveFormat.SampleRate * audioFileReader.WaveFormat.Channels];
                int samplesRead;
                while ((samplesRead = audioFileReader.Read(readBuffer, 0, readBuffer.Length)) > 0)
                {
                    wholeFile.AddRange(readBuffer.Take(samplesRead));
                }
                AudioData = wholeFile.ToArray();
            }
            memorySampleProvider = new MemoryReaderSampleProvider(this);
        }
 /// <summary>
 /// Creates a new stereo waveprovider based on a mono input
 /// </summary>
 /// <param name="sourceProvider">Mono 16 bit PCM input</param>
 public MonoToStereoProvider16(IWaveProvider sourceProvider)
 {
     if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
     {
         throw new ArgumentException("Source must be PCM");
     }
     if (sourceProvider.WaveFormat.Channels != 1)
     {
         throw new ArgumentException("Source must be Mono");
     }
     if (sourceProvider.WaveFormat.BitsPerSample != 16)
     {
         throw new ArgumentException("Source must be 16 bit");
     }
     this.sourceProvider = sourceProvider;
     this.outputFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 2);
     RightVolume = 1.0f;
     LeftVolume = 1.0f;
 }
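The LeftVolume/RightVolume properties initialized above allow simple panning; a hedged sketch (monoPcmSource is a hypothetical 16 bit mono provider):
 // Hypothetical: hard-pan a mono source to the right channel
 var stereo = new MonoToStereoProvider16(monoPcmSource);
 stereo.LeftVolume = 0.0f;
 stereo.RightVolume = 1.0f;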
        /// <summary>
        /// Initializes a new instance of the DMO MP3 Frame decompressor
        /// </summary>
        /// <param name="sourceFormat"></param>
        public DmoMp3FrameDecompressor(WaveFormat sourceFormat)
        {
            this.mp3Decoder = new WindowsMediaMp3Decoder();
            if (!mp3Decoder.MediaObject.SupportsInputWaveFormat(0, sourceFormat))
            {
                throw new ArgumentException("Unsupported input format");
            }
            mp3Decoder.MediaObject.SetInputWaveFormat(0, sourceFormat);
            pcmFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.Channels); // 16 bit
            if (!mp3Decoder.MediaObject.SupportsOutputWaveFormat(0, pcmFormat))
            {
                throw new ArgumentException(String.Format("Unsupported output format {0}", pcmFormat));
            }
            mp3Decoder.MediaObject.SetOutputWaveFormat(0, pcmFormat);

            // a second is more than enough to decompress a frame at a time
            inputMediaBuffer = new MediaBuffer(sourceFormat.AverageBytesPerSecond);
            outputBuffer = new DmoOutputDataBuffer(pcmFormat.AverageBytesPerSecond);
        }
Example #19
        /// <summary>
        /// Initialises a new instance of the WASAPI capture class
        /// </summary>
        /// <param name="captureDevice">Capture device to use</param>
        public WasapiCapture(MMDevice captureDevice)
        {
            syncContext = SynchronizationContext.Current;
            audioClient = captureDevice.AudioClient;
            ShareMode = AudioClientShareMode.Shared;

            waveFormat = audioClient.MixFormat;
            var wfe = waveFormat as WaveFormatExtensible;
            if (wfe != null)
            {
                try
                {
                    waveFormat = wfe.ToStandardWaveFormat();
                }
                catch (InvalidOperationException)
                {
                    // couldn't convert to a standard format
                }
            }
        }
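A minimal capture sketch around this constructor (obtaining the device via the default capture endpoint, and the Process handler, are illustrative assumptions):
            // Hypothetical: capture from the default input device
            var device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
            var capture = new WasapiCapture(device);
            capture.DataAvailable += (s, e) => Process(e.Buffer, e.BytesRecorded); // Process is assumed
            capture.StartRecording();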
Example #20
        /// <summary>
        /// Creates a new WaveChannel32
        /// </summary>
        /// <param name="sourceStream">the source stream</param>
        /// <param name="volume">stream volume (1 is 0dB)</param>
        /// <param name="pan">pan control (-1 to 1)</param>
        public WaveChannel32(WaveStream sourceStream, float volume, float pan)
        {
            PadWithZeroes = true;

            var providers = new ISampleChunkConverter[]
            {
                new Mono8SampleChunkConverter(),
                new Stereo8SampleChunkConverter(),
                new Mono16SampleChunkConverter(),
                new Stereo16SampleChunkConverter(),
                new Mono24SampleChunkConverter(),
                new Stereo24SampleChunkConverter(),
                new MonoFloatSampleChunkConverter(),
                new StereoFloatSampleChunkConverter(),
            };
            foreach (var provider in providers)
            {
                if (provider.Supports(sourceStream.WaveFormat))
                {
                    this.sampleProvider = provider;
                    break;
                }
            }

            if (this.sampleProvider == null)
            {
                throw new ArgumentException("Unsupported sourceStream format");
            }

            // always outputs stereo 32 bit
            waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceStream.WaveFormat.SampleRate, 2);
            destBytesPerSample = 8; // includes stereo factoring

            this.sourceStream = sourceStream;
            this.volume = volume;
            this.pan = pan;
            sourceBytesPerSample = sourceStream.WaveFormat.Channels * sourceStream.WaveFormat.BitsPerSample / 8;

            length = SourceToDest(sourceStream.Length);
            position = 0;
        }
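To make the byte accounting concrete: for a hypothetical 16 bit stereo source, sourceBytesPerSample is 2 * 16 / 8 = 4 while destBytesPerSample is fixed at 8, so SourceToDest presumably doubles byte counts. A sketch under that assumption (not the library's own method):
            // Assumed scaling for a 16 bit stereo source: 4 source bytes -> 8 dest bytes
            long SourceToDestSketch(long sourceBytes) => sourceBytes / 4 * 8;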
Example #21
        /// <summary>
        /// Creates a new ACM stream to convert one format to another. Note that
        /// not all conversions can be done in one step
        /// </summary>
        /// <param name="sourceFormat">The source audio format</param>
        /// <param name="destFormat">The destination audio format</param>
        public AcmStream(WaveFormat sourceFormat, WaveFormat destFormat)
        {
            try
            {
                streamHandle = IntPtr.Zero;
                this.sourceFormat = sourceFormat;
                int sourceBufferSize = Math.Max(65536, sourceFormat.AverageBytesPerSecond);
                sourceBufferSize -= (sourceBufferSize % sourceFormat.BlockAlign);
                MmException.Try(AcmInterop.acmStreamOpen(out streamHandle, IntPtr.Zero, sourceFormat, destFormat, null, IntPtr.Zero, IntPtr.Zero, AcmStreamOpenFlags.NonRealTime), "acmStreamOpen");

                int destBufferSize = SourceToDest(sourceBufferSize);
                streamHeader = new AcmStreamHeader(streamHandle, sourceBufferSize, destBufferSize);
                driverHandle = IntPtr.Zero;
            }
            catch
            {
                // suppress the finalizer and clean up resources
                Dispose();
                throw;
            }
        }
        /// <summary>
        /// Create a new WaveFormat conversion stream
        /// </summary>
        /// <param name="targetFormat">Desired output format</param>
        /// <param name="sourceStream">Source stream</param>
        public WaveFormatConversionStream(WaveFormat targetFormat, WaveStream sourceStream)
        {
            this.sourceStream = sourceStream;
            this.targetFormat = targetFormat;

            conversionStream = new AcmStream(sourceStream.WaveFormat, targetFormat);
            /*try
            {
                // work out how many bytes the entire input stream will convert to
                length = conversionStream.SourceToDest((int)sourceStream.Length);
            }
            catch
            {
                Dispose();
                throw;
            }*/
            length = EstimateSourceToDest((int)sourceStream.Length);

            position = 0;
            preferredSourceReadSize = Math.Min(sourceStream.WaveFormat.AverageBytesPerSecond, conversionStream.SourceBuffer.Length);
            preferredSourceReadSize -= (preferredSourceReadSize % sourceStream.WaveFormat.BlockAlign);
        }
Example #23
 /// <summary>
 /// Initialises a new instance of RawSourceWaveStream
 /// </summary>
 /// <param name="sourceStream">The source stream containing raw audio</param>
 /// <param name="waveFormat">The waveformat of the audio in the source stream</param>
 public RawSourceWaveStream(Stream sourceStream, WaveFormat waveFormat)
 {
     this.sourceStream = sourceStream;
     this.waveFormat = waveFormat;
 }
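A hedged usage sketch: wrapping raw, headerless PCM bytes (rawBytes is a hypothetical array of 8 kHz 16 bit mono audio) so downstream components see a proper WaveStream:
 // Hypothetical: expose raw 8 kHz 16 bit mono telephony audio as a WaveStream
 var raw = new RawSourceWaveStream(new MemoryStream(rawBytes), new WaveFormat(8000, 16, 1));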
 /// <summary>
 /// Creates a stream that can convert to PCM
 /// </summary>
 /// <param name="sourceStream">The source stream</param>
 /// <returns>A PCM stream</returns>
 public static WaveStream CreatePcmStream(WaveStream sourceStream)
 {
     if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
     {
         return sourceStream;
     }
     WaveFormat pcmFormat = AcmStream.SuggestPcmFormat(sourceStream.WaveFormat);
     if (pcmFormat.SampleRate < 8000)
     {
         if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.G723)
         {
             pcmFormat = new WaveFormat(8000, 16, 1);
         }
         else
         {
             throw new InvalidOperationException("Invalid suggested output format, please explicitly provide a target format");
         }
     }
     return new WaveFormatConversionStream(pcmFormat, sourceStream);
 }
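The usual decode-to-PCM pattern with this helper (the file names are illustrative assumptions):
 // Hypothetical: decode a compressed WAV (e.g. GSM 6.10) to 16 bit PCM
 using (var reader = new WaveFileReader("compressed.wav"))
 using (var pcm = WaveFormatConversionStream.CreatePcmStream(reader))
 {
     WaveFileWriter.CreateWaveFile("decoded.wav", pcm);
 }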
Example #25
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;
            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;
            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;
                        /*WaveFormat.CreateIeeeFloatWaveFormat(
                        audioClient.MixFormat.SampleRate,
                        audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Fall back through candidate formats, from best to worst
                        WaveFormatExtensible[] bestToWorstFormats = {
                                  new WaveFormatExtensible(
                                      outputFormat.SampleRate, 32,
                                      outputFormat.Channels),
                                  new WaveFormatExtensible(
                                      outputFormat.SampleRate, 24,
                                      outputFormat.Channels),
                                  new WaveFormatExtensible(
                                      outputFormat.SampleRate, 16,
                                      outputFormat.Channels),
                              };

                        // Check from best format to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16 bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // just check that we can make it.
                using (new ResamplerDmoStream(waveProvider, outputFormat))
                {
                }
                this.dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            this.sourceProvider = waveProvider;

            // If using EventSync, setup is specific with shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0,
                        outputFormat, Guid.Empty);

                    // Get back the effective latency from AudioClient
                    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
                else
                {
                    // With EventCallBack and Exclusive, both latencies must be equal
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                        outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                    outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
 /// <summary>
 /// Checks whether this converter supports the given format (8 bit mono PCM)
 /// </summary>
 public bool Supports(WaveFormat waveFormat)
 {
     return waveFormat.Encoding == WaveFormatEncoding.Pcm &&
         waveFormat.BitsPerSample == 8 &&
         waveFormat.Channels == 1;
 }
Example #27
        /// <summary>
        /// Ensures valid AIFF header and then finds data offset.
        /// </summary>
        /// <param name="stream">The stream, positioned at the start of audio data</param>
        /// <param name="format">The format found</param>
        /// <param name="dataChunkPosition">The position of the data chunk</param>
        /// <param name="dataChunkLength">The length of the data chunk</param>
        /// <param name="chunks">Additional chunks found</param>
        public static void ReadAiffHeader(Stream stream, out WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List<AiffChunk> chunks)
        {
            dataChunkPosition = -1;
            format = null;
            BinaryReader br = new BinaryReader(stream);

            if (ReadChunkName(br) != "FORM")
            {
                throw new FormatException("Not an AIFF file - no FORM header.");
            }
            uint fileSize = ConvertInt(br.ReadBytes(4));
            string formType = ReadChunkName(br);
            if (formType != "AIFC" && formType != "AIFF")
            {
                throw new FormatException("Not an AIFF file - no AIFF/AIFC header.");
            }

            dataChunkLength = 0;

            while (br.BaseStream.Position < br.BaseStream.Length)
            {
                AiffChunk nextChunk = ReadChunkHeader(br);
                if (nextChunk.ChunkName == "COMM")
                {
                    short numChannels = ConvertShort(br.ReadBytes(2));
                    uint numSampleFrames = ConvertInt(br.ReadBytes(4));
                    short sampleSize = ConvertShort(br.ReadBytes(2));
                    double sampleRate = IEEE.ConvertFromIeeeExtended(br.ReadBytes(10));

                    format = new WaveFormat((int)sampleRate, (int)sampleSize, (int)numChannels);

                    if (nextChunk.ChunkLength > 18 && formType == "AIFC")
                    {
                        // In an AIFC file, the compression format is tacked on to the COMM chunk
                        string compress = new string(br.ReadChars(4)).ToLower();
                        if (compress != "none") throw new FormatException("Compressed AIFC is not supported.");
                        br.ReadBytes((int)nextChunk.ChunkLength - 22);
                    }
                    else br.ReadBytes((int)nextChunk.ChunkLength - 18);
                }
                else if (nextChunk.ChunkName == "SSND")
                {
                    uint offset = ConvertInt(br.ReadBytes(4));
                    uint blockSize = ConvertInt(br.ReadBytes(4));
                    dataChunkPosition = nextChunk.ChunkStart + 16 + offset;
                    dataChunkLength = (int)nextChunk.ChunkLength - 8;

                    br.ReadBytes((int)nextChunk.ChunkLength - 8);
                }
                else
                {
                    if (chunks != null)
                    {
                        chunks.Add(nextChunk);
                    }
                    br.ReadBytes((int)nextChunk.ChunkLength);
                }

                if (nextChunk.ChunkName == "\0\0\0\0") break;
            }

            if (format == null)
            {
                throw new FormatException("Invalid AIFF file - No COMM chunk found.");
            }
            if (dataChunkPosition == -1)
            {
                throw new FormatException("Invalid AIFF file - No SSND chunk found.");
            }
        }
Example #28
 /// <summary>
 /// Creates a new MixingWaveProvider32
 /// </summary>
 public MixingWaveProvider32()
 {
     this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
     this.bytesPerSample = 4;
     this.inputs = new List<IWaveProvider>();
 }
Example #29
 /// <summary>
 /// Initialise playback
 /// </summary>
 /// <param name="waveProvider">The waveprovider to be played</param>
 public void Init(IWaveProvider waveProvider)
 {
     this.waveStream = waveProvider;
     this.waveFormat = waveProvider.WaveFormat;
 }
Example #30
        /// <summary>
        /// Add a new input to the mixer
        /// </summary>
        /// <param name="waveProvider">The wave input to add</param>
        public void AddInputStream(IWaveProvider waveProvider)
        {
            if (waveProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
                throw new ArgumentException("Must be IEEE floating point", "waveProvider");
            if (waveProvider.WaveFormat.BitsPerSample != 32)
                throw new ArgumentException("Only 32 bit audio currently supported", "waveProvider");

            if (inputs.Count == 0)
            {
                // first one - set the format
                int sampleRate = waveProvider.WaveFormat.SampleRate;
                int channels = waveProvider.WaveFormat.Channels;
                this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
            }
            else
            {
                if (!waveProvider.WaveFormat.Equals(waveFormat))
                    throw new ArgumentException("All incoming channels must have the same format", "waveProvider");
            }

            lock (inputs)
            {
                this.inputs.Add(waveProvider);
            }
        }
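A hedged sketch of feeding this mixer (input1 and input2 are hypothetical 32 bit IEEE float providers of identical format, as the checks above require):
            // Hypothetical: mix two 32 bit float sources of the same format
            var mixer = new MixingWaveProvider32();
            mixer.AddInputStream(input1);
            mixer.AddInputStream(input2);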