        /// <summary>
        /// Event handler that captures audio from a WASAPI device and converts it to 16-bit PCM.
        /// </summary>
        /// <remarks>
        /// See also: https://qiita.com/zufall/items/2e027a2bc996864fe4af
        /// </remarks>
        /// <param name="sender">The capture object that raised the event.</param>
        /// <param name="eventArgs">Carries the recorded buffer and the number of bytes recorded.</param>
        private void WaspiDataAvailable(object sender, WaveInEventArgs eventArgs)
        {
            if (eventArgs.BytesRecorded == 0)
            {
                ResampledDataAvailable?.Invoke(this, new byte[0]);
                ResampledMaxValueAvailable?.Invoke(this, 0);
                return;
            }

            using (var memStream = new MemoryStream(eventArgs.Buffer, 0, eventArgs.BytesRecorded))
            {
                using (var inputStream = new RawSourceWaveStream(memStream, capture.WaveFormat))
                {
                    // Convert raw bytes to float samples, resample to the target
                    // rate, then convert back to 16-bit PCM.
                    var           sampleStream       = new WaveToSampleProvider(inputStream);
                    var           resamplingProvider = new WdlResamplingSampleProvider(sampleStream, TargetWaveFormat.SampleRate);
                    var           pcmProvider        = new SampleToWaveProvider16(resamplingProvider);
                    IWaveProvider targetProvider     = pcmProvider;
                    if (capture.WaveFormat.Channels == 2)
                    {
                        // Downmix stereo to mono at half volume per channel to avoid clipping.
                        var stereoToMonoProvider = new StereoToMonoProvider16(pcmProvider);
                        stereoToMonoProvider.RightVolume = 0.5f;
                        stereoToMonoProvider.LeftVolume  = 0.5f;
                        targetProvider = stereoToMonoProvider;
                    }

                    // Pull all converted data the chain can produce for this callback.
                    byte[] buffer = new byte[eventArgs.BytesRecorded];

                    var outputStream = new MemoryStream();
                    int readBytes;
                    while ((readBytes = targetProvider.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        outputStream.Write(buffer, 0, readBytes);
                    }
                    var aryOutputStream = outputStream.ToArray();
                    ResampledDataAvailable?.Invoke(this, aryOutputStream);

                    float max        = 0;
                    var   tempBuffer = new WaveBuffer(aryOutputStream);
                    for (int index = 0; index < aryOutputStream.Length / 2; index++)
                    {
                        var sample = (double)tempBuffer.ShortBuffer[index];
                        // absolute value
                        if (sample < 0.0)
                        {
                            sample = -sample;
                        }
                        // is this the max value?
                        if (sample > max)
                        {
                            max = (float)sample;
                        }
                    }
                    ResampledMaxValueAvailable?.Invoke(this, max);
                }
            }
        }
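For context, a minimal sketch of how this handler might be wired up. The capture field, TargetWaveFormat, and the two events appear in the snippet; the containing class name, the 16 kHz mono target format, and the event delegate types (inferred from the Invoke calls) are assumptions for illustration. Requires the NAudio.Wave and NAudio.CoreAudioApi namespaces.

        // Hypothetical containing class for the handler above.
        public class ResamplingCapture
        {
            private readonly WasapiCapture capture;

            // Assumed target format: 16 kHz, 16-bit, mono.
            public WaveFormat TargetWaveFormat { get; } = new WaveFormat(16000, 16, 1);

            public event EventHandler<byte[]> ResampledDataAvailable;
            public event EventHandler<float>  ResampledMaxValueAvailable;

            public ResamplingCapture(MMDevice device)
            {
                capture = new WasapiCapture(device);
                capture.DataAvailable += WaspiDataAvailable; // the handler shown above
            }

            public void Start() => capture.StartRecording();
            public void Stop()  => capture.StopRecording();
        }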
Example #2
        private void initAudioRecorder()
        {
            // Tear down any previous capture and silence-player instances.
            if (audio != null)
            {
                audio.DataAvailable    -= Audio_DataAvailable;
                audio.RecordingStopped -= Audio_RecordingStopped;
                audio.Dispose();
            }
            if (blankplayer != null)
            {
                blankplayer.Dispose();
            }
            audio        = new WasapiLoopbackCapture(device);
            sourceFormat = audio.WaveFormat;
            if (sourceProvider == null)
            {
                // Build the conversion chain once: float -> 16-bit PCM -> mono -> 24 kHz.
                sourceProvider           = new BufferedWaveProvider(sourceFormat);
                sourceProvider.ReadFully = false;
                wfto16prov         = new WaveFloatTo16Provider(sourceProvider);
                monovolumeprovider = new StereoToMonoProvider16(wfto16prov);
                formatconv         = new WaveFormatConversionProvider(new WaveFormat(24000, 16, 1), monovolumeprovider);
            }

            text_encoding.Text = sourceFormat.Encoding.ToString();

            // Keep a silent stream rendering on the device so the loopback
            // capture keeps delivering data even when nothing else is playing.
            blankplayer = new WasapiOut(device, AudioClientShareMode.Shared, false, 0);

            silence = new SilenceProvider(sourceFormat).ToSampleProvider();

            AudioDevice_Text.ForeColor = Color.Black;

            try
            {
                blankplayer.Init(silence);
            }
            catch
            {
                // The device rejected the silence stream; flag it in the UI.
                AudioDevice_Text.ForeColor = Color.Red;
            }
            audio.DataAvailable    += Audio_DataAvailable;
            audio.RecordingStopped += Audio_RecordingStopped;
            AudioMonitor            = device.AudioMeterInformation;
        }
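The matching Audio_DataAvailable handler is not shown. A plausible sketch, assuming it queues the raw loopback bytes and drains them back out through the 24 kHz mono chain built above; OnConvertedAudio is a hypothetical downstream consumer:

        private void Audio_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Queue the raw loopback bytes, then pull them back through the
            // float -> 16-bit -> mono -> 24 kHz chain from initAudioRecorder().
            sourceProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
            var converted = new byte[e.BytesRecorded];
            int read;
            while ((read = formatconv.Read(converted, 0, converted.Length)) > 0)
            {
                OnConvertedAudio(converted, read); // hypothetical consumer
            }
        }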
 /// <summary>
 /// Converts the given 16-bit input wave provider to mono or stereo
 /// </summary>
 /// <param name="input">The input WaveProvider to convert</param>
 /// <param name="toMono">True for mono output, or false for stereo output</param>
 /// <returns>A converted IWaveProvider of the original input in either mono or stereo</returns>
 public IWaveProvider MonoStereoConvert16(IWaveProvider input, bool toMono)
 {
     if (toMono && input.WaveFormat.Channels != 1)
     {
         var stmp = new StereoToMonoProvider16(input);
         return stmp;
     }
     else if (!toMono && input.WaveFormat.Channels != 2)
     {
         var mtsp = new MonoToStereoProvider16(input);
         mtsp.LeftVolume  = 0.7f;
         mtsp.RightVolume = 0.7f; // 0.7 on each channel to avoid doubling the volume
         return mtsp;
     }
     else
     {
         return input;
     }
 }
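Illustrative usage of the helper, downmixing a stereo 16-bit WAV to mono (file names are placeholders):

     // Open a stereo 16-bit WAV, downmix it to mono, and write the result.
     using (var reader = new WaveFileReader("stereo16.wav"))
     {
         IWaveProvider mono = MonoStereoConvert16(reader, toMono: true);
         WaveFileWriter.CreateWaveFile("mono16.wav", mono);
     }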
Example #4
        public void RightChannelOnly()
        {
            IWaveProvider          stereoStream = new TestStereoProvider();
            StereoToMonoProvider16 mono         = new StereoToMonoProvider16(stereoStream);

            mono.LeftVolume  = 0.0f;
            mono.RightVolume = 1.0f;
            int samples = 1000;

            byte[] buffer = new byte[samples * 2];
            int    read   = mono.Read(buffer, 0, buffer.Length);

            Assert.AreEqual(buffer.Length, read, "bytes read");
            WaveBuffer waveBuffer = new WaveBuffer(buffer);
            short      expected   = 0;

            for (int sample = 0; sample < samples; sample++)
            {
                short sampleVal = waveBuffer.ShortBuffer[sample];
                Assert.AreEqual(expected--, sampleVal, "sample #" + sample.ToString());
            }
        }
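TestStereoProvider itself is not shown. For the assertion to hold (right-channel-only mono output of 0, -1, -2, ...), it presumably writes an incrementing counter to the left channel and its negation to the right; a sketch under that assumption:

        // Assumed shape of TestStereoProvider: left channel counts up,
        // right channel counts down, one 16-bit sample per channel per frame.
        class TestStereoProvider : WaveProvider16
        {
            private short current;

            public TestStereoProvider() : base(44100, 2) { }

            public override int Read(short[] buffer, int offset, int sampleCount)
            {
                for (int i = 0; i < sampleCount; i += 2)
                {
                    buffer[offset + i]     = current;         // left:  0, 1, 2, ...
                    buffer[offset + i + 1] = (short)-current; // right: 0, -1, -2, ...
                    current++;
                }
                return sampleCount;
            }
        }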
 public MonoStereoSwapProvider(ISampleProvider sampleProvider)
 {
     BackingSampleProvider = sampleProvider;
     // Pre-build both paths: MonoProvider upmixes a mono source to stereo,
     // StereoProvider downmixes a stereo source to mono (both via 16-bit PCM).
     MonoProvider          = new MonoToStereoProvider16(sampleProvider.ToWaveProvider16()).ToSampleProvider();
     StereoProvider        = new StereoToMonoProvider16(sampleProvider.ToWaveProvider16()).ToSampleProvider();
 }
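Only the constructor is shown. One plausible way the swap could be exposed, as a hypothetical member that is not part of the original class:

     // Hypothetical accessor: route to the opposite-channel-count path on demand.
     public ISampleProvider Swapped =>
         BackingSampleProvider.WaveFormat.Channels == 1 ? MonoProvider : StereoProvider;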
Example #6
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        private IWaveProvider Init()
        {
            var  waveProvider    = waveProviderFunc();
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat).
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // "In shared mode, the audio engine always supports the mix format."
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    if (closestSampleRateFormat.Channels == 1 && outputFormat.Channels == 2)
                    {
                        var downmixer = new StereoToMonoProvider16(waveProvider);
                        downmixer.LeftVolume  = 0.5F;
                        downmixer.RightVolume = 0.5F;
                        waveProvider          = downmixer;
                    }

                    outputFormat = closestSampleRateFormat;
                }

                // A resampler will be needed to reach the chosen output format.
                this.resamplerNeeded = true;
            }
            else
            {
                resamplerNeeded = false;
            }

            // Init Shared or Exclusive
            if (shareMode == AudioClientShareMode.Shared)
            {
                // With EventCallback and Shared mode, the device period must be passed as 0
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);

                // Get back the effective latency from AudioClient. On Windows 10 it can be 0
                if (audioClient.StreamLatency > 0)
                {
                    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
            }
            else
            {
                // With EventCallback and Exclusive mode, both latencies must be equal
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                       outputFormat, Guid.Empty);
            }

            // Create the Wait Event Handle
            frameEventWaitHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
            audioClient.SetEventHandle(frameEventWaitHandle);

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
            return waveProvider;
        }
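After Init returns, playback would typically proceed with an event-driven fill loop against the fields set up above. A simplified sketch under stated assumptions: PlayLoop is a hypothetical name; error handling and the resampler path are omitted; System.Threading, System.Runtime.InteropServices, and Microsoft.Win32.SafeHandles are assumed to be imported.

        private void PlayLoop(IWaveProvider waveProvider)
        {
            int bufferFrameCount = audioClient.BufferSize;
            int bytesPerFrame    = outputFormat.Channels * outputFormat.BitsPerSample / 8;
            var readBuffer       = new byte[bufferFrameCount * bytesPerFrame];

            // Wrap the raw event handle created in Init() so we can wait on it.
            var waitHandle = new EventWaitHandle(false, EventResetMode.AutoReset)
            {
                SafeWaitHandle = new SafeWaitHandle(frameEventWaitHandle, false)
            };

            audioClient.Start();
            while (waitHandle.WaitOne(3 * latencyMilliseconds))
            {
                // Fill only the space the device has consumed since the last event.
                int framesAvailable = bufferFrameCount - audioClient.CurrentPadding;
                int read = waveProvider.Read(readBuffer, 0, framesAvailable * bytesPerFrame);
                if (read == 0) break; // source exhausted

                IntPtr pBuffer = renderClient.GetBuffer(framesAvailable);
                Marshal.Copy(readBuffer, 0, pBuffer, read);
                renderClient.ReleaseBuffer(read / bytesPerFrame, AudioClientBufferFlags.None);
            }
            audioClient.Stop();
        }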