Example #1
        //INITIALIZATION
        #region "initialization"
        private void InitializeMicrophone(ref WaveIn wi, int deviceID, int iBufMS, int volume)
        {
            //Gets rid of previous instance if there is one
            if (wi != null)
            {
                wi.Dispose();
            }
            wi = new WaveIn();

            wi.DeviceNumber       = deviceID;       //Device 0 is always the default input device
            wi.NumberOfBuffers    = 4;
            wi.BufferMilliseconds = iBufMS;

            wi.WaveFormat     = audioFormat;
            wi.DataAvailable += WiMicrophone_DataAvailable;

            if (bwpMicrophone != null && bwpMicrophone.BufferLength > 0)
            {
                bwpMicrophone.ClearBuffer();                                                                      //Clear any stale data left over from a previous instance
            }
            bwpMicrophone = new BufferedWaveProvider(audioFormat);
            bwpMicrophone.DiscardOnBufferOverflow = true;
            vspMicrophone = new VolumeSampleProvider(bwpMicrophone.ToSampleProvider()); //Allows us to control the volume

            vspMicrophone.Volume = IntToFloat(volume);

            mspStandard.AddMixerInput(vspMicrophone);
            wi.StartRecording();
        }
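The `WiMicrophone_DataAvailable` handler and the `IntToFloat` helper referenced above are not part of this snippet; a minimal sketch of what they might look like, assuming the handler feeds `bwpMicrophone` and the volume is an integer in the 0-100 range, is:

// Hypothetical handler: copy the captured bytes into the buffered provider created above.
private void WiMicrophone_DataAvailable(object sender, WaveInEventArgs e)
{
    bwpMicrophone?.AddSamples(e.Buffer, 0, e.BytesRecorded);
}

// Hypothetical helper: map an assumed 0-100 integer volume onto the 0.0-1.0 float range.
private static float IntToFloat(int volume)
{
    return volume / 100f;
}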
Example #2
 public void ReadData(float[] data)
 {
     if (bufferedWaveProvider != null)
     {
         bufferedWaveProvider.ToSampleProvider().Read(data, 0, data.Length);
     }
 }
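`ToSampleProvider()` wraps the buffer in a new converter on every call here; the buffered data itself lives in `bufferedWaveProvider`, so the read still works, but a common refinement is to create the converter once and reuse it, as in this sketch:

// Sketch: cache the ISampleProvider instead of rebuilding it on every read.
private ISampleProvider sampleProvider;

public void ReadData(float[] data)
{
    if (bufferedWaveProvider == null)
    {
        return;
    }
    if (sampleProvider == null)
    {
        sampleProvider = bufferedWaveProvider.ToSampleProvider();
    }
    sampleProvider.Read(data, 0, data.Length);
}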
Example #3
        public void Init(int sampleRate)
        {
            try
            {
                DeInit();

                WaveFormat audioFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate > 0 ? sampleRate : 44100, 2);
                _bufferedWaveProvider = new BufferedWaveProvider(audioFormat)
                {
                    DiscardOnBufferOverflow = true,
                    BufferLength            = AUDIO_BUFFER_SIZE
                };

                _volumeProvider = new VolumeSampleProvider(_bufferedWaveProvider.ToSampleProvider())
                {
                    Volume = 1f
                };

                _audioDevice = new WaveOutEvent
                {
                    DesiredLatency = 110
                };

                _audioDevice.Init(_volumeProvider);
                _audioDevice.Play();
            }
            catch (Exception e)
            {
                Logger.Instance.LogException(e);
            }
        }
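`DeInit()` is not shown; a plausible counterpart, assuming it simply tears down the objects that `Init` creates, might look like this:

// Hypothetical teardown matching the Init() above: stop and release the output device.
private void DeInit()
{
    if (_audioDevice != null)
    {
        _audioDevice.Stop();
        _audioDevice.Dispose();
        _audioDevice = null;
    }
    _volumeProvider       = null;
    _bufferedWaveProvider = null;
}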
Example #4
        private void InitSpeakers()
        {
            _waveProvider = new BufferedWaveProvider(new WaveFormat(48000, 2))
            {
                DiscardOnBufferOverflow = true
            };
            _waveOut = new WaveOut();

            if (PlaybackDeviceName != null)
            {
                for (int i = 0; i < WaveOut.DeviceCount; i++)
                {
                    var device = WaveOut.GetCapabilities(i);
                    if (PlaybackDeviceName == device.ProductName)
                    {
                        _waveOut.DeviceNumber = i;
                        Logging.Log($"Using playback device: {i} {device.ProductName}", LogLevel.LogLevel_DEBUG);
                    }
                }
            }
            _waveOut.PlaybackStopped += _waveOut_PlaybackStopped;
            _volumeSampleProvider     = new VolumeSampleProvider(_waveProvider.ToSampleProvider());
            _waveOut.Init(_volumeSampleProvider);
            Logging.Log($"Wave device {_waveOut.DeviceNumber} initialized. Samplerate: {_waveProvider.WaveFormat.SampleRate} " +
                        $"Channels: {_waveProvider.WaveFormat.Channels}", LogLevel.LogLevel_DEBUG);
        }
Example #5
        private void InitAudio(InputState state)
        {
            filePanel.Enabled = false;

            switch (state)
            {
            case InputState.SpeakerOut:
                input = new WasapiLoopbackCapture();

                InitReader();
                break;

            case InputState.MicrophoneIn:
                input            = new WaveIn();
                input.WaveFormat = new WaveFormat(44100, 32, 2);

                InitReader();
                break;

            case InputState.FileIn:
                input = new WasapiLoopbackCapture();

                waveProvider = new BufferedWaveProvider(input.WaveFormat);

                provider                = waveProvider.ToSampleProvider();
                input.DataAvailable    += AddDataFromFile;
                input.RecordingStopped += (s, a) => { input?.Dispose(); };
                break;

            default:
                break;
            }
        }
Example #6
        private void AudioOutput_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (output != null && output.PlaybackState != PlaybackState.Stopped)
            {
                output.Pause();
            }

            output = new WasapiOut(outputs[audioOutputSelector.SelectedIndex], AudioClientShareMode.Shared, true, outputLatency);

            bitsPrSample = output.OutputWaveFormat.BitsPerSample;
            sampleRate   = output.OutputWaveFormat.SampleRate;
            channels     = output.OutputWaveFormat.Channels;


            // Set the WaveFormat
            outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);

            pflBuffer           = new BufferedWaveProvider(internalFormatStereo);
            pflBuffer.ReadFully = true;
            pflBuffer.DiscardOnBufferOverflow = true;

            WdlResamplingSampleProvider resampler = new WdlResamplingSampleProvider(pflBuffer.ToSampleProvider(), outputFormat.SampleRate);



            output.Init(resampler);
            output.Play();

            Logger.WriteLine("SET OUTPUT FORMAT: "
                             + "Sample Rate: " + sampleRate
                             + ", BitsPrSasmple: " + bitsPrSample
                             + ", Channels: " + channels);
        }
Example #7
    public AsioSampleProvider(string driverName, int sampleRate = 44100, int numChannels = 2)
    {
        this.sampleRate  = sampleRate;
        this.numChannels = numChannels;

        //For now, assume the ASIO input is 16-bit PCM (the H6 input is 16-bit PCM and there are no plans to use another interface)
        //Data conversion may be needed in the future
        this.WaveFormat = new WaveFormat(sampleRate, numChannels);
        waveProvider    = new BufferedWaveProvider(this.WaveFormat);

        sampleProvider = waveProvider.ToSampleProvider();

        try
        {
            asio = new AsioOut(driverName);
            asio.InitRecordAndPlayback(null, numChannels, sampleRate);
            asio.AudioAvailable += OnAudioAvailable;
            asio.Play();
            Debug.Log(string.Format("Asio Playing: {0}", asio.PlaybackState));
        }
        catch (System.ArgumentException e)
        {
            Debug.Log(string.Format("Invalid ASIO Driver Name: {0}", e));
        }
        catch (System.Exception e)
        {
            Debug.Log(string.Format("Unknown ASIO Error: {0}", e));
        }
    }
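The `OnAudioAvailable` handler is omitted above. Since the snippet declares a 16-bit PCM `WaveFormat`, a sketch of the handler could convert NAudio's interleaved float view of the ASIO buffers to 16-bit PCM before queueing it (the clamping and scaling below are an illustrative choice, not taken from the original project):

// Hypothetical handler: convert the interleaved ASIO input to 16-bit PCM and buffer it.
private void OnAudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    float[] samples = new float[e.SamplesPerBuffer * numChannels];
    int count = e.GetAsInterleavedSamples(samples);

    byte[] bytes = new byte[count * 2];
    for (int i = 0; i < count; i++)
    {
        // Clamp each float sample and scale it to a little-endian 16-bit value.
        short s = (short)(Math.Max(-1f, Math.Min(1f, samples[i])) * short.MaxValue);
        bytes[2 * i]     = (byte)(s & 0xFF);
        bytes[2 * i + 1] = (byte)((s >> 8) & 0xFF);
    }
    waveProvider.AddSamples(bytes, 0, bytes.Length);
}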
Example #8
 public Input(WaveIn waveIn)
 {
     waveProvider          = new BufferedWaveProvider(Config.WaveFormat);
     waveIn.DataAvailable += DataAvailableHandler;
     source = waveProvider.ToSampleProvider();
     waveProvider.DiscardOnBufferOverflow = true;
 }
Example #9
        //+start(): initialize webcam and start feed
        public void start(float playbackVolume = 0)
        {
            //Get desired audio device
            audioSrc = numDevices - device.SelectedIndex - 1;

            //Initialize device
            source = new WaveInEvent {
                WaveFormat = new WaveFormat(44100, WaveIn.GetCapabilities(audioSrc).Channels)
            };
            source.DataAvailable += sourceDataAvailable;
            provider              = new BufferedWaveProvider(new WaveFormat());
            player       = new WaveOut();
            sampleObject = new object();

            //Initialize waveForm painter
            notify         = new NotifyingSampleProvider(provider.ToSampleProvider());
            notify.Sample += DrawAudioWave;

            //Start feed
            source.StartRecording();
            source.BufferMilliseconds = 10;
            player.Init(notify);
            player.Play();
            player.Volume = playbackVolume;
        }
Example #10
        public MixedAudioProvider(params NAudioProvider[] AudioProviders)
        {
            foreach (var provider in AudioProviders)
            {
                var bufferedProvider = new BufferedWaveProvider(provider.NAudioWaveFormat)
                {
                    DiscardOnBufferOverflow = true
                };

                provider.DataAvailable += (S, E) =>
                {
                    bufferedProvider.AddSamples(E.Buffer, 0, E.Length);
                };

                var sampleProvider = bufferedProvider.ToSampleProvider();

                var providerWf = provider.WaveFormat;

                // Mono to Stereo
                if (providerWf.Channels == 1)
                {
                    sampleProvider = sampleProvider.ToStereo();
                }

                // Resample
                if (providerWf.SampleRate != WaveFormat.SampleRate)
                {
                    sampleProvider = new WdlResamplingSampleProvider(sampleProvider, WaveFormat.SampleRate);
                }

                _audioProviders.Add(provider, sampleProvider);
            }

            if (_audioProviders.Count == 1)
            {
                _mixingWaveProvider = _audioProviders
                                      .Values
                                      .First()
                                      .ToWaveProvider16();
            }
            else
            {
                var mixingSampleProvider = new MixingSampleProvider(_audioProviders.Values);

                // Screna expects 44.1 kHz 16-bit Stereo
                _mixingWaveProvider = mixingSampleProvider.ToWaveProvider16();
            }

            var bufferSize = (int)
                             (
                (ReadInterval / 1000.0)
                * WaveFormat.SampleRate
                * WaveFormat.Channels
                * (WaveFormat.BitsPerSample / 8.0)
                             );

            _buffer = new byte[bufferSize];

            Task.Factory.StartNew(Loop, TaskCreationOptions.LongRunning);
        }
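`Loop`, `ReadInterval` and the consumer of `_buffer` are defined elsewhere in the class; a minimal sketch of what such a read loop might do, pulling one interval's worth of mixed audio per pass, is shown below (`_running` and `OnDataRead` are placeholders, not names from the original):

// Hypothetical read loop: drain the mixed provider once per interval and hand the
// bytes to whatever consumes them.
void Loop()
{
    while (_running)
    {
        int read = _mixingWaveProvider.Read(_buffer, 0, _buffer.Length);

        if (read > 0)
        {
            OnDataRead(_buffer, read);
        }

        Thread.Sleep(ReadInterval);
    }
}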
Example #11
        public void StartPreview(int mic, MMDevice speakers)
        {
            try
            {
                _waveOut = new WasapiOut(speakers, AudioClientShareMode.Shared, true, 30);

                _buffBufferedWaveProvider           = new BufferedWaveProvider(new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1));
                _buffBufferedWaveProvider.ReadFully = true;

                RadioFilter filter = new RadioFilter(_buffBufferedWaveProvider.ToSampleProvider());

                //add final volume boost to all mixed audio
                _volumeSampleProvider        = new VolumeSampleProviderWithPeak(filter, (peak => SpeakerMax = peak));
                _volumeSampleProvider.Volume = SpeakerBoost;

                _waveOut.Init(_volumeSampleProvider);

                _waveOut.Play();
            }
            catch (Exception ex)
            {
                Logger.Error(ex, "Error starting audio Output - Quitting! " + ex.Message);

                MessageBox.Show($"Problem Initialising Audio Output! Try a different Output device and please post your client log on the forums", "Audio Output Error", MessageBoxButton.OK,
                                MessageBoxImage.Error);

                Environment.Exit(1);
            }

            try
            {
                //opus
                _encoder = OpusEncoder.Create(AudioManager.INPUT_SAMPLE_RATE, 1, FragLabs.Audio.Codecs.Opus.Application.Voip);
                _encoder.ForwardErrorCorrection = false;
                _decoder = OpusDecoder.Create(AudioManager.INPUT_SAMPLE_RATE, 1);
                _decoder.ForwardErrorCorrection = false;

                _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback())
                {
                    BufferMilliseconds = AudioManager.INPUT_AUDIO_LENGTH_MS,
                    DeviceNumber       = mic
                };

                _waveIn.NumberOfBuffers = 2;
                _waveIn.DataAvailable  += _waveIn_DataAvailable;
                _waveIn.WaveFormat      = new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1);

                _waveIn.StartRecording();
            }
            catch (Exception ex)
            {
                Logger.Error(ex, "Error starting audio Input - Quitting! " + ex.Message);

                MessageBox.Show($"Problem Initialising Audio Input! Try a different Input device and please post your client log on the forums", "Audio Input Error", MessageBoxButton.OK,
                                MessageBoxImage.Error);

                Environment.Exit(1);
            }
        }
Example #12
 private void InitializeAggregator()
 {
     _aggregator = new SampleAggregator(_buffer.ToSampleProvider());
     _aggregator.NotificationCount  = _format.SampleRate / 10;
     _aggregator.PerformFft         = true;
     _aggregator.FftCalculated     += (s, a) => OnFftCalculated(a);
     _aggregator.MaximumCalculated += (s, a) => OnMaximumCalculated(a);
 }
Example #13
        private void InitReader()
        {
            waveProvider = new BufferedWaveProvider(input.WaveFormat);

            provider                = waveProvider.ToSampleProvider();
            input.DataAvailable    += AddData;
            input.RecordingStopped += (s, a) => { input?.Dispose(); };
        }
Example #14
 /// <summary>
 /// Both formats must be IEEE float and have 2 channels
 /// </summary>
 /// <param name="input"></param>
 /// <param name="output"></param>
 public Resampler(WaveFormat input, WaveFormat output)
 {
     inputFormat        = input;
     outputFormat       = output;
     provider           = new BufferedWaveProvider(input);
     provider.ReadFully = true;
     provider.DiscardOnBufferOverflow = true;
     resampler = new WdlResamplingSampleProvider(provider.ToSampleProvider(), outputFormat.SampleRate);
 }
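How the class is meant to be driven is not shown; a usage sketch, with hypothetical `Write`/`Read` wrappers around the fields above, could be:

// Hypothetical wrapper methods showing how the buffered provider and resampler are used.
public void Write(byte[] data, int count)
{
    provider.AddSamples(data, 0, count);        // queue audio in the input format
}

public int Read(float[] buffer, int count)
{
    return resampler.Read(buffer, 0, count);    // pull samples resampled to the output rate
}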
Example #15
        public Form1()
        {
            InitializeComponent();
            audio     = new WaveOut();
            _provider = new BufferedWaveProvider(new WaveFormat(44100, 1));
            data      = new byte[8000];
            _notify   = new NotifyingSampleProvider(_provider.ToSampleProvider());

            Listening = new Thread(StartListener);
            SoundOut  = new Thread(VoiceOut);
        }
Example #16
        public AudioBuffer(IAudioCodec codec)
        {
            _codec        = codec;
            _waveProvider = new BufferedWaveProvider(_codec.Format.ToWaveFormat());
            _waveProvider.DiscardOnBufferOverflow = false;

            _sampleProvider = _waveProvider.ToSampleProvider();

            _jitter            = new JitterTimer <IAudioPacket>(new AudioPacketRestorer(), codec.Format.Duration);
            _jitter.Completed += OnCaptured;
        }
Example #17
        private void InitMicPassthrough(MMDevice micOutput)
        {
            if (micOutput != null) // && micOutput !=speakers
            {
                //TODO handle case when they're the same?

                try
                {
                    _micWaveOut = new WasapiOut(micOutput, AudioClientShareMode.Shared, true, 40, windowsN);

                    _micWaveOutBuffer           = new BufferedWaveProvider(new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1));
                    _micWaveOutBuffer.ReadFully = true;
                    _micWaveOutBuffer.DiscardOnBufferOverflow = true;

                    var sampleProvider = _micWaveOutBuffer.ToSampleProvider();

                    if (micOutput.AudioClient.MixFormat.Channels == 1)
                    {
                        if (sampleProvider.WaveFormat.Channels == 2)
                        {
                            _micWaveOut.Init(new RadioFilter(sampleProvider.ToMono()));
                        }
                        else
                        {
                            //already mono
                            _micWaveOut.Init(new RadioFilter(sampleProvider));
                        }
                    }
                    else
                    {
                        if (sampleProvider.WaveFormat.Channels == 1)
                        {
                            _micWaveOut.Init(new RadioFilter(sampleProvider.ToStereo()));
                        }
                        else
                        {
                            //already stereo
                            _micWaveOut.Init(new RadioFilter(sampleProvider));
                        }
                    }

                    _micWaveOut.Play();
                }
                catch (Exception ex)
                {
                    Logger.Error(ex, "Error starting mic audio Output - Quitting! " + ex.Message);

                    ShowOutputError("Problem Initialising Mic Audio Output!");


                    Environment.Exit(1);
                }
            }
        }
Example #18
 public static void Connected(ushort Id)
 {
     if (Active)
     {
         byte[] silence = new byte[10000];
         Array.Clear(silence, 0, 5000);
         BufferedWaveProvider player = new BufferedWaveProvider(new WaveFormat(48000, 16, 1));
         players[Id] = player;
         player.AddSamples(silence, 0, 5000);
         mixer.AddMixerInput(player.ToSampleProvider());
     }
 }
Example #19
        private void AudioOutput_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (output != null && output.PlaybackState != PlaybackState.Stopped)
            {
                output.Pause();
            }

            output = new WasapiOut(outputs[audioOutputSelector.SelectedIndex], AudioClientShareMode.Shared, true, outputLatency);
            StoreSetting("output_device", audioOutputSelector.SelectedIndex.ToString());
            Logger.WriteLine("Set outputDevice to: " + audioOutputSelector.SelectedIndex.ToString());

            bitsPrSample = output.OutputWaveFormat.BitsPerSample;
            sampleRate   = output.OutputWaveFormat.SampleRate;
            channels     = output.OutputWaveFormat.Channels;


            // Set the WaveFormat
            outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);

            // (Re)Setup the mixer and buffers
            if (outputBufferTimer != null)
            {
                outputBufferTimer.Stop();
            }

            outputBuffer           = new BufferedWaveProvider(internalFormatStereo);
            outputBuffer.ReadFully = true;
            outputBuffer.DiscardOnBufferOverflow = true;

            WdlResamplingSampleProvider resampler = new WdlResamplingSampleProvider(outputBuffer.ToSampleProvider(), outputFormat.SampleRate);

            outputMeter = new MeteringSampleProvider(resampler, samplesPrIntegration);
            outputMeter.StreamVolume += (a, b) => RunOutputMeter(a, b, meter);

            SetOutputMode(outputMode);


            output.Init(outputMeter);
            output.Play();



            outputBufferTimer          = new System.Windows.Forms.Timer();
            outputBufferTimer.Interval = mainServerInterval;
            outputBufferTimer.Tick    += outputBufferTimerCheck;
            outputBufferTimer.Start();


            Logger.WriteLine("SET OUTPUT FORMAT: "
                             + "Sample Rate: " + sampleRate
                             + ", BitsPrSasmple: " + bitsPrSample
                             + ", Channels: " + channels);
        }
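`outputBufferTimerCheck` is not included; one plausible job for such a tick handler is to keep `outputBuffer` from drifting too far ahead, for example (`maxBufferMs` is an assumed threshold, not a field from the original):

// Hypothetical timer tick: trim the output buffer if it has accumulated too much audio.
private void outputBufferTimerCheck(object sender, EventArgs e)
{
    if (outputBuffer.BufferedDuration.TotalMilliseconds > maxBufferMs)
    {
        outputBuffer.ClearBuffer();
        Logger.WriteLine("Output buffer overran and was cleared");
    }
}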
Example #20
        public MixedAudioProvider(params NAudioProvider[] AudioProviders)
        {
            foreach (var provider in AudioProviders)
            {
                var bufferedProvider = new BufferedWaveProvider(provider.NAudioWaveFormat)
                {
                    DiscardOnBufferOverflow = true,
                    ReadFully = false
                };

                provider.WaveIn.DataAvailable += (S, E) =>
                {
                    bufferedProvider.AddSamples(E.Buffer, 0, E.BytesRecorded);
                };

                var sampleProvider = bufferedProvider.ToSampleProvider();

                var providerWf = provider.WaveFormat;

                // Mono to Stereo
                if (providerWf.Channels == 1)
                {
                    sampleProvider = sampleProvider.ToStereo();
                }

                // Resample
                if (providerWf.SampleRate != WaveFormat.SampleRate)
                {
                    sampleProvider = new WdlResamplingSampleProvider(sampleProvider, WaveFormat.SampleRate);
                }

                _audioProviders.Add(provider, sampleProvider);
            }

            if (_audioProviders.Count == 1)
            {
                _mixingWaveProvider = _audioProviders
                                      .Values
                                      .First()
                                      .ToWaveProvider16();
            }
            else
            {
                var waveProviders = _audioProviders.Values.Select(M => M.ToWaveProvider());

                // MixingSampleProvider cannot be used here because it removes inputs that don't return as many bytes as requested.

                // Screna expects 44.1 kHz 16-bit Stereo
                _mixingWaveProvider = new MixingWaveProvider32(waveProviders)
                                      .ToSampleProvider()
                                      .ToWaveProvider16();
            }
        }
Example #21
        public void Init()
        {
            buffer = new BufferedWaveProvider(new WaveFormat(mixer.WaveFormat.SampleRate, 16, mixer.WaveFormat.Channels));
            buffer.BufferDuration          = TimeSpan.FromSeconds(10);
            buffer.DiscardOnBufferOverflow = true;
            sampleProvider = buffer.ToSampleProvider();
            mixer.AddMixerInput(sampleProvider);

            SpeakMessage(new TextToSpeechMessage()
            {
                Message = "IL2-SRS Text to Speech Active"
            });
        }
Example #22
 private void InitSpeakers()
 {
     _waveProvider = new BufferedWaveProvider(new WaveFormat(48000, 2))
     {
         DiscardOnBufferOverflow = true
     };
     _waveOut = new WaveOut();
     _waveOut.PlaybackStopped += _waveOut_PlaybackStopped;
     _volumeSampleProvider     = new VolumeSampleProvider(_waveProvider.ToSampleProvider());
     _waveOut.Init(_volumeSampleProvider);
     Logging.Log($"Wave device {_waveOut.DeviceNumber} initialized. Samplerate: {_waveProvider.WaveFormat.SampleRate} " +
                 $"Channels: {_waveProvider.WaveFormat.Channels}", LogLevel.LogLevel_DEBUG);
 }
Example #23
        public StreamingSampleSource(DetectorConfig config, BufferedWaveProvider source, bool forceMono = true)
        {
            sourceBuffer = source;

            var sampleProvider = source.ToSampleProvider();

            if (forceMono)
            {
                sampleProvider = sampleProvider.AsMono();
            }

            samples = sampleProvider.DownsampleTo(config.MaxSampleRate);
        }
Example #24
        public MixerChannel(IEventAggregator events)
        {
            _events = events ?? throw new ArgumentNullException(nameof(events));

            _buffer  = new BufferedWaveProvider(WaveFormat.CreateIeeeFloatWaveFormat(WaveFormat.SampleRate, 1));
            _volume  = new VolumeSampleProvider(_buffer.ToSampleProvider());
            _balance = new PanningSampleProvider(_volume)
            {
                Pan = 0f
            };

            _events.Subscribe(this);
        }
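The snippet only builds the chain buffer → volume → pan; a sketch of how the channel might expose it, using hypothetical members, is:

// Hypothetical accessors: feed Output into a MixingSampleProvider and push raw audio in.
public ISampleProvider Output => _balance;

public void AddSamples(byte[] data, int count)
{
    _buffer.AddSamples(data, 0, count);
}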
Example #25
 public void SetupLoop(byte[] buffer)
 {
     using (var ms = new MemoryStream(buffer))
     {
         using (var br = new BinaryReader(ms))
         {
             WaveFormat format = new WaveFormat(br);
             LoopBuffer = new BufferedWaveProvider(format);
             LoopVolume = new VolumeSampleProvider(LoopBuffer.ToSampleProvider());
             LoopOut.Init(LoopVolume);
             LoopOut.Play();
         }
     }
 }
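`new WaveFormat(br)` reads a serialized format header from the start of `buffer`; the sending side could produce that header with `WaveFormat.Serialize`, roughly like this sketch (`BuildLoopHeader` is a hypothetical name):

// Sketch of the sending side: serialize the WaveFormat so the receiver can parse it back.
static byte[] BuildLoopHeader(WaveFormat format)
{
    using (var ms = new MemoryStream())
    using (var bw = new BinaryWriter(ms))
    {
        format.Serialize(bw);   // writes the header that new WaveFormat(br) reads above
        bw.Flush();
        return ms.ToArray();
    }
}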
Example #26
        public static ISampleProvider GetSampleProvider(this WaveFormat waveFormat, byte[] data)
        {
            var waveProvider = new BufferedWaveProvider(waveFormat)
            {
                BufferLength = data.Length,
                ReadFully    = false
            };

            waveProvider.ClearBuffer();
            waveProvider.AddSamples(data, 0, data.Length);
            var sampleProvider = waveProvider.ToSampleProvider();

            return(sampleProvider);
        }
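A usage sketch for this extension method, assuming `pcm` holds raw 16-bit mono PCM matching the declared format:

// Sketch: wrap a raw PCM byte array in an ISampleProvider and play it to completion.
byte[] pcm = File.ReadAllBytes("capture.raw");   // hypothetical source of raw audio
var sampleProvider = new WaveFormat(44100, 16, 1).GetSampleProvider(pcm);

using (var waveOut = new WaveOutEvent())
{
    waveOut.Init(sampleProvider);
    waveOut.Play();
    while (waveOut.PlaybackState == PlaybackState.Playing)
    {
        Thread.Sleep(100);
    }
}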
Example #27
 static VoIP()
 {
     recorder = new MyWaveInEvent {
         WaveFormat = new WaveFormat(44100, 16, 2), DeviceNumber = 0
     };
     recorder.DataAvailable += RecorderOnDataAvailable;
     sender = new BufferedWaveProvider(recorder.WaveFormat);
     sender.DiscardOnBufferOverflow = true;
     equalizer = new MyFilter(sender.ToSampleProvider(), 60, 1200, -2.5f, -4f);
     outBuffer = new float[sender.BufferedBytes];
     receiver  = new MyBufferedSampleProvider(recorder.WaveFormat);
     //receiver.DiscardOnBufferOverflow = true;
     player = new WaveOutEvent();
     player.Init(receiver);
 }
Example #28
    static public void InitialiseMic()
    {
        WaveInEvent wi = new WaveInEvent
        {
            WaveFormat = new WaveFormat(44100, 1)
        };

        wi.DataAvailable += new EventHandler <WaveInEventArgs>(wi_DataAvailable);

        bwp = new BufferedWaveProvider(wi.WaveFormat)
        {
            DiscardOnBufferOverflow = true
        };
        input = bwp.ToSampleProvider();
        wi.StartRecording();
    }
Example #29
        public MixedAudioProvider(IEnumerable <NAudioProvider> audioProviders)
        {
            foreach (var provider in audioProviders)
            {
                var bufferedProvider = new BufferedWaveProvider(provider.NAudioWaveFormat);

                provider.DataAvailable += (sender, e) =>
                {
                    bufferedProvider.AddSamples(e.Buffer, 0, e.Length);
                };

                var sampleProvider = bufferedProvider.ToSampleProvider();

                var providerWf = provider.WaveFormat;

                // Mono to Stereo
                if (providerWf.Channels == 1)
                {
                    sampleProvider = sampleProvider.ToStereo();
                }

                // Resample
                if (providerWf.SampleRate != WaveFormat.SampleRate)
                {
                    sampleProvider = new WdlResamplingSampleProvider(sampleProvider, WaveFormat.SampleRate);
                }

                _audioProviders.Add(provider, sampleProvider);
            }

            var mixingSampleProvider = new MixingSampleProvider(_audioProviders.Values);

            // Screna expects 44.1 kHz 16-bit Stereo
            _mixingWaveProvider = mixingSampleProvider.ToWaveProvider16();

            var bufferSize = (int)
                             (
                (ReadInterval / 1000.0)
                * WaveFormat.SampleRate
                * WaveFormat.Channels
                * (WaveFormat.BitsPerSample / 8.0)
                             );

            _buffer = new byte[bufferSize];

            Task.Factory.StartNew(Loop, TaskCreationOptions.LongRunning);
        }
Example #30
        protected override void _Start()
        {
            m_OutputDevice = m_OutputDeviceFactory();
            m_OutputBuffer = new BufferedWaveProvider(m_SampleFormat.ToWaveFormat());
            m_OutputBuffer.BufferLength = m_BufferFrameCount * m_SampleFormat.FrameSize * 2;
            m_OutputBuffer.ReadFully    = true; // keeps the audio device playing silence while we're not sending any data

            m_VolumeSampleProvider = new VolumeSampleProvider(m_OutputBuffer.ToSampleProvider());
            _OnSettingsUpdated();

            m_OutputDevice.Init(m_VolumeSampleProvider);
            m_OutputDevice.Play();

            m_AudioThread          = new Thread(_PlayLoop);
            m_AudioThread.Name     = "Audio player thread";
            m_AudioThread.Priority = ThreadPriority.Highest;
            m_AudioThread.Start();
        }