Example #1
File: Form1.cs Project: VirusFree/Opus.NET
        void StartEncoding()
        {
            _startTime = DateTime.Now;
            _bytesSent = 0;
            _segmentFrames = 960;
            _encoder = new OpusEncoder(48000, 1, OpusNet.OpusApplication.Voip);
            _encoder.Bitrate = 8192;
            _decoder = new OpusDecoder(48000, 1);
            _bytesPerSegment = _encoder.FrameByteCount(_segmentFrames);

            _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
            _waveIn.BufferMilliseconds = 50;
            _waveIn.DeviceNumber = comboBox1.SelectedIndex;
            _waveIn.DataAvailable += _waveIn_DataAvailable;
            _waveIn.WaveFormat = new WaveFormat(48000, 16, 1);

            _playBuffer = new BufferedWaveProvider(new WaveFormat(48000, 16, 1));

            _waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback());
            _waveOut.DeviceNumber = comboBox2.SelectedIndex;
            _waveOut.Init(_playBuffer);

            _waveOut.Play();
            _waveIn.StartRecording();

            if (_timer == null)
            {
                _timer = new Timer();
                _timer.Interval = 1000;
                _timer.Tick += _timer_Tick;
            }
            _timer.Start();
        }
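
The `_waveIn_DataAvailable` handler wired up above is not part of the excerpt. A minimal sketch of the usual NAudio pattern, assuming a plain loopback (the `_encoder`/`_decoder` fields suggest the real handler round-trips each segment through Opus first):

        void _waveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Hypothetical loopback: push captured PCM straight into the playback buffer.
            // The original presumably encodes/decodes _bytesPerSegment-sized chunks here first.
            _playBuffer.AddSamples(e.Buffer, 0, e.BytesRecorded);
            _bytesSent += e.BytesRecorded;
        }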
Example #2
 /// <summary>
 /// Create an input
 /// </summary>
 /// <param name="ID">The ID of the input to be created</param>
 public Input(string ID)
 {
     // Set the device ID
     deviceID = ID;
     // Get Device from specified ID
     MMDeviceEnumerator devices = new MMDeviceEnumerator();
     device = devices.GetDevice(ID);
     // Set wave in to WASAPI capture of the specified device
     waveIn = new WasapiCapture(device);
     // Set the number of bytes used by each sample
     sampleByteSize = waveIn.WaveFormat.BitsPerSample / 8;
     // Add event handler to retrieve samples from the device
     waveIn.DataAvailable += waveIn_DataAvailable;
     // Create buffered wave provider
     bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
     bufferedWaveProvider.DiscardOnBufferOverflow = true;
     // Create sample channel
     sampleChannel = new SampleChannel(bufferedWaveProvider);
     // Monitor stream volume via the pre-volume meter event
     sampleChannel.PreVolumeMeter += sampleProvider_StreamVolume;
     // Start recording
     try
     {
         waveIn.StartRecording();
     }
     catch (Exception ex)
     {
         throw new ArgumentException("This input device is not supported.", ex);
     }
 }
Example #3
        private void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
        {
            waveIn = new WaveIn();
            waveIn.BufferMilliseconds = 50;
            waveIn.DeviceNumber = inputDeviceNumber;
            waveIn.WaveFormat = codec.RecordFormat;
            waveIn.DataAvailable += waveIn_DataAvailable;
            waveIn.StartRecording();

            udpSender = new UdpClient();
            udpListener = new UdpClient();

            // To allow us to talk to ourselves for test purposes:
            // http://stackoverflow.com/questions/687868/sending-and-receiving-udp-packets-between-two-programs-on-the-same-computer
            udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
            udpListener.Client.Bind(endPoint);

            udpSender.Connect(endPoint);

            waveOut = new WaveOut();
            waveProvider = new BufferedWaveProvider(codec.RecordFormat);
            waveOut.Init(waveProvider);
            waveOut.Play();

            connected = true;
            var state = new ListenerThreadState { Codec = codec, EndPoint = endPoint };
            ThreadPool.QueueUserWorkItem(ListenerThread, state);
        }
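
Neither `waveIn_DataAvailable` nor `ListenerThread` is shown. A hedged sketch of both halves, assuming `INetworkChatCodec` exposes `Encode`/`Decode` over byte ranges as in NAudio's network-chat demo:

        private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Encode each captured buffer and send it as a single datagram.
            byte[] encoded = codec.Encode(e.Buffer, 0, e.BytesRecorded);
            udpSender.Send(encoded, encoded.Length);
        }

        private void ListenerThread(object state)
        {
            var listenerState = (ListenerThreadState)state;
            var endPoint = listenerState.EndPoint;
            while (connected)
            {
                // Receive, decode, and queue each datagram for playback.
                byte[] received = udpListener.Receive(ref endPoint);
                byte[] decoded = listenerState.Codec.Decode(received, 0, received.Length);
                waveProvider.AddSamples(decoded, 0, decoded.Length);
            }
        }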
Example #4
		public DirectSoundPlayer(INetworkChatCodec c)
			: base(c)
		{
			waveProvider = new BufferedWaveProvider(codec.RecordFormat);
			wavePlayer = new DirectSoundOut();
			wavePlayer.Init(waveProvider);
		}
Example #5
        private WaveFormat _waveFormat = new WaveFormat(8000, 16, 1); // The format both the input and output audio streams use: 8 kHz, 16-bit, mono PCM (encoded to/from PCMU on the wire).

        #endregion Fields

        #region Constructors

        public AudioChannel()
        {
            // Set up the device that will play the audio from the RTP received from the remote end of the call.
            m_waveOut = new WaveOut();
            m_waveProvider = new BufferedWaveProvider(_waveFormat);
            m_waveOut.Init(m_waveProvider);
            m_waveOut.Play();

            // Set up the input device that will provide audio samples that can be encoded, packaged into RTP and sent to
            // the remote end of the call.
            m_waveInEvent = new WaveInEvent();
            m_waveInEvent.BufferMilliseconds = 20;
            m_waveInEvent.NumberOfBuffers = 1;
            m_waveInEvent.DeviceNumber = 0;
            m_waveInEvent.DataAvailable += RTPChannelSampleAvailable;
            m_waveInEvent.WaveFormat = _waveFormat;

            // Create a UDP socket to use for sending and receiving RTP packets.
            int port = FreePort.FindNextAvailableUDPPort(DEFAULT_START_RTP_PORT);
            _rtpEndPoint = new IPEndPoint(_defaultLocalAddress, port);
            m_rtpChannel = new RTPChannel(_rtpEndPoint);
            m_rtpChannel.OnFrameReady += RTPChannelSampleReceived;

            _audioLogger.Debug("RTP channel endpoint " + _rtpEndPoint.ToString());
        }
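
`RTPChannelSampleReceived` is not shown; presumably it decodes each PCMU payload and appends the PCM to `m_waveProvider`. A sketch using NAudio's built-in mu-law decoder (the delegate signature is an assumption):

        private void RTPChannelSampleReceived(byte[] rtpPayload)
        {
            // Expand each 8-bit mu-law sample to 16-bit linear PCM, then buffer it for playback.
            var pcm = new byte[rtpPayload.Length * 2];
            for (int i = 0; i < rtpPayload.Length; i++)
            {
                short sample = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(rtpPayload[i]);
                pcm[2 * i] = (byte)(sample & 0xFF);
                pcm[2 * i + 1] = (byte)(sample >> 8);
            }
            m_waveProvider.AddSamples(pcm, 0, pcm.Length);
        }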
Example #6
        public void Initialise(WaveFormat format, WaveOut driver)
        {
            if (driver == null)
            {
                throw new ArgumentNullException("driver", "Must specify a WaveIn device instance");
            }

            if (format == null)
            {
                throw new ArgumentNullException("format", "Must specify an audio format");
            }

            var caps = WaveOut.GetCapabilities(driver.DeviceNumber);

            device = new WaveOutDeviceData
            {
                Driver = driver,
                Name = caps.ProductName,
                Channels = caps.Channels,
                Buffers = new float[caps.Channels][]
            };

            Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, caps.Channels);
            OutputBuffer = new BufferedWaveProvider(Format);
            OutputBuffer.DiscardOnBufferOverflow = true;

            driver.Init(OutputBuffer);

            mapOutputs();
        }
Example #7
File: Form1.cs Project: odinhaus/Saltable
        private void Form1_Load(object sender, EventArgs e)
        {
            byte[] apk, ask, bpk, bsk;
            NaClClient.CreateKeys(out apk, out ask);
            NaClClient.CreateKeys(out bpk, out bsk);

            var hasher = System.Security.Cryptography.SHA256.Create();

            _clientA = NaClClient.Create(apk, ask, bpk);
            _clientB = NaClClient.Create(bpk, bsk, apk);

            _sw = new Stopwatch();
            _sw.Start();

            _wave = new WaveIn(this.Handle);
            _wave.WaveFormat = new WaveFormat(12000, 8, 1);
            _wave.BufferMilliseconds = 100;
            _wave.DataAvailable += _wave_DataAvailable;
            _wave.StartRecording();

            _playback = new BufferedWaveProvider(_wave.WaveFormat);

            _waveOut = new WaveOut();
            _waveOut.DesiredLatency = 100;
            _waveOut.Init(_playback);
            _waveOut.Play();
        }
Example #8
File: ToxCall.cs Project: kstaruch/Toxy
        public void Start()
        {
            if (WaveIn.DeviceCount < 1)
                throw new Exception("Insufficient input device(s)!");

            if (WaveOut.DeviceCount < 1)
                throw new Exception("Insufficient output device(s)!");

            frame_size = toxav.CodecSettings.audio_sample_rate * toxav.CodecSettings.audio_frame_duration / 1000;

            toxav.PrepareTransmission(CallIndex, false);

            WaveFormat format = new WaveFormat((int)toxav.CodecSettings.audio_sample_rate, (int)toxav.CodecSettings.audio_channels);
            wave_provider = new BufferedWaveProvider(format);
            wave_provider.DiscardOnBufferOverflow = true;

            wave_out = new WaveOut();
            //wave_out.DeviceNumber = config["device_output"];
            wave_out.Init(wave_provider);

            wave_source = new WaveIn();
            //wave_source.DeviceNumber = config["device_input"];
            wave_source.WaveFormat = format;
            wave_source.DataAvailable += wave_source_DataAvailable;
            wave_source.RecordingStopped += wave_source_RecordingStopped;
            wave_source.BufferMilliseconds = (int)toxav.CodecSettings.audio_frame_duration;
            wave_source.StartRecording();

            wave_out.Play();
        }
Example #9
 public AudioPlayer(DiscordVoiceConfig __config)
 {
     config = __config;
     callbackInfo = WaveCallbackInfo.FunctionCallback();
     outputDevice = new WaveOut(callbackInfo);
     bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(48000, 16, config.Channels));
 }
Example #10
        private void PlayAudioFromConnection(TcpClient client)
        {
            var inputStream = new BufferedStream(client.GetStream());

            var bufferedWaveProvider = new BufferedWaveProvider(waveFormat);
            var savingWaveProvider = new SavingWaveProvider(bufferedWaveProvider, "temp.wav");

            var player = new WaveOut();
            player.Init(savingWaveProvider);
            player.Play();

            while (client.Connected)
            {
                if (terminate)
                {
                    client.Close();
                    break;
                }

                var available = client.Available;
                if (available > 0)
                {
                    var buffer = new byte[available];
                    var bytes = inputStream.Read(buffer, 0, buffer.Length);
                    bufferedWaveProvider.AddSamples(buffer, 0, bytes);
                    Console.WriteLine("{0} \t {1} bytes", client.Client.RemoteEndPoint, bytes);
                }
            }

            player.Stop();
            savingWaveProvider.Dispose();
        }
Example #11
 public void EmptyBufferCanReturnZeroFromRead()
 {
     var bwp = new BufferedWaveProvider(new WaveFormat());
     bwp.ReadFully = false;
     var buffer = new byte[44100];
     var read = bwp.Read(buffer, 0, buffer.Length);
     Assert.AreEqual(0, read);
 }
Example #12
 public AudioRecorder(int microphone)
 {
     waveIn = new WaveIn();
     waveIn.DeviceNumber = microphone;
     waveIn.WaveFormat = new WaveFormat(44100, 1);
     bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
     writer = new WaveFileWriter(Settings.Default.tempSoundLocation, waveIn.WaveFormat);
 }
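
The constructor subscribes nothing to `DataAvailable`, so the wiring presumably happens elsewhere. A hypothetical handler this class would need, mirroring captured audio into both the buffer and the WAV file:

     private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
     {
         // Queue the samples for live consumption and persist them to the temp file.
         bufferedWaveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
         writer.Write(e.Buffer, 0, e.BytesRecorded);
     }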
Example #13
 public override void Reset()
 {
     if (bufferedWaveProvider != null)
     {
         bufferedWaveProvider.ClearBuffer();
         bufferedWaveProvider = null;
     }
 }
Example #14
 public HaltableBufferedWaveProvider(WaveFormat waveFormat)
 {
     _bufferedWaveProvider = new BufferedWaveProvider(waveFormat)
                             {
                                 DiscardOnBufferOverflow = true,
                                 BufferDuration = new TimeSpan(0, 10, 0)
                             };
 }
Example #15
File: Speaker.cs Project: yodiwo/plegma
        //------------------------------------------------------------------------------------------------------------------------
        #endregion

        #region Constructor
        //------------------------------------------------------------------------------------------------------------------------
        public Speaker()
        {
            waveout = new WaveOut();
            bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(8000, 16, 2));
            waveout.PlaybackStopped += Waveout_PlaybackStopped;
            volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider);
            waveout.Init(volumeProvider);
        }
Example #16
 public void WhenBufferHasMoreThanNeededReadFully()
 {
     var bwp = new BufferedWaveProvider(new WaveFormat());
     var buffer = new byte[44100];
     bwp.AddSamples(buffer, 0, 5000);
     var read = bwp.Read(buffer, 0, 2000);
     Assert.AreEqual(2000, read);
     Assert.AreEqual(3000, bwp.BufferedBytes);
 }
Example #17
 public void FullReadsByDefault()
 {
     var bwp = new BufferedWaveProvider(new WaveFormat());
     var buffer = new byte[44100];
     bwp.AddSamples(buffer, 0, 2000);
     var read = bwp.Read(buffer, 0, buffer.Length);
     Assert.AreEqual(buffer.Length, read);
     Assert.AreEqual(0, bwp.BufferedBytes);
 }
Example #18
        /// <summary>
        /// Initializes the audio render provider.
        /// </summary>
        /// <param name="renderArgs">The audio render initialization arguments.</param>
        public override void Initialize(AudioRenderInitializeArgs renderArgs)
        {
            WaveProvider = new BufferedWaveProvider(new WaveFormat(ClockRate, 16, Channels));
            WaveProvider.DiscardOnBufferOverflow = true;
            WaveProvider.BufferDuration = new TimeSpan(0, 0, 0, 0, 250);

            InitializeWasapi();

            Log.Info("Audio render initialized using WASAPI.");
        }
Example #19
 public void PartialReadsPossibleWithReadFullyFalse()
 {
     var bwp = new BufferedWaveProvider(new WaveFormat());
     bwp.ReadFully = false;
     var buffer = new byte[44100];
     bwp.AddSamples(buffer, 0, 2000);
     var read = bwp.Read(buffer, 0, buffer.Length);
     Assert.AreEqual(2000, read);
     Assert.AreEqual(0, bwp.BufferedBytes);
 }
Example #20
        public void Dispose()
        {
            if (_waveOut != null)
            {
                _waveOut.Dispose();
                _waveOut = null;
            }

            _waveProvider = null;
        }
Example #21
        private void PlaybackManager_OnBufferStateChange(NAudio.Wave.BufferedWaveProvider sender, double totalSeconds)
        {
            this.Dispatcher.Invoke(() =>  // the event is raised off the GUI thread
            {
                TrackProgressBar.Maximum = TrackManager.CurrentTrack.DurationMillis / 1000;
                TrackProgressBar.Value   = PlaybackManager.TrackTimeSpan.TotalSeconds;

                TrackBufferProgressBar.Maximum = TrackManager.CurrentTrack.DurationMillis / 1000;
                TrackBufferProgressBar.Value   = PlaybackManager.TrackTimeSpan.TotalSeconds + totalSeconds;

                Debug.WriteLine(PlaybackManager.TrackTimeSpan);
            });
        }
Example #22
 public void BufferedBytesAreReturned()
 {
     var bytesToBuffer = 1000;
     var bwp = new BufferedWaveProvider(new WaveFormat(44100, 16, 2));
     var data = Enumerable.Range(1, bytesToBuffer).Select(n => (byte)(n % 256)).ToArray();
     bwp.AddSamples(data, 0, data.Length);
     Assert.AreEqual(bytesToBuffer, bwp.BufferedBytes);
     var readBuffer = new byte[bytesToBuffer];
     var bytesRead = bwp.Read(readBuffer, 0, bytesToBuffer);
     Assert.AreEqual(bytesToBuffer, bytesRead);
     Assert.AreEqual(data, readBuffer);
     Assert.AreEqual(0, bwp.BufferedBytes);
 }
Example #23
        public void Init()
        {
            waveIn = new WaveInEvent();
            waveIn.BufferMilliseconds = 100;
            waveIn.DeviceNumber = -1;
            waveIn.WaveFormat = new WaveFormat(8000, 1);
            waveIn.DataAvailable += WaveIn_DataAvailable;

            waveOut = new WaveOut();
            waveOutProvider = new BufferedWaveProvider(waveIn.WaveFormat);
            waveOut.Init(waveOutProvider);
            waveOut.Play();
        }
Example #24
        public static void Init()
        {
            provider = new BufferedWaveProvider(new WaveFormat(44100, 16, 1)); // DO NOT TOUCH!
            provider.DiscardOnBufferOverflow = true;

            decoder = new WideBandSpeexCodec();

            waveOut = new WaveOut();
            //waveOut.DesiredLatency = 20;
            waveOut.NumberOfBuffers = 2;
            waveOut.Init(provider);

            waveOut.Play();
        }
Example #25
 private void buttonPlay_Click(object sender, EventArgs e)
 {
     if (playbackState == StreamingPlaybackState.Stopped)
     {
         playbackState = StreamingPlaybackState.Buffering;
         this.bufferedWaveProvider = null;
         ThreadPool.QueueUserWorkItem(new WaitCallback(StreamMP3), textBoxStreamingUrl.Text);
         timer1.Enabled = true;
     }
     else if (playbackState == StreamingPlaybackState.Paused)
     {
         playbackState = StreamingPlaybackState.Buffering;
     }
 }
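
The buffering logic lives in `timer1_Tick` and the `StreamMP3` worker, neither of which is shown. A rough sketch of the tick handler from the same NAudio streaming pattern, assuming a `waveOut` field created by `StreamMP3`; the 4-second and 0.5-second thresholds are chosen for illustration:

     private void timer1_Tick(object sender, EventArgs e)
     {
         if (bufferedWaveProvider == null) return;
         var buffered = bufferedWaveProvider.BufferedDuration.TotalSeconds;
         if (playbackState == StreamingPlaybackState.Buffering && buffered > 4)
         {
             waveOut.Play();   // enough audio queued; (re)start playback
             playbackState = StreamingPlaybackState.Playing;
         }
         else if (playbackState == StreamingPlaybackState.Playing && buffered < 0.5)
         {
             waveOut.Pause();  // buffer nearly dry; pause until it refills
             playbackState = StreamingPlaybackState.Buffering;
         }
     }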
Example #26
        private void OnStartRecordingClick(object sender, RoutedEventArgs e)
        {
            recorder = new WaveIn();
            recorder.DataAvailable += RecorderOnDataAvailable;

            bufferedWaveProvider = new BufferedWaveProvider(recorder.WaveFormat);
            savingWaveProvider = new SavingWaveProvider(bufferedWaveProvider, "temp.wav");

            player = new WaveOut();
            player.Init(savingWaveProvider);

            player.Play();
            recorder.StartRecording();
        }
Example #27
        public void StartPlayBack()
        {
            if (_player != null)
                throw new InvalidOperationException("Can't begin playback when already playing");
            if (_recorder == null)
                throw new InvalidOperationException("Can't begin playback when not recording");

            _player = new WaveOut();
            _bufferedWaveProvider = new BufferedWaveProvider(_recorder.WaveFormat);

            _player.Init(_bufferedWaveProvider);
            _player.Play();
            IsPlaying = true;
        }
Example #28
File: Monitor.cs Project: Jiyuu/SkypeFX
        public override void Init()
        {
            NAudio.CoreAudioApi.MMDeviceEnumerator a = new NAudio.CoreAudioApi.MMDeviceEnumerator();
            var dev = a.GetDefaultAudioEndpoint(NAudio.CoreAudioApi.DataFlow.Render, NAudio.CoreAudioApi.Role.Multimedia);
            o = new WasapiOut(dev, NAudio.CoreAudioApi.AudioClientShareMode.Shared, false, 50);

            //o = new WaveOut();
            bwp = new BufferedWaveProvider(WaveFormat.CreateIeeeFloatWaveFormat(16000, 1));
            bwp.BufferDuration = TimeSpan.FromMilliseconds(2000);
            bwp.DiscardOnBufferOverflow = true;
            o.Init(bwp);
            o.Play();
            //delaypos = 0;
        }
Example #29
 public ConversationPage(MainWindow mainWindow)
 {
     InitializeComponent();
     this.mainWindow = mainWindow;
     PartyListViewSource = ((System.Windows.Data.CollectionViewSource)(this.FindResource("conversationDCViewSource")));
     waveOut = new WaveOut();
     waveIn = new WaveIn();
     waveProvider = new BufferedWaveProvider(waveIn.WaveFormat); // reuse the capture device's format instead of allocating a throwaway WaveIn
     waveOut.Init(waveProvider);
     waveOut.Play();
     int inputDeviceNumber = WaveInEvent.DeviceCount - 1;
     waveIn.DeviceNumber = inputDeviceNumber;
     waveIn.BufferMilliseconds = 10;
     waveIn.DataAvailable += new EventHandler<WaveInEventArgs>(waveIn_DataAvailable);
     // NB: nothing reaches waveProvider until StartRecording() is called on waveIn (presumably elsewhere).
 }
Example #30
        public int EnqueueSamples(int channels, int rate, byte[] samples, int frames)
        {
            if (_buffer == null)
            {
                _buffer = new BufferedWaveProvider(new WaveFormat(rate, channels));
                CreateOutput();
            }

            int space = _buffer.BufferLength - _buffer.BufferedBytes;
            if (space > samples.Length)
            {
                _buffer.AddSamples(samples, 0, samples.Length);
                return frames;
            }
            return 0;
        }
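
Since `EnqueueSamples` returns 0 rather than blocking when the buffer lacks room, a caller is expected to retry. A hypothetical usage loop:

        // Hypothetical caller: retry until playback drains enough space for the frame.
        while (EnqueueSamples(channels, rate, samples, frames) == 0)
        {
            System.Threading.Thread.Sleep(10);
        }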
Example #31
File: Form1.cs Project: sbst/code
        System.Windows.Forms.Timer timerStat; // statistics timer

        #endregion Fields

        #region Constructors

        // default constructor
        public Form1()
        {
            InitializeComponent();  // initialize the UI
            comboBoxBaud.SelectedIndex = 0; // default combobox values
            comboBoxFlow.SelectedIndex = 0;
            disconnectToolStripMenuItem.Enabled = false;    // toggle the UI state
            stopToolStripMenuItem.Enabled = false;
            startToolStripMenuItem.Enabled = false;
            ZigUsb = new SerialPort();  // initialize the serial port object
            voiceCodec = new VoiceOverZigbee.Codec.ALawChatCodec(); // initialize the codec object
            output = new DirectSoundOut();  // initialize the voice playback object
            bufferStream = new BufferedWaveProvider(voiceCodec.RecordFormat);   // the playback buffer uses the codec's recording format
            output.Init(bufferStream);  // attach the buffer to the playback object
            listenThread = new ListeningThread(new ThreadStart(Listening)); // initialize the thread with a delegate for the function to run
            connected = false;  // data-transfer flag
            flagThread = false; // thread flag
        }
Example #32
        // Initializes the name, location, and writer for the wave
        // file to which the input stream will be recorded
        private void initRecord(bool stream)
        {
            //Console.WriteLine(inputName.Text);
            wavSaveName = inputName.Text + ".wav";
            int deviceNumber;

            if (inputList.SelectedItem == null)
            {
                return;
            }

            deviceNumber = inputList.SelectedIndex;

            sourceStream = new NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = deviceNumber;
            int rate, bits = 16, chn = 1;

            if (inputFormat.SelectedIndex == 0)
            {
                rate = 11025;
            }
            else
            {
                rate = 16000;
            }
            sourceStream.WaveFormat         = new NAudio.Wave.WaveFormat(rate, bits, chn);
            sourceStream.BufferMilliseconds = 50;
            if (!stream)
            {
                sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(OnDataAvailable);
                waveWriter = new NAudio.Wave.WaveFileWriter(wavSaveName, sourceStream.WaveFormat);
            }
            else
            {
                algo.originalWavSamples     = new double[800];
                sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(StreamDataAvailable);
                waveBuffer = new NAudio.Wave.BufferedWaveProvider(sourceStream.WaveFormat);
                samples    = waveBuffer.ToSampleProvider();
            }
        }
Example #33
        public unsafe void asioThread()
        {
            if (mSettingsMgr.Settings.MidiInDeviceNumbers == null)
            {
                mSettingsMgr.Settings.MidiInDeviceNumbers = new List<int>();
            }
            for (int i = 0; i < mSettingsMgr.Settings.MidiInDeviceNumbers.Count(); i++)
            {
                mMidiDevice.OpenInPort(mSettingsMgr.Settings.MidiInDeviceNumbers[i]);
            }

            try
            {
#if NAUDIO_ASIO
                mAsio = new NAudio.Wave.AsioOut(mSettingsMgr.Settings.AsioDeviceNr);
#else
                mAsio = AsioDriver.SelectDriver(AsioDriver.InstalledDrivers[mSettingsMgr.Settings.AsioDeviceNr]);
#endif
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }

            if (mAsio != null)
            {
#if NAUDIO_ASIO
                if (mAsio != null)
                {
                    mWaveProvider = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat(44100, 16, 2));
                    mAsio.InitRecordAndPlayback(mWaveProvider, 2, 44100);
                    mAsio.AudioAvailable += mAsio_AudioAvailable;
                    mAsioBuffSize         = mSettingsMgr.Settings.AsioBufferSize;
                }
#else
                int p   = mAsio.BufferSizex.PreferredSize;
                int max = mAsio.BufferSizex.MaxSize;
                int min = mAsio.BufferSizex.MinSize;

                if (mSettingsMgr.Settings.AsioBufferSize < min)
                {
                    mSettingsMgr.Settings.AsioBufferSize = min;
                    mSettingsMgr.SaveSettings();
                }

                if (mSettingsMgr.Settings.AsioBufferSize > max)
                {
                    mSettingsMgr.Settings.AsioBufferSize = max;
                    mSettingsMgr.SaveSettings();
                }
                mAsioBuffSize = mSettingsMgr.Settings.AsioBufferSize;

                // get our driver wrapper to create its buffers
                mAsio.CreateBuffers(mAsioBuffSize);
                // this is our buffer fill event we need to respond to
                mAsio.BufferUpdate += new EventHandler(asio_BufferUpdateHandler);
                mAsioOutputLeft     = mAsio.OutputChannels[0];  // Use only 1x stereo out
                mAsioOutputRight    = mAsio.OutputChannels[1];

                mAsioInputBuffers = new AudioBufferInfo(2, mAsioBuffSize);
                mAsioInputLeft    = mAsio.InputChannels[0];     // Use only 1x stereo in
                mAsioInputRight   = mAsio.InputChannels[1];
#endif
                // todo: test
                //mMixLeft = new float[mAsioBuffSize];
                //mMixRight = new float[mAsioBuffSize];

                // and off we go

                stopWatchTicksForOneAsioBuffer = (long)(Stopwatch.Frequency / (mAsio.SampleRate / mAsioBuffSize));
#if NAUDIO_ASIO
                mAsioLeftInt32LSBBuff  = new byte[mAsioBuffSize * 4];
                mAsioRightInt32LSBBuff = new byte[mAsioBuffSize * 4];
                mAsio.Play();
#else
                mAsio.Start();
#endif
                mAsioStartEvent.Set();

                // Keep running until a stop is requested
                mAsioStopEvent.Wait();
                StopAsio();
            }
            else
            {
                mIsAsioRunning = false;
                mAsioStartEvent.Set();
            }
        }
Example #34
 /// <summary>
 /// Creates a new WaveInProvider
 /// N.B. the WaveFormat should be set correctly on the IWaveIn before calling
 /// </summary>
 /// <param name="waveIn">The source of wave data</param>
 public WaveInProvider(IWaveIn waveIn)
 {
     this.waveIn           = waveIn;
     waveIn.DataAvailable += waveIn_DataAvailable;
     bufferedWaveProvider  = new BufferedWaveProvider(this.WaveFormat);
 }
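
The rest of the class is omitted. In NAudio's own WaveInProvider the remaining members simply delegate to the internal buffer, roughly:

 private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
 {
     // Every captured block is queued for whoever reads from this provider.
     bufferedWaveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
 }

 public int Read(byte[] buffer, int offset, int count)
 {
     // Reads are served straight from the buffered provider.
     return bufferedWaveProvider.Read(buffer, offset, count);
 }

 public WaveFormat WaveFormat
 {
     get { return waveIn.WaveFormat; }
 }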