Example #1
        public Recorder(int deviceIndex, int sampleRate, int channelCount, int bufferLength)
        {
            this.bufferLength = bufferLength;

            recorderDevice = new WaveIn
            {
                DeviceNumber = deviceIndex,
                WaveFormat   = new WaveFormat(sampleRate, channelCount)
            };

            recorderDevice.DataAvailable += Recorder_DataAvailable;

            bufferProvider = new BufferedWaveProvider(recorderDevice.WaveFormat)
            {
                BufferLength            = bufferLength * 2,
                DiscardOnBufferOverflow = true
            };
        }
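
The Recorder_DataAvailable handler is not shown above; a minimal sketch of what it would typically do, assuming the fields from the constructor (the body itself is an assumption, not the original code):

        // Hypothetical handler body (assumed, not from the original source):
        // copy each captured block into bufferProvider so whatever reads from
        // it stays at most bufferLength bytes behind the device.
        private void Recorder_DataAvailable(object sender, WaveInEventArgs e)
        {
            bufferProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
        }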
Example #2
        public void StopMirroring()
        {
            if (!IsMirroring)
            {
                Debug.WriteLine("\nNot mirroring\n");
                return;
            }

            player.Stop();

            player.Dispose();
            player = null;

            bufferedWaveProvider = null;

            Debug.WriteLine("\nStopped mirroring audio\n");
            IsMirroring = false;
        }
Example #3
        public AudioPlayer()
        {
            WaveFormat format = new WaveFormat(Globals.SAMPLE_RATE, 2);

            provider = new BufferedWaveProvider(format)
            {
                BufferDuration = TimeSpan.FromSeconds(10)
            };

            volumeChannel = new SampleChannel(provider)
            {
                Volume = Settings.Get<float>(Setting.Volume)
            };

            bool success = Guid.TryParse(Settings.Get<string>(Setting.OutputDeviceGuid), out Guid deviceGuid);

            SetAudioOutputDevice(success ? deviceGuid : DirectSoundOut.DSDEVID_DefaultPlayback);
        }
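
SetAudioOutputDevice is not part of the snippet; a minimal sketch of what it might look like, assuming a DirectSoundOut field named output (both the field and the body are assumptions):

        // Hypothetical implementation (assumed): open the requested
        // DirectSound device and start playing from the volume channel.
        private void SetAudioOutputDevice(Guid deviceGuid)
        {
            output?.Dispose();
            output = new DirectSoundOut(deviceGuid);
            output.Init(volumeChannel);
            output.Play();
        }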
Example #4
        public void StartMirroring()
        {
            if (IsMirroring)
            {
                Debug.WriteLine($"\nAlready mirroring to {OutputDevice.FriendlyName}\n");
                return;
            }
            bufferedWaveProvider = new BufferedWaveProvider(capture.WaveFormat);

            // create the buffer before subscribing, so a capture callback can never see a null provider
            capture.DataAvailable += (s, a) => bufferedWaveProvider.AddSamples(a.Buffer, 0, a.BytesRecorded);

            player = new WasapiOut(OutputDevice, AudioClientShareMode.Shared, false, 0);
            player.Init(bufferedWaveProvider);
            player.Play();

            Debug.WriteLine($"\nStarted mirroring to {OutputDevice.FriendlyName}\n");
            IsMirroring = true;
        }
Example #5
        protected override void _Start()
        {
            m_OutputDevice = m_OutputDeviceFactory();
            m_OutputBuffer = new BufferedWaveProvider(m_SampleFormat.ToWaveFormat());
            m_OutputBuffer.BufferLength = m_BufferFrameCount * m_SampleFormat.FrameSize * 2;
            m_OutputBuffer.ReadFully    = true; // keeps the audio device playing silence while we're not sending any data

            m_VolumeSampleProvider = new VolumeSampleProvider(m_OutputBuffer.ToSampleProvider());
            _OnSettingsUpdated();

            m_OutputDevice.Init(m_VolumeSampleProvider);
            m_OutputDevice.Play();

            m_AudioThread          = new Thread(_PlayLoop);
            m_AudioThread.Name     = "Audio player thread";
            m_AudioThread.Priority = ThreadPriority.Highest;
            m_AudioThread.Start();
        }
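
_PlayLoop is not included; a sketch of the shape such a loop usually takes, assuming a ConcurrentQueue<byte[]> field m_FrameQueue and an m_Running flag (all three names are assumptions):

        // Hypothetical play loop (assumed): drain queued PCM frames into the
        // buffered provider; ReadFully = true keeps the device fed with
        // silence whenever the queue runs dry.
        private void _PlayLoop()
        {
            while (m_Running)
            {
                if (m_FrameQueue.TryDequeue(out byte[] frame))
                    m_OutputBuffer.AddSamples(frame, 0, frame.Length);
                else
                    Thread.Sleep(1);
            }
        }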
Example #6
        static void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
        {
            udpListener = new UdpClient();
            // if running both ends on the same computer (otherwise comment out the next two lines):
            udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
            udpListener.Client.Bind(endPoint);

            waveOut      = new DirectSoundOut();
            waveProvider = new BufferedWaveProvider(codec.RecordFormat);
            waveOut.Init(waveProvider);
            waveOut.Play();

            var state = new ListenerThreadState {
                Codec = codec, EndPoint = endPoint
            };

            ThreadPool.QueueUserWorkItem(ListenerThread, state);
        }
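
ListenerThread is not shown here. In the NAudio network chat demo this snippet is based on, the thread receives UDP datagrams, decodes them with the codec, and queues the PCM into the provider (reconstructed from memory, so treat it as a sketch):

        // Sketch of the listener thread: receive, decode, buffer.
        private static void ListenerThread(object state)
        {
            var listenerState = (ListenerThreadState)state;
            var endPoint = listenerState.EndPoint;
            try
            {
                while (true)
                {
                    byte[] received = udpListener.Receive(ref endPoint);
                    byte[] decoded  = listenerState.Codec.Decode(received, 0, received.Length);
                    waveProvider.AddSamples(decoded, 0, decoded.Length);
                }
            }
            catch (SocketException)
            {
                // expected when the socket is closed on shutdown
            }
        }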
Example #7
        /// <summary>
        /// Wires up the active RTP session to the speaker.
        /// </summary>
        /// <param name="rtpSession">The active RTP session receiving the remote party's RTP packets.</param>
        /// <param name="audioOutProvider">The audio buffer for the default system audio output device.</param>
        private static void PlayRemoteMedia(RTPMediaSession rtpSession, BufferedWaveProvider audioOutProvider)
        {
            if (rtpSession == null)
            {
                return;
            }

            rtpSession.OnRtpPacketReceived += (rtpPacket) =>
            {
                var sample = rtpPacket.Payload;
                for (int index = 0; index < sample.Length; index++)
                {
                    short  pcm       = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(sample[index]);
                    byte[] pcmSample = new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) };
                    audioOutProvider.AddSamples(pcmSample, 0, 2);
                }
            };
        }
Example #8
        public EventDrivenResampler(WaveFormat input, WaveFormat output)
        {
            windowsN      = DetectWindowsN();
            this.input    = input;
            this.output   = output;
            buf           = new BufferedWaveProvider(input);
            buf.ReadFully = false;

            if (windowsN)
            {
                // Windows N editions ship without Media Foundation, so use
                // NAudio's fully managed WDL resampler instead of the DMO one
                mediaFoundationResampler = new WdlResamplingSampleProvider(buf.ToSampleProvider(), output.SampleRate);
                waveOut = mediaFoundationResampler.ToMono().ToWaveProvider16();
            }
            else
            {
                dmoResampler = new ResamplerDmoStream(buf, output);
            }
        }
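
A resample call that fits these fields might look like the following sketch (the method itself is an assumption; with ReadFully = false each Read returns only what the buffer can currently supply):

        // Hypothetical one-shot resample helper (assumed, not the original):
        // push input-format bytes in, pull converted bytes out.
        public byte[] ResampleBlock(byte[] inputBytes, int length)
        {
            buf.AddSamples(inputBytes, 0, length);

            var outBuffer = new byte[length * 4]; // generous scratch space
            int read = windowsN
                ? waveOut.Read(outBuffer, 0, outBuffer.Length)
                : dmoResampler.Read(outBuffer, 0, outBuffer.Length);

            Array.Resize(ref outBuffer, read);
            return outBuffer;
        }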
Example #9
        private int DecompressFrame(Mp3Frame frame, byte[] buffer)
        {
            // decode frame
            if (_decompressor == null)
            {
                WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate, frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
                _decompressor = new AcmMp3FrameDecompressor(waveFormat);

                _waveProvider = new BufferedWaveProvider(_decompressor.OutputFormat);
                _waveProvider.BufferDuration = TimeSpan.FromSeconds(5);

                _channels = _waveProvider.WaveFormat.Channels;

                _sampleProvider = _waveProvider.ToSampleProvider();
            }

            return _decompressor.DecompressFrame(frame, buffer, 0);
        }
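
A typical call site for DecompressFrame (a sketch; sourceStream is an assumed Stream positioned at MP3 data):

        // Hypothetical caller: decode one MP3 frame and buffer the PCM.
        var pcmBuffer = new byte[65536];
        Mp3Frame frame = Mp3Frame.LoadFromStream(sourceStream);
        if (frame != null)
        {
            int decompressed = DecompressFrame(frame, pcmBuffer);
            _waveProvider.AddSamples(pcmBuffer, 0, decompressed);
        }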
Example #10
        void waveIn_RecordingStopped(object sender, StoppedEventArgs e)
        {
            // stop playback
            if (waveOut != null)
            {
                waveOut.Stop();
            }

            // dispose of wave input
            if (waveIn != null)
            {
                waveIn.Dispose();
                waveIn = null;
            }

            // drop wave provider
            waveProvider = null;
        }
Example #11
 protected virtual void Dispose(bool disposing)
 {
     if (disposing)
     {
         lock (_syncLock)
         {
             if (_audioStream != null)
             {
                 _audioStream.Stop();
                 _provider.ClearBuffer();
                 _audioStream.Dispose();
                 _audioStream = null;
                 _provider    = null;
             }
             IsStarted = false;
         }
     }
 }
Example #12
        public AudioChannel()
        {
            // Set up the device that will play the audio from the RTP received from the remote end of the call.
            m_waveOut      = new WaveOut();
            m_waveProvider = new BufferedWaveProvider(_waveFormat);
            m_waveProvider.BufferLength = 100000;
            m_waveOut.Init(m_waveProvider);
            m_waveOut.Play();

            // Set up the input device that will provide audio samples that can be encoded, packaged into RTP and sent to
            // the remote end of the call.
            m_waveInEvent = new WaveInEvent();
            m_waveInEvent.BufferMilliseconds = 20;
            m_waveInEvent.NumberOfBuffers    = 1;
            m_waveInEvent.DeviceNumber       = 0;
            m_waveInEvent.DataAvailable     += AudioSampleAvailable;
            m_waveInEvent.WaveFormat         = _waveFormat;
        }
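
The AudioSampleAvailable handler is not shown, and _waveFormat isn't either; a sketch of the usual shape for a G.711 call path, assuming 16-bit PCM capture and a hypothetical OnAudioSampleReady event for handing the payload to the RTP sender:

        // Hypothetical capture handler (assumed): mu-law encode the 16-bit
        // PCM block and hand off the payload for packaging into RTP.
        private void AudioSampleAvailable(object sender, WaveInEventArgs e)
        {
            byte[] payload = new byte[e.BytesRecorded / 2];
            for (int i = 0; i < payload.Length; i++)
            {
                short pcm  = BitConverter.ToInt16(e.Buffer, i * 2);
                payload[i] = NAudio.Codecs.MuLawEncoder.LinearToMuLawSample(pcm);
            }
            OnAudioSampleReady?.Invoke(payload); // hypothetical event
        }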
Example #13
        public GAudioClient(IPEndPoint endPoint)
        {
            _audioClient            = new AudioClient(endPoint);
            _audioClient.OnReceive += _audioClient_OnReceive;

            _speexCodec = new WideBandSpeexCodec();

            _waveProvider = new BufferedWaveProvider(_speexCodec.RecordFormat);

            _waveOut = new WaveOut();
            _waveOut.Init(_waveProvider);

            _waveIn = new WaveIn();
            _waveIn.BufferMilliseconds = 50;
            _waveIn.DeviceNumber       = 0;
            _waveIn.WaveFormat         = _speexCodec.RecordFormat;
            _waveIn.DataAvailable     += OnAudioCaptured;
        }
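
Neither handler is included in the snippet; a sketch of both sides, assuming the codec exposes the Encode/Decode methods of the standard INetworkChatCodec interface, that _audioClient has a hypothetical Send method, and that the OnReceive signature is a plain byte array (a guess):

        // Hypothetical handlers (assumed): encode captured audio out,
        // decode received audio into the playback buffer.
        private void OnAudioCaptured(object sender, WaveInEventArgs e)
        {
            byte[] encoded = _speexCodec.Encode(e.Buffer, 0, e.BytesRecorded);
            _audioClient.Send(encoded); // hypothetical transport call
        }

        private void _audioClient_OnReceive(byte[] data)
        {
            byte[] decoded = _speexCodec.Decode(data, 0, data.Length);
            _waveProvider.AddSamples(decoded, 0, decoded.Length);
        }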
Example #14
        /// <summary>
        /// Plays RAW or WAV bytes.
        /// </summary>
        /// <param name="bytes">The raw audio bytes to play.</param>
        /// <param name="settings">The sample rate, bit depth and channel count describing the bytes.</param>
        /// <param name="cancellationToken">Token used to cancel the playback wait.</param>
        public static async Task PlayAsync(this byte[] bytes, AudioSettings settings, CancellationToken cancellationToken = default)
        {
            bytes    = bytes ?? throw new ArgumentNullException(nameof(bytes));
            settings = settings ?? throw new ArgumentNullException(nameof(settings));

            var provider = new BufferedWaveProvider(new WaveFormat(settings.Rate, settings.Bits, settings.Channels));

            using var output = new WaveOutEvent();
            output.Init(provider);
            output.Play();

            provider.AddSamples(bytes, 0, bytes.Length);

            var averageBytesPerSecond = settings.Rate * settings.Channels * (settings.Bits / 8);

            await Task.Delay(TimeSpan.FromSeconds((double)bytes.Length / averageBytesPerSecond), cancellationToken)
            .ConfigureAwait(false);
        }
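
A call might look like this (a sketch; it assumes AudioSettings exposes settable Rate, Bits, and Channels properties matching the reads above, and that rawPcmBytes holds raw PCM):

        // Hypothetical usage: play 16 kHz, 16-bit, mono raw PCM.
        var settings = new AudioSettings { Rate = 16000, Bits = 16, Channels = 1 };
        await rawPcmBytes.PlayAsync(settings);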
Example #15
        private void CleanUpAudio()
        {
            if (waveOut != null)
            {
                waveOut.Stop();
                waveOut.Dispose();
                waveOut = null;
            }

            if (decompressor != null)
            {
                decompressor.Dispose();
                decompressor = null;
            }

            bufferedWaveProvider = null;
            stream.Flush();
        }
Example #16
        public void Initialise(WaveFormat format, AsioOut driver)
        {
            if (driver == null)
            {
                throw new ArgumentNullException(nameof(driver), "Must specify an ASIO interface");
            }

            if (format == null)
            {
                throw new ArgumentNullException(nameof(format), "Must specify an audio format");
            }

            Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, driver.DriverOutputChannelCount);

            OutputBuffer = new BufferedWaveProvider(Format);

            mapOutputs(driver);
        }
Example #17
        public void StopEncoding()
        {
            if (_waveIn != null)
            {
                _waveIn.StopRecording();
                _waveIn.Dispose();
                _waveIn = null;
            }

            if (_waveOut != null)
            {
                _waveOut.Stop();
                _waveOut.Dispose();
                _waveOut = null;
            }

            if (_playBuffer != null)
            {
                _playBuffer.ClearBuffer();
                _playBuffer = null;
            }


            if (_encoder != null)
            {
                _encoder.Dispose();
                _encoder = null;
            }

            if (_decoder != null)
            {
                _decoder.Dispose();
                _decoder = null;
            }

            SpeakerMax = 0;
            MicMax     = 0;

        }
Example #18
        public bool SetRecordingSettings(int sampleRate, int channels)
        {
            if (this.audioRecorder != null)
            {
                if (this.IsRecording)
                {
                    this.audioRecorder.StopRecording();
                    this.IsRecording = false;
                }

                this.audioRecorder.Dispose();
                this.audioRecorder = null;
            }

            if (DeviceManager.Instance.RecordingDevice == null)
            {
                return false;
            }
            if (DeviceManager.Instance.RecordingDevice.Number >= WaveIn.DeviceCount)
            {
                return false;
            }

            var capabilities     = WaveIn.GetCapabilities(DeviceManager.Instance.RecordingDevice.Number);
            var waveSourceFormat = new WaveFormat(sampleRate, channels);

            this.audioRecordingProvider = new BufferedWaveProvider(waveSourceFormat);
            this.audioRecordingProvider.DiscardOnBufferOverflow = true;

            this.audioRecorder = new WaveInEvent();
            this.audioRecorder.BufferMilliseconds = 20;
            this.audioRecorder.WaveFormat         = waveSourceFormat;
            this.audioRecorder.DeviceNumber       = DeviceManager.Instance.RecordingDevice.Number;
            this.audioRecorder.DataAvailable     += this.OnAudioRecordingDataAvailable;

            this.audioRecordingMeter = new MeteringSampleProvider(this.audioRecordingProvider.ToSampleProvider());
            this.audioRecordingMeter.StreamVolume += this.OnAudioRecordingMeterStreamVolume;

            this.IsRecording = false;

            Logger.Log(LogLevel.Info, string.Format("Changed recording config to: sampling rate: {0}, channels: {1}", sampleRate, channels));

            return true;
        }
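
The registered OnAudioRecordingDataAvailable handler is not shown; a sketch of a body that would make the metering provider fire its StreamVolume event (the body is an assumption):

        // Hypothetical handler (assumed): buffer the block, then pull it
        // through the meter so StreamVolume reports input levels.
        private void OnAudioRecordingDataAvailable(object sender, WaveInEventArgs e)
        {
            this.audioRecordingProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);

            var scratch = new float[e.BytesRecorded / 2]; // 16-bit samples
            this.audioRecordingMeter.Read(scratch, 0, scratch.Length);
        }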
Example #19
        void RT()
        {
            // flag == true: wire up capture and playback; flag == false: tear
            // everything down, then restart via the recursive call below
            if (flag)
                Sound();
                recorder.DeviceNumber = comboBox1.SelectedIndex;

                recorder.WaveFormat = new WaveFormat(8000, 16, 1);

                recorder.DataAvailable += Voice_Input;

                player.Init(bufferedWaveProvider);
                recorder.StartRecording();
                flag = false;
                AudioMonitorInitialize(comboBox1.SelectedIndex);
            }
            else
            {
                if (player != null)
                {
                    player.Stop();
                    player.Dispose();
                    player = null;
                }
                if (recorder != null)
                {
                    recorder.StopRecording();

                    recorder.Dispose();

                    bufferedWaveProvider.ClearBuffer();
                    recorder = null;
                }
                bufferedWaveProvider = null;
                flag = true;
                Sound();
                if (wvin != null)
                {
                    wvin.StopRecording();
                    wvin = null;
                }
                RT();
            }
        }
Example #20
        private void initAudioRecorder()
        {
            if (audio != null)
            {
                audio.DataAvailable    -= Audio_DataAvailable;
                audio.RecordingStopped -= Audio_RecordingStopped;
                audio.Dispose();
            }
            if (blankplayer != null)
            {
                blankplayer.Dispose();
            }
            audio        = new WasapiLoopbackCapture(device);
            sourceFormat = audio.WaveFormat;
            if (sourceProvider == null)
            {
                sourceProvider           = new BufferedWaveProvider(sourceFormat);
                sourceProvider.ReadFully = false;
                wfto16prov         = new WaveFloatTo16Provider(sourceProvider);
                monovolumeprovider = new StereoToMonoProvider16(wfto16prov);
                formatconv         = new WaveFormatConversionProvider(new WaveFormat(24000, 16, 1), monovolumeprovider);
            }


            text_encoding.Text = sourceFormat.Encoding.ToString();
            //var client = device.AudioClient.AudioRenderClient;
            blankplayer = new WasapiOut(device, AudioClientShareMode.Shared, false, 0);

            silence = new SilenceProvider(sourceFormat).ToSampleProvider();

            AudioDevice_Text.ForeColor = Color.Black;

            try
            {
                blankplayer.Init(silence);
            }
            catch
            {
                AudioDevice_Text.ForeColor = Color.Red;
            }
            audio.DataAvailable    += Audio_DataAvailable;
            audio.RecordingStopped += Audio_RecordingStopped;
            AudioMonitor            = device.AudioMeterInformation;
        }
Example #21
        public Form1()
        {
            InitializeComponent();

            if (NAudio.Wave.WaveIn.DeviceCount < 1)
            {
                MessageBox.Show("No microphone! ... exiting");
                return;
            }

            //Mixer
            //Hook Up Audio Mic for sound peak detection
            audioRecorder.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated;

            for (int n = 0; n < WaveIn.DeviceCount; n++)
            {
                recordingDevices.Add(WaveIn.GetCapabilities(n).ProductName);
            }

            //Set up Google specific code
            //oneShotConfig = new RecognitionConfig();
            //oneShotConfig.Encoding = RecognitionConfig.Types.AudioEncoding.Linear16;
            //oneShotConfig.SampleRateHertz = 16000;
            //oneShotConfig.LanguageCode = "en";



            //Set up NAudio waveIn object and events
            waveIn.DeviceNumber = 0;
            waveIn.WaveFormat   = new NAudio.Wave.WaveFormat(16000, 1);
            //Need to catch this event to fill our audio buffer up
            waveIn.DataAvailable += WaveIn_DataAvailable;
            //The actual wave buffer we will send to Google for voice-to-text conversion
            waveBuffer = new BufferedWaveProvider(waveIn.WaveFormat);
            waveBuffer.DiscardOnBufferOverflow = true;

            //We are using a timer object to fire a one second record interval
            //this gets enabled and disabled based on when we get a peak detection from NAudio
            timer1.Enabled = false;
            //One second record window
            timer1.Interval = 1000;
            //Hook up to timer tick event
            timer1.Tick += Timer1_Tick;
        }
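
WaveIn_DataAvailable simply needs to accumulate audio for the timer to drain; a minimal sketch (assumed, not the original handler):

        // Hypothetical handler (assumed): accumulate captured audio;
        // Timer1_Tick later drains waveBuffer and ships it to the
        // speech-to-text API.
        private void WaveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            waveBuffer.AddSamples(e.Buffer, 0, e.BytesRecorded);
        }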
Example #22
            public void Init(
                MMDevice device)
            {
                if (device == null)
                {
                    return;
                }

                this.Player = new WasapiOut(
                    device,
                    AudioClientShareMode.Shared,
                    false,
                    Config.Instance.WasapiLatency);

                var list = new List<BufferedWaveProvider>();

                for (int i = 0; i < MultiplePlaybackCount; i++)
                {
                    var buffer = new BufferedWaveProvider(this.OutputFormat)
                    {
                        BufferDuration          = BufferDurations,
                        DiscardOnBufferOverflow = true,
                    };

                    list.Add(buffer);
                }

                // add an extra buffer reserved for synchronized playback
                list.Add(new BufferedWaveProvider(this.OutputFormat)
                {
                    BufferDuration          = BufferDurations,
                    DiscardOnBufferOverflow = true,
                });

                this.Buffers = list.ToArray();

                // create the mixer
                var mixer = new MixingWaveProvider32(
                    this.Buffers.Select(x => new Wave16ToFloatProvider(x)));

                this.Player.Init(mixer);
                this.Player.Play();
                this.Player.SetBackground();
            }
Example #23
        public AudioPassthrough()
        {
            waveOut = new WaveOutEvent();
            int deviceNum = -1;

            for (int n = 0; n < WaveIn.DeviceCount; n++)
            {
                var caps = WaveIn.GetCapabilities(n);
                if (caps.ProductName.StartsWith("CABLE"))
                {
                    deviceNum = n;
                    break;
                }
            }
            if (deviceNum == -1)
            {
                return;
            }

            var waveIn = new WaveInEvent()
            {
                DeviceNumber = deviceNum, WaveFormat = new WaveFormat(192000, 24, 2), BufferMilliseconds = 1
            };

            waveIn.DataAvailable += OnDataAvailable;

            for (int n = 0; n < WaveOut.DeviceCount; n++)
            {
                var caps = WaveOut.GetCapabilities(n);
                if (caps.ProductName.StartsWith("Speakers"))
                {
                    waveOut.DeviceNumber = n;
                    break;
                }
            }
            bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);


            waveOut.Init(bufferedWaveProvider);
            waveIn.StartRecording();

            waveOut.Play();
        }
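
OnDataAvailable carries the whole pass-through; a one-line sketch (assumed, not the original handler):

        // Hypothetical handler (assumed): forward CABLE input straight to
        // the speaker output via the buffered provider.
        private void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            bufferedWaveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
        }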
Example #24
        public void StartListeningToMicrophone(int audioDeviceNumber = 0)
        {
            mWaveIn = new WaveInEvent
            {
                DeviceNumber       = audioDeviceNumber,
                WaveFormat         = new WaveFormat(SampleRate, 1),
                BufferMilliseconds = (int)((double)BufferSize / (double)SampleRate * 1000.0)
            };

            mWaveIn.DataAvailable += On_AudioDataAvailable;
            mBufferedWaveProvider  = new BufferedWaveProvider(mWaveIn.WaveFormat)
            {
                BufferLength            = BufferSize * 2,
                DiscardOnBufferOverflow = true
            };

            mWaveIn.StartRecording();
            mTimer = new Timer(SendLatestAudioBytes, null, 0, 2);
        }
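
SendLatestAudioBytes is not shown; a sketch of a timer callback that drains the buffer, assuming a hypothetical Send method for the transport:

        // Hypothetical timer callback (assumed): read out whatever has
        // accumulated since the last tick and forward it.
        private void SendLatestAudioBytes(object state)
        {
            int available = mBufferedWaveProvider.BufferedBytes;
            if (available == 0)
            {
                return;
            }

            var chunk = new byte[available];
            mBufferedWaveProvider.Read(chunk, 0, chunk.Length);
            Send(chunk); // hypothetical transport call
        }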
Example #25
 public void Stop()
 {
     if (input != null)
     {
         input.StopRecording();
         input.Dispose();
         input = new WaveIn();
     }
     if (output != null && output.PlaybackState != PlaybackState.Stopped)
     {
         output.Stop();
         output.Dispose();
         volumeHandler.ToSampleProvider().Skip(TimeSpan.FromSeconds(5));
         WaveProvider.ClearBuffer();
         WaveProvider = null;
         output       = new WaveOut();
     }
     DeviceOpen = false;
 }
Example #26
        protected override void OnFormatChanged()
        {
            if (buffer == null)
            {
                buffer = new BufferedWaveProvider(Format);
            }
            else if (buffer.WaveFormat != Format)
            {
                buffer.ClearBuffer();
                buffer = new BufferedWaveProvider(Format);
            }
            else
            {
                return;
            }

            System.Diagnostics.Debug.WriteLine("OnFormatChanged");
            player.Init(buffer);
        }
Example #27
 public void naudioInit(int sampleRate, int channels)
 {
     Console.WriteLine("----------- Initializing audio device -----------");
     try
     {
         waveOut = new WaveOut();
         WaveFormat wf = new WaveFormat(sampleRate, channels);
         bufferedWaveProvider = new BufferedWaveProvider(wf);
         bufferedWaveProvider.DiscardOnBufferOverflow = true;
         bufferedWaveProvider.BufferDuration          = new TimeSpan(0, 0, 0, 0, 500);
         waveOut.Init(bufferedWaveProvider);
         waveOut.Play();
         Console.WriteLine("----------- Initialization succeeded -----------");
     }
     catch (Exception ex)
     {
         Console.WriteLine(ex);
     }
 }
Example #28
        /// <summary>
        /// Constructor
        /// </summary>
        public AudioHandler()
        {
            // initialize audio recorder
            this.audio_recoder = new WaveIn();
            this.audio_recoder.DataAvailable += this.AudioDataAvailable;
            this.audio_recoder.WaveFormat     = new WaveFormat(
                8000,
                1
                );
            this.audio_recoder.BufferMilliseconds = 100;

            // initialize output audio data provider for audio data storage
            this.audio_out_provider = new BufferedWaveProvider(this.audio_recoder.WaveFormat);
            this.audio_out_provider.DiscardOnBufferOverflow = true;

            // initialize audio data player
            this.audio_player = new WaveOut();
            this.audio_player.Init(this.audio_out_provider);
        }
Example #29
        /// <summary>
        ///     Resets the wave format data
        ///     Note that this should only be used if the SamplingRate changed
        /// </summary>
        protected void ResetWaveFormat()
        {
            // Create a new player as needed
            if (Player == null)
            {
                Player = new WaveOut();
            }
            else
            {
                Player.Stop();
            }

            // Change the format and the stream, and start playing
            Format       = WaveFormat.CreateIeeeFloatWaveFormat(SamplingRate, 1);
            SourceStream = new BufferedWaveProvider(Format);
            SourceStream.DiscardOnBufferOverflow = true;
            Player.Init(SourceStream);
            Player.Play();
        }
Example #30
        private void InitializeNAudio()
        {
            WaveOut      = new WaveOut();
            waveProvider = new BufferedWaveProvider(new WaveFormat());
            WaveOut.Init(waveProvider);

            Device = WaveIn.DeviceCount;
            if (Device == 0)
            {
                return;
            }

            WaveFormat  = new WaveFormat(44100, 2);
            WaveInEvent = new WaveInEvent {
                BufferMilliseconds = 50,
                DeviceNumber       = 0,
                WaveFormat         = WaveFormat
            };
        }