Example #1
        public VoIPBase()
        {
            pcmCodec = new UncompressedPcmCodec();

            // Set up playback first, so the buffer exists before capture
            // can raise DataAvailable events that feed it.
            waveProvider = new BufferedWaveProvider(pcmCodec.RecordFormat);
            waveProvider.BufferDuration = TimeSpan.FromSeconds(5);

            waveOut = new WaveOut();
            waveOut.Init(waveProvider);
            waveOut.Play();

            waveInEvent = new WaveInEvent();
            waveInEvent.BufferMilliseconds = 100;
            waveInEvent.WaveFormat         = pcmCodec.RecordFormat;
            waveInEvent.DataAvailable     += new EventHandler<WaveInEventArgs>(waveInEvent_DataAvailable);
            waveInEvent.StartRecording();
        }
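The waveInEvent_DataAvailable handler isn't shown in this snippet; a minimal sketch, assuming it simply loops the captured bytes into the buffered provider for local playback:

        private void waveInEvent_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Assumed handler body: only BytesRecorded bytes of the buffer are valid.
            waveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
        }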
Example #2
        static void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
        {
            lock (_lock)
            {
                if (DriverTrainingService.waveSource != null)
                {
                    DriverTrainingService.waveSource.Dispose();
                    DriverTrainingService.waveSource = null;
                }

                if (DriverTrainingService.waveFile != null)
                {
                    DriverTrainingService.waveFile.Dispose();
                    DriverTrainingService.waveFile = null;
                }
            }
        }
Example #3
    void Start()
    {
        // Configure input
        waveIn = new WaveInEvent();
        waveIn.DeviceNumber   = deviceNumber;
        waveIn.DataAvailable += OnDataAvailable;
        Debug.Log(waveIn.WaveFormat.ToString());
        waveIn.StartRecording();

        // Configure output
        bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
        bufferedWaveProvider.DiscardOnBufferOverflow = true;

        waveOut = new WaveOut();
        waveOut.Init(bufferedWaveProvider);
        waveOut.Play();
    }
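Example #3 starts both devices but never tears them down; in Unity the natural place is OnDestroy. A minimal cleanup sketch (assumed, not part of the original script):

    void OnDestroy()
    {
        // Assumed cleanup: stop capture first so no further DataAvailable
        // events fire, then release both devices.
        waveIn?.StopRecording();
        waveIn?.Dispose();
        waveOut?.Stop();
        waveOut?.Dispose();
    }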
Example #4
        /// <summary>
        /// Initializes objects for recording the audio stream and starts recording.
        /// </summary>
        /// <param name="wavFilePath">String with the path to the .wav output file.</param>
        private static void InitializeRecording(object wavFilePath)
        {
            String outputFilePath = (String)wavFilePath;

            // Initialize the WaveIn object
            waveIn = new WaveInEvent();
            // Default input device (if any)
            waveIn.DeviceNumber = 0;
            // Add an event handler for recorded data
            waveIn.DataAvailable += waveIn_DataAvailable;
            // Format of the output .wav file - sample rate and number of channels
            waveIn.WaveFormat = new WaveFormat(8000, 1);
            // Initialize the WaveFileWriter object
            writer = new WaveFileWriter(outputFilePath, waveIn.WaveFormat);
            // Start recording
            waveIn.StartRecording();
        }
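The waveIn_DataAvailable handler referenced above is defined elsewhere; a minimal sketch, assuming it appends each captured buffer to the writer:

        private static void waveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Assumed handler body: only BytesRecorded bytes of the buffer are valid.
            writer.Write(e.Buffer, 0, e.BytesRecorded);
        }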
Example #5
 public static void startRecordingMessage(int distanceRoundTrack)
 {
     if (isRecordingPaceNotes)
     {
         if (DriverTrainingService.isRecordingSound)
         {
             Console.WriteLine("Sound already being recorded");
         }
         else
         {
             Boolean       addMetaDataEntry = false;
             MetaDataEntry entry            = DriverTrainingService.recordingMetaData.getClosestEntryInRange(distanceRoundTrack, combineEntriesCloserThan);
             if (entry == null)
             {
                 addMetaDataEntry = true;
                 entry            = new MetaDataEntry(distanceRoundTrack);
             }
             int    recordingIndex = entry.recordingNames.Count;
             String fileName       = distanceRoundTrack + "_" + recordingIndex + ".wav";
             String recordingName  = DriverTrainingService.trackName + "_" + DriverTrainingService.carClass.ToString() + "_" + fileName;
             try
             {
                 lock (_lock)
                 {
                     DriverTrainingService.waveSource                   = new WaveInEvent();
                     DriverTrainingService.waveSource.WaveFormat        = new WaveFormat(22050, 1);
                      DriverTrainingService.waveSource.DataAvailable    += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
                      DriverTrainingService.waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
                     DriverTrainingService.waveFile = new WaveFileWriter(createFileName(fileName), waveSource.WaveFormat);
                     DriverTrainingService.waveSource.StartRecording();
                 }
                 entry.recordingNames.Add(recordingName);
                 entry.fileNames.Add(fileName);
                 if (addMetaDataEntry)
                 {
                     DriverTrainingService.recordingMetaData.entries.Add(entry);
                 }
                 DriverTrainingService.isRecordingSound = true;
             }
             catch (Exception e)
             {
                 Console.WriteLine("Unable to create a pace notes sound " + e.Message);
             }
         }
     }
 }
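The matching stop path isn't part of this snippet; Example #2 above shows the RecordingStopped handler that disposes the device and writer. A minimal stop method, assuming the same static fields and lock (the method name is an assumption):

 public static void stopRecordingMessage()
 {
     lock (_lock)
     {
         // StopRecording raises RecordingStopped (see Example #2), which
         // disposes waveSource and waveFile.
         DriverTrainingService.waveSource?.StopRecording();
     }
     DriverTrainingService.isRecordingSound = false;
 }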
Example #6
        public void Start()
        {
            if (this.isDisposed)
            {
                throw new ObjectDisposedException("RecordModel");
            }
            if (this.waveIn != null)
            {
                return;
            }

            this.waveIn = new WaveInEvent();
            this.waveIn.DataAvailable += this.OnDataAvailable;
            this.waveIn.WaveFormat     = new WaveFormat(16000, 16, 1);

            this.waveIn.StartRecording();
        }
Example #7
        public AudioChannel()
        {
            // Set up the device that will play the audio from the RTP received from the remote end of the call.
            m_waveOut      = new WaveOut();
            m_waveProvider = new BufferedWaveProvider(_waveFormat);
            m_waveOut.Init(m_waveProvider);
            m_waveOut.Play();

            // Set up the input device that will provide audio samples that can be encoded, packaged into RTP and sent to
            // the remote end of the call.
            m_waveInEvent = new WaveInEvent();
            m_waveInEvent.BufferMilliseconds = 20;
            m_waveInEvent.NumberOfBuffers    = 1;
            m_waveInEvent.DeviceNumber       = 0;
            m_waveInEvent.DataAvailable     += AudioSampleAvailable;
            m_waveInEvent.WaveFormat         = _waveFormat;
        }
Example #8
        private void InitializeMicrophoneListener()
        {
            _server = new NamedPipeServerStream("MicrophoneDataPipeBothChannels", PipeDirection.InOut, 1, PipeTransmissionMode.Byte, PipeOptions.Asynchronous);

            _waveIn = new WaveInEvent
            {
                DeviceNumber       = 0,
                WaveFormat         = new WaveFormat(8000, 32, 2),
                BufferMilliseconds = 100
            };
            _waveOut = new WaveOutEvent
            {
                DeviceNumber = 0,
            };
            _waveProvider = new BufferedWaveProvider(new WaveFormat(8000, 32, 2));
            _waveOut.Init(_waveProvider);
        }
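Nothing in this snippet subscribes DataAvailable or starts the capture; a sketch of how the pipe and microphone might be connected (the method name is hypothetical, and WaitForConnection blocks until a client attaches):

        private void StartMicrophoneListener()
        {
            _server.WaitForConnection();
            // Forward each captured buffer straight down the pipe.
            _waveIn.DataAvailable += (s, e) => _server.Write(e.Buffer, 0, e.BytesRecorded);
            _waveIn.StartRecording();
        }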
Example #9
        public level1()
        {
            InitializeComponent();
            btnPigean.BackColor = Color.Tan;


            MMDeviceEnumerator en = new MMDeviceEnumerator();

            var devices = en.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.All);

            comboBox1.Items.AddRange(devices.ToArray());
            comboBox1.SelectedIndex = 0;

            var waveIn = new WaveInEvent(); // to activate the microphone

            waveIn.StartRecording();        // note: waveIn is a local, so the capture can't be stopped later
        }
Example #10
        void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
        {
            AppCoordinator.RecordLog += MSG_RECORDING_ENDS + fileOutput + "\n\n";
            recording = false;

            if (waveSource != null)
            {
                waveSource.Dispose();
                waveSource = null;
            }

            if (waveFile != null)
            {
                waveFile.Dispose();
                waveFile = null;
            }
        }
Example #11
        /// <summary>
        /// Creates a new basic RTP session that captures and renders audio to/from the default system devices.
        /// </summary>
        /// <param name="audioEncoder">An audio encoder that can be used to encode and decode
        /// specific audio codecs.</param>
        /// <param name="audioOutDeviceIndex">The index of the audio output (playback) device to use.</param>
        /// <param name="audioInDeviceIndex">The index of the audio input (capture) device to use.</param>
        /// <param name="disableSource">Set to true to disable the use of the audio source functionality, i.e.
        /// don't capture input from the microphone.</param>
        /// <param name="disableSink">Set to true to disable the use of the audio sink functionality, i.e.
        /// don't playback audio to the speaker.</param>
        public WindowsAudioEndPoint(IAudioEncoder audioEncoder,
                                    int audioOutDeviceIndex = AUDIO_OUTPUTDEVICE_INDEX,
                                    int audioInDeviceIndex  = AUDIO_INPUTDEVICE_INDEX,
                                    bool disableSource      = false,
                                    bool disableSink        = false)
        {
            logger = SIPSorcery.LogFactory.CreateLogger<WindowsAudioEndPoint>();

            _audioFormatManager = new MediaFormatManager<AudioFormat>(audioEncoder.SupportedFormats);
            _audioEncoder       = audioEncoder;

            _audioOutDeviceIndex = audioOutDeviceIndex;
            _disableSource       = disableSource;
            _disableSink         = disableSink;

            if (!_disableSink)
            {
                InitPlaybackDevice(_audioOutDeviceIndex, DefaultAudioPlaybackRate.GetHashCode());
            }

            if (!_disableSource)
            {
                if (WaveInEvent.DeviceCount > 0)
                {
                    if (WaveInEvent.DeviceCount > audioInDeviceIndex)
                    {
                        _waveInEvent = new WaveInEvent();
                        _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                        _waveInEvent.NumberOfBuffers    = INPUT_BUFFERS;
                        _waveInEvent.DeviceNumber       = audioInDeviceIndex;
                        _waveInEvent.WaveFormat         = _waveSourceFormat;
                        _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;
                    }
                    else
                    {
                        logger.LogWarning($"The requested audio input device index {audioInDeviceIndex} exceeds the maximum index of {WaveInEvent.DeviceCount - 1}.");
                        OnAudioSourceError?.Invoke($"The requested audio input device index {audioInDeviceIndex} exceeds the maximum index of {WaveInEvent.DeviceCount - 1}.");
                    }
                }
                else
                {
                    logger.LogWarning("No audio capture devices are available.");
                    OnAudioSourceError?.Invoke("No audio capture devices are available.");
                }
            }
        }
Example #12
        private void StartButton_Click(object sender, EventArgs e)
        {
            if (Capturing != null)
            {
                return;
            }

            Capturing            = new WaveInEvent();
            Capturing.WaveFormat = Format;

            Writer = new WaveFileWriter(TEMP_AUDIO_PATH, Format);

            Capturing.DataAvailable    += Capture_DataAvailable;
            Capturing.RecordingStopped += Capture_RecordingStopped;

            Capturing.StartRecording();
        }
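Capture_DataAvailable would follow the same write-through pattern as Example #4's handler; the stop handler is the piece worth spelling out, since the file isn't valid until the writer is disposed. A sketch, assuming the Writer and Capturing fields above:

        private void Capture_RecordingStopped(object sender, StoppedEventArgs e)
        {
            // Dispose the writer first so the WAV header is finalized,
            // then release the capture device and reset the guard field.
            Writer?.Dispose();
            Writer = null;
            Capturing?.Dispose();
            Capturing = null;
        }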
Example #13
 public Microphone()
 {
     if (deviceNumber + 1 > deviceCount)
     {
         deviceNumber = 0;
         Properties.Settings.Default.MicrophoneDeviceNumber = deviceNumber;
         Properties.Settings.Default.Save();
     }
     if (deviceCount > 0)
     {
         waveInEvent = new WaveInEvent();
         waveInEvent.DataAvailable += waveInEvent_DataAvailable;
         waveInEvent.DeviceNumber   = deviceNumber;
         Setup();
         Start();
     }
 }
Example #14
        static void Main(string[] args)
        {
            var rec    = true;
            var waveIn = new WaveInEvent();

            waveIn.WaveFormat         = new WaveFormat(16000, 1);
            waveIn.BufferMilliseconds = 10000;
            List<byte>  recorded = new List<byte>();
            string      text_10  = "";
            var         speech   = SpeechClient.Create();
            var         config   = new RecognitionConfig()
            {
                Encoding        = RecognitionConfig.Types.AudioEncoding.Linear16,
                SampleRateHertz = 16000,
                LanguageCode    = "en-US",
                MaxAlternatives = 1
            };

            waveIn.DataAvailable += (s, a) =>
            {
                if (a.BytesRecorded >= waveIn.WaveFormat.AverageBytesPerSecond * 10)
                {
                    // Only BytesRecorded bytes are valid (Take needs System.Linq).
                    recorded.AddRange(a.Buffer.Take(a.BytesRecorded));
                    waveIn.StopRecording();
                    rec = false;
                }
            };
            var copy = recorded.ToArray();
            var th1  = new Thread(() => Recognize(copy, speech, config, ref text_10));

            while (true)
            {
                rec = true;
                waveIn.StartRecording();
                while (rec)
                {
                    Thread.Sleep(100); // avoid a hot spin while recording
                }
                copy = recorded.ToArray();
                th1.Start();
                recorded.Clear();
                Console.WriteLine(text_10);
                th1 = new Thread(() => Recognize(copy, speech, config, ref text_10));
            }
        }
Example #15
        /// <summary>
        /// Creates a new basic RTP session that captures and renders audio to/from the default system devices.
        /// </summary>
        /// <param name="audioEncoder">A 3rd party audio encoder that can be used to encode and decode
        /// specific audio codecs.</param>
        /// <param name="externalSource">Optional. An external source to use in combination with the source
        /// provided by this end point. The application will need to signal which source is active.</param>
        /// <param name="disableSource">Set to true to disable the use of the audio source functionality, i.e.
        /// don't capture input from the microphone.</param>
        /// <param name="disableSink">Set to true to disable the use of the audio sink functionality, i.e.
        /// don't playback audio to the speaker.</param>
        public WindowsAudioEndPoint(IAudioEncoder audioEncoder, IAudioSource externalSource = null, bool disableSource = false, bool disableSink = false)
        {
            _audioEncoder = audioEncoder;

            _disableSource = disableSource;
            _disableSink   = disableSink;

            if (externalSource != null)
            {
                _externalSource = externalSource;

                // Pass the encoded audio sample to the RTP transport. If this class ever supported additional codecs,
                // such as Opus, the idea would be to change to receive raw samples from the external source and then
                // do the custom encoding before handing over to the transport.
                _externalSource.OnAudioSourceEncodedSample += (audioFormat, durationRtpUnits, sample)
                                                              => OnAudioSourceEncodedSample?.Invoke(audioFormat, durationRtpUnits, sample);
            }

            if (!_disableSink)
            {
                // Render device.
                _waveOutEvent = new WaveOutEvent();
                _waveOutEvent.DeviceNumber = AUDIO_OUTPUTDEVICE_INDEX;
                _waveProvider = new BufferedWaveProvider(_waveFormat);
                _waveProvider.DiscardOnBufferOverflow = true;
                _waveOutEvent.Init(_waveProvider);
            }

            if (!_disableSource)
            {
                if (WaveInEvent.DeviceCount > 0)
                {
                    _waveInEvent = new WaveInEvent();
                    _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                    _waveInEvent.NumberOfBuffers    = INPUT_BUFFERS;
                    _waveInEvent.DeviceNumber       = AUDIO_INPUTDEVICE_INDEX;
                    _waveInEvent.WaveFormat         = _waveFormat;
                    _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;
                }
                else
                {
                    throw new ApplicationException("No audio capture devices are available.");
                }
            }
        }
Example #16
        public void Record()
        {
            // We need to check the microphone is working,
            // and we should only record for a limited time per message.
            // If the user is going to record a new message it must overwrite something.
            if (WaveIn.DeviceCount == 0)
            {
                Console.WriteLine("Can't find a microphone");
                return;
            }

            var timeRecorded = HowMuchTimeRecorded();

            if (timeRecorded.TotalSeconds <= MaxTimeSpanInSeconds - 10)
            {
                Console.WriteLine($"{MaxTimeSpanInSeconds - timeRecorded.TotalSeconds} seconds left to record.");
                Console.WriteLine("Say what you want, press but again to stop");

                _timer           = new System.Timers.Timer(MaxTimeSpanInMilliseconds - timeRecorded.TotalMilliseconds);
                _timer.AutoReset = false;
                _timer.Elapsed  += ForceStop;

                _waveSource = new WaveInEvent
                {
                    WaveFormat = new WaveFormat(44100, 1)
                };


                var    fileName      = Guid.NewGuid();
                string _tempFilename = Path.GetFullPath(Path.Combine(Directory.GetCurrentDirectory(), $@"..\..\Voice\SavedVoices\{fileName}.wav"));


                _waveSource.DataAvailable    += DataAvailable;
                _waveSource.RecordingStopped += RecordingStopped;
                _waveWriter = new WaveFileWriter(_tempFilename, _waveSource.WaveFormat);

                _waveSource.StartRecording();

                _timer.Start();
            }
            else
            {
                Console.WriteLine("Your out of space");
            }
        }
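ForceStop is referenced but not shown; a minimal sketch of the timer callback, assuming it only needs to end the capture (RecordingStopped then handles cleanup):

        private void ForceStop(object sender, System.Timers.ElapsedEventArgs e)
        {
            _waveSource?.StopRecording();
        }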
Example #17
        //toggle record
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            var btn = sender as Button;

            recording   = !recording;
            btn.Content = recording ? "Stop recording" : "Start recording";

            if (recording)
            {
                try
                {
                    if (audioCombo.SelectedIndex == -1)
                    {
                        throw new Exception("No audio device is selected.");
                    }

                    string saveFile = "NileSound_" +
                                      Directory.EnumerateFiles(SaveLocation, "NileSound_*.wav").Count() +
                                      ".wav";

                    waveIn = new WaveInEvent();
                    waveIn.DeviceNumber      = audioCombo.SelectedIndex;
                    waveIn.DataAvailable    += WaveIn_DataAvailable;
                    waveIn.RecordingStopped += WaveIn_RecordingStopped;

                    fileWriter = new WaveFileWriter(Path.Combine(SaveLocation, saveFile), waveIn.WaveFormat);
                    waveIn.StartRecording();

                    audioCombo.IsEnabled = false;
                    detectBtn.IsEnabled  = false;
                }
                catch (Exception ex)
                {
                    Info("Failiure starting recording. Reason: " + ex.Message);
                }
            }
            else
            {
                audioCombo.IsEnabled = true;
                detectBtn.IsEnabled  = true;
                waveIn.StopRecording();
                fileWriter.Dispose();
                waveIn.Dispose();
            }
        }
Example #18
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (_config != null)
                {
                    Toml.WriteFile(_config, "config.toml", _tomlSettings);
                    _config = null;
                }

                _waveIn?.StopRecording();
                _waveIn?.Dispose();
                _waveIn = null;

                _speechRecognizer?.Dispose();
                _speechRecognizer = null;

                _skFont?.Dispose();
                _skFont = null;

                _skStrokePaint?.Dispose();
                _skStrokePaint = null;

                _skFillPaint?.Dispose();
                _skFillPaint = null;

                _skScreenSurface?.Dispose();
                _skScreenSurface = null;

                _skScreenRenderTarget?.Dispose();
                _skScreenRenderTarget = null;

                _skContext?.Dispose();
                _skContext = null;

                _skInterface?.Dispose();
                _skInterface = null;

                _tkContext?.Dispose();
                _tkContext = null;

                _tkWindow?.Dispose();
                _tkWindow = null;
            }
        }
Example #19
        public static Task StartSendLiveAudio(this Peer peer, string name)
        {
            peer.AddHandler<AudioLiveStreamHandler>();
            var channel = new MsgQueue<(byte[], int)>();
            var t       = peer.SendLiveStreamAsync(channel, name, (int)CallMethods.AudioDataSlice);
            var capture = new WaveInEvent
            {
                WaveFormat         = new WaveFormat(16000, 1),
                BufferMilliseconds = 100
            };

            capture.DataAvailable += (object sender, WaveInEventArgs e) =>
            {
                channel.Enqueue((e.Buffer, e.BytesRecorded));
            };
            capture.StartRecording();
            return t.AsTask();
        }
Example #20
        public void EnumerateWaveInDevices()
        {
            int waveInDevices = WaveIn.DeviceCount;

            for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels",
                                  waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }
            waveIn = new WaveInEvent();
            waveIn.DeviceNumber   = 0;
            waveIn.DataAvailable += waveIn_DataAvailable;
            waveIn.WaveFormat     = recordingFormat;
            waveIn.StartRecording();
            Thread.Sleep(6000);
            waveIn.StopRecording();
            writer.Close();
        }
Example #21
        /// <summary>
        /// Get the audio input device, e.g. microphone. The input device that will provide
        /// audio samples that can be encoded, packaged into RTP and sent to the remote call party.
        /// </summary>
        private static WaveInEvent GetAudioInputDevice()
        {
            if (WaveInEvent.DeviceCount == 0)
            {
                throw new ApplicationException("No audio input devices available. No audio will be sent.");
            }
            else
            {
                WaveInEvent waveInEvent = new WaveInEvent();
                WaveFormat  waveFormat  = _waveFormat;
                waveInEvent.BufferMilliseconds = INPUT_SAMPLE_PERIOD_MILLISECONDS;
                waveInEvent.NumberOfBuffers    = 1;
                waveInEvent.DeviceNumber       = 0;
                waveInEvent.WaveFormat         = waveFormat;

                return waveInEvent;
            }
        }
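A typical way the returned device might be used; the handler body here is a hypothetical stand-in for the encode-and-send step:

        // Sketch only: SendRtpPacket is a placeholder, not a real API.
        var waveInEvent = GetAudioInputDevice();
        waveInEvent.DataAvailable += (s, e) => SendRtpPacket(e.Buffer, e.BytesRecorded);
        waveInEvent.StartRecording();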
Example #22
        private IWaveIn CreateWaveInDevice()
        {
            IWaveIn newWaveIn;
            var     deviceNumber = 0;

            //newWaveIn = new WaveIn() { DeviceNumber = deviceNumber };
            newWaveIn = new WaveInEvent()
            {
                DeviceNumber = deviceNumber
            };
            var sampleRate = 44100;
            var channels   = 2;

            newWaveIn.WaveFormat        = new WaveFormat(sampleRate, channels);
            newWaveIn.DataAvailable    += SourceStreamDataAvailable;
            newWaveIn.RecordingStopped += RecordEnd;
            return newWaveIn;
        }
Example #23
    public static void Main()
    {
        //WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(0);
        Console.WriteLine("Now recording...");
        WaveInEvent waveSource = new WaveInEvent();

        //waveSource.DeviceNumber = 0;
        waveSource.WaveFormat     = new WaveFormat(44100, 1);
        waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
        string tempFile = (@"C:\Users\user\Desktop\test1.wav");

        waveFile = new WaveFileWriter(tempFile, waveSource.WaveFormat);
        waveSource.StartRecording();
        Console.WriteLine("Press enter to stop");
        Console.ReadLine();
        waveSource.StopRecording();
        waveFile.Dispose();
    }
Example #24
        public void Stop()
        {
            if (this.isDisposed)
            {
                throw new ObjectDisposedException("RecordModel");
            }
            if (this.isStopped)
            {
                return;
            }

            this.waveIn.StopRecording();
            this.isStopped = true;

            this.waveIn.Dispose();

            this.waveIn = null;
        }
Example #25
        /// <summary>
        /// Creates a new synchronizable instance of <see cref="WaveInProvider"/> to be used with an <see cref="IRecorder"/>.
        /// </summary>
        /// <param name="Device">The Recording Device.</param>
        /// <param name="Wf"><see cref="WaveFormat"/> to use.</param>
        /// <param name="FrameRate">The <see cref="IRecorder"/>'s FrameRate.</param>
        public WaveInProvider(WaveInDevice Device, WaveFormat Wf, int FrameRate)
        {
            IsSynchronizable = FrameRate != -1;

            _waveInEvent = new WaveInEvent
            {
                DeviceNumber       = Device.DeviceNumber,
                BufferMilliseconds = IsSynchronizable ? (int)Math.Ceiling(1000 / (decimal)FrameRate) : 100,
                NumberOfBuffers    = 3,
                WaveFormat         = new NWaveFormat(Wf.SampleRate, Wf.BitsPerSample, Wf.Channels)
            };

            WaveFormat = Wf;

            _waveInEvent.RecordingStopped += (Sender, Args) => RecordingStopped?.Invoke(this, new EndEventArgs(Args.Exception));

            _waveInEvent.DataAvailable += (Sender, Args) => DataAvailable?.Invoke(this, new DataAvailableEventArgs(Args.Buffer, Args.BytesRecorded));
        }
Example #26
 public static void HandleGetAudioStream(Packets.ServerPackets.GetAudioStream command, Client client)
 {
     try {
         var waveFormat = new WaveFormat(command.SampleRate, command.Channels);
         _waveInEvent = new WaveInEvent {
             BufferMilliseconds = 50,
             DeviceNumber       = command.Device,
             WaveFormat         = waveFormat
         };
         // Subscribe before starting so the first buffers aren't missed.
         _waveInEvent.DataAvailable += (sender, args) => {
             new GetAudioStreamResponse(args.Buffer).Execute(client);
         };
         _waveInEvent.StartRecording();
     }
     catch (Exception ex) {
         Debug.WriteLine($"{ex.Message}\n{ex.StackTrace}\n{ex.Source}");
     }
 }
Example #27
        //+recordAudio(): creates a second feed to record file
        public static void recordAudio(int sec, int src, String file = "tempWav.wav")
        {
            //Create a new feed on the requested input device
            WaveInEvent snippit = new WaveInEvent();

            snippit.DeviceNumber = src; // use the requested capture device
            snippit.WaveFormat   = new WaveFormat(44100, 16, 1);

            //Setup recording protocol
            snippit.DataAvailable    += new EventHandler<WaveInEventArgs>(dataAvailable);
            snippit.RecordingStopped += new EventHandler<StoppedEventArgs>(recordingStopped);

            //Set file to record to
            waveFile = new WaveFileWriter(file, snippit.WaveFormat);

            //Start recording
            snippit.StartRecording();
            Task.Delay(sec * 1000).ContinueWith(t => snippit.StopRecording());
        }
Example #28
 private void OnRecordingStopped(object sender, StoppedEventArgs e)
 {
     if (waveIn != null)
     {
         waveIn.Dispose();
         waveIn = null;
     }
     if (writer != null)
     {
         writer.Close();
         writer = null;
     }
     if (e.Exception != null)
     {
         MessageBox.Show(String.Format("A problem occurred: {0}", e.Exception.Message));
     }
     TitleLabel.Text = "Recording Tool";
 }
Example #29
 private void Form1_FormClosing(object sender, FormClosingEventArgs e)
 {
     if (output != null)
     {
         if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing)
         {
             output.Stop();
         }
         output.Dispose();
         output = null;
     }
     if (stream != null)
     {
         stream.Dispose();
         stream = null;
     }
     if (pitch != null)
     {
         pitch = null;
     }
     if (stereo != null)
     {
         stereo = null;
     }
     closing = true;
     if (capture != null && capture.CaptureState == NAudio.CoreAudioApi.CaptureState.Capturing)
     {
         capture.StopRecording();
     }
     if (wo != null)
     {
         if (wo.PlaybackState == PlaybackState.Playing)
         {
             wo.Stop();
         }
         wo.Dispose();
     }
     waveIn?.StopRecording();
     writer?.Dispose();
     wo      = null;
     waveIn  = null;
     capture = null;
     writer  = null;
 }
Example #30
        void RT()
        {
            if (flag)
            {
                Sound();
                recorder.DeviceNumber = comboBox1.SelectedIndex;

                recorder.WaveFormat = new WaveFormat(8000, 16, 1);

                recorder.DataAvailable += Voice_Input;

                player.Init(bufferedWaveProvider);
                recorder.StartRecording();
                flag = false;
                AudioMonitorInitialize(comboBox1.SelectedIndex);
            }
            else
            {
                if (player != null)
                {
                    player.Stop();
                    player.Dispose();
                    player = null;
                }
                if (recorder != null)
                {
                    recorder.StopRecording();

                    recorder.Dispose();

                    bufferedWaveProvider.ClearBuffer();
                    recorder = null;
                }
                bufferedWaveProvider = null;
                flag = true;
                Sound();
                if (wvin != null)
                {
                    wvin.StopRecording();
                    wvin = null;
                }
                RT();
            }
        }
Example #31
        public float[] ReadMonoSamplesFromMicrophone(int sampleRate, int secondsToRecord)
        {
            var producer = new BlockingCollection<float[]>();
            var waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 1);
            float[] samples;
            using (var waveIn = new WaveInEvent { WaveFormat = waveFormat })
            {
                waveIn.DataAvailable += (sender, e) =>
                    {
                        var chunk = GetFloatSamplesFromByte(e.BytesRecorded, e.Buffer);
                        producer.Add(chunk);
                    };

                waveIn.RecordingStopped += (sender, args) => producer.CompleteAdding();

                waveIn.StartRecording();

                samples = samplesAggregator.ReadSamplesFromSource(new BlockingQueueSamplesProvider(producer), secondsToRecord, sampleRate);

                waveIn.StopRecording();
            }

            return samples;
        }