Example #1
 public bool StartReading()
 {
     lock (wi)
     {
         try
         {
             wi.StartRecording();
             return true;
         }
         catch (InvalidOperationException ex)
         {
             // NAudio reports a second StartRecording call with an
             // InvalidOperationException("Already recording"); wait briefly
             // and retry once before giving up.
             if (ex.Message == "Already recording")
             {
                 try
                 {
                     Thread.Sleep(100);
                     wi.StartRecording();
                     return true;
                 }
                 catch
                 {
                     return false;
                 }
             }
             return false;
         }
         catch
         {
             return false;
         }
     }
 }
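A note on this example: matching on the exception message is brittle across NAudio versions and locales. A minimal alternative sketch, assuming the same `wi` field, tracks the state with a flag instead:

 private readonly object gate = new object();
 private bool isRecording;

 public bool StartReading()
 {
     lock (gate)
     {
         if (isRecording)
         {
             return true; // already capturing, nothing to do
         }
         try
         {
             wi.StartRecording();
             isRecording = true;
             return true;
         }
         catch
         {
             return false;
         }
     }
 }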
Example #2
        public CustomTrayIcon()
        {
            settings = CustomSettings.Instance;

            waveIn.DataAvailable += (sender, args) =>
            {
                // Scan the 16-bit little-endian PCM buffer and keep the peak
                // absolute amplitude (0..1) in the `value` field.
                for (int index = 0; index < args.BytesRecorded; index += 2)
                {
                    short sample   = (short)((args.Buffer[index + 1] << 8) | args.Buffer[index + 0]);
                    float sample32 = sample / 32768f;
                    if (sample32 < 0)
                    {
                        sample32 = -sample32;
                    }
                    if (sample32 > value)
                    {
                        value = sample32;
                    }
                }
            };
            UpdateDeviceList();
            waveIn.StartRecording();
            recStarted = true;

            // Periodically check for a changed default device.
            var checkDeviceTimer = new System.Windows.Forms.Timer();

            checkDeviceTimer.Interval = 60 * 1000;
            checkDeviceTimer.Tick    += (sender, args) => { UpdateDeviceList(); };
            checkDeviceTimer.Start();

            SetUpdateInterval(settings.updateInterval);
            EnableIcon();
        }
Example #3
        /// <summary>
        /// Starts the media capturing/source devices.
        /// </summary>
        public Task StartAudio()
        {
            if (!_isStarted)
            {
                _isStarted = true;
                _waveOutEvent?.Play();
                _waveInEvent?.StartRecording();
            }

            return Task.CompletedTask;
        }
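The example above only starts the devices; a matching stop routine is not shown. A sketch under the same field names might look like this:

        public Task StopAudio()
        {
            if (_isStarted)
            {
                _isStarted = false;
                _waveInEvent?.StopRecording();
                _waveOutEvent?.Stop();
            }

            return Task.CompletedTask;
        }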
Example #4
        // Public methods

        /// <summary>
        /// Microphone sensitivity test.
        /// </summary>
        public void SensitivityTest()
        {
            _waveIn.DataAvailable += Sensitivity_Tick;
            MicrophoneSens         = 0;

            _waveIn.StartRecording();

            // Block while DataAvailable events accumulate sensitivity samples.
            Thread.Sleep(Core.TIME_TO_NOISETEST);

            _waveIn.StopRecording();

            _waveIn.DataAvailable -= Sensitivity_Tick;
        }
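The Sensitivity_Tick handler is not shown in this example. A hypothetical shape, assuming MicrophoneSens holds a float peak level in [0, 1], could track the loudest sample seen during the test window:

        private void Sensitivity_Tick(object sender, WaveInEventArgs e)
        {
            // Decode 16-bit little-endian PCM and keep the peak amplitude.
            for (int i = 0; i < e.BytesRecorded; i += 2)
            {
                short sample = (short)((e.Buffer[i + 1] << 8) | e.Buffer[i]);
                float level  = Math.Abs(sample / 32768f);
                if (level > MicrophoneSens)
                {
                    MicrophoneSens = level;
                }
            }
        }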
Example #5
        public float[] ReadMonoSamplesFromMicrophone(int sampleRate, int secondsToRecord)
        {
            var producer   = new BlockingCollection<float[]>();
            var waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 1);

            float[] samples;
            using (var waveIn = new WaveInEvent {
                WaveFormat = waveFormat
            })
            {
                waveIn.DataAvailable += (sender, e) =>
                {
                    var chunk = GetFloatSamplesFromByte(e.BytesRecorded, e.Buffer);
                    producer.Add(chunk);
                };

                waveIn.RecordingStopped += (sender, args) => producer.CompleteAdding();

                waveIn.StartRecording();

                samples = samplesAggregator.ReadSamplesFromSource(new BlockingQueueSamplesProvider(producer), secondsToRecord, sampleRate);

                waveIn.StopRecording();
            }

            return samples;
        }
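GetFloatSamplesFromByte is a helper not shown here. Since the WaveInEvent above records in IEEE-float format, a plausible reconstruction simply reinterprets each four bytes as one little-endian float:

        private static float[] GetFloatSamplesFromByte(int bytesRecorded, byte[] buffer)
        {
            var samples = new float[bytesRecorded / 4];
            Buffer.BlockCopy(buffer, 0, samples, 0, bytesRecorded);
            return samples;
        }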
Example #6
        private void StartRecord()
        {
            waveIn = new WaveInEvent();
            int device   = inputDeviceBox.SelectedIndex;
            int channels = WaveInEvent.GetCapabilities(device).Channels;

            waveIn.DeviceNumber = device;
            waveIn.WaveFormat   = new WaveFormat(48000, channels);
            //waveIn.BufferMilliseconds = 100;
            //waveIn.NumberOfBuffers = 2;

            waveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
            waveProvider.DiscardOnBufferOverflow = true;
            //waveProvider.BufferLength = 44100;
            //waveProvider.BufferDuration = TimeSpan.FromMilliseconds(10000);

            waveOut = new WaveOut();
            waveOut.Init(waveProvider);
            waveOut.DeviceNumber = outputDeviceBox.SelectedIndex;

            waveIn.DataAvailable += (_, e) => {
                waveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
            };
            waveIn.RecordingStopped += (_, __) => {
                waveProvider.ClearBuffer();
            };

            waveProvider.ClearBuffer();
            waveIn.StartRecording();
            if (playVoiceCheck.Checked)
            {
                waveOut.Play();
            }
        }
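This example wires microphone input straight to the speakers for live monitoring, but never tears it down. A matching stop routine, sketched under the same field names, would be:

        private void StopRecord()
        {
            waveIn?.StopRecording();
            waveOut?.Stop();
            waveIn?.Dispose();
            waveOut?.Dispose();
        }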
Example #7
        private void BtnStart_Click(object sender, EventArgs e)
        {
            Clear();
            WaveIn = new WaveInEvent();
            Writer = new WaveFileWriter(Path.Combine(WaveRecorder.SavePath, "Test.wav"), WaveIn.WaveFormat);
            // Subscribe the handlers before starting so no early buffers are missed.
            WaveIn.DataAvailable += (s, a) =>
            {
                Writer.Write(a.Buffer, 0, a.BytesRecorded);
                if (Writer.Position > WaveIn.WaveFormat.AverageBytesPerSecond * 10)
                {
                    WaveIn.StopRecording();
                }
            };
            WaveIn.RecordingStopped += (s, a) =>
            {
                Writer.Close();
                Writer?.Dispose();
                Writer           = null;
                btnStop.Enabled  = false;
                btnStart.Enabled = true;
                btnPlay.Enabled  = true;
                timer1.Stop();
            };

            WaveIn.StartRecording();

            btnStop.Enabled  = true;
            btnStart.Enabled = false;
            btnPlay.Enabled  = false;

            timer1.Start();
        }
Example #8
        // +start(): initialize the audio capture device and start the feed
        public void start(float playbackVolume = 0)
        {
            //Get desired audio device
            audioSrc = numDevices - device.SelectedIndex - 1;

            //Initialize device
            source = new WaveInEvent {
                WaveFormat = new WaveFormat(44100, WaveIn.GetCapabilities(audioSrc).Channels)
            };
            source.DataAvailable += sourceDataAvailable;
            provider              = new BufferedWaveProvider(new WaveFormat());
            player       = new WaveOut();
            sampleObject = new object();

            //Initialize waveForm painter
            notify         = new NotifyingSampleProvider(provider.ToSampleProvider());
            notify.Sample += DrawAudioWave;

            //Start feed
            source.BufferMilliseconds = 10; // must be set before StartRecording to take effect
            source.StartRecording();
            player.Init(notify);
            player.Play();
            player.Volume = playbackVolume;
        }
Example #9
        private void checkBoxChanged(object sender, EventArgs e)
        {
            CheckBox ch = (CheckBox)sender;

            if (ch == MicEnabled_checkbox)
            {
                DebugLine.setVisible("Microphone: ", ch.Checked);
                Settings.Default.MicShow = ch.Checked;
            }
            else if (ch == volume_checkbox)
            {
                DebugLine.setVisible("Volume: ", ch.Checked);
                Settings.Default.VolumeShow = ch.Checked;
                if (ch.Checked)
                {
                    WaveEvent.StartRecording();
                    TimerIndex.Start();
                }
                else
                {
                    TimerIndex.Stop();
                    WaveEvent.StopRecording();
                }
            }
            else if (ch == CPULoad_check)
            {
                DebugLine.setVisible("CPU Load: ", ch.Checked);
                Settings.Default.CPUVisible = ch.Checked;
            }
            else if (ch == MemLoad_check)
            {
                DebugLine.setVisible("Mem: ", ch.Checked);
                Settings.Default.MemVisible = ch.Checked;
            }
            else if (ch == cbBlockKey)
            {
                Settings.Default.BlockKey = ch.Checked;
            }
            else if (ch == cbAlwaysAsAdmin)
            {
                Settings.Default.AlwaysRunAsAdmin = ch.Checked;
            }
            else if (ch == cbEnableOverlay)
            {
                Settings.Default.OverlayEnabled = btnRunHookProcess.Enabled = ch.Checked;
            }
            else if (ch == cbDragBoxMode)
            {
                DebugLine.DragMode = ch.Checked;
                if (!ch.Checked)
                {
                    lPosition.Text = "Position: " + Settings.Default.WindowPosition.X.ToString() + ", " + Settings.Default.WindowPosition.Y.ToString();
                }
            }
            else if (ch == cbAutorun)
            {
                Autorun = cbAutorun.Checked;
            }
            Settings.Default.Save();
        }
Example #10
        private void InitializeAudio()
        {
            input.Dispose();
            output.Dispose();
            output              = new WaveOutEvent();
            input               = new WaveInEvent();
            input.DeviceNumber  = inputSelector.SelectedIndex;
            output.DeviceNumber = outputSelector.SelectedIndex;


            input.WaveFormat = waveformat;

            input.DataAvailable += new EventHandler <WaveInEventArgs>(DataAvailable);
            filter = new BiQuadFilter[waveformat.Channels];
            for (int i = 0; i < filter.Length; i++)
            {
                filter[i] = BiQuadFilter.LowPassFilter(waveformat.SampleRate, waveformat.SampleRate / 2, (float)Q);
            }

            output.Init(buffer);
            output.Pause();
            try
            {
                input.StartRecording();
                recording = true;
            }
            catch
            {
                // The input device may be missing or already in use; recording stays false.
            }
        }
Example #11
        byte[] buffer = new byte[8192];                   //Buffer to store the sound in

        private void btnAudioInputStart_Click(object sender, EventArgs e)
        {
            WaveInEvent waveIn = new WaveInEvent();

            waveIn.DeviceNumber   = 0;                        // Device to use as the sound source (device 0 is the default)
            waveIn.WaveFormat     = new WaveFormat(44100, 1); // Format of the source: 44.1 kHz, mono
            waveIn.DataAvailable += WaveIn_DataAvailable;     // Raised whenever a buffer of recorded audio is ready

            // Buffered provider that stores the incoming audio data
            bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);

            // begin record
            waveIn.StartRecording();


            // Toggle the timer
            tmr1.Enabled = !tmr1.Enabled;
        }
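Note that waveIn is a local variable here, so nothing can ever stop or dispose it. A sketch that promotes it to a field and adds a stop handler (btnAudioInputStop_Click is a hypothetical name):

        private WaveInEvent waveIn;

        private void btnAudioInputStop_Click(object sender, EventArgs e)
        {
            waveIn?.StopRecording();
            waveIn?.Dispose();
            waveIn = null;
        }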
Example #12
        public void StartDetect(int inputDevice)
        {
            this.Dispatcher.BeginInvoke((ThreadStart) delegate()
            {
                waveIn = new WaveInEvent();

                waveIn.DeviceNumber   = inputDevice;
                waveIn.WaveFormat     = new WaveFormat(44100, 1);
                waveIn.DataAvailable += sound.WaveIn_DataAvailable;

                sound.bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);

                // begin record
                waveIn.StartRecording();

                IWaveProvider stream = new Wave16ToFloatProvider(sound.bufferedWaveProvider);
                Pitch pitch          = new Pitch(stream);

                // Read one buffer and estimate its pitch; this runs immediately
                // after StartRecording, so the buffer may still be mostly silence.
                byte[] buffer = new byte[8192];
                int bytesRead = stream.Read(buffer, 0, buffer.Length);
                freq = pitch.Get(buffer);

                if (freq != 0)
                {
                    ReturnFreq();
                }
            });
        }
Example #13
        private static void Main(string[] args)
        {
            _bufferedWaveProvider = new BufferedWaveProvider(_waveFormat);
            _waveIn = new WaveInEvent()
            {
                DeviceNumber       = 0,
                WaveFormat         = _waveFormat,
                BufferMilliseconds = 100,
            };
            _waveIn.DataAvailable    += WaveIn_DataAvailable;
            _waveIn.RecordingStopped += WaveIn_RecordingStopped;

            _waveOut = new WaveOutEvent()
            {
                DeviceNumber   = 0,
                DesiredLatency = 100,
            };
            _waveOut.Init(_bufferedWaveProvider);

            // Each task starts its device, then spins to stay alive; the busy-wait
            // burns a core, since WaveInEvent/WaveOutEvent run on their own threads.
            Task task = new Task(() =>
            {
                _waveIn.StartRecording();
                while (true) { }
            });

            task.Start();

            Task task2 = new Task(() =>
            {
                _waveOut.Play();
                while (true) { }
            });

            task2.Start();

            Console.ReadKey();
        }
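The spin loops above exist only to keep the two tasks alive. A sketch of the same microphone pass-through without tasks or busy-waiting, relying on the devices' own event threads:

            _waveIn.StartRecording();
            _waveOut.Play();
            Console.ReadKey();   // capture and playback run until a key is pressed
            _waveIn.StopRecording();
            _waveOut.Stop();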
Example #14
 public void Run()
 {
     micIn.DeviceNumber   = 0;
     micIn.WaveFormat     = new WaveFormat();
     micIn.DataAvailable += new EventHandler <WaveInEventArgs>(OnDataRecieved);
     micIn.StartRecording();
 }
Example #15
        public void BeginMonitoring(int recordingDevice)
        {
            if (recordingState != RecordingState.Stopped)
            {
                throw new InvalidOperationException("Can't begin monitoring while we are in this state: " + recordingState.ToString());
            }

            waveIn = new WaveInEvent();
            waveIn.DeviceNumber      = recordingDevice;
            waveIn.DataAvailable    += OnDataAvailable;
            waveIn.RecordingStopped += OnRecordingStopped;
            waveIn.WaveFormat        = recordingFormat;
            //wi.WaveFormat = new NAudio.Wave.WaveFormat(RATE, 1);
            //waveIn.BufferMilliseconds = BUFFERSIZE; //(int)((double)BUFFERSIZE / (double)RATE * 1000.0);
            waveIn.BufferMilliseconds = (int)((double)BUFFERSIZE / (double)RATE * 1000.0);
            //waveIn.BufferMilliseconds = 5120;

            bwp = new BufferedWaveProvider(waveIn.WaveFormat);
            bwp.BufferLength            = BUFFERSIZE * 2;
            bwp.DiscardOnBufferOverflow = true;

            waveIn.StartRecording();
            _audioEvaluator.Start(new WaveInEvent()
            {
                DeviceNumber = recordingDevice
            });
            //_audioEvaluator.Start(waveIn);

            recordingState = RecordingState.Monitoring;
        }
Example #16
        public async Task SpeechToText()
        {
            //var authData = GenerateAuthData(path, AccessKey, SecretKey, ContractId);
            //var authString = GenerateAuthString(authData);
            //var uri = $"{endpoint}?auth={authString}";

            var jwtToken = await HttpUtils.GenerateJwtDataAsync(AccessKey, SecretKey, 3 * 60 /* 3 minutes */, jwtEndpoint);

            var uri = $"{sttEndpoint}?token=Bearer {jwtToken}";

            var waveIn = new WaveInEvent();

            // Use the default recording device.
            waveIn.DeviceNumber = 0;
            // Sample rate 16000 Hz, 16 bits per sample, 1 channel.
            waveIn.WaveFormat     = new WaveFormat(16000, 16, 1);
            waveIn.DataAvailable += (object sender, WaveInEventArgs e) =>
            {
                // Wrap only the bytes actually recorded; e.Buffer may be larger.
                var inputMemStream  = new MemoryStream(e.Buffer, 0, e.BytesRecorded);
                var rawWaveStream   = new RawSourceWaveStream(inputMemStream, waveIn.WaveFormat);
                var outputMemStream = new MemoryStream();
                WaveFileWriter.WriteWavFileToStream(outputMemStream, rawWaveStream);
                audioBuffers.Enqueue(outputMemStream.ToArray());
            };
            waveIn.RecordingStopped += (object sender, StoppedEventArgs e) =>
            {
                clientAsyncTokenSource.Cancel();
            };

            var client = new ClientWebSocket();

            await client.ConnectAsync(new Uri(uri), CancellationToken.None);

            // Recognize Japanese speech.
            _ = await SetLanguageAsync(client, "ja");

            _ = await SetSamplingRateAsync(client, 16000);

            try
            {
                waveIn.StartRecording();
                Console.WriteLine("(音声認識:認識中です。)");

                var sendLoop = this.InitSendLoop(client);
                var readLoop = this.InitReadLoop(client);
                Console.Read();

                waveIn.StopRecording();
                Console.WriteLine("(音声認識:完了しました。)");

                await sendLoop;
                await readLoop;

                await client.CloseAsync(WebSocketCloseStatus.NormalClosure, "OK", CancellationToken.None);
            }
            catch (OperationCanceledException)
            {
                Console.WriteLine("(音声認識:サーバとの通信を止めました。)");
            }
        }
Example #17
        /// <summary>
        /// Creates the recorder and immediately starts capturing from the given device.
        /// </summary>
        /// <param name="settings"></param>
        /// <param name="deviceNumber"></param>
        /// <param name="numberOfBuffers"></param>
        public NAudioRecording(AudioSettings settings, int deviceNumber, int numberOfBuffers) :
            base(settings)
        {
            WaveIn = new WaveInEvent
            {
                WaveFormat         = new WaveFormat(settings.Rate, settings.Bits, settings.Channels),
                BufferMilliseconds = (int)settings.Delay.TotalMilliseconds,
                DeviceNumber       = deviceNumber,
                NumberOfBuffers    = numberOfBuffers,
            };
            WaveIn.DataAvailable += (_, args) =>
            {
                if (WaveFileWriter != null)
                {
                    WaveFileWriter.Write(args.Buffer, 0, args.BytesRecorded);
                    WaveFileWriter.Flush();
                }

                // Append only the bytes actually recorded (args.Buffer may be larger).
                Data = Data.Concat(args.Buffer.Take(args.BytesRecorded)).ToArray();

                OnDataReceived(args.Buffer);
            };

            if (Settings.Format is AudioFormat.Wav or AudioFormat.Mp3)
            {
                Header         = WaveIn.WaveFormat.ToWavHeader();
                Stream         = new MemoryStream();
                WaveFileWriter = new WaveFileWriter(Stream, WaveIn.WaveFormat);
            }

            WaveIn.StartRecording();
        }
Example #18
 public void Start()
 {
     _waveInEvent?.StartRecording();
     //_audStm?.Start();
     _simulationTrigger?.Change(0, _simulationPeriodMilli);
     _simulationStartTime = DateTime.Now;
 }
Example #19
 private void BeginRecognition_Click(object sender, EventArgs e)
 {
     if (TitleLabel.Text == "Recording Tool")
     {
         if (!Directory.Exists(cPath))
         {
             Directory.CreateDirectory(cPath);
         }
         System.IO.DirectoryInfo dir = new System.IO.DirectoryInfo(cPath);
         wavename                 = cPath + "Recording_" + dir.GetFiles().Length + ".wav";
         waveIn                   = new WaveInEvent();
         waveIn.WaveFormat        = new WaveFormat(44100, 1);
         waveIn.DataAvailable    += new EventHandler <WaveInEventArgs>(waveIn_DataAvailable);
         waveIn.RecordingStopped += new EventHandler <StoppedEventArgs>(OnRecordingStopped);
         writer                   = new WaveFileWriter(wavename, waveIn.WaveFormat);
         waveIn.StartRecording();
         TitleLabel.Text = "Recording...";
     }
     else
     {
         if (waveIn != null)
         {
             waveIn.StopRecording();
             RecordAudiosBox.Items.Add(wavename);
              TitleLabel.Text = "Recording Tool";
         }
     }
 }
Example #20
        static async Task <object> StreamingMicRecognizeAsync(string locale, int seconds, int waveInDeviceNumber = 0)
        {
            waveIn.DeviceNumber = waveInDeviceNumber;
            waveIn.WaveFormat   = waveFormat;

            waveIn.StartRecording();

            Console.WriteLine(String.Format("Recording has been started on {0}",
                                            WaveIn.GetCapabilities(waveInDeviceNumber).ProductName), Color.Lime);

            var loadDataTasks = new Task[]
            {
                Task.Run(async() => await Loop(locale, seconds)),
                Task.Run(async() => await ndiRenderer.Run())
            };

            try
            {
                await Task.WhenAll(loadDataTasks);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }

            waveIn.StopRecording();
            Console.WriteLine("exited gracefully");

            return 0;
        }
Example #21
        //++++
        private void Connect(IPEndPoint myendPoint, IPEndPoint hisendPoint, int inputDeviceNumber, InetworkChatCodec codec)
        {
            waveIn = new WaveInEvent();
            waveIn.BufferMilliseconds = 50;
            waveIn.DeviceNumber       = inputDeviceNumber;
            waveIn.WaveFormat         = codec.RecordFormat;
            waveIn.DataAvailable     += waveIn_DataAvailable;
            waveIn.StartRecording();

            udpSender   = new UdpClient();
            udpListener = new UdpClient();

            // To allow us to talk to ourselves for test purposes:
            // http://stackoverflow.com/questions/687868/sending-and-receiving-udp-packets-between-two-programs-on-the-same-computer
            udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
            udpListener.Client.Bind(myendPoint);

            udpSender.Connect(hisendPoint);

            waveOut      = new WaveOut();
            waveProvider = new BufferedWaveProvider(codec.RecordFormat);
            waveOut.Init(waveProvider);
            waveOut.Play();

            connected = true;
            var state = new ListenerThreadState {
                Codec = codec, EndPoint = myendPoint
            };

            ThreadPool.QueueUserWorkItem(ListenerThread, state);
            MessageBox.Show("Connected");
        }
Example #22
        private static void Main(string[] args)
        {
            var elapsed = new TimeSpan(0, 0, 60);
            var id      = Guid.NewGuid();
            var capture = new WaveInEvent
            {
                DeviceNumber       = 0,
                BufferMilliseconds = 1000,
                WaveFormat         = new WaveFormat(44100, 2),
            };
            var writer = new WaveFileWriter($"record-{id}.wav", capture.WaveFormat);

            capture.DataAvailable += (s, a) =>
            {
                writer.Write(a.Buffer, 0, a.BytesRecorded);
            };

            capture.RecordingStopped += (s, a) =>
            {
                writer.Dispose();
                writer = null;
                capture.Dispose();
            };

            Task.Run(async() => await ExecFfm($"-y -f vfwcap -r 25 -t {elapsed:g} -i 0 front-{id}.mp4"));
            Task.Run(async() => await ExecFfm($"-y -f gdigrab -framerate 25 -t {elapsed:g} -i desktop desk-{id}.mkv"));

            capture.StartRecording();

            Thread.Sleep(elapsed);
            Console.WriteLine("Done!");
            capture.StopRecording();

            Environment.Exit(Environment.ExitCode);
        }
Example #23
        static Task recordAsync(int deviceNum, CancellationToken cancellationToken)
        {
            // Note: the async lambda is wrapped as an Action (async void), so the
            // returned cold Task completes at its first await once started.
            return new Task(async() =>
            {
                WaveFileWriter waveFile;
                WaveInEvent waveSource = new WaveInEvent();
                waveSource.DeviceNumber = deviceNum;
                waveSource.WaveFormat = new WaveFormat(44100, 1);

                string tempFile = $@"{_saveDirectory}mic_recording_{deviceNum}.wav";
                waveFile = new WaveFileWriter(tempFile, waveSource.WaveFormat);

                waveSource.DataAvailable += (sender, e) =>
                {
                    waveFile.Write(e.Buffer, 0, e.BytesRecorded);
                };

                waveSource.StartRecording();

                while (!cancellationToken.IsCancellationRequested)
                {
                    await Task.Delay(100);
                }

                waveSource.StopRecording();
                waveFile.Dispose();
            }));
        }
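Hypothetical usage of recordAsync above: the returned Task is created cold, so the caller has to Start() it and later cancel the token to end the recording.

            var cts = new CancellationTokenSource();
            Task recording = recordAsync(0, cts.Token);
            recording.Start();
            Thread.Sleep(5000); // record for about five seconds
            cts.Cancel();       // recordAsync then stops the device and disposes the writer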
Example #24
        public void Start(string inputDevice)
        {
            if (Started)
            {
                throw new Exception("Input already started");
            }

            //waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
            waveIn = new WaveInEvent();
            waveIn.BufferMilliseconds = 20;
            //Console.WriteLine("Input device: " + WaveIn.GetCapabilities(0).ProductName);
            waveIn.DeviceNumber   = ClientAudioUtilities.MapInputDevice(inputDevice);
            waveIn.DataAvailable += _waveIn_DataAvailable;
            waveIn.WaveFormat     = new WaveFormat(sampleRate, 16, 1);

            inputVolumeStreamArgs = new InputVolumeStreamEventArgs()
            {
                DeviceNumber = waveIn.DeviceNumber, PeakRaw = 0, PeakDB = float.NegativeInfinity, PeakVU = 0
            };
            opusDataAvailableArgs = new OpusDataAvailableEventArgs();

            //inputControls = ClientAudioUtilities.GetWaveInMixerControls(waveIn.DeviceNumber);
            waveIn.StartRecording();

            Started = true;
        }
Example #25
        private void button1_Click(object sender, EventArgs e)
        {
            if (inputDevices == 0)
            {
                return;
            }

            button3.Enabled    = false;
            PlayButton.Enabled = false;
            stopButton.Enabled = false;
            button1.Enabled    = false;
            button2.Enabled    = true;

            saveWave = new WaveInEvent();
            saveWave.DeviceNumber = 0;
            saveWave.WaveFormat   = new WaveFormat(sampleRate, bitDepth, channels);
            saver = new WaveFileWriter(outPutFilePath, saveWave.WaveFormat);
            // Subscribe before starting so the first buffers are captured.
            saveWave.DataAvailable += (s, a) => {
                saver.Write(a.Buffer, 0, a.BytesRecorded);
                // Stop automatically after roughly 30 seconds of audio.
                if (saver.Position > saveWave.WaveFormat.AverageBytesPerSecond * 30)
                {
                    saveWave.StopRecording();
                }
            };

            saveWave.StartRecording();
        }
Example #26
        public Task RecognizeStreaming(WaveInEvent waveIn, CancellationToken ct)
        {
            if (waveIn.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
            {
                throw new InvalidCastException("Format not supported for Streaming recognize");
            }

            waveIn.DataAvailable += WaveIn_DataAvailable;


            // StartNew with an async lambda yields a Task<Task>; Unwrap (below)
            // makes recognizeTask track the whole recognition loop.
            Task recognizeTask = Task.Factory.StartNew(async() =>
            {
                try
                {
                    for (; !ct.IsCancellationRequested;)
                    {
                        await StreamingMicRecognizeAsync(waveIn.WaveFormat.SampleRate,
                                                         SAFE_STREAM_TIMEOUT,
                                                         ct);
                    }

                    waveIn.DataAvailable -= WaveIn_DataAvailable;
                    waveIn.StopRecording();
                    audioBuffer.Clear(); // Clear temp buffer
                }
                catch (Exception ex)
                {
                    LogHelper.WriteLog(this, ex.ToString());
                }
            }, ct, TaskCreationOptions.LongRunning, TaskScheduler.Default).Unwrap();

            waveIn.StartRecording();
            return recognizeTask;
        }
Example #27
        public Recorder(string fileName,
                        FourCC codec, int quality,
                        int audioSourceIndex, SupportedWaveFormat audioWaveFormat, bool encodeAudio, int audioBitRate)
        {
            System.Windows.Media.Matrix toDevice;
            using (var source = new HwndSource(new HwndSourceParameters()))
            {
                toDevice = source.CompositionTarget.TransformToDevice;
            }

            screenWidth  = (int)Math.Round(SystemParameters.PrimaryScreenWidth * toDevice.M11);
            screenHeight = (int)Math.Round(SystemParameters.PrimaryScreenHeight * toDevice.M22);

            // Create AVI writer and specify FPS
            writer = new AviWriter(fileName)
            {
                FramesPerSecond = 10,
                EmitIndex1      = true,
            };

            // Create video stream
            videoStream = CreateVideoStream(codec, quality);
            // Set only the name. Other properties were set when creating the stream,
            // either explicitly by arguments or implicitly by the encoder used.
            videoStream.Name = "Screencast";

            if (audioSourceIndex >= 0)
            {
                var waveFormat = ToWaveFormat(audioWaveFormat);

                audioStream = CreateAudioStream(waveFormat, encodeAudio, audioBitRate);
                // Set only the name. Other properties were set when creating the stream,
                // either explicitly by arguments or implicitly by the encoder used.
                audioStream.Name = "Voice";

                audioSource = new WaveInEvent
                {
                    DeviceNumber = audioSourceIndex,
                    WaveFormat   = waveFormat,
                    // Buffer size to store duration of 1 frame
                    BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
                    NumberOfBuffers    = 3,
                };
                audioSource.DataAvailable += audioSource_DataAvailable;
            }

            screenThread = new Thread(RecordScreen)
            {
                Name         = typeof(Recorder).Name + ".RecordScreen",
                IsBackground = true
            };

            if (audioSource != null)
            {
                videoFrameWritten.Set();
                audioBlockWritten.Reset();
                audioSource.StartRecording();
            }
            screenThread.Start();
        }
Example #28
        /// <summary>
        /// Connects to other client and starts transmitting audio
        /// </summary>
        public void Start()
        {
            if (WaveIn.DeviceCount < 1)
            {
                return;
            }
            udpSender              = new UdpClient();
            SoundIn                = new WaveInEvent();
            SoundIn.DataAvailable += OnAudioCaptured;
            SoundIn.StartRecording();
            udpListener = new UdpClient();
            udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true); // test
            //
            var EndPointListen = new IPEndPoint(IPAddress.Any, connectionEndPoint.Port);

            udpListener.Client.Bind(EndPointListen);

            udpSender.Connect(connectionEndPoint);


            soundOut     = new DirectSoundOut();
            waveProvider = new BufferedWaveProvider(SoundIn.WaveFormat);
            soundOut.Init(waveProvider);
            soundOut.Play();

            ThreadPool.QueueUserWorkItem(ListenerThread, EndPointListen);

            connected = true;
            ConnectedToPerson(connected);
        }
Example #29
        /// <summary>
        /// Starts recording
        /// </summary>
        public void StartRecording()
        {
            if (!_isRecording)
            {
                try
                {
                    MyWaveIn.StartRecording();
                }
                catch (InvalidOperationException)
                {
                    Debug.WriteLine("Already recording!");
                }
            }

            _isRecording = true;
        }
Example #30
        public void Start()
        {
            lock (_locker)
            {
                Debug.WriteLine("Starting audio input...");

                if (_isStarted)
                {
                    throw new InvalidOperationException("The device is already started");
                }

                _isStarted = false;

                _waveIn = new WaveInEvent()
                {
                    DeviceNumber = _device.DeviceId,
                    WaveFormat   = _format
                };

                _waveIn.DataAvailable += WaveInOnDataAvailable;
                _waveIn.StartRecording();

                _isStarted = true;
            }
        }
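A matching Stop for this class is not shown; a sketch under the same fields, mirroring the locking and state checks of Start:

        public void Stop()
        {
            lock (_locker)
            {
                if (!_isStarted)
                {
                    return;
                }

                _waveIn.DataAvailable -= WaveInOnDataAvailable;
                _waveIn.StopRecording();
                _waveIn.Dispose();
                _waveIn = null;

                _isStarted = false;
            }
        }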