Example #1
 public void InitializeStream()
 {
     sourceStream = new WaveInEvent();
     sourceStream.BufferMilliseconds = 50;
     sourceStream.DeviceNumber = 0;
     sourceStream.WaveFormat = new WaveFormat(44100, 16, WaveIn.GetCapabilities(0).Channels);
 }
Example #2
 public void StartRec()
 {
     WaveSourceStream = new NAudio.Wave.WaveInEvent();
     WaveSourceStream.DeviceNumber = 0;
     WaveSourceStream.WaveFormat = new WaveFormat(16000, 1);
     WaveSourceStream.DataAvailable += sourceStream_DataAvailable;
     bufl = new List<byte[]>(); // initialize before StartRecording so the first DataAvailable callback never sees a null list
     WaveSourceStream.StartRecording();
 }
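A minimal sketch (not from the original source) of the sourceStream_DataAvailable handler this snippet assumes, copying each captured buffer into bufl:

 private void sourceStream_DataAvailable(object sender, WaveInEventArgs e)
 {
     // BytesRecorded can be less than Buffer.Length, so copy only the valid part
     var chunk = new byte[e.BytesRecorded];
     Buffer.BlockCopy(e.Buffer, 0, chunk, 0, e.BytesRecorded);
     bufl.Add(chunk);
 }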
Example #3
 private IWaveIn CreateWaveInDevice()
 {
     IWaveIn newWaveIn;
     if (radioButtonWaveIn.Checked)
     {
         newWaveIn = new WaveIn();
         newWaveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWaveInEvent.Checked)
     {
         newWaveIn = new WaveInEvent();
         newWaveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWasapi.Checked)
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         var device = (MMDevice) comboWasapiDevices.SelectedItem;
         newWaveIn = new WasapiCapture(device);
     }
     else
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         newWaveIn = new WasapiLoopbackCapture();
     }
     newWaveIn.DataAvailable += OnDataAvailable;
     newWaveIn.RecordingStopped += OnRecordingStopped;
     return newWaveIn;
 }
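A hedged usage sketch (not part of the original snippet): the returned IWaveIn only needs StartRecording, plus StopRecording and Dispose when finished:

 var waveIn = CreateWaveInDevice();
 waveIn.StartRecording();
 // ... OnDataAvailable receives captured buffers while recording runs ...
 waveIn.StopRecording(); // RecordingStopped (handled by OnRecordingStopped) fires once capture stops
 waveIn.Dispose();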
Example #4
        public Recorder(string fileName, 
            FourCC codec, int quality, 
            int audioSourceIndex, SupportedWaveFormat audioWaveFormat, bool encodeAudio, int audioBitRate)
        {
            System.Windows.Media.Matrix toDevice;
            using (var source = new HwndSource(new HwndSourceParameters()))
            {
                toDevice = source.CompositionTarget.TransformToDevice;
            }

            screenWidth = (int)Math.Round(SystemParameters.PrimaryScreenWidth * toDevice.M11);
            screenHeight = (int)Math.Round(SystemParameters.PrimaryScreenHeight * toDevice.M22);

            // Create AVI writer and specify FPS
            writer = new AviWriter(fileName)
            {
                FramesPerSecond = 10,
                EmitIndex1 = true,
            };

            // Create video stream
            videoStream = CreateVideoStream(codec, quality);
            // Set only the name. Other properties were set when the stream was created,
            // either explicitly by arguments or implicitly by the encoder used
            videoStream.Name = "Screencast";

            if (audioSourceIndex >= 0)
            {
                var waveFormat = ToWaveFormat(audioWaveFormat);

                audioStream = CreateAudioStream(waveFormat, encodeAudio, audioBitRate);
                // Set only the name. Other properties were set when the stream was created,
                // either explicitly by arguments or implicitly by the encoder used
                audioStream.Name = "Voice";

                audioSource = new WaveInEvent
                {
                    DeviceNumber = audioSourceIndex,
                    WaveFormat = waveFormat,
                    // Buffer size to store duration of 1 frame
                    BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
                    NumberOfBuffers = 3,
                };
                audioSource.DataAvailable += audioSource_DataAvailable;
            }

            screenThread = new Thread(RecordScreen)
            {
                Name = typeof(Recorder).Name + ".RecordScreen",
                IsBackground = true
            };

            if (audioSource != null)
            {
                videoFrameWritten.Set();
                audioBlockWritten.Reset();
                audioSource.StartRecording();
            }
            screenThread.Start();
        }
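The audioSource_DataAvailable handler is not shown. This constructor matches the SharpAvi screencast sample's synchronization pattern (videoFrameWritten/audioBlockWritten), so a sketch along those lines, assuming a stopThread wait handle like the sample's, might be:

        // Sketch (assumed; modeled on the SharpAvi screencast sample). 'stopThread'
        // is a hypothetical ManualResetEvent signaled on shutdown.
        private void audioSource_DataAvailable(object sender, WaveInEventArgs e)
        {
            var signalled = WaitHandle.WaitAny(new WaitHandle[] { videoFrameWritten, stopThread });
            if (signalled == 0) // a video frame was written first; keep the streams interleaved
            {
                audioStream.WriteBlock(e.Buffer, 0, e.BytesRecorded);
                audioBlockWritten.Set();
            }
        }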
Example #5
 public void CanGetWaveInMixerLine()
 {
     using (var waveIn = new WaveInEvent())
     {
         MixerLine line = waveIn.GetMixerLine();                
         //Debug.WriteLine(String.Format("Mic Level {0}", level));
     }
 }
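The commented-out Debug line refers to a level the snippet never computes; one plausible way to obtain it, assuming NAudio's mixer API (MixerLine.Controls with UnsignedMixerControl.Percent), is:

 // Hypothetical sketch: read the level from the line's volume control, if any.
 foreach (var control in line.Controls)
 {
     if (control.ControlType == MixerControlType.Volume && control is UnsignedMixerControl volume)
     {
         Debug.WriteLine(String.Format("Mic Level {0}", volume.Percent));
     }
 }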
Example #6
        public void iniciarCaptura()
        {
            try
            {
                /*WaveInCapabilities capabilities;

                for (int numberDevice = 0; numberDevice < WaveIn.DeviceCount; numberDevice++)
                {
                    capabilities = WaveIn.GetCapabilities(numberDevice);
                    Console.WriteLine("Producto->" + capabilities.ProductName.ToUpper().Trim());
                    if (capabilities.ProductName.ToUpper().Trim().Contains("BLUETOOTH"))
                    {
                        deviceBluetooth = numberDevice;
                        break;
                    }
                }*/

                foreach (IPAddress ip in System.Net.Dns.GetHostAddresses(""))
                {
                    if (Regex.IsMatch(ip.ToString(), @"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}"))
                    {
                        ipLocal = ip.ToString();
                    }
                }

                wi = new WaveInEvent();
                wi.BufferMilliseconds = 1000;
                wi.DeviceNumber = deviceBluetooth;
                wi.WaveFormat = new WaveFormat(44100, 2);
                wi.DataAvailable += new EventHandler<WaveInEventArgs>(wi_DataAvailable);
                wi.StartRecording();

                /*wo = new WaveOutEvent();
                bwp = new BufferedWaveProvider(wi.WaveFormat);
                bwp.DiscardOnBufferOverflow = true;
                wo.Init(bwp);
                wo.Play();*/

                tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".wav");
                writer = new WaveFileWriter(tempFile, wi.WaveFormat);

                hilo = new Thread(new ThreadStart(iniciarStreaming));
                hilo.Start();
            }
            catch (Exception ex)
            {
                logger.WriteToEventLog("ERROR: " + ex.Message +
                                        Environment.NewLine +
                                        "STACK TRACE: " + ex.StackTrace,
                                        "Servicio de captura de audio [iniciarCaptura]",
                                        EventLogEntryType.Error,
                                        "LogSucursalAudio");
                logger.WriteToErrorLog("ERROR: " + ex.Message,
                                        ex.StackTrace,
                                        "capturaAudio.cs");
                Console.WriteLine("Error [iniciarCaptura]->" + ex.Message);
            }
        }
Example #7
        public void Stop()
        {
            _recording = false;
            _protocol.LocalUser.Channel.SendVoiceStop();

            sourceStream.StopRecording();
            sourceStream.Dispose();
            sourceStream = null;
        }
Example #8
 public VolumeHelper(WaveInEvent waveIn)
 {
     if (waveIn == null)
         throw new ArgumentNullException(nameof(waveIn));
     _waveIn = new WeakReference(waveIn);
     volumeControl = null;
     altVolumeControl = null;
     TryGetVolumeControl();
 }
Example #9
        public AudioServer()
        {
            recorder = new WaveInEvent
            {
                WaveFormat = new WaveFormat(12000, 16, 2)
            };

            recorder.DataAvailable += WaveInOnDataAvailable;
        }
Example #10
 public SwhEar(int deviceNumber)
 {
     Console.WriteLine($"Preparing audio device: {deviceNumber}");
     wvin = new NAudio.Wave.WaveInEvent();
     wvin.DeviceNumber       = deviceNumber;
     wvin.WaveFormat         = new NAudio.Wave.WaveFormat(SAMPLERATE, BITRATE, CHANNELS);
     wvin.BufferMilliseconds = BUFFERMILLISEC;
     wvin.DataAvailable     += OnDataAvailable;
     Start();
 }
Example #11
        public MicrophoneRecorder(IMumbleProtocol protocol)
        {
            _protocol = protocol;
            sourceStream = new WaveInEvent // assign the field rather than a local, so Stop() can later stop and dispose it
            {
                WaveFormat = new WaveFormat(48000, 1)
            };
            sourceStream.DataAvailable += VoiceDataAvailable;

            sourceStream.StartRecording();
        }
Example #12
        /// <summary>
        /// Starts recording.
        /// </summary>
        public void StartRecord(string audioFileName)
        {
            waveIn = new WaveInEvent();
            waveIn.DeviceNumber = AudioController.getInstance().GetDefaultInputDeviceNumber();
            waveIn.WaveFormat = new WaveFormat(44100, 2);
            waveIn.DataAvailable += OnDataAvailable;
            writer = new WaveFileWriter(audioFileName, waveIn.WaveFormat);
            isRecording = true;

            waveIn.StartRecording();
        }
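OnDataAvailable is not shown; given the WaveFileWriter created above (and the matching StopRecording logic in Example #21, which uses the same fields), a minimal sketch would be:

        // Sketch (assumed): append each captured buffer to the open file.
        private void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            if (isRecording && writer != null)
            {
                writer.Write(e.Buffer, 0, e.BytesRecorded);
            }
        }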
Example #13
 //------------------------------------------------------------------------------------------------------------------------
 void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
 {
     lock (this)
     {
         if (waveSource != null)
         {
             waveSource.Dispose();
             waveSource = null;
             IsActive = false;
         }
     }
 }
Example #14
        public void Init()
        {
            waveIn = new WaveInEvent();
            waveIn.BufferMilliseconds = 100;
            waveIn.DeviceNumber = -1; // -1 = WAVE_MAPPER, the default capture device
            waveIn.WaveFormat = new WaveFormat(8000, 1);
            waveIn.DataAvailable += WaveIn_DataAvailable;

            waveOut = new WaveOut();
            waveOutProvider = new BufferedWaveProvider(waveIn.WaveFormat);
            waveOut.Init(waveOutProvider);
            waveOut.Play();
        }
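WaveIn_DataAvailable is not shown; for this microphone-to-speaker monitor, a minimal sketch just feeds the BufferedWaveProvider that waveOut is playing:

        // Sketch (assumed): push captured samples into the playback buffer.
        private void WaveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            waveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
        }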
Example #15
 //------------------------------------------------------------------------------------------------------------------------
 public void Start()
 {
     audioFormat = new AudioFormat(8000, 16, 2);
     IsActive = true;
     waveSource = new WaveInEvent();
     //wave format
     waveSource.WaveFormat = new WaveFormat(audioFormat.samplerate, audioFormat.bitsperchannel, audioFormat.channels);
     //register event cbs
     waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
     waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
     //start record from mic
     waveSource.StartRecording();
 }
Example #16
        static void _main()
        {
            BlackCore.basic.cParams args = bcore.app.args;

            client = new System.Net.Sockets.TcpClient();

            int wavInDevices = WaveIn.DeviceCount;
            int selWav = 0;
            for (int wavDevice = 0; wavDevice < wavInDevices; wavDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(wavDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels", wavDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.Write("Select device: ");
            selWav = int.Parse(Console.ReadLine());
            Console.WriteLine("Selected device is " + selWav.ToString());

            sshClient = new SshClient(args["host"], args["user"], args["pass"]);
            sshClient.Connect();

            if (sshClient.IsConnected)
            {
                shell = sshClient.CreateShellStream("xterm", 50, 50, 640, 480, 17640);
                Console.WriteLine("Open listening socket...");
                shell.WriteLine("nc -l " + args["port"] + "|pacat --playback");
                System.Threading.Thread.Sleep(2000);

                Console.WriteLine("Try to connect...");
                client.Connect(args["host"], int.Parse(args["port"]));
                if (!client.Connected) return;
                upStream = client.GetStream();

                //====================

                WaveInEvent wavInStream = new WaveInEvent();
                wavInStream.DataAvailable += new EventHandler<WaveInEventArgs>(wavInStream_DataAvailable);
                wavInStream.DeviceNumber = selWav;
                wavInStream.WaveFormat = new WaveFormat(44100, 16, 2);
                wavInStream.StartRecording();
                Console.WriteLine("Working.....");

                Console.ReadKey();
                sshClient.Disconnect();
                client.Close();
                wavInStream.StopRecording();
                wavInStream.Dispose();
                wavInStream = null;
            }
        }
Example #17
        public void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
        {
            if (WaveSource != null)
            {
                WaveSource.Dispose();
                WaveSource = null;
            }

            if (WaveFile != null)
            {
                WaveFile.Dispose();
                WaveFile = null;
            }
        }
Example #18
        private static void MicAud_RecordingStopped(object sender, NAudio.Wave.StoppedEventArgs e)
        {
            if (micAud != null)
            {
                micAud.Dispose();
                micAud = null;
            }

            if (wfw != null)
            {
                wfw.Dispose();
                wfw = null;
            }
        }
Example #19
    public Form1()
    {
        InitializeComponent();

        var waveIn = new NAudio.Wave.WaveInEvent
        {
            DeviceNumber       = 0, // customize this to select your microphone device
            WaveFormat         = new NAudio.Wave.WaveFormat(rate: 1000, bits: 16, channels: 1),
            BufferMilliseconds = 10
        };

        waveIn.DataAvailable += WaveIn_DataAvailable;
        waveIn.StartRecording();
    }
Example #20
        /// <summary>
        /// Configures the audio input device for capture.
        /// </summary>
        /// <param name="deviceNumber"></param>
        /// <param name="waveFormatSampleRate"></param>
        /// <param name="numChannels"></param>
        /// <returns></returns>
        public int ConfigureAudioDevice(int deviceNumber, int waveFormatSampleRate, int numChannels)
        {
            if (NAudio.Wave.WaveIn.DeviceCount < 1)
            {
                Console.WriteLine("No microphone!");
                return(-1);
            }

            waveIn = new NAudio.Wave.WaveInEvent();
            waveIn.DeviceNumber   = deviceNumber;
            waveIn.WaveFormat     = new NAudio.Wave.WaveFormat(waveFormatSampleRate, numChannels);
            waveIn.DataAvailable += this.OnDataAvailable;

            return(0);
        }
Example #21
        /// <summary>
        /// Stops recording.
        /// </summary>
        public void StopRecording()
        {
            if (waveIn != null)
            {
                waveIn.StopRecording();
                waveIn.Dispose();
                waveIn = null;
            }
            if (writer != null)
            {
                writer.Dispose();
                writer = null;
            }

            isRecording = false;
        }
Example #22
        private WaveFormat _waveFormat = new WaveFormat(8000, 16, 1); // The format both the input and output audio streams will use: 8 kHz, 16-bit, mono PCM, ready for G.711 (PCMU) encoding.

        #endregion Fields

        #region Constructors

        public AudioChannel()
        {
            // Set up the device that will play the audio from the RTP received from the remote end of the call.
            m_waveOut = new WaveOut();
            m_waveProvider = new BufferedWaveProvider(_waveFormat);
            m_waveOut.Init(m_waveProvider);
            m_waveOut.Play();

            // Set up the input device that will provide audio samples that can be encoded, packaged into RTP and sent to
            // the remote end of the call.
            m_waveInEvent = new WaveInEvent();
            m_waveInEvent.BufferMilliseconds = 20;
            m_waveInEvent.NumberOfBuffers = 1;
            m_waveInEvent.DeviceNumber = 0;
            m_waveInEvent.DataAvailable += AudioSampleAvailable;
            m_waveInEvent.WaveFormat = _waveFormat;
        }
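The AudioSampleAvailable handler is not shown. Since the comment above targets PCMU, a hedged sketch using NAudio's G.711 codec (NAudio.Codecs.MuLawEncoder) to encode the captured 16-bit PCM before it is packaged into RTP might look like:

        // Hypothetical sketch: encode 16-bit PCM samples to G.711 mu-law (PCMU).
        private void AudioSampleAvailable(object sender, WaveInEventArgs e)
        {
            var pcmu = new byte[e.BytesRecorded / 2];
            for (int i = 0; i < pcmu.Length; i++)
            {
                short sample = BitConverter.ToInt16(e.Buffer, i * 2);
                pcmu[i] = MuLawEncoder.LinearToMuLawSample(sample);
            }
            // ... package pcmu into an RTP payload and send to the remote end ...
        }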
Example #23
        public void Record()
        {
            _recording = true;

            if (sourceStream != null)
                sourceStream.Dispose();
            sourceStream = new WaveInEvent
            {
                WaveFormat = new WaveFormat(48000, 16, 1)
            };
            sourceStream.BufferMilliseconds = 5;
            sourceStream.DeviceNumber = SelectedDevice;
            sourceStream.NumberOfBuffers = 3;
            sourceStream.DataAvailable += VoiceDataAvailable;

            sourceStream.StartRecording();
        }
Example #24
        // Sets up the stream for recording/streaming: when data is available in the stream, the supplied dataAvailable handler is invoked with that data.
        public void StartRecording(int index, EventHandler<NAudio.Wave.WaveInEventArgs> dataAvailable)
        {
            if (dataAvailable == null)
                return;

            //setup the input stream. we get the device number from the selected index, setup the format for reading
            sourceStream = new NAudio.Wave.WaveInEvent();//NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = index;
            sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(index).Channels);
            waveFormat = sourceStream.WaveFormat;

            //setup the callbacks when there is data or the recording stopped(suddenly disconnection = no recording = the function)
            sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(dataAvailable);
            sourceStream.RecordingStopped += new EventHandler<NAudio.Wave.StoppedEventArgs>(StopRecording);

            sourceStream.StartRecording();
        }
Example #25
        public sourceSatellite(MixingSampleProvider targetMixer)
        {
            volume = 1f;
            sourceState = "OFF";

            //Open channel
            inputSat = new WaveInEvent();
            inputSat.DeviceNumber = devNum;
            inputSat.WaveFormat = new WaveFormat();
            inputSat.BufferMilliseconds = 400;

            //Volume VSP
            //inputSatVSP = new VolumeSampleProvider(new Pcm16BitToSampleProvider(new WaveInProvider(inputSat)));
            inputSatVSP = new VolumeSampleProvider(new WaveInProvider(inputSat).ToSampleProvider());

            //Send to mixer
            targetMixer.AddMixerInput(inputSatVSP);
        }
Example #26
        private void record_Click(object sender, EventArgs e)
        {
            if (sourceList.SelectedItems.Count == 0) return; // validate the selection before indexing into it
            int deviceNumber = sourceList.SelectedItems[0].Index;
            // set up the recorder
            recorder = new WaveInEvent();
            //recorder.DataAvailable += RecorderOnDataAvailable;
            recorder.DataAvailable += SendDataAvaible;
            recorder.DeviceNumber = deviceNumber;
            recorder.WaveFormat = new WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

            // set up our signal chain
            bufferedWaveProvider = new BufferedWaveProvider(recorder.WaveFormat);
            //writer = new WaveFileWriter("temp.wav", bufferedWaveProvider.WaveFormat);
            //savingWaveProvider = new LoopBack(bufferedWaveProvider, "temp.wav");

            recorder.StartRecording();
        }
Example #27
 public void Stop()
 {
     if (waveIn != null)
     {
         waveIn.StopRecording();
         waveIn.Dispose();
         waveIn = null;
     }
     if (waveOut != null)
     {
         waveOut.Stop();
         waveOut.Dispose();
         waveOut = null;
     }
     if (provider != null)
     {
         provider.ClearBuffer();
         provider = null;
     }
 }
Example #28
    public override void Setup() {
      base.Setup();

      int waveInDevices = WaveIn.DeviceCount;
      for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++) {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
        Host.Log(this, "Device " + waveInDevice + ": " + deviceInfo.ProductName + ", " + deviceInfo.Channels + " channels");
      }

      waveIn = new WaveInEvent();
      waveIn.DeviceNumber = ConfigManager.GetInstance().Find("microphone.device", 0);
      waveIn.WaveFormat = new WaveFormat(16000, 2);
      waveIn.DataAvailable += waveIn_DataAvailable;

      buffer = new StreamBuffer();
      waveIn.StartRecording();

      double confidence = ConfigManager.GetInstance().Find("microphone.confidence", 0.6);
      AddOnManager.GetInstance().AddAudioSource("Microphone", buffer, "Microphone", null, confidence);
    }
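waveIn_DataAvailable is not shown and StreamBuffer is a custom type; assuming it exposes a stream-like Write method (a hypothetical signature), the handler would simply forward the captured bytes:

    // Hypothetical sketch: forward captured audio into the shared StreamBuffer.
    // StreamBuffer.Write(byte[], int, int) is an assumed signature.
    void waveIn_DataAvailable(object sender, WaveInEventArgs e) {
      buffer.Write(e.Buffer, 0, e.BytesRecorded);
    }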
Example #29
        public void record()
        {
            Console.WriteLine();
            Console.WriteLine("Recording on Device  # 0 ");

            WaveSource = new WaveInEvent();
            WaveSource.DeviceNumber = ActiveDevice;
            WaveSource.WaveFormat   = new WaveFormat(44100, 1);

            WaveSource.DataAvailable    += new EventHandler <WaveInEventArgs>(waveSource_DataAvailable);
            WaveSource.RecordingStopped += new EventHandler <StoppedEventArgs>(waveSource_RecordingStopped);

            long milliseconds = (long)Math.Round(DateTime.Now.Subtract(DateTime.MinValue.AddYears(1969)).TotalMilliseconds);

            Filename = Path.Combine(samplePath, $"{sampleCount}_AudioSample_{milliseconds}.wav");
            sampleCount++;
            WaveFile = new WaveFileWriter(Filename, WaveSource.WaveFormat);

            WaveSource.StartRecording();
        }
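waveSource_DataAvailable is not shown; paired with the WaveFileWriter created above (and the RecordingStopped handler in Example #17, which uses the same WaveSource/WaveFile fields), a minimal sketch would be:

        // Sketch (assumed): stream each captured buffer into the .wav file.
        public void waveSource_DataAvailable(object sender, WaveInEventArgs e)
        {
            if (WaveFile != null)
            {
                WaveFile.Write(e.Buffer, 0, e.BytesRecorded);
                WaveFile.Flush();
            }
        }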
Example #30
        public void Start(IPEndPoint RemoteUdpPoint)
        {
            RemoteServer = RemoteUdpPoint;

            provider = new BufferedWaveProvider(format);

            if (waveIn == null)
            {
                waveIn = new WaveInEvent();
                waveIn.WaveFormat = format;
                waveIn.BufferMilliseconds = 500;
                waveIn.DataAvailable += waveIn_DataAvailable;
                waveIn.StartRecording();
            }
            if (waveOut == null)
            {
                waveOut = new WaveOut();
                waveOut.DesiredLatency = 500;
                waveOut.Init(provider);
                waveOut.Play();
            }
        }
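waveIn_DataAvailable is not shown; given the RemoteServer endpoint stored above, a hedged sketch (assuming a hypothetical UdpClient field named udp) would send each captured buffer to the peer:

        // Hypothetical sketch: 'udp' is an assumed UdpClient field.
        private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            udp.Send(e.Buffer, e.BytesRecorded, RemoteServer);
        }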
Example #31
        public static void RecThread()
        {
            micAud = new NAudio.Wave.WaveInEvent();
            //micAud.WaveFormat = new NAudio.Wave.WaveFormat(44100, 1);
            //micAud.DataAvailable += MicAud_DataAvailable;
            //micAud.RecordingStopped += MicAud_RecordingStopped;
            //// micAud.DataAvailable += (s, capData) => wfw.Write(capData.Buffer, 0, capData.BytesRecorded);
            //wfw = new WaveFileWriter(_micLoc, micAud.WaveFormat);
            //micAud.StartRecording();

            using (spkAud = new CSCore.SoundIn.WasapiLoopbackCapture())
            {
                spkAud.Initialize();

                micAud.WaveFormat        = new NAudio.Wave.WaveFormat(spkAud.WaveFormat.SampleRate, spkAud.WaveFormat.Channels);
                micAud.DataAvailable    += MicAud_DataAvailable;
                micAud.RecordingStopped += MicAud_RecordingStopped;
                // micAud.DataAvailable += (s, capData) => wfw.Write(capData.Buffer, 0, capData.BytesRecorded);
                wfw = new WaveFileWriter(_micLoc, micAud.WaveFormat);
                micAud.StartRecording();

                using (var w = new WaveWriter(_spkLoc, spkAud.WaveFormat))
                {
                    spkAud.DataAvailable += (s, capData) => w.Write(capData.Data, capData.Offset, capData.ByteCount);
                    spkAud.Start();

                    while (!stopRec)
                    {
                        System.Threading.Thread.Sleep(10); // yield instead of hot-spinning while waiting for the stop flag
                    }

                    spkAud.Stop();
                    micAud.StopRecording();
                }
            }
        }
Example #32
        static void Main(string[] args)
        {
            StatsdClient.Metrics.Configure(new MetricsConfig { StatsdServerName = "127.0.0.1" });

            int waveInDevices = WaveIn.DeviceCount;
            for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.WriteLine();
            Console.Write("Select Device: ");

            int device = Int32.Parse(Console.ReadLine());

            waveIn = new WaveInEvent();
            waveIn.DeviceNumber = device;
            waveIn.DataAvailable += waveIn_DataAvailable;
            waveIn.WaveFormat = new WaveFormat(200, 2); // note: 200 Hz is an unusually low sample rate that many capture drivers will reject
            waveIn.StartRecording();

            while (true) Thread.Sleep(100);
        }
Example #33
        private WaveFormat _waveFormat = new WaveFormat(8000, 16, 1); // The format both the input and output audio streams will use: 8 kHz, 16-bit, mono PCM, ready for G.711 (PCMU) encoding.

        #endregion Fields

        #region Constructors

        public AudioChannel()
        {
            // Set up the device that will play the audio from the RTP received from the remote end of the call.
            m_waveOut = new WaveOut();
            m_waveProvider = new BufferedWaveProvider(_waveFormat);
            m_waveOut.Init(m_waveProvider);
            m_waveOut.Play();

            // Set up the input device that will provide audio samples that can be encoded, packaged into RTP and sent to
            // the remote end of the call.
            m_waveInEvent = new WaveInEvent();
            m_waveInEvent.BufferMilliseconds = 20;
            m_waveInEvent.NumberOfBuffers = 1;
            m_waveInEvent.DeviceNumber = 0;
            m_waveInEvent.DataAvailable += RTPChannelSampleAvailable;
            m_waveInEvent.WaveFormat = _waveFormat;

            // Create a UDP socket to use for sending and receiving RTP packets.
            int port = FreePort.FindNextAvailableUDPPort(DEFAULT_START_RTP_PORT);
            _rtpEndPoint = new IPEndPoint(_defaultLocalAddress, port);
            m_rtpChannel = new RTPChannel(_rtpEndPoint);
            m_rtpChannel.OnFrameReady += RTPChannelSampleReceived;

            _audioLogger.Debug("RTP channel endpoint " + _rtpEndPoint.ToString());
        }
Example #34
        async Task <object> StreamingMicRecognizeAsync(int seconds)
        {
            object writeLock = new object();
            bool   writeMore = true;

            if (tamam)
            {
                return(0);
            }


            if (NAudio.Wave.WaveIn.DeviceCount < 1)
            {
                metin.Content = "Mikrofon Yok!";
                return(-1);
            }
            var speech        = SpeechClient.Create();
            var streamingCall = speech.StreamingRecognize();

            await streamingCall.WriteAsync(
                new StreamingRecognizeRequest()
            {
                StreamingConfig = new StreamingRecognitionConfig()
                {
                    Config = new RecognitionConfig()
                    {
                        Encoding =
                            RecognitionConfig.Types.AudioEncoding.Linear16,
                        SampleRateHertz = 16000,
                        LanguageCode    = "tr",
                    },
                    InterimResults = true,
                }
            });

            Task printResponses = Task.Run(async() =>
            {
                while (await streamingCall.ResponseStream.MoveNext(
                           default(System.Threading.CancellationToken)))
                {
                    foreach (var result in streamingCall.ResponseStream
                             .Current.Results)
                    {
                        foreach (var alternative in result.Alternatives)
                        {
                            if (!tamam)
                            {
                                yazi = alternative.Transcript;
                                timer.Start();
                            }
                        }
                    }
                }
            });



            var waveIn = new NAudio.Wave.WaveInEvent();

            waveIn.DeviceNumber   = 0;
            waveIn.WaveFormat     = new NAudio.Wave.WaveFormat(16000, 1);
            waveIn.DataAvailable +=
                (object sender, NAudio.Wave.WaveInEventArgs args) =>
            {
                lock (writeLock)
                {
                    if (!writeMore)
                    {
                        return;
                    }
                    streamingCall.WriteAsync(
                        new StreamingRecognizeRequest()
                    {
                        AudioContent = Google.Protobuf.ByteString
                                       .CopyFrom(args.Buffer, 0, args.BytesRecorded)
                    }).Wait();
                }
            };


            waveIn.StartRecording();
            metin.Content        = "Şimdi Konuşabilirsiniz";
            kulak.Visibility     = Visibility.Visible;
            acikAgiz.IsEnabled   = false;
            kapaliAgiz.IsEnabled = false;
            try
            {
                await Task.Delay(TimeSpan.FromSeconds(seconds), cancellationTokenSource.Token);
            }
            catch (TaskCanceledException ex)
            {
                Console.WriteLine(ex.Message);
            }
            finally
            {
                cancellationTokenSource.Dispose();
            }

            acikAgiz.IsEnabled   = true;
            kapaliAgiz.IsEnabled = true;
            kulak.Visibility     = Visibility.Hidden;
            waveIn.StopRecording();

            lock (writeLock) writeMore = false;


            if (genelMod.IsChecked == true)
            {
                cevapla(yazi);
            }
            if (ceviriMod.IsChecked == true)
            {
                cevir(yazi);
            }

            await streamingCall.WriteCompleteAsync();

            await printResponses;

            metin.Content = yazi;

            return(0);
        }
Example #35
        /// <summary>
        /// Stop audio source.
        /// </summary>
        /// 
        /// <remarks><para>Stops audio source.</para>
        /// </remarks>
        /// 
        public void Stop()
        {
            if (_sampleChannel != null)
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;

            if (_waveIn != null)
            {
                // signal to stop
                _waveIn.DataAvailable -= WaveInDataAvailable;
                _waveIn.StopRecording();
                _waveIn.RecordingStopped -= WaveInRecordingStopped;

                if (WaveOutProvider != null)
                {
                    if (WaveOutProvider.BufferedBytes > 0) WaveOutProvider.ClearBuffer();
                    WaveOutProvider = null;
                }

                _waveIn.Dispose();
                _waveIn = null;

            }
        }
Example #36
        /// <summary>
        /// Start audio source.
        /// </summary>
        /// 
        /// <remarks>Starts audio source and return execution to caller. audio source
        /// object creates background thread and notifies about new frames with the
        /// help of <see cref="DataAvailable"/> event.</remarks>
        /// 
        /// <exception cref="ArgumentException">audio source is not specified.</exception>
        /// 
        public void Start()
        {
            if (!IsRunning)
            {
                // check source

                int i = 0, selind = -1;
                for (int n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                        selind = i;
                    i++;
                }
                if (selind == -1)
                {
                    //device no longer connected or not configured
                    if (i > 0)
                        selind = 0;
                    else
                    {
                        //if (AudioSourceError != null)
                        //    AudioSourceError(this, new AudioSourceErrorEventArgs("not connected"));
                        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                        return;    
                    }
                    
                }

                _waveIn = new WaveInEvent { BufferMilliseconds = 200, DeviceNumber = selind, WaveFormat = RecordingFormat };
                _waveIn.DataAvailable += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                
                if (LevelChanged != null)
                {
                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                }
                _waveIn.StartRecording();

            }
        }
Example #37
        /// <summary>
        /// Start audio source.
        /// </summary>
        /// 
        /// <remarks>Starts audio source and return execution to caller. audio source
        /// object creates background thread and notifies about new frames with the
        /// help of <see cref="DataAvailable"/> event.</remarks>
        /// 
        /// <exception cref="ArgumentException">audio source is not specified.</exception>
        /// 
        public void Start()
        {
            if (string.IsNullOrEmpty(_source))
                throw new ArgumentException("Audio source is not specified.");

            if (_started) return;

            // check source
            lock (_lock)
            {
                if (_started)
                    return;

                int i = 0, selind = -1;
                for (var n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                        selind = i;
                    i++;
                }
                if (selind == -1)
                {
                    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                    return;
                }

                _started = true;
                _waveIn = new WaveInEvent
                          {
                              BufferMilliseconds = 200,
                              DeviceNumber = selind,
                              WaveFormat = RecordingFormat
                          };
                _waveIn.DataAvailable += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                _waveIn.StartRecording();
            }
        }
        static async Task <object> StreamingMicrophoneRecognizeAsync(int seconds = 60, string languageCode = "en-US")
        {
            var speech        = SpeechClient.Create();
            var streamingCall = speech.StreamingRecognize();
            await streamingCall.WriteAsync(
                new StreamingRecognizeRequest()
            {
                StreamingConfig = new StreamingRecognitionConfig()
                {
                    Config = new RecognitionConfig()
                    {
                        Encoding        = RecognitionConfig.Types.AudioEncoding.Linear16,
                        SampleRateHertz = 44100,
                        LanguageCode    = languageCode
                    },
                    InterimResults = true,
                }
            });

            Task printResponses = Task.Run(async() =>
            {
                var responseStream = streamingCall.GetResponseStream();
                while (await responseStream.MoveNextAsync())
                {
                    StreamingRecognizeResponse response = responseStream.Current;
                    Console.WriteLine(response.Results[0].Alternatives[0].Transcript); // Print most probable result.
                }
            });

            object writeLock = new object();
            bool   writeMore = true;
            var    waveIn    = new NAudio.Wave.WaveInEvent();

            waveIn.DeviceNumber   = 0;
            waveIn.WaveFormat     = new NAudio.Wave.WaveFormat(44100, 1); // 44100Hz Mono.
            waveIn.DataAvailable += (object sender, NAudio.Wave.WaveInEventArgs args) =>
            {
                lock (writeLock)
                {
                    if (!writeMore)
                    {
                        return;
                    }

                    streamingCall.WriteAsync(
                        new StreamingRecognizeRequest()
                    {
                        AudioContent = Google.Protobuf.ByteString.CopyFrom(args.Buffer, 0, args.BytesRecorded)
                    }).Wait();
                }
            };

            waveIn.StartRecording();
            Console.WriteLine("Speek now.");
            await Task.Delay(TimeSpan.FromSeconds(seconds));

            waveIn.StopRecording();
            lock (writeLock)
            {
                writeMore = false;
            }

            await streamingCall.WriteCompleteAsync();

            await printResponses;

            return(0);
        }
Example #39
        // Engine initialization: return true on success, false on failure
        static bool Initialize()
        {
            WaveInEvent WavEvent = new WaveInEvent();
            WavEvent.DeviceNumber = 0;

            WavEvent.DataAvailable += new EventHandler<WaveInEventArgs>(InputDevice_DataAvailable);
            WavEvent.WaveFormat = WavFormat;
            WavEvent.StartRecording();

            Console.WriteLine("SYS_LANG?");
            lang = Console.ReadLine().ToLower();

            Console.WriteLine("GSR_THRESHOLD?");
            if (!float.TryParse(Console.ReadLine(), out threshold)) {
                threshold = 0.1f;
            }

            if (lang == "sys_lang")
            {
                lang = "ja";
            }

            return true;
        }
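InputDevice_DataAvailable and WavFormat are defined elsewhere. A hedged sketch of the handler, assuming the GSR_THRESHOLD read above gates audio on normalized peak amplitude (an assumption, since the original handler is not shown):

        // Hypothetical sketch: compute the peak amplitude of the captured chunk
        // (normalized to 0..1 for 16-bit PCM) and gate it on 'threshold'.
        static void InputDevice_DataAvailable(object sender, WaveInEventArgs e)
        {
            float peak = 0f;
            for (int i = 0; i + 1 < e.BytesRecorded; i += 2)
            {
                float sample = Math.Abs(BitConverter.ToInt16(e.Buffer, i) / 32768f);
                if (sample > peak) peak = sample;
            }
            if (peak >= threshold)
            {
                // ... forward this chunk to the speech recognizer ...
            }
        }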