Code Example #1
        public void Start(string endpoint)
        {
            StateChanged?.Invoke(this, new ConsumerStateEventArgs(State.Busy, "", SessionId));

            byte[] buffer = null;

            using (var file = new Mp3FileReader(@"C:\Users\karel\Downloads\102-Keith-Million.mp3"))
            {
                using (var stream = WaveFormatConversionStream.CreatePcmStream(file))
                {
                    // 'format' is assumed to be a WaveFormat field defined elsewhere in this class.
                    using (var raw = new RawSourceWaveStream(stream, format))
                    {
                        buffer = new byte[raw.Length];

                        // Stream.Read may return fewer bytes than requested, so loop until the buffer is full.
                        int offset = 0, bytesRead;
                        while (offset < buffer.Length &&
                               (bytesRead = raw.Read(buffer, offset, buffer.Length - offset)) > 0)
                        {
                            offset += bytesRead;
                        }
                    }
                }
            }

            if (buffer != null && buffer.Length > 0)
            {
                DataAvailable?.Invoke(this, new DataEventArgs(buffer, buffer.Length));
            }

            StateChanged?.Invoke(this, new ConsumerStateEventArgs(State.Finished, "", SessionId));
        }
Code Example #2
            private void Playbutton_Click(object sender, EventArgs e)
            {
                if (player != null)
                {
                    player.Stop();
                    return;
                }

                player = new WaveOutEvent();

                WaveStream mainOutputStream = new RawSourceWaveStream(this.GetMemoryStream(), new WaveFormat());
                var        volumeStream     = new WaveChannel32(mainOutputStream);

                volumeStream.PadWithZeroes = false;

                player.PlaybackStopped += (o, args) =>
                {
                    player.Dispose();
                    player = null;
                };

                player.Init(volumeStream);

                player.Play();
            }
Code Example #3
File: Form1.cs Project: kubaadamus/UDP
 //===================================== P L A Y ========================================================//
 public void PlayReceivedAudio(byte[] ReceivedAudioArray)
 {
     Task.Factory.StartNew(() =>
     {
         using (WaveOut audioout = new WaveOut())
             using (MemoryStream ms = new MemoryStream(ReceivedAudioArray))
             {
                 ManualResetEvent semaphoreObject = new ManualResetEvent(false);
                 audioout.Volume                  = 1.0f; // must be set after the device is created
                 audioout.DesiredLatency          = 100;
                 // 'wf' is assumed to be a WaveFormat field describing the received audio.
                 RawSourceWaveStream rsws         = new RawSourceWaveStream(ms, wf);
                 IWaveProvider provider           = rsws;
                 audioout.Init(provider);
                 EventHandler <NAudio.Wave.StoppedEventArgs> handler = (o, k) =>
                 {
                     semaphoreObject.Set();
                 };
                 audioout.PlaybackStopped += handler;
                 audioout.Play();
                 //while (audioout.PlaybackState != PlaybackState.Stopped) ;
                 semaphoreObject.WaitOne();
                 audioout.PlaybackStopped -= handler;
             }
     });
 }
Code Example #4
        public void Play(byte[] buffer)
        {
            IWaveProvider waveProvider = new RawSourceWaveStream(new MemoryStream(buffer), _waveFormat);

            _speakers.Init(waveProvider);
            _speakers.Play();
        }
Code Example #5
        public double TransmitData(byte[] data)
        {
            var    samples = ModulateData(data);
            double txTime  = ((double)samples.Length) / SampleRateHz;

            var rawBytes = new byte[samples.Length * 2];

            for (int i = 0; i < samples.Length; i++)
            {
                var bts = BitConverter.GetBytes(samples[i]);
                rawBytes[i * 2]     = bts[0];
                rawBytes[i * 2 + 1] = bts[1];
            }

            using (var ms = new MemoryStream(rawBytes))
            {
                using (var rs = new RawSourceWaveStream(ms, new WaveFormat(Convert.ToInt32(SampleRateHz), 16, 1)))
                {
                    using (var wo = new WaveOutEvent())
                    {
                        wo.Init(rs);
                        wo.Play();
                        while (wo.PlaybackState == PlaybackState.Playing)
                        {
                            Thread.Sleep(10); // a short sleep instead of spinning avoids pegging a CPU core
                        }
                    }
                }
            }

            return(txTime);
        }
Code Example #6
        // Built on https://github.com/naudio/NAudio/issues/174; I created WaveFormatConvert from it.
        // Since we are resampling the audio coming from the sound card, take care that the sample rates (Hz) stay the same.

        /*
         *  Example values coming from the sound card:
         *  48000 Hz (sample rate), 24-bit, STEREO
         *  -> Convert
         *  48000 Hz (sample rate), 16-bit, STEREO gives better-sounding output.
         */
        public byte[] WaveFormatConvert(byte[] input, int length, WaveFormat inFormat, WaveFormat outFormat)
        {
            if (length == 0)
            {
                return(new byte[0]);
            }
            using (var memStream = new MemoryStream(input, 0, length))
            {
                using (var inputStream = new RawSourceWaveStream(memStream, inFormat))
                {
                    using (var resampler = new MediaFoundationResampler(inputStream, outFormat))
                    {
                        resampler.ResamplerQuality = sesKalitesi; // quality: 1 (lowest) to 60 (highest)
                        //CONVERTED READ STREAM
                        byte[] buffer = new byte[length];
                        using (var stream = new MemoryStream())
                        {
                            int read;
                            while ((read = resampler.Read(buffer, 0, length)) > 0)
                            {
                                stream.Write(buffer, 0, read);
                            }
                            return(stream.ToArray());
                        }
                    }
                }
            }
        }
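A hypothetical call site for the helper above (the handler wiring and variable names below are illustrative assumptions, not part of the original project), converting a 24-bit stereo capture buffer to 16-bit stereo at the same 48000 Hz rate, exactly as the comment describes:

        // Hypothetical usage of WaveFormatConvert; both formats keep the same sample rate.
        var inFormat  = new WaveFormat(48000, 24, 2);
        var outFormat = new WaveFormat(48000, 16, 2);

        void OnCaptureDataAvailable(object sender, WaveInEventArgs e)
        {
            byte[] pcm16 = WaveFormatConvert(e.Buffer, e.BytesRecorded, inFormat, outFormat);
            // pcm16 now holds 16-bit stereo PCM at 48000 Hz.
        }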
Code Example #7
        private async Task <StorageFile> GetAudioTempFileAsync(byte[] Audiobuffer)
        {
            if (audioEncodingProperties != null)
            {
                var folder = ApplicationData.Current.TemporaryFolder;
                var name   = DateTime.Now.ToString("yyyy-MM-dd-HHmmss");
                var file   = await folder.CreateFileAsync($"{name}.wav");

                var s = new RawSourceWaveStream(new MemoryStream(Audiobuffer), WaveFormat.CreateIeeeFloatWaveFormat((int)audioEncodingProperties.SampleRate, (int)audioEncodingProperties.ChannelCount));
                using (var writer = new WaveFileWriterRT(await file.OpenStreamForWriteAsync(), s.WaveFormat))
                {
                    long outputLength = 0;
                    var  buffer       = new byte[s.WaveFormat.AverageBytesPerSecond * 4];
                    while (true)
                    {
                        int bytesRead = s.Read(buffer, 0, buffer.Length);
                        if (bytesRead == 0)
                        {
                            // end of source provider
                            break;
                        }
                        outputLength += bytesRead;
                        // Write will throw exception if WAV file becomes too large
                        writer.Write(buffer, 0, bytesRead);
                    }
                }
                return(file);
            }
            return(null);
        }
Code Example #8
        private void BackgroundPlayer_DoWork(object sender, System.ComponentModel.DoWorkEventArgs e)
        {
            object[] arguments = e.Argument as object[];

            WaveStream WaveFloat = (WaveStream)arguments[0];
            int        LeftPos   = Conversions.ToInteger(arguments[1]);
            int        RightPos  = Conversions.ToInteger(arguments[2]);
            var        bytes     = new byte[RightPos - LeftPos + 1];

            WaveFloat.Position = LeftPos;
            WaveFloat.Read(bytes, 0, RightPos - LeftPos);
            WaveFloat = new RawSourceWaveStream(new MemoryStream(bytes), WaveFloat.WaveFormat);
            // WaveFloat.PadWithZeroes = False

            using (var output = new WaveOutEvent())
            {
                output.Init(WaveFloat);
                output.Play();
                while (output.PlaybackState == PlaybackState.Playing && !BackgroundPlayer.CancellationPending)
                {
                    Thread.Sleep(45);
                    BackgroundPlayer.ReportProgress((int)(output.GetPosition() / (WaveFloat.WaveFormat.BitsPerSample / 8d)));
                }
            }
        }
Code Example #9
        private async void recive_audio()
        {
            UdpClient  listener_audio = new UdpClient(listenPort);
            IPEndPoint groupEP        = new IPEndPoint(IPAddress.Any, listenPort);

            WaveOut _waveOut = new WaveOut();

            var udp = await listener_audio.ReceiveAsync();

            var receive_byte_array = udp.Buffer;

            Console.WriteLine(receive_byte_array.Length); // log the byte count (printing the array itself only shows its type name)
            IWaveProvider provider = new RawSourceWaveStream(
                new MemoryStream(receive_byte_array), new WaveFormat(44100, 1));

            while (!stopCall)
            {
                receive_byte_array = listener_audio.Receive(ref groupEP);
                Console.WriteLine(receive_byte_array.Length);
                provider = new RawSourceWaveStream(
                    new MemoryStream(receive_byte_array), new WaveFormat(44100, 1));

                // Re-initializing the device for every datagram is costly and can glitch;
                // feeding a single BufferedWaveProvider is the usual streaming pattern.
                _waveOut.Init(provider);
                _waveOut.Play();
            }
        }
Code Example #10
 public WavePlayer(byte[] rawData, WaveFormat rawFormat, float totalTimeMs)
 {
     _Device   = new WaveOut();
     _Provider = new RawSourceWaveStream(rawData, 0, rawData.Length, rawFormat);
     _Device.Init(_Provider);
     _TotalMs = totalTimeMs;
 }
Code Example #11
        private void Play(bool fadeIn, double duration = 0)
        {
            if (State == AudioTrackState.Playing)
            {
                return;
            }

            _currentStream = new RawSourceWaveStream(new MemoryStream(AudioData), AudioFormat);

            _outputSample = CreateSampleChain(_currentStream);

            if (MultipartLoopEnabled)
            {
                SetupMultipartLoop();
                _currentStream.CurrentTime = MultipartLoop.StartTime;
            }

            if (State == AudioTrackState.Paused)
            {
                _currentStream.CurrentTime = _pauseTime;
            }

            if (fadeIn)
            {
                _fadingProvider.BeginFadeIn(duration);
            }

            Mixer.AddMixerInput(_outputSample);

            State = AudioTrackState.Playing;
        }
Code Example #12
        public static byte[] CompressToMsAdpcm(byte[] data, uint overwriteSampleRate, out int uncompressedSize)
        {
            WaveFormat      pcmFormat   = new WaveFormat((int)overwriteSampleRate, 16, 1);
            AdpcmWaveFormat adpcmFormat = new AdpcmWaveFormat((int)overwriteSampleRate, 1);

            using (var inStream = new MemoryStream(data))
                using (var anyWaveStream = new WaveFileReader(inStream))
                    using (var pcmStream = new RawSourceWaveStream(anyWaveStream, pcmFormat))
                    {
                        int sampleSize = ((pcmStream.WaveFormat.BitsPerSample * pcmStream.WaveFormat.Channels) / 8);
                        int uncompressedSampleCount = (int)(pcmStream.Length / sampleSize);
                        uncompressedSampleCount = AlignTo(uncompressedSampleCount, adpcmFormat.SamplesPerBlock);
                        uncompressedSize        = uncompressedSampleCount * 2; // times 2 because samples are 16-bit mono (2 bytes per sample)

                        // We have to align the wave data to the wave block size,
                        // otherwise NAudio will just cut off some samples!
                        using (var alignedPcmStream = new AlignStream {
                            _baseStream = pcmStream, _extendedLengthInBytes = uncompressedSize
                        })
                            using (var adpcmStream = new WaveFormatConversionStream(adpcmFormat, alignedPcmStream))
                                using (var outStream = new MemoryStream())
                                {
                                    using (WaveFileWriter outWaveFileformat = new WaveFileWriter(outStream, adpcmFormat))
                                    {
                                        byte[] buffer = new byte[8192];
                                        int    bytesRead;
                                        while ((bytesRead = adpcmStream.Read(buffer, 0, buffer.Length)) != 0)
                                        {
                                            outWaveFileformat.Write(buffer, 0, bytesRead);
                                        }
                                    }
                                    return(outStream.ToArray());
                                }
                    }
        }
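AlignStream is a project-specific helper, but its role follows from the comment above: report a zero-padded, block-aligned Length so WaveFormatConversionStream converts whole ADPCM blocks instead of truncating the tail. A minimal sketch of such a wrapper (an assumption about the original, not its actual source) could look like this:

        // Hypothetical stand-in for AlignStream: reports an extended Length and
        // returns silence past the end of the wrapped stream. The public fields
        // mirror the object-initializer syntax used in the example above.
        class ZeroPaddedWaveStream : WaveStream
        {
            public WaveStream _baseStream;
            public long _extendedLengthInBytes;
            private long position;

            public override WaveFormat WaveFormat => _baseStream.WaveFormat;
            public override long Length => _extendedLengthInBytes;
            public override long Position
            {
                get { return position; }
                set
                {
                    position = value;
                    _baseStream.Position = Math.Min(value, _baseStream.Length);
                }
            }

            public override int Read(byte[] buffer, int offset, int count)
            {
                if (position >= _extendedLengthInBytes) return 0;
                count = (int)Math.Min(count, _extendedLengthInBytes - position);
                int read = position < _baseStream.Length ? _baseStream.Read(buffer, offset, count) : 0;
                Array.Clear(buffer, offset + read, count - read); // zero-pad the remainder
                position += count;
                return count;
            }
        }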
Code Example #13
        static void Main(string[] args)
        {
            if (!IsAdmin())
            {
                RestartElevated();
            }
            Console.WriteLine("App started with admin priviledges = " + IsAdmin());
            WifiHotspotComm.Start_Hotspot("PhoneMicLive!", "8948365369o", true);
            Console.WriteLine("Hotspot Started....");
            TCPDataReader TCPRdr = new TCPDataReader(PORT);

            TCPRdr.ReceiveBufferSize = 3200;
            TCPRdr.StartListener();


            Console.WriteLine("Listening on port " + PORT + ".....");
            MemoryStream s;

            while (true)
            {
                s = new MemoryStream(TCPRdr.GetReceivedByteData());
                var waveFormat = new WaveFormat(16000, 16, 1); // must match the waveformat of the raw audio
                var waveOut    = new WaveOut(); // note: a new, never-disposed device per buffer; reusing one device would be cleaner
                var rawSource  = new RawSourceWaveStream(s, waveFormat);
                waveOut.Init(rawSource);
                waveOut.Play();
            }
            Console.Read();
            WifiHotspotComm.Start_Hotspot(null, null, false);
        }
Code Example #14
 public IWaveProvider Convert()
 {
     using (Stream stream = new MemoryStream(buffer))
     {
         using (var rawStream = new RawSourceWaveStream(stream, new WaveFormat(48000, 16, 1)))
             using (var downsample = new WaveFormatConversionStream(new WaveFormat(44100, 16, 1), rawStream))
                 using (var outputStream = new MemoryStream())
                 {
                     WaveFileWriter.WriteWavFileToStream(outputStream, downsample.ToSampleProvider().ToWaveProvider16());
                      var outputData = outputStream.ToArray(); // note: this includes the RIFF/WAV header bytes, not just raw samples
                     waveBufferSize = outputData.Length;
                     BufferedWaveProvider bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(44100, 1));
                     if (outputData.Length < bufferedWaveProvider.BufferLength)
                     {
                         bufferedWaveProvider.AddSamples(outputData, 0, outputData.Length);
                     }
                     else
                     {
                         bufferedWaveProvider.AddSamples(outputData, 0, bufferedWaveProvider.BufferLength);
                     }
                     IWaveProvider finalStream = new Wave16ToFloatProvider(bufferedWaveProvider);
                     return(finalStream);
                 }
     }
 }
Code Example #15
        /// <summary>
        /// Converts an IEEE Floating Point audio buffer into a 16bit PCM compatible buffer.
        /// </summary>
        /// <param name="inputBuffer">The buffer in IEEE Floating Point format.</param>
        /// <param name="length">The number of bytes in the buffer.</param>
        /// <param name="format">The WaveFormat of the buffer.</param>
        /// <returns>A byte array that represents the given buffer converted into PCM format.</returns>
        internal static byte[] AudioToPCM16(byte[] inputBuffer, int length, WaveFormat format)
        {
            if (length == 0)
            {
                return(Array.Empty <byte>()); // No bytes recorded, return empty array.
            }
            // Create a WaveStream from the input buffer.
            using var memStream   = new MemoryStream(inputBuffer, 0, length);
            using var inputStream = new RawSourceWaveStream(memStream, format);

            // Convert the input stream to a WaveProvider in 16bit PCM format with sample rate of 48000 Hz.
            var convertedPCM = new SampleToWaveProvider16(
                new WdlResamplingSampleProvider(
                    new WaveToSampleProvider(inputStream),
                    48000)
                );

            byte[] convertedBuffer = new byte[length];

            using var stream = new MemoryStream();
            int read;

            // Read the converted WaveProvider into a buffer and turn it into a Stream.
            while ((read = convertedPCM.Read(convertedBuffer, 0, length)) > 0)
            {
                stream.Write(convertedBuffer, 0, read);
            }

            // Return the converted Stream as a byte array.
            return(stream.ToArray());
        }
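A hypothetical call to the converter above from a capture callback (the handler and format below are illustrative; the method only requires that the buffer really is IEEE float, as its doc comment states):

        // Hypothetical usage: convert a float capture buffer to 16-bit PCM at 48000 Hz.
        void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            var floatFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
            byte[] pcm16 = AudioToPCM16(e.Buffer, e.BytesRecorded, floatFormat);
            // pcm16 is 16-bit PCM resampled to 48000 Hz, ready for encoding or transmission.
        }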
Code Example #16
        public void TextToSpeech()
        {
            _log.Information("Testing speech synthesis. WARNING! Synthesised text wull be played out on default audio device!");
            _log.Information("Enter text to synthesize, then press Enter.");
            string text = Console.ReadLine();

            if (string.IsNullOrEmpty(text))
            {
                _log.Error("Empty text!");
                return;
            }

            var speechKit = new SpeechKitApi(_fullAuthorizer, _configuration["FolderId"]);
            var data      = speechKit.SynthesizeSpeechAsync(
                text,
                AudioFormat.PCM,
                speed: 0.8f,
                emotion: Emotion.Good).Result;

            _log.Information("Playing back synthesised text.");

            var evt      = new ManualResetEventSlim();
            var waveOut  = new WaveOutEvent();
            var provider = new RawSourceWaveStream(data, 0, data.Length, new WaveFormat(48000, 16, 1));

            waveOut.PlaybackStopped += (sender, args) => evt.Set();
            waveOut.Init(provider);
            waveOut.Play();

            evt.Wait();

            _log.Information("Playback finished.");
        }
Code Example #17
        public IWaveProvider PlayRawWaveSound(byte[] rawWaveData, WaveFormat format)
        {
            IWaveProvider provider = new RawSourceWaveStream(new MemoryStream(rawWaveData), format);

            AddMixerInput(provider);
            return(provider);
        }
Code Example #18
        public async Task <IActionResult> Test()
        {
            try
            {
                var file = Request.Form.Files.First();

                int outRate = 44000;
                var source  = new RawSourceWaveStream(file.OpenReadStream(), new WaveFormat(outRate, 2));
                using (var wavFileReader = new WaveFileReader(source))
                {
                    var resampler  = new WdlResamplingSampleProvider(wavFileReader.ToSampleProvider(), 16000);
                    var monoSource = resampler.ToMono().ToWaveProvider16();

                    using (var outputStream = new MemoryStream())
                    {
                        WaveFileWriter.WriteWavFileToStream(outputStream, monoSource);
                        outputStream.Seek(0, SeekOrigin.Begin);

                        var result = await client.VerifyAsync(outputStream, Guid.Parse("fb786241-9f01-41cc-a585-50b65bd52c38"));

                        if (result.Result == Result.Accept)
                        {
                            // verification successful
                        }
                    }
                }
            }
            catch (Exception e)
            {
                int x = 1; // breakpoint anchor; note that the exception is silently swallowed here
            }

            return(Ok());
        }
Code Example #19
    public void ConvertToWave(string inputFileOgg, string outputFileWav)
    {
        using (FileStream fileIn = new FileStream(inputFileOgg, FileMode.Open))
            using (MemoryStream pcmStream = new MemoryStream())
            {
                OpusDecoder decoder = OpusDecoder.Create(48000, 1);

                OpusOggReadStream oggIn = new OpusOggReadStream(decoder, fileIn);
                while (oggIn.HasNextPacket)
                {
                    short[] packet = oggIn.DecodeNextPacket();
                    if (packet != null)
                    {
                        for (int i = 0; i < packet.Length; i++)
                        {
                            var bytes = BitConverter.GetBytes(packet[i]);
                            pcmStream.Write(bytes, 0, bytes.Length);
                        }
                    }
                }
                pcmStream.Position = 0;
                var wavStream      = new RawSourceWaveStream(pcmStream, new WaveFormat(48000, 1));
                var sampleProvider = wavStream.ToSampleProvider();
                WaveFileWriter.CreateWaveFile16(outputFileWav, sampleProvider);
            }
    }
Code Example #20
        private async Task <RawSourceWaveStream> LoadChime(string chimeFilePath)
        {
            RawSourceWaveStream chime = null;
            WaveFormat          format;
            MemoryStream        stream = new MemoryStream();

            try
            {
                using (var audioFile = File.OpenRead(chimeFilePath))
                    using (var waveReader = new WaveFileReader(audioFile))
                    {
                        format = waveReader.WaveFormat;
                        await audioFile.CopyToAsync(stream);
                    }

                chime          = new RawSourceWaveStream(stream, format);
                chime.Position = 0;
            }
            catch (FileNotFoundException ex)
            {
                Log.Error($"Failed to find chime file at {_options.ChimesDirectoryPath}chime.wav", ex);
            }

            return(chime);
        }
Code Example #21
        public async Task SpeechToText()
        {
            //var authData = GenerateAuthData(path, AccessKey, SecretKey, ContractId);
            //var authString = GenerateAuthString(authData);
            //var uri = $"{endpoint}?auth={authString}";

            var jwtToken = await HttpUtils.GenerateJwtDataAsync(AccessKey, SecretKey, 3 * 60 /* 3 minutes */, jwtEndpoint);

            var uri = $"{sttEndpoint}?token=Bearer {jwtToken}";

            var waveIn = new WaveInEvent();

            // Use the default recording device.
            waveIn.DeviceNumber = 0;
            // Set the sample rate, bit depth, and channel count to 16000 Hz, 16 bits, mono.
            waveIn.WaveFormat     = new WaveFormat(16000, 16, 1);
            waveIn.DataAvailable += (object sender, WaveInEventArgs e) =>
            {
                var inputMemStream  = new MemoryStream(e.Buffer, 0, e.BytesRecorded); // only the recorded bytes, not the whole reusable buffer
                var rawWaveStream   = new RawSourceWaveStream(inputMemStream, waveIn.WaveFormat);
                var outputMemStream = new MemoryStream();
                WaveFileWriter.WriteWavFileToStream(outputMemStream, rawWaveStream);
                audioBuffers.Enqueue(outputMemStream.ToArray());
            };
            waveIn.RecordingStopped += (object sender, StoppedEventArgs e) =>
            {
                clientAsyncTokenSource.Cancel();
            };

            var client = new ClientWebSocket();

            await client.ConnectAsync(new Uri(uri), CancellationToken.None);

            // Recognize Japanese speech.
            _ = await SetLanguageAsync(client, "ja");

            _ = await SetSamplingRateAsync(client, 16000);

            try
            {
                waveIn.StartRecording();
                Console.WriteLine("(音声認識:認識中です。)");

                var sendLoop = this.InitSendLoop(client);
                var readLoop = this.InitReadLoop(client);
                Console.Read();

                waveIn.StopRecording();
                Console.WriteLine("(音声認識:完了しました。)");

                await sendLoop;
                await readLoop;

                await client.CloseAsync(WebSocketCloseStatus.NormalClosure, "OK", CancellationToken.None);
            }
            catch (OperationCanceledException)
            {
                Console.WriteLine("(音声認識:サーバとの通信を止めました。)");
            }
        }
Code Example #22
File: WavPlayer.cs Project: MichaelGCox/ivrToolkit
        public void Run()
        {
            // pcm 8000 hz, 64kb/s 1 channel
            var wfOKI = new WaveFormat(8000, 8, 1);

            WaveStream wsRaw = new WaveFileReader(_fileName);

            using (wsRaw = WaveFormatConversionStream.CreatePcmStream(wsRaw))
                using (WaveStream wsOKI = new RawSourceWaveStream(wsRaw, wfOKI))
                    using (_woCall = new WaveOut())
                    {
                        _woCall.PlaybackStopped += woCall_PlaybackStopped;
                        _woCall.Init(wsOKI);

                        _stopped = false;
                        _woCall.Play();

                        while (!_stopped)
                        {
                            Thread.Sleep(100);
                        }

                        if (OnFinished != null)
                        {
                            OnFinished(this, null);
                        }
                    }// using
        }
Code Example #23
 public void waveInStream_DataAvailable(object sender, WaveInEventArgs e)
 {
     // Copy only the recorded bytes: e.Buffer is reused by the driver and may be
     // larger than the data it currently holds.
     AudioArray = new byte[e.BytesRecorded];
     Buffer.BlockCopy(e.Buffer, 0, AudioArray, 0, e.BytesRecorded);
     if (MonitorAudioInput)
     {
         Task.Factory.StartNew(() =>
         {
             //THIS IS THE PLACE FOR THE CONVERSION!
             //===================================
             //Console.WriteLine("Got microphone data: " + AudioArray.Length);
             using (WaveOut audioout = new WaveOut())
                 using (MemoryStream ms = new MemoryStream(AudioArray))
                 {
                     ManualResetEvent semaphoreObject = new ManualResetEvent(false);
                     audioout.DesiredLatency          = 100;
                     // 'wf' is assumed to be a WaveFormat field matching the input device.
                     RawSourceWaveStream rsws         = new RawSourceWaveStream(ms, wf);
                     IWaveProvider provider           = rsws;
                     audioout.Init(provider);
                     EventHandler <NAudio.Wave.StoppedEventArgs> handler = (o, k) =>
                     {
                         semaphoreObject.Set();
                     };
                     audioout.PlaybackStopped += handler;
                     audioout.Play();
                     //while (audioout.PlaybackState != PlaybackState.Stopped) ;
                     semaphoreObject.WaitOne();
                     audioout.PlaybackStopped -= handler;
                 }
         });
     }
     if (SendAudio)
     {
         Send(AudioArray);
     }
 }
Code Example #24
File: VoiceToTextService.cs Project: Valchris/ddbot
        public async Task <string> ProcessVoiceToText(Stream stream, int bitRate)
        {
            var fn = $"a-{Guid.NewGuid()}-{bitRate}.wav";

            stream.Seek(0, SeekOrigin.Begin);
            var wavStream = new RawSourceWaveStream(stream, new WaveFormat(bitRate, 2));

            // Debugging only
            // WaveFileWriter.CreateWaveFile($"{fn}-source.wav", wavStream);

            stream.Seek(0, SeekOrigin.Begin);
            var newFormat = new WaveFormat(InputRate, 1);
            WaveFormatConversionStream cs = new WaveFormatConversionStream(newFormat, wavStream);

            // Debugging only
            // WaveFileWriter.CreateWaveFile(fn, cs);
            cs.Seek(0, SeekOrigin.Begin);
            speechRecognizer.StartRecognition(cs);
            var result = speechRecognizer.GetResult();

            speechRecognizer.StopRecognition();

            cs.Close();
            return(result?.GetHypothesis());
        }
Code Example #25
        private void Service_AudioStreaming(string data, WaveFormat format)
        {
            var audioBuffer = Convert.FromBase64String(data);

            provider = new RawSourceWaveStream(new MemoryStream(audioBuffer), format);
            _waveOut.Init(provider);
            _waveOut.Play();
        }
Code Example #26
        /// <summary>
        /// Pipeline package handler that processes audio bytes and plays them on the speaker.
        /// </summary>
        /// <param name="audio">Audio bytes to play on the speaker</param>
        /// <param name="e">Pipeline envelope</param>
        protected void Receive(AudioBuffer audio, Envelope e)
        {
            IWaveProvider provider = new RawSourceWaveStream(new MemoryStream(audio.Data), new NAudio.Wave.WaveFormat(22000, 16, 1));
            var           wo       = new WaveOutEvent();

            wo.Init(provider);
            wo.Play();
        }
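A minimal sketch of how this handler might be wired into a Microsoft \psi pipeline (the AudioCapture source and its configuration are assumptions; the sketch only relies on Receive matching the Do(message, envelope) overload):

        // Hypothetical pipeline wiring, assuming Microsoft.Psi.Audio on Windows.
        using (var pipeline = Pipeline.Create())
        {
            var microphone = new AudioCapture(pipeline, new AudioCaptureConfiguration());
            microphone.Do(this.Receive); // AudioBuffer messages flow into the handler above
            pipeline.Run();
        }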
Code Example #27
 private void PlaySound(UnmanagedMemoryStream sound)
 {
     // NOTE: this using block disposes the provider as soon as Play() returns,
     // which can cut playback short; keep the stream alive until PlaybackStopped fires.
     using (var provider = new RawSourceWaveStream(sound, new WaveFormat()))
     {
         wave.Init(provider);
         wave.Play();
     }
 }
Code Example #28
        /// <summary>
        /// Event handler that captures audio from a WASAPI device and converts it to 16-bit PCM.
        /// </summary>
        /// <remarks>
        /// see also: https://qiita.com/zufall/items/2e027a2bc996864fe4af
        /// </remarks>
        /// <param name="sender"></param>
        /// <param name="eventArgs"></param>
        private void WaspiDataAvailable(object sender, WaveInEventArgs eventArgs)
        {
            if (eventArgs.BytesRecorded == 0)
            {
                ResampledDataAvailable?.Invoke(this, new byte[0]);
                ResampledMaxValueAvailable?.Invoke(this, 0);
                return;
            }

            using (var memStream = new MemoryStream(eventArgs.Buffer, 0, eventArgs.BytesRecorded))
            {
                using (var inputStream = new RawSourceWaveStream(memStream, capture.WaveFormat))
                {
                    var           sampleStream       = new WaveToSampleProvider(inputStream);
                    var           resamplingProvider = new WdlResamplingSampleProvider(sampleStream, TargetWaveFormat.SampleRate);
                    var           pcmProvider        = new SampleToWaveProvider16(resamplingProvider);
                    IWaveProvider targetProvider     = pcmProvider;
                    if (capture.WaveFormat.Channels == 2)
                    {
                        var stereoToMonoProvider = new StereoToMonoProvider16(pcmProvider);
                        stereoToMonoProvider.RightVolume = 0.5f;
                        stereoToMonoProvider.LeftVolume  = 0.5f;
                        targetProvider = stereoToMonoProvider;
                    }

                    byte[] buffer = new byte[eventArgs.BytesRecorded];

                    var outputStream = new MemoryStream();
                    int readBytes;
                    int writeBytes = 0;
                    while ((readBytes = targetProvider.Read(buffer, 0, eventArgs.BytesRecorded)) > 0)
                    {
                        outputStream.Write(buffer, 0, readBytes);
                        writeBytes += readBytes;
                    }
                    var aryOutputStream = outputStream.ToArray();
                    ResampledDataAvailable?.Invoke(this, aryOutputStream);

                    float max        = 0;
                    var   tempBuffer = new WaveBuffer(aryOutputStream);
                    for (int index = 0; index < aryOutputStream.Length / 2; index++)
                    {
                        var sample = (double)tempBuffer.ShortBuffer[index];
                        // absolute value
                        if (sample < 0.0)
                        {
                            sample = -sample;
                        }
                        // is this the max value?
                        if (sample > max)
                        {
                            max = (float)sample;
                        }
                    }
                    ResampledMaxValueAvailable?.Invoke(this, max);
                }
            }
        }
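A hypothetical setup for the handler above, assuming capture is an NAudio WasapiCapture and TargetWaveFormat is the 16 kHz mono PCM target implied by the conversion chain (both names are referenced by the handler; the wiring itself is an assumption):

        private WasapiCapture capture;
        private static readonly WaveFormat TargetWaveFormat = new WaveFormat(16000, 16, 1);

        public void StartCapture()
        {
            capture = new WasapiCapture(); // default capture device, shared-mode format
            capture.DataAvailable += WaspiDataAvailable;
            capture.StartRecording();
        }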
Code Example #29
File: Sound.cs Project: 9Lucky9/Sky
        public static void PlayAudio(byte[] content)
        {
            WaveOut       waveOut  = new WaveOut();
            IWaveProvider provider = new RawSourceWaveStream(
                new MemoryStream(content), new WaveFormat(48000, 16, 1));

            waveOut.Init(provider);
            waveOut.Play();
        }
Code Example #30
 public void Play(byte[] data)
 {
     waveOut = new WaveOut();
     stream  = new RawSourceWaveStream(data, 0, data.Length, recorder.WaveFormat);
     waveOut.PlaybackStopped += this.WaveOut_PlaybackStopped;
     waveOut.Init(stream);
     waveOut.Volume = 1;
     waveOut.Play();
 }
Code Example #31
        private void GetAudioAsync(IAsyncResult res)
        {
            HttpWebRequest request = (HttpWebRequest)res.AsyncState;
            if (request == null)
                return;
            HttpWebResponse response = (HttpWebResponse)request.EndGetResponse(res);
            if (response == null)
                return;

            var waveFormat = WaveFormat.CreateMuLawFormat(8000, 1);
            Stream respStream = response.GetResponseStream();

            var reader = new RawSourceWaveStream(respStream, waveFormat);
            using (WaveStream convertedStream = WaveFormatConversionStream.CreatePcmStream(reader))
            {
                using (WaveOutEvent waveOut = new WaveOutEvent())
                {
                    waveOut.DeviceNumber = _audioDevices.GetCurWaveOutDeviceNumber();
                    waveOut.Init(convertedStream);
                    while (true)
                    {
                        // Check if we should be stopping
                        if (_reqToStop)
                        {
                            request.Abort();
                            _isListening = false;
                            _reqToStop = false;
                            logger.Info("ListenToAxisCamera::GetAudioAsync::Request aborted");
                            break;
                        }

                        // Play the audio, sleeping briefly so this loop does not spin at 100% CPU
                        waveOut.Play();
                        Thread.Sleep(1);
                    }
                }
            }

            /*
             * TEST CODE TO JUST SHOW STREAM READS - MUST COMMENT OUT ABOVE TO TRY
             *
             * Stream r = response.GetResponseStream();
             * byte[] data = new byte[4096];
             * int read;
             * while ((read = r.Read(data, 0, data.Length)) > 0)
             * {
             *     Console.WriteLine(read);
             * }
             */
        }