Example #1
0
        /// <summary>
        /// Converts an IEEE Floating Point audio buffer into a 16bit PCM compatible buffer.
        /// </summary>
        /// <param name="inputBuffer">The buffer in IEEE Floating Point format.</param>
        /// <param name="length">The number of bytes in the buffer.</param>
        /// <param name="format">The WaveFormat of the buffer.</param>
        /// <returns>A byte array that represents the given buffer converted into PCM format.</returns>
        /// source: https://stackoverflow.com/questions/65467635/converting-wasapiloopbackcapture-buffer-to-pcm
        private static byte[] _ToPcm16(byte[] inputBuffer, int length, WaveFormat format)
        {
            if (length == 0)
            {
                return(new byte[0]); // No bytes recorded, return empty array.
            }

            // Wrap the raw capture bytes in a WaveStream so NAudio can read them.
            using var memStream   = new MemoryStream(inputBuffer, 0, length);
            using var inputStream = new RawSourceWaveStream(memStream, format);

            // Convert the input stream to a WaveProvider in 16bit PCM format with sample rate of 48000 Hz.
            // BUG FIX: the original passed `96000 / format.Channels` as the target rate,
            // which only equals 48000 Hz for stereo input; the resampler's target sample
            // rate must not depend on the channel count.
            var convertedPCM = new SampleToWaveProvider16(
                new WdlResamplingSampleProvider(
                    new WaveToSampleProvider(inputStream),
                    48000)
                );

            // Chunk buffer for draining the provider; `length` is just a convenient size.
            byte[] convertedBuffer = new byte[length];

            using var stream = new MemoryStream();
            int read;

            // Read the converted WaveProvider into a buffer and turn it into a Stream.
            while ((read = convertedPCM.Read(convertedBuffer, 0, length)) > 0)
            {
                stream.Write(convertedBuffer, 0, read);
            }

            // Return the converted Stream as a byte array.
            return(stream.ToArray());
        }
Example #2
0
        /// <summary>
        /// Decodes an Ogg/Vorbis stream and writes it out as a 16-bit PCM WAV stream.
        /// </summary>
        /// <param name="oggStream">Source stream containing Ogg/Vorbis data.</param>
        /// <param name="wavStream">Destination stream that receives the WAV data.</param>
        public static void CreateWave(Stream oggStream, Stream wavStream)
        {
            // BUG FIX: the reader was only disposed on the success path; a throw from
            // Read/Write leaked it. `using` guarantees disposal on every path.
            using (var vorbisReader = new VorbisWaveReader(oggStream))
            {
                var converter = new SampleToWaveProvider16(vorbisReader);

                using (var writer = new WaveFileWriter(wavStream, converter.WaveFormat))
                {
                    int loopCount = 0;
                    // Roughly four seconds of audio per read keeps the loop count low.
                    var buffer    = new byte[converter.WaveFormat.AverageBytesPerSecond * 4];
                    while (true)
                    {
                        loopCount++;
                        Debug.WriteLine("looped: " + loopCount.ToString());

                        int bytesRead = converter.Read(buffer, 0, buffer.Length);
                        if (bytesRead == 0)
                        {
                            // end of source provider
                            break;
                        }

                        // Write will throw exception if WAV file becomes too large
                        writer.Write(buffer, 0, bytesRead);
                    }
                }
            }
        }
        /// <summary>
        /// Event handler to capture waspi device and convert to pcm16.
        /// Resamples the captured float buffer to the target rate, downmixes stereo to
        /// mono, then raises <c>ResampledDataAvailable</c> with the PCM bytes and
        /// <c>ResampledMaxValueAvailable</c> with the peak 16-bit sample value.
        /// </summary>
        /// <remarks>
        /// see also: https://qiita.com/zufall/items/2e027a2bc996864fe4af
        /// </remarks>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="eventArgs">Carries the captured buffer and its byte count.</param>
        private void WaspiDataAvailable(object sender, WaveInEventArgs eventArgs)
        {
            if (eventArgs.BytesRecorded == 0)
            {
                // Nothing captured: still notify subscribers so downstream state stays in sync.
                ResampledDataAvailable?.Invoke(this, new byte[0]);
                ResampledMaxValueAvailable?.Invoke(this, 0);
                return;
            }

            using (var memStream = new MemoryStream(eventArgs.Buffer, 0, eventArgs.BytesRecorded))
            using (var inputStream = new RawSourceWaveStream(memStream, capture.WaveFormat))
            // FIX: the output stream was never disposed in the original.
            using (var outputStream = new MemoryStream())
            {
                // bytes -> float samples -> resample to target rate -> 16-bit PCM.
                var sampleStream       = new WaveToSampleProvider(inputStream);
                var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, TargetWaveFormat.SampleRate);
                var pcmProvider        = new SampleToWaveProvider16(resamplingProvider);

                IWaveProvider targetProvider = pcmProvider;
                if (capture.WaveFormat.Channels == 2)
                {
                    // Fold stereo down to mono with a 50/50 mix of both channels.
                    targetProvider = new StereoToMonoProvider16(pcmProvider)
                    {
                        RightVolume = 0.5f,
                        LeftVolume  = 0.5f,
                    };
                }

                // Drain the provider chain into a contiguous byte array.
                // (FIX: removed the `writeBytes` accumulator, which was never read.)
                byte[] buffer = new byte[eventArgs.BytesRecorded];
                int readBytes;
                while ((readBytes = targetProvider.Read(buffer, 0, eventArgs.BytesRecorded)) > 0)
                {
                    outputStream.Write(buffer, 0, readBytes);
                }

                var aryOutputStream = outputStream.ToArray();
                ResampledDataAvailable?.Invoke(this, aryOutputStream);

                // Scan the 16-bit samples for the peak absolute value (level metering).
                float max        = 0;
                var   tempBuffer = new WaveBuffer(aryOutputStream);
                for (int index = 0; index < aryOutputStream.Length / 2; index++)
                {
                    var sample = (double)tempBuffer.ShortBuffer[index];
                    // absolute value (done in double so short.MinValue negates safely)
                    if (sample < 0.0)
                    {
                        sample = -sample;
                    }
                    // is this the max value?
                    if (sample > max)
                    {
                        max = (float)sample;
                    }
                }
                ResampledMaxValueAvailable?.Invoke(this, max);
            }
        }
 internal WdlResampling16Stream([NotNull] WaveStream sourceStream, int sampleRate)
 {
     // Keep the source stream and expose it as float samples for the resampler.
     _sourceStream = sourceStream;
     var sourceSamples = sourceStream.ToSampleProvider();
     _sourceSampleProvider = sourceSamples;

     // WdlResamplingSampleProvider always emits IEEE float samples (see its source
     // in NAudio), so the resampled output still needs a 16-bit PCM wrapper.
     _sampleProvider = new WdlResamplingSampleProvider(sourceSamples, sampleRate);

     // The wrapping technique mirrors WaveFileWriter.CreateWaveFile16(), as described
     // in Mark Heath's (creator of NAudio) article:
     // http://markheath.net/post/fully-managed-input-driven-resampling-wdl.
     _to16Provider = new SampleToWaveProvider16(_sampleProvider);
 }
Example #5
0
        /// <summary>
        /// Converts a 32-bit float, 48 kHz, stereo capture buffer into 16-bit mono PCM
        /// at the requested sample rate.
        /// </summary>
        /// <param name="e">Capture event carrying the raw buffer and its byte count.</param>
        /// <param name="sampleRate">Target output sample rate in Hz.</param>
        /// <returns>The converted buffer, or null if the conversion threw.</returns>
        private byte[] convert32bitFloat48000HzStereoPCMTo16bitMonoPCM_Alpha(WaveInEventArgs e, int sampleRate)
        {
            byte[] recorded_buf    = e.Buffer;
            int    recorded_length = e.BytesRecorded;

            byte[] result_buf = null;
            int    result_len = -1;

            try
            {
                //// Convert the raw capture data into playable data.
                // The buffered provider is filled via AddSamples() below, AFTER the
                // whole provider chain has been wired up.
                var waveBufferResample = new BufferedWaveProvider(this._WaveIn.WaveFormat);
                waveBufferResample.DiscardOnBufferOverflow = true;
                waveBufferResample.ReadFully    = false; // leave a buffer?
                waveBufferResample.BufferLength = recorded_length;
                var sampleStream = new WaveToSampleProvider(waveBufferResample);

                // Downsample
                var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, sampleRate);

                // Stereo to mono
                var monoProvider = new StereoToMonoSampleProvider(resamplingProvider)
                {
                    LeftVolume  = 1f,
                    RightVolume = 1f
                };

                // Convert to 32bit float to 16bit PCM
                // NOTE(review): the 16 -> 8 -> 16 bit round trip below discards the low
                // byte of every sample; presumably intentional, but confirm it is wanted.
                var ieeeToPcm               = new SampleToWaveProvider16(monoProvider);
                var depthConvertProvider    = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 8, 1), ieeeToPcm);
                var depthConvertProviderRev = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 16, 1), depthConvertProvider);

                waveBufferResample.AddSamples(recorded_buf, 0, recorded_length);

                // NOTE(review): 48000 is hard-coded here; if the capture format is not
                // 48 kHz this length estimate is wrong, and the integer division
                // truncates for rates that do not divide 48000 evenly — TODO confirm.
                result_len = recorded_length / (2 * (48000 / sampleRate) * 2); // depth conv and sampling and ch conv
                result_buf = new byte[result_len];
                depthConvertProviderRev.Read(result_buf, 0, result_len);
            }
            catch (Exception ex)
            {
                // Any conversion failure tears down the entire application.
                Console.WriteLine(ex);
                Console.WriteLine("exit...");
                System.Windows.Forms.Application.Exit();
            }

            return(result_buf);
        }
Example #6
0
        /// <summary>
        /// Initializes the player with the given sample provider, optionally wrapping
        /// it as 16-bit PCM instead of IEEE float.
        /// </summary>
        /// <param name="wavePlayer">The player to initialize.</param>
        /// <param name="sampleProvider">The audio source.</param>
        /// <param name="convertTo16Bit">True to convert samples to 16-bit PCM.</param>
        public static void Init(this IWavePlayer wavePlayer, ISampleProvider sampleProvider, bool convertTo16Bit = false)
        {
            // Pick the wave-provider wrapper matching the requested bit depth,
            // then hand it straight to the player.
            IWaveProvider provider = convertTo16Bit
                ? new SampleToWaveProvider16(sampleProvider)
                : (IWaveProvider)new SampleToWaveProvider(sampleProvider);

            wavePlayer.Init(provider);
        }
        /// <summary>
        /// Resamples a 16-bit PCM buffer from <c>oldFormat</c>'s sample rate to
        /// <c>newFormat</c>'s sample rate using the WDL resampler.
        /// </summary>
        /// <param name="pcm">Raw 16-bit PCM bytes in <c>oldFormat</c>.</param>
        /// <returns>The resampled 16-bit PCM bytes.</returns>
        private byte[] ResampleLinux(byte[] pcm)
        {
            using (MemoryStream mem = new MemoryStream(pcm))
            using (RawSourceWaveStream stream = new RawSourceWaveStream(mem, oldFormat))
            {
                var sampleProvider      = new Pcm16BitToSampleProvider(stream);
                var resampedWavFile     = new WdlResamplingSampleProvider(sampleProvider, newFormat.SampleRate);
                var sampleToWavProvider = new SampleToWaveProvider16(resampedWavFile);

                // Estimate the output size from the rate ratio; round down to a whole
                // output frame so the reads never split a sample.
                // (64-bit intermediate avoids overflow for large buffers.)
                int resampled_length = (int)((long)pcm.Length * newFormat.SampleRate / oldFormat.SampleRate);
                resampled_length -= resampled_length % sampleToWavProvider.WaveFormat.BlockAlign;

                // BUG FIX: a single Read() is not guaranteed to fill the buffer; loop
                // until it is full or the provider is drained, then trim any shortfall
                // instead of returning silent zero padding.
                byte[] ret   = new byte[resampled_length];
                int    total = 0;
                int    read;
                while (total < resampled_length &&
                       (read = sampleToWavProvider.Read(ret, total, resampled_length - total)) > 0)
                {
                    total += read;
                }
                if (total < resampled_length)
                {
                    Array.Resize(ref ret, total);
                }
                return(ret);
            }
        }
Example #8
0
        /// <summary>
        /// Initializes the player with the given sample provider; wraps it either as
        /// an IEEE float wave provider or, when requested, as 16-bit PCM.
        /// </summary>
        /// <param name="wavePlayer">The player to initialize.</param>
        /// <param name="sampleProvider">The audio source.</param>
        /// <param name="convertTo16Bit">True to convert samples to 16-bit PCM.</param>
        public static void Init(this IWavePlayer wavePlayer, ISampleProvider sampleProvider, bool convertTo16Bit = false)
        {
            IWaveProvider outputProvider;

            if (convertTo16Bit)
            {
                outputProvider = new SampleToWaveProvider16(sampleProvider);
            }
            else
            {
                outputProvider = new SampleToWaveProvider(sampleProvider);
            }

            wavePlayer.Init(outputProvider);
        }
        /// <summary>
        /// Decodes an MP3 byte array into a mono, 16-bit PCM WAV byte array.
        /// </summary>
        /// <param name="file">The raw MP3 file contents.</param>
        /// <returns>The same audio re-encoded as a WAV file.</returns>
        private byte[] ConvertToWav(byte[] file)
        {
            // FIX: both MemoryStreams were never disposed in the original; `using`
            // guarantees cleanup on every path.
            using (var originalFileStream = new MemoryStream(file))
            using (var outputStream = new MemoryStream())
            {
                using (var waveStream = WaveFormatConversionStream.CreatePcmStream(new Mp3FileReader(originalFileStream)))
                {
                    // Fold stereo down to mono with a 50/50 mix of both channels.
                    var sample = waveStream.ToSampleProvider();
                    var mono   = new StereoToMonoSampleProvider(sample)
                    {
                        LeftVolume  = 0.5f,
                        RightVolume = 0.5f
                    };

                    // Emit 16-bit PCM samples into the WAV container.
                    var bitSample = new SampleToWaveProvider16(mono);
                    WaveFileWriter.WriteWavFileToStream(outputStream, bitSample);
                }

                return(outputStream.ToArray());
            }
        }
Example #10
0
        /// <summary>
        /// Mixes several audio files into one MP3 at 44.1 kHz stereo, 192 kbps.
        /// </summary>
        /// <param name="SourceAudios">Paths of the audio files to mix.</param>
        /// <param name="outAudio">Path of the MP3 file to write.</param>
        private void MixAudio(string[] SourceAudios, string outAudio)
        {
            MixingSampleProvider mixer = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));

            // FIX: the readers were never disposed and Shutdown() was skipped when
            // encoding threw; track the readers and clean everything up in finally.
            var readers = new AudioFileReader[SourceAudios.Length];

            NAudio.MediaFoundation.MediaFoundationApi.Startup();
            try
            {
                for (int i = 0; i < SourceAudios.Length; i++)
                {
                    readers[i] = new AudioFileReader(SourceAudios[i]);
                    // Normalize every input to 44.1 kHz stereo float before mixing.
                    var mfr = new MediaFoundationResampler(readers[i], WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));
                    mixer.AddMixerInput(mfr);
                }

                // The MP3 encoder wants 16-bit PCM input.
                var converted16Bit = new SampleToWaveProvider16((ISampleProvider)mixer);

                using (var resampled = new MediaFoundationResampler(converted16Bit, new WaveFormat(44100, 2)))
                {
                    MediaFoundationEncoder.EncodeToMp3(resampled, outAudio, 192000);
                }
            }
            finally
            {
                foreach (var reader in readers)
                {
                    reader?.Dispose();
                }
                NAudio.MediaFoundation.MediaFoundationApi.Shutdown();
            }
        }
Example #11
0
        /// <summary>
        /// Sets up the audio capture/playback pipeline, the Opus codec pair, and the
        /// UDP/WebSocket transport objects for a voice-chat session. No connection is
        /// opened here; the endpoints are only stored for later use.
        /// </summary>
        /// <param name="wsEndpoint">WebSocket server URI.</param>
        /// <param name="udpEndpoint">UDP endpoint for voice data.</param>
        /// <param name="encoding">Text encoding used for messages.</param>
        public ChatClient(Uri wsEndpoint, IPEndPoint udpEndpoint, Encoding encoding)
        {
            enc              = encoding;
            _lock            = new object();
            this.udpEndpoint = udpEndpoint;
            this.wsEndpoint  = wsEndpoint;
            tokenSource      = new CancellationTokenSource();

            // One buffered provider per remote source, keyed by a uint id.
            _providers = new ConcurrentDictionary <uint, BufferedWaveProvider>();

            // Opus codec pair at 48 kHz stereo.
            _decoder = new OpusDecoder(48000, 2);

            // Capture 100 ms buffers of 48 kHz stereo input.
            _audioIn = new WaveInEvent()
            {
                BufferMilliseconds = 100,
                WaveFormat         = new WaveFormat(48000, 2),
            };
            _encoder = new OpusEncoder(48000, 2, OpusApplication.OPUS_APPLICATION_VOIP)
            {
                // NOTE(review): 522240 bps is unusually high for VOIP — confirm intended.
                Bitrate = 522240
            };

            _audioIn.DataAvailable += AudioDataAvailable;

            _udp = new UdpClient()
            {
                EnableBroadcast = true,
            };
            _ws             = new ClientWebSocket();
            // Mix all incoming streams in IEEE float (matching the capture format),
            // then convert to 16-bit PCM for the output device.
            _sampleProvider = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(_audioIn.WaveFormat.SampleRate, _audioIn.WaveFormat.Channels));
            _provider       = new SampleToWaveProvider16(_sampleProvider);
            _audioOut       = new WaveOutEvent()
            {
            };

            _audioOut.Init(_provider);
            // 960 samples per channel = 20 ms at 48 kHz — presumably the Opus frame size.
            _frames = 960;
        }
        /// <summary>
        /// Converts an audio buffer in the given format to 16-bit PCM at 48 kHz.
        /// </summary>
        /// <param name="input">The raw source audio bytes.</param>
        /// <param name="length">Number of valid bytes in <paramref name="input"/>.</param>
        /// <param name="format">The wave format of the source bytes.</param>
        /// <returns>The converted PCM bytes, or an empty array for empty input.</returns>
        private byte[] ConvertToPCM16Bit(byte[] input, int length, WaveFormat format)
        {
            if (length == 0)
            {
                return new byte[0];
            }

            using var rawStream  = new MemoryStream(input, 0, length);
            using var waveStream = new RawSourceWaveStream(rawStream, format);

            // bytes -> float samples, resample to 48 kHz, then down to 16-bit PCM.
            var floatSamples = new WaveToSampleProvider(waveStream);
            var resampled    = new WdlResamplingSampleProvider(floatSamples, 48000);
            var pcm16        = new SampleToWaveProvider16(resampled);

            return readStream(pcm16);
        }
Example #13
0
        /// <summary>
        /// (Re)configures the wave output device and the background audio processor.
        /// Tears down and recreates the device whenever a device-affecting parameter
        /// changed, then reports engine status through the out parameters.
        /// </summary>
        /// <param name="device">Selected output device; may be null.</param>
        /// <param name="input">Sample buffer the processor should consume.</param>
        /// <param name="status">Playback state, or "Uninitialized" if no device runs.</param>
        /// <param name="waveFormat">Actual output format of the running device.</param>
        /// <param name="latency">Current processor latency.</param>
        /// <param name="cpuUsage">Processor CPU usage.</param>
        /// <param name="bufferUnderRuns">Buffer under-runs observed by the processor.</param>
        /// <param name="sampleRate">Desired sample rate in Hz.</param>
        /// <param name="driverLatency">Latency requested from the device driver.</param>
        /// <param name="internalLatency">Latency requested from the processor.</param>
        /// <param name="bufferSize">Processor buffer size.</param>
        /// <param name="reset">Force a device restart even if nothing changed.</param>
        public void Update(WaveOutputDevice device, AudioSampleBuffer input, out string status,
                           out WaveFormat waveFormat, out int latency, out float cpuUsage,
                           out int bufferUnderRuns, int sampleRate = 44100,
                           int driverLatency = 200, int internalLatency = 300, int bufferSize = 512, bool reset = false)
        {
            // Any of these parameters changing requires rebuilding the device.
            // Note: internalLatency is deliberately NOT part of this check — it can
            // be applied to the running processor without a restart.
            bool hasDeviceChanged = device?.Value != Device?.Value ||
                                    sampleRate != SampleRate ||
                                    driverLatency != DriverLatency ||
                                    bufferSize != BufferSize ||
                                    reset;

            Device          = device;
            Input           = input;
            SampleRate      = sampleRate;
            InternalLatency = internalLatency;
            DriverLatency   = driverLatency;
            BufferSize      = bufferSize;

            if (hasDeviceChanged)
            {
                // Stop and discard the old processor and device before rebuilding.
                processor?.Dispose();
                processor = null;

                if (waveOut != null)
                {
                    AudioEngine.Log("Stopping WaveOut...");
                    waveOut.Stop();
                    waveOut.Dispose();
                }
            }

            if (processor == null)
            {
                processor = new AudioThread.AudioThreadProcessor(BufferSize);
            }

            processor.EnsureThreadIsRunning();
            processor.RequestedLatency = InternalLatency;

            if (hasDeviceChanged)
            {
                // The engine works internally in IEEE float (stereo + single-channel).
                InternalFormat      = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 2);
                SingleChannelFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 1);

                processor.WaveFormat = InternalFormat;

                if (device != null)
                {
                    AudioEngine.Log(
                        $"WaveOutput: Configuration changed, device={device.Value}, sampleRate={sampleRate}, latency={InternalLatency} {HotSwapped} ");
                    try
                    {
                        waveOut = ((IWaveOutputFactory)device.Tag).Create(DriverLatency);
                        // The device takes 16-bit PCM; wrap the float processor output.
                        var wave16 = new SampleToWaveProvider16(processor);
                        waveOut.Init(wave16);
                        waveOut.Play();
                        AudioEngine.Log("WaveOutput: Started");
                        OutputFormat = wave16.WaveFormat;
                    }
                    catch (Exception e)
                    {
                        // Keep running without a device; status will read "Uninitialized".
                        AudioEngine.Log(e);
                        waveOut = null;
                    }
                }
                // NOTE(review): when the old device was disposed above but device == null
                // here, waveOut still references the disposed instance — confirm intended.
            }

            processor.Input = Input;

            status = waveOut != null?waveOut.PlaybackState.ToString() : "Uninitialized";

            waveFormat      = OutputFormat;
            latency         = processor.Latency;
            cpuUsage        = processor.CpuUsage;
            bufferUnderRuns = processor.BufferUnderRuns;
        }
        /// <summary>
        /// Proof-of-concept: probes MediaFoundation encoder support, mixes two WAV
        /// files into "mixed.mp3", encodes a WMA sample, and finally encodes
        /// <paramref name="wav"/> to <paramref name="mp3"/>.
        /// NOT TESTED — see http://mark-dot-net.blogspot.ca/2015/02/how-to-encode-mp3s-with-naudio.html
        /// </summary>
        /// <param name="wav">Path of the input WAV file.</param>
        /// <param name="mp3">Path of the MP3 file to write.</param>
        public static void ConvertWavToMp3_POC(string wav, string mp3) // NOT TESTED  see more at     http://mark-dot-net.blogspot.ca/2015/02/how-to-encode-mp3s-with-naudio.html
        {
            // NOTE(review): hard-coded local path left over from experimentation;
            // this method only works on the original author's machine as-is.
            var wav2 = @"D:\Users\alex\Videos\0Pod\Cuts\BPr-6659,`Hacking Addiction with Dr. Mark – #351\[012].--- 65 ---.wav";

            try
            {
                // Probe which output media types this machine's encoders support.
                var mediaType = MediaFoundationEncoder.SelectMediaType(AudioSubtypes.MFAudioFormat_WMAudioV8, new WaveFormat(16000, 1), 16000); if (mediaType != null)
                {
                    Debug.WriteLine(" we can encode");
                }

                mediaType = MediaFoundationEncoder.SelectMediaType(AudioSubtypes.MFAudioFormat_MP3, new WaveFormat(44100, 1), 0); if (mediaType != null)
                {
                    Debug.WriteLine(" we can encode");
                }
                mediaType = MediaFoundationEncoder.SelectMediaType(AudioSubtypes.MFAudioFormat_MP3, new WaveFormat(352000, 1), 0); if (mediaType != null)
                {
                    Debug.WriteLine(" we can encode");
                }

                // NOTE(review): these readers are never disposed — acceptable for a
                // POC, but a leak if this code is ever promoted.
                var incoming = new WaveFileReader(wav);
                var outgoing = new WaveFileReader(wav2);

                var mixer = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(16000, 1));
                Debug.WriteLine(mixer.WaveFormat.ToString());

                // add the inputs - they will automatically be turned into ISampleProviders
                mixer.AddMixerInput(incoming);
                mixer.AddMixerInput(outgoing);

                //var truncateAudio = true;        // optionally truncate to 30 second for unlicensed users
                var truncated = //truncateAudio ? new OffsetSampleProvider(mixer) { Take = TimeSpan.FromSeconds(30) } :
                                (ISampleProvider)mixer;

                // go back down to 16 bit PCM
                var converted16Bit = new SampleToWaveProvider16(truncated);

                // now for MP3, we need to upsample to 44.1kHz. Use MediaFoundationResampler
                using (var resampled = new MediaFoundationResampler(converted16Bit, new WaveFormat(44100, 1)))
                {
                    var desiredBitRate = 0; // ask for lowest available bitrate
                    // NOTE(review): output name "mixed.mp3" is hard-coded and ignores
                    // the mp3 parameter — confirm this is intended for the POC.
                    MediaFoundationEncoder.EncodeToMp3(resampled, "mixed.mp3", desiredBitRate);
                }



                // Second experiment: encode a fresh (empty) mixer to WMA using the
                // last media type selected above.
                var myWaveProvider = (IWaveProvider) new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(16000, 1));

                using (var enc = new MediaFoundationEncoder(mediaType))
                {
                    enc.Encode("output.wma", myWaveProvider);
                }

                // Finally: the actual wav -> mp3 conversion this method is named after.
                using (var reader = new WaveFileReader(wav))
                {
                    MediaFoundationEncoder.EncodeToMp3(reader, mp3, 48000);
                }
            }
            catch (Exception ex) { System.Diagnostics.Trace.WriteLine(ex.Message, System.Reflection.MethodInfo.GetCurrentMethod().Name); if (System.Diagnostics.Debugger.IsAttached)
                                   {
                                       System.Diagnostics.Debugger.Break();
                                   }
                                   throw; }
        }
Example #15
0
        /// <summary>
        /// This method initializes audio capture and socket connection.
        /// Picks a WASAPI capture device (specific device if <c>DeviceId</c> resolves,
        /// default device otherwise), builds the resampling pipeline to 16 kHz 16-bit
        /// mono PCM, connects the WebSocket, and wires the capture event handlers.
        /// </summary>
        /// <param name="debugMode">enable or disable the debug mode (writes raw/out WAV files).</param>
        public async Task InitializeAsync(bool debugMode = true)
        {
            // Prefer the explicitly configured device; fall back to the system default.
            if ((!string.IsNullOrEmpty(DeviceId)) && ((device = DeviceManagement.GetDevice(DeviceId)) != null))
            {
                if (DeviceType == DeviceType.Loopback)
                {
                    capture = new WasapiLoopbackCapture(device);
                }
                else if (DeviceType == DeviceType.Microphone)
                {
                    capture = new WasapiCapture(device);
                }
            }
            else
            {
                if (DeviceType == DeviceType.Loopback)
                {
                    capture = new WasapiLoopbackCapture();
                }
                else if (DeviceType == DeviceType.Microphone)
                {
                    capture = new WasapiCapture();
                }
            }

            if (debugMode)
            {
                // Dump both the raw capture and the converted output to WAV files
                // in the working directory for offline inspection.
                var culture = CultureInfo.InvariantCulture;

                var inFilePath = Path.Combine(Directory.GetCurrentDirectory(), $"{ConversationId}-{culture.TextInfo.ToLower(DeviceType.ToString())}-raw.wav");
                inFileWriter = new WaveFileWriter(inFilePath, capture.WaveFormat);

                var outFilePath = Path.Combine(Directory.GetCurrentDirectory(), $"{ConversationId}-{culture.TextInfo.ToLower(DeviceType.ToString())}-out.wav");
                outFileWriter = new WaveFileWriter(outFilePath, outFormat);

                DisplayWaveFormat(capture.WaveFormat);
                Console.WriteLine();
            }

            // Pipeline: capture format -> mono -> 16 kHz -> 16-bit PCM.
            audioIeee       = new StreamSampleProvider(capture.WaveFormat);
            audioMono       = new StereoToMonoSampleProvider(audioIeee);
            audioResampling = new WdlResamplingSampleProvider(audioMono, 16000);
            audioPcm        = new SampleToWaveProvider16(audioResampling);

            try
            {
                socket = new ClientWebSocket();
                socket.Options.SetRequestHeader("ConversationId", ConversationId);
                socket.Options.SetRequestHeader("SpeakerType", DeviceToSpeakerConverter.Convert(DeviceType).ToString());
                await socket.ConnectAsync(SocketUri, CancellationToken.None).ConfigureAwait(false);

                if (socket.State == WebSocketState.Open)
                {
                    Console.WriteLine($"Successfully connected to {SocketUri}.");
                }
            }
            catch (AggregateException e)
            {
                Console.WriteLine($"Failed to connect to {SocketUri}.");
                Console.WriteLine(e.Message);
                throw;
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Exception : {ex.Message}");
                throw;
            }

            // Stream each captured chunk (resampled) over the open socket.
            // NOTE(review): async lambda event handlers are effectively async void —
            // exceptions thrown here are unobservable by the caller.
            capture.DataAvailable += async(s, a) =>
            {
                Console.WriteLine($"Captured {a.BytesRecorded} bytes on {DeviceType}.");

                if (socket.State == WebSocketState.Open)
                {
                    var data = ResampleAudioInput(a.Buffer, a.BytesRecorded, debugMode);

                    try
                    {
                        // endOfMessage: false — chunks form one continuous binary stream.
                        await socket.SendAsync(data, WebSocketMessageType.Binary, false, CancellationToken.None).ConfigureAwait(false);
                    }
                    catch (AggregateException ex)
                    {
                        Console.WriteLine($"Exception on SendAsync: {ex.Message}");
                    }
                }
            };

            // On capture stop: close the socket gracefully and signal completion.
            capture.RecordingStopped += async(s, a) =>
            {
                Console.WriteLine($"Recording stopped on {DeviceType}.");

                if (socket.State == WebSocketState.Open)
                {
                    try
                    {
                        await socket.CloseAsync(WebSocketCloseStatus.NormalClosure, "Recording Stopped", CancellationToken.None).ConfigureAwait(false);
                    }
                    catch (AggregateException ex)
                    {
                        Console.WriteLine($"Exception on CloseAsync: {ex.Message}");
                    }

                    Console.WriteLine($"Connection closed.");
                    taskCompletionSource.SetResult(0);
                }
            };
        }
 /// <summary>
 /// Called when a new input signal is assigned; rebuilds the 16-bit PCM wave
 /// provider that wraps it. Any previously wrapped signal is simply replaced.
 /// </summary>
 /// <param name="newInput">The new audio signal to expose as 16-bit PCM.</param>
 protected void InputWasSet(AudioSignal newInput)
 {
     FWave16Provider = new SampleToWaveProvider16(newInput);
 }