Code Example #1
        /// <summary>
        /// Converts an IEEE Floating Point audio buffer into a 16bit PCM compatible buffer.
        /// </summary>
        /// <param name="inputBuffer">The buffer in IEEE Floating Point format.</param>
        /// <param name="length">The number of bytes in the buffer.</param>
        /// <param name="format">The WaveFormat of the buffer.</param>
        /// <returns>A byte array that represents the given buffer converted into PCM format.</returns>
        /// <remarks>Source: https://stackoverflow.com/questions/65467635/converting-wasapiloopbackcapture-buffer-to-pcm</remarks>
        private static byte[] _ToPcm16(byte[] inputBuffer, int length, WaveFormat format)
        {
            if (length == 0)
            {
                return new byte[0]; // No bytes recorded, return an empty array.
            }
            // Create a WaveStream from the input buffer.
            using var memStream   = new MemoryStream(inputBuffer, 0, length);
            using var inputStream = new RawSourceWaveStream(memStream, format);

            // Convert the input stream to a 16bit PCM WaveProvider.
            // The resample target of 96000 / Channels gives 48000 Hz for the usual stereo loopback format.
            var convertedPCM = new SampleToWaveProvider16(
                new WdlResamplingSampleProvider(
                    new WaveToSampleProvider(inputStream),
                    96000 / format.Channels)
                );

            byte[] convertedBuffer = new byte[length];

            using var stream = new MemoryStream();
            int read;

            // Read the converted WaveProvider into a buffer and turn it into a Stream.
            while ((read = convertedPCM.Read(convertedBuffer, 0, length)) > 0)
            {
                stream.Write(convertedBuffer, 0, read);
            }

            // Return the converted Stream as a byte array.
            return stream.ToArray();
        }
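A minimal sketch of how _ToPcm16 could be driven from a WasapiLoopbackCapture, assuming the converted buffers are simply handed off in the DataAvailable handler (the capture setup and the Console wait are illustrative, not part of the original answer):

        // Hypothetical wiring: capture system audio and convert each IEEE-float buffer as it arrives.
        private static void CaptureToPcm16()
        {
            using var capture = new WasapiLoopbackCapture(); // loopback capture delivers IEEE-float frames
            capture.DataAvailable += (sender, e) =>
            {
                byte[] pcm = _ToPcm16(e.Buffer, e.BytesRecorded, capture.WaveFormat);
                // ... write pcm to a file, stream it over the network, etc.
            };
            capture.StartRecording();
            Console.ReadLine(); // record until Enter is pressed
            capture.StopRecording();
        }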
Code Example #2
        public static void CreateWave(Stream oggStream, Stream wavStream)
        {
            // The using declaration disposes the reader even if writing fails part-way.
            using VorbisWaveReader vorbisReader = new VorbisWaveReader(oggStream);
            SampleToWaveProvider16 converter    = new SampleToWaveProvider16(vorbisReader);

            using (var writer = new WaveFileWriter(wavStream, converter.WaveFormat))
            {
                int loopCount = 0;
                var buffer    = new byte[converter.WaveFormat.AverageBytesPerSecond * 4];
                while (true)
                {
                    loopCount++;
                    Debug.WriteLine("looped: " + loopCount.ToString());

                    int bytesRead = converter.Read(buffer, 0, buffer.Length);
                    if (bytesRead == 0)
                    {
                        // end of source provider
                        break;
                    }

                    // Write will throw exception if WAV file becomes too large
                    writer.Write(buffer, 0, bytesRead);
                }
            }

        }
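A possible call site, assuming the OGG source and WAV destination are plain files on disk (the file names are only placeholders):

            // Decode input.ogg to a 16-bit PCM WAV file using the helper above.
            using (var oggStream = File.OpenRead("input.ogg"))
            using (var wavStream = File.Create("output.wav"))
            {
                CreateWave(oggStream, wavStream);
            }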
Code Example #3
 protected override void FillBuffer(float[] buffer, int offset, int count)
 {
     if (Write && InputSignal.Value != null && FWriter != null)
     {
         var byteCount = count * 2; // 16-bit output: two bytes per float sample
         if (FByteBuffer.Length < byteCount)
         {
             FByteBuffer = new byte[byteCount];
         }
         //read bytes from input
         FWave16Provider.Read(FByteBuffer, 0, byteCount);
         //write to stream
         FWriter.Write(FByteBuffer, 0, byteCount);
         SamplesWritten += count;
         FFlushCounter  += count;
         if (FFlushCounter >= 32768)
         {
             FWriter.Flush();
             FFlushCounter = 0;
         }
         FLastWriteState = true;
     }
     else
     {
         FFlushCounter = 0;
         if (FLastWriteState)
         {
             // Writing has just stopped: flush whatever is buffered exactly once.
             // The null-conditional call guards against the writer having been torn down in the meantime.
             FWriter?.Flush();
             FLastWriteState = false;
         }
     }
 }
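The fields used by this override (FWriter, FWave16Provider, FByteBuffer and friends) are not part of the snippet; one rough way they might be initialized, assuming the float input is exposed as an NAudio ISampleProvider and written to a WAV file, is:

        // Hypothetical setup of the fields consumed by FillBuffer above (names follow the snippet).
        private void StartWriting(ISampleProvider inputSamples, string path)
        {
            FWave16Provider = new SampleToWaveProvider16(inputSamples);            // float -> 16-bit PCM
            FWriter         = new WaveFileWriter(path, FWave16Provider.WaveFormat);
            FByteBuffer     = new byte[0];
            FFlushCounter   = 0;
            SamplesWritten  = 0;
            Write           = true;
        }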
Code Example #4
        private byte[] ResampleLinux(byte[] pcm)
        {
            using (MemoryStream mem = new MemoryStream(pcm))
            {
                using (RawSourceWaveStream stream = new RawSourceWaveStream(mem, oldFormat))
                {
                    Pcm16BitToSampleProvider    sampleProvider      = new Pcm16BitToSampleProvider(stream);
                    WdlResamplingSampleProvider resampledProvider   = new WdlResamplingSampleProvider(sampleProvider, newFormat.SampleRate);
                    SampleToWaveProvider16      sampleToWavProvider = new SampleToWaveProvider16(resampledProvider);

                    // Estimate the output size from the sample-rate ratio (bit depth and channel count are assumed unchanged).
                    int    resampled_length = (int)((float)pcm.Length * ((float)newFormat.SampleRate / (float)oldFormat.SampleRate));
                    byte[] ret = new byte[resampled_length];
                    // Read may deliver slightly fewer bytes than requested if the estimate is not block-aligned.
                    sampleToWavProvider.Read(ret, 0, resampled_length);
                    return ret;
                }
            }
        }
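oldFormat and newFormat are fields of the enclosing class; a plausible setup, assuming 16-bit mono PCM on both sides with only the sample rate changing, would be:

        // Hypothetical formats: 44.1 kHz 16-bit mono in, 48 kHz 16-bit mono out.
        private readonly WaveFormat oldFormat = new WaveFormat(44100, 16, 1);
        private readonly WaveFormat newFormat = new WaveFormat(48000, 16, 1);

        // Usage: byte[] resampled = ResampleLinux(pcm16Bytes);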
Code Example #5
        private byte[] convert32bitFloat48000HzStereoPCMTo16bitMonoPCM(WaveInEventArgs e, int sampleRate)
        {
            byte[] recorded_buf    = e.Buffer;
            int    recorded_length = e.BytesRecorded;

            byte[] result_buf = null;
            int    result_len = -1;

            try
            {
                // Convert the raw captured data into a playable stream
                var waveBufferResample = new BufferedWaveProvider(this._WaveIn.WaveFormat);
                waveBufferResample.DiscardOnBufferOverflow = true;
                waveBufferResample.ReadFully    = false; // return only what is buffered instead of padding with silence
                waveBufferResample.BufferLength = recorded_length;
                var sampleStream = new WaveToSampleProvider(waveBufferResample);

                // Downsample
                var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, sampleRate);

                // Stereo to mono
                var monoProvider = new StereoToMonoSampleProvider(resamplingProvider)
                {
                    LeftVolume  = 1f,
                    RightVolume = 1f
                };

                // Convert 32bit float samples to 16bit PCM
                var ieeeToPcm = new SampleToWaveProvider16(monoProvider);

                waveBufferResample.AddSamples(recorded_buf, 0, recorded_length);

                result_len = recorded_length / (2 * (48000 / sampleRate) * 2); // /2 for 32->16 bit, /(48000/sampleRate) for the downsampling, /2 for stereo->mono
                result_buf = new byte[result_len];
                ieeeToPcm.Read(result_buf, 0, result_len);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
                Console.WriteLine("exit...");
                System.Windows.Forms.Application.Exit();
            }

            return result_buf;
        }
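A sketch of how the handler might be hooked up, assuming _WaveIn is a WaveInEvent configured for 32-bit float stereo at 48000 Hz (the 16000 Hz target rate and the consumer of the result are illustrative):

        // Hypothetical capture setup feeding the converter above.
        private void StartCapture()
        {
            this._WaveIn = new WaveInEvent
            {
                WaveFormat = WaveFormat.CreateIeeeFloatWaveFormat(48000, 2) // 32-bit float, stereo
            };
            this._WaveIn.DataAvailable += (sender, e) =>
            {
                byte[] monoPcm16 = convert32bitFloat48000HzStereoPCMTo16bitMonoPCM(e, 16000);
                // ... hand monoPcm16 to a recognizer, file, or network stream
            };
            this._WaveIn.StartRecording();
        }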
Code Example #6
        private byte[] ResampleAudioInput(byte[] audioBuffer, int audioBytesCount, bool debugMode)
        {
            if (debugMode)
            {
                inFileWriter.Write(audioBuffer, 0, audioBytesCount);
            }

            audioIeee.Write(audioBuffer, 0, audioBytesCount);

            // Rough estimate of the resampled output size, based on the sample-rate ratio.
            var numBytes = ((float)audioBytesCount / 4 / capture.WaveFormat.SampleRate) * outFormat.SampleRate;

            byte[] outBuffer     = new byte[Convert.ToInt32(Math.Ceiling(numBytes))];
            var    outBytesCount = audioPcm.Read(outBuffer, 0, outBuffer.Length);

            if (debugMode)
            {
                outFileWriter.Write(outBuffer, 0, outBytesCount);
            }

            // Note that only the first outBytesCount bytes of outBuffer are guaranteed to contain converted audio.
            return outBuffer;
        }
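The audioPcm field is not shown here; judging from the other examples on this page it is most likely the usual WaveToSampleProvider → WdlResamplingSampleProvider → SampleToWaveProvider16 chain reading from whatever audioIeee buffers. A hedged sketch of that chain (floatSource stands in for the provider that exposes the data written to audioIeee):

        // Hypothetical conversion chain: IEEE-float capture format -> 16-bit PCM at outFormat.SampleRate.
        IWaveProvider floatSource = /* provider exposing the data written to audioIeee */ null;
        audioPcm = new SampleToWaveProvider16(
            new WdlResamplingSampleProvider(
                new WaveToSampleProvider(floatSource),
                outFormat.SampleRate));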