Example No. 1
        /// <summary>
        /// Initializes a new instance of the <see cref="WaveFileStreamReader"/> class.
        /// </summary>
        /// <param name="name">Name of the WAVE file.</param>
        /// <param name="path">Path of the WAVE file.</param>
        /// <param name="startTime">Starting time for streams of data..</param>
        /// <param name="audioBufferSizeMs">The size of each data buffer in milliseconds.</param>
        internal WaveFileStreamReader(string name, string path, DateTime startTime, int audioBufferSizeMs = DefaultAudioBufferSizeMs)
        {
            this.Name = name;
            this.Path = path;
            this.startTime = startTime;
            var file = System.IO.Path.Combine(path, name);

            // Size of the WAVE file in bytes (file.Length would be the length of the path string)
            this.Size = new System.IO.FileInfo(file).Length;
            this.waveFileReader = new BinaryReader(new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read));
            this.waveFormat = WaveFileHelper.ReadWaveFileHeader(this.waveFileReader);
            this.dataLength = WaveFileHelper.ReadWaveDataLength(this.waveFileReader);
            this.dataStart = this.waveFileReader.BaseStream.Position;
            var bufferSize = (int)(this.waveFormat.AvgBytesPerSec * audioBufferSizeMs / 1000);

            this.buffer = new byte[bufferSize];

            // Compute originating times based on audio chunk start time + duration
            var endTime = this.startTime.AddSeconds((double)this.dataLength / (double)this.waveFormat.AvgBytesPerSec);

            this.MessageOriginatingTimeInterval = this.MessageCreationTimeInterval = this.StreamTimeInterval = new TimeInterval(this.startTime, endTime);

            var messageCount = (long)Math.Ceiling((double)this.dataLength / bufferSize);

            this.audioStreamMetadata = new WaveAudioStreamMetadata(
                AudioStreamName,
                typeof(AudioBuffer).AssemblyQualifiedName,
                name,
                path,
                this.startTime,
                endTime,
                messageCount,
                (double)this.dataLength / messageCount,
                audioBufferSizeMs);
        }
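
The buffer and message arithmetic above is easy to sanity-check by hand. Below is a minimal sketch, assuming a hypothetical 16 kHz, 16-bit, mono PCM file (AvgBytesPerSec = 32000), a 100 ms buffer, and 1,000,000 bytes of audio data; none of these values come from the reader itself.

        // Hypothetical values, for illustration only
        int avgBytesPerSec = 32000;     // 16 kHz * 2 bytes * 1 channel
        int audioBufferSizeMs = 100;
        long dataLength = 1_000_000;

        int bufferSize = (int)((long)avgBytesPerSec * audioBufferSizeMs / 1000); // 3200 bytes per chunk
        long messageCount = (long)Math.Ceiling((double)dataLength / bufferSize); // ceil(1000000 / 3200) = 313
        double durationSec = (double)dataLength / avgBytesPerSec;                // 31.25 s of audio

        Console.WriteLine($"{bufferSize} bytes/chunk, {messageCount} messages, {durationSec:F2} s");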
Example No. 2
        private static IEnumerator<ValueTuple<AudioBuffer, DateTime>> EnumerateWaveFile(
            Pipeline pipeline,
            string filename,
            DateTime? audioStartTime = null,
            int targetLatencyMs = 20)
        {
            using (FileStream stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                BinaryReader br = new BinaryReader(stream);
                WaveFormat format = WaveFileHelper.ReadWaveFileHeader(br);

                // Use the provided audio start time, or default to the pipeline's current time
                var startTime = audioStartTime ?? pipeline.GetCurrentTime();

                // Buffer size based on target latency
                int bufferSize = (int)(format.AvgBytesPerSec * targetLatencyMs / 1000);

                // Get total length in bytes of audio data
                long bytesRemaining = WaveFileHelper.ReadWaveDataLength(br);

                byte[] buffer = null;
                while (bytesRemaining > 0)
                {
                    int nextBytesToRead = (int)Math.Min(bufferSize, bytesRemaining);

                    // Re-allocate buffer if necessary
                    if ((buffer == null) || (buffer.Length != nextBytesToRead))
                    {
                        buffer = new byte[nextBytesToRead];
                    }

                    // Read next audio chunk
                    int bytesRead = br.Read(buffer, 0, nextBytesToRead);
                    if (bytesRead == 0)
                    {
                        // Break on end of file
                        break;
                    }

                    // Bytes remaining
                    bytesRemaining -= bytesRead;

                    // Truncate buffer if necessary
                    if (bytesRead < nextBytesToRead)
                    {
                        byte[] buffer2 = new byte[bytesRead];
                        Array.Copy(buffer, 0, buffer2, 0, bytesRead);
                        buffer = buffer2;
                    }

                    // Add duration to get originating time
                    DateTime originatingTime = startTime.AddSeconds((double)bytesRead / (double)format.AvgBytesPerSec);

                    // Update for next audio chunk
                    startTime = originatingTime;

                    yield return ValueTuple.Create(new AudioBuffer(buffer, format), originatingTime);
                }
            }
        }
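
To see the chunking behavior, the enumerator can also be driven by hand outside of a running pipeline. A minimal sketch, assuming a local test.wav file (a placeholder name) and the Data property of Microsoft.Psi.Audio's AudioBuffer; in a real \psi graph the enumerator would typically feed a sequence generator instead.

        // Drive the enumerator directly and print each chunk ("test.wav" is hypothetical)
        using (var pipeline = Pipeline.Create())
        {
            var chunks = EnumerateWaveFile(pipeline, "test.wav", targetLatencyMs: 100);
            while (chunks.MoveNext())
            {
                var (audio, originatingTime) = chunks.Current;
                Console.WriteLine($"{audio.Data.Length,6} bytes @ {originatingTime:HH:mm:ss.fff}");
            }
        }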
Example No. 3
        /// <summary>
        /// Initializes a new instance of the <see cref="WaveStreamSampleSource"/> class.
        /// </summary>
        /// <param name="pipeline">The pipeline to add the component to.</param>
        /// <param name="stream">Audio stream in WAVE format (48KHz, 1-channel, IEEE Float).</param>
        /// <param name="name">An optional name for this component.</param>
        public WaveStreamSampleSource(Pipeline pipeline, Stream stream, string name = nameof(WaveStreamSampleSource))
        {
            this.pipeline = pipeline;
            this.name = name;
            this.In = pipeline.CreateReceiver<bool>(this, this.Play, nameof(this.In));
            this.Out = pipeline.CreateEmitter<AudioBuffer>(this, nameof(this.Out));

            var reader = new BinaryReader(stream);
            var inputFormat = WaveFileHelper.ReadWaveFileHeader(reader);

            // We don't do resampling or format conversion (input must be 1-channel, 48 kHz, float32).
            // Convert offline if needed: ffmpeg -i foo.wav -f wav -acodec pcm_f32le -ar 48000 -ac 1 bar.wav
            if (inputFormat.Channels != 1 ||
                inputFormat.SamplesPerSec != 48000 ||
                (inputFormat.FormatTag != WaveFormatTag.WAVE_FORMAT_IEEE_FLOAT &&
                 inputFormat.FormatTag != WaveFormatTag.WAVE_FORMAT_EXTENSIBLE) ||
                inputFormat.BitsPerSample != 32)
            {
                throw new ArgumentException("Expected 1-channel, 48kHz, float32 audio format.");
            }

            // break into 1 second audio buffers
            var outputFormat = WaveFormat.CreateIeeeFloat(48000, 1);
            var dataLength   = WaveFileHelper.ReadWaveDataLength(reader);

            // Note: stepping over this line in the debugger (e.g., with F10) will throw - still trying to understand why
            var frames = (int)Math.Ceiling((double)dataLength / (double)outputFormat.AvgBytesPerSec);

            this.audioData = new AudioBuffer[frames];
            for (var i = 0; dataLength > 0; i++)
            {
                var count = (int)Math.Min(dataLength, outputFormat.AvgBytesPerSec);
                var bytes = reader.ReadBytes(count);
                this.audioData[i] = new AudioBuffer(bytes, outputFormat);
                dataLength -= count;
            }
        }
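
A sketch of how this component might be wired into a pipeline, assuming a hypothetical chime.wav file in the required 1-channel, 48 kHz, float32 format and \psi's Generators.Return, PipeTo, and Do operators; a single true message on the In receiver triggers playback.

        // Hypothetical wiring: trigger playback once and log each emitted buffer
        using (var pipeline = Pipeline.Create())
        using (var stream = File.OpenRead("chime.wav"))  // must be 1-channel, 48 kHz, float32
        {
            var sample = new WaveStreamSampleSource(pipeline, stream);

            // A single 'true' message starts playback via the Play receiver
            Generators.Return(pipeline, true).PipeTo(sample.In);

            sample.Out.Do(audio => Console.WriteLine($"emitted {audio.Data.Length} bytes"));
            pipeline.Run();
        }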