Example 1
 public StreamDataMonitorEventArgs(AudioProperties properties, byte[] buffer, int offset, int length)
 {
     Properties = properties;
     Buffer     = buffer;
     Offset     = offset;
     Length     = length;
 }
Example 2
        public IeeeStream(IAudioStream sourceStream)
            : base(sourceStream)
        {
            if (sourceStream.Properties.Format == AudioFormat.IEEE && sourceStream.Properties.BitDepth == 32)
            {
                passthrough = true;
                properties  = sourceStream.Properties;
            }
            else if (sourceStream.Properties.Format == AudioFormat.LPCM && sourceStream.Properties.BitDepth == 16)
            {
                ReadAndConvert = ReadPCM16;
                properties     = new AudioProperties(sourceStream.Properties.Channels,
                                                     sourceStream.Properties.SampleRate, 32, AudioFormat.IEEE);
            }
            else if (sourceStream.Properties.Format == AudioFormat.LPCM && sourceStream.Properties.BitDepth == 24)
            {
                ReadAndConvert = ReadPCM24;
                properties     = new AudioProperties(sourceStream.Properties.Channels,
                                                     sourceStream.Properties.SampleRate, 32, AudioFormat.IEEE);
            }
            else
            {
                throw new ArgumentException("unsupported source format: " + sourceStream.Properties);
            }

            sourceBuffer = new ByteBuffer();
        }
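The constructor above either passes 32-bit IEEE float data through untouched or installs a PCM-to-float conversion path. A minimal usage sketch, assuming an NAudioSourceStream over NAudio's WaveFileReader as in Example 5 (the file path is hypothetical):

    // WaveFileReader is NAudio's WAV reader (a WaveStream); the path is illustrative only.
    var reader = new WaveFileReader("input.wav");
    IAudioStream source = new NAudioSourceStream(reader);

    // For a 16-bit LPCM source this selects the ReadPCM16 branch; the wrapped
    // stream then reports 32-bit AudioFormat.IEEE in its Properties.
    IAudioStream floatStream = new IeeeStream(source);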
Example 3
        public NAudioSinkStream(IAudioStream sourceStream)
        {
            AudioProperties sourceProperties = sourceStream.Properties;

            if (sourceProperties.Format == AudioFormat.LPCM)
            {
                waveFormat = WaveFormat.CreateCustomFormat(
                    WaveFormatEncoding.Pcm,
                    sourceProperties.SampleRate,
                    sourceProperties.Channels,
                    sourceProperties.SampleRate * sourceProperties.Channels * sourceProperties.SampleByteSize,
                    sourceProperties.Channels * sourceProperties.SampleByteSize, sourceProperties.BitDepth);
            }
            else if (sourceProperties.Format == AudioFormat.IEEE)
            {
                waveFormat = WaveFormat.CreateCustomFormat(
                    WaveFormatEncoding.IeeeFloat,
                    sourceProperties.SampleRate,
                    sourceProperties.Channels,
                    sourceProperties.SampleRate * sourceProperties.Channels * sourceProperties.SampleByteSize,
                    sourceProperties.Channels * sourceProperties.SampleByteSize, sourceProperties.BitDepth);
            }
            else
            {
                throw new ArgumentException("unsupported source format: " + sourceProperties);
            }

            this.sourceStream = sourceStream;
        }
Example 4
 public static ByteTimeWarp Convert(TimeWarp timeWarp, AudioProperties properties)
 {
     return new ByteTimeWarp {
         From = TimeUtil.TimeSpanToBytes(timeWarp.From, properties),
         To   = TimeUtil.TimeSpanToBytes(timeWarp.To, properties)
     };
 }
Example 5
        public NAudioSourceStream(WaveStream sourceStream)
        {
            WaveFormat  sourceFormat = sourceStream.WaveFormat;
            AudioFormat format;

            // check for supported formats:
            if (sourceFormat.Encoding == WaveFormatEncoding.Pcm && sourceFormat.BitsPerSample == 16)
            {
                format = AudioFormat.LPCM;
            }
            else if (sourceFormat.Encoding == WaveFormatEncoding.Pcm && sourceFormat.BitsPerSample == 24)
            {
                format = AudioFormat.LPCM;
            }
            else if (sourceFormat.Encoding == WaveFormatEncoding.IeeeFloat && sourceFormat.BitsPerSample == 32)
            {
                format = AudioFormat.IEEE;
            }
            else
            {
                throw new ArgumentException(String.Format("unsupported source format: {0}bit {1}Hz {2}ch {3}",
                                                          sourceFormat.BitsPerSample, sourceFormat.SampleRate, sourceFormat.Channels, sourceFormat.Encoding));
            }

            this.sourceStream = sourceStream;
            this.properties   = new AudioProperties(sourceFormat.Channels, sourceFormat.SampleRate, sourceFormat.BitsPerSample, format);
        }
Example 6
 public MixerStream(int channels, int sampleRate)
 {
     properties    = new AudioProperties(channels, sampleRate, 32, AudioFormat.IEEE);
     sourceStreams = new List<IAudioStream>();
     length        = 0;
     position      = 0;
     sourceBuffer  = new ByteBuffer();
 }
Example 7
        public CircularMemoryWriterStream(AudioProperties properties, MemoryStream target) : base(target, properties)
        {
            if (target.Capacity == 0)
            {
                throw new Exception("Circular stream must have a fixed capacity");
            }

            _bufferFillLevel = 0;
            _bufferHead      = 0;
            _position        = 0;
        }
Example 8
        /// <summary>
        /// Creates a MonoStream that downmixes all channels of the source stream into a single mono channel
        /// and outputs the mono mix to multiple output channels.
        /// </summary>
        /// <param name="sourceStream">the stream to downmix to mono</param>
        /// <param name="outputChannels">the number of channel into which the mono mix should be split</param>
        public MonoStream(IAudioStream sourceStream, int outputChannels) : base(sourceStream)
        {
            if (!(sourceStream.Properties.Format == AudioFormat.IEEE && sourceStream.Properties.BitDepth == 32))
            {
                throw new ArgumentException("unsupported source format: " + sourceStream.Properties);
            }

            properties = new AudioProperties(outputChannels, sourceStream.Properties.SampleRate,
                                             sourceStream.Properties.BitDepth, sourceStream.Properties.Format);
            sourceBuffer = new ByteBuffer();
            downmix      = true;
        }
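A usage sketch for this constructor, reusing the hypothetical floatStream from the sketch under Example 2; all source channels are downmixed into one mono signal, which is then duplicated across two output channels:

    // The source must be 32-bit IEEE float, otherwise the constructor throws.
    IAudioStream monoOnStereo = new MonoStream(floatStream, 2);
    // monoOnStereo.Properties.Channels == 2, but both channels carry the same mono mix.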
Example 9
        /// <summary>
        /// Decodes an audio stream through FFmpeg from an encoded file stream.
        /// Accepts an optional file name hint to help FFmpeg determine the format of
        /// the encoded data.
        /// </summary>
        /// <param name="stream">the stream to decode</param>
        /// <param name="fileName">optional file name hint for FFmpeg</param>
        public FFmpegSourceStream(Stream stream, string fileName)
        {
            sourceStream = stream;
            reader       = new FFmpegReader(stream, FFmpeg.Type.Audio, fileName);

            if (reader.AudioOutputConfig.length == long.MinValue)
            {
                /*
                 * length == FFmpeg AV_NOPTS_VALUE
                 *
                 * This means that for the opened file/format, there is no length/PTS data
                 * available, which also makes seeking more or less impossible.
                 *
                 * As a workaround, an index could be created to map the frames to the file
                 * position, and then seek by file position. The index could be created by
                 * linearly reading through the file (decoding not necessary), and creating
                 * a mapping of AVPacket.pos to the frame time.
                 */
                throw new FileNotSeekableException();
            }

            properties = new AudioProperties(
                reader.AudioOutputConfig.format.channels,
                reader.AudioOutputConfig.format.sample_rate,
                reader.AudioOutputConfig.format.sample_size * 8,
                reader.AudioOutputConfig.format.sample_size == 4 ? AudioFormat.IEEE : AudioFormat.LPCM);

            readerPosition = 0;
            sourceBuffer   = new byte[reader.AudioOutputConfig.frame_size *
                                      reader.AudioOutputConfig.format.channels *
                                      reader.AudioOutputConfig.format.sample_size];
            sourceBufferPosition = 0;
            sourceBufferLength   = -1; // -1 means buffer empty, >= 0 means valid buffer data

            // determine first PTS to handle cases where it is > 0
            try {
                Position = 0;
            }
            catch (InvalidOperationException) {
                readerFirstPTS = readerPosition;
                readerPosition = 0;
                Console.WriteLine("first PTS = " + readerFirstPTS);
            }

            seekIndexCreated = false;
        }
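A usage sketch for this constructor; the file name is hypothetical and only serves as the format hint described above, and Properties is assumed to be the usual IAudioStream property backed by the properties field:

    using (var encoded = File.OpenRead("recording.mp3"))
    {
        // The extension in the hint helps FFmpeg choose a demuxer for the raw stream.
        var decoded = new FFmpegSourceStream(encoded, "recording.mp3");
        Console.WriteLine(decoded.Properties); // channels, sample rate, bit depth, format
    }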
Example 10
        public ResamplingStream(IAudioStream sourceStream, ResamplingQuality quality)
            : base(sourceStream)
        {
            if (!(sourceStream.Properties.Format == AudioFormat.IEEE && sourceStream.Properties.BitDepth == 32))
            {
                throw new ArgumentException("unsupported source format: " + sourceStream.Properties);
            }

            properties = new AudioProperties(sourceStream.Properties.Channels, sourceStream.Properties.SampleRate,
                                             sourceStream.Properties.BitDepth, sourceStream.Properties.Format);

            this.quality = quality;
            SetupResampler();

            sourceBuffer = new ByteBuffer();

            TargetSampleRate = properties.SampleRate;

            position = 0;
        }
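A usage sketch; the ResamplingQuality member name below is hypothetical, since the enum's values do not appear in these examples, and the read-time resampling behavior is inferred, not shown:

    // Quality value is illustrative only.
    var resampler = new ResamplingStream(floatStream, ResamplingQuality.Medium /* hypothetical member */);

    // The constructor initializes TargetSampleRate to the source rate (a pass-through);
    // presumably, assigning a different rate makes the stream resample on read.
    resampler.TargetSampleRate = 48000;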
Example 11
        public NAudioSourceStream(WaveStream sourceStream)
        {
            AudioProperties sourceProperties = GetAudioProperties(sourceStream.WaveFormat);

            // check for supported formats:
            if (sourceProperties.Format == AudioFormat.LPCM && sourceProperties.BitDepth == 16)
            {
            }
            else if (sourceProperties.Format == AudioFormat.LPCM && sourceProperties.BitDepth == 24)
            {
            }
            else if (sourceProperties.Format == AudioFormat.IEEE && sourceProperties.BitDepth == 32)
            {
            }
            else
            {
                throw new ArgumentException(String.Format("unsupported source format: {0}bit {1}Hz {2}ch {3}",
                                                          sourceProperties.BitDepth, sourceProperties.SampleRate, sourceProperties.Channels, sourceProperties.Format));
            }

            this.sourceStream = sourceStream;
            this.properties   = sourceProperties;
        }
Example 12
        /// <summary>
        /// Creates a SurroundDownmixStream that downmixes surround sound of the source stream and outputs a stereo stream.
        /// </summary>
        /// <param name="sourceStream">the stream to downmix to stereo</param>
        public SurroundDownmixStream(IAudioStream sourceStream) : base(sourceStream)
        {
            if (!(sourceStream.Properties.Format == AudioFormat.IEEE && sourceStream.Properties.BitDepth == 32))
            {
                throw new ArgumentException("unsupported source format: " + sourceStream.Properties);
            }

            int sourceChannels = sourceStream.Properties.Channels;

            unsafe
            {
                if (sourceChannels == 4)
                {
                    // Assume 4.0 quad layout
                    DownmixFunction = DownmixQuad;
                }
                else if (sourceChannels == 6)
                {
                    // Assume 5.0/5.1 surround
                    DownmixFunction = Downmix51;
                }
                else if (sourceChannels == 8)
                {
                    // Assume 7.0/7.1
                    DownmixFunction = Downmix71;
                }
                else
                {
                    throw new Exception("Unsupported number of input channels: " + sourceChannels);
                }
            }

            properties = new AudioProperties(2, sourceStream.Properties.SampleRate,
                                             sourceStream.Properties.BitDepth, sourceStream.Properties.Format);
            sourceBuffer = new ByteBuffer();
        }
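A usage sketch; surroundSource stands for any hypothetical 32-bit IEEE float stream with 4, 6, or 8 channels, e.g. an FFmpegSourceStream over a 5.1 file wrapped in an IeeeStream:

    // Channel counts other than 4, 6, or 8 are rejected in the constructor.
    IAudioStream stereo = new SurroundDownmixStream(surroundSource);
    // stereo.Properties.Channels == 2; sample rate and bit depth are unchanged.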
Example 13
 public NullStream(AudioProperties audioProperties, long length)
 {
     this.audioProperties = audioProperties;
     this.length          = length;
     this.position        = 0;
 }
Example 14
 public BlockingFixedLengthFifoStream(AudioProperties properties, int capacity) : base(properties, capacity)
 {
     _readPosition = 0;
     _endOfInput   = false;
 }
Example 15
 public static TimeSpan BytesToTimeSpan(long bytes, Streams.AudioProperties audioProperties)
 {
     return new TimeSpan((long)((double)bytes / audioProperties.SampleBlockByteSize / audioProperties.SampleRate * SECS_TO_TICKS));
 }
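The byte count is divided by the sample-block size (one sample per channel, so presumably Channels * BitDepth / 8 bytes) and by the sample rate to obtain seconds, then scaled to TimeSpan ticks; SECS_TO_TICKS is presumably 10,000,000, the number of 100 ns TimeSpan ticks per second. A worked sketch under those assumptions:

    // 44.1 kHz stereo 32-bit float: one sample block = 2 channels * 4 bytes = 8 bytes.
    var props = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);

    // 352,800 bytes / 8 bytes per block = 44,100 blocks = 1 s = 10,000,000 ticks.
    TimeSpan one = TimeUtil.BytesToTimeSpan(352800, props); // 00:00:01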
Example 16
 public CircularMemoryWriterStream(AudioProperties properties, int capacity) : this(properties, new MemoryStream(new byte[capacity]))
 {
 }
Example 17
        public static long TimeSpanToBytes(TimeSpan timeSpan, Streams.AudioProperties audioProperties)
        {
            long bytes = (long)(timeSpan.TotalSeconds * audioProperties.SampleRate * audioProperties.SampleBlockByteSize);

            return bytes - (bytes % audioProperties.SampleBlockByteSize);
        }
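The final subtraction rounds the raw byte count down to a sample-block boundary, so a computed position never splits a multichannel frame. Continuing the sketch from Example 15:

    // 0.25 s * 44,100 blocks/s * 8 bytes/block = 88,200 bytes, already block-aligned.
    long bytes = TimeUtil.TimeSpanToBytes(TimeSpan.FromSeconds(0.25), props); // 88200

    // Round-tripping through BytesToTimeSpan recovers the aligned time exactly.
    TimeSpan back = TimeUtil.BytesToTimeSpan(bytes, props); // 00:00:00.2500000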
Example 18
 public MemoryWriterStream(MemoryStream target, AudioProperties properties) :
     base(target, properties)
 {
 }
Example 19
 public SineGeneratorStream(int sampleRate, float frequency, TimeSpan length)
 {
     this.properties = new AudioProperties(1, sampleRate, 32, AudioFormat.IEEE);
     this.frequency  = frequency;
     this.length     = TimeUtil.TimeSpanToBytes(length, properties);
 }
Example 20
 public MemoryWriterStream(AudioProperties properties) :
     base(new MemoryStream(), properties)
 {
 }
Example 21
 public MemorySourceStream(MemoryStream source, AudioProperties properties)
 {
     this.source     = source;
     this.properties = properties;
 }