/// <summary>
/// Add a new input to the mixer
/// </summary>
/// <param name="waveProvider">The wave input to add</param>
public void AddInputStream(IWaveProvider waveProvider)
{
    if (waveProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Must be IEEE floating point", "waveProvider");
    if (waveProvider.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Only 32 bit audio currently supported", "waveProvider");

    if (inputs.Count == 0)
    {
        // first one - set the format
        int sampleRate = waveProvider.WaveFormat.SampleRate;
        int channels = waveProvider.WaveFormat.Channels;
        this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
    }
    else
    {
        if (!waveProvider.WaveFormat.Equals(waveFormat))
            throw new ArgumentException("All incoming channels must have the same format", "waveProvider");
    }

    lock (inputs)
    {
        this.inputs.Add(waveProvider);
    }
}
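// Example usage (a minimal sketch, not part of the library source): mixing two
// files through MixingWaveProvider32. The file names are hypothetical, and
// WaveChannel32 (shown later in this file) converts each source to the 32 bit
// IEEE float format that AddInputStream requires. Both inputs must share the
// same sample rate and channel count.
public static void MixTwoFiles()
{
    using (var reader1 = new WaveFileReader("drums.wav"))       // hypothetical file
    using (var reader2 = new WaveFileReader("bass.wav"))        // hypothetical file
    using (var input1 = new WaveChannel32(reader1, 1.0f, 0.0f)) // stereo IEEE float
    using (var input2 = new WaveChannel32(reader2, 1.0f, 0.0f))
    {
        var mixer = new MixingWaveProvider32();
        mixer.AddInputStream(input1);
        mixer.AddInputStream(input2);

        var buffer = new byte[mixer.WaveFormat.AverageBytesPerSecond]; // one second
        int bytesRead = mixer.Read(buffer, 0, buffer.Length);          // buffer now holds mixed audio
    }
}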
/// <summary>
/// WaveStream to resample using the DMO Resampler
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <param name="outputFormat">Desired Output Format</param>
public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
{
    this.inputProvider = inputProvider;
    this.inputStream = inputProvider as WaveStream;
    this.outputFormat = outputFormat;
    this.dmoResampler = new DmoResampler();
    if (!dmoResampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
    {
        throw new ArgumentException("Unsupported Input Stream format", "inputProvider");
    }
    dmoResampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
    if (!dmoResampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
    {
        throw new ArgumentException("Unsupported Output Stream format", "outputFormat");
    }
    dmoResampler.MediaObject.SetOutputWaveFormat(0, outputFormat);

    if (inputStream != null)
    {
        position = InputToOutputPosition(inputStream.Position);
    }
    this.inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
    this.outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
}
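// Example usage (a sketch; the DMO resampler is a Windows Media Object, so this
// assumes a COM-initialized thread): resampling a file to 16 kHz. "input.wav"
// is a hypothetical file name.
public static void ResampleTo16kHz()
{
    using (var reader = new WaveFileReader("input.wav"))
    {
        var targetFormat = new WaveFormat(16000, reader.WaveFormat.BitsPerSample, reader.WaveFormat.Channels);
        using (var resampler = new ResamplerDmoStream(reader, targetFormat))
        {
            var buffer = new byte[targetFormat.AverageBytesPerSecond];
            int read;
            while ((read = resampler.Read(buffer, 0, buffer.Length)) > 0)
            {
                // consume "read" bytes of resampled audio
            }
        }
    }
}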
/// <summary>
/// Add a new input to the mixer
/// </summary>
/// <param name="waveStream">The wave input to add</param>
public void AddInputStream(WaveStream waveStream)
{
    if (waveStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Must be IEEE floating point", "waveStream");
    if (waveStream.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Only 32 bit audio currently supported", "waveStream");

    if (inputStreams.Count == 0)
    {
        // first one - set the format
        int sampleRate = waveStream.WaveFormat.SampleRate;
        int channels = waveStream.WaveFormat.Channels;
        this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
    }
    else
    {
        if (!waveStream.WaveFormat.Equals(waveFormat))
            throw new ArgumentException("All incoming channels must have the same format", "waveStream");
    }

    lock (inputsLock)
    {
        this.inputStreams.Add(waveStream);
        this.length = Math.Max(this.length, waveStream.Length);
        // get to the right point in this input file
        waveStream.Position = Position;
    }
}
/// <summary>
/// Creates a new 32 bit WaveMixerStream
/// </summary>
public WaveMixerStream32()
{
    AutoStop = true;
    waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
    bytesPerSample = 4;
    inputStreams = new List<WaveStream>();
    inputsLock = new object();
}
/// <summary>
/// Initializes a new instance of the <see cref="WasapiCapture"/> class.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
/// <param name="useEventSync">true to synchronize using an event; false to use sleep.</param>
public WasapiCapture(MMDevice captureDevice, bool useEventSync)
{
    syncContext = SynchronizationContext.Current;
    audioClient = captureDevice.AudioClient;
    ShareMode = AudioClientShareMode.Shared;
    isUsingEventSync = useEventSync;
    waveFormat = audioClient.MixFormat;
}
/// <summary>
/// Creates a Wave File Reader based on an input stream
/// </summary>
/// <param name="inputStream">The input stream containing a WAV file including header</param>
public WaveFileReader(Stream inputStream)
{
    this.waveStream = inputStream;
    var chunkReader = new WaveFileChunkReader();
    chunkReader.ReadWaveHeader(inputStream);
    this.waveFormat = chunkReader.WaveFormat;
    this.dataPosition = chunkReader.DataChunkPosition;
    this.dataChunkLength = chunkReader.DataChunkLength;
    this.chunks = chunkReader.RiffChunks;
    Position = 0;
}
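// Example usage (sketch): opening a WAV file and walking its sample frames.
// "test.wav" is a hypothetical path; Length comes from the WaveStream base class.
public static void DumpInfo()
{
    using (var reader = new WaveFileReader(File.OpenRead("test.wav")))
    {
        Console.WriteLine("Format: {0}, Length: {1} bytes", reader.WaveFormat, reader.Length);
        var frame = new byte[reader.WaveFormat.BlockAlign];
        while (reader.Read(frame, 0, frame.Length) == frame.Length)
        {
            // each iteration reads one sample frame across all channels
        }
    }
}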
/// <summary>
/// Creates a new Wave16ToFloatProvider
/// </summary>
/// <param name="sourceProvider">the source provider</param>
public Wave16ToFloatProvider(IWaveProvider sourceProvider)
{
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
        throw new ArgumentException("Only PCM supported");
    if (sourceProvider.WaveFormat.BitsPerSample != 16)
        throw new ArgumentException("Only 16 bit audio supported");

    waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceProvider.WaveFormat.SampleRate, sourceProvider.WaveFormat.Channels);
    this.sourceProvider = sourceProvider;
    this.volume = 1.0f;
}
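// Example usage (sketch): wrapping a 16 bit PCM source so it can feed
// float-only components such as the 32 bit mixer above. "voice.wav" is a
// hypothetical 16 bit PCM file.
public static void ConvertToFloat()
{
    using (var reader = new WaveFileReader("voice.wav"))
    {
        var floatProvider = new Wave16ToFloatProvider(reader);
        // floatProvider.WaveFormat is now 32 bit IEEE float at the
        // source sample rate and channel count
        var buffer = new byte[floatProvider.WaveFormat.AverageBytesPerSecond];
        int read = floatProvider.Read(buffer, 0, buffer.Length);
    }
}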
/// <summary>
/// Initializes the Audio Client
/// </summary>
/// <param name="shareMode">Share Mode</param>
/// <param name="streamFlags">Stream Flags</param>
/// <param name="bufferDuration">Buffer Duration</param>
/// <param name="periodicity">Periodicity</param>
/// <param name="waveFormat">Wave Format</param>
/// <param name="audioSessionGuid">Audio Session GUID (can be null)</param>
public void Initialize(AudioClientShareMode shareMode,
    AudioClientStreamFlags streamFlags,
    long bufferDuration,
    long periodicity,
    WaveFormat waveFormat,
    Guid audioSessionGuid)
{
    int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
    Marshal.ThrowExceptionForHR(hresult);
    // may have changed the mix format so reset it
    mixFormat = null;
}
/// <summary>
/// Creates a new WaveFloatTo16Provider
/// </summary>
/// <param name="sourceProvider">the source provider</param>
public WaveFloatTo16Provider(IWaveProvider sourceProvider)
{
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Input wave provider must be IEEE float", "sourceProvider");
    if (sourceProvider.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Input wave provider must be 32 bit", "sourceProvider");

    waveFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 16, sourceProvider.WaveFormat.Channels);
    this.sourceProvider = sourceProvider;
    this.volume = 1.0f;
}
/// <summary>
/// Creates a WaveFormat with custom members
/// </summary>
/// <param name="tag">The encoding</param>
/// <param name="sampleRate">Sample Rate</param>
/// <param name="channels">Number of channels</param>
/// <param name="averageBytesPerSecond">Average Bytes Per Second</param>
/// <param name="blockAlign">Block Align</param>
/// <param name="bitsPerSample">Bits Per Sample</param>
/// <returns>The custom WaveFormat</returns>
public static WaveFormat CreateCustomFormat(WaveFormatEncoding tag, int sampleRate, int channels, int averageBytesPerSecond, int blockAlign, int bitsPerSample)
{
    WaveFormat waveFormat = new WaveFormat();
    waveFormat.waveFormatTag = tag;
    waveFormat.channels = (short)channels;
    waveFormat.sampleRate = sampleRate;
    waveFormat.averageBytesPerSecond = averageBytesPerSecond;
    waveFormat.blockAlign = (short)blockAlign;
    waveFormat.bitsPerSample = (short)bitsPerSample;
    waveFormat.extraSize = 0;
    return waveFormat;
}
/// <summary>
/// Creates a new Wave32To16Stream
/// </summary>
/// <param name="sourceStream">the source stream</param>
public Wave32To16Stream(WaveStream sourceStream)
{
    if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Only 32 bit Floating point supported");
    if (sourceStream.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Only 32 bit Floating point supported");

    waveFormat = new WaveFormat(sourceStream.WaveFormat.SampleRate, 16, sourceStream.WaveFormat.Channels);
    this.volume = 1.0f;
    this.sourceStream = sourceStream;
    length = sourceStream.Length / 2;
    position = sourceStream.Position / 2;
}
internal MmResult WaveOutOpen(out IntPtr waveOutHandle, int deviceNumber, WaveFormat waveFormat, WaveInterop.WaveCallback callback)
{
    MmResult result;
    if (Strategy == WaveCallbackStrategy.FunctionCallback)
    {
        result = WaveInterop.waveOutOpen(out waveOutHandle, (IntPtr)deviceNumber, waveFormat, callback, IntPtr.Zero, WaveInterop.WaveInOutOpenFlags.CallbackFunction);
    }
    else
    {
        result = WaveInterop.waveOutOpenWindow(out waveOutHandle, (IntPtr)deviceNumber, waveFormat, this.Handle, IntPtr.Zero, WaveInterop.WaveInOutOpenFlags.CallbackWindow);
    }
    return result;
}
/// <summary>
/// Creates a multiplexing wave provider, allowing re-patching of input channels to different
/// output channels
/// </summary>
/// <param name="inputs">Input wave providers. Must all be of the same format, but can have any number of channels</param>
/// <param name="numberOfOutputChannels">Desired number of output channels.</param>
public MultiplexingWaveProvider(IEnumerable<IWaveProvider> inputs, int numberOfOutputChannels)
{
    this.inputs = new List<IWaveProvider>(inputs);
    this.outputChannelCount = numberOfOutputChannels;
    if (this.inputs.Count == 0)
    {
        throw new ArgumentException("You must provide at least one input");
    }
    if (numberOfOutputChannels < 1)
    {
        throw new ArgumentException("You must provide at least one output");
    }
    foreach (var input in this.inputs)
    {
        if (this.waveFormat == null)
        {
            if (input.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
            {
                this.waveFormat = new WaveFormat(input.WaveFormat.SampleRate, input.WaveFormat.BitsPerSample, numberOfOutputChannels);
            }
            else if (input.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(input.WaveFormat.SampleRate, numberOfOutputChannels);
            }
            else
            {
                throw new ArgumentException("Only PCM and 32 bit float are supported");
            }
        }
        else
        {
            if (input.WaveFormat.BitsPerSample != this.waveFormat.BitsPerSample)
            {
                throw new ArgumentException("All inputs must have the same bit depth");
            }
            if (input.WaveFormat.SampleRate != this.waveFormat.SampleRate)
            {
                throw new ArgumentException("All inputs must have the same sample rate");
            }
        }
        inputChannelCount += input.WaveFormat.Channels;
    }
    this.bytesPerSample = this.waveFormat.BitsPerSample / 8;

    // default mapping: output channel n is fed from input channel (n % inputChannelCount)
    mappings = new List<int>();
    for (int n = 0; n < outputChannelCount; n++)
    {
        mappings.Add(n % inputChannelCount);
    }
}
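// Example usage (sketch): sending a mono file to both channels of a stereo
// output. With one input channel and two output channels, the default mapping
// built above (output n -> input n % inputChannelCount) routes the single
// input channel to both left and right. "mono.wav" is a hypothetical file.
public static void MonoToBothChannels()
{
    using (var reader = new WaveFileReader("mono.wav"))
    {
        var multiplexer = new MultiplexingWaveProvider(new IWaveProvider[] { reader }, 2);
        var buffer = new byte[multiplexer.WaveFormat.AverageBytesPerSecond];
        int read = multiplexer.Read(buffer, 0, buffer.Length);
    }
}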
/// <summary>
/// Creates a new ACM frame decompressor
/// </summary>
/// <param name="sourceFormat">The MP3 source format</param>
public AcmMp3FrameDecompressor(WaveFormat sourceFormat)
{
    this.pcmFormat = AcmStream.SuggestPcmFormat(sourceFormat);
    try
    {
        conversionStream = new AcmStream(sourceFormat, pcmFormat);
    }
    catch (Exception)
    {
        disposed = true;
        GC.SuppressFinalize(this);
        throw;
    }
}
/// <summary>
/// WaveFileWriter that actually writes to a stream
/// </summary>
/// <param name="outStream">Stream to be written to</param>
/// <param name="format">Wave format to use</param>
public WaveFileWriter(Stream outStream, WaveFormat format)
{
    this.outStream = outStream;
    this.format = format;
    this.writer = new BinaryWriter(outStream, System.Text.Encoding.UTF8);
    this.writer.Write(System.Text.Encoding.UTF8.GetBytes("RIFF"));
    this.writer.Write((int)0); // placeholder
    this.writer.Write(System.Text.Encoding.UTF8.GetBytes("WAVE"));

    this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fmt "));
    format.Serialize(this.writer);

    CreateFactChunk();
    WriteDataChunkHeader();
}
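// Example usage (sketch): writing one second of a 440 Hz sine wave as 16 bit
// mono PCM. "sine.wav" is a hypothetical output path; Write is the writer's
// Stream-style byte API.
public static void WriteSineWave()
{
    var format = new WaveFormat(44100, 16, 1);
    using (var writer = new WaveFileWriter(File.Create("sine.wav"), format))
    {
        var buffer = new byte[format.AverageBytesPerSecond]; // one second
        for (int n = 0; n < buffer.Length / 2; n++)
        {
            short sample = (short)(Math.Sin(2 * Math.PI * 440.0 * n / format.SampleRate) * short.MaxValue);
            buffer[2 * n] = (byte)(sample & 0xFF);            // little-endian low byte
            buffer[2 * n + 1] = (byte)((sample >> 8) & 0xFF); // high byte
        }
        writer.Write(buffer, 0, buffer.Length);
    }
}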
/// <summary>
/// Prepares a Wave input device for recording
/// </summary>
/// <param name="callbackInfo">The callback strategy to use</param>
public WaveIn(WaveCallbackInfo callbackInfo)
{
    syncContext = SynchronizationContext.Current;
    if ((callbackInfo.Strategy == WaveCallbackStrategy.NewWindow || callbackInfo.Strategy == WaveCallbackStrategy.ExistingWindow) &&
        syncContext == null)
    {
        throw new InvalidOperationException("Use WaveInEvent to record on a background thread");
    }

    DeviceNumber = 0;
    WaveFormat = new WaveFormat(8000, 16, 1);
    BufferMilliseconds = 100;
    NumberOfBuffers = 3;
    callback = Callback;
    this.callbackInfo = callbackInfo;
    callbackInfo.Connect(callback);
}
/// <summary>
/// Creates a new MediaFoundationReader based on the supplied file
/// </summary>
/// <param name="file">Filename</param>
/// <param name="settings">Advanced settings</param>
public MediaFoundationReader(string file, MediaFoundationReaderSettings settings)
{
    MediaFoundationApi.Startup();
    this.settings = settings;
    this.file = file;
    var reader = CreateReader(settings);

    waveFormat = GetCurrentWaveFormat(reader);

    reader.SetStreamSelection(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, true);
    length = GetLength(reader);

    if (settings.SingleReaderObject)
    {
        pReader = reader;
    }
}
/// <summary>
/// Creates a new mono waveprovider based on a stereo input
/// </summary>
/// <param name="sourceProvider">Stereo 16 bit PCM input</param>
public StereoToMonoProvider16(IWaveProvider sourceProvider)
{
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
    {
        throw new ArgumentException("Source must be PCM");
    }
    if (sourceProvider.WaveFormat.Channels != 2)
    {
        throw new ArgumentException("Source must be stereo");
    }
    if (sourceProvider.WaveFormat.BitsPerSample != 16)
    {
        throw new ArgumentException("Source must be 16 bit");
    }
    this.sourceProvider = sourceProvider;
    this.outputFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 1);
}
/// <summary>
/// Creates a new stereo waveprovider based on a mono input
/// </summary>
/// <param name="sourceProvider">Mono 16 bit PCM input</param>
public MonoToStereoProvider16(IWaveProvider sourceProvider)
{
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
    {
        throw new ArgumentException("Source must be PCM");
    }
    if (sourceProvider.WaveFormat.Channels != 1)
    {
        throw new ArgumentException("Source must be Mono");
    }
    if (sourceProvider.WaveFormat.BitsPerSample != 16)
    {
        throw new ArgumentException("Source must be 16 bit");
    }
    this.sourceProvider = sourceProvider;
    this.outputFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 2);
    RightVolume = 1.0f;
    LeftVolume = 1.0f;
}
/// <summary>
/// Creates a stream that can convert to PCM
/// </summary>
/// <param name="sourceStream">The source stream</param>
/// <returns>A PCM stream</returns>
public static WaveStream CreatePcmStream(WaveStream sourceStream)
{
    if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
    {
        return sourceStream;
    }
    WaveFormat pcmFormat = AcmStream.SuggestPcmFormat(sourceStream.WaveFormat);
    if (pcmFormat.SampleRate < 8000)
    {
        if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.G723)
        {
            pcmFormat = new WaveFormat(8000, 16, 1);
        }
        else
        {
            throw new InvalidOperationException("Invalid suggested output format, please explicitly provide a target format");
        }
    }
    return new WaveFormatConversionStream(pcmFormat, sourceStream);
}
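// Example usage (sketch): converting a compressed WAV (e.g. mu-law or GSM) to
// PCM so that PCM-only consumers can read it. "compressed.wav" is a
// hypothetical file; the target PCM format is the one suggested by the ACM codec.
public static void ConvertToPcm()
{
    using (var reader = new WaveFileReader("compressed.wav"))
    using (var pcmStream = WaveFormatConversionStream.CreatePcmStream(reader))
    {
        var buffer = new byte[pcmStream.WaveFormat.AverageBytesPerSecond];
        int read = pcmStream.Read(buffer, 0, buffer.Length);
    }
}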
/// <summary>
/// Creates a new WaveChannel32
/// </summary>
/// <param name="sourceStream">the source stream</param>
/// <param name="volume">stream volume (1 is 0dB)</param>
/// <param name="pan">pan control (-1 to 1)</param>
public WaveChannel32(WaveStream sourceStream, float volume, float pan)
{
    PadWithZeroes = true;

    var providers = new ISampleChunkConverter[]
    {
        new Mono8SampleChunkConverter(),
        new Stereo8SampleChunkConverter(),
        new Mono16SampleChunkConverter(),
        new Stereo16SampleChunkConverter(),
        new Mono24SampleChunkConverter(),
        new Stereo24SampleChunkConverter(),
        new MonoFloatSampleChunkConverter(),
        new StereoFloatSampleChunkConverter(),
    };
    foreach (var provider in providers)
    {
        if (provider.Supports(sourceStream.WaveFormat))
        {
            this.sampleProvider = provider;
            break;
        }
    }

    if (this.sampleProvider == null)
    {
        throw new ArgumentException("Unsupported sourceStream format");
    }

    // always outputs stereo 32 bit
    waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceStream.WaveFormat.SampleRate, 2);
    destBytesPerSample = 8; // includes stereo factoring

    this.sourceStream = sourceStream;
    this.volume = volume;
    this.pan = pan;
    sourceBytesPerSample = sourceStream.WaveFormat.Channels * sourceStream.WaveFormat.BitsPerSample / 8;

    length = SourceToDest(sourceStream.Length);
    position = 0;
}
/// <summary>
/// Create WmaStream with specific format for uncompressed audio data.
/// </summary>
/// <param name="FileName">Name of asf file</param>
/// <param name="OutputFormat">WaveFormat that defines the desired audio data format</param>
public WmaStream(string FileName, WaveFormat OutputFormat)
{
    m_reader = WM.CreateSyncReader(WMT_RIGHTS.WMT_RIGHT_NO_DRM);
    try
    {
        m_reader.Open(FileName);
        Init(OutputFormat);
    }
    catch
    {
        try
        {
            m_reader.Close();
        }
        catch
        {
        }
        m_reader = null;
        throw;
    }
}
/// <summary>
/// Create a new WaveFormat conversion stream
/// </summary>
/// <param name="targetFormat">Desired output format</param>
/// <param name="sourceStream">Source stream</param>
public WaveFormatConversionStream(WaveFormat targetFormat, WaveStream sourceStream)
{
    this.sourceStream = sourceStream;
    this.targetFormat = targetFormat;
    conversionStream = new AcmStream(sourceStream.WaveFormat, targetFormat);

    /*try
    {
        // work out how many bytes the entire input stream will convert to
        length = conversionStream.SourceToDest((int)sourceStream.Length);
    }
    catch
    {
        Dispose();
        throw;
    }*/
    length = EstimateSourceToDest((int)sourceStream.Length);

    position = 0;
    preferredSourceReadSize = Math.Min(sourceStream.WaveFormat.AverageBytesPerSecond, conversionStream.SourceBuffer.Length);
    preferredSourceReadSize -= (preferredSourceReadSize % sourceStream.WaveFormat.BlockAlign);
}
private static long BytesToNsPosition(int bytes, WaveFormat waveFormat)
{
    long nsPosition = (10000000L * bytes) / waveFormat.AverageBytesPerSecond;
    return nsPosition;
}
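// Worked example (illustrative): for 44.1 kHz, 16 bit, stereo audio,
// AverageBytesPerSecond is 44100 * 2 * 2 = 176400, so 88200 bytes gives
// (10000000 * 88200) / 176400 = 5000000 hundred-nanosecond units, i.e. 0.5 s.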
/// <summary>
/// Creates a new AiffFileWriter
/// </summary>
/// <param name="filename">The filename to write to</param>
/// <param name="format">The Wave Format of the output data</param>
public AiffFileWriter(string filename, WaveFormat format)
    : this(new FileStream(filename, FileMode.Create, FileAccess.Write, FileShare.Read), format)
{
    this.filename = filename;
}
/// <summary>
/// Creates an ACM MP3 Frame decompressor. This is the default with BestCS.Audio
/// </summary>
/// <param name="mp3Format">A WaveFormat object describing the MP3 source format</param>
/// <returns>An ACM frame decompressor for the given format</returns>
public static IMp3FrameDecompressor CreateAcmFrameDecompressor(WaveFormat mp3Format)
{
    // new DmoMp3FrameDecompressor(this.Mp3WaveFormat);
    return new AcmMp3FrameDecompressor(mp3Format);
}
/// <summary>
/// Opens MP3 from a stream rather than a file
/// Will not dispose of this stream itself
/// </summary>
/// <param name="inputStream">The incoming stream containing MP3 data</param>
/// <param name="frameDecompressorBuilder">Factory method to build a frame decompressor</param>
public Mp3FileReader(Stream inputStream, FrameDecompressorBuilder frameDecompressorBuilder)
{
    if (inputStream == null) throw new ArgumentNullException("inputStream");
    try
    {
        mp3Stream = inputStream;
        id3v2Tag = Id3v2Tag.ReadTag(mp3Stream);

        dataStartPosition = mp3Stream.Position;
        var firstFrame = Mp3Frame.LoadFromStream(mp3Stream);
        if (firstFrame == null)
            throw new InvalidDataException("Invalid MP3 file - no MP3 Frames Detected");
        double bitRate = firstFrame.BitRate;
        xingHeader = XingHeader.LoadXingHeader(firstFrame);
        // If the header exists, we can skip over it when decoding the rest of the file
        if (xingHeader != null) dataStartPosition = mp3Stream.Position;

        // workaround for a longstanding issue with some files failing to load
        // because they report a spurious sample rate change
        var secondFrame = Mp3Frame.LoadFromStream(mp3Stream);
        if (secondFrame != null &&
            (secondFrame.SampleRate != firstFrame.SampleRate ||
             secondFrame.ChannelMode != firstFrame.ChannelMode))
        {
            // assume that the first frame was some kind of VBR/LAME header that we failed to recognise properly
            dataStartPosition = secondFrame.FileOffset;
            // forget about the first frame, the second one is the first one we really care about
            firstFrame = secondFrame;
        }

        this.mp3DataLength = mp3Stream.Length - dataStartPosition;

        // try for an ID3v1 tag as well
        mp3Stream.Position = mp3Stream.Length - 128;
        byte[] tag = new byte[128];
        mp3Stream.Read(tag, 0, 128);
        if (tag[0] == 'T' && tag[1] == 'A' && tag[2] == 'G')
        {
            id3v1Tag = tag;
            this.mp3DataLength -= 128;
        }

        mp3Stream.Position = dataStartPosition;

        // create a temporary MP3 format before we know the real bitrate
        this.Mp3WaveFormat = new Mp3WaveFormat(firstFrame.SampleRate,
            firstFrame.ChannelMode == ChannelMode.Mono ? 1 : 2, firstFrame.FrameLength, (int)bitRate);

        CreateTableOfContents();
        this.tocIndex = 0;

        // [Bit rate in Kilobits/sec] = [Length in kbits] / [time in seconds]
        //                            = [Length in bits ] / [time in milliseconds]
        // Note: in audio, 1 kilobit = 1000 bits.
        // Calculated as a double to minimize rounding errors
        bitRate = (mp3DataLength * 8.0 / TotalSeconds());

        mp3Stream.Position = dataStartPosition;

        // now we know the real bitrate we can create an accurate MP3 WaveFormat
        this.Mp3WaveFormat = new Mp3WaveFormat(firstFrame.SampleRate,
            firstFrame.ChannelMode == ChannelMode.Mono ? 1 : 2, firstFrame.FrameLength, (int)bitRate);
        decompressor = frameDecompressorBuilder(Mp3WaveFormat);
        this.waveFormat = decompressor.OutputFormat;
        this.bytesPerSample = (decompressor.OutputFormat.BitsPerSample) / 8 * decompressor.OutputFormat.Channels;
        // no MP3 frames have more than 1152 samples in them, but some MP3s
        // seem to produce double that, so allow for it
        this.decompressBuffer = new byte[1152 * bytesPerSample * 2];
    }
    catch (Exception)
    {
        if (ownInputStream) inputStream.Dispose();
        throw;
    }
}
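// Example usage (sketch): decoding an MP3 stream to PCM using the ACM frame
// decompressor shown earlier in this file. "song.mp3" is a hypothetical path;
// note the reader does not dispose the input stream, so the caller owns it.
public static void DecodeMp3()
{
    using (var stream = File.OpenRead("song.mp3"))
    using (var reader = new Mp3FileReader(stream, mp3Format => new AcmMp3FrameDecompressor(mp3Format)))
    {
        var buffer = new byte[reader.WaveFormat.AverageBytesPerSecond];
        int read;
        while ((read = reader.Read(buffer, 0, buffer.Length)) > 0)
        {
            // "read" bytes of decoded PCM are now in the buffer
        }
    }
}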
/// <summary>
/// Marshal managed to native
/// </summary>
public IntPtr MarshalManagedToNative(object ManagedObj)
{
    return WaveFormat.MarshalToPtr((WaveFormat)ManagedObj);
}
/// <summary>
/// Creates a new MixingWaveProvider32
/// </summary>
public MixingWaveProvider32()
{
    this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
    this.bytesPerSample = 4;
    this.inputs = new List<IWaveProvider>();
}
/// <summary>
/// Marshal Native to Managed
/// </summary>
public object MarshalNativeToManaged(IntPtr pNativeData)
{
    return WaveFormat.MarshalFromPtr(pNativeData);
}
/// <summary>
/// Creates a new buffered WaveProvider
/// </summary>
/// <param name="waveFormat">WaveFormat</param>
public BufferedWaveProvider(WaveFormat waveFormat)
{
    this.waveFormat = waveFormat;
    this.BufferLength = waveFormat.AverageBytesPerSecond * 5;
}
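// Example usage (sketch): BufferedWaveProvider as a jitter buffer between a
// capture device and a player. AddSamples and the DataAvailable event are
// assumed from this API family; the capture setup mirrors the WaveIn
// constructor shown earlier in this file.
public static BufferedWaveProvider BufferCapturedAudio(WaveIn waveIn)
{
    var buffered = new BufferedWaveProvider(waveIn.WaveFormat);
    waveIn.DataAvailable += (sender, e) => buffered.AddSamples(e.Buffer, 0, e.BytesRecorded);
    waveIn.StartRecording();
    return buffered; // a player initialized with this provider reads whatever has arrived
}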
/// <summary>
/// Tries to find the encoding media type with the closest bitrate to that specified
/// </summary>
/// <param name="audioSubtype">Audio subtype, a value from AudioSubtypes</param>
/// <param name="inputFormat">Your encoder input format (used to check sample rate and channel count)</param>
/// <param name="desiredBitRate">Your desired bitrate</param>
/// <returns>The closest media type, or null if none available</returns>
public static MediaType SelectMediaType(Guid audioSubtype, WaveFormat inputFormat, int desiredBitRate)
{
    return GetOutputMediaTypes(audioSubtype)
        .Where(mt => mt.SampleRate == inputFormat.SampleRate && mt.ChannelCount == inputFormat.Channels)
        .Select(mt => new { MediaType = mt, Delta = Math.Abs(desiredBitRate - mt.AverageBytesPerSecond * 8) })
        .OrderBy(mt => mt.Delta)
        .Select(mt => mt.MediaType)
        .FirstOrDefault();
}
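// Example usage (sketch): choosing the closest available AAC bitrate for a
// 44.1 kHz stereo input. MediaFoundationEncoder is assumed to be the class
// containing SelectMediaType, and AudioSubtypes.MFAudioFormat_AAC the usual
// subtype constant in this API family.
public static void PickClosestAacBitrate()
{
    var inputFormat = new WaveFormat(44100, 16, 2);
    var mediaType = MediaFoundationEncoder.SelectMediaType(
        AudioSubtypes.MFAudioFormat_AAC, inputFormat, 128000); // ~128 kbps target
    if (mediaType == null)
    {
        // no AAC encoder output is available for this rate/channel combination
    }
}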
/// <summary>
/// Ensures valid AIFF header and then finds data offset.
/// </summary>
/// <param name="stream">The stream, positioned at the start of the AIFF file</param>
/// <param name="format">The format found</param>
/// <param name="dataChunkPosition">The position of the data chunk</param>
/// <param name="dataChunkLength">The length of the data chunk</param>
/// <param name="chunks">Additional chunks found</param>
public static void ReadAiffHeader(Stream stream, out WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List<AiffChunk> chunks)
{
    dataChunkPosition = -1;
    format = null;
    BinaryReader br = new BinaryReader(stream);

    if (ReadChunkName(br) != "FORM")
    {
        throw new FormatException("Not an AIFF file - no FORM header.");
    }
    uint fileSize = ConvertInt(br.ReadBytes(4));
    string formType = ReadChunkName(br);
    if (formType != "AIFC" && formType != "AIFF")
    {
        throw new FormatException("Not an AIFF file - no AIFF/AIFC header.");
    }

    dataChunkLength = 0;

    while (br.BaseStream.Position < br.BaseStream.Length)
    {
        AiffChunk nextChunk = ReadChunkHeader(br);
        if (nextChunk.ChunkName == "COMM")
        {
            short numChannels = ConvertShort(br.ReadBytes(2));
            uint numSampleFrames = ConvertInt(br.ReadBytes(4));
            short sampleSize = ConvertShort(br.ReadBytes(2));
            double sampleRate = IEEE.ConvertFromIeeeExtended(br.ReadBytes(10));

            format = new WaveFormat((int)sampleRate, (int)sampleSize, (int)numChannels);

            if (nextChunk.ChunkLength > 18 && formType == "AIFC")
            {
                // In an AIFC file, the compression format is tacked on to the COMM chunk
                string compress = new string(br.ReadChars(4)).ToLower();
                if (compress != "none")
                {
                    throw new FormatException("Compressed AIFC is not supported.");
                }
                br.ReadBytes((int)nextChunk.ChunkLength - 22);
            }
            else
            {
                br.ReadBytes((int)nextChunk.ChunkLength - 18);
            }
        }
        else if (nextChunk.ChunkName == "SSND")
        {
            uint offset = ConvertInt(br.ReadBytes(4));
            uint blockSize = ConvertInt(br.ReadBytes(4));
            dataChunkPosition = nextChunk.ChunkStart + 16 + offset;
            dataChunkLength = (int)nextChunk.ChunkLength - 8;

            br.ReadBytes((int)nextChunk.ChunkLength - 8);
        }
        else
        {
            if (chunks != null)
            {
                chunks.Add(nextChunk);
            }
            br.ReadBytes((int)nextChunk.ChunkLength);
        }

        if (nextChunk.ChunkName == "\0\0\0\0")
        {
            break;
        }
    }

    if (format == null)
    {
        throw new FormatException("Invalid AIFF file - No COMM chunk found.");
    }
    if (dataChunkPosition == -1)
    {
        throw new FormatException("Invalid AIFF file - No SSND chunk found.");
    }
}
/// <summary>
/// Writes a wave file, including a cues chunk
/// </summary>
public CueWaveFileWriter(string fileName, WaveFormat waveFormat)
    : base(fileName, waveFormat)
{
}
/// <summary>
/// Initialises a new instance of RawSourceWaveStream
/// </summary>
/// <param name="sourceStream">The source stream containing raw audio</param>
/// <param name="waveFormat">The waveformat of the audio in the source stream</param>
public RawSourceWaveStream(Stream sourceStream, WaveFormat waveFormat)
{
    this.sourceStream = sourceStream;
    this.waveFormat = waveFormat;
}
/// <summary>
/// Reads from this wave stream
/// </summary>
/// <param name="buffer">Buffer to read into</param>
/// <param name="offset">Offset in buffer</param>
/// <param name="count">Bytes required</param>
/// <returns>Number of bytes read; 0 indicates end of stream</returns>
public override int Read(byte[] buffer, int offset, int count)
{
    if (pReader == null)
    {
        pReader = CreateReader(settings);
    }
    if (repositionTo != -1)
    {
        Reposition(repositionTo);
    }

    int bytesWritten = 0;
    // read in any leftovers from last time
    if (decoderOutputCount > 0)
    {
        bytesWritten += ReadFromDecoderBuffer(buffer, offset, count - bytesWritten);
    }

    while (bytesWritten < count)
    {
        IMFSample pSample;
        MF_SOURCE_READER_FLAG dwFlags;
        ulong timestamp;
        int actualStreamIndex;
        pReader.ReadSample(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, 0, out actualStreamIndex, out dwFlags, out timestamp, out pSample);
        if ((dwFlags & MF_SOURCE_READER_FLAG.MF_SOURCE_READERF_ENDOFSTREAM) != 0)
        {
            // reached the end of the stream
            break;
        }
        else if ((dwFlags & MF_SOURCE_READER_FLAG.MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) != 0)
        {
            waveFormat = GetCurrentWaveFormat(pReader);
            OnWaveFormatChanged();
            // carry on, but user must handle the change of format
        }
        else if (dwFlags != 0)
        {
            throw new InvalidOperationException(String.Format("MediaFoundationReadError {0}", dwFlags));
        }

        IMFMediaBuffer pBuffer;
        pSample.ConvertToContiguousBuffer(out pBuffer);
        IntPtr pAudioData;
        int cbBuffer;
        int pcbMaxLength;
        pBuffer.Lock(out pAudioData, out pcbMaxLength, out cbBuffer);
        EnsureBuffer(cbBuffer);
        Marshal.Copy(pAudioData, decoderOutputBuffer, 0, cbBuffer);
        decoderOutputOffset = 0;
        decoderOutputCount = cbBuffer;

        bytesWritten += ReadFromDecoderBuffer(buffer, offset + bytesWritten, count - bytesWritten);

        pBuffer.Unlock();
        Marshal.ReleaseComObject(pBuffer);
        Marshal.ReleaseComObject(pSample);
    }
    position += bytesWritten;
    return bytesWritten;
}
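// Example usage (sketch): the canonical read loop against MediaFoundationReader.
// "input.m4a" is a hypothetical file; if the source switches formats mid-stream,
// the media-type-changed path above fires and the new WaveFormat should be honoured.
public static void ReadAll()
{
    using (var reader = new MediaFoundationReader("input.m4a", new MediaFoundationReaderSettings()))
    {
        var buffer = new byte[reader.WaveFormat.AverageBytesPerSecond];
        int read;
        while ((read = reader.Read(buffer, 0, buffer.Length)) > 0)
        {
            // process "read" bytes of PCM
        }
    }
}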