/// <summary>
/// Determines whether the specified output format is supported.
/// </summary>
/// <param name="shareMode">The share mode.</param>
/// <param name="desiredFormat">The desired format.</param>
/// <returns>True if the format is supported.</returns>
public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormatProvider desiredFormat)
{
    WaveFormatExtensible closestMatchFormat;
    return IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);
}
/// <summary>
/// Native ACM call that suggests a destination format for the given source format.
/// </summary>
public static extern MmResult acmFormatSuggest(
    IntPtr hAcmDriver,
    [In, MarshalAs(UnmanagedType.CustomMarshaler, MarshalType = "Nequeo.IO.Audio.Formats.WaveFormatCustomMarshaler")]
    WaveFormatProvider sourceFormat,
    [In, Out, MarshalAs(UnmanagedType.CustomMarshaler, MarshalType = "Nequeo.IO.Audio.Formats.WaveFormatCustomMarshaler")]
    WaveFormatProvider destFormat,
    int sizeDestFormat,
    AcmFormatSuggestFlags suggestFlags);
/// <summary>
/// Initializes a new instance of MonoToStereoSampleProvider.
/// </summary>
/// <param name="source">Source sample provider</param>
public MonoToStereoSampleProvider(ISampleProvider source)
{
    if (source.WaveFormat.Channels != 1)
    {
        throw new ArgumentException("Source must be mono");
    }
    this.source = source;
    this.waveFormat = WaveFormatProvider.CreateIeeeFloatWaveFormat(source.WaveFormat.SampleRate, 2);
}
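// Example (hedged sketch): upmix a mono sample provider to stereo. The
// GetMonoSource helper is hypothetical and stands for any 1-channel provider;
// Read(float[], int, int) is the standard ISampleProvider contract.
ISampleProvider monoSource = GetMonoSource(); // hypothetical helper
var stereo = new MonoToStereoSampleProvider(monoSource);

// read one second of interleaved stereo samples (left/right pairs)
float[] buffer = new float[stereo.WaveFormat.SampleRate * 2];
int samplesRead = stereo.Read(buffer, 0, buffer.Length);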
/// <summary>
/// Creates a new ACM stream to convert one format to another, using a
/// specified driver identifier and wave filter.
/// </summary>
/// <param name="driverId">the driver identifier</param>
/// <param name="sourceFormat">the source format</param>
/// <param name="waveFilter">the wave filter</param>
public AcmStream(IntPtr driverId, WaveFormatProvider sourceFormat, WaveFilter waveFilter)
{
    int sourceBufferSize = System.Math.Max(16384, sourceFormat.AverageBytesPerSecond);
    this.sourceFormat = sourceFormat;
    sourceBufferSize -= (sourceBufferSize % sourceFormat.BlockAlign);
    MmException.Try(AcmInterop.acmDriverOpen(out driverHandle, driverId, 0), "acmDriverOpen");
    MmException.Try(AcmInterop.acmStreamOpen(out streamHandle, driverHandle, sourceFormat, sourceFormat,
        waveFilter, IntPtr.Zero, IntPtr.Zero, AcmStreamOpenFlags.NonRealTime), "acmStreamOpen");
    streamHeader = new AcmStreamHeader(streamHandle, sourceBufferSize, SourceToDest(sourceBufferSize));
}
/// <summary>
/// Native ACM call that opens a conversion stream between two formats.
/// </summary>
public static extern MmResult acmStreamOpen(
    out IntPtr hAcmStream,
    IntPtr hAcmDriver,
    [In, MarshalAs(UnmanagedType.CustomMarshaler, MarshalType = "Nequeo.IO.Audio.Formats.WaveFormatCustomMarshaler")]
    WaveFormatProvider sourceFormat,
    [In, MarshalAs(UnmanagedType.CustomMarshaler, MarshalType = "Nequeo.IO.Audio.Formats.WaveFormatCustomMarshaler")]
    WaveFormatProvider destFormat,
    [In] WaveFilter waveFilter,
    IntPtr callback,
    IntPtr instance,
    AcmStreamOpenFlags openFlags);
/// <summary>
/// Creates a Wave File Reader based on an input stream.
/// </summary>
/// <param name="inputStream">The input stream containing a WAV file including header</param>
public WaveReader(Stream inputStream)
{
    this.waveStream = inputStream;
    var chunkReader = new WaveFileChunkReader();
    chunkReader.ReadWaveHeader(inputStream);
    this.waveFormat = chunkReader.WaveFormat;
    this.dataPosition = chunkReader.DataChunkPosition;
    this.dataChunkLength = chunkReader.DataChunkLength;
    this.chunks = chunkReader.RiffChunks;
    Position = 0;
}
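// Example (hedged sketch): read PCM data out of a WAV file. The file path is
// hypothetical, and the stream-style Read method and IDisposable support on
// WaveReader are assumptions here, not shown in this excerpt.
using (var fileStream = File.OpenRead("input.wav")) // hypothetical path
using (var reader = new WaveReader(fileStream))
{
    byte[] buffer = new byte[reader.WaveFormat.AverageBytesPerSecond]; // ~1 second
    int bytesRead;
    while ((bytesRead = reader.Read(buffer, 0, buffer.Length)) > 0)
    {
        // process `bytesRead` bytes of PCM audio here
    }
}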
/// <summary>
/// Initializes the Audio Client.
/// </summary>
/// <param name="shareMode">Share Mode</param>
/// <param name="streamFlags">Stream Flags</param>
/// <param name="bufferDuration">Buffer Duration</param>
/// <param name="periodicity">Periodicity</param>
/// <param name="waveFormat">Wave Format</param>
/// <param name="audioSessionGuid">Audio Session GUID (can be null)</param>
public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags,
    long bufferDuration, long periodicity, WaveFormatProvider waveFormat, Guid audioSessionGuid)
{
    int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity,
        waveFormat, ref audioSessionGuid);
    Marshal.ThrowExceptionForHR(hresult);
    // the call may have changed the mix format, so reset the cached copy
    mixFormat = null;
}
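// Example (hedged sketch): a shared-mode initialization. IAudioClient takes
// its durations in 100-nanosecond units (REFERENCE_TIME), so 100 ms is
// 100 * 10,000. The audioClient instance and its MixFormat property are
// assumptions, not shown in this excerpt.
long bufferDuration = 100 * 10000; // 100 ms in REFERENCE_TIME units
audioClient.Initialize(
    AudioClientShareMode.Shared,
    AudioClientStreamFlags.None,
    bufferDuration,
    0,                     // periodicity: 0 lets the engine choose in shared mode
    audioClient.MixFormat, // assumed property wrapping IAudioClient::GetMixFormat
    Guid.Empty);           // no specific audio session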
/// <summary>
/// Creates a new ACM frame decompressor.
/// </summary>
/// <param name="sourceFormat">The MP3 source format</param>
public AcmMp3FrameDecompressor(WaveFormatProvider sourceFormat)
{
    this.pcmFormat = AcmStream.SuggestPcmFormat(sourceFormat);
    try
    {
        conversionStream = new AcmStream(sourceFormat, pcmFormat);
    }
    catch (Exception)
    {
        disposed = true;
        GC.SuppressFinalize(this);
        throw;
    }
}
/// <summary>
/// Creates a media type from a WaveFormat.
/// </summary>
public static IMFMediaType CreateMediaTypeFromWaveFormat(WaveFormatProvider waveFormat)
{
    var mediaType = CreateMediaType();
    try
    {
        MediaFoundationInterop.MFInitMediaTypeFromWaveFormatEx(mediaType, waveFormat, Marshal.SizeOf(waveFormat));
    }
    catch (Exception)
    {
        Marshal.ReleaseComObject(mediaType);
        throw;
    }
    return mediaType;
}
/// <summary>
/// Converts from an ISampleProvider (IEEE float) to a 24 bit PCM IWaveProvider.
/// Number of channels and sample rate remain unchanged.
/// </summary>
/// <param name="sourceProvider">The input source provider</param>
public SampleToWaveProvider24(ISampleProvider sourceProvider)
{
    if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        throw new ArgumentException("Input source provider must be IEEE float", "sourceProvider");
    }
    if (sourceProvider.WaveFormat.BitsPerSample != 32)
    {
        throw new ArgumentException("Input source provider must be 32 bit", "sourceProvider");
    }
    waveFormat = new WaveFormatProvider(sourceProvider.WaveFormat.SampleRate, 24, sourceProvider.WaveFormat.Channels);
    this.sourceProvider = sourceProvider;
    volume = 1.0f;
}
/// <summary>
/// Suggests an appropriate PCM format that the compressed format can be converted
/// to in one step.
/// </summary>
/// <param name="compressedFormat">The compressed format</param>
/// <returns>The PCM format</returns>
public static WaveFormatProvider SuggestPcmFormat(WaveFormatProvider compressedFormat)
{
    // create a 16-bit PCM format with the same sample rate and channel
    // count, then let the ACM refine the suggestion
    WaveFormatProvider suggestedFormat = new WaveFormatProvider(compressedFormat.SampleRate, 16, compressedFormat.Channels);
    MmException.Try(AcmInterop.acmFormatSuggest(IntPtr.Zero, compressedFormat, suggestedFormat,
        Marshal.SizeOf(suggestedFormat), AcmFormatSuggestFlags.FormatTag), "acmFormatSuggest");

    /*
    IntPtr suggestedFormatPointer = WaveFormat.MarshalToPtr(suggestedFormat);
    IntPtr compressedFormatPointer = WaveFormat.MarshalToPtr(compressedFormat);
    MmResult result = AcmInterop.acmFormatSuggest2(IntPtr.Zero, compressedFormatPointer, suggestedFormatPointer,
        Marshal.SizeOf(suggestedFormat), AcmFormatSuggestFlags.FormatTag);
    suggestedFormat = WaveFormat.MarshalFromPtr(suggestedFormatPointer);
    Marshal.FreeHGlobal(suggestedFormatPointer);
    Marshal.FreeHGlobal(compressedFormatPointer);
    MmException.Try(result, "acmFormatSuggest");
    */

    return suggestedFormat;
}
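// Example (hedged sketch): derive a one-step PCM target for an MP3 format.
// GetMp3Format is a hypothetical helper standing in for the compressed
// source format.
WaveFormatProvider mp3Format = GetMp3Format(); // hypothetical helper
WaveFormatProvider pcmFormat = AcmStream.SuggestPcmFormat(mp3Format);
// the suggested format can then drive an AcmStream conversion, as
// AcmMp3FrameDecompressor does above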
/// <summary>
/// Creates a new ACM stream to convert one format to another. Note that
/// not all conversions can be done in one step.
/// </summary>
/// <param name="sourceFormat">The source audio format</param>
/// <param name="destFormat">The destination audio format</param>
public AcmStream(WaveFormatProvider sourceFormat, WaveFormatProvider destFormat)
{
    try
    {
        streamHandle = IntPtr.Zero;
        this.sourceFormat = sourceFormat;
        int sourceBufferSize = System.Math.Max(65536, sourceFormat.AverageBytesPerSecond);
        sourceBufferSize -= (sourceBufferSize % sourceFormat.BlockAlign);
        MmException.Try(AcmInterop.acmStreamOpen(out streamHandle, IntPtr.Zero, sourceFormat, destFormat,
            null, IntPtr.Zero, IntPtr.Zero, AcmStreamOpenFlags.NonRealTime), "acmStreamOpen");
        int destBufferSize = SourceToDest(sourceBufferSize);
        streamHeader = new AcmStreamHeader(streamHandle, sourceBufferSize, destBufferSize);
        driverHandle = IntPtr.Zero;
    }
    catch
    {
        // clean up resources; Dispose also suppresses the finalizer
        Dispose();
        throw;
    }
}
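// Example (hedged sketch): a conversion loop. The SourceBuffer, DestBuffer
// and Convert(int, out int) members are assumed here by analogy with
// NAudio's AcmStream and are not shown in this excerpt; input and output
// are hypothetical System.IO.Stream instances.
using (var acm = new AcmStream(sourceFormat, destFormat))
{
    int bytesRead;
    while ((bytesRead = input.Read(acm.SourceBuffer, 0, acm.SourceBuffer.Length)) > 0)
    {
        int sourceBytesConverted;
        int converted = acm.Convert(bytesRead, out sourceBytesConverted);
        output.Write(acm.DestBuffer, 0, converted);
    }
}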
/// <summary>
/// Determines whether the specified output format is supported in the given share mode.
/// </summary>
/// <param name="shareMode">Share Mode</param>
/// <param name="desiredFormat">Desired Format</param>
/// <param name="closestMatchFormat">Output: the closest match format.</param>
/// <returns>True if the format is supported</returns>
public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormatProvider desiredFormat,
    out WaveFormatExtensible closestMatchFormat)
{
    int hresult = audioClientInterface.IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);
    // S_OK is 0, S_FALSE is 1
    if (hresult == 0)
    {
        // directly supported
        return true;
    }
    if (hresult == 1)
    {
        // not supported as-is, but closestMatchFormat may hold a suggestion
        return false;
    }
    if (hresult == (int)AudioClientErrors.UnsupportedFormat)
    {
        return false;
    }
    Marshal.ThrowExceptionForHR(hresult);
    // shouldn't get here
    throw new NotSupportedException("Unknown hresult " + hresult);
}
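// Example (hedged sketch): a pre-flight capability check before Initialize.
// The direct assignment assumes WaveFormatExtensible derives from
// WaveFormatProvider, which this excerpt does not show.
WaveFormatProvider desired = new WaveFormatProvider(48000, 16, 2);
WaveFormatExtensible closest;
if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, desired, out closest))
{
    // on S_FALSE the engine proposes the closest usable alternative
    if (closest != null)
    {
        desired = closest; // assumed: WaveFormatExtensible : WaveFormatProvider
    }
}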
/// <summary>
/// Reads and validates the RIFF/WAVE header, recording the wave format,
/// the data chunk position and length, and any other chunks found.
/// </summary>
/// <param name="stream">The stream containing the WAV data.</param>
public void ReadWaveHeader(System.IO.Stream stream)
{
    this.dataChunkPosition = -1;
    this.waveFormat = null;
    this.riffChunks = new List<RiffChunk>();
    this.dataChunkLength = 0;

    var br = new BinaryReader(stream);
    ReadRiffHeader(br);
    this.riffSize = br.ReadUInt32(); // read the file size (minus 8 bytes)

    int riffType = br.ReadInt32();
    if (riffType != ChunkIdentifier.ChunkIdentifierToInt32("WAVE"))
    {
        throw new FormatException("Not a WAVE file - no WAVE header");
    }

    if (isRf64)
    {
        ReadDs64Chunk(br);
    }

    int dataChunkId = ChunkIdentifier.ChunkIdentifierToInt32("data");
    int formatChunkId = ChunkIdentifier.ChunkIdentifierToInt32("fmt ");

    // sometimes a file has more data than is specified after the RIFF header
    long stopPosition = System.Math.Min(riffSize + 8, stream.Length);

    // the - 8 ensures at least 8 bytes remain for a chunk id and length
    while (stream.Position <= stopPosition - 8)
    {
        Int32 chunkIdentifier = br.ReadInt32();
        var chunkLength = br.ReadUInt32();
        if (chunkIdentifier == dataChunkId)
        {
            dataChunkPosition = stream.Position;
            if (!isRf64) // for RF64 files the dataChunkLength is already known
            {
                dataChunkLength = chunkLength;
            }
            stream.Position += chunkLength;
        }
        else if (chunkIdentifier == formatChunkId)
        {
            if (chunkLength > Int32.MaxValue)
            {
                throw new InvalidDataException(string.Format("Format chunk length must be between 0 and {0}.", Int32.MaxValue));
            }
            waveFormat = WaveFormatProvider.FromFormatChunk(br, (int)chunkLength);
        }
        else
        {
            // check for an invalid chunk length
            if (chunkLength > stream.Length - stream.Position)
            {
                // an exception will be thrown further down if we haven't yet
                // found a format and data chunk; otherwise we tolerate the file
                // despite its corrupt data at the end
                break;
            }
            if (storeAllChunks)
            {
                if (chunkLength > Int32.MaxValue)
                {
                    throw new InvalidDataException(string.Format("RiffChunk chunk length must be between 0 and {0}.", Int32.MaxValue));
                }
                riffChunks.Add(GetRiffChunk(stream, chunkIdentifier, (int)chunkLength));
            }
            stream.Position += chunkLength;
        }
    }

    if (waveFormat == null)
    {
        throw new FormatException("Invalid WAV file - No fmt chunk found");
    }
    if (dataChunkPosition == -1)
    {
        throw new FormatException("Invalid WAV file - No data chunk found");
    }
}
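// Example (hedged sketch): inspect a WAV header directly, using the same
// chunk reader properties the WaveReader constructor above consumes.
// The file path is hypothetical.
using (var fileStream = File.OpenRead("example.wav")) // hypothetical path
{
    var chunkReader = new WaveFileChunkReader();
    chunkReader.ReadWaveHeader(fileStream);

    Console.WriteLine("Format: " + chunkReader.WaveFormat);
    Console.WriteLine("Data chunk at " + chunkReader.DataChunkPosition
        + ", length " + chunkReader.DataChunkLength);
}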
/// <summary>
/// Ensures a valid AIFF header and then finds the data offset.
/// </summary>
/// <param name="stream">The stream containing the AIFF data, positioned at the start of the file</param>
/// <param name="format">The format found</param>
/// <param name="dataChunkPosition">The position of the data chunk</param>
/// <param name="dataChunkLength">The length of the data chunk</param>
/// <param name="chunks">Additional chunks found</param>
public static void ReadAiffHeader(System.IO.Stream stream, out WaveFormatProvider format,
    out long dataChunkPosition, out int dataChunkLength, List<AiffChunk> chunks)
{
    dataChunkPosition = -1;
    format = null;
    BinaryReader br = new BinaryReader(stream);

    if (ReadChunkName(br) != "FORM")
    {
        throw new FormatException("Not an AIFF file - no FORM header.");
    }
    uint fileSize = ConvertInt(br.ReadBytes(4));
    string formType = ReadChunkName(br);
    if (formType != "AIFC" && formType != "AIFF")
    {
        throw new FormatException("Not an AIFF file - no AIFF/AIFC header.");
    }

    dataChunkLength = 0;

    while (br.BaseStream.Position < br.BaseStream.Length)
    {
        AiffChunk nextChunk = ReadChunkHeader(br);
        if (nextChunk.ChunkName == "COMM")
        {
            short numChannels = ConvertShort(br.ReadBytes(2));
            uint numSampleFrames = ConvertInt(br.ReadBytes(4));
            short sampleSize = ConvertShort(br.ReadBytes(2));
            double sampleRate = IEEE.ConvertFromIeeeExtended(br.ReadBytes(10));

            format = new WaveFormatProvider((int)sampleRate, (int)sampleSize, (int)numChannels);

            if (nextChunk.ChunkLength > 18 && formType == "AIFC")
            {
                // in an AIFC file, the compression format is tacked on to the COMM chunk
                string compress = new string(br.ReadChars(4)).ToLower();
                if (compress != "none")
                {
                    throw new FormatException("Compressed AIFC is not supported.");
                }
                br.ReadBytes((int)nextChunk.ChunkLength - 22);
            }
            else
            {
                br.ReadBytes((int)nextChunk.ChunkLength - 18);
            }
        }
        else if (nextChunk.ChunkName == "SSND")
        {
            uint offset = ConvertInt(br.ReadBytes(4));
            uint blockSize = ConvertInt(br.ReadBytes(4));
            dataChunkPosition = nextChunk.ChunkStart + 16 + offset;
            dataChunkLength = (int)nextChunk.ChunkLength - 8;
            br.ReadBytes((int)nextChunk.ChunkLength - 8);
        }
        else
        {
            if (chunks != null)
            {
                chunks.Add(nextChunk);
            }
            br.ReadBytes((int)nextChunk.ChunkLength);
        }

        if (nextChunk.ChunkName == "\0\0\0\0")
        {
            break;
        }
    }

    if (format == null)
    {
        throw new FormatException("Invalid AIFF file - No COMM chunk found.");
    }
    if (dataChunkPosition == -1)
    {
        throw new FormatException("Invalid AIFF file - No SSND chunk found.");
    }
}
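// Example (hedged sketch): locate the PCM data inside an AIFF file. The
// containing class of ReadAiffHeader is not shown in this excerpt, so the
// call is written unqualified; the file path is hypothetical.
using (var fileStream = File.OpenRead("sample.aiff")) // hypothetical path
{
    WaveFormatProvider format;
    long dataPosition;
    int dataLength;
    var extraChunks = new List<AiffChunk>();

    ReadAiffHeader(fileStream, out format, out dataPosition, out dataLength, extraChunks);

    fileStream.Position = dataPosition; // seek to the start of the sample data
}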
/// <summary>
/// Creates and wraps a new IMFMediaType object based on a WaveFormat.
/// </summary>
/// <param name="waveFormat">WaveFormat</param>
public MediaType(WaveFormatProvider waveFormat)
{
    mediaType = MediaFoundationApi.CreateMediaTypeFromWaveFormat(waveFormat);
}
/// <summary>
/// Native Media Foundation call that initializes a media type from a WAVEFORMATEX structure.
/// </summary>
internal static extern void MFInitMediaTypeFromWaveFormatEx(
    [In] IMFMediaType pMFType,
    [In] WaveFormatProvider pWaveFormat,
    [In] int cbBufSize);
/// <summary>
/// Opens MP3 from a stream rather than a file.
/// Will not dispose of this stream itself.
/// </summary>
/// <param name="inputStream">The incoming stream containing MP3 data</param>
/// <param name="frameDecompressorBuilder">Factory method to build a frame decompressor</param>
public Mp3Reader(Stream inputStream, FrameDecompressorBuilder frameDecompressorBuilder)
{
    mp3Stream = inputStream;
    id3v2Tag = Id3v2Tag.ReadTag(mp3Stream);

    dataStartPosition = mp3Stream.Position;
    var firstFrame = Mp3Frame.LoadFromStream(mp3Stream);
    // calculated as a double to minimize rounding errors
    double bitRate = firstFrame.BitRate;
    xingHeader = XingHeader.LoadXingHeader(firstFrame);
    // if a Xing header exists, skip over it when decoding the rest of the file
    if (xingHeader != null)
    {
        dataStartPosition = mp3Stream.Position;
    }

    // workaround for a longstanding issue with some files failing to load
    // because they report a spurious sample rate change
    var secondFrame = Mp3Frame.LoadFromStream(mp3Stream);
    if (secondFrame != null &&
        (secondFrame.SampleRate != firstFrame.SampleRate ||
         secondFrame.ChannelMode != firstFrame.ChannelMode))
    {
        // assume the first frame was some kind of VBR/LAME header that we failed to recognise properly
        dataStartPosition = secondFrame.FileOffset;
        // forget about the first frame; the second one is the first we really care about
        firstFrame = secondFrame;
    }

    this.mp3DataLength = mp3Stream.Length - dataStartPosition;

    // check for an ID3v1 tag as well
    mp3Stream.Position = mp3Stream.Length - 128;
    byte[] tag = new byte[128];
    mp3Stream.Read(tag, 0, 128);
    if (tag[0] == 'T' && tag[1] == 'A' && tag[2] == 'G')
    {
        id3v1Tag = tag;
        this.mp3DataLength -= 128;
    }

    mp3Stream.Position = dataStartPosition;

    // create a temporary MP3 format before we know the real bitrate
    this.Mp3WaveFormat = new Mp3WaveFormat(firstFrame.SampleRate,
        firstFrame.ChannelMode == ChannelMode.Mono ? 1 : 2, firstFrame.FrameLength, (int)bitRate);

    CreateTableOfContents();
    this.tocIndex = 0;

    // [bit rate in kilobits/sec] = [length in kilobits] / [time in seconds]
    //                            = [length in bits] / [time in milliseconds]
    // note: in audio, 1 kilobit = 1000 bits
    bitRate = (mp3DataLength * 8.0 / TotalSeconds());

    mp3Stream.Position = dataStartPosition;

    // now that the real bitrate is known, create an accurate format
    this.Mp3WaveFormat = new Mp3WaveFormat(firstFrame.SampleRate,
        firstFrame.ChannelMode == ChannelMode.Mono ? 1 : 2, firstFrame.FrameLength, (int)bitRate);

    decompressor = frameDecompressorBuilder(Mp3WaveFormat);
    this.waveFormat = decompressor.OutputFormat;
    this.bytesPerSample = (decompressor.OutputFormat.BitsPerSample) / 8 * decompressor.OutputFormat.Channels;

    // no MP3 frame has more than 1152 samples, but some files have been seen
    // to produce double that, so allow for it
    this.decompressBuffer = new byte[1152 * bytesPerSample * 2];
}
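// Example (hedged sketch): decode an MP3 stream to PCM using the ACM frame
// decompressor factory shown further down. The file path is hypothetical,
// and the Read method and IDisposable support on Mp3Reader are assumptions
// not shown in this excerpt.
using (var fileStream = File.OpenRead("song.mp3")) // hypothetical path
using (var reader = new Mp3Reader(fileStream, CreateAcmFrameDecompressor))
{
    byte[] buffer = new byte[reader.WaveFormat.AverageBytesPerSecond];
    int bytesRead;
    while ((bytesRead = reader.Read(buffer, 0, buffer.Length)) > 0)
    {
        // `buffer` now holds decompressed PCM audio
    }
}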
/// <summary>
/// Stream wave provider.
/// </summary>
/// <param name="stream">The stream containing the data.</param>
public StreamWaveReader(System.IO.Stream stream)
{
    _stream = stream;
    // the format is fixed at CD-quality stereo: 44.1 kHz, 16-bit, 2 channels
    _waveFormat = new WaveFormatProvider(44100, 16, 2);
}
/// <summary>
/// Initialises a new instance of SampleProviderConverterBase.
/// </summary>
/// <param name="source">Source wave provider</param>
public SampleProviderConverterBase(IWaveProvider source)
{
    this.source = source;
    this.waveFormat = WaveFormatProvider.CreateIeeeFloatWaveFormat(source.WaveFormat.SampleRate, source.WaveFormat.Channels);
}
/// <summary>
/// Creates an ACM MP3 frame decompressor. This is the default with NAudio.
/// </summary>
/// <param name="mp3Format">A WaveFormat object describing the MP3 file format</param>
/// <returns>An ACM MP3 frame decompressor</returns>
public static IMp3FrameDecompressor CreateAcmFrameDecompressor(WaveFormatProvider mp3Format)
{
    // new DmoMp3FrameDecompressor(this.Mp3WaveFormat);
    return new AcmMp3FrameDecompressor(mp3Format);
}
/// <summary>
/// Marshal managed to native.
/// </summary>
public IntPtr MarshalManagedToNative(object ManagedObj)
{
    return WaveFormatProvider.MarshalToPtr((WaveFormatProvider)ManagedObj);
}
/// <summary>
/// Marshal native to managed.
/// </summary>
public object MarshalNativeToManaged(IntPtr pNativeData)
{
    return WaveFormatProvider.MarshalFromPtr(pNativeData);
}
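// Example (hedged sketch): a round trip through the two marshaling helpers
// used by the custom marshaler above. Freeing with Marshal.FreeHGlobal
// assumes MarshalToPtr allocates via AllocHGlobal, which this excerpt
// does not show.
var format = new WaveFormatProvider(44100, 16, 2);
IntPtr native = WaveFormatProvider.MarshalToPtr(format); // managed -> native
try
{
    WaveFormatProvider roundTripped = WaveFormatProvider.MarshalFromPtr(native); // native -> managed
}
finally
{
    Marshal.FreeHGlobal(native); // assumed: buffer came from AllocHGlobal
}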
/// <summary>
/// Stream wave provider.
/// </summary>
/// <param name="stream">The stream containing the data.</param>
public StreamWaveProvider(Stream stream)
{
    _stream = stream;
    // the format is fixed at CD-quality stereo: 44.1 kHz, 16-bit, 2 channels
    _waveFormat = new WaveFormatProvider(44100, 16, 2);
}