public IEnumerable<AudioChunk> StreamData( Stream source ) {
	infoBuffer.Length = 0;
	info = new AudioChunk();
	PrimitiveReader reader = new PrimitiveReader( source );
	string signature = reader.ReadASCIIString( 4 );
	if( signature != "FORM" )
		throw new InvalidDataException( "Invalid initial signature." );

	reader.BigEndian = true;
	int formChunkSize = reader.ReadInt32();
	AppendInfoLine( 0, "-- Begin info --" );
	AppendInfoLine( 0, "{0} (Chunk size: {1} bytes, {2} KB, {3} MB)", signature,
		formChunkSize, formChunkSize / 1024, formChunkSize / 1024 / 1024 );

	string format = reader.ReadASCIIString( 4 );
	switch( format ) {
		case "AIFF":
			break;

		// http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/AIFF/Docs/AIFF-C.9.26.91.pdf
		case "AIFC":
			aifcFormat = true;
			break;

		default:
			throw new InvalidDataException( "Invalid form type: " + format );
	}

	// Walk the chunks until the sound data ("SSND") chunk is reached,
	// processing the common ("COMM") chunk and skipping everything else.
	AiffChunkHeader chunk;
	while( true ) {
		chunk = ReadChunkHeader( reader );
		AppendInfoLine( 1, "{0} (Chunk size: {1})", chunk.Signature, chunk.DataSize );

		if( chunk.Signature == "COMM" ) {
			ProcessCommonChunk( chunk, reader );
		} else if( chunk.Signature == "SSND" ) {
			break;
		} else {
			SkipChunkData( reader, chunk.DataSize );
		}
	}

	AppendInfoLine( 0, "-- End info --" );
	fileinfo = infoBuffer.ToString();
	return StreamDataCore( chunk, reader );
}
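
// Sketch (assumption, not shown in the original source): ReadChunkHeader is not
// included above, but the IFF layout used by AIFF fixes what it has to read —
// a 4-character ASCII chunk ID followed by a big-endian 32-bit payload size.
// The struct and member names below simply mirror how they are used in StreamData;
// the real helper may also handle IFF's even-byte padding rule.
AiffChunkHeader ReadChunkHeader( PrimitiveReader reader ) {
	AiffChunkHeader chunk = new AiffChunkHeader();
	chunk.Signature = reader.ReadASCIIString( 4 ); // e.g. "COMM", "SSND"
	chunk.DataSize = reader.ReadInt32();           // reader is already big-endian at this point
	return chunk;
}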
public IEnumerable<AudioChunk> StreamData( Stream source ) {
	infoBuffer.Length = 0;
	chunk = new AudioChunk();
	PrimitiveReader reader = new PrimitiveReader( source );
	string signature = reader.ReadASCIIString( 4 );
	if( signature != ".snd" ) {
		throw new InvalidDataException( "Invalid initial signature." );
	}

	AppendInfoLine( 0, "-- Begin info --" );
	// All .au header fields are big-endian 32-bit unsigned integers.
	reader.BigEndian = true;
	uint dataOffset = reader.ReadUInt32();
	AppendInfoLine( 0, "Data offset: {0}", dataOffset );
	uint dataSize = reader.ReadUInt32();
	// 0xFFFFFFFF means "size unknown": the data runs to the end of the stream.
	if( dataSize == 0xFFFFFFFF ) {
		dataLength = source.Length - dataOffset;
	} else {
		dataLength = dataSize;
	}
	AppendInfoLine( 0, "Data length: {0}", dataLength );

	AuEncoding encoding = (AuEncoding)reader.ReadUInt32();
	AppendInfoLine( 0, "Encoding: {0}", encoding );
	uint sampleRate = reader.ReadUInt32();
	AppendInfoLine( 0, "Sample rate: {0}", sampleRate );
	uint channels = reader.ReadUInt32();
	AppendInfoLine( 0, "Channels: {0}", channels );

	// Anything beyond the fixed 24-byte header is an optional annotation string.
	if( dataOffset > 24 ) {
		int infoLength = (int)( dataOffset - 24 );
		string info = reader.ReadASCIIString( infoLength );
		AppendInfoLine( 0, "Info: {0}", info );
	}

	int bitsPerSample = bitsPerSampleEncoding[(int)encoding];
	int adjustedBitsPerSample = paddedBitsPerSampleEncoding[(int)encoding];
	// One second of audio per buffer.
	bufferSize = (int)( sampleRate * channels * adjustedBitsPerSample / 8 );

	AppendInfoLine( 0, "-- End info --" );
	fileinfo = infoBuffer.ToString();
	return StreamDataCore( reader );
}
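
// Sketch (assumption, not in the original source): the two lookup tables used
// above are indexed by AuEncoding. Assuming the standard Sun .au encoding
// numbering, the stored sample widths would look like this. The values of
// paddedBitsPerSampleEncoding (presumably the width each sample occupies once
// decoded, e.g. mu-law expanded to 16 bits) depend on this codebase and are
// not shown here.
static readonly int[] bitsPerSampleEncoding = {
	0,  // 0: unused
	8,  // 1: G.711 mu-law
	8,  // 2: 8-bit linear PCM
	16, // 3: 16-bit linear PCM
	24, // 4: 24-bit linear PCM
	32, // 5: 32-bit linear PCM
	32, // 6: 32-bit IEEE float
	64, // 7: 64-bit IEEE double
};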
void SendNextBuffer( AudioChunk chunk ) {
	Console.WriteLine( "send buffer " + buffersIndex );
	WaveHeader header = new WaveHeader();
	byte[] data = chunk.Data;
	// Pin the managed buffer so winmm can read from it while it plays.
	GCHandle bufferHandle = GCHandle.Alloc( data, GCHandleType.Pinned );
	header.DataBuffer = bufferHandle.AddrOfPinnedObject();
	header.BufferLength = data.Length;
	header.Loops = 1;

	// Stash the pinned handle and the buffer index so the callback can
	// release them once the device has finished with this buffer.
	UserData userData = new UserData();
	userData.BufferHandle = bufferHandle;
	userData.Index = buffersIndex;
	GCHandle userDataHandle = GCHandle.Alloc( userData, GCHandleType.Pinned );
	header.UserData = GCHandle.ToIntPtr( userDataHandle );

	buffers[buffersIndex] = header;
	uint result = PrepareHeader( handle, ref buffers[buffersIndex], (uint)waveHeaderSize );
	CheckError( result );
	result = Write( handle, ref buffers[buffersIndex], (uint)waveHeaderSize );
	CheckError( result );
}
void InitWinMm( AudioChunk chunk ) {
	handle = IntPtr.Zero;
	WaveFormatEx format = new WaveFormatEx();
	format.Channels = (ushort)chunk.Channels;
	format.ExtraSize = 0;
	format.FormatTag = WaveFormatTag.Pcm;
	format.BitsPerSample = (ushort)chunk.BitsPerSample;
	format.BlockAlign = (ushort)( format.Channels * format.BitsPerSample / 8 );
	format.SampleRate = (uint)chunk.Frequency;
	format.AverageBytesPerSecond = chunk.Frequency * format.BlockAlign;

	WaveOpenFlags flags = WaveOpenFlags.CallbackFunction;
	// Keep a reference to the delegate so it is not garbage collected
	// while the native side still holds a pointer to it.
	callback = ProcessWaveOutCallback;
	// 0xFFFF is intended to select the default output device (the wave mapper).
	uint result = Open( out handle, new UIntPtr( (uint)0xFFFF ), ref format, callback, UIntPtr.Zero, flags );
	CheckError( result );
}
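
// Sketch (assumption, not the original ProcessWaveOutCallback): the callback
// registered above has to undo what SendNextBuffer set up once winmm reports
// that a buffer finished playing. The delegate signature below follows the
// usual waveOutProc convention, and the UnprepareHeader wrapper name is
// assumed by analogy with PrepareHeader/Write; both may differ in this codebase.
void ProcessWaveOutCallback( IntPtr deviceHandle, uint message, UIntPtr instance,
	IntPtr headerPtr, IntPtr reserved ) {
	const uint WOM_DONE = 0x3BD; // winmm "buffer finished playing" message
	if( message != WOM_DONE ) return;

	// Recover the UserData stored in SendNextBuffer to find out which entry
	// of the buffers array finished, then release its handles.
	WaveHeader header = (WaveHeader)Marshal.PtrToStructure( headerPtr, typeof( WaveHeader ) );
	GCHandle userDataHandle = GCHandle.FromIntPtr( header.UserData );
	UserData userData = (UserData)userDataHandle.Target;

	uint result = UnprepareHeader( handle, ref buffers[userData.Index], (uint)waveHeaderSize );
	CheckError( result );
	userData.BufferHandle.Free(); // unpin the audio data
	userDataHandle.Free();
	// ...after which the next decoded AudioChunk can be queued, e.g. via SendNextBuffer.
}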
public IEnumerable<AudioChunk> StreamData( Stream source ) {
	PrimitiveReader reader = new PrimitiveReader( source );
	reader.BigEndian = true;

	while( true ) {
		#region Frame header
		FlacBitReader bitReader = new FlacBitReader( reader );
		bitReader.BigEndian = true;
		int syncCode = bitReader.ReadBits( 14 );
		if( syncCode != 0x3FFE ) {
			throw new InvalidDataException( "Invalid synchronisation code." );
		}
		int reserved = bitReader.ReadBit();
		bool variableBlockSize = bitReader.ReadBit() != 0;
		int blockSizeFlags = bitReader.ReadBits( 4 );
		int sampleRateFlags = bitReader.ReadBits( 4 );
		int channelAssignment = bitReader.ReadBits( 4 );
		int sampleSizeFlags = bitReader.ReadBits( 3 );
		if( bitReader.ReadBit() != 0 )
			throw new InvalidDataException( "Reserved bit is not 0." );

		// Frame/sample number, stored as a UTF-8 style variable length integer.
		byte[] numberData = ReadRawUTF8Char( reader );
		/*int frameNumber = numberData[0];
		for( int i = 1; i < numberData.Length; i++ ) {
			frameNumber <<= 6;
			frameNumber |= numberData[i];
		}
		Console.WriteLine( frameNumber );*/

		int blockSize = 0;
		if( blockSizeFlags == 0x0 ) {
			throw new InvalidDataException( "0 is reserved for block sizes." );
		} else if( blockSizeFlags == 0x1 ) {
			blockSize = 192;
		} else if( blockSizeFlags >= 0x2 && blockSizeFlags <= 0x5 ) {
			blockSize = 576 * ( 1 << ( blockSizeFlags - 2 ) ); // 576 * 2^(n - 2)
		} else if( blockSizeFlags == 0x6 ) {
			blockSize = reader.ReadByte() + 1;
		} else if( blockSizeFlags == 0x7 ) {
			blockSize = reader.ReadUInt16() + 1;
		} else {
			blockSize = 256 * ( 1 << ( blockSizeFlags - 8 ) ); // 256 * 2^(n - 8)
		}

		int sampleRate = 0;
		if( sampleRateFlags == 0x0 ) {
			sampleRate = metaSampleRate; // take the value from the STREAMINFO block
		} else if( sampleRateFlags >= 0x1 && sampleRateFlags <= 0xB ) {
			sampleRate = sampleRates[sampleRateFlags];
		} else if( sampleRateFlags == 0xC ) {
			sampleRate = reader.ReadByte() * 1000; // stored in kHz
		} else if( sampleRateFlags == 0xD ) {
			sampleRate = reader.ReadUInt16(); // stored in Hz
		} else if( sampleRateFlags == 0xE ) {
			sampleRate = reader.ReadUInt16() * 10; // stored in tens of Hz
		} else {
			throw new InvalidDataException( "Invalid sample rate flag." );
		}

		int bitsPerSample;
		if( sampleSizeFlags == 0 ) {
			bitsPerSample = metaBitsPerSample; // take the value from the STREAMINFO block
		} else if( sampleSizeFlags == 0x3 || sampleSizeFlags == 0x7 ) {
			throw new InvalidDataException( "Sample size is reserved." );
		} else {
			bitsPerSample = bitSampleSizes[sampleSizeFlags];
		}

		int channelsCount;
		ChannelAssignment soundAssignment = (ChannelAssignment)channelAssignment;
		if( channelAssignment < 0x08 ) {
			channelsCount = channelAssignment + 1; // independent channels
		} else if( channelAssignment < 0x0B ) {
			channelsCount = 2; // left/side, right/side or mid/side stereo
		} else {
			throw new InvalidDataException( "Channel assignment values > 1010 are reserved." );
		}

		byte crc8 = reader.ReadByte(); // header CRC, not verified here
		#endregion

		#region Subframe
		int[][] channelsData = new int[channelsCount][];
		for( int i = 0; i < channelsCount; i++ ) {
			if( bitReader.ReadBit() != 0 ) {
				throw new InvalidDataException( "Padding bit should be 0." );
			}
			int[] channelData = null;
			int subframeType = bitReader.ReadBits( 6 );
			bool wastedBitsPerSampleFlag = bitReader.ReadBit() != 0;

			// In the stereo decorrelation modes the side channel carries one extra bit.
			int adjustedBitsPerSample = bitsPerSample;
			switch( soundAssignment ) {
				case ChannelAssignment.LeftSide:
					if( i == 1 ) adjustedBitsPerSample++;
					break;
				case ChannelAssignment.RightSide:
					if( i == 0 ) adjustedBitsPerSample++;
					break;
				case ChannelAssignment.MidSide:
					if( i == 1 ) adjustedBitsPerSample++;
					break;
			}

			int wastedBitsPerSample = 0;
			if( wastedBitsPerSampleFlag ) {
				// Note: the wasted bits count is read but not shifted back into the samples here.
				wastedBitsPerSample = 1 + bitReader.ReadUnary();
			}

			if( subframeType == 0x00 ) {
				channelData = ProcessConstantSubframe( bitReader, adjustedBitsPerSample, blockSize );
			} else if( subframeType == 0x01 ) {
				channelData = ProcessVerbatimSubframe( bitReader, adjustedBitsPerSample, blockSize );
			} else if( ( subframeType & 0x20 ) != 0 ) {
				int order = ( subframeType & 0x1F ) + 1;
				channelData = ProcessLpcSubframe( bitReader, adjustedBitsPerSample, order, blockSize );
			} else if( ( subframeType & 0x08 ) != 0 ) {
				int order = subframeType & 0x07;
				channelData = ProcessFixedSubframe( bitReader, adjustedBitsPerSample, order, blockSize );
			}
			channelsData[i] = channelData;
		}
		bitReader.SkipRemainingBits(); // subframes are bit-packed; the frame footer is byte-aligned
		#endregion

		// Undo the inter-channel decorrelation so both channels hold plain left/right samples.
		switch( soundAssignment ) {
			case ChannelAssignment.LeftSide:
				TransformSamplesLS( channelsData[0], channelsData[1] );
				break;
			case ChannelAssignment.RightSide:
				TransformSamplesSR( channelsData[0], channelsData[1] );
				break;
			case ChannelAssignment.MidSide:
				TransformSamplesMS( channelsData[0], channelsData[1] );
				break;
		}

		// Interleave the channels into little-endian PCM bytes.
		int bytesPerSample = (int)Math.Ceiling( bitsPerSample / 8.0 );
		byte[] data = new byte[channelsCount * bytesPerSample * blockSize];
		bool use8Bits = bitsPerSample <= 8;
		bool use16Bits = bitsPerSample <= 16;
		int offset = 0;
		for( int i = 0; i < blockSize; i++ ) {
			for( int ch = 0; ch < channelsCount; ch++ ) {
				int[] channelData = channelsData[ch];
				if( use8Bits ) {
					data[offset++] = (byte)channelData[i];
				} else if( use16Bits ) {
					ushort sample = (ushort)channelData[i];
					data[offset++] = (byte)sample;
					data[offset++] = (byte)( sample >> 8 );
				}
			}
		}

		// Read frame footer.
		ushort crc16 = reader.ReadUInt16(); // frame CRC, not verified here

		AudioChunk chunk = new AudioChunk();
		chunk.Frequency = sampleRate;
		chunk.Channels = channelsCount;
		chunk.BitsPerSample = bitsPerSample;
		chunk.Data = data;
		yield return chunk;
	}
}
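
// Sketch (assumption, not the original helpers): TransformSamplesLS/SR/MS are
// not shown above, but FLAC's inter-channel decorrelation is fixed by the
// format, so they plausibly look like this. Each rewrites the two decoded
// channels in place so that channelsData[0] ends up as left and channelsData[1]
// as right; the method and parameter names mirror the calls in StreamData.
static void TransformSamplesLS( int[] left, int[] side ) {
	// left/side: channel 1 holds side = left - right, so right = left - side.
	for( int i = 0; i < side.Length; i++ )
		side[i] = left[i] - side[i];
}

static void TransformSamplesSR( int[] side, int[] right ) {
	// side/right: channel 0 holds side = left - right, so left = side + right.
	for( int i = 0; i < side.Length; i++ )
		side[i] = side[i] + right[i];
}

static void TransformSamplesMS( int[] mid, int[] side ) {
	// mid/side: mid = (left + right) >> 1 (one bit dropped), side = left - right.
	// The dropped low bit of the sum equals the low bit of side.
	for( int i = 0; i < mid.Length; i++ ) {
		int m = ( mid[i] << 1 ) | ( side[i] & 1 );
		int s = side[i];
		mid[i] = ( m + s ) >> 1;  // left
		side[i] = ( m - s ) >> 1; // right
	}
}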