/// <summary>
/// Creates a codec that streams <paramref name="dataSize"/> bytes of encoded
/// audio from <paramref name="reader"/> through <paramref name="transformer"/>,
/// decoding from <paramref name="codecBitsPerSample"/>-bit source samples to
/// <paramref name="bitsPerSample"/>-bit output samples.
/// </summary>
public TransformerCodec( uint dataSize, PrimitiveReader reader, int frequency, int channels, int bitsPerSample, Transformer transformer, int codecBitsPerSample) {
	// Source-side state: where the encoded bytes come from and how to decode them.
	this.dataSize = dataSize;
	this.reader = reader;
	this.transformer = transformer;
	this.codecBitsPerSample = codecBitsPerSample;

	// Output-side state: describes the decoded audio handed to playback.
	info = new AudioChunk();
	info.Frequency = frequency;
	info.Channels = channels;
	info.BitsPerSample = bitsPerSample;

	// One second's worth of *encoded* data (codec bits, not output bits).
	bufferSize = (int)( frequency * channels * codecBitsPerSample / 8 );
}
/// <summary>
/// Reads and validates the Sun/NeXT AU (".snd") header, fills in the
/// metadata dictionary, and selects the transformer that decodes the stored
/// samples to PCM the player can use.
/// </summary>
/// <exception cref="InvalidDataException"> The magic bytes are not ".snd". </exception>
/// <exception cref="NotSupportedException"> The AU encoding has no decoder. </exception>
public override void ReadMetadata() {
	reader = new PrimitiveReader( stream );
	string signature = reader.ReadASCIIString( 4 );
	if( signature != ".snd" ) {
		throw new InvalidDataException( "Invalid initial signature." );
	}
	// All AU header fields after the 4-byte magic are big-endian.
	reader.BigEndian = true;
	uint dataOffset = reader.ReadUInt32();
	dataLength = reader.ReadUInt32();
	// 0xFFFFFFFF is the AU sentinel for "data size unknown"; fall back to
	// measuring from the data offset to the end of the stream.
	if( dataLength == 0xFFFFFFFF ) {
		dataLength = (uint)( reader.Length - dataOffset );
	}
	AuEncoding encoding = (AuEncoding)reader.ReadUInt32();
	Metadata["AU encoding"] = encoding.ToString();
	freq = reader.ReadInt32();
	Metadata[MetadataKeys.SampleRate] = freq.ToString();
	channels = reader.ReadInt32();
	Metadata[MetadataKeys.Channels] = channels.ToString();
	// The fixed header is 24 bytes; any gap between it and the data offset
	// is an optional ASCII annotation.
	if( dataOffset > 24 ) {
		int infoLength = (int)( dataOffset - 24 );
		string info = reader.ReadASCIIString( infoLength );
		Metadata["File comment"] = info;
	}
	// Pick the decoder. bitsPerSample is the *output* sample size,
	// codecBitsPerSample the on-disk sample size.
	transformer = EmptyTransformer.Instance;
	switch( encoding ) {
		case AuEncoding.Int8G711uLaw:
			// G.711 mu-law: 8-bit companded -> 16-bit linear.
			transformer = MuLawTransformer.Instance;
			bitsPerSample = 16;
			codecBitsPerSample = 8;
			break;
		case AuEncoding.Int8LinearPcm:
			// Single-byte samples need no endian fix-up.
			bitsPerSample = codecBitsPerSample = 8;
			break;
		case AuEncoding.Int16LinearPcm:
			bitsPerSample = codecBitsPerSample = 16;
			transformer = BigEndian16BitTo16BitTransformer.Instance;
			break;
		case AuEncoding.Int24LinearPcm:
			// Wider-than-16-bit formats are downconverted to 16-bit output.
			bitsPerSample = 16;
			codecBitsPerSample = 24;
			transformer = BigEndian24BitTo16BitTransformer.Instance;
			break;
		case AuEncoding.Int32LinearPcm:
			bitsPerSample = 16;
			codecBitsPerSample = 32;
			transformer = BigEndian32BitTo16BitTransformer.Instance;
			break;
		case AuEncoding.Float32LinearPcm:
			bitsPerSample = 16;
			codecBitsPerSample = 32;
			transformer = BigEndianFloat32To16BitTransformer.Instance;
			break;
		case AuEncoding.Float64LinearPcm:
			bitsPerSample = 16;
			codecBitsPerSample = 64;
			transformer = BigEndianFloat64To16BitTransformer.Instance;
			break;
		case AuEncoding.Int8G711ALaw:
			// G.711 A-law: 8-bit companded -> 16-bit linear.
			transformer = ALawTransformer.Instance;
			bitsPerSample = 16;
			codecBitsPerSample = 8;
			break;
		default:
			throw new NotSupportedException( "Unsupported audio format: " + encoding );
	}
	Metadata[MetadataKeys.BitsPerSample] = bitsPerSample.ToString();
}
/// <summary>
/// Parses a RIFF/WAVE "fmt " chunk: records the format description in the
/// metadata dictionary and selects the transformer used to decode the
/// sample data.
/// </summary>
/// <param name="chunk"> Header of the chunk (supplies the data size). </param>
/// <param name="reader"> Reader positioned at the start of the chunk data. </param>
/// <exception cref="NotSupportedException"> The (sub)format has no decoder. </exception>
void ProcessFormatChunk( RiffChunkHeader chunk, PrimitiveReader reader ) {
	// Parse from an in-memory copy of the chunk so field reads can't
	// disturb the position of the real stream.
	byte[] chunkData = reader.ReadBytes( chunk.DataSize );
	Stream source = reader.stream;
	reader.stream = new MemoryStream( chunkData );
	// BUGFIX: the original stream was previously only restored on the
	// success path; any NotSupportedException below left the reader
	// pointing at the throwaway MemoryStream. try/finally fixes that.
	try {
		int audioFormat = reader.ReadUInt16();
		Metadata["WAVE audio format"] = ((AudioFormat)audioFormat).ToString();
		int channels = reader.ReadInt16();
		Metadata[MetadataKeys.Channels] = channels.ToString();
		int sampleRate = reader.ReadInt32();
		Metadata[MetadataKeys.SampleRate] = sampleRate.ToString();
		// Byte rate and block align are consumed to keep the stream
		// position correct; their values are not used here.
		int byteRate = reader.ReadInt32();
		int blockAlign = reader.ReadInt16();
		int bitsPerSample = reader.ReadInt16();
		Metadata[MetadataKeys.BitsPerSample] = bitsPerSample.ToString();
		int extraInfoSize = 0;
		#pragma warning disable 0618
		// Usually, only very old wave files don't have this field included.
		// (WaveFormat structure, not WaveFormatEx structure)
		// Suppress the warning because it's a MemoryStream.
		if( reader.Position != reader.Length ) {
			extraInfoSize = reader.ReadUInt16();
		}
		#pragma warning restore 0618

		this.freq = sampleRate;
		transformer = EmptyTransformer.Instance;
		this.channels = channels;
		this.bitsPerSample = bitsPerSample;
		codecBitsPerSample = bitsPerSample;
		switch( (AudioFormat)audioFormat ) {
			case AudioFormat.Pcm:
				// No compression; only byte order may need fixing up.
				if( bitsPerSample == 16 && reader.BigEndian ) {
					transformer = BigEndian16BitTo16BitTransformer.Instance;
				}
				break;
			case AudioFormat.IeeeFloat:
				// BUGFIX: this case always threw, but first performed dead
				// bitsPerSample assignments and ended with an unreachable
				// break; the dead code is removed and a message added.
				throw new NotSupportedException( "Ieee float format not supported." );
			case AudioFormat.ALaw:
				transformer = ALawTransformer.Instance;
				this.bitsPerSample = 16;
				break;
			case AudioFormat.MuLaw:
				transformer = MuLawTransformer.Instance;
				this.bitsPerSample = 16;
				break;
			// TODO: Properly test this transformer, but I can't seem to find any wave files that use this format.
			case AudioFormat.DialogicOkiAdpcm:
				transformer = DialogicAdpcmTransformer.Instance;
				this.bitsPerSample = 16;
				break;
			// TODO: Test this transformer too.
			case AudioFormat.ImaAdpcm:
				transformer = ImaAdpcmTransformer.Instance;
				this.bitsPerSample = 16;
				break;
			case AudioFormat.Extensible:
				// WAVEFORMATEXTENSIBLE: actual format is in the sub-format GUID.
				ushort validBitsPerSample = reader.ReadUInt16();
				uint channelMask = reader.ReadUInt32();
				Guid subFormat = reader.ReadGuid();
				Metadata["extensible guid"] = subFormat.ToString();
				if( subFormat == PcmGuid ) {
					// Plain PCM: EmptyTransformer selected above is correct.
				} else if( subFormat == IeeeFloatGuid ) {
					throw new NotSupportedException( "Ieee float sub format not supported." );
				} else if( subFormat == DrmGuid ) {
					throw new NotSupportedException( "Drm sub format not supported." );
				} else if( subFormat == AlawGuid ) {
					transformer = ALawTransformer.Instance;
					this.bitsPerSample = 16;
				} else if( subFormat == MulawGuid ) {
					transformer = MuLawTransformer.Instance;
					this.bitsPerSample = 16;
				} else if( subFormat == AdpcmGuid ) {
					throw new NotSupportedException( "Adpcm sub format not supported." );
				} else if( subFormat == MpegGuid ) {
					throw new NotSupportedException( "Mpeg sub format not supported." );
				} else {
					throw new NotSupportedException( "Unsupported sub format: " + subFormat );
				}
				break;
			default:
				throw new NotSupportedException( "Unsupported audio format: " + (AudioFormat)audioFormat );
		}
	} finally {
		// Always hand the caller's stream back, even on the throw paths.
		reader.stream = source;
	}
}
/// <summary>
/// Parses an AIFF/AIFC "COMM" chunk: channel count, frame count, sample
/// size and the 80-bit extended sample rate, then selects the transformer
/// that decodes the (big-endian, possibly companded) sample data.
/// </summary>
/// <param name="chunk"> Header of the chunk (supplies the data size). </param>
/// <param name="reader"> Reader positioned at the start of the chunk data. </param>
void ProcessCommonChunk( AiffChunkHeader chunk, PrimitiveReader reader ) {
	// Parse from an in-memory copy of the chunk so field reads can't
	// disturb the position of the real stream.
	byte[] chunkData = reader.ReadBytes( chunk.DataSize );
	Stream source = reader.stream;
	reader.stream = new MemoryStream( chunkData );
	// BUGFIX: restore the original stream via try/finally so a failed read
	// doesn't leave the reader pointing at the throwaway MemoryStream.
	try {
		int channelsCount = reader.ReadInt16();
		uint frameCount = reader.ReadUInt32();
		int bitsPerSample = reader.ReadInt16();
		// AIFF stores the sample rate as an 80-bit IEEE 754 extended float.
		byte[] sampleRateBytes = reader.ReadBytes( 10 );
		double sampleRate = ConvertFromIeeeExtended( sampleRateBytes );
		// BUGFIX: removed two leftover Console.WriteLine debug statements
		// that dumped the sample rate and bits per sample to the console.
		AppendInfoLine( 2, "Channels Count: {0}", channelsCount );
		AppendInfoLine( 2, "Sample rate (frames/sec): {0}", sampleRate );
		AppendInfoLine( 2, "Bits per sample: {0}", bitsPerSample );
		AppendInfoLine( 2, "Frame count: {0}", frameCount );
		int byteRate = (int)Math.Ceiling( sampleRate ) * channelsCount * bitsPerSample / 8;
		info.Frequency = (int)sampleRate;
		// AIFF sample data is big-endian; pick a byte-swapping (and, for
		// 24-bit, downconverting) transformer for multi-byte samples.
		transformer = EmptyTransformer.Instance;
		if( bitsPerSample > 8 && bitsPerSample <= 16 ) {
			transformer = BigEndian16BitTo16BitTransformer.Instance;
		}
		if( bitsPerSample > 16 && bitsPerSample <= 24 ) {
			transformer = BigEndian24BitTo16BitTransformer.Instance;
		}
		// Number of bytes that make up a second's worth of audio data.
		bufferSize = byteRate;
		info.Channels = channelsCount;
		actualBitsPerSample = bitsPerSample;
		// TODO: Remove this hackery.
		if( bitsPerSample > 16 ) bitsPerSample = 16;
		info.BitsPerSample = bitsPerSample;
		if( aifcFormat ) {
			// AIFC appends a compression type id and a Pascal-style name.
			string compressionType = reader.ReadASCIIString( 4 );
			string compressionName = reader.ReadASCIIString( reader.ReadByte() );
			AppendInfoLine( 2, "Compression type: {0}", compressionType );
			AppendInfoLine( 2, "Compression name: {0}", compressionName );
			switch( compressionType ) {
				case "NONE":
				case "sowt":
					// NOTE(review): "sowt" marks *little-endian* PCM, so the
					// big-endian transformer chosen above looks wrong for it -
					// verify against an AIFC "sowt" file before changing.
					break;
				case "alaw":
				case "ALAW":
					info.BitsPerSample = 16;
					transformer = ALawTransformer.Instance;
					break;
				case "ulaw":
				case "ULAW":
					info.BitsPerSample = 16;
					transformer = MuLawTransformer.Instance;
					break;
			}
		}
	} finally {
		reader.stream = source;
	}
}