static AudioStreamBasicDescription GetOutputDescription(AudioStreamBasicDescription inputDescription)
{
    // Map the source bit depth onto the corresponding ALAC format flag;
    // any other depth is not representable in Apple Lossless.
    var sourceDataFlags = inputDescription.BitsPerChannel switch
    {
        16 => AudioFormatFlags.Alac16BitSourceData,
        20 => AudioFormatFlags.Alac20BitSourceData,
        24 => AudioFormatFlags.Alac24BitSourceData,
        32 => AudioFormatFlags.Alac32BitSourceData,
        _ => throw new AudioUnsupportedException(
            $"ALAC does not support {inputDescription.BitsPerChannel}-bit audio.")
    };

    // ALAC output: 4096 frames per packet; sample rate and channel count
    // are carried over from the source unchanged.
    return new AudioStreamBasicDescription
    {
        SampleRate = inputDescription.SampleRate,
        FramesPerPacket = 4096,
        AudioFormat = AudioFormat.AppleLossless,
        ChannelsPerFrame = inputDescription.ChannelsPerFrame,
        Flags = sourceDataFlags
    };
}
/// <summary>
/// Initializes a new <see cref="ExtendedAudioFile"/> over the given stream, then wraps
/// the base audio file handle so the extended (ExtAudioFile) API can operate on it.
/// </summary>
/// <param name="description">The audio stream description.</param>
/// <param name="fileType">The audio file type.</param>
/// <param name="stream">The stream to read from or write to.</param>
public ExtendedAudioFile(
    AudioStreamBasicDescription description,
    AudioFileType fileType,
    [NotNull] Stream stream)
    : base(description, fileType, stream) =>
    SafeNativeMethods.ExtAudioFileWrapAudioFile(Handle, true, out _handle);
/// <summary>
/// Native binding for AudioToolbox's AudioFileInitializeWithCallbacks. Initializes an
/// audio file of the given <paramref name="fileType"/> whose I/O is serviced by the
/// supplied managed callbacks instead of the file system, returning a handle to it.
/// NOTE(review): the callback delegates passed here must be kept alive by the caller
/// for the lifetime of the handle, or the native side may invoke collected delegates —
/// see the AudioFile constructor, which stores them in fields.
/// </summary>
internal static extern AudioFileStatus AudioFileInitializeWithCallbacks( IntPtr userData, [NotNull] NativeCallbacks.AudioFileReadCallback readCallback, [NotNull] NativeCallbacks.AudioFileWriteCallback writeCallback, [NotNull] NativeCallbacks.AudioFileGetSizeCallback getSizeCallback, [NotNull] NativeCallbacks.AudioFileSetSizeCallback setSizeCallback, AudioFileType fileType, ref AudioStreamBasicDescription description, uint flags, [NotNull] out AudioFileHandle handle);
/// <summary>
/// Initializes a new <see cref="AudioConverter"/> for the given input/output formats.
/// </summary>
/// <param name="inputDescription">The source format description.</param>
/// <param name="outputDescription">The destination format description.</param>
/// <param name="audioFile">The audio file supplying the input data.</param>
internal AudioConverter(ref AudioStreamBasicDescription inputDescription, ref AudioStreamBasicDescription outputDescription, [NotNull] AudioFile audioFile)
{
    // Store the callback delegate in a field (presumably so it outlives the
    // native converter's use of it — pattern matches the other native wrappers).
    _inputCallback = InputCallback;
    _audioFile = audioFile;

    SafeNativeMethods.AudioConverterNew(ref inputDescription, ref outputDescription, out _handle);
}
/// <summary>
/// Initializes a new <see cref="AudioFile"/> for writing to the given stream.
/// </summary>
/// <param name="description">The audio stream description.</param>
/// <param name="fileType">The audio file type.</param>
/// <param name="stream">The stream to write to.</param>
internal AudioFile(AudioStreamBasicDescription description, AudioFileType fileType, [NotNull] Stream stream)
{
    // This constructor is for writing.
    _stream = stream;

    // Keep the callback delegates in fields (presumably so the native layer can
    // keep invoking them for the file's lifetime — TODO confirm against the
    // native API's ownership rules).
    _readCallback = ReadCallback;
    _writeCallback = WriteCallback;
    _getSizeCallback = GetSizeCallback;
    _setSizeCallback = SetSizeCallback;

    SafeNativeMethods.AudioFileInitializeWithCallbacks(
        IntPtr.Zero,
        _readCallback,
        _writeCallback,
        _getSizeCallback,
        _setSizeCallback,
        fileType,
        ref description,
        0,
        out var newHandle);
    Handle = newHandle;
}
/// <summary>
/// Opens the stream as an M4A audio file and prepares a converter for decoding.
/// </summary>
/// <param name="stream">The stream containing Apple Lossless audio.</param>
/// <exception cref="AudioUnsupportedException">
/// Thrown if the stream's data format is not Apple Lossless.
/// </exception>
public void Initialize(Stream stream)
{
    _audioFile = new AudioFile(AudioFileType.M4A, stream);

    var sourceDescription =
        _audioFile.GetProperty<AudioStreamBasicDescription>(AudioFilePropertyId.DataFormat);
    if (sourceDescription.AudioFormat != AudioFormat.AppleLossless)
        throw new AudioUnsupportedException("The stream is not in Apple Lossless format.");

    _outputDescription = GetOutputDescription(sourceDescription);
    _converter = new AudioConverter(ref sourceDescription, ref _outputDescription, _audioFile);
    _magicCookie = GetMagicCookie(_audioFile, _converter);
}
static AudioStreamBasicDescription GetOutputDescription(AudioStreamBasicDescription inputDescription)
{
    // AAC-LC output: 1024 frames per packet, channel count carried over from
    // the source. The sample rate is remapped below to one the codec accepts.
    var description = new AudioStreamBasicDescription
    {
        SampleRate = inputDescription.SampleRate,
        FramesPerPacket = 1024,
        AudioFormat = AudioFormat.AacLowComplexity,
        ChannelsPerFrame = inputDescription.ChannelsPerFrame
    };

    switch (inputDescription.SampleRate)
    {
        // 48 kHz family (rates marked "conversion required" are resampled)
        case 192000:
        case 144000:
        case 128000: // conversion required
        case 96000:
        case 64000: // conversion required
        case 48000:
            description.SampleRate = 48000;
            break;

        // 44.1 kHz family
        case 176400:
        case 88200:
        case 44100:
        case 37800: // conversion required
        case 36000: // conversion required
            description.SampleRate = 44100;
            break;

        case 32000:
        case 28000: // conversion required
            description.SampleRate = 32000;
            break;

        case 22050:
        case 18900: // conversion required
            description.SampleRate = 22050;
            break;

        default:
            throw new AudioUnsupportedException(
                $"Apple AAC does not support a {inputDescription.SampleRate} Hz sample rate.");
    }

    return description;
}
static AudioStreamBasicDescription GetOutputDescription(AudioStreamBasicDescription inputDescription)
{
    // Recover the source bit depth from the ALAC format flags; anything else
    // is not a recognized ALAC stream.
    // ReSharper disable once SwitchExpressionHandlesSomeKnownEnumValuesWithExceptionInDefault
    var bitDepth = inputDescription.Flags switch
    {
        AudioFormatFlags.Alac16BitSourceData => 16u,
        AudioFormatFlags.Alac20BitSourceData => 20u,
        AudioFormatFlags.Alac24BitSourceData => 24u,
        AudioFormatFlags.Alac32BitSourceData => 32u,
        _ => throw new AudioUnsupportedException("Unknown audio format.")
    };

    // Decoded output: signed, high-aligned integer PCM in 32-bit containers.
    return new AudioStreamBasicDescription
    {
        AudioFormat = AudioFormat.LinearPcm,
        Flags = AudioFormatFlags.PcmIsSignedInteger | AudioFormatFlags.PcmIsAlignedHigh,
        BytesPerPacket = sizeof(int) * inputDescription.ChannelsPerFrame,
        FramesPerPacket = 1,
        BytesPerFrame = sizeof(int) * inputDescription.ChannelsPerFrame,
        ChannelsPerFrame = inputDescription.ChannelsPerFrame,
        BitsPerChannel = bitDepth,
        SampleRate = inputDescription.SampleRate
    };
}
static AudioStreamBasicDescription GetOutputDescription(AudioStreamBasicDescription inputDescription)
{
    // Recover the source bit depth from the ALAC format flags; anything else
    // is not a recognized ALAC stream.
    uint bitDepth;
    // ReSharper disable once SwitchStatementMissingSomeCases
    switch (inputDescription.Flags)
    {
        case AudioFormatFlags.Alac16BitSourceData:
            bitDepth = 16u;
            break;
        case AudioFormatFlags.Alac20BitSourceData:
            bitDepth = 20u;
            break;
        case AudioFormatFlags.Alac24BitSourceData:
            bitDepth = 24u;
            break;
        case AudioFormatFlags.Alac32BitSourceData:
            bitDepth = 32u;
            break;
        default:
            throw new AudioUnsupportedException("Unknown audio format.");
    }

    // Decoded output: signed, high-aligned integer PCM in 32-bit containers.
    return new AudioStreamBasicDescription
    {
        AudioFormat = AudioFormat.LinearPcm,
        Flags = AudioFormatFlags.PcmIsSignedInteger | AudioFormatFlags.PcmIsAlignedHigh,
        BytesPerPacket = sizeof(int) * inputDescription.ChannelsPerFrame,
        FramesPerPacket = 1,
        BytesPerFrame = sizeof(int) * inputDescription.ChannelsPerFrame,
        ChannelsPerFrame = inputDescription.ChannelsPerFrame,
        BitsPerChannel = bitDepth,
        SampleRate = inputDescription.SampleRate
    };
}
/// <summary>
/// Native binding for AudioToolbox's AudioConverterNew. Creates a converter between the
/// given source and destination stream formats, returning a handle to it along with a
/// status code indicating success or failure.
/// </summary>
internal static extern AudioConverterStatus AudioConverterNew( ref AudioStreamBasicDescription sourceFormat, ref AudioStreamBasicDescription destinationFormat, [NotNull] out AudioConverterHandle handle);
public ExtendedAudioFile( AudioStreamBasicDescription description, AudioFileType fileType, [NotNull] Stream stream) : base(description, fileType, stream) =>