/// <inheritdoc />
public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
{
    if (output == null) output = new AudioBlock();
    if (input is AudioFrame == false || output is AudioBlock == false)
    {
        // Use the (paramName, message) overload: the single-string ArgumentNullException
        // constructor treats its argument as the parameter name, not the message, so the
        // original message text was never surfaced to the caller.
        throw new ArgumentNullException(
            $"{nameof(input)}/{nameof(output)}",
            $"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
    }

    var source = (AudioFrame)input;
    var target = (AudioBlock)output;

    // Create the source and target audio specs. We might need to scale from
    // the source to the target.
    var sourceSpec = FFAudioParams.CreateSource(source.Pointer);
    var targetSpec = FFAudioParams.CreateTarget(source.Pointer);

    // Initialize or update the audio scaler if required
    if (Scaler == null || LastSourceSpec == null ||
        FFAudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
    {
        Scaler = ffmpeg.swr_alloc_set_opts(
            Scaler,
            targetSpec.ChannelLayout, targetSpec.Format, targetSpec.SampleRate,
            sourceSpec.ChannelLayout, sourceSpec.Format, sourceSpec.SampleRate,
            0, null);

        RC.Current.Add(Scaler);
        ffmpeg.swr_init(Scaler);
        LastSourceSpec = sourceSpec;
    }

    // Allocate the unmanaged output buffer and perform the sample conversion.
    int outputSamplesPerChannel;
    if (target.Allocate(targetSpec.BufferLength) && target.TryAcquireWriterLock(out var writeLock))
    {
        using (writeLock)
        {
            var outputBufferPtr = (byte*)target.Buffer;

            // Execute the conversion (audio scaling).
            // It will return the number of samples that were output.
            outputSamplesPerChannel = ffmpeg.swr_convert(
                Scaler, &outputBufferPtr, targetSpec.SamplesPerChannel,
                source.Pointer->extended_data, source.Pointer->nb_samples);
        }
    }
    else
    {
        return false;
    }

    // swr_convert returns a negative AVERROR code on failure. Feeding that value into
    // av_samples_get_buffer_size (and into the block metadata below) would yield a
    // negative buffer length, so signal a failed materialization instead.
    if (outputSamplesPerChannel < 0)
        return false;

    // Compute the buffer length
    var outputBufferLength = ffmpeg.av_samples_get_buffer_size(
        null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

    // Flag the block if we have to
    target.IsStartTimeGuessed = source.HasValidStartTime == false;

    // Try to fix the start time, duration and end time if we don't have valid data
    if (source.HasValidStartTime == false && previousBlock != null)
    {
        // Get timing information from the previous block
        target.StartTime = TimeSpan.FromTicks(previousBlock.EndTime.Ticks + 1);
        target.Duration = source.Duration.Ticks > 0 ? source.Duration : previousBlock.Duration;
        target.EndTime = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
    }
    else
    {
        // We set the target properties directly from the source
        target.StartTime = source.StartTime;
        target.Duration = source.Duration;
        target.EndTime = source.EndTime;
    }

    target.CompressedSize = source.CompressedSize;
    target.SamplesBufferLength = outputBufferLength;
    target.ChannelCount = targetSpec.ChannelCount;
    target.SampleRate = targetSpec.SampleRate;
    target.SamplesPerChannel = outputSamplesPerChannel;
    target.StreamIndex = input.StreamIndex;

    return true;
}
/// <summary>
/// Converts decoded, raw frame data in the frame source into a usable frame. <br />
/// The process includes performing picture, samples or text conversions
/// so that the decoded source frame data is easily usable in multimedia applications.
/// </summary>
/// <param name="input">The source frame to use as an input.</param>
/// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
/// <param name="siblings">The sibling blocks that may help guess some additional parameters for the input frame.</param>
/// <returns>
/// Return the updated output frame
/// </returns>
/// <exception cref="ArgumentNullException">input</exception>
public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output, List<MediaBlock> siblings)
{
    if (output == null) output = new AudioBlock();
    var source = input as AudioFrame;
    var target = output as AudioBlock;

    if (source == null || target == null)
    {
        // Use the (paramName, message) overload: the single-string ArgumentNullException
        // constructor treats its argument as the parameter name, not the message, so the
        // original message text was never surfaced to the caller.
        throw new ArgumentNullException(
            $"{nameof(input)}/{nameof(output)}",
            $"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
    }

    // Create the source and target audio specs. We might need to scale from
    // the source to the target.
    var sourceSpec = FFAudioParams.CreateSource(source.Pointer);
    var targetSpec = FFAudioParams.CreateTarget(source.Pointer);

    // Initialize or update the audio scaler if required
    if (Scaler == null || LastSourceSpec == null ||
        FFAudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
    {
        Scaler = ffmpeg.swr_alloc_set_opts(
            Scaler,
            targetSpec.ChannelLayout, targetSpec.Format, targetSpec.SampleRate,
            sourceSpec.ChannelLayout, sourceSpec.Format, sourceSpec.SampleRate,
            0, null);

        RC.Current.Add(Scaler, $"109: {nameof(AudioComponent)}.{nameof(MaterializeFrame)}()");
        ffmpeg.swr_init(Scaler);
        LastSourceSpec = sourceSpec;
    }

    // Allocate (or re-allocate) the unmanaged output buffer when the required size changes
    if (target.AudioBufferLength != targetSpec.BufferLength)
    {
        if (target.AudioBuffer != IntPtr.Zero)
            Marshal.FreeHGlobal(target.AudioBuffer);

        target.AudioBufferLength = targetSpec.BufferLength;
        target.AudioBuffer = Marshal.AllocHGlobal(targetSpec.BufferLength);
    }

    var outputBufferPtr = (byte*)target.AudioBuffer;

    // Execute the conversion (audio scaling). It will return the number of samples that were output.
    var outputSamplesPerChannel = ffmpeg.swr_convert(
        Scaler, &outputBufferPtr, targetSpec.SamplesPerChannel,
        source.Pointer->extended_data, source.Pointer->nb_samples);

    // swr_convert returns a negative AVERROR code on failure. Clamp to 0 so the buffer
    // length computation below cannot go negative; the block then carries 0 samples.
    if (outputSamplesPerChannel < 0)
        outputSamplesPerChannel = 0;

    // Compute the buffer length
    var outputBufferLength = ffmpeg.av_samples_get_buffer_size(
        null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

    // Flag the block if we have to
    target.IsStartTimeGuessed = source.HasValidStartTime == false;

    // Try to fix the start time, duration and end time if we don't have valid data
    if (source.HasValidStartTime == false && siblings != null && siblings.Count > 0)
    {
        // Get timing information from the last sibling
        var lastSibling = siblings[siblings.Count - 1];

        // We set the target properties
        target.StartTime = lastSibling.EndTime;
        target.Duration = source.Duration.Ticks > 0 ? source.Duration : lastSibling.Duration;
        target.EndTime = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
    }
    else
    {
        // We set the target properties directly from the source
        target.StartTime = source.StartTime;
        target.Duration = source.Duration;
        target.EndTime = source.EndTime;
    }

    target.BufferLength = outputBufferLength;
    target.ChannelCount = targetSpec.ChannelCount;
    target.SampleRate = targetSpec.SampleRate;
    target.SamplesPerChannel = outputSamplesPerChannel;
    target.StreamIndex = input.StreamIndex;

    return target;
}