/// <inheritdoc />
public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
{
    if (output == null) output = new AudioBlock();
    if (input is AudioFrame == false || output is AudioBlock == false)
        throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");

    var source = (AudioFrame)input;
    var target = (AudioBlock)output;

    // Create the source and target audio specs. We might need to scale from
    // the source to the target.
    var sourceSpec = FFAudioParams.CreateSource(source.Pointer);
    var targetSpec = FFAudioParams.CreateTarget(source.Pointer);

    // Initialize or update the audio scaler if required
    if (Scaler == null || LastSourceSpec == null || FFAudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
    {
        Scaler = ffmpeg.swr_alloc_set_opts(
            Scaler,
            targetSpec.ChannelLayout,
            targetSpec.Format,
            targetSpec.SampleRate,
            sourceSpec.ChannelLayout,
            sourceSpec.Format,
            sourceSpec.SampleRate,
            0,
            null);

        RC.Current.Add(Scaler);
        ffmpeg.swr_init(Scaler);
        LastSourceSpec = sourceSpec;
    }

    // Allocate the unmanaged output buffer and convert to stereo.
    int outputSamplesPerChannel;
    if (target.Allocate(targetSpec.BufferLength) && target.TryAcquireWriterLock(out var writeLock))
    {
        using (writeLock)
        {
            var outputBufferPtr = (byte*)target.Buffer;

            // Execute the conversion (audio scaling). It will return the number of samples that were output.
            outputSamplesPerChannel = ffmpeg.swr_convert(
                Scaler,
                &outputBufferPtr,
                targetSpec.SamplesPerChannel,
                source.Pointer->extended_data,
                source.Pointer->nb_samples);
        }
    }
    else
    {
        return false;
    }

    // Compute the buffer length
    var outputBufferLength = ffmpeg.av_samples_get_buffer_size(
        null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

    // Flag the block if we have to
    target.IsStartTimeGuessed = source.HasValidStartTime == false;

    // Try to fix the start time, duration and end time if we don't have valid data
    if (source.HasValidStartTime == false && previousBlock != null)
    {
        // Get timing information from the previous block
        target.StartTime = TimeSpan.FromTicks(previousBlock.EndTime.Ticks + 1);
        target.Duration = source.Duration.Ticks > 0 ? source.Duration : previousBlock.Duration;
        target.EndTime = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
    }
    else
    {
        // We set the target properties directly from the source
        target.StartTime = source.StartTime;
        target.Duration = source.Duration;
        target.EndTime = source.EndTime;
    }

    target.CompressedSize = source.CompressedSize;
    target.SamplesBufferLength = outputBufferLength;
    target.ChannelCount = targetSpec.ChannelCount;
    target.SampleRate = targetSpec.SampleRate;
    target.SamplesPerChannel = outputSamplesPerChannel;
    target.StreamIndex = input.StreamIndex;

    return true;
}
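// Illustrative sketch (not part of the component): shows how the worst-case output
// sample count and byte length for the resampler above could be estimated before
// calling swr_convert, using standard FFmpeg calls. The helper name and parameters
// are assumptions; FFAudioParams already encapsulates equivalent sizing via
// targetSpec.SamplesPerChannel and targetSpec.BufferLength.
private static unsafe int EstimateOutputBufferSize(SwrContext* scaler, AVFrame* sourceFrame, int targetSampleRate, int targetChannelCount, AVSampleFormat targetFormat)
{
    // Samples still buffered inside the resampler plus the rescaled input samples.
    var delay = ffmpeg.swr_get_delay(scaler, sourceFrame->sample_rate);
    var maxOutputSamples = (int)ffmpeg.av_rescale_rnd(
        delay + sourceFrame->nb_samples,
        targetSampleRate,
        sourceFrame->sample_rate,
        AVRounding.AV_ROUND_UP);

    // Convert the sample count into a tightly packed byte count (align = 1).
    return ffmpeg.av_samples_get_buffer_size(
        null, targetChannelCount, maxOutputSamples, targetFormat, 1);
}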
/// <inheritdoc />
public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
{
    if (output == null) output = new SubtitleBlock();
    if (input is SubtitleFrame == false || output is SubtitleBlock == false)
        throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");

    var source = (SubtitleFrame)input;
    var target = (SubtitleBlock)output;

    // Set the target data
    target.EndTime = source.EndTime;
    target.StartTime = source.StartTime;
    target.Duration = source.Duration;
    target.StreamIndex = input.StreamIndex;

    // Process time offsets
    if (Delay != TimeSpan.Zero)
    {
        target.StartTime = TimeSpan.FromTicks(target.StartTime.Ticks + Delay.Ticks);
        target.EndTime = TimeSpan.FromTicks(target.EndTime.Ticks + Delay.Ticks);
        target.Duration = TimeSpan.FromTicks(target.EndTime.Ticks - target.StartTime.Ticks);
    }

    target.OriginalText.Clear();
    if (source.Text.Count > 0)
    {
        foreach (var t in source.Text)
            target.OriginalText.Add(t);
    }

    target.OriginalTextType = source.TextType;

    target.Text.Clear();
    foreach (var text in source.Text)
    {
        if (string.IsNullOrWhiteSpace(text))
            continue;

        if (source.TextType == AVSubtitleType.SUBTITLE_ASS)
        {
            var strippedText = StripAssFormat(text);
            if (string.IsNullOrWhiteSpace(strippedText) == false)
                target.Text.Add(strippedText);
        }
        else
        {
            var strippedText = StripSrtFormat(text);
            if (string.IsNullOrWhiteSpace(strippedText) == false)
                target.Text.Add(strippedText);
        }
    }

    // TODO: CompressedSize is just an estimate.
    // It would be better if we counted chars in all text lines.
    target.CompressedSize = source.CompressedSize;

    return true;
}
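// Illustrative sketch (not the library's StripAssFormat): removes inline ASS override
// tags such as {\b1} or {\pos(10,10)} and normalizes the \N / \n / \h escapes, which
// is the kind of cleanup the call above is expected to perform before the plain text
// is added to the block. The method name and regex are assumptions for illustration.
private static string StripAssOverrideTags(string text)
{
    if (string.IsNullOrWhiteSpace(text)) return string.Empty;

    // Remove {\...} override blocks, then flatten ASS line-break and hard-space escapes.
    var withoutTags = System.Text.RegularExpressions.Regex.Replace(text, @"\{[^}]*\}", string.Empty);
    return withoutTags
        .Replace("\\N", " ")
        .Replace("\\n", " ")
        .Replace("\\h", " ")
        .Trim();
}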
/// <inheritdoc />
public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
{
    if (output == null) output = new VideoBlock();
    if (input is VideoFrame == false || output is VideoBlock == false)
        throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");

    var source = (VideoFrame)input;
    var target = (VideoBlock)output;

    // Retrieve a suitable scaler or create it on the fly
    var newScaler = ffmpeg.sws_getCachedContext(
        Scaler,
        source.Pointer->width,
        source.Pointer->height,
        NormalizePixelFormat(source.Pointer),
        source.Pointer->width,
        source.Pointer->height,
        Constants.VideoPixelFormat,
        ScalerFlags,
        null,
        null,
        null);

    // If it's the first time we set the scaler, simply assign it.
    if (Scaler == null)
    {
        Scaler = newScaler;
        RC.Current.Add(Scaler);
    }

    // Reassign to the new scaler and remove the reference to the existing one.
    // The get cached context function automatically frees the existing scaler.
    if (Scaler != newScaler)
    {
        RC.Current.Remove(Scaler);
        Scaler = newScaler;
    }

    // Perform scaling and save the data to our unmanaged buffer pointer
    if (target.Allocate(source, Constants.VideoPixelFormat) && target.TryAcquireWriterLock(out var writeLock))
    {
        using (writeLock)
        {
            var targetStride = new[] { target.PictureBufferStride };
            var targetScan = default(byte_ptrArray8);
            targetScan[0] = (byte*)target.Buffer;

            // The scaling is done here
            var outputHeight = ffmpeg.sws_scale(
                Scaler,
                source.Pointer->data,
                source.Pointer->linesize,
                0,
                source.Pointer->height,
                targetScan,
                targetStride);

            if (outputHeight <= 0)
                return false;
        }
    }
    else
    {
        return false;
    }

    // After scaling, we need to copy and guess some of the block properties.
    // Flag the block if we have to
    target.IsStartTimeGuessed = source.HasValidStartTime == false;

    // Try to fix the start time, duration and end time if we don't have valid data
    if (source.HasValidStartTime == false && previousBlock != null)
    {
        // Get timing information from the previous block
        target.StartTime = TimeSpan.FromTicks(previousBlock.EndTime.Ticks + 1);
        target.Duration = source.Duration.Ticks > 0 ? source.Duration : previousBlock.Duration;
        target.EndTime = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);

        // Guess picture number and SMTPE
        var frameRate = ffmpeg.av_guess_frame_rate(Container.InputContext, Stream, source.Pointer);
        target.DisplayPictureNumber = Utilities.ComputePictureNumber(StartTime, target.StartTime, frameRate);
        target.SmtpeTimeCode = Utilities.ComputeSmtpeTimeCode(target.DisplayPictureNumber, frameRate);
    }
    else
    {
        // We set the target properties directly from the source
        target.StartTime = source.StartTime;
        target.Duration = source.Duration;
        target.EndTime = source.EndTime;

        // Copy picture number and SMTPE
        target.DisplayPictureNumber = source.DisplayPictureNumber;
        target.SmtpeTimeCode = source.SmtpeTimeCode;
    }

    // Fill out other properties
    target.IsHardwareFrame = source.IsHardwareFrame;
    target.HardwareAcceleratorName = source.HardwareAcceleratorName;
    target.CompressedSize = source.CompressedSize;
    target.CodedPictureNumber = source.CodedPictureNumber;
    target.StreamIndex = source.StreamIndex;
    target.ClosedCaptions = source.ClosedCaptions.ToList();

    // Update the stream info object if we get Closed Caption Data
    if (StreamInfo.HasClosedCaptions == false && target.ClosedCaptions.Count > 0)
        StreamInfo.HasClosedCaptions = true;

    // Process the aspect ratio
    var aspectRatio = ffmpeg.av_guess_sample_aspect_ratio(Container.InputContext, Stream, source.Pointer);
    if (aspectRatio.num == 0 || aspectRatio.den == 0)
    {
        target.PixelAspectWidth = 1;
        target.PixelAspectHeight = 1;
    }
    else
    {
        target.PixelAspectWidth = aspectRatio.num;
        target.PixelAspectHeight = aspectRatio.den;
    }

    return true;
}
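// Illustrative sketch (not part of the component): derives the display aspect ratio
// from the sample (pixel) aspect ratio guessed above. The helper name and parameters
// are assumptions; it only demonstrates the relationship
// DAR = (width * sar.num) : (height * sar.den), reduced via av_reduce.
private static unsafe AVRational ComputeDisplayAspectRatio(int width, int height, AVRational sampleAspectRatio)
{
    // Treat an unknown SAR (0/0) as square pixels, mirroring the fallback above.
    if (sampleAspectRatio.num == 0 || sampleAspectRatio.den == 0)
        sampleAspectRatio = new AVRational { num = 1, den = 1 };

    int darNum;
    int darDen;
    ffmpeg.av_reduce(
        &darNum,
        &darDen,
        (long)width * sampleAspectRatio.num,
        (long)height * sampleAspectRatio.den,
        1024 * 1024);

    return new AVRational { num = darNum, den = darDen };
}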