/// <summary>
/// Initializes a new instance of the <see cref="MediaLogMessage" /> class.
/// </summary>
/// <param name="loggingHandler">The object that shall handle the message when it is output by the queue.</param>
/// <param name="messageType">Type of the message.</param>
/// <param name="messageText">The message text.</param>
/// <param name="aspectName">Name of the code aspect the message came from.</param>
internal MediaLogMessage(ILoggingHandler loggingHandler, MediaLogMessageType messageType, string messageText, string aspectName)
{
    // Capture the timestamp at construction so it reflects when the message
    // was produced rather than when the queue eventually outputs it.
    TimestampUtc = DateTime.UtcNow;
    Handler = loggingHandler;
    MessageType = messageType;
    Message = messageText;
    AspectName = aspectName;
}
/// <summary>
/// Enqueues a log message for asynchronous output by the logging queue.
/// </summary>
/// <param name="loggingHandler">The logging handler that will receive the message.</param>
/// <param name="messageType">Type of the message.</param>
/// <param name="aspectName">Name of the code aspect where the message is coming from.</param>
/// <param name="message">The message.</param>
internal static void Log(ILoggingHandler loggingHandler, MediaLogMessageType messageType, string aspectName, string message)
{
    // A message without a handler or without a type can never be output;
    // drop it here instead of queueing work that would be discarded.
    var isDroppable = loggingHandler == null || messageType == MediaLogMessageType.None;
    if (isDroppable)
    {
        return;
    }

    // NOTE(review): this constructs a LoggingMessage, while the constructor
    // visible elsewhere in this source is declared on MediaLogMessage with an
    // identical parameter list -- confirm the intended type name is consistent.
    LogQueue.Enqueue(new LoggingMessage(loggingHandler, messageType, message, aspectName));
}
/// <summary>
/// Reads all the blocks of the specified media type from the source url.
/// </summary>
/// <param name="mediaSource">The source URL to read blocks from.</param>
/// <param name="sourceType">Type of the source.</param>
/// <param name="parent">The parent logging handler.</param>
/// <returns>A buffer containing all the blocks.</returns>
internal static MediaBlockBuffer LoadBlocks(string mediaSource, MediaType sourceType, ILoggingHandler parent)
{
    if (string.IsNullOrWhiteSpace(mediaSource))
    {
        throw new ArgumentNullException(nameof(mediaSource));
    }

    using (var container = new MediaContainer(mediaSource, null, parent))
    {
        // Only the component matching the requested media type needs
        // to be read and decoded; disable the rest.
        container.MediaOptions.IsAudioDisabled = sourceType != MediaType.Audio;
        container.MediaOptions.IsVideoDisabled = sourceType != MediaType.Video;
        container.MediaOptions.IsSubtitleDisabled = sourceType != MediaType.Subtitle;

        // Open the container and ensure it exposes a stream of the requested type.
        container.Open();
        if (container.Components.Main == null || container.Components.MainMediaType != sourceType)
        {
            throw new MediaContainerException($"Could not find a stream of type '{sourceType}' to load blocks from");
        }

        // Drain the container: read packets, decode them, and keep only
        // the frames that belong to the requested media type.
        var decodedFrames = new List<MediaFrame>(1024 * 8);
        while (true)
        {
            container.Read();
            var frameBatch = container.Decode();
            foreach (var decodedFrame in frameBatch)
            {
                if (decodedFrame.MediaType == sourceType)
                {
                    decodedFrames.Add(decodedFrame);
                }
            }

            // Stop once decoding yields nothing and the stream is exhausted.
            if (frameBatch.Count <= 0 && container.IsAtEndOfStream)
            {
                break;
            }
        }

        // Convert the accumulated frames into the resulting block buffer.
        var blocks = new MediaBlockBuffer(decodedFrames.Count, sourceType);
        foreach (var decodedFrame in decodedFrames)
        {
            blocks.Add(decodedFrame, container);
        }

        container.Close();
        return blocks;
    }
}
/// <summary>
/// Logs the specified message. This the generic logging mechanism available to all classes.
/// </summary>
/// <param name="loggingHandler">The object that will handle the message output.</param>
/// <param name="messageType">Type of the message.</param>
/// <param name="message">The message.</param>
internal static void Log(ILoggingHandler loggingHandler, MediaLogMessageType messageType, string message)
{
    // Delegate to the full overload, using the catch-all aspect name.
    Log(loggingHandler, messageType, Aspects.None, message);
}
/// <summary>
/// Initializes a new instance of the <see cref="LoggingMiddleware{TRequest, TResponse}"/> class.
/// </summary>
/// <param name="loggingHandler">The handler that performs the actual logging for this middleware.</param>
public LoggingMiddleware(ILoggingHandler<TRequest, TResponse> loggingHandler) =>
    _loggingHandler = loggingHandler;
/// <summary>
/// Initializes a new instance of the <see cref="MediaComponent"/> class:
/// allocates a codec context, resolves a decoder (forced or default), opens it,
/// and computes the component's timing properties from the stream.
/// </summary>
/// <param name="container">The container.</param>
/// <param name="streamIndex">Index of the stream.</param>
/// <exception cref="ArgumentNullException">container.</exception>
/// <exception cref="MediaContainerException">
/// Thrown when no decoder can be found or none of the candidate codecs opens successfully.
/// </exception>
public MediaComponent(MediaContainer container, int streamIndex)
{
    // Ported from: https://github.com/FFmpeg/FFmpeg/blob/master/fftools/ffplay.c#L2559
    Container = container ?? throw new ArgumentNullException(nameof(container));
    m_LoggingHandler = ((ILoggingSource)Container).LoggingHandler;

    // Allocate a raw codec context and register it with the reference tracker
    // so the unmanaged allocation can be accounted for later.
    m_CodecContext = new IntPtr(ffmpeg.avcodec_alloc_context3(null));
    RC.Current.Add(CodecContext);
    StreamIndex = streamIndex;
    m_Stream = new IntPtr(container.InputContext->streams[streamIndex]);
    StreamInfo = container.MediaInfo.Streams[streamIndex];

    // Set default codec context options from probed stream
    var setCodecParamsResult = ffmpeg.avcodec_parameters_to_context(CodecContext, Stream->codecpar);
    if (setCodecParamsResult < 0)
    {
        this.LogWarning(Aspects.Component,
            $"Could not set codec parameters. Error code: {setCodecParamsResult}");
    }

    // We set the packet timebase in the same timebase as the stream as opposed to the typical AV_TIME_BASE
    if (this is VideoComponent && Container.MediaOptions.VideoForcedFps > 0)
    {
        // A forced FPS overrides the stream's reported frame rate; the packet
        // timebase becomes the reciprocal of the forced frame rate (num/den swapped).
        var fpsRational = ffmpeg.av_d2q(Container.MediaOptions.VideoForcedFps, 1000000);
        Stream->r_frame_rate = fpsRational;
        CodecContext->pkt_timebase = new AVRational { num = fpsRational.den, den = fpsRational.num };
    }
    else
    {
        CodecContext->pkt_timebase = Stream->time_base;
    }

    // Find the default decoder codec from the stream and set it.
    var defaultCodec = ffmpeg.avcodec_find_decoder(Stream->codec->codec_id);
    AVCodec* forcedCodec = null;

    // If set, change the codec to the forced codec.
    if (Container.MediaOptions.DecoderCodec.ContainsKey(StreamIndex) &&
        string.IsNullOrWhiteSpace(Container.MediaOptions.DecoderCodec[StreamIndex]) == false)
    {
        var forcedCodecName = Container.MediaOptions.DecoderCodec[StreamIndex];
        forcedCodec = ffmpeg.avcodec_find_decoder_by_name(forcedCodecName);

        // An unknown forced codec name is not fatal; we fall back to the default codec below.
        if (forcedCodec == null)
        {
            this.LogWarning(Aspects.Component,
                $"COMP {MediaType.ToString().ToUpperInvariant()}: " +
                $"Unable to set decoder codec to '{forcedCodecName}' on stream index {StreamIndex}");
        }
    }

    // Check we have a valid codec to open and process the stream.
    if (defaultCodec == null && forcedCodec == null)
    {
        var errorMessage = $"Fatal error. Unable to find suitable decoder for {Stream->codec->codec_id.ToString()}";
        CloseComponent();
        throw new MediaContainerException(errorMessage);
    }

    // Try the forced codec first, then fall back to the stream's default codec.
    var codecCandidates = new[] { forcedCodec, defaultCodec };
    AVCodec* selectedCodec = null;
    var codecOpenResult = 0;

    foreach (var codec in codecCandidates)
    {
        if (codec == null) { continue; }

        // Pass default codec stuff to the codec context
        CodecContext->codec_id = codec->id;

        // Process the decoder options
        {
            var decoderOptions = Container.MediaOptions.DecoderParams;

            // Configure the codec context flags
            if (decoderOptions.EnableFastDecoding)
            {
                CodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;
            }

            if (decoderOptions.EnableLowDelayDecoding)
            {
                CodecContext->flags |= ffmpeg.AV_CODEC_FLAG_LOW_DELAY;
            }

            // process the low res option (clamped to what the codec supports)
            if (decoderOptions.LowResolutionIndex != VideoResolutionDivider.Full && codec->max_lowres > 0)
            {
                var lowResOption = Math.Min((byte)decoderOptions.LowResolutionIndex, codec->max_lowres)
                    .ToString(CultureInfo.InvariantCulture);
                decoderOptions.LowResIndexOption = lowResOption;
            }

            // Ensure ref counted frames for audio and video decoding
            if (CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO ||
                CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
            {
                decoderOptions.RefCountedFrames = "1";
            }

            // NOTE(review): the decoding thread count is hard-coded to 4 rather
            // than derived from the decoder options or CPU count -- confirm intended.
            CodecContext->thread_count = 4;
        }

        // Setup additional settings. The most important one is Threads -- Setting it to 1 decoding
        // is very slow. Setting it to auto decoding is very fast in most scenarios.
        var codecOptions = Container.MediaOptions.DecoderParams.GetStreamCodecOptions(Stream->index);

        // Enable Hardware acceleration if requested
        (this as VideoComponent)?.AttachHardwareDevice(container.MediaOptions.VideoHardwareDevice);

        // Open the CodecContext. This requires exclusive FFmpeg access
        lock (CodecLock)
        {
            var codecOptionsRef = codecOptions.Pointer;
            codecOpenResult = ffmpeg.avcodec_open2(CodecContext, codec, &codecOptionsRef);
            codecOptions.UpdateReference(codecOptionsRef);
        }

        // Check if the codec opened successfully; if not, try the next candidate.
        if (codecOpenResult < 0)
        {
            this.LogWarning(Aspects.Component,
                $"Unable to open codec '{Utilities.PtrToStringUTF8(codec->name)}' on stream {streamIndex}");
            continue;
        }

        // If there are any codec options left over from passing them, it means they were not consumed
        var currentEntry = codecOptions.First();
        while (currentEntry?.Key != null)
        {
            this.LogWarning(Aspects.Component,
                $"Invalid codec option: '{currentEntry.Key}' for codec '{Utilities.PtrToStringUTF8(codec->name)}', stream {streamIndex}");
            currentEntry = codecOptions.Next(currentEntry);
        }

        selectedCodec = codec;
        break;
    }

    // No candidate opened successfully: release the component and fail.
    if (selectedCodec == null)
    {
        CloseComponent();
        throw new MediaContainerException($"Unable to find suitable decoder codec for stream {streamIndex}. Error code {codecOpenResult}");
    }

    // Startup done. Set some options.
    Stream->discard = AVDiscard.AVDISCARD_DEFAULT;
    MediaType = (MediaType)CodecContext->codec_type;

    switch (MediaType)
    {
        case MediaType.Audio:
        case MediaType.Video:
            BufferCountThreshold = 25;
            BufferDurationThreshold = TimeSpan.FromSeconds(1);
            DecodePacketFunction = DecodeNextAVFrame;
            break;
        default:
            // NOTE(review): subtitle streams also fall through to this throw --
            // confirm whether a MediaType.Subtitle case was intended here.
            throw new NotSupportedException($"A component of MediaType '{MediaType}' is not supported");
    }

    // Attached pictures / still images yield a single frame, so buffering thresholds do not apply.
    var contentDisposition = StreamInfo.Disposition;
    IsStillPictures = MediaType == MediaType.Video &&
        ((contentDisposition & ffmpeg.AV_DISPOSITION_ATTACHED_PIC) != 0 ||
        (contentDisposition & ffmpeg.AV_DISPOSITION_STILL_IMAGE) != 0 ||
        (contentDisposition & ffmpeg.AV_DISPOSITION_TIMED_THUMBNAILS) != 0);

    if (IsStillPictures)
    {
        BufferCountThreshold = 0;
        BufferDurationThreshold = TimeSpan.Zero;
    }

    // Compute the start time: fall back to the container's start time (or zero) when the stream reports none.
    StartTime = Stream->start_time == ffmpeg.AV_NOPTS_VALUE
        ? Container.MediaInfo.StartTime == TimeSpan.MinValue ? TimeSpan.Zero : Container.MediaInfo.StartTime
        : Stream->start_time.ToTimeSpan(Stream->time_base);

    // Compute the duration: fall back to the container's duration when the stream reports no valid one.
    Duration = (Stream->duration == ffmpeg.AV_NOPTS_VALUE || Stream->duration <= 0)
        ? Container.MediaInfo.Duration
        : Stream->duration.ToTimeSpan(Stream->time_base);

    CodecId = Stream->codec->codec_id;
    CodecName = Utilities.PtrToStringUTF8(selectedCodec->name);
    BitRate = Stream->codec->bit_rate < 0 ? 0 : Stream->codec->bit_rate;
    this.LogDebug(Aspects.Component,
        $"{MediaType.ToString().ToUpperInvariant()} - Start Time: {StartTime.Format()}; Duration: {Duration.Format()}");

    // Begin processing with a flush packet
    SendFlushPacket();
}