/// <summary>
/// Runs ffprobe against the given input and parses its JSON output into a <see cref="Model.MediaInfo.MediaInfo"/>.
/// </summary>
/// <param name="inputPath">The input path (already formatted as an ffprobe input argument).</param>
/// <param name="primaryPath">The primary path.</param>
/// <param name="protocol">The protocol.</param>
/// <param name="extractChapters">if set to <c>true</c>, chapter data is also requested from ffprobe.</param>
/// <param name="extractKeyFrameInterval">if set to <c>true</c>, key frames are extracted for eligible h264 streams.</param>
/// <param name="probeSizeArgument">The probe size argument.</param>
/// <param name="isAudio">if set to <c>true</c> [is audio].</param>
/// <param name="videoType">Type of the video.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task{MediaInfoResult}.</returns>
/// <exception cref="System.ApplicationException">Thrown when ffprobe produces no parsable output.</exception>
private async Task<Model.MediaInfo.MediaInfo> GetMediaInfoInternal(string inputPath,
    string primaryPath,
    MediaProtocol protocol,
    bool extractChapters,
    bool extractKeyFrameInterval,
    string probeSizeArgument,
    bool isAudio,
    VideoType videoType,
    CancellationToken cancellationToken)
{
    var args = extractChapters
        ? "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_chapters -show_format"
        : "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_format";

    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both or ffmpeg may hang due to deadlocks. See comments below.
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,

            FileName = FFProbePath,
            Arguments = string.Format(args, probeSizeArgument, inputPath).Trim(),

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },

        EnableRaisingEvents = true
    };

    _logger.Debug("{0} {1}", process.StartInfo.FileName, process.StartInfo.Arguments);

    using (var processWrapper = new ProcessWrapper(process, this, _logger))
    {
        // Acquire the probe throttle inside the using block so the wrapper (and the
        // underlying process) is always disposed even if the wait is cancelled.
        // This matches the other GetMediaInfoInternal overload.
        await _ffProbeResourcePool.WaitAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            StartProcess(processWrapper);
        }
        catch (Exception ex)
        {
            // ffprobe never started, so release the throttle before propagating.
            _ffProbeResourcePool.Release();

            _logger.ErrorException("Error starting ffprobe", ex);

            throw;
        }

        try
        {
            // Drain stderr asynchronously so ffprobe cannot block on a full pipe
            // while we synchronously read the JSON document from stdout.
            process.BeginErrorReadLine();

            var result = _jsonSerializer.DeserializeFromStream<InternalMediaInfoResult>(process.StandardOutput.BaseStream);

            if (result != null)
            {
                if (result.streams != null)
                {
                    // Normalize aspect ratio if invalid
                    foreach (var stream in result.streams)
                    {
                        if (string.Equals(stream.display_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                        {
                            stream.display_aspect_ratio = string.Empty;
                        }
                        if (string.Equals(stream.sample_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                        {
                            stream.sample_aspect_ratio = string.Empty;
                        }
                    }
                }

                var mediaInfo = new ProbeResultNormalizer(_logger, FileSystem).GetMediaInfo(result, videoType, isAudio, primaryPath, protocol);

                if (extractKeyFrameInterval && mediaInfo.RunTimeTicks.HasValue)
                {
                    // Frame analysis is gated by configuration and by file size to bound its cost.
                    if (ConfigurationManager.Configuration.EnableVideoFrameAnalysis &&
                        mediaInfo.Size.HasValue &&
                        mediaInfo.Size.Value <= ConfigurationManager.Configuration.VideoFrameAnalysisLimitBytes)
                    {
                        foreach (var stream in mediaInfo.MediaStreams)
                        {
                            // Only probe key frames for progressive, non-anamorphic h264 video streams.
                            if (stream.Type == MediaStreamType.Video &&
                                string.Equals(stream.Codec, "h264", StringComparison.OrdinalIgnoreCase) &&
                                !stream.IsInterlaced &&
                                !(stream.IsAnamorphic ?? false))
                            {
                                try
                                {
                                    stream.KeyFrames = await GetKeyFrames(inputPath, stream.Index, cancellationToken).ConfigureAwait(false);
                                }
                                catch (OperationCanceledException)
                                {
                                    // Cancellation is expected; key frames are optional data.
                                }
                                catch (Exception ex)
                                {
                                    // Best effort: a key frame failure must not fail the whole probe.
                                    _logger.ErrorException("Error getting key frame interval", ex);
                                }
                            }
                        }
                    }
                }

                return mediaInfo;
            }
        }
        catch
        {
            // ffprobe may still be running; tear it down before rethrowing.
            StopProcess(processWrapper, 100, true);

            throw;
        }
        finally
        {
            _ffProbeResourcePool.Release();
        }
    }

    throw new ApplicationException(string.Format("FFProbe failed for {0}", inputPath));
}
/// <summary>
/// Runs ffprobe against the given input and parses its JSON output into a <see cref="MediaInfo"/>.
/// </summary>
/// <param name="inputPath">The input path (already formatted as an ffprobe input argument).</param>
/// <param name="primaryPath">The primary path.</param>
/// <param name="protocol">The protocol.</param>
/// <param name="extractChapters">if set to <c>true</c>, chapter data is also requested from ffprobe.</param>
/// <param name="probeSizeArgument">The probe size argument.</param>
/// <param name="isAudio">if set to <c>true</c> [is audio].</param>
/// <param name="videoType">Type of the video.</param>
/// <param name="forceEnableLogging">if set to <c>true</c>, the ffprobe command line is logged at Info rather than Debug.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task{MediaInfoResult}.</returns>
private async Task<MediaInfo> GetMediaInfoInternal(string inputPath,
    string primaryPath,
    MediaProtocol protocol,
    bool extractChapters,
    string probeSizeArgument,
    bool isAudio,
    VideoType videoType,
    bool forceEnableLogging,
    CancellationToken cancellationToken)
{
    var args = extractChapters
        ? "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_chapters -show_format"
        : "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_format";

    var process = _processFactory.Create(new ProcessOptions
    {
        CreateNoWindow = true,
        UseShellExecute = false,

        // Only stdout is redirected in this overload (stderr flows to the parent),
        // and it must be fully consumed or ffprobe can hang on a full pipe.
        RedirectStandardOutput = true,

        FileName = FFProbePath,
        Arguments = string.Format(args, probeSizeArgument, inputPath).Trim(),
        IsHidden = true,
        ErrorDialog = false,
        EnableRaisingEvents = true
    });

    if (forceEnableLogging)
    {
        _logger.Info("{0} {1}", process.StartInfo.FileName, process.StartInfo.Arguments);
    }
    else
    {
        _logger.Debug("{0} {1}", process.StartInfo.FileName, process.StartInfo.Arguments);
    }

    using (var processWrapper = new ProcessWrapper(process, this, _logger))
    {
        // Throttle concurrent ffprobe runs; acquired inside the using block so the
        // wrapper is disposed even if the wait is cancelled.
        await _ffProbeResourcePool.WaitAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            StartProcess(processWrapper);
        }
        catch (Exception ex)
        {
            // ffprobe never started, so release the throttle before propagating.
            _ffProbeResourcePool.Release();

            _logger.ErrorException("Error starting ffprobe", ex);

            throw;
        }

        try
        {
            var result = _jsonSerializer.DeserializeFromStream<InternalMediaInfoResult>(process.StandardOutput.BaseStream);

            if (result == null || (result.streams == null && result.format == null))
            {
                throw new Exception("ffprobe failed - streams and format are both null.");
            }

            if (result.streams != null)
            {
                // Normalize aspect ratio if invalid
                foreach (var stream in result.streams)
                {
                    if (string.Equals(stream.display_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                    {
                        stream.display_aspect_ratio = string.Empty;
                    }
                    if (string.Equals(stream.sample_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                    {
                        stream.sample_aspect_ratio = string.Empty;
                    }
                }
            }

            var mediaInfo = new ProbeResultNormalizer(_logger, FileSystem, _memoryStreamProvider).GetMediaInfo(result, videoType, isAudio, primaryPath, protocol);

            // ffprobe does not always report interlacing; run our own detection when it says progressive.
            var videoStream = mediaInfo.MediaStreams.FirstOrDefault(i => i.Type == MediaStreamType.Video);

            if (videoStream != null && !videoStream.IsInterlaced)
            {
                var isInterlaced = DetectInterlaced(mediaInfo, videoStream);

                if (isInterlaced)
                {
                    videoStream.IsInterlaced = true;
                }
            }

            return mediaInfo;
        }
        catch
        {
            // ffprobe may still be running; tear it down before rethrowing.
            StopProcess(processWrapper, 100);

            throw;
        }
        finally
        {
            _ffProbeResourcePool.Release();
        }
    }
}
public void IsCodecTimeBaseDoubleTheFrameRate_Success(float? frameRate, string codecTimeBase, bool expected)
{
    // Act
    var actual = ProbeResultNormalizer.IsCodecTimeBaseDoubleTheFrameRate(frameRate, codecTimeBase);

    // Assert
    Assert.Equal(expected, actual);
}
public void GetMediaInfo_Mp4MetaData_Success()
{
    // Arrange: deserialize a captured ffprobe result for an mp4 with rich metadata.
    var probeJson = File.ReadAllBytes("Test Data/Probing/video_mp4_metadata.json");
    var probeResult = JsonSerializer.Deserialize<InternalMediaInfoResult>(probeJson, _jsonOptions);

    // Subtitle handling requires a localization object; mock it to echo the input string.
    var localizationMock = new Mock<ILocalizationManager>();
    localizationMock.Setup(x => x.GetLocalizedString(It.IsAny<string>())).Returns<string>(x => x);

    var normalizer = new ProbeResultNormalizer(new NullLogger<EncoderValidatorTests>(), localizationMock.Object);

    // Act
    MediaInfo res = normalizer.GetMediaInfo(probeResult, VideoType.VideoFile, false, "Test Data/Probing/video_mp4_metadata.mkv", MediaProtocol.File);

    // Assert: expected layout is [Video, Audio (Main), Audio (Commentary),
    // Subtitle (Main, Spanish), Subtitle (Main, English), Subtitle (Commentary)].
    Assert.Equal(6, res.MediaStreams.Count);

    // Video stream (index 0)
    Assert.NotNull(res.VideoStream);
    Assert.Equal(res.MediaStreams[0], res.VideoStream);
    Assert.Equal(0, res.VideoStream.Index);
    Assert.Equal("h264", res.VideoStream.Codec);
    Assert.Equal("High", res.VideoStream.Profile);
    Assert.Equal(MediaStreamType.Video, res.VideoStream.Type);
    Assert.Equal(358, res.VideoStream.Height);
    Assert.Equal(720, res.VideoStream.Width);
    Assert.Equal("2.40:1", res.VideoStream.AspectRatio);
    Assert.Equal("yuv420p", res.VideoStream.PixelFormat);
    Assert.Equal(31d, res.VideoStream.Level);
    Assert.Equal(1, res.VideoStream.RefFrames);
    Assert.True(res.VideoStream.IsAVC);
    Assert.Equal(120f, res.VideoStream.RealFrameRate);
    Assert.Equal("1/90000", res.VideoStream.TimeBase);
    Assert.Equal(1147365, res.VideoStream.BitRate);
    Assert.Equal(8, res.VideoStream.BitDepth);
    Assert.True(res.VideoStream.IsDefault);
    Assert.Equal("und", res.VideoStream.Language);

    // Main audio stream (index 1)
    Assert.Equal(MediaStreamType.Audio, res.MediaStreams[1].Type);
    Assert.Equal("aac", res.MediaStreams[1].Codec);
    Assert.Equal(7, res.MediaStreams[1].Channels);
    Assert.True(res.MediaStreams[1].IsDefault);
    Assert.Equal("eng", res.MediaStreams[1].Language);
    Assert.Equal("Surround 6.1", res.MediaStreams[1].Title);

    // Commentary audio stream (index 2)
    Assert.Equal(MediaStreamType.Audio, res.MediaStreams[2].Type);
    Assert.Equal("aac", res.MediaStreams[2].Codec);
    Assert.Equal(2, res.MediaStreams[2].Channels);
    Assert.False(res.MediaStreams[2].IsDefault);
    Assert.Equal("eng", res.MediaStreams[2].Language);
    Assert.Equal("Commentary", res.MediaStreams[2].Title);

    // Spanish subtitle stream (index 3)
    Assert.Equal("spa", res.MediaStreams[3].Language);
    Assert.Equal(MediaStreamType.Subtitle, res.MediaStreams[3].Type);
    Assert.Equal("DVDSUB", res.MediaStreams[3].Codec);
    Assert.Null(res.MediaStreams[3].Title);

    // English subtitle stream (index 4)
    Assert.Equal("eng", res.MediaStreams[4].Language);
    Assert.Equal(MediaStreamType.Subtitle, res.MediaStreams[4].Type);
    Assert.Equal("mov_text", res.MediaStreams[4].Codec);
    Assert.Null(res.MediaStreams[4].Title);

    // Commentary subtitle stream (index 5)
    Assert.Equal("eng", res.MediaStreams[5].Language);
    Assert.Equal(MediaStreamType.Subtitle, res.MediaStreams[5].Type);
    Assert.Equal("mov_text", res.MediaStreams[5].Codec);
    Assert.Equal("Commentary", res.MediaStreams[5].Title);
}
public void GetFrameRate_Success(string value, float? expected)
{
    // Act
    var actual = ProbeResultNormalizer.GetFrameRate(value);

    // Assert
    Assert.Equal(expected, actual);
}