/// <summary>
/// Runs ffprobe against the input and builds a normalized <see cref="Model.MediaInfo.MediaInfo"/>.
/// </summary>
/// <param name="inputPath">The input path handed to ffprobe.</param>
/// <param name="primaryPath">The primary path used during normalization.</param>
/// <param name="protocol">The protocol of the input.</param>
/// <param name="extractChapters">if set to <c>true</c>, chapter data is requested from ffprobe.</param>
/// <param name="probeSizeArgument">The probe size argument.</param>
/// <param name="isAudio">if set to <c>true</c>, the input is treated as audio.</param>
/// <param name="videoType">Type of the video.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task{MediaInfoResult}.</returns>
/// <exception cref="System.ApplicationException">ffprobe failed - streams and format are both null.</exception>
private async Task<Model.MediaInfo.MediaInfo> GetMediaInfoInternal(string inputPath, string primaryPath, MediaProtocol protocol, bool extractChapters, string probeSizeArgument, bool isAudio, VideoType videoType, CancellationToken cancellationToken)
{
    // Chapter extraction only differs by the -show_chapters flag.
    string argsFormat;
    if (extractChapters)
    {
        argsFormat = "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_chapters -show_format";
    }
    else
    {
        argsFormat = "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_format";
    }

    var probeProcess = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Both stdout and stderr must be consumed, otherwise ffprobe can
            // deadlock when an OS pipe buffer fills up.
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,
            FileName = FFProbePath,
            Arguments = string.Format(argsFormat, probeSizeArgument, inputPath).Trim(),

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },
        EnableRaisingEvents = true
    };

    _logger.Debug("{0} {1}", probeProcess.StartInfo.FileName, probeProcess.StartInfo.Arguments);

    using (var processWrapper = new ProcessWrapper(probeProcess, this, _logger))
    {
        // Throttle concurrent ffprobe executions.
        await _ffProbeResourcePool.WaitAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            StartProcess(processWrapper);
        }
        catch (Exception ex)
        {
            // Start never happened, so the finally below won't run — release here.
            _ffProbeResourcePool.Release();

            _logger.ErrorException("Error starting ffprobe", ex);

            throw;
        }

        try
        {
            // Drain stderr asynchronously while stdout (the JSON payload) is parsed.
            probeProcess.BeginErrorReadLine();

            var probeResult = _jsonSerializer.DeserializeFromStream<InternalMediaInfoResult>(probeProcess.StandardOutput.BaseStream);

            if (probeResult.streams == null && probeResult.format == null)
            {
                throw new ApplicationException("ffprobe failed - streams and format are both null.");
            }

            if (probeResult.streams != null)
            {
                // ffprobe reports "0:1" for unknown ratios; blank these out so
                // downstream code doesn't treat them as real values.
                foreach (var stream in probeResult.streams)
                {
                    if (string.Equals(stream.display_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                    {
                        stream.display_aspect_ratio = string.Empty;
                    }
                    if (string.Equals(stream.sample_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                    {
                        stream.sample_aspect_ratio = string.Empty;
                    }
                }
            }

            var mediaInfo = new ProbeResultNormalizer(_logger, FileSystem).GetMediaInfo(probeResult, videoType, isAudio, primaryPath, protocol);

            var primaryVideoStream = mediaInfo.MediaStreams.FirstOrDefault(i => i.Type == MediaStreamType.Video);

            if (primaryVideoStream != null)
            {
                // Run a secondary probe pass to detect interlacing; only ever
                // upgrades the flag, never clears it.
                var isInterlaced = await DetectInterlaced(mediaInfo, primaryVideoStream, inputPath, probeSizeArgument).ConfigureAwait(false);

                if (isInterlaced)
                {
                    primaryVideoStream.IsInterlaced = true;
                }
            }

            return mediaInfo;
        }
        catch
        {
            // Kill the probe so it doesn't linger after a failure.
            StopProcess(processWrapper, 100, true);

            throw;
        }
        finally
        {
            _ffProbeResourcePool.Release();
        }
    }
}
/// <summary>
/// Gets the media info internal.
/// </summary>
/// <param name="inputPath">The input path.</param>
/// <param name="primaryPath">The primary path.</param>
/// <param name="protocol">The protocol.</param>
/// <param name="extractChapters">if set to <c>true</c> [extract chapters].</param>
/// <param name="extractKeyFrameInterval">if set to <c>true</c> [extract key frame interval].</param>
/// <param name="probeSizeArgument">The probe size argument.</param>
/// <param name="isAudio">if set to <c>true</c> [is audio].</param>
/// <param name="videoType">Type of the video.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task{MediaInfoResult}.</returns>
/// <exception cref="System.ApplicationException">ffprobe produced no parsable output for the input.</exception>
private async Task<Model.MediaInfo.MediaInfo> GetMediaInfoInternal(string inputPath, string primaryPath, MediaProtocol protocol, bool extractChapters, bool extractKeyFrameInterval, string probeSizeArgument, bool isAudio, VideoType videoType, CancellationToken cancellationToken)
{
    var args = extractChapters
        ? "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_chapters -show_format"
        : "{0} -i {1} -threads 0 -v info -print_format json -show_streams -show_format";

    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both or ffmpeg may hang due to deadlocks. See comments below.
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,
            FileName = FFProbePath,
            Arguments = string.Format(args, probeSizeArgument, inputPath).Trim(),

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },
        EnableRaisingEvents = true
    };

    _logger.Debug("{0} {1}", process.StartInfo.FileName, process.StartInfo.Arguments);

    // Throttle concurrent ffprobe executions.
    await _ffProbeResourcePool.WaitAsync(cancellationToken).ConfigureAwait(false);

    using (var processWrapper = new ProcessWrapper(process, this, _logger))
    {
        try
        {
            StartProcess(processWrapper);
        }
        catch (Exception ex)
        {
            // The main try/finally below was never entered, so release here.
            _ffProbeResourcePool.Release();

            _logger.ErrorException("Error starting ffprobe", ex);

            throw;
        }
        try
        {
            // Drain stderr asynchronously while stdout (the JSON payload) is parsed.
            process.BeginErrorReadLine();

            var result = _jsonSerializer.DeserializeFromStream<InternalMediaInfoResult>(process.StandardOutput.BaseStream);

            if (result == null)
            {
                // Throw inside the try so the catch below stops the process,
                // matching the behavior of the other GetMediaInfoInternal overload.
                // Previously this fell through and threw after the using block,
                // skipping StopProcess for the null-result case.
                throw new ApplicationException(string.Format("FFProbe failed for {0}", inputPath));
            }

            if (result.streams != null)
            {
                // Normalize aspect ratio if invalid ("0:1" is ffprobe's "unknown" marker).
                foreach (var stream in result.streams)
                {
                    if (string.Equals(stream.display_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                    {
                        stream.display_aspect_ratio = string.Empty;
                    }
                    if (string.Equals(stream.sample_aspect_ratio, "0:1", StringComparison.OrdinalIgnoreCase))
                    {
                        stream.sample_aspect_ratio = string.Empty;
                    }
                }
            }

            var mediaInfo = new ProbeResultNormalizer(_logger, FileSystem).GetMediaInfo(result, videoType, isAudio, primaryPath, protocol);

            if (extractKeyFrameInterval && mediaInfo.RunTimeTicks.HasValue)
            {
                // Frame analysis is gated by config and capped by file size to bound the cost.
                if (ConfigurationManager.Configuration.EnableVideoFrameAnalysis && mediaInfo.Size.HasValue && mediaInfo.Size.Value <= ConfigurationManager.Configuration.VideoFrameAnalysisLimitBytes)
                {
                    foreach (var stream in mediaInfo.MediaStreams)
                    {
                        // Only progressive, non-anamorphic h264 video streams are analyzed.
                        if (stream.Type == MediaStreamType.Video &&
                            string.Equals(stream.Codec, "h264", StringComparison.OrdinalIgnoreCase) &&
                            !stream.IsInterlaced &&
                            !(stream.IsAnamorphic ?? false))
                        {
                            try
                            {
                                stream.KeyFrames = await GetKeyFrames(inputPath, stream.Index, cancellationToken).ConfigureAwait(false);
                            }
                            catch (OperationCanceledException)
                            {
                                // Cancellation is expected; key frames are best-effort.
                            }
                            catch (Exception ex)
                            {
                                // Key frame extraction is optional; log and keep the media info.
                                _logger.ErrorException("Error getting key frame interval", ex);
                            }
                        }
                    }
                }
            }

            return mediaInfo;
        }
        catch
        {
            // Kill the probe so it doesn't linger after a failure.
            StopProcess(processWrapper, 100, true);

            throw;
        }
        finally
        {
            _ffProbeResourcePool.Release();
        }
    }
}