/// <summary>
/// Builds the FFmpeg command-line argument string for the current capture settings.
/// </summary>
/// <param name="isCustom">
/// When true, emits placeholder tokens ($fps$, $area_x$, $output$, ...) instead of concrete
/// values, so the result can serve as a template for user-customized commands
/// (see <see cref="GetFFmpegCommands"/>, which substitutes the placeholders).
/// </param>
/// <returns>
/// The full argument string, or null when neither a video nor an audio source is selected.
/// </returns>
public string GetFFmpegArgs(bool isCustom = false)
{
    if (!FFmpeg.IsVideoSourceSelected && !FFmpeg.IsAudioSourceSelected)
    {
        // Nothing to record; callers must treat null as "no command".
        return null;
    }

    StringBuilder args = new StringBuilder();
    args.Append("-y "); // -y for overwrite file
    args.Append("-rtbufsize 100M "); // default real time buffer size was 3041280 (3M)

    // GIF recording uses its own FPS setting; everything else uses the screen-record FPS.
    string fps;
    if (isCustom)
    {
        fps = "$fps$";
    }
    else
    {
        fps = FFmpeg.VideoCodec == FFmpegVideoCodec.gif ? Giffps.ToString() : ScreenRecordFps.ToString();
    }

    AppendInputArgs(args, fps, isCustom);

    if (!string.IsNullOrEmpty(FFmpeg.UserArgs))
    {
        args.Append(FFmpeg.UserArgs + " ");
    }

    if (FFmpeg.IsVideoSourceSelected)
    {
        AppendVideoCodecArgs(args, fps);
    }

    if (FFmpeg.IsAudioSourceSelected)
    {
        AppendAudioCodecArgs(args);
    }

    if (Duration > 0)
    {
        // duration limit
        args.AppendFormat("-t {0} ", isCustom ? "$duration$" : Duration.ToString("0.0", CultureInfo.InvariantCulture));
    }

    args.AppendFormat("\"{0}\"", isCustom ? "$output$" : Path.ChangeExtension(OutputPath, FFmpeg.Extension));

    return args.ToString();
}

/// <summary>
/// Appends the capture-source (input) portion of the command line:
/// gdigrab desktop capture or a dshow device, plus the dshow audio input when selected.
/// </summary>
private void AppendInputArgs(StringBuilder args, string fps, bool isCustom)
{
    if (FFmpeg.IsVideoSourceSelected)
    {
        if (FFmpeg.VideoSource.Equals(FFmpegHelper.SourceGdiGrab, StringComparison.InvariantCultureIgnoreCase))
        {
            // http://ffmpeg.org/ffmpeg-devices.html#gdigrab
            args.AppendFormat("-f gdigrab -framerate {0} -offset_x {1} -offset_y {2} -video_size {3}x{4} -draw_mouse {5} -i desktop ",
                fps,
                isCustom ? "$area_x$" : CaptureArea.X.ToString(),
                isCustom ? "$area_y$" : CaptureArea.Y.ToString(),
                isCustom ? "$area_width$" : CaptureArea.Width.ToString(),
                isCustom ? "$area_height$" : CaptureArea.Height.ToString(),
                isCustom ? "$cursor$" : DrawCursor ? "1" : "0");

            if (FFmpeg.IsAudioSourceSelected)
            {
                // gdigrab has no audio; audio comes in as a separate dshow input.
                args.AppendFormat("-f dshow -i audio=\"{0}\" ", FFmpeg.AudioSource);
            }
        }
        else
        {
            // dshow video device; audio (when selected) is joined onto the same -i with ':'.
            args.AppendFormat("-f dshow -framerate {0} -i video=\"{1}\"", fps, FFmpeg.VideoSource);

            if (FFmpeg.IsAudioSourceSelected)
            {
                args.AppendFormat(":audio=\"{0}\" ", FFmpeg.AudioSource);
            }
            else
            {
                args.Append(" ");
            }
        }
    }
    else if (FFmpeg.IsAudioSourceSelected)
    {
        // Audio-only recording.
        args.AppendFormat("-f dshow -i audio=\"{0}\" ", FFmpeg.AudioSource);
    }
}

/// <summary>
/// Appends the video encoder selection (-c:v), output frame rate (-r) and
/// per-codec quality/latency options.
/// </summary>
private void AppendVideoCodecArgs(StringBuilder args, string fps)
{
    string videoCodec;
    switch (FFmpeg.VideoCodec)
    {
        default:
            videoCodec = FFmpeg.VideoCodec.ToString();
            break;
        case FFmpegVideoCodec.gif:
            // GIF is recorded via lossless x264 first (converted to GIF afterwards elsewhere).
            videoCodec = FFmpegVideoCodec.libx264.ToString();
            break;
    }

    args.AppendFormat("-c:v {0} ", videoCodec);
    args.AppendFormat("-r {0} ", fps); // output FPS

    switch (FFmpeg.VideoCodec)
    {
        case FFmpegVideoCodec.libx264: // https://trac.ffmpeg.org/wiki/Encode/H.264
        case FFmpegVideoCodec.libx265: // https://trac.ffmpeg.org/wiki/Encode/H.265
            args.AppendFormat("-preset {0} ", FFmpeg.X264Preset);
            args.AppendFormat("-tune {0} ", FFmpegTune.zerolatency);
            args.AppendFormat("-crf {0} ", FFmpeg.X264Crf);
            args.AppendFormat("-pix_fmt {0} ", "yuv420p"); // -pix_fmt yuv420p required otherwise can't stream in Chrome
            break;
        case FFmpegVideoCodec.libvpx: // https://trac.ffmpeg.org/wiki/Encode/VP8
            args.AppendFormat("-deadline {0} ", "realtime");
            args.AppendFormat("-b:v {0}k ", FFmpeg.VPxBitrate);
            args.AppendFormat("-pix_fmt {0} ", "yuv420p"); // -pix_fmt yuv420p required otherwise causing issues in Chrome related to WebM transparency support
            break;
        case FFmpegVideoCodec.libxvid: // https://trac.ffmpeg.org/wiki/Encode/MPEG-4
            args.AppendFormat("-qscale:v {0} ", FFmpeg.XviDQscale);
            break;
        case FFmpegVideoCodec.h264_nvenc: // https://trac.ffmpeg.org/wiki/HWAccelIntro#NVENC
        case FFmpegVideoCodec.hevc_nvenc:
            args.AppendFormat("-preset {0} ", FFmpeg.NvencPreset);
            args.AppendFormat("-b:v {0}k ", FFmpeg.NvencBitrate);
            args.AppendFormat("-pix_fmt {0} ", "yuv420p");
            break;
        case FFmpegVideoCodec.gif:
            // Lossless (-qp 0), fastest preset: quality is decided later when converting to GIF.
            args.AppendFormat("-preset {0} ", FFmpegPreset.ultrafast);
            args.AppendFormat("-tune {0} ", FFmpegTune.zerolatency);
            args.AppendFormat("-qp {0} ", 0);
            break;
    }
}

/// <summary>
/// Appends the audio encoder selection (-c:a) and its quality/bitrate options.
/// </summary>
private void AppendAudioCodecArgs(StringBuilder args)
{
    switch (FFmpeg.AudioCodec)
    {
        case FFmpegAudioCodec.libvoaacenc: // http://trac.ffmpeg.org/wiki/Encode/AAC
            args.AppendFormat("-c:a aac -strict -2 -ac 2 -b:a {0}k ", FFmpeg.AacBitrate); // -ac 2 required otherwise failing with 7.1
            break;
        case FFmpegAudioCodec.libvorbis: // http://trac.ffmpeg.org/wiki/TheoraVorbisEncodingGuide
            args.AppendFormat("-c:a libvorbis -qscale:a {0} ", FFmpeg.VorbisQscale);
            break;
        case FFmpegAudioCodec.libmp3lame: // http://trac.ffmpeg.org/wiki/Encode/MP3
            args.AppendFormat("-c:a libmp3lame -qscale:a {0} ", FFmpeg.Mp3Qscale);
            break;
    }
}
/// <summary>
/// Produces the final FFmpeg command line to execute: either the user's custom command
/// template with all $placeholders$ substituted, or the auto-generated arguments from
/// <see cref="GetFFmpegArgs"/>. Also primes the screen-capture-recorder registry settings
/// when that dshow source is selected.
/// </summary>
/// <returns>
/// The trimmed command string, or null when no video or audio source is selected
/// (auto-generated path only).
/// </returns>
public string GetFFmpegCommands()
{
    string commands;

    if (!string.IsNullOrEmpty(FFmpeg.VideoSource) &&
        FFmpeg.VideoSource.Equals("screen-capture-recorder", StringComparison.InvariantCultureIgnoreCase))
    {
        // https://github.com/rdp/screen-capture-recorder-to-video-windows-free
        // This dshow filter reads its capture area/FPS/cursor settings from the registry,
        // so they must be written before recording starts.
        const string registryPath = "Software\\screen-capture-recorder";
        RegistryHelper.CreateRegistry(registryPath, "start_x", CaptureArea.X);
        RegistryHelper.CreateRegistry(registryPath, "start_y", CaptureArea.Y);
        RegistryHelper.CreateRegistry(registryPath, "capture_width", CaptureArea.Width);
        RegistryHelper.CreateRegistry(registryPath, "capture_height", CaptureArea.Height);
        // GIF FPS used for ScreenRecordFPS.
        // NOTE(review): Giffps is written unconditionally, even for non-GIF codecs, while
        // GetFFmpegArgs uses ScreenRecordFps for non-GIF — confirm this mismatch is intended.
        RegistryHelper.CreateRegistry(registryPath, "default_max_fps", Giffps);
        RegistryHelper.CreateRegistry(registryPath, "capture_mouse_default_1", DrawCursor ? 1 : 0);
    }

    if (FFmpeg.UseCustomCommands && !string.IsNullOrEmpty(FFmpeg.CustomCommands))
    {
        // Substitute the same placeholder tokens that GetFFmpegArgs(isCustom: true) emits.
        commands = FFmpeg.CustomCommands.
            Replace("$fps$", FFmpeg.VideoCodec == FFmpegVideoCodec.gif ? Giffps.ToString() : ScreenRecordFps.ToString(), StringComparison.InvariantCultureIgnoreCase).
            Replace("$area_x$", CaptureArea.X.ToString(), StringComparison.InvariantCultureIgnoreCase).
            Replace("$area_y$", CaptureArea.Y.ToString(), StringComparison.InvariantCultureIgnoreCase).
            Replace("$area_width$", CaptureArea.Width.ToString(), StringComparison.InvariantCultureIgnoreCase).
            Replace("$area_height$", CaptureArea.Height.ToString(), StringComparison.InvariantCultureIgnoreCase).
            Replace("$cursor$", DrawCursor ? "1" : "0", StringComparison.InvariantCultureIgnoreCase).
            Replace("$duration$", Duration.ToString("0.0", CultureInfo.InvariantCulture), StringComparison.InvariantCultureIgnoreCase).
            Replace("$output$", Path.ChangeExtension(OutputPath, FFmpeg.Extension), StringComparison.InvariantCultureIgnoreCase);
    }
    else
    {
        commands = GetFFmpegArgs();
    }

    // BUG FIX: GetFFmpegArgs returns null when neither a video nor an audio source is
    // selected; the unconditional commands.Trim() previously threw NullReferenceException.
    return commands?.Trim();
}