/// <summary>
/// Opens output audio file for writing. This will delete any existing file. Call this before writing samples.
/// </summary>
/// <param name="showFFmpegOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
public void OpenWrite(bool showFFmpegOutput = false)
{
    if (OpenedForWriting) { throw new InvalidOperationException("File was already opened for writing!"); }

    // Raw PCM samples (signed little-endian at the configured bit depth) are piped over stdin
    // and encoded with the configured encoder/format.
    var cmd = $"-f s{BitDepth}le -channels {Channels} -sample_rate {SampleRate} -i - " +
              $"-c:a {EncoderOptions.EncoderName} {EncoderOptions.EncoderArguments} -f {EncoderOptions.Format}";

    if (UseFilename)
    {
        // Writing straight to a file: remove any stale output first.
        if (File.Exists(Filename)) { File.Delete(Filename); }

        InputDataStream = FFmpegWrapper.OpenInput(ffmpeg, $"{cmd} \"{Filename}\"", out ffmpegp, showFFmpegOutput);
    }
    else
    {
        csc = new CancellationTokenSource();

        // using stream
        (InputDataStream, OutputDataStream) = FFmpegWrapper.Open(ffmpeg, $"{cmd} -", out ffmpegp, showFFmpegOutput);

        // Fire-and-forget copy of the encoded output into the destination stream;
        // cancelled via csc when the writer is closed.
        _ = OutputDataStream.CopyToAsync(DestinationStream, csc.Token);
    }

    OpenedForWriting = true;
}
/// <summary>
/// Prepares for writing.
/// </summary>
/// <param name="showFFmpegOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
public void OpenWrite(bool showFFmpegOutput = false)
{
    if (OpenedForWriting) { throw new InvalidOperationException("File was already opened for writing!"); }

    // Raw RGB24 frames are piped over stdin and encoded with the configured encoder/format.
    var cmd = $"-f rawvideo -video_size {Width}:{Height} -r {Framerate} -pixel_format rgb24 -i - " +
              $"-c:v {EncoderOptions.EncoderName} {EncoderOptions.EncoderArguments} -f {EncoderOptions.Format}";

    if (UseFilename)
    {
        // Writing straight to a file: remove any stale output first.
        if (File.Exists(Filename)) { File.Delete(Filename); }

        InputDataStream = FFmpegWrapper.OpenInput(ffmpeg, $"{cmd} \"{Filename}\"", out ffmpegp, showFFmpegOutput);
    }
    else
    {
        csc = new CancellationTokenSource();

        // using stream
        (InputDataStream, OutputDataStream) = FFmpegWrapper.Open(ffmpeg, $"{cmd} -", out ffmpegp, showFFmpegOutput);

        // Fire-and-forget copy of the encoded output into the destination stream;
        // cancelled via csc when the writer is closed.
        _ = OutputDataStream.CopyToAsync(DestinationStream, csc.Token);
    }

    OpenedForWriting = true;
}
/// <summary>
/// Uninitializes the video decoder.
/// </summary>
public void UninitializeVideoDecoder()
{
    // NOTE(review): FFmpegWrapper.Uninitialize() looks like a static/global call guarded by an
    // instance-level lock (_cdmaProcessor) — confirm this object actually serializes all callers.
    lock (_cdmaProcessor)
    {
        FFmpegWrapper.Uninitialize();
    }
}
/// <summary>
/// Entry point: converts the single file passed as an argument.
/// WEM inputs become WAV; anything else is converted to WAV (via FFmpeg) and then to WEM.
/// </summary>
static void Main(string[] args)
{
    if (args.Length != 1)
    {
        Console.WriteLine("Drag n' drop a WEM or any audio file onto this EXE to convert it. WEM will be converted to WAV no matter what.");
        Console.WriteLine("Press any key to quit...");
        Console.ReadKey(true);
        return;
    }

    var input = new FileInfo(args[0]);

    if (input.Extension.ToLower() == ".wem")
    {
        // WEM -> WAV
        Console.WriteLine("WARNING: WEM conversion is a bit busted right now! If your file is broken, sorry! A patch will be out ASAP.");
        var wemFile = new WEMFile(input.FullName);
        var wavFile = wemFile.ConvertToWAV();
        wavFile.SaveToFile(input.FullName + ".wav");
    }
    else
    {
        // Anything else -> WAV (via FFmpeg) -> WEM
        input = FFmpegWrapper.ConvertToWaveFile(input.FullName);
        var wavFile = new WAVFile(input.FullName);
        var wemFile = wavFile.ConvertToWEM();
        wemFile.SaveToFile(args[0] + ".wem");
    }
}
/// <summary>
/// Converts the given file to a WEM file, converting through WAV first if necessary.
/// Intermediate files are placed in a hidden .\AUDIO_TEMP directory.
/// </summary>
/// <param name="file">Source audio file</param>
/// <returns>The saved WEM file.</returns>
private static FileInfo ConvertToWEM(FileInfo file)
{
    // Ensure the (hidden) temp directory exists.
    DirectoryInfo dir;
    if (Directory.Exists(@".\AUDIO_TEMP"))
    {
        dir = new DirectoryInfo(@".\AUDIO_TEMP");
    }
    else
    {
        dir = Directory.CreateDirectory(@".\AUDIO_TEMP");
        dir.Attributes = FileAttributes.Directory | FileAttributes.Hidden;
    }

    if (file.Extension.ToLower() != ".wav")
    {
        // Guarantees the extension is wav.
        return ConvertToWEM(FFmpegWrapper.ConvertToWaveFile(file.FullName, dir));
    }

    var wav = new WAVFile(file.FullName);
    var wem = wav.ConvertToWEM();
    return wem.SaveToFile(@".\AUDIO_TEMP\" + file.Name + ".wem");
}
/// <summary>
/// Open player for writing samples for playing.
/// </summary>
/// <param name="sampleRate">Sample rate</param>
/// <param name="channels">Number of channels</param>
/// <param name="bitDepth">Bits per sample (16, 24, 32)</param>
/// <param name="showWindow">Show player graphical window</param>
/// <param name="showFFplayOutput">Show FFplay output for debugging purposes.</param>
public void OpenWrite(int sampleRate, int channels, int bitDepth = 16, bool showWindow = false, bool showFFplayOutput = false)
{
    if (bitDepth != 16 && bitDepth != 24 && bitDepth != 32)
    {
        throw new InvalidOperationException("Acceptable bit depths are 16, 24 and 32");
    }

    if (OpenedForWriting)
    {
        throw new InvalidOperationException("Player is already opened for writing samples!");
    }

    // Best-effort kill of any previous player instance before starting a new one.
    try
    {
        if (ffplayp != null && !ffplayp.HasExited)
        {
            ffplayp.Kill();
        }
    }
    catch { }

    var arguments = $"-f s{bitDepth}le -channels {channels} -sample_rate {sampleRate} -i -" + (showWindow ? "" : " -nodisp");
    InputDataStream = FFmpegWrapper.OpenInput(ffplay, arguments, out ffplayp, showFFplayOutput);

    OpenedForWriting = true;
}
/// <summary>
/// Runs the extraction process with the given FFmpeg parameters. A single instance can only run once.
/// </summary>
/// <param name="parameters">FFmpeg arguments to run</param>
/// <param name="cancellationToken">Token used to cancel the running process</param>
/// <returns>Extraction result with timing, success flag and (on failure) the process output.</returns>
/// <exception cref="InvalidOperationException">Thrown when this instance was already used.</exception>
public async Task<IExtractionResult> Start(string parameters, CancellationToken cancellationToken)
{
    if (_wrapper != null)
    {
        throw new InvalidOperationException("Must create another Extraction resource, cannot reuse.");
    }

    _wrapper = new FFmpegWrapper();

    var result = new ExtractResult
    {
        StartTime = DateTimeOffset.UtcNow,
        Arguments = parameters,
        Tracks = _tracks,
        Success = true
    };

    var processResult = await _wrapper.RunProcess(parameters, cancellationToken).ConfigureAwait(false);

    // FIX: use an ordinal, case-insensitive search instead of ToLower().Contains(...):
    // avoids allocating a lowered copy, avoids culture-sensitive casing surprises, and
    // guards against a null Output (which previously would have thrown NullReferenceException).
    var containsError = processResult.Output != null &&
                        processResult.Output.IndexOf("error", StringComparison.OrdinalIgnoreCase) >= 0;

    if (processResult.ExitCode != 0 || containsError)
    {
        result.Success = false;
        result.Error = processResult.Output;
    }

    result.EndTime = DateTimeOffset.UtcNow;
    return result;
}
/// <summary>
/// Opens output stream for writing and returns both the input and output streams. Make sure to use a streaming format (like flv).
/// </summary>
/// <param name="options">Output options</param>
/// <param name="process">FFmpeg process</param>
/// <param name="inputArguments">Input arguments (such as -f, -v:c, -video_size, -ac, -ar...)</param>
/// <param name="showOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public static (Stream Input, Stream Output) StreamToStream(EncoderOptions options, out Process process,
    string inputArguments = "", bool showOutput = false, string ffmpegExecutable = "ffmpeg")
{
    // Raw data is read from stdin ("-i -") and the encoded result is written to stdout (trailing "-").
    var arguments = $"{inputArguments} -i - " +
                    $"-c:v {options.EncoderName} {options.EncoderArguments} -f {options.Format} -";
    return FFmpegWrapper.Open(ffmpegExecutable, arguments, out process, showOutput);
}
/// <summary>
/// Converts given input file to output file.
/// </summary>
/// <param name="inputFilename">Input video file name/path</param>
/// <param name="outputFilename">Output video file name/path</param>
/// <param name="options">Output options</param>
/// <param name="process">FFmpeg process</param>
/// <param name="inputArguments">Input arguments (such as -f, -v:c, -video_size, -ac, -ar...)</param>
/// <param name="showOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public static void FileToFile(string inputFilename, string outputFilename, EncoderOptions options, out Process process,
    string inputArguments = "", bool showOutput = false, string ffmpegExecutable = "ffmpeg")
{
    var arguments = $"{inputArguments} -i \"{inputFilename}\" " +
                    $"-c:v {options.EncoderName} {options.EncoderArguments} -f {options.Format} \"{outputFilename}\"";
    process = FFmpegWrapper.ExecuteCommand(ffmpegExecutable, arguments, showOutput);
}
/// <summary>
/// Uses input file and returns the output stream. Make sure to use a streaming format (like flv).
/// </summary>
/// <param name="inputFilename">Input video file name/path</param>
/// <param name="options">Output options</param>
/// <param name="process">FFmpeg process</param>
/// <param name="inputArguments">Input arguments (such as -f, -v:c, -video_size, -ac, -ar...)</param>
/// <param name="showOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public static Stream FileToStream(string inputFilename, EncoderOptions options, out Process process,
    string inputArguments = "", bool showOutput = false, string ffmpegExecutable = "ffmpeg")
{
    // Encoded data is written to stdout (trailing "-") and exposed as the returned stream.
    var arguments = $"{inputArguments} -i \"{inputFilename}\" " +
                    $"-c:v {options.EncoderName} {options.EncoderArguments} -f {options.Format} -";
    return FFmpegWrapper.OpenOutput(ffmpegExecutable, arguments, out process, showOutput);
}
/// <summary>
/// Opens output file for writing and returns the input stream.
/// </summary>
/// <param name="outputFilename">Output audio file name/path</param>
/// <param name="options">Output options</param>
/// <param name="process">FFmpeg process</param>
/// <param name="inputArguments">Input arguments (such as -f, -channels, -sample_rate,...)</param>
/// <param name="showOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public static Stream StreamToFile(string outputFilename, FFmpegAudioEncoderOptions options, out Process process,
    string inputArguments = "", bool showOutput = false, string ffmpegExecutable = "ffmpeg")
{
    // Raw audio is read from stdin ("-i -") and encoded into the given output file.
    var arguments = $"{inputArguments} -i - " +
                    $"-c:a {options.EncoderName} {options.EncoderArguments} -f {options.Format} \"{outputFilename}\"";
    return FFmpegWrapper.OpenInput(ffmpegExecutable, arguments, out process, showOutput);
}
/// <summary>
/// Get stream for writing and playing video in custom format.
/// </summary>
/// <param name="format">Custom video format</param>
/// <param name="arguments">Custom FFmpeg arguments for the specified video format</param>
/// <param name="ffplayProcess">FFplay process</param>
/// <param name="showFFplayOutput">Show FFplay output for debugging purposes.</param>
/// <param name="ffplayExecutable">Name or path to the ffplay executable</param>
public static Stream GetStreamForWriting(string format, string arguments, out Process ffplayProcess,
    bool showFFplayOutput = false, string ffplayExecutable = "ffplay")
{
    // FFplay reads the raw data to play from stdin ("-i -").
    return FFmpegWrapper.OpenInput(ffplayExecutable, $"-f {format} {arguments} -i -", out ffplayProcess, showFFplayOutput);
}
/// <summary>
/// Load video metadata into memory.
/// </summary>
/// <param name="ignoreStreamErrors">When true, errors while interpreting the video stream data are swallowed instead of thrown.</param>
public async Task LoadMetadataAsync(bool ignoreStreamErrors = false)
{
    if (LoadedMetadata) { throw new InvalidOperationException("Video metadata is already loaded!"); }

    // Query ffprobe for container + stream information as compact JSON on stdout.
    var r = FFmpegWrapper.OpenOutput(ffprobe, $"-i \"{Filename}\" -v quiet -print_format json=c=1 -show_format -show_streams");

    try
    {
        var metadata = await JsonSerializer.DeserializeAsync<VideoMetadata>(r);

        try
        {
            // Pick the first video stream and lift its commonly-used fields onto the metadata object.
            var videoStream = metadata.Streams.Where(x => x.CodecType.ToLower().Trim() == "video").FirstOrDefault();
            if (videoStream != null)
            {
                metadata.Width = videoStream.Width.Value;
                metadata.Height = videoStream.Height.Value;
                metadata.PixelFormat = videoStream.PixFmt;
                metadata.Codec = videoStream.CodecName;
                metadata.CodecLongName = videoStream.CodecLongName;

                // -1 marks "unknown" when ffprobe did not report a bit rate.
                metadata.BitRate = videoStream.BitRate == null ? -1 :
                    int.Parse(videoStream.BitRate);

                // Fall back to guessing bit depth from the pixel format name when not reported directly.
                metadata.BitDepth = videoStream.BitsPerRawSample == null ?
                    tryParseBitDepth(videoStream.PixFmt) :
                    int.Parse(videoStream.BitsPerRawSample);

                // Prefer the stream duration; fall back to the container (format) duration.
                // InvariantCulture: ffprobe always prints '.' as the decimal separator.
                metadata.Duration = videoStream.Duration == null ?
                    double.Parse(metadata.Format.Duration ?? "0", CultureInfo.InvariantCulture) :
                    double.Parse(videoStream.Duration, CultureInfo.InvariantCulture);

                metadata.SampleAspectRatio = videoStream.SampleAspectRatio;
                metadata.AvgFramerateText = videoStream.AvgFrameRate;
                metadata.AvgFramerate = videoStream.AvgFrameRateNumber;

                // Estimate only; actual frame count may differ (e.g. variable framerate).
                metadata.PredictedFrameCount = (int)(metadata.AvgFramerate * metadata.Duration);
            }
        }
        catch (Exception ex)
        {
            // failed to interpret video stream settings
            if (!ignoreStreamErrors) { throw new InvalidDataException("Failed to parse video stream data! " + ex.Message); }
        }

        LoadedMetadata = true;
        Metadata = metadata;
    }
    catch (JsonException ex)
    {
        throw new InvalidOperationException("Failed to interpret ffprobe video metadata output! " + ex.Message);
    }
}
/// <summary>
/// Play audio
/// </summary>
/// <param name="showWindow">Show player graphical window</param>
public void Play(bool showWindow = false)
{
    if (OpenedForWriting) { throw new InvalidOperationException("Player is already opened for writing samples!"); }
    if (string.IsNullOrEmpty(Filename)) { throw new InvalidOperationException("No filename was specified!"); }

    var arguments = $"-i \"{Filename}\"";
    if (!showWindow) { arguments += " -nodisp"; }

    FFmpegWrapper.RunCommand(ffplay, arguments);
}
/// <summary>
/// Play video
/// </summary>
/// <param name="extraInputParameters">Extra FFmpeg input parameters to be passed</param>
public void Play(string extraInputParameters = "")
{
    if (OpenedForWriting) { throw new InvalidOperationException("Player is already opened for writing frames!"); }
    if (string.IsNullOrEmpty(Filename)) { throw new InvalidOperationException("No filename was specified!"); }

    var arguments = $"{extraInputParameters} -i \"{Filename}\"";
    FFmpegWrapper.RunCommand(ffplay, arguments);
}
/// <summary>
/// Prepares for writing.
/// </summary>
/// <param name="showFFmpegOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="thread_queue_size">Max. number of queued packets when reading from file/stream.
/// Should be set to higher when dealing with high rate/low latency streams.</param>
public void OpenWrite(bool showFFmpegOutput = false, int thread_queue_size = 4096)
{
    if (OpenedForWriting) { throw new InvalidOperationException("File/Stream was already opened for writing!"); }

    // Audio is fed to FFmpeg over a loopback TCP socket (input 0); video goes over stdin (input 1).
    // FFmpeg connects back to this listener; we block below until it does.
    var manual = new ManualResetEvent(false);
    socket = new Socket(SocketType.Stream, ProtocolType.Tcp);
    socket.Bind(new IPEndPoint(IPAddress.Loopback, 0)); // port 0 -> OS picks a free port
    socket.Listen(4);
    var port = ((IPEndPoint)socket.LocalEndPoint).Port;

    socket.BeginAccept(r =>
    {
        connected_socket = socket.EndAccept(r);
        InputDataStreamAudio = new NetworkStream(connected_socket);
        manual.Set();
    }, null);

    // Input 0: raw PCM audio via TCP; input 1: raw RGB24 video via stdin.
    // -map pins audio/video encoders to their respective inputs.
    var cmd = $"-f s{AudioBitDepth}le -channels {AudioChannels} -sample_rate {AudioSampleRate} " +
              $"-thread_queue_size {thread_queue_size} -i \"tcp://{IPAddress.Loopback}:{port}\" " +
              $"-f rawvideo -video_size {VideoWidth}:{VideoHeight} -r {VideoFramerate} " +
              $"-thread_queue_size {thread_queue_size} -pixel_format rgb24 -i - " +
              $"-map 0 -c:a {AudioEncoderOptions.EncoderName} {AudioEncoderOptions.EncoderArguments} " +
              $"-map 1 -c:v {VideoEncoderOptions.EncoderName} {VideoEncoderOptions.EncoderArguments} " +
              $"-f {VideoEncoderOptions.Format}";

    if (UseFilename)
    {
        // Writing straight to a file: remove any stale output first.
        if (File.Exists(Filename)) { File.Delete(Filename); }

        InputDataStreamVideo = FFmpegWrapper.OpenInput(ffmpeg, $"{cmd} \"{Filename}\"", out ffmpegp, showFFmpegOutput);
    }
    else
    {
        csc = new CancellationTokenSource();

        // using stream
        (InputDataStreamVideo, OutputDataStream) = FFmpegWrapper.Open(ffmpeg, $"{cmd} -", out ffmpegp, showFFmpegOutput);

        // Fire-and-forget copy of the encoded output into the destination stream;
        // cancelled via csc when the writer is closed.
        _ = OutputDataStream.CopyToAsync(DestinationStream, csc.Token);
    }

    // Wait until FFmpeg has connected to the audio socket before declaring the writer open.
    manual.WaitOne();
    OpenedForWriting = true;
}
/// <summary>
/// Save frame as an image
/// </summary>
/// <param name="output">Output image</param>
/// <param name="encoder">Encoder for image ('png', 'libwebp')</param>
/// <param name="extraParameters">Extra FFmpeg output parameters</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public void Save(string output, string encoder = "png", string extraParameters = "", string ffmpegExecutable = "ffmpeg")
{
    if (File.Exists(output)) { File.Delete(output); }

    // Pipe the raw RGB24 frame into FFmpeg's stdin and let it encode the image file.
    var arguments = $"-f rawvideo -video_size {Width}:{Height} -pixel_format rgb24 -i - " +
                    $"-c:v {encoder} {extraParameters} -f image2pipe \"{output}\"";

    using (var inp = FFmpegWrapper.OpenInput(ffmpegExecutable, arguments, out _, false))
    {
        // save it
        inp.Write(RawData.Span);
    }
}
// Converts a test mp4 with libx264 while tracking conversion progress; asserts the tracker
// reported a sensible final percentage and the output kept the source's basic properties.
public async Task FFmpegWrapperProgressTest()
{
    var path = Res.GetPath(Res.Video_Mp4);
    var opath = "out-test-v-0.mp4";
    double lastval = -1;

    try
    {
        // Read the source duration first (the progress tracker needs it to compute percentages).
        var video = new VideoReader(path);
        await video.LoadMetadataAsync();
        var dur = video.Metadata.Duration;
        video.Dispose();
        Assert.True(Math.Abs(dur - 5.533333) < 0.01);

        var p = FFmpegWrapper.ExecuteCommand("ffmpeg", $"-i \"{path}\" -c:v libx264 -f mp4 \"{opath}\"");
        var progress = FFmpegWrapper.RegisterProgressTracker(p, dur);
        progress.ProgressChanged += (s, prg) => lastval = prg;

        p.WaitForExit();
        await Task.Delay(300); // give the final progress event time to fire

        Assert.True(lastval > 50 && lastval <= 100);

        // Verify the converted file kept the source's framerate, duration and dimensions.
        video = new VideoReader(opath);
        await video.LoadMetadataAsync();
        Assert.True(video.Metadata.AvgFramerate == 30);
        Assert.True(video.Metadata.AvgFramerateText == "30/1");
        Assert.True(Math.Abs(video.Metadata.Duration - 5.533333) < 0.01);
        Assert.True(video.Metadata.Width == 560);
        Assert.True(video.Metadata.Height == 320);
        video.Dispose();
    }
    finally
    {
        // Clean up the generated output file even when assertions fail.
        if (File.Exists(opath)) { File.Delete(opath); }
    }
}
// Converts a test ogg to mp3 while tracking conversion progress; asserts the tracker
// reported a sensible final percentage and the output kept the source's basic properties.
public async Task FFmpegWrapperProgressTest()
{
    var path = Res.GetPath(Res.Audio_Ogg);
    var opath = "out-test.mp3";
    double lastval = -1;

    try
    {
        // Read the source duration first (the progress tracker needs it to compute percentages).
        var audio = new AudioReader(path);
        await audio.LoadMetadataAsync();
        var dur = audio.Metadata.Duration;
        audio.Dispose();
        Assert.True(Math.Abs(dur - 1.515102) < 0.01);

        var p = FFmpegWrapper.ExecuteCommand("ffmpeg", $"-i \"{path}\" \"{opath}\"");
        var progress = FFmpegWrapper.RegisterProgressTracker(p, dur);
        progress.ProgressChanged += (s, prg) => lastval = prg;

        p.WaitForExit();
        await Task.Delay(300); // give the final progress event time to fire

        Assert.True(lastval > 50 && lastval <= 100);

        // Verify the converted file kept the source's channel count and (roughly) its duration.
        audio = new AudioReader(opath);
        await audio.LoadMetadataAsync();
        Assert.True(audio.Metadata.Channels == 2);
        Assert.True(audio.Metadata.Streams.Length == 1);
        Assert.True(Math.Abs(audio.Metadata.Duration - 1.515102) < 0.2);
        audio.Dispose();
    }
    finally
    {
        // Clean up the generated output file even when assertions fail.
        if (File.Exists(opath)) { File.Delete(opath); }
    }
}
/// <summary>
/// Play video in background and return the process associated with it
/// </summary>
/// <param name="runPureBackground">Detach the player from this VideoPlayer control. Player won't be killed on disposing.</param>
/// <param name="extraInputParameters">Extra FFmpeg input parameters to be passed</param>
/// <returns>The FFplay process that was just started.</returns>
public Process PlayInBackground(bool runPureBackground = false, string extraInputParameters = "")
{
    if (!runPureBackground && OpenedForWriting) { throw new InvalidOperationException("Player is already opened for writing frames!"); }
    if (string.IsNullOrEmpty(Filename)) { throw new InvalidOperationException("No filename was specified!"); }

    FFmpegWrapper.OpenOutput(ffplay, $"{extraInputParameters} -i \"{Filename}\"", out Process p);

    // BUGFIX: this method previously always returned 'ffplayp', which is stale/null when
    // runPureBackground is true (the new process was never assigned to it). Always return
    // the process that was just started; only track it on the player when not detached.
    if (!runPureBackground) { ffplayp = p; }
    return p;
}
/// <summary>
/// Play audio in background and return the process associated with it
/// </summary>
/// <param name="showWindow">Show player window</param>
/// <param name="runPureBackground">Detach the player from this AudioPlayer control. Player won't be killed on disposing.</param>
/// <returns>The FFplay process that was just started.</returns>
public Process PlayInBackground(bool showWindow = false, bool runPureBackground = false)
{
    if (!runPureBackground && OpenedForWriting) { throw new InvalidOperationException("Player is already opened for writing samples!"); }
    if (string.IsNullOrEmpty(Filename)) { throw new InvalidOperationException("No filename was specified!"); }

    FFmpegWrapper.OpenOutput(ffplay, $"-i \"{Filename}\"" + (showWindow ? "" : " -nodisp"), out Process p);

    // BUGFIX: this method previously always returned 'ffplayp', which is stale/null when
    // runPureBackground is true (the new process was never assigned to it). Always return
    // the process that was just started; only track it on the player when not detached.
    if (!runPureBackground) { ffplayp = p; }
    return p;
}
/// <summary>
/// Open player for writing frames for playing.
/// </summary>
/// <param name="width">Video frame width</param>
/// <param name="height">Video frame height</param>
/// <param name="framerateFrequency">Video framerate (frequency form)</param>
/// <param name="showFFplayOutput">Show FFplay output for debugging purposes.</param>
public void OpenWrite(int width, int height, string framerateFrequency, bool showFFplayOutput = false)
{
    if (OpenedForWriting)
    {
        throw new InvalidOperationException("Player is already opened for writing frames!");
    }

    // Best-effort kill of any previous player instance before starting a new one.
    try
    {
        if (ffplayp != null && !ffplayp.HasExited)
        {
            ffplayp.Kill();
        }
    }
    catch { }

    var arguments = $"-f rawvideo -video_size {width}:{height} -framerate {framerateFrequency} -pixel_format rgb24 -i -";
    InputDataStream = FFmpegWrapper.OpenInput(ffplay, arguments, out ffplayp, showFFplayOutput);

    OpenedForWriting = true;
}
/// <summary>
/// Load the video for reading frames and seeks to given offset in seconds.
/// </summary>
/// <param name="offsetSeconds">Offset in seconds to which to seek to</param>
public void Load(double offsetSeconds)
{
    if (OpenedForReading) { throw new InvalidOperationException("Video is already loaded!"); }
    if (!LoadedMetadata) { throw new InvalidOperationException("Please load the video metadata first!"); }
    if (Metadata.Width == 0 || Metadata.Height == 0) { throw new InvalidDataException("Loaded metadata contains errors!"); }

    // we will be reading video in RGB24 format
    var seek = offsetSeconds <= 0 ? "" : $"-ss {offsetSeconds:0.00}";
    DataStream = FFmpegWrapper.OpenOutput(ffmpeg, $"{seek} -i \"{Filename}\" -pix_fmt rgb24 -f rawvideo -");

    OpenedForReading = true;
}
/// <summary>
/// Load the audio and prepare it for reading frames.
/// </summary>
/// <param name="bitDepth">frame bit rate in which the audio will be processed (16, 24, 32)</param>
public void Load(int bitDepth = 16)
{
    if (bitDepth != 16 && bitDepth != 24 && bitDepth != 32)
    {
        throw new InvalidOperationException("Acceptable bit depths are 16, 24 and 32");
    }
    if (OpenedForReading)
    {
        throw new InvalidOperationException("Audio is already loaded!");
    }
    if (!MetadataLoaded)
    {
        throw new InvalidOperationException("Please load the audio metadata first!");
    }

    // we will be reading audio in S16LE format (for best accuracy, could use S32LE)
    var arguments = $"-i \"{Filename}\" -f s{bitDepth}le -";
    DataStream = FFmpegWrapper.OpenOutput(ffmpeg, arguments);

    loadedBitDepth = bitDepth;
    OpenedForReading = true;
}
/// <summary>
/// Runs the second pass of the two-pass high-quality encode and collects FFmpeg's
/// output and arguments for diagnostics.
/// </summary>
private void RunSecondPass()
{
    var inputFile = new InputFileOptions();
    inputFile.FilePath = Path.GetFullPath(SourceFilePath);

    var outputFile = new OutputFileOptions();
    if (ShouldDeinterlaceVideo)
        outputFile.Add(new DeinterlaceVideoOption());

    // Only pass explicit dimensions when a video width was configured; otherwise let the preset decide.
    if (_VideoWidth.IsSet && _VideoWidth.Value != 0)
        GeckonPresets.ApplyHighQualitySecondPassPreset(outputFile, VideoBitrate, AudioBitrate, VideoWidth, VideoHeight);
    else
        GeckonPresets.ApplyHighQualitySecondPassPreset(outputFile, VideoBitrate, AudioBitrate);

    outputFile.FilePath = Path.GetFullPath(DestinationFilePath);
    outputFile.Add(new ThreadsOption(ThreadsOption.AUTOMATICALLY_SELECT_NUMBER_OF_THREADS));

    using (var wrapper = new FFmpegWrapper())
    {
        wrapper.FFmpegExecutablePath = Path.GetFullPath(FFmpegFilePath);
        wrapper.AddInputFileOptions(inputFile);
        wrapper.OutputFileOptions = outputFile;
        wrapper.WorkingDirectory = TemporaryDirectoryPath;

        // Collect all FFmpeg output (unparsed lines, warnings, errors) into _FFmpegOutput.
        wrapper.ProgressDataChanged += SecondPassProgressDataChanged;
        wrapper.UnparsedOutputOccurred += (sender, args) => _FFmpegOutput.AppendLine(args.EventObject);
        wrapper.WarningOccurred += (sender, args) => _FFmpegOutput.AppendLine(args.EventObject.Warning);
        wrapper.ErrorOccurred += (sender, args) => _FFmpegOutput.AppendLine(args.EventObject.ToString());

        wrapper.Execute();

        // Keep the arguments used for this pass for later inspection.
        FFmpegArguments += "\n" + wrapper.FFmpegArguments;
        wrapper.ProgressDataChanged -= SecondPassProgressDataChanged;
    }
}
/// <summary>
/// Probes the source file and, if it contains a video stream, adjusts the unset
/// dimension (width or height) so the configured size keeps the source aspect ratio.
/// </summary>
private void AdjustVideoSize()
{
    var inputFile = new InputFileOptions();
    inputFile.FilePath = Path.GetFullPath(SourceFilePath);

    using (var wrapper = new FFmpegWrapper())
    {
        wrapper.FFmpegExecutablePath = Path.GetFullPath(FFmpegFilePath);
        wrapper.AddInputFileOptions(inputFile);
        wrapper.WorkingDirectory = TemporaryDirectoryPath;

        // Collect all FFmpeg output (unparsed lines, warnings, errors) into _FFmpegOutput.
        wrapper.UnparsedOutputOccurred += (sender, args) => _FFmpegOutput.AppendLine(args.EventObject);
        wrapper.WarningOccurred += (sender, args) => _FFmpegOutput.AppendLine(args.EventObject.Warning);
        wrapper.ErrorOccurred += (sender, args) => _FFmpegOutput.AppendLine(args.EventObject.ToString());

        wrapper.Execute();

        if (wrapper.MediaData.Inputs.Count < 1)
            return;

        StreamData stream;
        try
        {
            stream = wrapper.MediaData.Inputs[0].Streams.First(s => s.Value.Type == StreamType.Video).Value;
        }
        catch (InvalidOperationException)
        {
            return; //No video stream found, so aspect ratio cannot be determined
        }

        var sizeData = stream.ParsedData.OfType<StreamSizeData>();
        if (sizeData.Count() == 0)
            return;

        var firstSizeData = sizeData.First();
        var aspect = firstSizeData.DisplayAspectRatio ?? firstSizeData.Size.Width / (double)firstSizeData.Size.Height; //Use DAR if available, else use video resolution

        if (VideoWidth / aspect < VideoHeight)
            _VideoHeight.Value = (uint)(Math.Round(VideoWidth / aspect / 2) * 2); //Make sure size is dividable by two
        else
            _VideoWidth.Value = (uint)(Math.Round(VideoHeight * aspect / 2) * 2); //Make sure size is dividable by two
    }
}
/// <summary>
/// Entry point: loads and validates the configuration, checks FFmpeg availability,
/// then runs the compressor until finished or cancelled (Ctrl+C).
/// </summary>
static async Task Main()
{
    if (!File.Exists(CONFIG_PATH))
    {
        // First run: generate a template config and exit so the user can fill it in.
        Configuration.Create(CONFIG_PATH);
        Console.WriteLine("No configuration file found. Created new one.");
        return;
    }

    try
    {
        var config = Configuration.Load(CONFIG_PATH);

        // VALIDATE CONFIGURATION
        if (string.IsNullOrEmpty(config.InputDirectory) || !Directory.Exists(config.InputDirectory)) { throw new Exception("Invalid input directory!"); }
        if (string.IsNullOrEmpty(config.OutputDirectory) || !Directory.Exists(config.OutputDirectory)) { throw new Exception("Invalid output directory!"); }
        if (config.ImageExtensions == null || config.VideoExtensions == null || config.AudioExtensions == null) { throw new Exception("Extensions can not be null!"); }
        if (config.VideoCompression == null || config.ImageCompression == null || config.AudioCompression == null) { throw new Exception("Compression configurations can not be null!"); }
        if (config.VideoCompression.ParametersPriorityList == null || config.VideoCompression.ParametersPriorityList.Length == 0) { throw new Exception("Video parameters priority list can not be empty!"); }
        if (config.ImageCompression.ParametersPriorityList == null || config.ImageCompression.ParametersPriorityList.Length == 0) { throw new Exception("Image parameters priority list can not be empty!"); }
        if (config.AudioCompression.ParametersPriorityList == null || config.AudioCompression.ParametersPriorityList.Length == 0) { throw new Exception("Audio parameters priority list can not be empty!"); }
        if (config.VideoCompression.MaxConcurrentWorkers <= 0) { throw new Exception("Video max. concurrent workers can not be 0 or less!"); }
        if (config.ImageCompression.MaxConcurrentWorkers <= 0) { throw new Exception("Image max. concurrent workers can not be 0 or less!"); }
        // BUGFIX: this message previously said "Image max. ..." (copy-paste error) for the audio check.
        if (config.AudioCompression.MaxConcurrentWorkers <= 0) { throw new Exception("Audio max. concurrent workers can not be 0 or less!"); }

        try
        {
            // CHECK IF FFMPEG PRESENT
            var encoders = FFmpegWrapper.GetEncoders();
        }
        catch (Exception e)
        {
            throw new Exception("Failed to use FFmpeg! Make sure it's accessible! " + e.Message);
        }

        // HANDLE CANCEL EVENT
        var csc = new CancellationTokenSource();
        Console.CancelKeyPress += (a, b) => { csc.Cancel(); b.Cancel = true; };

        // START WORK
        var sw = Stopwatch.StartNew();
        var c = new Compressor(config, csc.Token);
        await c.Start();
        sw.Stop();

        Console.WriteLine();
        WriteInfo($"Done ({sw.Elapsed.TotalMilliseconds.ToTimeString()})");
    }
    catch (JsonException)
    {
        WriteError("ERROR: Failed to parse configuration file!");
    }
    catch (Exception ex)
    {
        WriteError("ERROR: " + ex.Message);
    }
}
/// <summary>
/// Base constructor: stores the processor configuration and the FFmpeg wrapper used by derived processors.
/// </summary>
/// <param name="config">Processor configuration</param>
/// <param name="fFmpegWrapper">FFmpeg wrapper instance shared with this processor</param>
protected FileProcessorBase(IProcessorConfig config, FFmpegWrapper fFmpegWrapper)
{
    _config = config;
    _fFmpegWrapper = fFmpegWrapper;
}
/// <summary>
/// Probes the source file and, if it contains a video stream, adjusts the unset
/// dimension (width or height) so the configured size keeps the source aspect ratio.
/// </summary>
private void AdjustVideoSize()
{
    var inputFile = new InputFileOptions();
    inputFile.FilePath = Path.GetFullPath(SourceFilePath);

    using (var wrapper = new FFmpegWrapper())
    {
        wrapper.FFmpegExecutablePath = Path.GetFullPath(FFmpegFilePath);
        wrapper.AddInputFileOptions(inputFile);
        wrapper.Execute();

        if (wrapper.MediaData.Inputs.Count < 1)
            return;

        StreamData stream;
        try
        {
            stream = wrapper.MediaData.Inputs[0].Streams.First(s => s.Value.Type == StreamType.Video).Value;
        }
        catch (InvalidOperationException)
        {
            return; //No video stream found, so aspect ratio cannot be determined
        }

        var sizeData = stream.ParsedData.OfType<StreamSizeData>();
        if (sizeData.Count() == 0)
            return;

        var firstSizeData = sizeData.First();
        var aspect = firstSizeData.DisplayAspectRatio ?? firstSizeData.Size.Width / (double)firstSizeData.Size.Height; //Use DAR if available, else use video resolution

        if (Width / aspect < Height)
            _Height.Value = (uint)(Math.Round(Width / aspect / 2) * 2); //Make sure size is dividable by two
        else
            _Width.Value = (uint)(Math.Round(Height * aspect / 2) * 2); //Make sure size is dividable by two
    }
}
/// <summary>
/// Cuts a single frame from the source video at the configured position and writes
/// it to the destination file.
/// </summary>
private void CutFrame()
{
    var inputFile = new InputFileOptions();
    inputFile.FilePath = Path.GetFullPath(SourceFilePath);

    var outputFile = new OutputFileOptions();

    // Only pass explicit dimensions when a width was configured; otherwise let the preset decide.
    if (_Width.IsSet && _Width.Value != 0)
        GeckonPresets.ApplyCutVideoFramePreset(inputFile, outputFile, TimeSpan.Parse(_VideoPosition.Value), Width, Height);
    else
        GeckonPresets.ApplyCutVideoFramePreset(inputFile, outputFile, TimeSpan.Parse(_VideoPosition.Value));

    outputFile.FilePath = Path.GetFullPath(DestinationFilePath);

    using (var wrapper = new FFmpegWrapper())
    {
        wrapper.FFmpegExecutablePath = Path.GetFullPath(FFmpegFilePath);
        wrapper.AddInputFileOptions(inputFile);
        wrapper.OutputFileOptions = outputFile;
        wrapper.ProgressDataChanged += ProgressDataChanged;
        wrapper.RawOutputOccurred += (sender, e) => Console.WriteLine(e.EventObject);
        wrapper.Execute();
        wrapper.ProgressDataChanged -= ProgressDataChanged;
    }
}
/// <summary>
/// Strips metadata from the prepared file using FFmpeg.
/// </summary>
/// <param name="preparedFile">File to strip metadata from</param>
private void StripMetaDataWithFFmpeg(FileInfo preparedFile)
{
    var ffmpeg = new FFmpegWrapper(configuration.InactiveProcessTimeout, configuration.FFmpegLocation);
    // NOTE(review): "Try" naming suggests this is best-effort and does not throw on failure — confirm.
    ffmpeg.TryStripMetadata(preparedFile);
}
/// <summary>
/// Load audio metadata into memory.
/// </summary>
/// <param name="ignoreStreamErrors">When true, errors while interpreting the audio stream data are swallowed instead of thrown.</param>
public async Task LoadMetadata(bool ignoreStreamErrors = false)
{
    // BUGFIX: message previously said "Video metadata ..." (copy-paste from the video loader).
    if (MetadataLoaded) { throw new InvalidOperationException("Audio metadata is already loaded!"); }

    // Query ffprobe for container + stream information as compact JSON on stdout.
    var r = FFmpegWrapper.OpenOutput(ffprobe, $"-i \"{Filename}\" -v quiet -print_format json=c=1 -show_format -show_streams");

    try
    {
        var metadata = await JsonSerializer.DeserializeAsync<AudioMetadata>(r);

        try
        {
            // Pick the first audio stream and lift its commonly-used fields onto the metadata object.
            var audioStream = metadata.Streams.Where(x => x.CodecType.ToLower().Trim() == "audio").FirstOrDefault();
            if (audioStream != null)
            {
                metadata.Channels = audioStream.Channels;
                metadata.Codec = audioStream.CodecName;
                metadata.CodecLongName = audioStream.CodecLongName;
                metadata.SampleFormat = audioStream.SampleFmt;

                // -1 marks "unknown" when ffprobe did not report a value.
                metadata.SampleRate = audioStream.SampleRate == null ? -1 :
                    int.Parse(audioStream.SampleRate);

                // BUGFIX: parse with InvariantCulture — ffprobe always prints '.' as the
                // decimal separator, so the previous culture-dependent parse broke under
                // comma-decimal locales. The video metadata loader already does this.
                metadata.Duration = audioStream.Duration == null ?
                    double.Parse(metadata.Format.Duration ?? "-1", System.Globalization.CultureInfo.InvariantCulture) :
                    double.Parse(audioStream.Duration, System.Globalization.CultureInfo.InvariantCulture);

                metadata.BitRate = audioStream.BitRate == null ? -1 :
                    int.Parse(audioStream.BitRate);

                metadata.BitDepth = audioStream.BitsPerSample;
                metadata.PredictedSampleCount = (int)Math.Round(metadata.Duration * metadata.SampleRate);

                if (metadata.BitDepth == 0)
                {
                    // try to parse it from format
                    if (metadata.SampleFormat.Contains("64")) { metadata.BitDepth = 64; }
                    else if (metadata.SampleFormat.Contains("32")) { metadata.BitDepth = 32; }
                    else if (metadata.SampleFormat.Contains("24")) { metadata.BitDepth = 24; }
                    else if (metadata.SampleFormat.Contains("16")) { metadata.BitDepth = 16; }
                    else if (metadata.SampleFormat.Contains("8")) { metadata.BitDepth = 8; }
                }
            }
        }
        catch (Exception ex)
        {
            // failed to interpret audio stream settings
            if (!ignoreStreamErrors) { throw new InvalidDataException("Failed to parse audio stream data! " + ex.Message); }
        }

        MetadataLoaded = true;
        Metadata = metadata;
    }
    catch (JsonException ex)
    {
        throw new InvalidOperationException("Failed to interpret ffprobe audio metadata output! " + ex.Message);
    }
}
/// <summary>
/// Creates a file processor that converts files using the given FFmpeg wrapper.
/// </summary>
/// <param name="config">Processor configuration</param>
/// <param name="fFmpegWrapper">FFmpeg wrapper instance shared with this processor</param>
public ConvertFileProcessor(IProcessorConfig config, FFmpegWrapper fFmpegWrapper) : base(config, fFmpegWrapper)
{
}