public void Audio_ToAAC_Args_Pipe()
{
    // Two tiny PCM samples are enough to exercise the raw-audio pipe input path.
    using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");

    var samples = new List<IAudioSample>
    {
        new PcmAudioSampleWrapper(new byte[] { 0, 0 }),
        new PcmAudioSampleWrapper(new byte[] { 0, 0 }),
    };

    var source = new RawAudioPipeSource(samples)
    {
        Channels = 2,
        Format = "s8",
        SampleRate = 8000,
    };

    var result = FFMpegArguments
        .FromPipeInput(source)
        .OutputToFile(outputFile, false, options => options.WithAudioCodec(AudioCodec.Aac))
        .ProcessSynchronously();

    Assert.IsTrue(result);
}
/// <summary>
/// Transcodes <paramref name="inputStream"/> to 8 kHz 16-bit PCM WAV.
/// A stream whose MIME type already ends in "/wav" is returned unchanged.
/// </summary>
/// <param name="inputStream">Source audio stream in any ffmpeg-readable format.</param>
/// <param name="mimeType">MIME type of the input, e.g. "audio/wav".</param>
/// <returns>
/// A readable stream positioned at 0 containing WAV data, or
/// <paramref name="inputStream"/> itself when it is already WAV.
/// </returns>
public static Stream ConvertToWav(Stream inputStream, string mimeType)
{
    // MIME types are ASCII and case-insensitive; compare ordinally, not culture-aware.
    if (mimeType.EndsWith("/wav", StringComparison.OrdinalIgnoreCase))
    {
        return inputStream;
    }

    var outputStream = new MemoryStream();
    var outputPipe = new StreamPipeSink(outputStream) { Format = "wav" }; // pcm_s16le or pcm_s32le

    // Use the synchronous API instead of .Wait() on the async one:
    // blocking on a Task risks deadlocks and wraps failures in AggregateException.
    FFMpegArguments
        .FromPipeInput(new StreamPipeSource(inputStream))
        .OutputToPipe(outputPipe, options => options
            .WithCustomArgument("-ar 8000 -c:a pcm_s16le")
            .ForceFormat("wav"))
        .ProcessSynchronously();

    // Rewind so callers read the converted audio from the start
    // (the original returned the stream positioned at its end).
    outputStream.Position = 0;
    return outputStream;
}
public async Task Audio_FromRaw()
{
    // Feed headerless s16le PCM through a pipe and encode it to mp3 in memory.
    await using var rawFile = File.Open(TestResources.RawAudio, FileMode.Open);
    var sink = new MemoryStream();

    await FFMpegArguments
        .FromPipeInput(new StreamPipeSource(rawFile), opts => opts.ForceFormat("s16le"))
        .OutputToPipe(new StreamPipeSink(sink), opts => opts.ForceFormat("mp3"))
        .ProcessAsynchronously();
}
/// <summary>
/// Converts the local webm test video into <paramref name="type"/> via a stream pipe,
/// applying <paramref name="arguments"/>, then verifies duration and (scaled) dimensions.
/// </summary>
/// <param name="type">Target container format.</param>
/// <param name="arguments">Extra ffmpeg arguments; a <see cref="ScaleArgument"/> changes the expected size.</param>
private void ConvertFromStreamPipe(ContainerFormat type, params IArgument[] arguments)
{
    var output = Input.OutputLocation(type);
    try
    {
        var input = FFProbe.Analyse(VideoLibrary.LocalVideoWebm.FullName);
        using var inputStream = File.OpenRead(input.Path);
        var processor = FFMpegArguments
            .FromPipeInput(new StreamPipeSource(inputStream))
            .OutputToFile(output, false, opt =>
            {
                foreach (var arg in arguments)
                {
                    opt.WithArgument(arg);
                }
            });

        var scaling = arguments.OfType<ScaleArgument>().FirstOrDefault();
        var success = processor.ProcessSynchronously();

        // Assert success/existence BEFORE probing: a failed conversion previously
        // surfaced as an opaque FFProbe error instead of a clear assertion failure.
        Assert.IsTrue(success);
        Assert.IsTrue(File.Exists(output));

        var outputVideo = FFProbe.Analyse(output);

        // Durations should agree to within one source frame.
        Assert.IsTrue(Math.Abs((outputVideo.Duration - input.Duration).TotalMilliseconds) < 1000.0 / input.PrimaryVideoStream.FrameRate);

        if (scaling?.Size == null)
        {
            // No scaling requested: dimensions must be preserved.
            Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
            Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
        }
        else
        {
            // -1 means "derive from aspect ratio", so only assert explicit dimensions.
            if (scaling.Size.Value.Width != -1)
            {
                Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, scaling.Size.Value.Width);
            }

            if (scaling.Size.Value.Height != -1)
            {
                Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, scaling.Size.Value.Height);
            }

            Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
            Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
        }
    }
    finally
    {
        if (File.Exists(output))
        {
            File.Delete(output);
        }
    }
}
public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
{
    // 128 generated 256x256 bitmaps are piped in as raw video frames.
    using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
    var frameSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));

    var result = FFMpegArguments
        .FromPipeInput(frameSource)
        .OutputToFile(outputFile, false, options => options.WithVideoCodec(VideoCodec.LibX264))
        .ProcessSynchronously();

    Assert.IsTrue(result);
}
public void Video_ToMP4_Args_StreamPipe()
{
    // Pipe a webm file stream in and encode it to H.264 mp4.
    using var input = File.OpenRead(TestResources.WebmVideo);
    using var output = new TemporaryFile($"out{VideoType.Mp4.Extension}");

    var result = FFMpegArguments
        .FromPipeInput(new StreamPipeSource(input))
        .OutputToFile(output, false, options => options.WithVideoCodec(VideoCodec.LibX264))
        .ProcessSynchronously();

    Assert.IsTrue(result);
}
/// <summary>
/// Encodes generated bitmaps into <paramref name="type"/> via a raw video pipe,
/// applying <paramref name="arguments"/>, then verifies the output dimensions.
/// </summary>
/// <param name="type">Target container format.</param>
/// <param name="fmt">Pixel format of the generated source bitmaps.</param>
/// <param name="arguments">Extra ffmpeg arguments; a <see cref="ScaleArgument"/> changes the expected size.</param>
public void ConvertFromPipe(ContainerFormat type, System.Drawing.Imaging.PixelFormat fmt, params IArgument[] arguments)
{
    var output = Input.OutputLocation(type);
    try
    {
        var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, fmt, 256, 256));
        var processor = FFMpegArguments.FromPipeInput(videoFramesSource).OutputToFile(output, false, opt =>
        {
            foreach (var arg in arguments)
            {
                opt.WithArgument(arg);
            }
        });

        var scaling = arguments.OfType<ScaleArgument>().FirstOrDefault();

        // Capture and assert the result (the original discarded it), matching the
        // stream-pipe variant of this helper, and check before probing the file.
        var success = processor.ProcessSynchronously();
        Assert.IsTrue(success);
        Assert.IsTrue(File.Exists(output));

        var outputVideo = FFProbe.Analyse(output);

        if (scaling?.Size == null)
        {
            // No scaling requested: dimensions must match the generated frames.
            Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, videoFramesSource.Width);
            Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, videoFramesSource.Height);
        }
        else
        {
            // -1 means "derive from aspect ratio", so only assert explicit dimensions.
            if (scaling.Size.Value.Width != -1)
            {
                Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, scaling.Size.Value.Width);
            }

            if (scaling.Size.Value.Height != -1)
            {
                Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, scaling.Size.Value.Height);
            }

            Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Width, videoFramesSource.Width);
            Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Height, videoFramesSource.Height);
        }
    }
    finally
    {
        if (File.Exists(output))
        {
            File.Delete(output);
        }
    }
}
/// <summary>
/// A sample rate of 0 must make processing fail with an <see cref="FFMpegException"/>.
/// </summary>
public void Audio_ToAAC_Args_Pipe_InvalidSampleRate()
{
    using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
    var audioSamplesSource = new RawAudioPipeSource(new List<IAudioSample>())
    {
        SampleRate = 0, // invalid on purpose
    };

    // ThrowsException already asserts; the original bound the result to an
    // unused local `ex`, which only produced a compiler warning.
    Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
        .FromPipeInput(audioSamplesSource)
        .OutputToFile(outputFile, false, opt => opt
            .WithAudioCodec(AudioCodec.Aac))
        .ProcessSynchronously());
}
/// <summary>
/// Streams a webm file through ffmpeg into an in-memory mpegts stream and probes the result.
/// </summary>
public void Video_StreamFile_OutputToMemoryStream()
{
    var output = new MemoryStream();

    // Dispose the input file stream deterministically — the original passed
    // File.OpenRead(...) inline and leaked the handle until finalization.
    using var input = File.OpenRead(TestResources.WebmVideo);

    FFMpegArguments
        .FromPipeInput(new StreamPipeSource(input), options => options.ForceFormat("webm"))
        .OutputToPipe(new StreamPipeSink(output), options => options
            .ForceFormat("mpegts"))
        .ProcessSynchronously();

    // Rewind before probing the in-memory result.
    output.Position = 0;
    var result = FFProbe.Analyse(output);
    Console.WriteLine(result.Duration);
}
/// <summary>
/// Transcodes <paramref name="inputStream"/> to mp3 and returns the result as a new stream.
/// </summary>
/// <param name="inputStream">Source audio stream in any ffmpeg-readable format.</param>
/// <returns>A <see cref="MemoryStream"/> of mp3 data, positioned at 0.</returns>
private async Task<Stream> GenerateSoundStream(Stream inputStream)
{
    // BUG FIX: the original left returnStream null, so CopyToAsync threw.
    // The result must outlive this method, so it is intentionally not disposed here.
    var returnStream = new MemoryStream();

    using (Stream outputStream = new MemoryStream())
    {
        await FFMpegArguments
            .FromPipeInput(new StreamPipeSource(inputStream))
            .OutputToPipe(new StreamPipeSink(outputStream), options => options
                .WithAudioCodec(AudioCodec.LibMp3Lame)
                .ForceFormat("mp3"))
            .ProcessAsynchronously();

        // BUG FIX: rewind before copying — the stream was positioned at its end
        // after the transcode, so the copy would have transferred zero bytes.
        outputStream.Position = 0;
        await outputStream.CopyToAsync(returnStream);
    }

    // Hand the caller a stream ready to be read from the start.
    returnStream.Position = 0;
    return returnStream;
}
/// <summary>
/// Renders <paramref name="frameCount"/> frames of <paramref name="element"/> (drawn via
/// <paramref name="drawChartAction"/>) and encodes them into an H.264 mp4 at
/// <paramref name="outputFilePath"/>, overwriting any existing file.
/// </summary>
/// <param name="outputFilePath">The outputFilePath<see cref="string"/>.</param>
/// <param name="element">The element<see cref="FrameworkElement"/>.</param>
/// <param name="drawChartAction">The drawChartAction<see cref="Action{int}"/>.</param>
/// <param name="frameCount">The frameCount<see cref="int"/>.</param>
/// <param name="token">The token<see cref="CancellationToken"/>.</param>
public static void Record(string outputFilePath, FrameworkElement element, Action<int> drawChartAction, int frameCount, CancellationToken token)
{
    // Remove any stale output so the encode starts fresh.
    if (File.Exists(outputFilePath))
    {
        File.Delete(outputFilePath);
    }

    var codecArgs = new VideoCodecArgument(VideoCodec.LibX264);
    var videoFramesSource = new RawVideoPipeSource(CreateChartBitmaps(element, drawChartAction, frameCount, token));

    // (Removed an unused `videoType` local and dead commented-out code.)
    var arguments = FFMpegArguments.FromPipeInput(videoFramesSource);
    var processor = arguments.OutputToFile(outputFilePath, true, (option) => option.WithArgument(codecArgs));
    processor.ProcessSynchronously();
}
// [DataRow(PixelFormat.Format48bppRgb)]
public void RawVideoPipeSource_Ogv_Scale(System.Drawing.Imaging.PixelFormat pixelFormat)
{
    // Pipe generated 256x256 bitmaps in, scale to the ED preset, encode as Theora/ogv.
    using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
    var frameSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));

    FFMpegArguments
        .FromPipeInput(frameSource)
        .OutputToFile(outputFile, false, options => options
            .WithVideoFilters(filters => filters.Scale(VideoSize.Ed))
            .WithVideoCodec(VideoCodec.LibTheora))
        .ProcessSynchronously();

    // The encoded width must match the requested scale preset.
    var analysis = FFProbe.Analyse(outputFile);
    Assert.AreEqual((int)VideoSize.Ed, analysis.PrimaryVideoStream!.Width);
}
public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
{
    // Force the mpegts container for piped-in generated frames and verify the format name.
    using var output = new TemporaryFile($"out{VideoType.Ts.Extension}");
    var frameSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));

    var result = await FFMpegArguments
        .FromPipeInput(frameSource)
        .OutputToFile(output, false, options => options.ForceFormat(VideoType.Ts))
        .ProcessAsynchronously();
    Assert.IsTrue(result);

    var analysis = await FFProbe.AnalyseAsync(output);
    Assert.AreEqual(VideoType.Ts.Name, analysis.Format.FormatName);
}
public void Video_ToMP4_Args_Pipe_DifferentImageSizes()
{
    // Frames of mismatched dimensions must be rejected by the raw video pipe.
    using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");

    var mismatchedFrames = new List<IVideoFrame>
    {
        BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
        BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
    };
    var frameSource = new RawVideoPipeSource(mismatchedFrames);

    var thrown = Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
        .FromPipeInput(frameSource)
        .OutputToFile(outputFile, false, options => options.WithVideoCodec(VideoCodec.LibX264))
        .ProcessSynchronously());

    // The root cause should be the stream-format mismatch, not a generic failure.
    Assert.IsInstanceOfType(thrown.GetBaseException(), typeof(FFMpegStreamFormatException));
}
/// <summary>
/// Transcodes generated bitmaps to vp9/webm entirely in memory and verifies the dimensions.
/// </summary>
public void Video_TranscodeInMemory()
{
    using var resStream = new MemoryStream();

    // Renamed from the original's swapped `reader`/`writer`: the sink receives
    // ffmpeg's output and the frame source feeds its input.
    var sink = new StreamPipeSink(resStream);
    var frameSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 128, 128));

    FFMpegArguments
        .FromPipeInput(frameSource)
        .OutputToPipe(sink, opt => opt
            .WithVideoCodec("vp9")
            .ForceFormat("webm"))
        .ProcessSynchronously();

    resStream.Position = 0;
    var vi = FFProbe.Analyse(resStream);

    // MSTest's Assert.AreEqual takes (expected, actual); the original had them
    // reversed, which produces misleading failure messages.
    Assert.AreEqual(128, vi.PrimaryVideoStream.Width);
    Assert.AreEqual(128, vi.PrimaryVideoStream.Height);
}
/// <summary>
/// Renders one full rotation of <paramref name="image"/> as a palette-optimised GIF.
/// </summary>
/// <param name="image">Image to spin.</param>
/// <param name="rotationalPeriod">Seconds per full rotation.</param>
/// <param name="framerate">Frames per second; 100/framerate must be a whole number of GIF ticks.</param>
/// <param name="filename">Output GIF path.</param>
/// <param name="overwrite">Unused by the current implementation (output is always overwritten) — TODO confirm intent.</param>
static void MakeSpinningGif(MagickImage image, double rotationalPeriod, double framerate, string filename, bool overwrite = false)
{
    // GIF frame delays are stored in 1/100 s ticks, so 100/framerate must be integral.
    double ticksPerFrame = 100 / framerate;

    // BUG FIX: the original tested `ticksPerFrame % 0`, which is NaN for doubles,
    // so every comparison was false and invalid frame rates were never rejected.
    // `% 1` yields the fractional part, which is what this guard intends to check.
    if (ticksPerFrame < 0 || ticksPerFrame % 1 > double.Epsilon * 100)
    {
        throw new ArgumentException(
            $"100/frameRate must be a positive integer according to the gif standard, {ticksPerFrame} is not a positive integer");
    }

    var spinningFramesSource = new RawVideoPipeSource(MakeSpinningFrames(image, rotationalPeriod, framerate))
    {
        FrameRate = framerate
    };

    Console.WriteLine(FFMpegArguments
        .FromPipeInput(spinningFramesSource)
        .OutputToFile(filename, true, options => options
            // Generate and apply a per-GIF palette for better colour fidelity.
            .WithCustomArgument($"-filter_complex [0:v]scale=-2:{image.Height}:flags=bicubic,split[a][b];[a]palettegen[p];[b][p]paletteuse")
        )
        .ProcessSynchronously());
}
/// <summary>
/// Encodes the given frames to <paramref name="outFile"/>, applying
/// <paramref name="inputArguments"/> to the pipe input.
/// </summary>
public static void Encode(string outFile, IEnumerable<IVideoFrame> frames, params IArgument[] inputArguments)
{
    using var enumerator = frames.GetEnumerator();
    var frameSource = new RawVideoPipeSource(enumerator);

    // Apply every caller-supplied argument to the input side of the pipeline.
    void ApplyInputArgs(FFMpegArgumentOptions ops)
    {
        foreach (var argument in inputArguments)
        {
            ops.WithArgument(argument);
        }
    }

    FFMpegArguments
        .FromPipeInput(frameSource, ApplyInputArgs)
        .OutputToFile(outFile)
        .ProcessSynchronously();
}
/// <summary>
/// A task for recording a video
/// </summary>
/// <param name="graph">Optional graph to record</param>
// NOTE(review): `async void` means callers cannot await completion and exceptions
// escaping the awaits are unobservable — consider `async Task` if no event-handler
// or interface contract forces void. Left unchanged here.
async public void Go(PlottedGraph? graph)
{
    // Bail out early if the configured ffmpeg binary does not exist on disk.
    if (!File.Exists(GlobalConfig.GetSettingPath(CONSTANTS.PathKey.FFmpegPath)))
    {
        StopRecording(processQueue: false, error: $"Unable to start recording: Path to ffmpeg.exe not configured");
        Loaded = false;
        Initialised = false;
        return;
    }

    try
    {
        // Point FFMpegCore at the directory containing the configured ffmpeg binary.
        string? dirname = Path.GetDirectoryName(GlobalConfig.GetSettingPath(CONSTANTS.PathKey.FFmpegPath));
        if (dirname is not null)
        {
            GlobalFFOptions.Configure(new FFOptions { BinaryFolder = dirname });
        }
        else
        {
            StopRecording(processQueue: false, error: $"Unable to start recording: FFMpeg not found");
            Loaded = false;
            return;
        }
    }
    catch (Exception e)
    {
        // Configuration failures abort the recording without touching Initialised.
        StopRecording(processQueue: false, error: $"Unable to start recording: Exception '{e.Message}' configuring recorder");
        Loaded = false;
        return;
    }

    Initialised = true;
    CurrentRecordingFile = GenerateVideoFilepath(graph);
    _recordedFrameCount = 0;
    Logging.RecordLogEvent("Recording video to " + CurrentRecordingFile);

    // GetNextFrame() supplies frames lazily; recording runs until it stops yielding.
    var videoFramesSource = new RawVideoPipeSource(GetNextFrame());
    try
    {
        //https://trac.ffmpeg.org/wiki/Encode/H.264
        // CRF is derived from the quality setting: higher quality -> lower (better) CRF.
        await FFMpegArguments
            .FromPipeInput(videoFramesSource)
            .OutputToFile(CurrentRecordingFile, false, opt => opt
                .WithFramerate(GlobalConfig.Settings.Media.VideoCodec_FPS)
                .WithConstantRateFactor(28 - GlobalConfig.Settings.Media.VideoCodec_Quality)
                .WithSpeedPreset(GetVideoSpeed())
                .WithVideoCodec(VideoCodec.LibX264)
            )
            .ProcessAsynchronously();
    }
    catch (Exception e)
    {
        // Best-effort: encoding errors are logged but do not crash the caller.
        Logging.RecordException("FFMpeg Record Error: " + e.Message, e);
    }

    // Tear down recording state regardless of encode success.
    Initialised = false;
    StopRecording();
    CapturePaused = false;
    _bmpQueue.Clear();
    Logging.RecordLogEvent($"Recorded {_recordedFrameCount} x {CurrentVideoWidth}*{CurrentVideoHeight} frames of video to " + CurrentRecordingFile);
    CurrentRecordingFile = "";
    _capturedContent = CaptureContent.Invalid;
}