public static async Task<TimeSpan?> GetAccumulatedTimeAsync(IEnumerable<string> filePaths)
{
    var totalTime = TimeSpan.Zero;
    foreach (var file in filePaths)
    {
        try
        {
            // Probe each file and add up the container durations.
            var info = await FFProbe.AnalyseAsync(file);
            if (info == null)
            {
                return null;
            }

            totalTime += info.Duration;
        }
        catch
        {
            // Any unreadable file invalidates the total.
            return null;
        }
    }

    return totalTime;
}
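A minimal usage sketch for the helper above; the file names are hypothetical placeholders:

// Sum the durations of several clips; null means at least one file could not be analysed.
var total = await GetAccumulatedTimeAsync(new[] { "intro.mp4", "main.mp4", "outro.mp4" });
Console.WriteLine(total?.ToString() ?? "analysis failed");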
public async Task Probe_Success_FromStream_Async()
{
    await using var stream = File.OpenRead(TestResources.WebmVideo);
    var info = await FFProbe.AnalyseAsync(stream);
    Assert.AreEqual(3, info.Duration.Seconds);
}
protected async Task CreateOutput(Action<double> progress, IReadOnlyList<Video> videos, string outputFile)
{
    // Render each individual video first, then build a single ffmpeg invocation with all of them as inputs.
    await Task.WhenAll(videos.Select(this.CreateOutput));

    FFMpegCore.FFMpegArguments ffmpeg = null;
    foreach (var video in videos)
    {
        if (ffmpeg == null)
        {
            ffmpeg = FFMpegCore.FFMpegArguments.FromFileInput(GetCombineVideoPath(video));
        }
        else
        {
            ffmpeg = ffmpeg.AddFileInput(GetCombineVideoPath(video));
        }
    }

    try
    {
        var duration = (await FFProbe.AnalyseAsync(GetCombineVideoPath(videos[0]))).Duration;
        await ffmpeg.OutputToFile(outputFile, true, options => this.ApplyOption(videos, options))
            .NotifyOnProgress(progress, duration)
            .ProcessAsynchronously();
    }
    catch (Exception e)
    {
        Console.Error.WriteLine(e.ToString());
    }
}
public async Task Video_Cancel_Async_With_Timeout()
{
    var outputFile = new TemporaryFile("out.mp4");

    var task = FFMpegArguments
        .FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args
            .WithCustomArgument("-re")
            .ForceFormat("lavfi"))
        .OutputToFile(outputFile, false, opt => opt
            .WithAudioCodec(AudioCodec.Aac)
            .WithVideoCodec(VideoCodec.LibX264)
            .WithSpeedPreset(Speed.VeryFast))
        .CancellableThrough(out var cancel, 10000)
        .ProcessAsynchronously(false);

    await Task.Delay(300);
    cancel();

    var result = await task;
    var outputInfo = await FFProbe.AnalyseAsync(outputFile);

    Assert.IsTrue(result);
    Assert.IsNotNull(outputInfo);
    Assert.AreEqual(320, outputInfo.PrimaryVideoStream!.Width);
    Assert.AreEqual(240, outputInfo.PrimaryVideoStream.Height);
    Assert.AreEqual("h264", outputInfo.PrimaryVideoStream.CodecName);
    Assert.AreEqual("aac", outputInfo.PrimaryAudioStream!.CodecName);
}
public async Task Probe_Success_FromStream_Async()
{
    await using var stream = File.OpenRead(VideoLibrary.LocalVideoWebm.FullName);
    var info = await FFProbe.AnalyseAsync(stream);
    Assert.AreEqual(3, info.Duration.Seconds);
}
public async Task Produce(IBoundedQueue<MutableByteImage> sourceQueue)
{
    try
    {
        if (!string.IsNullOrWhiteSpace(_arguments.PathToFfmpeg))
        {
            FFMpegOptions.Configure(new FFMpegOptions { RootDirectory = _arguments.PathToFfmpeg });
        }

        var result = await FFProbe.AnalyseAsync(_arguments.InputFile).ConfigureAwait(false);
        var width = result.PrimaryVideoStream.Width;
        var height = result.PrimaryVideoStream.Height;
        var bpp = Image.GetPixelFormatSize(System.Drawing.Imaging.PixelFormat.Format24bppRgb) / 8;
        var pixelsPerFrame = width * height;
        var frameSizeInBytes = pixelsPerFrame * bpp;

        _logger.WriteLine("Input from ffmpeg currently only supports rgb24-convertable input", Verbosity.Warning);

        var chunksQueue = BoundedQueueFactory.Get<byte[]>(4, "In-ChuQ");
        using var memoryStream = new ChunkedSimpleMemoryStream(frameSizeInBytes, chunksQueue); // new MemoryStream(frameSizeInBytes);
        var sink = new StreamPipeSink(memoryStream);

        var args = FFMpegArguments
            .FromFileInput(_arguments.InputFile)
            .OutputToPipe(sink, options => options
                .DisableChannel(FFMpegCore.Enums.Channel.Audio)
                .UsingMultithreading(true)
                .ForceFormat("rawvideo")
                .WithCustomArgument(_arguments.CustomArgs ?? string.Empty)
                .ForcePixelFormat("bgr24"))
            .NotifyOnProgress(
                percent => _logger.NotifyFillstate(Convert.ToInt32(percent), "InputVideoParsing"),
                TimeSpan.FromSeconds(1));

        var produceTask = args.ProcessAsynchronously(true).ContinueWith(_ =>
        {
            chunksQueue.CompleteAdding();
            sourceQueue.CompleteAdding();
        });
        var consumeTask = ParseInputStream(sourceQueue, chunksQueue, width, height, memoryStream)
            .ContinueWith(_ => _logger.WriteLine("finished reading", Verbosity.Info));

        await Task.WhenAll(produceTask, consumeTask);
        _logger.WriteLine("finished reading", Verbosity.Info);
    }
    catch (System.ComponentModel.Win32Exception)
    {
        _logger.WriteLine("Couldn't find ffmpeg", Verbosity.Error);
    }
    catch (Exception e)
    {
        _logger.LogException(e);
    }
}
public async Task Probe_Success_Subtitle_Async()
{
    var info = await FFProbe.AnalyseAsync(TestResources.SrtSubtitle);
    Assert.IsNotNull(info.PrimarySubtitleStream);
    Assert.AreEqual(1, info.SubtitleStreams.Count);
    Assert.AreEqual(0, info.AudioStreams.Count);
    Assert.AreEqual(0, info.VideoStreams.Count);
}
public async Task<double> GetMovieLength(string filename)
{
    var mediaAnalysis = await FFProbe.AnalyseAsync(filename);
    return mediaAnalysis.Duration.TotalSeconds;

    //var result = await MediaToolkitService.ExecuteAsync(new FfTaskGetMetadata(filename));
    //return result.Metadata.Streams.Select(e => double.Parse(e.Duration)).Max();
}
public async Task Probe_Success_Disposition_Async()
{
    var info = await FFProbe.AnalyseAsync(TestResources.Mp4Video);
    Assert.IsNotNull(info.PrimaryAudioStream);
    Assert.IsNotNull(info.PrimaryAudioStream.Disposition);
    Assert.AreEqual(true, info.PrimaryAudioStream.Disposition["default"]);
    Assert.AreEqual(false, info.PrimaryAudioStream.Disposition["forced"]);
}
public async Task Audio_FromStream_Duration()
{
    var fileAnalysis = await FFProbe.AnalyseAsync(TestResources.WebmVideo);
    await using var inputStream = File.OpenRead(TestResources.WebmVideo);
    var streamAnalysis = await FFProbe.AnalyseAsync(inputStream);
    Assert.IsTrue(fileAnalysis.Duration == streamAnalysis.Duration);
}
public async Task<string> Handle(CreateSnapshotCommand @event, CancellationToken cancellationToken = default)
{
    var tempFile = _tempFileService.GetFilename("generated", ".png");
    var analysis = await FFProbe.AnalyseAsync(@event.InputVideoFilePath);

    // Capture a frame at the requested percentage of the video's duration.
    await FFMpeg.SnapshotAsync(
        @event.InputVideoFilePath,
        tempFile,
        new Size(@event.Width, @event.Height),
        analysis.Duration * @event.SeekPercentage);

    return tempFile;
}
public override async Task Init(Size canvasSize)
{
    await base.Init(canvasSize);
    var result = await FFProbe.AnalyseAsync(_filePath);
    _videoSize = _overrideSize ?? new Size(result.PrimaryVideoStream.Width, result.PrimaryVideoStream.Height);
    _ = Task.Factory.StartNew(() => GenerateFrames(result), TaskCreationOptions.LongRunning);
}
public static async Task Convert(string input, string output, Action<TimeSpan, TimeSpan> onProgress)
{
    // Probe the input first so progress can be reported against the total duration.
    var info = await FFProbe.AnalyseAsync(input);

    await FFMpegArguments
        .FromFileInput(input)
        .OutputToFile(output, true, options => options
            .WithVideoCodec("libx265")
            .WithConstantRateFactor(28)
            .WithFastStart())
        .NotifyOnProgress(progress => onProgress(progress, info.Duration))
        .ProcessAsynchronously();
}
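A hedged usage sketch for the Convert method above, turning the two TimeSpan callback arguments into a printed percentage; the file names are placeholders:

// Report transcoding progress as a percentage of the probed total duration.
await Convert("input.mp4", "output.mp4", (position, total) =>
    Console.WriteLine($"{position.TotalSeconds / total.TotalSeconds:P0} transcoded"));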
public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
{
    using var output = new TemporaryFile($"out{VideoType.Ts.Extension}");
    var input = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));

    var success = await FFMpegArguments
        .FromPipeInput(input)
        .OutputToFile(output, false, opt => opt
            .ForceFormat(VideoType.Ts))
        .ProcessAsynchronously();
    Assert.IsTrue(success);

    var analysis = await FFProbe.AnalyseAsync(output);
    Assert.AreEqual(VideoType.Ts.Name, analysis.Format.FormatName);
}
public async Task<Stream> CreateVideoThumbnailAsync(IFormFile file, string thumbnailName)
{
    // Copy the upload to a temporary folder so ffprobe/ffmpeg can read it from disk.
    var tempDirectory = Path.Combine(Path.GetTempPath(), "tmp");
    Directory.CreateDirectory(tempDirectory);
    var filePath = Path.Combine(tempDirectory, file.FileName);

    using (var fileStream = new FileStream(filePath, FileMode.Create))
    {
        await file.CopyToAsync(fileStream);
    }

    var mediaFileAnalysis = await FFProbe.AnalyseAsync(filePath);
    var bitmap = await FFMpeg.SnapshotAsync(mediaFileAnalysis, new Size(), TimeSpan.FromMinutes(1));
    Directory.Delete(tempDirectory, true);

    return bitmap.ToStream(ImageFormat.Jpeg);
}
public async Task Probe_Async_Success()
{
    var info = await FFProbe.AnalyseAsync(TestResources.Mp4Video);
    Assert.AreEqual(3, info.Duration.Seconds);
}
public async Task Uri_Duration()
{
    var fileAnalysis = await FFProbe.AnalyseAsync(new Uri("https://github.com/rosenbjerg/FFMpegCore/raw/master/FFMpegCore.Test/Resources/input_3sec.webm"));
    Assert.IsNotNull(fileAnalysis);
}
public async Task Produce(ConcurrentQueue<MutableByteImage> queue)
{
    try
    {
        FFMpegOptions.Configure(new FFMpegOptions { RootDirectory = _arguments.PathToFfmpeg });

        var result = await FFProbe.AnalyseAsync(_arguments.InputFiles.First()).ConfigureAwait(false);
        var width = result.PrimaryVideoStream.Width;
        var height = result.PrimaryVideoStream.Height;
        var bpp = Image.GetPixelFormatSize(System.Drawing.Imaging.PixelFormat.Format24bppRgb) / 8;
        var pixelsPerFrame = width * height;
        var frameSizeInBytes = pixelsPerFrame * bpp;

        _logger.WriteLine("Input from ffmpeg currently only supports rgb24-convertable input", Verbosity.Warning);

        var chunksQueue = new ConcurrentQueue<byte[]>();
        using var memoryStream = new ChunkedMemoryStream(frameSizeInBytes, chunksQueue); // new MemoryStream(frameSizeInBytes);
        var sink = new StreamPipeSink(memoryStream);

        var args = FFMpegArguments
            .FromInputFiles(_arguments.InputFiles)
            .UsingMultithreading(true)
            .ForceFormat("rawvideo")
            .ForcePixelFormat("bgr24")
            .OutputToPipe(sink)
            .NotifyOnProgress(
                percent => _logger.NotifyFillstate(Convert.ToInt32(percent), "InputVideoParsing"),
                TimeSpan.FromSeconds(1));

        // ffmpeg signals completion through the cancellation token once it has finished writing raw frames.
        var produceTask = args.ProcessAsynchronously(true).ContinueWith(_ => parsingFinished.Cancel());
        var consumeTask = ParseInputStream(queue, chunksQueue, width, height, frameSizeInBytes, memoryStream);

        await Task.WhenAll(produceTask, consumeTask);
        // await Task.WhenAny(produceTask, consumeTask).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        _logger.LogException(e);
    }

    async Task ParseInputStream(ConcurrentQueue<MutableByteImage> queue, ConcurrentQueue<byte[]> chunksQueue, int width, int height, int frameSizeInBytes, ChunkedMemoryStream memoryStream)
    {
        var count = 0;
        while (true)
        //while ((memoryStream.HasUnwrittenData || chunksQueue.Count > 0) && !parsingFinished.IsCancellationRequested)
        {
            try
            {
                // Wait for the next raw frame chunk; stop once the producer has signalled completion.
                var dequeued = await chunksQueue.TryDequeueOrWait(parsingFinished);
                if (dequeued.cancelled)
                {
                    break;
                }

                _logger.NotifyFillstate(++count, "ParsedImages");
                _logger.NotifyFillstate(chunksQueue.Count, "ChunkedQueue");
                queue.Enqueue(_factory.FromBytes(width, height, dequeued.item));
            }
            catch (Exception e)
            {
                _logger.LogException(e);
            }

            await queue.WaitForBufferSpace(24);
        }

        Console.WriteLine(memoryStream.HasUnwrittenData);
    }
}
private static async Task CreateTemporarySnapshotFile(string filePath, string snapshotPath)
{
    var analysis = await FFProbe.AnalyseAsync(filePath);
    // Capture a 200x200 frame at 20% of the video's duration.
    await FFMpeg.SnapshotAsync(analysis, snapshotPath, new Size(200, 200), analysis.Duration * 0.2);
}
public async Task StartAsync()
{
    ulong guildId = 463430274823356417;
    ulong channelId = 463471372589465622;
    var guild = _client.GetGuild(guildId);
    var voiceChannel = (IVoiceChannel)guild.GetChannel(channelId);

    // var sessionIdTsc = new TaskCompletionSource<string>();
    // var socketVoiceServerTsc = new TaskCompletionSource<SocketVoiceServer>();
    // _client.UserVoiceStateUpdated += VoiceStateUpdatedAsync;
    // _client.VoiceServerUpdated += VoiceServerUpdatedAsync;
    // Task VoiceStateUpdatedAsync(SocketUser user, SocketVoiceState oldState, SocketVoiceState newState)
    // {
    //     if (user.Id != _client.CurrentUser.Id || string.IsNullOrWhiteSpace(newState.VoiceSessionId))
    //         return Task.CompletedTask;
    //     sessionIdTsc.TrySetResult(newState.VoiceSessionId);
    //     return Task.CompletedTask;
    // }
    // Task VoiceServerUpdatedAsync(SocketVoiceServer arg)
    // {
    //     if (arg.Guild.Id == guildId)
    //     {
    //         socketVoiceServerTsc.TrySetResult(arg);
    //     }
    //     return Task.CompletedTask;
    // }

    var youtubeClient = new YoutubeClient();
    var manifest = await youtubeClient.Videos.Streams.GetManifestAsync("https://www.youtube.com/watch?v=CY8E6N5Nzec");
    var streamInfos = manifest.GetAudioOnly();
    var streamInfo = streamInfos
        .Where(a => a.AudioCodec.Equals("opus"))
        .FirstOrDefault();
    var sourceStream = await youtubeClient.Videos.Streams.GetAsync(streamInfo);

    var info = await FFProbe.AnalyseAsync(sourceStream);

    var streamReader = new OggStreamReader(sourceStream);
    var reader = new OpusOggReadStream(new OpusDecoder(48000, 2), sourceStream);

    using var audioClient = await voiceChannel.ConnectAsync();
    using var outStream = audioClient.CreateOpusStream();

    sourceStream.Position = 0;
    byte[] buffer = null;
    // _ = Task.Delay(TimeSpan.FromSeconds(30))
    //     .ContinueWith((_) =>
    //     {
    //         streamReader.SeekTo(TimeSpan.FromSeconds(0));
    //     });
    while ((buffer = streamReader.GetNextPacket()) != null)
    {
        await outStream.WriteAsync(buffer.AsMemory());
    }

    await outStream.FlushAsync();
    await voiceChannel.DisconnectAsync();

    // var sessionId = await sessionIdTsc.Task;
    // var voiceServer = await socketVoiceServerTsc.Task;
    // var voiceClient = _voiceFactory.Create(voiceServer, sessionId);
    // await voiceClient.StartAsync();
    // await Task.Delay(TimeSpan.FromMinutes(10));
    // _client.UserVoiceStateUpdated -= VoiceStateUpdatedAsync;
    // _client.VoiceServerUpdated -= VoiceServerUpdatedAsync;
}
public async Task Probe_Async_Success()
{
    var info = await FFProbe.AnalyseAsync(VideoLibrary.LocalVideo.FullName);
    Assert.AreEqual(3, info.Duration.Seconds);
}