private void kryptonButton4_Click(object sender, EventArgs e)
{
    // Advance the current position by the number of seconds selected in numericUpDown1.
    // TimeSpan.Add handles carries past 60 seconds and past one hour correctly.
    var oldTime = globalTimeSpan;
    globalTimeSpan = globalTimeSpan.Add(TimeSpan.FromSeconds((int)numericUpDown1.Value));

    if (TimeSpan.Compare(globalTimeSpan, videoLength) != 1)
    {
        string outstr = video.Directory + "\\" + video.Name + " - " + globalTimeSpan.ToString(@"hh\_mm\_ss") + ".jpg";
        label16.Text = globalTimeSpan.ToString(@"hh\:mm\:ss");
        FileInfo output = new FileInfo(outstr);

        // Grab a frame at the new position and show it in the preview box.
        Bitmap img = new FFMpeg().Snapshot(
            video,
            output,
            new Size(video.Width, video.Height),
            globalTimeSpan
        );
        pictureBox1.Image = img;
        pictureBox1.SizeMode = PictureBoxSizeMode.Zoom;
    }
    else
    {
        // Past the end of the video: restore the previous position.
        globalTimeSpan = oldTime;
        MessageBox.Show("Вы достигли конца видео"); // "You have reached the end of the video"
    }
}
public void Cut_Concat_Effect3Videos_Test()
{
    const string OutputFile = OutputFolder + "Cut_Effect_Concat3Videos.avi";
    const string FileToConcat1 = OutputFolder + "1EpisodeToConcat_tmp.avi";
    const string FileToConcat2 = OutputFolder + "2EpisodeToConcat_tmp.avi";
    const string FileToConcat2TW = OutputFolder + "2EpisodeToConcat_tmpTW.avi";
    const string FileToConcat3 = OutputFolder + "3EpisodeToConcat_tmp.avi";

    string source = Path.Combine(this.InputFolder, SampleFiles.RealInputVideoAVI2);
    var ffmpeg = new FFMpeg(this.temporaryFilesStorage);

    var cutOptions1 = FFMpegCutOptions.BuildSimpleCatOptions(source, FileToConcat1, 100, 20, GlobalExportProgress.Empty);
    ffmpeg.Cut(cutOptions1);

    var cutOptions2 = FFMpegCutOptions.BuildSimpleCatOptions(source, FileToConcat2, 300, 20, GlobalExportProgress.Empty);
    ffmpeg.Cut(cutOptions2);

    var cutOptions3 = FFMpegCutOptions.BuildSimpleCatOptions(source, FileToConcat3, 600, 20, GlobalExportProgress.Empty);
    ffmpeg.Cut(cutOptions3);

    ffmpeg.Concat(OutputFile, "copy", "copy", GlobalExportProgress.Empty, FileToConcat1, FileToConcat2, FileToConcat3);

    ffmpeg.ApplyTimeWarp(
        OutputFile,
        new List<TimeWarpRecord> { new TimeWarpRecord(23, 32, 2), new TimeWarpRecord(43, 52, 2) },
        FileToConcat2TW,
        GlobalExportProgress.Empty);

    Assert.IsTrue(File.Exists(OutputFile));
}
public FFMpegVideoInfo GetVideoInfo(string videoFilePath)
{
    using (var mhandler = new FFMpeg(new TemporaryFilesStorage()))
    {
        return mhandler.GetVideoInfo(videoFilePath);
    }
}
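// A minimal, hypothetical call site for the GetVideoInfo wrapper above: the file path is a
// placeholder, and which members FFMpegVideoInfo exposes depends on the library in use,
// so the result is only stored here rather than inspected.
FFMpegVideoInfo info = GetVideoInfo(@"C:\videos\sample.mp4");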
/// <summary>
/// Splits the source video by the given time lines and writes the parts into Dir.
/// </summary>
/// <param name="timeLines">Time lines to split by.</param>
public void Split(TimeLineVolume[] timeLines)
{
    char sep = Path.DirectorySeparatorChar;
    FFMpeg ffmpeg = new FFMpeg();

    for (int i = 0; i < timeLines.Length; i++)
    {
        ArgumentContainer container = new ArgumentContainer();

        // input arg
        container.Add(new InputArgument(Source));

        // split arg
        container.Add(new SplitArgument(timeLines[i]));

        // output path
        VideoPartInfo partInfo = new VideoPartInfo()
        {
            IsNoise = timeLines[i].Volume == VolumeValue.Noise,
            Name = "videoPart",
            FileExtension = SourceExtension,
            Number = i,
        };
        string outputPath = $"{Dir.FullName}{sep}{partInfo.FullName}";
        container.Add(new OutputArgument(outputPath));

        // convert
        ffmpeg.Convert(container);
    }
}
static void Main(string[] args) { Console.WriteLine("Welcome to EASY ANIMATION COMPOSER"); if (args.Length > 0) { ConsoleMode = true; } if (ConsoleMode) { RunConsole(args); } else { Console.WriteLine("You can also run program with arguments: eacomposer [name] [path-to-dir-with-images] [FPS]"); args = new string[3]; Console.WriteLine("Write project name: "); args[0] = Console.ReadLine(); Console.WriteLine("Write directory name: "); args[1] = Console.ReadLine(); Console.WriteLine("Write FPS (15 by default): "); string tmp = Console.ReadLine(); if (string.IsNullOrEmpty(tmp)) { args[2] = "15"; } else { args[2] = tmp; } RunConsole(args); } fps += fps / 10; FFMpegOptions.Configure(new FFMpegOptions { RootDirectory = AppContext.BaseDirectory + @"\bin" }); Console.WriteLine($"Working on project {name}"); Console.WriteLine($"Step 1: Add images ({img_paths.Capacity})"); ImageInfo[] frames = new ImageInfo[img_paths.Capacity]; for (int i = 0; i < img_paths.Capacity; i++) { Console.Write('.'); frames[i] = new ImageInfo(img_paths[i]); } path += "/" + name + ".mp4"; if (File.Exists(path)) { new FileInfo(path).Delete(); } Console.WriteLine(); Console.WriteLine($"Step 2: Creating video on path {path}"); FFMpeg encoder = new FFMpeg(); encoder.JoinImageSequence(new FileInfo(path), fps, frames); Console.WriteLine($"Finished!"); encoder.Dispose(); }
public void CanSplit()
{
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "basis_01.mp4"));
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "basis_02.mp4"));
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "basis_03.mp4"));
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "basis_04.mp4"));

    var slidetask = MakeSlideShow("long-slideshow.mp4", 0.1m);

    var splittask = new FFMpegVideoSplitTask { SourceFile = slidetask.TargetFile, WorkingFolder = dir };
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(1), Duration = TimeSpan.FromSeconds(1) });
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(11), Duration = TimeSpan.FromSeconds(2) });
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(1), Duration = TimeSpan.FromSeconds(0.5) });
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(12), Duration = TimeSpan.FromSeconds(2) });

    FFMpeg.Split(splittask);

    Assert.True(File.Exists(GetPath("basis_01.mp4")));
    Assert.True(File.Exists(GetPath("basis_02.mp4")));
    Assert.True(File.Exists(GetPath("basis_03.mp4")));
    Assert.True(File.Exists(GetPath("basis_04.mp4")));
}
public void CanSplitImages()
{
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "thumb_01.png"));
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "thumb_02.png"));
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "thumb_03.png"));
    FileSystemHelper.DeleteIfExists(Path.Combine(dir, "thumb_04.png"));

    var slidetask = MakeSlideShow("long-slideshow.mp4", 0.1m);

    var splittask = new FFMpegVideoSplitTask { SourceFile = slidetask.TargetFile, WorkingFolder = dir, FileNameBase = "thumb_" };
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(1), Duration = TimeSpan.FromSeconds(1) });
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(11), Duration = TimeSpan.FromSeconds(2) });
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(1), Duration = TimeSpan.FromSeconds(0.5) });
    splittask.Frames.Add(new FFMpegFrame { StartTime = TimeSpan.FromSeconds(12), Duration = TimeSpan.FromSeconds(2) });
    splittask.Width = 100;
    splittask.Height = 100;

    FFMpeg.SplitImages(splittask);

    Assert.True(File.Exists(GetPath("thumb_01.png")));
    Assert.True(File.Exists(GetPath("thumb_02.png")));
    Assert.True(File.Exists(GetPath("thumb_03.png")));
    Assert.True(File.Exists(GetPath("thumb_04.png")));
}
public void Concat4ProblemVideosFromArturas_Test()
{
    const string OutputFile = OutputFolder + "4Episodes60SecConcat_SuperFast.mkv";
    const string FileToConcat1 = OutputFolder + "1EpisodeToConcat_SuperFast.mp4";
    const string FileToConcat2 = OutputFolder + "2EpisodeToConcat_SuperFast.mp4";
    const string FileToConcat3 = OutputFolder + "3EpisodeToConcat_SuperFast.mp4";
    const string FileToConcat4 = OutputFolder + "4EpisodeToConcat_SuperFast.mp4";

    string Source1 = SampleFiles.RealInputVideoAVI;
    string Source2 = SampleFiles.RealInputVideoAVI2;
    var ffmpeg = new FFMpeg(this.temporaryFilesStorage);

    var cutOptions1 = FFMpegCutOptions.BuildCatOptionsWithConvertations(Source1, FileToConcat1, 300, 20, GlobalExportProgress.Empty, new Size(1280, 720));
    ffmpeg.Cut(cutOptions1);

    var cutOptions2 = FFMpegCutOptions.BuildCatOptionsWithConvertations(Source1, FileToConcat2, 500, 20, GlobalExportProgress.Empty, new Size(1280, 720));
    ffmpeg.Cut(cutOptions2);

    var cutOptions3 = FFMpegCutOptions.BuildCatOptionsWithConvertations(Source2, FileToConcat3, 100, 20, GlobalExportProgress.Empty, new Size(1280, 720));
    ffmpeg.Cut(cutOptions3);

    var cutOptions4 = FFMpegCutOptions.BuildCatOptionsWithConvertations(Source2, FileToConcat4, 300, 20, GlobalExportProgress.Empty, new Size(1280, 720));
    ffmpeg.Cut(cutOptions4);

    ffmpeg.Concat(OutputFile, "copy", "copy", GlobalExportProgress.Empty, FileToConcat3, FileToConcat4, FileToConcat1, FileToConcat2);

    Assert.IsTrue(File.Exists(OutputFile));
}
static void Main(string[] args)
{
    MelSpectrogram gram = new MelSpectrogram();

    string dataDirPath = Path.Combine(IOUtils.AssemblyDirectory, "..", "..", "..", "gtzan", "genres");
    if (!Directory.Exists(dataDirPath))
    {
        Console.WriteLine("{0} does not exist", dataDirPath);
        return;
    }

    string[] subDirectories = Directory.GetDirectories(dataDirPath);
    foreach (string subDirectory in subDirectories)
    {
        string[] files = Directory.GetFiles(subDirectory, "*.au");
        foreach (string file in files)
        {
            string mp3file = "converted.mp3";
            Console.WriteLine("Converting {0}", file);
            FFMpeg.Convert2Mp3(file, mp3file);
            if (!File.Exists(mp3file))
            {
                Console.WriteLine("Failed to convert to {0}", mp3file);
            }
            // Only the first file of the first genre directory is converted.
            break;
        }
        break;
    }
}
public void Video_Join_Image_Sequence()
{
    var imageSet = new List<ImageInfo>();
    Directory.EnumerateFiles(TestResources.ImageCollection)
        .Where(file => file.ToLower().EndsWith(".png"))
        .ToList()
        .ForEach(file =>
        {
            for (var i = 0; i < 15; i++)
            {
                imageSet.Add(new ImageInfo(file));
            }
        });

    var outputFile = new TemporaryFile("out.mp4");
    var success = FFMpeg.JoinImageSequence(outputFile, images: imageSet.ToArray());
    Assert.IsTrue(success);

    var result = FFProbe.Analyse(outputFile);
    Assert.AreEqual(3, result.Duration.Seconds);
    Assert.AreEqual(imageSet.First().Width, result.PrimaryVideoStream.Width);
    Assert.AreEqual(imageSet.First().Height, result.PrimaryVideoStream.Height);
}
public void ProcessRenderOptions()
{
    using (var temporaryFilesStorage = new TemporaryFilesStorage())
    {
        var subject = new Subject<double>();

        // ReSharper disable once ImpureMethodCallOnReadonlyValueField
        // Registration happens internally through a reference to the parent CancellationTokenSource.
        this.cancellationToken.Register(() => subject.OnNext(0));

        using (var ffMpeg = new FFMpeg(temporaryFilesStorage, this.rendererProcessPriorityClass, subject.AsObservable()))
        {
            ffMpeg.LogMessage($"Started rendering of {this.outputFile}", string.Empty);

            var cutInfos = this.VideoRenderOptions.Select(
                v =>
                {
                    if (string.IsNullOrEmpty(v.FilePath))
                    {
                        return new FFMpegCutInfo(v.VideoStreamPath, v.AudioStreamPath, v.StartSecond, v.StartSecond + v.DurationSeconds, v.IsMuted);
                    }

                    return new FFMpegCutInfo(v.FilePath, v.StartSecond, v.StartSecond + v.DurationSeconds, v.IsMuted);
                }).ToList();

            this.CutAndConcatAndRenderTextAndImageAndTimeWarps(cutInfos, ffMpeg, temporaryFilesStorage);
        }
    }
}
private void treeView2_NodeMouseClick(object sender, TreeNodeMouseClickEventArgs e)
{
    // Parse the node tag once instead of repeating Int32.Parse for every use.
    int nodeId = Int32.Parse(e.Node.Tag.ToString());
    if (nodeId != -1)
    {
        //vidTreeRefresh(Int32.Parse(e.Node.Tag.ToString()), e.Node.Text);
        var vid = DBOPS.GetVid(nodeId);
        pics = DBOPS.GetImagesList(nodeId);
        pics = pics.OrderBy(i => i.Id).ToList();
        vidDiag = vid.Diag;
        vidId = nodeId;
        inputFile = System.IO.Directory.GetCurrentDirectory() + vid.Path;

        var ffProbe = new NReco.VideoInfo.FFProbe();
        var videoInfo = ffProbe.GetMediaInfo(inputFile);
        video = new VideoInfo(inputFile);

        count = 0;
        globalTimeSpan = new TimeSpan(0, 0, 0, 0);
        textBox2.Text = vidDiag;
        videoLength = videoInfo.Duration;
        string output1 = videoInfo.Duration.ToString();
        //label3.Text = pat.FIO;
        //label2.Text = pat.Bdate.ToString("dd/MM/yyyy");
        //label5.Text = pat.Pdate.ToString("dd/MM/yyyy");
        label10.Text = output1;
        kryptonButton1.Enabled = true;
        panel4.Enabled = true;

        bool splitted = DBOPS.ExistPicturesCheck(vidId);
        if (splitted)
        {
            // Snapshots already exist for this video: show the stored images.
            nButton.Visible = true;
            bButton.Visible = true;
            nButton.Enabled = true;
            bButton.Enabled = true;
            pictureBox1.Image = Image.FromFile(System.IO.Directory.GetCurrentDirectory() + pics[imgnum].path);
            pictureBox1.SizeMode = PictureBoxSizeMode.Zoom;
            label15.Text = "Текущий снимок:"; // "Current snapshot:"
            label16.Text = (imgnum + 1) + " из " + pics.Count + " (" + pics[imgnum].timestamp + ")"; // "из" = "of"
        }
        else
        {
            // No stored snapshots yet: grab a frame at 00:00:00 with FFMpeg and show it.
            nButton.Visible = false;
            bButton.Visible = false;
            TimeSpan duration = new TimeSpan(0, 0, 0, 0);
            string outstr = video.Directory + "\\" + video.Name + " - " + duration.ToString(@"hh\_mm\_ss") + ".jpg";
            label15.Text = "Текущая отметка:"; // "Current mark:"
            label16.Text = duration.ToString(@"hh\:mm\:ss");
            FileInfo output = new FileInfo(outstr);
            Bitmap img = new FFMpeg().Snapshot(
                video,
                output,
                new Size(video.Width, video.Height),
                duration
            );
            pictureBox1.Image = img;
            pictureBox1.SizeMode = PictureBoxSizeMode.Zoom;
        }
    }
}
public void CTOR_Default()
{
    var encoder = new FFMpeg();
    var probe = new FFProbe();

    Assert.IsNotNull(encoder);
    Assert.IsNotNull(probe);
}
/// <summary>
/// Runs the download and, when requested, combines the downloaded audio and video streams.
/// </summary>
/// <param name="e">The <see cref="DoWorkEventArgs"/> for the background worker.</param>
protected override void WorkerDoWork(DoWorkEventArgs e)
{
    downloader.Start();
    while (downloader?.IsBusy == true)
    {
        Thread.Sleep(200);
    }

    if (_combine && _downloadSuccessful)
    {
        var audio = downloader.Files[0].Path;
        var video = downloader.Files[1].Path;

        this.ReportProgress(-1, new Dictionary<string, object>() { { nameof(Progress), 0 } });
        this.ReportProgress(ProgressMax, null);

        try
        {
            FFMpegResult<bool> result;

            this.ReportProgress(-1, new Dictionary<string, object>() { { nameof(ProgressText), "Combining..." } });

            result = FFMpeg.Combine(video, audio, this.Output, delegate(int percentage)
            {
                // Combine progress
                this.ReportProgress(percentage, null);
            });

            if (result.Value)
            {
                e.Result = OperationStatus.Success;
            }
            else
            {
                e.Result = OperationStatus.Failed;
                this.ErrorsInternal.AddRange(result.Errors);
            }

            // Cleanup the separate audio and video files
            FileHelper.DeleteFiles(audio, video);
        }
        catch (Exception ex)
        {
            Logger.WriteException(ex);
            e.Result = OperationStatus.Failed;
        }
    }
    else
    {
        e.Result = this.Status;
    }
}
public async Task<string> Handle(CreateSnapshotCommand @event, CancellationToken cancellationToken = default)
{
    var tempFile = _tempFileService.GetFilename("generated", ".png");
    var analysis = await FFProbe.AnalyseAsync(@event.InputVideoFilePath);

    await FFMpeg.SnapshotAsync(
        @event.InputVideoFilePath,
        tempFile,
        new Size(@event.Width, @event.Height),
        analysis!.Duration * @event.SeekPercentage);

    return tempFile;
}
public static void CompressVideo(string inputPath, string outputPath, Action<string> callback)
{
    Activity activity = new Activity();
    _callback = callback;

    ProgressDialog progress = new ProgressDialog(Forms.Context);
    progress.Indeterminate = true;
    progress.SetProgressStyle(ProgressDialogStyle.Spinner);
    progress.SetMessage("Compressing Video. Please wait...");
    progress.SetCancelable(false);
    progress.Show();

    Task.Run(() =>
    {
        var _workingDirectory = Android.OS.Environment.ExternalStorageDirectory.AbsolutePath;
        var sourceMp4 = inputPath;
        var destinationPath1 = outputPath;

        FFMpeg ffmpeg = new FFMpeg(Android.App.Application.Context, _workingDirectory);

        // Rotate the video 90 degrees clockwise while re-encoding.
        TransposeVideoFilter vfTranspose = new TransposeVideoFilter(TransposeVideoFilter.NINETY_CLOCKWISE);
        var filters = new List<VideoFilter>();
        filters.Add(vfTranspose);

        var sourceClip = new Clip(System.IO.Path.Combine(_workingDirectory, sourceMp4))
        {
            videoFilter = VideoFilter.Build(filters)
        };

        var br = System.Environment.NewLine;

        var onComplete = new MyCommand((_) =>
        {
            _callback(destinationPath1);
            progress.Dismiss();
        });
        var onMessage = new MyCommand((message) =>
        {
            System.Console.WriteLine(message);
        });
        var callbacks = new FFMpegCallbacks(onComplete, onMessage);

        string[] cmds = new string[]
        {
            "-y",
            "-i", sourceClip.path,
            "-strict", "experimental",
            "-vcodec", "libx264",
            "-preset", "ultrafast",
            "-crf", "30",
            "-acodec", "aac",
            "-ar", "44100",
            "-q:v", "20",
            "-vf", sourceClip.videoFilter,
            // "mp=eq2=1:1.68:0.3:1.25:1:0.96:1",
            destinationPath1,
        };

        ffmpeg.Execute(cmds, callbacks);
    });
}
public void Image_AddAudio()
{
    using var outputFile = new TemporaryFile("out.mp4");
    FFMpeg.PosterWithAudio(TestResources.PngImage, TestResources.Mp3Audio, outputFile);

    var analysis = FFProbe.Analyse(TestResources.Mp3Audio);
    Assert.IsTrue(analysis.Duration.TotalSeconds > 0);
    Assert.IsTrue(File.Exists(outputFile));
}
public byte[] GetFrameFromVideoAsByte(string videoFile, double positionMs, FFMpegImageSize imageSize)
{
    using (var tempFileStorage = new TemporaryFilesStorage())
    using (var mhandler = new FFMpeg(tempFileStorage))
    {
        return mhandler.GetBitmapFromVideoAsByte(videoFile, positionMs, imageSize);
    }
}
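// Hypothetical call site for the GetFrameFromVideoAsByte wrapper above: the path and position
// are placeholders, and 'default' stands in for whichever FFMpegImageSize value the caller needs.
byte[] frameBytes = GetFrameFromVideoAsByte(@"C:\videos\sample.mp4", 5000, default);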
public void Video_Snapshot_InMemory()
{
    var input = FFProbe.Analyse(TestResources.Mp4Video);
    using var bitmap = FFMpeg.Snapshot(input);

    Assert.AreEqual(input.PrimaryVideoStream.Width, bitmap.Width);
    Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height);
    Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png);
}
public void Audio_Save()
{
    using var outputFile = new TemporaryFile("out.mp3");
    FFMpeg.ExtractAudio(TestResources.Mp4Video, outputFile);

    var analysis = FFProbe.Analyse(outputFile);
    Assert.IsTrue(!analysis.VideoStreams.Any());
    Assert.IsTrue(analysis.AudioStreams.Any());
}
public static VideoInfo SaveStream(this Uri uri, FileInfo output)
{
    var success = new FFMpeg().SaveM3U8Stream(uri, output);
    if (!success)
    {
        throw new OperationCanceledException("Could not save stream.");
    }

    return new VideoInfo(output);
}
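// Hypothetical usage of the SaveStream extension above; the playlist URL and the output path are placeholders.
var streamUri = new Uri("https://example.com/live/playlist.m3u8");
VideoInfo saved = streamUri.SaveStream(new FileInfo(@"C:\videos\stream.mp4"));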
public void Cut_Effect_Concat3Videos_Test()
{
    const string OutputFile = OutputFolder + "3Episodes60SecConcat_tmp.avi";
    const string FileToConcat1 = OutputFolder + "1EpisodeToConcat_tmp.avi";
    const string FileToConcat2 = OutputFolder + "2EpisodeToConcat_tmp.avi";
    const string FileToConcat3 = OutputFolder + "3EpisodeToConcat_tmp.avi";

    string source = Path.Combine(this.InputFolder, SampleFiles.RealInputVideoAVI2);
    var ffmpeg = new FFMpeg(this.temporaryFilesStorage);

    var cutOptions1 = FFMpegCutOptions.BuildCatOptionsWithConvertations(source, FileToConcat1, 900, 20, GlobalExportProgress.Empty, Size.Empty, null, new List<DrawImageTimeRecord>(), new List<TimeWarpRecord>());
    ffmpeg.Cut(cutOptions1);

    var cutOptions2 = FFMpegCutOptions.BuildCatOptionsWithConvertations(source, FileToConcat2, 300, 20, GlobalExportProgress.Empty, Size.Empty, null, new List<DrawImageTimeRecord>(), new List<TimeWarpRecord> { new TimeWarpRecord(3, 12, 2) });
    ffmpeg.Cut(cutOptions2);

    var cutOptions3 = FFMpegCutOptions.BuildCatOptionsWithConvertations(source, FileToConcat3, 600, 20, GlobalExportProgress.Empty, Size.Empty, null, new List<DrawImageTimeRecord>(), new List<TimeWarpRecord> { new TimeWarpRecord(3, 12, 2) });
    ffmpeg.Cut(cutOptions3);

    ffmpeg.Concat(OutputFile, "copy", "copy", GlobalExportProgress.Empty, FileToConcat2, FileToConcat1, FileToConcat3);

    Assert.IsTrue(File.Exists(OutputFile));
}
public void Video_Snapshot_PersistSnapshot()
{
    // Dispose the temporary file like the sibling tests do.
    using var outputPath = new TemporaryFile("out.png");
    var input = FFProbe.Analyse(TestResources.Mp4Video);

    FFMpeg.Snapshot(input, outputPath);

    using var bitmap = Image.FromFile(outputPath);
    Assert.AreEqual(input.PrimaryVideoStream.Width, bitmap.Width);
    Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height);
    Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png);
}
private async Task CreateGifAsync(TextureArray2D left, TextureArray2D right, Config cfg, IProgress progress)
{
    // total number of frames to render
    var numFrames = cfg.FramesPerSecond * cfg.NumSeconds;

    try
    {
        progressModel.EnableDllProgress = false;
        var leftView = left.GetSrView(LayerMipmapSlice.Mip0);
        var rightView = right.GetSrView(LayerMipmapSlice.Mip0);
        var curProg = progress.CreateSubProgress(0.9f);

        // create frames
        using (var dst = IO.CreateImage(new ImageFormat(Format.R8G8B8A8_UNorm_SRgb), left.Size, LayerMipmapCount.One))
        {
            var dstMip = dst.GetMipmap(LayerMipmapSlice.Mip0);
            var dstPtr = dstMip.Bytes;
            var dstSize = dstMip.ByteSize;

            // render frames into texture
            using (var frame = new TextureArray2D(LayerMipmapCount.One, left.Size, Format.R8G8B8A8_UNorm_SRgb, false))
            {
                var frameView = frame.GetRtView(LayerMipmapSlice.Mip0);

                for (int i = 0; i < numFrames; ++i)
                {
                    float t = (float)i / numFrames;
                    int borderPos = (int)(t * frame.Size.Width);

                    // render frame
                    shader.Run(leftView, rightView, frameView, cfg.SliderWidth, borderPos, frame.Size.Width, frame.Size.Height);

                    // save frame as png
                    frame.CopyPixels(LayerMipmapSlice.Mip0, dstPtr, dstSize);
                    var filename = $"{cfg.TmpFilename}{i:D4}";
                    await Task.Run(() => IO.SaveImage(dst, filename, "png", GliFormat.RGBA8_SRGB), progress.Token);

                    curProg.Progress = i / (float)numFrames;
                    curProg.What = "creating frames";
                }
            }
        }

        // convert the frames to video
        await FFMpeg.ConvertAsync(cfg, progress.CreateSubProgress(1.0f));
    }
    finally
    {
        progressModel.EnableDllProgress = true;
    }
}
public void GenerateAnimation()
{
    ImageInfo[] videoParameters = ReturnVideoParameters();

    FFMpeg encoder = new FFMpeg();
    encoder.JoinImageSequence(
        new FileInfo(@"..\test_video.mp4"),
        14.28, // FPS
        videoParameters
    );
}
public static void Extract(Disc disc, string path, string fileBase)
{
    var dsr = new DiscSectorReader(disc);

    bool confirmed = false;
    var tracks = disc.Session1.Tracks;
    foreach (var track in tracks)
    {
        if (!track.IsAudio)
        {
            continue;
        }

        // Read the raw audio data of the track (2352 bytes per CD sector).
        int trackLength = track.NextTrack.LBA - track.LBA;
        var waveData = new byte[trackLength * 2352];
        int startLba = track.LBA;
        for (int sector = 0; sector < trackLength; sector++)
        {
            dsr.ReadLBA_2352(startLba + sector, waveData, sector * 2352);
        }

        string mp3Path = $"{Path.Combine(path, fileBase)} - Track {track.Number:D2}.mp3";
        if (File.Exists(mp3Path))
        {
            if (!confirmed)
            {
                var dr = MessageBox.Show(
                    "This file already exists. Do you want extraction to proceed overwriting files, or cancel the entire operation immediately?",
                    "File already exists",
                    MessageBoxButtons.OKCancel);
                if (dr == DialogResult.Cancel)
                {
                    return;
                }

                confirmed = true;
            }

            File.Delete(mp3Path);
        }

        // Write the raw PCM to a temp file and let ffmpeg encode it to MP3.
        string tempfile = Path.GetTempFileName();
        try
        {
            File.WriteAllBytes(tempfile, waveData);
            var ffmpeg = new FFMpeg();
            ffmpeg.Run("-f", "s16le", "-ar", "44100", "-ac", "2", "-i", tempfile, "-f", "mp3", "-ab", "192k", mp3Path);
        }
        finally
        {
            File.Delete(tempfile);
        }
    }
}
public void DrawImageOnOneVideo()
{
    const string OutputFile = OutputFolder + "Image20SecCut_Medium.mp4";
    var ffmpeg = new FFMpeg(this.temporaryFilesStorage);

    var images = new List<DrawImageTimeRecord>();
    images.Add(new DrawImageTimeRecord(File.ReadAllBytes(SampleFiles.SamplePngImage), 100, 100, 1, 4));

    ffmpeg.DrawImage(SampleFiles.SampleVideo_5sec, images, OutputFile, GlobalExportProgress.Empty);

    Assert.IsTrue(File.Exists(OutputFile));
}
private static void Main(string[] args)
{
    var encoder = new FFMpeg();

    // Bind Progress Handler
    encoder.OnProgress += percentage => { Console.WriteLine("Progress {0}%", percentage); };

    foreach (var input in args.Select(fileLocation => new VideoInfo(fileLocation)))
    {
        // Start Encoding
        encoder.ToMp4(input, new FileInfo(input.FullName.Replace(input.Extension, ".mp4")));
    }
}
public void Audio_Add()
{
    using var outputFile = new TemporaryFile("out.mp4");

    var success = FFMpeg.ReplaceAudio(TestResources.Mp4WithoutAudio, TestResources.Mp3Audio, outputFile);
    var videoAnalysis = FFProbe.Analyse(TestResources.Mp4WithoutAudio);
    var audioAnalysis = FFProbe.Analyse(TestResources.Mp3Audio);
    var outputAnalysis = FFProbe.Analyse(outputFile);

    Assert.IsTrue(success);
    Assert.AreEqual(Math.Max(videoAnalysis.Duration.TotalSeconds, audioAnalysis.Duration.TotalSeconds), outputAnalysis.Duration.TotalSeconds, 0.15);
    Assert.IsTrue(File.Exists(outputFile));
}
static void Main(string[] args)
{
    string file = Path.Combine(IOUtils.AssemblyDirectory, "..", "..", "..", "blues.00000.au");
    string mp3file = "converted.mp3";

    Console.WriteLine("Converting {0}", file);
    FFMpeg.Convert2Mp3(file, mp3file);

    if (!File.Exists(mp3file))
    {
        Console.WriteLine("Failed to convert to {0}", mp3file);
    }
}
public MemoryStream CompressVideo(string sourceFilePath, string destinationFilePath, bool deleteSourceFile)
{
    if (sourceFilePath == null || destinationFilePath == null)
        return null;

    // Read the rotation flag from the source so portrait videos can be transposed.
    MediaMetadataRetriever media = new MediaMetadataRetriever();
    media.SetDataSource(sourceFilePath);
    string videoRotation = media.ExtractMetadata(MetadataKey.VideoRotation);

    XamarinAndroidFFmpeg.FFMpeg ffmpeg = new FFMpeg(MainApplication.Context, App.DownloadsPath);

    var onComplete = new MyCommand((_) => { });
    var onMessage = new MyCommand((message) =>
    {
        System.Diagnostics.Debug.WriteLine("---" + message);
    });

    if (videoRotation != null && videoRotation == "90")
    {
        string[] cmds = new string[]
        {
            "-i", sourceFilePath,
            "-vcodec", "mpeg4",
            "-acodec", "aac",
            "-strict", "-2",
            "-ac", "1",
            "-ar", "16000",
            "-r", "13",
            "-ab", "32000",
            "-vf", "transpose=1",
            "-y", destinationFilePath
        };
        var callbacks = new FFMpegCallbacks(onComplete, onMessage);
        ffmpeg.Execute(cmds, callbacks);
    }
    else
    {
        string[] cmds = new string[]
        {
            "-i", sourceFilePath,
            "-vcodec", "mpeg4",
            "-acodec", "aac",
            "-strict", "-2",
            "-ac", "1",
            "-ar", "16000",
            "-r", "13",
            "-ab", "32000",
            "-y", destinationFilePath
        };
        var callbacks = new FFMpegCallbacks(onComplete, onMessage);
        ffmpeg.Execute(cmds, callbacks);
    }

    if (deleteSourceFile)
    {
        Java.IO.File toDel = new Java.IO.File(sourceFilePath);
        toDel.Delete();
    }

    // Return the compressed file as an in-memory stream; the using block releases the file handle.
    MemoryStream ms = new MemoryStream();
    using (FileStream file = new FileStream(destinationFilePath, FileMode.Open, FileAccess.Read))
    {
        file.CopyTo(ms);
    }

    return ms;
}
void MountBlobs()
{
    IBlob file_blob = null;

    BlobInfos = new List<BlobInfo>();
    foreach (var ccf in IN_CompileJob.OUT_CompiledCueFiles)
    {
        var bi = new BlobInfo();
        BlobInfos.Add(bi);

        switch (ccf.Type)
        {
            case CompiledCueFileType.BIN:
            case CompiledCueFileType.Unknown:
            {
                //raw files:
                var blob = new Disc.Blob_RawFile { PhysicalPath = ccf.FullPath };
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                bi.Length = blob.Length;
                break;
            }

            case CompiledCueFileType.ECM:
            {
                var blob = new Disc.Blob_ECM();
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                blob.Load(ccf.FullPath);
                bi.Length = blob.Length;
                break;
            }

            case CompiledCueFileType.WAVE:
            {
                var blob = new Disc.Blob_WaveFile();
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                blob.Load(ccf.FullPath);
                bi.Length = blob.Length;
                break;
            }

            case CompiledCueFileType.DecodeAudio:
            {
                FFMpeg ffmpeg = new FFMpeg();
                if (!ffmpeg.QueryServiceAvailable())
                {
                    throw new DiscReferenceException(ccf.FullPath, "No decoding service was available (make sure ffmpeg.exe is available. even though this may be a wav, ffmpeg is used to load oddly formatted wave files. If you object to this, please send us a note and we'll see what we can do. It shouldn't be too hard.)");
                }

                AudioDecoder dec = new AudioDecoder();
                byte[] buf = dec.AcquireWaveData(ccf.FullPath);
                var blob = new Disc.Blob_WaveFile();
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                blob.Load(new MemoryStream(buf));
                bi.Length = buf.Length;
                break;
            }

            default:
                throw new InvalidOperationException();
        } //switch(file type)

        //wrap all the blobs with zero padding
        bi.Blob = new Disc.Blob_ZeroPadAdapter(file_blob, bi.Length);
    }
}
public MemoryStream CreateVideoThumbnail(string inputVideoPath, string outputImagePath)
{
    // Read the rotation flag so portrait videos get a correctly oriented thumbnail.
    MediaMetadataRetriever media = new MediaMetadataRetriever();
    media.SetDataSource(inputVideoPath);
    string videoRotation = media.ExtractMetadata(MetadataKey.VideoRotation);

    XamarinAndroidFFmpeg.FFMpeg ffmpeg = new FFMpeg(MainApplication.Context, App.DownloadsPath);

    var onComplete = new MyCommand((_) => { });
    var onMessage = new MyCommand((message) =>
    {
        System.Diagnostics.Debug.WriteLine("---" + message);
    });
    var callbacks = new FFMpegCallbacks(onComplete, onMessage);

    // Grab a frame one second into the video, transposing it if the source is rotated.
    if (videoRotation != null && videoRotation == "90")
    {
        string[] cmds = new string[] { "-i", inputVideoPath, "-ss", "00:00:01.000", "-vf", "transpose=1", outputImagePath };
        ffmpeg.Execute(cmds, callbacks);
    }
    else
    {
        string[] cmds = new string[] { "-i", inputVideoPath, "-ss", "00:00:01.000", outputImagePath };
        ffmpeg.Execute(cmds, callbacks);
    }

    // Copy the generated image into a MemoryStream; the using block releases the file handle.
    MemoryStream ms = new MemoryStream();
    using (FileStream stream = new FileStream(outputImagePath, FileMode.Open))
    {
        stream.CopyTo(ms);
    }

    return ms;
}
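// Hypothetical call site for CreateVideoThumbnail above; both paths are placeholders on device storage.
using (MemoryStream thumbnail = CreateVideoThumbnail("/sdcard/Movies/input.mp4", "/sdcard/Movies/thumb.jpg"))
{
    // Use the in-memory thumbnail here (for example, bind it to an ImageView or upload it).
}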
void FinalAnalysis()
{
    //some quick checks:
    if (OUT_CompiledCueFiles.Count == 0)
        Error("Cue file doesn't specify any input files!");

    //we can't reliably analyze the length of files here, because we might have to be decoding to get lengths (VBR mp3s)
    //REMINDER: we could actually scan the mp3 frames in software
    //So, it's not really worth the trouble. We'll cope with lengths later
    //we could check the format of the wav file here, though

    //score the cost of loading the file
    bool needsCodec = false;
    OUT_LoadTime = 0;
    foreach (var cfi in OUT_CompiledCueFiles)
    {
        if (cfi == null)
            continue;

        if (cfi.Type == CompiledCueFileType.DecodeAudio)
        {
            needsCodec = true;
            OUT_LoadTime = Math.Max(OUT_LoadTime, 10);
        }

        if (cfi.Type == CompiledCueFileType.SeekAudio)
            needsCodec = true;

        if (cfi.Type == CompiledCueFileType.ECM)
            OUT_LoadTime = Math.Max(OUT_LoadTime, 1);
    }

    //check whether processing was available
    if (needsCodec)
    {
        FFMpeg ffmpeg = new FFMpeg();
        if (!ffmpeg.QueryServiceAvailable())
            Warn("Decoding service will be required for further processing, but is not available");
    }
}