/// <summary>
/// compiles final bitrate statistics
/// </summary>
private void compileFinalStats()
{
    try
    {
        if (!string.IsNullOrEmpty(job.Output) && File.Exists(job.Output))
        {
            FileInfo fi = new FileInfo(job.Output);
            long size = fi.Length; // size in bytes

            ulong framecount;
            double framerate;
            JobUtil.getInputProperties(out framecount, out framerate, job.Input);

            double numberOfSeconds = (double)framecount / framerate;
            long bitrate = (long)(size * 8.0 / (numberOfSeconds * 1000.0));

            if (job.Settings.EncodingMode != 1)
            {
                log.Append("desired video bitrate of this job: " + job.Settings.BitrateQuantizer
                    + " kbit/s - obtained video bitrate (approximate): " + bitrate + " kbit/s");
            }
            else
            {
                log.Append("This is a CQ job so there's no desired bitrate. Obtained video bitrate: " + bitrate + " kbit/s");
            }
        }
    }
    catch (Exception e)
    {
        log.Append("Exception in compileFinalStats. Message: " + e.Message + " stacktrace: " + e.StackTrace);
    }
}
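// Worked example of the average-bitrate formula used above, as a minimal self-contained
// sketch with hypothetical sample values (the helper name is illustrative, not part of MeGUI):
// kbit/s = bytes * 8 / (seconds * 1000), where seconds = framecount / framerate.
private static long ComputeAverageBitrate(long sizeInBytes, ulong framecount, double framerate)
{
    double numberOfSeconds = (double)framecount / framerate;       // e.g. 3594 frames / 23.976 fps ≈ 149.9 s
    return (long)(sizeInBytes * 8.0 / (numberOfSeconds * 1000.0)); // e.g. 52428800 bytes -> ≈ 2798 kbit/s
}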
public LogItem postprocess()
{
    audioFiles = vUtil.getAllDemuxedAudio(job.AudioTracks, job.Output, 8);
    fillInAudioInformation();

    log.LogValue("Desired size", job.PostprocessingProperties.OutputSize);
    log.LogValue("Split size", job.PostprocessingProperties.Splitting);

    VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
    string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output),
        Path.GetFileNameWithoutExtension(job.Output) + "_Video");
    string muxedOutput = job.PostprocessingProperties.FinalOutput;

    // open the video
    Dar? dar;
    string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR,
        job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR,
        log, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace,
        videoSettings, out dar);

    VideoStream myVideo = new VideoStream();
    ulong length;
    double framerate;
    JobUtil.getInputProperties(out length, out framerate, videoInput);
    myVideo.Input = videoInput;
    myVideo.Output = videoOutput;
    myVideo.NumberOfFrames = length;
    myVideo.Framerate = (decimal)framerate;
    myVideo.DAR = dar;
    myVideo.VideoType = new MuxableType(
        (new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0],
        videoSettings.Codec);
    myVideo.Settings = videoSettings;

    List<string> intermediateFiles = new List<string>();
    intermediateFiles.Add(videoInput);
    intermediateFiles.Add(job.Output);
    intermediateFiles.AddRange(audioFiles.Values);

    if (!string.IsNullOrEmpty(videoInput))
    {
        // create empty subtitles for muxing (subtitles not supported in one click mode)
        MuxStream[] subtitles = new MuxStream[0];
        JobChain c = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs,
            subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
            job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container, false,
            job.PostprocessingProperties.DirectMuxAudio, log);
        if (c == null)
        {
            log.Warn("Job creation aborted");
            return log;
        }
        c = CleanupJob.AddAfter(c, intermediateFiles);
        mainForm.Jobs.addJobsWithDependencies(c);
    }
    return log;
}
public void Run(MainForm info)
{
    // normal video verification
    string error = null;
    if ((error = info.Video.verifyVideoSettings()) != null)
    {
        MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
    {
        MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    if (info.Video.CurrentSettings.EncodingMode == 2 || info.Video.CurrentSettings.EncodingMode == 5)
    {
        MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode",
            "Improper configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }

    VideoCodecSettings vSettings = info.Video.CurrentSettings.Clone();
    Zone[] zones = info.Video.Info.Zones;
    // We can't simply modify the zones in place because that would reveal the final
    // zones config to the user, including the credits/start zones.
    bool cont = info.JobUtil.getFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame,
        info.Video.Info.CreditsStartFrame, ref zones);
    if (cont)
    {
        ulong length = 0;
        double framerate = 0.0;
        VideoStream myVideo = new VideoStream();
        JobUtil.getInputProperties(out length, out framerate, info.Video.VideoInput);
        myVideo.Input = info.Video.Info.VideoInput;
        myVideo.Output = info.Video.Info.VideoOutput;
        myVideo.NumberOfFrames = length;
        myVideo.Framerate = (decimal)framerate;
        myVideo.DAR = info.Video.Info.DAR;
        myVideo.VideoType = info.Video.CurrentMuxableVideoType;
        myVideo.Settings = vSettings;

        VideoInfo vInfo = info.Video.Info.Clone(); // so we don't modify the data on the main form
        vInfo.Zones = zones;

        using (AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info, info.Video.PrerenderJob, vInfo))
        {
            if (aew.init())
            {
                info.ClosePlayer();
                aew.ShowDialog();
            }
            else
            {
                MessageBox.Show("The currently selected combination of video and audio output cannot be muxed",
                    "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
            }
        }
    }
}
public void postprocess()
{
    audioFiles = vUtil.getAllDemuxedAudio(job.Output, 8);
    fillInAudioInformation();

    logBuilder.Append("Desired size of this automated encoding series: " + job.PostprocessingProperties.OutputSize
        + " split size: " + job.PostprocessingProperties.Splitting + "\r\n");

    VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
    string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output),
        Path.GetFileNameWithoutExtension(job.Output) + "_Video");
    string muxedOutput = job.PostprocessingProperties.FinalOutput;

    // open the video
    Dar? dar;
    string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR,
        job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR,
        logBuilder, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace,
        videoSettings, out dar);

    VideoStream myVideo = new VideoStream();
    ulong length;
    double framerate;
    JobUtil.getInputProperties(out length, out framerate, videoInput);
    myVideo.Input = videoInput;
    myVideo.Output = videoOutput;
    myVideo.NumberOfFrames = length;
    myVideo.Framerate = (decimal)framerate;
    myVideo.DAR = dar;
    myVideo.VideoType = new MuxableType(
        (new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0],
        videoSettings.Codec);
    myVideo.Settings = videoSettings;

    List<string> intermediateFiles = new List<string>();
    intermediateFiles.Add(videoInput);
    intermediateFiles.Add(job.Output);
    intermediateFiles.AddRange(audioFiles.Values);

    if (!string.IsNullOrEmpty(videoInput))
    {
        // create empty subtitles for muxing (subtitles not supported in one click mode)
        MuxStream[] subtitles = new MuxStream[0];
        JobChain c = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs,
            subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
            job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container, false,
            job.PostprocessingProperties.DirectMuxAudio);
        /* vUtil.generateJobSeries(videoInput, videoOutput, muxedOutput, videoSettings,
         * audioStreams, audio, subtitles, job.PostprocessingProperties.ChapterFile,
         * job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.SplitSize,
         * containerOverhead, type, new string[] { job.Output, videoInput }); */
        c = CleanupJob.AddAfter(c, intermediateFiles);
        mainForm.Jobs.addJobsWithDependencies(c);
    }
    mainForm.addToLog(logBuilder.ToString());
}
public void Run(MainForm info)
{
    // normal video verification
    string error = null;
    // update the current audio stream with the latest data
    // updateAudioStreams();
    if ((error = info.Video.verifyVideoSettings()) != null)
    {
        MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
    {
        MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
#warning must be fixed up to be more generic
    if (info.Video.CurrentVideoCodecSettings.EncodingMode == 2 || info.Video.CurrentVideoCodecSettings.EncodingMode == 5)
    {
        MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode",
            "Improper configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }

    VideoCodecSettings vSettings = info.Video.CurrentVideoCodecSettings.clone();
    bool cont = info.JobUtil.getFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame, info.Video.Info.CreditsStartFrame);
    if (cont)
    {
        ulong length = 0;
        double framerate = 0.0;
        VideoStream myVideo = new VideoStream();
        JobUtil.getInputProperties(out length, out framerate, info.Video.VideoInput);
        myVideo.Input = info.Video.Info.VideoInput;
        myVideo.Output = info.Video.Info.VideoOutput;
        myVideo.NumberOfFrames = length;
        myVideo.Framerate = (decimal)framerate;
        myVideo.DAR = info.Video.Info.DAR;
        myVideo.VideoType = info.Video.CurrentMuxableVideoType;
        myVideo.Settings = vSettings;

        using (AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info, info.Video.PrerenderJob))
        {
            if (aew.init())
            {
                info.ClosePlayer();
                aew.ShowDialog();
            }
            else
            {
                MessageBox.Show("The currently selected combination of video and audio output cannot be muxed",
                    "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
            }
        }
    }
}
/// <summary>
/// compiles final bitrate statistics
/// </summary>
protected void compileFinalStats()
{
    try
    {
        if (!string.IsNullOrEmpty(job.Output) && File.Exists(job.Output))
        {
            FileInfo fi = new FileInfo(job.Output);
            long size = fi.Length; // size in bytes

            ulong framecount;
            double framerate;
            JobUtil.getInputProperties(out framecount, out framerate, job.Input);

            double numberOfSeconds = (double)framecount / framerate;
            long bitrate = (long)(size * 8.0 / (numberOfSeconds * 1000.0));

            LogItem stats = log.Info("Final statistics");
            if (job.Settings.EncodingMode == 1) // QP mode
            {
                stats.LogValue("Constant Quantizer Mode", "Quantizer " + job.Settings.BitrateQuantizer + " computed...");
            }
            else if (job.Settings.EncodingMode == 9) // CRF mode
            {
                stats.LogValue("Constant Quality Mode", "Quality " + job.Settings.BitrateQuantizer + " computed...");
            }
            else
            {
                stats.LogValue("Video Bitrate Desired", job.Settings.BitrateQuantizer + " kbit/s");
            }
            stats.LogValue("Video Bitrate Obtained (approximate)", bitrate + " kbit/s");
        }
    }
    catch (Exception e)
    {
        log.LogValue("Exception in compileFinalStats", e, ImageType.Warning);
    }
}
private void StartPostProcessing()
{
    Thread t = null;
    try
    {
        _log.LogEvent("Processing thread started");
        raiseEvent("Preprocessing... ***PLEASE WAIT***");
        _start = DateTime.Now;

        // background worker that refreshes the elapsed-time display once per second
        t = new Thread(new ThreadStart(delegate
        {
            while (true)
            {
                updateTime();
                Thread.Sleep(1000);
            }
        }));
        t.Start();

        List<string> arrAudioFilesDelete = new List<string>();
        audioFiles = new Dictionary<int, string>();
        List<AudioTrackInfo> arrAudioTracks = new List<AudioTrackInfo>();
        List<AudioJob> arrAudioJobs = new List<AudioJob>();
        List<MuxStream> arrMuxStreams = new List<MuxStream>();
        List<string> intermediateFiles = new List<string>();

        FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);

        // sort the audio tracks into extracted MKV tracks, tracks to demux,
        // encoding jobs and direct-mux streams
        foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
        {
            if (oAudioTrack.ExtractMKVTrack)
            {
                audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID,
                    job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
            }
            else if (oAudioTrack.AudioTrackInfo != null)
            {
                arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
            }
            if (oAudioTrack.AudioJob != null)
            {
                if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE
                    && String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                {
                    oAudioTrack.AudioJob.Input = job.Input;
                }
                arrAudioJobs.Add(oAudioTrack.AudioJob);
            }
            if (oAudioTrack.DirectMuxAudio != null)
            {
                arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
            }
        }
        if (audioFiles.Count == 0
            && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE
            && !job.PostprocessingProperties.Eac3toDemux)
        {
            audioFiles = vUtil.getAllDemuxedAudio(arrAudioTracks, new List<AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, _log);
        }
        fillInAudioInformation(arrAudioJobs, arrMuxStreams);

        if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            _log.LogEvent("Don't encode video: True");
        }
        else
        {
            _log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
        }
        _log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);

        // chapter file handling
        if (String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
        {
            job.PostprocessingProperties.ChapterFile = null;
        }
        else if (job.PostprocessingProperties.Container == ContainerType.AVI)
        {
            _log.LogEvent("Chapter handling disabled because of the AVI target container");
            job.PostprocessingProperties.ChapterFile = null;
        }
        else if (!File.Exists(job.PostprocessingProperties.ChapterFile))
        {
            if (job.PostprocessingProperties.ChapterFile.StartsWith("<") || job.PostprocessingProperties.ChapterExtracted)
            {
                // internal chapter file
                string strTempFile = job.PostprocessingProperties.ChapterFile;
                if (Path.GetExtension(job.PostprocessingProperties.VideoInput).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".mkv"))
                {
                    MediaInfoFile oInfo = new MediaInfoFile(job.PostprocessingProperties.VideoInput, ref _log);
                    if (oInfo.hasMKVChapters())
                    {
                        job.PostprocessingProperties.ChapterFile = Path.Combine(job.PostprocessingProperties.WorkingDirectory,
                            Path.GetFileNameWithoutExtension(job.IndexFile) + " - Chapter Information.txt");
                        if (oInfo.extractMKVChapters(job.PostprocessingProperties.ChapterFile))
                        {
                            intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                            job.PostprocessingProperties.ChapterExtracted = true;
                        }
                        else
                        {
                            job.PostprocessingProperties.ChapterFile = strTempFile;
                        }
                    }
                }
                else if (File.Exists(job.PostprocessingProperties.IFOInput))
                {
                    job.PostprocessingProperties.ChapterFile = VideoUtil.getChaptersFromIFO(job.PostprocessingProperties.IFOInput, false,
                        job.PostprocessingProperties.WorkingDirectory, job.PostprocessingProperties.TitleNumberToProcess);
                    if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
                    {
                        intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                        job.PostprocessingProperties.ChapterExtracted = true;
                    }
                    else
                    {
                        job.PostprocessingProperties.ChapterFile = strTempFile;
                    }
                }
            }
            if (!File.Exists(job.PostprocessingProperties.ChapterFile))
            {
                _log.LogEvent("File not found: " + job.PostprocessingProperties.ChapterFile, ImageType.Error);
                job.PostprocessingProperties.ChapterFile = null;
            }
        }
        else if (job.PostprocessingProperties.ChapterExtracted)
        {
            intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
        }

        string avsFile = String.Empty;
        VideoStream myVideo = new VideoStream();
        VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
        if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            // open the video
            Dar? dar;
            avsFile = createAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR,
                job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR,
                _log, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace,
                videoSettings, out dar, job.PostprocessingProperties.AutoCrop,
                job.PostprocessingProperties.KeepInputResolution, job.PostprocessingProperties.UseChaptersMarks);

            ulong length;
            double framerate;
            JobUtil.getInputProperties(out length, out framerate, avsFile);
            myVideo.Input = avsFile;
            myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory,
                Path.GetFileNameWithoutExtension(job.Input) + "_Video");
            myVideo.NumberOfFrames = length;
            myVideo.Framerate = (decimal)framerate;
            myVideo.DAR = dar;
            myVideo.VideoType = new MuxableType(
                (new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0],
                videoSettings.Codec);
            myVideo.Settings = videoSettings;
        }
        else
        {
            // mux the given video file instead of encoding
            myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
            myVideo.Settings = videoSettings;
            MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref _log);
            videoSettings.VideoName = oInfo.VideoInfo.Track.Name;
            myVideo.Framerate = (decimal)oInfo.VideoInfo.FPS;
        }

        intermediateFiles.Add(avsFile);
        intermediateFiles.Add(job.IndexFile);
        intermediateFiles.AddRange(audioFiles.Values);
        if (!string.IsNullOrEmpty(qpfile))
        {
            intermediateFiles.Add(qpfile);
        }
        foreach (string file in arrAudioFilesDelete)
        {
            intermediateFiles.Add(file);
        }
        if (File.Exists(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log")))
        {
            intermediateFiles.Add(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log"));
        }
        foreach (string file in job.PostprocessingProperties.FilesToDelete)
        {
            intermediateFiles.Add(file);
        }

        if (!string.IsNullOrEmpty(avsFile) || !String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            MuxStream[] subtitles;
            if (job.PostprocessingProperties.SubtitleTracks.Count == 0)
            {
                // create empty subtitles for muxing
                subtitles = new MuxStream[0];
            }
            else
            {
                subtitles = new MuxStream[job.PostprocessingProperties.SubtitleTracks.Count];
                int i = 0;
                foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
                {
                    if (oTrack.TrackInfo.IsMKVContainer())
                    {
                        // demuxed MKV track
                        string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                        if (File.Exists(trackFile))
                        {
                            intermediateFiles.Add(trackFile);
                            if (Path.GetExtension(trackFile).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".idx"))
                            {
                                intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                            }
                            subtitles[i] = new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay,
                                oTrack.DefaultStream, oTrack.ForcedStream, null);
                        }
                        else
                        {
                            _log.LogEvent("File not found: " + trackFile, ImageType.Error);
                        }
                    }
                    else
                    {
                        subtitles[i] = new MuxStream(oTrack.DemuxFilePath, oTrack.Language, oTrack.Name, oTrack.Delay,
                            oTrack.DefaultStream, oTrack.ForcedStream, null);
                    }
                    i++;
                }
            }

            JobChain c = vUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput,
                arrAudioJobs.ToArray(), subtitles, job.PostprocessingProperties.ChapterFile,
                job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.Splitting,
                job.PostprocessingProperties.Container, job.PostprocessingProperties.PrerenderJob,
                arrMuxStreams.ToArray(), _log, job.PostprocessingProperties.DeviceOutputType, null,
                job.PostprocessingProperties.VideoFileToMux, job.PostprocessingProperties.AudioTracks.ToArray());
            if (c == null)
            {
                _log.Warn("Job creation aborted");
                t.Abort(); // stop the elapsed-time updater before bailing out
                return;
            }
            c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
            mainForm.Jobs.addJobsWithDependencies(c);

            // batch processing other input files if necessary
            if (job.PostprocessingProperties.FilesToProcess.Count > 0)
            {
                OneClickWindow ocw = new OneClickWindow(mainForm);
                ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
            }
        }
    }
    catch (Exception e)
    {
        if (t != null) // guard: the exception may have occurred before the updater thread was created
            t.Abort();
        if (e is ThreadAbortException)
        {
            _log.LogEvent("Aborting...");
            su.WasAborted = true;
        }
        else
        {
            _log.LogValue("An error occurred", e, ImageType.Error);
            su.HasError = true;
        }
        su.IsComplete = true;
        raiseEvent();
        return;
    }
    t.Abort(); // stop the elapsed-time updater
    su.IsComplete = true;
    raiseEvent();
}
// note: d, hres, vres and zones are currently unused by this builder
public static string genCommandline(string input, string output, Dar? d, DivXAVCSettings xs, int hres, int vres, Zone[] zones)
{
    StringBuilder sb = new StringBuilder();
    CultureInfo ci = new CultureInfo("en-us");

    sb.Append(" -i \"" + input + "\"");
    sb.Append(" -o \"" + output + "\"");

    switch (xs.EncodingMode)
    {
        case 0: // 2pass - 1st pass
            sb.Append(" -npass 1 -sf " + "\"" + xs.Logfile + "\" -br " + xs.BitrateQuantizer); // add logfile
            break;
        case 1: // 2pass - 2nd pass
        case 2: // automated 2pass
            sb.Append(" -npass 2 -sf " + "\"" + xs.Logfile + "\" -br " + xs.BitrateQuantizer); // add logfile
            break;
    }

    double framerate = 0.0;
    ulong length = 0;
    MainForm mainForm = MainForm.Instance;
    if (!string.IsNullOrEmpty(mainForm.Video.VideoInput))
    {
        JobUtil.getInputProperties(out length, out framerate, mainForm.Video.VideoInput);
        sb.Append(" -fps " + framerate.ToString(ci));
    }

    if (xs.Turbo)
    {
        xs.AQO = 0;
        xs.Pyramid = false;
        xs.BasRef = false;
        xs.MaxRefFrames = 1;
        xs.MaxBFrames = 0;
    }

    // only emit a flag when the setting differs from the encoder default
    if (xs.InterlaceMode != 0)
        sb.Append(" -fmode " + xs.InterlaceMode);
    if (xs.AQO != 1)
        sb.Append(" -aqo " + xs.AQO);
    if (xs.GOPLength != 4)
        sb.Append(" -I " + xs.GOPLength);
    if (xs.MaxBFrames != 2)
        sb.Append(" -bf " + xs.MaxBFrames);
    if (xs.MaxRefFrames != 4)
        sb.Append(" -ref " + xs.MaxRefFrames);
    if (xs.BasRef)
        sb.Append(" -bref");
    if (xs.Pyramid)
        sb.Append(" -pyramid");

    sb.Append(" -threads " + xs.NbThreads);
    return sb.ToString();
}
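// For illustration: with hypothetical inputs (EncodingMode == 0, Logfile == "video.log",
// BitrateQuantizer == 1000, NbThreads == 1, a 23.976 fps source loaded in the main form,
// and every other setting at the default value checked above), the builder yields roughly:
//
//   -i "in.avs" -o "out.264" -npass 1 -sf "video.log" -br 1000 -fps 23.976 -threads 1
//
// The AQO, GOP-length, B-frame and reference-frame flags are only emitted when the
// corresponding setting differs from its default.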