/// <summary>
/// Validates the currently loaded video against the AVC level selected in the
/// current x264 profile. If the source cannot comply with that level, offers to
/// raise the profile's level to the minimum compliant one.
/// </summary>
/// <param name="info">main window supplying the loaded video input and job utilities</param>
public void Run(MainForm info)
{
    // Guard: nothing to validate until an AviSynth script has been loaded.
    // (IsNullOrEmpty also protects against a null VideoInput, unlike Equals("").)
    if (string.IsNullOrEmpty(info.Video.VideoInput))
    {
        MessageBox.Show("You first need to load an AviSynth script", "No video configured", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        return;
    }
    bool succ;
    int hRes, vRes;
    MeGUI.core.util.Dar d;
    ulong nbFrames;
    double framerate;
    AVCLevels.Levels? compliantLevel = null;
    x264Settings currentX264Settings = (x264Settings)MainForm.Instance.Profiles.GetCurrentSettings("x264");
    if (JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out d, info.Video.VideoInput))
    {
        // Input could be opened: check its resolution/framerate against the chosen level.
        AVCLevels al = new AVCLevels();
        succ = al.validateAVCLevel(hRes, vRes, framerate, currentX264Settings, out compliantLevel);
    }
    else
    {
        succ = false;
    }
    if (succ)
    {
        MessageBox.Show("This file matches the criteria for the level chosen", "Video validated", MessageBoxButtons.OK, MessageBoxIcon.Information);
        return;
    }
    if (compliantLevel == null)
    {
        // GetAllInputProperties failed, so validation never produced a level.
        MessageBox.Show("Unable to open video", "Test failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }
    // The source is too demanding for the selected level: offer the minimum compliant one.
    // (The original code allocated an unused AVCLevels instance here; removed.)
    string message = "This video source cannot be encoded to comply with the chosen level.\n" +
        "You need at least Level " + AVCLevels.GetLevelText((AVCLevels.Levels)compliantLevel) + " for this source. Do you want\n" +
        "to increase the level automatically now?";
    DialogResult dr = MessageBox.Show(message, "Test failed", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
    if (dr == DialogResult.Yes)
    {
        currentX264Settings.AVCLevel = (AVCLevels.Levels)compliantLevel;
    }
}
/// <summary>
/// Checks the loaded video against the AVC level chosen in the current x264
/// profile and offers to raise the level when the source does not comply.
/// </summary>
/// <param name="info">main window providing the video input and job utilities</param>
public void Run(MainForm info)
{
    if (info.Video.VideoInput.Equals(""))
    {
        // No script loaded yet - nothing we can validate.
        MessageBox.Show("You first need to load an AviSynth script", "No video configured", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        return;
    }
    int compliantLevel = 15;
    x264Settings profileSettings = (x264Settings)MainForm.Instance.Profiles.GetCurrentSettings("x264");
    bool isCompliant = info.JobUtil.validateAVCLevel(info.Video.VideoInput, profileSettings, out compliantLevel);
    if (isCompliant)
    {
        MessageBox.Show("This file matches the criteria for the level chosen", "Video validated", MessageBoxButtons.OK);
        return;
    }
    if (compliantLevel == -1)
    {
        // The validator could not open/read the source at all.
        MessageBox.Show("Unable to open video", "Test failed", MessageBoxButtons.OK);
        return;
    }
    // Look up the display name of the lowest level that would fit this source.
    string levelRequired = new AVCLevels().getLevels()[compliantLevel];
    string message = "This video source cannot be encoded to comply with the chosen level.\n" +
        "You need at least level " + levelRequired + " for this source. Do you want\n" +
        "to increase the level automatically now?";
    if (MessageBox.Show(message, "Test failed", MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes)
    {
        profileSettings.Level = compliantLevel;
    }
}
/// <summary>
/// Builds the complete x264.exe command line for the given input/output files
/// and encoder settings. Option order is significant only for readability; each
/// option is emitted only when it differs from the x264 default.
/// NOTE: mutates <paramref name="xs"/> (Turbo and, when Turbo is set, several
/// quality-related settings) as a side effect.
/// </summary>
/// <param name="input">path of the input (AviSynth) file</param>
/// <param name="output">path of the encoded output file</param>
/// <param name="d">display aspect ratio of the source, or null to omit --sar</param>
/// <param name="hres">horizontal resolution, used to derive the SAR</param>
/// <param name="vres">vertical resolution, used to derive the SAR</param>
/// <param name="xs">the x264 settings to translate into CLI options</param>
/// <returns>the assembled command line string</returns>
public static string genCommandline(string input, string output, Dar? d, int hres, int vres, x264Settings xs)
{
    StringBuilder sb = new StringBuilder();
    // Numbers must always be formatted with '.' as decimal separator for x264.
    CultureInfo ci = new CultureInfo("en-us");
    if (xs.EncodingMode == 4 || xs.EncodingMode == 7)
    {
        xs.Turbo = false; // turn off turbo to prevent inconsistent commandline preview
    }
    // Rate-control mode selection.
    switch (xs.EncodingMode)
    {
        case 0: // ABR
            sb.Append("--bitrate " + xs.BitrateQuantizer + " ");
            break;
        case 1: // CQ
            if (xs.Lossless)
            {
                sb.Append("--qp 0 ");
            }
            else
            {
                sb.Append("--qp " + xs.QuantizerCRF.ToString(ci) + " ");
            }
            break;
        case 2: // 2 pass first pass
            sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 3: // 2 pass second pass
        case 4: // automated twopass
            sb.Append("--pass 2 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 5: // 3 pass first pass
            sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 6: // 3 pass 2nd pass
            sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 7: // 3 pass 3rd pass
        case 8: // automated threepass, show third pass options
            sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 9: // constant quality
            sb.Append("--crf " + xs.QuantizerCRF.ToString(ci) + " ");
            break;
    }
    // now add the rest of the x264 encoder options
    // AVC Level
    if (xs.Level != 15) // unrestricted (x264.exe default)
    {
        sb.Append("--level " + AVCLevels.getCLILevelNames()[xs.Level] + " ");
    }
    if (xs.KeyframeInterval != 250) // gop size of 250 is default
    {
        sb.Append("--keyint " + xs.KeyframeInterval + " ");
    }
    if (xs.MinGOPSize != 25)
    {
        sb.Append("--min-keyint " + xs.MinGOPSize + " ");
    }
    if (xs.Turbo)
    {
        // Turbo first pass: force the fastest analysis settings. These writes
        // deliberately mutate xs so the option emission below reflects them.
        xs.NbRefFrames = 1;
        xs.SubPelRefinement = 0; // Q-Pel 1 iteration
        xs.METype = 0; // diamond search
        xs.I4x4mv = false;
        xs.P4x4mv = false;
        xs.I8x8mv = false;
        xs.P8x8mv = false;
        xs.B8x8mv = false;
        xs.AdaptiveDCT = false;
        xs.MixedRefs = false;
        xs.BRDO = false;
        xs.X264Trellis = 0; // disable trellis
        xs.noFastPSkip = false;
        xs.WeightedBPrediction = false;
        xs.biME = false;
    }
    if (xs.DeadZoneInter != 21)
    {
        sb.Append("--deadzone-inter " + xs.DeadZoneInter + " ");
    }
    if (xs.DeadZoneIntra != 11)
    {
        sb.Append("--deadzone-intra " + xs.DeadZoneIntra + " ");
    }
    if (xs.NbRefFrames != 1) // 1 ref frame is default
    {
        sb.Append("--ref " + xs.NbRefFrames + " ");
    }
    if (xs.MixedRefs)
    {
        sb.Append("--mixed-refs ");
    }
    if (xs.noFastPSkip)
    {
        sb.Append("--no-fast-pskip ");
    }
    if (xs.NbBframes != 0) // 0 is default value, adaptive and pyramid are conditional on b frames being enabled
    {
        sb.Append("--bframes " + xs.NbBframes + " ");
        if (!xs.AdaptiveBFrames)
        {
            sb.Append("--no-b-adapt ");
        }
        if (xs.NbBframes > 1 && xs.BFramePyramid) // pyramid needs a minimum of 2 b frames
        {
            sb.Append("--b-pyramid ");
        }
        if (xs.BRDO)
        {
            sb.Append("--b-rdo ");
        }
        if (xs.biME)
        {
            sb.Append("--bime ");
        }
        if (xs.WeightedBPrediction)
        {
            sb.Append("--weightb ");
        }
        if (xs.BframePredictionMode != 1)
        {
            sb.Append("--direct ");
            if (xs.BframePredictionMode == 0)
            {
                sb.Append("none ");
            }
            else if (xs.BframePredictionMode == 2)
            {
                sb.Append("temporal ");
            }
            else if (xs.BframePredictionMode == 3)
            {
                sb.Append("auto ");
            }
        }
    }
    if (xs.Deblock) // deblocker active, add options
    {
        if (xs.AlphaDeblock != 0 || xs.BetaDeblock != 0) // 0 is default value
        {
            sb.Append("--filter " + xs.AlphaDeblock + "," + xs.BetaDeblock + " ");
        }
    }
    else // no deblocking
    {
        sb.Append("--nf ");
    }
    if (!xs.Cabac) // no cabac
    {
        sb.Append("--no-cabac ");
    }
    // SubPelRefinement is stored zero-based; the CLI value is one-based.
    if (xs.SubPelRefinement + 1 != 5) // non default subpel refinement
    {
        int subq = xs.SubPelRefinement + 1;
        sb.Append("--subme " + subq + " ");
    }
    if (!xs.ChromaME)
    {
        sb.Append("--no-chroma-me ");
    }
    if (xs.X264Trellis > 0)
    {
        sb.Append("--trellis " + xs.X264Trellis + " ");
    }
    // now it's time for the macroblock types
    if (xs.P8x8mv || xs.B8x8mv || xs.I4x4mv || xs.I8x8mv || xs.P4x4mv || xs.AdaptiveDCT)
    {
        sb.Append("--partitions ");
        if (xs.I4x4mv && xs.P4x4mv && xs.I8x8mv && xs.P8x8mv && xs.B8x8mv)
        {
            sb.Append("all ");
        }
        else
        {
            if (xs.P8x8mv) // default is checked
            {
                sb.Append("p8x8,");
            }
            if (xs.B8x8mv) // default is checked
            {
                sb.Append("b8x8,");
            }
            if (xs.I4x4mv) // default is checked
            {
                sb.Append("i4x4,");
            }
            if (xs.P4x4mv) // default is unchecked
            {
                sb.Append("p4x4,");
            }
            if (xs.I8x8mv) // default is checked
            {
                sb.Append("i8x8");
            }
            // Strip a trailing comma left over when i8x8 was not emitted last.
            if (sb.ToString().EndsWith(","))
            {
                sb.Remove(sb.Length - 1, 1);
            }
        }
        if (xs.AdaptiveDCT) // default is unchecked
        {
            sb.Append(" --8x8dct ");
        }
        if (!sb.ToString().EndsWith(" "))
        {
            sb.Append(" ");
        }
    }
    else
    {
        sb.Append("--partitions none ");
    }
    if (xs.EncodingMode != 1) // doesn't apply to CQ mode
    {
        if (xs.MinQuantizer != 10) // default min quantizer is 10
        {
            sb.Append("--qpmin " + xs.MinQuantizer + " ");
        }
        if (xs.MaxQuantizer != 51) // 51 is the default max quanitzer
        {
            sb.Append("--qpmax " + xs.MaxQuantizer + " ");
        }
        if (xs.MaxQuantDelta != 4) // 4 is the default value
        {
            sb.Append("--qpstep " + xs.MaxQuantDelta + " ");
        }
        if (xs.IPFactor != new decimal(1.4)) // 1.4 is the default value
        {
            sb.Append("--ipratio " + xs.IPFactor.ToString(ci) + " ");
        }
        if (xs.PBFactor != new decimal(1.3)) // 1.3 is the default value here
        {
            sb.Append("--pbratio " + xs.PBFactor.ToString(ci) + " ");
        }
        if (xs.ChromaQPOffset != new decimal(0.0))
        {
            sb.Append("--chroma-qp-offset " + xs.ChromaQPOffset.ToString(ci) + " ");
        }
        if (xs.VBVBufferSize > 0)
        {
            sb.Append("--vbv-bufsize " + xs.VBVBufferSize + " ");
        }
        if (xs.VBVMaxBitrate > 0)
        {
            sb.Append("--vbv-maxrate " + xs.VBVMaxBitrate + " ");
        }
        if (xs.VBVInitialBuffer != new decimal(0.9))
        {
            sb.Append("--vbv-init " + xs.VBVInitialBuffer.ToString(ci) + " ");
        }
        if (xs.BitrateVariance != 1)
        {
            sb.Append("--ratetol " + xs.BitrateVariance.ToString(ci) + " ");
        }
        if (xs.QuantCompression != new decimal(0.6))
        {
            sb.Append("--qcomp " + xs.QuantCompression.ToString(ci) + " ");
        }
        if (xs.EncodingMode > 1) // applies only to twopass
        {
            if (xs.TempComplexityBlur != 20)
            {
                sb.Append("--cplxblur " + xs.TempComplexityBlur.ToString(ci) + " ");
            }
            if (xs.TempQuanBlurCC != new decimal(0.5))
            {
                sb.Append("--qblur " + xs.TempQuanBlurCC.ToString(ci) + " ");
            }
        }
    }
    if (xs.SCDSensitivity != new decimal(40))
    {
        sb.Append("--scenecut " + xs.SCDSensitivity.ToString(ci) + " ");
    }
    if (xs.BframeBias != new decimal(0))
    {
        sb.Append("--b-bias " + xs.BframeBias.ToString(ci) + " ");
    }
    // METype is stored zero-based; CLI index 2 (hexagon) is the x264 default.
    if (xs.METype + 1 != 2)
    {
        sb.Append("--me ");
        if (xs.METype + 1 == 1)
        {
            sb.Append("dia ");
        }
        if (xs.METype + 1 == 3)
        {
            sb.Append("umh ");
        }
        if (xs.METype + 1 == 4)
        {
            sb.Append("esa ");
        }
        if (xs.METype + 1 == 5)
        {
            sb.Append("tesa ");
        }
    }
    if (xs.MERange != 16)
    {
        sb.Append("--merange " + xs.MERange + " ");
    }
    if (xs.NbThreads > 1)
    {
        sb.Append("--threads " + xs.NbThreads + " ");
    }
    if (xs.NbThreads == 0)
    {
        sb.Append("--threads auto ");
    }
    sb.Append("--thread-input ");
    if (xs.Zones != null && xs.Zones.Length > 0 && xs.CreditsQuantizer >= new decimal(1))
    {
        // Zones are emitted as start,end,q=N/ or start,end,b=N/ and joined by '/'.
        sb.Append("--zones ");
        foreach (Zone zone in xs.Zones)
        {
            sb.Append(zone.startFrame + "," + zone.endFrame + ",");
            if (zone.mode == ZONEMODE.QUANTIZER)
            {
                sb.Append("q=");
                sb.Append(zone.modifier + "/");
            }
            if (zone.mode == ZONEMODE.WEIGHT)
            {
                sb.Append("b=");
                double mod = (double)zone.modifier / 100.0;
                sb.Append(mod.ToString(ci) + "/");
            }
        }
        sb.Remove(sb.Length - 1, 1); // drop the trailing '/'
        sb.Append(" ");
    }
    if (d.HasValue)
    {
        Sar s = d.Value.ToSar(hres, vres);
        sb.Append("--sar " + s.X + ":" + s.Y + " ");
    }
    if (xs.QuantizerMatrixType > 0) // custom matrices enabled
    {
        if (xs.QuantizerMatrixType == 1)
        {
            sb.Append("--cqm \"jvt\" ");
        }
        if (xs.QuantizerMatrixType == 2)
        {
            sb.Append("--cqmfile \"" + xs.QuantizerMatrix + "\" ");
        }
    }
    sb.Append("--progress "); // ensure that the progress is shown
    if (xs.NoDCTDecimate)
    {
        sb.Append("--no-dct-decimate ");
    }
    if (!xs.PSNRCalculation)
    {
        sb.Append("--no-psnr ");
    }
    if (!xs.SSIMCalculation)
    {
        sb.Append("--no-ssim ");
    }
    if (xs.EncodeInterlaced)
    {
        sb.Append("--interlaced ");
    }
    if (xs.NoiseReduction > 0)
    {
        sb.Append("--nr " + xs.NoiseReduction + " ");
    }
    //add the rest of the mencoder commandline regarding the output
    // First passes discard the video; only the stats file matters.
    if (xs.EncodingMode == 2 || xs.EncodingMode == 5)
    {
        sb.Append("--output NUL ");
    }
    else
    {
        sb.Append("--output " + "\"" + output + "\" ");
    }
    sb.Append("\"" + input + "\" ");
    if (!xs.CustomEncoderOptions.Equals("")) // add custom encoder options
    {
        sb.Append(xs.CustomEncoderOptions);
    }
    return (sb.ToString());
}
/// <summary>
/// Opens a DGIndex (d2v) script and builds an AviSynth script around it.
/// If the file can be properly opened, auto-cropping is performed; then, depending
/// on the AR settings, the proper resolution for automatic resizing (taking the
/// derived cropping values into account) is calculated; finally the AviSynth
/// script is written to disk and its name returned.
/// </summary>
/// <param name="path">dgindex script to open</param>
/// <param name="AR">display aspect ratio to force, or null to auto-detect from the d2v</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not AR signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="log">log item receiving progress and error messages</param>
/// <param name="avsSettings">AviSynth generation settings (mod16 method, denoise, resize filter, template)</param>
/// <param name="autoDeint">whether to run the source detector and insert deinterlacing</param>
/// <param name="settings">the codec settings (used only for x264 AVC-level validation)</param>
/// <param name="dar">receives the display aspect ratio suggested for the output</param>
/// <returns>the name of the AviSynth script created, or the empty string if there was an error</returns>
private string openVideo(string path, Dar? AR, int horizontalResolution, bool signalAR, LogItem log,
    AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar)
{
    dar = null;
    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        log.Error("DGDecode reported 0 frames in this file. This is a fatal error. Please recreate the DGIndex project");
        return ("");
    }
    //Autocrop
    CropValues final = Autocrop.autocrop(reader);
    if (signalAR)
    {
        // When AR signalling is requested, bring the crop values to a mod16-compatible
        // form using the configured method.
        if (avsSettings.Mod16Method == mod16Method.overcrop)
        {
            ScriptServer.overcrop(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
        {
            ScriptServer.cropMod4Horizontal(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.undercrop)
        {
            ScriptServer.undercrop(ref final);
        }
    }
    // Autocrop signals failure through a left crop value of -1.
    bool error = (final.left == -1);
    if (!error)
    {
        log.LogValue("Autocrop values", final);
    }
    else
    {
        log.Error("Autocrop failed, aborting now");
        return ("");
    }
    decimal customDAR;
    log.LogValue("Auto-detect aspect ratio now", AR == null);
    //Check if AR needs to be autodetected now
    if (AR == null) // it does
    {
        customDAR = d2v.Info.DAR.ar;
        if (customDAR > 0)
        {
            log.LogValue("Aspect ratio", customDAR);
        }
        else
        {
            // No usable AR in the source: fall back to ITU 16:9 PAL.
            customDAR = Dar.ITU16x9PAL.ar;
            log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
    {
        customDAR = AR.Value.ar;
    }
    // Minimise upsizing: never request more width than the cropped source provides.
    int sourceHorizontalResolution = (int)d2v.Info.Width - final.right - final.left;
    if (horizontalResolution > sourceHorizontalResolution)
    {
        if (avsSettings.Mod16Method == mod16Method.resize)
        {
            // Shrink in mod16 steps until within one step of the source width.
            while (horizontalResolution > sourceHorizontalResolution + 16)
            {
                horizontalResolution -= 16;
            }
        }
        else
        {
            horizontalResolution = sourceHorizontalResolution;
        }
    }
    //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
    log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);
    if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
    {
        x264Settings xs = (x264Settings)settings;
        if (xs.Level != 15) // 15 = unrestricted, nothing to validate
        {
            AVCLevels al = new AVCLevels();
            log.LogValue("AVC level", al.getLevels()[xs.Level]);
            int compliantLevel = 15;
            // NOTE(review): validation uses the instance field this.al, not the local 'al' above.
            while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
            {
                // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                string levelName = al.getLevels()[xs.Level]; // NOTE(review): unused local, candidate for removal
                horizontalResolution -= 16;
                scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            }
            log.LogValue("Resolution adjusted for AVC Level", horizontalResolution + "x" + scriptVerticalResolution);
        }
    }
    //Generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";
    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0);
    log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        // Run the source detector; waitTillAnalyseFinished blocks until the
        // asynchronous analysis has completed and 'filters' has been filled in.
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        log.LogValue("Deinterlacing used", deinterlaceLines);
    }
    // Re-generate the input line now that interlacing status is known.
    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0);
    cropLine = ScriptServer.GetCropLine(true, final);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);
    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        // Prepend the DAR globals so downstream tools can pick up the anamorphic flag.
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    log.LogValue("Generated Avisynth script", newScript);
    try
    {
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"));
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return ("");
    }
    return (Path.ChangeExtension(path, ".avs"));
}
/// <summary>
/// Creates a new JobUtil bound to the given main window and prepares an
/// AVCLevels helper for later level-validation calls.
/// </summary>
/// <param name="mainForm">host window this utility operates on</param>
public JobUtil(MainForm mainForm)
{
    al = new AVCLevels();
    this.mainForm = mainForm;
}
/// <summary>
/// Opens a DGIndex (d2v) script and builds an AviSynth script around it.
/// If the file can be properly opened, auto-cropping is performed (unless the
/// input resolution is kept); then, depending on the AR settings, the proper
/// resolution for automatic resizing (taking the derived cropping values into
/// account) is calculated; finally the AviSynth script is written and its name
/// returned.
/// </summary>
/// <param name="path">dgindex script to open</param>
/// <param name="AR">display aspect ratio to force, or null to auto-detect from the d2v</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not AR signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="log">log item receiving progress and error messages</param>
/// <param name="avsSettings">AviSynth generation settings (mod16 method, denoise, resize filter, template)</param>
/// <param name="autoDeint">whether to run the source detector and insert deinterlacing</param>
/// <param name="settings">the codec settings (used only for x264 AVC-level validation and QP file setup)</param>
/// <param name="dar">receives the display aspect ratio suggested for the output</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">when true, skip cropping/resizing and keep the source dimensions</param>
/// <param name="useChaptersMarks">when true (x264 only), derive an x264 QP file from the chapter file</param>
/// <returns>the name of the AviSynth script created, or the empty string if there was an error</returns>
private string openVideo(string path, Dar? AR, int horizontalResolution, bool signalAR, LogItem log,
    AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar,
    bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    dar = null;
    CropValues final = new CropValues();
    Dar customDAR;
    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        log.Error("DGDecode reported 0 frames in this file. This is a fatal error. Please recreate the DGIndex project");
        return "";
    }
    if (!keepInputResolution)
    {
        //Autocrop
        final = Autocrop.autocrop(reader);
        if (signalAR)
        {
            // When AR signalling is requested, bring the crop values to a
            // mod16-compatible form using the configured method.
            if (avsSettings.Mod16Method == mod16Method.overcrop)
                ScriptServer.overcrop(ref final);
            else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
                ScriptServer.cropMod4Horizontal(ref final);
            else if (avsSettings.Mod16Method == mod16Method.undercrop)
                ScriptServer.undercrop(ref final);
        }
        if (autoCrop)
        {
            // Autocrop signals failure through a left crop value of -1.
            bool error = (final.left == -1);
            if (!error)
                log.LogValue("Autocrop values", final);
            else
            {
                log.Error("Autocrop failed, aborting now");
                return "";
            }
        }
    }
    log.LogValue("Auto-detect aspect ratio now", AR == null);
    //Check if AR needs to be autodetected now
    if (AR == null) // it does
    {
        customDAR = d2v.Info.DAR;
        if (customDAR.ar > 0)
            log.LogValue("Aspect ratio", customDAR);
        else
        {
            // No usable AR in the source: fall back to ITU 16:9 PAL.
            customDAR = Dar.ITU16x9PAL;
            log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
        customDAR = AR.Value;
    if (keepInputResolution)
    {
        // Keep the source dimensions verbatim and pass the detected DAR through.
        horizontalResolution = (int)d2v.Info.Width;
        dar = customDAR;
    }
    else
    {
        // Minimise upsizing: never request more width than the source provides.
        int sourceHorizontalResolution = (int)d2v.Info.Width - final.right - final.left;
        // NOTE(review): this looks inverted - resetting to the full width when
        // autoCrop is ON discards the crop just computed; presumably it was meant
        // for the !autoCrop case. Confirm against upstream history.
        if (autoCrop)
            sourceHorizontalResolution = (int)d2v.Info.Width;
        if (horizontalResolution > sourceHorizontalResolution)
        {
            if (avsSettings.Mod16Method == mod16Method.resize)
                // Shrink in mod16 steps until within one step of the source width.
                while (horizontalResolution > sourceHorizontalResolution + 16)
                    horizontalResolution -= 16;
            else
                horizontalResolution = sourceHorizontalResolution;
        }
    }
    //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = 0;
    if (keepInputResolution)
    {
        scriptVerticalResolution = (int)d2v.Info.Height;
        log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);
    }
    else
    {
        scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR.ar, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
        log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);
        if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
        {
            x264Settings xs = (x264Settings)settings;
            if (xs.Level != 15) // 15 = unrestricted, nothing to validate
            {
                AVCLevels al = new AVCLevels();
                log.LogValue("AVC level", al.getLevels()[xs.Level]);
                int compliantLevel = 15;
                // NOTE(review): validation uses the instance field this.al, not the local 'al' above.
                while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
                {
                    // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                    string levelName = al.getLevels()[xs.Level]; // NOTE(review): unused local, candidate for removal
                    horizontalResolution -= 16;
                    scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR.ar, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                }
                log.LogValue("Resolution adjusted for AVC Level", horizontalResolution + "x" + scriptVerticalResolution);
            }
            if (useChaptersMarks)
            {
                // NOTE(review): 'qpfile' and 'job' are members defined elsewhere in this class.
                qpfile = job.PostprocessingProperties.ChapterFile;
                if ((Path.GetExtension(qpfile).ToLower()) == ".txt")
                    qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, d2v.Info.FPS);
                if (File.Exists(qpfile))
                {
                    xs.UseQPFile = true;
                    xs.QPFile = qpfile;
                }
            }
        }
    }
    //Generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";
    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0, false);
    log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        // Run the source detector; waitTillAnalyseFinished blocks until the
        // asynchronous analysis has completed and 'filters' has been filled in.
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        log.LogValue("Deinterlacing used", deinterlaceLines);
    }
    // Re-generate the input line now that interlacing status is known.
    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, false);
    if (autoCrop)
        cropLine = ScriptServer.GetCropLine(true, final);
    else
        cropLine = ScriptServer.GetCropLine(false, final);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    if (!keepInputResolution)
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);
    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
        // Prepend the DAR globals so downstream tools can pick up the anamorphic flag.
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    log.LogValue("Generated Avisynth script", newScript);
    try
    {
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"), false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return "";
    }
    return Path.ChangeExtension(path, ".avs");
}
/// <summary>
/// Validates the loaded video against the AVC level selected in the current
/// x264 profile; if it does not comply, offers to bump the profile's level to
/// the minimum one that fits the source.
/// </summary>
/// <param name="info">main window providing the video input and job utilities</param>
public void Run(MainForm info)
{
    // Require a loaded AviSynth script before validating anything.
    if (info.Video.VideoInput.Equals(""))
    {
        MessageBox.Show("You first need to load an AviSynth script", "No video configured", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        return;
    }
    int compliantLevel = 15;
    x264Settings x264Profile = (x264Settings)MainForm.Instance.Profiles.GetCurrentSettings("x264");
    bool levelOk = info.JobUtil.validateAVCLevel(info.Video.VideoInput, x264Profile, out compliantLevel);
    if (levelOk)
    {
        MessageBox.Show("This file matches the criteria for the level chosen", "Video validated", MessageBoxButtons.OK, MessageBoxIcon.Information);
        return;
    }
    if (compliantLevel == -1)
    {
        // The validator could not open/read the source at all.
        MessageBox.Show("Unable to open video", "Test failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }
    // Look up the display name of the lowest level that would fit this source.
    string levelRequired = new AVCLevels().getLevels()[compliantLevel];
    string message = "This video source cannot be encoded to comply with the chosen level.\n" +
        "You need at least " + levelRequired + " for this source. Do you want\n" +
        "to increase the level automatically now?";
    DialogResult choice = MessageBox.Show(message, "Test failed", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
    if (choice == DialogResult.Yes)
    {
        x264Profile.Level = compliantLevel;
    }
}
/* private void getAudioStreams(Dictionary<int, string> audioFiles, OneClickWindow.PartialAudioStream[] partialAudioStream, out List<AudioJob> encodableAudioStreams, out List<MuxStream> muxOnlyAudioStreams) * { * muxOnlyAudioStreams = new List<MuxStream>(); * encodableAudioStreams = new List<AudioJob>(); * int counter = 0; * foreach (OneClickWindow.PartialAudioStream propertiesStream in job.PostprocessingProperties.AudioStreams) * { * counter++; // The track number starts at 1, so we increment right here. This also ensures it will always be incremented * * bool error = false; * string input = null, output = null, language = null; * AudioCodecSettings settings = null; * // Input * if (string.IsNullOrEmpty(propertiesStream.input)) * continue; // Here we have an unconfigured stream. Let's just go on to the next one * * if (propertiesStream.useExternalInput) * input = propertiesStream.input; * else if (audioFiles.ContainsKey(propertiesStream.trackNumber)) * input = audioFiles[propertiesStream.trackNumber]; * else * error = true; * * // Settings * if (propertiesStream.dontEncode) * settings = null; * else if (propertiesStream.settings != null) * settings = propertiesStream.settings; * else * error = true; * * // Output * if (propertiesStream.dontEncode) * output = input; * else if (!error) * output = Path.Combine( * Path.GetDirectoryName(input), * Path.GetFileNameWithoutExtension(input) + "_" + * propertiesStream.trackNumber + ".file"); * * // Language * if (!string.IsNullOrEmpty(propertiesStream.language)) * language = propertiesStream.language; * else * language = ""; * * if (error) * { * logBuilder.AppendFormat("Trouble with audio track {0}. 
Skipping track...{1}", counter, Environment.NewLine);
 * output = null;
 * input = null;
 * input = null;
 * }
 * else
 * {
 * if (propertiesStream.dontEncode)
 * {
 * MuxStream newStream = new MuxStream();
 * newStream.path = input;
 * newStream.name = "";
 * newStream.language = language;
 * muxOnlyAudioStreams.Add(newStream);
 * }
 * else
 * {
 * AudioJob encodeStream = new AudioJob();
 * encodeStream.Input = input;
 * encodeStream.Output = output;
 * encodeStream.Settings = settings;
 * encodableAudioStreams.Add(encodeStream);
 * }
 * }
 * }
 * }*/
/// <summary>
/// Opens a DGIndex (d2v) script and builds an AviSynth script around it.
/// If the file can be properly opened, auto-cropping is performed; then, depending
/// on the AR settings, the proper resolution for automatic resizing (taking the
/// derived cropping values into account) is calculated; finally the AviSynth
/// script is written to disk and its name returned.
/// </summary>
/// <param name="path">dgindex script to open</param>
/// <param name="AR">display aspect ratio to force, or null to auto-detect from the d2v</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not AR signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="logBuilder">stringbuilder where to append log messages</param>
/// <param name="avsSettings">AviSynth generation settings (mod16 method, denoise, resize filter, template)</param>
/// <param name="autoDeint">whether to run the source detector and insert deinterlacing</param>
/// <param name="settings">the codec settings (used only for x264 AVC-level validation)</param>
/// <param name="dar">receives the display aspect ratio suggested for the output</param>
/// <returns>the name of the AviSynth script created, or the empty string if there was an error</returns>
private string openVideo(string path, Dar? AR, int horizontalResolution, bool signalAR, StringBuilder logBuilder,
    AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar)
{
    dar = null;
    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        logBuilder.Append("DGDecode reported 0 frames in this file.\r\nThis is a fatal error.\r\n\r\nPlease recreate the DGIndex project");
        return ("");
    }
    //Autocrop
    CropValues final = Autocrop.autocrop(reader);
    if (signalAR)
    {
        // When AR signalling is requested, bring the crop values to a
        // mod16-compatible form. (This variant has no 'undercrop' branch.)
        if (avsSettings.Mod16Method == mod16Method.overcrop)
        {
            ScriptServer.overcrop(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
        {
            ScriptServer.cropMod4Horizontal(ref final);
        }
    }
    // Autocrop signals failure through a left crop value of -1.
    bool error = (final.left == -1);
    if (!error)
    {
        logBuilder.Append("Autocropping successful. Using the following crop values: left: " + final.left + ", top: " + final.top + ", right: " + final.right + ", bottom: " + final.bottom + ".\r\n");
    }
    else
    {
        logBuilder.Append("Autocropping did not find 3 frames that have matching crop values\r\n" + "Autocrop failed, aborting now");
        return ("");
    }
    decimal customDAR;
    //Check if AR needs to be autodetected now
    if (AR == null) // it does
    {
        logBuilder.Append("Aspect Ratio set to auto-detect later, detecting now. ");
        customDAR = d2v.Info.DAR.ar;
        if (customDAR > 0)
        {
            logBuilder.AppendFormat("Found aspect ratio of {0}.{1}", customDAR, Environment.NewLine);
        }
        else
        {
            // No usable AR in the source: fall back to ITU 16:9 PAL.
            customDAR = Dar.ITU16x9PAL.ar;
            logBuilder.AppendFormat("No aspect ratio found, defaulting to {0}.{1}", customDAR, Environment.NewLine);
        }
    }
    else
    {
        customDAR = AR.Value.ar;
    }
    //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
    if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
    {
        x264Settings xs = (x264Settings)settings;
        if (xs.Level != 15) // 15 = unrestricted, nothing to validate
        {
            int compliantLevel = 15;
            // NOTE(review): validation uses the instance field this.al.
            while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
            {
                // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                AVCLevels al = new AVCLevels();
                string levelName = al.getLevels()[xs.Level];
                logBuilder.Append("Your chosen AVC level " + levelName + " is too strict to allow your chosen resolution of " + horizontalResolution + "*" + scriptVerticalResolution + ". Reducing horizontal resolution by 16.\r\n");
                horizontalResolution -= 16;
                scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            }
            logBuilder.Append("Final resolution that is compatible with the chosen AVC Level: " + horizontalResolution + "*" + scriptVerticalResolution + "\r\n");
        }
    }
    //Generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";
    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0);
    if (autoDeint)
    {
        logBuilder.AppendLine("Automatic deinterlacing was checked. Running now...");
        // Run the source detector; waitTillAnalyseFinished blocks until the
        // asynchronous analysis has completed and 'filters' has been filled in.
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        logBuilder.AppendLine("Deinterlacing used: " + deinterlaceLines);
    }
    // Re-generate the input line now that interlacing status is known.
    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0);
    cropLine = ScriptServer.GetCropLine(true, final);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);
    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        // Prepend the DAR globals so downstream tools can pick up the anamorphic flag.
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    logBuilder.Append("Avisynth script created:\r\n");
    logBuilder.Append(newScript);
    try
    {
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"));
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        logBuilder.Append("An error ocurred when trying to save the AviSynth script:\r\n" + i.Message);
        return ("");
    }
    return (Path.ChangeExtension(path, ".avs"));
}