public void Run(MainForm info)
{
    using (Calculator calc = new Calculator(info))
    {
        ulong nbFrames = 0;
        double framerate = 0.0;
        int hRes = 0, vRes = 0;
        Dar dar = new Dar();
        if (!string.IsNullOrEmpty(info.Video.VideoInput))
        {
            JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out dar, info.Video.VideoInput);
        }
        calc.SetDefaults(nbFrames, framerate, hRes, vRes, info.Video.CurrentSettings, info.Audio.AudioStreams);

        DialogResult dr = calc.ShowDialog();
        if (dr != DialogResult.OK)
            return;
        if (info.Video.CurrentSettings.EncoderType != calc.SelectedVCodec)
            return;

        VideoCodecSettings settings = info.Video.CurrentSettings;
        if (settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.CQ ||
            settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.quality)
        {
            dr = MessageBox.Show("Copy calculated bitrate into current video settings and change encoding mode to automated " + info.Settings.NbPasses + "-pass?",
                "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
                return;
            if (info.Settings.NbPasses == 3)
                settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.threepassAutomated; // Automated 3-pass
            else
                settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.twopassAutomated; // Automated 2-pass
        }
        else
        {
            dr = MessageBox.Show("Copy calculated bitrate into current video settings?",
                "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
                return;
        }
        settings.BitrateQuantizer = calc.VideoBitrate;
    }
}
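// A minimal sketch of the arithmetic behind such a bitrate calculator: the video
// bitrate is what remains of the target file size after audio and container
// overhead are subtracted, spread over the clip duration. The helper below and
// its flat overhead factor are illustrative assumptions, not MeGUI's Calculator.
static int CalculateVideoKbps(long targetSizeBytes, long audioSizeBytes, ulong nbFrames, double framerate, double overheadFactor = 0.02)
{
    double durationSeconds = nbFrames / framerate;
    double payloadBytes = (targetSizeBytes - audioSizeBytes) * (1.0 - overheadFactor); // reserve room for muxing overhead
    double videoBitsPerSecond = payloadBytes * 8.0 / durationSeconds;
    return (int)(videoBitsPerSecond / 1000.0); // kbps, as stored in BitrateQuantizer for bitrate modes
}

// Example: a 700 MB target with 70 MB of audio over 135000 frames at 25 fps
// (90 minutes) yields roughly 960 kbps for the video track.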
/// <summary>
/// opens the video source of the given job and reads its properties:
/// the number of frames, the frame rate, the resolution and the DAR;
/// the values are stored in the encoder fields and in the status update
/// </summary>
/// <param name="job">the video job whose input is analysed</param>
protected void getInputProperties(VideoJob job)
{
    double fps;
    Dar d;
    JobUtil.GetAllInputProperties(out numberOfFrames, out fps, out hres, out vres, out d, job.Input);
    dar = job.DAR;
    su.NbFramesTotal = numberOfFrames;
    su.ClipLength = TimeSpan.FromSeconds((double)numberOfFrames / fps);
}
public void Run(MainForm info)
{
    if (info.Video.VideoInput.Equals(""))
    {
        MessageBox.Show("You first need to load an AviSynth script", "No video configured", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        return;
    }

    bool succ;
    int hRes, vRes;
    MeGUI.core.util.Dar d;
    ulong nbFrames;
    double framerate;
    AVCLevels.Levels? compliantLevel = null;
    x264Settings currentX264Settings = (x264Settings)MainForm.Instance.Profiles.GetCurrentSettings("x264");
    if (JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out d, info.Video.VideoInput))
    {
        AVCLevels al = new AVCLevels();
        succ = al.validateAVCLevel(hRes, vRes, framerate, currentX264Settings, out compliantLevel);
    }
    else
    {
        succ = false;
    }

    if (succ)
    {
        MessageBox.Show("This file matches the criteria for the level chosen", "Video validated", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
    else
    {
        if (compliantLevel == null)
        {
            MessageBox.Show("Unable to open video", "Test failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
        else
        {
            AVCLevels al = new AVCLevels();
            string message = "This video source cannot be encoded to comply with the chosen level.\n"
                + "You need at least Level " + AVCLevels.GetLevelText((AVCLevels.Levels)compliantLevel) + " for this source. Do you want\n"
                + "to increase the level automatically now?";
            DialogResult dr = MessageBox.Show(message, "Test failed", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr == DialogResult.Yes)
            {
                currentX264Settings.AVCLevel = (AVCLevels.Levels)compliantLevel;
            }
        }
    }
}
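// What a level check like validateAVCLevel boils down to: the source's frame size
// in macroblocks and its macroblock throughput must stay within the per-level
// limits of the H.264 spec (Table A-1). The helper below is an illustrative
// sketch, not MeGUI code, and only two level limits are quoted.
static bool FitsLevel(int width, int height, double framerate, int maxFrameSizeMBs, int maxMBPerSec)
{
    int mbWidth = (width + 15) / 16;            // macroblock columns
    int mbHeight = (height + 15) / 16;          // macroblock rows
    int frameSizeMBs = mbWidth * mbHeight;      // compared against MaxFS
    double mbPerSec = frameSizeMBs * framerate; // compared against MaxMBPS
    return frameSizeMBs <= maxFrameSizeMBs && mbPerSec <= maxMBPerSec;
}

// Example: 1920x1080 at 25 fps gives 8160 macroblocks and 204000 MB/s, which fits
// Level 4.0 (MaxFS 8192, MaxMBPS 245760) but not Level 3.1 (3600, 108000).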
/// <summary>
/// tries to open the video source of the given job and reads its properties
/// (number of frames, frame rate and resolution); if the settings use a SAR,
/// the encoder command line is regenerated with the derived SAR values
/// </summary>
/// <param name="job">the video job whose input is analysed</param>
/// <param name="error">return parameter for all errors</param>
/// <returns>true if the file could be opened, false if not</returns>
protected bool getInputProperties(VideoJob job, out string error)
{
    double f;
    int a, b;
    error = JobUtil.GetAllInputProperties(out numberOfFrames, out f, out hres, out vres, out a, out b, job.Input);
    darX = job.DARX;
    darY = job.DARY;
    if (job.Settings.UsesSAR)
    {
        int sarX, sarY;
        VideoUtil.findSAR(job.DARX, job.DARY, hres, vres, out sarX, out sarY);
        job.Commandline = CommandLineGenerator.generateVideoCommandline(job.Settings, job.Input, job.Output, sarX, sarY);
    }
    su.NbFramesTotal = numberOfFrames;
    return (error == null);
}
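// The DAR-to-SAR conversion that the VideoUtil.findSAR call above presumably
// performs follows from DAR = SAR * width / height, i.e. SAR = DAR * height / width,
// reduced to the smallest integer ratio. The helpers below are an illustrative
// sketch under that assumption, not the MeGUI implementation.
static void DarToSar(int darX, int darY, int width, int height, out int sarX, out int sarY)
{
    long num = (long)darX * height; // SAR numerator before reduction
    long den = (long)darY * width;  // SAR denominator before reduction
    long gcd = Gcd(num, den);
    sarX = (int)(num / gcd);
    sarY = (int)(den / gcd);
}

static long Gcd(long a, long b)
{
    while (b != 0) { long t = a % b; a = b; b = t; }
    return a;
}

// Example: DAR 16:9 at a 720x576 (PAL DVD) storage resolution gives SAR 64:45.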
/// <summary>
/// creates the AVS script file for the job:
/// if the source can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values,
/// is calculated, and finally the AviSynth script is written and its name returned
/// </summary>
/// <param name="indexFile">the index file of the source, if any</param>
/// <param name="inputFile">the source file</param>
/// <param name="AR">display aspect ratio to be used; null to auto-detect it from the source</param>
/// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
/// <param name="_log">the log item to write to</param>
/// <param name="avsSettings">the AviSynth profile settings</param>
/// <param name="autoDeint">whether or not automatic deinterlacing detection is used</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">whether or not the input resolution must be kept</param>
/// <param name="useChaptersMarks">whether or not chapter marks should be written to a qpf file</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string CreateAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth, LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    Dar? dar = null;
    Dar customDAR;
    IMediaFile iMediaFile = null;
    IVideoReader reader;
    PossibleSources oPossibleSource;
    x264Device xTargetDevice = null;
    CropValues cropValues = new CropValues();
    int outputWidthIncludingPadding = 0;
    int outputHeightIncludingPadding = 0;
    int outputWidthCropped = 0;
    int outputHeightCropped = 0;

    // encode anamorph either when it is selected in the avs profile or the input resolution should not be touched
    bool signalAR = (avsSettings.Mod16Method != mod16Method.none) || keepInputResolution;

    // make sure the proper anamorphic encode is selected if the input resolution should not be touched
    if (keepInputResolution && avsSettings.Mod16Method != mod16Method.nonMod16)
    {
        avsSettings.Mod16Method = mod16Method.nonMod16;
    }

    // open index file to retrieve information
    if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
    {
        iMediaFile = new dgiFile(indexFile);
        oPossibleSource = PossibleSources.dgi;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
    {
        iMediaFile = new d2vFile(indexFile);
        oPossibleSource = PossibleSources.d2v;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM)
    {
        iMediaFile = new dgmFile(indexFile);
        oPossibleSource = PossibleSources.dgm;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
    {
        iMediaFile = new ffmsFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.ffindex;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
    {
        iMediaFile = new lsmashFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.lsmash;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
    {
        string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
        iMediaFile = AvsFile.ParseScript(tempAvs, true);
        oPossibleSource = PossibleSources.avisource;
    }
    else
    {
        iMediaFile = AvsFile.OpenScriptFile(inputFile, true);
        oPossibleSource = PossibleSources.avs;
    }
    reader = iMediaFile.GetVideoReader();

    // abort if the index file is invalid
    if (reader.FrameCount < 1)
    {
        _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
        return "";
    }

    if (AR == null)
    {
        // AR needs to be detected automatically now
        _log.LogValue("Auto-detect aspect ratio", AR == null);
        customDAR = iMediaFile.VideoInfo.DAR;
        if (customDAR.AR <= 0)
        {
            customDAR = Dar.ITU16x9PAL;
            _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
    {
        customDAR = AR.Value;
    }
    _log.LogValue("Aspect ratio", customDAR);

    // check x264 settings (target device, chapter file)
    if (settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xTargetDevice = xs.TargetDevice;
        _log.LogValue("Target device", xTargetDevice.Name);
    }

    // get mod value for resizing
    int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

    // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
    if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
    {
        _log.Error("Autocrop failed. Aborting...");
        return "";
    }

    int inputWidth = (int)iMediaFile.VideoInfo.Width;
    int inputHeight = (int)iMediaFile.VideoInfo.Height;
    int inputFPS_D = (int)iMediaFile.VideoInfo.FPS_D;
    int inputFPS_N = (int)iMediaFile.VideoInfo.FPS_N;
    int inputFrameCount = (int)iMediaFile.VideoInfo.FrameCount;

    // force destruction of AVS script
    iMediaFile.Dispose();

    Dar? suggestedDar = null;
    if (desiredOutputWidth == 0)
    {
        desiredOutputWidth = outputWidthIncludingPadding = inputWidth;
    }
    else if (!avsSettings.Upsize && desiredOutputWidth > inputWidth)
    {
        outputWidthIncludingPadding = inputWidth;
    }
    else
    {
        outputWidthIncludingPadding = desiredOutputWidth;
    }

    CropValues paddingValues;
    bool resizeEnabled;
    int outputWidthWithoutUpsizing = outputWidthIncludingPadding;
    if (avsSettings.Upsize)
    {
        resizeEnabled = !keepInputResolution;
        CropValues cropValuesTemp = cropValues.Clone();
        int outputHeightIncludingPaddingTemp = 0;
        Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true,
            avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp,
            out paddingValues, out suggestedDar, _log);
    }

    resizeEnabled = !keepInputResolution;
    Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true,
        avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthIncludingPadding, ref outputHeightIncludingPadding,
        out paddingValues, out suggestedDar, _log);
    keepInputResolution = !resizeEnabled;

    if (signalAR && suggestedDar.HasValue)
    {
        dar = suggestedDar;
    }

    // log calculated output resolution
    outputWidthCropped = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
    outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
    _log.LogValue("Input resolution", inputWidth + "x" + inputHeight);
    _log.LogValue("Desired maximum width", desiredOutputWidth);
    if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
    {
        _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
    }
    if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
    {
        _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
    }
    if (cropValues.isCropped())
    {
        _log.LogValue("Autocrop values", cropValues);
        _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    else
    {
        _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    if (paddingValues.isCropped())
    {
        _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
    }

    // generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

    if (IsJobStopped())
        return "";

    _log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        su.Status = "Automatic deinterlacing... ***PLEASE WAIT***";
        string d2vPath = indexFile;
        _sourceDetector = new SourceDetector(inputLine, d2vPath, avsSettings.PreferAnimeDeinterlace, inputFrameCount, Thread.CurrentThread.Priority,
            MainForm.Instance.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(AnalyseUpdate), new FinishedAnalysis(FinishedAnalysis));
        finished = false;
        _sourceDetector.Analyse();
        WaitTillAnalyseFinished();
        _sourceDetector = null;
        if (filters != null)
        {
            deinterlaceLines = filters[0].Script;
            if (interlaced)
            {
                _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
            }
            else
            {
                _log.LogValue("Deinterlacing used", deinterlaceLines);
            }
        }
    }

    if (IsJobStopped())
        return "";

    su.Status = "Finalizing preprocessing... ***PLEASE WAIT***";

    // get final input filter line
    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

    // get crop & resize lines
    if (!keepInputResolution)
    {
        if (autoCrop)
        {
            cropLine = ScriptServer.GetCropLine(cropValues);
        }
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || inputWidth != outputWidthCropped,
            outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod, autoCrop, cropValues, inputWidth, inputHeight);
    }

    // get denoise line
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    else
    {
        if (xTargetDevice != null && xTargetDevice.BluRay)
        {
            string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
            x264Settings _xs = (x264Settings)settings;
            if (strResolution.Equals("720x480"))
            {
                _xs.SampleAR = 4;
                _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("720x576"))
            {
                _xs.SampleAR = 5;
                _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
            {
                _xs.SampleAR = 1;
                _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1440x1080"))
            {
                _xs.SampleAR = 2;
                _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
        }
    }
    _log.LogValue("Generated AviSynth script", newScript);

    string strOutputAVSFile;
    if (String.IsNullOrEmpty(indexFile))
    {
        strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
    }
    else
    {
        strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
    }

    try
    {
        StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (Exception i)
    {
        _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return "";
    }

    JobUtil.GetAllInputProperties(strOutputAVSFile, out ulong numberOfFrames, out double fps, out int fps_n, out int fps_d, out int hres, out int vres, out Dar d, out AviSynthColorspace colorspace);
    _log.LogEvent("resolution: " + hres + "x" + vres);
    _log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
    _log.LogEvent("frames: " + numberOfFrames);
    TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps);
    _log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}", (int)(oTime.TotalHours), oTime.Minutes, oTime.Seconds, oTime.Milliseconds));
    _log.LogValue("aspect ratio", d);
    _log.LogValue("color space", colorspace.ToString());

    if (IsJobStopped())
        return "";

    // create qpf file if necessary and possible
    if (job.PostprocessingProperties.ChapterInfo.HasChapters && useChaptersMarks && settings != null && settings is x264Settings)
    {
        fps = (double)fps_n / fps_d;
        string strChapterFile = Path.ChangeExtension(strOutputAVSFile, ".qpf");
        job.PostprocessingProperties.ChapterInfo.ChangeFps(fps);
        if (job.PostprocessingProperties.ChapterInfo.SaveQpfile(strChapterFile))
        {
            job.PostprocessingProperties.FilesToDelete.Add(strChapterFile);
            _log.LogValue("qpf file created", strChapterFile);
            x264Settings xs = (x264Settings)settings;
            xs.UseQPFile = true;
            xs.QPFile = strChapterFile;
        }
    }

    // check if a timestamp file has to be used
    if (!String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xs.TCFile = job.PostprocessingProperties.TimeStampFile;
    }

    return strOutputAVSFile;
}
/// <summary>
/// opens the AviSynth script of the given job, reads and logs its properties
/// (number of frames, frame rate, resolution, DAR and color space) and, if the
/// selected encoder does not support the detected color space, offers to append
/// a ConvertTo() call to the script
/// </summary>
/// <param name="job">the video job whose input is analysed</param>
protected void getInputProperties(VideoJob job)
{
    log.LogValue("AviSynth input script", GetAVSFileContent());

    double fps;
    Dar d = Dar.A1x1;
    AviSynthColorspace colorspace_original;
    JobUtil.GetAllInputProperties(job.Input, out numberOfFrames, out fps, out fps_n, out fps_d, out hres, out vres, out d, out colorspace_original);
    Dar? dar = job.DAR;
    su.NbFramesTotal = numberOfFrames;
    su.ClipLength = TimeSpan.FromSeconds((double)numberOfFrames / fps);
    if (!job.DAR.HasValue)
    {
        job.DAR = d;
    }

    // log
    if (log == null)
        return;

    log.LogEvent("resolution: " + hres + "x" + vres);
    log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
    log.LogEvent("frames: " + numberOfFrames);
    log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}", (int)(su.ClipLength.Value.TotalHours), su.ClipLength.Value.Minutes, su.ClipLength.Value.Seconds, su.ClipLength.Value.Milliseconds));
    if (dar.HasValue && d.AR == dar.Value.AR)
    {
        log.LogValue("aspect ratio", d);
    }
    else
    {
        log.LogValue("aspect ratio (avs)", d);
        if (dar.HasValue)
        {
            log.LogValue("aspect ratio (job)", dar.Value);
        }
    }
    // a purely numeric color space name indicates an unrecognized color space, so log it as a warning
    if (Int32.TryParse(colorspace_original.ToString(), out int result))
    {
        log.LogValue("color space", colorspace_original.ToString(), ImageType.Warning);
    }
    else
    {
        log.LogValue("color space", colorspace_original.ToString());
    }

    // determine the encoder so that the required color space can be looked up
    string strEncoder = "ffmpeg";
    if (this is XviDEncoder)
    {
        strEncoder = "xvid";
    }
    else if (this is x264Encoder && (MainForm.Instance.Settings.IsMeGUIx64 || !MainForm.Instance.Settings.Usex64Tools))
    {
        strEncoder = "x264";
    }

    // if the encoder cannot handle the detected color space, offer to append a ConvertTo() call to the script
    AviSynthColorspace colorspace_target = AviSynthColorspaceHelper.GetConvertedColorspace(strEncoder, colorspace_original);
    if (colorspace_original != colorspace_target && !AviSynthColorspaceHelper.IsConvertedToColorspace(job.Input, colorspace_target.ToString()))
    {
        if (MainForm.Instance.DialogManager.AddConvertTo(colorspace_original.ToString(), colorspace_target.ToString()))
        {
            AviSynthColorspaceHelper.AppendConvertTo(job.Input, colorspace_target, colorspace_original);
            log.LogValue("AviSynth input script (appended)", GetAVSFileContent());

            // Check everything again, to see if it is all fixed now
            AviSynthColorspace colorspace_converted;
            JobUtil.GetAllInputProperties(job.Input, out numberOfFrames, out fps, out fps_n, out fps_d, out hres, out vres, out d, out colorspace_converted);
            if (colorspace_original != colorspace_converted)
            {
                log.LogValue("color space converted", colorspace_converted.ToString());
            }
            else
            {
                log.LogEvent("color space not supported, conversion failed", ImageType.Error);
            }
        }
        else
        {
            log.LogEvent("color space not supported", ImageType.Error);
        }
    }
}