/// <summary>
/// Opens a video file: resets all source-dependent job information, optionally
/// opens the file as an AviSynth script, and derives the default output file name.
/// </summary>
/// <param name="fileName">path of the video input to open</param>
public void openVideoFile(string fileName)
{
    // reset per-source job information
    info.CreditsStartFrame = -1;
    info.IntroEndFrame = -1;
    info.VideoInput = fileName;
    info.DAR = null;
    info.Zones = null;

    if (mainForm.Settings.AutoOpenScript)
    {
        openAvisynthScript(fileName);
    }
    else
    {
        // open the script only to read its display aspect ratio
        using (AvsFile avi = AvsFile.OpenScriptFile(fileName))
        {
            info.DAR = avi.Info.DAR;
        }
    }

    // prefer the configured default output directory; fall back to the input's folder
    string outputDir = mainForm.Settings.DefaultOutputDir;
    if (string.IsNullOrEmpty(outputDir))
    {
        outputDir = Path.GetDirectoryName(fileName);
    }

    // build "<dir>\<name><videoExtension>.extension", then swap the dummy
    // ".extension" suffix for the real extension of the current output type
    string baseName = Path.GetFileNameWithoutExtension(fileName);
    this.VideoOutput = Path.Combine(outputDir, baseName) + mainForm.Settings.VideoExtension + ".extension";
    this.VideoOutput = Path.ChangeExtension(this.VideoOutput, this.CurrentVideoOutputType.Extension);

    updateIOConfig();
}
/// <summary>
/// gets the number of frames, framerate, horizontal and vertical resolution from a video source
/// </summary>
/// <param name="nbOfFrames">the number of frames</param>
/// <param name="framerate">the framerate</param>
/// <param name="framerate_n">the framerate numerator</param>
/// <param name="framerate_d">the framerate denominator</param>
/// <param name="hRes">the horizontal resolution</param>
/// <param name="vRes">the vertical resolution</param>
/// <param name="dar">the display aspect ratio of the source</param>
/// <param name="video">the video whose properties are to be read</param>
/// <returns>true if the source could be opened; on failure this method does not
/// return false but throws a JobRunException instead</returns>
/// <exception cref="JobRunException">thrown when the source cannot be opened</exception>
public static bool GetAllInputProperties(out ulong nbOfFrames, out double framerate, out int framerate_n, out int framerate_d, out int hRes, out int vRes, out Dar dar, string video)
{
    nbOfFrames = 0;
    hRes = vRes = 0;
    framerate = 0.0;
    try
    {
        using (AvsFile avi = AvsFile.OpenScriptFile(video))
        {
            // checked: guard the signed-to-ulong cast against overflow of a bad frame count
            checked { nbOfFrames = (ulong)avi.VideoInfo.FrameCount; }
            framerate = avi.VideoInfo.FPS;
            framerate_n = avi.VideoInfo.FPS_N;
            framerate_d = avi.VideoInfo.FPS_D;
            hRes = (int)avi.VideoInfo.Width;
            vRes = (int)avi.VideoInfo.Height;
            dar = avi.VideoInfo.DAR;
        }
        return(true);
    }
    catch (Exception e)
    {
        // wrap any open/parse failure in a job-level exception for the caller
        throw new JobRunException("The file " + video + " cannot be opened.\r\n" + "Error message for your reference: " + e.Message, e);
    }
}
/// <summary>
/// initializes the ffms reader
/// </summary>
/// <param name="fileName">the FFMSIndex source file that this reader will process</param>
/// <param name="indexFile">the FFMSIndex index file that this reader will process</param>
public ffmsFile(string fileName, string indexFile)
{
    bool onlyIndexGiven = !String.IsNullOrEmpty(indexFile) && String.IsNullOrEmpty(fileName);
    if (onlyIndexGiven)
    {
        // derive the source file by stripping the ".ffindex" suffix (8 characters)
        this.fileName = indexFile.Substring(0, indexFile.Length - 8);
        indexFile = null;
    }
    else
    {
        this.fileName = fileName;
    }

    // probe the source with MediaInfo (when present on disk) for frame rate and aspect ratio
    MediaInfoFile oInfo = null;
    double fps = 0;
    if (File.Exists(this.fileName))
    {
        oInfo = new MediaInfoFile(this.fileName);
        if (oInfo.VideoInfo.HasVideo && oInfo.VideoInfo.FPS > 0)
        {
            fps = oInfo.VideoInfo.FPS;
        }
    }

    reader = AvsFile.ParseScript(VideoUtil.getFFMSVideoInputLine(this.fileName, indexFile, fps), true);
    info = reader.VideoInfo.Clone();
    if (oInfo != null)
    {
        info.DAR = oInfo.VideoInfo.DAR;
    }
}
/// <summary>
/// initializes the ffms reader
/// </summary>
/// <param name="fileName">the FFMSIndex source file that this reader will process</param>
/// <param name="indexFile">the FFMSIndex index file that this reader will process</param>
public ffmsFile(string fileName, string indexFile)
{
    if (!String.IsNullOrEmpty(indexFile) && String.IsNullOrEmpty(fileName))
    {
        // only the index file is known: the source name is the index name minus ".ffindex" (8 chars)
        this.fileName = indexFile.Substring(0, indexFile.Length - 8);
        indexFile = null;
    }
    else
    {
        this.fileName = fileName;
    }

    // ffms2.dll is expected next to the FFMSIndex executable
    string strDLL = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.FFMSIndexPath), "ffms2.dll");

    // optional FFVideoSource() arguments, appended only when configured
    string cacheArg = !string.IsNullOrEmpty(indexFile) ? ", cachefile=\"" + indexFile + "\"" : String.Empty;
    string threadsArg = MainForm.Instance.Settings.FFMSThreads > 0 ? ", threads=" + MainForm.Instance.Settings.FFMSThreads : String.Empty;
    string strScript = "LoadPlugin(\"" + strDLL + "\")\r\nFFVideoSource(\"" + this.fileName + "\"" + cacheArg + threadsArg + ")";

    reader = AvsFile.ParseScript(strScript);
    info = reader.VideoInfo.Clone();

    // fetch the display aspect ratio from MediaInfo when the source is accessible
    if (File.Exists(this.fileName))
    {
        MediaInfoFile oInfo = new MediaInfoFile(this.fileName);
        info.DAR = oInfo.VideoInfo.DAR;
    }
}
/// <summary>
/// initializes the dga reader
/// </summary>
/// <param name="fileName">the DGAVCIndex project file that this reader will process</param>
public dgaFile(string fileName)
{
    this.fileName = fileName;
    // DGAVCDecode.dll lives in the same folder as the DGAVCIndex executable
    string pluginPath = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.DgavcIndexPath), "DGAVCDecode.dll");
    reader = AvsFile.ParseScript("LoadPlugin(\"" + pluginPath + "\")\r\nAVCSource(\"" + this.fileName + "\")");
    this.readFileProperties();
}
/// <summary>
/// initializes the d2v reader
/// </summary>
/// <param name="fileName">the dvd2avi project file that this reader will process</param>
public d2vFile(string fileName)
{
    this.fileName = fileName;
    // DGDecode.dll lives in the same folder as the DGIndex executable
    string pluginPath = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.DgIndexPath), "DGDecode.dll");
    reader = AvsFile.ParseScript("LoadPlugin(\"" + pluginPath + "\")\r\nDGDecode_Mpeg2Source(\"" + this.fileName + "\")");
    this.readFileProperties();
}
/// <summary>
/// initializes the dga reader
/// </summary>
/// <param name="fileName">the DGAVCIndex project file that this reader will process</param>
public dgaFile(string fileName)
{
    // ensure the dgavcindex package is present before locating its decoder dll
    UpdateCacher.CheckPackage("dgavcindex");
    this.fileName = fileName;
    string pluginPath = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.DGAVCIndex.Path), "DGAVCDecode.dll");
    reader = AvsFile.ParseScript("LoadPlugin(\"" + pluginPath + "\")\r\nAVCSource(\"" + this.fileName + "\")");
    this.readFileProperties();
}
/// <summary>
/// initializes the lsmash reader
/// </summary>
/// <param name="fileName">the LSMASHIndex source file that this reader will process</param>
/// <param name="indexFile">the LSMASHIndex index file that this reader will process</param>
public lsmashFile(string fileName, string indexFile)
{
    MediaInfoFile mediaInfo = null;
    // getLSMASHVideoInputLine fills mediaInfo (by ref) when it probes the source itself
    string inputLine = VideoUtil.getLSMASHVideoInputLine(fileName, indexFile, 0, ref mediaInfo);
    reader = AvsFile.ParseScript(inputLine, true);
    info = reader.VideoInfo.Clone();
    if (mediaInfo != null)
    {
        info.DAR = mediaInfo.VideoInfo.DAR;
    }
}
/// <summary>
/// Verifies that the AviSynth script can be opened and that its output is suitable
/// for encoding: the colorspace must be YV12/I420 (optionally appending
/// ConvertToYV12() to fix it after asking the user) and, for non-x264 encoders,
/// the dimensions should be mod16.
/// </summary>
/// <param name="avsFile">path of the AviSynth script to check</param>
/// <param name="tryToFix">whether the user may be offered an automatic ConvertToYV12() fix</param>
/// <returns>null if the script is fine, otherwise a human-readable error/warning message</returns>
private string checkVideo(string avsFile, bool tryToFix)
{
    try
    {
        using (AvsFile avi = AvsFile.OpenScriptFile(avsFile))
        {
            if (avi.Clip.OriginalColorspace != AviSynthColorspace.YV12 && avi.Clip.OriginalColorspace != AviSynthColorspace.I420)
            {
                // wrong colorspace: offer to append ConvertToYV12() unless already converted
                if (tryToFix && !isConvertedToYV12(avsFile))
                {
                    bool convert = mainForm.DialogManager.addConvertToYV12(avi.Clip.OriginalColorspace.ToString());
                    if (convert)
                    {
                        if (appendConvertToYV12(avsFile))
                        {
                            // recurse once (tryToFix=false) to check everything again, to see if it is all fixed now
                            string sResult = checkVideo(avsFile, false);
                            if (sResult == null)
                            {
                                MessageBox.Show("Successfully converted to YV12.");
                                return(null);
                            }
                            else
                            {
                                return(sResult);
                            }
                        }
                    }
                    return("You didn't want me to append ConvertToYV12(). You'll have to fix the colorspace problem yourself.");
                }
                return(string.Format("AviSynth clip is in {0} not in YV12, even though ConvertToYV12() has been appended.", avi.Clip.OriginalColorspace.ToString()));
            }
            VideoCodecSettings settings = GetCurrentVideoSettings();
            if (settings != null && settings.SettingsID != "x264") // mod16 restriction
            {
                if (avi.Clip.VideoHeight % 16 != 0 || avi.Clip.VideoWidth % 16 != 0)
                {
                    return(string.Format("AviSynth clip doesn't have mod16 dimensions:\r\nWidth: {0}\r\nHeight:{1}\r\n" + "This could cause problems with some encoders,\r\n" + "and will also result in a loss of compressibility.\r\n" + "I suggest you resize to a mod16 resolution.", avi.Clip.VideoWidth, avi.Clip.VideoHeight));
                }
            }
        }
    }
    catch (Exception e)
    {
        // any open/parse failure is reported as a script error
        return("Error in AviSynth script:\r\n" + e.Message);
    }
    return(null);
}
/// <summary>
/// Opens the given AviSynth script and shows the crop dialog for it, modally.
/// </summary>
/// <param name="parent">owner window for the modal dialog</param>
/// <param name="script">AviSynth script text to parse and preview</param>
static public void Execute(IWin32Window parent, string script)
{
    using (AvsFile avsClip = AvsFile.ParseScript(script))
    using (CropDialog dialog = new CropDialog())
    {
        dialog.file = avsClip;
        dialog.reader = avsClip.GetVideoReader();
        dialog.init();
        dialog.ShowDialog(parent);
    }
}
/// <summary>
/// initializes the dgi reader
/// </summary>
/// <param name="fileName">the DGNVIndex project file that this reader will process</param>
public dgmFile(string fileName)
{
    this.fileName = fileName;
    // ensure the dgindexim package is present before locating its decoder dll
    UpdateCacher.CheckPackage("dgindexim");
    string pluginDir = Path.GetDirectoryName(MainForm.Instance.Settings.DGIndexIM.Path);
    string script = "LoadPlugin(\"" + Path.Combine(pluginDir, "DGDecodeIM.dll") + "\")\r\nDGSourceIM(\"" + this.fileName + "\", silent=true)";
    reader = AvsFile.ParseScript(script, true);
    this.readFileProperties();
}
/// <summary>
/// initializes the dgv reader
/// </summary>
/// <param name="fileName">the DGVC1Index project file that this reader will process</param>
public dgvFile(string fileName)
{
    this.fileName = fileName;
    int c;
    // c selects how DGDecodeNV is brought into AviSynth; presumably 1 = plugin
    // auto-loads, 2 = explicit LoadPlugin with the returned dll path — TODO confirm
    // against ScriptServer.DGDecodeNVdllPath
    string dgdecodenv = ScriptServer.DGDecodeNVdllPath(out c);
    switch (c)
    {
        case 1:
            reader = AvsFile.ParseScript("DGSource(\"" + this.fileName + "\")");
            break;
        case 2:
            reader = AvsFile.ParseScript("LoadPlugin(\"" + dgdecodenv + "\")\r\nDGSource(\"" + this.fileName + "\")");
            break;
        // NOTE(review): any other value of c leaves 'reader' unassigned, so
        // readFileProperties() below would likely fail on a null reader — verify
        // DGDecodeNVdllPath can only return 1 or 2
    }
    this.readFileProperties();
}
/// <summary>
/// sets up encoding
/// </summary>
/// <param name="job">the job to be processed</param>
/// <param name="error">output for any errors that might occur during this method</param>
/// <returns>true if the setup has succeeded, false if it has not</returns>
public bool setup(Job job, out string error)
{
    error = "";

    // this processor only accepts AviSynth jobs
    AviSynthJob avsJob = job as AviSynthJob;
    if (avsJob == null)
    {
        error = "Job '" + job.Name + "' has been given to the AviSynthProcessor, even though it is not an AviSynthJob.";
        return(false);
    }
    this.job = avsJob;
    stup.JobName = job.Name;

    // open the script and get a reader for it
    try
    {
        file = AvsFile.OpenScriptFile(job.Input);
        reader = file.GetVideoReader();
    }
    catch (Exception ex)
    {
        error = ex.Message;
        return(false);
    }

    stup.NbFramesTotal = reader.FrameCount;
    position = 0;

    // prepare the worker and status threads (started elsewhere)
    try
    {
        processorThread = new Thread(new ThreadStart(process));
        statusThread = new Thread(new ThreadStart(update));
    }
    catch (Exception e)
    {
        error = e.Message;
        return(false);
    }
    return(true);
}
/// <summary>
/// initializes the lsmash reader
/// </summary>
/// <param name="fileName">the LSMASHIndex source file that this reader will process;
/// may be empty, in which case the source path is recovered from the index file</param>
/// <param name="indexFile">the LSMASHIndex index file that this reader will process</param>
public lsmashFile(string fileName, string indexFile)
{
    if (!String.IsNullOrEmpty(indexFile) && String.IsNullOrEmpty(fileName))
    {
        // only the index file is known: recover the source path from the
        // <InputFilePath>...</InputFilePath> entry inside the index file.
        // Named constants replace the former magic offset 15 (= openTag.Length).
        const string openTag = "<InputFilePath>";
        const string closeTag = "</InputFilePath>";
        using (StreamReader sr = new StreamReader(indexFile, System.Text.Encoding.Default))
        {
            string line = null;
            while ((line = sr.ReadLine()) != null)
            {
                if (line.StartsWith(openTag))
                {
                    string strSourceFile = line.Substring(openTag.Length, line.LastIndexOf(closeTag) - openTag.Length);
                    if (File.Exists(strSourceFile))
                    {
                        this.fileName = strSourceFile;
                    }
                    break;
                }
            }
        }
    }
    else
    {
        this.fileName = fileName;
    }

    // probe the source with MediaInfo (when present on disk) for frame rate and aspect ratio
    double fps = 0;
    MediaInfoFile oInfo = null;
    if (File.Exists(this.fileName))
    {
        oInfo = new MediaInfoFile(this.fileName);
        if (oInfo.VideoInfo.HasVideo && oInfo.VideoInfo.FPS > 0)
        {
            fps = oInfo.VideoInfo.FPS;
        }
    }

    reader = AvsFile.ParseScript(VideoUtil.getLSMASHVideoInputLine(this.fileName, indexFile, fps));
    info = reader.VideoInfo.Clone();
    if (oInfo != null)
    {
        info.DAR = oInfo.VideoInfo.DAR;
    }
}
/// <summary>
/// initializes the dgi reader
/// </summary>
/// <param name="fileName">the DGNVIndex project file that this reader will process</param>
public dgiFile(string fileName)
{
    this.fileName = fileName;
    string pluginDir = Path.GetDirectoryName(MainForm.Instance.Settings.DgnvIndexPath);
    // fieldop=1 (force film) when auto force-film is enabled and the film
    // percentage reaches the configured threshold; fieldop=0 otherwise.
    // Short-circuit keeps GetFilmPercent from running when AutoForceFilm is off.
    bool forceFilm = MainForm.Instance.Settings.AutoForceFilm
        && MainForm.Instance.Settings.ForceFilmThreshold <= (decimal)dgiFile.GetFilmPercent(this.fileName);
    string script = "LoadPlugin(\"" + Path.Combine(pluginDir, "DGDecodeNV.dll") + "\")\r\nDGSource(\"" + this.fileName + "\""
        + (forceFilm ? ",fieldop=1)" : ",fieldop=0)");
    reader = AvsFile.ParseScript(script);
    this.readFileProperties();
}
// stax
/// <summary>
/// Plays through the given analysis script frame by frame (output discarded) so
/// that the analysis filters inside the script can collect their statistics,
/// while a background thread reports progress via analyseUpdate.
/// </summary>
/// <param name="scriptBlock">the AviSynth analysis script to play through</param>
private void Process(string scriptBlock)
{
    try
    {
        using (AvsFile af = AvsFile.ParseScript(scriptBlock))
        {
            int i = 0;
            int frameCount = (int)af.Info.FrameCount;
            bool running = true;
            // progress reporter: shares 'i' and 'running' with the decode loop
            // below through the closure; polls every 500 ms until 'running' clears
            new Thread(new ThreadStart(delegate
            {
                if (analyseUpdate != null)
                {
                    while (running)
                    {
                        analyseUpdate(i, frameCount);
                        Thread.Sleep(500);
                    }
                }
            })).Start();
            IntPtr zero = new IntPtr(0);
            // decode every frame; continueWorking allows an external abort
            for (i = 0; i < frameCount && continueWorking; i++)
            {
                af.Clip.ReadFrame(zero, 0, i);
            }
            // stop the progress thread
            running = false;
        }
    }
    catch (Exception ex)
    {
        error = true;
        errorMessage = "Error opening analysis script " + ex.Message + "\r\n" + "Check to make sure you have TIVTC.dll in your AviSynth plugins directory.\r\n" + ex.Message;
        finishProcessing();
    }
}
/// <summary>
/// Determines the audio channel count of an AviSynth script, first by scanning
/// the script text for a "# detected channels:" hint, then by opening the script
/// and asking AviSynth itself.
/// </summary>
/// <param name="strAVSFile">path of the AviSynth script</param>
/// <returns>the channel count, or 0 if it could not be determined</returns>
public static int getChannelCountFromAVSFile(String strAVSFile)
{
    int iChannelCount = 0;
    try
    {
        // pass 1: look for the channel-count hint written into the script
        using (StreamReader file = new StreamReader(strAVSFile))
        {
            string line;
            while ((line = file.ReadLine()) != null)
            {
                if (line.IndexOf(@"# detected channels: ") == 0)
                {
                    Int32.TryParse(line.Substring(21).Split(' ')[0], out iChannelCount);
                    break;
                }
            }
        }

        // pass 2: no usable hint — open the script and query the clip directly
        if (iChannelCount == 0)
        {
            using (AvsFile avi = AvsFile.OpenScriptFile(strAVSFile))
            {
                if (avi.Clip.HasAudio)
                {
                    Int32.TryParse(avi.Clip.ChannelsCount.ToString(), out iChannelCount);
                }
            }
        }
        return(iChannelCount);
    }
    catch
    {
        // best effort: return whatever was parsed before the failure (0 if nothing)
        return(iChannelCount);
    }
}
/// <summary>
/// sets up encoding
/// </summary>
/// <param name="job">the job to be processed</param>
/// <param name="su">status update object used to report progress</param>
/// <param name="_">log item (unused)</param>
/// <exception cref="JobRunException">thrown when the script cannot be opened or a thread cannot be created</exception>
public void setup(Job job, StatusUpdate su, LogItem _)
{
    Debug.Assert(job is AviSynthJob, "Job isn't an AviSynthJob");
    stup = su;
    this.job = (AviSynthJob)job;

    // open the script and get a reader for it
    try
    {
        file = AvsFile.OpenScriptFile(job.Input);
        reader = file.GetVideoReader();
    }
    catch (Exception ex)
    {
        throw new JobRunException(ex);
    }

    stup.NbFramesTotal = (ulong)reader.FrameCount;
    stup.ClipLength = TimeSpan.FromSeconds((double)stup.NbFramesTotal / file.VideoInfo.FPS);
    stup.Status = "Playing through file...";
    position = 0;

    // prepare the worker and status threads (started elsewhere)
    try
    {
        processorThread = new Thread(new ThreadStart(process));
        statusThread = new Thread(new ThreadStart(update));
    }
    catch (Exception e)
    {
        throw new JobRunException(e);
    }
}
/// <summary>
/// Returns a video reader for this file, lazily creating the underlying
/// AviSynth source (DirectShow input line) and reader on first use.
/// </summary>
/// <returns>the cached IVideoReader instance</returns>
/// <exception cref="Exception">thrown if there is no readable video stream</exception>
public IVideoReader GetVideoReader()
{
    if (!HasVideo || !CanReadVideo)
    {
        throw new Exception("Can't read the video stream");
    }
    // lazy double-checked initialization of source file and reader.
    // NOTE(review): lock(this) is discouraged (external code could take the same
    // lock); a private lock object would be safer — left unchanged here.
    if (videoSourceFile == null || videoReader == null)
    {
        lock (this)
        {
            if (videoSourceFile == null)
            {
                videoSourceFile = AvsFile.ParseScript(ScriptServer.GetInputLine(file, PossibleSources.directShow, false, false, false, FPS));
                videoReader = null;
            }
            if (videoReader == null)
            {
                videoReader = videoSourceFile.GetVideoReader();
            }
        }
    }
    return(videoReader);
}
/// <summary>
/// loads the video, sets up the proper window size and enables / disables the GUI buttons depending on the
/// preview type set
/// </summary>
/// <param name="mainForm">main form used for the media file factory</param>
/// <param name="path">path of the video file to be loaded</param>
/// <param name="type">type of window</param>
/// <param name="hasAR">whether the aspect ratio controls are active</param>
/// <param name="inlineAvs">true if path contain not filename but avsynth script to be parsed</param>
/// <param name="startFrame">Select a specific frame to start off with or -1 for middle of video</param>
/// <param name="originalSize">whether the video should be displayed at its original size</param>
/// <returns>true if the video could be opened, false if not</returns>
public bool loadVideo(MainForm mainForm, string path, PREVIEWTYPE type, bool hasAR, bool inlineAvs, int startFrame, bool originalSize)
{
    videoPreview.UnloadVideo();
    bInlineAVS = inlineAvs;
    strFileName = path;
    bOriginalSize = originalSize;
    // dispose any previously opened file before replacing it
    lock (this)
    {
        if (file != null)
        {
            file.Dispose();
        }
    }
    try
    {
        if (inlineAvs)
        {
            // 'path' is script text, not a file name; reload makes no sense then
            file = AvsFile.ParseScript(path, true);
            btnReloadVideo.Enabled = false;
        }
        else
        {
            file = mainForm.MediaFileFactory.Open(path);
            if (file == null)
            {
                throw new Exception("The video stream cannot be opened");
            }
            btnReloadVideo.Enabled = true;
        }
        reader = file.GetVideoReader();
    }
    catch (AviSynthException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return(false);
    }
    catch (ArgumentException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return(false);
    }
    catch (Exception e)
    {
        MessageBox.Show("The file " + path + " cannot be opened.\r\n" + "Error message: " + e.Message, "Cannot open video input", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return(false);
    }
    if (reader != null && reader.FrameCount > 0)
    {
        // configure the position slider for the clip length
        this.positionSlider.Minimum = 0;
        this.positionSlider.Maximum = reader.FrameCount - 1;
        this.positionSlider.TickFrequency = this.positionSlider.Maximum / 20;
        this.viewerType = type;
        this.hasAR = hasAR;
        zoomMaxWidth = 0;
        SetMaxZoomWidth();
        doInitialAdjustment();
        // start at the requested frame, or in the middle of the clip by default
        int iStart = 0;
        if (startFrame >= 0)
        {
            iStart = startFrame;
        }
        else
        {
            iStart = reader.FrameCount / 2;
        }
        videoPreview.LoadVideo(reader, file.VideoInfo.FPS, iStart);
        setTitleText();
        return(true);
    }
    return(false);
}
/// <summary>
/// creates the AVS Script file
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="indexFile">the index file (d2v/dga/dgi/ffindex) describing the source; may be empty</param>
/// <param name="inputFile">the source video file</param>
/// <param name="AR">custom display aspect ratio for this source, or null to auto-detect</param>
/// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not ar signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="_log">log item used to record progress and problems</param>
/// <param name="avsSettings">the AviSynth settings (template, resize/denoise/mod16 options)</param>
/// <param name="autoDeint">whether automatic source detection / deinterlacing is run</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="dar">output: display aspect ratio to signal, if any</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">whether the input resolution must be kept unchanged</param>
/// <param name="useChaptersMarks">whether chapter marks are turned into an x264 qp file</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string createAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth, bool signalAR, LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    dar = null;
    Dar customDAR;
    IMediaFile iMediaFile = null;
    IVideoReader reader;
    PossibleSources oPossibleSource;
    x264Device xTargetDevice = null;
    int outputWidthIncludingPadding = 0;
    int outputHeightIncludingPadding = 0;
    int outputWidthCropped = 0;
    int outputHeightCropped = 0;
    CropValues cropValues = new CropValues();
    bool bAdjustResolution = false;
    bool bCropped = false;

    // open index file to retrieve information
    if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
    {
        iMediaFile = new dgiFile(indexFile);
        oPossibleSource = PossibleSources.dgi;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
    {
        iMediaFile = new d2vFile(indexFile);
        oPossibleSource = PossibleSources.d2v;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA)
    {
        iMediaFile = new dgaFile(indexFile);
        oPossibleSource = PossibleSources.dga;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
    {
        iMediaFile = new ffmsFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.ffindex;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
    {
        string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
        iMediaFile = AvsFile.ParseScript(tempAvs);
        oPossibleSource = PossibleSources.directShow;
    }
    else
    {
        iMediaFile = AvsFile.OpenScriptFile(inputFile);
        oPossibleSource = PossibleSources.avs;
    }
    reader = iMediaFile.GetVideoReader();

    // abort if the index file is invalid
    if (reader.FrameCount < 1)
    {
        _log.Error("There are 0 frames in the index file. Aborting...");
        return("");
    }

    if (AR == null)
    {
        // AR needs to be detected automatically now
        _log.LogValue("Auto-detect aspect ratio", AR == null);
        customDAR = iMediaFile.VideoInfo.DAR;
        if (customDAR.ar <= 0)
        {
            customDAR = Dar.ITU16x9PAL;
            _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
    {
        customDAR = AR.Value;
    }
    _log.LogValue("Aspect ratio", customDAR);

    // check x264 settings (target device, chapter file)
    if (settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xTargetDevice = xs.TargetDevice;
        // create qpf file if necessary
        if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks)
        {
            qpfile = job.PostprocessingProperties.ChapterFile;
            if ((Path.GetExtension(qpfile).ToLower(System.Globalization.CultureInfo.InvariantCulture)) == ".txt")
            {
                qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS);
            }
            if (File.Exists(qpfile))
            {
                xs.UseQPFile = true;
                xs.QPFile = qpfile;
            }
        }
    }

    // if encoding for a specific device select the appropriate resolution setting
    if (xTargetDevice != null && xTargetDevice.Width > 0 && xTargetDevice.Height > 0)
    {
        if (keepInputResolution)
        {
            // resolution should not be changed - use input resolution
            outputWidthCropped = (int)iMediaFile.VideoInfo.Width;
            outputHeightCropped = (int)iMediaFile.VideoInfo.Height;
        }
        else
        {
            // crop input video if selected
            if (autoCrop)
            {
                if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
                {
                    _log.Error("Autocrop failed. Aborting...");
                    return("");
                }
                bCropped = true;
            }
            outputWidthCropped = desiredOutputWidth;
            // tentative height for the device checks below; dar is reset as it is recomputed later
            outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            dar = null;
        }
        if (xTargetDevice.Width < outputWidthCropped)
        {
            // width must be lowered to be target conform
            bAdjustResolution = true;
            if (keepInputResolution)
            {
                keepInputResolution = false;
                _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution width of " + outputWidthCropped + ". The maximum value is " + xTargetDevice.Width + ".");
            }
        }
        else if (xTargetDevice.Height < outputHeightCropped)
        {
            // height must be lowered to be target conform
            bAdjustResolution = true;
            if (keepInputResolution)
            {
                keepInputResolution = false;
                _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution height of " + outputHeightCropped + ". The maximum value is " + xTargetDevice.Height + ".");
            }
        }
        else if (xTargetDevice.BluRay)
        {
            // Blu-ray only allows a fixed set of resolutions
            string strResolution = outputWidthCropped + "x" + outputHeightCropped;
            if (!strResolution.Equals("1920x1080") && !strResolution.Equals("1440x1080") && !strResolution.Equals("1280x720") && !strResolution.Equals("720x576") && !strResolution.Equals("720x480"))
            {
                bAdjustResolution = true;
                if (keepInputResolution)
                {
                    keepInputResolution = false;
                    _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution of " + outputWidthCropped + "x" + outputHeightCropped + ". Supported are 1920x1080, 1440x1080, 1280x720, 720x576 and 720x480.");
                }
            }
            else
            {
                outputWidthIncludingPadding = outputWidthCropped;
                outputHeightIncludingPadding = outputHeightCropped;
            }
        }
        if (bAdjustResolution)
        {
            if (!autoCrop)
            {
                autoCrop = true;
                _log.LogEvent("Enabling \"AutoCrop\"");
            }
        }
    }
    else
    {
        outputWidthCropped = desiredOutputWidth;
    }

    if (!keepInputResolution && autoCrop && !bCropped)
    {
        // crop input video if required
        if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
        {
            _log.Error("Autocrop failed. Aborting...");
            return("");
        }
        bCropped = true;
    }

    if (bAdjustResolution)
    {
        // adjust horizontal resolution as width or height are too large
        if (xTargetDevice.BluRay)
        {
            if (outputWidthCropped >= 1920)
            {
                outputWidthCropped = 1920;
                outputHeightIncludingPadding = 1080;
                _log.LogEvent("Force resolution of 1920x1080 as required for " + xTargetDevice.Name);
            }
            else if (outputWidthCropped >= 1280)
            {
                outputWidthCropped = 1280;
                outputHeightIncludingPadding = 720;
                _log.LogEvent("Force resolution of 1280x720 as required for " + xTargetDevice.Name);
            }
            else
            {
                // SD: pick 576 or 480 lines depending on the (PAL) frame rate
                outputWidthCropped = 720;
                Double dfps = Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D;
                if (dfps == 25)
                {
                    outputHeightIncludingPadding = 576;
                    _log.LogEvent("Force resolution of 720x576 as required for " + xTargetDevice.Name);
                }
                else
                {
                    outputHeightIncludingPadding = 480;
                    _log.LogEvent("Force resolution of 720x480 as required for " + xTargetDevice.Name);
                }
            }
            outputWidthIncludingPadding = outputWidthCropped;
        }
        else if (outputWidthCropped > xTargetDevice.Width)
        {
            outputWidthCropped = xTargetDevice.Width;
            _log.LogEvent("Set resolution width to " + outputWidthCropped + " as required for " + xTargetDevice.Name);
        }
        // adjust cropped vertical resolution
        outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
        // shrink the width in mod16 steps until the height fits the device limits
        while (outputHeightCropped > xTargetDevice.Height || (xTargetDevice.BluRay && outputHeightCropped > outputHeightIncludingPadding))
        {
            outputWidthCropped -= 16;
            outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
        }
    }

    if (keepInputResolution)
    {
        outputWidthCropped = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
        outputHeightCropped = outputHeightIncludingPadding = (int)iMediaFile.VideoInfo.Height;
        dar = customDAR;
    }
    else if (xTargetDevice == null || (xTargetDevice != null && !xTargetDevice.BluRay))
    {
        // Minimise upsizing
        int sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width - cropValues.right - cropValues.left;
        if (autoCrop)
        {
            sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width;
        }

        if (outputWidthCropped > sourceHorizontalResolution)
        {
            if (avsSettings.Mod16Method == mod16Method.resize)
            {
                while (outputWidthCropped > sourceHorizontalResolution + 16)
                {
                    outputWidthCropped -= 16;
                }
            }
            else
            {
                outputWidthCropped = sourceHorizontalResolution;
            }
        }
    }

    // calculate height
    if (!keepInputResolution)
    {
        outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
    }

    // set complete padding if required
    if (outputHeightIncludingPadding == 0 && outputWidthIncludingPadding > 0)
    {
        outputHeightIncludingPadding = outputHeightCropped;
    }
    if (outputWidthIncludingPadding == 0 && outputHeightIncludingPadding > 0)
    {
        outputWidthIncludingPadding = outputWidthCropped;
    }

    // write calculated output resolution into the log
    _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height);
    if (autoCrop && !keepInputResolution && cropValues.isCropped())
    {
        _log.LogValue("Autocrop values", cropValues);
        _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    else
    {
        _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    if (outputWidthIncludingPadding > 0 && (outputWidthIncludingPadding != outputWidthCropped || outputHeightIncludingPadding != outputHeightCropped))
    {
        _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
    }

    if (outputWidthCropped <= 0 || outputHeightCropped <= 0)
    {
        _log.Error("Error in detection of output resolution");
        return("");
    }

    //Generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2);
    if (!inputLine.EndsWith(")"))
    {
        inputLine += ")";
    }

    _log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        // play through the source to detect interlacing and pick deinterlace filters
        raiseEvent("Automatic deinterlacing... ***PLEASE WAIT***");
        string d2vPath = indexFile;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        sd.stop();
        deinterlaceLines = filters[0].Script;
        if (interlaced)
        {
            _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
        }
        else
        {
            _log.LogValue("Deinterlacing used", deinterlaceLines);
        }
    }

    raiseEvent("Finalizing preprocessing... ***PLEASE WAIT***");
    // rebuild the input line, now with the detected interlacing and filter options
    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2);
    if (!inputLine.EndsWith(")"))
    {
        inputLine += ")";
    }

    if (!keepInputResolution && autoCrop)
    {
        cropLine = ScriptServer.GetCropLine(true, cropValues);
    }

    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

    if (!keepInputResolution)
    {
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped, outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod, autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height);
    }

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        // prepend the DAR globals so the encoder can signal the aspect ratio
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    else
    {
        if (xTargetDevice != null && xTargetDevice.BluRay)
        {
            // no DAR: force the x264 --sar value matching the Blu-ray resolution
            string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
            x264Settings _xs = (x264Settings)settings;
            if (strResolution.Equals("720x480"))
            {
                _xs.SampleAR = 4;
                _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("720x576"))
            {
                _xs.SampleAR = 5;
                _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
            {
                _xs.SampleAR = 1;
                _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1440x1080"))
            {
                _xs.SampleAR = 2;
                _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
        }
    }

    _log.LogValue("Generated Avisynth script", newScript);
    // write the script next to the index file, or into the working directory when there is none
    string strOutputAVSFile;
    if (String.IsNullOrEmpty(indexFile))
    {
        strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
    }
    else
    {
        strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
    }

    try
    {
        StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return("");
    }
    return(strOutputAVSFile);
}
/// <summary>
/// initializes the d2v reader
/// </summary>
/// <param name="fileName">the dvd2avi project file that this reader will process</param>
public d2vFile(string fileName)
{
    this.fileName = fileName;
    // NOTE(review): no LoadPlugin here — presumably DGDecode is auto-loaded from
    // the AviSynth plugin directory in this configuration; verify
    string script = "DGDecode_Mpeg2Source(\"" + this.fileName + "\")";
    reader = AvsFile.ParseScript(script);
    this.readFileProperties();
}
/// <summary>
/// creates the AVS Script file
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="indexFile">the index file (D2V/DGI/DGM/FFMS/L-SMASH) describing the source; may be empty</param>
/// <param name="inputFile">the source video file</param>
/// <param name="AR">the display aspect ratio to force; null means auto-detect from the source</param>
/// <param name="desiredOutputWidth">desired horizontal resolution of the output; 0 keeps the input width</param>
/// <param name="_log">log item all analysis results are written to</param>
/// <param name="avsSettings">the AviSynth profile settings used to build the script</param>
/// <param name="autoDeint">whether source detection / automatic deinterlacing should be run</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">whether the input resolution must not be changed</param>
/// <param name="useChaptersMarks">whether chapter positions should be forced as keyframes (qpf file)</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string CreateAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth, LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    Dar? dar = null;
    Dar customDAR;
    IMediaFile iMediaFile = null;
    IVideoReader reader;
    PossibleSources oPossibleSource;
    x264Device xTargetDevice = null;
    CropValues cropValues = new CropValues();
    int outputWidthIncludingPadding = 0;
    int outputHeightIncludingPadding = 0;
    int outputWidthCropped = 0;
    int outputHeightCropped = 0;

    // encode anamorph either when it is selected in the avs profile or the input resolution should not be touched
    bool signalAR = (avsSettings.Mod16Method != mod16Method.none) || keepInputResolution;

    // make sure the proper anamorphic encode is selected if the input resolution should not be touched
    if (keepInputResolution && avsSettings.Mod16Method != mod16Method.nonMod16)
    {
        avsSettings.Mod16Method = mod16Method.nonMod16;
    }

    // open index file to retrieve information; the index type decides which reader wraps the source
    if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
    {
        iMediaFile = new dgiFile(indexFile);
        oPossibleSource = PossibleSources.dgi;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
    {
        iMediaFile = new d2vFile(indexFile);
        oPossibleSource = PossibleSources.d2v;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM)
    {
        iMediaFile = new dgmFile(indexFile);
        oPossibleSource = PossibleSources.dgm;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
    {
        iMediaFile = new ffmsFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.ffindex;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
    {
        iMediaFile = new lsmashFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.lsmash;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
    {
        // plain AVISource() without audio; getAssumeFPS appends a frame-rate hint line
        string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
        iMediaFile = AvsFile.ParseScript(tempAvs, true);
        oPossibleSource = PossibleSources.avisource;
    }
    else
    {
        iMediaFile = AvsFile.OpenScriptFile(inputFile, true);
        oPossibleSource = PossibleSources.avs;
    }
    reader = iMediaFile.GetVideoReader();

    // abort if the index file is invalid
    if (reader.FrameCount < 1)
    {
        _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
        return("");
    }

    if (AR == null)
    {
        // AR needs to be detected automatically now
        _log.LogValue("Auto-detect aspect ratio", AR == null);
        customDAR = iMediaFile.VideoInfo.DAR;
        if (customDAR.AR <= 0)
        {
            customDAR = Dar.ITU16x9PAL;
            _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
    {
        customDAR = AR.Value;
    }
    _log.LogValue("Aspect ratio", customDAR);

    // check x264 settings (target device, chapter file)
    if (settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xTargetDevice = xs.TargetDevice;
        _log.LogValue("Target device", xTargetDevice.Name);
    }

    // get mod value for resizing
    int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

    // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
    if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
    {
        _log.Error("Autocrop failed. Aborting...");
        return("");
    }

    // cache the source properties so the media file can be released before the resolution math
    int inputWidth = (int)iMediaFile.VideoInfo.Width;
    int inputHeight = (int)iMediaFile.VideoInfo.Height;
    int inputFPS_D = (int)iMediaFile.VideoInfo.FPS_D;
    int inputFPS_N = (int)iMediaFile.VideoInfo.FPS_N;
    int inputFrameCount = (int)iMediaFile.VideoInfo.FrameCount;

    // force destruction of AVS script
    iMediaFile.Dispose();

    Dar? suggestedDar = null;
    if (desiredOutputWidth == 0)
    {
        desiredOutputWidth = outputWidthIncludingPadding = inputWidth;
    }
    else if (!avsSettings.Upsize && desiredOutputWidth > inputWidth)
    {
        outputWidthIncludingPadding = inputWidth;
    }
    else
    {
        outputWidthIncludingPadding = desiredOutputWidth;
    }

    CropValues paddingValues;
    bool resizeEnabled;
    int outputWidthWithoutUpsizing = outputWidthIncludingPadding;
    if (avsSettings.Upsize)
    {
        // extra pass with upsizing disabled, only to know (and log) the width reachable without it
        resizeEnabled = !keepInputResolution;
        CropValues cropValuesTemp = cropValues.Clone();
        int outputHeightIncludingPaddingTemp = 0;
        Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log);
    }
    resizeEnabled = !keepInputResolution;
    Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log);
    keepInputResolution = !resizeEnabled;
    if (signalAR && suggestedDar.HasValue)
    {
        dar = suggestedDar;
    }

    // log calculated output resolution
    outputWidthCropped = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
    outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
    _log.LogValue("Input resolution", inputWidth + "x" + inputHeight);
    _log.LogValue("Desired maximum width", desiredOutputWidth);
    if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
    {
        _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
    }
    if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
    {
        _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
    }
    if (cropValues.isCropped())
    {
        _log.LogValue("Autocrop values", cropValues);
        _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    else
    {
        _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    if (paddingValues.isCropped())
    {
        _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
    }

    // generate the avs script based on the template; placeholders are replaced below as each line is built
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    // preliminary input line (no deinterlacing/colour correction) used for source detection
    inputLine = ScriptServer.GetInputLine(
        inputFile, indexFile, false, oPossibleSource, false, false, false, 0,
        avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

    if (IsJobStopped())
    {
        return("");
    }

    _log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        su.Status = "Automatic deinterlacing... ***PLEASE WAIT***";
        string d2vPath = indexFile;
        _sourceDetector = new SourceDetector(inputLine, d2vPath, avsSettings.PreferAnimeDeinterlace, inputFrameCount, Thread.CurrentThread.Priority, MainForm.Instance.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(AnalyseUpdate), new FinishedAnalysis(FinishedAnalysis));
        finished = false;
        _sourceDetector.Analyse();
        WaitTillAnalyseFinished();
        _sourceDetector = null;
        // filters is filled by the FinishedAnalysis callback; may be null if detection produced nothing
        if (filters != null)
        {
            deinterlaceLines = filters[0].Script;
            if (interlaced)
            {
                _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
            }
            else
            {
                _log.LogValue("Deinterlacing used", deinterlaceLines);
            }
        }
    }

    if (IsJobStopped())
    {
        return("");
    }

    su.Status = "Finalizing preprocessing... ***PLEASE WAIT***";

    // final input line, now with the detected interlacing and the profile's filter options
    inputLine = ScriptServer.GetInputLine(
        inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0,
        avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

    // get crop & resize lines
    if (!keepInputResolution)
    {
        if (autoCrop)
        {
            cropLine = ScriptServer.GetCropLine(cropValues);
        }
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || inputWidth != outputWidthCropped, outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod, autoCrop, cropValues, inputWidth, inputHeight);
    }

    // get denoise line
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        // anamorphic encode: prepend the DAR globals consumed later by the encoder setup
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    else
    {
        if (xTargetDevice != null && xTargetDevice.BluRay)
        {
            // Blu-Ray permits only specific SARs per resolution; force a compliant x264 --sar value
            string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
            x264Settings _xs = (x264Settings)settings;
            if (strResolution.Equals("720x480"))
            {
                _xs.SampleAR = 4;
                _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("720x576"))
            {
                _xs.SampleAR = 5;
                _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
            {
                _xs.SampleAR = 1;
                _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1440x1080"))
            {
                _xs.SampleAR = 2;
                _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
        }
    }

    _log.LogValue("Generated AviSynth script", newScript);

    // write the script next to the index file, or into the working directory when there is none
    string strOutputAVSFile;
    if (String.IsNullOrEmpty(indexFile))
    {
        strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
    }
    else
    {
        strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
    }

    try
    {
        StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (Exception i)
    {
        _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return("");
    }

    // re-open the finished script to verify it and log its final properties
    JobUtil.GetAllInputProperties(strOutputAVSFile, out ulong numberOfFrames, out double fps, out int fps_n, out int fps_d, out int hres, out int vres, out Dar d, out AviSynthColorspace colorspace);
    _log.LogEvent("resolution: " + hres + "x" + vres);
    _log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
    _log.LogEvent("frames: " + numberOfFrames);
    TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps);
    _log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}", (int)(oTime.TotalHours), oTime.Minutes, oTime.Seconds, oTime.Milliseconds));
    _log.LogValue("aspect ratio", d);
    _log.LogValue("color space", colorspace.ToString());

    if (IsJobStopped())
    {
        return("");
    }

    // create qpf file if necessary and possible
    if (job.PostprocessingProperties.ChapterInfo.HasChapters && useChaptersMarks && settings != null && settings is x264Settings)
    {
        fps = (double)fps_n / fps_d;
        string strChapterFile = Path.ChangeExtension(strOutputAVSFile, ".qpf");
        job.PostprocessingProperties.ChapterInfo.ChangeFps(fps);
        if (job.PostprocessingProperties.ChapterInfo.SaveQpfile(strChapterFile))
        {
            job.PostprocessingProperties.FilesToDelete.Add(strChapterFile);
            _log.LogValue("qpf file created", strChapterFile);
            x264Settings xs = (x264Settings)settings;
            xs.UseQPFile = true;
            xs.QPFile = strChapterFile;
        }
    }

    // check if a timestamp file has to be used
    if (!String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xs.TCFile = job.PostprocessingProperties.TimeStampFile;
    }

    return(strOutputAVSFile);
}
/// <summary>
/// opens the given file as an AviSynth script
/// </summary>
/// <param name="file">path of the script to open</param>
/// <returns>the opened media file</returns>
public IMediaFile Open(string file)
{
    return AvsFile.OpenScriptFile(file, true);
}
/// <summary>
/// creates the AVS Script file
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="indexFile">the index file (D2V/DGA/FFMS/L-SMASH) describing the source; may be empty</param>
/// <param name="inputFile">the source video file</param>
/// <param name="AR">the display aspect ratio to force; null means auto-detect from the source</param>
/// <param name="desiredOutputWidth">desired horizontal resolution of the output; 0 keeps the input width</param>
/// <param name="signalAR">whether or not ar signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="_log">log item all analysis results are written to</param>
/// <param name="avsSettings">the AviSynth profile settings used to build the script</param>
/// <param name="autoDeint">whether source detection / automatic deinterlacing should be run</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="dar">receives the suggested display aspect ratio for signalling, or null</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">whether the input resolution must not be changed</param>
/// <param name="useChaptersMarks">whether chapter positions should be forced as keyframes (qpf file)</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string createAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth, bool signalAR, LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    dar = null;
    Dar customDAR;
    IMediaFile iMediaFile = null;
    IVideoReader reader;
    PossibleSources oPossibleSource;
    x264Device xTargetDevice = null;
    CropValues cropValues = new CropValues();
    int outputWidthIncludingPadding = 0;
    int outputHeightIncludingPadding = 0;
    int outputWidthCropped = 0;
    int outputHeightCropped = 0;

    // open index file to retrieve information; the index type decides which reader wraps the source
    if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
    {
        iMediaFile = new dgiFile(indexFile);
        oPossibleSource = PossibleSources.dgi;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
    {
        iMediaFile = new d2vFile(indexFile);
        oPossibleSource = PossibleSources.d2v;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA)
    {
        iMediaFile = new dgaFile(indexFile);
        oPossibleSource = PossibleSources.dga;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
    {
        iMediaFile = new ffmsFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.ffindex;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
    {
        iMediaFile = new lsmashFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.lsmash;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
    {
        // plain AVISource() without audio; getAssumeFPS appends a frame-rate hint line
        string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
        iMediaFile = AvsFile.ParseScript(tempAvs);
        oPossibleSource = PossibleSources.directShow;
    }
    else
    {
        iMediaFile = AvsFile.OpenScriptFile(inputFile);
        oPossibleSource = PossibleSources.avs;
    }
    reader = iMediaFile.GetVideoReader();

    // abort if the index file is invalid
    if (reader.FrameCount < 1)
    {
        _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
        return("");
    }

    if (AR == null)
    {
        // AR needs to be detected automatically now
        _log.LogValue("Auto-detect aspect ratio", AR == null);
        customDAR = iMediaFile.VideoInfo.DAR;
        if (customDAR.AR <= 0)
        {
            customDAR = Dar.ITU16x9PAL;
            _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
    {
        customDAR = AR.Value;
    }
    _log.LogValue("Aspect ratio", customDAR);

    // check x264 settings (target device, chapter file)
    if (settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xTargetDevice = xs.TargetDevice;
        _log.LogValue("Target device", xTargetDevice.Name);

        // create qpf file if necessary
        if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks)
        {
            qpfile = job.PostprocessingProperties.ChapterFile;
            if ((Path.GetExtension(qpfile).ToLowerInvariant()) == ".txt")
            {
                qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS);
            }
            if (File.Exists(qpfile))
            {
                xs.UseQPFile = true;
                xs.QPFile = qpfile;
            }
        }
    }

    // get mod value for resizing
    int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

    // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
    if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
    {
        _log.Error("Autocrop failed. Aborting...");
        return("");
    }

    Dar? suggestedDar = null;
    if (desiredOutputWidth == 0)
    {
        desiredOutputWidth = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
    }
    else if (!avsSettings.Upsize && desiredOutputWidth > (int)iMediaFile.VideoInfo.Width)
    {
        outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
    }
    else
    {
        outputWidthIncludingPadding = desiredOutputWidth;
    }

    CropValues paddingValues;
    bool resizeEnabled;
    int outputWidthWithoutUpsizing = outputWidthIncludingPadding;
    if (avsSettings.Upsize)
    {
        // extra pass with upsizing disabled, only to know (and log) the width reachable without it
        resizeEnabled = !keepInputResolution;
        CropValues cropValuesTemp = cropValues.Clone();
        int outputHeightIncludingPaddingTemp = 0;
        Resolution.GetResolution((int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height, customDAR, ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D, ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log);
    }
    resizeEnabled = !keepInputResolution;
    Resolution.GetResolution((int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height, customDAR, ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D, ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log);
    keepInputResolution = !resizeEnabled;
    if ((keepInputResolution || signalAR) && suggestedDar.HasValue)
    {
        dar = suggestedDar;
    }

    // log calculated output resolution
    outputWidthCropped = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
    outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
    _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height);
    _log.LogValue("Desired maximum width", desiredOutputWidth);
    if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
    {
        _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
    }
    if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
    {
        _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
    }
    if (cropValues.isCropped())
    {
        _log.LogValue("Autocrop values", cropValues);
        _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    else
    {
        _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    if (paddingValues.isCropped())
    {
        _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
    }

    // generate the avs script based on the template; placeholders are replaced below as each line is built
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    // preliminary input line (no deinterlacing/colour correction) used for source detection
    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2);
    if (!inputLine.EndsWith(")"))
    {
        inputLine += ")";
    }

    _log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        raiseEvent("Automatic deinterlacing... ***PLEASE WAIT***");
        string d2vPath = indexFile;
        _sourceDetector = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        _sourceDetector.analyse();
        waitTillAnalyseFinished();
        _sourceDetector.stop();
        _sourceDetector = null;
        // NOTE(review): filters is accessed without a null check here, unlike the newer
        // CreateAVSFile overload which guards with "if (filters != null)" — confirm the
        // analysis callback always populates filters before this point
        deinterlaceLines = filters[0].Script;
        if (interlaced)
        {
            _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
        }
        else
        {
            _log.LogValue("Deinterlacing used", deinterlaceLines);
        }
    }

    raiseEvent("Finalizing preprocessing... ***PLEASE WAIT***");

    // final input line, now with the detected interlacing and the profile's filter options
    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2);
    if (!inputLine.EndsWith(")"))
    {
        inputLine += ")";
    }

    if (!keepInputResolution && autoCrop)
    {
        cropLine = ScriptServer.GetCropLine(true, cropValues);
    }

    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

    if (!keepInputResolution)
    {
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped, outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod, autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height);
    }

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        // anamorphic encode: prepend the DAR globals consumed later by the encoder setup
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    else
    {
        if (xTargetDevice != null && xTargetDevice.BluRay)
        {
            // Blu-Ray permits only specific SARs per resolution; force a compliant x264 --sar value
            string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
            x264Settings _xs = (x264Settings)settings;
            if (strResolution.Equals("720x480"))
            {
                _xs.SampleAR = 4;
                _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("720x576"))
            {
                _xs.SampleAR = 5;
                _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
            {
                _xs.SampleAR = 1;
                _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1440x1080"))
            {
                _xs.SampleAR = 2;
                _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
        }
    }

    _log.LogValue("Generated Avisynth script", newScript);

    // write the script next to the index file, or into the working directory when there is none
    string strOutputAVSFile;
    if (String.IsNullOrEmpty(indexFile))
    {
        strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
    }
    else
    {
        strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
    }

    try
    {
        StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return("");
    }
    return(strOutputAVSFile);
}
/// <summary>
/// prepares and launches one source-detection pass: opens the analysis script to get the
/// frame count, derives the SelectRangeEvery sampling parameters from the analysis settings,
/// builds the instrumented script and runs it on a background thread
/// </summary>
/// <param name="scriptType">0 = interlacing detection, 1 = field order detection</param>
/// <param name="frameCount">overrides the detected frame count when greater than 0</param>
/// <param name="trimLine">trim statement passed through to the generated script</param>
private void runScript(int scriptType, int frameCount, string trimLine)
{
    int numFrames = 0;
    try
    {
        // open the clip only to read its frame count
        using (AvsFile af = AvsFile.ParseScript(script))
        {
            numFrames = (int)af.Info.FrameCount;
        }
    }
    catch (Exception e)
    {
        error = true;
        errorMessage = "The input clip for source detection could not be opened.\r\n" + e.Message;
        finishProcessing();
        return;
    }
    if (frameCount > 0)
    {
        numFrames = frameCount;
    }

    const int selectLength = 5; // This used to be variable, but I found no need to. It's useful to keep this name, though
    int selectEvery = (int)((100.0 * (double)selectLength) / ((double)settings.AnalysePercent));

    int minAnalyseSections = settings.MinimumAnalyseSections;
    if (scriptType == 1) // Field order script. For this, we separatefields, so we have twice as many frames anyway
                         // It saves time, and costs nothing to halve the minimum sections to analyse for this example
    {
        minAnalyseSections = minAnalyseSections / 2 + 1; // We add one to prevent getting 0;
    }

    // Check if we need to modify the SelectRangeEvery parameters:
    if (((double)selectLength * (double)numFrames / (double)selectEvery) < (int)minAnalyseSections * 5)
    {
        if (numFrames >= minAnalyseSections * 5) // If there are actually enough frames
        {
            selectEvery = (int)((((double)numFrames) / ((double)minAnalyseSections * 5.0)) * (double)selectLength);
        }
        else
        {
            // if there aren't enough frames, analyse everything -- that's got to be good enough
            selectEvery = selectLength;
        }
    }

    // unique log file per run so parallel/repeated detections don't clash
    string logFileName = getLogFileName((scriptType == 1) ? "ff_interlace-" + Guid.NewGuid().ToString("N") + ".log" : "interlace-" + Guid.NewGuid().ToString("N") + ".log");
    if (File.Exists(logFileName))
    {
        File.Delete(logFileName);
    }

    string resultScript = ScriptServer.getScript(scriptType, script, trimLine, logFileName, selectEvery, selectLength); // stax

    // run the script and the subsequent log analysis on a background thread
    MethodInvoker mi = delegate
    {
        try
        {
            Process(resultScript); // stax
            if (error)
            {
                return;
            }
            if (!continueWorking)
            {
                return;
            }
            if (scriptType == 0) // detection
            {
                analyse(logFileName, selectEvery, selectLength, numFrames);
            }
            else if (scriptType == 1) // field order
            {
                analyseFF(logFileName);
            }
        }
        finally
        {
            // best-effort cleanup of the temporary log file
            try
            {
                File.Delete(logFileName);
            }
            catch (Exception) { }
        }
    };
    Thread t = new Thread(new ThreadStart(mi));
    t.Priority = settings.Priority;
    t.Start();
}
/// <summary>
/// reloads the video, sets up the proper window size and enables / disables the GUI buttons depending on the
/// preview type set
/// </summary>
/// <returns>true if the video could be opened, false if not</returns>
public bool reloadVideo()
{
    videoPreview.UnloadVideo();

    // NOTE(review): lock (this) is discouraged (external code can take the same lock);
    // a private lock object would be preferable, but adding a field is out of scope here
    lock (this)
    {
        // release the previously opened file before reopening
        if (file != null)
        {
            file.Dispose();
        }
    }

    try
    {
        if (bInlineAVS)
        {
            // strFileName holds the script text itself, not a path
            file = AvsFile.ParseScript(strFileName, true);
        }
        else
        {
            file = mainForm.MediaFileFactory.Open(strFileName);
            if (file == null)
            {
                throw new Exception("The video stream cannot be opened");
            }
        }
        reader = file.GetVideoReader();
    }
    catch (AviSynthException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return(false);
    }
    catch (ArgumentException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return(false);
    }
    catch (Exception e)
    {
        MessageBox.Show("The file " + strFileName + " cannot be opened.\r\n" + "Error message: " + e.Message, "Cannot open video input", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return(false);
    }

    if (reader != null && reader.FrameCount > 0)
    {
        // configure the position slider for the new clip
        this.positionSlider.Minimum = 0;
        this.positionSlider.Maximum = reader.FrameCount - 1;
        this.positionSlider.TickFrequency = this.positionSlider.Maximum / 20;
        SetMaxZoomWidth();
        doInitialAdjustment();

        // keep the previous slider position when it is still valid, otherwise jump to the middle
        int iStart = 0;
        if (positionSlider.Value >= 0 && positionSlider.Value <= reader.FrameCount)
        {
            iStart = positionSlider.Value;
        }
        else
        {
            iStart = reader.FrameCount / 2;
        }
        videoPreview.LoadVideo(reader, file.VideoInfo.FPS, iStart);
        setTitleText();
        return(true);
    }
    return(false);
}
/// <summary>
/// runs the analysis script frame by frame so that its logging filters fill the log file,
/// periodically reading intermediate section counts so the pass can stop early once enough
/// useful sections have been found; a helper thread reports progress while this runs
/// </summary>
/// <param name="scriptBlock">the AviSynth script to execute</param>
/// <param name="strLogFile">the log file the script writes its per-section results to</param>
/// <param name="scriptType">0 = interlacing detection, 1 = field order detection</param>
private void Process(string scriptBlock, string strLogFile, int scriptType)
{
    try
    {
        using (AvsFile af = AvsFile.ParseScript(scriptBlock, false))
        {
            // i, running and iNextFrameCheck are shared with the progress thread below
            int i = 0;
            int frameCount = (int)af.VideoInfo.FrameCount;
            bool running = true;
            int iNextFrameCheck = frameCount;
            if (settings.AnalysePercent == 0)
            {
                // full analysis: schedule the first early-exit check after the larger of the two minimums
                if (settings.MinimumAnalyseSections > settings.MinimumUsefulSections)
                {
                    iNextFrameCheck = settings.MinimumAnalyseSections * sectionLength;
                }
                else
                {
                    iNextFrameCheck = settings.MinimumUsefulSections * sectionLength;
                }
            }

            // progress reporter: polls the shared loop counter once per second
            new Thread(new ThreadStart(delegate
            {
                while (running && continueWorking)
                {
                    if (analyseUpdate != null)
                    {
                        if (i >= iNextFrameCheck)
                        {
                            analyseUpdate(iNextFrameCheck - 1, iNextFrameCheck);
                        }
                        else
                        {
                            analyseUpdate(i, iNextFrameCheck);
                        }
                    }
                    MeGUI.core.util.Util.Wait(1000);
                }
            })).Start();

            if (!continueWorking)
            {
                isStopped = true;
                return;
            }

            IntPtr zero = new IntPtr(0);
            if (settings.AnalysePercent == 0)
            {
                for (i = 0; i < frameCount && continueWorking; i++)
                {
                    if (i > iNextFrameCheck)
                    {
                        // time to look at the intermediate results and maybe stop early
                        if (scriptType == 0)
                        {
                            if (!GetSectionCounts(strLogFile))
                            {
                                // error detected
                                running = false;
                                break;
                            }
                            if (oSourceInfo.numInt + oSourceInfo.numProg + oSourceInfo.numTC > settings.MinimumUsefulSections || CheckDecimate(oSourceInfo.sectionsWithMovingFrames))
                            {
                                running = false;
                                break;
                            }
                            // no sufficient information yet
                            // try to estimate when the next check should happen
                            iNextFrameCheck = (int)((2 - (decimal)(oSourceInfo.numInt + oSourceInfo.numProg + oSourceInfo.numTC) / settings.MinimumUsefulSections) * i);
                            if (iNextFrameCheck - i < 100)
                            {
                                iNextFrameCheck += 100;
                            }
                        }
                        else
                        {
                            if (!GetSectionCountsFF(strLogFile))
                            {
                                // error detected
                                running = false;
                                break;
                            }
                            if (oSourceInfo.sectionCountBFF + oSourceInfo.sectionCountTFF > settings.MinimumUsefulSections)
                            {
                                running = false;
                                break;
                            }
                            // no sufficient information yet
                            // try to estimate when the next check should happen
                            iNextFrameCheck = (int)((2 - (decimal)(oSourceInfo.sectionCountBFF + oSourceInfo.sectionCountTFF) / settings.MinimumUsefulSections) * i);
                            if (iNextFrameCheck - i < 100)
                            {
                                iNextFrameCheck += 100;
                            }
                        }
                    }
                    // _mre allows pausing; reading the frame makes the logging filters run
                    _mre.WaitOne();
                    af.Clip.ReadFrame(zero, 0, i);
                }
            }
            else
            {
                // sampled analysis: just decode every frame of the (already subsampled) script
                for (i = 0; i < frameCount && continueWorking; i++)
                {
                    _mre.WaitOne();
                    af.Clip.ReadFrame(zero, 0, i);
                }
            }

            if (!continueWorking)
            {
                isStopped = true;
                return;
            }

            if (running)
            {
                // loop ran to completion: collect the final section counts
                if (scriptType == 0)
                {
                    GetSectionCounts(strLogFile);
                }
                else
                {
                    GetSectionCountsFF(strLogFile);
                }
                running = false;
            }
        }
    }
    catch (Exception ex)
    {
        error = true;
        errorMessage = "Error opening analysis script:\r\n" + ex.Message;
        FinishProcessing();
    }
}