Example #1
        /// <summary>
        /// Creates the AviSynth script file.
        /// If the source can be opened, auto-cropping is performed; then, depending on the AR settings,
        /// the proper resolution for automatic resizing is calculated, taking the derived cropping
        /// values into account; finally the AviSynth script is written and its file name returned.
        /// </summary>
        /// <param name="indexFile">the index file of the source</param>
        /// <param name="inputFile">the source video file</param>
        /// <param name="AR">the display aspect ratio to use; null to auto-detect it</param>
        /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
        /// <param name="_log">the log item to write to</param>
        /// <param name="avsSettings">the AviSynth profile settings</param>
        /// <param name="autoDeint">whether automatic deinterlacing detection is run</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="autoCrop">whether auto-cropping is used for the input</param>
        /// <param name="keepInputResolution">whether the input resolution must be kept unchanged</param>
        /// <param name="useChaptersMarks">whether chapter marks are written to a qpf file</param>
        /// <returns>the name of the AviSynth script created, or an empty string if there was an error</returns>
        private string CreateAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth,
                                     LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings,
                                     bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
        {
            Dar?            dar = null;
            Dar             customDAR;
            IMediaFile      iMediaFile = null;
            IVideoReader    reader;
            PossibleSources oPossibleSource;
            x264Device      xTargetDevice = null;
            CropValues      cropValues    = new CropValues();

            int outputWidthIncludingPadding  = 0;
            int outputHeightIncludingPadding = 0;
            int outputWidthCropped           = 0;
            int outputHeightCropped          = 0;

            // encode anamorphically either when it is selected in the AVS profile or when the input resolution should not be touched
            bool signalAR = (avsSettings.Mod16Method != mod16Method.none) || keepInputResolution;

            // make sure the proper anamorphic encode is selected if the input resolution should not be touched
            if (keepInputResolution && avsSettings.Mod16Method != mod16Method.nonMod16)
            {
                avsSettings.Mod16Method = mod16Method.nonMod16;
            }

            // open index file to retrieve information
            if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
            {
                iMediaFile      = new dgiFile(indexFile);
                oPossibleSource = PossibleSources.dgi;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
            {
                iMediaFile      = new d2vFile(indexFile);
                oPossibleSource = PossibleSources.d2v;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM)
            {
                iMediaFile      = new dgmFile(indexFile);
                oPossibleSource = PossibleSources.dgm;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
            {
                iMediaFile      = new ffmsFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.ffindex;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
            {
                iMediaFile      = new lsmashFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.lsmash;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
            {
                string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
                iMediaFile      = AvsFile.ParseScript(tempAvs, true);
                oPossibleSource = PossibleSources.avisource;
            }
            else
            {
                iMediaFile      = AvsFile.OpenScriptFile(inputFile, true);
                oPossibleSource = PossibleSources.avs;
            }
            reader = iMediaFile.GetVideoReader();

            // abort if the index file is invalid
            if (reader.FrameCount < 1)
            {
                _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
                return("");
            }

            if (AR == null)
            {
                // AR needs to be detected automatically now
                _log.LogValue("Auto-detect aspect ratio", AR == null);
                customDAR = iMediaFile.VideoInfo.DAR;
                if (customDAR.AR <= 0)
                {
                    customDAR = Dar.ITU16x9PAL;
                    _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value;
            }
            _log.LogValue("Aspect ratio", customDAR);

            // check x264 settings (target device, chapter file)
            if (settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xTargetDevice = xs.TargetDevice;
                _log.LogValue("Target device", xTargetDevice.Name);
            }

            // get mod value for resizing
            int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

            // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
            if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
            {
                _log.Error("Autocrop failed. Aborting...");
                return("");
            }

            int inputWidth      = (int)iMediaFile.VideoInfo.Width;
            int inputHeight     = (int)iMediaFile.VideoInfo.Height;
            int inputFPS_D      = (int)iMediaFile.VideoInfo.FPS_D;
            int inputFPS_N      = (int)iMediaFile.VideoInfo.FPS_N;
            int inputFrameCount = (int)iMediaFile.VideoInfo.FrameCount;

            // release the opened media file
            iMediaFile.Dispose();

            Dar? suggestedDar = null;

            if (desiredOutputWidth == 0)
            {
                desiredOutputWidth = outputWidthIncludingPadding = inputWidth;
            }
            else if (!avsSettings.Upsize && desiredOutputWidth > inputWidth)
            {
                outputWidthIncludingPadding = inputWidth;
            }
            else
            {
                outputWidthIncludingPadding = desiredOutputWidth;
            }
            CropValues paddingValues;

            bool resizeEnabled;
            int  outputWidthWithoutUpsizing = outputWidthIncludingPadding;

            if (avsSettings.Upsize)
            {
                resizeEnabled = !keepInputResolution;
                CropValues cropValuesTemp = cropValues.Clone();
                int        outputHeightIncludingPaddingTemp = 0;
                Resolution.GetResolution(inputWidth, inputHeight, customDAR,
                                         ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true,
                                         avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D,
                                         ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log);
            }

            resizeEnabled = !keepInputResolution;
            Resolution.GetResolution(inputWidth, inputHeight, customDAR,
                                     ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true,
                                     avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D,
                                     ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log);
            keepInputResolution = !resizeEnabled;

            if (signalAR && suggestedDar.HasValue)
            {
                dar = suggestedDar;
            }

            // log calculated output resolution
            outputWidthCropped  = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
            outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
            _log.LogValue("Input resolution", inputWidth + "x" + inputHeight);
            _log.LogValue("Desired maximum width", desiredOutputWidth);
            if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
            {
                _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
            }
            if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
            {
                _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
            }
            if (cropValues.isCropped())
            {
                _log.LogValue("Autocrop values", cropValues);
                _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            else
            {
                _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            if (paddingValues.isCropped())
            {
                _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
            }

            // generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(
                inputFile, indexFile, false, oPossibleSource, false, false, false, 0,
                avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

            if (IsJobStopped())
            {
                return("");
            }

            _log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                su.Status = "Automatic deinterlacing...   ***PLEASE WAIT***";
                string d2vPath = indexFile;
                _sourceDetector = new SourceDetector(inputLine, d2vPath, avsSettings.PreferAnimeDeinterlace, inputFrameCount,
                                                     Thread.CurrentThread.Priority,
                                                     MainForm.Instance.Settings.SourceDetectorSettings,
                                                     new UpdateSourceDetectionStatus(AnalyseUpdate),
                                                     new FinishedAnalysis(FinishedAnalysis));
                finished = false;
                _sourceDetector.Analyse();
                WaitTillAnalyseFinished();
                _sourceDetector = null;
                if (filters != null)
                {
                    deinterlaceLines = filters[0].Script;
                    if (interlaced)
                    {
                        _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
                    }
                    else
                    {
                        _log.LogValue("Deinterlacing used", deinterlaceLines);
                    }
                }
            }

            if (IsJobStopped())
            {
                return("");
            }

            su.Status = "Finalizing preprocessing...   ***PLEASE WAIT***";

            // get final input filter line
            inputLine = ScriptServer.GetInputLine(
                inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock,
                false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

            // get crop & resize lines
            if (!keepInputResolution)
            {
                if (autoCrop)
                {
                    cropLine = ScriptServer.GetCropLine(cropValues);
                }
                resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || inputWidth != outputWidthCropped,
                                                        outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
                                                        autoCrop, cropValues, inputWidth, inputHeight);
            }

            // get denoise line
            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            else
            {
                if (xTargetDevice != null && xTargetDevice.BluRay)
                {
                    string       strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
                    x264Settings _xs           = (x264Settings)settings;

                    if (strResolution.Equals("720x480"))
                    {
                        _xs.SampleAR = 4;
                        _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " +
                                      strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("720x576"))
                    {
                        _xs.SampleAR = 5;
                        _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
                    {
                        _xs.SampleAR = 1;
                        _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1440x1080"))
                    {
                        _xs.SampleAR = 2;
                        _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                }
            }

            _log.LogValue("Generated AviSynth script", newScript);
            string strOutputAVSFile;

            if (String.IsNullOrEmpty(indexFile))
            {
                strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
            }
            else
            {
                strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
            }

            try
            {
                StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
                sw.Write(newScript);
                sw.Close();
            }
            catch (Exception i)
            {
                _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }

            JobUtil.GetAllInputProperties(strOutputAVSFile, out ulong numberOfFrames, out double fps, out int fps_n, out int fps_d, out int hres, out int vres, out Dar d, out AviSynthColorspace colorspace);
            _log.LogEvent("resolution: " + hres + "x" + vres);
            _log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
            _log.LogEvent("frames: " + numberOfFrames);
            TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps);

            _log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}",
                                                     (int)(oTime.TotalHours), oTime.Minutes, oTime.Seconds, oTime.Milliseconds));
            _log.LogValue("aspect ratio", d);
            _log.LogValue("color space", colorspace.ToString());

            if (IsJobStopped())
            {
                return("");
            }

            // create qpf file if necessary and possible
            if (job.PostprocessingProperties.ChapterInfo.HasChapters && useChaptersMarks && settings != null && settings is x264Settings)
            {
                fps = (double)fps_n / fps_d;
                string strChapterFile = Path.ChangeExtension(strOutputAVSFile, ".qpf");
                job.PostprocessingProperties.ChapterInfo.ChangeFps(fps);
                if (job.PostprocessingProperties.ChapterInfo.SaveQpfile(strChapterFile))
                {
                    job.PostprocessingProperties.FilesToDelete.Add(strChapterFile);
                    _log.LogValue("qpf file created", strChapterFile);
                    x264Settings xs = (x264Settings)settings;
                    xs.UseQPFile = true;
                    xs.QPFile    = strChapterFile;
                }
            }

            // check if a timestamp file has to be used
            if (!String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xs.TCFile = job.PostprocessingProperties.TimeStampFile;
            }

            return(strOutputAVSFile);
        }
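
        // Illustration (not part of MeGUI): the length logged above is derived from the frame
        // count and the frame rate and printed as HH:MM:SS.mmm. A minimal, self-contained sketch
        // of that formatting; the helper name is hypothetical.
        private static string FormatLength(ulong numberOfFrames, double fps)
        {
            // convert frame count to a duration at the given frame rate
            TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps);
            return string.Format("{0:00}:{1:00}:{2:00}.{3:000}",
                                 (int)oTime.TotalHours, oTime.Minutes, oTime.Seconds, oTime.Milliseconds);
        }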
        /// <summary>
        /// Creates the AviSynth script file.
        /// If the source can be opened, auto-cropping is performed; then, depending on the AR settings,
        /// the proper resolution for automatic resizing is calculated, taking the derived cropping
        /// values into account; finally the AviSynth script is written and its file name returned.
        /// </summary>
        /// <param name="indexFile">the index file of the source</param>
        /// <param name="inputFile">the source video file</param>
        /// <param name="AR">the display aspect ratio to use; null to auto-detect it</param>
        /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
        /// <param name="signalAR">whether AR signalling is to be used for the output
        /// (depending on this parameter, resizing changes to match the source AR)</param>
        /// <param name="_log">the log item to write to</param>
        /// <param name="avsSettings">the AviSynth profile settings</param>
        /// <param name="autoDeint">whether automatic deinterlacing detection is run</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="dar">the display aspect ratio determined for the output</param>
        /// <param name="autoCrop">whether auto-cropping is used for the input</param>
        /// <param name="keepInputResolution">whether the input resolution must be kept unchanged</param>
        /// <param name="useChaptersMarks">whether chapter marks are written to a qpf file</param>
        /// <returns>the name of the AviSynth script created, or an empty string if there was an error</returns>
        private string createAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth,
                                     bool signalAR, LogItem _log, AviSynthSettings avsSettings, bool autoDeint,
                                     VideoCodecSettings settings, out Dar? dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
        {
            dar = null;
            Dar             customDAR;
            IMediaFile      iMediaFile = null;
            IVideoReader    reader;
            PossibleSources oPossibleSource;
            x264Device      xTargetDevice = null;

            int outputWidthIncludingPadding  = 0;
            int outputHeightIncludingPadding = 0;
            int outputWidthCropped           = 0;
            int outputHeightCropped          = 0;

            CropValues cropValues        = new CropValues();
            bool       bAdjustResolution = false;
            bool       bCropped          = false;

            // open index file to retrieve information
            if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
            {
                iMediaFile      = new dgiFile(indexFile);
                oPossibleSource = PossibleSources.dgi;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
            {
                iMediaFile      = new d2vFile(indexFile);
                oPossibleSource = PossibleSources.d2v;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA)
            {
                iMediaFile      = new dgaFile(indexFile);
                oPossibleSource = PossibleSources.dga;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
            {
                iMediaFile      = new ffmsFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.ffindex;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
            {
                string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
                iMediaFile      = AvsFile.ParseScript(tempAvs);
                oPossibleSource = PossibleSources.directShow;
            }
            else
            {
                iMediaFile      = AvsFile.OpenScriptFile(inputFile);
                oPossibleSource = PossibleSources.avs;
            }
            reader = iMediaFile.GetVideoReader();

            // abort if the index file is invalid
            if (reader.FrameCount < 1)
            {
                _log.Error("There are 0 frames in the index file. Aborting...");
                return("");
            }

            if (AR == null)
            {
                // AR needs to be detected automatically now
                _log.LogValue("Auto-detect aspect ratio", AR == null);
                customDAR = iMediaFile.VideoInfo.DAR;
                if (customDAR.ar <= 0)
                {
                    customDAR = Dar.ITU16x9PAL;
                    _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value;
            }
            _log.LogValue("Aspect ratio", customDAR);

            // check x264 settings (target device, chapter file)
            if (settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xTargetDevice = xs.TargetDevice;

                // create qpf file if necessary
                if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks)
                {
                    qpfile = job.PostprocessingProperties.ChapterFile;
                    if ((Path.GetExtension(qpfile).ToLower(System.Globalization.CultureInfo.InvariantCulture)) == ".txt")
                    {
                        qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS);
                    }
                    if (File.Exists(qpfile))
                    {
                        xs.UseQPFile = true;
                        xs.QPFile    = qpfile;
                    }
                }
            }

            // if encoding for a specific device select the appropriate resolution setting
            if (xTargetDevice != null && xTargetDevice.Width > 0 && xTargetDevice.Height > 0)
            {
                if (keepInputResolution)
                {
                    // resolution should not be changed - use input resolution
                    outputWidthCropped  = (int)iMediaFile.VideoInfo.Width;
                    outputHeightCropped = (int)iMediaFile.VideoInfo.Height;
                }
                else
                {
                    // crop input video if selected
                    if (autoCrop)
                    {
                        if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
                        {
                            _log.Error("Autocrop failed. Aborting...");
                            return("");
                        }
                        bCropped = true;
                    }

                    outputWidthCropped  = desiredOutputWidth;
                    outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width,
                                                                       (double)customDAR.ar, cropValues, outputWidthCropped, signalAR,
                                                                       mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                    dar = null;
                }

                if (xTargetDevice.Width < outputWidthCropped)
                {
                    // width must be lowered to be target conform
                    bAdjustResolution = true;
                    if (keepInputResolution)
                    {
                        keepInputResolution = false;
                        _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution width of "
                                      + outputWidthCropped + ". The maximum value is " + xTargetDevice.Width + ".");
                    }
                }
                else if (xTargetDevice.Height < outputHeightCropped)
                {
                    // height must be lowered to be target conform
                    bAdjustResolution = true;
                    if (keepInputResolution)
                    {
                        keepInputResolution = false;
                        _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution height of "
                                      + outputHeightCropped + ". The maximum value is " + xTargetDevice.Height + ".");
                    }
                }
                else if (xTargetDevice.BluRay)
                {
                    string strResolution = outputWidthCropped + "x" + outputHeightCropped;
                    if (!strResolution.Equals("1920x1080") &&
                        !strResolution.Equals("1440x1080") &&
                        !strResolution.Equals("1280x720") &&
                        !strResolution.Equals("720x576") &&
                        !strResolution.Equals("720x480"))
                    {
                        bAdjustResolution = true;
                        if (keepInputResolution)
                        {
                            keepInputResolution = false;
                            _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution of "
                                          + outputWidthCropped + "x" + outputHeightCropped
                                          + ". Supported are 1920x1080, 1440x1080, 1280x720, 720x576 and 720x480.");
                        }
                    }
                    else
                    {
                        outputWidthIncludingPadding  = outputWidthCropped;
                        outputHeightIncludingPadding = outputHeightCropped;
                    }
                }

                if (bAdjustResolution)
                {
                    if (!autoCrop)
                    {
                        autoCrop = true;
                        _log.LogEvent("Enabling \"AutoCrop\"");
                    }
                }
            }
            else
            {
                outputWidthCropped = desiredOutputWidth;
            }

            if (!keepInputResolution && autoCrop && !bCropped)
            {
                // crop input video if required
                if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
                {
                    _log.Error("Autocrop failed. Aborting...");
                    return("");
                }
                bCropped = true;
            }

            if (bAdjustResolution)
            {
                // adjust horizontal resolution as width or height are too large
                if (xTargetDevice.BluRay)
                {
                    if (outputWidthCropped >= 1920)
                    {
                        outputWidthCropped           = 1920;
                        outputHeightIncludingPadding = 1080;
                        _log.LogEvent("Force resolution of 1920x1080 as required for " + xTargetDevice.Name);
                    }
                    else if (outputWidthCropped >= 1280)
                    {
                        outputWidthCropped           = 1280;
                        outputHeightIncludingPadding = 720;
                        _log.LogEvent("Force resolution of 1280x720 as required for " + xTargetDevice.Name);
                    }
                    else
                    {
                        outputWidthCropped = 720;
                        Double dfps = Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D;
                        if (dfps == 25)
                        {
                            outputHeightIncludingPadding = 576;
                            _log.LogEvent("Force resolution of 720x576 as required for " + xTargetDevice.Name);
                        }
                        else
                        {
                            outputHeightIncludingPadding = 480;
                            _log.LogEvent("Force resolution of 720x480 as required for " + xTargetDevice.Name);
                        }
                    }
                    outputWidthIncludingPadding = outputWidthCropped;
                }
                else if (outputWidthCropped > xTargetDevice.Width)
                {
                    outputWidthCropped = xTargetDevice.Width;
                    _log.LogEvent("Set resolution width to " + outputWidthCropped + " as required for " + xTargetDevice.Name);
                }

                // adjust cropped vertical resolution
                outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                                                                   cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                while (outputHeightCropped > xTargetDevice.Height || (xTargetDevice.BluRay && outputHeightCropped > outputHeightIncludingPadding))
                {
                    outputWidthCropped -= 16;
                    outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                                                                       cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                }
            }

            if (keepInputResolution)
            {
                outputWidthCropped  = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
                outputHeightCropped = outputHeightIncludingPadding = (int)iMediaFile.VideoInfo.Height;
                dar = customDAR;
            }
            else if (xTargetDevice == null || (xTargetDevice != null && !xTargetDevice.BluRay))
            {
                // Minimise upsizing
                int sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width - cropValues.right - cropValues.left;
                if (autoCrop)
                {
                    sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width;
                }

                if (outputWidthCropped > sourceHorizontalResolution)
                {
                    if (avsSettings.Mod16Method == mod16Method.resize)
                    {
                        while (outputWidthCropped > sourceHorizontalResolution + 16)
                        {
                            outputWidthCropped -= 16;
                        }
                    }
                    else
                    {
                        outputWidthCropped = sourceHorizontalResolution;
                    }
                }
            }

            // calculate height
            if (!keepInputResolution)
            {
                outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                                                                   cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            }

            // set complete padding if required
            if (outputHeightIncludingPadding == 0 && outputWidthIncludingPadding > 0)
            {
                outputHeightIncludingPadding = outputHeightCropped;
            }
            if (outputWidthIncludingPadding == 0 && outputHeightIncludingPadding > 0)
            {
                outputWidthIncludingPadding = outputWidthCropped;
            }

            // write calculated output resolution into the log
            _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height);
            if (autoCrop && !keepInputResolution && cropValues.isCropped())
            {
                _log.LogValue("Autocrop values", cropValues);
                _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            else
            {
                _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            if (outputWidthIncludingPadding > 0 && (outputWidthIncludingPadding != outputWidthCropped || outputHeightIncludingPadding != outputHeightCropped))
            {
                _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
            }

            if (outputWidthCropped <= 0 || outputHeightCropped <= 0)
            {
                _log.Error("Error in detection of output resolution");
                return("");
            }

            // generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2);
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            _log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                raiseEvent("Automatic deinterlacing...   ***PLEASE WAIT***");
                string         d2vPath = indexFile;
                SourceDetector sd      = new SourceDetector(inputLine, d2vPath, false,
                                                            mainForm.Settings.SourceDetectorSettings,
                                                            new UpdateSourceDetectionStatus(analyseUpdate),
                                                            new FinishedAnalysis(finishedAnalysis));
                finished = false;
                sd.analyse();
                waitTillAnalyseFinished();
                sd.stop();
                deinterlaceLines = filters[0].Script;
                if (interlaced)
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
                }
                else
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines);
                }
            }

            raiseEvent("Finalizing preprocessing...   ***PLEASE WAIT***");
            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2);
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            if (!keepInputResolution && autoCrop)
            {
                cropLine = ScriptServer.GetCropLine(true, cropValues);
            }

            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

            if (!keepInputResolution)
            {
                resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped,
                                                        outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
                                                        autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height);
            }

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            else
            {
                if (xTargetDevice != null && xTargetDevice.BluRay)
                {
                    string       strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
                    x264Settings _xs           = (x264Settings)settings;

                    if (strResolution.Equals("720x480"))
                    {
                        _xs.SampleAR = 4;
                        _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " +
                                      strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("720x576"))
                    {
                        _xs.SampleAR = 5;
                        _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
                    {
                        _xs.SampleAR = 1;
                        _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1440x1080"))
                    {
                        _xs.SampleAR = 2;
                        _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                }
            }

            _log.LogValue("Generated Avisynth script", newScript);
            string strOutputAVSFile;

            if (String.IsNullOrEmpty(indexFile))
            {
                strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
            }
            else
            {
                strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
            }

            try
            {
                StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
                sw.Write(newScript);
                sw.Close();
            }
            catch (IOException i)
            {
                _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }
            return(strOutputAVSFile);
        }
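
        // Illustration (not part of MeGUI): the "minimise upsizing" step above lowers the target
        // width until it no longer exceeds the (cropped) source width, either by clamping directly
        // or in mod-16 steps when the mod16 resize method is active. A self-contained sketch of
        // that rule; the helper name is hypothetical.
        private static int LimitWidthToSource(int targetWidth, int sourceWidth, bool mod16Resize)
        {
            if (targetWidth <= sourceWidth)
                return targetWidth;                 // no upsizing requested
            if (!mod16Resize)
                return sourceWidth;                 // simply clamp to the source width
            while (targetWidth > sourceWidth + 16)  // reduce in mod-16 steps, allowing a small overshoot
                targetWidth -= 16;
            return targetWidth;
        }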
        /// <summary>
        /// Creates the AviSynth script file.
        /// If the source can be opened, auto-cropping is performed; then, depending on the AR settings,
        /// the proper resolution for automatic resizing is calculated, taking the derived cropping
        /// values into account; finally the AviSynth script is written and its file name returned.
        /// </summary>
        /// <param name="indexFile">the index file of the source</param>
        /// <param name="inputFile">the source video file</param>
        /// <param name="AR">the display aspect ratio to use; null to auto-detect it</param>
        /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
        /// <param name="signalAR">whether AR signalling is to be used for the output
        /// (depending on this parameter, resizing changes to match the source AR)</param>
        /// <param name="_log">the log item to write to</param>
        /// <param name="avsSettings">the AviSynth profile settings</param>
        /// <param name="autoDeint">whether automatic deinterlacing detection is run</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="dar">the display aspect ratio determined for the output</param>
        /// <param name="autoCrop">whether auto-cropping is used for the input</param>
        /// <param name="keepInputResolution">whether the input resolution must be kept unchanged</param>
        /// <param name="useChaptersMarks">whether chapter marks are written to a qpf file</param>
        /// <returns>the name of the AviSynth script created, or an empty string if there was an error</returns>
        private string createAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth,
                                     bool signalAR, LogItem _log, AviSynthSettings avsSettings, bool autoDeint,
                                     VideoCodecSettings settings, out Dar? dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
        {
            dar = null;
            Dar             customDAR;
            IMediaFile      iMediaFile = null;
            IVideoReader    reader;
            PossibleSources oPossibleSource;
            x264Device      xTargetDevice = null;
            CropValues      cropValues    = new CropValues();

            int outputWidthIncludingPadding  = 0;
            int outputHeightIncludingPadding = 0;
            int outputWidthCropped           = 0;
            int outputHeightCropped          = 0;

            // open index file to retrieve information
            if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
            {
                iMediaFile      = new dgiFile(indexFile);
                oPossibleSource = PossibleSources.dgi;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
            {
                iMediaFile      = new d2vFile(indexFile);
                oPossibleSource = PossibleSources.d2v;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA)
            {
                iMediaFile      = new dgaFile(indexFile);
                oPossibleSource = PossibleSources.dga;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
            {
                iMediaFile      = new ffmsFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.ffindex;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
            {
                iMediaFile      = new lsmashFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.lsmash;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
            {
                string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
                iMediaFile      = AvsFile.ParseScript(tempAvs);
                oPossibleSource = PossibleSources.directShow;
            }
            else
            {
                iMediaFile      = AvsFile.OpenScriptFile(inputFile);
                oPossibleSource = PossibleSources.avs;
            }
            reader = iMediaFile.GetVideoReader();

            // abort if the index file is invalid
            if (reader.FrameCount < 1)
            {
                _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
                return("");
            }

            if (AR == null)
            {
                // AR needs to be detected automatically now
                _log.LogValue("Auto-detect aspect ratio", AR == null);
                customDAR = iMediaFile.VideoInfo.DAR;
                if (customDAR.AR <= 0)
                {
                    customDAR = Dar.ITU16x9PAL;
                    _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value;
            }
            _log.LogValue("Aspect ratio", customDAR);

            // check x264 settings (target device, chapter file)
            if (settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xTargetDevice = xs.TargetDevice;
                _log.LogValue("Target device", xTargetDevice.Name);

                // create qpf file if necessary
                if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks)
                {
                    qpfile = job.PostprocessingProperties.ChapterFile;
                    if ((Path.GetExtension(qpfile).ToLowerInvariant()) == ".txt")
                    {
                        qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS);
                    }
                    if (File.Exists(qpfile))
                    {
                        xs.UseQPFile = true;
                        xs.QPFile    = qpfile;
                    }
                }
            }

            // get mod value for resizing
            int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

            // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
            if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
            {
                _log.Error("Autocrop failed. Aborting...");
                return("");
            }

            Dar? suggestedDar = null;

            if (desiredOutputWidth == 0)
            {
                desiredOutputWidth = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
            }
            else if (!avsSettings.Upsize && desiredOutputWidth > (int)iMediaFile.VideoInfo.Width)
            {
                outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
            }
            else
            {
                outputWidthIncludingPadding = desiredOutputWidth;
            }
            CropValues paddingValues;

            bool resizeEnabled;
            int  outputWidthWithoutUpsizing = outputWidthIncludingPadding;

            if (avsSettings.Upsize)
            {
                resizeEnabled = !keepInputResolution;
                CropValues cropValuesTemp = cropValues.Clone();
                int        outputHeightIncludingPaddingTemp = 0;
                Resolution.GetResolution((int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height, customDAR,
                                         ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true,
                                         avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D,
                                         ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log);
            }

            resizeEnabled = !keepInputResolution;
            Resolution.GetResolution((int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height, customDAR,
                                     ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true,
                                     avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D,
                                     ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log);
            keepInputResolution = !resizeEnabled;
            if ((keepInputResolution || signalAR) && suggestedDar.HasValue)
            {
                dar = suggestedDar;
            }

            // log calculated output resolution
            outputWidthCropped  = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
            outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
            _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height);
            _log.LogValue("Desired maximum width", desiredOutputWidth);
            if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
            {
                _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
            }
            if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
            {
                _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
            }
            if (cropValues.isCropped())
            {
                _log.LogValue("Autocrop values", cropValues);
                _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            else
            {
                _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            if (paddingValues.isCropped())
            {
                _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
            }

            // generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2);
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            _log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                raiseEvent("Automatic deinterlacing...   ***PLEASE WAIT***");
                string d2vPath = indexFile;
                _sourceDetector = new SourceDetector(inputLine, d2vPath, false,
                                                     mainForm.Settings.SourceDetectorSettings,
                                                     new UpdateSourceDetectionStatus(analyseUpdate),
                                                     new FinishedAnalysis(finishedAnalysis));
                finished = false;
                _sourceDetector.analyse();
                waitTillAnalyseFinished();
                _sourceDetector.stop();
                _sourceDetector  = null;
                deinterlaceLines = filters[0].Script;
                if (interlaced)
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
                }
                else
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines);
                }
            }

            raiseEvent("Finalizing preprocessing...   ***PLEASE WAIT***");
            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2);
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            if (!keepInputResolution && autoCrop)
            {
                cropLine = ScriptServer.GetCropLine(true, cropValues);
            }

            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

            if (!keepInputResolution)
            {
                resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped,
                                                        outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
                                                        autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height);
            }

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            else
            {
                if (xTargetDevice != null && xTargetDevice.BluRay)
                {
                    string       strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
                    x264Settings _xs           = (x264Settings)settings;

                    if (strResolution.Equals("720x480"))
                    {
                        _xs.SampleAR = 4;
                        _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " +
                                      strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("720x576"))
                    {
                        _xs.SampleAR = 5;
                        _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
                    {
                        _xs.SampleAR = 1;
                        _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1440x1080"))
                    {
                        _xs.SampleAR = 2;
                        _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                }
            }

            _log.LogValue("Generated Avisynth script", newScript);
            string strOutputAVSFile;

            if (String.IsNullOrEmpty(indexFile))
            {
                strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
            }
            else
            {
                strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
            }

            try
            {
                StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
                sw.Write(newScript);
                sw.Close();
            }
            catch (IOException i)
            {
                _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }
            return(strOutputAVSFile);
        }
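
        // Illustration (not part of MeGUI): the Blu-ray branch in each method above picks one of
        // x264's predefined --sar presets via x264Settings.SampleAR (4 = 10:11, 5 = 12:11,
        // 1 = 1:1, 2 = 4:3). A minimal, self-contained sketch of that mapping with a hypothetical
        // helper name; it only restates the values used above.
        private static bool TrySelectBluRaySar(int width, int height, out int sampleAR, out string sarText)
        {
            switch (width + "x" + height)
            {
                case "720x480":   sampleAR = 4; sarText = "10:11"; return true; // NTSC SD
                case "720x576":   sampleAR = 5; sarText = "12:11"; return true; // PAL SD
                case "1280x720":
                case "1920x1080": sampleAR = 1; sarText = "1:1";   return true; // square-pixel HD
                case "1440x1080": sampleAR = 2; sarText = "4:3";   return true; // anamorphic 1080
                default:          sampleAR = 0; sarText = null;    return false; // not Blu-ray conform
            }
        }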