Code Example #1
File: ProfileManager.cs Project: paulyc/megui
 public static void FixFileNames(Profile profile, Dictionary <string, string> substitutionTable)
 {
     if (profile is GenericProfile <VideoCodecSettings> )
     {
         GenericProfile <VideoCodecSettings> vProf = profile as GenericProfile <VideoCodecSettings>;
         if (vProf.Settings is x264Settings)
         {
             x264Settings xSettings = vProf.Settings as x264Settings;
             if (xSettings.QuantizerMatrixType == 2) // CQM
             {
                 if (substitutionTable.ContainsKey(xSettings.QuantizerMatrix))
                 {
                     xSettings.QuantizerMatrix = substitutionTable[xSettings.QuantizerMatrix];
                 }
             }
         }
         if (vProf.Settings is xvidSettings)
         {
             xvidSettings xSettings = vProf.Settings as xvidSettings;
             if (xSettings.QuantType == 2) // CQM
             {
                 if (substitutionTable.ContainsKey(xSettings.CustomQuantizerMatrix))
                 {
                     xSettings.CustomQuantizerMatrix = substitutionTable[xSettings.CustomQuantizerMatrix];
                 }
             }
         }
     }
 }
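A minimal usage sketch, assuming the method lives on a ProfileManager class as the file name suggests; the profile variable and matrix paths below are illustrative, not taken from MeGUI:

    // Hypothetical call site: remap a custom quantizer matrix (CQM) file that has moved.
    var substitutionTable = new Dictionary<string, string>
    {
        { @"C:\old\matrix.cfg", @"D:\matrices\matrix.cfg" }    // old path -> new path (example values)
    };
    ProfileManager.FixFileNames(videoProfile, substitutionTable);  // rewrites the stored CQM path in x264/xvid settings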
Code Example #2
 /// <summary>
 /// Check functions to verify elements of the level
 /// </summary>
 /// <param name="level"></param>
 /// <param name="settings"></param>
 /// <returns>true if the settings are compliant with the level</returns>
 private bool checkP4x4Enabled(int level, x264Settings settings)
 {
     //if (level != 15 && (level > 7 || (level == 7 && settings.NbBframes != 0)))
     //    return false;
     //else
     return(true);
 }
Code Example #3
        private double pictureBufferSize(x264Settings settings, double bytesInUncompressedFrame)
        {
            double decodedPictureBufferSizeTestValue = 0;

            if (settings != null)
            {
                decodedPictureBufferSizeTestValue = bytesInUncompressedFrame * Math.Min(16, settings.NbRefFrames);
            }
            return(decodedPictureBufferSizeTestValue);
        }
Code Example #4
File: AVCLevels.cs Project: huannguyenfit/MeGUI
        public void Run(MainForm info)
        {
            if (info.Video.VideoInput.Equals(""))
            {
                MessageBox.Show("You first need to load an AviSynth script", "No video configured",
                                MessageBoxButtons.OK, MessageBoxIcon.Warning);
                return;
            }

            bool succ;
            int  hRes, vRes;

            MeGUI.core.util.Dar d;
            ulong  nbFrames;
            double framerate;

            AVCLevels.Levels? compliantLevel     = null;
            x264Settings     currentX264Settings = (x264Settings)MainForm.Instance.Profiles.GetCurrentSettings("x264");

            if (JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out d, info.Video.VideoInput))
            {
                AVCLevels al = new AVCLevels();
                succ = al.validateAVCLevel(hRes, vRes, framerate, currentX264Settings, out compliantLevel);
            }
            else
            {
                succ = false;
            }

            if (succ)
            {
                MessageBox.Show("This file matches the criteria for the level chosen", "Video validated",
                                MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
            else
            {
                if (compliantLevel == null)
                {
                    MessageBox.Show("Unable to open video", "Test failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
                else
                {
                    AVCLevels al      = new AVCLevels();
                    string    message = "This video source cannot be encoded to comply with the chosen level.\n"
                                        + "You need at least Level " + AVCLevels.GetLevelText((AVCLevels.Levels)compliantLevel) + " for this source. Do you want\n"
                                        + "to increase the level automatically now?";
                    DialogResult dr = MessageBox.Show(message, "Test failed", MessageBoxButtons.YesNo,
                                                      MessageBoxIcon.Question);
                    if (dr == DialogResult.Yes)
                    {
                        currentX264Settings.AVCLevel = (AVCLevels.Levels)compliantLevel;
                    }
                }
            }
        }
Code Example #5
File: AVCLevels.cs Project: huannguyenfit/MeGUI
 /// <summary>
 /// Check functions to verify elements of the level
 /// </summary>
 /// <param name="level"></param>
 /// <param name="settings"></param>
 /// <returns>true if the settings are compliant with the level</returns>
 private bool checkP4x4Enabled(int level, x264Settings settings)
 {
     if (level != 15 && (level > 7 || (level == 7 && settings.NbBframes != 0)))
     {
         return(false);
     }
     else
     {
         return(true);
     }
 }
Code Example #6
File: AVCLevels.cs Project: huannguyenfit/MeGUI
 private bool checkP4x4(int level, x264Settings settings)
 {
     if (!checkP4x4Enabled(level, settings))
     {
         if (settings.P4x4mv)
         {
             return(false);
         }
     }
     return(true);
 }
Code Example #7
File: AVCLevels.cs Project: huannguyenfit/MeGUI
 private bool checkMaxDPB(int level, x264Settings settings, double bytesInUncompressedFrame)
 {
     if (pictureBufferSize(settings, bytesInUncompressedFrame) > this.getMaxDPB(level))
     {
         return(false);
     }
     else
     {
         return(true);
     }
 }
Code Example #8
File: AVCLevels.cs Project: huannguyenfit/MeGUI
        /// <summary>
        /// Verifies a group of x264Settings against an AVC Level
        /// </summary>
        /// <param name="settings">the x264Settings to test</param>
        /// <param name="avcLevel">the level to check against</param>
        /// <param name="bytesInUncompressedFrame">Number of bytes in an uncompressed frame</param>
        /// <returns>   0 if the settings are compliant with the level
        ///             1 if (level > 3 || level = 3 AND Bframes > 0)
        ///             2 if maxDPB violated</returns>
        public int Verifyx264Settings(x264Settings settings, AVCLevels.Levels avcLevel, double bytesInUncompressedFrame)
        {
            if (!this.checkP4x4(avcLevel, settings))
            {
                return(1);
            }

            if (!this.checkMaxDPB(avcLevel, settings, bytesInUncompressedFrame))
            {
                return(2);
            }

            return(0);
        }
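A hedged caller sketch for the return codes; settings, chosenLevel and bytesInUncompressedFrame are assumed to already be in scope and are not part of the code above:

    // Hypothetical usage: 0 = compliant, 1 = P4x4 check failed, 2 = MaxDPB exceeded.
    AVCLevels avcLevels = new AVCLevels();
    int result = avcLevels.Verifyx264Settings(settings, chosenLevel, bytesInUncompressedFrame);
    switch (result)
    {
        case 1:  /* P4x4 partitions are not allowed at this level with the current B-frame settings */ break;
        case 2:  /* the decoded picture buffer exceeds the level's MaxDPB limit */ break;
        default: /* the settings are compliant with the chosen level */ break;
    }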
Code Example #9
File: x264Encoder.cs Project: huannguyenfit/MeGUI
        private static string generateX264CLICommandlineStart(x264Settings xs)
        {
            StringBuilder sb = new StringBuilder();

            switch (xs.EncodingMode)
            {
            case 0:     // ABR
                sb.Append("--bitrate " + xs.BitrateQuantizer + " ");
                break;

            case 1:     // CQ
                if (xs.Lossless)
                {
                    sb.Append("--qp 0 ");
                }
                else
                {
                    sb.Append("--qp " + xs.QuantizerCRF.ToString(new CultureInfo("en-us")) + " ");
                }
                break;

            case 2:     // 2 pass first pass
                sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 3:     // 2 pass second pass
            case 4:     // automated twopass
                sb.Append("--pass 2 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 5:     // 3 pass first pass
                sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 6:     // 3 pass 2nd pass
                sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 7:     // 3 pass 3rd pass
            case 8:     // automated threepass, show third pass options
                sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 9:     // constant quality
                sb.Append("--crf " + xs.QuantizerCRF.ToString(new CultureInfo("en-us")) + " ");
                break;
            } // now add the rest of the x264 encoder options
            return(sb.ToString());
        }
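For illustration only (not captured MeGUI output): with EncodingMode == 9 and QuantizerCRF == 20 this helper would start the command line with roughly --crf 20, while a two-pass first pass (EncodingMode == 2) would start it with --pass 1 --bitrate <kbps> --stats "<logfile>".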
Code Example #10
File: x264Encoder.cs Project: huannguyenfit/MeGUI
        private static string generateX264CLICommandlineEnd(x264Settings xs, string input, string output)
        {
            StringBuilder sb = new StringBuilder();

            //add the rest of the mencoder commandline regarding the output
            if (xs.EncodingMode == 2 || xs.EncodingMode == 5)
            {
                sb.Append("--output NUL ");
            }
            else
            {
                sb.Append("--output " + "\"" + output + "\" ");
            }
            sb.Append("\"" + input + "\" ");
            return(sb.ToString());
        }
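For illustration only, assuming input.avs and output.264 as file names: a first pass (EncodingMode 2 or 5) ends the command line with --output NUL "input.avs", discarding the encode and keeping only the stats file, while every other mode ends it with --output "output.264" "input.avs".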
Code Example #11
File: JobUtil.cs Project: huannguyenfit/MeGUI
        /// <summary>
        /// validates a source against a given AVC level taking into account the rest of the configuration
        /// </summary>
        /// <param name="source">the source to be validated</param>
        /// <param name="settings">the x264 settings (level, b-frames, reference frames) to validate against</param>
        /// <param name="compliantLevel">the first avc level that can be used to encode this source</param>
        /// <returns>whether or not the current level is okay, if false and compliantLevel is -1,
        /// the source could not be read</returns>
        public bool validateAVCLevel(string source, x264Settings settings, out int compliantLevel)
        {
            int    hRes, vRes;
            Dar    d;
            ulong  nbFrames;
            double framerate;

            compliantLevel = -1;
            if (GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out d, source))
            {
                return(this.al.validateAVCLevel(hRes, vRes, framerate, settings, out compliantLevel));
            }
            else
            {
                return(false);
            }
        }
Code Example #12
File: AVCLevels.cs Project: huannguyenfit/MeGUI
        private double pictureBufferSize(x264Settings settings, double bytesInUncompressedFrame)
        {
            double decodedPictureBufferSizeTestValue = 0;

            if (settings.BFramePyramid)
            {
                decodedPictureBufferSizeTestValue = bytesInUncompressedFrame * Math.Min(16, settings.NbRefFrames + 2);
            }
            else
            if (settings.NbBframes > 0)
            {
                decodedPictureBufferSizeTestValue = bytesInUncompressedFrame * Math.Min(16, settings.NbRefFrames + 1);
            }
            else
            {
                decodedPictureBufferSizeTestValue = bytesInUncompressedFrame * Math.Min(16, settings.NbRefFrames);
            }
            return(decodedPictureBufferSizeTestValue);
        }
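A rough worked example of the value this computes, assuming an 8-bit 4:2:0 1080p source (about 1.5 bytes per pixel, an assumption about how bytesPerFrame is derived rather than a value taken from MeGUI) with 4 reference frames and B-frames but no pyramid:

    double bytesInUncompressedFrame = 1920 * 1080 * 1.5;          // 4:2:0, 8-bit: ~1.5 bytes per pixel (assumed)
    double dpb = bytesInUncompressedFrame * Math.Min(16, 4 + 1);  // B-frames without pyramid: references + 1 pictures
    // dpb = 15,552,000 bytes (~14.8 MiB), the figure later compared against getMaxDPB(level)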
Code Example #13
File: AVCLevels.cs Project: huannguyenfit/MeGUI
 public void Run(MainForm info)
 {
     if (info.Video.VideoInput.Equals(""))
     {
         MessageBox.Show("You first need to load an AviSynth script", "No video configured",
                         MessageBoxButtons.OK, MessageBoxIcon.Warning);
     }
     else
     {
         int          compliantLevel      = 15;
         x264Settings currentX264Settings = (x264Settings)MainForm.Instance.Profiles.GetCurrentSettings("x264");
         bool         succ = info.JobUtil.validateAVCLevel(info.Video.VideoInput, currentX264Settings, out compliantLevel);
         if (succ)
         {
             MessageBox.Show("This file matches the criteria for the level chosen", "Video validated",
                             MessageBoxButtons.OK);
         }
         else
         {
             if (compliantLevel == -1)
             {
                 MessageBox.Show("Unable to open video", "Test failed", MessageBoxButtons.OK);
             }
             else
             {
                 AVCLevels al            = new AVCLevels();
                 string[]  levels        = al.getLevels();
                 string    levelRequired = levels[compliantLevel];
                 string    message       = "This video source cannot be encoded to comply with the chosen level.\n"
                                           + "You need at least level " + levelRequired + " for this source. Do you want\n"
                                           + "to increase the level automatically now?";
                 DialogResult dr = MessageBox.Show(message, "Test failed", MessageBoxButtons.YesNo,
                                                   MessageBoxIcon.Question);
                 if (dr == DialogResult.Yes)
                 {
                     currentX264Settings.Level = compliantLevel;
                 }
             }
         }
     }
 }
Code Example #14
File: AVCLevels.cs Project: huannguyenfit/MeGUI
        /// <summary>
        /// validates a source against a given AVC level taking into account the source properties and the x264 settings
        /// </summary>
        /// <param name="hRes">horizontal resolution of the source</param>
        /// <param name="vRes">vertical resolution of the source</param>
        /// <param name="framerate">framerate of the source</param>
        /// <param name="settings">the codec config to test</param>
        /// <param name="compliantLevel">the first avc level that can be used to encode this source</param>
        /// <returns>whether or not the current level is okay; if false, compliantLevel contains
        /// the first level the settings would comply with</returns>
        public bool validateAVCLevel(int hRes, int vRes, double framerate, x264Settings settings, out int compliantLevel)
        {
            settings = (x264Settings)settings.Clone(); //Otherwise this sets it to the lowest compliant level anyway.
            const int unrestricted = 15;               // maybe this should be set as a global constant

            compliantLevel = unrestricted;
            if (settings.Level == unrestricted) // 15 = unrestricted
            {
                return(true);
            }

            int    FrameSize            = (int)maxFS(hRes, vRes);
            int    MBPS                 = maxBPS(hRes, vRes, framerate);
            int    hBlocks              = macroblocks(hRes);
            int    vBlocks              = macroblocks(vRes);
            double bufferSize           = pictureBufferSize(settings, bytesPerFrame(hRes, vRes));
            int    allowableBPS         = this.getMaxMBPS(settings.Level);
            int    allowableFS          = this.getMaxFS(settings.Level);
            double dimensionRestriction = Math.Ceiling(Math.Sqrt((double)(allowableFS) * 8));
            double allowableDPB         = this.getMaxDPB(settings.Level);

            if (allowableBPS >= MBPS && allowableFS >= FrameSize && allowableDPB >= bufferSize &&
                dimensionRestriction >= hBlocks && dimensionRestriction >= vBlocks)
            {
                return(true);
            }
            else
            {
                while (settings.Level < unrestricted && (allowableBPS < MBPS || allowableFS < FrameSize ||
                                                         allowableDPB < bufferSize || dimensionRestriction < hBlocks || dimensionRestriction < vBlocks))
                {
                    settings.Level       = settings.Level + 1;
                    allowableBPS         = this.getMaxMBPS(settings.Level);
                    allowableFS          = this.getMaxFS(settings.Level);
                    dimensionRestriction = Math.Ceiling(Math.Sqrt((double)(allowableFS) * 8));
                    allowableDPB         = this.getMaxDPB(settings.Level);
                }
                compliantLevel = settings.Level;
                return(false);
            }
        }
Code Example #15
File: AVCLevels.cs Project: pphh77/MeGui
        /// <summary>
        /// validates a source against a given AVC level taking into account the source properties and the x264 settings
        /// </summary>
        /// <param name="hRes">horizontal resolution of the source</param>
        /// <param name="vRes">vertical resolution of the source</param>
        /// <param name="framerate">framerate of the source</param>
        /// <param name="settings">the codec config to test</param>
        /// <param name="compliantLevel">the first avc level that can be used to encode this source</param>
        /// <returns>whether or not the current level is okay; if false, compliantLevel contains
        /// the first level the settings would comply with</returns>
        public bool validateAVCLevel(int hRes, int vRes, double framerate, x264Settings settings, out AVCLevels.Levels? compliantLevel)
        {
            settings       = (x264Settings)settings.Clone(); // otherwise this sets it to the lowest compliant level anyway.
            compliantLevel = Levels.L_UNRESTRICTED;
            if (settings.AVCLevel == Levels.L_UNRESTRICTED)
            {
                return(true);
            }

            int    FrameSize            = (int)maxFS(hRes, vRes);
            int    MBPS                 = maxBPS(hRes, vRes, framerate);
            int    hBlocks              = macroblocks(hRes);
            int    vBlocks              = macroblocks(vRes);
            double bufferSize           = pictureBufferSize(settings, bytesPerFrame(hRes, vRes));
            int    allowableBPS         = this.getMaxMBPS(settings.AVCLevel);
            int    allowableFS          = this.getMaxFS(settings.AVCLevel);
            double dimensionRestriction = Math.Ceiling(Math.Sqrt((double)(allowableFS) * 8));
            double allowableDPB         = this.getMaxDPB(settings.AVCLevel) * 3 / 8 * 1024;

            if (allowableBPS >= MBPS && allowableFS >= FrameSize && allowableDPB >= bufferSize &&
                dimensionRestriction >= hBlocks && dimensionRestriction >= vBlocks)
            {
                return(true);
            }
            else
            {
                while (settings.AVCLevel != Levels.L_UNRESTRICTED && (allowableBPS < MBPS || allowableFS < FrameSize ||
                                                                      allowableDPB < bufferSize || dimensionRestriction < hBlocks || dimensionRestriction < vBlocks))
                {
                    settings.AVCLevel    = settings.AVCLevel + 1;
                    allowableBPS         = this.getMaxMBPS(settings.AVCLevel);
                    allowableFS          = this.getMaxFS(settings.AVCLevel);
                    dimensionRestriction = Math.Ceiling(Math.Sqrt((double)(allowableFS) * 8));
                    allowableDPB         = this.getMaxDPB(settings.AVCLevel) * 3 / 8 * 1024;
                }
                compliantLevel = settings.AVCLevel;
                return(false);
            }
        }
Code Example #16
File: AVCLevels.cs Project: huannguyenfit/MeGUI
        /// <summary>
        /// Verifies a group of x264Settings against an AVC Level
        /// </summary>
        /// <param name="settings">the x264Settings to test</param>
        /// <param name="level">the level</param>
        /// <param name="bytesInUncompressedFrame">Number of bytes in an uncompressed frame</param>
        /// <returns>   0 if the settings are compliant with the level
        ///             1 if (level > 3 || level = 3 AND Bframes > 0)
        ///             2 if maxDPB violated
        ///             3 if vbv_maxrate violated
        ///             4 if vbv_bufsize violated</returns>
        public int Verifyx264Settings(x264Settings settings, int level, double bytesInUncompressedFrame)
        {
            if (!this.checkP4x4(level, settings))
            {
                return(1);
            }

            if (!this.checkMaxDPB(level, settings, bytesInUncompressedFrame))
            {
                return(2);
            }

            if (settings.VBVMaxBitrate > this.getMaxBR(level, settings.Profile == 2))
            {
                return(3);
            }

            if (settings.VBVBufferSize > this.getMaxCBP(level, settings.Profile == 2))
            {
                return(4);
            }

            return(0);
        }
Code Example #17
        /// <summary>
        /// creates the AVS Script file
        /// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
        /// the proper resolution for automatic resizing, taking into account the derived cropping values
        /// is calculated, and finally the avisynth script is written and its name returned
        /// </summary>
        /// <param name="indexFile">the index file of the source (e.g. a dgindex project), may be empty</param>
        /// <param name="inputFile">the source video file</param>
        /// <param name="AR">custom display aspect ratio for this source, or null to auto-detect it</param>
        /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
        /// <param name="signalAR">whether or not AR signalling is to be used for the output
        /// (depending on this parameter, resizing changes to match the source AR)</param>
        /// <param name="_log">log item that collects the preprocessing messages</param>
        /// <param name="avsSettings">the AviSynth profile settings to apply</param>
        /// <param name="autoDeint">whether automatic source/deinterlacing detection should run</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="dar">the display aspect ratio to signal to the encoder, if any</param>
        /// <param name="autoCrop">whether or not autoCrop is used for the input</param>
        /// <param name="keepInputResolution">whether the input resolution is kept (no cropping or resizing)</param>
        /// <param name="useChaptersMarks">whether chapter marks are converted to an x264 qp file</param>
        /// <returns>the name of the AviSynth script created, empty if there was an error</returns>
        private string createAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth,
                                     bool signalAR, LogItem _log, AviSynthSettings avsSettings, bool autoDeint,
                                     VideoCodecSettings settings, out Dar? dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
        {
            dar = null;
            Dar             customDAR;
            IMediaFile      iMediaFile = null;
            IVideoReader    reader;
            PossibleSources oPossibleSource;
            x264Device      xTargetDevice = null;
            CropValues      cropValues    = new CropValues();

            int outputWidthIncludingPadding  = 0;
            int outputHeightIncludingPadding = 0;
            int outputWidthCropped           = 0;
            int outputHeightCropped          = 0;

            // open index file to retrieve information
            if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
            {
                iMediaFile      = new dgiFile(indexFile);
                oPossibleSource = PossibleSources.dgi;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
            {
                iMediaFile      = new d2vFile(indexFile);
                oPossibleSource = PossibleSources.d2v;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA)
            {
                iMediaFile      = new dgaFile(indexFile);
                oPossibleSource = PossibleSources.dga;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
            {
                iMediaFile      = new ffmsFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.ffindex;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
            {
                iMediaFile      = new lsmashFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.lsmash;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
            {
                string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
                iMediaFile      = AvsFile.ParseScript(tempAvs);
                oPossibleSource = PossibleSources.directShow;
            }
            else
            {
                iMediaFile      = AvsFile.OpenScriptFile(inputFile);
                oPossibleSource = PossibleSources.avs;
            }
            reader = iMediaFile.GetVideoReader();

            // abort if the index file is invalid
            if (reader.FrameCount < 1)
            {
                _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
                return("");
            }

            if (AR == null)
            {
                // AR needs to be detected automatically now
                _log.LogValue("Auto-detect aspect ratio", AR == null);
                customDAR = iMediaFile.VideoInfo.DAR;
                if (customDAR.AR <= 0)
                {
                    customDAR = Dar.ITU16x9PAL;
                    _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value;
            }
            _log.LogValue("Aspect ratio", customDAR);

            // check x264 settings (target device, chapter file)
            if (settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xTargetDevice = xs.TargetDevice;
                _log.LogValue("Target device", xTargetDevice.Name);

                // create qpf file if necessary
                if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks)
                {
                    qpfile = job.PostprocessingProperties.ChapterFile;
                    if ((Path.GetExtension(qpfile).ToLowerInvariant()) == ".txt")
                    {
                        qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS);
                    }
                    if (File.Exists(qpfile))
                    {
                        xs.UseQPFile = true;
                        xs.QPFile    = qpfile;
                    }
                }
            }

            // get mod value for resizing
            int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

            // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
            if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
            {
                _log.Error("Autocrop failed. Aborting...");
                return("");
            }

            Dar? suggestedDar = null;

            if (desiredOutputWidth == 0)
            {
                desiredOutputWidth = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
            }
            else if (!avsSettings.Upsize && desiredOutputWidth > (int)iMediaFile.VideoInfo.Width)
            {
                outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
            }
            else
            {
                outputWidthIncludingPadding = desiredOutputWidth;
            }
            CropValues paddingValues;

            bool resizeEnabled;
            int  outputWidthWithoutUpsizing = outputWidthIncludingPadding;

            if (avsSettings.Upsize)
            {
                resizeEnabled = !keepInputResolution;
                CropValues cropValuesTemp = cropValues.Clone();
                int        outputHeightIncludingPaddingTemp = 0;
                Resolution.GetResolution((int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height, customDAR,
                                         ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true,
                                         avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D,
                                         ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log);
            }

            resizeEnabled = !keepInputResolution;
            Resolution.GetResolution((int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height, customDAR,
                                     ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true,
                                     avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D,
                                     ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log);
            keepInputResolution = !resizeEnabled;
            if ((keepInputResolution || signalAR) && suggestedDar.HasValue)
            {
                dar = suggestedDar;
            }

            // log calculated output resolution
            outputWidthCropped  = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
            outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
            _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height);
            _log.LogValue("Desired maximum width", desiredOutputWidth);
            if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
            {
                _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
            }
            if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
            {
                _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
            }
            if (cropValues.isCropped())
            {
                _log.LogValue("Autocrop values", cropValues);
                _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            else
            {
                _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            if (paddingValues.isCropped())
            {
                _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
            }

            // generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2);
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            _log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                raiseEvent("Automatic deinterlacing...   ***PLEASE WAIT***");
                string d2vPath = indexFile;
                _sourceDetector = new SourceDetector(inputLine, d2vPath, false,
                                                     mainForm.Settings.SourceDetectorSettings,
                                                     new UpdateSourceDetectionStatus(analyseUpdate),
                                                     new FinishedAnalysis(finishedAnalysis));
                finished = false;
                _sourceDetector.analyse();
                waitTillAnalyseFinished();
                _sourceDetector.stop();
                _sourceDetector  = null;
                deinterlaceLines = filters[0].Script;
                if (interlaced)
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
                }
                else
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines);
                }
            }

            raiseEvent("Finalizing preprocessing...   ***PLEASE WAIT***");
            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2);
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            if (!keepInputResolution && autoCrop)
            {
                cropLine = ScriptServer.GetCropLine(true, cropValues);
            }

            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

            if (!keepInputResolution)
            {
                resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped,
                                                        outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
                                                        autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height);
            }

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            else
            {
                if (xTargetDevice != null && xTargetDevice.BluRay)
                {
                    string       strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
                    x264Settings _xs           = (x264Settings)settings;

                    if (strResolution.Equals("720x480"))
                    {
                        _xs.SampleAR = 4;
                        _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " +
                                      strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("720x576"))
                    {
                        _xs.SampleAR = 5;
                        _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
                    {
                        _xs.SampleAR = 1;
                        _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1440x1080"))
                    {
                        _xs.SampleAR = 2;
                        _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                }
            }

            _log.LogValue("Generated Avisynth script", newScript);
            string strOutputAVSFile;

            if (String.IsNullOrEmpty(indexFile))
            {
                strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
            }
            else
            {
                strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
            }

            try
            {
                StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
                sw.Write(newScript);
                sw.Close();
            }
            catch (IOException i)
            {
                _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }
            return(strOutputAVSFile);
        }
Code Example #18
/*        private void getAudioStreams(Dictionary<int, string> audioFiles, OneClickWindow.PartialAudioStream[] partialAudioStream, out List<AudioJob> encodableAudioStreams, out List<MuxStream> muxOnlyAudioStreams)
 *      {
 *          muxOnlyAudioStreams = new List<MuxStream>();
 *          encodableAudioStreams = new List<AudioJob>();
 *          int counter = 0;
 *          foreach (OneClickWindow.PartialAudioStream propertiesStream in job.PostprocessingProperties.AudioStreams)
 *          {
 *              counter++; // The track number starts at 1, so we increment right here. This also ensures it will always be incremented
 *
 *              bool error = false;
 *              string input = null, output = null, language = null;
 *              AudioCodecSettings settings = null;
 *              // Input
 *              if (string.IsNullOrEmpty(propertiesStream.input))
 *                  continue; // Here we have an unconfigured stream. Let's just go on to the next one
 *
 *              if (propertiesStream.useExternalInput)
 *                  input = propertiesStream.input;
 *              else if (audioFiles.ContainsKey(propertiesStream.trackNumber))
 *                  input = audioFiles[propertiesStream.trackNumber];
 *              else
 *                  error = true;
 *
 *              // Settings
 *              if (propertiesStream.dontEncode)
 *                  settings = null;
 *              else if (propertiesStream.settings != null)
 *                  settings = propertiesStream.settings;
 *              else
 *                  error = true;
 *
 *              // Output
 *              if (propertiesStream.dontEncode)
 *                  output = input;
 *              else if (!error)
 *                  output = Path.Combine(
 *                      Path.GetDirectoryName(input),
 *                      Path.GetFileNameWithoutExtension(input) + "_" +
 *                      propertiesStream.trackNumber + ".file");
 *
 *              // Language
 *              if (!string.IsNullOrEmpty(propertiesStream.language))
 *                  language = propertiesStream.language;
 *              else
 *                  language = "";
 *
 *              if (error)
 *              {
 *                  logBuilder.AppendFormat("Trouble with audio track {0}. Skipping track...{1}", counter, Environment.NewLine);
 *                  output = null;
 *                  input = null;
 *                  input = null;
 *              }
 *              else
 *              {
 *                  if (propertiesStream.dontEncode)
 *                  {
 *                      MuxStream newStream = new MuxStream();
 *                      newStream.path = input;
 *                      newStream.name = "";
 *                      newStream.language = language;
 *                      muxOnlyAudioStreams.Add(newStream);
 *                  }
 *                  else
 *                  {
 *                      AudioJob encodeStream = new AudioJob();
 *                      encodeStream.Input = input;
 *                      encodeStream.Output = output;
 *                      encodeStream.Settings = settings;
 *                      encodableAudioStreams.Add(encodeStream);
 *                  }
 *              }
 *          }
 *      }*/

        /// <summary>
        /// opens a dgindex script
        /// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
        /// the proper resolution for automatic resizing, taking into account the derived cropping values
        /// is calculated, and finally the avisynth script is written and its name returned
        /// </summary>
        /// <param name="path">dgindex script</param>
        /// <param name="AR">custom display aspect ratio for this source, or null to auto-detect it</param>
        /// <param name="horizontalResolution">desired horizontal resolution of the output</param>
        /// <param name="signalAR">whether or not AR signalling is to be used for the output
        /// (depending on this parameter, resizing changes to match the source AR)</param>
        /// <param name="logBuilder">stringbuilder where to append log messages</param>
        /// <param name="avsSettings">the AviSynth profile settings to apply</param>
        /// <param name="autoDeint">whether automatic source/deinterlacing detection should run</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="dar">the display aspect ratio to signal to the encoder, if any</param>
        /// <returns>the name of the AviSynth script created, empty if there was an error</returns>
        private string openVideo(string path, Dar? AR, int horizontalResolution,
                                 bool signalAR, StringBuilder logBuilder, AviSynthSettings avsSettings, bool autoDeint,
                                 VideoCodecSettings settings, out Dar? dar)
        {
            dar = null;
            IMediaFile   d2v    = new d2vFile(path);
            IVideoReader reader = d2v.GetVideoReader();

            if (reader.FrameCount < 1)
            {
                logBuilder.Append("DGDecode reported 0 frames in this file.\r\nThis is a fatal error.\r\n\r\nPlease recreate the DGIndex project");
                return("");
            }

            //Autocrop
            CropValues final = Autocrop.autocrop(reader);

            if (signalAR)
            {
                if (avsSettings.Mod16Method == mod16Method.overcrop)
                {
                    ScriptServer.overcrop(ref final);
                }
                else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
                {
                    ScriptServer.cropMod4Horizontal(ref final);
                }
            }

            bool error = (final.left == -1);

            if (!error)
            {
                logBuilder.Append("Autocropping successful. Using the following crop values: left: " + final.left +
                                  ", top: " + final.top + ", right: " + final.right + ", bottom: " + final.bottom + ".\r\n");
            }
            else
            {
                logBuilder.Append("Autocropping did not find 3 frames that have matching crop values\r\n"
                                  + "Autocrop failed, aborting now");
                return("");
            }

            decimal customDAR;

            //Check if AR needs to be autodetected now
            if (AR == null) // it does
            {
                logBuilder.Append("Aspect Ratio set to auto-detect later, detecting now. ");
                customDAR = d2v.Info.DAR.ar;
                if (customDAR > 0)
                {
                    logBuilder.AppendFormat("Found aspect ratio of {0}.{1}", customDAR, Environment.NewLine);
                }
                else
                {
                    customDAR = Dar.ITU16x9PAL.ar;
                    logBuilder.AppendFormat("No aspect ratio found, defaulting to {0}.{1}", customDAR, Environment.NewLine);
                }
            }
            else
            {
                customDAR = AR.Value.ar;
            }

            //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
            int scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR,
                                                                        final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);

            if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
            {
                x264Settings xs = (x264Settings)settings;
                if (xs.Level != 15)
                {
                    int compliantLevel = 15;
                    while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
                    { // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                        AVCLevels al        = new AVCLevels();
                        string    levelName = al.getLevels()[xs.Level];
                        logBuilder.Append("Your chosen AVC level " + levelName + " is too strict to allow your chosen resolution of " +
                                          horizontalResolution + "*" + scriptVerticalResolution + ". Reducing horizontal resolution by 16.\r\n");
                        horizontalResolution    -= 16;
                        scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR,
                                                                                final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                    }
                    logBuilder.Append("Final resolution that is compatible with the chosen AVC Level: " + horizontalResolution + "*"
                                      + scriptVerticalResolution + "\r\n");
                }
            }

            //Generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v,
                                                  false, false, false, 0);

            if (autoDeint)
            {
                logBuilder.AppendLine("Automatic deinterlacing was checked. Running now...");
                string         d2vPath = path;
                SourceDetector sd      = new SourceDetector(inputLine, d2vPath, false,
                                                            mainForm.Settings.SourceDetectorSettings,
                                                            new UpdateSourceDetectionStatus(analyseUpdate),
                                                            new FinishedAnalysis(finishedAnalysis));
                finished = false;
                sd.analyse();
                waitTillAnalyseFinished();
                deinterlaceLines = filters[0].Script;
                logBuilder.AppendLine("Deinterlacing used: " + deinterlaceLines);
            }

            inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v,
                                                  avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0);

            cropLine     = ScriptServer.GetCropLine(true, final);
            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
            resizeLine   = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            logBuilder.Append("Avisynth script created:\r\n");
            logBuilder.Append(newScript);
            try
            {
                StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"));
                sw.Write(newScript);
                sw.Close();
            }
            catch (IOException i)
            {
                logBuilder.Append("An error occurred when trying to save the AviSynth script:\r\n" + i.Message);
                return("");
            }
            return(Path.ChangeExtension(path, ".avs"));
        }
Code Example #19
File: x264Encoder.cs Project: RoDaniel/featurehouse
        public static string genCommandline(string input, string output, Dar? d, int hres, int vres, x264Settings xs, Zone[] zones)
        {
            int qp;
            bool display = false;
            StringBuilder sb = new StringBuilder();
            CultureInfo ci = new CultureInfo("en-us");

            ///<summary>
            /// x264 Main Tab Settings
            ///</summary>
            // AVC Profiles
            if (!xs.CustomEncoderOptions.Contains("--profile "))
            {
                switch (xs.Profile)
                {
                    case 0: sb.Append("--profile baseline "); break;
                    case 1: sb.Append("--profile main "); break;
                    case 2: break; // --profile high is the default value
                }
            }

            // AVC Levels
            if (!xs.CustomEncoderOptions.Contains("--level "))
                if (xs.Level != 15) // unrestricted
                    sb.Append("--level " + AVCLevels.getCLILevelNames()[xs.Level] + " ");

            // x264 Presets
            if (!xs.CustomEncoderOptions.Contains("--preset "))
            {
                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast: sb.Append("--preset ultrafast "); break;
                    case x264Settings.x264PresetLevelModes.superfast: sb.Append("--preset superfast "); break;
                    case x264Settings.x264PresetLevelModes.veryfast: sb.Append("--preset veryfast "); break;
                    case x264Settings.x264PresetLevelModes.faster: sb.Append("--preset faster "); break;
                    case x264Settings.x264PresetLevelModes.fast: sb.Append("--preset fast "); break;
                    //case x264Settings.x264PresetLevelModes.medium: sb.Append("--preset medium "); break; // default value
                    case x264Settings.x264PresetLevelModes.slow: sb.Append("--preset slow "); break;
                    case x264Settings.x264PresetLevelModes.slower: sb.Append("--preset slower "); break;
                    case x264Settings.x264PresetLevelModes.veryslow: sb.Append("--preset veryslow "); break;
                    case x264Settings.x264PresetLevelModes.placebo: sb.Append("--preset placebo "); break;
                }
            }

            // x264 Tunings
            if (!xs.CustomEncoderOptions.Contains("--tune"))
            {
                switch (xs.x264Tuning)
                {
                    case 1: sb.Append("--tune film "); break;
                    case 2: sb.Append("--tune animation "); break;
                    case 3: sb.Append("--tune grain "); break;
                    case 4: sb.Append("--tune psnr "); break;
                    case 5: sb.Append("--tune ssim "); break;
                    case 6: sb.Append("--tune fastdecode "); break;
                    case 7: sb.Append("--tune touhou "); break;
                    default: break; // default
                }
            }

            // Encoding Modes
            switch (xs.EncodingMode)
            {
                case 0: // ABR
                    if (!xs.CustomEncoderOptions.Contains("--bitrate")) sb.Append("--bitrate " + xs.BitrateQuantizer + " ");
                    break;
                case 1: // CQ
                    if (!xs.CustomEncoderOptions.Contains("--qp "))
                    {
                        qp = (int)xs.QuantizerCRF;
                        sb.Append("--qp " + qp.ToString(ci) + " ");
                    }
                    break;
                case 2: // 2 pass first pass
                    sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                    break;
                case 3: // 2 pass second pass
                case 4: // automated twopass
                    sb.Append("--pass 2 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                    break;
                case 5: // 3 pass first pass
                    sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                    break;
                case 6: // 3 pass 2nd pass
                    sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                    break;
                case 7: // 3 pass 3rd pass
                case 8: // automated threepass, show third pass options
                    sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                    break;
                case 9: // constant quality
                    if (!xs.CustomEncoderOptions.Contains("--crf"))
                        if (xs.QuantizerCRF != 23)
                            sb.Append("--crf " + xs.QuantizerCRF.ToString(ci) + " ");
                    break;
            }

            // Slow 1st Pass
            if (!xs.CustomEncoderOptions.Contains("--slow-firstpass"))
                if ((xs.X264SlowFirstpass) && xs.x264PresetLevel < x264Settings.x264PresetLevelModes.placebo &&
                   ((xs.EncodingMode == 2) || // 2 pass first pass
                    (xs.EncodingMode == 4) || // automated twopass
                    (xs.EncodingMode == 5) || // 3 pass first pass
                    (xs.EncodingMode == 8)))  // automated threepass
                    sb.Append("--slow-firstpass ");

            // Threads
            if (!xs.CustomEncoderOptions.Contains("--thread-input"))
                if (xs.ThreadInput && xs.NbThreads == 1)
                    sb.Append("--thread-input ");
            if (!xs.CustomEncoderOptions.Contains("--threads"))
                if (xs.NbThreads > 0)
                    sb.Append("--threads " + xs.NbThreads + " ");

            ///<summary>
            /// x264 Frame-Type Tab Settings
            ///</summary>

            // H.264 Features
            if (xs.Deblock)
            {
                display = false;

                switch (xs.x264Tuning)
                {
                    case 1:
                    case 7: if (xs.AlphaDeblock != -1 || xs.BetaDeblock != -1) display = true; break;
                    case 2: if (xs.AlphaDeblock != 1 || xs.BetaDeblock != 1) display = true; break;
                    case 3: if (xs.AlphaDeblock != -2 || xs.BetaDeblock != -2) display = true; break;
                    default: if (xs.AlphaDeblock != 0 || xs.BetaDeblock != 0) display = true; break;
                }

                if (!xs.CustomEncoderOptions.Contains("--deblock "))
                    if (display)
                        sb.Append("--deblock " + xs.AlphaDeblock + ":" + xs.BetaDeblock + " ");
            }
            else
            {
                if (!xs.CustomEncoderOptions.Contains("--no-deblock"))
                    if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast || (xs.x264Tuning != 0 && xs.x264Tuning != 6))
                        sb.Append("--no-deblock ");
            }

            if (!xs.CustomEncoderOptions.Contains("--no-cabac"))
            {
                if (!xs.Cabac)
                {
                    if (xs.Profile > 0 && (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast || (xs.x264Tuning != 0 && xs.x264Tuning != 6)))
                        sb.Append("--no-cabac ");
                }
            }

            // GOP Size
            if (!xs.CustomEncoderOptions.Contains("--keyint"))
                if (xs.KeyframeInterval != 250) // gop size of 250 is default
                    sb.Append("--keyint " + xs.KeyframeInterval + " ");
            if (!xs.CustomEncoderOptions.Contains("--min-keyint"))
                if (xs.MinGOPSize != 25)
                    sb.Append("--min-keyint " + xs.MinGOPSize + " ");

            // B-Frames
            if (xs.Profile > 0 && !xs.CustomEncoderOptions.Contains("--bframes"))  // baseline profile always uses 0 bframes
            {
                int iDefaultSettings = 3;
                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast:   iDefaultSettings = 0; break;
                    case x264Settings.x264PresetLevelModes.veryslow:    iDefaultSettings = 8; break;
                    case x264Settings.x264PresetLevelModes.placebo:     iDefaultSettings = 16; break;
                }
                if (xs.x264Tuning == 2) // animation
                    iDefaultSettings += 2;
                if (xs.NbBframes != iDefaultSettings)
                    sb.Append("--bframes " + xs.NbBframes + " ");
            }

            if (xs.NbBframes > 0)
            {
                if (!xs.CustomEncoderOptions.Contains("-b-adapt"))
                {
                    display = false;
                    if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.medium)
                    {
                        if (xs.NewAdaptiveBFrames != 2)
                            display = true;
                    }
                    else if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.ultrafast)
                    {
                        if (xs.NewAdaptiveBFrames != 1)
                            display = true;
                    }
                    else
                    {
                        if (xs.NewAdaptiveBFrames != 0)
                            display = true;
                    }
                    if (display)
                        sb.Append("--b-adapt " + xs.NewAdaptiveBFrames + " ");
                }

                if (xs.NbBframes > 1 && !xs.CustomEncoderOptions.Contains("--b-pyramid"))
                {
                    switch (xs.x264BFramePyramid) // pyramid needs a minimum of 2 b frames
                    {
                        case 1: sb.Append("--b-pyramid strict "); break;
                        case 0: sb.Append("--b-pyramid none "); break;
                    }
                }

                if (!xs.CustomEncoderOptions.Contains("--no-weightb"))
                    if (!xs.WeightedBPrediction && xs.x264Tuning != 6 && xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast)
                        sb.Append("--no-weightb ");
            }

            // B-Frames bias
            if (!xs.CustomEncoderOptions.Contains("--b-bias "))
                if (xs.BframeBias != 0.0M)
                    sb.Append("--b-bias " + xs.BframeBias.ToString(ci) + " ");

            // Other
            if (!xs.CustomEncoderOptions.Contains("--interlaced"))
                if (xs.EncodeInterlaced)
                    sb.Append("--interlaced ");

            if (xs.Scenecut)
            {
                if (!xs.CustomEncoderOptions.Contains("--scenecut "))
                    if ((xs.SCDSensitivity != 40M && xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast) ||
                        (xs.SCDSensitivity != 0M && xs.x264PresetLevel == x264Settings.x264PresetLevelModes.ultrafast))
                        sb.Append("--scenecut " + xs.SCDSensitivity.ToString(ci) + " ");
            }
            else
            {
                if (!xs.CustomEncoderOptions.Contains("--no-scenecut"))
                    if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast)
                        sb.Append("--no-scenecut ");
            }

            // reference frames
            if (!xs.CustomEncoderOptions.Contains("--ref "))
            {
                int iDefaultSettings = 0;
                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast:
                    case x264Settings.x264PresetLevelModes.superfast:
                    case x264Settings.x264PresetLevelModes.veryfast:    iDefaultSettings = 1; break;
                    case x264Settings.x264PresetLevelModes.faster:
                    case x264Settings.x264PresetLevelModes.fast:        iDefaultSettings = 2; break;
                    case x264Settings.x264PresetLevelModes.medium:      iDefaultSettings = 3; break;
                    case x264Settings.x264PresetLevelModes.slow:        iDefaultSettings = 5; break;
                    case x264Settings.x264PresetLevelModes.slower:      iDefaultSettings = 8; break;
                    case x264Settings.x264PresetLevelModes.veryslow:
                    case x264Settings.x264PresetLevelModes.placebo:     iDefaultSettings = 16; break;
                }
                if ((xs.x264Tuning == 2 || xs.x264Tuning == 7) && iDefaultSettings > 1)
                    iDefaultSettings = iDefaultSettings * 2;

                if (iDefaultSettings != xs.NbRefFrames)
                    sb.Append("--ref " + xs.NbRefFrames + " ");
            }

            // WeightedPPrediction
            if (!xs.CustomEncoderOptions.Contains("--weightp "))
            {
                display = false;
                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast:
                    case x264Settings.x264PresetLevelModes.superfast:
                    case x264Settings.x264PresetLevelModes.veryfast: if (xs.WeightedPPrediction != 0) display = true; break;
                    case x264Settings.x264PresetLevelModes.faster: if (xs.WeightedPPrediction != 1) display = true; break;
                    default: if (xs.WeightedPPrediction != 2) display = true; break;
                }
                if (xs.x264Tuning == 6 && xs.WeightedPPrediction != 0)
                    display = true;
                if (xs.Profile == 0)
                    display = false;
                if (display)
                    sb.Append("--weightp " + xs.WeightedPPrediction + " ");
            }

            // Slicing
            if (!xs.CustomEncoderOptions.Contains("--slices "))
                if (xs.SlicesNb != 0)
                    sb.Append("--slices " + xs.SlicesNb + " ");
            if (!xs.CustomEncoderOptions.Contains("--slice-max-size "))
                if (xs.MaxSliceSyzeBytes != 0)
                    sb.Append("--slice-max-size " + xs.MaxSliceSyzeBytes + " ");
            if (!xs.CustomEncoderOptions.Contains("--slice-max-mbs "))
                if (xs.MaxSliceSyzeMBs != 0)
                    sb.Append("--slice-max-mbs " + xs.MaxSliceSyzeMBs + " ");

            ///<summary>
            /// x264 Rate Control Tab Settings
            /// </summary>

            if (!xs.CustomEncoderOptions.Contains("--qpmin "))
                if (xs.MinQuantizer != 10)
                    sb.Append("--qpmin " + xs.MinQuantizer + " ");
            if (!xs.CustomEncoderOptions.Contains("--qpmax "))
                if (xs.MaxQuantizer != 51)
                    sb.Append("--qpmax " + xs.MaxQuantizer + " ");
            if (!xs.CustomEncoderOptions.Contains("--qpstep "))
                if (xs.MaxQuantDelta != 4)
                    sb.Append("--qpstep " + xs.MaxQuantDelta + " ");

            if (xs.IPFactor != 1.4M)
            {
                display = true;
                if (xs.x264Tuning == 3 && xs.IPFactor == 1.1M)
                    display = false;

                if (!xs.CustomEncoderOptions.Contains("--ipratio "))
                    if (display)
                        sb.Append("--ipratio " + xs.IPFactor.ToString(ci) + " ");
            }

            if (xs.PBFactor != 1.3M)
            {
                display = true;
                if (xs.x264Tuning == 3 && xs.PBFactor == 1.1M)
                    display = false;

                if (!xs.CustomEncoderOptions.Contains("--pbratio "))
                    if (display)
                        sb.Append("--pbratio " + xs.PBFactor.ToString(ci) + " ");
            }

            if (!xs.CustomEncoderOptions.Contains("--chroma-qp-offset "))
                if (xs.ChromaQPOffset != 0.0M)
                    sb.Append("--chroma-qp-offset " + xs.ChromaQPOffset.ToString(ci) + " ");

            if (xs.EncodingMode != 1) // doesn't apply to CQ mode
            {
                if (!xs.CustomEncoderOptions.Contains("--vbv-bufsize "))
                    if (xs.VBVBufferSize > 0)
                        sb.Append("--vbv-bufsize " + xs.VBVBufferSize + " ");
                if (!xs.CustomEncoderOptions.Contains("--vbv-maxrate "))
                    if (xs.VBVMaxBitrate > 0)
                        sb.Append("--vbv-maxrate " + xs.VBVMaxBitrate + " ");
                if (!xs.CustomEncoderOptions.Contains("--vbv-init "))
                    if (xs.VBVInitialBuffer != 0.9M)
                        sb.Append("--vbv-init " + xs.VBVInitialBuffer.ToString(ci) + " ");
                if (!xs.CustomEncoderOptions.Contains("--ratetol "))
                    if (xs.BitrateVariance != 1.0M)
                        sb.Append("--ratetol " + xs.BitrateVariance.ToString(ci) + " ");

                if (!xs.CustomEncoderOptions.Contains("--qcomp "))
                {
                    display = true;
                    if ((xs.x264Tuning == 3 && xs.QuantCompression == 0.8M) || (xs.x264Tuning != 3 && xs.QuantCompression == 0.6M))
                        display = false;
                    if (display)
                        sb.Append("--qcomp " + xs.QuantCompression.ToString(ci) + " ");
                }

                if (xs.EncodingMode > 1) // applies only to twopass
                {
                    if (!xs.CustomEncoderOptions.Contains("--cplxblur "))
                        if (xs.TempComplexityBlur != 20)
                            sb.Append("--cplxblur " + xs.TempComplexityBlur.ToString(ci) + " ");
                    if (!xs.CustomEncoderOptions.Contains("--qblur "))
                        if (xs.TempQuanBlurCC != 0.5M)
                            sb.Append("--qblur " + xs.TempQuanBlurCC.ToString(ci) + " ");
                }
            }

            // Dead Zones
            if (!xs.CustomEncoderOptions.Contains("--deadzone-inter "))
            {
                display = true;
                if ((xs.x264Tuning != 3 && xs.DeadZoneInter == 21) || (xs.x264Tuning == 3 && xs.DeadZoneInter == 6))
                    display = false;
                if (display)
                    sb.Append("--deadzone-inter " + xs.DeadZoneInter + " ");
            }

            if (!xs.CustomEncoderOptions.Contains("--deadzone-intra "))
            {
                display = true;
                if ((xs.x264Tuning != 3 && xs.DeadZoneIntra == 11) || (xs.x264Tuning == 3 && xs.DeadZoneIntra == 6))
                    display = false;
                if (display)
                    sb.Append("--deadzone-intra " + xs.DeadZoneIntra + " ");
            }

            // Disable Macroblock Tree
            if (!xs.NoMBTree)
            {
                if (!xs.CustomEncoderOptions.Contains("--no-mbtree"))
                    if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.veryfast)
                        sb.Append("--no-mbtree ");
            }
            else
            {
                // RC Lookahead
                if (!xs.CustomEncoderOptions.Contains("--rc-lookahead "))
                {
                    display = false;
                    switch (xs.x264PresetLevel)
                    {
                        case x264Settings.x264PresetLevelModes.faster: if (xs.Lookahead != 20) display = true; break;
                        case x264Settings.x264PresetLevelModes.fast: if (xs.Lookahead != 30) display = true; break;
                        case x264Settings.x264PresetLevelModes.medium: if (xs.Lookahead != 40) display = true; break;
                        case x264Settings.x264PresetLevelModes.slow: if (xs.Lookahead != 50) display = true; break;
                        case x264Settings.x264PresetLevelModes.slower:
                        case x264Settings.x264PresetLevelModes.veryslow:
                        case x264Settings.x264PresetLevelModes.placebo: if (xs.Lookahead != 60) display = true; break;
                    }
                    if (display)
                        sb.Append("--rc-lookahead " + xs.Lookahead + " ");
                }
            }

            // AQ-Mode
            if (xs.EncodingMode != (int)VideoCodecSettings.Mode.CQ)
            {
                if (xs.AQmode > 0)
                {
                    if (!xs.CustomEncoderOptions.Contains("--aq-mode "))
                    {
                        display = true;
                        if ((xs.x264Tuning != 5 && xs.AQmode == 1) || (xs.x264Tuning == 5 && xs.AQmode == 2))
                            display = false;
                        if (display)
                            sb.Append("--aq-mode " + xs.AQmode.ToString() + " ");
                    }

                    display = false;
                    switch (xs.x264Tuning)
                    {
                        case 2: if (xs.AQstrength != 0.6M) display = true; break;
                        case 3: if (xs.AQstrength != 0.5M) display = true; break;
                        case 7: if (xs.AQstrength != 1.3M) display = true; break;
                        default: if (xs.AQstrength != 1.0M) display = true; break;
                    }
                    if (!xs.CustomEncoderOptions.Contains("--aq-strength "))
                        if (display)
                            sb.Append("--aq-strength " + xs.AQstrength.ToString(ci) + " ");
                }
                else
                {
                    if (!xs.CustomEncoderOptions.Contains("--aq-mode "))
                        if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast && xs.x264Tuning != 4)
                            sb.Append("--aq-mode 0 ");
                }
            }

            // custom matrices
            if (xs.QuantizerMatrixType > 0)
            {
                switch (xs.QuantizerMatrixType)
                {
                    case 1: if (!xs.CustomEncoderOptions.Contains("--cqm ")) sb.Append("--cqm \"jvt\" "); break;
                    case 2: if (!xs.CustomEncoderOptions.Contains("--cqmfile")) sb.Append("--cqmfile \"" + xs.QuantizerMatrix + "\" "); break;
                }
            }

            ///<summary>
            /// x264 Analysis Tab Settings
            /// </summary>

            // Disable Chroma Motion Estimation
            if (!xs.CustomEncoderOptions.Contains("--no-chroma-me"))
                if (!xs.ChromaME)
                    sb.Append("--no-chroma-me ");

            // Motion Estimation Range
            if (!xs.CustomEncoderOptions.Contains("--merange "))
            {
                if ((xs.x264PresetLevel <= x264Settings.x264PresetLevelModes.slower && xs.MERange != 16) ||
                    (xs.x264PresetLevel >= x264Settings.x264PresetLevelModes.veryslow && xs.MERange != 24))
                    sb.Append("--merange " + xs.MERange + " ");
            }

            // ME Type
            if (!xs.CustomEncoderOptions.Contains("--me "))
            {
                display = false;
                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast:
                    case x264Settings.x264PresetLevelModes.superfast:   if (xs.METype != 0) display = true; break;
                    case x264Settings.x264PresetLevelModes.veryfast:
                    case x264Settings.x264PresetLevelModes.faster:
                    case x264Settings.x264PresetLevelModes.fast:
                    case x264Settings.x264PresetLevelModes.medium:      if (xs.METype != 1) display = true; break;
                    case x264Settings.x264PresetLevelModes.slow:
                    case x264Settings.x264PresetLevelModes.slower:
                    case x264Settings.x264PresetLevelModes.veryslow:    if (xs.METype != 2) display = true; break;
                    case x264Settings.x264PresetLevelModes.placebo:     if (xs.METype != 4) display = true; break;
                }

                if (display)
                {
                    switch (xs.METype)
                    {
                        case 0: sb.Append("--me dia "); break;
                        case 1: sb.Append("--me hex "); break;
                        case 2: sb.Append("--me umh "); break;
                        case 3: sb.Append("--me esa "); break;
                        case 4: sb.Append("--me tesa "); break;
                    }
                }

            }

            if (!xs.CustomEncoderOptions.Contains("--direct "))
            {
                display = false;
                if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.medium)
                {
                    if (xs.BframePredictionMode != 3)
                        display = true;
                }
                else if (xs.BframePredictionMode != 1)
                    display = true;

                if (display)
                {
                    switch (xs.BframePredictionMode)
                    {
                        case 0: sb.Append("--direct none "); break;
                        case 1: sb.Append("--direct spatial "); break;
                        case 2: sb.Append("--direct temporal "); break;
                        case 3: sb.Append("--direct auto "); break;
                    }
                }
            }

            if (!xs.CustomEncoderOptions.Contains("--nr "))
                if (xs.NoiseReduction > 0)
                    sb.Append("--nr " + xs.NoiseReduction + " ");

            // subpel refinement
            if (!xs.CustomEncoderOptions.Contains("--subme "))
            {
                display = false;
                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast:   if (xs.SubPelRefinement != 0) display = true; break;
                    case x264Settings.x264PresetLevelModes.superfast:   if (xs.SubPelRefinement != 1) display = true; break;
                    case x264Settings.x264PresetLevelModes.veryfast:    if (xs.SubPelRefinement != 2) display = true; break;
                    case x264Settings.x264PresetLevelModes.faster:      if (xs.SubPelRefinement != 4) display = true; break;
                    case x264Settings.x264PresetLevelModes.fast:        if (xs.SubPelRefinement != 6) display = true; break;
                    case x264Settings.x264PresetLevelModes.medium:      if (xs.SubPelRefinement != 7) display = true; break;
                    case x264Settings.x264PresetLevelModes.slow:        if (xs.SubPelRefinement != 8) display = true; break;
                    case x264Settings.x264PresetLevelModes.slower:      if (xs.SubPelRefinement != 9) display = true; break;
                    case x264Settings.x264PresetLevelModes.veryslow:    if (xs.SubPelRefinement != 10) display = true; break;
                    case x264Settings.x264PresetLevelModes.placebo:     if (xs.SubPelRefinement != 10) display = true; break;
                }
                if (display)
                    sb.Append("--subme " + (xs.SubPelRefinement) + " ");
            }

            // macroblock types
            if (!xs.CustomEncoderOptions.Contains("--partitions "))
            {
                bool bExpectedP8x8mv = true;
                bool bExpectedB8x8mv = true;
                bool bExpectedI4x4mv = true;
                bool bExpectedI8x8mv = true;
                bool bExpectedP4x4mv = true;

                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast: bExpectedP8x8mv = false; bExpectedB8x8mv = false; bExpectedI4x4mv = false;
                            bExpectedI8x8mv = false; bExpectedP4x4mv = false; break;
                    case x264Settings.x264PresetLevelModes.superfast: bExpectedP8x8mv = false; bExpectedB8x8mv = false; bExpectedP4x4mv = false; break;
                    case x264Settings.x264PresetLevelModes.veryfast:
                    case x264Settings.x264PresetLevelModes.faster:
                    case x264Settings.x264PresetLevelModes.fast:
                    case x264Settings.x264PresetLevelModes.medium:
                    case x264Settings.x264PresetLevelModes.slow: bExpectedP4x4mv = false; break;
                }
                if (xs.x264Tuning == 7 && bExpectedP8x8mv)
                    bExpectedP4x4mv = true;

                if (bExpectedP8x8mv != xs.P8x8mv || bExpectedB8x8mv != xs.B8x8mv
                    || bExpectedI4x4mv != xs.I4x4mv || bExpectedI8x8mv != xs.I8x8mv
                    || bExpectedP4x4mv != xs.P4x4mv)
                {
                    if (xs.P8x8mv || xs.B8x8mv || xs.I4x4mv || xs.I8x8mv || xs.P4x4mv)
                    {
                        sb.Append("--partitions ");
                        if (xs.I4x4mv && xs.I8x8mv && xs.P4x4mv && xs.P8x8mv && xs.B8x8mv)
                            sb.Append("all ");
                        else
                        {
                            if (xs.P8x8mv) // default is checked
                                sb.Append("p8x8,");
                            if (xs.B8x8mv) // default is checked
                                sb.Append("b8x8,");
                            if (xs.I4x4mv) // default is checked
                                sb.Append("i4x4,");
                            if (xs.P4x4mv) // default is unchecked
                                sb.Append("p4x4,");
                            if (xs.I8x8mv) // default is checked
                                sb.Append("i8x8");
                            if (sb.ToString().EndsWith(","))
                                sb.Remove(sb.Length - 1, 1);
                        }

                        if (!sb.ToString().EndsWith(" "))
                            sb.Append(" ");
                    }
                    else
                        sb.Append("--partitions none ");
                }
            }

            if (!xs.CustomEncoderOptions.Contains("--no-8x8dct"))
                if (!xs.AdaptiveDCT)
                    if (xs.Profile > 0 && xs.x264PresetLevel > x264Settings.x264PresetLevelModes.ultrafast)
                        sb.Append("--no-8x8dct ");

            // Trellis
            if (!xs.CustomEncoderOptions.Contains("--trellis ") && xs.Cabac)
            {
                display = false;
                switch (xs.x264PresetLevel)
                {
                    case x264Settings.x264PresetLevelModes.ultrafast:
                    case x264Settings.x264PresetLevelModes.superfast:
                    case x264Settings.x264PresetLevelModes.veryfast:    if (xs.X264Trellis != 0) display = true; break;
                    case x264Settings.x264PresetLevelModes.faster:
                    case x264Settings.x264PresetLevelModes.fast:
                    case x264Settings.x264PresetLevelModes.medium:
                    case x264Settings.x264PresetLevelModes.slow:        if (xs.X264Trellis != 1) display = true; break;
                    case x264Settings.x264PresetLevelModes.slower:
                    case x264Settings.x264PresetLevelModes.veryslow:
                    case x264Settings.x264PresetLevelModes.placebo:     if (xs.X264Trellis != 2) display = true; break;
                }
                if (display)
                    sb.Append("--trellis " + xs.X264Trellis + " ");
            }

            if (!xs.CustomEncoderOptions.Contains("--psy-rd "))
            {
                if (xs.SubPelRefinement > 5)
                {
                    display = false;
                    switch (xs.x264Tuning)
                    {
                        case 1: if ((xs.PsyRDO != 1.0M) && (xs.PsyTrellis != 0.15M)) display = true; break;
                        case 2: if ((xs.PsyRDO != 0.4M) && (xs.PsyTrellis != 0.0M)) display = true; break;
                        case 3: if ((xs.PsyRDO != 1.0M) && (xs.PsyTrellis != 0.25M)) display = true; break;
                        case 7: if ((xs.PsyRDO != 1.0M) && (xs.PsyTrellis != 0.2M)) display = true; break;
                        default: if ((xs.PsyRDO != 1.0M) || (xs.PsyTrellis != 0.0M)) display = true; break;
                    }

                    if (display)
                        sb.Append("--psy-rd " + xs.PsyRDO.ToString(ci) + ":" + xs.PsyTrellis.ToString(ci) + " ");
                }
            }
            else
            {
                display = false;
                switch (xs.x264Tuning)
                {
                    case 1: if (xs.PsyTrellis != 0.15M) display = true; break;
                    case 3: if (xs.PsyTrellis != 0.25M) display = true; break;
                    case 7: if (xs.PsyTrellis != 0.2M) display = true; break;
                    case 0:
                    case 4:
                        {
                            if (xs.PsyTrellis != 0.0M)
                                display = true;
                        } break;
                }
                if (!xs.CustomEncoderOptions.Contains("--psy-rd 0: "))
                    if (display)
                        sb.Append("--psy-rd 0:" + xs.PsyTrellis.ToString(ci) + " ");
            }

            if (!xs.CustomEncoderOptions.Contains("--no-mixed-refs"))
                if (xs.NoMixedRefs)
                    if (xs.x264PresetLevel >= x264Settings.x264PresetLevelModes.fast)
                        sb.Append("--no-mixed-refs ");

            if (!xs.CustomEncoderOptions.Contains("--no-dct-decimate"))
                if (xs.NoDCTDecimate)
                    if (xs.x264Tuning != 3)
                        sb.Append("--no-dct-decimate ");

            if (!xs.CustomEncoderOptions.Contains("--no-fast-pskip"))
                if (xs.NoFastPSkip)
                    if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.placebo)
                        sb.Append("--no-fast-pskip ");

            if (!xs.CustomEncoderOptions.Contains("--no-psy"))
                if (xs.NoPsy && (xs.x264Tuning != 4 && xs.x264Tuning != 5))
                    sb.Append("--no-psy ");

            if (!xs.CustomEncoderOptions.Contains("--aud"))
                if (xs.X264Aud)
                    sb.Append("--aud ");

            if (!xs.CustomEncoderOptions.Contains("--nal-hrd"))
                if (xs.X264Nalhrd)
                    sb.Append("--nal-hrd vbr ");

            ///<summary>
            /// x264 Misc Tab Settings
            /// </summary>

            // QPFile
            if (!xs.CustomEncoderOptions.Contains("-qpfile "))
                if (xs.UseQPFile)
                    if (xs.EncodingMode == 0 ||
                        xs.EncodingMode == 1 ||
                        xs.EncodingMode == 2 ||
                        xs.EncodingMode == 5 ||
                        xs.EncodingMode == 9)
                        sb.Append("--qpfile " + "\"" + xs.QPFile + "\" ");

            if (!xs.CustomEncoderOptions.Contains("--psnr"))
                if (xs.PSNRCalculation)
                    sb.Append("--psnr ");

            if (!xs.CustomEncoderOptions.Contains("--ssim"))
                if (xs.SSIMCalculation)
                    sb.Append("--ssim ");

            if (!xs.CustomEncoderOptions.Contains("--fullrange on"))
                if (xs.fullRange)
                    sb.Append("--fullrange on ");

            if (!xs.CustomEncoderOptions.Equals("")) // add custom encoder options
                sb.Append(xs.CustomEncoderOptions + " ");

            if (zones != null && zones.Length > 0 && xs.CreditsQuantizer >= 1.0M)
            {
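                // each zone is appended as "start,end,q=<quantizer>/" or "start,end,b=<weight>/";
                // the trailing "/" left after the last zone is stripped again below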
                sb.Append("--zones ");
                foreach (Zone zone in zones)
                {
                    sb.Append(zone.startFrame + "," + zone.endFrame + ",");
                    if (zone.mode == ZONEMODE.Quantizer)
                    {
                        sb.Append("q=");
                        sb.Append(zone.modifier + "/");
                    }
                    if (zone.mode == ZONEMODE.Weight)
                    {
                        sb.Append("b=");
                        double mod = (double)zone.modifier / 100.0;
                        sb.Append(mod.ToString(ci) + "/");
                    }
                }
                sb.Remove(sb.Length - 1, 1);
                sb.Append(" ");
            }

            if (!xs.CustomEncoderOptions.Contains("--sar "))
            {
                if (d.HasValue)
                {
                    Sar s = d.Value.ToSar(hres, vres);
                    sb.Append("--sar " + s.X + ":" + s.Y + " ");
                }
            }

            //add the rest of the commandline regarding the output
            if (xs.EncodingMode == 2 || xs.EncodingMode == 5)
                sb.Append("--output NUL ");
            else
                sb.Append("--output " + "\"" + output + "\" ");
            sb.Append("\"" + input + "\" ");

            return sb.ToString();
        }
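The builder above only assembles x264's argument string. As a rough orientation, the snippet below sketches how such a string could be handed to x264.exe; the executable path, the file names and the args value are placeholders, and this is not MeGUI's actual job runner.

using System.Diagnostics;

// Hypothetical call site (illustration only): launch x264.exe with a pre-built argument string.
string args = "--crf 20 --output \"output.264\" \"input.avs\""; // placeholder; normally the builder's return value
ProcessStartInfo psi = new ProcessStartInfo(@"C:\Tools\x264\x264.exe", args)
{
    UseShellExecute = false,
    RedirectStandardError = true   // x264 reports its progress on stderr
};
using (Process p = Process.Start(psi))
{
    string progress = p.StandardError.ReadToEnd();   // would normally be parsed for a status display
    p.WaitForExit();
}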
コード例 #20
0
        /// <summary>
        /// Creates the AVS script file.
        /// If the file can be properly opened, auto-cropping is performed; then, depending on the AR settings,
        /// the proper resolution for automatic resizing is calculated (taking the derived cropping values into
        /// account), and finally the AviSynth script is written and its name returned.
        /// </summary>
        /// <param name="indexFile">the index file of the source (or the script itself for AVS input)</param>
        /// <param name="inputFile">the source video file</param>
        /// <param name="AR">the aspect ratio to use; null to auto-detect it from the source</param>
        /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
        /// <param name="_log">the log item to write to</param>
        /// <param name="avsSettings">the AviSynth profile settings</param>
        /// <param name="autoDeint">whether automatic deinterlacing detection should be run</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="autoCrop">whether or not autoCrop is used for the input</param>
        /// <param name="keepInputResolution">whether the input resolution should be kept untouched</param>
        /// <param name="useChaptersMarks">whether chapter marks should be written to a qpf file</param>
        /// <returns>the name of the AviSynth script created, empty if there was an error</returns>
        private string CreateAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth,
                                     LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings,
                                     bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
        {
            Dar?            dar = null;
            Dar             customDAR;
            IMediaFile      iMediaFile = null;
            IVideoReader    reader;
            PossibleSources oPossibleSource;
            x264Device      xTargetDevice = null;
            CropValues      cropValues    = new CropValues();

            int outputWidthIncludingPadding  = 0;
            int outputHeightIncludingPadding = 0;
            int outputWidthCropped           = 0;
            int outputHeightCropped          = 0;

            // encode anamorph either when it is selected in the avs profile or the input resolution should not be touched
            bool signalAR = (avsSettings.Mod16Method != mod16Method.none) || keepInputResolution;

            // make sure the proper anamorphic encode is selected if the input resolution should not be touched
            if (keepInputResolution && avsSettings.Mod16Method != mod16Method.nonMod16)
            {
                avsSettings.Mod16Method = mod16Method.nonMod16;
            }

            // open index file to retrieve information
            if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
            {
                iMediaFile      = new dgiFile(indexFile);
                oPossibleSource = PossibleSources.dgi;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
            {
                iMediaFile      = new d2vFile(indexFile);
                oPossibleSource = PossibleSources.d2v;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM)
            {
                iMediaFile      = new dgmFile(indexFile);
                oPossibleSource = PossibleSources.dgm;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
            {
                iMediaFile      = new ffmsFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.ffindex;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
            {
                iMediaFile      = new lsmashFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.lsmash;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
            {
                string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
                iMediaFile      = AvsFile.ParseScript(tempAvs, true);
                oPossibleSource = PossibleSources.avisource;
            }
            else
            {
                iMediaFile      = AvsFile.OpenScriptFile(inputFile, true);
                oPossibleSource = PossibleSources.avs;
            }
            reader = iMediaFile.GetVideoReader();

            // abort if the index file is invalid
            if (reader.FrameCount < 1)
            {
                _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
                return("");
            }

            if (AR == null)
            {
                // AR needs to be detected automatically now
                _log.LogValue("Auto-detect aspect ratio", AR == null);
                customDAR = iMediaFile.VideoInfo.DAR;
                if (customDAR.AR <= 0)
                {
                    customDAR = Dar.ITU16x9PAL;
                    _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value;
            }
            _log.LogValue("Aspect ratio", customDAR);

            // check x264 settings (target device, chapter file)
            if (settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xTargetDevice = xs.TargetDevice;
                _log.LogValue("Target device", xTargetDevice.Name);
            }

            // get mod value for resizing
            int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

            // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
            if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
            {
                _log.Error("Autocrop failed. Aborting...");
                return("");
            }

            int inputWidth      = (int)iMediaFile.VideoInfo.Width;
            int inputHeight     = (int)iMediaFile.VideoInfo.Height;
            int inputFPS_D      = (int)iMediaFile.VideoInfo.FPS_D;
            int inputFPS_N      = (int)iMediaFile.VideoInfo.FPS_N;
            int inputFrameCount = (int)iMediaFile.VideoInfo.FrameCount;

            // force destruction of AVS script
            iMediaFile.Dispose();

            Dar? suggestedDar = null;

            if (desiredOutputWidth == 0)
            {
                desiredOutputWidth = outputWidthIncludingPadding = inputWidth;
            }
            else if (!avsSettings.Upsize && desiredOutputWidth > inputWidth)
            {
                outputWidthIncludingPadding = inputWidth;
            }
            else
            {
                outputWidthIncludingPadding = desiredOutputWidth;
            }
            CropValues paddingValues;

            bool resizeEnabled;
            int  outputWidthWithoutUpsizing = outputWidthIncludingPadding;

            if (avsSettings.Upsize)
            {
                resizeEnabled = !keepInputResolution;
                CropValues cropValuesTemp = cropValues.Clone();
                int        outputHeightIncludingPaddingTemp = 0;
                Resolution.GetResolution(inputWidth, inputHeight, customDAR,
                                         ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true,
                                         avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D,
                                         ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log);
            }

            resizeEnabled = !keepInputResolution;
            Resolution.GetResolution(inputWidth, inputHeight, customDAR,
                                     ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true,
                                     avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D,
                                     ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log);
            keepInputResolution = !resizeEnabled;

            if (signalAR && suggestedDar.HasValue)
            {
                dar = suggestedDar;
            }

            // log calculated output resolution
            outputWidthCropped  = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
            outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
            _log.LogValue("Input resolution", inputWidth + "x" + inputHeight);
            _log.LogValue("Desired maximum width", desiredOutputWidth);
            if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
            {
                _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
            }
            if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
            {
                _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
            }
            if (cropValues.isCropped())
            {
                _log.LogValue("Autocrop values", cropValues);
                _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            else
            {
                _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            if (paddingValues.isCropped())
            {
                _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
            }

            // generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(
                inputFile, indexFile, false, oPossibleSource, false, false, false, 0,
                avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

            if (IsJobStopped())
            {
                return("");
            }

            _log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                su.Status = "Automatic deinterlacing...   ***PLEASE WAIT***";
                string d2vPath = indexFile;
                _sourceDetector = new SourceDetector(inputLine, d2vPath, avsSettings.PreferAnimeDeinterlace, inputFrameCount,
                                                     Thread.CurrentThread.Priority,
                                                     MainForm.Instance.Settings.SourceDetectorSettings,
                                                     new UpdateSourceDetectionStatus(AnalyseUpdate),
                                                     new FinishedAnalysis(FinishedAnalysis));
                finished = false;
                _sourceDetector.Analyse();
                WaitTillAnalyseFinished();
                _sourceDetector = null;
                if (filters != null)
                {
                    deinterlaceLines = filters[0].Script;
                    if (interlaced)
                    {
                        _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
                    }
                    else
                    {
                        _log.LogValue("Deinterlacing used", deinterlaceLines);
                    }
                }
            }

            if (IsJobStopped())
            {
                return("");
            }

            su.Status = "Finalizing preprocessing...   ***PLEASE WAIT***";

            // get final input filter line
            inputLine = ScriptServer.GetInputLine(
                inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock,
                false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

            // get crop & resize lines
            if (!keepInputResolution)
            {
                if (autoCrop)
                {
                    cropLine = ScriptServer.GetCropLine(cropValues);
                }
                resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || inputWidth != outputWidthCropped,
                                                        outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
                                                        autoCrop, cropValues, inputWidth, inputHeight);
            }

            // get denoise line
            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            else
            {
                if (xTargetDevice != null && xTargetDevice.BluRay)
                {
                    string       strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
                    x264Settings _xs           = (x264Settings)settings;
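                    // SampleAR index mapping used below (per the log messages): 1 = 1:1, 2 = 4:3, 4 = 10:11, 5 = 12:11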

                    if (strResolution.Equals("720x480"))
                    {
                        _xs.SampleAR = 4;
                        _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " +
                                      strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("720x576"))
                    {
                        _xs.SampleAR = 5;
                        _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
                    {
                        _xs.SampleAR = 1;
                        _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1440x1080"))
                    {
                        _xs.SampleAR = 2;
                        _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                }
            }

            _log.LogValue("Generated AviSynth script", newScript);
            string strOutputAVSFile;

            if (String.IsNullOrEmpty(indexFile))
            {
                strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
            }
            else
            {
                strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
            }

            try
            {
                StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
                sw.Write(newScript);
                sw.Close();
            }
            catch (Exception i)
            {
                _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }

            JobUtil.GetAllInputProperties(strOutputAVSFile, out ulong numberOfFrames, out double fps, out int fps_n, out int fps_d, out int hres, out int vres, out Dar d, out AviSynthColorspace colorspace);
            _log.LogEvent("resolution: " + hres + "x" + vres);
            _log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
            _log.LogEvent("frames: " + numberOfFrames);
            TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps);

            _log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}",
                                                     (int)(oTime.TotalHours), oTime.Minutes, oTime.Seconds, oTime.Milliseconds));
            _log.LogValue("aspect ratio", d);
            _log.LogValue("color space", colorspace.ToString());

            if (IsJobStopped())
            {
                return("");
            }

            // create qpf file if necessary and possible
            if (job.PostprocessingProperties.ChapterInfo.HasChapters && useChaptersMarks && settings != null && settings is x264Settings)
            {
                fps = (double)fps_n / fps_d;
                string strChapterFile = Path.ChangeExtension(strOutputAVSFile, ".qpf");
                job.PostprocessingProperties.ChapterInfo.ChangeFps(fps);
                if (job.PostprocessingProperties.ChapterInfo.SaveQpfile(strChapterFile))
                {
                    job.PostprocessingProperties.FilesToDelete.Add(strChapterFile);
                    _log.LogValue("qpf file created", strChapterFile);
                    x264Settings xs = (x264Settings)settings;
                    xs.UseQPFile = true;
                    xs.QPFile    = strChapterFile;
                }
            }

            // check if a timestamp file has to be used
            if (!String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xs.TCFile = job.PostprocessingProperties.TimeStampFile;
            }

            return(strOutputAVSFile);
        }
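The script text above is produced by substituting the #input, #deinterlace, #crop, #resize and #denoise markers into the user's AviSynth template. The helper below is only an illustrative guess at that substitution; the real ScriptServer.CreateScriptFromTemplate may handle more cases.

// Illustrative sketch (assumption) of the marker substitution performed for CreateAVSFile.
static string ApplyAvsTemplate(string template, string inputLine, string deinterlaceLines,
                               string cropLine, string resizeLine, string denoiseLines)
{
    // Each "#marker" placeholder in the profile template is replaced by the generated line(s);
    // markers that are left unchanged (e.g. "#crop" when cropping is skipped) stay behind as
    // harmless AviSynth comments, since "#" starts a comment in AviSynth scripts.
    return template.Replace("#input", inputLine)
                   .Replace("#deinterlace", deinterlaceLines)
                   .Replace("#denoise", denoiseLines)
                   .Replace("#crop", cropLine)
                   .Replace("#resize", resizeLine);
}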
コード例 #21
0
ファイル: x264Settings.cs プロジェクト: huannguyenfit/MeGUI
 /// <summary>
 /// Assesses whether the encoding options differ between two x264Settings instances.
 /// The following are excluded from the comparison:
 /// BitrateQuantizer
 /// CreditsQuantizer
 /// Logfile
 /// NbThreads
 /// SARX
 /// SARY
 /// Zones
 /// </summary>
 /// <param name="otherSettings"></param>
 /// <returns>true if the settings differ</returns>
 public bool IsAltered(x264Settings otherSettings)
 {
     if (
         this.AdaptiveBFrames != otherSettings.AdaptiveBFrames ||
         this.AdaptiveDCT != otherSettings.AdaptiveDCT ||
         this.AlphaDeblock != otherSettings.AlphaDeblock ||
         this.NoFastPSkip != otherSettings.NoFastPSkip ||
         this.B8x8mv != otherSettings.B8x8mv ||
         this.BetaDeblock != otherSettings.BetaDeblock ||
         this.BframeBias != otherSettings.BframeBias ||
         this.BframePredictionMode != otherSettings.BframePredictionMode ||
         this.BFramePyramid != otherSettings.BFramePyramid ||
         this.BitrateVariance != otherSettings.BitrateVariance ||
         this.biME != otherSettings.biME ||
         this.BRDO != otherSettings.BRDO ||
         this.Cabac != otherSettings.Cabac ||
         this.ChromaME != otherSettings.ChromaME ||
         this.ChromaQPOffset != otherSettings.ChromaQPOffset ||
         this.CustomEncoderOptions != otherSettings.CustomEncoderOptions ||
         this.Deblock != otherSettings.Deblock ||
         this.EncodingMode != otherSettings.EncodingMode ||
         this.FourCC != otherSettings.FourCC ||
         this.I4x4mv != otherSettings.I4x4mv ||
         this.I8x8mv != otherSettings.I8x8mv ||
         this.IPFactor != otherSettings.IPFactor ||
         this.KeyframeInterval != otherSettings.KeyframeInterval ||
         this.Level != otherSettings.Level ||
         this.Lossless != otherSettings.Lossless ||
         this.MaxQuantDelta != otherSettings.MaxQuantDelta ||
         this.MaxQuantizer != otherSettings.MaxQuantizer ||
         this.MERange != otherSettings.MERange ||
         this.METype != otherSettings.METype ||
         this.MinGOPSize != otherSettings.MinGOPSize ||
         this.MinQuantizer != otherSettings.MinQuantizer ||
         this.MixedRefs != otherSettings.MixedRefs ||
         this.NbBframes != otherSettings.NbBframes ||
         this.NbRefFrames != otherSettings.NbRefFrames ||
         this.noiseReduction != otherSettings.noiseReduction ||
         this.P4x4mv != otherSettings.P4x4mv ||
         this.P8x8mv != otherSettings.P8x8mv ||
         this.PBFactor != otherSettings.PBFactor ||
         this.Profile != otherSettings.Profile ||
         this.QPel != otherSettings.QPel ||
         this.QuantCompression != otherSettings.QuantCompression ||
         this.QuantizerMatrix != otherSettings.QuantizerMatrix ||
         this.QuantizerMatrixType != otherSettings.QuantizerMatrixType ||
         this.SCDSensitivity != otherSettings.SCDSensitivity ||
         this.SubPelRefinement != otherSettings.SubPelRefinement ||
         this.TempComplexityBlur != otherSettings.TempComplexityBlur ||
         this.TempQuanBlurCC != otherSettings.TempQuanBlurCC ||
         this.TempQuantBlur != otherSettings.TempQuantBlur ||
         this.Trellis != otherSettings.Trellis ||
         this.Turbo != otherSettings.Turbo ||
         this.V4MV != otherSettings.V4MV ||
         this.VBVBufferSize != otherSettings.VBVBufferSize ||
         this.VBVInitialBuffer != otherSettings.VBVInitialBuffer ||
         this.VBVMaxBitrate != otherSettings.VBVMaxBitrate ||
         this.WeightedBPrediction != otherSettings.WeightedBPrediction ||
         this.X264Trellis != otherSettings.X264Trellis
         )
     {
         return(true);
     }
     else
     {
         return(false);
     }
 }
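A hedged usage sketch: one plausible way a caller could use IsAltered is to decide whether an existing first pass can be reused; the helper name and the scenario are illustrative and not taken from MeGUI.

// Illustration only: a stored first pass stays valid when no relevant option was altered.
static bool CanReuseFirstPass(x264Settings jobSettings, x264Settings settingsOfExistingStats)
{
    // IsAltered deliberately ignores bitrate, credits quantizer, log file, thread count,
    // SAR and zones, so "not altered" means the remaining encoding options are identical.
    return !jobSettings.IsAltered(settingsOfExistingStats);
}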
コード例 #22
0
ファイル: AVCLevels.cs プロジェクト: RoDaniel/featurehouse
        /// <summary>
        /// Verifies a group of x264Settings against an AVC Level 
        /// </summary>
        /// <param name="settings">the x264Settings to test</param>
        /// <param name="level">the level</param>
        /// <param name="bytesInUncompressedFrame">Number of bytes in an uncompressed frame</param>
        /// <returns>   0 if the settings are compliant with the level
        ///             1 if (level > 3 || (level == 3 && NbBframes > 0))
        ///             2 if maxDPB violated
        ///             3 if vbv_maxrate violated
        ///             4 if vbv_bufsize violated</returns>
        public int Verifyx264Settings(x264Settings settings, int level, double bytesInUncompressedFrame)
        {
            if (!this.checkP4x4(level, settings))
                return 1;

            if (!this.checkMaxDPB(level, settings, bytesInUncompressedFrame))
                return 2;

            if (settings.VBVMaxBitrate > this.getMaxBR(level, settings.Profile == 2))
                return 3;

            if (settings.VBVBufferSize > this.getMaxCBP(level, settings.Profile == 2))
                return 4;

            return 0;
        }
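For readability, the numeric result can be mapped to a message. The helper below merely restates the documented return codes; it is an illustration, not part of AVCLevels.

// Illustration only: translate the documented return codes of Verifyx264Settings into text.
static string DescribeVerificationResult(int result)
{
    switch (result)
    {
        case 0:  return "settings are compliant with the selected AVC level";
        case 1:  return "P4x4 partitions are not allowed for this level";
        case 2:  return "decoded picture buffer size (maxDPB) exceeded";
        case 3:  return "vbv-maxrate exceeds the level's maximum bitrate";
        case 4:  return "vbv-bufsize exceeds the level's maximum CPB size";
        default: return "unknown verification result";
    }
}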
コード例 #23
0
ファイル: x264Encoder.cs プロジェクト: huannguyenfit/MeGUI
        public static string genCommandline(string input, string output, Dar? d, int hres, int vres, x264Settings xs)
        {
            StringBuilder sb = new StringBuilder();
            CultureInfo   ci = new CultureInfo("en-us");

            if (xs.EncodingMode == 4 || xs.EncodingMode == 7)
            {
                xs.Turbo = false; // turn off turbo to prevent inconsistent commandline preview
            }
            switch (xs.EncodingMode)
            {
            case 0:     // ABR
                sb.Append("--bitrate " + xs.BitrateQuantizer + " ");
                break;

            case 1:     // CQ
                if (xs.Lossless)
                {
                    sb.Append("--qp 0 ");
                }
                else
                {
                    sb.Append("--qp " + xs.QuantizerCRF.ToString(new CultureInfo("en-us")) + " ");
                }
                break;

            case 2:     // 2 pass first pass
                sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 3:     // 2 pass second pass
            case 4:     // automated twopass
                sb.Append("--pass 2 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 5:     // 3 pass first pass
                sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 6:     // 3 pass 2nd pass
                sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 7:     // 3 pass 3rd pass
            case 8:     // automated threepass, show third pass options
                sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
                break;

            case 9:     // constant quality
                sb.Append("--crf " + xs.QuantizerCRF.ToString(new CultureInfo("en-us")) + " ");
                break;
            } // now add the rest of the x264 encoder options

            // AVC Level
            if (xs.Level != 15) // unrestricted (x264.exe default)
            {
                sb.Append("--level " + AVCLevels.getCLILevelNames()[xs.Level] + " ");
            }
            if (xs.KeyframeInterval != 250) // gop size of 250 is default
            {
                sb.Append("--keyint " + xs.KeyframeInterval + " ");
            }
            if (xs.MinGOPSize != 25)
            {
                sb.Append("--min-keyint " + xs.MinGOPSize + " ");
            }
            if (xs.Turbo)
            {
                xs.NbRefFrames         = 1;
                xs.SubPelRefinement    = 0; // Q-Pel 1 iteration
                xs.METype              = 0; // diamond search
                xs.I4x4mv              = false;
                xs.P4x4mv              = false;
                xs.I8x8mv              = false;
                xs.P8x8mv              = false;
                xs.B8x8mv              = false;
                xs.AdaptiveDCT         = false;
                xs.MixedRefs           = false;
                xs.BRDO                = false;
                xs.X264Trellis         = 0; // disable trellis
                xs.noFastPSkip         = false;
                xs.WeightedBPrediction = false;
                xs.biME                = false;
            }
            if (xs.DeadZoneInter != 21)
            {
                sb.Append("--deadzone-inter " + xs.DeadZoneInter + " ");
            }
            if (xs.DeadZoneIntra != 11)
            {
                sb.Append("--deadzone-intra " + xs.DeadZoneIntra + " ");
            }
            if (xs.NbRefFrames != 1) // 1 ref frame is default
            {
                sb.Append("--ref " + xs.NbRefFrames + " ");
            }
            if (xs.MixedRefs)
            {
                sb.Append("--mixed-refs ");
            }
            if (xs.noFastPSkip)
            {
                sb.Append("--no-fast-pskip ");
            }
            if (xs.NbBframes != 0) // 0 is default value, adaptive and pyramid are conditional on b frames being enabled
            {
                sb.Append("--bframes " + xs.NbBframes + " ");
                if (!xs.AdaptiveBFrames)
                {
                    sb.Append("--no-b-adapt ");
                }
                if (xs.NbBframes > 1 && xs.BFramePyramid) // pyramid needs a minimum of 2 b frames
                {
                    sb.Append("--b-pyramid ");
                }
                if (xs.BRDO)
                {
                    sb.Append("--b-rdo ");
                }
                if (xs.biME)
                {
                    sb.Append("--bime ");
                }
                if (xs.WeightedBPrediction)
                {
                    sb.Append("--weightb ");
                }
                if (xs.BframePredictionMode != 1)
                {
                    sb.Append("--direct ");
                    if (xs.BframePredictionMode == 0)
                    {
                        sb.Append("none ");
                    }
                    else if (xs.BframePredictionMode == 2)
                    {
                        sb.Append("temporal ");
                    }
                    else if (xs.BframePredictionMode == 3)
                    {
                        sb.Append("auto ");
                    }
                }
            }
            if (xs.Deblock)                                      // deblocker active, add options
            {
                if (xs.AlphaDeblock != 0 || xs.BetaDeblock != 0) // 0 is default value
                {
                    sb.Append("--filter " + xs.AlphaDeblock + "," + xs.BetaDeblock + " ");
                }
            }
            else // no deblocking
            {
                sb.Append("--nf ");
            }
            if (!xs.Cabac) // no cabac
            {
                sb.Append("--no-cabac ");
            }
            if (xs.SubPelRefinement + 1 != 5) // non default subpel refinement
            {
                int subq = xs.SubPelRefinement + 1;
                sb.Append("--subme " + subq + " ");
            }
            if (!xs.ChromaME)
            {
                sb.Append("--no-chroma-me ");
            }
            if (xs.X264Trellis > 0)
            {
                sb.Append("--trellis " + xs.X264Trellis + " ");
            }
            // now it's time for the macroblock types
            if (xs.P8x8mv || xs.B8x8mv || xs.I4x4mv || xs.I8x8mv || xs.P4x4mv || xs.AdaptiveDCT)
            {
                sb.Append("--partitions ");
                if (xs.I4x4mv && xs.P4x4mv && xs.I8x8mv && xs.P8x8mv && xs.B8x8mv)
                {
                    sb.Append("all ");
                }
                else
                {
                    if (xs.P8x8mv) // default is checked
                    {
                        sb.Append("p8x8,");
                    }
                    if (xs.B8x8mv) // default is checked
                    {
                        sb.Append("b8x8,");
                    }
                    if (xs.I4x4mv) // default is checked
                    {
                        sb.Append("i4x4,");
                    }
                    if (xs.P4x4mv) // default is unchecked
                    {
                        sb.Append("p4x4,");
                    }
                    if (xs.I8x8mv) // default is checked
                    {
                        sb.Append("i8x8");
                    }
                    if (sb.ToString().EndsWith(","))
                    {
                        sb.Remove(sb.Length - 1, 1);
                    }
                }
                if (xs.AdaptiveDCT) // default is unchecked
                {
                    sb.Append(" --8x8dct ");
                }
                if (!sb.ToString().EndsWith(" "))
                {
                    sb.Append(" ");
                }
            }
            else
            {
                sb.Append("--partitions none ");
            }
            if (xs.EncodingMode != 1)      // doesn't apply to CQ mode
            {
                if (xs.MinQuantizer != 10) // default min quantizer is 10
                {
                    sb.Append("--qpmin " + xs.MinQuantizer + " ");
                }
                if (xs.MaxQuantizer != 51) // 51 is the default max quantizer
                {
                    sb.Append("--qpmax " + xs.MaxQuantizer + " ");
                }
                if (xs.MaxQuantDelta != 4) // 4 is the default value
                {
                    sb.Append("--qpstep " + xs.MaxQuantDelta + " ");
                }
                if (xs.IPFactor != new decimal(1.4)) // 1.4 is the default value
                {
                    sb.Append("--ipratio " + xs.IPFactor.ToString(ci) + " ");
                }
                if (xs.PBFactor != new decimal(1.3)) // 1.3 is the default value here
                {
                    sb.Append("--pbratio " + xs.PBFactor.ToString(ci) + " ");
                }
                if (xs.ChromaQPOffset != new decimal(0.0))
                {
                    sb.Append("--chroma-qp-offset " + xs.ChromaQPOffset.ToString(ci) + " ");
                }
                if (xs.VBVBufferSize > 0)
                {
                    sb.Append("--vbv-bufsize " + xs.VBVBufferSize + " ");
                }
                if (xs.VBVMaxBitrate > 0)
                {
                    sb.Append("--vbv-maxrate " + xs.VBVMaxBitrate + " ");
                }
                if (xs.VBVInitialBuffer != new decimal(0.9))
                {
                    sb.Append("--vbv-init " + xs.VBVInitialBuffer.ToString(ci) + " ");
                }
                if (xs.BitrateVariance != 1)
                {
                    sb.Append("--ratetol " + xs.BitrateVariance.ToString(ci) + " ");
                }
                if (xs.QuantCompression != new decimal(0.6))
                {
                    sb.Append("--qcomp " + xs.QuantCompression.ToString(ci) + " ");
                }
                if (xs.EncodingMode > 1) // applies only to twopass
                {
                    if (xs.TempComplexityBlur != 20)
                    {
                        sb.Append("--cplxblur " + xs.TempComplexityBlur.ToString(ci) + " ");
                    }
                    if (xs.TempQuanBlurCC != new decimal(0.5))
                    {
                        sb.Append("--qblur " + xs.TempQuanBlurCC.ToString(ci) + " ");
                    }
                }
            }
            if (xs.SCDSensitivity != new decimal(40))
            {
                sb.Append("--scenecut " + xs.SCDSensitivity.ToString(ci) + " ");
            }
            if (xs.BframeBias != new decimal(0))
            {
                sb.Append("--b-bias " + xs.BframeBias.ToString(ci) + " ");
            }
            if (xs.METype + 1 != 2)
            {
                sb.Append("--me ");
                if (xs.METype + 1 == 1)
                {
                    sb.Append("dia ");
                }
                if (xs.METype + 1 == 3)
                {
                    sb.Append("umh ");
                }
                if (xs.METype + 1 == 4)
                {
                    sb.Append("esa ");
                }
                if (xs.METype + 1 == 5)
                {
                    sb.Append("tesa ");
                }
            }
            if (xs.MERange != 16)
            {
                sb.Append("--merange " + xs.MERange + " ");
            }
            if (xs.NbThreads > 1)
            {
                sb.Append("--threads " + xs.NbThreads + " ");
            }
            if (xs.NbThreads == 0)
            {
                sb.Append("--threads auto ");
            }
            sb.Append("--thread-input ");
            if (xs.Zones != null && xs.Zones.Length > 0 && xs.CreditsQuantizer >= new decimal(1))
            {
                sb.Append("--zones ");
                foreach (Zone zone in xs.Zones)
                {
                    sb.Append(zone.startFrame + "," + zone.endFrame + ",");
                    if (zone.mode == ZONEMODE.QUANTIZER)
                    {
                        sb.Append("q=");
                        sb.Append(zone.modifier + "/");
                    }
                    if (zone.mode == ZONEMODE.WEIGHT)
                    {
                        sb.Append("b=");
                        double mod = (double)zone.modifier / 100.0;
                        sb.Append(mod.ToString(ci) + "/");
                    }
                }
                sb.Remove(sb.Length - 1, 1);
                sb.Append(" ");
            }
            if (d.HasValue)
            {
                Sar s = d.Value.ToSar(hres, vres);
                sb.Append("--sar " + s.X + ":" + s.Y + " ");
            }
            if (xs.QuantizerMatrixType > 0) // custom matrices enabled
            {
                if (xs.QuantizerMatrixType == 1)
                {
                    sb.Append("--cqm \"jvt\" ");
                }
                if (xs.QuantizerMatrixType == 2)
                {
                    sb.Append("--cqmfile \"" + xs.QuantizerMatrix + "\" ");
                }
            }
            sb.Append("--progress "); // ensure that the progress is shown
            if (xs.NoDCTDecimate)
            {
                sb.Append("--no-dct-decimate ");
            }
            if (!xs.PSNRCalculation)
            {
                sb.Append("--no-psnr ");
            }
            if (!xs.SSIMCalculation)
            {
                sb.Append("--no-ssim ");
            }
            if (xs.EncodeInterlaced)
            {
                sb.Append("--interlaced ");
            }
            if (xs.NoiseReduction > 0)
            {
                sb.Append("--nr " + xs.NoiseReduction + " ");
            }
            // add the output portion of the x264 commandline
            if (xs.EncodingMode == 2 || xs.EncodingMode == 5)
            {
                sb.Append("--output NUL ");
            }
            else
            {
                sb.Append("--output " + "\"" + output + "\" ");
            }
            sb.Append("\"" + input + "\" ");
            if (!xs.CustomEncoderOptions.Equals("")) // add custom encoder options
            {
                sb.Append(xs.CustomEncoderOptions);
            }
            return(sb.ToString());
        }
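The --zones branch above emits each zone as "start,end," followed by either "q=<quantizer>/" or "b=<weight>/", always appending a trailing separator and trimming it once the loop ends. The following self-contained sketch isolates that formatting pattern; ZoneSpec, ZoneKind and BuildZones are illustrative stand-ins, not MeGUI's Zone/ZONEMODE types.

using System;
using System.Globalization;
using System.Text;

// Hypothetical stand-ins for MeGUI's Zone / ZONEMODE types, for illustration only.
enum ZoneKind { Quantizer, Weight }
struct ZoneSpec { public int Start, End; public ZoneKind Kind; public decimal Modifier; }

static class ZoneStringDemo
{
    static string BuildZones(ZoneSpec[] zones)
    {
        CultureInfo ci = new CultureInfo("en-us");
        StringBuilder sb = new StringBuilder("--zones ");
        foreach (ZoneSpec z in zones)
        {
            sb.Append(z.Start + "," + z.End + ",");
            if (z.Kind == ZoneKind.Quantizer)
                sb.Append("q=" + z.Modifier.ToString(ci) + "/");
            else
                sb.Append("b=" + ((double)z.Modifier / 100.0).ToString(ci) + "/");
        }
        sb.Remove(sb.Length - 1, 1); // strip the trailing '/'
        return sb.ToString();
    }

    static void Main()
    {
        ZoneSpec[] zones =
        {
            new ZoneSpec { Start = 0,   End = 499, Kind = ZoneKind.Quantizer, Modifier = 20 },
            new ZoneSpec { Start = 500, End = 999, Kind = ZoneKind.Weight,    Modifier = 50 }
        };
        Console.WriteLine(BuildZones(zones)); // --zones 0,499,q=20/500,999,b=0.5
    }
}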
コード例 #24
ファイル: OneClickWindow.cs プロジェクト: huannguyenfit/MeGUI
        /// <summary>
        /// opens a dgindex script
        /// if the file can be properly opened, auto-cropping is performed; then, depending on the AR settings,
        /// the proper resolution for automatic resizing is calculated, taking the derived cropping values into account,
        /// and finally the avisynth script is written and its name returned
        /// </summary>
        /// <param name="path">dgindex script</param>
        /// <param name="aspectRatio">aspect ratio selection to be used</param>
        /// <param name="customDAR">custom display aspect ratio for this source</param>
        /// <param name="horizontalResolution">desired horizontal resolution of the output</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="sarX">pixel aspect ratio X</param>
        /// <param name="sarY">pixel aspect ratio Y</param>
        /// <param name="height">the final height of the video</param>
        /// <param name="signalAR">whether or not ar signalling is to be used for the output
        /// (depending on this parameter, resizing changes to match the source AR)</param>
        /// <returns>the name of the AviSynth script created, empty if there was an error</returns>
        private string openVideo(string path, Dar?AR, int horizontalResolution,
                                 bool signalAR, LogItem log, AviSynthSettings avsSettings, bool autoDeint,
                                 VideoCodecSettings settings, out Dar?dar)
        {
            dar = null;
            IMediaFile   d2v    = new d2vFile(path);
            IVideoReader reader = d2v.GetVideoReader();

            if (reader.FrameCount < 1)
            {
                log.Error("DGDecode reported 0 frames in this file. This is a fatal error. Please recreate the DGIndex project");
                return("");
            }

            //Autocrop
            CropValues final = Autocrop.autocrop(reader);

            if (signalAR)
            {
                if (avsSettings.Mod16Method == mod16Method.overcrop)
                {
                    ScriptServer.overcrop(ref final);
                }
                else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
                {
                    ScriptServer.cropMod4Horizontal(ref final);
                }
                else if (avsSettings.Mod16Method == mod16Method.undercrop)
                {
                    ScriptServer.undercrop(ref final);
                }
            }

            bool error = (final.left == -1);

            if (!error)
            {
                log.LogValue("Autocrop values", final);
            }
            else
            {
                log.Error("Autocrop failed, aborting now");
                return("");
            }

            decimal customDAR;

            log.LogValue("Auto-detect aspect ratio now", AR == null);
            //Check if AR needs to be autodetected now
            if (AR == null) // it does
            {
                customDAR = d2v.Info.DAR.ar;
                if (customDAR > 0)
                {
                    log.LogValue("Aspect ratio", customDAR);
                }
                else
                {
                    customDAR = Dar.ITU16x9PAL.ar;
                    log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value.ar;
            }

            // Minimise upsizing
            int sourceHorizontalResolution = (int)d2v.Info.Width - final.right - final.left;

            if (horizontalResolution > sourceHorizontalResolution)
            {
                if (avsSettings.Mod16Method == mod16Method.resize)
                {
                    while (horizontalResolution > sourceHorizontalResolution + 16)
                    {
                        horizontalResolution -= 16;
                    }
                }
                else
                {
                    horizontalResolution = sourceHorizontalResolution;
                }
            }

            //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
            int scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR,
                                                                        final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);

            log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);

            if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
            {
                x264Settings xs = (x264Settings)settings;
                if (xs.Level != 15)
                {
                    AVCLevels al = new AVCLevels();
                    log.LogValue("AVC level", al.getLevels()[xs.Level]);

                    int compliantLevel = 15;
                    while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
                    { // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                        string levelName = al.getLevels()[xs.Level];
                        horizontalResolution    -= 16;
                        scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR,
                                                                                final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                    }
                    log.LogValue("Resolution adjusted for AVC Level", horizontalResolution + "x" + scriptVerticalResolution);
                }
            }

            //Generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v,
                                                  false, false, false, 0);

            log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                string         d2vPath = path;
                SourceDetector sd      = new SourceDetector(inputLine, d2vPath, false,
                                                            mainForm.Settings.SourceDetectorSettings,
                                                            new UpdateSourceDetectionStatus(analyseUpdate),
                                                            new FinishedAnalysis(finishedAnalysis));
                finished = false;
                sd.analyse();
                waitTillAnalyseFinished();
                deinterlaceLines = filters[0].Script;
                log.LogValue("Deinterlacing used", deinterlaceLines);
            }

            inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0);

            cropLine     = ScriptServer.GetCropLine(true, final);
            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
            resizeLine   = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }

            log.LogValue("Generated Avisynth script", newScript);
            try
            {
                StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"));
                sw.Write(newScript);
                sw.Close();
            }
            catch (IOException i)
            {
                log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }
            return(Path.ChangeExtension(path, ".avs"));
        }
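The "minimise upsizing" step above never requests more than one 16-pixel step above the cropped source width: with mod16Method.resize it steps the requested width down in multiples of 16, otherwise it snaps straight to the source width. A standalone restatement of that rule follows; LimitUpsizing is an illustrative helper name, not part of MeGUI.

using System;

static class UpsizeDemo
{
    static int LimitUpsizing(int requestedWidth, int sourceWidth, bool mod16Resize)
    {
        if (requestedWidth <= sourceWidth)
            return requestedWidth;            // no upsizing requested
        if (!mod16Resize)
            return sourceWidth;               // snap straight to the source width
        while (requestedWidth > sourceWidth + 16)
            requestedWidth -= 16;             // step down one macroblock at a time
        return requestedWidth;
    }

    static void Main()
    {
        Console.WriteLine(LimitUpsizing(1280, 716, true));  // 720
        Console.WriteLine(LimitUpsizing(1280, 716, false)); // 716
    }
}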
コード例 #25
ファイル: AVCLevels.cs プロジェクト: RoDaniel/featurehouse
        /// <summary>
 /// Checks an x264Settings instance and modifies it if needed to fit within the level constraints.
        /// </summary>
        /// <param name="level">the level to enforce</param>
        /// <param name="inputSettings">the collection of x264Settings to check</param>
        /// <param name="frameSize">the size of the decoded video frame in bytes</param>
        /// <returns>A compliant set of x264Settings</returns>
        public x264Settings EnforceSettings(int level, x264Settings inputSettings, double frameSize, out AVCLevelEnforcementReturn enforcement)
        {
            x264Settings enforcedSettings = (x264Settings) inputSettings.Clone();
            enforcement = new AVCLevelEnforcementReturn();
            enforcement.Altered = false;
            enforcement.EnableP4x4mv = true;
            enforcement.EnableVBVBufferSize = true;
            enforcement.EnableVBVMaxRate = true;
            enforcement.Panic = false;
            enforcement.PanicString = "";

            if (!checkP4x4(level, inputSettings))
            {
                enforcement.Altered = true;
                enforcedSettings.P4x4mv = false;
            }
            if (checkP4x4Enabled(level, inputSettings))
                enforcement.EnableP4x4mv = true;
            else
                enforcement.EnableP4x4mv = false;

            // step through various options to enforce the max decoded picture buffer size
            while (!this.checkMaxDPB(level,enforcedSettings, frameSize))
            {
                if (enforcedSettings.NbRefFrames > 1)
                {
                    enforcement.Altered = true;
                    enforcedSettings.NbRefFrames -= 1; // try reducing the number of reference frames
                }
                else
                {
                    enforcement.Panic = true;
                    enforcement.PanicString = "Can't force settings to conform to level (the frame size is too large)";
                    // reset output settings to original and set level to unrestrained
                    enforcedSettings = (x264Settings)inputSettings.Clone();
                    enforcedSettings.Level = 15;
                    return enforcedSettings;
                }
            }

            // Disallow independent specification of MaxBitrate and MaxBufferSize unless Unrestrained
            if (level < 15)
            {
                enforcement.EnableVBVMaxRate = false;
                enforcedSettings.VBVMaxBitrate = -1;
                enforcement.EnableVBVBufferSize = false;
                enforcedSettings.VBVBufferSize = -1;
            }
            else
            {
                enforcement.EnableVBVMaxRate = true;
                enforcement.EnableVBVBufferSize = true;
            }

            return enforcedSettings;
        }
コード例 #26
ファイル: AVCLevels.cs プロジェクト: RoDaniel/featurehouse
 private double pictureBufferSize(x264Settings settings, double bytesInUncompressedFrame)
 {
     double decodedPictureBufferSizeTestValue = 0;
     if (settings != null)
         decodedPictureBufferSizeTestValue = bytesInUncompressedFrame * Math.Min(16, settings.NbRefFrames);
     return decodedPictureBufferSizeTestValue;
 }
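pictureBufferSize is simply the uncompressed frame size multiplied by min(16, reference frames); the frame size itself is supplied by the caller. The sketch below assumes the common 8-bit 4:2:0 estimate of 1.5 bytes per pixel (an assumption of this example, not something stated in the listing) and compares the demand against a hypothetical DPB budget, which is what checkMaxDPB does with the real per-level limits.

using System;

static class DpbDemo
{
    // Same formula as pictureBufferSize above.
    static double PictureBufferSize(double bytesPerFrame, int refFrames)
        => bytesPerFrame * Math.Min(16, refFrames);

    static void Main()
    {
        double frameBytes = 1280 * 720 * 1.5;             // assumed 4:2:0 frame: 1,382,400 bytes
        double demand = PictureBufferSize(frameBytes, 5); // 6,912,000 bytes for 5 reference frames

        double maxDpbBytes = 9437184;                     // hypothetical level budget, stands in for getMaxDPB(level)
        Console.WriteLine(demand <= maxDpbBytes
            ? "settings fit the DPB budget"
            : "too many reference frames for this level");
    }
}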
コード例 #27
ファイル: x264Settings.cs プロジェクト: RoDaniel/featurehouse
 /// <summary>
 ///  Handles assessment of whether the encoding options vary between two x264Settings instances
 /// The following are excluded from the comparison:
 /// BitrateQuantizer
 /// CreditsQuantizer
 /// Logfile
 /// NbThreads
 /// SARX
 /// SARY
 /// Zones
 /// </summary>
 /// <param name="otherSettings"></param>
 /// <returns>true if the settings differ</returns>
 public bool IsAltered(x264Settings otherSettings)
 {
     if (
         this.NewAdaptiveBFrames != otherSettings.NewAdaptiveBFrames ||
         this.AdaptiveDCT != otherSettings.AdaptiveDCT ||
         this.AlphaDeblock != otherSettings.AlphaDeblock ||
         this.NoFastPSkip != otherSettings.NoFastPSkip ||
         this.B8x8mv != otherSettings.B8x8mv ||
         this.BetaDeblock != otherSettings.BetaDeblock ||
         this.BframeBias != otherSettings.BframeBias ||
         this.BframePredictionMode != otherSettings.BframePredictionMode ||
         this.x264BFramePyramid != otherSettings.x264BFramePyramid ||
         this.BitrateVariance != otherSettings.BitrateVariance ||
         this.PsyRDO != otherSettings.PsyRDO ||
         this.PsyTrellis != otherSettings.PsyTrellis ||
         this.Cabac != otherSettings.Cabac ||
         this.ChromaME != otherSettings.ChromaME ||
         this.ChromaQPOffset != otherSettings.ChromaQPOffset ||
         this.CustomEncoderOptions != otherSettings.CustomEncoderOptions ||
         this.Deblock != otherSettings.Deblock ||
         this.EncodingMode != otherSettings.EncodingMode ||
         this.I4x4mv != otherSettings.I4x4mv ||
         this.I8x8mv != otherSettings.I8x8mv ||
         this.IPFactor != otherSettings.IPFactor ||
         this.KeyframeInterval != otherSettings.KeyframeInterval ||
         this.Level != otherSettings.Level ||
         this.MaxQuantDelta != otherSettings.MaxQuantDelta ||
         this.MaxQuantizer != otherSettings.MaxQuantizer ||
         this.MERange != otherSettings.MERange ||
         this.METype != otherSettings.METype ||
         this.MinGOPSize != otherSettings.MinGOPSize ||
         this.MinQuantizer != otherSettings.MinQuantizer ||
         this.NoMixedRefs != otherSettings.NoMixedRefs ||
         this.NbBframes != otherSettings.NbBframes ||
         this.NbRefFrames != otherSettings.NbRefFrames ||
         this.noiseReduction != otherSettings.noiseReduction ||
         this.P4x4mv != otherSettings.P4x4mv ||
         this.P8x8mv != otherSettings.P8x8mv ||
         this.PBFactor != otherSettings.PBFactor ||
         this.Profile != otherSettings.Profile ||
         this.QPel != otherSettings.QPel ||
         this.QuantCompression != otherSettings.QuantCompression ||
         this.QuantizerMatrix != otherSettings.QuantizerMatrix ||
         this.QuantizerMatrixType != otherSettings.QuantizerMatrixType ||
         this.SCDSensitivity != otherSettings.SCDSensitivity ||
         this.SubPelRefinement != otherSettings.SubPelRefinement ||
         this.TempComplexityBlur != otherSettings.TempComplexityBlur ||
         this.TempQuanBlurCC != otherSettings.TempQuanBlurCC ||
         this.TempQuantBlur != otherSettings.TempQuantBlur ||
         this.Trellis != otherSettings.Trellis ||
         this.x264SlowFirstpass != otherSettings.x264SlowFirstpass ||
         this.V4MV != otherSettings.V4MV ||
         this.VBVBufferSize != otherSettings.VBVBufferSize ||
         this.VBVInitialBuffer != otherSettings.VBVInitialBuffer ||
         this.VBVMaxBitrate != otherSettings.VBVMaxBitrate ||
         this.WeightedBPrediction != otherSettings.WeightedBPrediction ||
         this.WeightedPPrediction != otherSettings.WeightedPPrediction ||
         this.X264Trellis != otherSettings.X264Trellis ||
         this.AQmode != otherSettings.AQmode ||
         this.AQstrength != otherSettings.AQstrength ||
         this.UseQPFile != otherSettings.UseQPFile ||
         this.fullRange != otherSettings.fullRange ||
         this.MacroBlockOptions != otherSettings.MacroBlockOptions ||
         this.x264PresetLevel != otherSettings.x264PresetLevel ||
         this.x264Tuning != otherSettings.x264Tuning ||
         this.x264AdvancedSettings != otherSettings.x264AdvancedSettings ||
         this.Lookahead != otherSettings.Lookahead ||
         this.NoMBTree != otherSettings.NoMBTree ||
         this.ThreadInput != otherSettings.ThreadInput ||
         this.NoPsy != otherSettings.NoPsy ||
         this.Scenecut != otherSettings.Scenecut ||
         this.SlicesNb != otherSettings.SlicesNb ||
         this.X264Nalhrd != otherSettings.X264Nalhrd ||
         this.X264Aud != otherSettings.X264Aud ||
         this.MaxSliceSyzeBytes != otherSettings.MaxSliceSyzeBytes ||
         this.MaxSliceSyzeMBs != otherSettings.MaxSliceSyzeMBs
         )
         return true;
     else
         return false;
 }
コード例 #28
ファイル: JobUtil.cs プロジェクト: RoDaniel/featurehouse
 /// <summary>
 /// validates a source against a given AVC level taking into account the rest of the configuration
 /// </summary>
 /// <param name="source">the source to be validated</param>
 /// <param name="level">the level that this source should correspond to</param>
 /// <param name="bframeType">type of b-frames used. 0 = none, 1 = b-frames without pyramid, 
 /// 2 = b-frames with pyramid order</param>
 /// <param name="nbReferences">the number of reference frames used</param>
 /// <param name="compliantLevel">the first avc level that can be used to encode this source</param>
 /// <returns>whether or not the current level is okay, if false and compliantLevel is -1, 
 /// the source could not be read</returns>
 public bool validateAVCLevel(string source, x264Settings settings, out int compliantLevel)
 {
     int hRes, vRes;
     Dar d;
     ulong nbFrames;
     double framerate;
     compliantLevel = -1;
     if (getAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out d, source))
     {
         return this.al.validateAVCLevel(hRes, vRes, framerate, settings, out compliantLevel);
     }
     else
         return false;
 }
コード例 #29
 /// <summary>
 ///  Handles assessment of whether the encoding options vary between two x264Settings instances
 /// The following are excluded from the comparison:
 /// BitrateQuantizer
 /// CreditsQuantizer
 /// Logfile
 /// NbThreads
 /// SARX
 /// SARY
 /// Zones
 /// </summary>
 /// <param name="otherSettings"></param>
 /// <returns>true if the settings differ</returns>
 public bool IsAltered(x264Settings otherSettings)
 {
     if (
         this.NewAdaptiveBFrames != otherSettings.NewAdaptiveBFrames ||
         this.AdaptiveDCT != otherSettings.AdaptiveDCT ||
         this.AlphaDeblock != otherSettings.AlphaDeblock ||
         this.NoFastPSkip != otherSettings.NoFastPSkip ||
         this.B8x8mv != otherSettings.B8x8mv ||
         this.BetaDeblock != otherSettings.BetaDeblock ||
         this.BframeBias != otherSettings.BframeBias ||
         this.BframePredictionMode != otherSettings.BframePredictionMode ||
         this.x264BFramePyramid != otherSettings.x264BFramePyramid ||
         this.x264GOPCalculation != otherSettings.x264GOPCalculation ||
         this.BitrateVariance != otherSettings.BitrateVariance ||
         this.PsyRDO != otherSettings.PsyRDO ||
         this.PsyTrellis != otherSettings.PsyTrellis ||
         this.Cabac != otherSettings.Cabac ||
         this.ChromaME != otherSettings.ChromaME ||
         this.ChromaQPOffset != otherSettings.ChromaQPOffset ||
         this.CustomEncoderOptions != otherSettings.CustomEncoderOptions ||
         this.Deblock != otherSettings.Deblock ||
         this.EncodingMode != otherSettings.EncodingMode ||
         this.I4x4mv != otherSettings.I4x4mv ||
         this.I8x8mv != otherSettings.I8x8mv ||
         this.IPFactor != otherSettings.IPFactor ||
         this.KeyframeInterval != otherSettings.KeyframeInterval ||
         this.AVCLevel != otherSettings.AVCLevel ||
         this.MaxQuantDelta != otherSettings.MaxQuantDelta ||
         this.MaxQuantizer != otherSettings.MaxQuantizer ||
         this.MERange != otherSettings.MERange ||
         this.METype != otherSettings.METype ||
         this.MinGOPSize != otherSettings.MinGOPSize ||
         this.MinQuantizer != otherSettings.MinQuantizer ||
         this.NoMixedRefs != otherSettings.NoMixedRefs ||
         this.NbBframes != otherSettings.NbBframes ||
         this.NbRefFrames != otherSettings.NbRefFrames ||
         this.noiseReduction != otherSettings.noiseReduction ||
         this.P4x4mv != otherSettings.P4x4mv ||
         this.P8x8mv != otherSettings.P8x8mv ||
         this.PBFactor != otherSettings.PBFactor ||
         this.Profile != otherSettings.Profile ||
         this.QPel != otherSettings.QPel ||
         this.QuantCompression != otherSettings.QuantCompression ||
         this.QuantizerMatrix != otherSettings.QuantizerMatrix ||
         this.QuantizerMatrixType != otherSettings.QuantizerMatrixType ||
         this.SCDSensitivity != otherSettings.SCDSensitivity ||
         this.SubPelRefinement != otherSettings.SubPelRefinement ||
         this.TempComplexityBlur != otherSettings.TempComplexityBlur ||
         this.TempQuanBlurCC != otherSettings.TempQuanBlurCC ||
         this.TempQuantBlur != otherSettings.TempQuantBlur ||
         this.Trellis != otherSettings.Trellis ||
         this.x264SlowFirstpass != otherSettings.x264SlowFirstpass ||
         this.V4MV != otherSettings.V4MV ||
         this.VBVBufferSize != otherSettings.VBVBufferSize ||
         this.VBVInitialBuffer != otherSettings.VBVInitialBuffer ||
         this.VBVMaxBitrate != otherSettings.VBVMaxBitrate ||
         this.WeightedBPrediction != otherSettings.WeightedBPrediction ||
         this.WeightedPPrediction != otherSettings.WeightedPPrediction ||
         this.X264Trellis != otherSettings.X264Trellis ||
         this.AQmode != otherSettings.AQmode ||
         this.AQstrength != otherSettings.AQstrength ||
         this.UseQPFile != otherSettings.UseQPFile ||
         this.QPFile != otherSettings.QPFile ||
         this.FullRange != otherSettings.FullRange ||
         this.Range != otherSettings.Range ||
         this.MacroBlockOptions != otherSettings.MacroBlockOptions ||
         this.x264PresetLevel != otherSettings.x264PresetLevel ||
         this.x264PsyTuning != otherSettings.x264PsyTuning ||
         this.x264AdvancedSettings != otherSettings.x264AdvancedSettings ||
         this.Lookahead != otherSettings.Lookahead ||
         this.NoMBTree != otherSettings.NoMBTree ||
         this.ThreadInput != otherSettings.ThreadInput ||
         this.NoPsy != otherSettings.NoPsy ||
         this.Scenecut != otherSettings.Scenecut ||
         this.SlicesNb != otherSettings.SlicesNb ||
         this.Nalhrd != otherSettings.Nalhrd ||
         this.X264Aud != otherSettings.X264Aud ||
         this.OpenGop != otherSettings.OpenGop ||
         this.X264PullDown != otherSettings.X264PullDown ||
         this.SampleAR != otherSettings.SampleAR ||
         this.ColorMatrix != otherSettings.ColorMatrix ||
         this.Transfer != otherSettings.Transfer ||
         this.ColorPrim != otherSettings.ColorPrim ||
         this.PicStruct != otherSettings.PicStruct ||
         this.FakeInterlaced != otherSettings.FakeInterlaced ||
         this.NonDeterministic != otherSettings.NonDeterministic ||
         this.MaxSliceSyzeBytes != otherSettings.MaxSliceSyzeBytes ||
         this.InterlacedMode != otherSettings.InterlacedMode ||
         this.TargetDevice.ID != otherSettings.TargetDevice.ID ||
         this.BlurayCompat != otherSettings.BlurayCompat ||
         this.MaxSliceSyzeMBs != otherSettings.MaxSliceSyzeMBs ||
         this.tuneFastDecode != otherSettings.tuneFastDecode ||
         this.tuneZeroLatency != otherSettings.tuneZeroLatency ||
         this.X26410Bits != otherSettings.X26410Bits
         )
     {
         return(true);
     }
     else
     {
         return(false);
     }
 }
コード例 #30
ファイル: AVCLevels.cs プロジェクト: RoDaniel/featurehouse
 private bool checkMaxDPB(int level, x264Settings settings, double bytesInUncompressedFrame)
 {
     if (pictureBufferSize(settings, bytesInUncompressedFrame) > this.getMaxDPB(level))
         return false;
     else
         return true;
 }
コード例 #31
ファイル: AVCLevels.cs プロジェクト: RoDaniel/featurehouse
        /// <summary>
        /// validates a source against a given AVC level, taking into account the source properties and the x264 settings
        /// </summary>
        /// <param name="hRes">horizontal resolution of the source</param>
        /// <param name="vRes">vertical resolution of the source</param>
        /// <param name="framerate">framerate of the source</param>
        /// <param name="settings">the codec config to test</param>
        /// <param name="compliantLevel">the first avc level that can be used to encode this source</param>
        /// <returns>whether or not the current level is okay, if false and compliantLevel is -1, 
        /// the source could not be read</returns>
        public bool validateAVCLevel( int hRes, int vRes, double framerate, x264Settings settings, out int compliantLevel)
        {
            settings = (x264Settings)settings.Clone(); //Otherwise this sets it to the lowest compliant level anyway.
            const int unrestricted = 15; // maybe this should be set as a global constant
            compliantLevel = unrestricted;
            if (settings.Level == unrestricted) // 15 = unrestricted
                return true;

            int FrameSize = (int)maxFS(hRes, vRes);
            int MBPS = maxBPS(hRes, vRes, framerate);
            int hBlocks = macroblocks(hRes);
            int vBlocks = macroblocks(vRes);
            double bufferSize = pictureBufferSize(settings, bytesPerFrame(hRes, vRes));
            int allowableBPS = this.getMaxMBPS(settings.Level);
            int allowableFS = this.getMaxFS(settings.Level);
            double dimensionRestriction = Math.Ceiling(Math.Sqrt((double)(allowableFS)*8));
            double allowableDPB = this.getMaxDPB(settings.Level);

            if (allowableBPS >= MBPS && allowableFS >= FrameSize && allowableDPB >= bufferSize
                && dimensionRestriction >= hBlocks && dimensionRestriction >= vBlocks)
                return true;
            else
            {
                while (settings.Level < unrestricted && (allowableBPS < MBPS || allowableFS < FrameSize ||
                    allowableDPB < bufferSize || dimensionRestriction < hBlocks || dimensionRestriction < vBlocks))
                {
                    settings.Level = settings.Level + 1;
                    allowableBPS = this.getMaxMBPS(settings.Level);
                    allowableFS = this.getMaxFS(settings.Level);
                    dimensionRestriction = Math.Ceiling(Math.Sqrt((double)(allowableFS)*8));
                    allowableDPB = this.getMaxDPB(settings.Level);
                }
                compliantLevel = settings.Level;
                return false;
            }
        }
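The checks in validateAVCLevel reduce to macroblock arithmetic: the frame size in 16x16 macroblocks, the macroblock throughput per second, and a per-dimension cap of ceil(sqrt(8 * MaxFS)) macroblocks. The standalone sketch below assumes that maxFS, maxBPS and macroblocks count 16x16 blocks in the usual way; the MaxFS figure plugged in is an illustrative placeholder, not a value taken from the AVCLevels tables.

using System;

static class LevelMathDemo
{
    static int Macroblocks(int pixels) => (int)Math.Ceiling(pixels / 16.0);

    static void Main()
    {
        int w = 1280, h = 720;
        double fps = 25.0;

        int hBlocks = Macroblocks(w);             // 80
        int vBlocks = Macroblocks(h);             // 45
        int frameSizeMB = hBlocks * vBlocks;      // 3600 macroblocks per frame
        int mbps = (int)(frameSizeMB * fps);      // 90000 macroblocks per second

        int maxFS = 3600;                         // placeholder, stands in for getMaxFS(level)
        double dimensionCap = Math.Ceiling(Math.Sqrt(maxFS * 8.0)); // 170 macroblocks per dimension

        bool fits = frameSizeMB <= maxFS && hBlocks <= dimensionCap && vBlocks <= dimensionCap;
        Console.WriteLine("frame=" + frameSizeMB + " MB, throughput=" + mbps + " MB/s, fits=" + fits);
    }
}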
コード例 #32
ファイル: AVCLevels.cs プロジェクト: huannguyenfit/MeGUI
        /// <summary>
        /// Checks an x264Settings instance and modifies it if needed to fit within the level constraints.
        /// </summary>
        /// <param name="level">the level to enforce</param>
        /// <param name="inputSettings">the collection of x264Settings to check</param>
        /// <param name="frameSize">the size of the decoded video frame in bytes</param>
        /// <returns>A compliant set of x264Settings</returns>
        public x264Settings EnforceSettings(int level, x264Settings inputSettings, double frameSize, out AVCLevelEnforcementReturn enforcement)
        {
            x264Settings enforcedSettings = (x264Settings)inputSettings.Clone();

            enforcement                     = new AVCLevelEnforcementReturn();
            enforcement.Altered             = false;
            enforcement.EnableP4x4mv        = true;
            enforcement.EnableVBVBufferSize = true;
            enforcement.EnableVBVMaxRate    = true;
            enforcement.Panic               = false;
            enforcement.PanicString         = "";

            if (!checkP4x4(level, inputSettings))
            {
                enforcement.Altered     = true;
                enforcedSettings.P4x4mv = false;
            }
            if (checkP4x4Enabled(level, inputSettings))
            {
                enforcement.EnableP4x4mv = true;
            }
            else
            {
                enforcement.EnableP4x4mv = false;
            }

            // step through various options to enforce the max decoded picture buffer size
            while (!this.checkMaxDPB(level, enforcedSettings, frameSize))
            {
                if (enforcedSettings.BFramePyramid)
                {
                    enforcement.Altered            = true;
                    enforcedSettings.BFramePyramid = false; // try turning off pyramid first
                }
                else
                if (enforcedSettings.NbRefFrames > 1)
                {
                    enforcement.Altered           = true;
                    enforcedSettings.NbRefFrames -= 1;     // try reducing the number of reference frames
                }
                else
                if (enforcedSettings.NbBframes > 0)
                {
                    enforcement.Altered        = true;
                    enforcedSettings.NbBframes = 0;         // try turning off B frames
                }
                else
                {
                    enforcement.Panic       = true;
                    enforcement.PanicString = "Can't force settings to conform to level (the frame size is too large)";
                    // reset output settings to original and set level to unrestrained
                    enforcedSettings       = (x264Settings)inputSettings.Clone();
                    enforcedSettings.Level = 15;
                    return(enforcedSettings);
                }
            }

            // Disallow independent specification of MaxBitrate and MaxBufferSize unless Unrestrained
            if (level < 15)
            {
                enforcement.EnableVBVMaxRate    = false;
                enforcedSettings.VBVMaxBitrate  = -1;
                enforcement.EnableVBVBufferSize = false;
                enforcedSettings.VBVBufferSize  = -1;
            }
            else
            {
                enforcement.EnableVBVMaxRate    = true;
                enforcement.EnableVBVBufferSize = true;
            }

            return(enforcedSettings);
        }
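This version of EnforceSettings reduces the decoded picture buffer demand in a fixed order: drop the B-frame pyramid first, then shed reference frames one at a time, then drop B-frames entirely, and only panic if none of that helps. Note that pictureBufferSize, as shown earlier, depends only on the number of reference frames, so with that formula only the second step changes the measured demand. A standalone sketch of that fallback ladder follows, with a simplified budget check standing in for checkMaxDPB; all names and numbers here are illustrative.

using System;

static class EnforcementDemo
{
    // Simplified stand-in for checkMaxDPB: demand = frame bytes * min(16, refs).
    static bool FitsBudget(int refFrames, double frameBytes, double budget)
        => frameBytes * Math.Min(16, refFrames) <= budget;

    static void Main()
    {
        double frameBytes = 1920 * 1080 * 1.5;   // assumed 8-bit 4:2:0 frame size
        double budget = 4 * frameBytes;          // hypothetical DPB budget: room for 4 frames

        bool pyramid = true;
        int refs = 8, bframes = 3;
        bool altered = false, panic = false;

        while (!FitsBudget(refs, frameBytes, budget))
        {
            if (pyramid)          { pyramid = false; altered = true; } // 1st: drop the pyramid
            else if (refs > 1)    { refs--;          altered = true; } // 2nd: shed a reference frame
            else if (bframes > 0) { bframes = 0;     altered = true; } // 3rd: drop B-frames
            else                  { panic = true; break; }             // give up: level cannot be met
        }

        Console.WriteLine("pyramid=" + pyramid + " refs=" + refs + " bframes=" + bframes +
                          " altered=" + altered + " panic=" + panic);
        // prints: pyramid=False refs=4 bframes=3 altered=True panic=False
    }
}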
コード例 #33
ファイル: AVCLevels.cs プロジェクト: RoDaniel/featurehouse
 private bool checkP4x4(int level, x264Settings settings)
 {
     if (!checkP4x4Enabled(level, settings))
         if (settings.P4x4mv)
             return false;
     return true;
 }
コード例 #34
        /// <summary>
        /// creates the AVS Script file
        /// if the file can be properly opened, auto-cropping is performed; then, depending on the AR settings,
        /// the proper resolution for automatic resizing is calculated, taking the derived cropping values into account,
        /// and finally the avisynth script is written and its name returned
        /// </summary>
        /// <param name="path">dgindex script</param>
        /// <param name="aspectRatio">aspect ratio selection to be used</param>
        /// <param name="customDAR">custom display aspect ratio for this source</param>
        /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="sarX">pixel aspect ratio X</param>
        /// <param name="sarY">pixel aspect ratio Y</param>
        /// <param name="height">the final height of the video</param>
        /// <param name="signalAR">whether or not ar signalling is to be used for the output
        /// (depending on this parameter, resizing changes to match the source AR)</param>
        /// <param name="autoCrop">whether or not autoCrop is used for the input</param>
        /// <returns>the name of the AviSynth script created, empty if there was an error</returns>
        private string createAVSFile(string indexFile, string inputFile, Dar?AR, int desiredOutputWidth,
                                     bool signalAR, LogItem _log, AviSynthSettings avsSettings, bool autoDeint,
                                     VideoCodecSettings settings, out Dar?dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
        {
            dar = null;
            Dar             customDAR;
            IMediaFile      iMediaFile = null;
            IVideoReader    reader;
            PossibleSources oPossibleSource;
            x264Device      xTargetDevice = null;

            int outputWidthIncludingPadding  = 0;
            int outputHeightIncludingPadding = 0;
            int outputWidthCropped           = 0;
            int outputHeightCropped          = 0;

            CropValues cropValues        = new CropValues();
            bool       bAdjustResolution = false;
            bool       bCropped          = false;

            // open index file to retrieve information
            if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
            {
                iMediaFile      = new dgiFile(indexFile);
                oPossibleSource = PossibleSources.dgi;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
            {
                iMediaFile      = new d2vFile(indexFile);
                oPossibleSource = PossibleSources.d2v;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA)
            {
                iMediaFile      = new dgaFile(indexFile);
                oPossibleSource = PossibleSources.dga;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
            {
                iMediaFile      = new ffmsFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.ffindex;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
            {
                string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
                iMediaFile      = AvsFile.ParseScript(tempAvs);
                oPossibleSource = PossibleSources.directShow;
            }
            else
            {
                iMediaFile      = AvsFile.OpenScriptFile(inputFile);
                oPossibleSource = PossibleSources.avs;
            }
            reader = iMediaFile.GetVideoReader();

            // abort if the index file is invalid
            if (reader.FrameCount < 1)
            {
                _log.Error("There are 0 frames in the index file. Aborting...");
                return("");
            }

            if (AR == null)
            {
                // AR needs to be detected automatically now
                _log.LogValue("Auto-detect aspect ratio", AR == null);
                customDAR = iMediaFile.VideoInfo.DAR;
                if (customDAR.ar <= 0)
                {
                    customDAR = Dar.ITU16x9PAL;
                    _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value;
            }
            _log.LogValue("Aspect ratio", customDAR);

            // check x264 settings (target device, chapter file)
            if (settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xTargetDevice = xs.TargetDevice;

                // create qpf file if necessary
                if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks)
                {
                    qpfile = job.PostprocessingProperties.ChapterFile;
                    if ((Path.GetExtension(qpfile).ToLower(System.Globalization.CultureInfo.InvariantCulture)) == ".txt")
                    {
                        qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS);
                    }
                    if (File.Exists(qpfile))
                    {
                        xs.UseQPFile = true;
                        xs.QPFile    = qpfile;
                    }
                }
            }

            // if encoding for a specific device select the appropriate resolution setting
            if (xTargetDevice != null && xTargetDevice.Width > 0 && xTargetDevice.Height > 0)
            {
                if (keepInputResolution)
                {
                    // resolution should not be changed - use input resolution
                    outputWidthCropped  = (int)iMediaFile.VideoInfo.Width;
                    outputHeightCropped = (int)iMediaFile.VideoInfo.Height;
                }
                else
                {
                    // crop input video if selected
                    if (autoCrop)
                    {
                        if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
                        {
                            _log.Error("Autocrop failed. Aborting...");
                            return("");
                        }
                        bCropped = true;
                    }

                    outputWidthCropped  = desiredOutputWidth;
                    outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width,
                                                                       (double)customDAR.ar, cropValues, outputWidthCropped, signalAR,
                                                                       mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                    dar = null;
                }

                if (xTargetDevice.Width < outputWidthCropped)
                {
                    // width must be lowered to be target conform
                    bAdjustResolution = true;
                    if (keepInputResolution)
                    {
                        keepInputResolution = false;
                        _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution width of "
                                      + outputWidthCropped + ". The maximum value is " + xTargetDevice.Width + ".");
                    }
                }
                else if (xTargetDevice.Height < outputHeightCropped)
                {
                    // height must be lowered to be target conform
                    bAdjustResolution = true;
                    if (keepInputResolution)
                    {
                        keepInputResolution = false;
                        _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution height of "
                                      + outputHeightCropped + ". The maximum value is " + xTargetDevice.Height + ".");
                    }
                }
                else if (xTargetDevice.BluRay)
                {
                    string strResolution = outputWidthCropped + "x" + outputHeightCropped;
                    if (!strResolution.Equals("1920x1080") &&
                        !strResolution.Equals("1440x1080") &&
                        !strResolution.Equals("1280x720") &&
                        !strResolution.Equals("720x576") &&
                        !strResolution.Equals("720x480"))
                    {
                        bAdjustResolution = true;
                        if (keepInputResolution)
                        {
                            keepInputResolution = false;
                            _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution of "
                                          + outputWidthCropped + "x" + outputHeightCropped
                                          + ". Supported are 1920x1080, 1440x1080, 1280x720, 720x576 and 720x480.");
                        }
                    }
                    else
                    {
                        outputWidthIncludingPadding  = outputWidthCropped;
                        outputHeightIncludingPadding = outputHeightCropped;
                    }
                }

                if (bAdjustResolution)
                {
                    if (!autoCrop)
                    {
                        autoCrop = true;
                        _log.LogEvent("Enabling \"AutoCrop\"");
                    }
                }
            }
            else
            {
                outputWidthCropped = desiredOutputWidth;
            }

            if (!keepInputResolution && autoCrop && !bCropped)
            {
                // crop input video if required
                if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
                {
                    _log.Error("Autocrop failed. Aborting...");
                    return("");
                }
                bCropped = true;
            }

            if (bAdjustResolution)
            {
                // adjust horizontal resolution as width or height are too large
                if (xTargetDevice.BluRay)
                {
                    if (outputWidthCropped >= 1920)
                    {
                        outputWidthCropped           = 1920;
                        outputHeightIncludingPadding = 1080;
                        _log.LogEvent("Force resolution of 1920x1080 as required for " + xTargetDevice.Name);
                    }
                    else if (outputWidthCropped >= 1280)
                    {
                        outputWidthCropped           = 1280;
                        outputHeightIncludingPadding = 720;
                        _log.LogEvent("Force resolution of 1280x720 as required for " + xTargetDevice.Name);
                    }
                    else
                    {
                        outputWidthCropped = 720;
                        Double dfps = Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D;
                        if (dfps == 25)
                        {
                            outputHeightIncludingPadding = 576;
                            _log.LogEvent("Force resolution of 720x576 as required for " + xTargetDevice.Name);
                        }
                        else
                        {
                            outputHeightIncludingPadding = 480;
                            _log.LogEvent("Force resolution of 720x480 as required for " + xTargetDevice.Name);
                        }
                    }
                    outputWidthIncludingPadding = outputWidthCropped;
                }
                else if (outputWidthCropped > xTargetDevice.Width)
                {
                    outputWidthCropped = xTargetDevice.Width;
                    _log.LogEvent("Set resolution width to " + outputWidthCropped + " as required for " + xTargetDevice.Name);
                }

                // adjust cropped vertical resolution
                outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                                                                   cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                while (outputHeightCropped > xTargetDevice.Height || (xTargetDevice.BluRay && outputHeightCropped > outputHeightIncludingPadding))
                {
                    outputWidthCropped -= 16;
                    outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                                                                       cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                }
            }

            if (keepInputResolution)
            {
                outputWidthCropped  = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
                outputHeightCropped = outputHeightIncludingPadding = (int)iMediaFile.VideoInfo.Height;
                dar = customDAR;
            }
            else if (xTargetDevice == null || (xTargetDevice != null && !xTargetDevice.BluRay))
            {
                // Minimise upsizing
                int sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width - cropValues.right - cropValues.left;
                if (autoCrop)
                {
                    sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width;
                }

                if (outputWidthCropped > sourceHorizontalResolution)
                {
                    if (avsSettings.Mod16Method == mod16Method.resize)
                    {
                        while (outputWidthCropped > sourceHorizontalResolution + 16)
                        {
                            outputWidthCropped -= 16;
                        }
                    }
                    else
                    {
                        outputWidthCropped = sourceHorizontalResolution;
                    }
                }
            }

            // calculate height
            if (!keepInputResolution)
            {
                outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                                                                   cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            }

            // set complete padding if required
            if (outputHeightIncludingPadding == 0 && outputWidthIncludingPadding > 0)
            {
                outputHeightIncludingPadding = outputHeightCropped;
            }
            if (outputWidthIncludingPadding == 0 && outputHeightIncludingPadding > 0)
            {
                outputWidthIncludingPadding = outputWidthCropped;
            }

            // write calculated output resolution into the log
            _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height);
            if (autoCrop && !keepInputResolution && cropValues.isCropped())
            {
                _log.LogValue("Autocrop values", cropValues);
                _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            else
            {
                _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            if (outputWidthIncludingPadding > 0 && (outputWidthIncludingPadding != outputWidthCropped || outputHeightIncludingPadding != outputHeightCropped))
            {
                _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
            }

            if (outputWidthCropped <= 0 || outputHeightCropped <= 0)
            {
                _log.Error("Error in detection of output resolution");
                return("");
            }

            // Generate the AVS script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2);
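            // the input line built above deliberately omits deinterlacing and colour correction; it only feeds the source detector below and is rebuilt later with the detected settings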
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            _log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                raiseEvent("Automatic deinterlacing...   ***PLEASE WAIT***");
                string         d2vPath = indexFile;
                SourceDetector sd      = new SourceDetector(inputLine, d2vPath, false,
                                                            mainForm.Settings.SourceDetectorSettings,
                                                            new UpdateSourceDetectionStatus(analyseUpdate),
                                                            new FinishedAnalysis(finishedAnalysis));
                finished = false;
                sd.analyse();
                waitTillAnalyseFinished();
                sd.stop();
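                // filters is presumably populated by the finishedAnalysis callback; its first entry holds the recommended deinterlacing script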
                deinterlaceLines = filters[0].Script;
                if (interlaced)
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
                }
                else
                {
                    _log.LogValue("Deinterlacing used", deinterlaceLines);
                }
            }

            raiseEvent("Finalizing preprocessing...   ***PLEASE WAIT***");
            inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2);
            if (!inputLine.EndsWith(")"))
            {
                inputLine += ")";
            }

            if (!keepInputResolution && autoCrop)
            {
                cropLine = ScriptServer.GetCropLine(true, cropValues);
            }

            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

            if (!keepInputResolution)
            {
                resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped,
                                                        outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
                                                        autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height);
            }

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
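            // if a DAR was determined, it is prepended below as MeGUI_darx/MeGUI_dary globals, which later job steps presumably read to signal the display aspect ratio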

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            else
            {
                if (xTargetDevice != null && xTargetDevice.BluRay)
                {
                    string       strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
                    x264Settings _xs           = (x264Settings)settings;
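                    // SampleAR appears to be an index into MeGUI's predefined --sar values; per the log messages below, 1 = 1:1, 2 = 4:3, 4 = 10:11 and 5 = 12:11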

                    if (strResolution.Equals("720x480"))
                    {
                        _xs.SampleAR = 4;
                        _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " +
                                      strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("720x576"))
                    {
                        _xs.SampleAR = 5;
                        _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
                    {
                        _xs.SampleAR = 1;
                        _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1440x1080"))
                    {
                        _xs.SampleAR = 2;
                        _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                }
            }

            _log.LogValue("Generated Avisynth script", newScript);
            string strOutputAVSFile;

            if (String.IsNullOrEmpty(indexFile))
            {
                strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
            }
            else
            {
                strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
            }

            try
            {
                // write the script with a using block so the writer is disposed even if Write() throws
                using (StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default))
                {
                    sw.Write(newScript);
                }
            }
            catch (IOException i)
            {
                _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }
            return(strOutputAVSFile);
        }
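
        // --- Illustrative sketch (not part of the original MeGUI source) ---
        // A hypothetical standalone helper mirroring the Blu-ray SAR mapping that the
        // method above applies via x264Settings.SampleAR. The resolution/--sar pairs are
        // taken from the log messages in that code; the helper's name and shape are
        // made up here for illustration only.
        private static string GetBluRaySar(int paddedWidth, int paddedHeight)
        {
            string res = paddedWidth + "x" + paddedHeight;
            switch (res)
            {
                case "720x480":   return "10:11"; // NTSC SD; 40:33 would also be compliant
                case "720x576":   return "12:11"; // PAL SD; 16:11 would also be compliant
                case "1280x720":
                case "1920x1080": return "1:1";   // square pixels only
                case "1440x1080": return "4:3";   // anamorphic HD
                default:          return null;    // no Blu-ray constraint known for this resolution
            }
        }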
コード例 #35
0
ファイル: AVCLevels.cs プロジェクト: RoDaniel/featurehouse
 /// <summary>
 /// Check functions to verify elements of the level
 /// </summary>
 /// <param name="level"></param>
 /// <param name="settings"></param>
 /// <returns>true if the settings are compliant with the level</returns>
 private bool checkP4x4Enabled(int level, x264Settings settings)
 {
     //if (level != 15 && (level > 7 || (level == 7 && settings.NbBframes != 0)))
     //    return false;
     //else
         return true;
 }
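
 // --- Illustrative sketch (not part of the original source) ---
 // What the disabled check above appears to have enforced before it was commented out:
 // p4x4 partitions rejected above level index 7, and at index 7 only when B-frames are
 // in use, with index 15 presumably meaning "unrestricted". The meaning of the numeric
 // indices is an assumption inferred from the commented code, and the helper name is
 // made up for illustration.
 private bool checkP4x4EnabledStrict(int level, x264Settings settings)
 {
     if (level != 15 && (level > 7 || (level == 7 && settings.NbBframes != 0)))
         return false;
     return true;
 }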