/// <summary>
/// Handles assessment of whether the encoding options vary between two xvidSettings instances.
/// The following are excluded from the comparison:
/// BitrateQuantizer
/// CreditsQuantizer
/// Logfile
/// PAR
/// PARs
/// SARX
/// SARY
/// Zones
/// </summary>
/// <param name="settings">the settings to compare against</param>
/// <returns>true if the settings differ</returns>
public bool IsAltered(VideoCodecSettings settings)
{
    if (!(settings is xvidSettings))
        return true;
    xvidSettings otherSettings = (xvidSettings)settings;
    if (
        this.AveragingPeriod != otherSettings.AveragingPeriod ||
        this.BframeThreshold != otherSettings.BframeThreshold ||
        this.BQuantOffset != otherSettings.BQuantOffset ||
        this.BQuantRatio != otherSettings.BQuantRatio ||
        this.ChromaMotion != otherSettings.ChromaMotion ||
        this.ClosedGOP != otherSettings.ClosedGOP ||
        this.CustomEncoderOptions != otherSettings.CustomEncoderOptions ||
        this.EncodingMode != otherSettings.EncodingMode ||
        this.FrameDropRatio != otherSettings.FrameDropRatio ||
        this.GMC != otherSettings.GMC ||
        this.HighBitrateDegradation != otherSettings.HighBitrateDegradation ||
        this.Interlaced != otherSettings.Interlaced ||
        this.KeyFrameBoost != otherSettings.KeyFrameBoost ||
        this.KeyframeInterval != otherSettings.KeyframeInterval ||
        this.KeyframeReduction != otherSettings.KeyframeReduction ||
        this.KeyframeThreshold != otherSettings.KeyframeThreshold ||
        this.LowBitrateImprovement != otherSettings.LowBitrateImprovement ||
        this.MaxBQuant != otherSettings.MaxBQuant ||
        this.MaxOverflowDegradation != otherSettings.MaxOverflowDegradation ||
        this.MaxOverflowImprovement != otherSettings.MaxOverflowImprovement ||
        this.MaxPQuant != otherSettings.MaxPQuant ||
        this.MaxQuantizer != otherSettings.MaxQuantizer ||
        this.MinBQuant != otherSettings.MinBQuant ||
        this.MinPQuant != otherSettings.MinPQuant ||
        this.MinQuantizer != otherSettings.MinQuantizer ||
        this.MotionSearchPrecision != otherSettings.MotionSearchPrecision ||
        this.NbBframes != otherSettings.NbBframes ||
        this.OverflowControlStrength != otherSettings.OverflowControlStrength ||
        this.PackedBitstream != otherSettings.PackedBitstream ||
        this.QPel != otherSettings.QPel ||
        this.RateControlBuffer != otherSettings.RateControlBuffer ||
        this.ReactionDelayFactor != otherSettings.ReactionDelayFactor ||
        this.Trellis != otherSettings.Trellis ||
        this.Turbo != otherSettings.Turbo ||
        this.V4MV != otherSettings.V4MV ||
        this.VHQForBframes != otherSettings.VHQForBframes ||
        this.XvidProfile != otherSettings.XvidProfile ||
        this.VbvBuffer != otherSettings.VbvBuffer ||
        this.VbvMaxRate != otherSettings.VbvMaxRate ||
        this.VbvPeakRate != otherSettings.VbvPeakRate ||
        this.VHQMode != otherSettings.VHQMode ||
        this.HVSMasking != otherSettings.HVSMasking
       )
        return true;
    else
        return false;
}
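// Illustration (not part of MeGUI): how a caller might consume IsAltered.
// Because rate-control-only fields (BitrateQuantizer, CreditsQuantizer,
// Logfile, zones and aspect-ratio data) are excluded from the comparison,
// a first-pass stats file produced under the previous settings can still
// be reused when IsAltered returns false. The helper below is hypothetical.
public static bool StatsFileIsReusable(xvidSettings current, VideoCodecSettings previous)
{
    // true when no pass-relevant option differs and the codec type matches
    return !current.IsAltered(previous);
}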
/// <summary>
/// generates a VideoJob from the given settings
/// If the job uses an automated multi-pass mode, the mode is rewritten to the
/// corresponding per-pass mode (another job has to be created for the remaining passes)
/// </summary>
/// <param name="input">the video input (avisynth script)</param>
/// <param name="output">the video output</param>
/// <param name="settings">the codec settings for this job</param>
/// <param name="skipVideoCheck">whether to skip checking the video input</param>
/// <param name="dar">the display aspect ratio, if any</param>
/// <param name="zones">the zones to encode with special settings</param>
/// <returns>the generated job or null if there was an error with the video source</returns>
public VideoJob generateVideoJob(string input, string output, VideoCodecSettings settings, bool skipVideoCheck, Dar? dar, Zone[] zones)
{
    VideoJob job = new VideoJob(input, output, settings, dar, zones);

    if (Path.GetDirectoryName(settings.Logfile).Equals("")) // no path set
        settings.Logfile = Path.ChangeExtension(output, ".stats");
    if (job.Settings.SettingsID.Equals("x264"))
        mbtreeFile = Path.ChangeExtension(output, ".stats.mbtree");

    if (job.Settings.EncodingMode == 4) // automated 2 pass, change type to 2 pass 2nd pass
    {
        job.Settings.EncodingMode = 3;
    }
    else if (job.Settings.EncodingMode == 8) // automated 3 pass, change type to 3 pass 3rd pass
    {
        if (mainForm.Settings.OverwriteStats)
            job.Settings.EncodingMode = 7;
        else
            job.Settings.EncodingMode = 3; // 2 pass 2nd pass: doesn't overwrite the stats file
    }
    if (!skipVideoCheck)
        checkVideo(job.Input);
    return job;
}
public VideoJob(string input, string output, VideoCodecSettings settings, Dar? dar)
    : base(input, output)
{
    Settings = settings;
    DAR = dar;
}
/// <summary>
/// generates a VideoJob from the given settings
/// If the job uses an automated multi-pass mode, the mode is rewritten to the
/// corresponding per-pass mode (another job has to be created for the remaining passes)
/// </summary>
/// <param name="input">the video input (avisynth script)</param>
/// <param name="output">the video output</param>
/// <param name="settings">the codec settings for this job</param>
/// <param name="dar">the display aspect ratio, if any</param>
/// <param name="zones">the zones to encode with special settings</param>
/// <returns>the generated job or null if there was an error with the video source</returns>
public static VideoJob generateVideoJob(string input, string output, VideoCodecSettings settings, Dar? dar, Zone[] zones)
{
    VideoJob job = new VideoJob(input, output, settings, dar, zones);

    if (Path.GetDirectoryName(settings.Logfile).Equals("")) // no path set
        settings.Logfile = Path.ChangeExtension(output, ".stats");

    if (job.Settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.twopassAutomated) // automated 2 pass, change type to 2 pass 2nd pass
    {
        job.Settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.twopass2;
    }
    else if (job.Settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.threepassAutomated) // automated 3 pass, change type to 3 pass 3rd pass
    {
        if (MainForm.Instance.Settings.OverwriteStats)
            job.Settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.threepass3;
        else
            job.Settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.twopass2; // 2 pass 2nd pass: doesn't overwrite the stats file
    }
    return job;
}
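// Illustration (not part of MeGUI): the automated-mode rewrite above reduces
// to a small mapping. An automated multi-pass job is queued as its final
// pass and the earlier passes are created separately; threepass3 overwrites
// the stats file while twopass2 does not, hence the OverwriteStats switch.
// FinalPassMode is a hypothetical helper shown only to summarize the logic.
static VideoCodecSettings.VideoEncodingMode FinalPassMode(VideoCodecSettings.VideoEncodingMode mode, bool overwriteStats)
{
    switch (mode)
    {
        case VideoCodecSettings.VideoEncodingMode.twopassAutomated:
            return VideoCodecSettings.VideoEncodingMode.twopass2;
        case VideoCodecSettings.VideoEncodingMode.threepassAutomated:
            return overwriteStats ? VideoCodecSettings.VideoEncodingMode.threepass3
                                  : VideoCodecSettings.VideoEncodingMode.twopass2;
        default:
            return mode; // not an automated mode: leave unchanged
    }
}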
/// <summary>
/// Handles assessment of whether the encoding options vary between two DivXAVCSettings instances.
/// The following are excluded from the comparison:
/// BitrateQuantizer
/// CreditsQuantizer
/// Logfile
/// PAR
/// PARs
/// SARX
/// SARY
/// Zones
/// </summary>
/// <param name="settings">the settings to compare against</param>
/// <returns>true if the settings differ</returns>
public bool IsAltered(VideoCodecSettings settings)
{
    if (!(settings is DivXAVCSettings))
        return true;
    DivXAVCSettings otherSettings = (DivXAVCSettings)settings;
    if (
        this.EncodingMode != otherSettings.EncodingMode ||
        this.AQO != otherSettings.AQO ||
        this.GOPLength != otherSettings.GOPLength ||
        this.MaxBFrames != otherSettings.MaxBFrames ||
        this.InterlaceMode != otherSettings.InterlaceMode ||
        this.MaxRefFrames != otherSettings.MaxRefFrames ||
        this.Pyramid != otherSettings.Pyramid ||
        this.BasRef != otherSettings.BasRef ||
        this.Turbo != otherSettings.Turbo
       )
        return true;
    else
        return false;
}
public VideoJob(string input, string output, VideoCodecSettings settings, Dar? dar, Zone[] zones)
    : base(input, output)
{
    Settings = settings;
    DAR = dar;
    Zones = zones;
}
public void Run(MainForm info)
{
    // normal video verification
    string error = null;
    if ((error = info.Video.verifyVideoSettings()) != null)
    {
        MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
    {
        MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    if (info.Video.CurrentSettings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.twopass1 ||
        info.Video.CurrentSettings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.threepass1)
    {
        MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode", "Improper configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }

    // close video player so that the AviSynth script is also closed
    info.ClosePlayer();

    JobUtil.GetInputProperties(info.Video.VideoInput, out ulong frameCount, out double frameRate);

    VideoCodecSettings vSettings = info.Video.CurrentSettings.Clone();
    // We can't simply modify the zones in place because that would reveal the
    // final zones config to the user, including the credits/start zones
    Zone[] zones = info.Video.Info.Zones;
    bool cont = JobUtil.GetFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame, info.Video.Info.CreditsStartFrame, ref zones, (int)frameCount);
    if (cont)
    {
        VideoStream myVideo = new VideoStream();
        myVideo.Input = info.Video.Info.VideoInput;
        myVideo.Output = info.Video.Info.VideoOutput;
        myVideo.NumberOfFrames = frameCount;
        myVideo.Framerate = (decimal)frameRate;
        myVideo.VideoType = info.Video.CurrentMuxableVideoType;
        myVideo.Settings = vSettings;

        VideoInfo vInfo = info.Video.Info.Clone(); // so we don't modify the data on the main form
        vInfo.Zones = zones;

        using (AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info.Video.PrerenderJob, vInfo))
        {
            if (aew.init())
                aew.ShowDialog();
            else
                MessageBox.Show("The currently selected combination of video and audio output cannot be muxed", "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        }
    }
}
private void queueVideoButton_Click(object sender, System.EventArgs e)
{
    fileType_SelectedIndexChanged(sender, e); // to always select the correct output file extension

    string settingsError = verifyVideoSettings(); // basic input, logfile and output file settings are okay
    if (settingsError != null)
    {
        MessageBox.Show(settingsError, "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }

    VideoCodecSettings vSettings = this.CurrentSettings.Clone();
    string videoOutput = info.VideoOutput;
    if ((MainForm.Instance.Settings.UseExternalMuxerX264 || fileType.Text.Equals("MP4")) &&
        (!fileType.Text.StartsWith("RAW") && vSettings.SettingsID.StartsWith("x26")))
    {
        if (vSettings.SettingsID.Equals("x264"))
            videoOutput = Path.ChangeExtension(videoOutput, "264");
        else if (vSettings.SettingsID.Equals("x265"))
            videoOutput = Path.ChangeExtension(videoOutput, "hevc");
    }

    JobChain prepareJobs = mainForm.JobUtil.AddVideoJobs(info.VideoInput, videoOutput, this.CurrentSettings.Clone(),
        info.IntroEndFrame, info.CreditsStartFrame, info.DAR, PrerenderJob, true, info.Zones);

    if ((MainForm.Instance.Settings.UseExternalMuxerX264 || fileType.Text.Equals("MP4")) &&
        (!fileType.Text.StartsWith("RAW") && vSettings.SettingsID.StartsWith("x26")))
    {
        // create job
        MuxJob mJob = new MuxJob();
        mJob.Input = videoOutput;
        if (fileType.Text.Equals("MKV"))
        {
            mJob.MuxType = MuxerType.MKVMERGE;
            mJob.Output = Path.ChangeExtension(videoOutput, "mkv");
        }
        else
        {
            mJob.MuxType = MuxerType.MP4BOX;
            mJob.Output = Path.ChangeExtension(videoOutput, "mp4");
        }
        mJob.Settings.MuxAll = true;
        mJob.Settings.Framerate = decimal.Round((decimal)FrameRate, 3, MidpointRounding.AwayFromZero);
        mJob.Settings.MuxedInput = mJob.Input;
        mJob.Settings.MuxedOutput = mJob.Output;
        mJob.FilesToDelete.Add(videoOutput);

        // add job to queue
        prepareJobs = new SequentialChain(prepareJobs, new SequentialChain(mJob));
    }
    mainForm.Jobs.addJobsWithDependencies(prepareJobs, true);
}
public LogItem postprocess()
{
    audioFiles = vUtil.getAllDemuxedAudio(job.AudioTracks, job.Output, 8);
    fillInAudioInformation();

    log.LogValue("Desired size", job.PostprocessingProperties.OutputSize);
    log.LogValue("Split size", job.PostprocessingProperties.Splitting);

    VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
    string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output), Path.GetFileNameWithoutExtension(job.Output) + "_Video");
    string muxedOutput = job.PostprocessingProperties.FinalOutput;

    // open the video
    Dar? dar;
    string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR, job.PostprocessingProperties.HorizontalOutputResolution,
        job.PostprocessingProperties.SignalAR, log, job.PostprocessingProperties.AvsSettings,
        job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar);

    VideoStream myVideo = new VideoStream();
    ulong length;
    double framerate;
    JobUtil.getInputProperties(out length, out framerate, videoInput);
    myVideo.Input = videoInput;
    myVideo.Output = videoOutput;
    myVideo.NumberOfFrames = length;
    myVideo.Framerate = (decimal)framerate;
    myVideo.DAR = dar;
    myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
    myVideo.Settings = videoSettings;

    List<string> intermediateFiles = new List<string>();
    intermediateFiles.Add(videoInput);
    intermediateFiles.Add(job.Output);
    intermediateFiles.AddRange(audioFiles.Values);

    if (!string.IsNullOrEmpty(videoInput))
    {
        // create empty subtitles for muxing (subtitles not supported in one click mode)
        MuxStream[] subtitles = new MuxStream[0];
        JobChain c = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles,
            job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.Splitting,
            job.PostprocessingProperties.Container, false, job.PostprocessingProperties.DirectMuxAudio, log);
        if (c == null)
        {
            log.Warn("Job creation aborted");
            return log;
        }
        c = CleanupJob.AddAfter(c, intermediateFiles);
        mainForm.Jobs.addJobsWithDependencies(c);
    }
    return log;
}
/// <summary>
/// Handles assessment of whether the encoding options vary between two lavcSettings instances.
/// The following are excluded from the comparison:
/// BitrateQuantizer
/// CreditsQuantizer
/// Logfile
/// Nbthreads
/// SARX
/// SARY
/// Zones
/// </summary>
/// <param name="settings">the settings to compare against</param>
/// <returns>true if the settings differ</returns>
public override bool IsAltered(VideoCodecSettings settings)
{
    if (!(settings is lavcSettings))
        return true;
    lavcSettings otherSettings = (lavcSettings)settings;
    if (
        this.AvoidHighMoBframes != otherSettings.AvoidHighMoBframes ||
        this.BorderMask != otherSettings.BorderMask ||
        this.BQuantFactor != otherSettings.BQuantFactor ||
        this.BufferSize != otherSettings.BufferSize ||
        this.CustomEncoderOptions != otherSettings.CustomEncoderOptions ||
        this.DarkMask != otherSettings.DarkMask ||
        this.EncodingMode != otherSettings.EncodingMode ||
        this.FieldOrder != otherSettings.FieldOrder ||
        this.FilesizeTolerance != otherSettings.FilesizeTolerance ||
        this.FourCC != otherSettings.FourCC ||
        // this.FourCCs != otherSettings.FourCCs ||
        this.GreyScale != otherSettings.GreyScale ||
        this.InitialBufferOccupancy != otherSettings.InitialBufferOccupancy ||
        this.Interlaced != otherSettings.Interlaced ||
        this.InterMatrix != otherSettings.InterMatrix ||
        this.IntraMatrix != otherSettings.IntraMatrix ||
        this.IPFactor != otherSettings.IPFactor ||
        this.KeyframeInterval != otherSettings.KeyframeInterval ||
        this.LumiMasking != otherSettings.LumiMasking ||
        this.MaxBitrate != otherSettings.MaxBitrate ||
        this.MaxQuantDelta != otherSettings.MaxQuantDelta ||
        this.MaxQuantizer != otherSettings.MaxQuantizer ||
        this.MbDecisionAlgo != otherSettings.MbDecisionAlgo ||
        this.MERange != otherSettings.MERange ||
        this.MinBitrate != otherSettings.MinBitrate ||
        this.MinQuantizer != otherSettings.MinQuantizer ||
        this.NbBframes != otherSettings.NbBframes ||
        this.NbMotionPredictors != otherSettings.NbMotionPredictors ||
        this.PBFactor != otherSettings.PBFactor ||
        this.QPel != otherSettings.QPel ||
        this.QuantizerBlur != otherSettings.QuantizerBlur ||
        this.QuantizerCompression != otherSettings.QuantizerCompression ||
        this.SCD != otherSettings.SCD ||
        this.SpatialMask != otherSettings.SpatialMask ||
        this.SubpelRefinement != otherSettings.SubpelRefinement ||
        this.TemporalMask != otherSettings.TemporalMask ||
        this.Trellis != otherSettings.Trellis ||
        this.Turbo != otherSettings.Turbo ||
        this.V4MV != otherSettings.V4MV
       )
        return true;
    else
        return false;
}
public void Run(MainForm info)
{
    using (Calculator calc = new Calculator(info))
    {
        ulong nbFrames = 0;
        double framerate = 0.0;
        int hRes = 0, vRes = 0;
        Dar dar = new Dar();
        if (!string.IsNullOrEmpty(info.Video.VideoInput))
            JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out dar, info.Video.VideoInput);
        calc.SetDefaults(nbFrames, framerate, hRes, vRes, info.Video.CurrentSettings, info.Audio.AudioStreams);

        DialogResult dr = calc.ShowDialog();
        if (dr != DialogResult.OK)
            return;
        if (info.Video.CurrentSettings.EncoderType != calc.SelectedVCodec)
            return;

        VideoCodecSettings settings = info.Video.CurrentSettings;
        if (settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.CQ ||
            settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.quality)
        {
            dr = MessageBox.Show("Copy calculated bitrate into current video settings and change encoding mode to automated " + info.Settings.NbPasses + "-pass?",
                "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
                return;
            if (info.Settings.NbPasses == 3)
                settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.threepassAutomated; // automated 3-pass
            else
                settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.twopassAutomated; // automated 2-pass
        }
        else
        {
            dr = MessageBox.Show("Copy calculated bitrate into current video settings?", "Save calculated bitrate?",
                MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
                return;
        }
        settings.BitrateQuantizer = calc.VideoBitrate;
    }
}
public void postprocess()
{
    audioFiles = vUtil.getAllDemuxedAudio(job.Output, 8);
    fillInAudioInformation();

    logBuilder.Append("Desired size of this automated encoding series: " + job.PostprocessingProperties.OutputSize +
        " split size: " + job.PostprocessingProperties.Splitting + "\r\n");

    VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
    string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output), Path.GetFileNameWithoutExtension(job.Output) + "_Video");
    string muxedOutput = job.PostprocessingProperties.FinalOutput;

    // open the video
    Dar? dar;
    string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR, job.PostprocessingProperties.HorizontalOutputResolution,
        job.PostprocessingProperties.SignalAR, logBuilder, job.PostprocessingProperties.AvsSettings,
        job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar);

    VideoStream myVideo = new VideoStream();
    ulong length;
    double framerate;
    JobUtil.getInputProperties(out length, out framerate, videoInput);
    myVideo.Input = videoInput;
    myVideo.Output = videoOutput;
    myVideo.NumberOfFrames = length;
    myVideo.Framerate = (decimal)framerate;
    myVideo.DAR = dar;
    myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
    myVideo.Settings = videoSettings;

    List<string> intermediateFiles = new List<string>();
    intermediateFiles.Add(videoInput);
    intermediateFiles.Add(job.Output);
    intermediateFiles.AddRange(audioFiles.Values);

    if (!string.IsNullOrEmpty(videoInput))
    {
        // create empty subtitles for muxing (subtitles not supported in one click mode)
        MuxStream[] subtitles = new MuxStream[0];
        JobChain c = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles,
            job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
            job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container, false,
            job.PostprocessingProperties.DirectMuxAudio);
        /* vUtil.generateJobSeries(videoInput, videoOutput, muxedOutput, videoSettings,
         * audioStreams, audio, subtitles, job.PostprocessingProperties.ChapterFile,
         * job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.SplitSize,
         * containerOverhead, type, new string[] { job.Output, videoInput }); */
        c = CleanupJob.AddAfter(c, intermediateFiles);
        mainForm.Jobs.addJobsWithDependencies(c);
    }
    mainForm.addToLog(logBuilder.ToString());
}
public void Run(MainForm info)
{
    // normal video verification
    string error = null;
    // update the current audio stream with the latest data
    // updateAudioStreams();
    if ((error = info.Video.verifyVideoSettings()) != null)
    {
        MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
    {
        MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
#warning must be fixed up to be more generic
    if (info.Video.CurrentVideoCodecSettings.EncodingMode == 2 || info.Video.CurrentVideoCodecSettings.EncodingMode == 5)
    {
        MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode", "Improper configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    VideoCodecSettings vSettings = info.Video.CurrentVideoCodecSettings.clone();
    bool cont = info.JobUtil.getFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame, info.Video.Info.CreditsStartFrame);
    if (cont)
    {
        ulong length = 0;
        double framerate = 0.0;
        VideoStream myVideo = new VideoStream();
        JobUtil.getInputProperties(out length, out framerate, info.Video.VideoInput);
        myVideo.Input = info.Video.Info.VideoInput;
        myVideo.Output = info.Video.Info.VideoOutput;
        myVideo.NumberOfFrames = length;
        myVideo.Framerate = (decimal)framerate;
        myVideo.DAR = info.Video.Info.DAR;
        myVideo.VideoType = info.Video.CurrentMuxableVideoType;
        myVideo.Settings = vSettings;
        using (AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info, info.Video.PrerenderJob))
        {
            if (aew.init())
            {
                info.ClosePlayer();
                aew.ShowDialog();
            }
            else
                MessageBox.Show("The currently selected combination of video and audio output cannot be muxed", "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        }
    }
}
private string checkVideo(string avsFile, bool tryToFix)
{
    try
    {
        using (AvsFile avi = AvsFile.OpenScriptFile(avsFile))
        {
            if (avi.Clip.OriginalColorspace != AviSynthColorspace.YV12 && avi.Clip.OriginalColorspace != AviSynthColorspace.I420)
            {
                if (tryToFix && !isConvertedToYV12(avsFile))
                {
                    bool convert = mainForm.DialogManager.addConvertToYV12(avi.Clip.OriginalColorspace.ToString());
                    if (convert)
                    {
                        if (appendConvertToYV12(avsFile))
                        {
                            string sResult = checkVideo(avsFile, false); // check everything again, to see if it is all fixed now
                            if (sResult == null)
                            {
                                MessageBox.Show("Successfully converted to YV12.");
                                return null;
                            }
                            else
                                return sResult;
                        }
                    }
                    return "You didn't want me to append ConvertToYV12(). You'll have to fix the colorspace problem yourself.";
                }
                return string.Format("AviSynth clip is in {0} not in YV12, even though ConvertToYV12() has been appended.", avi.Clip.OriginalColorspace.ToString());
            }

            VideoCodecSettings settings = GetCurrentVideoSettings();
            if (settings != null && settings.SettingsID != "x264") // mod16 restriction
            {
                if (avi.Clip.VideoHeight % 16 != 0 || avi.Clip.VideoWidth % 16 != 0)
                {
                    return string.Format("AviSynth clip doesn't have mod16 dimensions:\r\nWidth: {0}\r\nHeight: {1}\r\n" +
                        "This could cause problems with some encoders,\r\n" +
                        "and will also result in a loss of compressibility.\r\n" +
                        "I suggest you resize to a mod16 resolution.", avi.Clip.VideoWidth, avi.Clip.VideoHeight);
                }
            }
        }
    }
    catch (Exception e)
    {
        return "Error in AviSynth script:\r\n" + e.Message;
    }
    return null;
}
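// Illustration (not part of MeGUI): the mod16 restriction above is a pair of
// remainder checks. Most DCT-based encoders work on 16x16 macroblocks, so
// both dimensions must be divisible by 16. IsMod16 is a hypothetical helper.
static bool IsMod16(int width, int height)
{
    return width % 16 == 0 && height % 16 == 0;
}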
public JobChain AddVideoJobs(string movieInput, string movieOutput, VideoCodecSettings settings, int introEndFrame, int creditsStartFrame, Dar? dar, bool prerender, bool checkVideo, Zone[] zones)
{
    JobChain jobs = null;
    bool cont = getFinalZoneConfiguration(settings, introEndFrame, creditsStartFrame, ref zones);
    if (!cont) // abort
        return jobs;
    return prepareVideoJob(movieInput, movieOutput, settings, dar, prerender, checkVideo, zones);
}
private bool isFirstPass()
{
    VideoCodecSettings settings = CurrentVideoCodecSettings;
    // modes 2 and 5 are the 2-pass and 3-pass first-pass modes
    if (settings.EncodingMode == 2 || settings.EncodingMode == 5)
        return true;
    else
        return false;
}
private bool isFirstPass()
{
    VideoCodecSettings settings = CurrentSettings;
    if (settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.twopass1 ||
        settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.threepass1)
        return true;
    else
        return false;
}
/* private void videoConfigButton_Click(object sender, System.EventArgs e)
 * {
 * #warning fix this
 *     /*if (player != null)
 *         player.Hide();
 *     VideoCodecSettings settings = CurrentVideoCodecSettingsProvider.GetCurrentSettings();
 *     string selectedProfile;
 *     if (CurrentVideoCodecSettingsProvider.EditSettings(mainForm, this.VideoProfile.Text,
 *         this.VideoIO, new int[] { this.introEndFrame, this.creditsStartFrame }, out selectedProfile))
 *     {
 *         this.VideoProfile.Items.Clear();
 *         foreach (string name in mainForm.Profiles.VideoProfiles.Keys)
 *         {
 *             this.VideoProfile.Items.Add(name);
 *         }
 *         int index = this.VideoProfile.Items.IndexOf(selectedProfile);
 *         if (index != -1)
 *             this.VideoProfile.SelectedIndex = index;
 *     }
 *     if (player != null)
 *         player.Show();
 *     updateIOConfig();*/
//}

private void queueVideoButton_Click(object sender, System.EventArgs e)
{
    string settingsError = verifyVideoSettings(); // basic input, logfile and output file settings are okay
    if (settingsError != null)
    {
        MessageBox.Show(settingsError, "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return;
    }
    VideoCodecSettings vSettings = this.CurrentVideoCodecSettings.clone();
    mainForm.JobUtil.AddVideoJobs(info.VideoInput, info.VideoOutput, this.CurrentVideoCodecSettings.clone(),
        info.IntroEndFrame, info.CreditsStartFrame, info.DAR, addPrerenderJob.Checked, true);
}
/// <summary>
/// sets the number of encoder threads based on the number of processors found on the system
/// </summary>
/// <param name="settings">the codec settings to adjust</param>
private void adjustNbThreads(VideoCodecSettings settings)
{
    string nbProc = System.Environment.GetEnvironmentVariable("NUMBER_OF_PROCESSORS");
    if (!String.IsNullOrEmpty(nbProc))
    {
        try
        {
            int nbCPUs = int.Parse(nbProc);
            settings.setAdjustedNbThreads(nbCPUs);
        }
        catch (Exception) { }
    }
}
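// For reference, .NET 2.0 and later expose the logical core count directly
// via System.Environment.ProcessorCount, which avoids parsing the
// NUMBER_OF_PROCESSORS environment variable. A minimal alternative sketch,
// assuming the same setAdjustedNbThreads settings call as above:
private void adjustNbThreadsAlternative(VideoCodecSettings settings)
{
    settings.setAdjustedNbThreads(System.Environment.ProcessorCount);
}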
/// <summary>
/// iterates through all zones and makes sure we get no intersection when applying the current intro settings
/// </summary>
/// <param name="introEndFrame">the frame where the intro ends</param>
/// <returns>true if the intro zone does not intersect with any configured zone, false otherwise</returns>
private bool validateIntro(int introEndFrame)
{
    VideoCodecSettings settings = this.CurrentVideoCodecSettings;
    foreach (Zone z in settings.Zones)
    {
        if (introEndFrame >= z.startFrame)
        {
            MessageBox.Show("The end of the intro intersects with an already configured zone\ngoing from frame " + z.startFrame +
                " to frame " + z.endFrame + "\nPlease select another credits start frame or reconfigure the zone in the codec configuration.",
                "Zone intersection detected", MessageBoxButtons.OK, MessageBoxIcon.Stop);
            return false;
        }
    }
    return true;
}
public bool AddVideoJobs(string movieInput, string movieOutput, VideoCodecSettings settings, int introEndFrame, int creditsStartFrame, Dar? dar, bool prerender, bool checkVideo, Zone[] zones)
{
    bool cont = getFinalZoneConfiguration(settings, introEndFrame, creditsStartFrame, ref zones);
    if (!cont) // abort
        return false;
    JobChain jobs = prepareVideoJob(movieInput, movieOutput, settings, dar, prerender, checkVideo, zones);
    if (jobs == null)
        return false;
    mainForm.Jobs.addJobsWithDependencies(jobs);
    return true; // jobs were queued successfully
}
public void InitializeDropdowns()
{
    videoCodec.Items.Clear();
    videoCodec.Items.AddRange(mainForm.PackageSystem.VideoSettingsProviders.ValuesArray);
    try
    {
        videoCodec.SelectedItem = mainForm.PackageSystem.VideoSettingsProviders["x264"];
    }
    catch (Exception)
    {
        try
        {
            videoCodec.SelectedIndex = 0;
        }
        catch (Exception)
        {
            MessageBox.Show("No valid video codecs are set up", "No valid video codecs", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }

    fileTypeHandler = new FileTypeHandler<VideoType>(fileType, videoCodec, new FileTypeHandler<VideoType>.SupportedOutputGetter(delegate
    {
        return videoEncoderProvider.GetSupportedOutput(codecHandler.CurrentSettingsProvider.EncoderType);
    }));
    fileTypeHandler.FileTypeChanged += new FileTypeHandler<VideoType>.FileTypeEvent(delegate(object sender, VideoType currentType)
    {
        VideoCodecSettings settings = CurrentSettings;
        this.updateIOConfig();
        if (MainForm.verifyOutputFile(this.VideoOutput) == null)
            this.VideoOutput = Path.ChangeExtension(this.VideoOutput, currentType.Extension);
    });
    codecHandler = new MultipleConfigurersHandler<VideoCodecSettings, VideoInfo, VideoCodec, VideoEncoderType>(videoCodec);
    profileHandler = new ProfilesControlHandler<VideoCodecSettings, VideoInfo>("Video", mainForm, profileControl1,
        codecHandler.EditSettings, new InfoGetter<VideoInfo>(getInfo), codecHandler.Getter, codecHandler.Setter);
    codecHandler.Register(profileHandler);
    fileTypeHandler.RefreshFiletypes();
}
/// <summary>
/// generates a VideoJob from the given settings
/// If the job uses an automated multi-pass mode, the mode is rewritten to the
/// corresponding per-pass mode (another job has to be created for the remaining passes)
/// </summary>
/// <param name="input">the video input (avisynth script)</param>
/// <param name="output">the video output</param>
/// <param name="settings">the codec settings for this job</param>
/// <param name="skipVideoCheck">whether to skip checking the video input</param>
/// <param name="dar">the display aspect ratio, if any</param>
/// <returns>the generated job or null if there was an error with the video source</returns>
public VideoJob generateVideoJob(string input, string output, VideoCodecSettings settings, bool skipVideoCheck, Dar? dar)
{
    VideoJob job = new VideoJob(input, output, settings, dar);

    if (mainForm.Settings.AutoSetNbThreads)
        adjustNbThreads(settings);
    if (Path.GetDirectoryName(settings.Logfile).Equals("")) // no path set
        settings.Logfile = Path.ChangeExtension(input, ".stats");

    if (job.Settings.EncodingMode == 4) // automated 2 pass, change type to 2 pass 2nd pass
    {
        job.Settings.EncodingMode = 3;
        job.Settings.Turbo = false;
    }
    else if (job.Settings.EncodingMode == 8) // automated 3 pass, change type to 3 pass 3rd pass
    {
        if (mainForm.Settings.OverwriteStats)
            job.Settings.EncodingMode = 7;
        else
            job.Settings.EncodingMode = 3; // 2 pass 2nd pass: doesn't overwrite the stats file
        job.Settings.Turbo = false;
    }
    if (!skipVideoCheck)
        checkVideo(job.Input);
    return job;
}
/// <summary>
/// Handles assessment of whether the encoding options vary between two snowSettings instances.
/// The following are excluded from the comparison:
/// BitrateQuantizer
/// CreditsQuantizer
/// Logfile
/// Quantizer
/// SARX
/// SARY
/// Zones
/// </summary>
/// <param name="settings">the settings to compare against</param>
/// <returns>true if the settings differ</returns>
public override bool IsAltered(VideoCodecSettings settings)
{
    if (!(settings is snowSettings))
        return true;
    snowSettings otherSettings = (snowSettings)settings;
    if (
        this.CustomEncoderOptions != otherSettings.CustomEncoderOptions ||
        this.EncodingMode != otherSettings.EncodingMode ||
        this.FourCC != otherSettings.FourCC ||
        // this.FourCCs != otherSettings.FourCCs ||
        this.KeyframeInterval != otherSettings.KeyframeInterval ||
        this.LosslessMode != otherSettings.LosslessMode ||
        this.MaxQuantizer != otherSettings.MaxQuantizer ||
        this.MBComp != otherSettings.MBComp ||
        this.MECompFullpel != otherSettings.MECompFullpel ||
        this.MECompHpel != otherSettings.MECompHpel ||
        this.MinQuantizer != otherSettings.MinQuantizer ||
        this.NbBframes != otherSettings.NbBframes ||
        this.NbMotionPredictors != otherSettings.NbMotionPredictors ||
        this.PredictionMode != otherSettings.PredictionMode ||
        this.QPel != otherSettings.QPel ||
        this.Trellis != otherSettings.Trellis ||
        this.Turbo != otherSettings.Turbo ||
        this.V4MV != otherSettings.V4MV
       )
        return true;
    else
        return false;
}
public void openVideoFile(string fileName)
{
    info.CreditsStartFrame = -1;
    info.IntroEndFrame = -1;
    info.VideoInput = fileName;
    info.DARX = info.DARY = -1;

    // reset the zones for all codecs; zones are supposed to be source-bound
    foreach (ISettingsProvider<VideoCodecSettings, VideoInfo, VideoCodec, VideoEncoderType> p in videoCodec.Items)
    {
        VideoCodecSettings s = p.GetCurrentSettings();
        s.Zones = new Zone[0];
        p.LoadSettings(s);
    }

    if (mainForm.Settings.AutoOpenScript)
        openAvisynthScript(fileName);

    string filePath = Path.GetDirectoryName(fileName);
    string fileNameNoExtension = Path.GetFileNameWithoutExtension(fileName);
    this.VideoOutput = Path.Combine(filePath, fileNameNoExtension) + mainForm.Settings.VideoExtension + ".extension";
    this.VideoOutput = Path.ChangeExtension(this.VideoOutput, this.CurrentVideoOutputType.Extension);
    updateIOConfig();
}
/// <summary>
/// creates the AVS script file
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values,
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="indexFile">the index file (e.g. dgindex script)</param>
/// <param name="inputFile">the source video file</param>
/// <param name="AR">aspect ratio selection to be used; null to auto-detect</param>
/// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not AR signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="_log">the log item to write to</param>
/// <param name="avsSettings">the AviSynth settings to apply</param>
/// <param name="autoDeint">whether automatic deinterlacing detection is run</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="dar">the display aspect ratio to signal, if any</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">whether the input resolution should be kept unchanged</param>
/// <param name="useChaptersMarks">whether chapter marks should be turned into an x264 qpfile</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string createAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth, bool signalAR, LogItem _log,
    AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    dar = null;
    Dar customDAR;
    IMediaFile iMediaFile = null;
    IVideoReader reader;
    PossibleSources oPossibleSource;
    x264Device xTargetDevice = null;
    int outputWidthIncludingPadding = 0;
    int outputHeightIncludingPadding = 0;
    int outputWidthCropped = 0;
    int outputHeightCropped = 0;
    CropValues cropValues = new CropValues();
    bool bAdjustResolution = false;
    bool bCropped = false;

    // open index file to retrieve information
    if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
    {
        iMediaFile = new dgiFile(indexFile);
        oPossibleSource = PossibleSources.dgi;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
    {
        iMediaFile = new d2vFile(indexFile);
        oPossibleSource = PossibleSources.d2v;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA)
    {
        iMediaFile = new dgaFile(indexFile);
        oPossibleSource = PossibleSources.dga;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
    {
        iMediaFile = new ffmsFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.ffindex;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
    {
        string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
        iMediaFile = AvsFile.ParseScript(tempAvs);
        oPossibleSource = PossibleSources.directShow;
    }
    else
    {
        iMediaFile = AvsFile.OpenScriptFile(inputFile);
        oPossibleSource = PossibleSources.avs;
    }
    reader = iMediaFile.GetVideoReader();

    // abort if the index file is invalid
    if (reader.FrameCount < 1)
    {
        _log.Error("There are 0 frames in the index file. Aborting...");
        return "";
    }

    if (AR == null)
    {
        // AR needs to be detected automatically now
        _log.LogValue("Auto-detect aspect ratio", AR == null);
        customDAR = iMediaFile.VideoInfo.DAR;
        if (customDAR.ar <= 0)
        {
            customDAR = Dar.ITU16x9PAL;
            _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
        customDAR = AR.Value;
    _log.LogValue("Aspect ratio", customDAR);

    // check x264 settings (target device, chapter file)
    if (settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xTargetDevice = xs.TargetDevice;
        // create qpf file if necessary
        if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks)
        {
            qpfile = job.PostprocessingProperties.ChapterFile;
            if ((Path.GetExtension(qpfile).ToLower(System.Globalization.CultureInfo.InvariantCulture)) == ".txt")
                qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS);
            if (File.Exists(qpfile))
            {
                xs.UseQPFile = true;
                xs.QPFile = qpfile;
            }
        }
    }

    // if encoding for a specific device select the appropriate resolution setting
    if (xTargetDevice != null && xTargetDevice.Width > 0 && xTargetDevice.Height > 0)
    {
        if (keepInputResolution)
        {
            // resolution should not be changed - use input resolution
            outputWidthCropped = (int)iMediaFile.VideoInfo.Width;
            outputHeightCropped = (int)iMediaFile.VideoInfo.Height;
        }
        else
        {
            // crop input video if selected
            if (autoCrop)
            {
                if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
                {
                    _log.Error("Autocrop failed. Aborting...");
                    return "";
                }
                bCropped = true;
            }
            outputWidthCropped = desiredOutputWidth;
            outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            dar = null;
        }
        if (xTargetDevice.Width < outputWidthCropped)
        {
            // width must be lowered to be target conform
            bAdjustResolution = true;
            if (keepInputResolution)
            {
                keepInputResolution = false;
                _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution width of " +
                    outputWidthCropped + ". The maximum value is " + xTargetDevice.Width + ".");
            }
        }
        else if (xTargetDevice.Height < outputHeightCropped)
        {
            // height must be lowered to be target conform
            bAdjustResolution = true;
            if (keepInputResolution)
            {
                keepInputResolution = false;
                _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution height of " +
                    outputHeightCropped + ". The maximum value is " + xTargetDevice.Height + ".");
            }
        }
        else if (xTargetDevice.BluRay)
        {
            string strResolution = outputWidthCropped + "x" + outputHeightCropped;
            if (!strResolution.Equals("1920x1080") && !strResolution.Equals("1440x1080") && !strResolution.Equals("1280x720") &&
                !strResolution.Equals("720x576") && !strResolution.Equals("720x480"))
            {
                bAdjustResolution = true;
                if (keepInputResolution)
                {
                    keepInputResolution = false;
                    _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution of " +
                        outputWidthCropped + "x" + outputHeightCropped + ". Supported are 1920x1080, 1440x1080, 1280x720, 720x576 and 720x480.");
                }
            }
            else
            {
                outputWidthIncludingPadding = outputWidthCropped;
                outputHeightIncludingPadding = outputHeightCropped;
            }
        }
        if (bAdjustResolution)
        {
            if (!autoCrop)
            {
                autoCrop = true;
                _log.LogEvent("Enabling \"AutoCrop\"");
            }
        }
    }
    else
        outputWidthCropped = desiredOutputWidth;

    if (!keepInputResolution && autoCrop && !bCropped)
    {
        // crop input video if required
        if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false)
        {
            _log.Error("Autocrop failed. Aborting...");
            return "";
        }
        bCropped = true;
    }

    if (bAdjustResolution)
    {
        // adjust horizontal resolution as width or height are too large
        if (xTargetDevice.BluRay)
        {
            if (outputWidthCropped >= 1920)
            {
                outputWidthCropped = 1920;
                outputHeightIncludingPadding = 1080;
                _log.LogEvent("Force resolution of 1920x1080 as required for " + xTargetDevice.Name);
            }
            else if (outputWidthCropped >= 1280)
            {
                outputWidthCropped = 1280;
                outputHeightIncludingPadding = 720;
                _log.LogEvent("Force resolution of 1280x720 as required for " + xTargetDevice.Name);
            }
            else
            {
                outputWidthCropped = 720;
                Double dfps = Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D;
                if (dfps == 25)
                {
                    outputHeightIncludingPadding = 576;
                    _log.LogEvent("Force resolution of 720x576 as required for " + xTargetDevice.Name);
                }
                else
                {
                    outputHeightIncludingPadding = 480;
                    _log.LogEvent("Force resolution of 720x480 as required for " + xTargetDevice.Name);
                }
            }
            outputWidthIncludingPadding = outputWidthCropped;
        }
        else if (outputWidthCropped > xTargetDevice.Width)
        {
            outputWidthCropped = xTargetDevice.Width;
            _log.LogEvent("Set resolution width to " + outputWidthCropped + " as required for " + xTargetDevice.Name);
        }
        // adjust cropped vertical resolution
        outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
            cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
        while (outputHeightCropped > xTargetDevice.Height || (xTargetDevice.BluRay && outputHeightCropped > outputHeightIncludingPadding))
        {
            outputWidthCropped -= 16;
            outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
                cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
        }
    }

    if (keepInputResolution)
    {
        outputWidthCropped = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width;
        outputHeightCropped = outputHeightIncludingPadding = (int)iMediaFile.VideoInfo.Height;
        dar = customDAR;
    }
    else if (xTargetDevice == null || (xTargetDevice != null && !xTargetDevice.BluRay))
    {
        // minimise upsizing
        int sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width - cropValues.right - cropValues.left;
        if (autoCrop)
            sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width;
        if (outputWidthCropped > sourceHorizontalResolution)
        {
            if (avsSettings.Mod16Method == mod16Method.resize)
                while (outputWidthCropped > sourceHorizontalResolution + 16)
                    outputWidthCropped -= 16;
            else
                outputWidthCropped = sourceHorizontalResolution;
        }
    }

    // calculate height
    if (!keepInputResolution)
        outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar,
            cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);

    // set complete padding if required
    if (outputHeightIncludingPadding == 0 && outputWidthIncludingPadding > 0)
        outputHeightIncludingPadding = outputHeightCropped;
    if (outputWidthIncludingPadding == 0 && outputHeightIncludingPadding > 0)
        outputWidthIncludingPadding = outputWidthCropped;

    // write calculated output resolution into the log
    _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height);
    if (autoCrop && !keepInputResolution && cropValues.isCropped())
    {
        _log.LogValue("Autocrop values", cropValues);
        _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    else
        _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
    if (outputWidthIncludingPadding > 0 && (outputWidthIncludingPadding != outputWidthCropped || outputHeightIncludingPadding != outputHeightCropped))
        _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
    if (outputWidthCropped <= 0 || outputHeightCropped <= 0)
    {
        _log.Error("Error in detection of output resolution");
        return "";
    }

    // generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2);
    if (!inputLine.EndsWith(")"))
        inputLine += ")";

    _log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        raiseEvent("Automatic deinterlacing... ***PLEASE WAIT***");
        string d2vPath = indexFile;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings,
            new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        sd.stop();
        deinterlaceLines = filters[0].Script;
        if (interlaced)
            _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
        else
            _log.LogValue("Deinterlacing used", deinterlaceLines);
    }

    raiseEvent("Finalizing preprocessing... ***PLEASE WAIT***");
    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2);
    if (!inputLine.EndsWith(")"))
        inputLine += ")";
    if (!keepInputResolution && autoCrop)
        cropLine = ScriptServer.GetCropLine(true, cropValues);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    if (!keepInputResolution)
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped,
            outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
            autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height);

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    else
    {
        if (xTargetDevice != null && xTargetDevice.BluRay)
        {
            string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
            x264Settings _xs = (x264Settings)settings;
            if (strResolution.Equals("720x480"))
            {
                _xs.SampleAR = 4;
                _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("720x576"))
            {
                _xs.SampleAR = 5;
                _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
            {
                _xs.SampleAR = 1;
                _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1440x1080"))
            {
                _xs.SampleAR = 2;
                _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
        }
    }

    _log.LogValue("Generated Avisynth script", newScript);
    string strOutputAVSFile;
    if (String.IsNullOrEmpty(indexFile))
        strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
    else
        strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
    try
    {
        StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return "";
    }
    return strOutputAVSFile;
}
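// Illustration (not part of MeGUI): a minimal stand-in for
// ScriptServer.CreateScriptFromTemplate, assuming a template that contains
// literal placeholder tokens; the real implementation may resolve them
// differently. Note that the fallback values above ("#input", "#crop", ...)
// begin with '#', which AviSynth treats as a comment, so a step that was
// never filled in leaves only an inert comment line in the generated script.
static string CreateScriptFromTemplateSketch(string template, string inputLine, string cropLine, string resizeLine, string denoiseLines, string deinterlaceLines)
{
    return template
        .Replace("<input>", inputLine)
        .Replace("<crop>", cropLine)
        .Replace("<resize>", resizeLine)
        .Replace("<denoise>", denoiseLines)
        .Replace("<deinterlace>", deinterlaceLines);
}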
private void StartPostProcessing()
{
    Thread t = null;
    try
    {
        _log.LogEvent("Processing thread started");
        raiseEvent("Preprocessing... ***PLEASE WAIT***");
        _start = DateTime.Now;
        t = new Thread(new ThreadStart(delegate
        {
            while (true)
            {
                updateTime();
                Thread.Sleep(1000);
            }
        }));
        t.Start();

        List<string> arrAudioFilesDelete = new List<string>();
        audioFiles = new Dictionary<int, string>();
        List<AudioTrackInfo> arrAudioTracks = new List<AudioTrackInfo>();
        List<AudioJob> arrAudioJobs = new List<AudioJob>();
        List<MuxStream> arrMuxStreams = new List<MuxStream>();
        List<string> intermediateFiles = new List<string>();

        FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);

        foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
        {
            if (oAudioTrack.ExtractMKVTrack)
            {
                audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
            }
            else if (oAudioTrack.AudioTrackInfo != null)
                arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
            if (oAudioTrack.AudioJob != null)
            {
                if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE && String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                    oAudioTrack.AudioJob.Input = job.Input;
                arrAudioJobs.Add(oAudioTrack.AudioJob);
            }
            if (oAudioTrack.DirectMuxAudio != null)
                arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
        }
        if (audioFiles.Count == 0 && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE && !job.PostprocessingProperties.Eac3toDemux)
            audioFiles = vUtil.getAllDemuxedAudio(arrAudioTracks, new List<AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, _log);
        fillInAudioInformation(arrAudioJobs, arrMuxStreams);

        if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
            _log.LogEvent("Don't encode video: True");
        else
            _log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
        _log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);

        // chapter file handling
        if (String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
        {
            job.PostprocessingProperties.ChapterFile = null;
        }
        else if (job.PostprocessingProperties.Container == ContainerType.AVI)
        {
            _log.LogEvent("Chapter handling disabled because of the AVI target container");
            job.PostprocessingProperties.ChapterFile = null;
        }
        else if (!File.Exists(job.PostprocessingProperties.ChapterFile))
        {
            if (job.PostprocessingProperties.ChapterFile.StartsWith("<") || job.PostprocessingProperties.ChapterExtracted)
            {
                // internal chapter file
                string strTempFile = job.PostprocessingProperties.ChapterFile;
                if (Path.GetExtension(job.PostprocessingProperties.VideoInput).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".mkv"))
                {
                    MediaInfoFile oInfo = new MediaInfoFile(job.PostprocessingProperties.VideoInput, ref _log);
                    if (oInfo.hasMKVChapters())
                    {
                        job.PostprocessingProperties.ChapterFile = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.IndexFile) + " - Chapter Information.txt");
                        if (oInfo.extractMKVChapters(job.PostprocessingProperties.ChapterFile))
                        {
                            intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                            job.PostprocessingProperties.ChapterExtracted = true;
                        }
                        else
                            job.PostprocessingProperties.ChapterFile = strTempFile;
                    }
                }
                else if (File.Exists(job.PostprocessingProperties.IFOInput))
                {
                    job.PostprocessingProperties.ChapterFile = VideoUtil.getChaptersFromIFO(job.PostprocessingProperties.IFOInput, false, job.PostprocessingProperties.WorkingDirectory, job.PostprocessingProperties.TitleNumberToProcess);
                    if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
                    {
                        intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                        job.PostprocessingProperties.ChapterExtracted = true;
                    }
                    else
                        job.PostprocessingProperties.ChapterFile = strTempFile;
                }
            }
            if (!File.Exists(job.PostprocessingProperties.ChapterFile))
            {
                _log.LogEvent("File not found: " + job.PostprocessingProperties.ChapterFile, ImageType.Error);
                job.PostprocessingProperties.ChapterFile = null;
            }
        }
        else if (job.PostprocessingProperties.ChapterExtracted)
            intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);

        string avsFile = String.Empty;
        VideoStream myVideo = new VideoStream();
        VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
        if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            // open the video
            Dar? dar;
            avsFile = createAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR, job.PostprocessingProperties.HorizontalOutputResolution,
                job.PostprocessingProperties.SignalAR, _log, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace,
                videoSettings, out dar, job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                job.PostprocessingProperties.UseChaptersMarks);
            ulong length;
            double framerate;
            JobUtil.getInputProperties(out length, out framerate, avsFile);
            myVideo.Input = avsFile;
            myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.Input) + "_Video");
            myVideo.NumberOfFrames = length;
            myVideo.Framerate = (decimal)framerate;
            myVideo.DAR = dar;
            myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
            myVideo.Settings = videoSettings;
        }
        else
        {
            myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
            myVideo.Settings = videoSettings;
            MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref _log);
            videoSettings.VideoName = oInfo.VideoInfo.Track.Name;
            myVideo.Framerate = (decimal)oInfo.VideoInfo.FPS;
        }

        intermediateFiles.Add(avsFile);
        intermediateFiles.Add(job.IndexFile);
        intermediateFiles.AddRange(audioFiles.Values);
        if (!string.IsNullOrEmpty(qpfile))
            intermediateFiles.Add(qpfile);
        foreach (string file in arrAudioFilesDelete)
            intermediateFiles.Add(file);
        if (File.Exists(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log")))
            intermediateFiles.Add(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log"));
        foreach (string file in job.PostprocessingProperties.FilesToDelete)
            intermediateFiles.Add(file);

        if (!string.IsNullOrEmpty(avsFile) || !String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            MuxStream[] subtitles;
            if (job.PostprocessingProperties.SubtitleTracks.Count == 0)
            {
                // create empty subtitles for muxing
                subtitles = new MuxStream[0];
            }
            else
            {
                subtitles = new MuxStream[job.PostprocessingProperties.SubtitleTracks.Count];
                int i = 0;
                foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
                {
                    if (oTrack.TrackInfo.IsMKVContainer())
                    {
                        // demuxed MKV
                        string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                        if (File.Exists(trackFile))
                        {
                            intermediateFiles.Add(trackFile);
                            if (Path.GetExtension(trackFile).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".idx"))
                                intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                            subtitles[i] = new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                        }
                        else
                            _log.LogEvent("File not found: " + trackFile, ImageType.Error);
                    }
                    else
                        subtitles[i] = new MuxStream(oTrack.DemuxFilePath, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                    i++;
                }
            }

            JobChain c = vUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(), subtitles,
                job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.Splitting,
                job.PostprocessingProperties.Container, job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(), _log,
                job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux,
                job.PostprocessingProperties.AudioTracks.ToArray());
            if (c == null)
            {
                _log.Warn("Job creation aborted");
                return;
            }
            c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
            mainForm.Jobs.addJobsWithDependencies(c);

            // batch processing other input files if necessary
            if (job.PostprocessingProperties.FilesToProcess.Count > 0)
            {
                OneClickWindow ocw = new OneClickWindow(mainForm);
                ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
            }
        }
    }
    catch (Exception e)
    {
        t.Abort();
        if (e is ThreadAbortException)
        {
            _log.LogEvent("Aborting...");
            su.WasAborted = true;
            su.IsComplete = true;
            raiseEvent();
        }
        else
        {
            _log.LogValue("An error occurred", e, ImageType.Error);
            su.HasError = true;
            su.IsComplete = true;
            raiseEvent();
        }
        return;
    }
    t.Abort();
    su.IsComplete = true;
    raiseEvent();
}
public VideoJob generateVideoJob(string input, string output, VideoCodecSettings settings, Dar? dar, Zone[] zones)
{
    return generateVideoJob(input, output, settings, false, dar, zones);
}
protected override void RunInThread()
{
    JobChain c = null;
    List<string> intermediateFiles = new List<string>();
    bool bError = false;
    try
    {
        log.LogEvent("Processing thread started");
        su.Status = "Preprocessing... ***PLEASE WAIT***";
        su.ResetTime();

        List<string> arrAudioFilesDelete = new List<string>();
        audioFiles = new Dictionary<int, string>();
        List<AudioTrackInfo> arrAudioTracks = new List<AudioTrackInfo>();
        List<AudioJob> arrAudioJobs = new List<AudioJob>();
        List<MuxStream> arrMuxStreams = new List<MuxStream>();
        FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);

        // audio handling
        foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
        {
            if (IsJobStopped())
                return;

            if (oAudioTrack.AudioTrackInfo != null)
            {
                if (oAudioTrack.AudioTrackInfo.ExtractMKVTrack)
                {
                    if (job.PostprocessingProperties.ApplyDelayCorrection && File.Exists(job.PostprocessingProperties.IntermediateMKVFile))
                    {
                        MediaInfoFile oFile = new MediaInfoFile(job.PostprocessingProperties.IntermediateMKVFile, ref log);
                        bool bFound = false;
                        foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks)
                        {
                            if (oAudioInfo.MMGTrackID == oAudioTrack.AudioTrackInfo.MMGTrackID)
                                bFound = true;
                        }
                        int mmgTrackID = 0;
                        if (!bFound)
                            mmgTrackID = oFile.AudioInfo.Tracks[oAudioTrack.AudioTrackInfo.TrackIndex].MMGTrackID;
                        else
                            mmgTrackID = oAudioTrack.AudioTrackInfo.MMGTrackID;
                        foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks)
                        {
                            if (oAudioInfo.MMGTrackID == mmgTrackID)
                            {
                                if (oAudioTrack.DirectMuxAudio != null)
                                    oAudioTrack.DirectMuxAudio.delay = oAudioInfo.Delay;
                                if (oAudioTrack.AudioJob != null)
                                    oAudioTrack.AudioJob.Delay = oAudioInfo.Delay;
                                break;
                            }
                        }
                    }
                    if (!audioFiles.ContainsKey(oAudioTrack.AudioTrackInfo.TrackID))
                    {
                        audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                        arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                    }
                }
                else
                    arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
            }
            if (oAudioTrack.AudioJob != null)
            {
                if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE && String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                    oAudioTrack.AudioJob.Input = job.Input;
                arrAudioJobs.Add(oAudioTrack.AudioJob);
            }
            if (oAudioTrack.DirectMuxAudio != null)
                arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
        }

        if (audioFiles.Count == 0 && !job.PostprocessingProperties.Eac3toDemux
            && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE
            && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.AVISOURCE)
        {
            if ((job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI || job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM)
                && File.Exists(Path.ChangeExtension(job.IndexFile, ".log")))
            {
                job.PostprocessingProperties.FilesToDelete.Add(Path.ChangeExtension(job.IndexFile, ".log"));
                audioFiles = AudioUtil.GetAllDemuxedAudioFromDGI(arrAudioTracks, out arrAudioFilesDelete, job.IndexFile, log);
            }
            else
                audioFiles = VideoUtil.getAllDemuxedAudio(arrAudioTracks, new List<AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, log);
        }

        FillInAudioInformation(ref arrAudioJobs, arrMuxStreams);

        if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
            log.LogEvent("Don't encode video: True");
        else
            log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
        log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);

        if (IsJobStopped())
            return;

        // video file handling
        string avsFile = String.Empty;
        VideoStream myVideo = new VideoStream();
        VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
        if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            // open the video
            try
            {
                avsFile = CreateAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR,
                    job.PostprocessingProperties.HorizontalOutputResolution, log,
                    job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings,
                    job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                    job.PostprocessingProperties.UseChaptersMarks);
            }
            catch (Exception ex)
            {
                log.LogValue("An error occurred creating the AVS file", ex, ImageType.Error);
            }

            if (IsJobStopped())
                return;

            if (!String.IsNullOrEmpty(avsFile))
            {
                // check the AVS file
                JobUtil.GetInputProperties(avsFile, out ulong frameCount, out double frameRate);
                myVideo.Input = avsFile;
                myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.Input) + "_Video");
                myVideo.NumberOfFrames = frameCount;
                myVideo.Framerate = (decimal)frameRate;
                myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
                myVideo.Settings = videoSettings;
            }
            else
                bError = true;
        }
        else
        {
            myVideo.DAR = job.PostprocessingProperties.ForcedDAR;
            myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
            MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref log);
            if (Path.GetExtension(job.PostprocessingProperties.VideoFileToMux).Equals(".unknown") && !String.IsNullOrEmpty(oInfo.ContainerFileTypeString))
            {
                job.PostprocessingProperties.VideoFileToMux = Path.ChangeExtension(job.PostprocessingProperties.VideoFileToMux, oInfo.ContainerFileTypeString.ToLowerInvariant());
                File.Move(myVideo.Output, job.PostprocessingProperties.VideoFileToMux);
                myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
                job.PostprocessingProperties.FilesToDelete.Add(myVideo.Output);
            }
            myVideo.Settings = videoSettings;
            myVideo.Framerate = (decimal)oInfo.VideoInfo.FPS;
            myVideo.NumberOfFrames = oInfo.VideoInfo.FrameCount;
        }

        if (IsJobStopped())
            return;

        intermediateFiles.Add(avsFile);
        intermediateFiles.Add(job.IndexFile);
        intermediateFiles.AddRange(audioFiles.Values);
        foreach (string file in arrAudioFilesDelete)
            intermediateFiles.Add(file);
        intermediateFiles.Add(Path.ChangeExtension(job.Input, ".log"));
        foreach (string file in job.PostprocessingProperties.FilesToDelete)
            intermediateFiles.Add(file);

        // subtitle handling
        List<MuxStream> subtitles = new List<MuxStream>();
        if (job.PostprocessingProperties.SubtitleTracks.Count > 0)
        {
            foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
            {
                if (oTrack.TrackInfo.ExtractMKVTrack)
                {
                    // demuxed MKV track
                    string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                    if (File.Exists(trackFile))
                    {
                        intermediateFiles.Add(trackFile);
                        if (Path.GetExtension(trackFile).ToLowerInvariant().Equals(".idx"))
                            intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                        subtitles.Add(new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null));
                    }
                    else
                        log.LogEvent("Ignoring subtitle as it cannot be found: " + trackFile, ImageType.Warning);
                }
                else
                {
                    // sometimes the language is detected differently by vsrip and the IFO parser; therefore also search for other files
                    string strDemuxFile = oTrack.DemuxFilePath;
                    if (!File.Exists(strDemuxFile) && Path.GetFileNameWithoutExtension(strDemuxFile).Contains("_"))
                    {
                        string strDemuxFileName = Path.GetFileNameWithoutExtension(strDemuxFile);
                        strDemuxFileName = strDemuxFileName.Substring(0, strDemuxFileName.LastIndexOf("_")) + "_*" + Path.GetExtension(strDemuxFile);
                        foreach (string strFileName in Directory.GetFiles(Path.GetDirectoryName(strDemuxFile), strDemuxFileName))
                        {
                            strDemuxFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), strFileName);
                            intermediateFiles.Add(strDemuxFile);
                            intermediateFiles.Add(Path.ChangeExtension(strDemuxFile, ".sub"));
                            log.LogEvent("Subtitle " + oTrack.DemuxFilePath + " cannot be found. " + strFileName + " will be used instead", ImageType.Information);
                            break;
                        }
                    }
                    if (File.Exists(strDemuxFile))
                    {
                        // check if a forced stream is available
                        string strForcedFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), Path.GetFileNameWithoutExtension(strDemuxFile) + "_forced.idx");
                        if (File.Exists(strForcedFile))
                        {
                            subtitles.Add(new MuxStream(strForcedFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(true, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, true, null));
                            intermediateFiles.Add(strForcedFile);
                            intermediateFiles.Add(Path.ChangeExtension(strForcedFile, ".sub"));
                        }
                        subtitles.Add(new MuxStream(strDemuxFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(false, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, (File.Exists(strForcedFile) ? false : oTrack.ForcedStream), null));
                    }
                    else
                        log.LogEvent("Ignoring subtitle as it cannot be found: " + oTrack.DemuxFilePath, ImageType.Warning);
                }
            }
        }

        if (IsJobStopped())
            return;

        if (!bError)
            c = VideoUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(),
                subtitles.ToArray(), job.PostprocessingProperties.Attachments, job.PostprocessingProperties.TimeStampFile,
                job.PostprocessingProperties.ChapterInfo, job.PostprocessingProperties.OutputSize,
                job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(), log,
                job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux,
                job.PostprocessingProperties.AudioTracks.ToArray(), true);

        if (c != null && !String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile)
            && c.Jobs[c.Jobs.Length - 1].Job is MuxJob && (c.Jobs[c.Jobs.Length - 1].Job as MuxJob).MuxType == MuxerType.MP4BOX)
        {
            // the last job is an mp4box job and VFR timecode data has to be applied
            MP4FpsModJob mp4FpsMod = new MP4FpsModJob(((MuxJob)c.Jobs[c.Jobs.Length - 1].Job).Output, job.PostprocessingProperties.TimeStampFile);
            c = new SequentialChain(c, new SequentialChain(mp4FpsMod));
        }
    }
    catch (Exception e)
    {
        log.LogValue("An error occurred", e, ImageType.Error);
        bError = true;
    }

    if (c == null || bError)
    {
        log.Error("Job creation aborted");
        su.HasError = true;
    }

    // add the cleanup job, also in case of an error
    c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
    MainForm.Instance.Jobs.AddJobsWithDependencies(c, false);

    // batch processing of other input files if necessary
    if (job.PostprocessingProperties.FilesToProcess.Count > 0)
    {
        OneClickWindow ocw = new OneClickWindow();
        ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
    }
    su.IsComplete = true;
}
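// The method above polls IsJobStopped() between every expensive stage so that a user
// abort takes effect at the next stage boundary. A minimal sketch of that
// cooperative-cancellation pattern follows; the volatile flag, the RequestStop()
// entry point and the WasAborted field are assumptions for illustration, not
// necessarily the actual MeGUI implementation.
private volatile bool stopRequested;    // assumed: set from the UI thread

public void RequestStop()               // hypothetical abort entry point
{
    stopRequested = true;
}

protected bool IsJobStopped()
{
    if (stopRequested)
        su.WasAborted = true;           // assumed status flag read by the GUI
    return stopRequested;
}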
/// <summary>
/// creates the AVS script file
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values,
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="indexFile">the index file</param>
/// <param name="inputFile">the source video file</param>
/// <param name="AR">custom display aspect ratio for this source (null if it should be auto-detected)</param>
/// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
/// <param name="_log">the log item</param>
/// <param name="avsSettings">the AviSynth profile settings</param>
/// <param name="autoDeint">whether or not automatic deinterlacing should be used</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">whether the input resolution should be left untouched</param>
/// <param name="useChaptersMarks">whether chapter marks should be turned into keyframes (qpf file)</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string CreateAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth, LogItem _log, AviSynthSettings avsSettings,
    bool autoDeint, VideoCodecSettings settings, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    Dar? dar = null;
    Dar customDAR;
    IMediaFile iMediaFile = null;
    IVideoReader reader;
    PossibleSources oPossibleSource;
    x264Device xTargetDevice = null;
    CropValues cropValues = new CropValues();
    int outputWidthIncludingPadding = 0;
    int outputHeightIncludingPadding = 0;
    int outputWidthCropped = 0;
    int outputHeightCropped = 0;

    // encode anamorphic either when it is selected in the avs profile or when the input resolution should not be touched
    bool signalAR = (avsSettings.Mod16Method != mod16Method.none) || keepInputResolution;

    // make sure the proper anamorphic encode is selected if the input resolution should not be touched
    if (keepInputResolution && avsSettings.Mod16Method != mod16Method.nonMod16)
        avsSettings.Mod16Method = mod16Method.nonMod16;

    // open the index file to retrieve information
    if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
    {
        iMediaFile = new dgiFile(indexFile);
        oPossibleSource = PossibleSources.dgi;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
    {
        iMediaFile = new d2vFile(indexFile);
        oPossibleSource = PossibleSources.d2v;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM)
    {
        iMediaFile = new dgmFile(indexFile);
        oPossibleSource = PossibleSources.dgm;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
    {
        iMediaFile = new ffmsFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.ffindex;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
    {
        iMediaFile = new lsmashFile(inputFile, indexFile);
        oPossibleSource = PossibleSources.lsmash;
    }
    else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
    {
        string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
        iMediaFile = AvsFile.ParseScript(tempAvs, true);
        oPossibleSource = PossibleSources.avisource;
    }
    else
    {
        iMediaFile = AvsFile.OpenScriptFile(inputFile, true);
        oPossibleSource = PossibleSources.avs;
    }
    reader = iMediaFile.GetVideoReader();

    // abort if the index file is invalid
    if (reader.FrameCount < 1)
    {
        _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
        return "";
    }

    if (AR == null)
    {
        // the AR needs to be detected automatically now
        _log.LogValue("Auto-detect aspect ratio", AR == null);
        customDAR = iMediaFile.VideoInfo.DAR;
        if (customDAR.AR <= 0)
        {
            customDAR = Dar.ITU16x9PAL;
            _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
        customDAR = AR.Value;
    _log.LogValue("Aspect ratio", customDAR);

    // check the x264 settings (target device, chapter file)
    if (settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xTargetDevice = xs.TargetDevice;
        _log.LogValue("Target device", xTargetDevice.Name);
    }

    // get the mod value for resizing
    int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

    // crop the input as it may be required (autoCrop && !keepInputResolution or Blu-ray)
    if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
    {
        _log.Error("Autocrop failed. Aborting...");
        return "";
    }

    int inputWidth = (int)iMediaFile.VideoInfo.Width;
    int inputHeight = (int)iMediaFile.VideoInfo.Height;
    int inputFPS_D = (int)iMediaFile.VideoInfo.FPS_D;
    int inputFPS_N = (int)iMediaFile.VideoInfo.FPS_N;
    int inputFrameCount = (int)iMediaFile.VideoInfo.FrameCount;

    // force the destruction of the AVS script
    iMediaFile.Dispose();

    Dar? suggestedDar = null;
    if (desiredOutputWidth == 0)
        desiredOutputWidth = outputWidthIncludingPadding = inputWidth;
    else if (!avsSettings.Upsize && desiredOutputWidth > inputWidth)
        outputWidthIncludingPadding = inputWidth;
    else
        outputWidthIncludingPadding = desiredOutputWidth;

    CropValues paddingValues;
    bool resizeEnabled;
    int outputWidthWithoutUpsizing = outputWidthIncludingPadding;
    if (avsSettings.Upsize)
    {
        resizeEnabled = !keepInputResolution;
        CropValues cropValuesTemp = cropValues.Clone();
        int outputHeightIncludingPaddingTemp = 0;
        Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValuesTemp, autoCrop && !keepInputResolution, mod,
            ref resizeEnabled, false, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice,
            Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp,
            out paddingValues, out suggestedDar, _log);
    }

    resizeEnabled = !keepInputResolution;
    Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValues, autoCrop && !keepInputResolution, mod,
        ref resizeEnabled, avsSettings.Upsize, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice,
        Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthIncludingPadding, ref outputHeightIncludingPadding,
        out paddingValues, out suggestedDar, _log);
    keepInputResolution = !resizeEnabled;
    if (signalAR && suggestedDar.HasValue)
        dar = suggestedDar;

    // log the calculated output resolution
    outputWidthCropped = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
    outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
    _log.LogValue("Input resolution", inputWidth + "x" + inputHeight);
    _log.LogValue("Desired maximum width", desiredOutputWidth);
    if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
        _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
    if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
        _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
    if (cropValues.isCropped())
    {
        _log.LogValue("Autocrop values", cropValues);
        _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
    }
    else
        _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
    if (paddingValues.isCropped())
        _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);

    // generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0,
        avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

    if (IsJobStopped())
        return "";

    _log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        su.Status = "Automatic deinterlacing... ***PLEASE WAIT***";
        string d2vPath = indexFile;
        _sourceDetector = new SourceDetector(inputLine, d2vPath, avsSettings.PreferAnimeDeinterlace, inputFrameCount,
            Thread.CurrentThread.Priority, MainForm.Instance.Settings.SourceDetectorSettings,
            new UpdateSourceDetectionStatus(AnalyseUpdate), new FinishedAnalysis(FinishedAnalysis));
        finished = false;
        _sourceDetector.Analyse();
        WaitTillAnalyseFinished();
        _sourceDetector = null;
        if (filters != null)
        {
            deinterlaceLines = filters[0].Script;
            if (interlaced)
                _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
            else
                _log.LogValue("Deinterlacing used", deinterlaceLines);
        }
    }

    if (IsJobStopped())
        return "";

    su.Status = "Finalizing preprocessing... ***PLEASE WAIT***";

    // get the final input filter line
    inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect,
        avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

    // get the crop & resize lines
    if (!keepInputResolution)
    {
        if (autoCrop)
            cropLine = ScriptServer.GetCropLine(cropValues);
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || inputWidth != outputWidthCropped,
            outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding,
            (ResizeFilterType)avsSettings.ResizeMethod, autoCrop, cropValues, inputWidth, inputHeight);
    }

    // get the denoise line
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    else
    {
        if (xTargetDevice != null && xTargetDevice.BluRay)
        {
            string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
            x264Settings _xs = (x264Settings)settings;
            if (strResolution.Equals("720x480"))
            {
                _xs.SampleAR = 4;
                _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("720x576"))
            {
                _xs.SampleAR = 5;
                _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
            {
                _xs.SampleAR = 1;
                _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
            else if (strResolution.Equals("1440x1080"))
            {
                _xs.SampleAR = 2;
                _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + ".");
            }
        }
    }

    _log.LogValue("Generated AviSynth script", newScript);
    string strOutputAVSFile;
    if (String.IsNullOrEmpty(indexFile))
        strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
    else
        strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
    try
    {
        StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (Exception i)
    {
        _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return "";
    }

    JobUtil.GetAllInputProperties(strOutputAVSFile, out ulong numberOfFrames, out double fps, out int fps_n, out int fps_d,
        out int hres, out int vres, out Dar d, out AviSynthColorspace colorspace);
    _log.LogEvent("resolution: " + hres + "x" + vres);
    _log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
    _log.LogEvent("frames: " + numberOfFrames);
    TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps);
    _log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}", (int)(oTime.TotalHours), oTime.Minutes, oTime.Seconds, oTime.Milliseconds));
    _log.LogValue("aspect ratio", d);
    _log.LogValue("color space", colorspace.ToString());

    if (IsJobStopped())
        return "";

    // create the qpf file if necessary and possible
    if (job.PostprocessingProperties.ChapterInfo.HasChapters && useChaptersMarks && settings != null && settings is x264Settings)
    {
        fps = (double)fps_n / fps_d;
        string strChapterFile = Path.ChangeExtension(strOutputAVSFile, ".qpf");
        job.PostprocessingProperties.ChapterInfo.ChangeFps(fps);
        if (job.PostprocessingProperties.ChapterInfo.SaveQpfile(strChapterFile))
        {
            job.PostprocessingProperties.FilesToDelete.Add(strChapterFile);
            _log.LogValue("qpf file created", strChapterFile);
            x264Settings xs = (x264Settings)settings;
            xs.UseQPFile = true;
            xs.QPFile = strChapterFile;
        }
    }

    // check if a timestamp file has to be used
    if (!String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && settings != null && settings is x264Settings)
    {
        x264Settings xs = (x264Settings)settings;
        xs.TCFile = job.PostprocessingProperties.TimeStampFile;
    }

    return strOutputAVSFile;
}
/// <summary>
/// compiles the final zone configuration based on intro end frame, credits start frame and the configured zones
/// </summary>
/// <param name="vSettings">the video settings containing the list of configured zones</param>
/// <param name="introEndFrame">the frame where the intro ends</param>
/// <param name="creditsStartFrame">the frame where the credits begin</param>
/// <param name="zones">the configured zones; receives the compiled zones in the proper order</param>
/// <returns>false if the user aborted or the helper zones could not be created, true otherwise</returns>
public bool getFinalZoneConfiguration(VideoCodecSettings vSettings, int introEndFrame, int creditsStartFrame, ref Zone[] zones)
{
    Zone introZone = new Zone();
    Zone creditsZone = new Zone();
    ulong nbOfFrames = getNumberOfFrames(mainForm.Video.VideoInput);
    bool doIntroZone = false, doCreditsZone = false;
    int flushZonesStart = 0, flushZonesEnd = 0;

    if (introEndFrame > 0) // add the intro zone
    {
        introZone.startFrame = 0;
        introZone.endFrame = introEndFrame;
        introZone.mode = ZONEMODE.Quantizer;
        introZone.modifier = vSettings.CreditsQuantizer;
        if (zones.Length > 0)
        {
            Zone z = zones[0];
            if (z.startFrame > introZone.endFrame) // the first configured zone starts after the intro zone
                doIntroZone = true;
            else
            {
                flushZonesStart = 1;
                int numberOfConfiguredZones = zones.Length;
                while (flushZonesStart < numberOfConfiguredZones) // iterate forwards through the zones until we find the first that does not conflict with the intro
                {
                    Zone conflict = zones[flushZonesStart];
                    if (conflict.startFrame <= introZone.endFrame) // the zone starts before the end of the intro -> conflict
                        flushZonesStart++;
                    else
                        break;
                }
                DialogResult dr = MessageBox.Show("Your intro zone overlaps " + flushZonesStart + " zone(s) configured\nin the codec settings.\n"
                    + "Do you want to remove those zones and add the intro zone instead?", "Zone overlap detected",
                    MessageBoxButtons.YesNoCancel, MessageBoxIcon.Warning);
                if (dr == DialogResult.Yes)
                    doIntroZone = true;
                else if (dr == DialogResult.Cancel) // abort
                    return false;
                else // discard the intro zone
                    flushZonesStart = 0;
            }
        }
        else
            doIntroZone = true;
    }

    if (creditsStartFrame > 0) // add the credits zone
    {
        creditsZone.startFrame = creditsStartFrame;
        creditsZone.endFrame = (int)nbOfFrames - 1;
        creditsZone.mode = ZONEMODE.Quantizer;
        creditsZone.modifier = vSettings.CreditsQuantizer;
        if (zones.Length > 0)
        {
            Zone z = zones[zones.Length - 1]; // get the last zone
            if (z.endFrame < creditsZone.startFrame) // the last configured zone ends before the credits zone starts
                doCreditsZone = true;
            else
            {
                flushZonesEnd = 1;
                int numberOfConfiguredZones = zones.Length;
                while (numberOfConfiguredZones - flushZonesEnd - 1 >= 0) // iterate backwards through the zones until we find the first that does not conflict with the credits
                {
                    Zone conflict = zones[numberOfConfiguredZones - flushZonesEnd - 1];
                    if (conflict.endFrame >= creditsZone.startFrame) // the zone ends after the start of the credits -> conflict
                        flushZonesEnd++;
                    else
                        break;
                }
                DialogResult dr = MessageBox.Show("Your credits zone overlaps " + flushZonesEnd + " zone(s) configured\nin the codec settings.\n"
                    + "Do you want to remove those zones and add the credits zone instead?", "Zone overlap detected",
                    MessageBoxButtons.YesNoCancel, MessageBoxIcon.Warning);
                if (dr == DialogResult.Yes)
                    doCreditsZone = true;
                else if (dr == DialogResult.Cancel) // abort
                    return false;
                else // discard the credits zone
                    flushZonesEnd = 0;
            }
        }
        else // no additional zones configured
            doCreditsZone = true;
    }

    int newZoneSize = zones.Length - flushZonesStart - flushZonesEnd;
    if (doIntroZone)
        newZoneSize++;
    if (doCreditsZone)
        newZoneSize++;
    Zone[] newZones = new Zone[newZoneSize];
    int index = 0;
    if (doIntroZone)
    {
        newZones[index] = introZone;
        index++;
    }
    for (int i = flushZonesStart; i < zones.Length - flushZonesEnd; i++)
    {
        newZones[index] = zones[i];
        index++;
    }
    if (doCreditsZone)
    {
        newZones[index] = creditsZone;
        index++;
    }

    if (vSettings is xvidSettings && newZones.Length > 0)
    {
        Zone[] xvidZones = createHelperZones(newZones, (int)nbOfFrames);
        if (xvidZones == null)
            return false;
        zones = xvidZones;
        return true;
    }
    zones = newZones;
    return true;
}
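// Worked example for the zone compilation above (illustrative numbers): with
// introEndFrame = 500, creditsStartFrame = 90000, nbOfFrames = 100000 and a single
// configured zone covering frames 1000-2000, neither zone conflicts, so the method
// returns three zones in order:
//   zones[0]: frames 0-500,         ZONEMODE.Quantizer, modifier = CreditsQuantizer (intro)
//   zones[1]: frames 1000-2000,     the user zone, unchanged
//   zones[2]: frames 90000-99999,   ZONEMODE.Quantizer, modifier = CreditsQuantizer (credits)
// If the user zone started at frame 400 instead, it would conflict with the intro
// zone and the overlap prompt would offer to drop it.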
/// <summary>
/// first, the job from the currently configured settings is generated. In addition, we find out if this job is
/// part of an automated series of jobs. If so, it means the first generated job was the last pass, and we have
/// to create the previous pass(es) using the same settings; then all the generated jobs are returned
/// </summary>
/// <returns>a JobChain with the VideoJobs in the order they are to be encoded, or null on error</returns>
public JobChain prepareVideoJob(string movieInput, string movieOutput, VideoCodecSettings settings, Dar? dar, bool prerender, bool checkVideo, Zone[] zones)
{
    bool twoPasses = false, threePasses = false;
    if (settings.EncodingMode == 4) // automated twopass
        twoPasses = true;
    else if (settings.EncodingMode == 8) // automated threepass
        threePasses = true;

    VideoJob prerenderJob = null;
    string hfyuFile = null;
    string inputAVS = movieInput;
    if (prerender)
    {
        hfyuFile = Path.Combine(Path.GetDirectoryName(movieInput), "hfyu_" + Path.GetFileNameWithoutExtension(movieInput) + ".avi");
        inputAVS = Path.ChangeExtension(hfyuFile, ".avs");
        if (File.Exists(hfyuFile))
        {
            if (MessageBox.Show("The intended temporary file, " + hfyuFile + " already exists.\r\n" +
                "Do you wish to over-write it?", "File already exists", MessageBoxButtons.YesNo, MessageBoxIcon.Exclamation) == DialogResult.No)
                return null;
        }
        if (File.Exists(inputAVS))
        {
            if (MessageBox.Show("The intended temporary file, " + inputAVS + " already exists.\r\n" +
                "Do you wish to over-write it?", "File already exists", MessageBoxButtons.YesNo, MessageBoxIcon.Exclamation) == DialogResult.No)
                return null;
        }
        try
        {
            StreamWriter hfyuWrapper = new StreamWriter(inputAVS, false, Encoding.Default);
            hfyuWrapper.WriteLine("AviSource(\"" + hfyuFile + "\")");
            hfyuWrapper.Close();
        }
        catch (IOException)
        {
            return null;
        }
        prerenderJob = this.generateVideoJob(movieInput, hfyuFile, new hfyuSettings(), dar, zones);
        if (prerenderJob == null)
            return null;
    }

    if (checkVideo)
    {
        VideoUtil vUtil = new VideoUtil(mainForm);
        string error = vUtil.checkVideo(movieInput);
        if (error != null)
        {
            bool bContinue = mainForm.DialogManager.createJobs(error);
            if (!bContinue)
            {
                MessageBox.Show("Job creation aborted due to invalid AviSynth script");
                return null;
            }
        }
    }

    VideoJob job = this.generateVideoJob(inputAVS, movieOutput, settings, prerender, dar, zones);
    VideoJob firstpass = null;
    VideoJob middlepass = null;
    if (job != null)
    {
        if (twoPasses || threePasses) // we just created the last pass, now create the previous one(s)
        {
            job.FilesToDelete.Add(job.Settings.Logfile);
            if (job.Settings.SettingsID.Equals("x264"))
                job.FilesToDelete.Add(mbtreeFile);
            firstpass = cloneJob(job);
            firstpass.Output = ""; // the first pass has no output
            firstpass.Settings.EncodingMode = 2; // 2 pass 1st pass
            firstpass.DAR = dar;
            if (threePasses)
            {
                firstpass.Settings.EncodingMode = 5; // change to 3 pass 1st pass
                middlepass = cloneJob(job);
                middlepass.Settings.EncodingMode = 6; // 3 pass 2nd pass
                if (mainForm.Settings.Keep2of3passOutput) // give the 2nd pass a new name
                {
                    middlepass.Output = Path.Combine(Path.GetDirectoryName(job.Output),
                        Path.GetFileNameWithoutExtension(job.Output) + "-2ndpass" + Path.GetExtension(job.Output));
                    job.FilesToDelete.Add(middlepass.Output);
                }
                middlepass.DAR = dar;
            }
        }
        if (prerender)
        {
            job.FilesToDelete.Add(hfyuFile);
            job.FilesToDelete.Add(inputAVS);
        }
        List<VideoJob> jobList = new List<VideoJob>();
        if (prerenderJob != null)
            jobList.Add(prerenderJob);
        if (firstpass != null)
            jobList.Add(firstpass);
        if (middlepass != null) // we have a middle pass
            jobList.Add(middlepass);
        jobList.Add(job);
        return new SequentialChain(jobList.ToArray());
    }
    return null;
}
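// Illustration of the chain built above for an automated 3-pass encode
// (settings.EncodingMode == 8) with prerendering enabled; the mode numbers are
// the ones used by this method and by generateVideoJob:
//   prerenderJob : huffyuv lossless intermediate (hfyuSettings)
//   firstpass    : EncodingMode 5 (3 pass 1st pass), empty output
//   middlepass   : EncodingMode 6 (3 pass 2nd pass), renamed to *-2ndpass if Keep2of3passOutput is set
//   job          : the final pass, with the mode already rewritten by generateVideoJob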
/// <summary>
/// sets video, audio and codec defaults
/// </summary>
/// <param name="nbFrames">number of frames of the video source</param>
/// <param name="framerate">framerate of the video source</param>
/// <param name="hRes">horizontal resolution of the video source</param>
/// <param name="vRes">vertical resolution of the video source</param>
/// <param name="vSettings">the currently selected video codec settings</param>
/// <param name="audioStreams">the audio streams to preload</param>
public void SetDefaults(ulong nbFrames, double framerate, int hRes, int vRes, VideoCodecSettings vSettings, List<AudioJob> audioStreams)
{
    fpsChooser.Value = (decimal)framerate;
    if (nbFrames > 0)
        this.nbFrames.Value = nbFrames;
    if (hRes > 0)
        this.width.Value = hRes;
    if (vRes > 0)
        this.height.Value = vRes;
    if (vSettings != null)
    {
        bframes.Checked = vSettings.NbBframes > 0;
        if (videoCodec.Items.Contains(vSettings.EncoderType))
            videoCodec.SelectedItem = vSettings.EncoderType;
    }
    foreach (AudioJob job in audioStreams)
    {
        var a = AddAudio();
        a.SetAudioJob(job);
    }
    // make sure at least one audio track is displayed
    if (GetAudioStreams().Count() == 0)
        AddAudio();
}
/// <summary>
/// opens a dgindex script
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values,
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="path">the dgindex script</param>
/// <param name="AR">custom display aspect ratio for this source (null if it should be auto-detected)</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not AR signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="log">the log item</param>
/// <param name="avsSettings">the AviSynth profile settings</param>
/// <param name="autoDeint">whether or not automatic deinterlacing should be used</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="dar">receives the suggested DAR for the output</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">whether the input resolution should be left untouched</param>
/// <param name="useChaptersMarks">whether chapter marks should be turned into keyframes (qpf file)</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string openVideo(string path, Dar? AR, int horizontalResolution, bool signalAR, LogItem log, AviSynthSettings avsSettings,
    bool autoDeint, VideoCodecSettings settings, out Dar? dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    dar = null;
    CropValues final = new CropValues();
    Dar customDAR;

    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        log.Error("DGDecode reported 0 frames in this file. This is a fatal error. Please recreate the DGIndex project");
        return "";
    }

    if (!keepInputResolution)
    {
        // autocrop
        final = Autocrop.autocrop(reader);
        if (signalAR)
        {
            if (avsSettings.Mod16Method == mod16Method.overcrop)
                ScriptServer.overcrop(ref final);
            else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
                ScriptServer.cropMod4Horizontal(ref final);
            else if (avsSettings.Mod16Method == mod16Method.undercrop)
                ScriptServer.undercrop(ref final);
        }
        if (autoCrop)
        {
            bool error = (final.left == -1);
            if (!error)
                log.LogValue("Autocrop values", final);
            else
            {
                log.Error("Autocrop failed, aborting now");
                return "";
            }
        }
    }

    log.LogValue("Auto-detect aspect ratio now", AR == null);
    // check if the AR needs to be autodetected now
    if (AR == null) // it does
    {
        customDAR = d2v.Info.DAR;
        if (customDAR.ar > 0)
            log.LogValue("Aspect ratio", customDAR);
        else
        {
            customDAR = Dar.ITU16x9PAL;
            log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
        customDAR = AR.Value;

    if (keepInputResolution)
    {
        horizontalResolution = (int)d2v.Info.Width;
        dar = customDAR;
    }
    else
    {
        // minimise upsizing
        int sourceHorizontalResolution = (int)d2v.Info.Width - final.right - final.left;
        if (autoCrop)
            sourceHorizontalResolution = (int)d2v.Info.Width;
        if (horizontalResolution > sourceHorizontalResolution)
        {
            if (avsSettings.Mod16Method == mod16Method.resize)
                while (horizontalResolution > sourceHorizontalResolution + 16)
                    horizontalResolution -= 16;
            else
                horizontalResolution = sourceHorizontalResolution;
        }
    }

    // suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = 0;
    if (keepInputResolution)
    {
        scriptVerticalResolution = (int)d2v.Info.Height;
        log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);
    }
    else
    {
        scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR.ar, final,
            horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
        log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);

        // verify that the video corresponds to the chosen AVC level; if not, reduce the resolution until it fits
        if (settings != null && settings is x264Settings)
        {
            x264Settings xs = (x264Settings)settings;
            if (xs.Level != 15)
            {
                AVCLevels al = new AVCLevels();
                log.LogValue("AVC level", al.getLevels()[xs.Level]);
                int compliantLevel = 15;
                while (!al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
                {
                    // resolution not level compliant: reduce the horizontal resolution by 16, get the new vertical resolution and try again
                    horizontalResolution -= 16;
                    scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR.ar, final,
                        horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                }
                log.LogValue("Resolution adjusted for AVC level", horizontalResolution + "x" + scriptVerticalResolution);
            }
            if (useChaptersMarks)
            {
                qpfile = job.PostprocessingProperties.ChapterFile;
                if ((Path.GetExtension(qpfile).ToLower()) == ".txt")
                    qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, d2v.Info.FPS);
                if (File.Exists(qpfile))
                {
                    xs.UseQPFile = true;
                    xs.QPFile = qpfile;
                }
            }
        }
    }

    // generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0, false);

    log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings,
            new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        log.LogValue("Deinterlacing used", deinterlaceLines);
    }

    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, false);
    cropLine = ScriptServer.GetCropLine(autoCrop, final);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    if (!keepInputResolution)
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize,
            horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    log.LogValue("Generated Avisynth script", newScript);

    try
    {
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"), false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return "";
    }
    return Path.ChangeExtension(path, ".avs");
}
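// Worked example for the AVC level loop above (illustrative numbers, assuming the
// frame-size limits of level 3.1): suppose the user picked level 3.1 and a
// 1920-wide 16:9 output at 25 fps. validateAVCLevel would reject 1920x1080 (too
// many macroblocks per frame for 3.1), so the loop drops the width in steps of 16
// (1920 -> 1904 -> ...), re-deriving the height from the DAR each time, until a
// compliant resolution such as 1280x720 is reached.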
/// <summary>
/// Handles assessment of whether the encoding options vary between two xvidSettings instances.
/// The following are excluded from the comparison:
/// BitrateQuantizer
/// CreditsQuantizer
/// Logfile
/// PAR
/// PARs
/// SARX
/// SARY
/// Zones
/// </summary>
/// <param name="settings">the settings to compare against</param>
/// <returns>true if the settings differ</returns>
public bool IsAltered(VideoCodecSettings settings)
{
    if (!(settings is xvidSettings))
        return true;
    xvidSettings otherSettings = (xvidSettings)settings;
    return this.AdaptiveQuant != otherSettings.AdaptiveQuant
        || this.AveragingPeriod != otherSettings.AveragingPeriod
        || this.BframeThreshold != otherSettings.BframeThreshold
        || this.BQuantOffset != otherSettings.BQuantOffset
        || this.BQuantRatio != otherSettings.BQuantRatio
        || this.ChromaMotion != otherSettings.ChromaMotion
        || this.ClosedGOP != otherSettings.ClosedGOP
        || this.CustomEncoderOptions != otherSettings.CustomEncoderOptions
        || this.EncodingMode != otherSettings.EncodingMode
        || this.FrameDropRatio != otherSettings.FrameDropRatio
        || this.GMC != otherSettings.GMC
        || this.HighBitrateDegradation != otherSettings.HighBitrateDegradation
        || this.Interlaced != otherSettings.Interlaced
        || this.KeyFrameBoost != otherSettings.KeyFrameBoost
        || this.KeyframeInterval != otherSettings.KeyframeInterval
        || this.KeyframeReduction != otherSettings.KeyframeReduction
        || this.KeyframeThreshold != otherSettings.KeyframeThreshold
        || this.LowBitrateImprovement != otherSettings.LowBitrateImprovement
        || this.MaxBQuant != otherSettings.MaxBQuant
        || this.MaxOverflowDegradation != otherSettings.MaxOverflowDegradation
        || this.MaxOverflowImprovement != otherSettings.MaxOverflowImprovement
        || this.MaxPQuant != otherSettings.MaxPQuant
        || this.MaxQuantizer != otherSettings.MaxQuantizer
        || this.MinBQuant != otherSettings.MinBQuant
        || this.MinPQuant != otherSettings.MinPQuant
        || this.MinQuantizer != otherSettings.MinQuantizer
        || this.MotionSearchPrecision != otherSettings.MotionSearchPrecision
        || this.NbBframes != otherSettings.NbBframes
        || this.OverflowControlStrength != otherSettings.OverflowControlStrength
        || this.PackedBitstream != otherSettings.PackedBitstream
        || this.QPel != otherSettings.QPel
        || this.RateControlBuffer != otherSettings.RateControlBuffer
        || this.ReactionDelayFactor != otherSettings.ReactionDelayFactor
        || this.Trellis != otherSettings.Trellis
        || this.Turbo != otherSettings.Turbo
        || this.V4MV != otherSettings.V4MV
        || this.VHQForBframes != otherSettings.VHQForBframes
        || this.XvidProfile != otherSettings.XvidProfile
        || this.VbvBuffer != otherSettings.VbvBuffer
        || this.VbvMaxRate != otherSettings.VbvMaxRate
        || this.VbvPeakRate != otherSettings.VbvPeakRate
        || this.VHQMode != otherSettings.VHQMode
        || this.HVSMasking != otherSettings.HVSMasking;
}
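// Hedged usage sketch: a profile editor could use IsAltered as a dirty check before
// discarding edits. The profile object and its Update call are assumptions for
// illustration, not the actual MeGUI API.
xvidSettings edited = (xvidSettings)profile.Settings;   // hypothetical profile object
if (edited.IsAltered(savedSettings) &&
    MessageBox.Show("Save changes to the profile?", "Unsaved changes",
        MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes)
{
    profile.Update(edited);                             // hypothetical save call
}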
public bool AddVideoJobs(string movieInput, string movieOutput, VideoCodecSettings settings, int introEndFrame, int creditsStartFrame,
    Dar? dar, bool prerender, bool checkVideo, Zone[] zones)
{
    bool cont = getFinalZoneConfiguration(settings, introEndFrame, creditsStartFrame, ref zones);
    if (!cont) // abort
        return false;
    JobChain jobs = prepareVideoJob(movieInput, movieOutput, settings, dar, prerender, checkVideo, zones);
    if (jobs == null)
        return false;
    mainForm.Jobs.addJobsWithDependencies(jobs);
    return true; // the jobs were queued successfully
}
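// Hedged usage sketch (the JobUtil instantiation and the paths are assumptions for
// illustration): wiring zone compilation and job creation together for an automated
// 2-pass encode.
JobUtil jobUtil = new JobUtil(mainForm);    // hypothetical instantiation
bool queued = jobUtil.AddVideoJobs(
    @"D:\encodes\movie.avs",    // movieInput: the AviSynth script
    @"D:\encodes\movie.264",    // movieOutput
    settings,                   // e.g. x264Settings with EncodingMode == 4 (automated 2-pass)
    500,                        // introEndFrame: frames 0-500 become a quantizer zone
    120000,                     // creditsStartFrame: credits zone up to the last frame
    null,                       // dar: auto-detect
    false,                      // prerender
    true,                       // checkVideo
    new Zone[0]);               // no user-configured zones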