/// <summary>
/// Shows the aspect ratio chooser dialog pre-filled with a default DAR and
/// returns the ratio the user selected.
/// </summary>
/// <param name="defaultDar">DAR used to seed the dialog's fields</param>
/// <param name="newDar">receives the DAR chosen by the user</param>
/// <returns>the dialog result (OK/Cancel) of the chooser</returns>
public static DialogResult ShowDialog(Dar defaultDar, out Dar newDar)
{
    // BUG FIX: the form was never disposed; forms shown with ShowDialog()
    // are not disposed automatically, so wrap it in a using block
    using (AspectRatioChooser n = new AspectRatioChooser())
    {
        n.SetValues(defaultDar);
        // a Y below 1 means no usable X:Y fraction, so default to the decimal entry
        if (defaultDar.Y < 1)
            n.radioButton2.Checked = true;
        else
            n.radioButton1.Checked = true;
        DialogResult r = n.ShowDialog();
        if (n.radioButton1.Checked)
            newDar = new Dar(n.numericUpDown1.Value); // decimal AR entry
        else
            newDar = new Dar((ulong)n.numericUpDown2.Value, (ulong)n.numericUpDown3.Value); // X:Y entry
        return r;
    }
}
/// <summary>
/// reads the dgv file, which is essentially a text file;
/// the DAR is derived from the frame dimensions reported by the reader
/// </summary>
private void readFileProperties()
{
    info = reader.Info.Clone();
    info.DAR = new Dar(reader.Info.Width, reader.Info.Height);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
    unchecked // Overflow is fine, just wrap
    {
        int hashCode = 41;
        // fold each non-null property into the hash in a fixed order
        // (Cr, Qr, Car, Tlgr, Dar, Em) so equal instances hash equally
        object[] members = { Cr, Qr, Car, Tlgr, Dar, Em };
        foreach (object member in members)
        {
            if (member != null)
                hashCode = hashCode * 59 + member.GetHashCode();
        }
        return hashCode;
    }
}
/// <summary>
/// Opens the bitrate calculator pre-filled with the current video properties and,
/// if the user confirms, copies the calculated bitrate back into the current
/// video settings (switching quantizer-based modes to an automated n-pass mode).
/// </summary>
/// <param name="info">main form giving access to video/audio settings</param>
public void Run(MainForm info)
{
    using (Calculator calc = new Calculator(info))
    {
        ulong nbFrames = 0;
        double framerate = 0.0;
        int hRes = 0, vRes = 0;
        Dar dar = new Dar();
        // probe the opened video source (if any) for the calculator defaults
        if (!string.IsNullOrEmpty(info.Video.VideoInput))
        {
            JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out dar, info.Video.VideoInput);
        }
        calc.SetDefaults(nbFrames, framerate, hRes, vRes, info.Video.CurrentSettings, info.Audio.AudioStreams);
        DialogResult dr = calc.ShowDialog();
        if (dr != DialogResult.OK)
        {
            return;
        }
        // the calculated bitrate only applies if the codec was not changed in the dialog
        if (info.Video.CurrentSettings.EncoderType != calc.SelectedVCodec)
        {
            return;
        }
        VideoCodecSettings settings = info.Video.CurrentSettings;
        if (settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.CQ || settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.quality)
        {
            // quantizer-based modes cannot take a bitrate directly, so ask
            // to switch to an automated multi-pass mode first
            dr = MessageBox.Show("Copy calculated bitrate into current video settings and change encoding mode to automated " + info.Settings.NbPasses + "-pass?",
                "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
            {
                return;
            }
            if (info.Settings.NbPasses == 3)
            {
                settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.threepassAutomated; // Automated 3-pass
            }
            else
            {
                settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.twopassAutomated; // Automated 2-pass
            }
        }
        else
        {
            dr = MessageBox.Show("Copy calculated bitrate into current video settings?",
                "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
            {
                return;
            }
        }
        settings.BitrateQuantizer = calc.VideoBitrate;
    }
}
/// <summary>
/// Initialises a video stream description from its basic properties.
/// </summary>
/// <param name="width">frame width in pixels</param>
/// <param name="height">frame height in pixels</param>
/// <param name="dar">display aspect ratio</param>
/// <param name="framecount">total number of frames</param>
/// <param name="fps">frames per second</param>
public VideoInfo2(ulong width, ulong height, Dar dar, ulong framecount, double fps)
{
    FPS = fps;
    FrameCount = framecount;
    DAR = dar;
    Height = height;
    Width = width;
}
/// <summary>
/// reads the d2v file, which is essentially a text file
/// the first few lines contain the video properties in plain text and the
/// last line contains the film percentage
/// this method reads all this information and stores it internally, then
/// closes the d2v file again
/// </summary>
private void readFileProperties()
{
    info = reader.Info.Clone();
    // default to square pixels unless an ITU aspect ratio line is found below
    Dar dar = Dar.A1x1;
    using (StreamReader sr = new StreamReader(fileName))
    {
        // the first line (header) is read and deliberately discarded;
        // parsing starts from the second line
        string line = sr.ReadLine();
        while ((line = sr.ReadLine()) != null)
        {
            if (line.IndexOf("Aspect_Ratio") != -1) // this is the aspect ratio line
            {
                // value starts at a fixed column after "Aspect_Ratio="
                string ar = line.Substring(13);
                // ITU DAR constants only apply at full PAL (720x576) / NTSC (720x480) resolutions
                if (reader.Info.Width == 720 && reader.Info.Height == 576)
                {
                    if (ar.Equals("16:9"))
                    {
                        dar = Dar.ITU16x9PAL;
                    }
                    else if (ar.Equals("4:3"))
                    {
                        dar = Dar.ITU4x3PAL;
                    }
                }
                else if (reader.Info.Width == 720 && reader.Info.Height == 480)
                {
                    if (ar.Equals("16:9"))
                    {
                        dar = Dar.ITU16x9NTSC;
                    }
                    else if (ar.Equals("4:3"))
                    {
                        dar = Dar.ITU4x3NTSC;
                    }
                }
            }
            if (line.IndexOf("Field_Operation") != -1)
            {
                // the single digit after "Field_Operation" encodes the field mode
                string fieldOp = line.Substring(16, 1);
                this.fieldOperation = Int32.Parse(fieldOp);
            }
            if (line.IndexOf("FINISHED") != -1 && line.IndexOf("FILM") != -1) // dgindex now reports VIDEO % if it's > 50%
            {
                // percentage sits between a fixed offset and the '%' sign;
                // parsed with invariant culture since d2v always uses '.'
                int end = line.IndexOf("%");
                string percentage = line.Substring(10, end - 10);
                this.filmPercentage = Double.Parse(percentage, System.Globalization.CultureInfo.InvariantCulture);
            }
        }
    }
    info.DAR = dar;
}
/// <summary>
/// Initialises a snapshot of a media file's stream properties.
/// </summary>
/// <param name="hasVideo">whether the file contains a video track</param>
/// <param name="width">frame width in pixels</param>
/// <param name="height">frame height in pixels</param>
/// <param name="dar">display aspect ratio</param>
/// <param name="frameCount">total number of frames</param>
/// <param name="fps">frames per second</param>
/// <param name="hasAudio">whether the file contains at least one audio track</param>
public MediaFileInfo(bool hasVideo, ulong width, ulong height, Dar dar, ulong frameCount, double fps, bool hasAudio)
{
    HasAudio = hasAudio;
    HasVideo = hasVideo;
    FPS = fps;
    FrameCount = frameCount;
    DAR = dar;
    Height = height;
    Width = width;
}
/// <summary>
/// Propagates a changed X value of the X:Y fraction into the decimal AR field,
/// suppressing the reciprocal ValueChanged handler while doing so.
/// </summary>
private void numericUpDown2_ValueChanged(object sender, EventArgs e)
{
    if (bDisableEvents)
        return;
    bDisableEvents = true;
    Dar fraction = new Dar((ulong)numericUpDown2.Value, (ulong)numericUpDown3.Value);
    decimal arValue = fraction.AR;
    // only push the derived AR when it fits the control's allowed range
    bool inRange = arValue >= numericUpDown1.Minimum && arValue <= numericUpDown1.Maximum;
    if (inRange)
        numericUpDown1.Value = arValue;
    bDisableEvents = false;
}
/// <summary>
/// Pre-loads all three numeric fields (decimal AR plus X:Y fraction) from the
/// given aspect ratio without triggering the cross-updating ValueChanged handlers.
/// </summary>
/// <param name="ar">aspect ratio used to seed the controls</param>
public void SetValues(Dar ar)
{
    bDisableEvents = true;
    // each value is applied only when it fits the target control's range
    decimal arValue = ar.AR;
    if (arValue >= numericUpDown1.Minimum && arValue <= numericUpDown1.Maximum)
        numericUpDown1.Value = arValue;
    decimal x = ar.X;
    if (x >= numericUpDown2.Minimum && x <= numericUpDown2.Maximum)
        numericUpDown2.Value = x;
    decimal y = ar.Y;
    if (y >= numericUpDown3.Minimum && y <= numericUpDown3.Maximum)
        numericUpDown3.Value = y;
    bDisableEvents = false;
}
/// <summary>
/// Resizes the video window to the requested width, deriving the height from
/// the clip's aspect ratio (optionally honouring a user-chosen DAR).
/// </summary>
/// <param name="targetWidth">desired video window width in pixels</param>
/// <param name="PAR">when true, a DAR selected in the chooser overrides the frame-derived ratio</param>
private void resize(int targetWidth, bool PAR)
{
    zoomWidth = targetWidth;
    // start from the storage aspect ratio of the clip
    Dar aspect = new Dar(file.VideoInfo.Width, file.VideoInfo.Height);
    if (PAR && arChooser.Value.HasValue)
        aspect = arChooser.Value.Value;
    videoWindowWidth = targetWidth;
    videoWindowHeight = (int)Math.Round((decimal)targetWidth / aspect.AR);
    // suppress re-entrant resize handling while the window is adjusted
    sizeLock = true;
    adjustSize();
    VideoPlayer_Shown(null, null);
    sizeLock = false;
}
/// <summary>
/// gets the number of frames, framerate, horizontal and vertical resolution
/// from a video source; reports failures to the user instead of throwing
/// </summary>
/// <param name="nbOfFrames">the number of frames</param>
/// <param name="framerate">the framerate</param>
/// <param name="hRes">the horizontal resolution</param>
/// <param name="vRes">the vertical resolution</param>
/// <param name="dar">the display aspect ratio</param>
/// <param name="video">the video whose properties are to be read</param>
/// <returns>whether the source could be opened or not</returns>
public static bool getAllInputProperties(out ulong nbOfFrames, out double framerate, out int hRes, out int vRes, out Dar dar, string video)
{
    try
    {
        GetAllInputProperties(out nbOfFrames, out framerate, out hRes, out vRes, out dar, video);
        return true;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Cannot open video input", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        // fall back to neutral defaults so the out parameters are assigned on every path
        nbOfFrames = 0;
        framerate = 0;
        hRes = 0;
        vRes = 0;
        dar = Dar.ITU16x9PAL;
        return false;
    }
}
/// <summary>
/// Returns true if Solvency instances are equal
/// </summary>
/// <param name="other">Instance of Solvency to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(Solvency other)
{
    if (other is null)
        return false;
    if (ReferenceEquals(this, other))
        return true;
    // each property matches when both sides are the same reference (or both null),
    // or when the left side is non-null and Equals the right side
    if (!(Cr == other.Cr || Cr != null && Cr.Equals(other.Cr)))
        return false;
    if (!(Qr == other.Qr || Qr != null && Qr.Equals(other.Qr)))
        return false;
    if (!(Car == other.Car || Car != null && Car.Equals(other.Car)))
        return false;
    if (!(Tlgr == other.Tlgr || Tlgr != null && Tlgr.Equals(other.Tlgr)))
        return false;
    if (!(Dar == other.Dar || Dar != null && Dar.Equals(other.Dar)))
        return false;
    return Em == other.Em || Em != null && Em.Equals(other.Em);
}
/// <summary>
/// Propagates a changed decimal AR value into the X:Y fraction fields,
/// suppressing the reciprocal ValueChanged handlers while doing so.
/// </summary>
private void numericUpDown1_ValueChanged(object sender, EventArgs e)
{
    if (bDisableEvents)
        return;
    bDisableEvents = true;
    // derive an X:Y fraction from the decimal value
    Dar fraction = new Dar(numericUpDown1.Value);
    decimal x = fraction.X;
    if (x >= numericUpDown2.Minimum && x <= numericUpDown2.Maximum)
        numericUpDown2.Value = x;
    decimal y = fraction.Y;
    if (y >= numericUpDown3.Minimum && y <= numericUpDown3.Maximum)
        numericUpDown3.Value = y;
    bDisableEvents = false;
}
/// <summary>
/// Opens the bitrate calculator pre-filled with the current video properties and,
/// if the user confirms, copies the calculated bitrate back into the current
/// video settings (switching quantizer-based modes to an automated n-pass mode).
/// </summary>
/// <param name="info">main form giving access to video/audio settings</param>
public void Run(MainForm info)
{
    using (Calculator calc = new Calculator(info))
    {
        ulong nbFrames = 0;
        double framerate = 0.0;
        int hRes = 0, vRes = 0;
        Dar dar = new Dar();
        // probe the opened video source (if any) for the calculator defaults
        if (!string.IsNullOrEmpty(info.Video.VideoInput))
            JobUtil.getAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out dar, info.Video.VideoInput);
        calc.SetDefaults(nbFrames, framerate, hRes, vRes, info.Video.CurrentSettings, info.Audio.AudioStreams);
        DialogResult dr = calc.ShowDialog();
        if (dr != DialogResult.OK)
            return;
        // the calculated bitrate only applies if the codec was not changed in the dialog
        if (info.Video.CurrentSettings.EncoderType != calc.SelectedVCodec)
            return;
        VideoCodecSettings settings = info.Video.CurrentSettings;
        // modes 1 and 9 appear to be quantizer-based (cannot take a bitrate
        // directly) — NOTE(review): confirm against the EncodingMode table
        if (settings.EncodingMode == 1 || settings.EncodingMode == 9)
        {
            dr = MessageBox.Show("Copy calculated bitrate into current video settings and change encoding mode to automated " + info.Settings.NbPasses + "-pass?",
                "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
                return;
            if (info.Settings.NbPasses == 3)
                settings.EncodingMode = 8; // Automated 3-pass
            else
                settings.EncodingMode = 4; // Automated 2-pass
        }
        else
        {
            dr = MessageBox.Show("Copy calculated bitrate into current video settings?",
                "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (dr != DialogResult.Yes)
                return;
        }
        settings.BitrateQuantizer = calc.VideoBitrate;
    }
}
/// <summary>
/// Reads frame count, framerate, resolution and DAR from an AviSynth script.
/// </summary>
/// <param name="nbOfFrames">receives the number of frames</param>
/// <param name="framerate">receives the framerate</param>
/// <param name="hRes">receives the horizontal resolution</param>
/// <param name="vRes">receives the vertical resolution</param>
/// <param name="dar">receives the display aspect ratio</param>
/// <param name="video">path of the script to open</param>
/// <exception cref="JobRunException">thrown when the source cannot be opened</exception>
public static void GetAllInputProperties(out ulong nbOfFrames, out double framerate, out int hRes, out int vRes, out Dar dar, string video)
{
    nbOfFrames = 0;
    framerate = 0.0;
    hRes = 0;
    vRes = 0;
    try
    {
        using (AvsFile avi = AvsFile.OpenScriptFile(video))
        {
            // overflow on the frame-count cast should fail loudly
            checked
            {
                nbOfFrames = (ulong)avi.Info.FrameCount;
            }
            framerate = avi.Info.FPS;
            hRes = (int)avi.Info.Width;
            vRes = (int)avi.Info.Height;
            dar = avi.Info.DAR;
        }
    }
    catch (Exception e)
    {
        string message = "The file " + video + " cannot be opened.\r\n" + "Error message for your reference: " + e.Message;
        throw new JobRunException(message, e);
    }
}
/// <summary>
/// tries to open the video source and gets the number of frames from it, or
/// exits with an error
/// </summary>
/// <param name="videoSource">the AviSynth script</param>
/// <param name="error">return parameter for all errors</param>
/// <returns>true if the file could be opened, false if not</returns>
protected void getInputProperties(VideoJob job)
{
    log.LogValue("AviSynth input script", GetAVSFileContent());
    double fps;
    Dar d = Dar.A1x1;
    AviSynthColorspace colorspace_original;
    // probe the script for frame count, fps (double and rational), resolution,
    // DAR and original colorspace
    JobUtil.GetAllInputProperties(job.Input, out numberOfFrames, out fps, out fps_n, out fps_d, out hres, out vres, out d, out colorspace_original);
    Dar? dar = job.DAR;
    su.NbFramesTotal = numberOfFrames;
    su.ClipLength = TimeSpan.FromSeconds((double)numberOfFrames / fps);
    // the script-derived DAR is only used when the job does not already carry one
    if (!job.DAR.HasValue)
    {
        job.DAR = d;
    }
    // log
    if (log == null)
    {
        return;
    }
    log.LogEvent("resolution: " + hres + "x" + vres);
    log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
    log.LogEvent("frames: " + numberOfFrames);
    log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}", (int)(su.ClipLength.Value.TotalHours), su.ClipLength.Value.Minutes, su.ClipLength.Value.Seconds, su.ClipLength.Value.Milliseconds));
    // log the job DAR separately only when it differs from what the script reports
    if (dar.HasValue && d.AR == dar.Value.AR)
    {
        log.LogValue("aspect ratio", d);
    }
    else
    {
        log.LogValue("aspect ratio (avs)", d);
        if (dar.HasValue)
        {
            log.LogValue("aspect ratio (job)", dar.Value);
        }
    }
    // a purely numeric colorspace name is flagged with a warning image —
    // NOTE(review): presumably numeric means unknown/unmapped; confirm intent
    if (Int32.TryParse(colorspace_original.ToString(), out int result))
    {
        log.LogValue("color space", colorspace_original.ToString(), ImageType.Warning);
    }
    else
    {
        log.LogValue("color space", colorspace_original.ToString());
    }
    // select the target encoder name used for colorspace-compatibility lookup
    string strEncoder = "ffmpeg";
    if (this is XviDEncoder)
    {
        strEncoder = "xvid";
    }
    else if (this is x264Encoder && (MainForm.Instance.Settings.IsMeGUIx64 || !MainForm.Instance.Settings.Usex64Tools))
    {
        strEncoder = "x264";
    }
    AviSynthColorspace colorspace_target = AviSynthColorspaceHelper.GetConvertedColorspace(strEncoder, colorspace_original);
    if (colorspace_original != colorspace_target
        && !AviSynthColorspaceHelper.IsConvertedToColorspace(job.Input, colorspace_target.ToString()))
    {
        // ask the user before appending a ConvertTo() call to the script
        if (MainForm.Instance.DialogManager.AddConvertTo(colorspace_original.ToString(), colorspace_target.ToString()))
        {
            AviSynthColorspaceHelper.AppendConvertTo(job.Input, colorspace_target, colorspace_original);
            log.LogValue("AviSynth input script (appended)", GetAVSFileContent());
            // Check everything again, to see if it is all fixed now
            AviSynthColorspace colorspace_converted;
            JobUtil.GetAllInputProperties(job.Input, out numberOfFrames, out fps, out fps_n, out fps_d, out hres, out vres, out d, out colorspace_converted);
            if (colorspace_original != colorspace_converted)
            {
                log.LogValue("color space converted", colorspace_converted.ToString());
            }
            else
            {
                log.LogEvent("color space not supported, conversion failed", ImageType.Error);
            }
        }
        else
        {
            log.LogEvent("color space not supported", ImageType.Error);
        }
    }
}
/// <summary>
/// Builds the video encoding job chain (with zone/credits handling) for the
/// given source and queues it in the main form's job list.
/// </summary>
/// <param name="movieInput">path of the video source</param>
/// <param name="movieOutput">path of the encoded output</param>
/// <param name="settings">codec settings to encode with</param>
/// <param name="introEndFrame">last frame of the intro (for zone generation)</param>
/// <param name="creditsStartFrame">first frame of the credits (for zone generation)</param>
/// <param name="dar">display aspect ratio to signal, if any</param>
/// <param name="prerender">whether to prerender the source first</param>
/// <param name="checkVideo">whether to add a video check job</param>
/// <param name="zones">encoding zones to apply</param>
/// <returns>true if the jobs were queued; false if the user aborted or job creation failed</returns>
public bool AddVideoJobs(string movieInput, string movieOutput, VideoCodecSettings settings, int introEndFrame, int creditsStartFrame, Dar? dar, bool prerender, bool checkVideo, Zone[] zones)
{
    bool cont = getFinalZoneConfiguration(settings, introEndFrame, creditsStartFrame, ref zones);
    if (!cont) // abort
        return false;
    JobChain jobs = prepareVideoJob(movieInput, movieOutput, settings, dar, prerender, checkVideo, zones);
    if (jobs == null)
        return false;
    mainForm.Jobs.addJobsWithDependencies(jobs);
    // BUG FIX: this previously returned false on success as well, which made
    // the return value meaningless to callers
    return true;
}
/// <summary>
/// Builds the xvid_encraw command line for the given settings: encoding mode,
/// rate control, profile/VBV limits, motion/quantizer options, zones, PAR and
/// the output file switch.
/// </summary>
/// <param name="input">path of the input video</param>
/// <param name="output">path of the output file (extension selects the container switch)</param>
/// <param name="d">optional display aspect ratio for custom PAR signalling</param>
/// <param name="xs">xvid settings to encode with</param>
/// <param name="hres">horizontal resolution (used for PAR derivation)</param>
/// <param name="vres">vertical resolution (used for PAR derivation)</param>
/// <param name="zones">encoding zones to apply (quantizer or weight)</param>
/// <returns>the assembled command line string</returns>
public static string genCommandline(string input, string output, Dar? d, xvidSettings xs, int hres, int vres, Zone[] zones)
{
    StringBuilder sb = new StringBuilder();
    // en-us culture guarantees '.' decimal separators on the command line
    CultureInfo ci = new CultureInfo("en-us");
    sb.Append("-i \"" + input + "\" ");
    switch (xs.EncodingMode)
    {
        case 0: // CBR
            sb.Append("-single -bitrate " + xs.BitrateQuantizer + " "); // add bitrate
            break;
        case 1: // CQ
            sb.Append("-single -cq " + xs.Quantizer.ToString(ci) + " "); // add quantizer
            break;
        case 2: // 2 pass first pass
            sb.Append("-pass1 " + "\"" + xs.Logfile + "\" -bitrate " + xs.BitrateQuantizer + " "); // add logfile
            break;
        case 3: // 2 pass second pass
        case 4: // automated twopass
            sb.Append("-pass2 " + "\"" + xs.Logfile + "\" -bitrate " + xs.BitrateQuantizer + " "); // add logfile
            break;
    }
    // options below are only emitted when they differ from encraw's defaults
    if (xs.EncodingMode <= 1) // 1 pass modes
    {
        if (xs.ReactionDelayFactor != 16)
            sb.Append("-reaction " + xs.ReactionDelayFactor + " ");
        if (xs.AveragingPeriod != 100)
            sb.Append("-averaging " + xs.AveragingPeriod + " ");
        if (xs.RateControlBuffer != 100)
            sb.Append("-smoother " + xs.RateControlBuffer + " ");
    }
    else // two pass modes
    {
        if (xs.KeyFrameBoost != 10)
            sb.Append("-kboost " + xs.KeyFrameBoost + " ");
        if (xs.KeyframeThreshold != 1)
            sb.Append("-kthresh " + xs.KeyframeThreshold + " ");
        if (xs.KeyframeReduction != 20)
            sb.Append("-kreduction " + xs.KeyframeReduction + " ");
        if (xs.OverflowControlStrength != 5)
            sb.Append("-ostrength " + xs.OverflowControlStrength + " ");
        if (xs.MaxOverflowImprovement != 5)
            sb.Append("-oimprove " + xs.MaxOverflowImprovement + " ");
        if (xs.MaxOverflowDegradation != 5)
            sb.Append("-odegrade " + xs.MaxOverflowDegradation + " ");
        if (xs.HighBitrateDegradation != 0)
            sb.Append("-chigh " + xs.HighBitrateDegradation + " ");
        if (xs.LowBitrateImprovement != 0)
            sb.Append("-clow " + xs.LowBitrateImprovement + " ");
        sb.Append("-overhead 0 ");
        // profile-specific VBV limits (case 6 = user-defined values)
        if (xs.XvidProfile != 0)
        {
            switch (xs.XvidProfile)
            {
                case 0:
                    break;
                case 1:
                    sb.Append("-vbvmax 4854000 -vbvsize 3145728 -vbvpeak 2359296 ");
                    break;
                case 2:
                    sb.Append("-vbvmax 9708400 -vbvsize 6291456 -vbvpeak 4718592 ");
                    break;
                case 3:
                    sb.Append("-vbvmax 20000000 -vbvsize 16000000 -vbvpeak 12000000 ");
                    break;
                case 4:
                    sb.Append("-vbvmax 200000 -vbvsize 262144 -vbvpeak 196608 ");
                    break;
                case 5:
                    sb.Append("-vbvmax 600000 -vbvsize 655360 -vbvpeak 491520 ");
                    break;
                case 6:
                    if (xs.VbvBuffer != 0)
                        sb.Append("-vbvsize " + xs.VbvBuffer + " ");
                    if (xs.VbvMaxRate != 0)
                        sb.Append("-vbvmax " + xs.VbvMaxRate + " ");
                    if (xs.VbvPeakRate != 0)
                        sb.Append("-vbvpeak " + xs.VbvPeakRate + " ");
                    break;
            }
        }
    }
    if (xs.Turbo)
        sb.Append("-turbo ");
    if (xs.KeyframeInterval != 300)
        sb.Append("-max_key_interval " + xs.KeyframeInterval + " ");
    if (!xs.PackedBitstream) // default is on in encraw
        sb.Append("-nopacked ");
    if (xs.MotionSearchPrecision != 6)
        sb.Append("-quality " + xs.MotionSearchPrecision + " ");
    if (xs.VHQMode != 1)
        sb.Append("-vhqmode " + xs.VHQMode + " ");
    if (xs.QPel)
        sb.Append("-qpel ");
    if (xs.GMC)
        sb.Append("-gmc ");
    // quantizer matrix: MPEG, custom file, or the (default) H.263 matrix
    if (xs.QuantizerMatrix == xvidSettings.MPEGMatrix)
        sb.Append("-qtype 1 ");
    else if (xs.QuantizerMatrix != xvidSettings.H263Matrix && !string.IsNullOrEmpty(xs.QuantizerMatrix))
        sb.Append("-qmatrix \"" + xs.QuantizerMatrix + "\" ");
    if (xs.Interlaced)
    {
        sb.Append("-interlaced ");
        // 1 = bottom field first, 2 = top field first
        if (xs.BottomFieldFirst)
            sb.Append("1 ");
        else
            sb.Append("2 ");
    }
    if (xs.HVSMasking != 0)
        sb.Append("-masking " + xs.HVSMasking + " ");
    if (!xs.Trellis)
        sb.Append("-notrellis ");
    if (!xs.ChromaMotion)
        sb.Append("-nochromame ");
    if (xs.MinQuantizer != 2)
        sb.Append("-imin " + xs.MinQuantizer + " ");
    if (xs.MaxQuantizer != 31)
        sb.Append("-imax " + xs.MaxQuantizer + " ");
    if (xs.MinPQuant != 2)
        sb.Append("-pmin " + xs.MinPQuant + " ");
    if (xs.MaxPQuant != 31)
        sb.Append("-pmax " + xs.MaxPQuant + " ");
    if (!xs.ClosedGOP)
        sb.Append("-noclosed_gop ");
    if (xs.FrameDropRatio != 0)
        sb.Append("-drop " + xs.FrameDropRatio + " ");
    if (xs.NbBframes != 2)
        sb.Append("-max_bframes " + xs.NbBframes + " ");
    // B-frame quantizer options only make sense when B-frames are enabled
    if (xs.NbBframes > 0)
    {
        if (xs.VHQForBframes)
            sb.Append("-bvhq ");
        if (xs.BQuantRatio != 150)
            sb.Append("-bquant_ratio " + xs.BQuantRatio + " ");
        if (xs.BQuantOffset != 100)
            sb.Append("-bquant_offset " + xs.BQuantOffset + " ");
        if (xs.MinBQuant != 2)
            sb.Append("-bmin " + xs.MinBQuant + " ");
        if (xs.MaxBQuant != 31)
            sb.Append("-bmax " + xs.MaxBQuant + " ");
    }
    if (d.HasValue) // custom PAR mode
    {
        Sar s = d.Value.ToSar(hres, vres);
        sb.Append("-par " + s.X + ":" + s.Y + " ");
    }
    sb.Append("-threads " + xs.NbThreads + " ");
    if (zones != null && zones.Length > 0 && xs.CreditsQuantizer >= new decimal(1) && xs.EncodingMode != 1) // only for non CQ mode at the moment
    {
        foreach (Zone zone in zones)
        {
            if (zone.mode == ZONEMODE.Quantizer)
                sb.Append("-zq " + zone.startFrame + " " + zone.modifier + " ");
            if (zone.mode == ZONEMODE.Weight)
            {
                sb.Append("-zw " + zone.startFrame + " ");
                // the modifier is a percentage; encraw expects a fraction
                double mod = (double)zone.modifier / 100.0;
                sb.Append(mod.ToString(ci) + " ");
            }
        }
    }
    if (xs.EncodingMode != 2) // not 2 pass vbr first pass, add output filename and output type
    {
        string extension = Path.GetExtension(output).ToLower();
        if (extension.Equals(".mkv"))
            sb.Append(" -mkv \"" + output + "\"");
        else if (extension.Equals(".avi"))
            sb.Append(" -avi \"" + output + "\"");
        else
            sb.Append(" -o \"" + output + "\"");
    }
    if (!xs.CustomEncoderOptions.Equals("")) // add custom encoder options
        sb.Append(" " + xs.CustomEncoderOptions);
    return sb.ToString();
}
/// <summary>
/// Convenience overload: creates a video job with prerendering disabled.
/// </summary>
/// <param name="input">path of the video source</param>
/// <param name="output">path of the encoded output</param>
/// <param name="settings">codec settings to encode with</param>
/// <param name="dar">display aspect ratio to signal, if any</param>
/// <param name="zones">encoding zones to apply</param>
/// <returns>the configured video job</returns>
public VideoJob generateVideoJob(string input, string output, VideoCodecSettings settings, Dar? dar, Zone[] zones)
{
    return generateVideoJob(input, output, settings, false, dar, zones);
}
/// <summary>
/// reads the dga file, which is essentially a text file;
/// the DAR is taken from the frame dimensions the reader reports
/// </summary>
private void readFileProperties()
{
    info = reader.Info.Clone();
    info.DAR = new Dar(reader.Info.Width, reader.Info.Height);
}
/// <summary>
/// gets the number of frames, framerate (double and rational), horizontal and
/// vertical resolution, DAR and colorspace from a video source
/// </summary>
/// <param name="video">the video whose properties are to be read</param>
/// <param name="nbOfFrames">the number of frames</param>
/// <param name="framerate">the framerate</param>
/// <param name="framerate_n">the FPS_N</param>
/// <param name="framerate_d">the FPS_D</param>
/// <param name="hRes">the horizontal resolution</param>
/// <param name="vRes">the vertical resolution</param>
/// <param name="dar">the dar value</param>
/// <param name="colorspace">the original colorspace of the clip</param>
/// <returns>whether the source could be opened or not</returns>
/// <exception cref="JobRunException">thrown when the source cannot be opened</exception>
public static bool GetAllInputProperties(string video, out ulong nbOfFrames, out double framerate, out int framerate_n, out int framerate_d, out int hRes, out int vRes, out Dar dar, out AviSynthColorspace colorspace)
{
    try
    {
        using (AvsFile avsFile = AvsFile.OpenScriptFile(video))
        {
            // overflow on the frame-count cast should fail loudly
            checked
            {
                nbOfFrames = (ulong)avsFile.VideoInfo.FrameCount;
            }
            framerate = avsFile.VideoInfo.FPS;
            framerate_n = avsFile.VideoInfo.FPS_N;
            framerate_d = avsFile.VideoInfo.FPS_D;
            hRes = (int)avsFile.VideoInfo.Width;
            vRes = (int)avsFile.VideoInfo.Height;
            dar = avsFile.VideoInfo.DAR;
            colorspace = avsFile.Clip.OriginalColorspace;
        }
        return true;
    }
    catch (Exception ex)
    {
        throw new JobRunException("The file " + video + " cannot be opened.\r\n" + "Error message for your reference: " + ex.Message, ex);
    }
}
/// <summary>
/// Configures the mux window: forwards the common settings to the base class,
/// then sets the muxed (already-encoded) input file and re-validates the I/O.
/// </summary>
/// <param name="videoInput">path of the video input</param>
/// <param name="muxedInput">path of the pre-muxed input file</param>
/// <param name="framerate">framerate to signal, if any</param>
/// <param name="audioStreams">audio streams to mux</param>
/// <param name="subtitleStreams">subtitle streams to mux</param>
/// <param name="chapterFile">path of the chapter file</param>
/// <param name="output">path of the muxed output</param>
/// <param name="splitSize">output split size, if any</param>
/// <param name="dar">display aspect ratio to signal, if any</param>
/// <param name="deviceType">target device compatibility type</param>
private void setConfig(string videoInput, string muxedInput, decimal? framerate, MuxStream[] audioStreams, MuxStream[] subtitleStreams, string chapterFile, string output, FileSize? splitSize, Dar? dar, string deviceType)
{
    base.setConfig(videoInput, framerate, audioStreams, subtitleStreams, chapterFile, output, splitSize, dar, deviceType);
    this.muxedInput.Filename = muxedInput;
    this.checkIO();
}
/// <summary>
/// gets the number of frames, framerate, horizontal and vertical resolution from a video source
/// </summary>
/// <param name="nbOfFrames">the number of frames</param>
/// <param name="framerate">the framerate</param>
/// <param name="hRes">the horizontal resolution</param>
/// <param name="vRes">the vertical resolution</param>
/// <param name="dar">the display aspect ratio</param>
/// <param name="video">the video whose properties are to be read</param>
/// <returns>whether the source could be opened or not</returns>
public static bool getAllInputProperties(out ulong nbOfFrames, out double framerate, out int hRes, out int vRes, out Dar dar, string video)
{
    try
    {
        GetAllInputProperties(out nbOfFrames, out framerate, out hRes, out vRes, out dar, video);
        return true;
    }
    catch (Exception e)
    {
        // report the failure to the user and fall back to neutral defaults so
        // the out parameters are assigned on every path
        MessageBox.Show(e.Message, "Cannot open video input", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        nbOfFrames = 0;
        hRes = vRes = 0;
        framerate = 0;
        // NOTE(review): ITU 16:9 PAL as the fallback DAR looks arbitrary — confirm
        dar = Dar.ITU16x9PAL;
        return false;
    }
}
/// <summary>
/// Opens a media file with MediaInfo and caches its container type, audio
/// codecs/bitrate modes and (if present) the first video track's properties.
/// </summary>
/// <param name="file">path of the file to analyze</param>
public MediaInfoFile(string file)
{
    this.file = file;
    MediaInfo info = new MediaInfo(file);
    bool hasVideo = (info.Video.Count > 0);
    aCodecs = new AudioCodec[info.Audio.Count];
    aBitrateModes = new BitrateManagementMode[info.Audio.Count];
    int i = 0;
    foreach (MediaInfoWrapper.AudioTrack track in info.Audio)
    {
        aCodecs[i] = getAudioCodec(track.Codec);
        if (track.BitRateMode == "VBR")
        {
            aBitrateModes[i] = BitrateManagementMode.VBR;
        }
        else
        {
            aBitrateModes[i] = BitrateManagementMode.CBR;
        }
        i++; // BUG FIX: the index was never advanced, so every track overwrote element 0
    }
    if (info.General.Count < 1)
    {
        cType = null;
    }
    else
    {
        cType = getContainerType(info.General[0].Format, info.General[0].FormatString);
    }
    // an audio type can only be derived unambiguously for a single audio track
    if (aCodecs.Length == 1)
    {
        aType = getAudioType(aCodecs[0], cType, file);
    }
    else
    {
        aType = null;
    }
    if (hasVideo)
    {
        MediaInfoWrapper.VideoTrack track = info.Video[0];
        checked // fail loudly on negative or overflowing dimension values
        {
            ulong width = (ulong)easyParseInt(track.Width);
            ulong height = (ulong)easyParseInt(track.Height);
            ulong frameCount = (ulong)easyParseInt(track.FrameCount);
            double fps = (easyParseDouble(track.FrameRate) ?? 25.0); // fall back to PAL rate when unknown
            vCodec = getVideoCodec(track.Codec);
            vType = getVideoType(vCodec, cType, file);
            Dar dar = new Dar((decimal?)easyParseDouble(track.AspectRatio), width, height);
            this.info = new MediaFileInfo(hasVideo, width, height, dar, frameCount, fps, aCodecs.Length > 0);
        }
    }
    else
    {
        // no video track: record a neutral 1:1 DAR and zeroed video properties
        this.info = new MediaFileInfo(false, 0, 0, Dar.A1x1, 0, 0, aCodecs.Length > 0);
    }
}
/// <summary>
/// Opens a media file with MediaInfo and builds a MediaFile describing its
/// video, audio, subtitle tracks and chapters. Returns null when the file
/// cannot be analyzed (deliberate best-effort: any failure is swallowed).
/// </summary>
/// <param name="file">path of the file to open</param>
/// <returns>the parsed MediaFile, or null on any failure</returns>
public static MediaFile Open(string file)
{
    try
    {
        MediaInfo m = new MediaInfo(file);
        // tracks
        List<MediaTrack> tracks = new List<MediaTrack>();
        foreach (MediaInfoWrapper.VideoTrack t in m.Video)
        {
            VideoTrack v = new VideoTrack();
            v.Codec = v.VCodec = getVideoCodec(t.Codec);
            v.Info = new MeGUI.core.details.TrackInfo(t.Language, t.Title);
            ulong width = ulong.Parse(t.Width);
            ulong height = ulong.Parse(t.Height);
            ulong frameCount = ulong.Parse(t.FrameCount);
            // BUG FIX: MediaInfo reports numbers with a '.' decimal separator, so
            // parse with the invariant culture; the previous current-culture parse
            // threw on comma-decimal locales, making this method return null
            double fps = double.Parse(t.FrameRate, System.Globalization.CultureInfo.InvariantCulture);
            decimal? ar = easyParse<decimal>(delegate { return decimal.Parse(t.AspectRatio, System.Globalization.CultureInfo.InvariantCulture); });
            Dar dar = new Dar(ar, width, height);
            v.StreamInfo = new VideoInfo2(width, height, dar, frameCount, fps);
            v.TrackNumber = uint.Parse(t.ID);
            tracks.Add(v);
        }
        foreach (MediaInfoWrapper.AudioTrack t in m.Audio)
        {
            AudioTrack a = new AudioTrack();
            a.Codec = a.ACodec = getAudioCodec(t.Codec);
            a.Info = new MeGUI.core.details.TrackInfo(t.Language, t.Title);
            a.StreamInfo = new AudioInfo();
            a.TrackNumber = uint.Parse(t.ID);
            tracks.Add(a);
        }
        foreach (MediaInfoWrapper.TextTrack t in m.Text)
        {
            SubtitleTrack s = new SubtitleTrack();
            s.Codec = s.SCodec = getSubtitleCodec(t.Codec);
            s.Info = new MeGUI.core.details.TrackInfo(t.Language, t.Title);
            s.StreamInfo = new SubtitleInfo2();
            s.TrackNumber = uint.Parse(t.ID);
            tracks.Add(s);
        }
        if (m.General.Count != 1)
        {
            throw new Exception("Expected one general track");
        }
        GeneralTrack g = m.General[0];
        ContainerType cType = getContainerType(g.Format, g.FormatString);
        // NOTE(review): TimeSpan.Parse is also culture-sensitive — confirm the
        // format of PlayTimeString3 before changing it
        TimeSpan playTime = TimeSpan.Parse(g.PlayTimeString3);
        Chapters chapters = null;
        if (m.Chapters.Count == 1)
        {
            chapters = parseChapters(m.Chapters[0]);
        }
        return new MediaFile(tracks, chapters, playTime, cType);
    }
    catch (Exception)
    {
        // best-effort: callers treat null as "not a usable media file"
        return null;
    }
}
/// <summary>
/// Builds the complete x264 command line for the given settings.
/// Each switch is emitted only when (a) the user has not already written it into
/// CustomEncoderOptions and (b) its value differs from the default implied by the
/// chosen preset/tuning/profile, keeping the generated command line minimal.
/// The append order below is deliberate — do not reorder sections.
/// </summary>
/// <param name="input">input video file (typically an AviSynth script)</param>
/// <param name="output">output file; first passes write to NUL instead</param>
/// <param name="d">display aspect ratio to signal via --sar, if any</param>
/// <param name="hres">horizontal resolution, used to derive the SAR</param>
/// <param name="vres">vertical resolution, used to derive the SAR</param>
/// <param name="xs">the x264 settings to translate into CLI switches</param>
/// <param name="zones">per-frame-range quantizer/weight overrides (--zones)</param>
/// <returns>the assembled x264 command line</returns>
public static string genCommandline(string input, string output, Dar? d, int hres, int vres, x264Settings xs, Zone[] zones)
{
    int qp;
    bool display = false;
    StringBuilder sb = new StringBuilder();
    CultureInfo ci = new CultureInfo("en-us"); // fixed culture so decimal values always use '.'

    ///<summary>
    /// x264 Main Tab Settings
    ///</summary>

    // AVC Profiles
    if (!xs.CustomEncoderOptions.Contains("--profile "))
    {
        switch (xs.Profile)
        {
            case 0: sb.Append("--profile baseline "); break;
            case 1: sb.Append("--profile main "); break;
            case 2: break; // --profile high is the default value
        }
    }

    // AVC Levels
    if (!xs.CustomEncoderOptions.Contains("--level "))
        if (xs.Level != 15) // unrestricted
            sb.Append("--level " + AVCLevels.getCLILevelNames()[xs.Level] + " ");

    // x264 Presets
    if (!xs.CustomEncoderOptions.Contains("--preset "))
    {
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast: sb.Append("--preset ultrafast "); break;
            case x264Settings.x264PresetLevelModes.superfast: sb.Append("--preset superfast "); break;
            case x264Settings.x264PresetLevelModes.veryfast: sb.Append("--preset veryfast "); break;
            case x264Settings.x264PresetLevelModes.faster: sb.Append("--preset faster "); break;
            case x264Settings.x264PresetLevelModes.fast: sb.Append("--preset fast "); break;
            //case x264Settings.x264PresetLevelModes.medium: sb.Append("--preset medium "); break; // default value
            case x264Settings.x264PresetLevelModes.slow: sb.Append("--preset slow "); break;
            case x264Settings.x264PresetLevelModes.slower: sb.Append("--preset slower "); break;
            case x264Settings.x264PresetLevelModes.veryslow: sb.Append("--preset veryslow "); break;
            case x264Settings.x264PresetLevelModes.placebo: sb.Append("--preset placebo "); break;
        }
    }

    // x264 Tunings
    if (!xs.CustomEncoderOptions.Contains("--tune"))
    {
        switch (xs.x264Tuning)
        {
            case 1: sb.Append("--tune film "); break;
            case 2: sb.Append("--tune animation "); break;
            case 3: sb.Append("--tune grain "); break;
            case 4: sb.Append("--tune psnr "); break;
            case 5: sb.Append("--tune ssim "); break;
            case 6: sb.Append("--tune fastdecode "); break;
            case 7: sb.Append("--tune touhou "); break;
            default: break; // default
        }
    }

    // Encoding Modes
    switch (xs.EncodingMode)
    {
        case 0: // ABR
            if (!xs.CustomEncoderOptions.Contains("--bitrate"))
                sb.Append("--bitrate " + xs.BitrateQuantizer + " ");
            break;
        case 1: // CQ
            if (!xs.CustomEncoderOptions.Contains("--qp "))
            {
                qp = (int)xs.QuantizerCRF;
                sb.Append("--qp " + qp.ToString(ci) + " ");
            }
            break;
        case 2: // 2 pass first pass
            sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 3: // 2 pass second pass
        case 4: // automated twopass
            sb.Append("--pass 2 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 5: // 3 pass first pass
            sb.Append("--pass 1 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 6: // 3 pass 2nd pass
            sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 7: // 3 pass 3rd pass
        case 8: // automated threepass, show third pass options
            sb.Append("--pass 3 --bitrate " + xs.BitrateQuantizer + " --stats " + "\"" + xs.Logfile + "\" ");
            break;
        case 9: // constant quality
            if (!xs.CustomEncoderOptions.Contains("--crf"))
                if (xs.QuantizerCRF != 23)
                    sb.Append("--crf " + xs.QuantizerCRF.ToString(ci) + " ");
            break;
    }

    // Slow 1st Pass — only meaningful on first passes of multi-pass encodes
    if (!xs.CustomEncoderOptions.Contains("--slow-firstpass"))
        if ((xs.X264SlowFirstpass) && xs.x264PresetLevel < x264Settings.x264PresetLevelModes.placebo &&
            ((xs.EncodingMode == 2) || // 2 pass first pass
             (xs.EncodingMode == 4) || // automated twopass
             (xs.EncodingMode == 5) || // 3 pass first pass
             (xs.EncodingMode == 8)))  // automated threepass
            sb.Append("--slow-firstpass ");

    // Threads
    if (!xs.CustomEncoderOptions.Contains("--thread-input"))
        if (xs.ThreadInput && xs.NbThreads == 1)
            sb.Append("--thread-input ");
    if (!xs.CustomEncoderOptions.Contains("--threads"))
        if (xs.NbThreads > 0)
            sb.Append("--threads " + xs.NbThreads + " ");

    ///<summary>
    /// x264 Frame-Type Tab Settings
    ///</summary>

    // H.264 Features — deblocking: defaults depend on the tuning in effect
    if (xs.Deblock)
    {
        display = false;
        switch (xs.x264Tuning)
        {
            case 1: case 7: if (xs.AlphaDeblock != -1 || xs.BetaDeblock != -1) display = true; break;
            case 2: if (xs.AlphaDeblock != 1 || xs.BetaDeblock != 1) display = true; break;
            case 3: if (xs.AlphaDeblock != -2 || xs.BetaDeblock != -2) display = true; break;
            default: if (xs.AlphaDeblock != 0 || xs.BetaDeblock != 0) display = true; break;
        }
        if (!xs.CustomEncoderOptions.Contains("--deblock "))
            if (display)
                sb.Append("--deblock " + xs.AlphaDeblock + ":" + xs.BetaDeblock + " ");
    }
    else
    {
        if (!xs.CustomEncoderOptions.Contains("--no-deblock"))
            if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast || (xs.x264Tuning != 0 && xs.x264Tuning != 6))
                sb.Append("--no-deblock ");
    }

    if (!xs.CustomEncoderOptions.Contains("--no-cabac"))
    {
        if (!xs.Cabac)
        {
            // baseline profile (0) disables CABAC implicitly, no switch needed
            if (xs.Profile > 0 && (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast || (xs.x264Tuning != 0 && xs.x264Tuning != 6)))
                sb.Append("--no-cabac ");
        }
    }

    // GOP Size
    if (!xs.CustomEncoderOptions.Contains("--keyint"))
        if (xs.KeyframeInterval != 250) // gop size of 250 is default
            sb.Append("--keyint " + xs.KeyframeInterval + " ");
    if (!xs.CustomEncoderOptions.Contains("--min-keyint"))
        if (xs.MinGOPSize != 25)
            sb.Append("--min-keyint " + xs.MinGOPSize + " ");

    // B-Frames
    if (xs.Profile > 0 && !xs.CustomEncoderOptions.Contains("--bframes")) // baseline profile always uses 0 bframes
    {
        int iDefaultSettings = 3;
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast: iDefaultSettings = 0; break;
            case x264Settings.x264PresetLevelModes.veryslow: iDefaultSettings = 8; break;
            case x264Settings.x264PresetLevelModes.placebo: iDefaultSettings = 16; break;
        }
        if (xs.x264Tuning == 2) // animation
            iDefaultSettings += 2;
        if (xs.NbBframes != iDefaultSettings)
            sb.Append("--bframes " + xs.NbBframes + " ");
    }

    if (xs.NbBframes > 0)
    {
        if (!xs.CustomEncoderOptions.Contains("-b-adapt"))
        {
            display = false;
            if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.medium)
            {
                if (xs.NewAdaptiveBFrames != 2) display = true;
            }
            else if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.ultrafast)
            {
                if (xs.NewAdaptiveBFrames != 1) display = true;
            }
            else
            {
                if (xs.NewAdaptiveBFrames != 0) display = true;
            }
            if (display)
                sb.Append("--b-adapt " + xs.NewAdaptiveBFrames + " ");
        }
        if (xs.NbBframes > 1 && !xs.CustomEncoderOptions.Contains("--b-pyramid"))
        {
            switch (xs.x264BFramePyramid) // pyramid needs a minimum of 2 b frames
            {
                case 1: sb.Append("--b-pyramid strict "); break;
                case 0: sb.Append("--b-pyramid none "); break;
            }
        }
        if (!xs.CustomEncoderOptions.Contains("--no-weightb"))
            if (!xs.WeightedBPrediction && xs.x264Tuning != 6 && xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast)
                sb.Append("--no-weightb ");
    }

    // B-Frames bias
    if (!xs.CustomEncoderOptions.Contains("--b-bias "))
        if (xs.BframeBias != 0.0M)
            sb.Append("--b-bias " + xs.BframeBias.ToString(ci) + " ");

    // Other
    if (!xs.CustomEncoderOptions.Contains("--interlaced"))
        if (xs.EncodeInterlaced)
            sb.Append("--interlaced ");

    if (xs.Scenecut)
    {
        if (!xs.CustomEncoderOptions.Contains("--scenecut "))
            if ((xs.SCDSensitivity != 40M && xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast) ||
                (xs.SCDSensitivity != 0M && xs.x264PresetLevel == x264Settings.x264PresetLevelModes.ultrafast))
                sb.Append("--scenecut " + xs.SCDSensitivity.ToString(ci) + " ");
    }
    else
    {
        if (!xs.CustomEncoderOptions.Contains("--no-scenecut"))
            if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast)
                sb.Append("--no-scenecut ");
    }

    // reference frames
    if (!xs.CustomEncoderOptions.Contains("--ref "))
    {
        int iDefaultSettings = 0;
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast:
            case x264Settings.x264PresetLevelModes.superfast:
            case x264Settings.x264PresetLevelModes.veryfast: iDefaultSettings = 1; break;
            case x264Settings.x264PresetLevelModes.faster:
            case x264Settings.x264PresetLevelModes.fast: iDefaultSettings = 2; break;
            case x264Settings.x264PresetLevelModes.medium: iDefaultSettings = 3; break;
            case x264Settings.x264PresetLevelModes.slow: iDefaultSettings = 5; break;
            case x264Settings.x264PresetLevelModes.slower: iDefaultSettings = 8; break;
            case x264Settings.x264PresetLevelModes.veryslow:
            case x264Settings.x264PresetLevelModes.placebo: iDefaultSettings = 16; break;
        }
        // animation/touhou tunings double the preset's ref count
        if ((xs.x264Tuning == 2 || xs.x264Tuning == 7) && iDefaultSettings > 1)
            iDefaultSettings = iDefaultSettings * 2;
        if (iDefaultSettings != xs.NbRefFrames)
            sb.Append("--ref " + xs.NbRefFrames + " ");
    }

    // WeightedPPrediction
    if (!xs.CustomEncoderOptions.Contains("--weightp "))
    {
        display = false;
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast:
            case x264Settings.x264PresetLevelModes.superfast:
            case x264Settings.x264PresetLevelModes.veryfast: if (xs.WeightedPPrediction != 0) display = true; break;
            case x264Settings.x264PresetLevelModes.faster: if (xs.WeightedPPrediction != 1) display = true; break;
            default: if (xs.WeightedPPrediction != 2) display = true; break;
        }
        if (xs.x264Tuning == 6 && xs.WeightedPPrediction != 0)
            display = true;
        if (xs.Profile == 0) // baseline: no weighted prediction
            display = false;
        if (display)
            sb.Append("--weightp " + xs.WeightedPPrediction + " ");
    }

    // Slicing
    if (!xs.CustomEncoderOptions.Contains("--slices "))
        if (xs.SlicesNb != 0)
            sb.Append("--slices " + xs.SlicesNb + " ");
    if (!xs.CustomEncoderOptions.Contains("--slice-max-size "))
        if (xs.MaxSliceSyzeBytes != 0)
            sb.Append("--slice-max-size " + xs.MaxSliceSyzeBytes + " ");
    if (!xs.CustomEncoderOptions.Contains("--slice-max-mbs "))
        if (xs.MaxSliceSyzeMBs != 0)
            sb.Append("--slice-max-mbs " + xs.MaxSliceSyzeMBs + " ");

    ///<summary>
    /// x264 Rate Control Tab Settings
    /// </summary>
    if (!xs.CustomEncoderOptions.Contains("--qpmin "))
        if (xs.MinQuantizer != 10)
            sb.Append("--qpmin " + xs.MinQuantizer + " ");
    if (!xs.CustomEncoderOptions.Contains("--qpmax "))
        if (xs.MaxQuantizer != 51)
            sb.Append("--qpmax " + xs.MaxQuantizer + " ");
    if (!xs.CustomEncoderOptions.Contains("--qpstep "))
        if (xs.MaxQuantDelta != 4)
            sb.Append("--qpstep " + xs.MaxQuantDelta + " ");

    if (xs.IPFactor != 1.4M)
    {
        display = true;
        if (xs.x264Tuning == 3 && xs.IPFactor == 1.1M) // grain tuning default
            display = false;
        if (!xs.CustomEncoderOptions.Contains("--ipratio "))
            if (display)
                sb.Append("--ipratio " + xs.IPFactor.ToString(ci) + " ");
    }
    if (xs.PBFactor != 1.3M)
    {
        display = true;
        if (xs.x264Tuning == 3 && xs.PBFactor == 1.1M) // grain tuning default
            display = false;
        if (!xs.CustomEncoderOptions.Contains("--pbratio "))
            if (display)
                sb.Append("--pbratio " + xs.PBFactor.ToString(ci) + " ");
    }

    if (!xs.CustomEncoderOptions.Contains("--chroma-qp-offset "))
        if (xs.ChromaQPOffset != 0.0M)
            sb.Append("--chroma-qp-offset " + xs.ChromaQPOffset.ToString(ci) + " ");

    if (xs.EncodingMode != 1) // doesn't apply to CQ mode
    {
        if (!xs.CustomEncoderOptions.Contains("--vbv-bufsize "))
            if (xs.VBVBufferSize > 0)
                sb.Append("--vbv-bufsize " + xs.VBVBufferSize + " ");
        if (!xs.CustomEncoderOptions.Contains("--vbv-maxrate "))
            if (xs.VBVMaxBitrate > 0)
                sb.Append("--vbv-maxrate " + xs.VBVMaxBitrate + " ");
        if (!xs.CustomEncoderOptions.Contains("--vbv-init "))
            if (xs.VBVInitialBuffer != 0.9M)
                sb.Append("--vbv-init " + xs.VBVInitialBuffer.ToString(ci) + " ");
        if (!xs.CustomEncoderOptions.Contains("--ratetol "))
            if (xs.BitrateVariance != 1.0M)
                sb.Append("--ratetol " + xs.BitrateVariance.ToString(ci) + " ");
        if (!xs.CustomEncoderOptions.Contains("--qcomp "))
        {
            display = true;
            if ((xs.x264Tuning == 3 && xs.QuantCompression == 0.8M) || (xs.x264Tuning != 3 && xs.QuantCompression == 0.6M))
                display = false;
            if (display)
                sb.Append("--qcomp " + xs.QuantCompression.ToString(ci) + " ");
        }
        if (xs.EncodingMode > 1) // applies only to twopass
        {
            if (!xs.CustomEncoderOptions.Contains("--cplxblur "))
                if (xs.TempComplexityBlur != 20)
                    sb.Append("--cplxblur " + xs.TempComplexityBlur.ToString(ci) + " ");
            if (!xs.CustomEncoderOptions.Contains("--qblur "))
                if (xs.TempQuanBlurCC != 0.5M)
                    sb.Append("--qblur " + xs.TempQuanBlurCC.ToString(ci) + " ");
        }
    }

    // Dead Zones
    if (!xs.CustomEncoderOptions.Contains("--deadzone-inter "))
    {
        display = true;
        if ((xs.x264Tuning != 3 && xs.DeadZoneInter == 21) || (xs.x264Tuning == 3 && xs.DeadZoneInter == 6))
            display = false;
        if (display)
            sb.Append("--deadzone-inter " + xs.DeadZoneInter + " ");
    }
    if (!xs.CustomEncoderOptions.Contains("--deadzone-intra "))
    {
        display = true;
        if ((xs.x264Tuning != 3 && xs.DeadZoneIntra == 11) || (xs.x264Tuning == 3 && xs.DeadZoneIntra == 6))
            display = false;
        if (display)
            sb.Append("--deadzone-intra " + xs.DeadZoneIntra + " ");
    }

    // Disable Macroblok Tree
    // NOTE(review): --no-mbtree is emitted when NoMBTree is FALSE and the lookahead
    // branch runs when it is TRUE — the property name suggests inverted semantics;
    // confirm what NoMBTree actually means in x264Settings before changing anything.
    if (!xs.NoMBTree)
    {
        if (!xs.CustomEncoderOptions.Contains("--no-mbtree"))
            if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.veryfast)
                sb.Append("--no-mbtree ");
    }
    else
    {
        // RC Lookahead
        if (!xs.CustomEncoderOptions.Contains("--rc-lookahead "))
        {
            display = false;
            switch (xs.x264PresetLevel)
            {
                case x264Settings.x264PresetLevelModes.faster: if (xs.Lookahead != 20) display = true; break;
                case x264Settings.x264PresetLevelModes.fast: if (xs.Lookahead != 30) display = true; break;
                case x264Settings.x264PresetLevelModes.medium: if (xs.Lookahead != 40) display = true; break;
                case x264Settings.x264PresetLevelModes.slow: if (xs.Lookahead != 50) display = true; break;
                case x264Settings.x264PresetLevelModes.slower:
                case x264Settings.x264PresetLevelModes.veryslow:
                case x264Settings.x264PresetLevelModes.placebo: if (xs.Lookahead != 60) display = true; break;
            }
            if (display)
                sb.Append("--rc-lookahead " + xs.Lookahead + " ");
        }
    }

    // AQ-Mode
    if (xs.EncodingMode != (int)VideoCodecSettings.Mode.CQ)
    {
        if (xs.AQmode > 0)
        {
            if (!xs.CustomEncoderOptions.Contains("--aq-mode "))
            {
                display = true;
                if ((xs.x264Tuning != 5 && xs.AQmode == 1) || (xs.x264Tuning == 5 && xs.AQmode == 2))
                    display = false;
                if (display)
                    sb.Append("--aq-mode " + xs.AQmode.ToString() + " ");
            }
            display = false;
            switch (xs.x264Tuning)
            {
                case 2: if (xs.AQstrength != 0.6M) display = true; break;
                case 3: if (xs.AQstrength != 0.5M) display = true; break;
                case 7: if (xs.AQstrength != 1.3M) display = true; break;
                default: if (xs.AQstrength != 1.0M) display = true; break;
            }
            if (!xs.CustomEncoderOptions.Contains("--aq-strength "))
                if (display)
                    sb.Append("--aq-strength " + xs.AQstrength.ToString(ci) + " ");
        }
        else
        {
            if (!xs.CustomEncoderOptions.Contains("--aq-mode "))
                if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.ultrafast && xs.x264Tuning != 4)
                    sb.Append("--aq-mode 0 ");
        }
    }

    // custom matrices
    if (xs.QuantizerMatrixType > 0)
    {
        switch (xs.QuantizerMatrixType)
        {
            case 1:
                if (!xs.CustomEncoderOptions.Contains("--cqm "))
                    sb.Append("--cqm \"jvt\" ");
                break;
            case 2:
                if (!xs.CustomEncoderOptions.Contains("--cqmfile"))
                    sb.Append("--cqmfile \"" + xs.QuantizerMatrix + "\" ");
                break;
        }
    }

    ///<summary>
    /// x264 Analysis Tab Settings
    /// </summary>

    // Disable Chroma Motion Estimation
    if (!xs.CustomEncoderOptions.Contains("--no-chroma-me"))
        if (!xs.ChromaME)
            sb.Append("--no-chroma-me ");

    // Motion Estimation Range
    if (!xs.CustomEncoderOptions.Contains("--merange "))
    {
        if ((xs.x264PresetLevel <= x264Settings.x264PresetLevelModes.slower && xs.MERange != 16) ||
            (xs.x264PresetLevel >= x264Settings.x264PresetLevelModes.veryslow && xs.MERange != 24))
            sb.Append("--merange " + xs.MERange + " ");
    }

    // ME Type
    if (!xs.CustomEncoderOptions.Contains("--me "))
    {
        display = false;
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast:
            case x264Settings.x264PresetLevelModes.superfast: if (xs.METype != 0) display = true; break;
            case x264Settings.x264PresetLevelModes.veryfast:
            case x264Settings.x264PresetLevelModes.faster:
            case x264Settings.x264PresetLevelModes.fast:
            case x264Settings.x264PresetLevelModes.medium: if (xs.METype != 1) display = true; break;
            case x264Settings.x264PresetLevelModes.slow:
            case x264Settings.x264PresetLevelModes.slower:
            case x264Settings.x264PresetLevelModes.veryslow: if (xs.METype != 2) display = true; break;
            case x264Settings.x264PresetLevelModes.placebo: if (xs.METype != 4) display = true; break;
        }
        if (display)
        {
            switch (xs.METype)
            {
                case 0: sb.Append("--me dia "); break;
                case 1: sb.Append("--me hex "); break;
                case 2: sb.Append("--me umh "); break;
                case 3: sb.Append("--me esa "); break;
                case 4: sb.Append("--me tesa "); break;
            }
        }
    }

    if (!xs.CustomEncoderOptions.Contains("--direct "))
    {
        display = false;
        if (xs.x264PresetLevel > x264Settings.x264PresetLevelModes.medium)
        {
            if (xs.BframePredictionMode != 3) display = true;
        }
        else if (xs.BframePredictionMode != 1)
            display = true;
        if (display)
        {
            switch (xs.BframePredictionMode)
            {
                case 0: sb.Append("--direct none "); break;
                case 1: sb.Append("--direct spatial "); break;
                case 2: sb.Append("--direct temporal "); break;
                case 3: sb.Append("--direct auto "); break;
            }
        }
    }

    if (!xs.CustomEncoderOptions.Contains("--nr "))
        if (xs.NoiseReduction > 0)
            sb.Append("--nr " + xs.NoiseReduction + " ");

    // subpel refinement
    if (!xs.CustomEncoderOptions.Contains("--subme "))
    {
        display = false;
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast: if (xs.SubPelRefinement != 0) display = true; break;
            case x264Settings.x264PresetLevelModes.superfast: if (xs.SubPelRefinement != 1) display = true; break;
            case x264Settings.x264PresetLevelModes.veryfast: if (xs.SubPelRefinement != 2) display = true; break;
            case x264Settings.x264PresetLevelModes.faster: if (xs.SubPelRefinement != 4) display = true; break;
            case x264Settings.x264PresetLevelModes.fast: if (xs.SubPelRefinement != 6) display = true; break;
            case x264Settings.x264PresetLevelModes.medium: if (xs.SubPelRefinement != 7) display = true; break;
            case x264Settings.x264PresetLevelModes.slow: if (xs.SubPelRefinement != 8) display = true; break;
            case x264Settings.x264PresetLevelModes.slower: if (xs.SubPelRefinement != 9) display = true; break;
            case x264Settings.x264PresetLevelModes.veryslow: if (xs.SubPelRefinement != 10) display = true; break;
            case x264Settings.x264PresetLevelModes.placebo: if (xs.SubPelRefinement != 10) display = true; break;
        }
        if (display)
            sb.Append("--subme " + (xs.SubPelRefinement) + " ");
    }

    // macroblock types — compare against the partition set the preset implies
    if (!xs.CustomEncoderOptions.Contains("--partitions "))
    {
        bool bExpectedP8x8mv = true;
        bool bExpectedB8x8mv = true;
        bool bExpectedI4x4mv = true;
        bool bExpectedI8x8mv = true;
        bool bExpectedP4x4mv = true;
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast:
                bExpectedP8x8mv = false; bExpectedB8x8mv = false; bExpectedI4x4mv = false;
                bExpectedI8x8mv = false; bExpectedP4x4mv = false;
                break;
            case x264Settings.x264PresetLevelModes.superfast:
                bExpectedP8x8mv = false; bExpectedB8x8mv = false; bExpectedP4x4mv = false;
                break;
            case x264Settings.x264PresetLevelModes.veryfast:
            case x264Settings.x264PresetLevelModes.faster:
            case x264Settings.x264PresetLevelModes.fast:
            case x264Settings.x264PresetLevelModes.medium:
            case x264Settings.x264PresetLevelModes.slow:
                bExpectedP4x4mv = false;
                break;
        }
        if (xs.x264Tuning == 7 && bExpectedP8x8mv) // touhou
            bExpectedP4x4mv = true;
        if (bExpectedP8x8mv != xs.P8x8mv || bExpectedB8x8mv != xs.B8x8mv || bExpectedI4x4mv != xs.I4x4mv ||
            bExpectedI8x8mv != xs.I8x8mv || bExpectedP4x4mv != xs.P4x4mv)
        {
            if (xs.P8x8mv || xs.B8x8mv || xs.I4x4mv || xs.I8x8mv || xs.P4x4mv)
            {
                sb.Append("--partitions ");
                if (xs.I4x4mv && xs.I8x8mv && xs.P4x4mv && xs.P8x8mv && xs.B8x8mv)
                    sb.Append("all ");
                else
                {
                    if (xs.P8x8mv) // default is checked
                        sb.Append("p8x8,");
                    if (xs.B8x8mv) // default is checked
                        sb.Append("b8x8,");
                    if (xs.I4x4mv) // default is checked
                        sb.Append("i4x4,");
                    if (xs.P4x4mv) // default is unchecked
                        sb.Append("p4x4,");
                    if (xs.I8x8mv) // default is checked
                        sb.Append("i8x8");
                    if (sb.ToString().EndsWith(","))
                        sb.Remove(sb.Length - 1, 1); // strip trailing comma
                }
                if (!sb.ToString().EndsWith(" "))
                    sb.Append(" ");
            }
            else
                sb.Append("--partitions none ");
        }
    }

    if (!xs.CustomEncoderOptions.Contains("--no-8x8dct"))
        if (!xs.AdaptiveDCT)
            if (xs.Profile > 0 && xs.x264PresetLevel > x264Settings.x264PresetLevelModes.ultrafast)
                sb.Append("--no-8x8dct ");

    // Trellis — requires CABAC
    if (!xs.CustomEncoderOptions.Contains("--trellis ") && xs.Cabac)
    {
        display = false;
        switch (xs.x264PresetLevel)
        {
            case x264Settings.x264PresetLevelModes.ultrafast:
            case x264Settings.x264PresetLevelModes.superfast:
            case x264Settings.x264PresetLevelModes.veryfast: if (xs.X264Trellis != 0) display = true; break;
            case x264Settings.x264PresetLevelModes.faster:
            case x264Settings.x264PresetLevelModes.fast:
            case x264Settings.x264PresetLevelModes.medium:
            case x264Settings.x264PresetLevelModes.slow: if (xs.X264Trellis != 1) display = true; break;
            case x264Settings.x264PresetLevelModes.slower:
            case x264Settings.x264PresetLevelModes.veryslow:
            case x264Settings.x264PresetLevelModes.placebo: if (xs.X264Trellis != 2) display = true; break;
        }
        if (display)
            sb.Append("--trellis " + xs.X264Trellis + " ");
    }

    if (!xs.CustomEncoderOptions.Contains("--psy-rd "))
    {
        if (xs.SubPelRefinement > 5) // psy-rd requires subme >= 6
        {
            display = false;
            switch (xs.x264Tuning)
            {
                case 1: if ((xs.PsyRDO != 1.0M) && (xs.PsyTrellis != 0.15M)) display = true; break;
                case 2: if ((xs.PsyRDO != 0.4M) && (xs.PsyTrellis != 0.0M)) display = true; break;
                case 3: if ((xs.PsyRDO != 1.0M) && (xs.PsyTrellis != 0.25M)) display = true; break;
                case 7: if ((xs.PsyRDO != 1.0M) && (xs.PsyTrellis != 0.2M)) display = true; break;
                default: if ((xs.PsyRDO != 1.0M) || (xs.PsyTrellis != 0.0M)) display = true; break;
            }
            if (display)
                sb.Append("--psy-rd " + xs.PsyRDO.ToString(ci) + ":" + xs.PsyTrellis.ToString(ci) + " ");
        }
    }
    else
    {
        display = false;
        switch (xs.x264Tuning)
        {
            case 1: if (xs.PsyTrellis != 0.15M) display = true; break;
            case 3: if (xs.PsyTrellis != 0.25M) display = true; break;
            case 7: if (xs.PsyTrellis != 0.2M) display = true; break;
            case 0: case 4: { if (xs.PsyTrellis != 0.0M) display = true; } break;
        }
        // NOTE(review): this Contains check looks for "--psy-rd 0: " (with a trailing
        // space after the colon), which the Append below never produces — verify the
        // intended duplicate-suppression pattern.
        if (!xs.CustomEncoderOptions.Contains("--psy-rd 0: "))
            if (display)
                sb.Append("--psy-rd 0:" + xs.PsyTrellis.ToString(ci) + " ");
    }

    if (!xs.CustomEncoderOptions.Contains("--no-mixed-refs"))
        if (xs.NoMixedRefs)
            if (xs.x264PresetLevel >= x264Settings.x264PresetLevelModes.fast)
                sb.Append("--no-mixed-refs ");
    if (!xs.CustomEncoderOptions.Contains("--no-dct-decimate"))
        if (xs.NoDCTDecimate)
            if (xs.x264Tuning != 3)
                sb.Append("--no-dct-decimate ");
    if (!xs.CustomEncoderOptions.Contains("--no-fast-pskip"))
        if (xs.NoFastPSkip)
            if (xs.x264PresetLevel != x264Settings.x264PresetLevelModes.placebo)
                sb.Append("--no-fast-pskip ");
    if (!xs.CustomEncoderOptions.Contains("--no-psy"))
        if (xs.NoPsy && (xs.x264Tuning != 4 && xs.x264Tuning != 5))
            sb.Append("--no-psy ");
    if (!xs.CustomEncoderOptions.Contains("--aud"))
        if (xs.X264Aud)
            sb.Append("--aud ");
    if (!xs.CustomEncoderOptions.Contains("--nal-hrd"))
        if (xs.X264Nalhrd)
            sb.Append("--nal-hrd vbr ");

    ///<summary>
    /// x264 Misc Tab Settings
    /// </summary>

    // QPFile
    // NOTE(review): the single-dash "-qpfile " check matches both "-qpfile" and
    // "--qpfile" in the custom options — presumably intentional; confirm.
    if (!xs.CustomEncoderOptions.Contains("-qpfile "))
        if (xs.UseQPFile)
            if (xs.EncodingMode == 0 || xs.EncodingMode == 1 || xs.EncodingMode == 2 ||
                xs.EncodingMode == 5 || xs.EncodingMode == 9)
                sb.Append("--qpfile " + "\"" + xs.QPFile + "\" ");

    if (!xs.CustomEncoderOptions.Contains("--psnr"))
        if (xs.PSNRCalculation)
            sb.Append("--psnr ");
    if (!xs.CustomEncoderOptions.Contains("--ssim"))
        if (xs.SSIMCalculation)
            sb.Append("--ssim ");
    if (!xs.CustomEncoderOptions.Contains("--fullrange on"))
        if (xs.fullRange)
            sb.Append("--fullrange on ");

    if (!xs.CustomEncoderOptions.Equals("")) // add custom encoder options
        sb.Append(xs.CustomEncoderOptions + " ");

    // zones: "--zones start,end,q=X/start,end,b=Y/..."
    if (zones != null && zones.Length > 0 && xs.CreditsQuantizer >= 1.0M)
    {
        sb.Append("--zones ");
        foreach (Zone zone in zones)
        {
            sb.Append(zone.startFrame + "," + zone.endFrame + ",");
            if (zone.mode == ZONEMODE.Quantizer)
            {
                sb.Append("q=");
                sb.Append(zone.modifier + "/");
            }
            if (zone.mode == ZONEMODE.Weight)
            {
                sb.Append("b=");
                double mod = (double)zone.modifier / 100.0;
                sb.Append(mod.ToString(ci) + "/");
            }
        }
        sb.Remove(sb.Length - 1, 1); // strip trailing '/'
        sb.Append(" ");
    }

    if (!xs.CustomEncoderOptions.Contains("--sar "))
    {
        if (d.HasValue)
        {
            Sar s = d.Value.ToSar(hres, vres);
            sb.Append("--sar " + s.X + ":" + s.Y + " ");
        }
    }

    //add the rest of the commandline regarding the output
    if (xs.EncodingMode == 2 || xs.EncodingMode == 5) // first passes produce only stats
        sb.Append("--output NUL ");
    else
        sb.Append("--output " + "\"" + output + "\" ");
    sb.Append("\"" + input + "\" ");
    return sb.ToString();
}
/// <summary>
/// gets the number of frames, framerate, horizontal and vertical resolution
/// and display aspect ratio from a video source
/// </summary>
/// <param name="nbOfFrames">the number of frames</param>
/// <param name="framerate">the framerate</param>
/// <param name="hRes">the horizontal resolution</param>
/// <param name="vRes">the vertical resolution</param>
/// <param name="dar">the display aspect ratio</param>
/// <param name="video">the video whose properties are to be read</param>
/// <returns>whether the source could be opened or not</returns>
public static bool GetAllInputProperties(out ulong nbOfFrames, out double framerate, out int hRes, out int vRes, out Dar dar, string video)
{
    // delegate to the full overload; the framerate numerator/denominator
    // are not needed by this caller and are simply discarded
    int fpsNumerator, fpsDenominator;
    bool opened = GetAllInputProperties(out nbOfFrames, out framerate, out fpsNumerator, out fpsDenominator, out hRes, out vRes, out dar, video);
    return opened;
}
/// <summary>
/// at first, the job from the currently configured settings is generated. In addition, we find out if this job is
/// a part of an automated series of jobs. If so, it means the first generated job was the second pass, and we have
/// to create the first pass using the same settings
/// then, all the generated jobs are returned
/// </summary>
/// <param name="movieInput">the video input file</param>
/// <param name="movieOutput">the final encode output file</param>
/// <param name="settings">the video codec settings to encode with</param>
/// <param name="dar">display aspect ratio to propagate to every generated job</param>
/// <param name="prerender">if true, the input is first rendered to an intermediate
/// "hfyu_*.avi" file and the encode reads a one-line AviSource() wrapper script</param>
/// <param name="checkVideo">if true, the input script is validated before job creation</param>
/// <param name="zones">encoding zones passed through to the generated jobs</param>
/// <returns>an Array of VideoJobs in the order they are to be encoded</returns>
public JobChain prepareVideoJob(string movieInput, string movieOutput, VideoCodecSettings settings, Dar? dar, bool prerender, bool checkVideo, Zone[] zones)
{
    bool twoPasses = false, threePasses = false;
    if (settings.EncodingMode == 4) // automated twopass
        twoPasses = true;
    else if (settings.EncodingMode == 8) // automated threepass
        threePasses = true;

    VideoJob prerenderJob = null;
    string hfyuFile = null;
    string inputAVS = movieInput;
    if (prerender)
    {
        // render to a temporary AVI next to the input and encode from a wrapper script
        hfyuFile = Path.Combine(Path.GetDirectoryName(movieInput), "hfyu_" + Path.GetFileNameWithoutExtension(movieInput) + ".avi");
        inputAVS = Path.ChangeExtension(hfyuFile, ".avs");
        if (File.Exists(hfyuFile))
        {
            if (MessageBox.Show("The intended temporary file, " + hfyuFile + " already exists.\r\n" +
                "Do you wish to over-write it?", "File already exists", MessageBoxButtons.YesNo, MessageBoxIcon.Exclamation) == DialogResult.No)
                return null;
        }
        if (File.Exists(inputAVS))
        {
            if (MessageBox.Show("The intended temporary file, " + inputAVS + " already exists.\r\n" +
                "Do you wish to over-write it?", "File already exists", MessageBoxButtons.YesNo, MessageBoxIcon.Exclamation) == DialogResult.No)
                return null;
        }
        try
        {
            // write the one-line AviSynth wrapper that loads the prerendered AVI
            StreamWriter hfyuWrapper = new StreamWriter(inputAVS, false, Encoding.Default);
            hfyuWrapper.WriteLine("AviSource(\"" + hfyuFile + "\")");
            hfyuWrapper.Close();
        }
        catch (IOException)
        {
            return null;
        }
        prerenderJob = this.generateVideoJob(movieInput, hfyuFile, new hfyuSettings(), dar, zones);
        if (prerenderJob == null)
            return null;
    }

    if (checkVideo)
    {
        VideoUtil vUtil = new VideoUtil(mainForm);
        string error = vUtil.checkVideo(movieInput);
        if (error != null)
        {
            // let the user decide whether to continue despite the reported problem
            bool bContinue = mainForm.DialogManager.createJobs(error);
            if (!bContinue)
            {
                MessageBox.Show("Job creation aborted due to invalid AviSynth script");
                return null;
            }
        }
    }

    VideoJob job = this.generateVideoJob(inputAVS, movieOutput, settings, prerender, dar, zones);
    VideoJob firstpass = null;
    VideoJob middlepass = null;
    if (job != null)
    {
        if (twoPasses || threePasses) // we just created the last pass, now create previous one(s)
        {
            job.FilesToDelete.Add(job.Settings.Logfile);
            if (job.Settings.SettingsID.Equals("x264"))
                job.FilesToDelete.Add(mbtreeFile);
            firstpass = cloneJob(job);
            firstpass.Output = ""; // the first pass has no output
            firstpass.Settings.EncodingMode = 2;
            firstpass.DAR = dar;
            if (threePasses)
            {
                firstpass.Settings.EncodingMode = 5; // 3 pass first pass (see genCommandline's mode table)
                middlepass = cloneJob(job);
                middlepass.Settings.EncodingMode = 6; // 3 pass 2nd pass
                if (mainForm.Settings.Keep2of3passOutput) // give the 2nd pass a new name
                {
                    middlepass.Output = Path.Combine(Path.GetDirectoryName(job.Output),
                        Path.GetFileNameWithoutExtension(job.Output) + "-2ndpass" + Path.GetExtension(job.Output));
                    // NOTE(review): the renamed 2nd-pass output is added to FilesToDelete even
                    // though the setting's name suggests it should be kept — confirm intent.
                    job.FilesToDelete.Add(middlepass.Output);
                }
                middlepass.DAR = dar;
            }
        }
        if (prerender) // clean up the temporary AVI and its wrapper script afterwards
        {
            job.FilesToDelete.Add(hfyuFile);
            job.FilesToDelete.Add(inputAVS);
        }
        // assemble the chain in execution order: prerender -> 1st pass -> 2nd pass -> final
        List<VideoJob> jobList = new List<VideoJob>();
        if (prerenderJob != null)
            jobList.Add(prerenderJob);
        if (firstpass != null)
            jobList.Add(firstpass);
        if (middlepass != null) // we have a middle pass
            jobList.Add(middlepass);
        jobList.Add(job);
        return new SequentialChain(jobList.ToArray());
    }
    return null;
}
/// <summary>
/// sets the configuration of the GUI
/// used when a job is loaded (jobs have everything already filled out)
/// </summary>
/// <param name="videoInput">the video input (raw or mp4)</param>
/// <param name="framerate">framerate of the input</param>
/// <param name="audioStreams">the audiostreams</param>
/// <param name="subtitleStreams">the subtitle streams</param>
/// <param name="chapterFile">the chapter file</param>
/// <param name="output">name of the output</param>
/// <param name="splitSize">split size of the output</param>
/// <param name="dar">display aspect ratio of the video</param>
/// <param name="deviceType">target device type</param>
public void setConfig(string videoInput, decimal? framerate, MuxStream[] audioStreams, MuxStream[] subtitleStreams, string chapterFile, string output, FileSize? splitSize, Dar? dar, string deviceType)
{
    this.dar = dar;
    vInput.Filename = videoInput;
    fps.Value = framerate;

    // fill the audio track controls, creating additional rows as needed
    for (int i = 0; i < audioStreams.Length; i++)
    {
        if (audioTracks.Count == i)
            AudioAddTrack();
        audioTracks[i].Stream = audioStreams[i];
    }

    // same for the subtitle track controls
    for (int i = 0; i < subtitleStreams.Length; i++)
    {
        if (subtitleTracks.Count == i)
            SubtitleAddTrack();
        subtitleTracks[i].Stream = subtitleStreams[i];
    }

    chapters.Filename = chapterFile;
    this.output.Filename = output;
    this.splitting.Value = splitSize;
    this.muxButton.Text = "Update";
    this.cbType.Text = deviceType;
    checkIO();
}
/// <summary>
/// Opens a dgindex script.
/// If the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing (taking into account the derived cropping values)
/// is calculated, and finally the AviSynth script is written and its name returned.
/// </summary>
/// <param name="path">path of the dgindex script</param>
/// <param name="AR">aspect ratio selection to be used; null means auto-detect from the d2v info</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not AR signalling is to be used for the output
/// (depending on this parameter, cropping/resizing changes to match the source AR)</param>
/// <param name="log">log item to write progress and diagnostics to</param>
/// <param name="avsSettings">the AviSynth settings (mod16 method, denoise, resize filter, template)</param>
/// <param name="autoDeint">whether to run the source detector and insert deinterlacing lines</param>
/// <param name="settings">the codec settings (used only for x264 AVC-level validation and QP file)</param>
/// <param name="dar">out: display aspect ratio to signal, or null if none applies</param>
/// <param name="autoCrop">whether or not autoCrop is used for the input</param>
/// <param name="keepInputResolution">if true, no cropping/resizing: input width/height are kept</param>
/// <param name="useChaptersMarks">whether to derive an x264 QP file from the chapter file</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string openVideo(string path, Dar? AR, int horizontalResolution, bool signalAR, LogItem log,
    AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar,
    bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
{
    dar = null;
    CropValues final = new CropValues();
    Dar customDAR;

    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        // A d2v with no frames cannot be processed at all.
        log.Error("DGDecode reported 0 frames in this file. This is a fatal error. Please recreate the DGIndex project");
        return "";
    }

    if (!keepInputResolution)
    {
        // Autocrop: detect black borders, then adjust the values per the selected mod16 method.
        final = Autocrop.autocrop(reader);
        if (signalAR)
        {
            if (avsSettings.Mod16Method == mod16Method.overcrop)
                ScriptServer.overcrop(ref final);
            else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
                ScriptServer.cropMod4Horizontal(ref final);
            else if (avsSettings.Mod16Method == mod16Method.undercrop)
                ScriptServer.undercrop(ref final);
        }
        if (autoCrop)
        {
            // Autocrop signals failure through left == -1.
            bool error = (final.left == -1);
            if (!error)
                log.LogValue("Autocrop values", final);
            else
            {
                log.Error("Autocrop failed, aborting now");
                return "";
            }
        }
    }

    log.LogValue("Auto-detect aspect ratio now", AR == null);
    // Check if AR needs to be autodetected now
    if (AR == null) // it does
    {
        customDAR = d2v.Info.DAR;
        if (customDAR.ar > 0)
            log.LogValue("Aspect ratio", customDAR);
        else
        {
            // No usable AR in the d2v info: fall back to ITU 16:9 PAL.
            customDAR = Dar.ITU16x9PAL;
            log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
        customDAR = AR.Value;

    if (keepInputResolution)
    {
        // Keep the source resolution untouched and signal the chosen DAR as-is.
        horizontalResolution = (int)d2v.Info.Width;
        dar = customDAR;
    }
    else
    {
        // Minimise upsizing: never ask for more width than the (cropped) source provides.
        int sourceHorizontalResolution = (int)d2v.Info.Width - final.right - final.left;
        // NOTE(review): when autoCrop is enabled the crop values ARE applied, yet here the
        // uncropped width is used as the limit — this looks inverted; confirm intended behavior.
        if (autoCrop)
            sourceHorizontalResolution = (int)d2v.Info.Width;

        if (horizontalResolution > sourceHorizontalResolution)
        {
            if (avsSettings.Mod16Method == mod16Method.resize)
                // Step down in mod-16 increments until within 16 px of the source width.
                while (horizontalResolution > sourceHorizontalResolution + 16)
                    horizontalResolution -= 16;
            else
                horizontalResolution = sourceHorizontalResolution;
        }
    }

    // Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = 0;
    if (keepInputResolution)
    {
        scriptVerticalResolution = (int)d2v.Info.Height;
        log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);
    }
    else
    {
        // Derive the vertical resolution (and the DAR to signal) from the chosen width.
        scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR.ar,
            final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
        log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);

        if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
        {
            x264Settings xs = (x264Settings)settings;
            if (xs.Level != 15) // 15 presumably means "unrestricted" — TODO confirm against x264Settings
            {
                AVCLevels al = new AVCLevels();
                log.LogValue("AVC level", al.getLevels()[xs.Level]);

                int compliantLevel = 15;
                while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
                {
                    // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                    string levelName = al.getLevels()[xs.Level];
                    horizontalResolution -= 16;
                    scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR.ar,
                        final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
                }
                log.LogValue("Resolution adjusted for AVC Level", horizontalResolution + "x" + scriptVerticalResolution);
            }

            if (useChaptersMarks)
            {
                // Build an x264 QP file from the chapter file so keyframes land on chapter marks.
                qpfile = job.PostprocessingProperties.ChapterFile;
                if ((Path.GetExtension(qpfile).ToLower()) == ".txt")
                    qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, d2v.Info.FPS);
                if (File.Exists(qpfile))
                {
                    xs.UseQPFile = true;
                    xs.QPFile = qpfile;
                }
            }
        }
    }

    // Generate the avs script based on the template; placeholders are replaced below.
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";

    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0, false);

    log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        // Run the source detector synchronously (waitTillAnalyseFinished blocks until the
        // FinishedAnalysis callback fires) and take its suggested deinterlacing script.
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false,
            mainForm.Settings.SourceDetectorSettings,
            new UpdateSourceDetectionStatus(analyseUpdate),
            new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        log.LogValue("Deinterlacing used", deinterlaceLines);
    }

    // Rebuild the input line now that interlacing is known (colour-correct/deblock options apply).
    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, false);

    if (autoCrop)
        cropLine = ScriptServer.GetCropLine(true, final);
    else
        cropLine = ScriptServer.GetCropLine(false, final);

    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

    if (!keepInputResolution)
        resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);

    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
        // Prepend the DAR globals so downstream tools can pick up the signalled aspect ratio.
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);

    log.LogValue("Generated Avisynth script", newScript);
    try
    {
        // Write the script next to the d2v, replacing its extension with .avs.
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"),false, System.Text.Encoding.Default);
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return "";
    }
    return Path.ChangeExtension(path, ".avs");
}
/// <summary>
/// Generates a video job from the given settings.
/// Automated multi-pass modes are rewritten here to the mode of the FINAL pass; the earlier
/// pass jobs are cloned from this one by the caller.
/// </summary>
/// <param name="input">the video input (avisynth script)</param>
/// <param name="output">the video output</param>
/// <param name="settings">the codec settings for this job</param>
/// <param name="skipVideoCheck">if true, the input is not verified with checkVideo</param>
/// <param name="dar">display aspect ratio to signal, if any</param>
/// <param name="zones">encoder zones to apply</param>
/// <returns>the generated job, or null if there was an error with the video source</returns>
public VideoJob generateVideoJob(string input, string output, VideoCodecSettings settings, bool skipVideoCheck, Dar? dar, Zone[] zones)
{
    VideoJob job = new VideoJob(input, output, settings, dar, zones);

    // No path set on the logfile: default it next to the output.
    // string.IsNullOrEmpty also covers Path.GetDirectoryName returning null (root paths),
    // which the previous Equals("") call would have crashed on.
    if (string.IsNullOrEmpty(Path.GetDirectoryName(settings.Logfile)))
        settings.Logfile = Path.ChangeExtension(output, ".stats");

    if (job.Settings.SettingsID.Equals("x264"))
        mbtreeFile = Path.ChangeExtension(output, ".stats.mbtree");

    if (job.Settings.EncodingMode == 4) // automated 2 pass, change type to 2 pass 2nd pass
    {
        job.Settings.EncodingMode = 3;
    }
    else if (job.Settings.EncodingMode == 8) // automated 3 pass, change type to 3 pass first pass
    {
        if (mainForm.Settings.OverwriteStats)
            job.Settings.EncodingMode = 7;
        else
            job.Settings.EncodingMode = 3; // 2 pass 2nd pass.. doesn't overwrite the stats file
    }

    if (!skipVideoCheck)
        checkVideo(job.Input);

    return job;
}
/// <summary>
/// Resizes the video window to the given width, deriving the height from the
/// clip's aspect ratio (optionally overridden by the user's PAR selection).
/// </summary>
/// <param name="targetWidth">desired width of the video window in pixels</param>
/// <param name="PAR">if true, the aspect ratio chosen in arChooser takes precedence</param>
private void resize(int targetWidth, bool PAR)
{
    // Default to the clip's own storage aspect ratio.
    Dar aspect = new Dar(file.Info.Width, file.Info.Height);
    if (PAR && arChooser.Value.HasValue)
        aspect = arChooser.Value.Value;

    videoWindowWidth = targetWidth;
    videoWindowHeight = (int)Math.Round((decimal)targetWidth / aspect.ar);

    // Suppress re-entrant size events while the window geometry is updated.
    sizeLock = true;
    adjustSize();
    sizeLock = false;

    positionSlider_Scroll(null, null);
}
/// <summary>
/// Sets the maximum zoom width so that the video fits the screen including controls.
/// Also updates videoWindowWidth/videoWindowHeight and, when a zoom factor is active,
/// re-applies it; finally snaps back to a fitting size if the current zoom exceeds the max.
/// </summary>
private void SetMaxZoomWidth()
{
    // Usable screen area minus the fixed window frame on both sides.
    Size oSizeScreen = Screen.GetWorkingArea(this).Size;
    int iScreenHeight = oSizeScreen.Height - 2 * SystemInformation.FixedFrameBorderSize.Height;
    int iScreenWidth = oSizeScreen.Width - 2 * SystemInformation.FixedFrameBorderSize.Width;

    // does the video fit into the screen?
    // formHeightDelta presumably accounts for the form's controls above/below the video — TODO confirm.
    if ((int)file.VideoInfo.Height + formHeightDelta > iScreenHeight || (int)file.VideoInfo.Width > iScreenWidth)
    {
        // Too large: scale down, preserving the (possibly PAR-corrected) aspect ratio.
        Dar d = new Dar(file.VideoInfo.Width, file.VideoInfo.Height);
        if (showPAR.Checked)
        {
            d = arChooser.Value ?? d;
        }
        int height = 0;
        if ((int)file.VideoInfo.Width > iScreenWidth)
        {
            // Width-limited first; if the resulting height still overflows, switch to height-limited.
            zoomMaxWidth = iScreenWidth;
            height = (int)Math.Round((decimal)zoomMaxWidth / d.AR);
            if (height + formHeightDelta > iScreenHeight)
            {
                height = iScreenHeight - formHeightDelta;
                zoomMaxWidth = (int)Math.Round((decimal)height * d.AR);
            }
        }
        else
        {
            // Only the height overflows: derive the width from the available height.
            height = iScreenHeight - formHeightDelta;
            zoomMaxWidth = (int)Math.Round((decimal)height * d.AR);
        }
        videoWindowWidth = zoomMaxWidth;
        videoWindowHeight = height;
    }
    else
    {
        // Fits as-is: the source size is the maximum.
        zoomMaxWidth = (int)file.VideoInfo.Width;
        videoWindowWidth = zoomMaxWidth;
        videoWindowHeight = (int)file.VideoInfo.Height;
    }

    // A non-100% zoom factor overrides the window size computed above.
    if (zoomFactor != 100)
    {
        zoomWidth = (int)(zoomMaxWidth * zoomFactor / 100);
        Dar d = new Dar(file.VideoInfo.Width, file.VideoInfo.Height);
        if (showPAR.Checked)
        {
            d = arChooser.Value ?? d;
        }
        int height = (int)Math.Round((decimal)zoomWidth / d.AR);
        videoWindowWidth = zoomWidth;
        videoWindowHeight = (int)height;
    }

    // Current zoom exceeds the new maximum: fall back to fit-screen or original size.
    if (zoomMaxWidth < zoomWidth)
    {
        if (!bOriginalSize)
        {
            btnFitScreen_Click(null, null);
        }
        else
        {
            originalSizeButton_Click(null, null);
        }
    }
}