protected override void doExitConfig() { if (job.DemuxMode > 0 && !su.HasError && !su.WasAborted && job.AudioTracks.Count > 0) { int iTracksFound = 0; int iCurrentAudioTrack = -1; for (int iCurrentTrack = 0; iCurrentTrack <= 29; iCurrentTrack++) // hard limit to max. 30 tracks { StringBuilder strAVSScript = new StringBuilder(); strAVSScript.Append(VideoUtil.getFFMSAudioInputLine(job.Input, job.Output, iCurrentTrack)); // is this an audio track? string strErrorText; if (AudioUtil.AVSScriptHasAudio(strAVSScript.ToString(), out strErrorText) == false) { continue; } iCurrentAudioTrack++; foreach (AudioTrackInfo oAudioTrack in job.AudioTracks) { if (oAudioTrack.TrackIndex != iCurrentAudioTrack) { continue; } // write avs file string strAudioAVSFile; strAudioAVSFile = Path.GetFileNameWithoutExtension(job.Output) + "_track_" + (oAudioTrack.TrackIndex + 1) + "_" + oAudioTrack.Language.ToLower(System.Globalization.CultureInfo.InvariantCulture) + ".avs"; strAudioAVSFile = Path.Combine(Path.GetDirectoryName(job.Output), Path.GetFileName(strAudioAVSFile)); try { strAVSScript.AppendLine(@"# detected channels: " + oAudioTrack.NbChannels); strAVSScript.Append(@"# detected channel positions: " + oAudioTrack.ChannelPositions); StreamWriter oAVSWriter = new StreamWriter(strAudioAVSFile, false, Encoding.Default); oAVSWriter.Write(strAVSScript); oAVSWriter.Close(); } catch (Exception ex) { log.LogValue("Error creating audio AVS file", ex); } break; } if (++iTracksFound == job.AudioTracks.Count) { break; } } } base.doExitConfig(); }
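// For orientation, a hedged sketch of the per-track AVS file the loop above writes. The exact audio input line
// comes from VideoUtil.getFFMSAudioInputLine and may differ; the path, track number, language and channel data
// below are purely illustrative:
//
//   movie_track_2_english.avs:
//     LoadPlugin("ffms2.dll")
//     FFAudioSource("movie.mkv", track=1, cachefile="movie.ffindex")
//     # detected channels: 6
//     # detected channel positions: 3/2/0.1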
protected virtual bool isFPSRequired() { try { if (vInput.Filename.Length > 0) { return(VideoUtil.guessVideoType(vInput.Filename).ContainerType == null); } return(true); } catch (NullReferenceException) // This will throw if it can't guess the video type { return(true); } }
public AutoEncodeWindow(VideoStream videoStream, List <AudioJob> audioStreams, MainForm mainForm, bool prerender, VideoInfo vInfo) : this() { this.vInfo = vInfo; mainForm.Log.Add(log); this.videoStream = videoStream; this.audioStreams = audioStreams; this.prerender = prerender; this.mainForm = mainForm; jobUtil = new JobUtil(mainForm); vUtil = new VideoUtil(mainForm); muxProvider = mainForm.MuxProvider; container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray()); splitting.MinimumFileSize = new FileSize(Unit.MB, 1); }
public AdaptiveMuxWindow() : base(MainForm.Instance, null) { InitializeComponent(); muxProvider = mainForm.MuxProvider; audioTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.AudioTypes.ValuesArray); subtitleTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.SubtitleTypes.ValuesArray); vInput.Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.VideoTypes.ValuesArray); chapters.Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.ChapterTypes.ValuesArray); cbContainer.Visible = true; lbContainer.Visible = true; subtitleTracks[0].chkDefaultStream.CheckedChanged += new System.EventHandler(base.chkDefaultStream_CheckedChanged); base.muxButton.Click += new System.EventHandler(this.muxButton_Click); }
private void openVideo(string fileName) { if (input.Filename != fileName) { input.Filename = fileName; } string projectPath; bool avcStream = VideoUtil.detecAVCStreamFromFile(fileName); string fileNameNoPath = Path.GetFileName(fileName); if (string.IsNullOrEmpty(projectPath = mainForm.Settings.DefaultOutputDir)) { projectPath = Path.GetDirectoryName(fileName); } AudioTracks.Items.Clear(); if (avcStream) { int unused; List <AudioTrackInfo> audioTracks; projectName.Text = Path.Combine(projectPath, Path.ChangeExtension(fileNameNoPath, ".dga")); vUtil.getSourceMediaInfo(fileName, out audioTracks, out unused); foreach (AudioTrackInfo atrack in audioTracks) { AudioTracks.Items.Add(atrack); } } else { MessageBox.Show("MeGUI cannot find an AVC stream in " + Path.GetFileName(fileName) + "...", "Information", MessageBoxButtons.OK, MessageBoxIcon.Information); } if (AudioTracks.Items.Count < 1) { MessageBox.Show("MeGUI cannot find audio track information. Audio track selection will be disabled.", "Warning", MessageBoxButtons.OK, MessageBoxIcon.Warning); demuxNoAudiotracks.Enabled = false; demuxAll.Enabled = false; } else { demuxNoAudiotracks.Enabled = true; demuxAll.Enabled = true; } }
/// <summary> /// reads the d2v file, which is essentially a text file /// the first few lines contain the video properties in plain text and the /// last line contains the film percentage /// this method reads all this information and stores it internally, then /// closes the d2v file again /// </summary> private void readFileProperties() { using (StreamReader sr = new StreamReader(fileName)) { string line = sr.ReadLine(); while ((line = sr.ReadLine()) != null) { if (line.IndexOf("Aspect_Ratio") != -1) // this is the aspect ratio line { string ar = line.Substring(13); if (ar.Equals("16:9")) { this.aspectRatio = AspectRatio.ITU16x9; } else if (ar.Equals("4:3")) { this.aspectRatio = AspectRatio.ITU4x3; } else if (ar.Equals("1:1")) { this.aspectRatio = AspectRatio.A1x1; } else { this.aspectRatio = AspectRatio.CUSTOM; } double AR = VideoUtil.getAspectRatio(aspectRatio); if (AR > 0) { VideoUtil.approximate(AR, out darX, out darY); } } if (line.IndexOf("Field_Operation") != -1) { string fieldOp = line.Substring(16, 1); this.fieldOperation = Int32.Parse(fieldOp); } if (line.IndexOf("FINISHED") != -1 && line.IndexOf("FILM") != -1) // dgindex now reports VIDEO % if it's > 50% { int end = line.IndexOf("%"); string percentage = line.Substring(10, end - 10); this.filmPercentage = Double.Parse(percentage, System.Globalization.CultureInfo.InvariantCulture); } } } }
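// Illustrative excerpt of the d2v lines this parser consumes (values are made up; a real d2v file carries many more fields):
//
//   Aspect_Ratio=16:9
//   Field_Operation=0
//   FINISHED  95.12% FILM
//
// "Aspect_Ratio=" is 13 characters, hence Substring(13); "Field_Operation=" is 16, hence Substring(16, 1);
// on the FINISHED line the percentage starts at offset 10 and ends at the "%".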
private void button3_Click(object sender, EventArgs e) { CropValues final = VideoUtil.autocrop(reader); bool error = (final.left == -1); if (!error) { cropLeft = final.left; cropTop = final.top; cropRight = final.right; cropBottom = final.bottom; } else { MessageBox.Show("I'm afraid I was unable to find 3 frames that have matching crop values"); } }
/// <summary> /// tries to open the video source and gets the number of frames from it, or /// exits with an error /// </summary> /// <param name="job">the video job whose input file (an AviSynth script) will be opened</param> /// <param name="error">return parameter for all errors</param> /// <returns>true if the file could be opened, false if not</returns> protected bool getInputProperties(VideoJob job, out string error) { double f; int a, b; error = JobUtil.GetAllInputProperties(out numberOfFrames, out f, out hres, out vres, out a, out b, job.Input); darX = job.DARX; darY = job.DARY; if (job.Settings.UsesSAR) { int sarX, sarY; VideoUtil.findSAR(job.DARX, job.DARY, hres, vres, out sarX, out sarY); job.Commandline = CommandLineGenerator.generateVideoCommandline(job.Settings, job.Input, job.Output, sarX, sarY); } su.NbFramesTotal = numberOfFrames; return(error == null); }
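// A minimal sketch of the arithmetic VideoUtil.findSAR presumably performs (assumption: it derives the sample
// aspect ratio from the display aspect ratio and the storage resolution, then reduces the fraction):
//
//   sarX / sarY = (darX * vres) / (darY * hres)
//
// e.g. a 16:9 DAR on a 720x576 frame gives (16*576):(9*720) = 9216:6480 = 64:45 after reduction.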
public AdaptiveMuxWindow(MainForm mainForm) : base(mainForm, null) { InitializeComponent(); jobUtil = new JobUtil(mainForm); muxProvider = mainForm.MuxProvider; audioTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.AudioTypes.ValuesArray); subtitleTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.SubtitleTypes.ValuesArray); vInput.Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.VideoTypes.ValuesArray); chapters.Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.ChapterTypes.ValuesArray); cbContainer.Visible = true; lbContainer.Visible = true; subtitleTracks[0].chkDefaultStream.CheckedChanged += new System.EventHandler(base.chkDefaultStream_CheckedChanged); this.cbContainer.SelectedIndexChanged += new System.EventHandler(this.cbContainer_SelectedIndexChanged); }
/// <summary> /// initializes the lsmash reader /// </summary> /// <param name="fileName">the LSMASHIndex source file that this reader will process</param> /// <param name="indexFile">the LSMASHIndex index file that this reader will process</param> public lsmashFile(string fileName, string indexFile) { if (!String.IsNullOrEmpty(indexFile) && String.IsNullOrEmpty(fileName)) { using (StreamReader sr = new StreamReader(indexFile, System.Text.Encoding.Default)) { string line = null; while ((line = sr.ReadLine()) != null) { if (line.StartsWith("<InputFilePath>")) { string strSourceFile = line.Substring(15, line.LastIndexOf("</InputFilePath>") - 15); if (File.Exists(strSourceFile)) { this.fileName = strSourceFile; } break; } } } } else { this.fileName = fileName; } double fps = 0; MediaInfoFile oInfo = null; if (File.Exists(this.fileName)) { oInfo = new MediaInfoFile(this.fileName); if (oInfo.VideoInfo.HasVideo && oInfo.VideoInfo.FPS > 0) { fps = oInfo.VideoInfo.FPS; } } reader = AvsFile.ParseScript(VideoUtil.getLSMASHVideoInputLine(this.fileName, indexFile, fps)); info = reader.VideoInfo.Clone(); if (oInfo != null) { info.DAR = oInfo.VideoInfo.DAR; } }
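// Illustrative sketch of the index-file line parsed above; the rest of the .lwi content is omitted and the path is hypothetical:
//
//   <InputFilePath>D:\movie.mkv</InputFilePath>
//
// "<InputFilePath>" is 15 characters long, so Substring(15, ...) recovers everything up to the closing tag,
// i.e. the original source path.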
public AutoEncodeWindow(VideoStream videoStream, AudioStream[] audioStreams, MainForm mainForm, bool prerender) : this() { if (videoStream.Settings.EncodingMode == 1 || videoStream.Settings.EncodingMode == 9) // CQ and CRF -- no bitrate possible { averageBitrateRadio.Enabled = false; FileSizeRadio.Enabled = false; noTargetRadio.Checked = true; } this.videoStream = videoStream; this.audioStreams = audioStreams; this.prerender = prerender; this.mainForm = mainForm; jobUtil = new JobUtil(mainForm); vUtil = new VideoUtil(mainForm); muxProvider = mainForm.MuxProvider; container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray()); }
protected override void RunInThread() { try { // job output file in case of LWLibavVideoSource() base.jobOutputFile = job.Input + ".lwi"; // generate the avs script StringBuilder strAVSScript = new StringBuilder(); MediaInfoFile oInfo = null; strAVSScript.Append(VideoUtil.getLSMASHVideoInputLine(job.Input, job.Output, 0, ref oInfo)); if (oInfo != null) { oInfo.Dispose(); } base.log.LogValue("AviSynth script", strAVSScript.ToString(), ImageType.Information); // check if the script has a video track, also this call will create the index file if there is one string strErrorText = "no video track found"; bool openSuccess = false; try { strErrorText = String.Empty; using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment()) using (AviSynthClip a = env.ParseScript(strAVSScript.ToString(), false, false)) openSuccess = a.HasVideo; } catch (Exception ex) { strErrorText = ex.Message; } if (!openSuccess) { // avs script has no video track or an error has been thrown base.log.LogEvent(strErrorText, ImageType.Error); su.HasError = true; } } catch (Exception ex) { base.log.LogValue("Error: ", ex.Message, ImageType.Error); su.HasError = true; } }
private static LogItem postprocess(MainForm mainForm, Job ajob) { if (!(ajob is IndexJob)) { return(null); } IndexJob job = (IndexJob)ajob; if (job.PostprocessingProperties != null) { return(null); } StringBuilder logBuilder = new StringBuilder(); VideoUtil vUtil = new VideoUtil(mainForm); Dictionary <int, string> audioFiles = vUtil.getAllDemuxedAudio(job.AudioTracks, job.Output, 8); if (job.LoadSources) { if (job.DemuxMode != 0 && audioFiles.Count > 0) { string[] files = new string[audioFiles.Values.Count]; audioFiles.Values.CopyTo(files, 0); Util.ThreadSafeRun(mainForm, new MethodInvoker( delegate { mainForm.Audio.openAudioFile(files); })); } // as the above needed delegation for openAudioFile, this needs it for openVideoFile as well. // It seems to fix the problem of ASW disappearing as soon as it appears on some systems (Vista X64) Util.ThreadSafeRun(mainForm, new MethodInvoker( delegate { AviSynthWindow asw = new AviSynthWindow(mainForm, job.Output); asw.OpenScript += new OpenScriptCallback(mainForm.Video.openVideoFile); asw.Show(); })); } return(null); }
public AutoEncodeWindow(VideoStream videoStream, List <AudioJob> audioStreams, MainForm mainForm, bool prerender, VideoInfo vInfo) : this() { this.vInfo = vInfo; mainForm.Log.Add(log); if (videoStream.Settings.EncodingMode == 1 || videoStream.Settings.EncodingMode == 9) // CQ and CRF -- no bitrate possible { averageBitrateRadio.Enabled = false; FileSizeRadio.Enabled = false; noTargetRadio.Checked = true; } this.videoStream = videoStream; this.audioStreams = audioStreams; this.prerender = prerender; this.mainForm = mainForm; jobUtil = new JobUtil(mainForm); vUtil = new VideoUtil(mainForm); muxProvider = mainForm.MuxProvider; container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray()); splitting.MinimumFileSize = new FileSize(Unit.MB, 1); }
private void getTypes(out AudioEncoderType[] aCodec, out MuxableType[] audioTypes, out MuxableType[] subtitleTypes) { List <MuxableType> audioTypesList = new List <MuxableType>(); List <MuxableType> subTypesList = new List <MuxableType>(); List <AudioEncoderType> audioCodecList = new List <AudioEncoderType>(); int counter = 0; foreach (MuxStreamControl c in audioTracks) { if (minimizedMode && knownAudioTypes.Length > counter) { audioCodecList.Add(knownAudioTypes[counter]); } else if (c.Stream != null) { MuxableType audioType = VideoUtil.guessAudioMuxableType(c.Stream.path, true); if (audioType != null) { audioTypesList.Add(audioType); } } counter++; } foreach (MuxStreamControl c in subtitleTracks) { if (c.Stream == null) { continue; } SubtitleType subtitleType = VideoUtil.guessSubtitleType(c.Stream.path); if (subtitleType != null) { subTypesList.Add(new MuxableType(subtitleType, null)); } } audioTypes = audioTypesList.ToArray(); subtitleTypes = subTypesList.ToArray(); aCodec = audioCodecList.ToArray(); }
/* /// <summary> * /// gets all the audio languages from a defined source info file * /// </summary> * /// <param name="infoFile">the info file containing the language info</param> * /// <returns>an array listing all tracks and their language</returns> * public List<string> getAudioLanguages(string infoFile) * { * List<string> retval = new List<string>(); * List<AudioTrackInfo> audioTracks; * List<SubtitleInfo> subtitles; * Dar? ar; * int maxHorizontalResolution; * getSourceInfo(infoFile, out audioTracks, out subtitles, out ar, out maxHorizontalResolution); * foreach (AudioTrackInfo ati in audioTracks) * { * retval.Add(ati.TrackInfo.Language); * } * return retval; * * }*/ #endregion #region dgindex preprocessing /// <summary> /// opens a video source and fills out the audio track and subtitle lists /// </summary> /// <param name="fileName">the video input file</param> /// <param name="audioTracks">receives the audio tracks found in the source</param> /// <param name="subtitles">receives the subtitle tracks found in the source</param> /// <param name="ar">aspect ratio of the video</param> /// <param name="maxHorizontalResolution">maximum horizontal resolution of the source</param> /// <returns>true if dummy tracks had to be inserted (no usable source info was found), false if the real track list could be read</returns> public bool openVideoSource(string fileName, out List <AudioTrackInfo> audioTracks, out List <SubtitleInfo> subtitles, out Dar?ar, out int maxHorizontalResolution) { audioTracks = new List <AudioTrackInfo>(); subtitles = new List <SubtitleInfo>(); string infoFile = VideoUtil.getInfoFileName(fileName); bool putDummyTracks = true; // indicates whether dummy tracks have to be inserted ar = null; maxHorizontalResolution = 5000; if (!string.IsNullOrEmpty(infoFile)) { getSourceInfo(infoFile, out audioTracks, out subtitles, out ar, out maxHorizontalResolution); if (audioTracks.Count > 0) { putDummyTracks = false; } } else { if (Path.GetExtension(fileName).ToLower().Equals(".vob") || Path.GetExtension(fileName).ToLower().Equals(".ifo")) { MessageBox.Show("Could not find DVD Decrypter generated info file " + infoFile, "Missing File", MessageBoxButtons.OK); } } if (putDummyTracks) { for (int i = 1; i <= 8; i++) { audioTracks.Add(new AudioTrackInfo("Track " + i, "", "", i)); } subtitles.Clear(); for (int i = 1; i <= 32; i++) { subtitles.Add(new SubtitleInfo("Track " + i, i)); } } return(putDummyTracks); }
private static void postprocess(MainForm mainForm, Job ajob) { if (!(ajob is IndexJob)) { return; } IndexJob job = (IndexJob)ajob; if (job.PostprocessingProperties != null) { return; } StringBuilder logBuilder = new StringBuilder(); VideoUtil vUtil = new VideoUtil(mainForm); Dictionary <int, string> audioFiles = vUtil.getAllDemuxedAudio(job.Output, 8); if (job.LoadSources) { if (job.DemuxMode != 0) { int counter = 0; foreach (int i in audioFiles.Keys) { mainForm.setAudioTrack(counter, audioFiles[i]); if (counter >= 2) { break; } counter++; } } AviSynthWindow asw = new AviSynthWindow(mainForm, job.Output); asw.OpenScript += new OpenScriptCallback(mainForm.Video.openVideoFile); asw.Show(); } }
private void getTypes(out AudioEncoderType[] aCodec, out MuxableType[] audioTypes, out MuxableType[] subtitleTypes) { List <MuxableType> audioTypesList = new List <MuxableType>(); List <MuxableType> subTypesList = new List <MuxableType>(); List <AudioEncoderType> audioCodecList = new List <AudioEncoderType>(); int counter = 0; foreach (SubStream stream in audioStreams) { if (minimizedMode && knownAudioTypes.Length > counter) { audioCodecList.Add((AudioEncoderType)knownAudioTypes[counter].codec); } else { MuxableType audioType = VideoUtil.guessAudioMuxableType(stream.path, true); if (audioType != null) { audioTypesList.Add(audioType); } } counter++; } foreach (SubStream stream in subtitleStreams) { SubtitleType subtitleType = VideoUtil.guessSubtitleType(stream.path); if (subtitleType != null) { subTypesList.Add(new MuxableType(subtitleType, null)); } } audioTypes = audioTypesList.ToArray(); subtitleTypes = subTypesList.ToArray(); aCodec = audioCodecList.ToArray(); }
public string GetAudioInputFilter() { return(VideoUtil.GenerateCombinedFilter(supportedAudioTypes.ToArray())); }
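// For reference, GenerateCombinedFilter builds a standard WinForms OpenFileDialog filter string; a hedged
// sketch of its shape (the actual entries and grouping depend on the registered types):
//
//   "All supported files|*.ac3;*.dts;*.mp3|AC-3 files|*.ac3|DTS files|*.dts|MP3 files|*.mp3"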
/// <summary> /// creates the AVS script file /// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings /// the proper resolution for automatic resizing, taking into account the derived cropping values, /// is calculated, and finally the avisynth script is written and its name returned /// </summary> /// <param name="indexFile">the index file (e.g. dgindex script) of the source</param> /// <param name="inputFile">the source video file</param> /// <param name="AR">display aspect ratio for this source, null to auto-detect</param> /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param> /// <param name="_log">the log item to report to</param> /// <param name="avsSettings">the AviSynth profile settings</param> /// <param name="autoDeint">whether or not automatic deinterlacing is used</param> /// <param name="settings">the codec settings (used only for x264)</param> /// <param name="autoCrop">whether or not autoCrop is used for the input</param> /// <param name="keepInputResolution">whether or not the input resolution must be left untouched</param> /// <param name="useChaptersMarks">whether or not chapter marks should be turned into keyframes</param> /// <returns>the name of the AviSynth script created, empty if there was an error</returns> private string CreateAVSFile(string indexFile, string inputFile, Dar?AR, int desiredOutputWidth, LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, bool autoCrop, bool keepInputResolution, bool useChaptersMarks) { Dar? dar = null; Dar customDAR; IMediaFile iMediaFile = null; IVideoReader reader; PossibleSources oPossibleSource; x264Device xTargetDevice = null; CropValues cropValues = new CropValues(); int outputWidthIncludingPadding = 0; int outputHeightIncludingPadding = 0; int outputWidthCropped = 0; int outputHeightCropped = 0; // encode anamorph either when it is selected in the avs profile or the input resolution should not be touched bool signalAR = (avsSettings.Mod16Method != mod16Method.none) || keepInputResolution; // make sure the proper anamorphic encode is selected if the input resolution should not be touched if (keepInputResolution && avsSettings.Mod16Method != mod16Method.nonMod16) { avsSettings.Mod16Method = mod16Method.nonMod16; } // open index file to retrieve information if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI) { iMediaFile = new dgiFile(indexFile); oPossibleSource = PossibleSources.dgi; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V) { iMediaFile = new d2vFile(indexFile); oPossibleSource = PossibleSources.d2v; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM) { iMediaFile = new dgmFile(indexFile); oPossibleSource = PossibleSources.dgm; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS) { iMediaFile = new ffmsFile(inputFile, indexFile); oPossibleSource = PossibleSources.ffindex; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH) { iMediaFile = new lsmashFile(inputFile, indexFile); oPossibleSource = PossibleSources.lsmash; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE) { string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile); iMediaFile = AvsFile.ParseScript(tempAvs, true); oPossibleSource = PossibleSources.avisource; } else { iMediaFile = AvsFile.OpenScriptFile(inputFile, true); oPossibleSource = PossibleSources.avs; } reader = iMediaFile.GetVideoReader(); // abort if the index file is invalid if (reader.FrameCount < 1) { _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting..."); return(""); } if (AR == null) { // AR needs to be detected automatically now _log.LogValue("Auto-detect aspect ratio", AR == null); customDAR = iMediaFile.VideoInfo.DAR; if (customDAR.AR <= 0) { customDAR = Dar.ITU16x9PAL; _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR)); } } else { customDAR = AR.Value; } _log.LogValue("Aspect ratio", customDAR); // check x264 settings (target device, chapter file) if (settings != null && settings is x264Settings) { x264Settings xs = (x264Settings)settings; xTargetDevice = xs.TargetDevice; _log.LogValue("Target device", xTargetDevice.Name); } // get mod value for resizing int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR); // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray) if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false) { _log.Error("Autocrop failed. Aborting..."); return(""); } int inputWidth = (int)iMediaFile.VideoInfo.Width; int inputHeight = (int)iMediaFile.VideoInfo.Height; int inputFPS_D = (int)iMediaFile.VideoInfo.FPS_D; int inputFPS_N = (int)iMediaFile.VideoInfo.FPS_N; int inputFrameCount = (int)iMediaFile.VideoInfo.FrameCount; // force destruction of AVS script iMediaFile.Dispose(); Dar?suggestedDar = null; if (desiredOutputWidth == 0) { desiredOutputWidth = outputWidthIncludingPadding = inputWidth; } else if (!avsSettings.Upsize && desiredOutputWidth > inputWidth) { outputWidthIncludingPadding = inputWidth; } else { outputWidthIncludingPadding = desiredOutputWidth; } CropValues paddingValues; bool resizeEnabled; int outputWidthWithoutUpsizing = outputWidthIncludingPadding; if (avsSettings.Upsize) { resizeEnabled = !keepInputResolution; CropValues cropValuesTemp = cropValues.Clone(); int outputHeightIncludingPaddingTemp = 0; Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log); } resizeEnabled = !keepInputResolution; Resolution.GetResolution(inputWidth, inputHeight, customDAR, ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true, avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D, ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log); keepInputResolution = !resizeEnabled; if (signalAR && suggestedDar.HasValue) { dar = suggestedDar; } // log calculated output resolution outputWidthCropped = outputWidthIncludingPadding - paddingValues.left - paddingValues.right; outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top; _log.LogValue("Input resolution", inputWidth + "x" + inputHeight); _log.LogValue("Desired maximum width", desiredOutputWidth); if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth) { _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it."); } if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing) { _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing); } if (cropValues.isCropped()) { _log.LogValue("Autocrop values", cropValues); _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped); } else { _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped); } if (paddingValues.isCropped()) { _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding); } // generate the avs script based on the template string inputLine = "#input"; string deinterlaceLines = "#deinterlace"; string denoiseLines = "#denoise"; string cropLine = "#crop"; string resizeLine = "#resize"; inputLine = ScriptServer.GetInputLine( inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null); if (IsJobStopped()) { return(""); } _log.LogValue("Automatic deinterlacing", autoDeint); if (autoDeint) { su.Status = "Automatic deinterlacing... ***PLEASE WAIT***"; string d2vPath = indexFile; _sourceDetector = new SourceDetector(inputLine, d2vPath, avsSettings.PreferAnimeDeinterlace, inputFrameCount, Thread.CurrentThread.Priority, MainForm.Instance.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(AnalyseUpdate), new FinishedAnalysis(FinishedAnalysis)); finished = false; _sourceDetector.Analyse(); WaitTillAnalyseFinished(); _sourceDetector = null; if (filters != null) { deinterlaceLines = filters[0].Script; if (interlaced) { _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning); } else { _log.LogValue("Deinterlacing used", deinterlaceLines); } } } if (IsJobStopped()) { return(""); } su.Status = "Finalizing preprocessing... ***PLEASE WAIT***"; // get final input filter line inputLine = ScriptServer.GetInputLine( inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null); // get crop & resize lines if (!keepInputResolution) { if (autoCrop) { cropLine = ScriptServer.GetCropLine(cropValues); } resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || inputWidth != outputWidthCropped, outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod, autoCrop, cropValues, inputWidth, inputHeight); } // get denoise line denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod); string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines); if (dar.HasValue) { newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript); } else { if (xTargetDevice != null && xTargetDevice.BluRay) { string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding; x264Settings _xs = (x264Settings)settings; if (strResolution.Equals("720x480")) { _xs.SampleAR = 4; _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } else if (strResolution.Equals("720x576")) { _xs.SampleAR = 5; _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080")) { _xs.SampleAR = 1; _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } else if (strResolution.Equals("1440x1080")) { _xs.SampleAR = 2; _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } } } _log.LogValue("Generated AviSynth script", newScript); string strOutputAVSFile; if (String.IsNullOrEmpty(indexFile)) { strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs"); } else { strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs"); } try { StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default); sw.Write(newScript); sw.Close(); } catch (Exception i) { _log.LogValue("Error saving AviSynth script", i, ImageType.Error); return(""); } JobUtil.GetAllInputProperties(strOutputAVSFile, out ulong numberOfFrames, out double fps, out int fps_n, out int fps_d, out int hres, out int vres, out Dar d, out AviSynthColorspace colorspace); _log.LogEvent("resolution: " + hres + "x" + vres); _log.LogEvent("frame rate: " + fps_n + "/" + fps_d); _log.LogEvent("frames: " + numberOfFrames); TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps); _log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}", (int)(oTime.TotalHours), oTime.Minutes, oTime.Seconds, oTime.Milliseconds)); _log.LogValue("aspect ratio", d); _log.LogValue("color space", colorspace.ToString()); if (IsJobStopped()) { return(""); } // create qpf file if necessary and possible if (job.PostprocessingProperties.ChapterInfo.HasChapters && useChaptersMarks && settings != null && settings is x264Settings) { fps = (double)fps_n / fps_d; string strChapterFile = Path.ChangeExtension(strOutputAVSFile, ".qpf"); job.PostprocessingProperties.ChapterInfo.ChangeFps(fps); if (job.PostprocessingProperties.ChapterInfo.SaveQpfile(strChapterFile)) { job.PostprocessingProperties.FilesToDelete.Add(strChapterFile); _log.LogValue("qpf file created", strChapterFile); x264Settings xs = (x264Settings)settings; xs.UseQPFile = true; xs.QPFile = strChapterFile; } } // check if a timestamp file has to be used if (!String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && settings != null && settings is x264Settings) { x264Settings xs = (x264Settings)settings; xs.TCFile = job.PostprocessingProperties.TimeStampFile; } return(strOutputAVSFile); }
protected override void RunInThread() { JobChain c = null; List <string> intermediateFiles = new List <string>(); bool bError = false; try { log.LogEvent("Processing thread started"); su.Status = "Preprocessing... ***PLEASE WAIT***"; su.ResetTime(); List <string> arrAudioFilesDelete = new List <string>(); audioFiles = new Dictionary <int, string>(); List <AudioTrackInfo> arrAudioTracks = new List <AudioTrackInfo>(); List <AudioJob> arrAudioJobs = new List <AudioJob>(); List <MuxStream> arrMuxStreams = new List <MuxStream>(); FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory); // audio handling foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks) { if (IsJobStopped()) { return; } if (oAudioTrack.AudioTrackInfo != null) { if (oAudioTrack.AudioTrackInfo.ExtractMKVTrack) { if (job.PostprocessingProperties.ApplyDelayCorrection && File.Exists(job.PostprocessingProperties.IntermediateMKVFile)) { MediaInfoFile oFile = new MediaInfoFile(job.PostprocessingProperties.IntermediateMKVFile, ref log); bool bFound = false; foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks) { if (oAudioInfo.MMGTrackID == oAudioTrack.AudioTrackInfo.MMGTrackID) { bFound = true; } } int mmgTrackID = 0; if (!bFound) { mmgTrackID = oFile.AudioInfo.Tracks[oAudioTrack.AudioTrackInfo.TrackIndex].MMGTrackID; } else { mmgTrackID = oAudioTrack.AudioTrackInfo.MMGTrackID; } foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks) { if (oAudioInfo.MMGTrackID == mmgTrackID) { if (oAudioTrack.DirectMuxAudio != null) { oAudioTrack.DirectMuxAudio.delay = oAudioInfo.Delay; } if (oAudioTrack.AudioJob != null) { oAudioTrack.AudioJob.Delay = oAudioInfo.Delay; } break; } } } if (!audioFiles.ContainsKey(oAudioTrack.AudioTrackInfo.TrackID)) { audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName); arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName); } } else { arrAudioTracks.Add(oAudioTrack.AudioTrackInfo); } } if (oAudioTrack.AudioJob != null) { if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE && String.IsNullOrEmpty(oAudioTrack.AudioJob.Input)) { oAudioTrack.AudioJob.Input = job.Input; } arrAudioJobs.Add(oAudioTrack.AudioJob); } if (oAudioTrack.DirectMuxAudio != null) { arrMuxStreams.Add(oAudioTrack.DirectMuxAudio); } } if (audioFiles.Count == 0 && !job.PostprocessingProperties.Eac3toDemux && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.AVISOURCE) { if ((job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI || job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM) && File.Exists(Path.ChangeExtension(job.IndexFile, ".log"))) { job.PostprocessingProperties.FilesToDelete.Add(Path.ChangeExtension(job.IndexFile, ".log")); audioFiles = AudioUtil.GetAllDemuxedAudioFromDGI(arrAudioTracks, out arrAudioFilesDelete, job.IndexFile, log); } else { audioFiles = VideoUtil.getAllDemuxedAudio(arrAudioTracks, new List <AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, log); } } FillInAudioInformation(ref arrAudioJobs, arrMuxStreams); if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux)) { log.LogEvent("Don't encode video: True"); } else { log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize); } log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting); if (IsJobStopped()) { return; } // video file handling string avsFile = String.Empty; VideoStream myVideo = new VideoStream(); VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings; if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux)) { //Open the video try { avsFile = CreateAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR, job.PostprocessingProperties.HorizontalOutputResolution, log, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution, job.PostprocessingProperties.UseChaptersMarks); } catch (Exception ex) { log.LogValue("An error occurred creating the AVS file", ex, ImageType.Error); } if (IsJobStopped()) { return; } if (!String.IsNullOrEmpty(avsFile)) { // check AVS file JobUtil.GetInputProperties(avsFile, out ulong frameCount, out double frameRate); myVideo.Input = avsFile; myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.Input) + "_Video"); myVideo.NumberOfFrames = frameCount; myVideo.Framerate = (decimal)frameRate; myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec); myVideo.Settings = videoSettings; } else { bError = true; } } else { myVideo.DAR = job.PostprocessingProperties.ForcedDAR; myVideo.Output = job.PostprocessingProperties.VideoFileToMux; MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref log); if (Path.GetExtension(job.PostprocessingProperties.VideoFileToMux).Equals(".unknown") && !String.IsNullOrEmpty(oInfo.ContainerFileTypeString)) { job.PostprocessingProperties.VideoFileToMux = Path.ChangeExtension(job.PostprocessingProperties.VideoFileToMux, oInfo.ContainerFileTypeString.ToLowerInvariant()); File.Move(myVideo.Output, job.PostprocessingProperties.VideoFileToMux); myVideo.Output = job.PostprocessingProperties.VideoFileToMux; job.PostprocessingProperties.FilesToDelete.Add(myVideo.Output); } myVideo.Settings = videoSettings; myVideo.Framerate = (decimal)oInfo.VideoInfo.FPS; myVideo.NumberOfFrames = oInfo.VideoInfo.FrameCount; } if (IsJobStopped()) { return; } intermediateFiles.Add(avsFile); intermediateFiles.Add(job.IndexFile); intermediateFiles.AddRange(audioFiles.Values); foreach (string file in arrAudioFilesDelete) { intermediateFiles.Add(file); } intermediateFiles.Add(Path.ChangeExtension(job.Input, ".log")); foreach (string file in job.PostprocessingProperties.FilesToDelete) { intermediateFiles.Add(file); } // subtitle handling List <MuxStream> subtitles = new List <MuxStream>(); if (job.PostprocessingProperties.SubtitleTracks.Count > 0) { foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks) { if (oTrack.TrackInfo.ExtractMKVTrack) { //demuxed MKV string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName; if (File.Exists(trackFile)) { intermediateFiles.Add(trackFile); if (Path.GetExtension(trackFile).ToLowerInvariant().Equals(".idx")) { intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub"); } subtitles.Add(new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null)); } else { log.LogEvent("Ignoring subtitle as it cannot be found: " + trackFile, ImageType.Warning); } } else { // sometimes the language is detected differently by vsrip and the IFO parser. Therefore search also for other files string strDemuxFile = oTrack.DemuxFilePath; if (!File.Exists(strDemuxFile) && Path.GetFileNameWithoutExtension(strDemuxFile).Contains("_")) { string strDemuxFileName = Path.GetFileNameWithoutExtension(strDemuxFile); strDemuxFileName = strDemuxFileName.Substring(0, strDemuxFileName.LastIndexOf("_")) + "_*" + Path.GetExtension(strDemuxFile); foreach (string strFileName in Directory.GetFiles(Path.GetDirectoryName(strDemuxFile), strDemuxFileName)) { strDemuxFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), strFileName); intermediateFiles.Add(strDemuxFile); intermediateFiles.Add(Path.ChangeExtension(strDemuxFile, ".sub")); log.LogEvent("Subtitle " + oTrack.DemuxFilePath + " cannot be found. " + strFileName + " will be used instead", ImageType.Information); break; } } if (File.Exists(strDemuxFile)) { string strTrackName = oTrack.Name; // check if a forced stream is available string strForcedFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), Path.GetFileNameWithoutExtension(strDemuxFile) + "_forced.idx"); if (File.Exists(strForcedFile)) { subtitles.Add(new MuxStream(strForcedFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(true, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, true, null)); intermediateFiles.Add(strForcedFile); intermediateFiles.Add(Path.ChangeExtension(strForcedFile, ".sub")); } subtitles.Add(new MuxStream(strDemuxFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(false, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, (File.Exists(strForcedFile) ? false : oTrack.ForcedStream), null)); } else { log.LogEvent("Ignoring subtitle as it cannot be found: " + oTrack.DemuxFilePath, ImageType.Warning); } } } } if (IsJobStopped()) { return; } if (!bError) { c = VideoUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(), subtitles.ToArray(), job.PostprocessingProperties.Attachments, job.PostprocessingProperties.TimeStampFile, job.PostprocessingProperties.ChapterInfo, job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container, job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(), log, job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux, job.PostprocessingProperties.AudioTracks.ToArray(), true); } if (c != null && !String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && c.Jobs[c.Jobs.Length - 1].Job is MuxJob && (c.Jobs[c.Jobs.Length - 1].Job as MuxJob).MuxType == MuxerType.MP4BOX) { // last job is a mp4box job and vfr timecode data has to be applied MP4FpsModJob mp4FpsMod = new MP4FpsModJob(((MuxJob)c.Jobs[c.Jobs.Length - 1].Job).Output, job.PostprocessingProperties.TimeStampFile); c = new SequentialChain(c, new SequentialChain(mp4FpsMod)); } } catch (Exception e) { log.LogValue("An error occurred", e, ImageType.Error); bError = true; } if (c == null || bError) { log.Error("Job creation aborted"); su.HasError = true; } // add cleanup job also in case of an error c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput); MainForm.Instance.Jobs.AddJobsWithDependencies(c, false); // batch processing other input files if necessary if (job.PostprocessingProperties.FilesToProcess.Count > 0) { OneClickWindow ocw = new OneClickWindow(); ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting); } su.IsComplete = true; }
public JobChain GenerateJobSeries(VideoStream video, string muxedOutput, AudioJob[] audioStreams, MuxStream[] subtitles, string chapters, FileSize?desiredSize, FileSize?splitSize, ContainerType container, bool prerender, MuxStream[] muxOnlyAudio) { StringBuilder logBuilder = new StringBuilder(); if (desiredSize.HasValue) { logBuilder.Append("Generating jobs. Desired size: " + desiredSize.Value.ToString() + "\r\n"); if (video.Settings.EncodingMode != 4 && video.Settings.EncodingMode != 8) // no automated 2/3 pass { if (this.mainForm.Settings.NbPasses == 2) { video.Settings.EncodingMode = 4; // automated 2 pass } else if (video.Settings.MaxNumberOfPasses == 3) { video.Settings.EncodingMode = 8; } } } else { logBuilder.Append("Generating jobs. No desired size.\r\n"); } fixFileNameExtensions(video, audioStreams, container); string videoOutput = video.Output; logBuilder.Append(eliminatedDuplicateFilenames(ref videoOutput, ref muxedOutput, audioStreams)); video.Output = videoOutput; JobChain vjobs = jobUtil.prepareVideoJob(video.Input, video.Output, video.Settings, video.DAR, prerender, true); if (vjobs == null) { return(null); } /* Here, we guess the types of the files based on extension. * This is guaranteed to work with MeGUI-encoded files, because * the extension will always be recognised. For non-MeGUI files, * we can only ever hope.*/ List <MuxStream> allAudioToMux = new List <MuxStream>(); List <MuxableType> allInputAudioTypes = new List <MuxableType>(); foreach (MuxStream muxStream in muxOnlyAudio) { if (VideoUtil.guessAudioMuxableType(muxStream.path, true) != null) { allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(muxStream.path, true)); allAudioToMux.Add(muxStream); } } foreach (AudioJob stream in audioStreams) { allAudioToMux.Add(stream.ToMuxStream()); allInputAudioTypes.Add(stream.ToMuxableType()); } List <MuxableType> allInputSubtitleTypes = new List <MuxableType>(); foreach (MuxStream muxStream in subtitles) { if (VideoUtil.guessSubtitleType(muxStream.path) != null) { allInputSubtitleTypes.Add(new MuxableType(VideoUtil.guessSubtitleType(muxStream.path), null)); } } MuxableType chapterInputType = null; if (!String.IsNullOrEmpty(chapters)) { ChapterType type = VideoUtil.guessChapterType(chapters); if (type != null) { chapterInputType = new MuxableType(type, null); } } JobChain muxJobs = this.jobUtil.GenerateMuxJobs(video, video.Framerate, allAudioToMux.ToArray(), allInputAudioTypes.ToArray(), subtitles, allInputSubtitleTypes.ToArray(), chapters, chapterInputType, container, muxedOutput, splitSize, true); /* foreach (Job mJob in muxJobs) * foreach (Job job in jobs) * mJob.AddDependency(job);*/ /* * foreach (VideoJob job in vjobs) * { * jobs.Add(job); * } * foreach (MuxJob job in muxJobs) * { * jobs.Add(job); * } */ if (desiredSize.HasValue) { /* if (encodedAudioPresent) // no audio encoding, we can calculate the video bitrate directly * { * logBuilder.Append("No audio encoding. Calculating desired video bitrate directly.\r\n"); * List<AudioStream> calculationAudioStreams = new List<AudioStream>(); * foreach (SubStream stream in muxOnlyAudio) * { * FileInfo fi = new FileInfo(stream.path); * AudioStream newStream = new AudioStream(); * newStream.SizeBytes = fi.Length; * newStream.Type = guessAudioType(stream.path); * newStream.BitrateMode = BitrateManagementMode.VBR; * calculationAudioStreams.Add(newStream); * logBuilder.Append("Encoded audio file is present: " + stream.path + * " It has a size of " + fi.Length + " bytes. \r\n"); * } * * long videoSizeKB; * bool useBframes = false; * if (video.Settings.NbBframes > 0) * useBframes = true; * * bitrateKBits = calc.CalculateBitrateKBits(video.Settings.Codec, useBframes, container, calculationAudioStreams.ToArray(), * desiredSizeBytes, video.NumberOfFrames, video.Framerate, out videoSizeKB); * desiredSizeBytes = (long)videoSizeKB * 1024L; // convert kb back to bytes * logBuilder.Append("Setting video bitrate for the video jobs to " + bitrateKBits + " kbit/s\r\n"); * foreach (VideoJob vJob in vjobs) * { * jobUtil.updateVideoBitrate(vJob, bitrateKBits); * } * }*/ BitrateCalculationInfo b = new BitrateCalculationInfo(); List <string> audiofiles = new List <string>(); foreach (MuxStream s in allAudioToMux) { audiofiles.Add(s.path); } b.AudioFiles = audiofiles; b.Container = container; b.VideoJobs = new List <TaggedJob>(vjobs.Jobs); b.DesiredSize = desiredSize.Value; ((VideoJob)vjobs.Jobs[0].Job).BitrateCalculationInfo = b; } mainForm.addToLog(logBuilder.ToString()); return (new SequentialChain( new ParallelChain((Job[])audioStreams), new SequentialChain(vjobs), new SequentialChain(muxJobs))); }
public string GetOutputTypeFilter() { return(VideoUtil.GenerateCombinedFilter(supportedContainers.ToArray())); }
public string GetMuxedInputFilter() { return(VideoUtil.GenerateCombinedFilter(GetSupportedContainerInputTypes().ToArray())); }
public static string GetInputLine(string input, string indexFile, bool interlaced, PossibleSources sourceType, bool colormatrix, bool mpeg2deblock, bool flipVertical, double fps, bool dss2) { string inputLine = "#input"; string strDLLPath = ""; switch (sourceType) { case PossibleSources.avs: inputLine = "Import(\"" + input + "\")"; break; case PossibleSources.d2v: if (String.IsNullOrEmpty(indexFile)) { indexFile = input; } strDLLPath = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.DgIndexPath), "DGDecode.dll"); inputLine = "LoadPlugin(\"" + strDLLPath + "\")\r\nDGDecode_mpeg2source(\"" + indexFile + "\""; if (mpeg2deblock) { inputLine += ", cpu=4"; } if (colormatrix) { inputLine += ", info=3"; } inputLine += ")"; if (colormatrix) { inputLine += string.Format("\r\nLoadPlugin(\"" + Path.Combine(MainForm.Instance.Settings.AvisynthPluginsPath, "ColorMatrix.dll") + "\")\r\nColorMatrix(hints=true{0}, threads=0)", interlaced ? ", interlaced=true" : ""); } break; case PossibleSources.dga: if (String.IsNullOrEmpty(indexFile)) { indexFile = input; } strDLLPath = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.DgavcIndexPath), "DGAVCDecode.dll"); inputLine = "LoadPlugin(\"" + strDLLPath + "\")\r\nAVCSource(\"" + indexFile + "\")"; break; case PossibleSources.dgi: if (String.IsNullOrEmpty(indexFile)) { indexFile = input; } strDLLPath = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.DgnvIndexPath), "DGDecodeNV.dll"); inputLine = "LoadPlugin(\"" + strDLLPath + "\")\r\nDGSource(\"" + indexFile + "\""; if (MainForm.Instance.Settings.AutoForceFilm && MainForm.Instance.Settings.ForceFilmThreshold <= (decimal)dgiFile.GetFilmPercent(indexFile)) { inputLine += ",fieldop=1"; } else { inputLine += ",fieldop=0"; } break; case PossibleSources.ffindex: strDLLPath = Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.FFMSIndexPath), "ffms2.dll"); if (input.ToLower(System.Globalization.CultureInfo.InvariantCulture).EndsWith(".ffindex")) { inputLine = "LoadPlugin(\"" + strDLLPath + "\")\r\nFFVideoSource(\"" + input.Substring(0, input.Length - 8) + "\"" + (MainForm.Instance.Settings.FFMSThreads > 0 ? ", threads=" + MainForm.Instance.Settings.FFMSThreads : String.Empty) + ")"; } else if (!String.IsNullOrEmpty(indexFile)) { inputLine = "LoadPlugin(\"" + strDLLPath + "\")\r\nFFVideoSource(\"" + input + "\"" + (!string.IsNullOrEmpty(indexFile) ? ", cachefile=\"" + indexFile + "\"" : String.Empty) + (MainForm.Instance.Settings.FFMSThreads > 0 ? ", threads=" + MainForm.Instance.Settings.FFMSThreads : String.Empty) + ")"; } else { inputLine = "LoadPlugin(\"" + strDLLPath + "\")\r\nFFVideoSource(\"" + input + "\"" + (MainForm.Instance.Settings.FFMSThreads > 0 ? ", threads=" + MainForm.Instance.Settings.FFMSThreads : String.Empty) + ")"; } break; case PossibleSources.vdr: inputLine = "AVISource(\"" + input + "\", audio=false)" + VideoUtil.getAssumeFPS(fps, input); break; case PossibleSources.directShow: if (input.ToLower(System.Globalization.CultureInfo.InvariantCulture).EndsWith(".avi")) { inputLine = "AVISource(\"" + input + "\", audio=false)" + VideoUtil.getAssumeFPS(fps, input); } else { if (dss2) { inputLine = "LoadPlugin(\"" + MeGUISettings.HaaliMSPath + "\\avss.dll" + "\")\r\ndss2(\"" + input + "\"" + ((fps > 0) ? ", fps=" + fps.ToString("F3", new CultureInfo("en-us")) : string.Empty) + ")" + VideoUtil.getAssumeFPS(fps, input); } else { inputLine = "LoadPlugin(\"" + Path.Combine(Path.GetDirectoryName(MainForm.Instance.Settings.AviSynthPath), "directshowsource.dll") + "\")\r\nDirectShowSource(\"" + input + "\"" + ((fps > 0) ? ", fps=" + fps.ToString("F3", new CultureInfo("en-us")) : string.Empty) + ", audio=false, convertfps=true)" + VideoUtil.getAssumeFPS(fps, input); } if (flipVertical) { inputLine = inputLine + "\r\nFlipVertical()"; } } break; } return(inputLine); }
/// <summary> /// creates the AVS script file /// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings /// the proper resolution for automatic resizing, taking into account the derived cropping values, /// is calculated, and finally the avisynth script is written and its name returned /// </summary> /// <param name="indexFile">the index file (e.g. dgindex script) of the source</param> /// <param name="inputFile">the source video file</param> /// <param name="AR">display aspect ratio for this source, null to auto-detect</param> /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param> /// <param name="signalAR">whether or not AR signalling is to be used for the output /// (depending on this parameter, resizing changes to match the source AR)</param> /// <param name="_log">the log item to report to</param> /// <param name="avsSettings">the AviSynth profile settings</param> /// <param name="autoDeint">whether or not automatic deinterlacing is used</param> /// <param name="settings">the codec settings (used only for x264)</param> /// <param name="dar">output parameter for the display aspect ratio to signal, if any</param> /// <param name="autoCrop">whether or not autoCrop is used for the input</param> /// <param name="keepInputResolution">whether or not the input resolution must be left untouched</param> /// <param name="useChaptersMarks">whether or not chapter marks should be turned into keyframes</param> /// <returns>the name of the AviSynth script created, empty if there was an error</returns> private string createAVSFile(string indexFile, string inputFile, Dar?AR, int desiredOutputWidth, bool signalAR, LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar?dar, bool autoCrop, bool keepInputResolution, bool useChaptersMarks) { dar = null; Dar customDAR; IMediaFile iMediaFile = null; IVideoReader reader; PossibleSources oPossibleSource; x264Device xTargetDevice = null; int outputWidthIncludingPadding = 0; int outputHeightIncludingPadding = 0; int outputWidthCropped = 0; int outputHeightCropped = 0; CropValues cropValues = new CropValues(); bool bAdjustResolution = false; bool bCropped = false; // open index file to retrieve information if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI) { iMediaFile = new dgiFile(indexFile); oPossibleSource = PossibleSources.dgi; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V) { iMediaFile = new d2vFile(indexFile); oPossibleSource = PossibleSources.d2v; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGA) { iMediaFile = new dgaFile(indexFile); oPossibleSource = PossibleSources.dga; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS) { iMediaFile = new ffmsFile(inputFile, indexFile); oPossibleSource = PossibleSources.ffindex; } else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE) { string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile); iMediaFile = AvsFile.ParseScript(tempAvs); oPossibleSource = PossibleSources.directShow; } else { iMediaFile = AvsFile.OpenScriptFile(inputFile); oPossibleSource = PossibleSources.avs; } reader = iMediaFile.GetVideoReader(); // abort if the index file is invalid if (reader.FrameCount < 1) { _log.Error("There are 0 frames in the index file. Aborting..."); return(""); } if (AR == null) { // AR needs to be detected automatically now _log.LogValue("Auto-detect aspect ratio", AR == null); customDAR = iMediaFile.VideoInfo.DAR; if (customDAR.ar <= 0) { customDAR = Dar.ITU16x9PAL; _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR)); } } else { customDAR = AR.Value; } _log.LogValue("Aspect ratio", customDAR); // check x264 settings (target device, chapter file) if (settings != null && settings is x264Settings) { x264Settings xs = (x264Settings)settings; xTargetDevice = xs.TargetDevice; // create qpf file if necessary if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile) && useChaptersMarks) { qpfile = job.PostprocessingProperties.ChapterFile; if ((Path.GetExtension(qpfile).ToLower(System.Globalization.CultureInfo.InvariantCulture)) == ".txt") { qpfile = VideoUtil.convertChaptersTextFileTox264QPFile(job.PostprocessingProperties.ChapterFile, iMediaFile.VideoInfo.FPS); } if (File.Exists(qpfile)) { xs.UseQPFile = true; xs.QPFile = qpfile; } } } // if encoding for a specific device select the appropriate resolution setting if (xTargetDevice != null && xTargetDevice.Width > 0 && xTargetDevice.Height > 0) { if (keepInputResolution) { // resolution should not be changed - use input resolution outputWidthCropped = (int)iMediaFile.VideoInfo.Width; outputHeightCropped = (int)iMediaFile.VideoInfo.Height; } else { // crop input video if selected if (autoCrop) { if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false) { _log.Error("Autocrop failed. Aborting..."); return(""); } bCropped = true; } outputWidthCropped = desiredOutputWidth; outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar); dar = null; } if (xTargetDevice.Width < outputWidthCropped) { // width must be lowered to be target conform bAdjustResolution = true; if (keepInputResolution) { keepInputResolution = false; _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution width of " + outputWidthCropped + ". The maximum value is " + xTargetDevice.Width + "."); } } else if (xTargetDevice.Height < outputHeightCropped) { // height must be lowered to be target conform bAdjustResolution = true; if (keepInputResolution) { keepInputResolution = false; _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution height of " + outputHeightCropped + ". The maximum value is " + xTargetDevice.Height + "."); } } else if (xTargetDevice.BluRay) { string strResolution = outputWidthCropped + "x" + outputHeightCropped; if (!strResolution.Equals("1920x1080") && !strResolution.Equals("1440x1080") && !strResolution.Equals("1280x720") && !strResolution.Equals("720x576") && !strResolution.Equals("720x480")) { bAdjustResolution = true; if (keepInputResolution) { keepInputResolution = false; _log.LogEvent("Disabling \"Keep Input Resolution\" as " + xTargetDevice.Name + " does not support a resolution of " + outputWidthCropped + "x" + outputHeightCropped + ". Supported are 1920x1080, 1440x1080, 1280x720, 720x576 and 720x480."); } } else { outputWidthIncludingPadding = outputWidthCropped; outputHeightIncludingPadding = outputHeightCropped; } } if (bAdjustResolution) { if (!autoCrop) { autoCrop = true; _log.LogEvent("Enabling \"AutoCrop\""); } } } else { outputWidthCropped = desiredOutputWidth; } if (!keepInputResolution && autoCrop && !bCropped) { // crop input video if required if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method) == false) { _log.Error("Autocrop failed. Aborting..."); return(""); } bCropped = true; } if (bAdjustResolution) { // adjust horizontal resolution as width or height are too large if (xTargetDevice.BluRay) { if (outputWidthCropped >= 1920) { outputWidthCropped = 1920; outputHeightIncludingPadding = 1080; _log.LogEvent("Force resolution of 1920x1080 as required for " + xTargetDevice.Name); } else if (outputWidthCropped >= 1280) { outputWidthCropped = 1280; outputHeightIncludingPadding = 720; _log.LogEvent("Force resolution of 1280x720 as required for " + xTargetDevice.Name); } else { outputWidthCropped = 720; Double dfps = Convert.ToDouble(iMediaFile.VideoInfo.FPS_N) / iMediaFile.VideoInfo.FPS_D; if (dfps == 25) { outputHeightIncludingPadding = 576; _log.LogEvent("Force resolution of 720x576 as required for " + xTargetDevice.Name); } else { outputHeightIncludingPadding = 480; _log.LogEvent("Force resolution of 720x480 as required for " + xTargetDevice.Name); } } outputWidthIncludingPadding = outputWidthCropped; } else if (outputWidthCropped > xTargetDevice.Width) { outputWidthCropped = xTargetDevice.Width; _log.LogEvent("Set resolution width to " + outputWidthCropped + " as required for " + xTargetDevice.Name); } // adjust cropped vertical resolution outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar); while (outputHeightCropped > xTargetDevice.Height || (xTargetDevice.BluRay && outputHeightCropped > outputHeightIncludingPadding)) { outputWidthCropped -= 16; outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar); } } if (keepInputResolution) { outputWidthCropped = outputWidthIncludingPadding = (int)iMediaFile.VideoInfo.Width; outputHeightCropped = outputHeightIncludingPadding = (int)iMediaFile.VideoInfo.Height; dar = customDAR; } else if (xTargetDevice == null || (xTargetDevice != null && !xTargetDevice.BluRay)) { // Minimise upsizing int sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width - cropValues.right - cropValues.left; if (autoCrop) { sourceHorizontalResolution = (int)iMediaFile.VideoInfo.Width; } if (outputWidthCropped > sourceHorizontalResolution) { if (avsSettings.Mod16Method == mod16Method.resize) { while (outputWidthCropped > sourceHorizontalResolution + 16) { outputWidthCropped -= 16; } } else { outputWidthCropped = sourceHorizontalResolution; } } } // calculate height if (!keepInputResolution) { outputHeightCropped = Resolution.suggestResolution(iMediaFile.VideoInfo.Height, iMediaFile.VideoInfo.Width, (double)customDAR.ar, cropValues, outputWidthCropped, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar); } // set complete padding if required if (outputHeightIncludingPadding == 0 && outputWidthIncludingPadding > 0) { outputHeightIncludingPadding = outputHeightCropped; } if (outputWidthIncludingPadding == 0 && outputHeightIncludingPadding > 0) { outputWidthIncludingPadding = outputWidthCropped; } // write calculated output resolution into the log _log.LogValue("Input resolution", iMediaFile.VideoInfo.Width + "x" + iMediaFile.VideoInfo.Height); if (autoCrop && !keepInputResolution && cropValues.isCropped()) { _log.LogValue("Autocrop values", cropValues); _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped); } else { _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped); } if (outputWidthIncludingPadding > 0 && (outputWidthIncludingPadding != outputWidthCropped || outputHeightIncludingPadding != outputHeightCropped)) { _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding); } if (outputWidthCropped <= 0 || outputHeightCropped <= 0) { _log.Error("Error in detection of output resolution"); return(""); } //Generate the avs script based on the template string inputLine = "#input"; string deinterlaceLines = "#deinterlace"; string denoiseLines = "#denoise"; string cropLine = "#crop"; string resizeLine = "#resize"; inputLine = ScriptServer.GetInputLine(inputFile, indexFile, false, oPossibleSource, false, false, false, 0, avsSettings.DSS2); if (!inputLine.EndsWith(")")) { inputLine += ")"; } _log.LogValue("Automatic deinterlacing", autoDeint); if (autoDeint) { raiseEvent("Automatic deinterlacing... ***PLEASE WAIT***"); string d2vPath = indexFile; SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis)); finished = false; sd.analyse(); waitTillAnalyseFinished(); sd.stop(); deinterlaceLines = filters[0].Script; if (interlaced) { _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning); } else { _log.LogValue("Deinterlacing used", deinterlaceLines); } } raiseEvent("Finalizing preprocessing... ***PLEASE WAIT***"); inputLine = ScriptServer.GetInputLine(inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0, avsSettings.DSS2); if (!inputLine.EndsWith(")")) { inputLine += ")"; } if (!keepInputResolution && autoCrop) { cropLine = ScriptServer.GetCropLine(true, cropValues); } denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod); if (!keepInputResolution) { resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || (int)iMediaFile.VideoInfo.Width != outputWidthCropped, outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod, autoCrop, cropValues, (int)iMediaFile.VideoInfo.Width, (int)iMediaFile.VideoInfo.Height); } string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines); if (dar.HasValue) { newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript); } else { if (xTargetDevice != null && xTargetDevice.BluRay) { string strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding; x264Settings _xs = (x264Settings)settings; if (strResolution.Equals("720x480")) { _xs.SampleAR = 4; _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } else if (strResolution.Equals("720x576")) { _xs.SampleAR = 5; _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080")) { _xs.SampleAR = 1; _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } else if (strResolution.Equals("1440x1080")) { _xs.SampleAR = 2; _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of " + strResolution + " as required for " + xTargetDevice.Name + "."); } } } _log.LogValue("Generated AviSynth script", newScript); string strOutputAVSFile; if (String.IsNullOrEmpty(indexFile)) { strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs"); } else { strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs"); } try { StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default); sw.Write(newScript); sw.Close(); } catch (IOException i) { _log.LogValue("Error saving AviSynth script", i, ImageType.Error); return(""); } return(strOutputAVSFile); }
private void StartPostProcessing()
{
    Thread t = null;
    try
    {
        _log.LogEvent("Processing thread started");
        raiseEvent("Preprocessing... ***PLEASE WAIT***");
        _start = DateTime.Now;
        t = new Thread(new ThreadStart(delegate
        {
            while (true)
            {
                updateTime();
                Thread.Sleep(1000);
            }
        }));
        t.Start();

        List<string> arrAudioFilesDelete = new List<string>();
        audioFiles = new Dictionary<int, string>();
        List<AudioTrackInfo> arrAudioTracks = new List<AudioTrackInfo>();
        List<AudioJob> arrAudioJobs = new List<AudioJob>();
        List<MuxStream> arrMuxStreams = new List<MuxStream>();
        List<string> intermediateFiles = new List<string>();
        FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);

        foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
        {
            if (oAudioTrack.ExtractMKVTrack)
            {
                audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
            }
            else if (oAudioTrack.AudioTrackInfo != null)
            {
                arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
            }
            if (oAudioTrack.AudioJob != null)
            {
                if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE && String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                {
                    oAudioTrack.AudioJob.Input = job.Input;
                }
                arrAudioJobs.Add(oAudioTrack.AudioJob);
            }
            if (oAudioTrack.DirectMuxAudio != null)
            {
                arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
            }
        }
        if (audioFiles.Count == 0 && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE && !job.PostprocessingProperties.Eac3toDemux)
        {
            audioFiles = vUtil.getAllDemuxedAudio(arrAudioTracks, new List<AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, _log);
        }
        fillInAudioInformation(arrAudioJobs, arrMuxStreams);

        if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            _log.LogEvent("Don't encode video: True");
        }
        else
        {
            _log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
        }
        _log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);

        // chapter file handling
        if (String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
        {
            job.PostprocessingProperties.ChapterFile = null;
        }
        else if (job.PostprocessingProperties.Container == ContainerType.AVI)
        {
            _log.LogEvent("Chapter handling disabled because of the AVI target container");
            job.PostprocessingProperties.ChapterFile = null;
        }
        else if (!File.Exists(job.PostprocessingProperties.ChapterFile))
        {
            if (job.PostprocessingProperties.ChapterFile.StartsWith("<") || job.PostprocessingProperties.ChapterExtracted)
            {
                // internal chapter file
                string strTempFile = job.PostprocessingProperties.ChapterFile;
                if (Path.GetExtension(job.PostprocessingProperties.VideoInput).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".mkv"))
                {
                    MediaInfoFile oInfo = new MediaInfoFile(job.PostprocessingProperties.VideoInput, ref _log);
                    if (oInfo.hasMKVChapters())
                    {
                        job.PostprocessingProperties.ChapterFile = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.IndexFile) + " - Chapter Information.txt");
                        if (oInfo.extractMKVChapters(job.PostprocessingProperties.ChapterFile))
                        {
                            intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                            job.PostprocessingProperties.ChapterExtracted = true;
                        }
                        else
                        {
                            job.PostprocessingProperties.ChapterFile = strTempFile;
                        }
                    }
                }
                else if (File.Exists(job.PostprocessingProperties.IFOInput))
                {
                    job.PostprocessingProperties.ChapterFile = VideoUtil.getChaptersFromIFO(job.PostprocessingProperties.IFOInput, false, job.PostprocessingProperties.WorkingDirectory, job.PostprocessingProperties.TitleNumberToProcess);
                    if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
                    {
                        intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                        job.PostprocessingProperties.ChapterExtracted = true;
                    }
                    else
                    {
                        job.PostprocessingProperties.ChapterFile = strTempFile;
                    }
                }
            }
            if (!File.Exists(job.PostprocessingProperties.ChapterFile))
            {
                _log.LogEvent("File not found: " + job.PostprocessingProperties.ChapterFile, ImageType.Error);
                job.PostprocessingProperties.ChapterFile = null;
            }
        }
        else if (job.PostprocessingProperties.ChapterExtracted)
        {
            intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
        }

        string avsFile = String.Empty;
        VideoStream myVideo = new VideoStream();
        VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
        if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            // Open the video
            Dar? dar;
            avsFile = createAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR,
                job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, _log,
                job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar,
                job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                job.PostprocessingProperties.UseChaptersMarks);
            ulong length;
            double framerate;
            JobUtil.getInputProperties(out length, out framerate, avsFile);
            myVideo.Input = avsFile;
            myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.Input) + "_Video");
            myVideo.NumberOfFrames = length;
            myVideo.Framerate = (decimal)framerate;
            myVideo.DAR = dar;
            myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
            myVideo.Settings = videoSettings;
        }
        else
        {
            myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
            myVideo.Settings = videoSettings;
            MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref _log);
            videoSettings.VideoName = oInfo.VideoInfo.Track.Name;
            myVideo.Framerate = (decimal)oInfo.VideoInfo.FPS;
        }

        intermediateFiles.Add(avsFile);
        intermediateFiles.Add(job.IndexFile);
        intermediateFiles.AddRange(audioFiles.Values);
        if (!string.IsNullOrEmpty(qpfile))
        {
            intermediateFiles.Add(qpfile);
        }
        foreach (string file in arrAudioFilesDelete)
        {
            intermediateFiles.Add(file);
        }
        if (File.Exists(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log")))
        {
            intermediateFiles.Add(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log"));
        }
        foreach (string file in job.PostprocessingProperties.FilesToDelete)
        {
            intermediateFiles.Add(file);
        }

        if (!string.IsNullOrEmpty(avsFile) || !String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
        {
            MuxStream[] subtitles;
            if (job.PostprocessingProperties.SubtitleTracks.Count == 0)
            {
                // Create empty subtitles for muxing
                subtitles = new MuxStream[0];
            }
            else
            {
                subtitles = new MuxStream[job.PostprocessingProperties.SubtitleTracks.Count];
                int i = 0;
                foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
                {
                    if (oTrack.TrackInfo.IsMKVContainer())
                    {
                        // demuxed MKV
                        string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                        if (File.Exists(trackFile))
                        {
                            intermediateFiles.Add(trackFile);
                            if (Path.GetExtension(trackFile).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".idx"))
                            {
                                intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                            }
                            subtitles[i] = new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                        }
                        else
                        {
                            _log.LogEvent("File not found: " + trackFile, ImageType.Error);
                        }
                    }
                    else
                    {
                        subtitles[i] = new MuxStream(oTrack.DemuxFilePath, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                    }
                    i++;
                }
            }

            JobChain c = vUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(),
                subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(), _log,
                job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux,
                job.PostprocessingProperties.AudioTracks.ToArray());
            if (c == null)
            {
                _log.Warn("Job creation aborted");
                return;
            }

            c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
            mainForm.Jobs.addJobsWithDependencies(c);

            // batch processing other input files if necessary
            if (job.PostprocessingProperties.FilesToProcess.Count > 0)
            {
                OneClickWindow ocw = new OneClickWindow(mainForm);
                ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
            }
        }
    }
    catch (Exception e)
    {
        if (t != null) // guard: an exception before the timer thread is created would leave t null
        {
            t.Abort();
        }
        if (e is ThreadAbortException)
        {
            _log.LogEvent("Aborting...");
            su.WasAborted = true;
            su.IsComplete = true;
            raiseEvent();
        }
        else
        {
            _log.LogValue("An error occurred", e, ImageType.Error);
            su.HasError = true;
            su.IsComplete = true;
            raiseEvent();
        }
        return;
    }
    if (t != null)
    {
        t.Abort();
    }
    su.IsComplete = true;
    raiseEvent();
}
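// NOTE (illustrative, not part of MeGUI): the elapsed-time updater above runs an infinite
// loop on a background thread and is torn down with Thread.Abort(), which injects a
// ThreadAbortException into the worker and is deprecated on modern .NET. A cooperative
// sketch of the same one-second tick using System.Threading.Timer, stopped by disposal
// rather than abortion (the class and its names are an assumption for illustration):
private sealed class ElapsedTimeTicker : IDisposable
{
    private readonly System.Threading.Timer _timer;

    public ElapsedTimeTicker(Action updateTime)
    {
        // fire immediately, then once per second
        _timer = new System.Threading.Timer(_ => updateTime(), null, 0, 1000);
    }

    public void Dispose()
    {
        _timer.Dispose(); // no further callbacks after disposal
    }
}
// usage sketch: using (new ElapsedTimeTicker(updateTime)) { /* ...post-processing... */ }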
public string GetChapterInputFilter() { return(VideoUtil.GenerateCombinedFilter(supportedChapterTypes.ToArray())); }
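// NOTE (illustrative, not part of MeGUI): VideoUtil.GenerateCombinedFilter is not shown in
// this excerpt. Its results are assigned to .Filter properties of file-input controls
// elsewhere in this file, so it presumably builds a WinForms file-dialog filter string.
// A hypothetical sketch, simplified to take plain extension strings where the real method
// takes typed arrays:
private static string GenerateCombinedFilterSketch(string[] extensions)
{
    // e.g. {"txt", "xml"} -> "All supported files|*.txt;*.xml|TXT files (*.txt)|*.txt|XML files (*.xml)|*.xml"
    string allMasks = string.Join(";", Array.ConvertAll(extensions, x => "*." + x));
    System.Text.StringBuilder sb = new System.Text.StringBuilder("All supported files|" + allMasks);
    foreach (string ext in extensions)
    {
        sb.Append("|").Append(ext.ToUpperInvariant()).Append(" files (*.").Append(ext).Append(")|*.").Append(ext);
    }
    return sb.ToString();
}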
internal OneClickPostProcessing(MainForm mf) { mainForm = mf; this.vUtil = new VideoUtil(mainForm); }
public string GetSubtitleInputFilter() { return(VideoUtil.GenerateCombinedFilter(supportedSubtitleTypes.ToArray())); }
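// NOTE (illustrative, not part of MeGUI): a minimal usage sketch for the two input-filter
// getters above, wired to a standard WinForms open dialog; the method name and dialog
// handling here are assumptions for illustration:
private void browseForSubtitleSketch()
{
    using (System.Windows.Forms.OpenFileDialog dlg = new System.Windows.Forms.OpenFileDialog())
    {
        dlg.Filter = GetSubtitleInputFilter(); // e.g. "All supported files|*.srt;*.ass|..."
        if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            string subtitlePath = dlg.FileName;
            // hand subtitlePath to the muxer configuration here
        }
    }
}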