/// <summary>
/// handles the go button for automated encoding
/// checks if we're in automated 2 pass video mode and that we're not using the snow codec
/// then the video and audio configuration is checked, and if it checks out
/// the audio job, video jobs and muxing job are generated, audio and video job are linked
/// and encoding is started
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void queueButton_Click(object sender, System.EventArgs e)
{
    if (!string.IsNullOrEmpty(this.muxedOutput.Filename))
    {
        FileSize? desiredSize = targetSize.Value;
        FileSize? splitSize = splitting.Value;

        if (FileSizeRadio.Checked)
        {
            log.LogValue("Desired Size ", desiredSize);
        }
        else if (averageBitrateRadio.Checked)
        {
            log.LogValue("Projected Bitrate ", string.Format("{0}kbps", projectedBitrateKBits.Text));
        }
        log.LogValue("Split Size ", splitSize);

        MuxStream[] audio;
        AudioJob[] aStreams;
        AudioEncoderType[] muxTypes;
        separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);

        MuxStream[] subtitles = new MuxStream[0];
        string chapters = "";
        string videoInput = vInfo.VideoInput;
        string videoOutput = vInfo.VideoOutput;
        string muxedOutput = this.muxedOutput.Filename;
        ContainerType cot = this.container.SelectedItem as ContainerType;

        if (addSubsNChapters.Checked)
        {
            AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm);
            amw.setMinimizedMode(videoOutput, videoStream.Settings.EncoderType, jobUtil.getFramerate(videoInput),
                audio, muxTypes, muxedOutput, splitSize, cot);
            if (amw.ShowDialog() == DialogResult.OK)
            {
                amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
            }
            else // user aborted, abort the whole process
            {
                return;
            }
        }

        removeStreamsToBeEncoded(ref audio, aStreams);
        mainForm.Jobs.addJobsWithDependencies(vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams,
            subtitles, chapters, desiredSize, splitSize, cot, this.prerender, audio, log));
        this.Close();
    }
}
/// <summary>
/// handles the go button for automated encoding
/// checks if we're in automated 2 pass video mode
/// then the video and audio configuration is checked, and if it checks out
/// the audio job, video jobs and muxing job are generated, audio and video job are linked
/// and encoding is started
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void queueButton_Click(object sender, System.EventArgs e)
{
    if (String.IsNullOrEmpty(this.muxedOutput.Filename))
    {
        return;
    }

    FileSize? desiredSize = targetSize.Value;
    FileSize? splitSize = splitting.Value;

    LogItem log = new LogItem(this.muxedOutput.Filename);
    MainForm.Instance.AutoEncodeLog.Add(log);

    if (FileSizeRadio.Checked)
    {
        log.LogValue("Desired Size", desiredSize);
    }
    else if (averageBitrateRadio.Checked)
    {
        log.LogValue("Projected Bitrate", string.Format("{0}kbps", projectedBitrateKBits.Text));
    }
    else
    {
        log.LogEvent("No Target Size (use profile settings)");
    }
    log.LogValue("Split Size", splitSize);

    MuxStream[] audio;
    AudioJob[] aStreams;
    AudioEncoderType[] muxTypes;
    separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);

    MuxStream[] subtitles = new MuxStream[0];
    ChapterInfo chapters = new ChapterInfo();
    string videoInput = vInfo.VideoInput;
    string videoOutput = vInfo.VideoOutput;
    string muxedOutput = this.muxedOutput.Filename;
    ContainerType cot = this.container.SelectedItem as ContainerType;

    // determine audio language
    foreach (MuxStream stream in audio)
    {
        string strLanguage = LanguageSelectionContainer.GetLanguageFromFileName(Path.GetFileNameWithoutExtension(stream.path));
        if (!String.IsNullOrEmpty(strLanguage))
        {
            stream.language = strLanguage;
        }
    }

    if (addSubsNChapters.Checked)
    {
        AdaptiveMuxWindow amw = new AdaptiveMuxWindow();
        amw.setMinimizedMode(videoOutput, "", videoStream.Settings.EncoderType, JobUtil.getFramerate(videoInput),
            audio, muxTypes, muxedOutput, splitSize, cot);
        if (amw.ShowDialog() == DialogResult.OK)
        {
            amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
        }
        else // user aborted, abort the whole process
        {
            return;
        }
    }

    removeStreamsToBeEncoded(ref audio, aStreams);
    MainForm.Instance.Jobs.AddJobsWithDependencies(VideoUtil.GenerateJobSeries(videoStream, muxedOutput, aStreams,
        subtitles, new List<string>(), String.Empty, chapters, desiredSize, splitSize, cot, prerender, audio, log,
        device.Text, vInfo.Zones, null, null, false), true);
    this.Close();
}
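The newer revision above delegates language tagging to LanguageSelectionContainer.GetLanguageFromFileName, which infers the audio language from tokens in the file name. A minimal sketch of that idea, assuming a hypothetical lookup table and only standard .NET string handling (this is not MeGUI's actual implementation):

using System;
using System.Collections.Generic;
using System.IO;

static class LanguageGuess
{
    // Hypothetical lookup table; MeGUI keeps its own list in LanguageSelectionContainer.
    static readonly Dictionary<string, string> Languages =
        new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            { "english",  "English" },
            { "german",   "German" },
            { "french",   "French" },
            { "japanese", "Japanese" }
        };

    // Returns a language name if the file name contains a known token, otherwise null.
    public static string FromFileName(string path)
    {
        string name = Path.GetFileNameWithoutExtension(path);
        foreach (string token in name.Split(' ', '.', '_', '-', '[', ']', '(', ')'))
        {
            string language;
            if (Languages.TryGetValue(token, out language))
                return language;
        }
        return null;
    }
}

With this sketch, a file such as "movie.german.ac3" would resolve to "German", and the caller only assigns stream.language when the result is non-empty, exactly as the snippet above does.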
private void mnuToolsAdaptiveMuxer_Click(object sender, EventArgs e)
{
    AdaptiveMuxWindow amw = new AdaptiveMuxWindow(this);
    if (amw.ShowDialog() == DialogResult.OK)
        Jobs.addJobsWithDependencies(amw.Jobs);
}
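Both the menu handler and the queue handlers rely on the standard WinForms modal pattern: show the window with ShowDialog and only read its results when DialogResult.OK comes back. A minimal standalone sketch of that pattern, where the MuxJobPicker form and its Jobs property are hypothetical placeholders and only the WinForms calls themselves are real:

using System;
using System.Windows.Forms;

class MuxJobPicker : Form
{
    public string[] Jobs { get; private set; } = new string[0];

    public MuxJobPicker()
    {
        // Clicking OK first runs the handler, then closes the dialog with DialogResult.OK.
        Button ok = new Button { Text = "OK", DialogResult = DialogResult.OK };
        ok.Click += (s, e) => Jobs = new[] { "mux job placeholder" };
        Controls.Add(ok);
        AcceptButton = ok;
    }
}

class Demo
{
    [STAThread]
    static void Main()
    {
        using (MuxJobPicker picker = new MuxJobPicker())
        {
            // Results are only consumed when the user confirmed the dialog.
            if (picker.ShowDialog() == DialogResult.OK)
                Console.WriteLine("{0} job(s) queued", picker.Jobs.Length);
        }
    }
}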
/// <summary>
/// handles the go button for automated encoding
/// checks if we're in automated 2 pass video mode and that we're not using the snow codec
/// then the video and audio configuration is checked, and if it checks out
/// the audio job, video jobs and muxing job are generated, audio and video job are linked
/// and encoding is started
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void queueButton_Click(object sender, System.EventArgs e)
{
    if (!this.muxedOutput.Text.Equals(""))
    {
        long desiredSizeBytes;
        if (!noTargetRadio.Checked)
        {
            try
            {
                desiredSizeBytes = Int64.Parse(this.muxedSizeMBs.Text) * 1048576L;
            }
            catch (Exception f)
            {
                MessageBox.Show("I'm not sure how you want me to reach a target size of <empty>.\r\nWhere I'm from that number doesn't exist.\r\n",
                    "Target size undefined", MessageBoxButtons.OK);
                Console.Write(f.Message);
                return;
            }
        }
        else
        {
            desiredSizeBytes = -1;
        }

        int splitSize = 0;
        if (splitOutput.Checked)
        {
            splitSize = Int32.Parse(this.splitSize.Text);
        }

        if (desiredSizeBytes > 0)
        {
            logBuilder.Append("Desired size of this automated encoding series: " + desiredSizeBytes + " bytes, split size: " + splitSize + "MB\r\n");
        }
        else
        {
            logBuilder.Append("No desired size of this encode. The profile settings will be used");
        }

        SubStream[] audio;
        AudioStream[] aStreams;
        MuxableType[] muxTypes;
        separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);

        SubStream[] subtitles = new SubStream[0];
        string chapters = "";
        string videoInput = mainForm.Video.Info.VideoInput;
        string videoOutput = mainForm.Video.Info.VideoOutput;
        string muxedOutput = this.muxedOutput.Text;
        ContainerType cot = this.container.SelectedItem as ContainerType;

        if (addSubsNChapters.Checked)
        {
            AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm);
            amw.setMinimizedMode(videoOutput, videoStream.VideoType, jobUtil.getFramerate(videoInput),
                audio, muxTypes, muxedOutput, splitSize, cot);
            if (amw.ShowDialog() == DialogResult.OK)
            {
                amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
            }
            else // user aborted, abort the whole process
            {
                return;
            }
        }

        removeStreamsToBeEncoded(ref audio, aStreams);
        this.vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams, subtitles, chapters,
            desiredSizeBytes, splitSize, cot, this.prerender, audio, new List<string>());
        this.Close();
    }
}
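The oldest revision above validates the target size with Int64.Parse inside a try/catch. A minimal sketch of the same check using long.TryParse, which avoids the exception path; the helper itself is illustrative only and not part of MeGUI:

using System;

static class SizeParsing
{
    // Illustrative helper (not part of MeGUI): parse the "size in MB" textbox
    // without relying on exceptions. Returns null for empty or non-positive input.
    public static long? TryParseDesiredSizeBytes(string muxedSizeMBsText)
    {
        long sizeMB;
        if (long.TryParse(muxedSizeMBsText, out sizeMB) && sizeMB > 0)
            return sizeMB * 1048576L; // MB -> bytes
        return null;
    }
}

The caller can then show the existing "Target size undefined" message box whenever the result is null, instead of catching the FormatException thrown by Int64.Parse.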
/// <summary>
/// handles the go button for automated encoding
/// checks if we're in automated 2 pass video mode
/// then the video and audio configuration is checked, and if it checks out
/// the audio job, video jobs and muxing job are generated, audio and video job are linked
/// and encoding is started
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void queueButton_Click(object sender, System.EventArgs e)
{
    if (!string.IsNullOrEmpty(this.muxedOutput.Filename))
    {
        FileSize? desiredSize = targetSize.Value;
        FileSize? splitSize = splitting.Value;

        if (FileSizeRadio.Checked)
        {
            log.LogValue("Desired Size ", desiredSize);
        }
        else if (averageBitrateRadio.Checked)
        {
            log.LogValue("Projected Bitrate ", string.Format("{0}kbps", projectedBitrateKBits.Text));
        }
        log.LogValue("Split Size ", splitSize);

        MuxStream[] audio;
        AudioJob[] aStreams;
        AudioEncoderType[] muxTypes;
        separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);

        MuxStream[] subtitles = new MuxStream[0];
        string chapters = "";
        string videoInput = vInfo.VideoInput;
        string videoOutput = vInfo.VideoOutput;
        string muxedOutput = this.muxedOutput.Filename;
        ContainerType cot = this.container.SelectedItem as ContainerType;

        // determine audio language
        foreach (MuxStream stream in audio)
        {
            foreach (KeyValuePair<string, string> strLanguage in LanguageSelectionContainer.Languages)
            {
                if (Path.GetFileNameWithoutExtension(stream.path).ToLower(System.Globalization.CultureInfo.InvariantCulture)
                        .Contains(strLanguage.Key.ToLower(System.Globalization.CultureInfo.InvariantCulture)))
                {
                    stream.language = strLanguage.Key;
                    break;
                }
            }
        }

        if (addSubsNChapters.Checked)
        {
            AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm);
            amw.setMinimizedMode(videoOutput, "", videoStream.Settings.EncoderType, jobUtil.getFramerate(videoInput),
                audio, muxTypes, muxedOutput, splitSize, cot);
            if (amw.ShowDialog() == DialogResult.OK)
            {
                amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
            }
            else // user aborted, abort the whole process
            {
                return;
            }
        }

        removeStreamsToBeEncoded(ref audio, aStreams);
        mainForm.Jobs.addJobsWithDependencies(vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams,
            subtitles, chapters, desiredSize, splitSize, cot, this.prerender, audio, log,
            this.device.Text, vInfo.Zones, null, null));
        this.Close();
    }
}
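The language loop in this revision lower-cases both strings with the invariant culture and then calls Contains. An equivalent way to express a case-insensitive substring test, without allocating lowered copies, is IndexOf with StringComparison.OrdinalIgnoreCase; a minimal sketch with placeholder file and language names:

using System;

class LanguageMatchDemo
{
    static void Main()
    {
        string fileName = "movie.German.dts";               // placeholder file name
        string[] knownLanguages = { "English", "German" };  // placeholder language keys

        foreach (string language in knownLanguages)
        {
            // Case-insensitive substring match, equivalent to ToLower(...).Contains(...).
            if (fileName.IndexOf(language, StringComparison.OrdinalIgnoreCase) >= 0)
            {
                Console.WriteLine("Detected language: " + language);
                break;
            }
        }
    }
}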
/// <summary>
/// handles the go button for automated encoding
/// checks if we're in automated 2 pass video mode and that we're not using the snow codec
/// then the video and audio configuration is checked, and if it checks out
/// the audio job, video jobs and muxing job are generated, audio and video job are linked
/// and encoding is started
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void queueButton_Click(object sender, System.EventArgs e)
{
    if (!string.IsNullOrEmpty(this.muxedOutput.Filename))
    {
        FileSize? desiredSize = targetSize.Value;
        FileSize? splitSize = splitting.Value;

        if (FileSizeRadio.Checked)
            log.LogValue("Desired Size ", desiredSize);
        else if (averageBitrateRadio.Checked)
            log.LogValue("Projected Bitrate ", string.Format("{0}kbps", projectedBitrateKBits.Text));
        log.LogValue("Split Size ", splitSize);

        MuxStream[] audio;
        AudioJob[] aStreams;
        AudioEncoderType[] muxTypes;
        separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);

        MuxStream[] subtitles = new MuxStream[0];
        string chapters = "";
        string videoInput = vInfo.VideoInput;
        string videoOutput = vInfo.VideoOutput;
        string muxedOutput = this.muxedOutput.Filename;
        ContainerType cot = this.container.SelectedItem as ContainerType;

        if (addSubsNChapters.Checked)
        {
            AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm);
            amw.setMinimizedMode(videoOutput, videoStream.Settings.EncoderType, jobUtil.getFramerate(videoInput),
                audio, muxTypes, muxedOutput, splitSize, cot);
            if (amw.ShowDialog() == DialogResult.OK)
                amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
            else // user aborted, abort the whole process
                return;
        }

        removeStreamsToBeEncoded(ref audio, aStreams);
        mainForm.Jobs.addJobsWithDependencies(vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams,
            subtitles, chapters, desiredSize, splitSize, cot, this.prerender, audio, log, this.device.Text));
        this.Close();
    }
}