/// <summary>
/// Sets the GUI to a minimal mode that only allows configuring audio track
/// languages, subtitles and chapters; the rest of the options are deactivated.
/// </summary>
/// <param name="videoInput">the video input file</param>
/// <param name="videoType">the muxable type of the (fixed) video input</param>
/// <param name="framerate">the framerate of the video input</param>
/// <param name="audioStreams">the audio streams whose languages have to be assigned (0, 1 or 2 entries)</param>
/// <param name="audioTypes">the muxable types of the predefined audio streams</param>
/// <param name="output">the output file</param>
/// <param name="splitSize">the output split size; values &lt;= 0 leave splitting disabled</param>
/// <param name="cft">the container type to preselect, if the updated container list supports it</param>
public void setMinimizedMode(string videoInput, MuxableType videoType, double framerate, SubStream[] audioStreams, MuxableType[] audioTypes, string output, int splitSize, ContainerType cft)
{
    minimizedMode = true;
    knownVideoType = videoType;
    knownAudioTypes = audioTypes;
    // The video is predefined in minimized mode, so the whole video group box is locked.
    videoGroupbox.Enabled = false;
    this.videoInput.Text = videoInput;
    // Preselect the framerate only if it is one of the combo box's known values.
    int fpsIndex = muxFPS.Items.IndexOf(framerate);
    if (fpsIndex != -1)
    {
        muxFPS.SelectedIndex = fpsIndex;
    }
    if (audioStreams.Length == 1) // 1 stream predefined
    {
        preconfigured = new bool[] { true, false };
        this.audioStreams[0] = audioStreams[0];
        audioInputOpenButton.Enabled = false;
        removeAudioTrackButton.Enabled = false;
        this.audioDelay.Enabled = false;
        audioInput.Text = audioStreams[0].path;
    }
    else if (audioStreams.Length == 2) // both streams are defined, disable audio opening facilities
    {
        preconfigured = new bool[] { true, true };
        this.audioStreams = audioStreams;
        removeAudioTrackButton.Enabled = false;
        audioInput.Text = audioStreams[0].path;
        audioInputOpenButton.Enabled = false;
        // NOTE(review): removeAudioTrackButton is disabled twice in this branch — redundant but harmless.
        removeAudioTrackButton.Enabled = false;
        this.audioDelay.Enabled = false;
    }
    else // no audio tracks predefined
    {
        preconfigured = new bool[] { false, false };
    }
    muxedOutput.Text = output;
    this.splitSize.Text = splitSize.ToString();
    if (splitSize > 0)
    {
        enableSplit.Checked = true;
    }
    this.muxButton.Text = "Go";
    // Refresh the container list for the known input types, then try to preselect the requested one.
    updatePossibleContainers();
    if (this.containerFormat.Items.Contains(cft))
    {
        containerFormat.SelectedItem = cft;
    }
    checkIO();
}
/// <summary>
/// Separates encodable from muxable audio streams.
/// In addition to returning the encodable streams, an array of SubStreams is
/// produced (to be plugged into the muxer) containing the output names of all
/// the audio files that have to be muxed, each with a blank language.
/// </summary>
/// <param name="encodable">encodable audio streams</param>
/// <param name="muxable">muxable audio streams with the path filled out and a blank language</param>
/// <param name="muxTypes">the muxable type corresponding to each entry of <paramref name="muxable"/></param>
private void separateEncodableAndMuxableAudioStreams(out AudioStream[] encodable, out SubStream[] muxable, out MuxableType[] muxTypes)
{
    // Improperly configured jobs are discarded here; everything left is encodable.
    encodable = this.getConfiguredAudioJobs();
    int count = encodable.Length;
    muxable = new SubStream[count];
    muxTypes = new MuxableType[count];
    for (int i = 0; i < count; i++)
    {
        AudioStream job = encodable[i];
        // The encoder's output file becomes the muxer's input file.
        muxable[i].path = job.output;
        muxable[i].language = "";
        muxTypes[i] = new MuxableType(job.Type, job.settings.Codec);
    }
}
/// <summary>
/// Finds the best mux path if some of the inputs have not yet been
/// produced (they are yet to be encoded). When this is the case,
/// there is more flexibility, as some encoders can produce outputs
/// in multiple formats. This function suggests the output formats
/// they should produce as well as the mux path.
/// </summary>
/// <param name="undecidedInputs">List of encoder types for the inputs which have not yet been encoded</param>
/// <param name="decidedInputs">List of file types for the inputs which are already encoded</param>
/// <param name="containerType">Target container type</param>
/// <returns>the best mux path found by the comparer, or null if none exists</returns>
private MuxPath findBestMuxPathAndConfig(List <IEncoderType> undecidedInputs, List <MuxableType> decidedInputs, ContainerType containerType)
{
    if (undecidedInputs.Count == 0)
    {
        // Base case: every format is fixed, so just search for the shortest mux path.
        return(getShortestMuxPath(new MuxPath(decidedInputs, containerType), decidedInputs, containerType));
    }
    else
    {
        List <MuxPath> allPaths = new List <MuxPath>();
        // Take one undecided input, try each output format its encoder supports,
        // and recurse on the remainder; both lists are restored before returning
        // so the caller's collections are unchanged (backtracking).
        IEncoderType undecidedInput = undecidedInputs[0];
        undecidedInputs.RemoveAt(0);
        if (undecidedInput is VideoEncoderType)
        {
            VideoType[] allTypes = vProvider.GetSupportedOutput((VideoEncoderType)undecidedInput);
            foreach (VideoType v in allTypes)
            {
                MuxableType input = new MuxableType(v, undecidedInput.Codec);
                decidedInputs.Add(input);
                MuxPath path = findBestMuxPathAndConfig(undecidedInputs, decidedInputs, containerType);
                if (path != null)
                {
                    allPaths.Add(path);
                }
                decidedInputs.Remove(input); // undo the trial decision
            }
        }
        if (undecidedInput is AudioEncoderType)
        {
            AudioType[] allTypes = aProvider.GetSupportedOutput((AudioEncoderType)undecidedInput);
            foreach (AudioType a in allTypes)
            {
                MuxableType input = new MuxableType(a, undecidedInput.Codec);
                decidedInputs.Add(input);
                MuxPath path = findBestMuxPathAndConfig(undecidedInputs, decidedInputs, containerType);
                if (path != null)
                {
                    allPaths.Add(path);
                }
                decidedInputs.Remove(input); // undo the trial decision
            }
        }
        // Put the input back at its original position for the caller.
        undecidedInputs.Insert(0, undecidedInput);
        return(comparer.GetBestMuxPath(allPaths));
    }
}
/// <summary>
/// Step in the recursive stage which chooses, of all the MuxableTypes which
/// *could* be handled by this muxer, whether they should be. That means it
/// generates a mux path which involves muxing in each of the 2^n combinations
/// of inputs at this stage, and returns the best one.
///
/// I'm not sure if this step is actually necessary. The only possible
/// use I can think of is if you have a specific muxpath rule which says
/// that only one file can be muxed in at a time, or only some specific
/// combination of files can be muxed in at a time.
/// -- berrinam
/// </summary>
/// <param name="currentMuxPath">the (partial) mux path built so far</param>
/// <param name="muxer">the muxer under consideration for this leg</param>
/// <param name="decidedHandledTypes">types already chosen to be handled by this muxer</param>
/// <param name="undecidedPossibleHandledTypes">types this muxer could handle but not yet decided on</param>
/// <param name="unhandledInputTypes">types left for later legs</param>
/// <param name="desiredContainerType">target container type</param>
/// <returns>the best completed mux path for this subtree, or null if none exists</returns>
private MuxPath getShortestMuxPath(MuxPath currentMuxPath, IMuxing muxer, List <MuxableType> decidedHandledTypes, List <MuxableType> undecidedPossibleHandledTypes, List <MuxableType> unhandledInputTypes, ContainerType desiredContainerType)
{
    if (undecidedPossibleHandledTypes.Count == 0)
    {
        // FIX: check the pointless-leg case before allocating the leg and cloning
        // the path. A muxer invocation that handles no inputs is rejected; the
        // original built the clone first and threw it away.
        if (decidedHandledTypes.Count == 0)
        {
            return null;
        }
        // All choices for this leg are made: record the leg and recurse on the
        // remaining unhandled inputs.
        MuxPathLeg mpl = new MuxPathLeg();
        mpl.muxerInterface = muxer;
        mpl.handledInputTypes = new List <MuxableType>(decidedHandledTypes);
        mpl.unhandledInputTypes = new List <MuxableType>(unhandledInputTypes);
        MuxPath newMuxPath = currentMuxPath.Clone();
        newMuxPath.Add(mpl);
        return getShortestMuxPath(newMuxPath, unhandledInputTypes, desiredContainerType);
    }
    else
    {
        List <MuxPath> allMuxPaths = new List <MuxPath>();
        // Branch on the first undecided type: (1) this muxer handles it now,
        // or (2) it is deferred to a later leg. Both subtrees are explored and
        // the lists are restored afterwards (backtracking).
        MuxableType type = undecidedPossibleHandledTypes[0];
        undecidedPossibleHandledTypes.RemoveAt(0);

        // Branch 1: handle the type in this leg.
        decidedHandledTypes.Add(type);
        MuxPath shortestMuxPath = getShortestMuxPath(currentMuxPath, muxer, decidedHandledTypes, undecidedPossibleHandledTypes, unhandledInputTypes, desiredContainerType);
        if (shortestMuxPath != null)
        {
            allMuxPaths.Add(shortestMuxPath);
        }
        decidedHandledTypes.Remove(type);

        // Branch 2: defer the type to a later leg.
        unhandledInputTypes.Add(type);
        shortestMuxPath = getShortestMuxPath(currentMuxPath, muxer, decidedHandledTypes, undecidedPossibleHandledTypes, unhandledInputTypes, desiredContainerType);
        if (shortestMuxPath != null)
        {
            allMuxPaths.Add(shortestMuxPath);
        }
        unhandledInputTypes.Remove(type);

        // NOTE(review): the type was removed from index 0 but is re-appended at the
        // end; the list's contents are restored but not its order — confirm callers
        // are order-insensitive before changing this.
        undecidedPossibleHandledTypes.Add(type);
        return comparer.GetBestMuxPath(allMuxPaths);
    }
}
/// <summary>
/// Collects the muxable audio/subtitle types and the predefined audio encoder
/// codecs from the track controls. In minimized mode, the first
/// knownAudioTypes.Length tracks are preconfigured and contribute their codec;
/// other tracks have their type guessed from the stream's file path.
/// </summary>
/// <param name="aCodec">codecs of the preconfigured audio tracks</param>
/// <param name="audioTypes">guessed muxable types of the remaining audio tracks</param>
/// <param name="subtitleTypes">guessed muxable types of the subtitle tracks</param>
private void getTypes(out AudioEncoderType[] aCodec, out MuxableType[] audioTypes, out MuxableType[] subtitleTypes)
{
    List <MuxableType> audioTypesList = new List <MuxableType>();
    List <MuxableType> subTypesList = new List <MuxableType>();
    List <AudioEncoderType> audioCodecList = new List <AudioEncoderType>();
    int counter = 0;
    foreach (MuxStreamControl c in audioTracks)
    {
        if (minimizedMode && knownAudioTypes.Length > counter)
        {
            // FIX: the list holds AudioEncoderTypes, not MuxableTypes — extract the
            // codec from the known type (matches the sibling SubStream-based overload).
            audioCodecList.Add((AudioEncoderType)knownAudioTypes[counter].codec);
        }
        else if (c.Stream != null)
        {
            MuxableType audioType = VideoUtil.guessAudioMuxableType(c.Stream.path, true);
            if (audioType != null)
            {
                audioTypesList.Add(audioType);
            }
        }
        counter++;
    }
    foreach (MuxStreamControl c in subtitleTracks)
    {
        if (c.Stream == null)
        {
            continue; // empty subtitle slot
        }
        SubtitleType subtitleType = VideoUtil.guessSubtitleType(c.Stream.path);
        if (subtitleType != null)
        {
            subTypesList.Add(new MuxableType(subtitleType, null));
        }
    }
    audioTypes = audioTypesList.ToArray();
    subtitleTypes = subTypesList.ToArray();
    aCodec = audioCodecList.ToArray();
}
/// <summary>
/// Returns the category index of the type if this muxer supports it, otherwise -1.
/// Categories: 0 = video, 1 = audio, 2 = subtitles, 3 = chapters.
/// </summary>
/// <param name="type">the muxable type to classify</param>
/// <returns>0-3 for a supported video/audio/subtitle/chapter type, -1 if unsupported</returns>
private int getSupportedType(MuxableType type)
{
    // Video: the container format must be supported, and either any codec is
    // accepted or this particular codec is on the supported list.
    VideoType vidType = type.outputType as VideoType;
    if (vidType != null && supportedVideoTypes.Contains(vidType) &&
        (supportsAnyInputtableVideoCodec || supportedVideoCodecs.Contains((VideoCodec)type.codec)))
    {
        return 0;
    }
    // Audio: same format + codec check as video.
    AudioType audType = type.outputType as AudioType;
    if (audType != null && supportedAudioTypes.Contains(audType) &&
        (supportsAnyInputtableAudioCodec || supportedAudioCodecs.Contains((AudioCodec)type.codec)))
    {
        return 1;
    }
    // Subtitles and chapters only need a format match.
    SubtitleType subType = type.outputType as SubtitleType;
    if (subType != null && supportedSubtitleTypes.Contains(subType))
    {
        return 2;
    }
    ChapterType chapType = type.outputType as ChapterType;
    if (chapType != null && supportedChapterTypes.Contains(chapType))
    {
        return 3;
    }
    return -1;
}
/// <summary>
/// Collects the muxable audio/subtitle types and the predefined audio encoder
/// codecs from the current streams. In minimized mode, the first
/// knownAudioTypes.Length audio streams are preconfigured and contribute their
/// codec; other streams have their type guessed from the file path.
/// </summary>
/// <param name="aCodec">codecs of the preconfigured audio streams</param>
/// <param name="audioTypes">guessed muxable types of the remaining audio streams</param>
/// <param name="subtitleTypes">guessed muxable types of the subtitle streams</param>
private void getTypes(out AudioEncoderType[] aCodec, out MuxableType[] audioTypes, out MuxableType[] subtitleTypes)
{
    List <MuxableType> collectedAudioTypes = new List <MuxableType>();
    List <MuxableType> collectedSubTypes = new List <MuxableType>();
    List <AudioEncoderType> collectedCodecs = new List <AudioEncoderType>();
    int trackIndex = 0;
    foreach (SubStream audio in audioStreams)
    {
        if (minimizedMode && trackIndex < knownAudioTypes.Length)
        {
            // Preconfigured track: its codec is already known.
            collectedCodecs.Add((AudioEncoderType)knownAudioTypes[trackIndex].codec);
        }
        else
        {
            // Unknown track: guess the muxable type from the file; skip if unrecognised.
            MuxableType guessed = VideoUtil.guessAudioMuxableType(audio.path, true);
            if (guessed != null)
            {
                collectedAudioTypes.Add(guessed);
            }
        }
        trackIndex++;
    }
    foreach (SubStream sub in subtitleStreams)
    {
        SubtitleType guessedSub = VideoUtil.guessSubtitleType(sub.path);
        if (guessedSub != null)
        {
            collectedSubTypes.Add(new MuxableType(guessedSub, null));
        }
    }
    audioTypes = collectedAudioTypes.ToArray();
    subtitleTypes = collectedSubTypes.ToArray();
    aCodec = collectedCodecs.ToArray();
}
/// <summary>
/// Generates the full chain of jobs (audio encodes in parallel, then video
/// encodes, then mux jobs) for the given inputs. If a desired size is given,
/// the encoding mode may be upgraded to automated 2/3-pass and a bitrate
/// calculation is attached to the first video job.
/// </summary>
/// <param name="video">the video stream to encode</param>
/// <param name="muxedOutput">the final muxed output file</param>
/// <param name="audioStreams">audio jobs that still need encoding</param>
/// <param name="subtitles">subtitle streams to mux in</param>
/// <param name="chapters">chapter file, may be empty</param>
/// <param name="desiredSize">target output size, if any</param>
/// <param name="splitSize">output split size, if any</param>
/// <param name="container">target container type</param>
/// <param name="prerender">whether to prerender the video input</param>
/// <param name="muxOnlyAudio">already-encoded audio streams that only need muxing</param>
/// <returns>the combined job chain, or null if the video job could not be prepared</returns>
public JobChain GenerateJobSeries(VideoStream video, string muxedOutput, AudioJob[] audioStreams, MuxStream[] subtitles, string chapters, FileSize?desiredSize, FileSize?splitSize, ContainerType container, bool prerender, MuxStream[] muxOnlyAudio)
{
    StringBuilder logBuilder = new StringBuilder();
    if (desiredSize.HasValue)
    {
        logBuilder.Append("Generating jobs. Desired size: " + desiredSize.Value.ToString() + "\r\n");
        // A size target needs an automated multi-pass mode; upgrade if not already set.
        if (video.Settings.EncodingMode != 4 && video.Settings.EncodingMode != 8) // no automated 2/3 pass
        {
            if (this.mainForm.Settings.NbPasses == 2)
            {
                video.Settings.EncodingMode = 4; // automated 2 pass
            }
            else if (video.Settings.MaxNumberOfPasses == 3)
            {
                video.Settings.EncodingMode = 8; // automated 3 pass
            }
        }
    }
    else
    {
        logBuilder.Append("Generating jobs. No desired size.\r\n");
    }
    fixFileNameExtensions(video, audioStreams, container);
    string videoOutput = video.Output;
    logBuilder.Append(eliminatedDuplicateFilenames(ref videoOutput, ref muxedOutput, audioStreams));
    video.Output = videoOutput;
    JobChain vjobs = jobUtil.prepareVideoJob(video.Input, video.Output, video.Settings, video.DAR, prerender, true);
    if (vjobs == null)
    {
        return(null);
    }
    /* Here, we guess the types of the files based on extension.
     * This is guaranteed to work with MeGUI-encoded files, because
     * the extension will always be recognised. For non-MeGUI files,
     * we can only ever hope.*/
    List <MuxStream> allAudioToMux = new List <MuxStream>();
    List <MuxableType> allInputAudioTypes = new List <MuxableType>();
    // Pre-encoded audio: only include streams whose type can be guessed.
    foreach (MuxStream muxStream in muxOnlyAudio)
    {
        if (VideoUtil.guessAudioMuxableType(muxStream.path, true) != null)
        {
            allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(muxStream.path, true));
            allAudioToMux.Add(muxStream);
        }
    }
    // Audio that we will encode ourselves: type is known from the job.
    foreach (AudioJob stream in audioStreams)
    {
        allAudioToMux.Add(stream.ToMuxStream());
        allInputAudioTypes.Add(stream.ToMuxableType());
    }
    List <MuxableType> allInputSubtitleTypes = new List <MuxableType>();
    foreach (MuxStream muxStream in subtitles)
    {
        if (VideoUtil.guessSubtitleType(muxStream.path) != null)
        {
            allInputSubtitleTypes.Add(new MuxableType(VideoUtil.guessSubtitleType(muxStream.path), null));
        }
    }
    MuxableType chapterInputType = null;
    if (!String.IsNullOrEmpty(chapters))
    {
        ChapterType type = VideoUtil.guessChapterType(chapters);
        if (type != null)
        {
            chapterInputType = new MuxableType(type, null);
        }
    }
    JobChain muxJobs = this.jobUtil.GenerateMuxJobs(video, video.Framerate, allAudioToMux.ToArray(), allInputAudioTypes.ToArray(), subtitles, allInputSubtitleTypes.ToArray(), chapters, chapterInputType, container, muxedOutput, splitSize, true);
    // (removed long-dead commented-out dependency-wiring and direct bitrate-calculation code)
    if (desiredSize.HasValue)
    {
        // Attach the bitrate calculation to the first video job; it needs the
        // audio files and container to work out the available video size.
        BitrateCalculationInfo b = new BitrateCalculationInfo();
        List <string> audiofiles = new List <string>();
        foreach (MuxStream s in allAudioToMux)
        {
            audiofiles.Add(s.path);
        }
        b.AudioFiles = audiofiles;
        b.Container = container;
        b.VideoJobs = new List <TaggedJob>(vjobs.Jobs);
        b.DesiredSize = desiredSize.Value;
        ((VideoJob)vjobs.Jobs[0].Job).BitrateCalculationInfo = b;
    }
    mainForm.addToLog(logBuilder.ToString());
    // Audio encodes run in parallel, then the video jobs, then the mux jobs.
    return (new SequentialChain(
        new ParallelChain((Job[])audioStreams),
        new SequentialChain(vjobs),
        new SequentialChain(muxJobs)));
}
/// <summary>
/// Generates the chain of mux jobs needed to combine the given video, audio,
/// subtitle, chapter and device inputs into the target container, following
/// the mux path supplied by the MuxProvider. Intermediate legs write numbered
/// temp files which are scheduled for deletion by the following job.
/// </summary>
/// <param name="video">the video stream (already encoded or to be encoded)</param>
/// <param name="framerate">framerate to pass to the muxer, may be null</param>
/// <param name="audioStreamsArray">audio streams to mux</param>
/// <param name="audioTypes">muxable types of the audio streams</param>
/// <param name="subtitleStreamsArray">subtitle streams to mux</param>
/// <param name="subTypes">muxable types of the subtitle streams</param>
/// <param name="chapterFile">chapter file, may be empty</param>
/// <param name="chapterInputType">muxable type of the chapter file, may be null</param>
/// <param name="container">target container type</param>
/// <param name="output">final output file name</param>
/// <param name="splitSize">split size for the final output, may be null</param>
/// <param name="inputsToDelete">input files to delete once they have been muxed</param>
/// <param name="deviceType">target device type, may be empty</param>
/// <param name="deviceOutputType">muxable type of the device output, may be null</param>
/// <param name="alwaysMuxOutput">forces a mux even when the container would not require one</param>
/// <returns>the sequential chain of mux jobs</returns>
public JobChain GenerateMuxJobs(VideoStream video, decimal?framerate, MuxStream[] audioStreamsArray, MuxableType[] audioTypes, MuxStream[] subtitleStreamsArray, MuxableType[] subTypes, string chapterFile, MuxableType chapterInputType, ContainerType container, string output, FileSize?splitSize, List <string> inputsToDelete, string deviceType, MuxableType deviceOutputType, bool alwaysMuxOutput)
{
    Debug.Assert(splitSize == null || splitSize.Value != FileSize.Empty);
    MuxProvider prov = mainForm.MuxProvider;
    List <MuxableType> allTypes = new List <MuxableType>();
    List <MuxableType> tempTypes = new List <MuxableType>();
    List <MuxableType> duplicateTypes = new List <MuxableType>();
    tempTypes.AddRange(audioTypes);
    tempTypes.AddRange(subTypes);
    allTypes.Add(video.VideoType);
    // remove duplicate entries (same output type ID) to speed up the mux path search;
    // they are re-attached to the found path afterwards
    foreach (MuxableType oType in tempTypes)
    {
        bool bFound = false;
        foreach (MuxableType oAllType in allTypes)
        {
            if (oType.outputType.ID.Equals(oAllType.outputType.ID))
            {
                bFound = true;
                break;
            }
        }
        if (!bFound)
        {
            allTypes.Add(oType);
        }
        else
        {
            duplicateTypes.Add(oType);
        }
    }
    if (chapterInputType != null)
    {
        allTypes.Add(chapterInputType);
    }
    if (deviceOutputType != null)
    {
        allTypes.Add(deviceOutputType);
    }
    // get mux path
    MuxPath muxPath = prov.GetMuxPath(container, alwaysMuxOutput || splitSize.HasValue, allTypes.ToArray());
    // add duplicate entries back into the mux path: each duplicate is assigned to
    // the last leg that handles its output type
    muxPath.InitialInputTypes.AddRange(duplicateTypes);
    while (duplicateTypes.Count > 0)
    {
        int iPath = 0;
        for (int i = 0; i < muxPath.Length; i++)
        {
            foreach (MuxableType oType in muxPath[i].handledInputTypes)
            {
                if (oType.outputType.ID.Equals(duplicateTypes[0].outputType.ID))
                {
                    iPath = i;
                }
            }
        }
        muxPath[iPath].handledInputTypes.Add(duplicateTypes[0]);
        duplicateTypes.RemoveAt(0);
    }
    List <MuxJob> jobs = new List <MuxJob>();
    // working copies: streams are removed as they are consumed by a leg
    List <MuxStream> subtitleStreams = new List <MuxStream>(subtitleStreamsArray);
    List <MuxStream> audioStreams = new List <MuxStream>(audioStreamsArray);
    int index = 0;
    int tempNumber = 1;
    string previousOutput = null;
    foreach (MuxPathLeg mpl in muxPath)
    {
        List <string> filesToDeleteThisJob = new List <string>();
        MuxJob mjob = new MuxJob();
        if (previousOutput != null)
        {
            // chain legs: the previous leg's output is this leg's input, and is temporary
            mjob.Settings.MuxedInput = previousOutput;
            filesToDeleteThisJob.Add(previousOutput);
        }
        if (video.Settings != null)
        {
            mjob.NbOfBFrames = video.Settings.NbBframes;
            mjob.Codec = video.Settings.Codec.ToString();
            mjob.Settings.VideoName = video.Settings.VideoName;
        }
        mjob.NbOfFrames = video.NumberOfFrames;
        string fpsFormated = String.Format("{0:##.###}", framerate); // this formating is required for mkvmerge at least to avoid fps rounding error
        mjob.Settings.Framerate = Convert.ToDecimal(fpsFormated);
        string tempOutputName = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output) + tempNumber + ".");
        tempNumber++;
        // assign each input type this leg handles to a concrete stream/file
        foreach (MuxableType o in mpl.handledInputTypes)
        {
            if (o.outputType is VideoType)
            {
                mjob.Settings.VideoInput = video.Output;
                if (inputsToDelete.Contains(video.Output))
                {
                    filesToDeleteThisJob.Add(video.Output);
                }
                mjob.Settings.DAR = video.DAR;
            }
            else if (o.outputType is AudioType)
            {
                // take the first remaining audio stream whose guessed type matches
                MuxStream stream = audioStreams.Find(delegate(MuxStream m) { return(VideoUtil.guessAudioType(m.path) == o.outputType); });
                if (stream != null)
                {
                    mjob.Settings.AudioStreams.Add(stream);
                    audioStreams.Remove(stream);
                    if (inputsToDelete.Contains(stream.path))
                    {
                        filesToDeleteThisJob.Add(stream.path);
                    }
                }
            }
            else if (o.outputType is SubtitleType)
            {
                MuxStream stream = subtitleStreams.Find(delegate(MuxStream m) { return(VideoUtil.guessSubtitleType(m.path) == o.outputType); });
                if (stream != null)
                {
                    mjob.Settings.SubtitleStreams.Add(stream);
                    subtitleStreams.Remove(stream);
                    if (inputsToDelete.Contains(stream.path))
                    {
                        filesToDeleteThisJob.Add(stream.path);
                    }
                }
            }
            else if (o.outputType is ChapterType)
            {
                if ((VideoUtil.guessChapterType(chapterFile) == o.outputType))
                {
                    mjob.Settings.ChapterFile = chapterFile;
                }
                if (inputsToDelete.Contains(chapterFile))
                {
                    filesToDeleteThisJob.Add(chapterFile);
                }
            }
            else if (o.outputType is DeviceType)
            {
                if ((VideoUtil.guessDeviceType(deviceType) == o.outputType))
                {
                    mjob.Settings.DeviceType = deviceType;
                }
            }
        }
        // streams already claimed by this job must not be offered to later legs
        foreach (MuxStream s in mjob.Settings.AudioStreams)
        {
            audioStreams.Remove(s);
        }
        foreach (MuxStream s in mjob.Settings.SubtitleStreams)
        {
            subtitleStreams.Remove(s);
        }
        mjob.FilesToDelete.AddRange(filesToDeleteThisJob);
        if (index == muxPath.Length - 1)
        {
            // last leg writes the real output with the requested split size
            mjob.Settings.MuxedOutput = output;
            mjob.Settings.SplitSize = splitSize;
            mjob.Settings.DAR = video.DAR;
            mjob.ContainerType = container;
        }
        else
        {
            // intermediate leg: pick a container both this muxer and the next can handle
            ContainerType cot = mpl.muxerInterface.GetContainersInCommon(muxPath[index + 1].muxerInterface)[0];
            mjob.Settings.MuxedOutput = tempOutputName + cot.Extension;
            mjob.ContainerType = cot;
        }
        previousOutput = mjob.Settings.MuxedOutput;
        index++;
        jobs.Add(mjob);
        if (string.IsNullOrEmpty(mjob.Settings.VideoInput))
        {
            mjob.Input = mjob.Settings.MuxedInput;
        }
        else
        {
            mjob.Input = mjob.Settings.VideoInput;
        }
        mjob.Output = mjob.Settings.MuxedOutput;
        mjob.MuxType = mpl.muxerInterface.MuxerType;
    }
    return(new SequentialChain(jobs.ToArray()));
}
/// <summary>
/// Generates the chain of mux jobs needed to combine the given video, audio,
/// subtitle and chapter inputs into the target container, following the mux
/// path supplied by the MuxProvider. Intermediate legs write numbered temp
/// files which are scheduled for deletion by the following job.
/// </summary>
/// <param name="video">the video stream</param>
/// <param name="framerate">framerate to pass to the muxer, may be null</param>
/// <param name="audioStreamsArray">audio streams to mux</param>
/// <param name="audioTypes">muxable types of the audio streams</param>
/// <param name="subtitleStreamsArray">subtitle streams to mux</param>
/// <param name="subTypes">muxable types of the subtitle streams</param>
/// <param name="chapterFile">chapter file, may be empty</param>
/// <param name="chapterInputType">muxable type of the chapter file, may be null</param>
/// <param name="container">target container type</param>
/// <param name="output">final output file name</param>
/// <param name="splitSize">split size for the final output, may be null</param>
/// <param name="inputsToDelete">input files to delete once they have been muxed</param>
/// <returns>the sequential chain of mux jobs</returns>
public JobChain GenerateMuxJobs(VideoStream video, decimal?framerate, MuxStream[] audioStreamsArray, MuxableType[] audioTypes, MuxStream[] subtitleStreamsArray, MuxableType[] subTypes, string chapterFile, MuxableType chapterInputType, ContainerType container, string output, FileSize?splitSize, List <string> inputsToDelete)
{
    Debug.Assert(splitSize == null || splitSize.Value != FileSize.Empty);
    MuxProvider prov = mainForm.MuxProvider;
    List <MuxableType> allTypes = new List <MuxableType>();
    allTypes.Add(video.VideoType);
    allTypes.AddRange(audioTypes);
    allTypes.AddRange(subTypes);
    if (chapterInputType != null)
    {
        allTypes.Add(chapterInputType);
    }
    MuxPath muxPath = prov.GetMuxPath(container, allTypes.ToArray());
    List <MuxJob> jobs = new List <MuxJob>();
    // working copies: streams are removed as they are consumed by a leg
    List <MuxStream> subtitleStreams = new List <MuxStream>(subtitleStreamsArray);
    List <MuxStream> audioStreams = new List <MuxStream>(audioStreamsArray);
    int index = 0;
    int tempNumber = 1;
    string previousOutput = null;
    foreach (MuxPathLeg mpl in muxPath)
    {
        List <string> filesToDeleteThisJob = new List <string>();
        MuxJob mjob = new MuxJob();
        if (previousOutput != null)
        {
            // chain legs: the previous leg's output is this leg's input, and is temporary
            mjob.Settings.MuxedInput = previousOutput;
            filesToDeleteThisJob.Add(previousOutput);
        }
        mjob.NbOfFrames = video.NumberOfFrames;
        mjob.NbOfBFrames = video.Settings.NbBframes;
        mjob.Codec = video.Settings.Codec.ToString();
        mjob.Settings.Framerate = framerate;
        string tempOutputName = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output) + tempNumber + ".");
        tempNumber++;
        // assign each input type this leg handles to a concrete stream/file
        foreach (MuxableType o in mpl.handledInputTypes)
        {
            if (o.outputType is VideoType)
            {
                mjob.Settings.VideoInput = video.Output;
                if (inputsToDelete.Contains(video.Output))
                {
                    filesToDeleteThisJob.Add(video.Output);
                }
                mjob.Settings.DAR = video.DAR;
            }
            else if (o.outputType is AudioType)
            {
                // take the first remaining audio stream whose guessed type matches
                MuxStream stream = audioStreams.Find(delegate(MuxStream m) { return(VideoUtil.guessAudioType(m.path) == o.outputType); });
                if (stream != null)
                {
                    mjob.Settings.AudioStreams.Add(stream);
                    audioStreams.Remove(stream);
                    if (inputsToDelete.Contains(stream.path))
                    {
                        filesToDeleteThisJob.Add(stream.path);
                    }
                }
            }
            else if (o.outputType is SubtitleType)
            {
                MuxStream stream = subtitleStreams.Find(delegate(MuxStream m) { return(VideoUtil.guessSubtitleType(m.path) == o.outputType); });
                if (stream != null)
                {
                    mjob.Settings.SubtitleStreams.Add(stream);
                    subtitleStreams.Remove(stream);
                    if (inputsToDelete.Contains(stream.path))
                    {
                        filesToDeleteThisJob.Add(stream.path);
                    }
                }
            }
            else if (o.outputType is ChapterType)
            {
                if ((VideoUtil.guessChapterType(chapterFile) == o.outputType))
                {
                    mjob.Settings.ChapterFile = chapterFile;
                }
                if (inputsToDelete.Contains(chapterFile))
                {
                    filesToDeleteThisJob.Add(chapterFile);
                }
            }
        }
        // streams already claimed by this job must not be offered to later legs
        foreach (MuxStream s in mjob.Settings.AudioStreams)
        {
            audioStreams.Remove(s);
        }
        foreach (MuxStream s in mjob.Settings.SubtitleStreams)
        {
            subtitleStreams.Remove(s);
        }
        mjob.FilesToDelete.AddRange(filesToDeleteThisJob);
        if (index == muxPath.Length - 1)
        {
            // last leg writes the real output with the requested split size
            mjob.Settings.MuxedOutput = output;
            mjob.Settings.SplitSize = splitSize;
            mjob.Settings.DAR = video.DAR;
            mjob.ContainerType = container;
        }
        else
        {
            // intermediate leg: pick a container both this muxer and the next can handle
            ContainerType cot = mpl.muxerInterface.GetContainersInCommon(muxPath[index + 1].muxerInterface)[0];
            mjob.Settings.MuxedOutput = tempOutputName + cot.Extension;
            mjob.ContainerType = cot;
        }
        previousOutput = mjob.Settings.MuxedOutput;
        index++;
        jobs.Add(mjob);
        if (string.IsNullOrEmpty(mjob.Settings.VideoInput))
        {
            mjob.Input = mjob.Settings.MuxedInput;
        }
        else
        {
            mjob.Input = mjob.Settings.VideoInput;
        }
        mjob.Output = mjob.Settings.MuxedOutput;
        mjob.MuxType = mpl.muxerInterface.MuxerType;
    }
    return(new SequentialChain(jobs.ToArray()));
}
/// <summary>
/// Generates the full chain of jobs (audio encodes, optional video encodes,
/// then mux jobs) for the given inputs. If videoFileToMux is set, the video is
/// an already-encoded file and no video jobs are created; if audioTracks is
/// non-null the method runs in OneClick mode, otherwise in AutoEncode mode.
/// </summary>
/// <param name="video">the video stream to encode (or whose metadata to use when muxing a file directly)</param>
/// <param name="muxedOutput">the final muxed output file</param>
/// <param name="audioStreams">audio jobs that still need encoding (AutoEncode mode)</param>
/// <param name="subtitles">subtitle streams to mux in</param>
/// <param name="chapters">chapter file, may be empty</param>
/// <param name="desiredSize">target output size, if any</param>
/// <param name="splitSize">output split size, if any</param>
/// <param name="container">target container type</param>
/// <param name="prerender">whether to prerender the video input</param>
/// <param name="muxOnlyAudio">already-encoded audio streams that only need muxing (AutoEncode mode)</param>
/// <param name="log">log item the filename-deduplication report is added to</param>
/// <param name="deviceType">target device type, may be empty</param>
/// <param name="zones">encoding zones for the video job</param>
/// <param name="videoFileToMux">pre-encoded video file to mux directly, or empty to encode</param>
/// <param name="audioTracks">OneClick audio tracks, or null for AutoEncode mode</param>
/// <returns>the combined job chain, or null if the video job could not be prepared</returns>
public JobChain GenerateJobSeries(VideoStream video, string muxedOutput, AudioJob[] audioStreams, MuxStream[] subtitles, string chapters, FileSize?desiredSize, FileSize?splitSize, ContainerType container, bool prerender, MuxStream[] muxOnlyAudio, LogItem log, string deviceType, Zone[] zones, string videoFileToMux, OneClickAudioTrack[] audioTracks)
{
    // A size target only matters when we actually encode the video ourselves.
    if (desiredSize.HasValue && String.IsNullOrEmpty(videoFileToMux))
    {
        if (video.Settings.EncodingMode != 4 && video.Settings.EncodingMode != 8) // no automated 2/3 pass
        {
            if (this.mainForm.Settings.NbPasses == 2)
            {
                video.Settings.EncodingMode = 4; // automated 2 pass
            }
            else if (video.Settings.MaxNumberOfPasses == 3)
            {
                video.Settings.EncodingMode = 8; // automated 3 pass
            }
        }
    }
    fixFileNameExtensions(video, audioStreams, container);
    string videoOutput = video.Output;
    log.Add(eliminatedDuplicateFilenames(ref videoOutput, ref muxedOutput, audioStreams));
    JobChain vjobs = null;
    if (!String.IsNullOrEmpty(videoFileToMux))
    {
        // Pre-encoded video: mux it directly, no video jobs needed.
        video.Output = videoFileToMux;
    }
    else
    {
        video.Output = videoOutput;
        vjobs = jobUtil.prepareVideoJob(video.Input, video.Output, video.Settings, video.DAR, prerender, true, zones);
        if (vjobs == null)
        {
            return(null);
        }
    }
    /* Here, we guess the types of the files based on extension.
     * This is guaranteed to work with MeGUI-encoded files, because
     * the extension will always be recognised. For non-MeGUI files,
     * we can only ever hope.*/
    List <MuxStream> allAudioToMux = new List <MuxStream>();
    List <MuxableType> allInputAudioTypes = new List <MuxableType>();
    if (audioTracks != null)
    {
        // OneClick mode: tracks may carry a direct-mux stream, an encode job, or both.
        foreach (OneClickAudioTrack ocAudioTrack in audioTracks)
        {
            if (ocAudioTrack.DirectMuxAudio != null)
            {
                // only include streams whose type can be guessed from the extension
                if (VideoUtil.guessAudioMuxableType(ocAudioTrack.DirectMuxAudio.path, true) != null)
                {
                    allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(ocAudioTrack.DirectMuxAudio.path, true));
                    allAudioToMux.Add(ocAudioTrack.DirectMuxAudio);
                }
            }
            if (ocAudioTrack.AudioJob != null)
            {
                allAudioToMux.Add(ocAudioTrack.AudioJob.ToMuxStream());
                allInputAudioTypes.Add(ocAudioTrack.AudioJob.ToMuxableType());
            }
        }
    }
    else
    {
        // AutoEncode mode: encoded jobs plus direct-mux streams.
        foreach (AudioJob stream in audioStreams)
        {
            allAudioToMux.Add(stream.ToMuxStream());
            allInputAudioTypes.Add(stream.ToMuxableType());
        }
        foreach (MuxStream muxStream in muxOnlyAudio)
        {
            if (VideoUtil.guessAudioMuxableType(muxStream.path, true) != null)
            {
                allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(muxStream.path, true));
                allAudioToMux.Add(muxStream);
            }
        }
    }
    List <MuxableType> allInputSubtitleTypes = new List <MuxableType>();
    foreach (MuxStream muxStream in subtitles)
    {
        if (VideoUtil.guessSubtitleType(muxStream.path) != null)
        {
            allInputSubtitleTypes.Add(new MuxableType(VideoUtil.guessSubtitleType(muxStream.path), null));
        }
    }
    MuxableType chapterInputType = null;
    if (!String.IsNullOrEmpty(chapters))
    {
        ChapterType type = VideoUtil.guessChapterType(chapters);
        if (type != null)
        {
            chapterInputType = new MuxableType(type, null);
        }
    }
    MuxableType deviceOutputType = null;
    if (!String.IsNullOrEmpty(deviceType))
    {
        DeviceType type = VideoUtil.guessDeviceType(deviceType);
        if (type != null)
        {
            deviceOutputType = new MuxableType(type, null);
        }
    }
    // Intermediate encoder outputs are deleted after muxing.
    List <string> inputsToDelete = new List <string>();
    if (String.IsNullOrEmpty(videoFileToMux))
    {
        inputsToDelete.Add(video.Output);
    }
    inputsToDelete.AddRange(Array.ConvertAll <AudioJob, string>(audioStreams, delegate(AudioJob a) { return(a.Output); }));
    JobChain muxJobs = jobUtil.GenerateMuxJobs(video, video.Framerate, allAudioToMux.ToArray(), allInputAudioTypes.ToArray(), subtitles, allInputSubtitleTypes.ToArray(), chapters, chapterInputType, container, muxedOutput, splitSize, inputsToDelete, deviceType, deviceOutputType);
    if (desiredSize.HasValue && String.IsNullOrEmpty(videoFileToMux))
    {
        // Attach the bitrate calculation to the first video job; it needs the
        // audio files and container to work out the available video size.
        BitrateCalculationInfo b = new BitrateCalculationInfo();
        List <string> audiofiles = new List <string>();
        foreach (MuxStream s in allAudioToMux)
        {
            audiofiles.Add(s.path);
        }
        b.AudioFiles = audiofiles;
        b.Container = container;
        b.VideoJobs = new List <TaggedJob>(vjobs.Jobs);
        b.DesiredSize = desiredSize.Value;
        ((VideoJob)vjobs.Jobs[0].Job).BitrateCalculationInfo = b;
    }
    if (!String.IsNullOrEmpty(videoFileToMux))
    {
        // No video jobs: only audio encodes followed by the mux jobs.
        return(new SequentialChain(new SequentialChain((Job[])audioStreams), new SequentialChain(muxJobs)));
    }
    else
    {
        return(new SequentialChain(
                   new SequentialChain((Job[])audioStreams),
                   new SequentialChain(vjobs),
                   new SequentialChain(muxJobs)));
    }
}