Example No. 1
        public void postprocess()
        {
            audioFiles = vUtil.getAllDemuxedAudio(job.Output, 8);

            fillInAudioInformation();


            logBuilder.Append("Desired size of this automated encoding series: " + job.PostprocessingProperties.OutputSize
                              + " split size: " + job.PostprocessingProperties.Splitting + "\r\n");
            VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;

            string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output),
                                              Path.GetFileNameWithoutExtension(job.Output) + "_Video");
            string muxedOutput = job.PostprocessingProperties.FinalOutput;

            //Open the video
            Dar?   dar;
            string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR,
                                          job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, logBuilder,
                                          job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar);

            VideoStream myVideo = new VideoStream();
            ulong       length;
            double      framerate;

            JobUtil.getInputProperties(out length, out framerate, videoInput);
            myVideo.Input          = videoInput;
            myVideo.Output         = videoOutput;
            myVideo.NumberOfFrames = length;
            myVideo.Framerate      = (decimal)framerate;
            myVideo.DAR            = dar;
            myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
            myVideo.Settings       = videoSettings;
            List <string> intermediateFiles = new List <string>();

            intermediateFiles.Add(videoInput);
            intermediateFiles.Add(job.Output);
            intermediateFiles.AddRange(audioFiles.Values);
            if (!string.IsNullOrEmpty(videoInput))
            {
                //Create empty subtitles for muxing (subtitles not supported in one click mode)
                MuxStream[] subtitles = new MuxStream[0];
                JobChain    c         = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles,
                                                                job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                                                                job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                                false, job.PostprocessingProperties.DirectMuxAudio);

                /*                    vUtil.generateJobSeries(videoInput, videoOutput, muxedOutput, videoSettings,
                 *                      audioStreams, audio, subtitles, job.PostprocessingProperties.ChapterFile,
                 *                      job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.SplitSize,
                 *                      containerOverhead, type, new string[] { job.Output, videoInput });*/
                c = CleanupJob.AddAfter(c, intermediateFiles);
                mainForm.Jobs.addJobsWithDependencies(c);
            }
            mainForm.addToLog(logBuilder.ToString());
        }
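
The myVideo.VideoType assignment above indexes the first element returned by GetSupportedOutput without checking that any exist. As a minimal hedged sketch (reusing only identifiers from this example, and assuming, as the [0] indexing suggests, that an array is returned), that step could be guarded like this:

            // Sketch only: guard the "first supported output" lookup; the abort path is hypothetical.
            var supportedOutputs = new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType);
            if (supportedOutputs == null || supportedOutputs.Length == 0)
            {
                logBuilder.Append("No muxable output type found for the selected video encoder.\r\n");
                mainForm.addToLog(logBuilder.ToString());
                return; // nothing sensible to queue
            }
            myVideo.VideoType = new MuxableType(supportedOutputs[0], videoSettings.Codec);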
Example No. 2
        /// <summary>
        /// Handles the Go button for automated encoding.
        /// Checks whether we are in automated 2-pass video mode and that the Snow codec is not in use,
        /// then checks the video and audio configuration. If everything checks out,
        /// the audio job, video jobs and muxing job are generated, the audio and video jobs are linked,
        /// and encoding is started.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void queueButton_Click(object sender, System.EventArgs e)
        {
            if (!string.IsNullOrEmpty(this.muxedOutput.Text))
            {
                FileSize? desiredSize = targetSize.Value;
                FileSize? splitSize   = splitting.Value;

                logBuilder.AppendLine("Desired size of this job series: " + (Util.ToStringOrNull(desiredSize) ?? "N/A"));
                logBuilder.AppendLine("Split size of this job series: " + (Util.ToStringOrNull(splitSize) ?? "N/A"));

                MuxStream[]        audio;
                AudioJob[]         aStreams;
                AudioEncoderType[] muxTypes;
                separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);
                MuxStream[]   subtitles   = new MuxStream[0];
                string        chapters    = "";
                string        videoInput  = mainForm.Video.Info.VideoInput;
                string        videoOutput = mainForm.Video.Info.VideoOutput;
                string        muxedOutput = this.muxedOutput.Text;
                ContainerType cot         = this.container.SelectedItem as ContainerType;
                if (addSubsNChapters.Checked)
                {
                    AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm);
                    amw.setMinimizedMode(videoOutput, videoStream.Settings.EncoderType, jobUtil.getFramerate(videoInput), audio,
                                         muxTypes, muxedOutput, splitSize, cot);
                    if (amw.ShowDialog() == DialogResult.OK)
                    {
                        amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
                    }
                    else // user aborted, abort the whole process
                    {
                        return;
                    }
                }
                removeStreamsToBeEncoded(ref audio, aStreams);
                mainForm.Jobs.addJobsWithDependencies(vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams, subtitles, chapters,
                                                                              desiredSize, splitSize, cot, this.prerender, audio));
                mainForm.addToLog(logBuilder.ToString());
                this.Close();
            }
        }
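
The handler above only guards against an empty output file name; the container selection and video source it uses further down are taken on trust. A hypothetical pre-flight helper (a sketch, not part of MeGUI, touching only members that appear in the example) could collect those checks in one place:

            // Hypothetical pre-flight check for queueButton_Click (sketch, not MeGUI code).
            private bool readyToQueue()
            {
                if (string.IsNullOrEmpty(this.muxedOutput.Text))
                    return false; // no output file chosen
                if (!(this.container.SelectedItem is ContainerType))
                    return false; // no target container selected
                if (string.IsNullOrEmpty(mainForm.Video.Info.VideoInput))
                    return false; // no video source configured
                return true;
            }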
Example No. 3
        public JobChain GenerateJobSeries(VideoStream video, string muxedOutput, AudioJob[] audioStreams,
                                          MuxStream[] subtitles, string chapters, FileSize? desiredSize, FileSize? splitSize, ContainerType container, bool prerender, MuxStream[] muxOnlyAudio)
        {
            StringBuilder logBuilder = new StringBuilder();

            if (desiredSize.HasValue)
            {
                logBuilder.Append("Generating jobs. Desired size: " + desiredSize.Value.ToString() + "\r\n");
                if (video.Settings.EncodingMode != 4 && video.Settings.EncodingMode != 8) // no automated 2/3 pass
                {
                    if (this.mainForm.Settings.NbPasses == 2)
                    {
                        video.Settings.EncodingMode = 4; // automated 2 pass
                    }
                    else if (video.Settings.MaxNumberOfPasses == 3)
                    {
                        video.Settings.EncodingMode = 8;
                    }
                }
            }
            else
            {
                logBuilder.Append("Generating jobs. No desired size.\r\n");
            }

            fixFileNameExtensions(video, audioStreams, container);
            string videoOutput = video.Output;

            logBuilder.Append(eliminatedDuplicateFilenames(ref videoOutput, ref muxedOutput, audioStreams));
            video.Output = videoOutput;

            JobChain vjobs = jobUtil.prepareVideoJob(video.Input, video.Output, video.Settings, video.DAR, prerender, true);

            if (vjobs == null)
            {
                return null;
            }

            /* Here, we guess the types of the files based on extension.
             * This is guaranteed to work with MeGUI-encoded files, because
             * the extension will always be recognised. For non-MeGUI files,
             * we can only ever hope.*/
            List <MuxStream>   allAudioToMux      = new List <MuxStream>();
            List <MuxableType> allInputAudioTypes = new List <MuxableType>();

            foreach (MuxStream muxStream in muxOnlyAudio)
            {
                MuxableType audioType = VideoUtil.guessAudioMuxableType(muxStream.path, true);
                if (audioType != null)
                {
                    allInputAudioTypes.Add(audioType);
                    allAudioToMux.Add(muxStream);
                }
            }

            foreach (AudioJob stream in audioStreams)
            {
                allAudioToMux.Add(stream.ToMuxStream());
                allInputAudioTypes.Add(stream.ToMuxableType());
            }


            List <MuxableType> allInputSubtitleTypes = new List <MuxableType>();

            foreach (MuxStream muxStream in subtitles)
            {
                if (VideoUtil.guessSubtitleType(muxStream.path) != null)
                {
                    allInputSubtitleTypes.Add(new MuxableType(VideoUtil.guessSubtitleType(muxStream.path), null));
                }
            }

            MuxableType chapterInputType = null;

            if (!String.IsNullOrEmpty(chapters))
            {
                ChapterType type = VideoUtil.guessChapterType(chapters);
                if (type != null)
                {
                    chapterInputType = new MuxableType(type, null);
                }
            }

            JobChain muxJobs = this.jobUtil.GenerateMuxJobs(video, video.Framerate, allAudioToMux.ToArray(), allInputAudioTypes.ToArray(),
                                                            subtitles, allInputSubtitleTypes.ToArray(), chapters, chapterInputType, container, muxedOutput, splitSize, true);



            /*                foreach (Job mJob in muxJobs)
             *                  foreach (Job job in jobs)
             *                      mJob.AddDependency(job);*/


            /*
             * foreach (VideoJob job in vjobs)
             * {
             *  jobs.Add(job);
             * }
             * foreach (MuxJob job in muxJobs)
             * {
             *  jobs.Add(job);
             * }
             */

            if (desiredSize.HasValue)
            {
                /*                    if (encodedAudioPresent) // no audio encoding, we can calculate the video bitrate directly
                 *                  {
                 *                      logBuilder.Append("No audio encoding. Calculating desired video bitrate directly.\r\n");
                 *                      List<AudioStream> calculationAudioStreams = new List<AudioStream>();
                 *                      foreach (SubStream stream in muxOnlyAudio)
                 *                      {
                 *                          FileInfo fi = new FileInfo(stream.path);
                 *                          AudioStream newStream = new AudioStream();
                 *                          newStream.SizeBytes = fi.Length;
                 *                          newStream.Type = guessAudioType(stream.path);
                 *                          newStream.BitrateMode = BitrateManagementMode.VBR;
                 *                          calculationAudioStreams.Add(newStream);
                 *                          logBuilder.Append("Encoded audio file is present: " + stream.path +
                 *                              " It has a size of " + fi.Length + " bytes. \r\n");
                 *                      }
                 *
                 *                      long videoSizeKB;
                 *                      bool useBframes = false;
                 *                      if (video.Settings.NbBframes > 0)
                 *                          useBframes = true;
                 *
                 *                      bitrateKBits = calc.CalculateBitrateKBits(video.Settings.Codec, useBframes, container, calculationAudioStreams.ToArray(),
                 *                          desiredSizeBytes, video.NumberOfFrames, video.Framerate, out videoSizeKB);
                 *                      desiredSizeBytes = (long)videoSizeKB * 1024L; // convert kb back to bytes
                 *                      logBuilder.Append("Setting video bitrate for the video jobs to " + bitrateKBits + " kbit/s\r\n");
                 *                      foreach (VideoJob vJob in vjobs)
                 *                      {
                 *                          jobUtil.updateVideoBitrate(vJob, bitrateKBits);
                 *                      }
                 *                  }*/
                BitrateCalculationInfo b = new BitrateCalculationInfo();

                List <string> audiofiles = new List <string>();
                foreach (MuxStream s in allAudioToMux)
                {
                    audiofiles.Add(s.path);
                }
                b.AudioFiles = audiofiles;

                b.Container   = container;
                b.VideoJobs   = new List <TaggedJob>(vjobs.Jobs);
                b.DesiredSize = desiredSize.Value;
                ((VideoJob)vjobs.Jobs[0].Job).BitrateCalculationInfo = b;
            }
            mainForm.addToLog(logBuilder.ToString());


            return new SequentialChain(
                new ParallelChain((Job[])audioStreams),
                new SequentialChain(vjobs),
                new SequentialChain(muxJobs));
        }
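
The block comment in the middle of this example notes that input types are guessed purely from the file extension: reliable for MeGUI-encoded files, best-effort for everything else. A standalone sketch of that idea (not MeGUI's actual VideoUtil implementation; the extension table is illustrative) could look like this:

        using System;
        using System.Collections.Generic;
        using System.IO;

        static class ExtensionGuesser
        {
            // Illustrative extension table; real detection covers far more formats.
            static readonly Dictionary<string, string> knownAudio =
                new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
                {
                    { ".ac3", "AC-3" }, { ".aac", "AAC" }, { ".mp3", "MP3" }, { ".dts", "DTS" }
                };

            // Returns a codec label for a recognised extension, or null when the type
            // cannot be guessed (the "we can only ever hope" case from the comment above).
            public static string GuessAudioType(string path)
            {
                if (string.IsNullOrEmpty(path))
                    return null;

                string type;
                return knownAudio.TryGetValue(Path.GetExtension(path), out type) ? type : null;
            }
        }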