public VobSubIndexWindow(MainForm mainForm)
 {
     InitializeComponent();
     this.mainForm = mainForm;
     this.vUtil = new VideoUtil(mainForm);
     this.jobUtil = new JobUtil(mainForm);
 }
        /// <summary>
        /// compiles final bitrate statistics
        /// </summary>
        private void compileFinalStats()
        {
            try
            {
                if (!string.IsNullOrEmpty(job.Output) && File.Exists(job.Output))
                {
                    FileInfo fi   = new FileInfo(job.Output);
                    long     size = fi.Length; // size in bytes

                    ulong  framecount;
                    double framerate;
                    JobUtil.getInputProperties(out framecount, out framerate, job.Input);

                    double numberOfSeconds = (double)framecount / framerate;
                    long   bitrate         = (long)((double)(size * 8.0) / (numberOfSeconds * 1000.0));
                    if (job.Settings.EncodingMode != 1)
                    {
                        log.Append("desired video bitrate of this job: " + job.Settings.BitrateQuantizer + " kbit/s - obtained video bitrate (approximate): " + bitrate + " kbit/s");
                    }
                    else
                    {
                        log.Append("This is a CQ job so there's no desired bitrate. Obtained video bitrate: " + bitrate + " kbit/s");
                    }
                }
            }
            catch (Exception e)
            {
                log.Append("Exception in compileFinalStats. Message: " + e.Message + " stacktrace: " + e.StackTrace);
            }
        }
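The math above is compact: clip duration is framecount / framerate, and the average bitrate in kbit/s is size-in-bytes * 8 / (duration * 1000). A minimal standalone sketch of the same calculation (the helper name and signature are illustrative, not MeGUI API):

        // Approximate average bitrate in kbit/s of an encoded file, given the
        // clip's frame count and framerate. Requires: using System.IO;
        static long ApproximateBitrateKbps(string outputFile, ulong frameCount, double framerate)
        {
            long   sizeInBytes     = new FileInfo(outputFile).Length;
            double numberOfSeconds = (double)frameCount / framerate;
            return (long)(sizeInBytes * 8.0 / (numberOfSeconds * 1000.0));
        }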
Example #3
 public VobSubIndexWindow(MainForm mainForm)
 {
     InitializeComponent();
     this.mainForm = mainForm;
     this.vUtil    = new VideoUtil(mainForm);
     this.jobUtil  = new JobUtil(mainForm);
 }
Example #4
 public OneClickPostProcessor(MainForm mainForm, D2VIndexJob ijob)
 {
     this.job = ijob;
     this.mainForm = mainForm;
     this.jobUtil = mainForm.JobUtil;
     this.vUtil = new VideoUtil(mainForm);
 }
Example #5
 public OneClickPostProcessor(MainForm mainForm, IndexJob ijob)
 {
     this.job      = ijob;
     this.mainForm = mainForm;
     this.jobUtil  = mainForm.JobUtil;
     this.vUtil    = new VideoUtil(mainForm);
 }
Example #6
 public DGVinputWindow(MainForm mainForm)
 {
     InitializeComponent();
     this.mainForm = mainForm;
     this.vUtil    = new VideoUtil(mainForm);
     this.jobUtil  = new JobUtil(mainForm);
 }
Example #7
        public OneClickWindow(MainForm mainForm, JobUtil jobUtil, VideoEncoderProvider vProv, AudioEncoderProvider aProv)
        {
            this.mainForm            = mainForm;
            vUtil                    = new VideoUtil(mainForm);
            this.muxProvider         = mainForm.MuxProvider;
            acceptableContainerTypes = muxProvider.GetSupportedContainers().ToArray();

            InitializeComponent();

            initTabs();

            videoProfile.Manager = mainForm.Profiles;
            initAudioHandler();
            avsProfile.Manager = mainForm.Profiles;
            initOneClickHandler();

            audioTrack1.StandardItems = audioTrack2.StandardItems = new object[] { "None" };
            audioTrack1.SelectedIndex = audioTrack2.SelectedIndex = 0;

            //if containerFormat has not yet been set by the oneclick profile, add supported containers
            if (containerFormat.Items.Count == 0)
            {
                containerFormat.Items.AddRange(muxProvider.GetSupportedContainers().ToArray());
                this.containerFormat.SelectedIndex = 0;
            }

            showAdvancedOptions_CheckedChanged(null, null);
        }
Example #8
        public void Run(MainForm info)
        {
            // normal video verification
            string error = null;

            if ((error = info.Video.verifyVideoSettings()) != null)
            {
                MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
            if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
            {
                MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
            if (info.Video.CurrentSettings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.twopass1 ||
                info.Video.CurrentSettings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.threepass1)
            {
                MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode", "Improper configuration",
                                MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }

            // close video player so that the AviSynth script is also closed
            info.ClosePlayer();

            JobUtil.GetInputProperties(info.Video.VideoInput, out ulong frameCount, out double frameRate);

            VideoCodecSettings vSettings = info.Video.CurrentSettings.Clone();

            Zone[] zones = info.Video.Info.Zones; // We can't simply modify the zones in place because that would reveal the final zones config to the user, including the credits/start zones
            bool   cont  = JobUtil.GetFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame, info.Video.Info.CreditsStartFrame, ref zones, (int)frameCount);

            if (cont)
            {
                VideoStream myVideo = new VideoStream();
                myVideo.Input          = info.Video.Info.VideoInput;
                myVideo.Output         = info.Video.Info.VideoOutput;
                myVideo.NumberOfFrames = frameCount;
                myVideo.Framerate      = (decimal)frameRate;
                myVideo.VideoType      = info.Video.CurrentMuxableVideoType;
                myVideo.Settings       = vSettings;

                VideoInfo vInfo = info.Video.Info.Clone(); // so we don't modify the data on the main form
                vInfo.Zones = zones;

                using (AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info.Video.PrerenderJob, vInfo))
                {
                    if (aew.init())
                    {
                        aew.ShowDialog();
                    }
                    else
                    {
                        MessageBox.Show("The currently selected combination of video and audio output cannot be muxed", "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                    }
                }
            }
        }
Example #9
 public VobinputWindow(MainForm mainForm)
 {
     InitializeComponent();
     this.mainForm = mainForm;
     this.vUtil    = new VideoUtil(mainForm);
     this.jobUtil  = new JobUtil(mainForm);
     this.Closing += new CancelEventHandler(VobinputWindow_Closing);
 }
Example #10
 public FileIndexerWindow(MainForm mainForm)
 {
     InitializeComponent();
     this.mainForm = mainForm;
     this.vUtil    = new VideoUtil(mainForm);
     this.jobUtil  = new JobUtil(mainForm);
     CheckDGIIndexer();
 }
Example #11
        public LogItem postprocess()
        {
            audioFiles = vUtil.getAllDemuxedAudio(job.AudioTracks, job.Output, 8);

            fillInAudioInformation();

            log.LogValue("Desired size", job.PostprocessingProperties.OutputSize);
            log.LogValue("Split size", job.PostprocessingProperties.Splitting);

            VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;

            string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output),
                                              Path.GetFileNameWithoutExtension(job.Output) + "_Video");
            string muxedOutput = job.PostprocessingProperties.FinalOutput;

            //Open the video
            Dar?   dar;
            string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR,
                                          job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, log,
                                          job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar);

            VideoStream myVideo = new VideoStream();
            ulong       length;
            double      framerate;

            JobUtil.getInputProperties(out length, out framerate, videoInput);
            myVideo.Input          = videoInput;
            myVideo.Output         = videoOutput;
            myVideo.NumberOfFrames = length;
            myVideo.Framerate      = (decimal)framerate;
            myVideo.DAR            = dar;
            myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
            myVideo.Settings       = videoSettings;
            List <string> intermediateFiles = new List <string>();

            intermediateFiles.Add(videoInput);
            intermediateFiles.Add(job.Output);
            intermediateFiles.AddRange(audioFiles.Values);

            if (!string.IsNullOrEmpty(videoInput))
            {
                //Create empty subtitles for muxing (subtitles not supported in one click mode)
                MuxStream[] subtitles = new MuxStream[0];
                JobChain    c         = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles,
                                                                job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                                                                job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                                false, job.PostprocessingProperties.DirectMuxAudio, log);
                if (c == null)
                {
                    log.Warn("Job creation aborted");
                    return(log);
                }

                c = CleanupJob.AddAfter(c, intermediateFiles);
                mainForm.Jobs.addJobsWithDependencies(c);
            }
            return(log);
        }
Example #12
        public void Run(MainForm info)
        {
            using (Calculator calc = new Calculator(info))
            {
                ulong  nbFrames = 0;
                double framerate = 0.0;
                int    hRes = 0, vRes = 0;
                Dar    dar = new Dar();

                if (!string.IsNullOrEmpty(info.Video.VideoInput))
                {
                    JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out dar, info.Video.VideoInput);
                }

                calc.SetDefaults(nbFrames, framerate, hRes, vRes, info.Video.CurrentSettings, info.Audio.AudioStreams);

                DialogResult dr = calc.ShowDialog();
                if (dr != DialogResult.OK)
                {
                    return;
                }

                if (info.Video.CurrentSettings.EncoderType != calc.SelectedVCodec)
                {
                    return;
                }

                VideoCodecSettings settings = info.Video.CurrentSettings;

                if (settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.CQ ||
                    settings.VideoEncodingType == VideoCodecSettings.VideoEncodingMode.quality)
                {
                    dr = MessageBox.Show("Copy calculated bitrate into current video settings and change encoding mode to automated " + info.Settings.NbPasses + "-pass?", "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
                    if (dr != DialogResult.Yes)
                    {
                        return;
                    }
                    if (info.Settings.NbPasses == 3)
                    {
                        settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.threepassAutomated;  // Automated 3-pass
                    }
                    else
                    {
                        settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.twopassAutomated;  // Automated 2-pass
                    }
                }
                else
                {
                    dr = MessageBox.Show("Copy calculated bitrate into current video settings?", "Save calculated bitrate?", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
                    if (dr != DialogResult.Yes)
                    {
                        return;
                    }
                }
                settings.BitrateQuantizer = calc.VideoBitrate;
            }
        }
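The Calculator form itself is not shown in these examples. A size-based bitrate calculator conceptually subtracts the audio size and container overhead from the desired total and spreads the remaining bits over the clip duration; the sketch below is an illustrative reconstruction of that arithmetic, not the actual Calculator internals:

        // Illustrative reconstruction only; parameter names are assumptions.
        static long CalculateVideoKbps(long desiredSizeBytes, long audioSizeBytes,
                                       double overheadBytesPerSecond, ulong frames, double fps)
        {
            double seconds    = (double)frames / fps;
            double videoBytes = desiredSizeBytes - audioSizeBytes - overheadBytesPerSecond * seconds;
            return (long)(videoBytes * 8.0 / (seconds * 1000.0));
        }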
Example #13
        public void postprocess()
        {
            audioFiles = vUtil.getAllDemuxedAudio(job.Output, 8);

            fillInAudioInformation();


            logBuilder.Append("Desired size of this automated encoding series: " + job.PostprocessingProperties.OutputSize
                              + " split size: " + job.PostprocessingProperties.Splitting + "\r\n");
            VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;

            string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output),
                                              Path.GetFileNameWithoutExtension(job.Output) + "_Video");
            string muxedOutput = job.PostprocessingProperties.FinalOutput;

            //Open the video
            Dar?   dar;
            string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR,
                                          job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, logBuilder,
                                          job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar);

            VideoStream myVideo = new VideoStream();
            ulong       length;
            double      framerate;

            JobUtil.getInputProperties(out length, out framerate, videoInput);
            myVideo.Input          = videoInput;
            myVideo.Output         = videoOutput;
            myVideo.NumberOfFrames = length;
            myVideo.Framerate      = (decimal)framerate;
            myVideo.DAR            = dar;
            myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
            myVideo.Settings       = videoSettings;
            List <string> intermediateFiles = new List <string>();

            intermediateFiles.Add(videoInput);
            intermediateFiles.Add(job.Output);
            intermediateFiles.AddRange(audioFiles.Values);
            if (!string.IsNullOrEmpty(videoInput))
            {
                //Create empty subtitles for muxing (subtitles not supported in one click mode)
                MuxStream[] subtitles = new MuxStream[0];
                JobChain    c         = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles,
                                                                job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                                                                job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                                false, job.PostprocessingProperties.DirectMuxAudio);

                /*                    vUtil.generateJobSeries(videoInput, videoOutput, muxedOutput, videoSettings,
                 *                      audioStreams, audio, subtitles, job.PostprocessingProperties.ChapterFile,
                 *                      job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.SplitSize,
                 *                      containerOverhead, type, new string[] { job.Output, videoInput });*/
                c = CleanupJob.AddAfter(c, intermediateFiles);
                mainForm.Jobs.addJobsWithDependencies(c);
            }
            mainForm.addToLog(logBuilder.ToString());
        }
Example #14
        /// <summary>
        /// opens the video source and reads its properties (frame count, framerate,
        /// resolution and DAR), storing the total frame count and clip length
        /// in the status update object
        /// </summary>
        /// <param name="job">the video job whose input file is opened</param>
        protected void getInputProperties(VideoJob job)
        {
            double fps;
            Dar    d;

            JobUtil.GetAllInputProperties(out numberOfFrames, out fps, out hres, out vres, out d, job.Input);
            dar = job.DAR;
            su.NbFramesTotal = numberOfFrames;
            su.ClipLength    = TimeSpan.FromSeconds((double)numberOfFrames / fps);
        }
Example #15
        public AdaptiveMuxWindow(MainForm mainForm)
        {
            InitializeComponent();
            jobUtil     = new JobUtil(mainForm);
            muxProvider = mainForm.MuxProvider;

            audioTracks[0].Filter    = VideoUtil.GenerateCombinedFilter(ContainerManager.AudioTypes.ValuesArray);
            subtitleTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.SubtitleTypes.ValuesArray);
            vInput.Filter            = VideoUtil.GenerateCombinedFilter(ContainerManager.VideoTypes.ValuesArray);
        }
Example #16
        public void Run(MainForm info)
        {
            // normal video verification
            string error = null;

            // update the current audio stream with the latest data
            //            updateAudioStreams();
            if ((error = info.Video.verifyVideoSettings()) != null)
            {
                MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
            if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
            {
                MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
#warning must be fixed up to be more generic
            if (info.Video.CurrentVideoCodecSettings.EncodingMode == 2 || info.Video.CurrentVideoCodecSettings.EncodingMode == 5)
            {
                MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode", "Improper configuration",
                                MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }

            VideoCodecSettings vSettings = info.Video.CurrentVideoCodecSettings.clone();
            bool cont = info.JobUtil.getFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame, info.Video.Info.CreditsStartFrame);
            if (cont)
            {
                ulong       length    = 0;
                double      framerate = 0.0;
                VideoStream myVideo   = new VideoStream();
                JobUtil.getInputProperties(out length, out framerate, info.Video.VideoInput);
                myVideo.Input          = info.Video.Info.VideoInput;
                myVideo.Output         = info.Video.Info.VideoOutput;
                myVideo.NumberOfFrames = length;
                myVideo.Framerate      = (decimal)framerate;
                myVideo.DAR            = info.Video.Info.DAR;
                myVideo.VideoType      = info.Video.CurrentMuxableVideoType;
                myVideo.Settings       = vSettings;

                using (AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info, info.Video.PrerenderJob))
                {
                    if (aew.init())
                    {
                        info.ClosePlayer();
                        aew.ShowDialog();
                    }
                    else
                    {
                        MessageBox.Show("The currently selected combination of video and audio output cannot be muxed", "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                    }
                }
            }
        }
Example #17
        public void Run(MainForm info)
        {
            if (info.Video.VideoInput.Equals(""))
            {
                MessageBox.Show("You first need to load an AviSynth script", "No video configured",
                                MessageBoxButtons.OK, MessageBoxIcon.Warning);
                return;
            }

            bool succ;
            int  hRes, vRes;

            MeGUI.core.util.Dar d;
            ulong  nbFrames;
            double framerate;

            AVCLevels.Levels? compliantLevel     = null;
            x264Settings     currentX264Settings = (x264Settings)MainForm.Instance.Profiles.GetCurrentSettings("x264");

            if (JobUtil.GetAllInputProperties(out nbFrames, out framerate, out hRes, out vRes, out d, info.Video.VideoInput))
            {
                AVCLevels al = new AVCLevels();
                succ = al.validateAVCLevel(hRes, vRes, framerate, currentX264Settings, out compliantLevel);
            }
            else
            {
                succ = false;
            }

            if (succ)
            {
                MessageBox.Show("This file matches the criteria for the level chosen", "Video validated",
                                MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
            else
            {
                if (compliantLevel == null)
                {
                    MessageBox.Show("Unable to open video", "Test failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
                else
                {
                    AVCLevels al      = new AVCLevels();
                    string    message = "This video source cannot be encoded to comply with the chosen level.\n"
                                        + "You need at least Level " + AVCLevels.GetLevelText((AVCLevels.Levels)compliantLevel) + " for this source. Do you want\n"
                                        + "to increase the level automatically now?";
                    DialogResult dr = MessageBox.Show(message, "Test failed", MessageBoxButtons.YesNo,
                                                      MessageBoxIcon.Question);
                    if (dr == DialogResult.Yes)
                    {
                        currentX264Settings.AVCLevel = (AVCLevels.Levels)compliantLevel;
                    }
                }
            }
        }
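validateAVCLevel is not shown here. H.264 levels constrain, among other things, the frame size in macroblocks and the macroblock processing rate, so any level check starts from quantities like the ones below; this is a sketch of those two figures, not MeGUI's actual implementation (a real check also covers DPB size, bitrate caps and more):

        // The two basic quantities the H.264 level tables constrain.
        static void MacroblockStats(int hRes, int vRes, double framerate,
                                    out int mbsPerFrame, out double mbsPerSecond)
        {
            int mbWidth  = (hRes + 15) / 16; // a macroblock covers 16x16 pixels
            int mbHeight = (vRes + 15) / 16;
            mbsPerFrame  = mbWidth * mbHeight;
            mbsPerSecond = mbsPerFrame * framerate;
        }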
Example #18
 public AutoEncodeWindow(VideoStream videoStream, List <AudioJob> audioStreams, MainForm mainForm, bool prerender, VideoInfo vInfo)
     : this()
 {
     this.vInfo = vInfo;
     mainForm.Log.Add(log);
     this.videoStream  = videoStream;
     this.audioStreams = audioStreams;
     this.prerender    = prerender;
     this.mainForm     = mainForm;
     jobUtil           = new JobUtil(mainForm);
     vUtil             = new VideoUtil(mainForm);
     muxProvider       = mainForm.MuxProvider;
     container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray());
     splitting.MinimumFileSize = new FileSize(Unit.MB, 1);
 }
Example #19
        public AdaptiveMuxWindow(MainForm mainForm)
            : base(mainForm)
        {
            InitializeComponent();
            jobUtil = new JobUtil(mainForm);
            muxProvider = mainForm.MuxProvider;

            audioTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.AudioTypes.ValuesArray);
            subtitleTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.SubtitleTypes.ValuesArray);
            vInput.Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.VideoTypes.ValuesArray);
            cbContainer.Visible = true;
            lbContainer.Visible = true;

            this.cbContainer.SelectedIndexChanged += new System.EventHandler(this.cbContainer_SelectedIndexChanged);
        }
Example #20
        public AdaptiveMuxWindow(MainForm mainForm)
            : base(mainForm, null)
        {
            InitializeComponent();
            jobUtil     = new JobUtil(mainForm);
            muxProvider = mainForm.MuxProvider;

            audioTracks[0].Filter    = VideoUtil.GenerateCombinedFilter(ContainerManager.AudioTypes.ValuesArray);
            subtitleTracks[0].Filter = VideoUtil.GenerateCombinedFilter(ContainerManager.SubtitleTypes.ValuesArray);
            vInput.Filter            = VideoUtil.GenerateCombinedFilter(ContainerManager.VideoTypes.ValuesArray);
            chapters.Filter          = VideoUtil.GenerateCombinedFilter(ContainerManager.ChapterTypes.ValuesArray);
            cbContainer.Visible      = true;
            lbContainer.Visible      = true;

            subtitleTracks[0].chkDefaultStream.CheckedChanged += new System.EventHandler(base.chkDefaultStream_CheckedChanged);
            base.muxButton.Click += new System.EventHandler(this.muxButton_Click);
        }
Example #21
        /// <summary>
        /// tries to open the video source and gets the number of frames from it, or
        /// exits with an error
        /// </summary>
        /// <param name="videoSource">the AviSynth script</param>
        /// <param name="error">return parameter for all errors</param>
        /// <returns>true if the file could be opened, false if not</returns>
        protected bool getInputProperties(VideoJob job, out string error)
        {
            double f;
            int    a, b;

            error = JobUtil.GetAllInputProperties(out numberOfFrames, out f, out hres, out vres, out a, out b, job.Input);
            darX  = job.DARX;
            darY  = job.DARY;
            if (job.Settings.UsesSAR)
            {
                int sarX, sarY;
                VideoUtil.findSAR(job.DARX, job.DARY, hres, vres, out sarX, out sarY);
                job.Commandline = CommandLineGenerator.generateVideoCommandline(job.Settings, job.Input, job.Output, sarX, sarY);
            }
            su.NbFramesTotal = numberOfFrames;
            return(error == null);
        }
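VideoUtil.findSAR does not appear in these examples, but the relationship it relies on is fixed: DAR = SAR * hres / vres, so the sample aspect ratio reduces to (darX * vres) : (darY * hres). A plausible sketch under the assumption of positive, integer DAR components (MeGUI's real implementation may additionally snap to well-known ratios):

        // SAR = (darX * vres) : (darY * hres), reduced to lowest terms.
        static void FindSar(int darX, int darY, int hres, int vres,
                            out int sarX, out int sarY)
        {
            long x = (long)darX * vres;
            long y = (long)darY * hres;
            long g = Gcd(x, y);
            sarX   = (int)(x / g);
            sarY   = (int)(y / g);
        }

        // Euclidean GCD; assumes positive inputs.
        static long Gcd(long a, long b) { return b == 0 ? a : Gcd(b, a % b); }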
Example #22
 public AutoEncodeWindow(VideoStream videoStream, AudioStream[] audioStreams, MainForm mainForm, bool prerender)
     : this()
 {
     if (videoStream.Settings.EncodingMode == 1 || videoStream.Settings.EncodingMode == 9) // CQ and CRF -- no bitrate possible
     {
         averageBitrateRadio.Enabled = false;
         FileSizeRadio.Enabled       = false;
         noTargetRadio.Checked       = true;
     }
     this.videoStream  = videoStream;
     this.audioStreams = audioStreams;
     this.prerender    = prerender;
     this.mainForm     = mainForm;
     jobUtil           = new JobUtil(mainForm);
     vUtil             = new VideoUtil(mainForm);
     muxProvider       = mainForm.MuxProvider;
     container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray());
 }
Example #23
 public AutoEncodeWindow(VideoStream videoStream, List <AudioJob> audioStreams, MainForm mainForm, bool prerender, VideoInfo vInfo)
     : this()
 {
     this.vInfo = vInfo;
     mainForm.Log.Add(log);
     if (videoStream.Settings.EncodingMode == 1 || videoStream.Settings.EncodingMode == 9) // CQ and CRF -- no bitrate possible
     {
         averageBitrateRadio.Enabled = false;
         FileSizeRadio.Enabled       = false;
         noTargetRadio.Checked       = true;
     }
     this.videoStream  = videoStream;
     this.audioStreams = audioStreams;
     this.prerender    = prerender;
     this.mainForm     = mainForm;
     jobUtil           = new JobUtil(mainForm);
     vUtil             = new VideoUtil(mainForm);
     muxProvider       = mainForm.MuxProvider;
     container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray());
     splitting.MinimumFileSize = new FileSize(Unit.MB, 1);
 }
Example #24
        /// <summary>
        /// compiles final bitrate statistics
        /// </summary>
        protected void compileFinalStats()
        {
            try
            {
                if (!string.IsNullOrEmpty(job.Output) && File.Exists(job.Output))
                {
                    FileInfo fi   = new FileInfo(job.Output);
                    long     size = fi.Length; // size in bytes

                    ulong  framecount;
                    double framerate;
                    JobUtil.getInputProperties(out framecount, out framerate, job.Input);

                    double numberOfSeconds = (double)framecount / framerate;
                    long   bitrate         = (long)((double)(size * 8.0) / (numberOfSeconds * 1000.0));

                    LogItem stats = log.Info("Final statistics");

                    if (job.Settings.EncodingMode == 1) // QP mode
                    {
                        stats.LogValue("Constant Quantizer Mode", "Quantizer " + job.Settings.BitrateQuantizer + " computed...");
                    }
                    else if (job.Settings.EncodingMode == 9) // CRF mode
                    {
                        stats.LogValue("Constant Quality Mode", "Quality " + job.Settings.BitrateQuantizer + " computed...");
                    }
                    else
                    {
                        stats.LogValue("Video Bitrate Desired", job.Settings.BitrateQuantizer + " kbit/s");
                    }

                    stats.LogValue("Video Bitrate Obtained (approximate)", bitrate + " kbit/s");
                }
            }
            catch (Exception e)
            {
                log.LogValue("Exception in compileFinalStats", e, ImageType.Warning);
            }
        }
Example #25
        private void StartPostProcessing()
        {
            Thread t = null;

            try
            {
                _log.LogEvent("Processing thread started");
                raiseEvent("Preprocessing...   ***PLEASE WAIT***");
                _start = DateTime.Now;
                t      = new Thread(new ThreadStart(delegate
                {
                    while (true)
                    {
                        updateTime();
                        Thread.Sleep(1000);
                    }
                }));
                t.Start();

                List <string> arrAudioFilesDelete = new List <string>();
                audioFiles = new Dictionary <int, string>();
                List <AudioTrackInfo> arrAudioTracks    = new List <AudioTrackInfo>();
                List <AudioJob>       arrAudioJobs      = new List <AudioJob>();
                List <MuxStream>      arrMuxStreams     = new List <MuxStream>();
                List <string>         intermediateFiles = new List <string>();

                FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);
                foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
                {
                    if (oAudioTrack.ExtractMKVTrack)
                    {
                        audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                        arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                    }
                    else if (oAudioTrack.AudioTrackInfo != null)
                    {
                        arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
                    }
                    if (oAudioTrack.AudioJob != null)
                    {
                        if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE &&
                            String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                        {
                            oAudioTrack.AudioJob.Input = job.Input;
                        }
                        arrAudioJobs.Add(oAudioTrack.AudioJob);
                    }
                    if (oAudioTrack.DirectMuxAudio != null)
                    {
                        arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
                    }
                }
                if (audioFiles.Count == 0 && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE && !job.PostprocessingProperties.Eac3toDemux)
                {
                    audioFiles = vUtil.getAllDemuxedAudio(arrAudioTracks, new List <AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, _log);
                }

                fillInAudioInformation(arrAudioJobs, arrMuxStreams);

                if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    _log.LogEvent("Don't encode video: True");
                }
                else
                {
                    _log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
                }
                _log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);


                // chapter file handling
                if (String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
                {
                    job.PostprocessingProperties.ChapterFile = null;
                }
                else if (job.PostprocessingProperties.Container == ContainerType.AVI)
                {
                    _log.LogEvent("Chapter handling disabled because of the AVI target container");
                    job.PostprocessingProperties.ChapterFile = null;
                }
                else if (!File.Exists(job.PostprocessingProperties.ChapterFile))
                {
                    if (job.PostprocessingProperties.ChapterFile.StartsWith("<") || job.PostprocessingProperties.ChapterExtracted)
                    {
                        // internal chapter file
                        string strTempFile = job.PostprocessingProperties.ChapterFile;
                        if (Path.GetExtension(job.PostprocessingProperties.VideoInput).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".mkv"))
                        {
                            MediaInfoFile oInfo = new MediaInfoFile(job.PostprocessingProperties.VideoInput, ref _log);
                            if (oInfo.hasMKVChapters())
                            {
                                job.PostprocessingProperties.ChapterFile = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.IndexFile) + " - Chapter Information.txt");
                                if (oInfo.extractMKVChapters(job.PostprocessingProperties.ChapterFile))
                                {
                                    intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                                    job.PostprocessingProperties.ChapterExtracted = true;
                                }
                                else
                                {
                                    job.PostprocessingProperties.ChapterFile = strTempFile;
                                }
                            }
                        }
                        else if (File.Exists(job.PostprocessingProperties.IFOInput))
                        {
                            job.PostprocessingProperties.ChapterFile = VideoUtil.getChaptersFromIFO(job.PostprocessingProperties.IFOInput, false, job.PostprocessingProperties.WorkingDirectory, job.PostprocessingProperties.TitleNumberToProcess);
                            if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
                            {
                                intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                                job.PostprocessingProperties.ChapterExtracted = true;
                            }
                            else
                            {
                                job.PostprocessingProperties.ChapterFile = strTempFile;
                            }
                        }
                    }
                    if (!File.Exists(job.PostprocessingProperties.ChapterFile))
                    {
                        _log.LogEvent("File not found: " + job.PostprocessingProperties.ChapterFile, ImageType.Error);
                        job.PostprocessingProperties.ChapterFile = null;
                    }
                }
                else if (job.PostprocessingProperties.ChapterExtracted)
                {
                    intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                }

                string             avsFile       = String.Empty;
                VideoStream        myVideo       = new VideoStream();
                VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
                if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    //Open the video
                    Dar? dar;
                    avsFile = createAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR,
                                            job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, _log,
                                            job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar,
                                            job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                                            job.PostprocessingProperties.UseChaptersMarks);

                    ulong  length;
                    double framerate;
                    JobUtil.getInputProperties(out length, out framerate, avsFile);
                    myVideo.Input  = avsFile;
                    myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory,
                                                  Path.GetFileNameWithoutExtension(job.Input) + "_Video");
                    myVideo.NumberOfFrames = length;
                    myVideo.Framerate      = (decimal)framerate;
                    myVideo.DAR            = dar;
                    myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
                    myVideo.Settings       = videoSettings;
                }
                else
                {
                    myVideo.Output   = job.PostprocessingProperties.VideoFileToMux;
                    myVideo.Settings = videoSettings;

                    MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref _log);
                    videoSettings.VideoName = oInfo.VideoInfo.Track.Name;
                    myVideo.Framerate       = (decimal)oInfo.VideoInfo.FPS;
                }

                intermediateFiles.Add(avsFile);
                intermediateFiles.Add(job.IndexFile);
                intermediateFiles.AddRange(audioFiles.Values);
                if (!string.IsNullOrEmpty(qpfile))
                {
                    intermediateFiles.Add(qpfile);
                }
                foreach (string file in arrAudioFilesDelete)
                {
                    intermediateFiles.Add(file);
                }
                if (File.Exists(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log")))
                {
                    intermediateFiles.Add(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log"));
                }
                foreach (string file in job.PostprocessingProperties.FilesToDelete)
                {
                    intermediateFiles.Add(file);
                }

                if (!string.IsNullOrEmpty(avsFile) || !String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    MuxStream[] subtitles;
                    if (job.PostprocessingProperties.SubtitleTracks.Count == 0)
                    {
                        //Create empty subtitles for muxing
                        subtitles = new MuxStream[0];
                    }
                    else
                    {
                        subtitles = new MuxStream[job.PostprocessingProperties.SubtitleTracks.Count];
                        int i = 0;
                        foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
                        {
                            if (oTrack.TrackInfo.IsMKVContainer())
                            {
                                //demuxed MKV
                                string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                                if (File.Exists(trackFile))
                                {
                                    intermediateFiles.Add(trackFile);
                                    if (Path.GetExtension(trackFile).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".idx"))
                                    {
                                        intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                                    }

                                    subtitles[i] = new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                                }
                                else
                                {
                                    _log.LogEvent("File not found: " + trackFile, ImageType.Error);
                                }
                            }
                            else
                            {
                                subtitles[i] = new MuxStream(oTrack.DemuxFilePath, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                            }
                            i++;
                        }
                    }

                    JobChain c = vUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(),
                                                         subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                                                         job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                         job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(),
                                                         _log, job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux, job.PostprocessingProperties.AudioTracks.ToArray());
                    if (c == null)
                    {
                        _log.Warn("Job creation aborted");
                        return;
                    }

                    c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
                    mainForm.Jobs.addJobsWithDependencies(c);

                    // batch processing other input files if necessary
                    if (job.PostprocessingProperties.FilesToProcess.Count > 0)
                    {
                        OneClickWindow ocw = new OneClickWindow(mainForm);
                        ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
                    }
                }
            }
            catch (Exception e)
            {
                if (t != null)
                {
                    t.Abort();
                }
                if (e is ThreadAbortException)
                {
                    _log.LogEvent("Aborting...");
                    su.WasAborted = true;
                    su.IsComplete = true;
                    raiseEvent();
                }
                else
                {
                    _log.LogValue("An error occurred", e, ImageType.Error);
                    su.HasError   = true;
                    su.IsComplete = true;
                    raiseEvent();
                }
                return;
            }
            t.Abort();
            su.IsComplete = true;
            raiseEvent();
        }
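The elapsed-time updater above spins a thread in an endless loop and tears it down with Thread.Abort, which modern .NET no longer supports (it throws PlatformNotSupportedException there). A sketch of the same ticking pattern with cooperative cancellation instead; UpdateTime is a hypothetical stand-in for the real progress callback:

        using System;
        using System.Threading;

        CancellationTokenSource cts    = new CancellationTokenSource();
        Thread                  ticker = new Thread(() =>
        {
            while (!cts.IsCancellationRequested)
            {
                UpdateTime();       // hypothetical progress callback
                Thread.Sleep(1000); // tick once per second, as above
            }
        });

        ticker.Start();
        // ... perform the post-processing work here ...
        cts.Cancel();               // cooperative replacement for t.Abort()
        ticker.Join();

        static void UpdateTime() { /* hypothetical: refresh the elapsed-time display */ }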
Example #26
        public static JobChain GenerateJobSeries(VideoStream video, string muxedOutput, AudioJob[] audioStreams,
                                                 MuxStream[] subtitles, List <string> attachments, string timeStampFile, ChapterInfo chapterInfo, FileSize?desiredSize, FileSize?splitSize,
                                                 ContainerType container, bool prerender, MuxStream[] muxOnlyAudio, LogItem log, string deviceType,
                                                 Zone[] zones, string videoFileToMux, OneClickAudioTrack[] audioTracks, bool alwaysMuxOutput)
        {
            if (desiredSize.HasValue && String.IsNullOrEmpty(videoFileToMux))
            {
                if (video.Settings.VideoEncodingType != VideoCodecSettings.VideoEncodingMode.twopassAutomated &&
                    video.Settings.VideoEncodingType != VideoCodecSettings.VideoEncodingMode.threepassAutomated)    // no automated 2/3 pass
                {
                    if (MainForm.Instance.Settings.NbPasses == 2)
                    {
                        video.Settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.twopassAutomated; // automated 2 pass
                    }
                    else if (video.Settings.MaxNumberOfPasses == 3)
                    {
                        video.Settings.VideoEncodingType = VideoCodecSettings.VideoEncodingMode.threepassAutomated;
                    }
                }
            }

            fixFileNameExtensions(video, audioStreams, container);
            string videoOutput = video.Output;

            log.Add(eliminatedDuplicateFilenames(ref videoOutput, ref muxedOutput, audioStreams));

            JobChain vjobs = null;

            if (!String.IsNullOrEmpty(videoFileToMux))
            {
                video.Output = videoFileToMux;
            }
            else
            {
                video.Output = videoOutput;
                vjobs        = JobUtil.prepareVideoJob(video.Input, video.Output, video.Settings, video.DAR, prerender, zones);
                if (vjobs == null)
                {
                    return(null);
                }
            }

            /* Here, we guess the types of the files based on extension.
             * This is guaranteed to work with MeGUI-encoded files, because
             * the extension will always be recognised. For non-MeGUI files,
             * we can only ever hope.*/
            List <MuxStream>   allAudioToMux      = new List <MuxStream>();
            List <MuxableType> allInputAudioTypes = new List <MuxableType>();

            if (audioTracks != null)
            {
                // OneClick mode
                foreach (OneClickAudioTrack ocAudioTrack in audioTracks)
                {
                    if (ocAudioTrack.DirectMuxAudio != null)
                    {
                        if (VideoUtil.guessAudioMuxableType(ocAudioTrack.DirectMuxAudio.path, true) != null)
                        {
                            allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(ocAudioTrack.DirectMuxAudio.path, true));
                            allAudioToMux.Add(ocAudioTrack.DirectMuxAudio);
                        }
                    }
                    if (ocAudioTrack.AudioJob != null && !String.IsNullOrEmpty(ocAudioTrack.AudioJob.Input))
                    {
                        allAudioToMux.Add(ocAudioTrack.AudioJob.ToMuxStream());
                        allInputAudioTypes.Add(ocAudioTrack.AudioJob.ToMuxableType());
                    }
                }
            }
            else
            {
                // AutoEncode mode
                foreach (AudioJob stream in audioStreams)
                {
                    allAudioToMux.Add(stream.ToMuxStream());
                    allInputAudioTypes.Add(stream.ToMuxableType());
                }

                foreach (MuxStream muxStream in muxOnlyAudio)
                {
                    if (VideoUtil.guessAudioMuxableType(muxStream.path, true) != null)
                    {
                        allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(muxStream.path, true));
                        allAudioToMux.Add(muxStream);
                    }
                }
            }

            List <MuxableType> allInputSubtitleTypes = new List <MuxableType>();

            foreach (MuxStream muxStream in subtitles)
            {
                if (VideoUtil.guessSubtitleType(muxStream.path) != null)
                {
                    allInputSubtitleTypes.Add(new MuxableType(VideoUtil.guessSubtitleType(muxStream.path), null));
                }
            }

            MuxableType chapterInputType = null;

            if (chapterInfo.HasChapters)
            {
                chapterInputType = new MuxableType(ChapterType.OGG_TXT, null);
            }

            MuxableType deviceOutputType = null;

            if (!String.IsNullOrEmpty(deviceType))
            {
                DeviceType type = VideoUtil.guessDeviceType(deviceType);
                if (type != null)
                {
                    deviceOutputType = new MuxableType(type, null);
                }
            }

            List <string> inputsToDelete = new List <string>();

            if (String.IsNullOrEmpty(videoFileToMux))
            {
                inputsToDelete.Add(video.Output);
            }
            inputsToDelete.AddRange(Array.ConvertAll <AudioJob, string>(audioStreams, delegate(AudioJob a) { return(a.Output); }));

            JobChain muxJobs = JobUtil.GenerateMuxJobs(video, null, allAudioToMux.ToArray(), allInputAudioTypes.ToArray(),
                                                       subtitles, allInputSubtitleTypes.ToArray(), attachments, chapterInfo, chapterInputType, container, muxedOutput, timeStampFile,
                                                       splitSize, inputsToDelete, deviceType, deviceOutputType, alwaysMuxOutput);

            if (desiredSize.HasValue && String.IsNullOrEmpty(videoFileToMux))
            {
                BitrateCalculationInfo b = new BitrateCalculationInfo();

                List <string> audiofiles = new List <string>();
                foreach (MuxStream s in allAudioToMux)
                {
                    audiofiles.Add(s.path);
                }
                b.AudioFiles = audiofiles;

                b.Container   = container;
                b.VideoJobs   = new List <TaggedJob>(vjobs.Jobs);
                b.DesiredSize = desiredSize.Value;
                ((VideoJob)vjobs.Jobs[0].Job).BitrateCalculationInfo = b;
            }

            if (!String.IsNullOrEmpty(videoFileToMux))
            {
                return(new SequentialChain(new SequentialChain((Job[])audioStreams), new SequentialChain(muxJobs)));
            }
            else
            {
                return(new SequentialChain(
                           new SequentialChain((Job[])audioStreams),
                           new SequentialChain(vjobs),
                           new SequentialChain(muxJobs)));
            }
        }
Example #27
        private void queueVideoButton_Click(object sender, System.EventArgs e)
        {
            fileType_SelectedIndexChanged(sender, e);     // always select the correct output file extension
            string settingsError = verifyVideoSettings(); // check that basic input, logfile and output file settings are okay

            if (settingsError != null)
            {
                MessageBox.Show(settingsError, "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
            VideoCodecSettings vSettings = this.CurrentSettings.Clone();

            string videoOutput = info.VideoOutput;

            // special handling as the encoders cannot output all desired container types
            if (!fileType.Text.StartsWith("RAW") &&
                (!vSettings.SettingsID.Equals("x264") || !fileType.Text.Equals("MKV") || MainForm.Instance.Settings.UseExternalMuxerX264))
            {
                if (vSettings.SettingsID.Equals("x264"))
                {
                    videoOutput = Path.ChangeExtension(videoOutput, "264");
                }
                else if (vSettings.SettingsID.Equals("x265"))
                {
                    videoOutput = Path.ChangeExtension(videoOutput, "hevc");
                }
                else if (vSettings.SettingsID.Equals("XviD"))
                {
                    videoOutput = Path.ChangeExtension(videoOutput, "m4v");
                }
            }

            JobUtil.GetInputProperties(info.VideoInput, out ulong frameCount, out double frameRate);

            JobChain prepareJobs = JobUtil.AddVideoJobs(info.VideoInput, videoOutput, this.CurrentSettings.Clone(),
                                                        info.IntroEndFrame, info.CreditsStartFrame, info.DAR, PrerenderJob, info.Zones, (int)frameCount);

            if (!fileType.Text.StartsWith("RAW") &&
                vSettings.VideoEncodingType != MeGUI.VideoCodecSettings.VideoEncodingMode.twopass1 && vSettings.VideoEncodingType != MeGUI.VideoCodecSettings.VideoEncodingMode.threepass1 &&
                (!vSettings.SettingsID.Equals("x264") || !fileType.Text.Equals("MKV") || MainForm.Instance.Settings.UseExternalMuxerX264))
            {
                // create mux job
                MuxJob mJob = new MuxJob();
                mJob.Input = videoOutput;

                if (vSettings.SettingsID.Equals("XviD"))
                {
                    mJob.MuxType = MuxerType.FFMPEG;
                    mJob.Output  = Path.ChangeExtension(videoOutput, "avi");
                    if (fileType.Text.Equals("MKV"))
                    {
                        mJob.Output = Path.ChangeExtension(videoOutput, "mkv");
                    }
                }
                else if (fileType.Text.Equals("MKV"))
                {
                    mJob.MuxType = MuxerType.MKVMERGE;
                    mJob.Output  = Path.ChangeExtension(videoOutput, "mkv");
                }
                else
                {
                    mJob.MuxType = MuxerType.MP4BOX;
                    mJob.Output  = Path.ChangeExtension(videoOutput, "mp4");
                }

                mJob.Settings.MuxAll      = true;
                mJob.Settings.MuxedInput  = mJob.Input;
                mJob.Settings.MuxedOutput = mJob.Output;
                mJob.FilesToDelete.Add(videoOutput);

                // add job to queue
                prepareJobs = new SequentialChain(prepareJobs, new SequentialChain(mJob));
            }
            MainForm.Instance.Jobs.AddJobsWithDependencies(prepareJobs, true);
        }
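The extension handling above reduces to a small mapping from the encoder's SettingsID to the raw elementary-stream extension before a separate mux job wraps the stream into the chosen container. A compact sketch of that mapping, using the same SettingsID strings as the code above:

        // Raw-stream extension per encoder, mirroring the branches above.
        static string RawExtensionFor(string settingsId)
        {
            switch (settingsId)
            {
                case "x264": return "264";  // raw AVC
                case "x265": return "hevc"; // raw HEVC
                case "XviD": return "m4v";  // raw MPEG-4 ASP
                default:     return null;   // keep the existing extension
            }
        }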
Example #28
        public AviSynthWindow(MainForm mainForm)
        {
            this.loaded = false;
            this.mainForm = mainForm;
            InitializeComponent();
            avsProfile.Manager = MainForm.Instance.Profiles;

            this.controlsToDisable = new List<Control>();

            this.controlsToDisable.Add(reopenOriginal);
            this.controlsToDisable.Add(filtersGroupbox);
            this.controlsToDisable.Add(deinterlacingGroupBox);
            this.controlsToDisable.Add(mpegOptGroupBox);
            this.controlsToDisable.Add(aviOptGroupBox);
            this.controlsToDisable.Add(resNCropGroupbox);
            this.controlsToDisable.Add(previewAvsButton);
            this.controlsToDisable.Add(saveButton);
            this.controlsToDisable.Add(arChooser);
            this.controlsToDisable.Add(inputDARLabel);
            this.controlsToDisable.Add(signalAR);
            this.controlsToDisable.Add(avisynthScript);
            this.controlsToDisable.Add(openDLLButton);
            this.controlsToDisable.Add(dgOptions);

            enableControls(false);
            script = new StringBuilder();

            this.path = mainForm.MeGUIPath;

            this.resizeFilterType.Items.Clear();
            this.resizeFilterType.DataSource = ScriptServer.ListOfResizeFilterType;
            this.resizeFilterType.BindingContext = new BindingContext();
            this.noiseFilterType.Items.Clear();
            this.noiseFilterType.DataSource = ScriptServer.ListOfDenoiseFilterType;
            this.noiseFilterType.BindingContext = new BindingContext();

            this.deintFieldOrder.Items.Clear();
            this.deintFieldOrder.DataSource = ScriptServer.ListOfFieldOrders;
            this.deintFieldOrder.BindingContext = new BindingContext();
            this.deintSourceType.Items.Clear();
            this.deintSourceType.DataSource = ScriptServer.ListOfSourceTypes;
            this.deintSourceType.BindingContext = new BindingContext();
            this.cbNvDeInt.Items.Clear();
            this.cbNvDeInt.DataSource = ScriptServer.ListOfNvDeIntType;
            this.cbNvDeInt.BindingContext = new BindingContext();
            deintFieldOrder.SelectedIndex = -1;
            deintSourceType.SelectedIndex = -1;
            cbNvDeInt.SelectedIndex = 0;
            cbCharset.SelectedIndex = 0;

            this.noiseFilterType.SelectedIndexChanged += new System.EventHandler(this.noiseFilterType_SelectedIndexChanged);
            this.resizeFilterType.SelectedIndexChanged += new System.EventHandler(this.resizeFilterType_SelectedIndexChanged);
            this.cbNvDeInt.SelectedIndexChanged += new System.EventHandler(this.cbNvDeInt_SelectedIndexChanged);

            player = null;
            this.crop.Checked = false;
            this.cropLeft.Value = 0;
            this.cropTop.Value = 0;
            this.cropRight.Value = 0;
            this.cropBottom.Value = 0;

            deinterlaceType.DataSource = new DeinterlaceFilter[] { new DeinterlaceFilter("Do nothing (source not detected)", "#blank deinterlace line") };
            this.jobUtil = new JobUtil(mainForm);
            this.showScript();

            this.loaded = true;
            ProfileChanged(null, null);
        }
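One detail in this constructor is easy to miss: every data-bound ComboBox receives its own BindingContext. WinForms controls share the form's BindingContext by default, so two combos bound to the same data source would otherwise move their selections in lockstep. A minimal sketch of the pattern, with a hypothetical filter list:

// Sketch: give each ComboBox its own BindingContext so that combos sharing
// one data source keep independent selections ("filterNames" is a
// hypothetical example list, comboA/comboB stand-in controls).
string[] filterNames = { "Bilinear", "Bicubic", "Lanczos" };

comboA.DataSource = filterNames;
comboA.BindingContext = new BindingContext(); // own currency manager

comboB.DataSource = filterNames;
comboB.BindingContext = new BindingContext(); // selecting in comboA no longer moves comboB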
Example #29
 public FileIndexerWindow(MainForm mainForm)
 {
     InitializeComponent();
     this.mainForm = mainForm;
     this.vUtil = new VideoUtil(mainForm);
     this.jobUtil = new JobUtil(mainForm);
     CheckDGIIndexer();
 }
Example #30
 /// <summary>
 /// default constructor
 /// initializes all the GUI components, initializes the internal objects and makes a default selection for all the GUI dropdowns
 /// In addition, all the jobs and profiles are being loaded from the harddisk
 /// </summary>
 public void constructMeGUIInfo()
 {
     muxProvider = new MuxProvider(this);
     this.codecs = new CodecManager();
     this.path = System.Windows.Forms.Application.StartupPath;
     this.jobUtil = new JobUtil(this);
     this.settings = new MeGUISettings();
     addPackages();
     this.profileManager = new ProfileManager(this.path);
     this.profileManager.LoadProfiles();
     this.mediaFileFactory = new MediaFileFactory(this);
     this.loadSettings();
     this.dialogManager = new DialogManager(this);
 }
Example #32
        protected override void RunInThread()
        {
            JobChain      c = null;
            List <string> intermediateFiles = new List <string>();
            bool          bError            = false;

            try
            {
                log.LogEvent("Processing thread started");
                su.Status = "Preprocessing...   ***PLEASE WAIT***";
                su.ResetTime();

                List <string> arrAudioFilesDelete = new List <string>();
                audioFiles = new Dictionary <int, string>();
                List <AudioTrackInfo> arrAudioTracks = new List <AudioTrackInfo>();
                List <AudioJob>       arrAudioJobs   = new List <AudioJob>();
                List <MuxStream>      arrMuxStreams  = new List <MuxStream>();
                FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);

                // audio handling
                foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
                {
                    if (IsJobStopped())
                    {
                        return;
                    }

                    if (oAudioTrack.AudioTrackInfo != null)
                    {
                        if (oAudioTrack.AudioTrackInfo.ExtractMKVTrack)
                        {
                            if (job.PostprocessingProperties.ApplyDelayCorrection && File.Exists(job.PostprocessingProperties.IntermediateMKVFile))
                            {
                                MediaInfoFile oFile  = new MediaInfoFile(job.PostprocessingProperties.IntermediateMKVFile, ref log);
                                bool          bFound = false;
                                foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks)
                                {
                                    if (oAudioInfo.MMGTrackID == oAudioTrack.AudioTrackInfo.MMGTrackID)
                                    {
                                        bFound = true;
                                    }
                                }
                                int mmgTrackID = 0;
                                if (!bFound)
                                {
                                    mmgTrackID = oFile.AudioInfo.Tracks[oAudioTrack.AudioTrackInfo.TrackIndex].MMGTrackID;
                                }
                                else
                                {
                                    mmgTrackID = oAudioTrack.AudioTrackInfo.MMGTrackID;
                                }
                                foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks)
                                {
                                    if (oAudioInfo.MMGTrackID == mmgTrackID)
                                    {
                                        if (oAudioTrack.DirectMuxAudio != null)
                                        {
                                            oAudioTrack.DirectMuxAudio.delay = oAudioInfo.Delay;
                                        }
                                        if (oAudioTrack.AudioJob != null)
                                        {
                                            oAudioTrack.AudioJob.Delay = oAudioInfo.Delay;
                                        }
                                        break;
                                    }
                                }
                            }
                            if (!audioFiles.ContainsKey(oAudioTrack.AudioTrackInfo.TrackID))
                            {
                                audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                                arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                            }
                        }
                        else
                        {
                            arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
                        }
                    }
                    if (oAudioTrack.AudioJob != null)
                    {
                        if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE &&
                            String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                        {
                            oAudioTrack.AudioJob.Input = job.Input;
                        }
                        arrAudioJobs.Add(oAudioTrack.AudioJob);
                    }
                    if (oAudioTrack.DirectMuxAudio != null)
                    {
                        arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
                    }
                }
                if (audioFiles.Count == 0 && !job.PostprocessingProperties.Eac3toDemux &&
                    job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE &&
                    job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.AVISOURCE)
                {
                    if ((job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI || job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM) &&
                        File.Exists(Path.ChangeExtension(job.IndexFile, ".log")))
                    {
                        job.PostprocessingProperties.FilesToDelete.Add(Path.ChangeExtension(job.IndexFile, ".log"));
                        audioFiles = AudioUtil.GetAllDemuxedAudioFromDGI(arrAudioTracks, out arrAudioFilesDelete, job.IndexFile, log);
                    }
                    else
                    {
                        audioFiles = VideoUtil.getAllDemuxedAudio(arrAudioTracks, new List <AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, log);
                    }
                }

                FillInAudioInformation(ref arrAudioJobs, arrMuxStreams);

                if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    log.LogEvent("Don't encode video: True");
                }
                else
                {
                    log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
                }
                log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);

                if (IsJobStopped())
                {
                    return;
                }

                // video file handling
                string             avsFile       = String.Empty;
                VideoStream        myVideo       = new VideoStream();
                VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
                if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    //Open the video
                    try
                    {
                        avsFile = CreateAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR,
                                                job.PostprocessingProperties.HorizontalOutputResolution, log,
                                                job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings,
                                                job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                                                job.PostprocessingProperties.UseChaptersMarks);
                    }
                    catch (Exception ex)
                    {
                        log.LogValue("An error occurred creating the AVS file", ex, ImageType.Error);
                    }

                    if (IsJobStopped())
                    {
                        return;
                    }

                    if (!String.IsNullOrEmpty(avsFile))
                    {
                        // check AVS file
                        JobUtil.GetInputProperties(avsFile, out ulong frameCount, out double frameRate);

                        myVideo.Input          = avsFile;
                        myVideo.Output         = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.Input) + "_Video");
                        myVideo.NumberOfFrames = frameCount;
                        myVideo.Framerate      = (decimal)frameRate;
                        myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
                        myVideo.Settings       = videoSettings;
                    }
                    else
                    {
                        bError = true;
                    }
                }
                else
                {
                    myVideo.DAR    = job.PostprocessingProperties.ForcedDAR;
                    myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
                    MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref log);
                    if (Path.GetExtension(job.PostprocessingProperties.VideoFileToMux).Equals(".unknown") && !String.IsNullOrEmpty(oInfo.ContainerFileTypeString))
                    {
                        job.PostprocessingProperties.VideoFileToMux = Path.ChangeExtension(job.PostprocessingProperties.VideoFileToMux, oInfo.ContainerFileTypeString.ToLowerInvariant());
                        File.Move(myVideo.Output, job.PostprocessingProperties.VideoFileToMux);
                        myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
                        job.PostprocessingProperties.FilesToDelete.Add(myVideo.Output);
                    }

                    myVideo.Settings       = videoSettings;
                    myVideo.Framerate      = (decimal)oInfo.VideoInfo.FPS;
                    myVideo.NumberOfFrames = oInfo.VideoInfo.FrameCount;
                }

                if (IsJobStopped())
                {
                    return;
                }

                intermediateFiles.Add(avsFile);
                intermediateFiles.Add(job.IndexFile);
                intermediateFiles.AddRange(audioFiles.Values);
                foreach (string file in arrAudioFilesDelete)
                {
                    intermediateFiles.Add(file);
                }
                intermediateFiles.Add(Path.ChangeExtension(job.Input, ".log"));
                foreach (string file in job.PostprocessingProperties.FilesToDelete)
                {
                    intermediateFiles.Add(file);
                }

                // subtitle handling
                List <MuxStream> subtitles = new List <MuxStream>();
                if (job.PostprocessingProperties.SubtitleTracks.Count > 0)
                {
                    foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
                    {
                        if (oTrack.TrackInfo.ExtractMKVTrack)
                        {
                            //demuxed MKV
                            string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                            if (File.Exists(trackFile))
                            {
                                intermediateFiles.Add(trackFile);
                                if (Path.GetExtension(trackFile).ToLowerInvariant().Equals(".idx"))
                                {
                                    intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                                }

                                subtitles.Add(new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null));
                            }
                            else
                            {
                                log.LogEvent("Ignoring subtitle as the it cannot be found: " + trackFile, ImageType.Warning);
                            }
                        }
                        else
                        {
                            // sometimes the language is detected differently by VSRip and the IFO parser; therefore, also search for alternatively named files
                            string strDemuxFile = oTrack.DemuxFilePath;
                            if (!File.Exists(strDemuxFile) && Path.GetFileNameWithoutExtension(strDemuxFile).Contains("_"))
                            {
                                string strDemuxFileName = Path.GetFileNameWithoutExtension(strDemuxFile);
                                strDemuxFileName = strDemuxFileName.Substring(0, strDemuxFileName.LastIndexOf("_")) + "_*" + Path.GetExtension(strDemuxFile);
                                foreach (string strFileName in Directory.GetFiles(Path.GetDirectoryName(strDemuxFile), strDemuxFileName))
                                {
                                    strDemuxFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), strFileName);
                                    intermediateFiles.Add(strDemuxFile);
                                    intermediateFiles.Add(Path.ChangeExtension(strDemuxFile, ".sub"));
                                    log.LogEvent("Subtitle + " + oTrack.DemuxFilePath + " cannot be found. " + strFileName + " will be used instead", ImageType.Information);
                                    break;
                                }
                            }
                            if (File.Exists(strDemuxFile))
                            {
                                string strTrackName = oTrack.Name;

                                // check if a forced stream is available
                                string strForcedFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), Path.GetFileNameWithoutExtension(strDemuxFile) + "_forced.idx");
                                if (File.Exists(strForcedFile))
                                {
                                    subtitles.Add(new MuxStream(strForcedFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(true, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, true, null));
                                    intermediateFiles.Add(strForcedFile);
                                    intermediateFiles.Add(Path.ChangeExtension(strForcedFile, ".sub"));
                                }
                                subtitles.Add(new MuxStream(strDemuxFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(false, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, (File.Exists(strForcedFile) ? false : oTrack.ForcedStream), null));
                            }
                            else
                            {
                                log.LogEvent("Ignoring subtitle as the it cannot be found: " + oTrack.DemuxFilePath, ImageType.Warning);
                            }
                        }
                    }
                }

                if (IsJobStopped())
                {
                    return;
                }

                if (!bError)
                {
                    c = VideoUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(),
                                                    subtitles.ToArray(), job.PostprocessingProperties.Attachments, job.PostprocessingProperties.TimeStampFile,
                                                    job.PostprocessingProperties.ChapterInfo, job.PostprocessingProperties.OutputSize,
                                                    job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                    job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(),
                                                    log, job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux,
                                                    job.PostprocessingProperties.AudioTracks.ToArray(), true);
                }

                if (c != null && !String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) &&
                    c.Jobs[c.Jobs.Length - 1].Job is MuxJob && (c.Jobs[c.Jobs.Length - 1].Job as MuxJob).MuxType == MuxerType.MP4BOX)
                {
                    // the last job is an MP4Box job and VFR timecode data has to be applied
                    MP4FpsModJob mp4FpsMod = new MP4FpsModJob(((MuxJob)c.Jobs[c.Jobs.Length - 1].Job).Output, job.PostprocessingProperties.TimeStampFile);
                    c = new SequentialChain(c, new SequentialChain(mp4FpsMod));
                }
            }
            catch (Exception e)
            {
                log.LogValue("An error occurred", e, ImageType.Error);
                bError = true;
            }

            if (c == null || bError)
            {
                log.Error("Job creation aborted");
                su.HasError = true;
            }

            // add cleanup job also in case of an error
            c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
            MainForm.Instance.Jobs.AddJobsWithDependencies(c, false);

            // batch processing other input files if necessary
            if (job.PostprocessingProperties.FilesToProcess.Count > 0)
            {
                OneClickWindow ocw = new OneClickWindow();
                ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
            }

            su.IsComplete = true;
        }
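However the method branches, it funnels into one chain-composition pattern: the jobs from GenerateJobSeries run sequentially, an MP4FpsModJob is appended only when VFR timecodes must be applied, and the cleanup job is added unconditionally, even after an error. A condensed sketch of that pattern (BuildMainChain, needsTimestampFix and the file variables are illustrative stand-ins):

// Condensed sketch of the chain composition used in RunInThread above.
JobChain chain = null;
try
{
    chain = BuildMainChain(); // VideoUtil.GenerateJobSeries(...) in the real method
    if (needsTimestampFix)    // a timestamp file exists and the last job is an MP4Box mux
    {
        chain = new SequentialChain(chain, new SequentialChain(new MP4FpsModJob(muxedOutput, timeStampFile)));
    }
}
catch (Exception e)
{
    log.LogValue("An error occurred", e, ImageType.Error);
}
// the cleanup job is appended unconditionally, even when chain creation failed (chain == null)
chain = CleanupJob.AddAfter(chain, intermediateFiles, finalOutput);
MainForm.Instance.Jobs.AddJobsWithDependencies(chain, false);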
Example #33
        /// <summary>
        /// creates the AVS script file
        /// if the source can be opened, auto-cropping is performed; then, depending on the AR settings,
        /// the proper resolution for automatic resizing is calculated (taking the derived cropping
        /// values into account), and finally the AviSynth script is written and its name returned
        /// </summary>
        /// <param name="indexFile">the index file of the source (D2V, DGI, DGM, FFMS or L-SMASH)</param>
        /// <param name="inputFile">the source video file</param>
        /// <param name="AR">display aspect ratio to be used; null enables auto-detection</param>
        /// <param name="desiredOutputWidth">desired horizontal resolution of the output</param>
        /// <param name="_log">the log item to write to</param>
        /// <param name="avsSettings">the AviSynth profile settings</param>
        /// <param name="autoDeint">whether automatic deinterlacing detection should be used</param>
        /// <param name="settings">the codec settings (used only for x264)</param>
        /// <param name="autoCrop">whether or not autoCrop is used for the input</param>
        /// <param name="keepInputResolution">whether the input resolution must be left untouched</param>
        /// <param name="useChaptersMarks">whether chapter marks should be written to a qpf file (x264 only)</param>
        /// <returns>the name of the AviSynth script created, empty if there was an error</returns>
        private string CreateAVSFile(string indexFile, string inputFile, Dar? AR, int desiredOutputWidth,
                                     LogItem _log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings,
                                     bool autoCrop, bool keepInputResolution, bool useChaptersMarks)
        {
            Dar?            dar = null;
            Dar             customDAR;
            IMediaFile      iMediaFile = null;
            IVideoReader    reader;
            PossibleSources oPossibleSource;
            x264Device      xTargetDevice = null;
            CropValues      cropValues    = new CropValues();

            int outputWidthIncludingPadding  = 0;
            int outputHeightIncludingPadding = 0;
            int outputWidthCropped           = 0;
            int outputHeightCropped          = 0;

            // encode anamorphically either when it is selected in the AVS profile or when the input resolution should not be touched
            bool signalAR = (avsSettings.Mod16Method != mod16Method.none) || keepInputResolution;

            // make sure the proper anamorphic encode is selected if the input resolution should not be touched
            if (keepInputResolution && avsSettings.Mod16Method != mod16Method.nonMod16)
            {
                avsSettings.Mod16Method = mod16Method.nonMod16;
            }

            // open index file to retrieve information
            if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI)
            {
                iMediaFile      = new dgiFile(indexFile);
                oPossibleSource = PossibleSources.dgi;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.D2V)
            {
                iMediaFile      = new d2vFile(indexFile);
                oPossibleSource = PossibleSources.d2v;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM)
            {
                iMediaFile      = new dgmFile(indexFile);
                oPossibleSource = PossibleSources.dgm;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.FFMS)
            {
                iMediaFile      = new ffmsFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.ffindex;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.LSMASH)
            {
                iMediaFile      = new lsmashFile(inputFile, indexFile);
                oPossibleSource = PossibleSources.lsmash;
            }
            else if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.AVISOURCE)
            {
                string tempAvs = "AVISource(\"" + inputFile + "\", audio=false)" + VideoUtil.getAssumeFPS(0, inputFile);
                iMediaFile      = AvsFile.ParseScript(tempAvs, true);
                oPossibleSource = PossibleSources.avisource;
            }
            else
            {
                iMediaFile      = AvsFile.OpenScriptFile(inputFile, true);
                oPossibleSource = PossibleSources.avs;
            }
            reader = iMediaFile.GetVideoReader();

            // abort if the index file is invalid
            if (reader.FrameCount < 1)
            {
                _log.Error("There are " + reader.FrameCount + " frames in the index file. Aborting...");
                return("");
            }

            if (AR == null)
            {
                // AR needs to be detected automatically now
                _log.LogValue("Auto-detect aspect ratio", AR == null);
                customDAR = iMediaFile.VideoInfo.DAR;
                if (customDAR.AR <= 0)
                {
                    customDAR = Dar.ITU16x9PAL;
                    _log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
                }
            }
            else
            {
                customDAR = AR.Value;
            }
            _log.LogValue("Aspect ratio", customDAR);

            // check x264 settings (target device, chapter file)
            if (settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xTargetDevice = xs.TargetDevice;
                _log.LogValue("Target device", xTargetDevice.Name);
            }

            // get mod value for resizing
            int mod = Resolution.GetModValue(avsSettings.ModValue, avsSettings.Mod16Method, signalAR);

            // crop input as it may be required (autoCrop && !keepInputResolution or Blu-Ray)
            if (Autocrop.autocrop(out cropValues, reader, signalAR, avsSettings.Mod16Method, avsSettings.ModValue) == false)
            {
                _log.Error("Autocrop failed. Aborting...");
                return("");
            }

            int inputWidth      = (int)iMediaFile.VideoInfo.Width;
            int inputHeight     = (int)iMediaFile.VideoInfo.Height;
            int inputFPS_D      = (int)iMediaFile.VideoInfo.FPS_D;
            int inputFPS_N      = (int)iMediaFile.VideoInfo.FPS_N;
            int inputFrameCount = (int)iMediaFile.VideoInfo.FrameCount;

            // force destruction of AVS script
            iMediaFile.Dispose();

            Dar? suggestedDar = null;

            if (desiredOutputWidth == 0)
            {
                desiredOutputWidth = outputWidthIncludingPadding = inputWidth;
            }
            else if (!avsSettings.Upsize && desiredOutputWidth > inputWidth)
            {
                outputWidthIncludingPadding = inputWidth;
            }
            else
            {
                outputWidthIncludingPadding = desiredOutputWidth;
            }
            CropValues paddingValues;

            bool resizeEnabled;
            int  outputWidthWithoutUpsizing = outputWidthIncludingPadding;

            if (avsSettings.Upsize)
            {
                resizeEnabled = !keepInputResolution;
                CropValues cropValuesTemp = cropValues.Clone();
                int        outputHeightIncludingPaddingTemp = 0;
                Resolution.GetResolution(inputWidth, inputHeight, customDAR,
                                         ref cropValuesTemp, autoCrop && !keepInputResolution, mod, ref resizeEnabled, false, signalAR, true,
                                         avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D,
                                         ref outputWidthWithoutUpsizing, ref outputHeightIncludingPaddingTemp, out paddingValues, out suggestedDar, _log);
            }

            resizeEnabled = !keepInputResolution;
            Resolution.GetResolution(inputWidth, inputHeight, customDAR,
                                     ref cropValues, autoCrop && !keepInputResolution, mod, ref resizeEnabled, avsSettings.Upsize, signalAR, true,
                                     avsSettings.AcceptableAspectError, xTargetDevice, Convert.ToDouble(inputFPS_N) / inputFPS_D,
                                     ref outputWidthIncludingPadding, ref outputHeightIncludingPadding, out paddingValues, out suggestedDar, _log);
            keepInputResolution = !resizeEnabled;

            if (signalAR && suggestedDar.HasValue)
            {
                dar = suggestedDar;
            }

            // log calculated output resolution
            outputWidthCropped  = outputWidthIncludingPadding - paddingValues.left - paddingValues.right;
            outputHeightCropped = outputHeightIncludingPadding - paddingValues.bottom - paddingValues.top;
            _log.LogValue("Input resolution", inputWidth + "x" + inputHeight);
            _log.LogValue("Desired maximum width", desiredOutputWidth);
            if (!avsSettings.Upsize && outputWidthIncludingPadding < desiredOutputWidth)
            {
                _log.LogEvent("Desired maximum width not reached. Enable upsizing in the AviSynth profile if you want to force it.");
            }
            if (avsSettings.Upsize && outputWidthIncludingPadding > outputWidthWithoutUpsizing)
            {
                _log.LogValue("Desired maximum width reached with upsizing. Target width without upsizing", outputWidthWithoutUpsizing);
            }
            if (cropValues.isCropped())
            {
                _log.LogValue("Autocrop values", cropValues);
                _log.LogValue("Cropped output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            else
            {
                _log.LogValue("Output resolution", outputWidthCropped + "x" + outputHeightCropped);
            }
            if (paddingValues.isCropped())
            {
                _log.LogValue("Padded output resolution", outputWidthIncludingPadding + "x" + outputHeightIncludingPadding);
            }

            // generate the avs script based on the template
            string inputLine        = "#input";
            string deinterlaceLines = "#deinterlace";
            string denoiseLines     = "#denoise";
            string cropLine         = "#crop";
            string resizeLine       = "#resize";

            inputLine = ScriptServer.GetInputLine(
                inputFile, indexFile, false, oPossibleSource, false, false, false, 0,
                avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

            if (IsJobStopped())
            {
                return("");
            }

            _log.LogValue("Automatic deinterlacing", autoDeint);
            if (autoDeint)
            {
                su.Status = "Automatic deinterlacing...   ***PLEASE WAIT***";
                string d2vPath = indexFile;
                _sourceDetector = new SourceDetector(inputLine, d2vPath, avsSettings.PreferAnimeDeinterlace, inputFrameCount,
                                                     Thread.CurrentThread.Priority,
                                                     MainForm.Instance.Settings.SourceDetectorSettings,
                                                     new UpdateSourceDetectionStatus(AnalyseUpdate),
                                                     new FinishedAnalysis(FinishedAnalysis));
                finished = false;
                _sourceDetector.Analyse();
                WaitTillAnalyseFinished();
                _sourceDetector = null;
                if (filters != null)
                {
                    deinterlaceLines = filters[0].Script;
                    if (interlaced)
                    {
                        _log.LogValue("Deinterlacing used", deinterlaceLines, ImageType.Warning);
                    }
                    else
                    {
                        _log.LogValue("Deinterlacing used", deinterlaceLines);
                    }
                }
            }

            if (IsJobStopped())
            {
                return("");
            }

            su.Status = "Finalizing preprocessing...   ***PLEASE WAIT***";

            // get final input filter line
            inputLine = ScriptServer.GetInputLine(
                inputFile, indexFile, interlaced, oPossibleSource, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock,
                false, 0, avsSettings.DSS2, NvDeinterlacerType.nvDeInterlacerNone, 0, 0, null);

            // get crop & resize lines
            if (!keepInputResolution)
            {
                if (autoCrop)
                {
                    cropLine = ScriptServer.GetCropLine(cropValues);
                }
                resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize || outputWidthIncludingPadding > 0 || inputWidth != outputWidthCropped,
                                                        outputWidthCropped, outputHeightCropped, outputWidthIncludingPadding, outputHeightIncludingPadding, (ResizeFilterType)avsSettings.ResizeMethod,
                                                        autoCrop, cropValues, inputWidth, inputHeight);
            }

            // get denoise line
            denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);

            string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);

            if (dar.HasValue)
            {
                newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
            }
            else
            {
                if (xTargetDevice != null && xTargetDevice.BluRay)
                {
                    string       strResolution = outputWidthIncludingPadding + "x" + outputHeightIncludingPadding;
                    x264Settings _xs           = (x264Settings)settings;

                    if (strResolution.Equals("720x480"))
                    {
                        _xs.SampleAR = 4;
                        _log.LogEvent("Set --sar to 10:11 as only 40:33 or 10:11 are supported with a resolution of " +
                                      strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("720x576"))
                    {
                        _xs.SampleAR = 5;
                        _log.LogEvent("Set --sar to 12:11 as only 16:11 or 12:11 are supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1280x720") || strResolution.Equals("1920x1080"))
                    {
                        _xs.SampleAR = 1;
                        _log.LogEvent("Set --sar to 1:1 as only 1:1 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                    else if (strResolution.Equals("1440x1080"))
                    {
                        _xs.SampleAR = 2;
                        _log.LogEvent("Set --sar to 4:3 as only 4:3 is supported with a resolution of "
                                      + strResolution + " as required for " + xTargetDevice.Name + ".");
                    }
                }
            }

            _log.LogValue("Generated AviSynth script", newScript);
            string strOutputAVSFile;

            if (String.IsNullOrEmpty(indexFile))
            {
                strOutputAVSFile = Path.ChangeExtension(Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileName(inputFile)), ".avs");
            }
            else
            {
                strOutputAVSFile = Path.ChangeExtension(indexFile, ".avs");
            }

            try
            {
                using (StreamWriter sw = new StreamWriter(strOutputAVSFile, false, System.Text.Encoding.Default))
                {
                    sw.Write(newScript);
                }
            }
            catch (Exception i)
            {
                _log.LogValue("Error saving AviSynth script", i, ImageType.Error);
                return("");
            }

            JobUtil.GetAllInputProperties(strOutputAVSFile, out ulong numberOfFrames, out double fps, out int fps_n, out int fps_d, out int hres, out int vres, out Dar d, out AviSynthColorspace colorspace);
            _log.LogEvent("resolution: " + hres + "x" + vres);
            _log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
            _log.LogEvent("frames: " + numberOfFrames);
            TimeSpan oTime = TimeSpan.FromSeconds((double)numberOfFrames / fps);

            _log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}",
                                                     (int)(oTime.TotalHours), oTime.Minutes, oTime.Seconds, oTime.Milliseconds));
            _log.LogValue("aspect ratio", d);
            _log.LogValue("color space", colorspace.ToString());

            if (IsJobStopped())
            {
                return("");
            }

            // create qpf file if necessary and possible
            if (job.PostprocessingProperties.ChapterInfo.HasChapters && useChaptersMarks && settings != null && settings is x264Settings)
            {
                fps = (double)fps_n / fps_d;
                string strChapterFile = Path.ChangeExtension(strOutputAVSFile, ".qpf");
                job.PostprocessingProperties.ChapterInfo.ChangeFps(fps);
                if (job.PostprocessingProperties.ChapterInfo.SaveQpfile(strChapterFile))
                {
                    job.PostprocessingProperties.FilesToDelete.Add(strChapterFile);
                    _log.LogValue("qpf file created", strChapterFile);
                    x264Settings xs = (x264Settings)settings;
                    xs.UseQPFile = true;
                    xs.QPFile    = strChapterFile;
                }
            }

            // check if a timestamp file has to be used
            if (!String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) && settings != null && settings is x264Settings)
            {
                x264Settings xs = (x264Settings)settings;
                xs.TCFile = job.PostprocessingProperties.TimeStampFile;
            }

            return(strOutputAVSFile);
        }
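The script itself is assembled from a template whose placeholder markers are replaced by the generated input, crop, resize, denoise and deinterlace lines. A minimal sketch of such a substitution step; the "<...>" marker names are an assumption for illustration, not necessarily the real template syntax:

// Sketch of placeholder-based AVS script assembly (marker names assumed).
static string BuildScriptFromTemplate(string template, string inputLine, string cropLine,
                                      string resizeLine, string denoiseLines, string deinterlaceLines)
{
    return template
        .Replace("<input>", inputLine)
        .Replace("<crop>", cropLine)
        .Replace("<resize>", resizeLine)
        .Replace("<denoise>", denoiseLines)
        .Replace("<deinterlace>", deinterlaceLines);
}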
Example #34
 public VideoUtil(MainForm mainForm)
 {
     this.mainForm = mainForm;
     jobUtil       = new JobUtil(mainForm);
 }
Example #35
 public AutoEncodeWindow(VideoStream videoStream, List<AudioJob> audioStreams, MainForm mainForm, bool prerender, VideoInfo vInfo)
     : this()
 {
     this.vInfo = vInfo;
     mainForm.Log.Add(log);
     this.videoStream = videoStream;
     this.audioStreams = audioStreams;
     this.prerender = prerender;
     this.mainForm = mainForm;
     jobUtil = new JobUtil(mainForm);
     vUtil = new VideoUtil(mainForm);
     muxProvider = mainForm.MuxProvider;
     container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray());
     splitting.MinimumFileSize = new FileSize(Unit.MB, 1);
 }
Example #36
        /// <summary>
        /// handles the go button for automated encoding
        /// checks if we're in automated 2 pass video mode
        /// then the video and audio configuration is checked, and if it checks out
        /// the audio job, video jobs and muxing job are generated, audio and video job are linked
        /// and encoding is started
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void queueButton_Click(object sender, System.EventArgs e)
        {
            if (String.IsNullOrEmpty(this.muxedOutput.Filename))
            {
                return;
            }

            FileSize? desiredSize = targetSize.Value;
            FileSize? splitSize   = splitting.Value;
            LogItem  log         = new LogItem(this.muxedOutput.Filename);

            MainForm.Instance.AutoEncodeLog.Add(log);

            if (FileSizeRadio.Checked)
            {
                log.LogValue("Desired Size", desiredSize);
            }
            else if (averageBitrateRadio.Checked)
            {
                log.LogValue("Projected Bitrate", string.Format("{0}kbps", projectedBitrateKBits.Text));
            }
            else
            {
                log.LogEvent("No Target Size (use profile settings)");
            }

            log.LogValue("Split Size", splitSize);

            MuxStream[]        audio;
            AudioJob[]         aStreams;
            AudioEncoderType[] muxTypes;
            separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);
            MuxStream[]   subtitles   = new MuxStream[0];
            ChapterInfo   chapters    = new ChapterInfo();
            string        videoInput  = vInfo.VideoInput;
            string        videoOutput = vInfo.VideoOutput;
            string        muxedOutput = this.muxedOutput.Filename;
            ContainerType cot         = this.container.SelectedItem as ContainerType;

            // determine audio language
            foreach (MuxStream stream in audio)
            {
                string strLanguage = LanguageSelectionContainer.GetLanguageFromFileName(Path.GetFileNameWithoutExtension(stream.path));
                if (!String.IsNullOrEmpty(strLanguage))
                {
                    stream.language = strLanguage;
                }
            }

            if (addSubsNChapters.Checked)
            {
                AdaptiveMuxWindow amw = new AdaptiveMuxWindow();
                amw.setMinimizedMode(videoOutput, "", videoStream.Settings.EncoderType, JobUtil.getFramerate(videoInput), audio,
                                     muxTypes, muxedOutput, splitSize, cot);
                if (amw.ShowDialog() == DialogResult.OK)
                {
                    amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
                }
                else // user aborted, abort the whole process
                {
                    return;
                }
            }
            removeStreamsToBeEncoded(ref audio, aStreams);
            MainForm.Instance.Jobs.AddJobsWithDependencies(VideoUtil.GenerateJobSeries(videoStream, muxedOutput, aStreams, subtitles, new List <string>(), String.Empty, chapters,
                                                                                       desiredSize, splitSize, cot, prerender, audio, log, device.Text, vInfo.Zones, null, null, false), true);
            this.Close();
        }
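For reference, the "Desired Size" option boils down to the usual size-to-bitrate conversion: target bytes minus audio bytes, times 8, divided by the clip length in seconds. A simplified sketch; the real bitrate calculator additionally accounts for container overhead:

// Simplified size -> video bitrate calculation (container/mux overhead ignored).
static long VideoBitrateKbps(long targetSizeBytes, long audioSizeBytes,
                             ulong frameCount, double frameRate)
{
    double seconds    = frameCount / frameRate;        // clip length in seconds
    long   videoBytes = targetSizeBytes - audioSizeBytes;
    return (long)(videoBytes * 8.0 / (seconds * 1000.0)); // kbit/s
}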
Example #37
        /// <summary>
        /// opens the AviSynth input script of the given job and reads its properties
        /// (frame count, frame rate, resolution, aspect ratio and color space);
        /// the values are logged and stored for the encoder, and a color space
        /// conversion is appended to the script if the encoder requires it
        /// </summary>
        /// <param name="job">the video job whose input script is analyzed</param>
        protected void getInputProperties(VideoJob job)
        {
            log.LogValue("AviSynth input script", GetAVSFileContent());

            double             fps;
            Dar                d = Dar.A1x1;
            AviSynthColorspace colorspace_original;

            JobUtil.GetAllInputProperties(job.Input, out numberOfFrames, out fps, out fps_n, out fps_d, out hres, out vres, out d, out colorspace_original);

            Dar? dar = job.DAR;

            su.NbFramesTotal = numberOfFrames;
            su.ClipLength    = TimeSpan.FromSeconds((double)numberOfFrames / fps);

            if (!job.DAR.HasValue)
            {
                job.DAR = d;
            }

            // log
            if (log == null)
            {
                return;
            }

            log.LogEvent("resolution: " + hres + "x" + vres);
            log.LogEvent("frame rate: " + fps_n + "/" + fps_d);
            log.LogEvent("frames: " + numberOfFrames);
            log.LogEvent("length: " + string.Format("{0:00}:{1:00}:{2:00}.{3:000}",
                                                    (int)(su.ClipLength.Value.TotalHours), su.ClipLength.Value.Minutes, su.ClipLength.Value.Seconds, su.ClipLength.Value.Milliseconds));
            if (dar.HasValue && d.AR == dar.Value.AR)
            {
                log.LogValue("aspect ratio", d);
            }
            else
            {
                log.LogValue("aspect ratio (avs)", d);
                if (dar.HasValue)
                {
                    log.LogValue("aspect ratio (job)", dar.Value);
                }
            }

            // an undefined enum value stringifies to its numeric value, i.e. the
            // color space is not known by name - log it as a warning in that case
            if (Int32.TryParse(colorspace_original.ToString(), out int result))
            {
                log.LogValue("color space", colorspace_original.ToString(), ImageType.Warning);
            }
            else
            {
                log.LogValue("color space", colorspace_original.ToString());
            }

            string strEncoder = "ffmpeg";

            if (this is XviDEncoder)
            {
                strEncoder = "xvid";
            }
            else if (this is x264Encoder && (MainForm.Instance.Settings.IsMeGUIx64 || !MainForm.Instance.Settings.Usex64Tools))
            {
                strEncoder = "x264";
            }

            AviSynthColorspace colorspace_target = AviSynthColorspaceHelper.GetConvertedColorspace(strEncoder, colorspace_original);

            if (colorspace_original != colorspace_target &&
                !AviSynthColorspaceHelper.IsConvertedToColorspace(job.Input, colorspace_target.ToString()))
            {
                if (MainForm.Instance.DialogManager.AddConvertTo(colorspace_original.ToString(), colorspace_target.ToString()))
                {
                    AviSynthColorspaceHelper.AppendConvertTo(job.Input, colorspace_target, colorspace_original);
                    log.LogValue("AviSynth input script (appended)", GetAVSFileContent());

                    // Check everything again, to see if it is all fixed now
                    AviSynthColorspace colorspace_converted;
                    JobUtil.GetAllInputProperties(job.Input, out numberOfFrames, out fps, out fps_n, out fps_d, out hres, out vres, out d, out colorspace_converted);
                    if (colorspace_original != colorspace_converted)
                    {
                        log.LogValue("color space converted", colorspace_converted.ToString());
                    }
                    else
                    {
                        log.LogEvent("color space not supported, conversion failed", ImageType.Error);
                    }
                }
                else
                {
                    log.LogEvent("color space not supported", ImageType.Error);
                }
            }
        }
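The colorspace handling at the end works by appending a conversion call to the input script when the encoder cannot consume the source format. Since AviSynth applies an appended filter line to the implicit "last" clip, the append itself can be as simple as the following sketch (ConvertToYV12 would be one such illustrative target):

// Sketch: append a colorspace conversion to an existing AVS script. AviSynth
// applies the appended line to the implicit "last" clip, converting the
// script's final output. The target name ("YV12" etc.) is illustrative.
static void AppendColorspaceConversion(string avsFile, string targetColorspace)
{
    System.IO.File.AppendAllText(avsFile, "\r\nConvertTo" + targetColorspace + "()");
}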
Example #38
        public static string genCommandline(string input, string output, Dar?d, DivXAVCSettings xs, int hres, int vres, Zone[] zones)
        {
            StringBuilder sb = new StringBuilder();
            CultureInfo   ci = new CultureInfo("en-us");

            sb.Append(" -i \"" + input + "\"");
            sb.Append(" -o \"" + output + "\"");

            switch (xs.EncodingMode)
            {
            case 0:                                                                                // 2pass - 1st pass
                sb.Append(" -npass 1 -sf " + "\"" + xs.Logfile + "\" -br " + xs.BitrateQuantizer); // add logfile
                break;

            case 1:                                                                                // 2pass - 2nd pass
            case 2:                                                                                // Automated 2 pass
                sb.Append(" -npass 2 -sf " + "\"" + xs.Logfile + "\" -br " + xs.BitrateQuantizer); // add logfile
                break;
            }

            double   framerate = 0.0;
            ulong    length    = 0;
            MainForm mainForm  = MainForm.Instance;

            if (!string.IsNullOrEmpty(mainForm.Video.VideoInput))
            {
                JobUtil.getInputProperties(out length, out framerate, mainForm.Video.VideoInput);
                sb.Append(" -fps " + framerate.ToString(ci));
            }

            if (xs.Turbo)
            {
                xs.AQO          = 0;
                xs.Pyramid      = false;
                xs.BasRef       = false;
                xs.MaxRefFrames = 1;
                xs.MaxBFrames   = 0;
            }

            if (xs.InterlaceMode != 0)
            {
                sb.Append(" -fmode " + xs.InterlaceMode);
            }
            if (xs.AQO != 1)
            {
                sb.Append(" -aqo " + xs.AQO);
            }
            if (xs.GOPLength != 4)
            {
                sb.Append(" -I " + xs.GOPLength);
            }
            if (xs.MaxBFrames != 2)
            {
                sb.Append(" -bf " + xs.MaxBFrames);
            }
            if (xs.MaxRefFrames != 4)
            {
                sb.Append(" -ref " + xs.MaxRefFrames);
            }
            if (xs.BasRef)
            {
                sb.Append(" -bref");
            }
            if (xs.Pyramid)
            {
                sb.Append(" -pyramid");
            }

            sb.Append(" -threads " + xs.NbThreads);

            return(sb.ToString());
        }
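For illustration: a first pass (EncodingMode 0) with a bitrate of 1200 kbit/s, two threads, every other option at the defaults checked above, and a loaded video input reporting 23.976 fps would yield a command line along these lines (all values hypothetical):

 -i "input.avs" -o "output.264" -npass 1 -sf "pass.log" -br 1200 -fps 23.976 -threads 2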