Example #1
0
        protected override void RunInThread()
        {
            JobChain      c = null;
            List <string> intermediateFiles = new List <string>();
            bool          bError            = false;

            try
            {
                log.LogEvent("Processing thread started");
                su.Status = "Preprocessing...   ***PLEASE WAIT***";
                su.ResetTime();

                List <string> arrAudioFilesDelete = new List <string>();
                audioFiles = new Dictionary <int, string>();
                List <AudioTrackInfo> arrAudioTracks = new List <AudioTrackInfo>();
                List <AudioJob>       arrAudioJobs   = new List <AudioJob>();
                List <MuxStream>      arrMuxStreams  = new List <MuxStream>();
                FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);

                // audio handling
                foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
                {
                    if (IsJobStopped())
                    {
                        return;
                    }

                    if (oAudioTrack.AudioTrackInfo != null)
                    {
                        if (oAudioTrack.AudioTrackInfo.ExtractMKVTrack)
                        {
                            if (job.PostprocessingProperties.ApplyDelayCorrection && File.Exists(job.PostprocessingProperties.IntermediateMKVFile))
                            {
                                MediaInfoFile oFile  = new MediaInfoFile(job.PostprocessingProperties.IntermediateMKVFile, ref log);
                                bool          bFound = false;
                                foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks)
                                {
                                    if (oAudioInfo.MMGTrackID == oAudioTrack.AudioTrackInfo.MMGTrackID)
                                    {
                                        bFound = true;
                                    }
                                }
                                int mmgTrackID = 0;
                                if (!bFound)
                                {
                                    mmgTrackID = oFile.AudioInfo.Tracks[oAudioTrack.AudioTrackInfo.TrackIndex].MMGTrackID;
                                }
                                else
                                {
                                    mmgTrackID = oAudioTrack.AudioTrackInfo.MMGTrackID;
                                }
                                foreach (AudioTrackInfo oAudioInfo in oFile.AudioInfo.Tracks)
                                {
                                    if (oAudioInfo.MMGTrackID == mmgTrackID)
                                    {
                                        if (oAudioTrack.DirectMuxAudio != null)
                                        {
                                            oAudioTrack.DirectMuxAudio.delay = oAudioInfo.Delay;
                                        }
                                        if (oAudioTrack.AudioJob != null)
                                        {
                                            oAudioTrack.AudioJob.Delay = oAudioInfo.Delay;
                                        }
                                        break;
                                    }
                                }
                            }
                            if (!audioFiles.ContainsKey(oAudioTrack.AudioTrackInfo.TrackID))
                            {
                                audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                                arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                            }
                        }
                        else
                        {
                            arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
                        }
                    }
                    if (oAudioTrack.AudioJob != null)
                    {
                        if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE &&
                            String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                        {
                            oAudioTrack.AudioJob.Input = job.Input;
                        }
                        arrAudioJobs.Add(oAudioTrack.AudioJob);
                    }
                    if (oAudioTrack.DirectMuxAudio != null)
                    {
                        arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
                    }
                }
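                // no audio extracted from the source so far: pick up the audio files created during indexing
                // (via the DGI/DGM log file if available, otherwise via the regular demuxed files)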
                if (audioFiles.Count == 0 && !job.PostprocessingProperties.Eac3toDemux &&
                    job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE &&
                    job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.AVISOURCE)
                {
                    if ((job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGI || job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.DGM) &&
                        File.Exists(Path.ChangeExtension(job.IndexFile, ".log")))
                    {
                        job.PostprocessingProperties.FilesToDelete.Add(Path.ChangeExtension(job.IndexFile, ".log"));
                        audioFiles = AudioUtil.GetAllDemuxedAudioFromDGI(arrAudioTracks, out arrAudioFilesDelete, job.IndexFile, log);
                    }
                    else
                    {
                        audioFiles = VideoUtil.getAllDemuxedAudio(arrAudioTracks, new List <AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, log);
                    }
                }

                FillInAudioInformation(ref arrAudioJobs, arrMuxStreams);

                if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    log.LogEvent("Don't encode video: True");
                }
                else
                {
                    log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
                }
                log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);

                if (IsJobStopped())
                {
                    return;
                }

                // video file handling
                string             avsFile       = String.Empty;
                VideoStream        myVideo       = new VideoStream();
                VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
                if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    //Open the video
                    try
                    {
                        avsFile = CreateAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR,
                                                job.PostprocessingProperties.HorizontalOutputResolution, log,
                                                job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings,
                                                job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                                                job.PostprocessingProperties.UseChaptersMarks);
                    }
                    catch (Exception ex)
                    {
                        log.LogValue("An error occurred creating the AVS file", ex, ImageType.Error);
                    }

                    if (IsJobStopped())
                    {
                        return;
                    }

                    if (!String.IsNullOrEmpty(avsFile))
                    {
                        // check AVS file
                        JobUtil.GetInputProperties(avsFile, out ulong frameCount, out double frameRate);

                        myVideo.Input          = avsFile;
                        myVideo.Output         = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.Input) + "_Video");
                        myVideo.NumberOfFrames = frameCount;
                        myVideo.Framerate      = (decimal)frameRate;
                        myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
                        myVideo.Settings       = videoSettings;
                    }
                    else
                    {
                        bError = true;
                    }
                }
                else
                {
                    myVideo.DAR    = job.PostprocessingProperties.ForcedDAR;
                    myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
                    MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref log);
                    if (Path.GetExtension(job.PostprocessingProperties.VideoFileToMux).Equals(".unknown") && !String.IsNullOrEmpty(oInfo.ContainerFileTypeString))
                    {
                        job.PostprocessingProperties.VideoFileToMux = Path.ChangeExtension(job.PostprocessingProperties.VideoFileToMux, oInfo.ContainerFileTypeString.ToLowerInvariant());
                        File.Move(myVideo.Output, job.PostprocessingProperties.VideoFileToMux);
                        myVideo.Output = job.PostprocessingProperties.VideoFileToMux;
                        job.PostprocessingProperties.FilesToDelete.Add(myVideo.Output);
                    }

                    myVideo.Settings       = videoSettings;
                    myVideo.Framerate      = (decimal)oInfo.VideoInfo.FPS;
                    myVideo.NumberOfFrames = oInfo.VideoInfo.FrameCount;
                }

                if (IsJobStopped())
                {
                    return;
                }

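                // collect the intermediate files so that the cleanup job can remove them later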
                intermediateFiles.Add(avsFile);
                intermediateFiles.Add(job.IndexFile);
                intermediateFiles.AddRange(audioFiles.Values);
                foreach (string file in arrAudioFilesDelete)
                {
                    intermediateFiles.Add(file);
                }
                intermediateFiles.Add(Path.ChangeExtension(job.Input, ".log"));
                foreach (string file in job.PostprocessingProperties.FilesToDelete)
                {
                    intermediateFiles.Add(file);
                }

                // subtitle handling
                List <MuxStream> subtitles = new List <MuxStream>();
                if (job.PostprocessingProperties.SubtitleTracks.Count > 0)
                {
                    foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
                    {
                        if (oTrack.TrackInfo.ExtractMKVTrack)
                        {
                            //demuxed MKV
                            string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                            if (File.Exists(trackFile))
                            {
                                intermediateFiles.Add(trackFile);
                                if (Path.GetExtension(trackFile).ToLowerInvariant().Equals(".idx"))
                                {
                                    intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                                }

                                subtitles.Add(new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null));
                            }
                            else
                            {
                                log.LogEvent("Ignoring subtitle as the it cannot be found: " + trackFile, ImageType.Warning);
                            }
                        }
                        else
                        {
                            // sometimes the language is detected differently by VSRip and the IFO parser, therefore also search for other matching files
                            string strDemuxFile = oTrack.DemuxFilePath;
                            if (!File.Exists(strDemuxFile) && Path.GetFileNameWithoutExtension(strDemuxFile).Contains("_"))
                            {
                                string strDemuxFileName = Path.GetFileNameWithoutExtension(strDemuxFile);
                                strDemuxFileName = strDemuxFileName.Substring(0, strDemuxFileName.LastIndexOf("_")) + "_*" + Path.GetExtension(strDemuxFile);
                                foreach (string strFileName in Directory.GetFiles(Path.GetDirectoryName(strDemuxFile), strDemuxFileName))
                                {
                                    strDemuxFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), strFileName);
                                    intermediateFiles.Add(strDemuxFile);
                                    intermediateFiles.Add(Path.ChangeExtension(strDemuxFile, ".sub"));
                                    log.LogEvent("Subtitle + " + oTrack.DemuxFilePath + " cannot be found. " + strFileName + " will be used instead", ImageType.Information);
                                    break;
                                }
                            }
                            if (File.Exists(strDemuxFile))
                            {
                                string strTrackName = oTrack.Name;

                                // check if a forced stream is available
                                string strForcedFile = Path.Combine(Path.GetDirectoryName(strDemuxFile), Path.GetFileNameWithoutExtension(strDemuxFile) + "_forced.idx");
                                if (File.Exists(strForcedFile))
                                {
                                    subtitles.Add(new MuxStream(strForcedFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(true, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, true, null));
                                    intermediateFiles.Add(strForcedFile);
                                    intermediateFiles.Add(Path.ChangeExtension(strForcedFile, ".sub"));
                                }
                                subtitles.Add(new MuxStream(strDemuxFile, oTrack.Language, SubtitleUtil.ApplyForcedStringToTrackName(false, oTrack.Name), oTrack.Delay, oTrack.DefaultStream, (File.Exists(strForcedFile) ? false : oTrack.ForcedStream), null));
                            }
                            else
                            {
                                log.LogEvent("Ignoring subtitle as the it cannot be found: " + oTrack.DemuxFilePath, ImageType.Warning);
                            }
                        }
                    }
                }

                if (IsJobStopped())
                {
                    return;
                }

                if (!bError)
                {
                    c = VideoUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(),
                                                    subtitles.ToArray(), job.PostprocessingProperties.Attachments, job.PostprocessingProperties.TimeStampFile,
                                                    job.PostprocessingProperties.ChapterInfo, job.PostprocessingProperties.OutputSize,
                                                    job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                    job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(),
                                                    log, job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux,
                                                    job.PostprocessingProperties.AudioTracks.ToArray(), true);
                }

                if (c != null && !String.IsNullOrEmpty(job.PostprocessingProperties.TimeStampFile) &&
                    c.Jobs[c.Jobs.Length - 1].Job is MuxJob && (c.Jobs[c.Jobs.Length - 1].Job as MuxJob).MuxType == MuxerType.MP4BOX)
                {
                    // the last job is an MP4Box job and VFR timecode data has to be applied
                    MP4FpsModJob mp4FpsMod = new MP4FpsModJob(((MuxJob)c.Jobs[c.Jobs.Length - 1].Job).Output, job.PostprocessingProperties.TimeStampFile);
                    c = new SequentialChain(c, new SequentialChain(mp4FpsMod));
                }
            }
            catch (Exception e)
            {
                log.LogValue("An error occurred", e, ImageType.Error);
                bError = true;
            }

            if (c == null || bError)
            {
                log.Error("Job creation aborted");
                su.HasError = true;
            }

            // add cleanup job also in case of an error
            c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
            MainForm.Instance.Jobs.AddJobsWithDependencies(c, false);

            // batch processing other input files if necessary
            if (job.PostprocessingProperties.FilesToProcess.Count > 0)
            {
                OneClickWindow ocw = new OneClickWindow();
                ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
            }

            su.IsComplete = true;
        }
Example #2
0
        private void StartPostProcessing()
        {
            Thread t = null;

            try
            {
                _log.LogEvent("Processing thread started");
                raiseEvent("Preprocessing...   ***PLEASE WAIT***");
                _start = DateTime.Now;
                t      = new Thread(new ThreadStart(delegate
                {
                    while (true)
                    {
                        updateTime();
                        Thread.Sleep(1000);
                    }
                }));
                t.Start();

                List <string> arrAudioFilesDelete = new List <string>();
                audioFiles = new Dictionary <int, string>();
                List <AudioTrackInfo> arrAudioTracks    = new List <AudioTrackInfo>();
                List <AudioJob>       arrAudioJobs      = new List <AudioJob>();
                List <MuxStream>      arrMuxStreams     = new List <MuxStream>();
                List <string>         intermediateFiles = new List <string>();

                FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory);
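                // audio handling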
                foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks)
                {
                    if (oAudioTrack.ExtractMKVTrack)
                    {
                        audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                        arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName);
                    }
                    else if (oAudioTrack.AudioTrackInfo != null)
                    {
                        arrAudioTracks.Add(oAudioTrack.AudioTrackInfo);
                    }
                    if (oAudioTrack.AudioJob != null)
                    {
                        if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE &&
                            String.IsNullOrEmpty(oAudioTrack.AudioJob.Input))
                        {
                            oAudioTrack.AudioJob.Input = job.Input;
                        }
                        arrAudioJobs.Add(oAudioTrack.AudioJob);
                    }
                    if (oAudioTrack.DirectMuxAudio != null)
                    {
                        arrMuxStreams.Add(oAudioTrack.DirectMuxAudio);
                    }
                }
                if (audioFiles.Count == 0 && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE && !job.PostprocessingProperties.Eac3toDemux)
                {
                    audioFiles = vUtil.getAllDemuxedAudio(arrAudioTracks, new List <AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, _log);
                }

                fillInAudioInformation(arrAudioJobs, arrMuxStreams);

                if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    _log.LogEvent("Don't encode video: True");
                }
                else
                {
                    _log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize);
                }
                _log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting);


                // chapter file handling
                if (String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
                {
                    job.PostprocessingProperties.ChapterFile = null;
                }
                else if (job.PostprocessingProperties.Container == ContainerType.AVI)
                {
                    _log.LogEvent("Chapter handling disabled because of the AVI target container");
                    job.PostprocessingProperties.ChapterFile = null;
                }
                else if (!File.Exists(job.PostprocessingProperties.ChapterFile))
                {
                    if (job.PostprocessingProperties.ChapterFile.StartsWith("<") || job.PostprocessingProperties.ChapterExtracted)
                    {
                        // internal chapter file
                        string strTempFile = job.PostprocessingProperties.ChapterFile;
                        if (Path.GetExtension(job.PostprocessingProperties.VideoInput).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".mkv"))
                        {
                            MediaInfoFile oInfo = new MediaInfoFile(job.PostprocessingProperties.VideoInput, ref _log);
                            if (oInfo.hasMKVChapters())
                            {
                                job.PostprocessingProperties.ChapterFile = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.IndexFile) + " - Chapter Information.txt");
                                if (oInfo.extractMKVChapters(job.PostprocessingProperties.ChapterFile))
                                {
                                    intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                                    job.PostprocessingProperties.ChapterExtracted = true;
                                }
                                else
                                {
                                    job.PostprocessingProperties.ChapterFile = strTempFile;
                                }
                            }
                        }
                        else if (File.Exists(job.PostprocessingProperties.IFOInput))
                        {
                            job.PostprocessingProperties.ChapterFile = VideoUtil.getChaptersFromIFO(job.PostprocessingProperties.IFOInput, false, job.PostprocessingProperties.WorkingDirectory, job.PostprocessingProperties.TitleNumberToProcess);
                            if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile))
                            {
                                intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                                job.PostprocessingProperties.ChapterExtracted = true;
                            }
                            else
                            {
                                job.PostprocessingProperties.ChapterFile = strTempFile;
                            }
                        }
                    }
                    if (!File.Exists(job.PostprocessingProperties.ChapterFile))
                    {
                        _log.LogEvent("File not found: " + job.PostprocessingProperties.ChapterFile, ImageType.Error);
                        job.PostprocessingProperties.ChapterFile = null;
                    }
                }
                else if (job.PostprocessingProperties.ChapterExtracted)
                {
                    intermediateFiles.Add(job.PostprocessingProperties.ChapterFile);
                }

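                // video file handling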
                string             avsFile       = String.Empty;
                VideoStream        myVideo       = new VideoStream();
                VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;
                if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    //Open the video
                    Dar? dar;
                    avsFile = createAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR,
                                            job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, _log,
                                            job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar,
                                            job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                                            job.PostprocessingProperties.UseChaptersMarks);

                    ulong  length;
                    double framerate;
                    JobUtil.getInputProperties(out length, out framerate, avsFile);
                    myVideo.Input  = avsFile;
                    myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory,
                                                  Path.GetFileNameWithoutExtension(job.Input) + "_Video");
                    myVideo.NumberOfFrames = length;
                    myVideo.Framerate      = (decimal)framerate;
                    myVideo.DAR            = dar;
                    myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
                    myVideo.Settings       = videoSettings;
                }
                else
                {
                    myVideo.Output   = job.PostprocessingProperties.VideoFileToMux;
                    myVideo.Settings = videoSettings;

                    MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref _log);
                    videoSettings.VideoName = oInfo.VideoInfo.Track.Name;
                    myVideo.Framerate       = (decimal)oInfo.VideoInfo.FPS;
                }

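                // collect the intermediate files so that the cleanup job can remove them later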
                intermediateFiles.Add(avsFile);
                intermediateFiles.Add(job.IndexFile);
                intermediateFiles.AddRange(audioFiles.Values);
                if (!string.IsNullOrEmpty(qpfile))
                {
                    intermediateFiles.Add(qpfile);
                }
                foreach (string file in arrAudioFilesDelete)
                {
                    intermediateFiles.Add(file);
                }
                if (File.Exists(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log")))
                {
                    intermediateFiles.Add(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log"));
                }
                foreach (string file in job.PostprocessingProperties.FilesToDelete)
                {
                    intermediateFiles.Add(file);
                }

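                // subtitle handling and job creation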
                if (!string.IsNullOrEmpty(avsFile) || !String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux))
                {
                    MuxStream[] subtitles;
                    if (job.PostprocessingProperties.SubtitleTracks.Count == 0)
                    {
                        //Create empty subtitles for muxing
                        subtitles = new MuxStream[0];
                    }
                    else
                    {
                        subtitles = new MuxStream[job.PostprocessingProperties.SubtitleTracks.Count];
                        int i = 0;
                        foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks)
                        {
                            if (oTrack.TrackInfo.IsMKVContainer())
                            {
                                //demuxed MKV
                                string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName;
                                if (File.Exists(trackFile))
                                {
                                    intermediateFiles.Add(trackFile);
                                    if (Path.GetExtension(trackFile).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".idx"))
                                    {
                                        intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub");
                                    }

                                    subtitles[i] = new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                                }
                                else
                                {
                                    _log.LogEvent("File not found: " + trackFile, ImageType.Error);
                                }
                            }
                            else
                            {
                                subtitles[i] = new MuxStream(oTrack.DemuxFilePath, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null);
                            }
                            i++;
                        }
                    }

                    JobChain c = vUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(),
                                                         subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                                                         job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                         job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(),
                                                         _log, job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux, job.PostprocessingProperties.AudioTracks.ToArray());
                    if (c == null)
                    {
                        _log.Warn("Job creation aborted");
                        return;
                    }

                    c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput);
                    mainForm.Jobs.addJobsWithDependencies(c);

                    // batch processing other input files if necessary
                    if (job.PostprocessingProperties.FilesToProcess.Count > 0)
                    {
                        OneClickWindow ocw = new OneClickWindow(mainForm);
                        ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting);
                    }
                }
            }
            catch (Exception e)
            {
                t.Abort();
                if (e is ThreadAbortException)
                {
                    _log.LogEvent("Aborting...");
                    su.WasAborted = true;
                    su.IsComplete = true;
                    raiseEvent();
                }
                else
                {
                    _log.LogValue("An error occurred", e, ImageType.Error);
                    su.HasError   = true;
                    su.IsComplete = true;
                    raiseEvent();
                }
                return;
            }
            t.Abort();
            su.IsComplete = true;
            raiseEvent();
        }
Example #3
0
        public JobChain GenerateMuxJobs(VideoStream video, decimal? framerate, MuxStream[] audioStreamsArray,
                                        MuxableType[] audioTypes, MuxStream[] subtitleStreamsArray, MuxableType[] subTypes,
                                        string chapterFile, MuxableType chapterInputType, ContainerType container, string output,
                                        FileSize? splitSize, List <string> inputsToDelete, string deviceType, MuxableType deviceOutputType, bool alwaysMuxOutput)
        {
            Debug.Assert(splitSize == null || splitSize.Value != FileSize.Empty);

            MuxProvider        prov           = mainForm.MuxProvider;
            List <MuxableType> allTypes       = new List <MuxableType>();
            List <MuxableType> tempTypes      = new List <MuxableType>();
            List <MuxableType> duplicateTypes = new List <MuxableType>();

            tempTypes.AddRange(audioTypes);
            tempTypes.AddRange(subTypes);
            allTypes.Add(video.VideoType);

            // remove duplicate entries to speed up the process
            foreach (MuxableType oType in tempTypes)
            {
                bool bFound = false;
                foreach (MuxableType oAllType in allTypes)
                {
                    if (oType.outputType.ID.Equals(oAllType.outputType.ID))
                    {
                        bFound = true;
                        break;
                    }
                }
                if (!bFound)
                {
                    allTypes.Add(oType);
                }
                else
                {
                    duplicateTypes.Add(oType);
                }
            }
            if (chapterInputType != null)
            {
                allTypes.Add(chapterInputType);
            }
            if (deviceOutputType != null)
            {
                allTypes.Add(deviceOutputType);
            }

            // get mux path
            MuxPath muxPath = prov.GetMuxPath(container, alwaysMuxOutput || splitSize.HasValue, allTypes.ToArray());

            // add duplicate entries back into the mux path
            muxPath.InitialInputTypes.AddRange(duplicateTypes);
            while (duplicateTypes.Count > 0)
            {
                int iPath = 0;
                for (int i = 0; i < muxPath.Length; i++)
                {
                    foreach (MuxableType oType in muxPath[i].handledInputTypes)
                    {
                        if (oType.outputType.ID.Equals(duplicateTypes[0].outputType.ID))
                        {
                            iPath = i;
                        }
                    }
                }
                muxPath[iPath].handledInputTypes.Add(duplicateTypes[0]);
                duplicateTypes.RemoveAt(0);
            }

            List <MuxJob>    jobs            = new List <MuxJob>();
            List <MuxStream> subtitleStreams = new List <MuxStream>(subtitleStreamsArray);
            List <MuxStream> audioStreams    = new List <MuxStream>(audioStreamsArray);
            int    index          = 0;
            int    tempNumber     = 1;
            string previousOutput = null;

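            // walk the mux path: each leg becomes one mux job, and the output of a leg is fed as input into the next one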
            foreach (MuxPathLeg mpl in muxPath)
            {
                List <string> filesToDeleteThisJob = new List <string>();

                MuxJob mjob = new MuxJob();

                if (previousOutput != null)
                {
                    mjob.Settings.MuxedInput = previousOutput;
                    filesToDeleteThisJob.Add(previousOutput);
                }

                if (video.Settings != null)
                {
                    mjob.NbOfBFrames        = video.Settings.NbBframes;
                    mjob.Codec              = video.Settings.Codec.ToString();
                    mjob.Settings.VideoName = video.Settings.VideoName;
                }
                mjob.NbOfFrames = video.NumberOfFrames;
                string fpsFormated = String.Format("{0:##.###}", framerate); // this formatting is required for mkvmerge at least to avoid fps rounding errors
                mjob.Settings.Framerate = Convert.ToDecimal(fpsFormated);

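                // intermediate legs write to a numbered temporary file; only the last leg writes to the requested output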
                string tempOutputName = Path.Combine(Path.GetDirectoryName(output),
                                                     Path.GetFileNameWithoutExtension(output) + tempNumber + ".");
                tempNumber++;
                foreach (MuxableType o in mpl.handledInputTypes)
                {
                    if (o.outputType is VideoType)
                    {
                        mjob.Settings.VideoInput = video.Output;
                        if (inputsToDelete.Contains(video.Output))
                        {
                            filesToDeleteThisJob.Add(video.Output);
                        }
                        mjob.Settings.DAR = video.DAR;
                    }
                    else if (o.outputType is AudioType)
                    {
                        MuxStream stream = audioStreams.Find(delegate(MuxStream m)
                        {
                            return(VideoUtil.guessAudioType(m.path) == o.outputType);
                        });

                        if (stream != null)
                        {
                            mjob.Settings.AudioStreams.Add(stream);
                            audioStreams.Remove(stream);

                            if (inputsToDelete.Contains(stream.path))
                            {
                                filesToDeleteThisJob.Add(stream.path);
                            }
                        }
                    }
                    else if (o.outputType is SubtitleType)
                    {
                        MuxStream stream = subtitleStreams.Find(delegate(MuxStream m)
                        {
                            return(VideoUtil.guessSubtitleType(m.path) == o.outputType);
                        });

                        if (stream != null)
                        {
                            mjob.Settings.SubtitleStreams.Add(stream);
                            subtitleStreams.Remove(stream);

                            if (inputsToDelete.Contains(stream.path))
                            {
                                filesToDeleteThisJob.Add(stream.path);
                            }
                        }
                    }
                    else if (o.outputType is ChapterType)
                    {
                        if ((VideoUtil.guessChapterType(chapterFile) == o.outputType))
                        {
                            mjob.Settings.ChapterFile = chapterFile;
                        }
                        if (inputsToDelete.Contains(chapterFile))
                        {
                            filesToDeleteThisJob.Add(chapterFile);
                        }
                    }
                    else if (o.outputType is DeviceType)
                    {
                        if ((VideoUtil.guessDeviceType(deviceType) == o.outputType))
                        {
                            mjob.Settings.DeviceType = deviceType;
                        }
                    }
                }
                foreach (MuxStream s in mjob.Settings.AudioStreams)
                {
                    audioStreams.Remove(s);
                }
                foreach (MuxStream s in mjob.Settings.SubtitleStreams)
                {
                    subtitleStreams.Remove(s);
                }
                mjob.FilesToDelete.AddRange(filesToDeleteThisJob);
                if (index == muxPath.Length - 1)
                {
                    mjob.Settings.MuxedOutput = output;
                    mjob.Settings.SplitSize   = splitSize;
                    mjob.Settings.DAR         = video.DAR;
                    mjob.ContainerType        = container;
                }
                else
                {
                    ContainerType cot = mpl.muxerInterface.GetContainersInCommon(muxPath[index + 1].muxerInterface)[0];
                    mjob.Settings.MuxedOutput = tempOutputName + cot.Extension;
                    mjob.ContainerType        = cot;
                }
                previousOutput = mjob.Settings.MuxedOutput;
                index++;
                jobs.Add(mjob);
                if (string.IsNullOrEmpty(mjob.Settings.VideoInput))
                {
                    mjob.Input = mjob.Settings.MuxedInput;
                }
                else
                {
                    mjob.Input = mjob.Settings.VideoInput;
                }
                mjob.Output  = mjob.Settings.MuxedOutput;
                mjob.MuxType = mpl.muxerInterface.MuxerType;
            }

            return(new SequentialChain(jobs.ToArray()));
        }
Example #4
0
        public JobChain GenerateJobSeries(VideoStream video, string muxedOutput, AudioJob[] audioStreams,
                                          MuxStream[] subtitles, string chapters, FileSize? desiredSize, FileSize? splitSize, ContainerType container, bool prerender, MuxStream[] muxOnlyAudio)
        {
            StringBuilder logBuilder = new StringBuilder();

            if (desiredSize.HasValue)
            {
                logBuilder.Append("Generating jobs. Desired size: " + desiredSize.Value.ToString() + "\r\n");
                if (video.Settings.EncodingMode != 4 && video.Settings.EncodingMode != 8) // no automated 2/3 pass
                {
                    if (this.mainForm.Settings.NbPasses == 2)
                    {
                        video.Settings.EncodingMode = 4; // automated 2 pass
                    }
                    else if (video.Settings.MaxNumberOfPasses == 3)
                    {
                        video.Settings.EncodingMode = 8;
                    }
                }
            }
            else
            {
                logBuilder.Append("Generating jobs. No desired size.\r\n");
            }

            fixFileNameExtensions(video, audioStreams, container);
            string videoOutput = video.Output;

            logBuilder.Append(eliminatedDuplicateFilenames(ref videoOutput, ref muxedOutput, audioStreams));
            video.Output = videoOutput;

            JobChain vjobs = jobUtil.prepareVideoJob(video.Input, video.Output, video.Settings, video.DAR, prerender, true);

            if (vjobs == null)
            {
                return(null);
            }

            /* Here, we guess the types of the files based on extension.
             * This is guaranteed to work with MeGUI-encoded files, because
             * the extension will always be recognised. For non-MeGUI files,
             * we can only ever hope.*/
            List <MuxStream>   allAudioToMux      = new List <MuxStream>();
            List <MuxableType> allInputAudioTypes = new List <MuxableType>();

            foreach (MuxStream muxStream in muxOnlyAudio)
            {
                if (VideoUtil.guessAudioMuxableType(muxStream.path, true) != null)
                {
                    allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(muxStream.path, true));
                    allAudioToMux.Add(muxStream);
                }
            }

            foreach (AudioJob stream in audioStreams)
            {
                allAudioToMux.Add(stream.ToMuxStream());
                allInputAudioTypes.Add(stream.ToMuxableType());
            }


            List <MuxableType> allInputSubtitleTypes = new List <MuxableType>();

            foreach (MuxStream muxStream in subtitles)
            {
                if (VideoUtil.guessSubtitleType(muxStream.path) != null)
                {
                    allInputSubtitleTypes.Add(new MuxableType(VideoUtil.guessSubtitleType(muxStream.path), null));
                }
            }

            MuxableType chapterInputType = null;

            if (!String.IsNullOrEmpty(chapters))
            {
                ChapterType type = VideoUtil.guessChapterType(chapters);
                if (type != null)
                {
                    chapterInputType = new MuxableType(type, null);
                }
            }

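            // create the mux jobs based on the collected input types and streams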
            JobChain muxJobs = this.jobUtil.GenerateMuxJobs(video, video.Framerate, allAudioToMux.ToArray(), allInputAudioTypes.ToArray(),
                                                            subtitles, allInputSubtitleTypes.ToArray(), chapters, chapterInputType, container, muxedOutput, splitSize, true);



            /*                foreach (Job mJob in muxJobs)
             *                  foreach (Job job in jobs)
             *                      mJob.AddDependency(job);*/


            /*
             * foreach (VideoJob job in vjobs)
             * {
             *  jobs.Add(job);
             * }
             * foreach (MuxJob job in muxJobs)
             * {
             *  jobs.Add(job);
             * }
             */

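            // with a desired size, attach the bitrate calculation info to the first video job so the video bitrate can be calculated later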
            if (desiredSize.HasValue)
            {
                /*                    if (encodedAudioPresent) // no audio encoding, we can calculate the video bitrate directly
                 *                  {
                 *                      logBuilder.Append("No audio encoding. Calculating desired video bitrate directly.\r\n");
                 *                      List<AudioStream> calculationAudioStreams = new List<AudioStream>();
                 *                      foreach (SubStream stream in muxOnlyAudio)
                 *                      {
                 *                          FileInfo fi = new FileInfo(stream.path);
                 *                          AudioStream newStream = new AudioStream();
                 *                          newStream.SizeBytes = fi.Length;
                 *                          newStream.Type = guessAudioType(stream.path);
                 *                          newStream.BitrateMode = BitrateManagementMode.VBR;
                 *                          calculationAudioStreams.Add(newStream);
                 *                          logBuilder.Append("Encoded audio file is present: " + stream.path +
                 *                              " It has a size of " + fi.Length + " bytes. \r\n");
                 *                      }
                 *
                 *                      long videoSizeKB;
                 *                      bool useBframes = false;
                 *                      if (video.Settings.NbBframes > 0)
                 *                          useBframes = true;
                 *
                 *                      bitrateKBits = calc.CalculateBitrateKBits(video.Settings.Codec, useBframes, container, calculationAudioStreams.ToArray(),
                 *                          desiredSizeBytes, video.NumberOfFrames, video.Framerate, out videoSizeKB);
                 *                      desiredSizeBytes = (long)videoSizeKB * 1024L; // convert kb back to bytes
                 *                      logBuilder.Append("Setting video bitrate for the video jobs to " + bitrateKBits + " kbit/s\r\n");
                 *                      foreach (VideoJob vJob in vjobs)
                 *                      {
                 *                          jobUtil.updateVideoBitrate(vJob, bitrateKBits);
                 *                      }
                 *                  }*/
                BitrateCalculationInfo b = new BitrateCalculationInfo();

                List <string> audiofiles = new List <string>();
                foreach (MuxStream s in allAudioToMux)
                {
                    audiofiles.Add(s.path);
                }
                b.AudioFiles = audiofiles;

                b.Container   = container;
                b.VideoJobs   = new List <TaggedJob>(vjobs.Jobs);
                b.DesiredSize = desiredSize.Value;
                ((VideoJob)vjobs.Jobs[0].Job).BitrateCalculationInfo = b;
            }
            mainForm.addToLog(logBuilder.ToString());


            return
                (new SequentialChain(
                     new ParallelChain((Job[])audioStreams),
                     new SequentialChain(vjobs),
                     new SequentialChain(muxJobs)));
        }
Example #5
0
        public JobChain GenerateMuxJobs(VideoStream video, decimal? framerate, MuxStream[] audioStreamsArray, MuxableType[] audioTypes,
                                        MuxStream[] subtitleStreamsArray, MuxableType[] subTypes,
                                        string chapterFile, MuxableType chapterInputType, ContainerType container, string output, FileSize? splitSize, List <string> inputsToDelete)
        {
            Debug.Assert(splitSize == null || splitSize.Value != FileSize.Empty);

            MuxProvider        prov     = mainForm.MuxProvider;
            List <MuxableType> allTypes = new List <MuxableType>();

            allTypes.Add(video.VideoType);
            allTypes.AddRange(audioTypes);
            allTypes.AddRange(subTypes);
            if (chapterInputType != null)
            {
                allTypes.Add(chapterInputType);
            }
            MuxPath          muxPath         = prov.GetMuxPath(container, allTypes.ToArray());
            List <MuxJob>    jobs            = new List <MuxJob>();
            List <MuxStream> subtitleStreams = new List <MuxStream>(subtitleStreamsArray);
            List <MuxStream> audioStreams    = new List <MuxStream>(audioStreamsArray);
            int    index          = 0;
            int    tempNumber     = 1;
            string previousOutput = null;

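            // walk the mux path: each leg becomes one mux job, and the output of a leg is fed as input into the next one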
            foreach (MuxPathLeg mpl in muxPath)
            {
                List <string> filesToDeleteThisJob = new List <string>();

                MuxJob mjob = new MuxJob();


                if (previousOutput != null)
                {
                    mjob.Settings.MuxedInput = previousOutput;
                    filesToDeleteThisJob.Add(previousOutput);
                }

                mjob.NbOfFrames         = video.NumberOfFrames;
                mjob.NbOfBFrames        = video.Settings.NbBframes;
                mjob.Codec              = video.Settings.Codec.ToString();
                mjob.Settings.Framerate = framerate;

                string tempOutputName = Path.Combine(Path.GetDirectoryName(output),
                                                     Path.GetFileNameWithoutExtension(output) + tempNumber + ".");
                tempNumber++;
                foreach (MuxableType o in mpl.handledInputTypes)
                {
                    if (o.outputType is VideoType)
                    {
                        mjob.Settings.VideoInput = video.Output;
                        if (inputsToDelete.Contains(video.Output))
                        {
                            filesToDeleteThisJob.Add(video.Output);
                        }
                        mjob.Settings.DAR = video.DAR;
                    }
                    else if (o.outputType is AudioType)
                    {
                        MuxStream stream = audioStreams.Find(delegate(MuxStream m)
                        {
                            return(VideoUtil.guessAudioType(m.path) == o.outputType);
                        });

                        if (stream != null)
                        {
                            mjob.Settings.AudioStreams.Add(stream);
                            audioStreams.Remove(stream);

                            if (inputsToDelete.Contains(stream.path))
                            {
                                filesToDeleteThisJob.Add(stream.path);
                            }
                        }
                    }
                    else if (o.outputType is SubtitleType)
                    {
                        MuxStream stream = subtitleStreams.Find(delegate(MuxStream m)
                        {
                            return(VideoUtil.guessSubtitleType(m.path) == o.outputType);
                        });

                        if (stream != null)
                        {
                            mjob.Settings.SubtitleStreams.Add(stream);
                            subtitleStreams.Remove(stream);

                            if (inputsToDelete.Contains(stream.path))
                            {
                                filesToDeleteThisJob.Add(stream.path);
                            }
                        }
                    }
                    else if (o.outputType is ChapterType)
                    {
                        if ((VideoUtil.guessChapterType(chapterFile) == o.outputType))
                        {
                            mjob.Settings.ChapterFile = chapterFile;
                        }
                        if (inputsToDelete.Contains(chapterFile))
                        {
                            filesToDeleteThisJob.Add(chapterFile);
                        }
                    }
                }
                foreach (MuxStream s in mjob.Settings.AudioStreams)
                {
                    audioStreams.Remove(s);
                }
                foreach (MuxStream s in mjob.Settings.SubtitleStreams)
                {
                    subtitleStreams.Remove(s);
                }
                mjob.FilesToDelete.AddRange(filesToDeleteThisJob);
                if (index == muxPath.Length - 1)
                {
                    mjob.Settings.MuxedOutput = output;
                    mjob.Settings.SplitSize   = splitSize;
                    mjob.Settings.DAR         = video.DAR;
                    mjob.ContainerType        = container;
                }
                else
                {
                    ContainerType cot = mpl.muxerInterface.GetContainersInCommon(muxPath[index + 1].muxerInterface)[0];
                    mjob.Settings.MuxedOutput = tempOutputName + cot.Extension;
                    mjob.ContainerType        = cot;
                }
                previousOutput = mjob.Settings.MuxedOutput;
                index++;
                jobs.Add(mjob);
                if (string.IsNullOrEmpty(mjob.Settings.VideoInput))
                {
                    mjob.Input = mjob.Settings.MuxedInput;
                }
                else
                {
                    mjob.Input = mjob.Settings.VideoInput;
                }
                mjob.Output  = mjob.Settings.MuxedOutput;
                mjob.MuxType = mpl.muxerInterface.MuxerType;
            }

            return(new SequentialChain(jobs.ToArray()));
        }
Example #6
0
        public void Run(MainForm info)
        {
            // normal video verification
            string error = null;

            // update the current audio stream with the latest data
            //            updateAudioStreams();
            if ((error = info.Video.verifyVideoSettings()) != null)
            {
                MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
            if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
            {
                MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
#warning must be fixed up to be more generic
            if (info.Video.CurrentVideoCodecSettings.EncodingMode == 2 || info.Video.CurrentVideoCodecSettings.EncodingMode == 5)
            {
                MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode", "Improper configuration",
                                MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }

            VideoCodecSettings vSettings = info.Video.CurrentVideoCodecSettings.clone();
            bool cont = info.JobUtil.getFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame, info.Video.Info.CreditsStartFrame);
            if (cont)
            {
                int         length    = 0;
                double      framerate = 0.0;
                VideoStream myVideo   = new VideoStream();
                info.JobUtil.getInputProperties(out length, out framerate, info.Video.VideoInput);
                myVideo.Input          = info.Video.Info.VideoInput;
                myVideo.Output         = info.Video.Info.VideoOutput;
                myVideo.NumberOfFrames = length;
                myVideo.Framerate      = framerate;
                myVideo.ParX           = info.Video.Info.DARX;
                myVideo.ParY           = info.Video.Info.DARY;
                myVideo.VideoType      = info.Video.CurrentMuxableVideoType;
                myVideo.Settings       = vSettings;
#warning check delays here. Doom9 did them, but I'm not sure how they work
                foreach (AudioStream aStream in info.Audio.AudioStreams)
                {
                    if (aStream.Delay != 0)
                    {
                        aStream.settings.DelayEnabled = true;
                        aStream.settings.Delay        = aStream.Delay;
                    }
                }
                // the delays have been transferred into the encoder settings above,
                // so clear the raw per-stream values to avoid applying them twice
                for (int i = 0; i < info.Audio.AudioStreams.Length; i++)
                {
                    info.Audio.AudioStreams[i].Delay = 0;
                }
                AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info, info.Video.PrerenderJob);
                if (aew.init())
                {
                    aew.ShowDialog();
                }
                else
                {
                    MessageBox.Show("The currently selected combination of video and audio output cannot be muxed", "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                }
            }
        }
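A detail worth noting in Run above is the delay handling: a non-zero container delay on an audio stream is moved into that stream's encoder settings (DelayEnabled/Delay) and the raw per-stream value is then cleared, so the correction is applied exactly once, by the audio encoder. A compact sketch of that idea, with hypothetical Track/TrackSettings types rather than MeGUI's AudioStream:

        // Sketch only - Track/TrackSettings are hypothetical, not MeGUI's AudioStream/AudioJob.
        class TrackSettings { public bool DelayEnabled; public int Delay; }
        class Track         { public int Delay; public TrackSettings Settings = new TrackSettings(); }

        static void MoveDelaysIntoSettings(Track[] tracks)
        {
            foreach (Track t in tracks)
            {
                if (t.Delay != 0)
                {
                    t.Settings.DelayEnabled = true;   // let the audio encoder apply the correction
                    t.Settings.Delay        = t.Delay;
                }
                t.Delay = 0;                          // clear the raw value so it is not applied twice
            }
        }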
Example #7
0
        public JobChain GenerateJobSeries(VideoStream video, string muxedOutput, AudioJob[] audioStreams,
                                          MuxStream[] subtitles, string chapters, FileSize?desiredSize, FileSize?splitSize,
                                          ContainerType container, bool prerender, MuxStream[] muxOnlyAudio, LogItem log, string deviceType,
                                          Zone[] zones, string videoFileToMux, OneClickAudioTrack[] audioTracks)
        {
            if (desiredSize.HasValue && String.IsNullOrEmpty(videoFileToMux))
            {
                if (video.Settings.EncodingMode != 4 && video.Settings.EncodingMode != 8) // no automated 2/3 pass
                {
                    if (this.mainForm.Settings.NbPasses == 2)
                    {
                        video.Settings.EncodingMode = 4; // automated 2 pass
                    }
                    else if (video.Settings.MaxNumberOfPasses == 3)
                    {
                        video.Settings.EncodingMode = 8;
                    }
                }
            }

            fixFileNameExtensions(video, audioStreams, container);
            string videoOutput = video.Output;

            log.Add(eliminatedDuplicateFilenames(ref videoOutput, ref muxedOutput, audioStreams));

            JobChain vjobs = null;

            if (!String.IsNullOrEmpty(videoFileToMux))
            {
                video.Output = videoFileToMux;
            }
            else
            {
                video.Output = videoOutput;
                vjobs        = jobUtil.prepareVideoJob(video.Input, video.Output, video.Settings, video.DAR, prerender, true, zones);
                if (vjobs == null)
                {
                    return(null);
                }
            }

            /* Here, we guess the types of the files based on extension.
             * This is guaranteed to work with MeGUI-encoded files, because
             * the extension will always be recognised. For non-MeGUI files,
             * we can only ever hope.*/
            List <MuxStream>   allAudioToMux      = new List <MuxStream>();
            List <MuxableType> allInputAudioTypes = new List <MuxableType>();

            if (audioTracks != null)
            {
                // OneClick mode
                foreach (OneClickAudioTrack ocAudioTrack in audioTracks)
                {
                    if (ocAudioTrack.DirectMuxAudio != null)
                    {
                        if (VideoUtil.guessAudioMuxableType(ocAudioTrack.DirectMuxAudio.path, true) != null)
                        {
                            allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(ocAudioTrack.DirectMuxAudio.path, true));
                            allAudioToMux.Add(ocAudioTrack.DirectMuxAudio);
                        }
                    }
                    if (ocAudioTrack.AudioJob != null)
                    {
                        allAudioToMux.Add(ocAudioTrack.AudioJob.ToMuxStream());
                        allInputAudioTypes.Add(ocAudioTrack.AudioJob.ToMuxableType());
                    }
                }
            }
            else
            {
                // AutoEncode mode
                foreach (AudioJob stream in audioStreams)
                {
                    allAudioToMux.Add(stream.ToMuxStream());
                    allInputAudioTypes.Add(stream.ToMuxableType());
                }

                foreach (MuxStream muxStream in muxOnlyAudio)
                {
                    if (VideoUtil.guessAudioMuxableType(muxStream.path, true) != null)
                    {
                        allInputAudioTypes.Add(VideoUtil.guessAudioMuxableType(muxStream.path, true));
                        allAudioToMux.Add(muxStream);
                    }
                }
            }

            List <MuxableType> allInputSubtitleTypes = new List <MuxableType>();

            foreach (MuxStream muxStream in subtitles)
            {
                if (VideoUtil.guessSubtitleType(muxStream.path) != null)
                {
                    allInputSubtitleTypes.Add(new MuxableType(VideoUtil.guessSubtitleType(muxStream.path), null));
                }
            }

            MuxableType chapterInputType = null;

            if (!String.IsNullOrEmpty(chapters))
            {
                ChapterType type = VideoUtil.guessChapterType(chapters);
                if (type != null)
                {
                    chapterInputType = new MuxableType(type, null);
                }
            }

            MuxableType deviceOutputType = null;

            if (!String.IsNullOrEmpty(deviceType))
            {
                DeviceType type = VideoUtil.guessDeviceType(deviceType);
                if (type != null)
                {
                    deviceOutputType = new MuxableType(type, null);
                }
            }

            List <string> inputsToDelete = new List <string>();

            if (String.IsNullOrEmpty(videoFileToMux))
            {
                inputsToDelete.Add(video.Output);
            }
            inputsToDelete.AddRange(Array.ConvertAll <AudioJob, string>(audioStreams, delegate(AudioJob a) { return(a.Output); }));

            JobChain muxJobs = jobUtil.GenerateMuxJobs(video, video.Framerate, allAudioToMux.ToArray(), allInputAudioTypes.ToArray(),
                                                       subtitles, allInputSubtitleTypes.ToArray(), chapters, chapterInputType, container, muxedOutput, splitSize, inputsToDelete, deviceType, deviceOutputType);

            if (desiredSize.HasValue && String.IsNullOrEmpty(videoFileToMux))
            {
                BitrateCalculationInfo b = new BitrateCalculationInfo();

                List <string> audiofiles = new List <string>();
                foreach (MuxStream s in allAudioToMux)
                {
                    audiofiles.Add(s.path);
                }
                b.AudioFiles = audiofiles;

                b.Container   = container;
                b.VideoJobs   = new List <TaggedJob>(vjobs.Jobs);
                b.DesiredSize = desiredSize.Value;
                ((VideoJob)vjobs.Jobs[0].Job).BitrateCalculationInfo = b;
            }

            if (!String.IsNullOrEmpty(videoFileToMux))
            {
                return(new SequentialChain(new SequentialChain((Job[])audioStreams), new SequentialChain(muxJobs)));
            }
            else
            {
                return(new SequentialChain(
                           new SequentialChain((Job[])audioStreams),
                           new SequentialChain(vjobs),
                           new SequentialChain(muxJobs)));
            }
        }
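As the block comment in GenerateJobSeries above notes, the muxable type of an external file is guessed from its extension: reliable for MeGUI-encoded output, best effort for anything else. A sketch of what such a guess boils down to (a hypothetical helper returning a plain label; MeGUI's VideoUtil.guessAudioMuxableType returns a full MuxableType):

        // Sketch only: maps an extension to a coarse audio kind, null when unknown.
        static string GuessAudioKindByExtension(string path)
        {
            switch (System.IO.Path.GetExtension(path).ToLowerInvariant())
            {
                case ".ac3":  return "AC-3";
                case ".dts":  return "DTS";
                case ".aac":
                case ".m4a":  return "AAC";
                case ".mp3":  return "MP3";
                case ".flac": return "FLAC";
                default:      return null;   // unknown extension - the caller skips the stream
            }
        }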
Example #8
0
        public void postprocess()
        {
            audioFiles = vUtil.getAllDemuxedAudio(job.Output, 8);

            fillInAudioInformation();


            logBuilder.Append("Desired size of this automated encoding series: " + job.PostprocessingProperties.OutputSize
                              + " split size: " + job.PostprocessingProperties.Splitting + "\r\n");
            VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;

            string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output),
                                              Path.GetFileNameWithoutExtension(job.Output) + "_Video");
            string muxedOutput = job.PostprocessingProperties.FinalOutput;

            //Open the video
            Dar?   dar;
            string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR,
                                          job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, logBuilder,
                                          job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar);

            VideoStream myVideo = new VideoStream();
            ulong       length;
            double      framerate;

            JobUtil.getInputProperties(out length, out framerate, videoInput);
            myVideo.Input          = videoInput;
            myVideo.Output         = videoOutput;
            myVideo.NumberOfFrames = length;
            myVideo.Framerate      = (decimal)framerate;
            myVideo.DAR            = dar;
            myVideo.VideoType      = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
            myVideo.Settings       = videoSettings;
            List <string> intermediateFiles = new List <string>();

            intermediateFiles.Add(videoInput);
            intermediateFiles.Add(job.Output);
            intermediateFiles.AddRange(audioFiles.Values);
            if (!string.IsNullOrEmpty(videoInput))
            {
                //Create empty subtitles for muxing (subtitles not supported in one click mode)
                MuxStream[] subtitles = new MuxStream[0];
                JobChain    c         = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles,
                                                                job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                                                                job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                                                                false, job.PostprocessingProperties.DirectMuxAudio);

                /*                    vUtil.generateJobSeries(videoInput, videoOutput, muxedOutput, videoSettings,
                 *                      audioStreams, audio, subtitles, job.PostprocessingProperties.ChapterFile,
                 *                      job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.SplitSize,
                 *                      containerOverhead, type, new string[] { job.Output, videoInput });*/
                if (c == null)
                {
                    return;
                }

                c = CleanupJob.AddAfter(c, intermediateFiles);
                mainForm.Jobs.addJobsWithDependencies(c);
            }
            mainForm.addToLog(logBuilder.ToString());
        }
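postprocess above keeps a list of every intermediate file it created (the generated AviSynth input, the original job output, the demuxed audio tracks) and appends a cleanup step to the job chain so they are removed once encoding and muxing have finished. A rough sketch of the deletion step such a cleanup job might perform (hypothetical helper, not MeGUI's CleanupJob):

        // Sketch only: best-effort deletion - a failed delete is logged, never fatal.
        static void DeleteIntermediates(System.Collections.Generic.IEnumerable<string> files, System.Action<string> log)
        {
            foreach (string f in files)
            {
                try
                {
                    if (System.IO.File.Exists(f))
                    {
                        System.IO.File.Delete(f);
                    }
                }
                catch (System.Exception ex)
                {
                    log("Could not delete " + f + ": " + ex.Message);
                }
            }
        }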
Example #9
0
        private static void fixFileNameExtensions(VideoStream video, AudioJob[] audioStreams, ContainerType container)
        {
            AudioEncoderType[] audioCodecs = new AudioEncoderType[audioStreams.Length];
            for (int i = 0; i < audioStreams.Length; i++)
            {
                audioCodecs[i] = audioStreams[i].Settings.EncoderType;
            }
            MuxPath path;

            if (video.Settings == null)
            {
                path = MainForm.Instance.MuxProvider.GetMuxPath(VideoEncoderType.X264, audioCodecs, container);
            }
            else
            {
                path = MainForm.Instance.MuxProvider.GetMuxPath(video.Settings.EncoderType, audioCodecs, container);
            }
            if (path == null)
            {
                return;
            }
            List <AudioType> audioTypes = new List <AudioType>();

            foreach (MuxableType type in path.InitialInputTypes)
            {
                if (type.outputType is VideoType)
                {
                    if (video.Settings.EncoderType == VideoEncoderType.XVID && (type.outputType.ContainerType == ContainerType.AVI || type.outputType.ContainerType == ContainerType.MKV))
                    {
                        video.Output = Path.ChangeExtension(video.Output, ".m4v");
                    }
                    else if (video.Settings.EncoderType == VideoEncoderType.X264 && type.outputType.ContainerType == ContainerType.MP4)
                    {
                        video.Output = Path.ChangeExtension(video.Output, ".264");
                    }
                    else if (video.Settings.EncoderType == VideoEncoderType.X265 && (type.outputType.ContainerType == ContainerType.MKV || type.outputType.ContainerType == ContainerType.MP4))
                    {
                        video.Output = Path.ChangeExtension(video.Output, ".hevc");
                    }
                    else
                    {
                        video.Output = Path.ChangeExtension(video.Output, type.outputType.Extension);
                    }
                    video.VideoType = type;
                }
                if (type.outputType is AudioType)
                {
                    audioTypes.Add((AudioType)type.outputType);
                }
            }
            AudioEncoderProvider aProvider = new AudioEncoderProvider();

            for (int i = 0; i < audioStreams.Length; i++)
            {
                AudioType[] types = aProvider.GetSupportedOutput(audioStreams[i].Settings.EncoderType);
                foreach (AudioType type in types)
                {
                    if (audioTypes.Contains(type))
                    {
                        string newFileName = Path.ChangeExtension(audioStreams[i].Output, type.Extension);
                        if (!audioStreams[i].Input.Equals(newFileName))
                        {
                            audioStreams[i].Output = newFileName;
                        }
                        break;
                    }
                }
            }
        }
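fixFileNameExtensions above hard-codes a few encoder-specific raw-stream extensions (x264 to .264 when targeting MP4, x265 to .hevc, XviD to .m4v) and otherwise falls back to the extension of the type's container. Where such special cases accumulate, a lookup table keeps them in one place; the sketch below reuses the VideoEncoderType values referenced above but ignores the per-container conditions the real code applies:

        // Sketch only: table-driven variant of the extension branches above.
        // Requires: using System.Collections.Generic;
        static readonly Dictionary<VideoEncoderType, string> rawExtension =
            new Dictionary<VideoEncoderType, string>
        {
            { VideoEncoderType.X264, ".264"  },
            { VideoEncoderType.X265, ".hevc" },
            { VideoEncoderType.XVID, ".m4v"  },
        };

        static string PickVideoExtension(VideoEncoderType encoder, string containerExtension)
        {
            string ext;
            return rawExtension.TryGetValue(encoder, out ext) ? ext : containerExtension;
        }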