public OneClickAudioTrack(AudioJob oAudioJob, MuxStream oMuxStream, AudioTrackInfo oAudioTrackInfo, bool bMKVTrack)
{
    _bMKVTrack = bMKVTrack;
    _audioJob = oAudioJob;
    _directMuxAudio = oMuxStream;
    _audioTrackInfo = oAudioTrackInfo;
}
public LogItem postprocess() { audioFiles = vUtil.getAllDemuxedAudio(job.AudioTracks, job.Output, 8); fillInAudioInformation(); log.LogValue("Desired size", job.PostprocessingProperties.OutputSize); log.LogValue("Split size", job.PostprocessingProperties.Splitting); VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings; string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output), Path.GetFileNameWithoutExtension(job.Output) + "_Video"); string muxedOutput = job.PostprocessingProperties.FinalOutput; //Open the video Dar? dar; string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR, job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, log, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar); VideoStream myVideo = new VideoStream(); ulong length; double framerate; JobUtil.getInputProperties(out length, out framerate, videoInput); myVideo.Input = videoInput; myVideo.Output = videoOutput; myVideo.NumberOfFrames = length; myVideo.Framerate = (decimal)framerate; myVideo.DAR = dar; myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec); myVideo.Settings = videoSettings; List <string> intermediateFiles = new List <string>(); intermediateFiles.Add(videoInput); intermediateFiles.Add(job.Output); intermediateFiles.AddRange(audioFiles.Values); if (!string.IsNullOrEmpty(videoInput)) { //Create empty subtitles for muxing (subtitles not supported in one click mode) MuxStream[] subtitles = new MuxStream[0]; JobChain c = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container, false, job.PostprocessingProperties.DirectMuxAudio, log); if (c == null) { log.Warn("Job creation aborted"); return(log); } c = CleanupJob.AddAfter(c, intermediateFiles); mainForm.Jobs.addJobsWithDependencies(c); } return(log); }
public void postprocess() { audioFiles = vUtil.getAllDemuxedAudio(job.Output, 8); fillInAudioInformation(); logBuilder.Append("Desired size of this automated encoding series: " + job.PostprocessingProperties.OutputSize + " split size: " + job.PostprocessingProperties.Splitting + "\r\n"); VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings; string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output), Path.GetFileNameWithoutExtension(job.Output) + "_Video"); string muxedOutput = job.PostprocessingProperties.FinalOutput; //Open the video Dar? dar; string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR, job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, logBuilder, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar); VideoStream myVideo = new VideoStream(); ulong length; double framerate; JobUtil.getInputProperties(out length, out framerate, videoInput); myVideo.Input = videoInput; myVideo.Output = videoOutput; myVideo.NumberOfFrames = length; myVideo.Framerate = (decimal)framerate; myVideo.DAR = dar; myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec); myVideo.Settings = videoSettings; List <string> intermediateFiles = new List <string>(); intermediateFiles.Add(videoInput); intermediateFiles.Add(job.Output); intermediateFiles.AddRange(audioFiles.Values); if (!string.IsNullOrEmpty(videoInput)) { //Create empty subtitles for muxing (subtitles not supported in one click mode) MuxStream[] subtitles = new MuxStream[0]; JobChain c = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container, false, job.PostprocessingProperties.DirectMuxAudio); /* vUtil.generateJobSeries(videoInput, videoOutput, muxedOutput, videoSettings, * audioStreams, audio, subtitles, job.PostprocessingProperties.ChapterFile, * job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.SplitSize, * containerOverhead, type, new string[] { job.Output, videoInput });*/ c = CleanupJob.AddAfter(c, intermediateFiles); mainForm.Jobs.addJobsWithDependencies(c); } mainForm.addToLog(logBuilder.ToString()); }
public OneClickAudioTrack(AudioJob oAudioJob, MuxStream oMuxStream, AudioTrackInfo oAudioTrackInfo, bool bMKVTrack)
{
    _audioJob = oAudioJob;
    _directMuxAudio = oMuxStream;
    _audioTrackInfo = oAudioTrackInfo;
    if (_audioTrackInfo != null)
    {
        _audioTrackInfo.ExtractMKVTrack = bMKVTrack;
    }
}
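For orientation, a minimal sketch (variable names are placeholders, not from the original) of how these constructors are typically fed: a track that should be re-encoded carries an AudioJob, while a track passed straight to the muxer carries a MuxStream; the flag marks tracks that still have to be demuxed from the source MKV, as the one-click postprocessing below expects.

OneClickAudioTrack encodedTrack = new OneClickAudioTrack(audioJob, null, trackInfo, true);       // re-encode; track is demuxed from the MKV first
OneClickAudioTrack passthroughTrack = new OneClickAudioTrack(null, muxStream, trackInfo, false); // direct mux, no re-encode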
/// <summary>
/// Separates encodable from muxable audio streams.
/// In addition to returning the two types separately, an array of MuxStreams is returned
/// which is plugged into the muxer; it contains the names of all the audio files
/// that have to be muxed.
/// </summary>
/// <param name="encodable">encodable audio streams</param>
/// <param name="muxable">muxable audio streams with the path filled out and a blank language</param>
private void separateEncodableAndMuxableAudioStreams(out AudioJob[] encodable, out MuxStream[] muxable, out AudioEncoderType[] muxTypes)
{
    encodable = this.getConfiguredAudioJobs(); // discards improperly configured ones
    // the rest of the job is all encodable
    muxable = new MuxStream[encodable.Length];
    muxTypes = new AudioEncoderType[encodable.Length];
    int j = 0;
    foreach (AudioJob stream in encodable)
    {
        muxable[j] = stream.ToMuxStream();
        muxTypes[j] = stream.Settings.EncoderType;
        j++;
    }
}
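A minimal usage sketch (illustrative, not from the original source) showing how the three out parameters line up: index i of muxable and muxTypes describes the eventual encoder output of encodable[i], which is what the adaptive mux window below needs in order to pick a compatible container.

AudioJob[] encodable;
MuxStream[] muxable;
AudioEncoderType[] muxTypes;
separateEncodableAndMuxableAudioStreams(out encodable, out muxable, out muxTypes);
for (int i = 0; i < encodable.Length; i++)
{
    // muxable[i] was built from encodable[i] via ToMuxStream(); muxTypes[i] comes from its encoder settings
    Console.WriteLine("{0} -> {1} ({2})", encodable[i].Input, muxable[i].path, muxTypes[i]);
}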
/// <summary> /// handles the go button for automated encoding /// checks if we're in automated 2 pass video mode and that we're not using the snow codec /// then the video and audio configuration is checked, and if it checks out /// the audio job, video jobs and muxing job are generated, audio and video job are linked /// and encoding is started /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void queueButton_Click(object sender, System.EventArgs e) { if (!string.IsNullOrEmpty(this.muxedOutput.Filename)) { FileSize?desiredSize = targetSize.Value; FileSize?splitSize = splitting.Value; if (FileSizeRadio.Checked) { log.LogValue("Desired Size ", desiredSize); } else if (averageBitrateRadio.Checked) { log.LogValue("Projected Bitrate ", string.Format("{0}kbps", projectedBitrateKBits.Text)); } log.LogValue("Split Size ", splitSize); MuxStream[] audio; AudioJob[] aStreams; AudioEncoderType[] muxTypes; separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes); MuxStream[] subtitles = new MuxStream[0]; string chapters = ""; string videoInput = vInfo.VideoInput; string videoOutput = vInfo.VideoOutput; string muxedOutput = this.muxedOutput.Filename; ContainerType cot = this.container.SelectedItem as ContainerType; if (addSubsNChapters.Checked) { AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm); amw.setMinimizedMode(videoOutput, videoStream.Settings.EncoderType, jobUtil.getFramerate(videoInput), audio, muxTypes, muxedOutput, splitSize, cot); if (amw.ShowDialog() == DialogResult.OK) { amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot); } else // user aborted, abort the whole process { return; } } removeStreamsToBeEncoded(ref audio, aStreams); mainForm.Jobs.addJobsWithDependencies(vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams, subtitles, chapters, desiredSize, splitSize, cot, this.prerender, audio, log)); this.Close(); } }
private void StartPostProcessing() { Thread t = null; try { _log.LogEvent("Processing thread started"); raiseEvent("Preprocessing... ***PLEASE WAIT***"); _start = DateTime.Now; t = new Thread(new ThreadStart(delegate { while (true) { updateTime(); Thread.Sleep(1000); } })); t.Start(); List <string> arrAudioFilesDelete = new List <string>(); audioFiles = new Dictionary <int, string>(); List <AudioTrackInfo> arrAudioTracks = new List <AudioTrackInfo>(); List <AudioJob> arrAudioJobs = new List <AudioJob>(); List <MuxStream> arrMuxStreams = new List <MuxStream>(); List <string> intermediateFiles = new List <string>(); FileUtil.ensureDirectoryExists(job.PostprocessingProperties.WorkingDirectory); foreach (OneClickAudioTrack oAudioTrack in job.PostprocessingProperties.AudioTracks) { if (oAudioTrack.ExtractMKVTrack) { audioFiles.Add(oAudioTrack.AudioTrackInfo.TrackID, job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName); arrAudioFilesDelete.Add(job.PostprocessingProperties.WorkingDirectory + "\\" + oAudioTrack.AudioTrackInfo.DemuxFileName); } else if (oAudioTrack.AudioTrackInfo != null) { arrAudioTracks.Add(oAudioTrack.AudioTrackInfo); } if (oAudioTrack.AudioJob != null) { if (job.PostprocessingProperties.IndexType == FileIndexerWindow.IndexType.NONE && String.IsNullOrEmpty(oAudioTrack.AudioJob.Input)) { oAudioTrack.AudioJob.Input = job.Input; } arrAudioJobs.Add(oAudioTrack.AudioJob); } if (oAudioTrack.DirectMuxAudio != null) { arrMuxStreams.Add(oAudioTrack.DirectMuxAudio); } } if (audioFiles.Count == 0 && job.PostprocessingProperties.IndexType != FileIndexerWindow.IndexType.NONE && !job.PostprocessingProperties.Eac3toDemux) { audioFiles = vUtil.getAllDemuxedAudio(arrAudioTracks, new List <AudioTrackInfo>(), out arrAudioFilesDelete, job.IndexFile, _log); } fillInAudioInformation(arrAudioJobs, arrMuxStreams); if (!String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux)) { _log.LogEvent("Don't encode video: True"); } else { _log.LogEvent("Desired size: " + job.PostprocessingProperties.OutputSize); } _log.LogEvent("Split size: " + job.PostprocessingProperties.Splitting); // chapter file handling if (String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile)) { job.PostprocessingProperties.ChapterFile = null; } else if (job.PostprocessingProperties.Container == ContainerType.AVI) { _log.LogEvent("Chapter handling disabled because of the AVI target container"); job.PostprocessingProperties.ChapterFile = null; } else if (!File.Exists(job.PostprocessingProperties.ChapterFile)) { if (job.PostprocessingProperties.ChapterFile.StartsWith("<") || job.PostprocessingProperties.ChapterExtracted) { // internal chapter file string strTempFile = job.PostprocessingProperties.ChapterFile; if (Path.GetExtension(job.PostprocessingProperties.VideoInput).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".mkv")) { MediaInfoFile oInfo = new MediaInfoFile(job.PostprocessingProperties.VideoInput, ref _log); if (oInfo.hasMKVChapters()) { job.PostprocessingProperties.ChapterFile = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.IndexFile) + " - Chapter Information.txt"); if (oInfo.extractMKVChapters(job.PostprocessingProperties.ChapterFile)) { intermediateFiles.Add(job.PostprocessingProperties.ChapterFile); job.PostprocessingProperties.ChapterExtracted = true; } else { job.PostprocessingProperties.ChapterFile = strTempFile; } } } else if (File.Exists(job.PostprocessingProperties.IFOInput)) { 
job.PostprocessingProperties.ChapterFile = VideoUtil.getChaptersFromIFO(job.PostprocessingProperties.IFOInput, false, job.PostprocessingProperties.WorkingDirectory, job.PostprocessingProperties.TitleNumberToProcess); if (!String.IsNullOrEmpty(job.PostprocessingProperties.ChapterFile)) { intermediateFiles.Add(job.PostprocessingProperties.ChapterFile); job.PostprocessingProperties.ChapterExtracted = true; } else { job.PostprocessingProperties.ChapterFile = strTempFile; } } } if (!File.Exists(job.PostprocessingProperties.ChapterFile)) { _log.LogEvent("File not found: " + job.PostprocessingProperties.ChapterFile, ImageType.Error); job.PostprocessingProperties.ChapterFile = null; } } else if (job.PostprocessingProperties.ChapterExtracted) { intermediateFiles.Add(job.PostprocessingProperties.ChapterFile); } string avsFile = String.Empty; VideoStream myVideo = new VideoStream(); VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings; if (String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux)) { //Open the video Dar?dar; avsFile = createAVSFile(job.IndexFile, job.Input, job.PostprocessingProperties.DAR, job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, _log, job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar, job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution, job.PostprocessingProperties.UseChaptersMarks); ulong length; double framerate; JobUtil.getInputProperties(out length, out framerate, avsFile); myVideo.Input = avsFile; myVideo.Output = Path.Combine(job.PostprocessingProperties.WorkingDirectory, Path.GetFileNameWithoutExtension(job.Input) + "_Video"); myVideo.NumberOfFrames = length; myVideo.Framerate = (decimal)framerate; myVideo.DAR = dar; myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec); myVideo.Settings = videoSettings; } else { myVideo.Output = job.PostprocessingProperties.VideoFileToMux; myVideo.Settings = videoSettings; MediaInfoFile oInfo = new MediaInfoFile(myVideo.Output, ref _log); videoSettings.VideoName = oInfo.VideoInfo.Track.Name; myVideo.Framerate = (decimal)oInfo.VideoInfo.FPS; } intermediateFiles.Add(avsFile); intermediateFiles.Add(job.IndexFile); intermediateFiles.AddRange(audioFiles.Values); if (!string.IsNullOrEmpty(qpfile)) { intermediateFiles.Add(qpfile); } foreach (string file in arrAudioFilesDelete) { intermediateFiles.Add(file); } if (File.Exists(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log"))) { intermediateFiles.Add(Path.Combine(Path.GetDirectoryName(job.Input), Path.GetFileNameWithoutExtension(job.Input) + "._log")); } foreach (string file in job.PostprocessingProperties.FilesToDelete) { intermediateFiles.Add(file); } if (!string.IsNullOrEmpty(avsFile) || !String.IsNullOrEmpty(job.PostprocessingProperties.VideoFileToMux)) { MuxStream[] subtitles; if (job.PostprocessingProperties.SubtitleTracks.Count == 0) { //Create empty subtitles for muxing subtitles = new MuxStream[0]; } else { subtitles = new MuxStream[job.PostprocessingProperties.SubtitleTracks.Count]; int i = 0; foreach (OneClickStream oTrack in job.PostprocessingProperties.SubtitleTracks) { if (oTrack.TrackInfo.IsMKVContainer()) { //demuxed MKV string trackFile = Path.GetDirectoryName(job.IndexFile) + "\\" + oTrack.TrackInfo.DemuxFileName; if (File.Exists(trackFile)) { 
intermediateFiles.Add(trackFile); if (Path.GetExtension(trackFile).ToLower(System.Globalization.CultureInfo.InvariantCulture).Equals(".idx")) { intermediateFiles.Add(FileUtil.GetPathWithoutExtension(trackFile) + ".sub"); } subtitles[i] = new MuxStream(trackFile, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null); } else { _log.LogEvent("File not found: " + trackFile, ImageType.Error); } } else { subtitles[i] = new MuxStream(oTrack.DemuxFilePath, oTrack.Language, oTrack.Name, oTrack.Delay, oTrack.DefaultStream, oTrack.ForcedStream, null); } i++; } } JobChain c = vUtil.GenerateJobSeries(myVideo, job.PostprocessingProperties.FinalOutput, arrAudioJobs.ToArray(), subtitles, job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize, job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container, job.PostprocessingProperties.PrerenderJob, arrMuxStreams.ToArray(), _log, job.PostprocessingProperties.DeviceOutputType, null, job.PostprocessingProperties.VideoFileToMux, job.PostprocessingProperties.AudioTracks.ToArray()); if (c == null) { _log.Warn("Job creation aborted"); return; } c = CleanupJob.AddAfter(c, intermediateFiles, job.PostprocessingProperties.FinalOutput); mainForm.Jobs.addJobsWithDependencies(c); // batch processing other input files if necessary if (job.PostprocessingProperties.FilesToProcess.Count > 0) { OneClickWindow ocw = new OneClickWindow(mainForm); ocw.setBatchProcessing(job.PostprocessingProperties.FilesToProcess, job.PostprocessingProperties.OneClickSetting); } } } catch (Exception e) { t.Abort(); if (e is ThreadAbortException) { _log.LogEvent("Aborting..."); su.WasAborted = true; su.IsComplete = true; raiseEvent(); } else { _log.LogValue("An error occurred", e, ImageType.Error); su.HasError = true; su.IsComplete = true; raiseEvent(); } return; } t.Abort(); su.IsComplete = true; raiseEvent(); }
public Job CreateJobFromAdHoc( string projectId, string location, string inputUri, string outputUri) { // Create the client. TranscoderServiceClient client = TranscoderServiceClient.Create(); // Build the parent location name. LocationName parent = new LocationName(projectId, location); // Build the job config. VideoStream videoStream0 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 550000, FrameRate = 60, HeightPixels = 360, WidthPixels = 640 } }; VideoStream videoStream1 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 2500000, FrameRate = 60, HeightPixels = 720, WidthPixels = 1280 } }; AudioStream audioStream0 = new AudioStream { Codec = "aac", BitrateBps = 64000 }; ElementaryStream elementaryStream0 = new ElementaryStream { Key = "video_stream0", VideoStream = videoStream0 }; ElementaryStream elementaryStream1 = new ElementaryStream { Key = "video_stream1", VideoStream = videoStream1 }; ElementaryStream elementaryStream2 = new ElementaryStream { Key = "audio_stream0", AudioStream = audioStream0 }; MuxStream muxStream0 = new MuxStream { Key = "sd", Container = "mp4", ElementaryStreams = { "video_stream0", "audio_stream0" } }; MuxStream muxStream1 = new MuxStream { Key = "hd", Container = "mp4", ElementaryStreams = { "video_stream1", "audio_stream0" } }; Input input = new Input { Key = "input0", Uri = inputUri }; Output output = new Output { Uri = outputUri }; JobConfig jobConfig = new JobConfig { Inputs = { input }, Output = output, ElementaryStreams = { elementaryStream0, elementaryStream1, elementaryStream2 }, MuxStreams = { muxStream0, muxStream1 } }; // Build the job. Job newJob = new Job { Config = jobConfig, InputUri = inputUri, OutputUri = outputUri }; // Call the API. Job job = client.CreateJob(parent, newJob); // Return the result. return(job); }
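The returned Job starts out pending or running; a hedged sketch of polling it to completion follows. The method name and sleep interval are illustrative; it assumes the GetJob(string) overload and the Job.Types.ProcessingState enum from the same Google.Cloud.Video.Transcoder.V1 package, with jobName being the full resource name returned in job.Name.

public Job WaitForJobToFinish(string jobName)
{
    TranscoderServiceClient client = TranscoderServiceClient.Create();
    while (true)
    {
        Job job = client.GetJob(jobName);
        if (job.State == Job.Types.ProcessingState.Succeeded ||
            job.State == Job.Types.ProcessingState.Failed)
        {
            return job;
        }
        System.Threading.Thread.Sleep(TimeSpan.FromSeconds(10)); // poll interval chosen arbitrarily
    }
}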
public Job CreateJobWithStaticOverlay( string projectId, string location, string inputUri, string overlayImageUri, string outputUri) { // Create the client. TranscoderServiceClient client = TranscoderServiceClient.Create(); // Build the parent location name. LocationName parent = new LocationName(projectId, location); // Build the job config. VideoStream videoStream0 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 550000, FrameRate = 60, HeightPixels = 360, WidthPixels = 640 } }; AudioStream audioStream0 = new AudioStream { Codec = "aac", BitrateBps = 64000 }; // Create the overlay image. Only JPEG is supported. Image resolution is based on output // video resolution. To respect the original image aspect ratio, set either x or y to 0.0. // This example stretches the overlay image the full width and half of the height of the // output video. Overlay.Types.Image overlayImage = new Overlay.Types.Image { Uri = overlayImageUri, Alpha = 1, Resolution = new Overlay.Types.NormalizedCoordinate { X = 1, Y = 0.5 } }; // Create the starting animation (when the overlay appears). Use the values x: 0 and y: 0 to // position the top-left corner of the overlay in the top-left corner of the output video. Overlay.Types.Animation animationStart = new Overlay.Types.Animation { AnimationStatic = new Overlay.Types.AnimationStatic { Xy = new Overlay.Types.NormalizedCoordinate { X = 0, Y = 0 }, StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(0)) } }; // Create the ending animation (when the overlay disappears). In this example, the overlay // disappears at the 10-second mark in the output video. Overlay.Types.Animation animationEnd = new Overlay.Types.Animation { AnimationEnd = new Overlay.Types.AnimationEnd { StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(10)) } }; // Create the overlay and add the image and animations to it. Overlay overlay = new Overlay { Image = overlayImage, Animations = { animationStart, animationEnd } }; ElementaryStream elementaryStream0 = new ElementaryStream { Key = "video_stream0", VideoStream = videoStream0 }; ElementaryStream elementaryStream1 = new ElementaryStream { Key = "audio_stream0", AudioStream = audioStream0 }; MuxStream muxStream0 = new MuxStream { Key = "sd", Container = "mp4", ElementaryStreams = { "video_stream0", "audio_stream0" } }; Input input = new Input { Key = "input0", Uri = inputUri }; Output output = new Output { Uri = outputUri }; JobConfig jobConfig = new JobConfig { Inputs = { input }, Output = output, ElementaryStreams = { elementaryStream0, elementaryStream1 }, MuxStreams = { muxStream0 }, Overlays = { overlay } }; // Build the job. Job newJob = new Job { InputUri = inputUri, OutputUri = outputUri, Config = jobConfig }; // Call the API. Job job = client.CreateJob(parent, newJob); // Return the result. return(job); }
public Job CreateJobWithConcatenatedInputs( string projectId, string location, string inputUri1, TimeSpan startTimeInput1, TimeSpan endTimeInput1, string inputUri2, TimeSpan startTimeInput2, TimeSpan endTimeInput2, string outputUri) { // Create the client. TranscoderServiceClient client = TranscoderServiceClient.Create(); // Build the parent location name. LocationName parent = new LocationName(projectId, location); // Build the job config. VideoStream videoStream0 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 550000, FrameRate = 60, HeightPixels = 360, WidthPixels = 640 } }; AudioStream audioStream0 = new AudioStream { Codec = "aac", BitrateBps = 64000 }; ElementaryStream elementaryStream0 = new ElementaryStream { Key = "video_stream0", VideoStream = videoStream0 }; ElementaryStream elementaryStream1 = new ElementaryStream { Key = "audio_stream0", AudioStream = audioStream0 }; MuxStream muxStream0 = new MuxStream { Key = "sd", Container = "mp4", ElementaryStreams = { "video_stream0", "audio_stream0" } }; Input input1 = new Input { Key = "input1", Uri = inputUri1 }; Input input2 = new Input { Key = "input2", Uri = inputUri2 }; EditAtom atom1 = new EditAtom { Key = "atom1", StartTimeOffset = Duration.FromTimeSpan(startTimeInput1), EndTimeOffset = Duration.FromTimeSpan(endTimeInput1), Inputs = { input1.Key } }; EditAtom atom2 = new EditAtom { Key = "atom2", StartTimeOffset = Duration.FromTimeSpan(startTimeInput2), EndTimeOffset = Duration.FromTimeSpan(endTimeInput2), Inputs = { input2.Key } }; Output output = new Output { Uri = outputUri }; JobConfig jobConfig = new JobConfig { Inputs = { input1, input2 }, EditList = { atom1, atom2 }, Output = output, ElementaryStreams = { elementaryStream0, elementaryStream1 }, MuxStreams = { muxStream0 } }; // Build the job. Job newJob = new Job { OutputUri = outputUri, Config = jobConfig }; // Call the API. Job job = client.CreateJob(parent, newJob); // Return the result. return(job); }
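An illustrative call of the method above (bucket paths and times are placeholders): it keeps 5s-25s of the first input and 0s-15s of the second, concatenated in that order via the edit list.

Job job = CreateJobWithConcatenatedInputs(
    "my-project-id", "us-central1",
    "gs://my-bucket/input1.mp4", TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(25),
    "gs://my-bucket/input2.mp4", TimeSpan.FromSeconds(0), TimeSpan.FromSeconds(15),
    "gs://my-bucket/output/");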
public Job CreateJobWithSetNumberImagesSpritesheet( string projectId, string location, string inputUri, string outputUri) { // Create the client. TranscoderServiceClient client = TranscoderServiceClient.Create(); // Build the parent location name. LocationName parent = new LocationName(projectId, location); // Build the job config. VideoStream videoStream0 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 550000, FrameRate = 60, HeightPixels = 360, WidthPixels = 640 } }; AudioStream audioStream0 = new AudioStream { Codec = "aac", BitrateBps = 64000 }; // Generates a 10x10 spritesheet of small images from the input video. // To preserve the source aspect ratio, you should set the // SpriteWidthPixels field or the SpriteHeightPixels field, but not // both (the API will automatically calculate the missing field). For // this sample, we don't care about the aspect ratio so we set both // fields. SpriteSheet smallSpriteSheet = new SpriteSheet { FilePrefix = SmallSpritesheetFilePrefix, SpriteHeightPixels = 32, SpriteWidthPixels = 64, ColumnCount = 10, RowCount = 10, TotalCount = 100 }; // Generates a 10x10 spritesheet of larger images from the input video. // input video. To preserve the source aspect ratio, you should set the // SpriteWidthPixels field or the SpriteHeightPixels field, but not // both (the API will automatically calculate the missing field). For // this sample, we don't care about the aspect ratio so we set both // fields. SpriteSheet largeSpriteSheet = new SpriteSheet { FilePrefix = LargeSpritesheetFilePrefix, SpriteHeightPixels = 72, SpriteWidthPixels = 128, ColumnCount = 10, RowCount = 10, TotalCount = 100 }; ElementaryStream elementaryStream0 = new ElementaryStream { Key = "video_stream0", VideoStream = videoStream0 }; ElementaryStream elementaryStream1 = new ElementaryStream { Key = "audio_stream0", AudioStream = audioStream0 }; MuxStream muxStream0 = new MuxStream { Key = "sd", Container = "mp4", ElementaryStreams = { "video_stream0", "audio_stream0" } }; Input input = new Input { Key = "input0", Uri = inputUri }; Output output = new Output { Uri = outputUri }; JobConfig jobConfig = new JobConfig { Inputs = { input }, Output = output, ElementaryStreams = { elementaryStream0, elementaryStream1 }, MuxStreams = { muxStream0 }, SpriteSheets = { smallSpriteSheet, largeSpriteSheet } }; // Build the job. Job newJob = new Job(); newJob.InputUri = inputUri; newJob.OutputUri = outputUri; newJob.Config = jobConfig; // Call the API. Job job = client.CreateJob(parent, newJob); // Return the result. return job; }
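If the goal is one sprite every N seconds rather than a fixed grid, the same message supports an interval-based mode; a hedged sketch follows (the file prefix is illustrative, and Interval is the v1 SpriteSheet field used in place of ColumnCount/RowCount/TotalCount).

SpriteSheet periodicSpriteSheet = new SpriteSheet
{
    FilePrefix = "periodic-sprite-sheet",
    SpriteHeightPixels = 32,
    SpriteWidthPixels = 64,
    Interval = Duration.FromTimeSpan(TimeSpan.FromSeconds(7)) // one sprite every 7 seconds of input
};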
/// <summary> /// handles the go button for automated encoding /// checks if we're in automated 2 pass video mode /// then the video and audio configuration is checked, and if it checks out /// the audio job, video jobs and muxing job are generated, audio and video job are linked /// and encoding is started /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void queueButton_Click(object sender, System.EventArgs e) { if (String.IsNullOrEmpty(this.muxedOutput.Filename)) { return; } FileSize?desiredSize = targetSize.Value; FileSize?splitSize = splitting.Value; LogItem log = new LogItem(this.muxedOutput.Filename); MainForm.Instance.AutoEncodeLog.Add(log); if (FileSizeRadio.Checked) { log.LogValue("Desired Size", desiredSize); } else if (averageBitrateRadio.Checked) { log.LogValue("Projected Bitrate", string.Format("{0}kbps", projectedBitrateKBits.Text)); } else { log.LogEvent("No Target Size (use profile settings)"); } log.LogValue("Split Size", splitSize); MuxStream[] audio; AudioJob[] aStreams; AudioEncoderType[] muxTypes; separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes); MuxStream[] subtitles = new MuxStream[0]; ChapterInfo chapters = new ChapterInfo(); string videoInput = vInfo.VideoInput; string videoOutput = vInfo.VideoOutput; string muxedOutput = this.muxedOutput.Filename; ContainerType cot = this.container.SelectedItem as ContainerType; // determine audio language foreach (MuxStream stream in audio) { string strLanguage = LanguageSelectionContainer.GetLanguageFromFileName(Path.GetFileNameWithoutExtension(stream.path)); if (!String.IsNullOrEmpty(strLanguage)) { stream.language = strLanguage; } } if (addSubsNChapters.Checked) { AdaptiveMuxWindow amw = new AdaptiveMuxWindow(); amw.setMinimizedMode(videoOutput, "", videoStream.Settings.EncoderType, JobUtil.getFramerate(videoInput), audio, muxTypes, muxedOutput, splitSize, cot); if (amw.ShowDialog() == DialogResult.OK) { amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot); } else // user aborted, abort the whole process { return; } } removeStreamsToBeEncoded(ref audio, aStreams); MainForm.Instance.Jobs.AddJobsWithDependencies(VideoUtil.GenerateJobSeries(videoStream, muxedOutput, aStreams, subtitles, new List <string>(), String.Empty, chapters, desiredSize, splitSize, cot, prerender, audio, log, device.Text, vInfo.Zones, null, null, false), true); this.Close(); }
public JobChain GenerateMuxJobs(VideoStream video, decimal?framerate, MuxStream[] audioStreamsArray, MuxableType[] audioTypes, MuxStream[] subtitleStreamsArray, MuxableType[] subTypes, string chapterFile, MuxableType chapterInputType, ContainerType container, string output, FileSize?splitSize, List <string> inputsToDelete, string deviceType, MuxableType deviceOutputType, bool alwaysMuxOutput) { Debug.Assert(splitSize == null || splitSize.Value != FileSize.Empty); MuxProvider prov = mainForm.MuxProvider; List <MuxableType> allTypes = new List <MuxableType>(); List <MuxableType> tempTypes = new List <MuxableType>(); List <MuxableType> duplicateTypes = new List <MuxableType>(); tempTypes.AddRange(audioTypes); tempTypes.AddRange(subTypes); allTypes.Add(video.VideoType); // remove duplicate entries to speed up the process foreach (MuxableType oType in tempTypes) { bool bFound = false; foreach (MuxableType oAllType in allTypes) { if (oType.outputType.ID.Equals(oAllType.outputType.ID)) { bFound = true; break; } } if (!bFound) { allTypes.Add(oType); } else { duplicateTypes.Add(oType); } } if (chapterInputType != null) { allTypes.Add(chapterInputType); } if (deviceOutputType != null) { allTypes.Add(deviceOutputType); } // get mux path MuxPath muxPath = prov.GetMuxPath(container, alwaysMuxOutput || splitSize.HasValue, allTypes.ToArray()); // add duplicate entries back into the mux path muxPath.InitialInputTypes.AddRange(duplicateTypes); while (duplicateTypes.Count > 0) { int iPath = 0; for (int i = 0; i < muxPath.Length; i++) { foreach (MuxableType oType in muxPath[i].handledInputTypes) { if (oType.outputType.ID.Equals(duplicateTypes[0].outputType.ID)) { iPath = i; } } } muxPath[iPath].handledInputTypes.Add(duplicateTypes[0]); duplicateTypes.RemoveAt(0); } List <MuxJob> jobs = new List <MuxJob>(); List <MuxStream> subtitleStreams = new List <MuxStream>(subtitleStreamsArray); List <MuxStream> audioStreams = new List <MuxStream>(audioStreamsArray); int index = 0; int tempNumber = 1; string previousOutput = null; foreach (MuxPathLeg mpl in muxPath) { List <string> filesToDeleteThisJob = new List <string>(); MuxJob mjob = new MuxJob(); if (previousOutput != null) { mjob.Settings.MuxedInput = previousOutput; filesToDeleteThisJob.Add(previousOutput); } if (video.Settings != null) { mjob.NbOfBFrames = video.Settings.NbBframes; mjob.Codec = video.Settings.Codec.ToString(); mjob.Settings.VideoName = video.Settings.VideoName; } mjob.NbOfFrames = video.NumberOfFrames; string fpsFormated = String.Format("{0:##.###}", framerate); // this formating is required for mkvmerge at least to avoid fps rounding error mjob.Settings.Framerate = Convert.ToDecimal(fpsFormated); string tempOutputName = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output) + tempNumber + "."); tempNumber++; foreach (MuxableType o in mpl.handledInputTypes) { if (o.outputType is VideoType) { mjob.Settings.VideoInput = video.Output; if (inputsToDelete.Contains(video.Output)) { filesToDeleteThisJob.Add(video.Output); } mjob.Settings.DAR = video.DAR; } else if (o.outputType is AudioType) { MuxStream stream = audioStreams.Find(delegate(MuxStream m) { return(VideoUtil.guessAudioType(m.path) == o.outputType); }); if (stream != null) { mjob.Settings.AudioStreams.Add(stream); audioStreams.Remove(stream); if (inputsToDelete.Contains(stream.path)) { filesToDeleteThisJob.Add(stream.path); } } } else if (o.outputType is SubtitleType) { MuxStream stream = subtitleStreams.Find(delegate(MuxStream m) { 
return(VideoUtil.guessSubtitleType(m.path) == o.outputType); }); if (stream != null) { mjob.Settings.SubtitleStreams.Add(stream); subtitleStreams.Remove(stream); if (inputsToDelete.Contains(stream.path)) { filesToDeleteThisJob.Add(stream.path); } } } else if (o.outputType is ChapterType) { if ((VideoUtil.guessChapterType(chapterFile) == o.outputType)) { mjob.Settings.ChapterFile = chapterFile; } if (inputsToDelete.Contains(chapterFile)) { filesToDeleteThisJob.Add(chapterFile); } } else if (o.outputType is DeviceType) { if ((VideoUtil.guessDeviceType(deviceType) == o.outputType)) { mjob.Settings.DeviceType = deviceType; } } } foreach (MuxStream s in mjob.Settings.AudioStreams) { audioStreams.Remove(s); } foreach (MuxStream s in mjob.Settings.SubtitleStreams) { subtitleStreams.Remove(s); } mjob.FilesToDelete.AddRange(filesToDeleteThisJob); if (index == muxPath.Length - 1) { mjob.Settings.MuxedOutput = output; mjob.Settings.SplitSize = splitSize; mjob.Settings.DAR = video.DAR; mjob.ContainerType = container; } else { ContainerType cot = mpl.muxerInterface.GetContainersInCommon(muxPath[index + 1].muxerInterface)[0]; mjob.Settings.MuxedOutput = tempOutputName + cot.Extension; mjob.ContainerType = cot; } previousOutput = mjob.Settings.MuxedOutput; index++; jobs.Add(mjob); if (string.IsNullOrEmpty(mjob.Settings.VideoInput)) { mjob.Input = mjob.Settings.MuxedInput; } else { mjob.Input = mjob.Settings.VideoInput; } mjob.Output = mjob.Settings.MuxedOutput; mjob.MuxType = mpl.muxerInterface.MuxerType; } return(new SequentialChain(jobs.ToArray())); }
public Job CreateJobWithAnimatedOverlay( string projectId, string location, string inputUri, string overlayImageUri, string outputUri) { // Create the client. TranscoderServiceClient client = TranscoderServiceClient.Create(); // Build the parent location name. LocationName parent = new LocationName(projectId, location); // Build the job config. VideoStream videoStream0 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 550000, FrameRate = 60, HeightPixels = 360, WidthPixels = 640 } }; AudioStream audioStream0 = new AudioStream { Codec = "aac", BitrateBps = 64000 }; // Create the overlay image. Only JPEG is supported. Image resolution is based on output // video resolution. This example uses the values x: 0 and y: 0 to maintain the original // resolution of the overlay image. Overlay.Types.Image overlayImage = new Overlay.Types.Image { Uri = overlayImageUri, Alpha = 1, Resolution = new Overlay.Types.NormalizedCoordinate { X = 0, Y = 0 } }; // Create the starting animation (when the overlay starts to fade in). Use the values x: 0.5 // and y: 0.5 to position the top-left corner of the overlay in the center of the output // video. Overlay.Types.Animation animationFadeIn = new Overlay.Types.Animation { AnimationFade = new Overlay.Types.AnimationFade { FadeType = Overlay.Types.FadeType.FadeIn, Xy = new Overlay.Types.NormalizedCoordinate { X = 0.5, Y = 0.5 }, StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(5)), EndTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(10)) } }; // Create the ending animation (when the overlay starts to fade out). The overlay will start // to fade out at the 12-second mark in the output video. Overlay.Types.Animation animationFadeOut = new Overlay.Types.Animation { AnimationFade = new Overlay.Types.AnimationFade { FadeType = Overlay.Types.FadeType.FadeOut, Xy = new Overlay.Types.NormalizedCoordinate { X = 0.5, Y = 0.5 }, StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(12)), EndTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(15)) } }; // Create the overlay and add the image and animations to it. Overlay overlay = new Overlay { Image = overlayImage, Animations = { animationFadeIn, animationFadeOut } }; ElementaryStream elementaryStream0 = new ElementaryStream { Key = "video_stream0", VideoStream = videoStream0 }; ElementaryStream elementaryStream1 = new ElementaryStream { Key = "audio_stream0", AudioStream = audioStream0 }; MuxStream muxStream0 = new MuxStream { Key = "sd", Container = "mp4", ElementaryStreams = { "video_stream0", "audio_stream0" } }; Input input = new Input { Key = "input0", Uri = inputUri }; Output output = new Output { Uri = outputUri }; JobConfig jobConfig = new JobConfig { Inputs = { input }, Output = output, ElementaryStreams = { elementaryStream0, elementaryStream1 }, MuxStreams = { muxStream0 }, Overlays = { overlay } }; // Build the job. Job newJob = new Job { InputUri = inputUri, OutputUri = outputUri, Config = jobConfig }; // Call the API. Job job = client.CreateJob(parent, newJob); // Return the result. return(job); }
/// <summary> /// handles the go button for automated encoding /// checks if we're in automated 2 pass video mode /// then the video and audio configuration is checked, and if it checks out /// the audio job, video jobs and muxing job are generated, audio and video job are linked /// and encoding is started /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void queueButton_Click(object sender, System.EventArgs e) { if (!string.IsNullOrEmpty(this.muxedOutput.Filename)) { FileSize?desiredSize = targetSize.Value; FileSize?splitSize = splitting.Value; if (FileSizeRadio.Checked) { log.LogValue("Desired Size ", desiredSize); } else if (averageBitrateRadio.Checked) { log.LogValue("Projected Bitrate ", string.Format("{0}kbps", projectedBitrateKBits.Text)); } log.LogValue("Split Size ", splitSize); MuxStream[] audio; AudioJob[] aStreams; AudioEncoderType[] muxTypes; separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes); MuxStream[] subtitles = new MuxStream[0]; string chapters = ""; string videoInput = vInfo.VideoInput; string videoOutput = vInfo.VideoOutput; string muxedOutput = this.muxedOutput.Filename; ContainerType cot = this.container.SelectedItem as ContainerType; // determine audio language foreach (MuxStream stream in audio) { foreach (KeyValuePair <string, string> strLanguage in LanguageSelectionContainer.Languages) { if (Path.GetFileNameWithoutExtension(stream.path).ToLower(System.Globalization.CultureInfo.InvariantCulture).Contains(strLanguage.Key.ToLower(System.Globalization.CultureInfo.InvariantCulture))) { stream.language = strLanguage.Key; break; } } } if (addSubsNChapters.Checked) { AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm); amw.setMinimizedMode(videoOutput, "", videoStream.Settings.EncoderType, jobUtil.getFramerate(videoInput), audio, muxTypes, muxedOutput, splitSize, cot); if (amw.ShowDialog() == DialogResult.OK) { amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot); } else // user aborted, abort the whole process { return; } } removeStreamsToBeEncoded(ref audio, aStreams); mainForm.Jobs.addJobsWithDependencies(vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams, subtitles, chapters, desiredSize, splitSize, cot, this.prerender, audio, log, this.device.Text, vInfo.Zones, null, null)); this.Close(); } }
private void generateMetaFile() { metaFile = Path.ChangeExtension(job.Output, ".meta"); MuxSettings settings = job.Settings; CultureInfo ci = new CultureInfo("en-us"); using (StreamWriter sw = new StreamWriter(metaFile, false, Encoding.Default)) { string vcodecID = ""; string extra = ""; string trackID = ""; MediaInfoFile oVideoInfo = null; sw.Write("MUXOPT --no-pcr-on-video-pid --new-audio-pes"); // mux options if (!string.IsNullOrEmpty(settings.DeviceType) && settings.DeviceType != "Standard") { switch (settings.DeviceType) { case "Blu-ray": sw.Write(" --blu-ray"); break; case "AVCHD": sw.Write(" --avchd"); break; } if (settings.ChapterInfo.HasChapters) // chapters are defined { sw.Write(" --custom-chapters" + settings.ChapterInfo.GetChapterTimeLine()); } job.Output = Path.GetDirectoryName(job.Output) + "\\" + Path.GetFileNameWithoutExtension(job.Output); // remove m2ts file extension - use folder name only with this mode } sw.Write(" --vbr --vbv-len=500"); // mux options if (settings.SplitSize.HasValue) { sw.Write(" --split-size=" + settings.SplitSize.Value.MB + "MB"); } string fpsString = null; string videoFile = null; if (!string.IsNullOrEmpty(settings.VideoInput)) { videoFile = settings.VideoInput; } else if (!string.IsNullOrEmpty(settings.MuxedInput)) { videoFile = settings.MuxedInput; } if (!String.IsNullOrEmpty(videoFile)) { oVideoInfo = new MediaInfoFile(videoFile, ref log); if (oVideoInfo.HasVideo) { if (oVideoInfo.VideoInfo.Codec == VideoCodec.AVC) { vcodecID = "V_MPEG4/ISO/AVC"; extra = "insertSEI, contSPS"; } else if (oVideoInfo.VideoInfo.Codec == VideoCodec.HEVC) { vcodecID = "V_MPEGH/ISO/HEVC"; } else if (oVideoInfo.VideoInfo.Codec == VideoCodec.MPEG2) { vcodecID = "V_MPEG-2"; } else if (oVideoInfo.VideoInfo.Codec == VideoCodec.VC1) { vcodecID = "V_MS/VFW/WVC1"; } if (oVideoInfo.ContainerFileType == ContainerType.MP4) { trackID = "track=1"; } else if (oVideoInfo.ContainerFileType == ContainerType.MKV || oVideoInfo.ContainerFileType == ContainerType.M2TS) { trackID = "track=" + oVideoInfo.VideoInfo.Track.TrackID; } sw.Write("\n" + vcodecID + ", "); sw.Write("\"" + videoFile + "\""); if (settings.DAR.HasValue) { sw.Write(", ar=" + settings.DAR.Value.X + ":" + settings.DAR.Value.Y); } fpsString = oVideoInfo.VideoInfo.FPS.ToString(ci); if (settings.Framerate.HasValue) { fpsString = settings.Framerate.Value.ToString(ci); } sw.Write(", fps=" + fpsString); if (!String.IsNullOrEmpty(extra)) { sw.Write(", " + extra); } if (!String.IsNullOrEmpty(trackID)) { sw.Write(", " + trackID); } } else { log.Error("No video track found: " + videoFile); } } foreach (object o in settings.AudioStreams) { MuxStream stream = (MuxStream)o; string acodecID = ""; MediaInfoFile oInfo = new MediaInfoFile(stream.path, ref log); if (!oInfo.HasAudio) { log.Error("No audio track found: " + stream.path); continue; } if (oInfo.AudioInfo.Tracks[0].AudioCodec == AudioCodec.AC3 || oInfo.AudioInfo.Tracks[0].AudioCodec == AudioCodec.EAC3 || oInfo.AudioInfo.Tracks[0].AudioCodec == AudioCodec.THDAC3) { acodecID = "A_AC3"; } else if (oInfo.AudioInfo.Tracks[0].AudioCodec == AudioCodec.AAC) { acodecID = "A_AAC"; } else if (oInfo.AudioInfo.Tracks[0].AudioCodec == AudioCodec.DTS) { acodecID = "A_DTS"; } else if (oInfo.AudioInfo.Tracks[0].AudioCodec == AudioCodec.PCM) { acodecID = "A_LPCM"; } else { log.Error("Audio Codec not supported: " + oInfo.AudioInfo.Tracks[0].Codec); continue; } sw.Write("\n" + acodecID + ", "); sw.Write("\"" + stream.path + "\""); if (stream.delay != 0) { sw.Write(", timeshift={0}ms", 
stream.delay); } if (!String.IsNullOrEmpty(stream.language)) { foreach (KeyValuePair <string, string> strLanguage in LanguageSelectionContainer.Languages) { if (stream.language.ToLowerInvariant().Equals(strLanguage.Key.ToLowerInvariant())) { sw.Write(", lang=" + strLanguage.Value); break; } } } } foreach (object o in settings.SubtitleStreams) { MuxStream stream = (MuxStream)o; string scodecID = ""; if (stream.path.ToLowerInvariant().EndsWith(".srt")) { scodecID = "S_TEXT/UTF8"; } else { scodecID = "S_HDMV/PGS"; // sup files } sw.Write("\n" + scodecID + ", "); sw.Write("\"" + stream.path + "\""); if (stream.delay != 0) { sw.Write(", timeshift={0}ms", stream.delay); } if (stream.path.ToLowerInvariant().EndsWith(".srt") && oVideoInfo != null && !String.IsNullOrEmpty(fpsString)) { sw.Write(", video-width={0}, video-height={1}, fps={2}", oVideoInfo.VideoInfo.Width, oVideoInfo.VideoInfo.Height, fpsString); } if (!String.IsNullOrEmpty(stream.language)) { foreach (KeyValuePair <string, string> strLanguage in LanguageSelectionContainer.Languages) { if (stream.language.ToLowerInvariant().Equals(strLanguage.Key.ToLowerInvariant())) { sw.Write(", lang=" + strLanguage.Value); break; } } } } } job.FilesToDelete.Add(metaFile); if (File.Exists(metaFile)) { string strMuxFile = String.Empty; try { StreamReader sr = new StreamReader(metaFile); strMuxFile = sr.ReadToEnd(); sr.Close(); } catch (Exception) { } log.LogValue("mux script", strMuxFile); } }
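For orientation, the writer above produces a tsMuxeR script roughly like the following (paths, fps, delay and language values are illustrative; the line layout simply follows the Write calls in the method):

MUXOPT --no-pcr-on-video-pid --new-audio-pes --vbr --vbv-len=500
V_MPEG4/ISO/AVC, "movie_video.mp4", fps=23.976, insertSEI, contSPS, track=1
A_AC3, "movie_audio.ac3", timeshift=-80ms, lang=eng
S_TEXT/UTF8, "movie_subs.srt", video-width=1280, video-height=720, fps=23.976, lang=eng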
public JobChain GenerateMuxJobs(VideoStream video, decimal?framerate, MuxStream[] audioStreamsArray, MuxableType[] audioTypes, MuxStream[] subtitleStreamsArray, MuxableType[] subTypes, string chapterFile, MuxableType chapterInputType, ContainerType container, string output, FileSize?splitSize, List <string> inputsToDelete) { Debug.Assert(splitSize == null || splitSize.Value != FileSize.Empty); MuxProvider prov = mainForm.MuxProvider; List <MuxableType> allTypes = new List <MuxableType>(); allTypes.Add(video.VideoType); allTypes.AddRange(audioTypes); allTypes.AddRange(subTypes); if (chapterInputType != null) { allTypes.Add(chapterInputType); } MuxPath muxPath = prov.GetMuxPath(container, allTypes.ToArray()); List <MuxJob> jobs = new List <MuxJob>(); List <MuxStream> subtitleStreams = new List <MuxStream>(subtitleStreamsArray); List <MuxStream> audioStreams = new List <MuxStream>(audioStreamsArray); int index = 0; int tempNumber = 1; string previousOutput = null; foreach (MuxPathLeg mpl in muxPath) { List <string> filesToDeleteThisJob = new List <string>(); MuxJob mjob = new MuxJob(); if (previousOutput != null) { mjob.Settings.MuxedInput = previousOutput; filesToDeleteThisJob.Add(previousOutput); } mjob.NbOfFrames = video.NumberOfFrames; mjob.NbOfBFrames = video.Settings.NbBframes; mjob.Codec = video.Settings.Codec.ToString(); mjob.Settings.Framerate = framerate; string tempOutputName = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output) + tempNumber + "."); tempNumber++; foreach (MuxableType o in mpl.handledInputTypes) { if (o.outputType is VideoType) { mjob.Settings.VideoInput = video.Output; if (inputsToDelete.Contains(video.Output)) { filesToDeleteThisJob.Add(video.Output); } mjob.Settings.DAR = video.DAR; } else if (o.outputType is AudioType) { MuxStream stream = audioStreams.Find(delegate(MuxStream m) { return(VideoUtil.guessAudioType(m.path) == o.outputType); }); if (stream != null) { mjob.Settings.AudioStreams.Add(stream); audioStreams.Remove(stream); if (inputsToDelete.Contains(stream.path)) { filesToDeleteThisJob.Add(stream.path); } } } else if (o.outputType is SubtitleType) { MuxStream stream = subtitleStreams.Find(delegate(MuxStream m) { return(VideoUtil.guessSubtitleType(m.path) == o.outputType); }); if (stream != null) { mjob.Settings.SubtitleStreams.Add(stream); subtitleStreams.Remove(stream); if (inputsToDelete.Contains(stream.path)) { filesToDeleteThisJob.Add(stream.path); } } } else if (o.outputType is ChapterType) { if ((VideoUtil.guessChapterType(chapterFile) == o.outputType)) { mjob.Settings.ChapterFile = chapterFile; } if (inputsToDelete.Contains(chapterFile)) { filesToDeleteThisJob.Add(chapterFile); } } } foreach (MuxStream s in mjob.Settings.AudioStreams) { audioStreams.Remove(s); } foreach (MuxStream s in mjob.Settings.SubtitleStreams) { subtitleStreams.Remove(s); } mjob.FilesToDelete.AddRange(filesToDeleteThisJob); if (index == muxPath.Length - 1) { mjob.Settings.MuxedOutput = output; mjob.Settings.SplitSize = splitSize; mjob.Settings.DAR = video.DAR; mjob.ContainerType = container; } else { ContainerType cot = mpl.muxerInterface.GetContainersInCommon(muxPath[index + 1].muxerInterface)[0]; mjob.Settings.MuxedOutput = tempOutputName + cot.Extension; mjob.ContainerType = cot; } previousOutput = mjob.Settings.MuxedOutput; index++; jobs.Add(mjob); if (string.IsNullOrEmpty(mjob.Settings.VideoInput)) { mjob.Input = mjob.Settings.MuxedInput; } else { mjob.Input = mjob.Settings.VideoInput; } mjob.Output = mjob.Settings.MuxedOutput; 
mjob.MuxType = mpl.muxerInterface.MuxerType; } return(new SequentialChain(jobs.ToArray())); }
public JobTemplate CreateJobTemplate( string projectId, string location, string templateId) { // Create the client. TranscoderServiceClient client = TranscoderServiceClient.Create(); // Build the parent location name. LocationName parentLocation = new LocationName(projectId, location); // Build the job template config. VideoStream videoStream0 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 550000, FrameRate = 60, HeightPixels = 360, WidthPixels = 640 } }; VideoStream videoStream1 = new VideoStream { H264 = new VideoStream.Types.H264CodecSettings { BitrateBps = 2500000, FrameRate = 60, HeightPixels = 720, WidthPixels = 1280 } }; AudioStream audioStream0 = new AudioStream { Codec = "aac", BitrateBps = 64000 }; ElementaryStream elementaryStream0 = new ElementaryStream { Key = "video_stream0", VideoStream = videoStream0 }; ElementaryStream elementaryStream1 = new ElementaryStream { Key = "video_stream1", VideoStream = videoStream1 }; ElementaryStream elementaryStream2 = new ElementaryStream { Key = "audio_stream0", AudioStream = audioStream0 }; MuxStream muxStream0 = new MuxStream { Key = "sd", Container = "mp4", ElementaryStreams = { "video_stream0", "audio_stream0" } }; MuxStream muxStream1 = new MuxStream { Key = "hd", Container = "mp4", ElementaryStreams = { "video_stream1", "audio_stream0" } }; JobConfig jobConfig = new JobConfig { ElementaryStreams = { elementaryStream0, elementaryStream1, elementaryStream2 }, MuxStreams = { muxStream0, muxStream1 } }; JobTemplate newJobTemplate = new JobTemplate { Config = jobConfig }; // Call the API. JobTemplate jobTemplate = client.CreateJobTemplate(parentLocation, newJobTemplate, templateId); return(jobTemplate); }
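A hedged sketch of using the template afterwards: a job created from it references the template by id instead of carrying an inline config. The URIs are placeholders; TemplateId is the Job field the v1 API uses for this, as an alternative to setting Config.

Job jobFromTemplate = client.CreateJob(parentLocation, new Job
{
    InputUri = "gs://my-bucket/my-input.mp4",  // placeholder
    OutputUri = "gs://my-bucket/my-output/",   // placeholder
    TemplateId = templateId
});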