public async Task Init_StreamConfigAvailable_ReturnsStreamConfig(InitType initType) { var expectedConfig = new VideoStreamConfig { Codec = VideoCodec.H263, FrameRateDen = 1, FrameRateNum = 2, BitRate = 1 }; var formatContextStub = Substitute.For <IAVFormatContext>(); formatContextStub.ReadConfig(Arg.Any <int>()).Returns(expectedConfig); var glueStub = Substitute.For <IFFmpegGlue>(); glueStub.AllocFormatContext().Returns(formatContextStub); using (var demuxer = CreateFFmpegDemuxer(glueStub)) { var clipConfig = await InitDemuxer(demuxer, initType); var receivedConfig = clipConfig.StreamConfigs[0]; Assert.That(receivedConfig, Is.EqualTo(expectedConfig)); } }
public void OnStreamConfigChanged_WhenStreamConfigIsUnsupported_ThrowsArgumentException() { using (var stream = CreatePacketStream(StreamType.Audio)) { var videoConfig = new VideoStreamConfig(); Assert.Throws <ArgumentException>(() => stream.OnStreamConfigChanged(videoConfig)); } }
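For context, a minimal sketch of the guard this test exercises, assuming a PacketStream that tracks its own stream type; the StreamType() helper and the streamType field are assumptions made for illustration, not the actual implementation:

public void OnStreamConfigChanged(StreamConfig config)
{
    if (config == null)
        throw new ArgumentNullException(nameof(config));

    // A video config pushed into an audio packet stream (as in the test above)
    // must be rejected.
    if (config.StreamType() != streamType)
        throw new ArgumentException("Invalid stream config type");

    // ...store/apply the new configuration...
}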
// In case of H265, VideoCodecConfig::extraData contains a box which, according
// to ISO/IEC 14496-15 chapter 8.3.3.1.2, has the following structure:
//
// aligned(8) class HEVCDecoderConfigurationRecord {
//   unsigned int(8) configurationVersion = 1;
//   unsigned int(2) general_profile_space;
//   unsigned int(1) general_tier_flag;
//   unsigned int(5) general_profile_idc;
//   unsigned int(32) general_profile_compatibility_flags;
//   unsigned int(48) general_constraint_indicator_flags;
//   unsigned int(8) general_level_idc;
//   bit(4) reserved = '1111'b;
//   unsigned int(12) min_spatial_segmentation_idc;
//   bit(6) reserved = '111111'b;
//   unsigned int(2) parallelismType;
//   bit(6) reserved = '111111'b;
//   unsigned int(2) chromaFormat;
//   bit(5) reserved = '11111'b;
//   unsigned int(3) bitDepthLumaMinus8;
//   bit(5) reserved = '11111'b;
//   unsigned int(3) bitDepthChromaMinus8;
//   bit(16) avgFrameRate;
//   bit(2) constantFrameRate;
//   bit(3) numTemporalLayers;
//   bit(1) temporalIdNested;
//   unsigned int(2) lengthSizeMinusOne;
//   unsigned int(8) numOfArrays;
//   for (j=0; j < numOfArrays; j++) {
//     bit(1) array_completeness;
//     unsigned int(1) reserved = 0;
//     unsigned int(6) NAL_unit_type;
//     unsigned int(16) numNalus;
//     for (i=0; i < numNalus; i++) {
//       unsigned int(16) nalUnitLength;
//       bit(8*nalUnitLength) nalUnit;
//     }
//   }
// }
//
// "NAL_unit_type indicates the type of the NAL units in the following array
// (which must be all of that type); it takes a value as defined in
// ISO/IEC 23008-2; it is restricted to take one of the values indicating a
// VPS, SPS, PPS, or SEI NAL unit;"
// (from ISO/IEC 14496-15 chapter 8.3.3.1.2)
//
// To switch between representations with different codec extra data in
// adaptive streaming scenarios, each packet has to be modified by inserting
// the extracted NALs before the video samples in the following way:
//   for each nalUnit:
//     write nalUnitLength on lengthSizeMinusOne + 1 bytes in MSB (BigEndian) format
//     write nalUnit (without any modifications)
//   append the original video packet data
private void ExtractH265ExtraData(VideoStreamConfig videoConfig)
{
    // Need at least the 21-byte record header plus lengthSizeMinusOne and numOfArrays.
    if (videoConfig.CodecExtraData.Length < 23)
    {
        Logger.Error("extra_data is too short to pass valid SPS/PPS header");
        return;
    }

    var extraData = new byte[videoConfig.CodecExtraData.Length];
    Buffer.BlockCopy(videoConfig.CodecExtraData, 0, extraData, 0, videoConfig.CodecExtraData.Length);

    var idx = 21;
    var lengthSize = (ReadByte(extraData, ref idx) & 0x3u) + 1;
    var numOfArrays = ReadByte(extraData, ref idx);
    var nals = new List<byte[]>();

    for (var j = 0; j < numOfArrays; ++j)
    {
        if (extraData.Length < idx + 3)
        {
            Logger.Error("extra data too short");
            return;
        }

        var nalUnitType = ReadByte(extraData, ref idx) & 0x3Fu; // NAL_unit_type (not used here)
        var numNalus = ReadUInt16(extraData, ref idx);

        for (var i = 0; i < numNalus; ++i)
        {
            if (extraData.Length < idx + 2)
            {
                Logger.Error("extra data too short");
                return;
            }

            var nalUnitLength = ReadUInt16(extraData, ref idx);
            if (extraData.Length < idx + nalUnitLength)
            {
                Logger.Error("extra data too short");
                return;
            }

            var elem = new byte[nalUnitLength];
            Buffer.BlockCopy(extraData, idx, elem, 0, nalUnitLength);
            nals.Add(elem);
            idx += nalUnitLength;
        }
    }

    var size = nals.Sum(o => lengthSize + o.Length);
    parsedExtraData = new byte[size];
    var offset = 0;
    CopySet(lengthSize, nals, ref offset);
}
// In case of H265, VideoCodecConfig::extraData contains a box which, according
// to ISO/IEC 14496-15 chapter 8.3.3.1.2, has the following structure:
//
// aligned(8) class HEVCDecoderConfigurationRecord {
//   unsigned int(8) configurationVersion = 1;
//   unsigned int(2) general_profile_space;
//   unsigned int(1) general_tier_flag;
//   unsigned int(5) general_profile_idc;
//   unsigned int(32) general_profile_compatibility_flags;
//   unsigned int(48) general_constraint_indicator_flags;
//   unsigned int(8) general_level_idc;
//   bit(4) reserved = '1111'b;
//   unsigned int(12) min_spatial_segmentation_idc;
//   bit(6) reserved = '111111'b;
//   unsigned int(2) parallelismType;
//   bit(6) reserved = '111111'b;
//   unsigned int(2) chromaFormat;
//   bit(5) reserved = '11111'b;
//   unsigned int(3) bitDepthLumaMinus8;
//   bit(5) reserved = '11111'b;
//   unsigned int(3) bitDepthChromaMinus8;
//   bit(16) avgFrameRate;
//   bit(2) constantFrameRate;
//   bit(3) numTemporalLayers;
//   bit(1) temporalIdNested;
//   unsigned int(2) lengthSizeMinusOne;
//   unsigned int(8) numOfArrays;
//   for (j=0; j < numOfArrays; j++) {
//     bit(1) array_completeness;
//     unsigned int(1) reserved = 0;
//     unsigned int(6) NAL_unit_type;
//     unsigned int(16) numNalus;
//     for (i=0; i < numNalus; i++) {
//       unsigned int(16) nalUnitLength;
//       bit(8*nalUnitLength) nalUnit;
//     }
//   }
// }
//
// "NAL_unit_type indicates the type of the NAL units in the following array
// (which must be all of that type); it takes a value as defined in
// ISO/IEC 23008-2; it is restricted to take one of the values indicating a
// VPS, SPS, PPS, or SEI NAL unit;"
// (from ISO/IEC 14496-15 chapter 8.3.3.1.2)
//
// To switch between representations with different codec extra data in
// adaptive streaming scenarios, each packet has to be modified by inserting
// the extracted NALs before the video samples in the following way:
//   for each nalUnit:
//     write nalUnitLength on lengthSizeMinusOne + 1 bytes in MSB (BigEndian) format
//     write nalUnit (without any modifications)
//   append the original video packet data
private void ExtractH265ExtraData(VideoStreamConfig vconf)
{
    var extraData = vconf.CodecExtraData;

    // Need at least the 21-byte record header plus lengthSizeMinusOne and numOfArrays.
    if (extraData.Length < 23)
    {
        Logger.Error("extra_data is too short to pass valid SPS/PPS header");
        return;
    }

    var idx = 21;
    var lengthSize = (ReadByte(extraData, ref idx) & 0x3u) + 1;
    var numOfArrays = ReadByte(extraData, ref idx);
    var nals = new List<byte[]>();

    for (var j = 0; j < numOfArrays; ++j)
    {
        if (extraData.Length < idx + 3)
        {
            Logger.Error("extra data too short");
            return;
        }

        var nalUnitType = ReadByte(extraData, ref idx) & 0x3Fu; // NAL_unit_type (not used here)
        var numNalus = ReadUInt16(extraData, ref idx);

        for (var i = 0; i < numNalus; ++i)
        {
            if (extraData.Length < idx + 2)
            {
                Logger.Error("extra data too short");
                return;
            }

            var nalUnitLength = ReadUInt16(extraData, ref idx);
            if (extraData.Length < idx + nalUnitLength)
            {
                Logger.Error("extra data too short");
                return;
            }

            nals.Add(extraData.AsSpan().Slice(idx, nalUnitLength).ToArray());
            idx += nalUnitLength;
        }
    }

    var size = nals.Sum(o => lengthSize + o.Length);
    _parsedExtraData = new byte[size];
    var offset = 0;
    CopySet(lengthSize, nals, ref offset);
}
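CopySet is called by both ExtractH265ExtraData variants above but its body is not shown. A minimal sketch of what it presumably does, based on the comment above (each NAL is written with its length prefixed on lengthSize bytes in big-endian order); the _parsedExtraData field name follows the second variant and is an assumption:

// Sketch, not the original implementation: serialize the collected NALs into
// the pre-allocated buffer as [length on lengthSize bytes, MSB first][NAL bytes].
private void CopySet(uint lengthSize, List<byte[]> nals, ref int offset)
{
    foreach (var nal in nals)
    {
        // Length prefix, big-endian, on lengthSize bytes.
        for (var i = (int)lengthSize - 1; i >= 0; --i)
            _parsedExtraData[offset++] = (byte)((nal.Length >> (8 * i)) & 0xFF);

        // NAL payload copied without modification.
        Buffer.BlockCopy(nal, 0, _parsedExtraData, offset, nal.Length);
        offset += nal.Length;
    }
}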
public void Start_StreamConfigFound_PublishesStreamConfig(StartType startType) { AsyncContext.Run(async() => { var videoConfig = new VideoStreamConfig(); var demuxerStub = CreateDemuxerStub(new ClipConfiguration { StreamConfigs = new List <StreamConfig> { videoConfig } }, startType); using (var controller = new DemuxerController(demuxerStub)) { var streamConfigTask = controller.StreamConfigReady().FirstAsync().ToTask(); StartController(controller, startType); var receivedConfig = await streamConfigTask; Assert.That(receivedConfig, Is.EqualTo(videoConfig)); } }); }
public static void Run() { /* Create BitcodinApi */ const string apiKey = "YOUR_API_KEY"; var bitApi = new BitcodinApi(apiKey); var inputConfig = new HttpInputConfig { Url = "http://bitbucketireland.s3.amazonaws.com/Sintel-two-audio-streams-short.mkv" }; Input input; try { input = bitApi.CreateInput(inputConfig); Console.WriteLine("Could create input: " + input.Filename); } catch (BitcodinApiException e) { Console.WriteLine("Could not create input: " + e); return; } var encodingProfileConfig = new EncodingProfileConfig { Name = "MyApiTestEncodingProfile" }; /* CREATE VIDEO STREAM CONFIGS */ var videoStreamConfig1 = new VideoStreamConfig { Bitrate = 4800000, Height = 1080, Width = 1920 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig1); var videoStreamConfig2 = new VideoStreamConfig { Bitrate = 2400000, Height = 720, Width = 1280 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig2); var videoStreamConfig3 = new VideoStreamConfig { Bitrate = 1200000, Height = 480, Width = 854 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig3); /* CREATE AUDIO STREAM CONFIGS */ var audioStreamConfig = new AudioStreamConfig { Bitrate = 128000 }; encodingProfileConfig.AudioStreamConfigs.Add(audioStreamConfig); /* CREATE ENCODING PROFILE */ EncodingProfile encodingProfile; try { encodingProfile = bitApi.CreateEncodingProfile(encodingProfileConfig); Console.WriteLine("Could create encoding profile: " + encodingProfile.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create encoding profile: " + e); return; } /* CREATE DRM WIDEVINE CONFIG */ var widevineDrmConfig = new WidevineDrmConfig { RequestUrl = "http://license.uat.widevine.com/cenc/getcontentkey", SigningKey = "1ae8ccd0e7985cc0b6203a55855a1034afc252980e970ca90e5202689f947ab9", SigningIv = "d58ce954203b7c9a9a9d467f59839249", ContentId = "746573745f69645f4639465043304e4f", Provider = "widevine_test", Method = DrmMethod.MpegCenc }; /* Create Job */ Console.WriteLine("Create Job"); var jobConfig = new JobConfig { EncodingProfileId = encodingProfile.EncodingProfileId, InputId = input.InputId }; jobConfig.ManifestTypes.Add(ManifestType.MpegDashMpd); jobConfig.ManifestTypes.Add(ManifestType.HlsM3U8); jobConfig.Speed = Speed.Standard; jobConfig.DrmConfig = widevineDrmConfig; Job job; try { job = bitApi.CreateJob(jobConfig); } catch (BitcodinApiException e) { Console.WriteLine("Could not create job: " + e); return; } JobDetails jobDetails; do { try { jobDetails = bitApi.GetJobDetails(job.JobId); Console.WriteLine("Status: " + jobDetails.JobStatus + " - Enqueued Duration: " + jobDetails.EnqueueDuration + "s" + " - Realtime Factor: " + jobDetails.RealtimeFactor + " - Encoded Duration: " + jobDetails.EncodedDuration + "s" + " - Output: " + jobDetails.BytesWritten / (double)1024 / 1024 + "MB" + " - Duration: " + jobDetails.Duration + " - FrameRate: " + jobDetails.FrameRate + " - JobId: " + jobDetails.JobId + " - SegmentsSplitted: " + jobDetails.SegmentsSplitted); } catch (BitcodinApiException) { Console.WriteLine("Could not get any job details"); return; } if (jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 4))) { Console.WriteLine("Error during transcoding"); return; } Thread.Sleep(2000); } while (!jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 3))); Console.WriteLine("Job with ID " + job.JobId + " finished successfully!"); var outputConfig = new FtpOutputConfig { Name = "TestFtpOutput", Host = "ftp.yourdomain.com/content", Username = "******", Password = "******" }; Output output; try { output = bitApi.CreateOutput(outputConfig); Console.WriteLine("Output has been created: " + output.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create Output: " + e); return; } var transferConfig = new TransferConfig { JobId = job.JobId, OutputId = output.OutputId }; try { bitApi.Transfer(transferConfig); Console.WriteLine("Output has been transferred"); } catch (BitcodinApiException e) { Console.WriteLine("Could not transfer Output: " + e); } }
private static bool IsCompatible(this VideoStreamConfig config, VideoStreamConfig otherConfig) { return(otherConfig != null && config.Codec == otherConfig.Codec); }
public static void Run() { /* Create BitcodinApi */ const string apiKey = "YOUR_API_KEY"; var bitApi = new BitcodinApi(apiKey); /* Create FTP Input */ var ftpInputConfig = new FtpInputConfig { Url = "path/to/file.mkv", Username = "******", Password = "******" }; Input input; try { input = bitApi.CreateInput(ftpInputConfig); Console.WriteLine("Created FTP Input: " + input.Filename); } catch (BitcodinApiException e) { Console.WriteLine("Could not create FTP input: " + e); return; } /* Create EncodingProfile */ var videoConfig1 = new VideoStreamConfig { Bitrate = 4800000, Width = 1920, Height = 1080, Profile = Profile.Main, Preset = Preset.Premium }; var videoConfig2 = new VideoStreamConfig { Bitrate = 2400000, Width = 1280, Height = 720, Profile = Profile.Main, Preset = Preset.Premium }; var videoConfig3 = new VideoStreamConfig { Bitrate = 1200000, Width = 854, Height = 480, Profile = Profile.Main, Preset = Preset.Premium }; var encodingProfileConfig = new EncodingProfileConfig { Name = "FTPTestProfile" }; encodingProfileConfig.VideoStreamConfigs.Add(videoConfig1); encodingProfileConfig.VideoStreamConfigs.Add(videoConfig2); encodingProfileConfig.VideoStreamConfigs.Add(videoConfig3); var audioStreamConfig = new AudioStreamConfig { DefaultStreamId = 0, Bitrate = 192000 }; encodingProfileConfig.AudioStreamConfigs.Add(audioStreamConfig); EncodingProfile encodingProfile; try { encodingProfile = bitApi.CreateEncodingProfile(encodingProfileConfig); Console.WriteLine("Could create profile: " + encodingProfile.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create encoding profile: " + e); return; } /* Create Job */ Console.WriteLine("Create Job"); var jobConfig = new JobConfig { EncodingProfileId = encodingProfile.EncodingProfileId, InputId = input.InputId }; jobConfig.ManifestTypes.Add(ManifestType.MpegDashMpd); jobConfig.ManifestTypes.Add(ManifestType.HlsM3U8); Job job; try { job = bitApi.CreateJob(jobConfig); } catch (BitcodinApiException e) { Console.WriteLine("Could not create job: " + e); return; } JobDetails jobDetails; do { try { jobDetails = bitApi.GetJobDetails(job.JobId); Console.WriteLine("Status: " + jobDetails.JobStatus + " - Enqueued Duration: " + jobDetails.EnqueueDuration + "s" + " - Realtime Factor: " + jobDetails.RealtimeFactor + " - Encoded Duration: " + jobDetails.EncodedDuration + "s" + " - Output: " + jobDetails.BytesWritten / (double)1024 / 1024 + "MB" + " - Duration: " + jobDetails.Duration + " - FrameRate: " + jobDetails.FrameRate + " - JobId: " + jobDetails.JobId + " - SegmentsSplitted: " + jobDetails.SegmentsSplitted); } catch (BitcodinApiException) { Console.WriteLine("Could not get any job details"); return; } if (jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 4))) { Console.WriteLine("Error during transcoding"); return; } Thread.Sleep(2000); } while (!jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 3))); Console.WriteLine("Job with ID " + job.JobId + " finished successfully!"); }
public static void Run() { /* Create BitcodinApi */ const string apiKey = "YOUR_API_KEY"; var bitApi = new BitcodinApi(apiKey); var inputConfig = new HttpInputConfig { Url = "http://bitbucketireland.s3.amazonaws.com/Sintel-two-audio-streams-short.mkv" }; Input input; try { input = bitApi.CreateInput(inputConfig); Console.WriteLine("Could create input: " + input.Filename); } catch (BitcodinApiException e) { Console.WriteLine("Could not create input: " + e); return; } var encodingProfileConfig = new EncodingProfileConfig { Name = "MyApiTestEncodingProfile" }; /* CREATE VIDEO STREAM CONFIGS */ var videoStreamConfig1 = new VideoStreamConfig { Bitrate = 4800000, Height = 1080, Width = 1920 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig1); var videoStreamConfig2 = new VideoStreamConfig { Bitrate = 2400000, Height = 720, Width = 1280 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig2); var videoStreamConfig3 = new VideoStreamConfig { Bitrate = 1200000, Height = 480, Width = 854 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig3); /* CREATE AUDIO STREAM CONFIGS */ var audioStreamConfig = new AudioStreamConfig { Bitrate = 128000 }; encodingProfileConfig.AudioStreamConfigs.Add(audioStreamConfig); /* CREATE ENCODING PROFILE */ EncodingProfile encodingProfile; try { encodingProfile = bitApi.CreateEncodingProfile(encodingProfileConfig); Console.WriteLine("Could create profile: " + encodingProfile.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create encoding profile: " + e); return; } /* Create Job */ Console.WriteLine("Create Job"); var jobConfig = new JobConfig { EncodingProfileId = encodingProfile.EncodingProfileId, InputId = input.InputId }; jobConfig.ManifestTypes.Add(ManifestType.MpegDashMpd); jobConfig.StartTime = 90; /* Transcoded video will start at 01:30 (90 seconds) */ jobConfig.Speed = Speed.Standard; Job job; try { job = bitApi.CreateJob(jobConfig); } catch (BitcodinApiException e) { Console.WriteLine("Could not create job: " + e); return; } JobDetails jobDetails; do { try { jobDetails = bitApi.GetJobDetails(job.JobId); Console.WriteLine("Status: " + jobDetails.JobStatus + " - Enqueued Duration: " + jobDetails.EnqueueDuration + "s" + " - Realtime Factor: " + jobDetails.RealtimeFactor + " - Encoded Duration: " + jobDetails.EncodedDuration + "s" + " - Output: " + jobDetails.BytesWritten / (double)1024 / 1024 + "MB" + " - Duration: " + jobDetails.Duration + " - FrameRate: " + jobDetails.FrameRate + " - JobId: " + jobDetails.JobId + " - SegmentsSplitted: " + jobDetails.SegmentsSplitted); } catch (BitcodinApiException) { Console.WriteLine("Could not get any job details"); return; } if (jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 4))) { Console.WriteLine("Error during transcoding"); return; } Thread.Sleep(2000); } while (!jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 3))); Console.WriteLine("Job with ID " + job.JobId + " finished successfully!"); var outputConfig = new FtpOutputConfig { Name = "TestFtpOutput", Host = "ftp.yourdomain.com/content", Username = "******", Password = "******" }; Output output; try { output = bitApi.CreateOutput(outputConfig); Console.WriteLine("Output has been created: " + output.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create Output: " + e); return; } var transferConfig = new TransferConfig { JobId = job.JobId, OutputId = output.OutputId }; try { bitApi.Transfer(transferConfig); Console.WriteLine("Output has been transferred"); } catch (BitcodinApiException e) { Console.WriteLine("Could not transfer Output: " + e); } }
public static void Run() { /* Create BitcodinApi */ const string apiKey = "YOUR_API_KEY"; var bitApi = new BitcodinApi(apiKey); var inputConfig = new HttpInputConfig { Url = "http://bitbucketireland.s3.amazonaws.com/Sintel-two-audio-streams-short.mkv" }; Input input; try { input = bitApi.CreateInput(inputConfig); Console.WriteLine("Could create input: " + input.Filename); } catch (BitcodinApiException e) { Console.WriteLine("Could not create input: " + e); return; } var encodingProfileConfig = new EncodingProfileConfig { Name = "MyApiTestEncodingProfile" }; /* CREATE VIDEO STREAM CONFIGS */ var videoStreamConfig1 = new VideoStreamConfig { Bitrate = 4800000, Height = 1080, Width = 1920 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig1); var videoStreamConfig2 = new VideoStreamConfig { Bitrate = 2400000, Height = 720, Width = 1280 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig2); var videoStreamConfig3 = new VideoStreamConfig { Bitrate = 1200000, Height = 480, Width = 854 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig3); /* CREATE AUDIO STREAM CONFIGS */ var audioStreamConfig = new AudioStreamConfig { Bitrate = 128000 }; encodingProfileConfig.AudioStreamConfigs.Add(audioStreamConfig); /* CREATE ENCODING PROFILE */ EncodingProfile encodingProfile; try { encodingProfile = bitApi.CreateEncodingProfile(encodingProfileConfig); Console.WriteLine("Could create encoding profile: " + encodingProfile.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create encoding profile: " + e); return; } /* Create Job */ Console.WriteLine("Create Job"); var jobConfig = new JobConfig { EncodingProfileId = encodingProfile.EncodingProfileId, InputId = input.InputId }; jobConfig.ManifestTypes.Add(ManifestType.MpegDashMpd); jobConfig.ManifestTypes.Add(ManifestType.HlsM3U8); jobConfig.Speed = Speed.Standard; Job job; try { job = bitApi.CreateJob(jobConfig); } catch (BitcodinApiException e) { Console.WriteLine("Could not create job: " + e); return; } JobDetails jobDetails; do { try { jobDetails = bitApi.GetJobDetails(job.JobId); Console.WriteLine("Status: " + jobDetails.JobStatus + " - Enqueued Duration: " + jobDetails.EnqueueDuration + "s" + " - Realtime Factor: " + jobDetails.RealtimeFactor + " - Encoded Duration: " + jobDetails.EncodedDuration + "s" + " - Output: " + jobDetails.BytesWritten / (double)1024 / 1024 + "MB" + " - Duration: " + jobDetails.Duration + " - FrameRate: " + jobDetails.FrameRate + " - JobId: " + jobDetails.JobId + " - SegmentsSplitted: " + jobDetails.SegmentsSplitted); } catch (BitcodinApiException) { Console.WriteLine("Could not get any job details"); return; } if (jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 4))) { Console.WriteLine("Error during transcoding"); return; } Thread.Sleep(2000); } while (!jobDetails.JobStatus.ToUpper().Equals(Enum.GetName(typeof(JobStatus), 3))); Console.WriteLine("Job with ID " + job.JobId + " finished successfully!"); /* CREATE MPD WITH YOUR VTT SUBTITLES */ Console.WriteLine("Create SUBTITLES"); var engSub = new VttSubtitle { LangLong = "English", LangShort = "eng", Url = "https://www.iandevlin.com/html5test/webvtt/upc-video-subtitles-en.vtt" }; var deSub = new VttSubtitle { LangLong = "German", LangShort = "de", Url = "http://url.to/your/eng.vtt" }; var subtitles = new VttSubtitle[2]; subtitles[0] = engSub; subtitles[1] = deSub; var vttMpdConfig = new VttMpdConfig { OutputFileName = "test", JobId = job.JobId, Subtitles = subtitles }; try { var vtt = bitApi.CreateVtt(vttMpdConfig); Console.WriteLine("Could create vtt: " + vtt.MpdUrl); } catch (BitcodinApiException e) { Console.WriteLine("Could not create vtt: " + e); } }
public static void Run() { /* Create BitcodinApi */ const string apiKey = "YOUR_API_KEY"; var bitApi = new BitcodinApi(apiKey); var inputConfig = new HttpInputConfig { Url = "http://eu-storage.bitcodin.com/inputs/Sintel.2010.720p.mkv" }; Input input; try { input = bitApi.CreateInput(inputConfig); Console.WriteLine("Could create input: " + input.Filename); } catch (BitcodinApiException e) { Console.WriteLine("Could not create input: " + e); return; } var encodingProfileConfig = new EncodingProfileConfig { Name = "MyApiTestEncodingProfile" }; /* CREATE VIDEO STREAM CONFIGS */ var videoStreamConfig1 = new VideoStreamConfig { Bitrate = 4800000, Height = 1080, Width = 1920 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig1); var videoStreamConfig2 = new VideoStreamConfig { Bitrate = 2400000, Height = 720, Width = 1280 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig2); var videoStreamConfig3 = new VideoStreamConfig { Bitrate = 1200000, Height = 480, Width = 854 }; encodingProfileConfig.VideoStreamConfigs.Add(videoStreamConfig3); /* CREATE AUDIO STREAM CONFIGS */ var audioStreamConfig = new AudioStreamConfig { Bitrate = 128000 }; encodingProfileConfig.AudioStreamConfigs.Add(audioStreamConfig); /* CREATE ENCODING PROFILE */ EncodingProfile encodingProfile; try { encodingProfile = bitApi.CreateEncodingProfile(encodingProfileConfig); Console.WriteLine("Could create encoding profile: " + encodingProfile.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create encoding profile: " + e); return; } /* CREATE OUTPUT */ var outputConfig = new FtpOutputConfig { Name = "TestFtpOutput", Host = "ftp.yourdomain.com/content", Username = "******", Password = "******" }; Output output; try { output = bitApi.CreateOutput(outputConfig); Console.WriteLine("Could create Output: " + output.Name); } catch (BitcodinApiException e) { Console.WriteLine("Could not create Output: " + e); return; } /* Create Job */ var jobConfig = new JobConfig { EncodingProfileId = encodingProfile.EncodingProfileId, InputId = input.InputId, OutputId = output.OutputId, Speed = Speed.Standard }; jobConfig.ManifestTypes.Add(ManifestType.MpegDashMpd); jobConfig.ManifestTypes.Add(ManifestType.HlsM3U8); Job job; try { job = bitApi.CreateJob(jobConfig); } catch (BitcodinApiException e) { Console.WriteLine("Could not create job: " + e); return; } /* WAIT UNTIL JOB IS FINISHED */ JobDetails jobDetails; do { try { jobDetails = bitApi.GetJobDetails(job.JobId); Console.WriteLine("Status: " + jobDetails.JobStatus + " - Enqueued Duration: " + jobDetails.EnqueueDuration + "s" + " - Realtime Factor: " + jobDetails.RealtimeFactor + " - Encoded Duration: " + jobDetails.EncodedDuration + "s" + " - Output: " + jobDetails.BytesWritten / (double)1024 / 1024 + "MB"); } catch (BitcodinApiException) { Console.WriteLine("Could not get any job details"); return; } if (jobDetails.JobStatus.Equals(Enum.GetName(typeof(JobStatus), 4))) { Console.WriteLine("Error during transcoding"); return; } Thread.Sleep(2000); } while (!jobDetails.JobStatus.Equals(Enum.GetName(typeof(JobStatus), 3))); Console.WriteLine("Job with ID " + job.JobId + " finished successfully!"); /* TRANSFER */ Console.WriteLine("Transferring"); var transferConfig = new TransferConfig { JobId = job.JobId, OutputId = output.OutputId }; try { bitApi.Transfer(transferConfig); Console.WriteLine("Transfer finished..."); } catch (BitcodinApiException e) { Console.WriteLine("Could not transfer Output: " + e); } }
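The polling loop repeated in the examples above compares JobStatus against Enum.GetName(typeof(JobStatus), 3) and 4, which evidently correspond to the finished and error states. A minimal sketch of how that loop could be factored into a shared helper; the WaitForJob name is an assumption, not part of the bitcodin API, and the comparison is made case-insensitive to cover both the ToUpper() and plain-Equals variants used above:

private static bool WaitForJob(BitcodinApi bitApi, Job job)
{
    while (true)
    {
        JobDetails jobDetails;
        try
        {
            jobDetails = bitApi.GetJobDetails(job.JobId);
        }
        catch (BitcodinApiException)
        {
            Console.WriteLine("Could not get any job details");
            return false;
        }

        Console.WriteLine("Status: " + jobDetails.JobStatus);

        // Ordinal 4 is treated as the error state in the examples above.
        if (string.Equals(jobDetails.JobStatus, Enum.GetName(typeof(JobStatus), 4), StringComparison.OrdinalIgnoreCase))
        {
            Console.WriteLine("Error during transcoding");
            return false;
        }

        // Ordinal 3 is treated as the finished state in the examples above.
        if (string.Equals(jobDetails.JobStatus, Enum.GetName(typeof(JobStatus), 3), StringComparison.OrdinalIgnoreCase))
            return true;

        Thread.Sleep(2000);
    }
}

With such a helper, each do/while block above reduces to: if (!WaitForJob(bitApi, job)) return;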
// In case of H264, VideoCodecConfig::extraData contains an avcC box which,
// according to ISO/IEC 14496-15 chapter 5.3.3.1.2, has the following structure:
//
// aligned(8) class AVCDecoderConfigurationRecord {
//   unsigned int(8) configurationVersion = 1;
//   unsigned int(8) AVCProfileIndication;
//   unsigned int(8) profile_compatibility;
//   unsigned int(8) AVCLevelIndication;
//   bit(6) reserved = '111111'b;
//   unsigned int(2) lengthSizeMinusOne;
//   bit(3) reserved = '111'b;
//   unsigned int(5) numOfSequenceParameterSets;
//   for (i=0; i < numOfSequenceParameterSets; i++) {
//     unsigned int(16) sequenceParameterSetLength;
//     bit(8*sequenceParameterSetLength) sequenceParameterSetNALUnit;
//   }
//   unsigned int(8) numOfPictureParameterSets;
//   for (i=0; i < numOfPictureParameterSets; i++) {
//     unsigned int(16) pictureParameterSetLength;
//     bit(8*pictureParameterSetLength) pictureParameterSetNALUnit;
//   }
//   if (profile_idc == 100 || profile_idc == 110 ||
//       profile_idc == 122 || profile_idc == 144)
//   {
//     bit(6) reserved = '111111'b;
//     unsigned int(2) chroma_format;
//     bit(5) reserved = '11111'b;
//     unsigned int(3) bit_depth_luma_minus8;
//     bit(5) reserved = '11111'b;
//     unsigned int(3) bit_depth_chroma_minus8;
//     unsigned int(8) numOfSequenceParameterSetExt;
//     for (i=0; i < numOfSequenceParameterSetExt; i++) {
//       unsigned int(16) sequenceParameterSetExtLength;
//       bit(8*sequenceParameterSetExtLength) sequenceParameterSetExtNALUnit;
//     }
//   }
// }
//
// To switch between representations with different codec extra data in
// adaptive streaming scenarios, each packet has to be modified by inserting
// the SPS and PPS NALs before the video samples in the following way:
//   for each SPS:
//     write the SPS length on lengthSizeMinusOne + 1 bytes in MSB (BigEndian) format
//     write the SPS NAL data (without any modifications)
//   for each PPS: (same operation as for SPS)
//     write the PPS length on lengthSizeMinusOne + 1 bytes in MSB (BigEndian) format
//     write the PPS NAL data (without any modifications)
//   append the original video packet data
//
// For example:
// - VideoCodecConfig::extra_data_:
//     01 4D 40 20 FF E1 00 0C
//     67 4D 40 20 96 52 80 A0 0B 76 02 05
//     01 00 04 68 EF 38 80
// - length_size: 4
// - SPS count 1, SPS data: 67 4D 40 20 96 52 80 A0 0B 76 02 05
// - PPS count 1, PPS data: 68 EF 38 80
// - modified packet structure (in hex):
//     00 00 00 0C                          SPS length (4 bytes)
//     67 4D 40 20 96 52 80 A0 0B 76 02 05  SPS NAL
//     00 00 00 04                          PPS length (4 bytes)
//     68 EF 38 80                          PPS NAL
//   after that header the original ES packet bytes are appended
private void ExtractH264ExtraData(VideoStreamConfig videoConfig)
{
    if (videoConfig.CodecExtraData.Length < 6)
    {
        // Min: first 5 bytes + num_sps
        Logger.Error("extra_data is too short to pass valid SPS/PPS header");
        return;
    }

    var extraData = new byte[videoConfig.CodecExtraData.Length];
    Buffer.BlockCopy(videoConfig.CodecExtraData, 0, extraData, 0, videoConfig.CodecExtraData.Length);

    // Read (and skip) configurationVersion, AVCProfileIndication,
    // profile_compatibility and AVCLevelIndication.
    var idx = 0;
    var version = ReadByte(extraData, ref idx);
    var profileIndication = ReadByte(extraData, ref idx);
    var profileCompatibility = ReadByte(extraData, ref idx);
    var avcLevel = ReadByte(extraData, ref idx);

    uint lengthSize = ReadByte(extraData, ref idx);
    if ((lengthSize & 0xFCu) != 0xFCu)
    {
        // Be liberal in what you accept..., so just log a warning
        Logger.Warn("Not all reserved bits in the length size field are set to 1");
    }

    lengthSize = (byte)((lengthSize & 0x3u) + 1);

    uint numSps = ReadByte(extraData, ref idx);
    if ((numSps & 0xE0u) != 0xE0u)
    {
        // Be liberal in what you accept..., so just log a warning
        Logger.Warn("Wrong SPS count format.");
    }

    numSps &= 0x1Fu;

    var spses = ReadH264ParameterSets(extraData, numSps, ref idx);
    if (spses == null)
    {
        Logger.Error("extra data too short");
        return;
    }

    if (extraData.Length <= idx)
    {
        Logger.Error("extra data too short");
        return;
    }

    uint numPps = ReadByte(extraData, ref idx);
    var ppses = ReadH264ParameterSets(extraData, numPps, ref idx);
    if (ppses == null)
    {
        Logger.Error("extra data too short");
        return;
    }

    var size = spses.Sum(o => lengthSize + o.Length) + ppses.Sum(o => lengthSize + o.Length);
    parsedExtraData = new byte[size];
    var offset = 0;
    CopySet(lengthSize, spses, ref offset);
    CopySet(lengthSize, ppses, ref offset);
}
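The comments above describe prepending the parsed extra data to each video packet when representations change. A minimal sketch of that step, assuming the packet payload is available as a byte array; the PrependExtraData name is an assumption made for illustration, not part of the original code:

// Sketch: build a new payload consisting of the SPS/PPS (or VPS/SPS/PPS)
// header produced by the extractors above, followed by the original ES bytes.
private byte[] PrependExtraData(byte[] packetData)
{
    if (parsedExtraData == null || parsedExtraData.Length == 0)
        return packetData;

    var result = new byte[parsedExtraData.Length + packetData.Length];
    Buffer.BlockCopy(parsedExtraData, 0, result, 0, parsedExtraData.Length);
    Buffer.BlockCopy(packetData, 0, result, parsedExtraData.Length, packetData.Length);
    return result;
}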
internal static bool IsCompatible(this VideoStreamConfig config, VideoStreamConfig otherConfig) { return(otherConfig != null && config.Codec == otherConfig.Codec && config.FrameRate == otherConfig.FrameRate); }
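A hypothetical usage sketch of the IsCompatible() extension above, showing how a caller might decide whether a representation switch can be applied without resetting the decoder; the _currentConfig field and the Reconfigure() method are assumptions made for illustration:

internal void OnRepresentationChanged(VideoStreamConfig newConfig)
{
    if (_currentConfig.IsCompatible(newConfig))
    {
        // Same codec and frame rate: the switch can be applied seamlessly,
        // no decoder reset is needed.
        _currentConfig = newConfig;
        return;
    }

    // Codec or frame rate differs: a full reconfiguration is required
    // before packets from the new representation are pushed.
    Reconfigure(newConfig);
    _currentConfig = newConfig;
}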