/// <summary>
/// Multi-codec example: encodes one input into H.264/H.265 video and AAC/AC3
/// audio streams, then writes two MP4 muxings and one progressive-TS muxing to S3.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Multi codec encoding",
        "Encoding with different codecs and muxing types");

    var input = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var inputFilePath = _configProvider.GetHttpInputFilePath();

    // Add an H.264 video stream to the encoding
    var h264VideoConfig = await CreateH264VideoConfiguration();
    var h264VideoStream = await CreateStream(encoding, input, inputFilePath, h264VideoConfig);

    // Add an H.265 video stream to the encoding
    var h265VideoConfig = await CreateH265VideoConfiguration();
    var h265VideoStream = await CreateStream(encoding, input, inputFilePath, h265VideoConfig);

    // Add an AAC audio stream to the encoding
    var aacConfig = await CreateAacAudioConfiguration();
    var aacAudioStream = await CreateStream(encoding, input, inputFilePath, aacConfig);

    // Add an AC3 audio stream to the encoding
    var ac3Config = await CreateAc3AudioConfiguration();
    var ac3AudioStream = await CreateStream(encoding, input, inputFilePath, ac3Config);

    // Create an MP4 muxing with the H.264 and AAC streams
    await CreateMp4Muxing(encoding, output, "mp4-h264-aac",
        new List<Stream>() { h264VideoStream, aacAudioStream }, "video.mp4");

    // Create an MP4 muxing with the H.265 and AC3 streams
    await CreateMp4Muxing(encoding, output, "mp4-h265-ac3",
        new List<Stream>() { h265VideoStream, ac3AudioStream }, "video.mp4");

    // Create a progressive TS muxing with the H.264 and AAC streams
    await CreateProgressiveTsMuxing(encoding, output, "progressivets-h264-aac",
        new List<Stream>() { h264VideoStream, aacAudioStream }, "video.ts");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Baseline audio-mapping example: a stereo input is passed through with
/// implicit channel handling, producing an MP4 with one H.264 video track and
/// one stereo AAC track.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding(
        "Audio Mapping - Simple Handling - Implicit Handling (Baseline)",
        "Input with stereo track -> Output with stereo track");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    // Codec configurations for the single video and audio rendition.
    var videoConfig = await CreateH264VideoConfiguration();
    var audioConfig = await CreateAacAudioConfiguration();

    // One ingest input stream feeds both the video and the audio stream.
    var sourcePath = _configProvider.GetHttpInputFilePathWithStereoSound();
    var source = await CreateIngestInputStream(encoding, httpInput, sourcePath);

    var videoStream = await CreateStream(encoding, source, videoConfig);
    var audioStream = await CreateStream(encoding, source, audioConfig);

    var muxedStreams = new List<Stream> { videoStream, audioStream };
    await CreateMp4Muxing(encoding, s3Output, "/", muxedStreams, "stereo-track.mp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Per-Title encoding example: lets the encoder derive an optimal H.264
/// rendition ladder from a template stream, then has the V2 manifest generator
/// produce default DASH and HLS manifests as part of the start request.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Per-Title encoding",
        "Per-Title encoding with HLS and DASH manifest");

    var input = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var inputFilePath = _configProvider.GetHttpInputFilePath();

    // Add an H.264 video stream to the encoding. PER_TITLE_TEMPLATE marks it as
    // the template the Per-Title renditions are derived from.
    var h264VideoConfig = await CreateH264VideoConfiguration();
    var h264VideoStream = await CreateStream(encoding, input, inputFilePath,
        h264VideoConfig, StreamMode.PER_TITLE_TEMPLATE);

    // Add a regular AAC audio stream to the encoding
    var aacConfig = await CreateAacAudioConfiguration();
    var aacAudioStream = await CreateStream(encoding, input, inputFilePath,
        aacConfig, StreamMode.STANDARD);

    // {height}/{bitrate}/{uuid} are Per-Title output-path placeholders (plain
    // literal braces, NOT C# interpolation) resolved per generated rendition.
    await CreateFmp4Muxing(encoding, output, "video/{height}/{bitrate}_{uuid}", h264VideoStream);
    // Plain literal — the original used a needless interpolated string ($"/audio").
    await CreateFmp4Muxing(encoding, output, "/audio", aacAudioStream);

    var dashManifest = await CreateDefaultDashManifest(encoding, output, "/");
    var hlsManifest = await CreateDefaultHlsManifest(encoding, output, "/");

    var startEncodingRequest = new StartEncodingRequest()
    {
        PerTitle = new PerTitle()
        {
            H264Configuration = new H264PerTitleConfiguration()
            {
                AutoRepresentations = new AutoRepresentation()
            }
        },
        ManifestGenerator = ManifestGenerator.V2,
        VodDashManifests = new List<ManifestResource>() { BuildManifestResource(dashManifest) },
        VodHlsManifests = new List<ManifestResource>() { BuildManifestResource(hlsManifest) }
    };

    await ExecuteEncoding(encoding, startEncodingRequest);
}
/// <summary>
/// Downmixing example: mixes a 5.1-surround input down to stereo with explicit
/// per-channel gains, then muxes the downmixed audio with H.264 video into an MP4.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Audio Mapping - Channel Mixing - Downmixing",
        "Input with 5.1 track -> Output with downmixed stereo track");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var surroundFilePath = _configProvider.GetHttpInputFilePathWithSurroundSound();

    // Codec configurations for the output renditions.
    var videoConfig = await CreateH264VideoConfiguration();
    var audioConfig = await CreateAacAudioConfiguration();

    // Separate ingest input streams for the video and audio portions of the same file.
    var videoSource = await CreateIngestInputStream(encoding, httpInput, surroundFilePath);
    var audioSource = await CreateIngestInputStream(encoding, httpInput, surroundFilePath);

    // Each stereo output channel blends its side's front channel at gain 1.0,
    // the back channel at 0.8 and the shared center channel at 0.5.
    var leftMix = new DownmixConfig(AudioMixChannelType.FRONT_LEFT);
    leftMix.AddSourceChannel(AudioMixSourceChannelType.FRONT_LEFT, 1.0);
    leftMix.AddSourceChannel(AudioMixSourceChannelType.BACK_LEFT, 0.8);
    leftMix.AddSourceChannel(AudioMixSourceChannelType.CENTER, 0.5);

    var rightMix = new DownmixConfig(AudioMixChannelType.FRONT_RIGHT);
    rightMix.AddSourceChannel(AudioMixSourceChannelType.FRONT_RIGHT, 1.0);
    rightMix.AddSourceChannel(AudioMixSourceChannelType.BACK_RIGHT, 0.8);
    rightMix.AddSourceChannel(AudioMixSourceChannelType.CENTER, 0.5);

    var downmixedAudio = await CreateDownmixInputStream(encoding, audioSource,
        new List<DownmixConfig> { leftMix, rightMix });

    // Wire the input streams to codec configurations and mux everything into one MP4.
    var videoStream = await CreateStream(encoding, videoSource, videoConfig);
    var audioStream = await CreateStream(encoding, downmixedAudio, audioConfig);

    await CreateMp4Muxing(encoding, s3Output, "/",
        new List<Stream> { videoStream, audioStream }, "stereo-track-downmixed.mp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// CENC DRM example (V2 manifest flow): encodes DRM-protected fMP4 video and
/// audio, with default DASH/HLS manifests generated by the start request.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("fMP4 muxing with CENC DRM",
        "Example with CENC DRM content protection");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var sourcePath = _configProvider.GetHttpInputFilePath();

    // One H.264 video stream and one AAC audio stream from the same input file.
    var videoConfig = await CreateH264VideoConfiguration();
    var videoStream = await CreateStream(encoding, httpInput, sourcePath, videoConfig);

    var audioConfig = await CreateAacAudioConfiguration();
    var audioStream = await CreateStream(encoding, httpInput, sourcePath, audioConfig);

    // These fMP4 muxing overloads take no output; the DRM configs below carry
    // the output and the "video"/"audio" paths.
    var videoMuxing = await CreateFmp4Muxing(encoding, videoStream);
    var audioMuxing = await CreateFmp4Muxing(encoding, audioStream);

    await CreateDrmConfig(encoding, videoMuxing, s3Output, "video");
    await CreateDrmConfig(encoding, audioMuxing, s3Output, "audio");

    var dashManifest = await CreateDefaultDashManifest(encoding, s3Output, "/");
    var hlsManifest = await CreateDefaultHlsManifest(encoding, s3Output, "/");

    var startRequest = new StartEncodingRequest()
    {
        ManifestGenerator = ManifestGenerator.V2,
        VodDashManifests = new List<ManifestResource> { BuildManifestResource(dashManifest) },
        VodHlsManifests = new List<ManifestResource> { BuildManifestResource(hlsManifest) }
    };

    await ExecuteEncoding(encoding, startRequest);
}
/// <summary>
/// Audio-mapping example with distinct input files: separate inputs supply the
/// video, a stereo audio track and a surround audio track, combined into one
/// MP4 with two audio tracks.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Audio Mapping - Simple Handling - Distinct Input Files",
        "Separate inputs for video, stereo and surround tracks -> Output with 2 audio tracks");

    var input = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var videoInputFilePath = _configProvider.GetHttpInputFilePath();
    var stereoInputFilePath = _configProvider.GetHttpInputFilePathWithStereoSound();
    var surroundInputFilePath = _configProvider.GetHttpInputFilePathWithSurroundSound();

    // Create an H264 video configuration.
    var h264VideoConfig = await CreateH264VideoConfiguration();

    // Create an AAC audio configuration.
    var aacAudioConfig = await CreateAacAudioConfiguration();

    // Create a Dolby Digital audio configuration.
    // NOTE(review): ddConfig is never used below (both audio streams are encoded
    // with aacAudioConfig) and the call is not awaited. This looks like a
    // leftover, or a missing `await` plus a missing usage for the surround
    // stream — confirm the intended behavior.
    var ddConfig = createDolbyDigitalSurroundAudioConfig();

    // Add video and audio ingest input streams.
    var videoIngestInputStream = await CreateIngestInputStream(encoding, input, videoInputFilePath);
    var stereoIngestInputStream = await CreateIngestInputStream(encoding, input, stereoInputFilePath);
    var surroundIngestInputStream = await CreateIngestInputStream(encoding, input, surroundInputFilePath);

    // Create streams and add them to the encoding.
    var videoStream = await CreateStream(encoding, videoIngestInputStream, h264VideoConfig);
    var audioStream1 = await CreateStream(encoding, stereoIngestInputStream, aacAudioConfig);
    var audioStream2 = await CreateStream(encoding, surroundIngestInputStream, aacAudioConfig);

    var streams = new List<Stream>();
    streams.Add(videoStream);
    streams.Add(audioStream1);
    streams.Add(audioStream2);

    await CreateMp4Muxing(encoding, output, "/", streams, "stereo-and-surround-tracks.mp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Prints the ID and name of every encoding currently in RUNNING state,
/// followed by a blank line after each entry.
/// </summary>
public void GetEncodings()
{
    var api = new BitmovinApi(API_KEY);
    var running = api.Encoding.Encoding.RetrieveListWithStatus(Status.RUNNING);

    foreach (var enc in running)
    {
        Console.WriteLine("Encoding ID: {0}", enc.Id);
        Console.WriteLine("Encoding Name: {0}", enc.Name);
        Console.WriteLine("");
    }
}
/// <summary>
/// Filter example: applies watermark, text and deinterlace filters to the
/// H.264 video stream, then muxes it with an AAC audio stream into one MP4.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Filter example",
        "Encoding with multiple filters applied to the video stream");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var sourcePath = _configProvider.GetHttpInputFilePath();

    // One H.264 video stream and one AAC audio stream from the same input file.
    var videoConfig = await CreateH264VideoConfiguration();
    var videoStream = await CreateStream(encoding, httpInput, sourcePath, videoConfig);

    var audioConfig = await CreateAacAudioConfiguration();
    var audioStream = await CreateStream(encoding, httpInput, sourcePath, audioConfig);

    // All three filters are attached to the video stream only.
    var filters = new List<Filter>
    {
        await CreateWatermarkFilter(),
        await CreateTextFilter(),
        await CreateDeinterlaceFilter()
    };
    await CreateStreamFilterList(encoding, videoStream, filters);

    // Create an MP4 muxing with the filtered H.264 stream and the AAC stream.
    await CreateMp4Muxing(encoding, s3Output, "/",
        new List<Stream> { videoStream, audioStream }, "filter_applied.mp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// BroadcastTS example: muxes one H.264 video stream with two MP2 audio
/// streams ("eng" and "deu") into a multi-language BroadcastTS output.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Multi-language BroadcastTS",
        "BroadcastTS muxing example with multiple audio streams");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var sourcePath = _configProvider.GetHttpInputFilePath();

    // Video: the first video track of the input, encoded with H.264.
    var videoConfig = await CreateH264VideoConfiguration();
    var videoStream = await CreateStream(encoding, httpInput, sourcePath, videoConfig,
        StreamSelectionMode.VIDEO_RELATIVE, 0);

    // Audio: two MP2 streams keyed by language label, selecting the first and
    // second audio tracks of the input respectively.
    var mp2Config = await CreateMp2AudioConfiguration();
    var audioStreams = new Dictionary<string, Stream>
    {
        ["eng"] = await CreateStream(encoding, httpInput, sourcePath, mp2Config,
            StreamSelectionMode.AUDIO_RELATIVE, 0),
        ["deu"] = await CreateStream(encoding, httpInput, sourcePath, mp2Config,
            StreamSelectionMode.AUDIO_RELATIVE, 1)
    };

    await CreateBroadcastTsMuxing(encoding, videoStream, audioStreams, s3Output, "/");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Multiple-MP4 example: encodes five H.264 renditions (240p-1080p) of the
/// same input, each written to its own per-height folder as a progressive MP4
/// that shares one AAC audio stream.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Multiple MP4 muxings",
        "Encoding with multiple MP4 muxings");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var sourcePath = _configProvider.GetHttpInputFilePath();

    // Height/bitrate ladder for the H.264 renditions.
    var videoConfigs = new List<H264VideoConfiguration>
    {
        await CreateH264VideoConfiguration(1080, 4_800_000L),
        await CreateH264VideoConfiguration(720, 2_400_000L),
        await CreateH264VideoConfiguration(480, 1_200_000L),
        await CreateH264VideoConfiguration(360, 800_000L),
        await CreateH264VideoConfiguration(240, 400_000L)
    };

    // A single AAC audio stream shared by every MP4 muxing.
    var audioConfig = await CreateAacAudioConfiguration();
    var audioStream = await CreateStream(encoding, httpInput, sourcePath, audioConfig);

    // One video stream plus one progressive MP4 muxing per ladder entry; the
    // output folder is the rendition height and the file name is fixed.
    foreach (var videoConfig in videoConfigs)
    {
        var videoStream = await CreateStream(encoding, httpInput, sourcePath, videoConfig);
        await CreateMp4Muxing(encoding, s3Output, videoConfig.Height.ToString(),
            new List<Stream> { videoStream, audioStream }, "video_h264.mp4");
    }

    await ExecuteEncoding(encoding);
}
/// <summary>
/// CENC DRM example (legacy manifest flow): encodes DRM-protected fMP4 video
/// and audio, then generates DASH and HLS manifests in separate calls after
/// the encoding has finished.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("fMP4 muxing with CENC DRM",
        "Example with CENC DRM content protection");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var sourcePath = _configProvider.GetHttpInputFilePath();

    // One H.264 video stream and one AAC audio stream from the same input file.
    var videoConfig = await CreateH264VideoConfiguration();
    var videoStream = await CreateStream(encoding, httpInput, sourcePath, videoConfig);

    var audioConfig = await CreateAacAudioConfiguration();
    var audioStream = await CreateStream(encoding, httpInput, sourcePath, audioConfig);

    // These fMP4 muxing overloads take no output; the DRM configs below carry
    // the output and the "video"/"audio" paths.
    var videoMuxing = await CreateFmp4Muxing(encoding, videoStream);
    var audioMuxing = await CreateFmp4Muxing(encoding, audioStream);

    await CreateDrmConfig(encoding, videoMuxing, s3Output, "video");
    await CreateDrmConfig(encoding, audioMuxing, s3Output, "audio");

    await ExecuteEncoding(encoding);

    // Manifests are generated only after the encoding has completed.
    await GenerateDashManifest(encoding, s3Output, "/");
    await GenerateHlsManifest(encoding, s3Output, "/");
}
/// <summary>
/// Default-manifest example: writes fMP4 video and audio muxings, then
/// generates default DASH and HLS manifests after the encoding completes.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Encoding with default manifests",
        "Encoding with HLS and DASH default manifests");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var sourcePath = _configProvider.GetHttpInputFilePath();

    // One H.264 video stream and one AAC audio stream from the same input file.
    var videoConfig = await CreateH264VideoConfiguration();
    var videoStream = await CreateStream(encoding, httpInput, sourcePath, videoConfig);

    var audioConfig = await CreateAacAudioConfiguration();
    var audioStream = await CreateStream(encoding, httpInput, sourcePath, audioConfig);

    await CreateFmp4Muxing(encoding, s3Output, "video", videoStream);
    await CreateFmp4Muxing(encoding, s3Output, "audio", audioStream);

    await ExecuteEncoding(encoding);

    // Manifests are generated only after the encoding has completed.
    await GenerateDashManifest(encoding, s3Output, "/");
    await GenerateHlsManifest(encoding, s3Output, "/");
}
/// <summary>
/// Per-Title encoding example (legacy manifest flow): encodes a Per-Title
/// H.264 template plus one AAC stream, then generates DASH and HLS manifests
/// after the encoding completes.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Per-Title encoding",
        "Per-Title encoding with HLS and DASH manifest");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var sourcePath = _configProvider.GetHttpInputFilePath();

    // The H.264 stream acts as the Per-Title template the renditions derive from.
    var videoConfig = await CreateH264VideoConfiguration();
    var videoStream = await CreateStream(encoding, httpInput, sourcePath,
        videoConfig, StreamMode.PER_TITLE_TEMPLATE);

    // The AAC stream is a regular (non-Per-Title) stream.
    var audioConfig = await CreateAacAudioConfiguration();
    var audioStream = await CreateStream(encoding, httpInput, sourcePath,
        audioConfig, StreamMode.STANDARD);

    // {height}/{bitrate}/{uuid} are Per-Title output-path placeholders
    // (literal braces, resolved per generated rendition).
    await CreateFmp4Muxing(encoding, s3Output, "video/{height}/{bitrate}_{uuid}", videoStream);
    await CreateFmp4Muxing(encoding, s3Output, $"/audio/{audioConfig.Bitrate / 1000}kbs", audioStream);

    await ExecuteEncoding(encoding);

    // Manifests are generated only after the encoding has completed.
    await GenerateDashManifest(encoding, s3Output, "/");
    await GenerateHlsManifest(encoding, s3Output, "/");
}
/// <summary>
/// SSAI example: encodes an H.264 rendition ladder plus AAC audio into fMP4,
/// defines keyframes at the ad-break positions, and builds an HLS manifest
/// whose playlists carry custom ad-placement tags at those keyframes.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Encoding with SSAI",
        "Encoding Example - SSAI conditioned HLS streams");

    var input = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var inputFilePath = _configProvider.GetHttpInputFilePath();

    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var videoConfigurations = new List<H264VideoConfiguration>()
    {
        await CreateH264VideoConfiguration(1080, 4_800_000),
        await CreateH264VideoConfiguration(720, 2_400_000),
        await CreateH264VideoConfiguration(480, 1_200_000),
        await CreateH264VideoConfiguration(360, 800_000),
        await CreateH264VideoConfiguration(240, 400_000)
    };

    // create a stream and fMP4 muxing for each video codec configuration
    var videoMuxings = new Dictionary<VideoConfiguration, Fmp4Muxing>();
    foreach (var videoConfig in videoConfigurations)
    {
        var videoStream = await CreateStream(encoding, input, inputFilePath, videoConfig);
        var muxing = await CreateFmp4Muxing(encoding, output, $"video/{videoConfig.Height}", videoStream);
        videoMuxings[videoConfig] = muxing;
    }

    // create a stream and fMP4 muxing for audio
    var aacConfig = await CreateAacAudioConfiguration();
    var aacAudioStream = await CreateStream(encoding, input, inputFilePath, aacConfig);
    var aacAudioMuxing = await CreateFmp4Muxing(encoding, output, "audio", aacAudioStream);

    // seconds at which to add a custom HLS tag for ad placement, as well as
    // when to insert a keyframe / split a segment
    var adBreakPlacements = new List<double>() { 5.0, 15.0 };

    // define keyframes that are used to insert advertisement tags into the manifest
    var keyframes = await CreateKeyframes(encoding, adBreakPlacements);

    await ExecuteEncoding(encoding);

    // create the master manifest that references audio and video playlists
    var manifestHls = await CreateHlsMasterManifest(output, "/");

    // create an audio playlist and provide it with custom tags for ad-placement
    var audioMediaInfo = await CreateAudioMediaPlaylist(encoding, manifestHls, aacAudioMuxing, "audio/");
    await PlaceAudioAdvertisementTags(manifestHls, audioMediaInfo, keyframes);

    // create a video playlist for each video muxing and provide it with custom tags for ad-placement
    foreach (var key in videoMuxings.Keys)
    {
        // BUGFIX: the original wrote "${key.Height}" (JavaScript template syntax)
        // inside C# interpolated strings, which emits a literal '$' — e.g.
        // "video/$1080" — and therefore did not match the muxing output path
        // $"video/{videoConfig.Height}" created above.
        var streamInfo = await CreateVideoStreamPlaylist(
            encoding,
            manifestHls,
            $"video_{key.Height}.m3u8",
            videoMuxings[key],
            $"video/{key.Height}",
            audioMediaInfo
        );
        await PlaceVideoAdvertisementTags(manifestHls, streamInfo, keyframes);
    }

    await ExecuteHlsManifestCreation(manifestHls);
}
/// <summary>
/// Concatenation example: trims two sections out of the main input and
/// concatenates them with a bumper and a repeated promo clip, then encodes the
/// combined stream into a single MP4.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding(ClassName, "Encoding with a concatenation in MP4 muxing");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var mainFilePath = _configProvider.GetHttpInputFilePath();
    var bumperFilePath = _configProvider.GetHttpInputBumperFilePath();
    var promoFilePath = _configProvider.GetHttpInputPromoFilePath();

    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    // Define a video and audio stream as an IngestInputStream to represent each input file (main, bumper, and promo)
    var main = await CreateIngestInputStream(encoding, httpInput, mainFilePath, StreamSelectionMode.AUTO);
    var bumper = await CreateIngestInputStream(encoding, httpInput, bumperFilePath, StreamSelectionMode.AUTO);
    var promo = await CreateIngestInputStream(encoding, httpInput, promoFilePath, StreamSelectionMode.AUTO);

    // Trim the main input into two separate TimeBasedTrimmingInputStreams:
    // 90 seconds starting at 10s, and 60 seconds starting at 100s.
    var mainPart1 = await CreateTimeBasedTrimmingInputStream(encoding, main, 10.0, 90.0);
    var mainPart2 = await CreateTimeBasedTrimmingInputStream(encoding, main, 100.0, 60.0);

    // Define each concatenation input configuration with "isMain" flag and "position" setting.
    // BUGFIX: bumperConfig previously referenced mainPart1.Id (copy-paste error),
    // so the bumper never played and main part 1 was scheduled twice; it must
    // reference the bumper input stream at position 0.
    var bumperConfig = new ConcatenationInputConfiguration()
    {
        InputStreamId = bumper.Id,
        IsMain = false,
        Position = 0
    };
    var part1Config = new ConcatenationInputConfiguration()
    {
        InputStreamId = mainPart1.Id,
        IsMain = true,
        Position = 1
    };
    var promo1Config = new ConcatenationInputConfiguration()
    {
        InputStreamId = promo.Id,
        IsMain = false,
        Position = 2
    };
    var part2Config = new ConcatenationInputConfiguration()
    {
        InputStreamId = mainPart2.Id,
        IsMain = false,
        Position = 3
    };
    var promo2Config = new ConcatenationInputConfiguration()
    {
        InputStreamId = promo.Id,
        IsMain = false,
        Position = 4
    };

    // Playback order: bumper, main part 1, promo, main part 2, promo.
    var concatenationInputConfigurations = new List<ConcatenationInputConfiguration>()
    {
        bumperConfig, part1Config, promo1Config, part2Config, promo2Config
    };
    var allTogether = await CreateConcatenationInputStream(encoding, concatenationInputConfigurations);

    // Create an audio codec configuration and the stream.
    // In this sample, we use AAC with 128kbps as a pre-defined audio codec.
    var aacAudioConfiguration = await CreateAacAudioConfiguration();
    var aacAudioStream = await CreateStreamWithConcatenationInputStream(encoding, allTogether, aacAudioConfiguration);

    // Create a video codec configuration and the stream
    var videoConfiguration = await CreateH264VideoConfiguration(1080, 4800000);
    var videoStream = await CreateStreamWithConcatenationInputStream(encoding, allTogether, videoConfiguration);

    // NOTE(review): the output file name carries no ".mp4" extension, unlike the
    // other examples in this file — confirm this is intentional.
    await CreateMp4Muxing(encoding, output, "/MultipleInputsConcatenation",
        new List<Stream>() { videoStream, aacAudioStream }, "MultipleInputsConcatenationMp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Channel-swapping example: remaps a stereo input so that output channel 0
/// carries source channel 1 and vice versa, then muxes the swapped audio with
/// H.264 video into an MP4.
/// </summary>
/// <param name="args">CLI arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Audio Mapping - Channel Mixing - Swapping Channels",
        "Input with stereo track -> Output with swapped stereo tracks");

    var httpInput = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var s3Output = await CreateS3Output(
        _configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var stereoFilePath = _configProvider.GetHttpInputFilePathWithStereoSound();

    // Codec configurations for the output renditions.
    var videoConfig = await CreateH264VideoConfiguration();
    var audioConfig = await CreateAacAudioConfiguration();

    // Separate ingest input streams for the video and audio portions of the same file.
    var videoSource = await CreateIngestInputStream(encoding, httpInput, stereoFilePath);
    var audioSource = await CreateIngestInputStream(encoding, httpInput, stereoFilePath);

    var swapMix = new AudioMixInputStream()
    {
        Name = "Swapping channels 0 and 1",
        ChannelLayout = AudioMixInputChannelLayout.CL_STEREO
    };

    var fromChannel0 = new AudioMixInputStreamSourceChannel()
    {
        Type = AudioMixSourceChannelType.CHANNEL_NUMBER,
        ChannelNumber = 0
    };
    var fromChannel1 = new AudioMixInputStreamSourceChannel()
    {
        Type = AudioMixSourceChannelType.CHANNEL_NUMBER,
        ChannelNumber = 1
    };

    // Output channel 0 is fed from source channel 1 and output channel 1 from
    // source channel 0 — i.e. the two stereo channels are swapped.
    var toChannel0 = new AudioMixInputStreamChannel()
    {
        InputStreamId = audioSource.Id,
        OutputChannelType = AudioMixChannelType.CHANNEL_NUMBER,
        OutputChannelNumber = 0,
        SourceChannels = new List<AudioMixInputStreamSourceChannel> { fromChannel1 }
    };
    var toChannel1 = new AudioMixInputStreamChannel()
    {
        InputStreamId = audioSource.Id,
        OutputChannelType = AudioMixChannelType.CHANNEL_NUMBER,
        OutputChannelNumber = 1,
        SourceChannels = new List<AudioMixInputStreamSourceChannel> { fromChannel0 }
    };

    var mixChannels = new List<AudioMixInputStreamChannel> { toChannel0, toChannel1 };
    swapMix = await CreateAudioMixInputStream(encoding, swapMix, mixChannels);

    // Wire the input streams to codec configurations and mux everything into one MP4.
    var videoStream = await CreateStream(encoding, videoSource, videoConfig);
    var audioStream = await CreateStream(encoding, swapMix, audioConfig);

    await CreateMp4Muxing(encoding, s3Output, "/",
        new List<Stream> { videoStream, audioStream }, "stereo-track-swapped.mp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Creates and starts a live RTMP encoding with a five-rendition H.264 ladder (240p-1080p)
/// plus a 128 kbps AAC track, generates a live MPEG-DASH manifest, then polls until the
/// live encoder details are available and prints the ingest endpoint information.
/// </summary>
public void StartLiveEncoding()
{
    var bitmovin = new BitmovinApi(API_KEY);
    // Segment length (seconds) applied to every fMP4 muxing below.
    double? segmentLength = 4.0;

    // GCS output bucket that receives the live segments and the manifest.
    var output = bitmovin.Output.Gcs.Create(new GcsOutput { Name = "GCS Ouput", AccessKey = GCS_ACCESS_KEY, SecretKey = GCS_SECRET_KEY, BucketName = GCS_BUCKET_NAME });

    var encoding = bitmovin.Encoding.Encoding.Create(new Encoding.Encoding { Name = "Live Stream C#", CloudRegion = EncodingCloudRegion.GOOGLE_EUROPE_WEST_1, EncoderVersion = "STABLE" });

    // Uses the first RTMP input from the account's input list.
    var rtmpInput = bitmovin.Input.Rtmp.RetrieveList(0, 100)[0];

    // H.264 ladder, all 30 fps: 1080p/4.8M, 720p/2.4M, 480p/1.2M, 360p/800k, 240p/400k.
    var videoConfig1080p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_1080p", Profile = H264Profile.HIGH, Width = 1920, Height = 1080, Bitrate = 4800000, Rate = 30.0f });
    var videoStream1080p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(rtmpInput, "live", 0, videoConfig1080p, SelectionMode.AUTO));
    var videoConfig720p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_720p", Profile = H264Profile.HIGH, Width = 1280, Height = 720, Bitrate = 2400000, Rate = 30.0f });
    var videoStream720p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(rtmpInput, "live", 0, videoConfig720p, SelectionMode.AUTO));
    var videoConfig480p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_480p", Profile = H264Profile.HIGH, Width = 858, Height = 480, Bitrate = 1200000, Rate = 30.0f });
    var videoStream480p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(rtmpInput, "live", 0, videoConfig480p, SelectionMode.AUTO));
    var videoConfig360p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_360p", Profile = H264Profile.HIGH, Width = 640, Height = 360, Bitrate = 800000, Rate = 30.0f });
    var videoStream360p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(rtmpInput, "live", 0, videoConfig360p, SelectionMode.AUTO));
    var videoConfig240p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_240p", Profile = H264Profile.HIGH, Width = 426, Height = 240, Bitrate = 400000, Rate = 30.0f });
    var videoStream240p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(rtmpInput, "live", 0, videoConfig240p, SelectionMode.AUTO));

    // Single AAC audio rendition.
    // NOTE(review): audio uses input path "/" and position 1 while video uses "live"/0 —
    // presumably this selects the RTMP audio track; confirm against the Bitmovin docs.
    var audioConfig = bitmovin.Codec.Aac.Create(new AACAudioConfiguration { Name = "AAC_Profile_128k", Bitrate = 128000, Rate = 48000 });
    var audioStream = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(rtmpInput, "/", 1, audioConfig, SelectionMode.AUTO));

    // fMP4 muxings (one per rendition) for segmented DASH output.
    var videoFMP4Muxing240p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream240p, output, OUTPUT_PATH + "video/240p", segmentLength));
    var videoFMP4Muxing360p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream360p, output, OUTPUT_PATH + "video/360p", segmentLength));
    var videoFMP4Muxing480p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream480p, output, OUTPUT_PATH + "video/480p", segmentLength));
    var videoFMP4Muxing720p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream720p, output, OUTPUT_PATH + "video/720p", segmentLength));
    var videoFMP4Muxing1080p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream1080p, output, OUTPUT_PATH + "video/1080p", segmentLength));
    var audioFMP4Muxing = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(audioStream, output, OUTPUT_PATH + "audio/128kbps", segmentLength));

    // Manifest output location; PUBLIC_READ makes the manifest publicly fetchable.
    var manifestOutput = new Encoding.Output { OutputPath = OUTPUT_PATH, OutputId = output.Id, Acl = new List<Acl> { new Acl { Permission = Permission.PUBLIC_READ } } };

    // MPEG-DASH manifest with one period, one video adaptation set and one audio adaptation set.
    var manifest = bitmovin.Manifest.Dash.Create(new Dash { Name = "MPEG-DASH Manifest", ManifestName = "stream.mpd", Outputs = new List<Encoding.Output> { manifestOutput } });
    var period = bitmovin.Manifest.Dash.Period.Create(manifest.Id, new Period());
    var videoAdaptationSet = bitmovin.Manifest.Dash.VideoAdaptationSet.Create(manifest.Id, period.Id, new VideoAdaptationSet());
    var audioAdaptationSet = bitmovin.Manifest.Dash.AudioAdaptationSet.Create(manifest.Id, period.Id, new AudioAdaptationSet { Lang = "en" });

    // Register every muxing as a segment-template representation in its adaptation set.
    bitmovin.Manifest.Dash.Fmp4.Create(manifest.Id, period.Id, audioAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = audioFMP4Muxing.Id, SegmentPath = "audio/128kbps" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifest.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing240p.Id, SegmentPath = "video/240p" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifest.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing360p.Id, SegmentPath = "video/360p" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifest.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing480p.Id, SegmentPath = "video/480p" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifest.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing720p.Id, SegmentPath = "video/720p" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifest.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing1080p.Id, SegmentPath = "video/1080p" });

    // Start the live encoding: 300 s timeshift window, 90 s live-edge offset.
    bitmovin.Encoding.Encoding.StartLive(encoding.Id, new StartLiveEncodingRequest { StreamKey = "YourStreamKey", DashManifests = new List<LiveDashManifest> { new LiveDashManifest { ManifestId = manifest.Id, Timeshift = 300, LiveEdgeOffset = 90 } } });

    // Poll for the live details; any exception is treated as "not ready yet" and retried after 5 s.
    LiveEncoding liveEncoding = null;
    while (liveEncoding == null)
    {
        try
        {
            liveEncoding = bitmovin.Encoding.Encoding.RetrieveLiveDetails(encoding.Id);
        }
        catch (System.Exception)
        {
            Thread.Sleep(5000);
        }
    }

    // Print the information needed to start ingesting to the live encoder.
    Console.WriteLine("Live stream started");
    Console.WriteLine("Encoding ID: {0}", encoding.Id);
    Console.WriteLine("IP: {0}", liveEncoding.EncoderIp);
    Console.WriteLine("Rtmp URL: rtmp://{0}/live", liveEncoding.EncoderIp);
    Console.WriteLine("Stream Key: {0}", liveEncoding.StreamKey);
}
public void StopLiveEncoding() { var bitmovin = new BitmovinApi(API_KEY); bitmovin.Encoding.Encoding.StopLive(ENCODING_ID); }
/// <summary>
/// Runs a VoD VP9 encoding with a five-rendition ladder (240p-1080p, segmented WebM)
/// plus a 128 kbps AAC track (fMP4), waits for it to finish, then creates an MPEG-DASH
/// manifest referencing all renditions and waits for the manifest generation to finish.
/// </summary>
public void StartVodEncoding()
{
    var bitmovin = new BitmovinApi(API_KEY);
    // Segment length (seconds) applied to all segmented muxings.
    double? segmentLength = 4.0;

    // Create Output (GCS bucket for segments and manifest)
    var output = bitmovin.Output.Gcs.Create(new GcsOutput { Name = "GCS Ouput", AccessKey = GCS_ACCESS_KEY, SecretKey = GCS_SECRET_KEY, BucketName = GCS_BUCKET_NAME });

    // Create encoding
    var encoding = bitmovin.Encoding.Encoding.Create(new Encoding.Encoding { Name = "VoD VP9 Encoding C#", CloudRegion = EncodingCloudRegion.GOOGLE_EUROPE_WEST_1, EncoderVersion = "STABLE" });
    var httpHost = bitmovin.Input.Http.Create(new HttpInput { Name = "HTTP Input", Host = INPUT_HTTP_HOST });

    // Create configurations and streams — VP9 ladder, all 30 fps:
    // 1080p/4.8M, 720p/2.4M, 480p/1.2M, 360p/800k, 240p/400k.
    var videoConfig1080p = bitmovin.Codec.VP9.Create(new VP9VideoConfiguration { Name = "VP9_Profile_1080p", Width = 1920, Height = 1080, Bitrate = 4800000, Rate = 30.0f });
    var videoStream1080p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig1080p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig720p = bitmovin.Codec.VP9.Create(new VP9VideoConfiguration { Name = "VP9_Profile_720p", Width = 1280, Height = 720, Bitrate = 2400000, Rate = 30.0f });
    var videoStream720p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig720p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig480p = bitmovin.Codec.VP9.Create(new VP9VideoConfiguration { Name = "VP9_Profile_480p", Width = 858, Height = 480, Bitrate = 1200000, Rate = 30.0f });
    var videoStream480p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig480p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig360p = bitmovin.Codec.VP9.Create(new VP9VideoConfiguration { Name = "VP9_Profile_360p", Width = 640, Height = 360, Bitrate = 800000, Rate = 30.0f });
    var videoStream360p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig360p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig240p = bitmovin.Codec.VP9.Create(new VP9VideoConfiguration { Name = "VP9_Profile_240p", Width = 426, Height = 240, Bitrate = 400000, Rate = 30.0f });
    var videoStream240p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig240p, SelectionMode.VIDEO_RELATIVE));

    // First audio track of the input, encoded to 128 kbps AAC at 48 kHz.
    var audioConfig = bitmovin.Codec.Aac.Create(new AACAudioConfiguration { Name = "AAC_Profile_128k", Bitrate = 128000, Rate = 48000 });
    var audioStream = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, audioConfig, SelectionMode.AUDIO_RELATIVE));

    // Create Muxing for DASH — segmented WebM for the VP9 video renditions, fMP4 for audio.
    var videoWebmMuxing240p = bitmovin.Encoding.Encoding.SegmentedWebm.Create(encoding.Id, CreateSegmentedWebmMuxing(videoStream240p, output, OUTPUT_PATH + "video/240p", segmentLength));
    var videoWebmMuxing360p = bitmovin.Encoding.Encoding.SegmentedWebm.Create(encoding.Id, CreateSegmentedWebmMuxing(videoStream360p, output, OUTPUT_PATH + "video/360p", segmentLength));
    var videoWebmMuxing480p = bitmovin.Encoding.Encoding.SegmentedWebm.Create(encoding.Id, CreateSegmentedWebmMuxing(videoStream480p, output, OUTPUT_PATH + "video/480p", segmentLength));
    var videoWebmMuxing720p = bitmovin.Encoding.Encoding.SegmentedWebm.Create(encoding.Id, CreateSegmentedWebmMuxing(videoStream720p, output, OUTPUT_PATH + "video/720p", segmentLength));
    var videoWebmMuxing1080p = bitmovin.Encoding.Encoding.SegmentedWebm.Create(encoding.Id, CreateSegmentedWebmMuxing(videoStream1080p, output, OUTPUT_PATH + "video/1080p", segmentLength));
    var audioFMP4Muxing = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(audioStream, output, OUTPUT_PATH + "audio/128kbps", segmentLength));

    // Start encoding and poll its status every 2.5 s until it finishes or errors.
    bitmovin.Encoding.Encoding.Start(encoding.Id);
    var encodingTask = bitmovin.Encoding.Encoding.RetrieveStatus(encoding.Id);
    while (encodingTask.Status != Status.ERROR && encodingTask.Status != Status.FINISHED)
    {
        // Wait for the encoding to finish
        encodingTask = bitmovin.Encoding.Encoding.RetrieveStatus(encoding.Id);
        Thread.Sleep(2500);
    }
    if (encodingTask.Status != Status.FINISHED)
    {
        Console.WriteLine("Encoding could not be finished successfully.");
        return;
    }

    // Create manifest output; PUBLIC_READ makes the manifest publicly fetchable.
    var manifestOutput = new Encoding.Output { OutputPath = OUTPUT_PATH, OutputId = output.Id, Acl = new List<Acl> { new Acl { Permission = Permission.PUBLIC_READ } } };

    // Create DASH Manifest with one period and one video + one audio adaptation set.
    var manifestDash = bitmovin.Manifest.Dash.Create(new Dash { Name = "MPEG-DASH Manifest", ManifestName = "stream.mpd", Outputs = new List<Encoding.Output> { manifestOutput } });
    var period = bitmovin.Manifest.Dash.Period.Create(manifestDash.Id, new Period());
    var videoAdaptationSet = bitmovin.Manifest.Dash.VideoAdaptationSet.Create(manifestDash.Id, period.Id, new VideoAdaptationSet());
    var audioAdaptationSet = bitmovin.Manifest.Dash.AudioAdaptationSet.Create(manifestDash.Id, period.Id, new AudioAdaptationSet { Lang = "en" });

    // Register each muxing as a segment-template representation.
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, audioAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = audioFMP4Muxing.Id, SegmentPath = "audio/128kbps" });
    bitmovin.Manifest.Dash.Webm.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Webm { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoWebmMuxing240p.Id, SegmentPath = "video/240p" });
    bitmovin.Manifest.Dash.Webm.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Webm { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoWebmMuxing360p.Id, SegmentPath = "video/360p" });
    bitmovin.Manifest.Dash.Webm.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Webm { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoWebmMuxing480p.Id, SegmentPath = "video/480p" });
    bitmovin.Manifest.Dash.Webm.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Webm { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoWebmMuxing720p.Id, SegmentPath = "video/720p" });
    bitmovin.Manifest.Dash.Webm.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Webm { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoWebmMuxing1080p.Id, SegmentPath = "video/1080p" });

    // Generate the manifest and poll its status every 2.5 s until it finishes or errors.
    bitmovin.Manifest.Dash.Start(manifestDash.Id);
    var dashManifestStatus = bitmovin.Manifest.Dash.RetrieveStatus(manifestDash.Id);
    while (dashManifestStatus.Status != Status.ERROR && dashManifestStatus.Status != Status.FINISHED)
    {
        // Wait for the Dash Manifest creation to finish
        dashManifestStatus = bitmovin.Manifest.Dash.RetrieveStatus(manifestDash.Id);
        Thread.Sleep(2500);
    }
    if (dashManifestStatus.Status != Status.FINISHED)
    {
        Console.WriteLine("DASH Manifest could not be finished successfully.");
        return;
    }
    Console.WriteLine("Encoding finished successfully");
}
public async Task RunExample(string[] args) { _configProvider = new ConfigProvider(Environment.GetCommandLineArgs()); _bitmovinApi = BitmovinApi.Builder .WithApiKey(_configProvider.GetBitmovinApiKey()) .WithLogger(new ConsoleLogger()) .Build(); var input = await CreateHttpInput(_configProvider.GetHttpInputHost()); var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(), _configProvider.GetS3OutputAccessKey(), _configProvider.GetS3OutputSecretKey()); var codecConfigurations = new List <CodecConfiguration>() { await CreateH264VideoConfiguration(480, 800_000L), await CreateH264VideoConfiguration(720, 1_200_000L), await CreateH264VideoConfiguration(1080, 2_000_000L), await CreateAacAudioConfiguration() }; var jobDispatcher = new JobDispatcher(); do { var queuedEncodings = await CountQueuedEncodings(); var freeSlots = TargetQueueSize - queuedEncodings; if (freeSlots > 0) { var jobsToStart = jobDispatcher.GetJobsToStart(freeSlots); if (jobsToStart.Count > 0) { Console.WriteLine($"There are currently {queuedEncodings} encodings queued. " + $"Starting {jobsToStart.Count} more to reach target queue size " + $"of {TargetQueueSize}"); await StartEncodings(jobsToStart, codecConfigurations, input, output); } else { Console.WriteLine("No more jobs to start. Waiting for " + $"{jobDispatcher.GetStartedJobs().Count} jobs to finish."); } } else { Console.WriteLine($"There are currently {queuedEncodings} encodings queued. " + "Waiting for free slots..."); } await Task.Delay(10000); foreach (var job in jobDispatcher.GetStartedJobs()) { await UpdateEncodingJob(job); await Task.Delay(300); } } while (!jobDispatcher.AllJobsFinished()); Console.WriteLine("All encodings jobs are finished!"); jobDispatcher.LogFailedJobs(); }
/// <summary>
/// Builds an HLS manifest for an existing encoding (<c>ENCODING_ID</c>) from its TS muxings:
/// the single audio muxing becomes a media playlist, each video muxing a variant stream,
/// both restricted to the segment range [START_SEGMENT, END_SEGMENT].
/// </summary>
public void RunExample()
{
    var bitmovin = new BitmovinApi(API_KEY);
    var output = bitmovin.Output.Gcs.Create(new GcsOutput { Name = "GCS Ouput", AccessKey = GCS_ACCESS_KEY, SecretKey = GCS_SECRET_KEY, BucketName = GCS_BUCKET_NAME });

    // Partition the encoding's TS muxings into the audio muxing and the video muxings
    // by inspecting each muxing's first stream's codec configuration.
    var muxings = bitmovin.Encoding.Encoding.Ts.RetrieveList(ENCODING_ID, 0, 50);
    var audioMuxing = muxings.First(c =>
    {
        var stream = bitmovin.Encoding.Encoding.Stream.Retrieve(ENCODING_ID, c.Streams.First().StreamId);
        return (bitmovin.Codec.Codec.IsAudioCodec(stream.CodecConfigId));
    });
    var videoMuxings = muxings.Where(c =>
    {
        var stream = bitmovin.Encoding.Encoding.Stream.Retrieve(ENCODING_ID, c.Streams.First().StreamId);
        return (bitmovin.Codec.Codec.IsVideoCodec(stream.CodecConfigId));
    }).ToList();

    var manifest = bitmovin.Manifest.Hls.Create(new Hls { Name = MANIFEST_NAME, ManifestName = MANIFEST_NAME, Outputs = new List<Encoding.Output> { new Encoding.Output { OutputId = output.Id, OutputPath = OUTPUT_PATH } } });

    // Audio media playlist; a random name avoids collisions with earlier runs.
    var audioManifestName = String.Format("audio_{0}.m3u8", Guid.NewGuid());
    var mediaInfo = new MediaInfo { GroupId = "audio", Name = audioManifestName, Uri = audioManifestName, Type = MediaType.AUDIO, SegmentPath = GetSegmentPath(audioMuxing.Outputs.First().OutputPath), StreamId = audioMuxing.Streams.First().StreamId, MuxingId = audioMuxing.Id, EncodingId = ENCODING_ID, StartSegmentNumber = START_SEGMENT, EndSegmentNumber = END_SEGMENT, Language = "en", AssocLanguage = "en", Autoselect = false, IsDefault = false, Forced = false };
    bitmovin.Manifest.Hls.AddMediaInfo(manifest.Id, mediaInfo);

    // One variant stream per video muxing, all referencing the "audio" group.
    foreach (var videoMuxing in videoMuxings)
    {
        bitmovin.Manifest.Hls.AddStreamInfo(manifest.Id, new StreamInfo { Uri = String.Format("video_{0}.m3u8", Guid.NewGuid()), EncodingId = ENCODING_ID, StreamId = videoMuxing.Streams.First().StreamId, MuxingId = videoMuxing.Id, StartSegmentNumber = START_SEGMENT, EndSegmentNumber = END_SEGMENT, Audio = "audio", SegmentPath = GetSegmentPath(videoMuxing.Outputs.First().OutputPath) });
    }

    Console.WriteLine("Start creating hls manifest");
    bitmovin.Manifest.Hls.Start(manifest.Id);
    Console.WriteLine("Manifest creation started");

    // FIX: the loop previously only polled while the status was RUNNING and then
    // unconditionally reported success — an ERROR state was announced as "created
    // successfully". Poll until a terminal state and verify it, matching the
    // polling convention used by the other methods in this file.
    var status = bitmovin.Manifest.Hls.RetrieveStatus(manifest.Id);
    while (status.Status != Status.ERROR && status.Status != Status.FINISHED)
    {
        status = bitmovin.Manifest.Hls.RetrieveStatus(manifest.Id);
        Thread.Sleep(2500);
    }
    if (status.Status != Status.FINISHED)
    {
        Console.WriteLine("HLS Manifest could not be finished successfully.");
        return;
    }
    Console.WriteLine("Manifest created successfully");
}
/// <summary>
/// Runs a VoD H.264 encoding with a five-rendition ladder (240p-1080p) and a 128 kbps AAC
/// track, producing both fMP4 muxings (for DASH) and TS muxings (for HLS), then generates
/// an HLS and an MPEG-DASH manifest, polling each step to completion.
/// </summary>
public void StartVodEncoding()
{
    var bitmovin = new BitmovinApi(API_KEY);
    // Segment length (seconds) applied to all segmented muxings.
    double? segmentLength = 4.0;

    // Create Output (GCS bucket for segments and manifests)
    var output = bitmovin.Output.Gcs.Create(new GcsOutput { Name = "GCS Ouput", AccessKey = GCS_ACCESS_KEY, SecretKey = GCS_SECRET_KEY, BucketName = GCS_BUCKET_NAME });

    // Create encoding
    var encoding = bitmovin.Encoding.Encoding.Create(new Encoding.Encoding { Name = "VoD Encoding C#", CloudRegion = EncodingCloudRegion.GOOGLE_EUROPE_WEST_1, EncoderVersion = "STABLE" });
    var httpHost = bitmovin.Input.Http.Create(new HttpInput { Name = "HTTP Input", Host = INPUT_HTTP_HOST });

    // Create configurations and streams. All five renditions share the same tuning:
    // HIGH profile, 30 fps, 3 B-frames, CABAC, spatial MV prediction, 30-frame lookahead.
    var videoConfig1080p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_1080p", Profile = H264Profile.HIGH, Width = 1920, Height = 1080, Bitrate = 4800000, Rate = 30.0f, BFrames = 3, Cabac = true, MvSearchRangeMax = 16, RefFrames = 2, MvPredictionMode = MvPredictionMode.SPATIAL, RcLookahead = 30, SubMe = H264SubMe.RD_IP, MotionEstimationMethod = H264MotionEstimationMethod.HEX, BAdaptiveStrategy = BAdapt.FAST, Partitions = new List<H264Partition> { H264Partition.I4X4, H264Partition.I8X8, H264Partition.P8X8, H264Partition.B8X8 } });
    var videoStream1080p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig1080p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig720p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_720p", Profile = H264Profile.HIGH, Width = 1280, Height = 720, Bitrate = 2400000, Rate = 30.0f, BFrames = 3, Cabac = true, MvSearchRangeMax = 16, RefFrames = 2, MvPredictionMode = MvPredictionMode.SPATIAL, RcLookahead = 30, SubMe = H264SubMe.RD_IP, MotionEstimationMethod = H264MotionEstimationMethod.HEX, BAdaptiveStrategy = BAdapt.FAST, Partitions = new List<H264Partition> { H264Partition.I4X4, H264Partition.I8X8, H264Partition.P8X8, H264Partition.B8X8 } });
    var videoStream720p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig720p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig480p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_480p", Profile = H264Profile.HIGH, Width = 858, Height = 480, Bitrate = 1200000, Rate = 30.0f, BFrames = 3, Cabac = true, MvSearchRangeMax = 16, RefFrames = 2, MvPredictionMode = MvPredictionMode.SPATIAL, RcLookahead = 30, SubMe = H264SubMe.RD_IP, MotionEstimationMethod = H264MotionEstimationMethod.HEX, BAdaptiveStrategy = BAdapt.FAST, Partitions = new List<H264Partition> { H264Partition.I4X4, H264Partition.I8X8, H264Partition.P8X8, H264Partition.B8X8 } });
    var videoStream480p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig480p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig360p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_360p", Profile = H264Profile.HIGH, Width = 640, Height = 360, Bitrate = 800000, Rate = 30.0f, BFrames = 3, Cabac = true, MvSearchRangeMax = 16, RefFrames = 2, MvPredictionMode = MvPredictionMode.SPATIAL, RcLookahead = 30, SubMe = H264SubMe.RD_IP, MotionEstimationMethod = H264MotionEstimationMethod.HEX, BAdaptiveStrategy = BAdapt.FAST, Partitions = new List<H264Partition> { H264Partition.I4X4, H264Partition.I8X8, H264Partition.P8X8, H264Partition.B8X8 } });
    var videoStream360p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig360p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig240p = bitmovin.Codec.H264.Create(new H264VideoConfiguration { Name = "H264_Profile_240p", Profile = H264Profile.HIGH, Width = 426, Height = 240, Bitrate = 400000, Rate = 30.0f, BFrames = 3, Cabac = true, MvSearchRangeMax = 16, RefFrames = 2, MvPredictionMode = MvPredictionMode.SPATIAL, RcLookahead = 30, SubMe = H264SubMe.RD_IP, MotionEstimationMethod = H264MotionEstimationMethod.HEX, BAdaptiveStrategy = BAdapt.FAST, Partitions = new List<H264Partition> { H264Partition.I4X4, H264Partition.I8X8, H264Partition.P8X8, H264Partition.B8X8 } });
    var videoStream240p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, videoConfig240p, SelectionMode.VIDEO_RELATIVE));

    // First audio track of the input, encoded to 128 kbps AAC at 48 kHz.
    var audioConfig = bitmovin.Codec.Aac.Create(new AACAudioConfiguration { Name = "AAC_Profile_128k", Bitrate = 128000, Rate = 48000 });
    var audioStream = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id, CreateStream(httpHost, INPUT_HTTP_PATH, 0, audioConfig, SelectionMode.AUDIO_RELATIVE));

    // Create FMP4 Muxing for DASH ("_dash" output paths)
    var videoFMP4Muxing240p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream240p, output, OUTPUT_PATH + "video/240p_dash", segmentLength));
    var videoFMP4Muxing360p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream360p, output, OUTPUT_PATH + "video/360p_dash", segmentLength));
    var videoFMP4Muxing480p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream480p, output, OUTPUT_PATH + "video/480p_dash", segmentLength));
    var videoFMP4Muxing720p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream720p, output, OUTPUT_PATH + "video/720p_dash", segmentLength));
    var videoFMP4Muxing1080p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(videoStream1080p, output, OUTPUT_PATH + "video/1080p_dash", segmentLength));
    var audioFMP4Muxing = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id, CreateFMP4Muxing(audioStream, output, OUTPUT_PATH + "audio/128kbps_dash", segmentLength));

    // Create TS Muxings for HLS ("_hls" output paths)
    var videoTsMuxing240p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id, CreateTsMuxing(videoStream240p, output, OUTPUT_PATH + "video/240p_hls", segmentLength));
    var videoTsMuxing360p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id, CreateTsMuxing(videoStream360p, output, OUTPUT_PATH + "video/360p_hls", segmentLength));
    var videoTsMuxing480p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id, CreateTsMuxing(videoStream480p, output, OUTPUT_PATH + "video/480p_hls", segmentLength));
    var videoTsMuxing720p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id, CreateTsMuxing(videoStream720p, output, OUTPUT_PATH + "video/720p_hls", segmentLength));
    var videoTsMuxing1080p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id, CreateTsMuxing(videoStream1080p, output, OUTPUT_PATH + "video/1080p_hls", segmentLength));
    var audioTsMuxing = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id, CreateTsMuxing(audioStream, output, OUTPUT_PATH + "audio/128kbps_hls", segmentLength));

    // Start encoding and poll its status every 2.5 s until it finishes or errors.
    bitmovin.Encoding.Encoding.Start(encoding.Id);
    var encodingTask = bitmovin.Encoding.Encoding.RetrieveStatus(encoding.Id);
    while (encodingTask.Status != Status.ERROR && encodingTask.Status != Status.FINISHED)
    {
        // Wait for the encoding to finish
        encodingTask = bitmovin.Encoding.Encoding.RetrieveStatus(encoding.Id);
        Thread.Sleep(2500);
    }
    if (encodingTask.Status != Status.FINISHED)
    {
        Console.WriteLine("Encoding could not be finished successfully.");
        return;
    }

    // Create manifest output (can be used for both HLS + DASH)
    var manifestOutput = new Encoding.Output { OutputPath = OUTPUT_PATH, OutputId = output.Id, Acl = new List<Acl> { new Acl { Permission = Permission.PUBLIC_READ } } };

    // Create HLS Manifest: one audio media playlist plus one variant stream per rendition.
    var manifestHls = bitmovin.Manifest.Hls.Create(new Hls { Name = "HLS Manifest", ManifestName = "stream.m3u8", Outputs = new List<Encoding.Output> { manifestOutput } });
    var mediaInfo = new MediaInfo { GroupId = "audio", Name = "English", Uri = "audio.m3u8", Type = MediaType.AUDIO, SegmentPath = "audio/128kbps_hls/", StreamId = audioStream.Id, MuxingId = audioTsMuxing.Id, EncodingId = encoding.Id, Language = "en", AssocLanguage = "en", Autoselect = false, IsDefault = false, Forced = false };
    bitmovin.Manifest.Hls.AddMediaInfo(manifestHls.Id, mediaInfo);
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo { Uri = "video_240.m3u8", EncodingId = encoding.Id, StreamId = videoStream240p.Id, MuxingId = videoTsMuxing240p.Id, Audio = "audio", SegmentPath = "video/240p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo { Uri = "video_360.m3u8", EncodingId = encoding.Id, StreamId = videoStream360p.Id, MuxingId = videoTsMuxing360p.Id, Audio = "audio", SegmentPath = "video/360p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo { Uri = "video_480.m3u8", EncodingId = encoding.Id, StreamId = videoStream480p.Id, MuxingId = videoTsMuxing480p.Id, Audio = "audio", SegmentPath = "video/480p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo { Uri = "video_720.m3u8", EncodingId = encoding.Id, StreamId = videoStream720p.Id, MuxingId = videoTsMuxing720p.Id, Audio = "audio", SegmentPath = "video/720p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo { Uri = "video_1080.m3u8", EncodingId = encoding.Id, StreamId = videoStream1080p.Id, MuxingId = videoTsMuxing1080p.Id, Audio = "audio", SegmentPath = "video/1080p_hls/" });

    // Generate the HLS manifest and poll every 2.5 s until it finishes or errors.
    bitmovin.Manifest.Hls.Start(manifestHls.Id);
    var hlsManifestStatus = bitmovin.Manifest.Hls.RetrieveStatus(manifestHls.Id);
    while (hlsManifestStatus.Status != Status.ERROR && hlsManifestStatus.Status != Status.FINISHED)
    {
        // Wait for the HLS Manifest creation to finish
        hlsManifestStatus = bitmovin.Manifest.Hls.RetrieveStatus(manifestHls.Id);
        Thread.Sleep(2500);
    }
    if (hlsManifestStatus.Status != Status.FINISHED)
    {
        Console.WriteLine("HLS Manifest could not be finished successfully.");
        return;
    }

    // Create DASH Manifest with one period and one video + one audio adaptation set.
    var manifestDash = bitmovin.Manifest.Dash.Create(new Dash { Name = "MPEG-DASH Manifest", ManifestName = "stream.mpd", Outputs = new List<Encoding.Output> { manifestOutput } });
    var period = bitmovin.Manifest.Dash.Period.Create(manifestDash.Id, new Period());
    var videoAdaptationSet = bitmovin.Manifest.Dash.VideoAdaptationSet.Create(manifestDash.Id, period.Id, new VideoAdaptationSet());
    var audioAdaptationSet = bitmovin.Manifest.Dash.AudioAdaptationSet.Create(manifestDash.Id, period.Id, new AudioAdaptationSet { Lang = "en" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, audioAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = audioFMP4Muxing.Id, SegmentPath = "audio/128kbps_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing240p.Id, SegmentPath = "video/240p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing360p.Id, SegmentPath = "video/360p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing480p.Id, SegmentPath = "video/480p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing720p.Id, SegmentPath = "video/720p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4 { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing1080p.Id, SegmentPath = "video/1080p_dash" });

    // Generate the DASH manifest and poll every 2.5 s until it finishes or errors.
    bitmovin.Manifest.Dash.Start(manifestDash.Id);
    var dashManifestStatus = bitmovin.Manifest.Dash.RetrieveStatus(manifestDash.Id);
    while (dashManifestStatus.Status != Status.ERROR && dashManifestStatus.Status != Status.FINISHED)
    {
        // Wait for the Dash Manifest creation to finish
        dashManifestStatus = bitmovin.Manifest.Dash.RetrieveStatus(manifestDash.Id);
        Thread.Sleep(2500);
    }
    if (dashManifestStatus.Status != Status.FINISHED)
    {
        Console.WriteLine("DASH Manifest could not be finished successfully.");
        return;
    }
    Console.WriteLine("Encoding finished successfully");
}
/// <summary>
/// Encodes an H.264 ABR ladder (two 720p and two 1080p renditions) plus two AAC renditions
/// into fMP4 muxings, then starts the encoding with V2 manifest generation producing
/// default DASH and HLS manifests.
/// </summary>
/// <param name="args">Command line arguments forwarded to the <c>ConfigProvider</c>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Encoding with default manifests",
        "Encoding with HLS and DASH default manifests");
    var input = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(), _configProvider.GetS3OutputSecretKey());
    var inputFilePath = _configProvider.GetHttpInputFilePath();

    // ABR Ladder - H264
    var h264VideoConfig_720_3000000 = await CreateH264VideoConfiguration(1280, 720, 3000000L);
    var h264VideoConfig_720_4608000 = await CreateH264VideoConfiguration(1280, 720, 4608000L);
    var h264VideoConfig_1080_6144000 = await CreateH264VideoConfiguration(1920, 1080, 6144000L);
    var h264VideoConfig_1080_7987200 = await CreateH264VideoConfiguration(1920, 1080, 7987200L);
    var videoConfigs = new List<H264VideoConfiguration>()
    {
        h264VideoConfig_720_3000000,
        h264VideoConfig_720_4608000,
        h264VideoConfig_1080_6144000,
        h264VideoConfig_1080_7987200
    };

    // create video streams and muxings (one fMP4 muxing per rendition, keyed by bitrate)
    foreach (H264VideoConfiguration config in videoConfigs)
    {
        var h264VideoStream = await CreateStream(encoding, input, inputFilePath, config);
        // FIX (idiom): string interpolation instead of String.Format, matching the
        // style used by the other examples in this file.
        await CreateFmp4Muxing(encoding, output, $"/video/{config.Bitrate}", h264VideoStream);
    }

    // Audio - AAC (comment previously misspelled "ACC")
    var aacConfig_192000 = await CreateAacAudioConfiguration(192000L);
    var aacConfig_64000 = await CreateAacAudioConfiguration(64000L);
    var audioConfigs = new List<AacAudioConfiguration>() { aacConfig_192000, aacConfig_64000 };

    // create audio streams and muxings (comment previously copy-pasted from the video loop)
    foreach (AacAudioConfiguration config in audioConfigs)
    {
        var aacAudioStream = await CreateStream(encoding, input, inputFilePath, config);
        await CreateFmp4Muxing(encoding, output, $"/audio/{config.Bitrate}", aacAudioStream);
    }

    // Default manifests are generated server-side by the V2 manifest generator
    // when the encoding is started.
    var dashManifest = await CreateDefaultDashManifest(encoding, output, "/");
    var hlsManifest = await CreateDefaultHlsManifest(encoding, output, "/");
    var startEncodingRequest = new StartEncodingRequest()
    {
        ManifestGenerator = ManifestGenerator.V2,
        VodDashManifests = new List<ManifestResource>() { BuildManifestResource(dashManifest) },
        VodHlsManifests = new List<ManifestResource>() { BuildManifestResource(hlsManifest) }
    };

    await ExecuteEncoding(encoding, startEncodingRequest);
}
/// <summary>
/// Starts a live encoding from an RTMP input with one H.264 video and one AAC audio
/// stream, publishes live DASH and HLS manifests, waits for the stream to come up,
/// prints the RTMP ingest endpoint, and shuts the encoding down on a key press.
/// </summary>
/// <param name="args">Command line arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Live encoding example", "Live encoding with RTMP input");
    var input = await GetRtmpInput();
    var inputFilePath = "live";
    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    // Add an H.264 video stream to the encoding (RTMP stream position 0)
    var h264VideoConfig = await CreateH264VideoConfiguration();
    var h264VideoStream = await CreateStream(encoding, input, inputFilePath, h264VideoConfig, 0);

    // Add an AAC audio stream to the encoding (RTMP stream position 1)
    var aacConfig = await CreateAacAudioConfiguration();
    var aacAudioStream = await CreateStream(encoding, input, inputFilePath, aacConfig, 1);

    // BUGFIX: the original used JavaScript-style "${...}" inside C# interpolated
    // strings; in C# the '$' before '{' is a literal character, so the output paths
    // came out as "/video/$1080p" and "/audio/$128kbs".
    await CreateFmp4Muxing(encoding, output, $"/video/{h264VideoConfig.Height}p", h264VideoStream);
    await CreateFmp4Muxing(encoding, output, $"/audio/{aacConfig.Bitrate! / 1000}kbs", aacAudioStream);

    var dashManifest = await CreateDefaultDashManifest(encoding, output, "/");
    var hlsManifest = await CreateDefaultHlsManifest(encoding, output, "/");

    var liveDashManifest = new LiveDashManifest() { ManifestId = dashManifest.Id };
    var liveHlsManifest = new LiveHlsManifest() { ManifestId = hlsManifest.Id };

    var startLiveEncodingRequest = new StartLiveEncodingRequest()
    {
        DashManifests = new List<LiveDashManifest>() { liveDashManifest },
        HlsManifests = new List<LiveHlsManifest>() { liveHlsManifest },
        StreamKey = StreamKey
    };

    await StartLiveEncodingAndWaitUntilRunning(encoding, startLiveEncodingRequest);
    var liveEncoding = await WaitForLiveEncodingDetails(encoding);

    Console.WriteLine("Live encoding is up and ready for ingest. " +
                      $"RTMP URL: rtmp://{liveEncoding.EncoderIp}/live StreamKey: {liveEncoding.StreamKey}");

    /*
     * This will enable you to shut down the live encoding from within your script.
     * In production, it is naturally recommended to stop the encoding by using the Bitmovin dashboard
     * or an independent API call - https://bitmovin.com/docs/encoding/api-reference/sections/encodings#/Encoding/PostEncodingEncodingsLiveStopByEncodingId
     */
    Console.WriteLine("Press any key to shutdown the live encoding...");
    Console.ReadKey();

    Console.WriteLine("Shutting down live encoding.");
    await _bitmovinApi.Encoding.Encodings.Live.StopAsync(encoding.Id);
    await WaitUntilEncodingIsInState(encoding, Status.FINISHED);
}
/// <summary>
/// Runs a complete VoD workflow against the legacy Bitmovin API: reads from an Azure
/// input container, encodes an H.264 ABR ladder (240p-1080p) plus a 128 kbps AAC track,
/// writes fMP4 segments (for DASH) and TS segments (for HLS) to an Azure output
/// container, then generates an HLS and a DASH manifest. Blocks the calling thread,
/// polling synchronously until each stage finishes or fails.
/// </summary>
public void StartVodEncoding()
{
    // If you run into network errors, try uncommenting the following line.
    //System.Net.ServicePointManager.SecurityProtocol = System.Net.SecurityProtocolType.Ssl3 | System.Net.SecurityProtocolType.Tls12;
    var bitmovin = new BitmovinApi(API_KEY);
    // Segment length (seconds) shared by every fMP4 and TS muxing below.
    double? segmentLength = 4.0;

    // Create Output
    var output = bitmovin.Output.Azure.Create(new AzureOutput
    {
        Name = "Azure Output",
        AccountName = AZURE_OUTPUT_TEST_ACCOUNT_NAME,
        AccountKey = AZURE_OUTPUT_TEST_ACCOUNT_KEY,
        Container = AZURE_OUTPUT_TEST_CONTAINER_NAME
    });

    // Create encoding
    var encoding = bitmovin.Encoding.Encoding.Create(new Encoding.Encoding
    {
        Name = "VoD Encoding C#",
        CloudRegion = EncodingCloudRegion.GOOGLE_EUROPE_WEST_1,
        EncoderVersion = "STABLE"
    });

    var input = bitmovin.Input.Azure.Create(new AzureInput
    {
        Name = "Azure Input",
        AccountName = AZURE_INPUT_TEST_ACCOUNT_NAME,
        AccountKey = AZURE_INPUT_TEST_ACCOUNT_KEY,
        Container = AZURE_INPUT_TEST_CONTAINER_NAME
    });

    // Create configurations and streams
    // H.264 ABR ladder, all renditions at 30 fps, each selecting the first
    // video track of the input file (VIDEO_RELATIVE position 0).
    var videoConfig1080p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_1080p", Profile = H264Profile.HIGH, Width = 1920, Height = 1080, Bitrate = 4800000, Rate = 30.0f });
    var videoStream1080p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(input, AZURE_INPUT_PATH, 0, videoConfig1080p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig720p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_720p", Profile = H264Profile.HIGH, Width = 1280, Height = 720, Bitrate = 2400000, Rate = 30.0f });
    var videoStream720p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(input, AZURE_INPUT_PATH, 0, videoConfig720p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig480p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_480p", Profile = H264Profile.HIGH, Width = 858, Height = 480, Bitrate = 1200000, Rate = 30.0f });
    var videoStream480p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(input, AZURE_INPUT_PATH, 0, videoConfig480p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig360p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_360p", Profile = H264Profile.HIGH, Width = 640, Height = 360, Bitrate = 800000, Rate = 30.0f });
    var videoStream360p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(input, AZURE_INPUT_PATH, 0, videoConfig360p, SelectionMode.VIDEO_RELATIVE));
    var videoConfig240p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_240p", Profile = H264Profile.HIGH, Width = 426, Height = 240, Bitrate = 400000, Rate = 30.0f });
    var videoStream240p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(input, AZURE_INPUT_PATH, 0, videoConfig240p, SelectionMode.VIDEO_RELATIVE));

    // Single AAC audio rendition, selecting the first audio track (AUDIO_RELATIVE position 0).
    var audioConfig = bitmovin.Codec.Aac.Create(new AACAudioConfiguration
        { Name = "AAC_Profile_128k", Bitrate = 128000, Rate = 48000 });
    var audioStream = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(input, AZURE_INPUT_PATH, 0, audioConfig, SelectionMode.AUDIO_RELATIVE));

    // Create FMP4 Muxing for DASH
    var videoFMP4Muxing240p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id,
        CreateFMP4Muxing(videoStream240p, output, AZURE_OUTPUT_PATH + "video/240p_dash", segmentLength));
    var videoFMP4Muxing360p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id,
        CreateFMP4Muxing(videoStream360p, output, AZURE_OUTPUT_PATH + "video/360p_dash", segmentLength));
    var videoFMP4Muxing480p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id,
        CreateFMP4Muxing(videoStream480p, output, AZURE_OUTPUT_PATH + "video/480p_dash", segmentLength));
    var videoFMP4Muxing720p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id,
        CreateFMP4Muxing(videoStream720p, output, AZURE_OUTPUT_PATH + "video/720p_dash", segmentLength));
    var videoFMP4Muxing1080p = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id,
        CreateFMP4Muxing(videoStream1080p, output, AZURE_OUTPUT_PATH + "video/1080p_dash", segmentLength));
    var audioFMP4Muxing = bitmovin.Encoding.Encoding.Fmp4.Create(encoding.Id,
        CreateFMP4Muxing(audioStream, output, AZURE_OUTPUT_PATH + "audio/128kbps_dash", segmentLength));

    // Create TS Muxings for HLS
    var videoTsMuxing240p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream240p, output, AZURE_OUTPUT_PATH + "video/240p_hls", segmentLength));
    var videoTsMuxing360p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream360p, output, AZURE_OUTPUT_PATH + "video/360p_hls", segmentLength));
    var videoTsMuxing480p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream480p, output, AZURE_OUTPUT_PATH + "video/480p_hls", segmentLength));
    var videoTsMuxing720p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream720p, output, AZURE_OUTPUT_PATH + "video/720p_hls", segmentLength));
    var videoTsMuxing1080p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream1080p, output, AZURE_OUTPUT_PATH + "video/1080p_hls", segmentLength));
    var audioTsMuxing = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(audioStream, output, AZURE_OUTPUT_PATH + "audio/128kbps_hls", segmentLength));

    // Start encoding
    bitmovin.Encoding.Encoding.Start(encoding.Id);
    // Synchronous poll every 2.5 s until the encoding reaches a terminal state.
    var encodingTask = bitmovin.Encoding.Encoding.RetrieveStatus(encoding.Id);
    while (encodingTask.Status != Status.ERROR && encodingTask.Status != Status.FINISHED)
    {
        // Wait for the encoding to finish
        encodingTask = bitmovin.Encoding.Encoding.RetrieveStatus(encoding.Id);
        Thread.Sleep(2500);
    }
    if (encodingTask.Status != Status.FINISHED)
    {
        Console.WriteLine("Encoding could not be finished successfully.");
        return;
    }

    // Create manifest output (can be used for both HLS + DASH)
    // PUBLIC_READ ACL so players can fetch the playlists and segments directly.
    var manifestOutput = new Encoding.Output
    {
        OutputPath = AZURE_OUTPUT_PATH,
        OutputId = output.Id,
        Acl = new List<Acl> { new Acl { Permission = Permission.PUBLIC_READ } }
    };

    // Create HLS Manifest
    var manifestHls = bitmovin.Manifest.Hls.Create(new Hls
        { Name = "HLS Manifest", ManifestName = "stream.m3u8", Outputs = new List<Encoding.Output> { manifestOutput } });
    // Audio rendition entry for the master playlist (group "audio", referenced by every variant below).
    var mediaInfo = new MediaInfo
    {
        GroupId = "audio",
        Name = "English",
        Uri = "audio.m3u8",
        Type = MediaType.AUDIO,
        SegmentPath = "audio/128kbps_hls/",
        StreamId = audioStream.Id,
        MuxingId = audioTsMuxing.Id,
        EncodingId = encoding.Id,
        Language = "en",
        AssocLanguage = "en",
        Autoselect = false,
        IsDefault = false,
        Forced = false
    };
    bitmovin.Manifest.Hls.AddMediaInfo(manifestHls.Id, mediaInfo);
    // One variant playlist per video rendition.
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo
        { Uri = "video_240.m3u8", EncodingId = encoding.Id, StreamId = videoStream240p.Id, MuxingId = videoTsMuxing240p.Id, Audio = "audio", SegmentPath = "video/240p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo
        { Uri = "video_360.m3u8", EncodingId = encoding.Id, StreamId = videoStream360p.Id, MuxingId = videoTsMuxing360p.Id, Audio = "audio", SegmentPath = "video/360p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo
        { Uri = "video_480.m3u8", EncodingId = encoding.Id, StreamId = videoStream480p.Id, MuxingId = videoTsMuxing480p.Id, Audio = "audio", SegmentPath = "video/480p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo
        { Uri = "video_720.m3u8", EncodingId = encoding.Id, StreamId = videoStream720p.Id, MuxingId = videoTsMuxing720p.Id, Audio = "audio", SegmentPath = "video/720p_hls/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifestHls.Id, new StreamInfo
        { Uri = "video_1080.m3u8", EncodingId = encoding.Id, StreamId = videoStream1080p.Id, MuxingId = videoTsMuxing1080p.Id, Audio = "audio", SegmentPath = "video/1080p_hls/" });
    bitmovin.Manifest.Hls.Start(manifestHls.Id);
    var hlsManifestStatus = bitmovin.Manifest.Hls.RetrieveStatus(manifestHls.Id);
    while (hlsManifestStatus.Status != Status.ERROR && hlsManifestStatus.Status != Status.FINISHED)
    {
        // Wait for the HLS Manifest creation to finish
        hlsManifestStatus = bitmovin.Manifest.Hls.RetrieveStatus(manifestHls.Id);
        Thread.Sleep(2500);
    }
    if (hlsManifestStatus.Status != Status.FINISHED)
    {
        Console.WriteLine("HLS Manifest could not be finished successfully.");
        return;
    }

    // Create DASH Manifest
    var manifestDash = bitmovin.Manifest.Dash.Create(new Dash
        { Name = "MPEG-DASH Manifest", ManifestName = "stream.mpd", Outputs = new List<Encoding.Output> { manifestOutput } });
    // One period with a video and an audio adaptation set; each fMP4 muxing becomes a representation.
    var period = bitmovin.Manifest.Dash.Period.Create(manifestDash.Id, new Period());
    var videoAdaptationSet = bitmovin.Manifest.Dash.VideoAdaptationSet.Create(manifestDash.Id, period.Id, new VideoAdaptationSet());
    var audioAdaptationSet = bitmovin.Manifest.Dash.AudioAdaptationSet.Create(manifestDash.Id, period.Id, new AudioAdaptationSet { Lang = "en" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, audioAdaptationSet.Id, new Manifest.Fmp4
        { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = audioFMP4Muxing.Id, SegmentPath = "audio/128kbps_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4
        { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing240p.Id, SegmentPath = "video/240p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4
        { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing360p.Id, SegmentPath = "video/360p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4
        { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing480p.Id, SegmentPath = "video/480p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4
        { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing720p.Id, SegmentPath = "video/720p_dash" });
    bitmovin.Manifest.Dash.Fmp4.Create(manifestDash.Id, period.Id, videoAdaptationSet.Id, new Manifest.Fmp4
        { Type = SegmentScheme.TEMPLATE, EncodingId = encoding.Id, MuxingId = videoFMP4Muxing1080p.Id, SegmentPath = "video/1080p_dash" });
    bitmovin.Manifest.Dash.Start(manifestDash.Id);
    var dashManifestStatus = bitmovin.Manifest.Dash.RetrieveStatus(manifestDash.Id);
    while (dashManifestStatus.Status != Status.ERROR && dashManifestStatus.Status != Status.FINISHED)
    {
        // Wait for the Dash Manifest creation to finish
        dashManifestStatus = bitmovin.Manifest.Dash.RetrieveStatus(manifestDash.Id);
        Thread.Sleep(2500);
    }
    if (dashManifestStatus.Status != Status.FINISHED)
    {
        Console.WriteLine("DASH Manifest could not be finished successfully.");
        return;
    }
    Console.WriteLine("Encoding finished successfully");
}
/// <summary>
/// Audio mapping example: merges the two stereo tracks of the input file into a single
/// stereo output track (the secondary track is routed through an audio mix at 50% gain)
/// alongside an H.264 video stream, writing the result into one MP4.
/// </summary>
/// <param name="args">Command line arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Audio Mapping - Stream Merging",
        "Multiple stereo input tracks -> Output with single merged stereo track");

    var h264Config = await CreateH264VideoConfiguration();
    var aacConfig = await CreateAacAudioConfiguration();

    var input = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var inputFilePath = _configProvider.GetHttpInputFilePathWithTwoStereoTracks();
    var videoIngestInputStream = await CreateIngestInputStream(encoding, input, inputFilePath);
    var mainAudioIngestInputStream =
        await CreateIngestInputStreamForAudioTrack(encoding, input, inputFilePath, 0);
    var secondaryAudioIngestInputStream =
        await CreateIngestInputStreamForAudioTrack(encoding, input, inputFilePath, 1);

    var secondaryAudioMixInputStream = new AudioMixInputStream
    {
        ChannelLayout = AudioMixInputChannelLayout.CL_STEREO,
        // FIX: initialize the channel list explicitly; the Add() calls below would
        // throw a NullReferenceException if the SDK model does not pre-populate it.
        AudioMixChannels = new List<AudioMixInputStreamChannel>()
    };

    // Route both channels (0 and 1) of the secondary track onto the same channel
    // numbers of the mix, attenuated by Gain = 0.5.
    for (var i = 0; i <= 1; i++)
    {
        var sourceChannel = new AudioMixInputStreamSourceChannel
        {
            Type = AudioMixSourceChannelType.CHANNEL_NUMBER,
            ChannelNumber = i,
            Gain = 0.5
        };
        var inputStreamChannel = new AudioMixInputStreamChannel
        {
            InputStreamId = secondaryAudioIngestInputStream.Id,
            OutputChannelType = AudioMixChannelType.CHANNEL_NUMBER,
            OutputChannelNumber = i,
            SourceChannels = new List<AudioMixInputStreamSourceChannel>() { sourceChannel }
        };
        secondaryAudioMixInputStream.AudioMixChannels.Add(inputStreamChannel);
    }

    secondaryAudioMixInputStream = await _bitmovinApi.Encoding.Encodings.InputStreams.AudioMix.CreateAsync(
        encoding.Id, secondaryAudioMixInputStream);

    var videoStream = await CreateStream(encoding, new List<InputStream>() { videoIngestInputStream }, h264Config);
    // Passing both input streams into one AAC stream produces the single merged stereo track.
    var audioStream = await CreateStream(
        encoding,
        new List<InputStream>() { mainAudioIngestInputStream, secondaryAudioMixInputStream },
        aacConfig);

    // NOTE(review): the file name looks copy-pasted from the surround-mapping example;
    // this encoding produces a merged stereo track, not surround — confirm before renaming.
    await CreateMp4Muxing(
        encoding,
        output,
        "/",
        new List<Stream>() { videoStream, audioStream },
        "stereo-and-surround-tracks-mapped.mp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Audio mapping example: takes an input with multiple mono audio tracks and maps them
/// onto two output tracks — a stereo AAC track fed by source tracks 0-1 and a 5.1
/// Dolby Digital surround track fed by source tracks 2-7 — next to an H.264 video
/// stream, all muxed into a single MP4.
/// </summary>
/// <param name="args">Command line arguments forwarded to the <see cref="ConfigProvider"/>.</param>
public async Task RunExample(string[] args)
{
    _configProvider = new ConfigProvider(args);
    _bitmovinApi = BitmovinApi.Builder
        .WithApiKey(_configProvider.GetBitmovinApiKey())
        // uncomment the following line if you are working with a multi-tenant account
        // .WithTenantOrgIdKey(_configProvider.GetBitmovinTenantOrgId())
        .WithLogger(new ConsoleLogger())
        .Build();

    var encoding = await CreateEncoding("Audio Mapping - Stream Mapping - Multiple Mono Tracks",
        "Input with multiple mono tracks -> Output with stereo and surround tracks");

    var input = await CreateHttpInput(_configProvider.GetHttpInputHost());
    var output = await CreateS3Output(_configProvider.GetS3OutputBucketName(),
        _configProvider.GetS3OutputAccessKey(),
        _configProvider.GetS3OutputSecretKey());

    var h264Config = await CreateH264VideoConfiguration();
    var aacConfig = await CreateAacAudioConfiguration();
    var ddConfig = await CreateDdSurroundAudioConfiguration();

    var inputFilePath = _configProvider.GetHttpInputFilePathWithMultipleMonoAudioTracks();
    var videoIngestInputStream = await CreateIngestInputStream(encoding, input, inputFilePath);

    // Source tracks 0 and 1 become the left/right channels of the stereo output.
    var stereoChannelMappings = new List<ChannelMappingConfiguration>()
    {
        new ChannelMappingConfiguration(AudioMixChannelType.FRONT_LEFT, 0),
        new ChannelMappingConfiguration(AudioMixChannelType.FRONT_RIGHT, 1)
    };

    // Source tracks 2-7 become the six channels of the 5.1 (back) surround output.
    var surroundChannelMappings = new List<ChannelMappingConfiguration>()
    {
        new ChannelMappingConfiguration(AudioMixChannelType.FRONT_LEFT, 2),
        new ChannelMappingConfiguration(AudioMixChannelType.FRONT_RIGHT, 3),
        new ChannelMappingConfiguration(AudioMixChannelType.BACK_LEFT, 4),
        new ChannelMappingConfiguration(AudioMixChannelType.BACK_RIGHT, 5),
        new ChannelMappingConfiguration(AudioMixChannelType.CENTER, 6),
        new ChannelMappingConfiguration(AudioMixChannelType.LOW_FREQUENCY, 7),
    };

    var stereoMixChannels =
        await CreateAudioMixInputStreamChannels(encoding, input, inputFilePath, stereoChannelMappings);
    var stereoMixInputStream =
        await CreateAudioMixInputStream(encoding, AudioMixInputChannelLayout.CL_STEREO, stereoMixChannels);

    var surroundMixChannels =
        await CreateAudioMixInputStreamChannels(encoding, input, inputFilePath, surroundChannelMappings);
    var surroundMixInputStream =
        await CreateAudioMixInputStream(encoding, AudioMixInputChannelLayout.CL_5_1_BACK, surroundMixChannels);

    var videoStream = await CreateStream(encoding, videoIngestInputStream, h264Config);
    var stereoAudioStream = await CreateStream(encoding, stereoMixInputStream, aacConfig);
    var surroundAudioStream = await CreateStream(encoding, surroundMixInputStream, ddConfig);

    await CreateMp4Muxing(
        encoding,
        output,
        "/",
        new List<Stream>() { videoStream, stereoAudioStream, surroundAudioStream },
        "stereo-and-surround-tracks-mapped.mp4");

    await ExecuteEncoding(encoding);
}
/// <summary>
/// Starts a live stream via the legacy Bitmovin API: ingests from the account's first
/// RTMP input, encodes an H.264 ABR ladder (240p-1080p) plus a 128 kbps AAC track,
/// writes TS segments to a GCS output with a live HLS manifest, then polls until the
/// live encoding details become available and prints the RTMP ingest endpoint.
/// </summary>
public void StartLiveStream()
{
    var bitmovin = new BitmovinApi(API_KEY);
    // Segment length (seconds) shared by every TS muxing below.
    double? segmentLength = 4.0;

    var output = bitmovin.Output.Gcs.Create(new GcsOutput
    {
        // (sic: "Ouput" typo is part of the stored resource name — renaming would
        // change the label shown in the Bitmovin dashboard)
        Name = "GCS Ouput",
        AccessKey = GCS_ACCESS_KEY,
        SecretKey = GCS_SECRET_KEY,
        BucketName = GCS_BUCKET_NAME
    });

    var encoding = bitmovin.Encoding.Encoding.Create(new Encoding.Encoding
    {
        Name = "Live Stream C#",
        CloudRegion = EncodingCloudRegion.GOOGLE_EUROPE_WEST_1,
        EncoderVersion = "STABLE"
    });

    // Uses the account's first RTMP input; indexing [0] throws if the account has none.
    var rtmpInput = bitmovin.Input.Rtmp.RetrieveList(0, 100)[0];

    // H.264 ABR ladder, all renditions at 30 fps, reading position 0 of the "live" input.
    var videoConfig1080p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_1080p", Profile = H264Profile.HIGH, Width = 1920, Height = 1080, Bitrate = 4800000, Rate = 30.0f });
    var videoStream1080p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(rtmpInput, "live", 0, videoConfig1080p, SelectionMode.AUTO));
    var videoConfig720p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_720p", Profile = H264Profile.HIGH, Width = 1280, Height = 720, Bitrate = 2400000, Rate = 30.0f });
    var videoStream720p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(rtmpInput, "live", 0, videoConfig720p, SelectionMode.AUTO));
    var videoConfig480p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_480p", Profile = H264Profile.HIGH, Width = 858, Height = 480, Bitrate = 1200000, Rate = 30.0f });
    var videoStream480p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(rtmpInput, "live", 0, videoConfig480p, SelectionMode.AUTO));
    var videoConfig360p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_360p", Profile = H264Profile.HIGH, Width = 640, Height = 360, Bitrate = 800000, Rate = 30.0f });
    var videoStream360p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(rtmpInput, "live", 0, videoConfig360p, SelectionMode.AUTO));
    var videoConfig240p = bitmovin.Codec.H264.Create(new H264VideoConfiguration
        { Name = "H264_Profile_240p", Profile = H264Profile.HIGH, Width = 426, Height = 240, Bitrate = 400000, Rate = 30.0f });
    var videoStream240p = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(rtmpInput, "live", 0, videoConfig240p, SelectionMode.AUTO));

    var audioConfig = bitmovin.Codec.Aac.Create(new AACAudioConfiguration
        { Name = "AAC_Profile_128k", Bitrate = 128000, Rate = 48000 });
    // NOTE(review): video streams use "live" as the input path while audio uses "/"
    // (position 1) — confirm this asymmetry matches the RTMP ingest layout.
    var audioStream = bitmovin.Encoding.Encoding.Stream.Create(encoding.Id,
        CreateStream(rtmpInput, "/", 1, audioConfig, SelectionMode.AUTO));

    // TS muxings (HLS segments) for every rendition plus the audio track.
    var videoMuxing240p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream240p, output, OUTPUT_PATH + "video/240p", segmentLength));
    var videoMuxing360p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream360p, output, OUTPUT_PATH + "video/360p", segmentLength));
    var videoMuxing480p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream480p, output, OUTPUT_PATH + "video/480p", segmentLength));
    var videoMuxing720p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream720p, output, OUTPUT_PATH + "video/720p", segmentLength));
    var videoMuxing1080p = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(videoStream1080p, output, OUTPUT_PATH + "video/1080p", segmentLength));
    var audioMuxing = bitmovin.Encoding.Encoding.Ts.Create(encoding.Id,
        CreateTsMuxing(audioStream, output, OUTPUT_PATH + "audio/128kbps", segmentLength));

    // Manifest output location; PUBLIC_READ ACL so players can fetch playlists/segments.
    var manifestOutput = new Encoding.Output
    {
        OutputPath = OUTPUT_PATH,
        OutputId = output.Id,
        Acl = new List<Acl> { new Acl { Permission = Permission.PUBLIC_READ } }
    };

    var manifest = bitmovin.Manifest.Hls.Create(new Hls
        { Name = "HLS Manifest", ManifestName = "stream.m3u8", Outputs = new List<Encoding.Output> { manifestOutput } });

    // Audio rendition entry for the master playlist (group "audio").
    var mediaInfo = new MediaInfo
    {
        GroupId = "audio",
        Name = "English",
        Uri = "audio.m3u8",
        Type = MediaType.AUDIO,
        SegmentPath = "audio/128kbps/",
        StreamId = audioStream.Id,
        MuxingId = audioMuxing.Id,
        EncodingId = encoding.Id,
        Language = "en",
        AssocLanguage = "en",
        Autoselect = false,
        IsDefault = false,
        Forced = false
    };
    bitmovin.Manifest.Hls.AddMediaInfo(manifest.Id, mediaInfo);
    // One variant playlist per video rendition, all referencing the "audio" group.
    bitmovin.Manifest.Hls.AddStreamInfo(manifest.Id, new StreamInfo
        { Uri = "video_240.m3u8", EncodingId = encoding.Id, StreamId = videoStream240p.Id, MuxingId = videoMuxing240p.Id, Audio = "audio", SegmentPath = "video/240p/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifest.Id, new StreamInfo
        { Uri = "video_360.m3u8", EncodingId = encoding.Id, StreamId = videoStream360p.Id, MuxingId = videoMuxing360p.Id, Audio = "audio", SegmentPath = "video/360p/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifest.Id, new StreamInfo
        { Uri = "video_480.m3u8", EncodingId = encoding.Id, StreamId = videoStream480p.Id, MuxingId = videoMuxing480p.Id, Audio = "audio", SegmentPath = "video/480p/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifest.Id, new StreamInfo
        { Uri = "video_720.m3u8", EncodingId = encoding.Id, StreamId = videoStream720p.Id, MuxingId = videoMuxing720p.Id, Audio = "audio", SegmentPath = "video/720p/" });
    bitmovin.Manifest.Hls.AddStreamInfo(manifest.Id, new StreamInfo
        { Uri = "video_1080.m3u8", EncodingId = encoding.Id, StreamId = videoStream1080p.Id, MuxingId = videoMuxing1080p.Id, Audio = "audio", SegmentPath = "video/1080p/" });

    // Start the live encoding with a 300 s timeshift window on the HLS manifest.
    bitmovin.Encoding.Encoding.StartLive(encoding.Id, new StartLiveEncodingRequest
    {
        StreamKey = "YourStreamKey",
        HlsManifests = new List<LiveHlsManifest> { new LiveHlsManifest { ManifestId = manifest.Id, Timeshift = 300 } }
    });

    // Poll until live details are available: the loop treats any exception from
    // RetrieveLiveDetails as "not ready yet" and retries after 5 s (deliberate swallow).
    LiveEncoding liveEncoding = null;
    while (liveEncoding == null)
    {
        try
        {
            liveEncoding = bitmovin.Encoding.Encoding.RetrieveLiveDetails(encoding.Id);
        }
        catch (System.Exception)
        {
            Thread.Sleep(5000);
        }
    }

    Console.WriteLine("Live stream started");
    Console.WriteLine("Encoding ID: {0}", encoding.Id);
    Console.WriteLine("IP: {0}", liveEncoding.EncoderIp);
    Console.WriteLine("Rtmp URL: rtmp://{0}/live", liveEncoding.EncoderIp);
    Console.WriteLine("Stream Key: {0}", liveEncoding.StreamKey);
}