/// <summary>
/// Read the supplied configuration and prepare the transformer for work.
/// </summary>
private void PrepareTransformer (EncoderConfiguration.Configuration Configuration, List<EncoderPackage> Packages) {
    Config = Configuration;
    TimeSpan TargetDuration = TimeSpan.FromSeconds(Config.EncoderSettings.FragmentSeconds);
    PublishPoint = Config.Upload.VideoDestinationRoot;
    if (String.IsNullOrEmpty(PublishPoint)) throw new ArgumentException("Publishing point must not be empty", "Configuration");

    PushServer = new IisSmoothPush(new Uri(PublishPoint));
    TrackDurations = new Dictionary<int, long>();
    TrackOffsets = new Dictionary<int, long>();

    targetDuration = (ulong)TargetDuration.Ticks;
    Streams = new MediaStream[Packages.Count];

    foreach (var pkg in Packages) {
        if (pkg.Specification.HasVideo && pkg.Specification.HasAudio) {
            throw new NotSupportedException("IIS Smooth output doesn't support pre-muxed streams");
        }

        if (pkg.Specification.HasAudio) {
            Streams[pkg.JobIndex] = new MediaStream(); // for now, stream 0 is audio, and all others are video.
            Streams[pkg.JobIndex].TrackId = pkg.JobIndex + 1;
            Streams[pkg.JobIndex].FourCC = "mp3a"; // MP3
            //Streams[pkg.JobIndex].FourCC = "mp4a"; // AAC
            Streams[pkg.JobIndex].Height = 0;
            Streams[pkg.JobIndex].Width = 0;
            Streams[pkg.JobIndex].Bitrate = 96000; //pkg.Job.Bitrate; // later!
        } else if (pkg.Specification.HasVideo) {
            Streams[pkg.JobIndex] = new MediaStream(); // for now, stream 0 is audio, and all others are video.
            Streams[pkg.JobIndex].TrackId = pkg.JobIndex + 1;
            Streams[pkg.JobIndex].FourCC = "H264"; // this is the M$ format, not iso (which is 'avc1')
            Streams[pkg.JobIndex].Height = Config.EncoderSettings.OutputHeight; // the actual size may be different due to scaling factor.
            Streams[pkg.JobIndex].Width = Config.EncoderSettings.OutputWidth;
            Streams[pkg.JobIndex].Bitrate = pkg.Job.Bitrate;
        }
    }

    Mp4fFile = new FileRoot(Streams);

    Demuxer = new MpegTS_Demux[Packages.Count];
    for (int di = 0; di < Demuxer.Length; di++) {
        Demuxer[di] = new MpegTS_Demux();
    }
}
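// --- Illustrative sketch, not part of the original source ----------------------------------
// The fields prepared above (Streams, Demuxer, targetDuration) are typically driven by feeding
// each package's MPEG-TS output through its demuxer and buffering the resulting frames on the
// matching MediaStream until roughly one fragment's worth has accumulated. The method and
// demuxer calls below (ConsumePackageData, FeedTransportStream, GetAvailableFrames) are assumed
// names for illustration only; the real MpegTS_Demux API may differ.
/*
private void ConsumePackageData (EncoderPackage pkg, byte[] transportStreamChunk) {
    var demux = Demuxer[pkg.JobIndex];
    var stream = Streams[pkg.JobIndex];

    demux.FeedTransportStream(transportStreamChunk);     // assumed API
    stream.Frames.AddRange(demux.GetAvailableFrames());  // assumed API

    // once a fragment's worth of frames is buffered, push it to IIS:
    long buffered = 0;
    foreach (var frame in stream.Frames) buffered += frame.FrameDuration;
    if ((ulong)buffered >= targetDuration) PushStream(stream, Mp4fFile);
}
*/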
/// <summary>
/// Pushes a set of frames to IIS. Will trigger a connect if needed.
/// </summary>
private void PushStream (MediaStream stream, FileRoot TargetMp4fFile) {
    if (stream == null || stream.Frames == null) return; // no frames.

    SanitiseStream(stream);
    if (stream.Frames.Count < 1) return; // no frames.

    if (!PushServer.IsConnected(stream.TrackId)) ConnectAndPushHeaders(stream, TargetMp4fFile);

    // set start-of-fragment time from PTS
    stream.Offset = stream.Frames[0].FramePresentationTime - stream.Frames[0].FrameDuration;

    // Push the fragment
    var fragment_handler = TargetMp4fFile.GenerateFragment(stream);
    PushServer.PushData(stream.TrackId, fragment_handler.MoofData());
    PushServer.PushData(stream.TrackId, fragment_handler.MdatData());
}
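// --- Illustrative sketch, not part of the original source ----------------------------------
// SanitiseStream() is called above but not included in this excerpt. A clean-up pass of this
// kind usually removes frames that would produce an invalid fragment; the sketch below uses only
// members already visible in this file (FramePresentationTime, FrameDuration) and is an
// assumption about the real implementation, not a copy of it.
/*
private void SanitiseStream (MediaStream stream) {
    // drop frames with a non-positive duration (they would break the fragment timing boxes)
    stream.Frames.RemoveAll(f => f.FrameDuration <= 0);

    // keep presentation order monotonic so the Offset calculated in PushStream() stays valid
    stream.Frames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));
}
*/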
/// <summary>
/// Used once per connection, this opens a long-life HTTP stream
/// and pushes the very basic MP4 parts needed to get IIS working.
/// </summary>
private void ConnectAndPushHeaders (MediaStream stream, FileRoot TargetMp4fFile) {
    SmilGenerator smil = new SmilGenerator("HCS Encoder by Iain Ballard.", stream);
    smil.ApproxBitrate = stream.Bitrate;
    MP4_Mangler.ExtraBoxes.SmoothSmil ssmil = new MP4_Mangler.ExtraBoxes.SmoothSmil(smil.Generate());

    PushServer.Connect(stream.TrackId); // This pushes to the subpath: Streams({id}-stream{index})

    // push headers (only done once per track)
    // each one needs its own HTTP chunk, so don't concat!
    PushServer.PushData(stream.TrackId, TargetMp4fFile.GenerateFileSpec());
    PushServer.PushData(stream.TrackId, ssmil.deepData());
    PushServer.PushData(stream.TrackId, TargetMp4fFile.GenerateHeaders());
}
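// --- Illustrative sketch, not part of the original source ----------------------------------
// IisSmoothPush.Connect() is not shown here. For IIS Live Smooth Streaming the connection it
// opens is normally one long-lived chunked HTTP POST per track to the publishing point's
// Streams(...) sub-path, and each later PushData() call writes one chunk on that stream. A
// minimal sketch of that idea using HttpWebRequest (System.Net / System.IO); the exact stream
// identifier format is an assumption.
/*
private Stream OpenPushConnection (string publishPoint, int trackId) {
    // e.g. http://server/pubpoint.isml/Streams(encoder1-stream2)
    var uri = new Uri(publishPoint.TrimEnd('/') + "/Streams(encoder1-stream" + trackId + ")");
    var request = (HttpWebRequest)WebRequest.Create(uri);
    request.Method = "POST";
    request.SendChunked = true;                // long-life stream: no Content-Length
    request.AllowWriteStreamBuffering = false; // each fragment goes out as its own chunk
    request.KeepAlive = true;
    return request.GetRequestStream();         // subsequent pushes write to this stream
}
*/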