/// <summary>
/// Builds the list of supported <see cref="ManifestTrack"/> instances from a parsed box tree.
/// </summary>
/// <param name="boxes">Top-level boxes of the file; must contain exactly one 'moov' box.</param>
/// <returns>All tracks whose handler/codec combination is supported (may be empty).</returns>
/// <exception cref="InvalidOperationException">Thrown when no 'moov' box is present.</exception>
public static List<ManifestTrack> InitializeTrackRegistry(IEnumerable<Box> boxes)
{
    var trackManifests = new List<ManifestTrack>();

    var moov = boxes.SingleOrDefault(b => b.Type == BoxType.Moov);
    if (moov == null)
    {
        // Fail with a descriptive error instead of a NullReferenceException on moov.InnerBoxes below.
        throw new InvalidOperationException("The box collection does not contain a 'moov' box.");
    }

    // The 'mfra' (movie fragment random access) box is optional; without it tfra stays null.
    var mfra = boxes.SingleOrDefault(b => b.Type == BoxType.Mfra);

    foreach (var trak in moov.InnerBoxes.Where(b => b.Type == BoxType.Trak))
    {
        var tkhd = trak.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Tkhd) as TrackHeaderFullBox;
        var mdia = trak.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Mdia);
        var mdhd = mdia.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Mdhd) as MediaHeaderFullBox;

        // Match this track's random-access table by track id, if an 'mfra' exists at all.
        var tfra = mfra == null
            ? null
            : mfra.InnerBoxes.Where(b => b.Type == BoxType.Tfra)
                  .Cast<TrackFragmentRandomAccessFullBox>()
                  .SingleOrDefault(b => b.TrackId == tkhd.TrackId);

        // moov/trak/mdia/minf/stbl/stsd holds the sample (codec) description for the track.
        var stsd = mdia.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Minf)
            .InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Stbl)
            .InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Stsd) as SampleDescriptionFullBox;

        var handlerType = GetTrackHandlerType(
            mdia.InnerBoxes.SingleOrDefault(box => box.Type == BoxType.Hdlr) as HandlerReferenceFullBox);

        var manTrack = new ManifestTrack(handlerType, tkhd, mdhd, tfra, stsd);
        if (manTrack.IsSupported)
        {
            trackManifests.Add(manTrack);
        }
    }

    return trackManifests;
}
/// <summary>
/// Convenience overload: registers the tracks found in <paramref name="boxes"/> and
/// generates the client-manifest stream index for the first one.
/// </summary>
/// <param name="boxes">Parsed top-level boxes of the source file.</param>
public static SmoothStreamingMediaStreamIndex GenerateClientManifestStreamIndex(IList<Box> boxes)
{
    var firstTrack = ManifestTrack.InitializeTrackRegistry(boxes).First();
    var moovBox = boxes.SingleOrDefault(b => b.Type == BoxType.Moov);
    return GenerateClientManifestStreamIndex(firstTrack, moovBox);
}
/// <summary>
/// Builds the client-manifest StreamIndex element for a single track.
/// </summary>
/// <param name="track">Track whose metadata populates the stream index and its quality level.</param>
/// <param name="moov">The 'moov' box; used to resolve the track's media header (language) and timescale.</param>
/// <returns>The populated stream index, including chunk entries when fragment data is available.</returns>
public static SmoothStreamingMediaStreamIndex GenerateClientManifestStreamIndex(ManifestTrack track, Box moov)
{
    var streamIndex = new SmoothStreamingMediaStreamIndex();
    // Manifest Type values are machine-readable tokens ("video"/"audio"/"text"),
    // so lower-case with the invariant culture rather than the current one (CA1304).
    streamIndex.Type = track.Type.ToString().ToLowerInvariant();

    var qualityLevel = new SmoothStreamingMediaStreamIndexQualityLevel();
    streamIndex.QualityLevel.Add(qualityLevel);
    streamIndex.TimeScale = track.TimeScale;

    // Locate this track's media header (moov/trak[tkhd.TrackId == track.Id]/mdia/mdhd) for the language tag.
    var mdhd = moov.InnerBoxes.Single(box => box.Type == BoxType.Trak
            && (box.InnerBoxes.Single(tkhd => tkhd.Type == BoxType.Tkhd) as TrackHeaderFullBox).TrackId == track.Id)
        .InnerBoxes.Single(box => box.Type == BoxType.Mdia)
        .InnerBoxes.Single(box => box.Type == BoxType.Mdhd) as MediaHeaderFullBox;
    streamIndex.Language = mdhd.Language;

    switch (track.Type)
    {
        case ManifestTrackType.Video:
            // Populate track-level video attributes.
            streamIndex.MaxHeight = track.Height;
            streamIndex.MaxWidth = track.Width;
            streamIndex.DisplayWidth = track.DisplayWidth;
            streamIndex.DisplayHeight = track.DisplayHeight;
            // Populate the (single) quality level.
            qualityLevel.Index = 0;
            qualityLevel.Bitrate = track.Bitrate;
            qualityLevel.FourCC = track.FourCodecCode;
            qualityLevel.MaxHeight = track.Height;
            qualityLevel.MaxWidth = track.Width;
            qualityLevel.CodecPrivateData = track.CodecPrivateData;
            break;
        case ManifestTrackType.Audio:
            streamIndex.FourCC = track.FourCodecCode;
            streamIndex.Index = 0;
            // Populate the (single) quality level.
            qualityLevel.Bitrate = track.Bitrate;
            qualityLevel.SamplingRate = track.SampleRate;
            qualityLevel.Channels = track.ChannelCount;
            qualityLevel.BitsPerSample = track.SampleSize;
            qualityLevel.PacketSize = track.PacketSize;
            qualityLevel.AudioTag = track.AudioTag;
            qualityLevel.CodecPrivateData = track.CodecPrivateData;
            qualityLevel.FourCC = track.FourCodecCode;
            break;
        case ManifestTrackType.Text:
            // Text tracks need no additional attributes here.
            // (Removed dead placeholder code: "int i = 0; i++;".)
            break;
    }

    if (track.Fragments != null)
    {
        streamIndex.c.AddRange(GenerateClientManifestChunks(track, moov));
        streamIndex.Chunks = (uint)streamIndex.c.Count;
    }

    return streamIndex;
}
/// <summary>
/// Lazily produces one manifest chunk ('c') element per unique movie fragment of the track.
/// </summary>
/// <param name="track">Track providing the fragment random-access entries, duration, and timescale.</param>
/// <param name="moov">The 'moov' box; its 'mvhd' supplies the presentation timescale for the final chunk.</param>
private static IEnumerable<SmoothStreamingMediaStreamIndexC> GenerateClientManifestChunks(ManifestTrack track, Box moov)
{
    // Entries pointing at the same 'moof' offset are duplicates that cause playback issues;
    // keep only the first entry per offset.
    var entries = track.Fragments.TrackFragmentRandomAccessEntries
        .GroupBy(entry => entry.MoofOffset)
        .Select(group => group.First())
        .ToList();

    // Running total of the durations emitted so far, in the track's media timescale.
    ulong entriesDuration = 0;

    for (int i = 0; i < entries.Count; i++)
    {
        var c = new SmoothStreamingMediaStreamIndexC();
        c.n = i;

        if (i != entries.Count - 1)
        {
            // Chunk duration is the gap between this entry's time and the next one's.
            // Use the list indexer instead of ElementAt for clarity on a materialized List<T>.
            c.d = entries[i + 1].Time - entries[i].Time;
            entriesDuration += c.d;
        }
        else
        {
            var mvhd = moov.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Mvhd) as MovieHeaderFullBox;

            // The last chunk gets whatever duration remains. The track duration is expressed in
            // the presentation ('mvhd') timescale, so convert it to the media timescale first.
            c.d = ConvertTimeToTimescale(track.Duration, mvhd.TimeScale, track.TimeScale) - entriesDuration;
        }

        yield return c;
    }
}
/// <summary>
/// Builds the client-manifest StreamIndex element for a single track.
/// </summary>
/// <param name="track">Track whose metadata populates the stream index and its quality level.</param>
/// <param name="moov">The 'moov' box; used to resolve the track's media header (language) and timescale.</param>
/// <returns>The populated stream index, including chunk entries when fragment data is available.</returns>
public static SmoothStreamingMediaStreamIndex GenerateClientManifestStreamIndex(ManifestTrack track, Box moov)
{
    var streamIndex = new SmoothStreamingMediaStreamIndex();
    // Manifest Type values are machine-readable tokens ("video"/"audio"/"text"),
    // so lower-case with the invariant culture rather than the current one (CA1304).
    streamIndex.Type = track.Type.ToString().ToLowerInvariant();

    var qualityLevel = new SmoothStreamingMediaStreamIndexQualityLevel();
    streamIndex.QualityLevel.Add(qualityLevel);
    streamIndex.TimeScale = track.TimeScale;

    // Locate this track's media header (moov/trak[tkhd.TrackId == track.Id]/mdia/mdhd) for the language tag.
    var mdhd = moov.InnerBoxes.Single(box => box.Type == BoxType.Trak
            && (box.InnerBoxes.Single(tkhd => tkhd.Type == BoxType.Tkhd) as TrackHeaderFullBox).TrackId == track.Id)
        .InnerBoxes.Single(box => box.Type == BoxType.Mdia)
        .InnerBoxes.Single(box => box.Type == BoxType.Mdhd) as MediaHeaderFullBox;
    streamIndex.Language = mdhd.Language;

    switch (track.Type)
    {
        case ManifestTrackType.Video:
            // Populate track-level video attributes.
            streamIndex.MaxHeight = track.Height;
            streamIndex.MaxWidth = track.Width;
            streamIndex.DisplayWidth = track.DisplayWidth;
            streamIndex.DisplayHeight = track.DisplayHeight;
            // Populate the (single) quality level.
            qualityLevel.Index = 0;
            qualityLevel.Bitrate = track.Bitrate;
            qualityLevel.FourCC = track.FourCodecCode;
            qualityLevel.MaxHeight = track.Height;
            qualityLevel.MaxWidth = track.Width;
            qualityLevel.CodecPrivateData = track.CodecPrivateData;
            break;
        case ManifestTrackType.Audio:
            streamIndex.FourCC = track.FourCodecCode;
            streamIndex.Index = 0;
            // Populate the (single) quality level.
            qualityLevel.Bitrate = track.Bitrate;
            qualityLevel.SamplingRate = track.SampleRate;
            qualityLevel.Channels = track.ChannelCount;
            qualityLevel.BitsPerSample = track.SampleSize;
            qualityLevel.PacketSize = track.PacketSize;
            qualityLevel.AudioTag = track.AudioTag;
            qualityLevel.CodecPrivateData = track.CodecPrivateData;
            qualityLevel.FourCC = track.FourCodecCode;
            break;
        case ManifestTrackType.Text:
            // Text tracks need no additional attributes here.
            // (Removed dead placeholder code: "int i = 0; i++;".)
            break;
    }

    if (track.Fragments != null)
    {
        streamIndex.c.AddRange(GenerateClientManifestChunks(track, moov));
        streamIndex.Chunks = (uint)streamIndex.c.Count;
    }

    return streamIndex;
}
/// <summary>
/// Lazily produces one manifest chunk ('c') element per unique movie fragment of the track.
/// </summary>
/// <param name="track">Track providing the fragment random-access entries, duration, and timescale.</param>
/// <param name="moov">The 'moov' box; its 'mvhd' supplies the presentation timescale for the final chunk.</param>
private static IEnumerable<SmoothStreamingMediaStreamIndexC> GenerateClientManifestChunks(ManifestTrack track, Box moov)
{
    // Entries pointing at the same 'moof' offset are duplicates that cause playback issues;
    // keep only the first entry per offset.
    var entries = track.Fragments.TrackFragmentRandomAccessEntries
        .GroupBy(entry => entry.MoofOffset)
        .Select(group => group.First())
        .ToList();

    // Running total of the durations emitted so far, in the track's media timescale.
    ulong entriesDuration = 0;

    for (int i = 0; i < entries.Count; i++)
    {
        var c = new SmoothStreamingMediaStreamIndexC();
        c.n = i;

        if (i != entries.Count - 1)
        {
            // Chunk duration is the gap between this entry's time and the next one's.
            // Use the list indexer instead of ElementAt for clarity on a materialized List<T>.
            c.d = entries[i + 1].Time - entries[i].Time;
            entriesDuration += c.d;
        }
        else
        {
            var mvhd = moov.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Mvhd) as MovieHeaderFullBox;

            // The last chunk gets whatever duration remains. The track duration is expressed in
            // the presentation ('mvhd') timescale, so convert it to the media timescale first.
            c.d = ConvertTimeToTimescale(track.Duration, mvhd.TimeScale, track.TimeScale) - entriesDuration;
        }

        yield return c;
    }
}
/// <summary>
/// Builds the client-manifest stream from the supplied boxes by first registering
/// their tracks and then delegating to the track-aware overload.
/// </summary>
/// <param name="Boxes">Parsed top-level boxes of the source file.</param>
public static Stream GenerateClientManifestStream(IEnumerable<Box> Boxes)
{
    var tracks = ManifestTrack.InitializeTrackRegistry(Boxes);
    return GenerateClientManifestStream(Boxes, tracks);
}
/// <summary>
/// Builds the list of supported <see cref="ManifestTrack"/> instances from a parsed box tree.
/// </summary>
/// <param name="boxes">Top-level boxes of the file; must contain exactly one 'moov' box.</param>
/// <returns>All tracks whose handler/codec combination is supported (may be empty).</returns>
/// <exception cref="InvalidOperationException">Thrown when no 'moov' box is present.</exception>
public static List<ManifestTrack> InitializeTrackRegistry(IEnumerable<Box> boxes)
{
    var trackManifests = new List<ManifestTrack>();

    var moov = boxes.SingleOrDefault(b => b.Type == BoxType.Moov);
    if (moov == null)
    {
        // Fail with a descriptive error instead of a NullReferenceException on moov.InnerBoxes below.
        throw new InvalidOperationException("The box collection does not contain a 'moov' box.");
    }

    // The 'mfra' (movie fragment random access) box is optional; without it tfra stays null.
    var mfra = boxes.SingleOrDefault(b => b.Type == BoxType.Mfra);

    foreach (var trak in moov.InnerBoxes.Where(b => b.Type == BoxType.Trak))
    {
        var tkhd = trak.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Tkhd) as TrackHeaderFullBox;
        var mdia = trak.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Mdia);
        var mdhd = mdia.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Mdhd) as MediaHeaderFullBox;

        // Match this track's random-access table by track id, if an 'mfra' exists at all.
        var tfra = mfra == null
            ? null
            : mfra.InnerBoxes.Where(b => b.Type == BoxType.Tfra)
                  .Cast<TrackFragmentRandomAccessFullBox>()
                  .SingleOrDefault(b => b.TrackId == tkhd.TrackId);

        // moov/trak/mdia/minf/stbl/stsd holds the sample (codec) description for the track.
        var stsd = mdia.InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Minf)
            .InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Stbl)
            .InnerBoxes.SingleOrDefault(b => b.Type == BoxType.Stsd) as SampleDescriptionFullBox;

        var handlerType = GetTrackHandlerType(
            mdia.InnerBoxes.SingleOrDefault(box => box.Type == BoxType.Hdlr) as HandlerReferenceFullBox);

        var manTrack = new ManifestTrack(handlerType, tkhd, mdhd, tfra, stsd);
        if (manTrack.IsSupported)
        {
            trackManifests.Add(manTrack);
        }
    }

    return trackManifests;
}