Example #1
 /// <summary>
 /// Given a Stream ID for a video stream, return the video MediaType.
 /// </summary>
 /// <param name="id">Stream ID of the video stream.</param>
 /// <returns>The video MediaType, or null if the stream has no frames.</returns>
 public static MediaTypeVideoInfo StreamIdToVideoMediaType(int id)
 {
     byte[] frame = DatabaseUtility.GetFirstFrame(id);
     if (frame == null)
     {
          return null;
     }
     byte[] compressionData;
      return VideoMediaTypeFromFrame(frame, out compressionData);
 }
Example #2
 /// <summary>
 /// Given a Stream ID for an audio stream, return the Audio MediaType.
 /// </summary>
 /// <param name="id">Stream ID of the audio stream.</param>
 /// <returns>The audio MediaType, or null if the stream has no frames.</returns>
 public static MediaTypeWaveFormatEx StreamIdToAudioMediaType(int id)
 {
     byte[] frame = DatabaseUtility.GetFirstFrame(id);
     if (frame == null)
     {
          return null;
     }
     byte[] compressionData;
      return AudioMediaTypeFromFrame(frame, out compressionData);
 }
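
Both helpers follow the same pattern: they fetch the first frame of the stream and return null when no frame exists, so callers only need a null check. A minimal usage sketch follows; the hosting class name ProfileUtility and the TryGetStreamMediaTypes wrapper are assumptions for illustration, not part of the original code.

 // Sketch only: assumes the two static helpers above live on a class named
 // ProfileUtility and that the IDs refer to streams stored in the database.
 public static bool TryGetStreamMediaTypes(int videoStreamId, int audioStreamId,
     out MediaTypeVideoInfo videoMt, out MediaTypeWaveFormatEx audioMt)
 {
     videoMt = ProfileUtility.StreamIdToVideoMediaType(videoStreamId);
     audioMt = ProfileUtility.StreamIdToAudioMediaType(audioStreamId);

     // Either helper returns null when its stream has no frames.
     return (videoMt != null) && (audioMt != null);
 }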
Example #3
        /// <summary>
        /// Return a ProfileData instance containing the MediaType and codec private data as
        /// determined by the first frame of the stream referenced by the streamID.
        /// Return null if there are no frames, or if stream payload is not audio or video.
        /// </summary>
        /// <param name="streamID">Stream ID of the stream to inspect.</param>
        /// <param name="payload">Payload type of the stream; must be dynamicAudio or dynamicVideo.</param>
        /// <returns>A ProfileData instance, or null if there are no frames or the payload is neither audio nor video.</returns>
        public static ProfileData StreamIdToProfileData(int streamID, PayloadType payload)
        {
            byte[] frame = DatabaseUtility.GetFirstFrame(streamID);
            if (frame == null)
            {
                return null;
            }

            if (payload == PayloadType.dynamicAudio)
            {
                return FramesToProfileData(frame, null);
            }
            else if (payload == PayloadType.dynamicVideo)
            {
                return FramesToProfileData(null, frame);
            }
            return null;
        }
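
Only the dynamicAudio and dynamicVideo payloads produce a ProfileData; any other payload, or a stream with no frames, yields null. A hedged calling sketch follows; the ProfileUtility class name and the VideoProfileOrNull wrapper are assumptions for illustration.

        // Sketch only: the ProfileUtility class name is an assumption.
        public static ProfileData VideoProfileOrNull(int streamID)
        {
            // Null means either the stream has no frames or its payload is
            // neither dynamicAudio nor dynamicVideo.
            return ProfileUtility.StreamIdToProfileData(streamID, PayloadType.dynamicVideo);
        }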
Example #4
        /// <summary>
        /// Construct and return a ProfileData instance
        /// containing media type and codec private data for audio and video as
        /// determined using the first audio and video frames referenced by the segment.
        /// Audio sources in the segment other than the first will be ignored.
        /// </summary>
        /// <param name="segment">The job segment whose first audio and video frames are inspected.</param>
        /// <returns>A ProfileData instance for the segment's audio stream, and for its video stream unless slides replace video.</returns>
        public static ProfileData SegmentToProfileData(ArchiveTranscoderJobSegment segment)
        {
            DateTime startDt = DateTime.Parse(segment.StartTime);
            DateTime endDt   = DateTime.Parse(segment.EndTime);

            byte[] aframe = DatabaseUtility.GetFirstFrame(PayloadType.dynamicAudio, segment.AudioDescriptor[0].AudioCname,
                                                          segment.AudioDescriptor[0].AudioName, startDt.Ticks, endDt.Ticks);

            if (Utility.SegmentFlagIsSet(segment, SegmentFlags.SlidesReplaceVideo))
            {
                return AudioFrameToProfileData(aframe);
            }
            else
            {
                byte[] vframe = DatabaseUtility.GetFirstFrame(PayloadType.dynamicVideo, segment.VideoDescriptor.VideoCname,
                                                              segment.VideoDescriptor.VideoName, startDt.Ticks, endDt.Ticks);
                return FramesToProfileData(aframe, vframe);
            }
        }
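
Because only AudioDescriptor[0] is consulted, any additional audio sources in the segment are ignored, and when the SlidesReplaceVideo flag is set the returned profile describes audio only. A hedged usage sketch; the ProfileUtility class name and the caller-supplied segment are assumptions, not part of the original code.

        // Sketch only: the segment is assumed to come from an existing
        // ArchiveTranscoder job, and ProfileUtility is an assumed class name.
        public static ProfileData ProfileForSegment(ArchiveTranscoderJobSegment segment)
        {
            ProfileData profile = ProfileUtility.SegmentToProfileData(segment);

            // With SegmentFlags.SlidesReplaceVideo set, only the audio media
            // type is populated; otherwise both audio and video are described.
            return profile;
        }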