Example #1
        /// <summary>
        /// Returns a string representing the structure in little-endian
        /// hexadecimal format.
        /// </summary>
        /// <remarks>
        /// The string generated here is intended to be passed as
        /// CodecPrivateData for Silverlight 2's MediaStreamSource
        /// </remarks>
        /// <returns>
        /// A string representing the structure in little-endian hexadecimal
        /// format.
        /// </returns>
        public string ToHexString()
        {
            string s = WaveFormatExtensible.ToHexString();

            char[] mpeglayer3Data = new char[6 * 4];
            BitTools.ToHexHelper(4, this.Id, 0, mpeglayer3Data);
            BitTools.ToHexHelper(8, this.BitratePaddingMode, 4, mpeglayer3Data);
            BitTools.ToHexHelper(4, this.BlockSize, 12, mpeglayer3Data);
            BitTools.ToHexHelper(4, this.FramesPerBlock, 16, mpeglayer3Data);
            BitTools.ToHexHelper(4, this.CodecDelay, 20, mpeglayer3Data);
            return s + new string(mpeglayer3Data);
        }
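BitTools.ToHexHelper is not shown above. A minimal stand-in consistent with the 24-character buffer layout in ToHexString (two hex characters per byte, least-significant byte first; the real helper's signature and digit casing are assumptions) might look like this:

        private static void ToHexHelper(int digits, int value, int offset, char[] buffer)
        {
            const string Hex = "0123456789ABCDEF";

            // Emit digits/2 bytes of value, least-significant byte first,
            // two hex characters per byte.
            for (int i = 0; i < digits / 2; i++)
            {
                byte b = (byte)(value >> (8 * i));
                buffer[offset + (2 * i)] = Hex[b >> 4];      // high nibble
                buffer[offset + (2 * i) + 1] = Hex[b & 0xF]; // low nibble
            }
        }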
Example #2
 /// <summary>
 /// Returns a string representing all of the fields in the object.
 /// </summary>
 /// <returns>
 /// A string representing all of the fields in the object.
 /// </returns>
 public override string ToString()
 {
     return("MPEGLAYER3 "
            + WaveFormatExtensible.ToString()
            + string.Format(
                CultureInfo.InvariantCulture,
                "ID: {0}, Flags: {1}, BlockSize: {2}, FramesPerBlock {3}, CodecDelay {4}",
                this.Id,
                this.BitratePaddingMode,
                this.BlockSize,
                this.FramesPerBlock,
                this.CodecDelay));
 }
Example #3
        protected override void OpenMediaAsync()
        {
            var flvFile = new FlvFile(this.mediaStream);

            this.audioSamples = flvFile.FlvFileBody.Tags.Where(tag => tag.TagType == TagType.Audio).ToList();
            this.videoSamples = flvFile.FlvFileBody.Tags.Where(tag => tag.TagType == TagType.Video).ToList();

            // Audio
            WaveFormatExtensible wfx = new WaveFormatExtensible();

            wfx.FormatTag             = 0x00FF;
            wfx.Channels              = 2;
            wfx.BlockAlign            = 8;
            wfx.BitsPerSample         = 16;
            wfx.SamplesPerSec         = 44100;
            wfx.AverageBytesPerSecond = wfx.SamplesPerSec * wfx.Channels * wfx.BitsPerSample / wfx.BlockAlign;
            wfx.Size = 0;
            string codecPrivateData = wfx.ToHexString();

            Dictionary<MediaStreamAttributeKeys, string> audioStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();

            audioStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);

            // Video
            Dictionary<MediaStreamAttributeKeys, string> videoStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();

            videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
            this.videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);

            // Media
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();

            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = audioSamples.Last().Timestamp.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek]  = true.ToString();

            List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

            mediaStreamDescriptions.Add(this.audioStreamDescription);
            mediaStreamDescriptions.Add(this.videoStreamDescription);

            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
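The example above sets up the stream descriptions but never shows sample delivery. A minimal GetSampleAsync counterpart might look like the sketch below; it is not part of the original example, and a real implementation would package the payload of the next tag from audioSamples/videoSamples rather than ending the stream immediately:

        private static readonly Dictionary<MediaSampleAttributeKeys, string> emptySampleAttributes =
            new Dictionary<MediaSampleAttributeKeys, string>();

        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            MediaStreamDescription description = mediaStreamType == MediaStreamType.Audio
                ? this.audioStreamDescription
                : this.videoStreamDescription;

            // A sample with a null stream is reported as empty, which tells
            // the pipeline that this stream has ended.
            this.ReportGetSampleCompleted(
                new MediaStreamSample(description, null, 0, 0, 0, emptySampleAttributes));
        }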
Example #4
        /// <summary>
        /// Callback which handles setting up an MSS once the first MpegFrame after Id3v2 data has been read.
        /// </summary>
        /// <param name="mpegLayer3Frame"> First MpegFrame</param>
        /// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes</param>
        /// <param name="mediaStreamDescriptions">Empty dictionary for MediaStreamDescriptions</param>
        /// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes</param>
        private void ReadPastId3v2TagsCallback(
            MpegFrame mpegLayer3Frame,
            Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes,
            List<MediaStreamDescription> mediaStreamDescriptions,
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes)
        {
            if (mpegLayer3Frame.FrameSize <= 0)
            {
                throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
            }

            // Initialize the Mp3 data structures used by the Media pipeline with state from the first frame.
            WaveFormatExtensible wfx = new WaveFormatExtensible();
            this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
            this.MpegLayer3WaveFormat.WaveFormatExtensible = wfx;

            this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag = 85;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels = (short)((mpegLayer3Frame.Channels == Channel.SingleChannel) ? 1 : 2);
            this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec = mpegLayer3Frame.SamplingRate;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign = 1;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample = 0;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize = 12;

            this.MpegLayer3WaveFormat.Id = 1;
            this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
            this.MpegLayer3WaveFormat.FramesPerBlock = 1;
            this.MpegLayer3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize;
            this.MpegLayer3WaveFormat.CodecDelay = 0;

            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

            mediaStreamDescriptions.Add(this.audioStreamDescription);

            this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
            if (this.audioStream.CanSeek)
            {
                mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "1";
            }
            else
            {
                mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";
            }

            // Report that the Mp3MediaStreamSource has finished initializing its internal state and can now
            // pass in Mp3 Samples.
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

            this.currentFrame = mpegLayer3Frame;
            this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
        }
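Taken together with Example #1, the CodecPrivateData built here has a fixed layout. As a worked example (assuming two hex digits per byte, least-significant byte first, as the buffer offsets in ToHexString imply; digit casing is a guess), a 44.1 kHz stereo stream at 128 kbit/s with a 417-byte first frame would serialize as:

    WAVEFORMATEX : 5500 0200 44AC0000 803E0000 0100 0000 0C00
    MP3 extension: 0100 00000000 A101 0100 0000

that is, FormatTag=85, Channels=2, SamplesPerSec=44100, AverageBytesPerSecond=16000, BlockAlign=1, BitsPerSample=0, ExtraDataSize=12, followed by Id=1, BitratePaddingMode=0, BlockSize=417, FramesPerBlock=1, CodecDelay=0.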
Example #5
        public void SetupTests()
        {
            this.wfx = new WaveFormatExtensible();
            this.wfx.FormatTag = 85;
            this.wfx.Channels = 2;
            this.wfx.SamplesPerSec = 8000;
            this.wfx.AverageBytesPerSecond = 500;
            this.wfx.BlockAlign = 1;
            this.wfx.BitsPerSample = 16;
            this.wfx.ExtraDataSize = 12;

            this.mp3wfx = new MpegLayer3WaveFormat();
        }
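Building on this setup, a test of ToHexString can check the composition property from Example #1 without pinning down the exact hex digits (the [TestMethod] attribute and assertion style are assumptions; the 24-character figure comes from Example #1's buffer):

        [TestMethod]
        public void ToHexStringAppendsMp3ExtensionToWaveFormat()
        {
            this.mp3wfx.WaveFormatExtensible = this.wfx;
            this.mp3wfx.Id = 1;
            this.mp3wfx.BitratePaddingMode = 0;
            this.mp3wfx.BlockSize = 144;
            this.mp3wfx.FramesPerBlock = 1;
            this.mp3wfx.CodecDelay = 0;

            string result = this.mp3wfx.ToHexString();

            // Example #1 appends exactly 6 * 4 = 24 hex characters for the
            // MP3-specific fields after the WAVEFORMATEX portion.
            Assert.IsTrue(result.StartsWith(this.wfx.ToHexString(), StringComparison.Ordinal));
            Assert.AreEqual(this.wfx.ToHexString().Length + 24, result.Length);
        }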
Example #6
 public void SetupTests()
 {
     this.wfx = new WaveFormatExtensible();
 }
Example #7
        protected override void OpenMediaAsync()
        {
            // Initialize data structures to pass to the Media pipeline via the MediaStreamSource
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

            // Initialize the Mp3 data structures used by the Media pipeline with state from the first frame.
            WaveFormatExtensible wfx = new WaveFormatExtensible();
            wfx.FormatTag = 1; // PCM
            wfx.Channels = 2;
            wfx.SamplesPerSec = 44100;
            wfx.AverageBytesPerSecond = 44100 * 2 * 2;
            wfx.BlockAlign = 4;
            wfx.BitsPerSample = 16;
            wfx.Size = 0;

            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = wfx.ToHexString();
            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

            mediaStreamDescriptions.Add(this.audioStreamDescription);

            // <note>This part is a direct copy of Mp3MediaStreamSource:
            // setting a 0 duration to avoid the math to calculate the Mp3 file length in minutes and seconds.
            // This was done just to simplify this initial version of the code for other people reading it.
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = true.ToString();
            // </note>

            sample_enumerator = this.DecodeSamples().GetEnumerator();
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
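DecodeSamples is not shown in the example. A stand-in consistent with the PCM format configured above might yield fixed-size buffers of silence (the element type and one-second buffer size are assumptions):

        private IEnumerable<MemoryStream> DecodeSamples()
        {
            while (true)
            {
                // One second of 16-bit stereo PCM at 44.1 kHz:
                // 44100 frames * 4 bytes (BlockAlign) per frame, all zeroes.
                yield return new MemoryStream(new byte[44100 * 4]);
            }
        }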