Example #1
0
        /// <summary>
        /// Produces the next video <c>MediaStreamSample</c> for the pipeline, blocking
        /// until the producer thread supplies a new video block when the current one
        /// is exhausted. Returns a null-stream sample once the stream has ended.
        /// </summary>
        private MediaStreamSample GetVideoSample2()
        {
            //Utility.Trace(" GetVideoSample2 ");

            // Sentinel sample with a null stream: signals end-of-stream to the pipeline.
            MediaStreamSample endOfStreamSample =
                new MediaStreamSample(this.videoStreamDescription, null, 0, 0, 0, emptyDict);

            if (m_VmediaStreamEnd)
            {
                Utility.Trace(" GetVideoSample2 NULL stream has been sent!");
                return endOfStreamSample;
            }

            // Current block exhausted: release it and block until the next one arrives.
            if (vIdx >= fNum)
            {
                m_curVideoBlk = null;

                Utility.Trace(" GetVideoSample,m_vbuffer WaitForWorkItem ");
                m_vbuffer.WaitForWorkItem();
                Utility.Trace(" GetVideoSample,m_vbuffer Got an Item ");

                m_curVideoBlk = m_vbuffer.Dequeue();
                if (m_curVideoBlk == null)
                {
                    // Producer signalled completion; remember it and emit the sentinel.
                    m_VmediaStreamEnd = true;
                    Utility.Trace(" GetVideoSample2 NULL stream has been sent!");
                    return endOfStreamSample;
                }

                vIdx = 0;
                fNum = (int)m_curVideoBlk.VideoFrameNum;
            }

            // Wrap the current frame's bytes in a fresh stream for the sample.
            var frame       = m_curVideoBlk.vFrames[vIdx];
            int frameLength = (int)frame.Length;
            var frameStream = new MemoryStream();
            frameStream.Write(frame._data, 0, frameLength);

            MediaStreamSample result = new MediaStreamSample(
                this.videoStreamDescription,
                frameStream,
                0,
                frameLength,
                frame.RelativeTime,   // timestamp, in 100-nanosecond units
                emptyDict);

            vIdx++;
            return result;
        }
Example #2
0
        /// <summary>
        /// Parses one video block starting at <paramref name="offset"/> in the stream:
        /// reads the block header (frame count and per-frame parameter sets), then copies
        /// each frame's payload bytes into a new <c>MSF.VideoBlock</c>.
        /// </summary>
        /// <param name="offset">Absolute byte offset of the block in the stream.</param>
        /// <param name="_stream">Seekable stream containing the media data.</param>
        /// <returns>The populated video block, after NAL-header extraction.</returns>
        public MSF.VideoBlock ParseVideoBlock(int offset, Stream _stream)
        {
            _stream.Seek(offset, SeekOrigin.Begin);
            byte[]         tmp = new byte[4];
            MSF.VideoBlock vb  = new MSF.VideoBlock();

            // NOTE(review): Stream.Read return values are ignored throughout; a short
            // read would silently parse stale buffer bytes — confirm inputs are always
            // fully buffered.
            _stream.Read(tmp, 0, 2);
            vb.VideoBlockHeaderLength = (uint)BitTools.MaskBits(tmp, 0, 12);

            // Compute the number of video frames; each per-frame parameter set is 5 bytes.
            vb.VideoFrameNum = vb.VideoBlockHeaderLength / 5;

            // Payload of the first frame starts right after the header plus its CRC.
            uint addr = (uint)offset + vb.VideoBlockHeaderLength + CRC_Size;

            for (int i = 0; i < vb.VideoFrameNum; i++)
            {
                MSF.VideoBlock.VideoFrame f = new MSF.VideoBlock.VideoFrame();
                // Start address of this video segment.
                f.RelativeStartAddr = addr;

                // Video segment length (16 bits).
                _stream.Read(tmp, 0, 2);
                f.Length = (uint)BitTools.MaskBits(tmp, 0, 16);

                // Frame type: I-frame or P-frame (low 3 bits; 0 == I-frame).
                _stream.Read(tmp, 0, 1);
                f.FrameMode = (uint)BitTools.MaskBits(tmp, 0, 3);
                if (vb.FirstIFrameIdx == -1 && f.FrameMode == 0)
                {
                    vb.FirstIFrameIdx = i;
                }
                // Relative playback time, present only when the flag bit is set.
                if (BitTools.MaskBits(tmp, 7, 1) == 1)  // relative-time flag
                {
                    _stream.Read(tmp, 0, 2);
                    f.RelativeTime = BitTools.MaskBits(tmp, 0, 16) * timeUint;
                }

                // Copy this segment's video payload into the frame's byte array.
                // (Assumes the helper restores the stream position so the next header
                // read continues correctly — TODO confirm against BitTools.)
                //f._vs = BitTools.CopyToNewStream(_stream, (long)f.RelativeStartAddr, (int)f.Length);
                BitTools.CopyFromStreamToBytesArray(_stream, (long)f.RelativeStartAddr, (int)f.Length, ref f._data);

                vb.vFrames.Add(f);
                // Start address of the next video segment.
                addr += f.Length;
            }

            ParseNALHeader(vb);

            return(vb);
        }
        /// <summary>
        /// MediaStreamSource entry point: builds the audio (HE-AAC) and video (H.264)
        /// stream descriptions, waits for the first video block to extract SPS/PPS,
        /// and reports the media as opened.
        /// </summary>
        protected override void OpenMediaAsync()
        {
            //WaveFormatEx
            HeAacWaveFormat      aacf = new HeAacWaveFormat();
            WaveFormatExtensible wfx  = new WaveFormatExtensible();

            aacf.WaveFormatExtensible = wfx;

            aacf.WaveFormatExtensible.FormatTag             = 0x1610; //0xFF;//0x1610;
            aacf.WaveFormatExtensible.Channels              = 2;      //
            aacf.WaveFormatExtensible.BlockAlign            = 1;
            aacf.WaveFormatExtensible.BitsPerSample         = 0;      //16; //unkonw set to 0
            aacf.WaveFormatExtensible.SamplesPerSec         = 24000;  //  from 8000 to 96000 Hz
            aacf.WaveFormatExtensible.AverageBytesPerSecond = 0;      //wfx.SamplesPerSec * wfx.Channels * wfx.BitsPerSample / wfx.BlockAlign;
            aacf.WaveFormatExtensible.Size = 12;

            // Extra 3 words in WAVEFORMATEX
            // refer to http://msdn.microsoft.com/en-us/library/windows/desktop/dd757806(v=vs.85).aspx
            aacf.wPayloadType = 0x0; //Audio Data Transport Stream (ADTS). The stream contains an adts_sequence, as defined by MPEG-2.
            aacf.wAudioProfileLevelIndication = 0xFE;
            aacf.wStructType = 0;

            string codecPrivateData = aacf.ToHexString();

            Dictionary <MediaStreamAttributeKeys, string> audioStreamAttributes = new Dictionary <MediaStreamAttributeKeys, string>();

            audioStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
            audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);

            // Block until the producer queues the first video block; its first I-frame
            // supplies the SPS/PPS needed for the video codec private data.
            m_vbuffer.WaitForWorkItem();

            // NOTE(review): Dequeue() returning null here would throw NRE on
            // .CommandParameter before the null check below — confirm the work queue
            // never yields null after WaitForWorkItem.
            m_curVideoBlk = m_vbuffer.Dequeue().CommandParameter as MSF.VideoBlock;

            if (m_curVideoBlk == null)
            {
                // NOTE(review): returning without ReportOpenMediaCompleted (or an error
                // report) leaves the MediaElement waiting forever — verify intended.
                return;
            }
            vIdx = 0;
            fNum = (int)m_curVideoBlk.VideoFrameNum;

            // Build the H.264 codec private data from the first I-frame's SPS/PPS.
            H264NalFormat h264f = new H264NalFormat();

            h264f.sps = m_curVideoBlk.FirstIFrameInfo.sps;
            h264f.pps = m_curVideoBlk.FirstIFrameInfo.pps;
            string s = h264f.ToHexString();

            //Video
            Dictionary <MediaStreamAttributeKeys, string> videoStreamAttributes = new Dictionary <MediaStreamAttributeKeys, string>();

            videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC]      = "H264";
            videoStreamAttributes[MediaStreamAttributeKeys.Height]           = "240";
            videoStreamAttributes[MediaStreamAttributeKeys.Width]            = "320";
            videoStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = s;//"0000016742E00D96520283F40500000168CE388000";
            videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);

            //Media
            Dictionary <MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary <MediaSourceAttributesKeys, string>();

            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(6).Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek]  = "0";

            List <MediaStreamDescription> mediaStreamDescriptions = new List <MediaStreamDescription>();

#if !DEBUG
            // Emulator does not support HE-AAC
            mediaStreamDescriptions.Add(audioStreamDescription);
#endif

            mediaStreamDescriptions.Add(videoStreamDescription);

            this.AudioBufferLength = 500;
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
        /// <summary>
        /// Returns the next video sample, blocking for a fresh video block from the
        /// producer when the current block runs out of frames. Emits a null-stream
        /// sample once the producer signals end-of-stream.
        /// </summary>
        private MediaStreamSample GetVideoSample2()
        {
            //Utility.Trace(" GetVideoSample2 ");

            // Sentinel sample with a null stream: end-of-stream marker.
            MediaStreamSample endOfStreamSample =
                new MediaStreamSample(this.videoStreamDescription, null, 0, 0, 0, emptyDict);

            if (m_VmediaStreamEnd)
            {
                Utility.Trace(" GetVideoSample2 NULL stream has been sent!");
                return endOfStreamSample;
            }

            if (vIdx >= fNum)
            {
                // Current block exhausted: drop it and request the next one.
                m_curVideoBlk = null;

                Utility.Trace(" GetVideoSample,m_vbuffer WaitForWorkItem ");
                m_vbuffer.WaitForWorkItem();
                Utility.Trace(" GetVideoSample,m_vbuffer Got an Item ");

                m_curVideoBlk = (MSF.VideoBlock)m_vbuffer.Dequeue().CommandParameter;
                if (m_curVideoBlk == null)
                {
                    m_VmediaStreamEnd = true;
                    Utility.Trace(" GetVideoSample2 NULL stream has been sent!");
                    return endOfStreamSample;
                }

                Utility.Trace(String.Format(" GetVideoSample,m_vbuffer.count = {0} ", m_vbuffer.Count()));

                // Buffer drained: clear the "buffer full" gate so the producer resumes.
                if (m_vbuffer.Count() == 0)
                {
                    m_vBufferFullEvent.Reset();
                }

                vIdx = 0;
                fNum = (int)m_curVideoBlk.VideoFrameNum;
            }

            // Wrap the current frame's bytes in a fresh stream for the sample.
            var frame       = m_curVideoBlk.vFrames[vIdx];
            int frameLength = (int)frame.Length;
            var frameStream = new MemoryStream();
            frameStream.Write(frame._data, 0, frameLength);

            MediaStreamSample result = new MediaStreamSample(
                this.videoStreamDescription,
                frameStream,
                0,
                frameLength,
                frame.RelativeTime,   // timestamp, in 100-nanosecond units
                emptyDict);

            vIdx++;
            return result;
        }
        /// <summary>
        /// MediaStreamSource entry point: describes the HE-AAC audio and H.264 video
        /// streams, pulls the first video block to obtain SPS/PPS, then reports the
        /// media as opened.
        /// </summary>
        protected override void OpenMediaAsync()
        {
            // --- Audio: build the WAVEFORMATEX-based HE-AAC description ---
            WaveFormatExtensible waveFormat = new WaveFormatExtensible();
            HeAacWaveFormat aacFormat = new HeAacWaveFormat();
            aacFormat.WaveFormatExtensible = waveFormat;

            aacFormat.WaveFormatExtensible.FormatTag = 0x1610; //0xFF;//0x1610;
            aacFormat.WaveFormatExtensible.Channels = 2;
            aacFormat.WaveFormatExtensible.BlockAlign = 1;
            aacFormat.WaveFormatExtensible.BitsPerSample = 0;       // unknown -> 0
            aacFormat.WaveFormatExtensible.SamplesPerSec = 24000;   // valid range 8000..96000 Hz
            aacFormat.WaveFormatExtensible.AverageBytesPerSecond = 0;
            aacFormat.WaveFormatExtensible.Size = 12;

            // Extra 3 words in WAVEFORMATEX
            // refer to http://msdn.microsoft.com/en-us/library/windows/desktop/dd757806(v=vs.85).aspx
            aacFormat.wPayloadType = 0x0; // ADTS: stream contains an adts_sequence per MPEG-2
            aacFormat.wAudioProfileLevelIndication = 0xFE;
            aacFormat.wStructType = 0;

            string audioCodecData = aacFormat.ToHexString();

            var audioAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            audioAttributes[MediaStreamAttributeKeys.CodecPrivateData] = audioCodecData;
            audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioAttributes);

            // --- Video: wait for the first block to extract SPS/PPS ---
            m_vbuffer.WaitForWorkItem();

            m_curVideoBlk = m_vbuffer.Dequeue().CommandParameter as MSF.VideoBlock;
            if (m_curVideoBlk == null)
            {
                return;
            }

            vIdx = 0;
            fNum = (int)m_curVideoBlk.VideoFrameNum;

            H264NalFormat nalFormat = new H264NalFormat();
            nalFormat.sps = m_curVideoBlk.FirstIFrameInfo.sps;
            nalFormat.pps = m_curVideoBlk.FirstIFrameInfo.pps;
            string videoCodecData = nalFormat.ToHexString();

            var videoAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            videoAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
            videoAttributes[MediaStreamAttributeKeys.Height] = "240";
            videoAttributes[MediaStreamAttributeKeys.Width] = "320";
            videoAttributes[MediaStreamAttributeKeys.CodecPrivateData] = videoCodecData;//"0000016742E00D96520283F40500000168CE388000";
            videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoAttributes);

            // --- Source-level attributes ---
            var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            sourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(6).Ticks.ToString(CultureInfo.InvariantCulture);
            sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";

            var streamDescriptions = new List<MediaStreamDescription>();

            #if !DEBUG
            // Emulator does not support HE-AAC
            streamDescriptions.Add(audioStreamDescription);
            #endif

            streamDescriptions.Add(videoStreamDescription);

            this.AudioBufferLength = 500;
            this.ReportOpenMediaCompleted(sourceAttributes, streamDescriptions);
        }
Example #6
0
        /// <summary>
        /// Parses the video block that starts at <paramref name="offset"/>: reads the
        /// block header to learn the frame count, then reads each frame's 5-byte
        /// parameter set and copies its payload bytes into the returned block.
        /// </summary>
        /// <param name="offset">Absolute byte offset of the block within the stream.</param>
        /// <param name="_stream">Seekable stream holding the media data.</param>
        /// <returns>The populated <c>MSF.VideoBlock</c>, after NAL-header extraction.</returns>
        public MSF.VideoBlock ParseVideoBlock(int offset, Stream _stream)
        {
            _stream.Seek(offset, SeekOrigin.Begin);

            byte[] scratch = new byte[4];
            MSF.VideoBlock block = new MSF.VideoBlock();

            // Header length is carried in the low 12 bits of the first two bytes.
            _stream.Read(scratch, 0, 2);
            block.VideoBlockHeaderLength = (uint)BitTools.MaskBits(scratch, 0, 12);

            // Each per-frame parameter set occupies 5 bytes of the header.
            block.VideoFrameNum = block.VideoBlockHeaderLength / 5;

            // The first frame's payload starts right after the header plus its CRC.
            uint segmentAddr = (uint)offset + block.VideoBlockHeaderLength + CRC_Size;

            for (int frameIndex = 0; frameIndex < block.VideoFrameNum; frameIndex++)
            {
                MSF.VideoBlock.VideoFrame frame = new MSF.VideoBlock.VideoFrame();

                // Start address of this video segment.
                frame.RelativeStartAddr = segmentAddr;

                // Segment length: 16 bits.
                _stream.Read(scratch, 0, 2);
                frame.Length = (uint)BitTools.MaskBits(scratch, 0, 16);

                // Frame type in the low 3 bits: 0 marks an I-frame.
                _stream.Read(scratch, 0, 1);
                frame.FrameMode = (uint)BitTools.MaskBits(scratch, 0, 3);
                if (block.FirstIFrameIdx == -1 && frame.FrameMode == 0)
                {
                    block.FirstIFrameIdx = frameIndex;
                }

                // Optional relative playback time, present when bit 7 is set.
                if (BitTools.MaskBits(scratch, 7, 1) == 1)
                {
                    _stream.Read(scratch, 0, 2);
                    frame.RelativeTime = BitTools.MaskBits(scratch, 0, 16) * timeUint;
                }

                // Copy this segment's raw video bytes into the frame.
                BitTools.CopyFromStreamToBytesArray(_stream, (long)frame.RelativeStartAddr, (int)frame.Length, ref frame._data);

                block.vFrames.Add(frame);

                // Advance to the start address of the next segment.
                segmentAddr += frame.Length;
            }

            ParseNALHeader(block);

            return block;
        }
Example #7
0
        /// <summary>
        /// Scans the block's first I-frame for NAL units delimited by the 00 00 01
        /// start code and stores its SPS, PPS, and SEI payloads (needed to build the
        /// H.264 codec private data) into <c>vb.FirstIFrameInfo</c>.
        /// </summary>
        /// <param name="vb">Video block whose first I-frame is inspected.</param>
        /// <returns>
        /// false when the block has no I-frame or does not begin with a NAL start
        /// code; true otherwise.
        /// </returns>
        public bool ParseNALHeader(MSF.VideoBlock vb)
        {
            // Need to get the SPS and PPS of the stream, the first frame must be I frame, other frames cannot be played.
            if (vb.FirstIFrameIdx == -1)
            {
                return(false);
            }

            // An I-frame exists: walk its bytes looking for NAL start codes.
            byte[]        tmp          = new byte[32];
            byte[]        NALStartCode = new byte[] { 0x00, 0x00, 0x01 };
            List <byte[]> nals         = new List <byte[]>();


            Stream s = new MemoryStream(); //vb.vFrames[vb.FirstIFrameIdx]._vs;

            s.Write(vb.vFrames[vb.FirstIFrameIdx]._data, 0,
                    (int)vb.vFrames[vb.FirstIFrameIdx].Length);

            // The frame must begin with a start code, otherwise bail out.
            s.Seek(0, SeekOrigin.Begin);
            s.Read(tmp, 0, 3);

            if (BitTools.FindBytePattern(tmp, NALStartCode) != 0)
            {
                return(false);
            }

            // Collect each NAL unit: read a 32-byte window, find the next start code,
            // copy the bytes before it, then seek past that start code and repeat.
            // NOTE(review): tmp is reused without clearing and Read's count is ignored,
            // so near end-of-stream stale bytes could fake a start-code match — confirm
            // NAL units here are always shorter than 32 bytes and fully buffered.
            while (true)
            {
                long pos = s.Position;
                s.Read(tmp, 0, 32);
                int nextNALidx = BitTools.FindBytePattern(tmp, NALStartCode);

                if (nextNALidx == -1)
                {
                    break;
                }
                byte[] nal = new byte[nextNALidx];
                BitTools.CopyToBytesArray(tmp, nal, nextNALidx);
                nals.Add(nal);

                // rollback pos
                pos += nextNALidx + 3; // skip past the next start code
                s.Seek(pos, SeekOrigin.Begin);
            }
            s.Close();

            vb.FirstIFrameInfo = new MSF.VideoBlock.NAL_SPS_PPS_SEI();

            // Dispatch each NAL unit by its first byte (NAL header); the payload copied
            // below skips that header byte.
            foreach (var v in nals)
            {
                switch (v[0])
                {
                case 0x67:      //SPS http://www.eefocus.com/czzheng/blog/11-09/230262_65ff0.html
                    vb.FirstIFrameInfo.sps = new byte[v.Length - 1];
                    BitTools.CopyToBytesArray(v, 1, vb.FirstIFrameInfo.sps, v.Length - 1);
                    break;

                case 0x68:      //PPS
                    vb.FirstIFrameInfo.pps = new byte[v.Length - 1];
                    BitTools.CopyToBytesArray(v, 1, vb.FirstIFrameInfo.pps, v.Length - 1);
                    break;

                case 0x06:      // SEI
                    vb.FirstIFrameInfo.sei = new byte[v.Length - 1];
                    BitTools.CopyToBytesArray(v, 1, vb.FirstIFrameInfo.sei, v.Length - 1);
                    break;

                default:
                    break;
                }
            }
            return(true);
        }