Example #1
 private void run()
 {
     while (!_disposeEvent.WaitOne(0))
     {
         if (_buffers.Count > 0)
         {
             if (_buffers.Count > 1)
             {
                 Console.WriteLine("count: " + _buffers.Count);
             }
             var           buffers    = _buffers.Dequeue();
             byte[]        packetData = getFrameBuf(buffers);
             int           nalType    = packetData[_startCode.Length] & 0x1F;
             CCTVFrameType streamType = CCTVFrameType.StreamFrame;
             if (nalType == 0x05) // NAL type 5 = IDR slice (key frame)
             {
                 streamType = CCTVFrameType.StreamKeyFrame;
             }
             onStream(packetData, streamType, DateTime.Now);
             onFrame(packetData, streamType);
         }
         else
         {
             Thread.Sleep(1);
         }
     }
 }
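
The expression packetData[_startCode.Length] & 0x1F in run() reads the nal_unit_type field from the low five bits of the first byte after the Annex-B start code. Below is a minimal stand-alone sketch of the same classification with the standard H.264 type values spelled out; the helper names are illustrative, not from the original project:

 // Illustrative sketch: classify an Annex-B H.264 packet by its NAL unit type.
 // GetNalType/Classify are assumed helper names, not part of the original code.
 static int GetNalType(byte[] packet, int startCodeLength)
 {
     // NAL header byte layout: forbidden_zero_bit (1) | nal_ref_idc (2) | nal_unit_type (5)
     return packet[startCodeLength] & 0x1F;
 }

 static CCTVFrameType Classify(byte[] packet, int startCodeLength)
 {
     // Type 5 is a slice of an IDR picture, i.e. a key frame; other types
     // (1 = non-IDR slice, 7 = SPS, 8 = PPS, ...) are treated as ordinary frames here.
     return GetNalType(packet, startCodeLength) == 0x05
         ? CCTVFrameType.StreamKeyFrame
         : CCTVFrameType.StreamFrame;
 }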
Example #2
        protected void onStream(byte[] stream, CCTVFrameType type, DateTime time)
        {
            var handler = StreamEvent;

            if (handler != null)
            {
                handler(new VideoStreamPacket(stream, type, time));
            }
        }
        private static VideoStreamPacket Decode(MemoryStream ms)
        {
            int length = PacketBase.ReadInt(ms);

            byte[]        buffer = PacketBase.ReadByteArray(ms, length);
            CCTVFrameType type   = (CCTVFrameType)PacketBase.ReadInt(ms);
            DateTime      time   = PacketBase.ReadTime(ms);

            return new VideoStreamPacket(buffer, type, time);
        }
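
Decode implies a simple field order on the wire: payload length, payload bytes, frame type, timestamp. A hedged sketch of the inverse operation is shown below; the PacketBase.WriteInt/WriteTime helpers are hypothetical counterparts of the readers above and may not exist in the original project:

        // Hypothetical encoder mirroring Decode's read order; WriteInt/WriteTime are
        // assumed counterparts of PacketBase.ReadInt/ReadTime.
        private static void Encode(MemoryStream ms, VideoStreamPacket packet)
        {
            PacketBase.WriteInt(ms, packet.Buffer.Length);    // payload length
            ms.Write(packet.Buffer, 0, packet.Buffer.Length); // raw payload bytes
            PacketBase.WriteInt(ms, (int)packet.FrameType);   // frame type
            PacketBase.WriteTime(ms, packet.Time);            // timestamp
        }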
Example #4
        protected void onFrame(byte[] stream, CCTVFrameType frameType)
        {
            var handler = RtpFrameEvent;

            if (handler != null)
            {
                var frame = StreamToFrame.GetRtpFrame(stream);
                handler(frame, frameType);
            }
        }
        private void decodeE0(byte[] buffer, int index) //视频流
        {
            int length = readUshort(buffer, 4);         //PES packet length:16 位字段,指出了PES 分组中跟在该字段后的字节数目。值为0 表示PES 分组长度要么没有规定要么没有限制。这种情况只允许出现在有效负载包含来源于传输流分组中某个视频基本流的字节的PES 分组中。

            if (length != buffer.Length - 6)
            {
                onStreamBuffer();
                return;
            }
            int start = 8 + buffer[8];

            index = start + 1;
            int startcodeLength = getStreamStartCode(buffer, index);

            if (startcodeLength == 4 || startcodeLength == 3)
            {
                int nalu = buffer[index + startcodeLength];
                int nal  = nalu & 0x1F;
                if (nal == 7) // 7 = sequence parameter set (SPS)
                {
                    tryChanged(ref _sps, subBytes(buffer, index));
                    return;        // getStreamBuffer();
                }
                else if (nal == 8) // 8 = picture parameter set (PPS)
                {
                    tryChanged(ref _pps, subBytes(buffer, index));
                    return;        // getStreamBuffer();
                }
                else if (nal == 5) // 5 = slice of an IDR picture (I-frame)
                {
                    _frameType = CCTVFrameType.StreamKeyFrame;
                }
                else
                {
                    _frameType = CCTVFrameType.StreamFrame;
                }
            }
            setStreamBuffer(buffer, index);
            return;
        }
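
decodeE0 expects getStreamStartCode to return 3 or 4 for the two Annex-B start-code forms (00 00 01 and 00 00 00 01). A plausible implementation follows, assuming a return value of 0 when no start code is found at the given offset:

        // Sketch of an Annex-B start-code probe; the 0-on-miss convention is an assumption.
        private static int getStreamStartCode(byte[] buffer, int index)
        {
            if (index + 4 <= buffer.Length &&
                buffer[index] == 0x00 && buffer[index + 1] == 0x00 &&
                buffer[index + 2] == 0x00 && buffer[index + 3] == 0x01)
            {
                return 4; // 00 00 00 01
            }
            if (index + 3 <= buffer.Length &&
                buffer[index] == 0x00 && buffer[index + 1] == 0x00 && buffer[index + 2] == 0x01)
            {
                return 3; // 00 00 01
            }
            return 0;     // no start code at this offset
        }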
Example #6
        /// <summary>
        /// Real-time preview callback function
        /// </summary>
        /// <param name="lRealHandle">current preview handle</param>
        /// <param name="dwDataType">data type: 1 = system header, 2 = stream data (composite stream, or video-only data when audio and video are separated), 3 = audio data</param>
        /// <param name="pBuffer">pointer to the buffer holding the data</param>
        /// <param name="dwBufSize">buffer size</param>
        /// <param name="pUser">user data</param>
        private void onRealData(int lRealHandle, uint dwDataType, IntPtr pBuffer, uint dwBufSize, IntPtr pUser)
        {
            byte[] buffer = DataConverter.ToByteArray(pBuffer, (int)dwBufSize);
            if (dwDataType == NetDvrDll32.NET_DVR_SYSHEAD)
            {
                onHeader(new HikHeaderPacket(buffer));
            }
            else if (dwDataType == NetDvrDll32.NET_DVR_STREAMDATA)
            {
                //_hikToStand.UpdateStandardStream(buffer);
                var infos = _hikStream.Update(DateTime.Now, buffer);
                foreach (var packets in infos)
                {
                    if (packets != null && packets.Length > 0)
                    {
                        int  length = packets.Sum(_ => _.Buffer.Length);
                        bool isKey  = _keyDetector.Update(packets.First().Time, length);

                        CCTVFrameType type = CCTVFrameType.StreamFrame;
                        if (isKey)
                        {
                            type = CCTVFrameType.StreamKeyFrame;
                        }

                        onStream(packets.First().Buffer, type, packets.First().Time); // first packet of the merged video stream
                        for (int i = 1; i < packets.Length; i++)                      // remaining packets of the merged video stream
                        {
                            onStream(packets[i].Buffer, CCTVFrameType.StreamFrame, packets[i].Time);
                        }
                    }
                }
            }
            else if (dwDataType == NetDvrDll32.NET_DVR_AUDIOSTREAMDATA)
            {
                onStream(buffer, CCTVFrameType.AudioFrame, DateTime.Now);
            }
        }
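
The _keyDetector.Update(time, length) call suggests a heuristic that flags key frames from frame size alone, since key frames are typically much larger than the surrounding delta frames. The detector class itself is not shown; the sketch below is only an assumption about how such a size-based check might look:

        // Hypothetical size-based key-frame heuristic; the threshold, the running
        // average and the unused time parameter are assumptions for illustration only.
        class KeyFrameDetector
        {
            private double _averageLength;

            public bool Update(DateTime time, int length)
            {
                // Flag frames well above the running average as key frames.
                bool isKey = _averageLength > 0 && length > _averageLength * 3;

                _averageLength = _averageLength <= 0
                    ? length
                    : _averageLength * 0.9 + length * 0.1;
                return isKey;
            }
        }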
 public VideoStreamPacket(byte[] buffer, CCTVFrameType frameType, DateTime time)
 {
     Buffer    = buffer;
     FrameType = frameType;
     Time      = time;
 }
 private void onRtpFrame(RtpFrame frame, CCTVFrameType frameType)
 {
     _rtspServer?.UpdateRtpFrame(frame);
 }
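
Examples #2 and #4 raise StreamEvent and RtpFrameEvent, and the handler above forwards RTP frames to an RTSP server. A hedged wiring sketch from the consumer side is shown below; the VideoSource class name and the exact delegate shapes are assumptions inferred from how the handlers are invoked:

 // Hypothetical subscriber; VideoSource and the event signatures are assumed
 // from the handler calls shown above.
 void Wire(VideoSource source)
 {
     source.StreamEvent += packet =>
         Console.WriteLine($"{packet.Time:HH:mm:ss.fff} {packet.FrameType} {packet.Buffer.Length} bytes");

     source.RtpFrameEvent += (frame, frameType) =>
         Console.WriteLine("RTP frame received, type " + frameType);
 }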