Code Example #1
        //private FileStream fs = new FileStream(@"D:\monitorVideoStreambefore.264", FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);
        //private FileStream fs1 = new FileStream(@"D:\monitorVideoStreamAfter.264", FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);
        //private int writeCount = 0;
        //private int writeCount1 = 0;

        /// <summary>
        /// Send data
        /// </summary>
        /// <param name="mediaData"></param>
        /// <param name="sender"></param>
        public void SendData(MediaData mediaData, string sender)
        {
            if (mediaData.StreamType == StreamType.VIDEO_H264)
            {
                //if (writeCount < 1200)
                //{
                //    fs.Write(mediaData.Data, 0, (int)mediaData.Size);
                //    writeCount++;
                //}

                SendVideoData(mediaData, sender);
            }
            if (mediaData.StreamType == StreamType.AUDIO_G711A || mediaData.StreamType == StreamType.AUDIO_G711U)
            {
                SendAudioData(mediaData, sender);
            }
        }
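
All of the examples on this page hand frames around through a (cameraNo, MediaData, sender) callback. The following is a minimal sketch of the MediaData shape and the callback delegate that these calls imply; the real definitions live elsewhere in esdk_Cgw, so the field types and enum members here are inferred from usage and should be read as assumptions, not the project's actual declarations.

public enum MediaDataType { FRAME_DATA }
public enum StreamType { VIDEO_H264, AUDIO_G711A, AUDIO_G711U }
public enum FrameDataType
{
    H264_SPS_NALU_TYPE, H264_PPS_NALU_TYPE, H264_SEI_NALU_TYPE,
    H264_IDR_NALU_TYPE, H264_NALU_TYPE_UNDEFINED
}

//Container passed between the monitor callbacks and the senders (fields inferred from usage)
public class MediaData
{
    public byte[] Data { get; set; }            //frame payload
    public uint Size { get; set; }              //payload length in bytes
    public StreamType StreamType { get; set; }
    public MediaDataType DataType { get; set; }
    public FrameDataType FrameType { get; set; }
}

//Delegate used by SendData/SendVideoData/SendAudioData to hand frames upstream
public delegate void MediaDataCallBack(string cameraNo, MediaData mediaData, string sender);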
Code Example #2
File: VideoPipe.cs Project: wgyswqs/esdk_Cgw
        /// <summary>
        /// Start sending stream data
        /// </summary>
        /// <param name="mediaData"></param>
        public void SenderData(MediaData mediaData)
        {
            NLogEx.LoggerEx logEx = new NLogEx.LoggerEx(log);
            try
            {
                if (pipedStream == null)
                {
                    return;
                }
                //Write the data synchronously
                byte[] byteArray1 = BitConverter.GetBytes(mediaData.Size);
                byte[] byteArray2 = BitConverter.GetBytes((Int32)mediaData.StreamType);
                byte[] byteArray3 = BitConverter.GetBytes((Int32)mediaData.DataType);
                byte[] byteArray4 = BitConverter.GetBytes((Int32)mediaData.FrameType);
                byte[] byteArray5 = mediaData.Data;
                //Combine the pieces into a single buffer
                byte[] byteArray = new byte[byteArray1.Length + byteArray2.Length + byteArray3.Length + byteArray4.Length + byteArray5.Length];

                byteArray1.CopyTo(byteArray, 0);
                byteArray2.CopyTo(byteArray, byteArray1.Length);
                byteArray3.CopyTo(byteArray, byteArray1.Length + byteArray2.Length);
                byteArray4.CopyTo(byteArray, byteArray1.Length + byteArray2.Length + byteArray3.Length);
                byteArray5.CopyTo(byteArray, byteArray1.Length + byteArray2.Length + byteArray3.Length + byteArray4.Length);
                //Write to the pipe
                //pipedStream.BeginWrite(byteArray, 0, byteArray.Length, new AsyncCallback(CallbackFun), null);
                pipedStream.Write(byteArray, 0, byteArray.Length);
            }
            catch (Exception ex)
            {
                logEx.Error(ex.ToString());
                if (ex is System.IO.IOException)
                {
                    this.Stop();
                }
            }
        }
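
SenderData frames each MediaData as a fixed 16-byte header (Size, StreamType, DataType and FrameType, each written as a 4-byte integer by BitConverter, little-endian on typical Windows hosts) followed by the payload. A reader on the other end of the pipe would reverse that layout. The sketch below is a hypothetical consumer, assuming the MediaData shape sketched earlier and a generic Stream for the pipe; it is not part of the gateway code.

using System;
using System.IO;

static class PipeFrameReader
{
    //Read one frame written by VideoPipe.SenderData: 16-byte header, then Size bytes of payload
    public static MediaData ReadFrame(Stream pipe)
    {
        byte[] header = ReadExactly(pipe, 16);
        uint size = BitConverter.ToUInt32(header, 0);
        return new MediaData
        {
            Size = size,
            StreamType = (StreamType)BitConverter.ToInt32(header, 4),
            DataType = (MediaDataType)BitConverter.ToInt32(header, 8),
            FrameType = (FrameDataType)BitConverter.ToInt32(header, 12),
            Data = ReadExactly(pipe, (int)size)
        };
    }

    //Read exactly count bytes, or throw if the pipe closes before a full frame arrives
    private static byte[] ReadExactly(Stream pipe, int count)
    {
        byte[] buffer = new byte[count];
        int offset = 0;
        while (offset < count)
        {
            int read = pipe.Read(buffer, offset, count - offset);
            if (read <= 0)
            {
                throw new IOException("Pipe closed before a full frame was read.");
            }
            offset += read;
        }
        return buffer;
    }
}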
Code Example #3
File: TiandyVideoMonitor.cs Project: eSDK/esdk_Cgw
        /// <summary>
        /// Stream callback handler, used to relay the data
        /// </summary>
        /// <param name="playfd">Preview handle</param>
        /// <param name="datatype">Video stream type</param>
        /// <param name="buf">Stream data</param>
        /// <param name="size">Stream data size</param>
        /// <param name="usr">User context data</param>
        private void TiandyRealDataCallbackFunc(int playfd, int datatype, byte[] buf, uint size, int usr)
        {
            NLogEx.LoggerEx logEx = new NLogEx.LoggerEx(log);

            string cameraNo = null;
            try
            {

                StreamType streamType = StreamType.VIDEO_H264;
                //Determine the stream type. The callback fires very frequently, so this logic is kept inline rather than extracted into a method, to avoid the call overhead.
                //Supported stream types break out of the switch; unsupported types are discarded immediately with a return.
                switch (datatype)
                {
                    case (int)TiandyStreamType.STREAM_TYPE_VIDEO_FRAME_I:
                        streamType = StreamType.VIDEO_H264;
                        break;
                    case (int)TiandyStreamType.STREAM_TYPE_VIDEO_FRAME_P:
                        streamType = StreamType.VIDEO_H264;
                        break;
                    case (int)TiandyStreamType.STREAM_TYPE_VIDEO_FRAME_B:
                        streamType = StreamType.VIDEO_H264;
                        break;

                    //For audio, only G711A and G711U are accepted; everything else is discarded
                    case (int)TiandyStreamType.STREAM_TYPE_AUDIO_G711A:
                        streamType = StreamType.AUDIO_G711A;
                        break;
                    case (int)TiandyStreamType.STREAM_TYPE_AUDIO_G711U:
                        streamType = StreamType.AUDIO_G711U;
                        break;
                    default:
                        //Unsupported type: discard the data and return
                        //logEx.Warn("This stream type is not support. Chuck the data.StreamType:{0};Camera no:{1}", datatype, cameraNo);
                        return;
                }

                if (this.handelOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
                {
                    try
                    {
                        if (!this.videoHandleCameraDic.ContainsKey(playfd))
                        {
                            logEx.Error("The video data handle is not found.Handle:{0}", playfd);
                            return;
                        }
                        cameraNo = this.videoHandleCameraDic[playfd];
                        if (string.IsNullOrEmpty(cameraNo))
                        {
                            return;
                        }
                    }
                    finally
                    {
                        this.handelOperateLock.ExitReadLock();
                    }
                }

                //If the read lock could not be acquired, cameraNo is still null; discard the data
                if (string.IsNullOrEmpty(cameraNo))
                {
                    return;
                }

                if (streamType == StreamType.AUDIO_G711A || streamType == StreamType.AUDIO_G711U)
                {
                    //For audio streams, check the mic state and forward audio only while the mic is on
                    if (this.micOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
                    {
                        try
                        {
                            if (this.cameraMicStatusDic.ContainsKey(cameraNo))
                            {
                                //If the mic is not on, do not forward the audio stream
                                if (!this.cameraMicStatusDic[cameraNo])
                                {
                                    //logEx.Warn("This data is audio,but the mic is off.Chuck the data.Camera no:{0}", cameraNo);
                                    return;
                                }
                            }
                            else
                            {
                                //The mic defaults to off, so if cameraMicStatusDic does not contain this camera, treat the mic as off and discard the audio stream
                                //logEx.Warn("This data is audio,but the mic is off.Chuck the data.Camera no:{0}", cameraNo);
                                return;
                            }
                        }
                        finally
                        {
                            this.micOperateLock.ExitReadLock();
                        }
                    }

                }

                MediaData mediaData = new MediaData();
                mediaData.Data = buf;
                mediaData.DataType = MediaDataType.FRAME_DATA;
                mediaData.StreamType = streamType;
                mediaData.Size = size;

                //Forward the stream to the callback
                this.dataCallBack(cameraNo, mediaData, sender);
            }
            catch (Exception e)
            {
                logEx.Error("Send the media data failed.The camera No is {0}.Execption message:{1}", cameraNo, e.Message);
            }
        }
Code Example #4
File: IvsVideoMonitor.cs Project: eSDK/esdk_Cgw
        /// <summary>
        /// Stream callback function:
        /// 1. Look up the media data sender in videoHandleCameraDic by the preview handle.
        /// 2. Use the camera number to get the mic state.
        /// 3. If the stream is audio, decide from the mic state whether it should be forwarded.
        /// </summary>
        /// <param name="handle">Playback handle</param>
        /// <param name="pRawFrameInfo">Live stream parameters</param>
        /// <param name="pBuf">Stream data</param>
        /// <param name="uiBufSize">Stream data size</param>
        /// <param name="pUserData">User context data</param>
        private void IvsRealPlayCallBackRaw(int handle, IvsRawFrameInfo pRawFrameInfo, byte[] pBuf, UInt32 uiBufSize, string pUserData)
        {
            NLogEx.LoggerEx logEx = new NLogEx.LoggerEx(log);
            //logEx.Trace("Enter:IvsRealPlayCallBackRaw(),{0}", pUserData);

            MediaDataSender mediaDataSender = null;
            try
            {
                if (this.handelOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
                {
                    try
                    {
                        if (!this.videoHandleCameraDic.ContainsKey(handle))
                        {
                            logEx.Error("The video data handle is not found.Handle:{0}", handle);
                            return;
                        }
                        mediaDataSender = this.videoHandleCameraDic[handle];
                    }
                    finally
                    {
                        this.handelOperateLock.ExitReadLock();
                    }
                }

                if (mediaDataSender == null)
                {
                    return;
                }

                StreamType streamType = StreamType.VIDEO_H264;
                //Determine the stream type. The callback fires very frequently, so this logic is kept inline rather than extracted into a method, to avoid the call overhead.
                //Supported stream types break out of the switch; unsupported types are discarded immediately with a return.
                switch (pRawFrameInfo.StreamType)
                {
                    //For audio, only G711A and G711U are accepted; everything else is discarded
                    case (int)IvsStreamType.PAY_LOAD_TYPE_PCMU:
                        streamType = StreamType.AUDIO_G711U;
                        break;
                    case (int)IvsStreamType.PAY_LOAD_TYPE_PCMA:
                        streamType = StreamType.AUDIO_G711A;
                        break;

                    //Only H264 video streams are accepted
                    case (int)IvsStreamType.PAY_LOAD_TYPE_H264:
                        //Standard H264 video stream; handle it as video
                        streamType = StreamType.VIDEO_H264;
                        break;
                    default:
                        //Unsupported type: discard the data and return
                        //logEx.Warn("This stream type is not support. Chuck the data.StreamType:{0};Camera no:{1}",
                        //            Enum.GetName(typeof(IvsStreamType), pRawFrameInfo.StreamType),
                        //            cameraNo);
                        return;
                }

                if (streamType == StreamType.AUDIO_G711A || streamType == StreamType.AUDIO_G711U)
                {
                    //For audio streams, check the mic state and forward audio only while the mic is on
                    if (this.micOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
                    {
                        try
                        {
                            if (this.cameraMicStatusDic.ContainsKey(mediaDataSender.CameraNo))
                            {
                                //If the mic is not on, do not forward the audio stream
                                if (!this.cameraMicStatusDic[mediaDataSender.CameraNo])
                                {
                                    //logEx.Warn("This data is audio,but the mic is off.Chuck the data.Camera no:{0}", mediaDataSender.CameraNo);
                                    return;
                                }
                            }
                            else
                            {
                                //The mic defaults to off, so if cameraMicStatusDic does not contain this camera, treat the mic as off and discard the audio stream
                                //logEx.Warn("This data is audio,but the mic is off.Chuck the data.Camera no:{0}", mediaDataSender.CameraNo);
                                return;
                            }
                        }
                        finally
                        {
                            this.micOperateLock.ExitReadLock();
                        }
                    }

                }

                MediaData mediaData = new MediaData();
                mediaData.Data = pBuf;
                //IVS currently delivers a raw elementary stream
                mediaData.DataType = MediaDataType.FRAME_DATA;
                mediaData.StreamType = streamType;
                mediaData.Size = uiBufSize;

                //Convert the IVS frame type to the gateway's unified frame type
                string name = Enum.GetName(typeof(IvsH264NaluType), pRawFrameInfo.FrameType);

                //Occasionally IVS reports a frame type that is not known to the gateway; in that case Enum.GetName returns null
                if (string.IsNullOrEmpty(name))
                {
                    logEx.Warn("IvsRealPlayCallBackRaw FrameType Is Not Defined ,FrameType:{0}", pRawFrameInfo.FrameType);
                    mediaData.FrameType = FrameDataType.H264_NALU_TYPE_UNDEFINED;
                }
                else
                {
                    if (Enum.IsDefined(typeof(FrameDataType), name))
                    {
                        FrameDataType frameDataType = (FrameDataType)Enum.Parse(typeof(FrameDataType), name);
                        mediaData.FrameType = frameDataType;
                    }
                    else
                    {
                        mediaData.FrameType = FrameDataType.H264_NALU_TYPE_UNDEFINED;
                    }
                }

                //Forward the stream to the callback
                //this.videoDataCallBack(cameraNo, mediaData, this.sender);

                //logEx.Debug("FrameDataCallBackFun.mediaData.DataType={0},FrameType = {1},StreamType = {2},Size = {3}", Enum.GetName(typeof(MediaDataType), mediaData.DataType),
                //         Enum.GetName(typeof(FrameDataType), mediaData.FrameType), Enum.GetName(typeof(StreamType), mediaData.StreamType), mediaData.Size);
                mediaDataSender.SendData(mediaData, this.sender);
            }
            catch (Exception e)
            {
                logEx.Error("Send the media data failed.Execption message:{0}", e.ToString());
            }
        }
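
The frame-type handling above maps an IVS NALU type onto the gateway's FrameDataType by matching enum member names, guarding against names that do not exist on the gateway side. A small generic helper along the same lines is sketched below; the name EnumNameMapper.MapByName is illustrative only, and the gateway keeps this logic inline instead.

using System;

static class EnumNameMapper
{
    //Map a value of one enum to another enum that shares the member name,
    //falling back to a default when the name is missing or not defined on the target
    public static TTarget MapByName<TSource, TTarget>(TSource value, TTarget fallback)
        where TSource : struct, Enum
        where TTarget : struct, Enum
    {
        string name = Enum.GetName(typeof(TSource), value);
        if (!string.IsNullOrEmpty(name) && Enum.IsDefined(typeof(TTarget), name))
        {
            return (TTarget)Enum.Parse(typeof(TTarget), name);
        }
        return fallback;
    }
}

//Usage mirroring the IVS branch above (hypothetical call site):
//  mediaData.FrameType = EnumNameMapper.MapByName(
//      (IvsH264NaluType)pRawFrameInfo.FrameType, FrameDataType.H264_NALU_TYPE_UNDEFINED);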
Code Example #5
File: VideoPipe.cs Project: eSDK/esdk_Cgw
        /// <summary>
        /// Start sending stream data
        /// </summary>
        /// <param name="mediaData"></param>
        public void SenderData(MediaData mediaData)
        {
            NLogEx.LoggerEx logEx = new NLogEx.LoggerEx(log);
            try
            {
                if (pipedStream == null)
                {
                    return;
                }
                //Write the data synchronously
                byte[] byteArray1 = BitConverter.GetBytes(mediaData.Size);
                byte[] byteArray2 = BitConverter.GetBytes((Int32)mediaData.StreamType);
                byte[] byteArray3 = BitConverter.GetBytes((Int32)mediaData.DataType);
                byte[] byteArray4 = BitConverter.GetBytes((Int32)mediaData.FrameType);
                byte[] byteArray5 = mediaData.Data;
                //Combine the pieces into a single buffer
                byte[] byteArray = new byte[byteArray1.Length + byteArray2.Length + byteArray3.Length + byteArray4.Length + byteArray5.Length];

                byteArray1.CopyTo(byteArray, 0);
                byteArray2.CopyTo(byteArray, byteArray1.Length);
                byteArray3.CopyTo(byteArray, byteArray1.Length + byteArray2.Length);
                byteArray4.CopyTo(byteArray, byteArray1.Length + byteArray2.Length + byteArray3.Length);
                byteArray5.CopyTo(byteArray, byteArray1.Length + byteArray2.Length + byteArray3.Length + byteArray4.Length);
                //Write to the pipe
                //pipedStream.BeginWrite(byteArray, 0, byteArray.Length, new AsyncCallback(CallbackFun), null);
                pipedStream.Write(byteArray, 0, byteArray.Length);
            }
            catch (Exception ex)
            {
                logEx.Error(ex.ToString());
                if (ex is System.IO.IOException)
                {
                    this.Stop();
                }
            }
        }
Code Example #6
File: MediaDataSender.cs Project: eSDK/esdk_Cgw
        //private FileStream fs = new FileStream(@"D:\monitorVideoStreambefore.264", FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);
        //private FileStream fs1 = new FileStream(@"D:\monitorVideoStreamAfter.264", FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);
        //private int writeCount = 0;
        //private int writeCount1 = 0;
        /// <summary>
        /// Send data
        /// </summary>
        /// <param name="mediaData"></param>
        /// <param name="sender"></param>
        public void SendData(MediaData mediaData, string sender)
        {
            if (mediaData.StreamType == StreamType.VIDEO_H264)
            {
                //if (writeCount < 1200)
                //{
                //    fs.Write(mediaData.Data, 0, (int)mediaData.Size);
                //    writeCount++;
                //}

                SendVideoData(mediaData, sender);
            }
            if (mediaData.StreamType == StreamType.AUDIO_G711A || mediaData.StreamType == StreamType.AUDIO_G711U)
            {
                SendAudioData(mediaData, sender);
            }
        }
Code Example #7
File: MediaDataSender.cs Project: eSDK/esdk_Cgw
 /// <summary>
 /// Send audio stream data
 /// </summary>
 /// <param name="videoData"></param>
 /// <param name="sender"></param>
 public void SendAudioData(MediaData videoData, string sender)
 {
     this.DataCallBack(this.CameraNo, videoData, sender);
 }
Code Example #8
File: MediaDataSender.cs Project: eSDK/esdk_Cgw
        /// <summary>
        /// Send video stream data.
        /// For H264 frame data, if the monitoring platform delivers SPS, PPS and IDR in separate callbacks, they must be reassembled and forwarded as one SPS+PPS+IDR frame.
        /// </summary>
        /// <param name="mediaData"></param>
        /// <param name="sender"></param>
        public void SendVideoData(MediaData mediaData, string sender)
        {
            //For H264 frame data, if the monitoring platform delivers SPS, PPS and IDR in separate callbacks, reassemble them and send SPS+PPS+IDR together
            if (mediaData.DataType == MediaDataType.FRAME_DATA)
            {
                switch (mediaData.FrameType)
                {
                    case FrameDataType.H264_SEI_NALU_TYPE:
                        //SEI packet: discard it
                        return;

                    case FrameDataType.H264_SPS_NALU_TYPE:
                        //SPS frame: this starts a new cache
                        //this.CacheData = new byte[mediaData.Size];
                        //Cache the SPS data
                        this.CacheData = mediaData.Data;
                        this.LastCacheFrameType = mediaData.FrameType;

                        //Return without sending yet
                        return;

                    case FrameDataType.H264_PPS_NALU_TYPE:
                        //If the last cached frame is neither SPS nor PPS, the data is out of order; discard it
                        if ((this.LastCacheFrameType != FrameDataType.H264_SPS_NALU_TYPE) && (this.LastCacheFrameType != FrameDataType.H264_PPS_NALU_TYPE))
                        {
                            return;
                        }

                        //Append the PPS data to the cache; do not send yet
                        byte[] byteTemp = new byte[this.CacheData.Length + mediaData.Data.Length];
                        this.CacheData.CopyTo(byteTemp, 0);
                        mediaData.Data.CopyTo(byteTemp, this.CacheData.Length);
                        this.CacheData = byteTemp;

                        this.LastCacheFrameType = mediaData.FrameType;

                        //Return without sending yet
                        return;

                    case FrameDataType.H264_IDR_NALU_TYPE:
                        //If the last cached frame is not PPS, the data is out of order; discard it
                        if (this.LastCacheFrameType != FrameDataType.H264_PPS_NALU_TYPE)
                        {
                            return;
                        }

                        byteTemp = new byte[this.CacheData.Length + mediaData.Data.Length];
                        this.CacheData.CopyTo(byteTemp, 0);
                        mediaData.Data.CopyTo(byteTemp, this.CacheData.Length);
                        mediaData.Data = byteTemp;
                        mediaData.Size = (uint)mediaData.Data.Length;
                        //Break out of the switch and send
                        break;

                    default:
                        //Other frame types are sent directly
                        break;
                }
            }

            //if (writeCount1 < 1200)
            //{
            //    fs1.Write(mediaData.Data, 0, (int)mediaData.Size);
            //    writeCount1++;
            //}

            this.DataCallBack(this.CameraNo, mediaData, sender);
        }
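
SendVideoData is effectively a small state machine: an SPS frame starts a new cache, a PPS may only follow an SPS or another PPS and is appended to the cache, and an IDR may only follow a PPS, at which point cache plus IDR is forwarded as a single frame; SEI is dropped and any other frame passes straight through. The sketch below condenses that rule into a standalone helper so the flow is easier to follow; the class and method names are illustrative, it reuses the FrameDataType sketch from earlier, and it is not the gateway's own implementation.

using System.Collections.Generic;

public class AccessUnitAssembler
{
    private readonly List<byte> cache = new List<byte>();
    private FrameDataType lastCached = FrameDataType.H264_NALU_TYPE_UNDEFINED;

    //Returns the bytes to forward, or null when the NAL unit is only cached or dropped
    public byte[] Push(FrameDataType type, byte[] nalu)
    {
        switch (type)
        {
            case FrameDataType.H264_SEI_NALU_TYPE:
                return null;                                //SEI: drop
            case FrameDataType.H264_SPS_NALU_TYPE:
                cache.Clear();                              //SPS starts a new cache
                cache.AddRange(nalu);
                lastCached = type;
                return null;
            case FrameDataType.H264_PPS_NALU_TYPE:
                if (lastCached != FrameDataType.H264_SPS_NALU_TYPE &&
                    lastCached != FrameDataType.H264_PPS_NALU_TYPE)
                {
                    return null;                            //out of order: drop
                }
                cache.AddRange(nalu);                       //append PPS, keep waiting
                lastCached = type;
                return null;
            case FrameDataType.H264_IDR_NALU_TYPE:
                if (lastCached != FrameDataType.H264_PPS_NALU_TYPE)
                {
                    return null;                            //IDR without SPS+PPS in the cache: drop
                }
                byte[] frame = new byte[cache.Count + nalu.Length];
                cache.CopyTo(frame, 0);
                nalu.CopyTo(frame, cache.Count);
                return frame;                               //forward SPS+PPS+IDR as one frame
            default:
                return nalu;                                //other frames pass through unchanged
        }
    }
}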
Code Example #9
File: MonitorManageService.cs Project: eSDK/esdk_Cgw
        /// <summary>
        /// Stream callback delegate
        /// </summary>
        /// <param name="cameraNo">Camera number</param>
        /// <param name="mediaData">Stream data</param>
        /// <param name="sender">Caller, identifying which platform's callback this is</param>
        public void DataCallBackFunc(string cameraNo, MediaData mediaData, string sender)
        {
            NLogEx.LoggerEx logEx = new NLogEx.LoggerEx(log);
            //logEx.Trace("Enter: MonitorManageService.SetDataCallBackFunc.sender ={0},cameraNo ={1}", sender, cameraNo);

            VideoPipe videoPipe = VideoPipeManage.Instance().GetVideoPipe(cameraNo);
            if (videoPipe != null && videoPipe.HaveConnect)
            {
                videoPipe.SenderData(mediaData);
                //logEx.Trace("Enter: Send MediaData.sender ={0},cameraNo ={1},size ={2},data ={3}", sender, cameraNo, mediaData.Size, mediaData.Data);
            }
        }
Code Example #10
File: eLTEVideoMonitor.cs Project: eSDK/esdk_Cgw
        /// <summary>
        /// RTP stream callback handler
        /// </summary>
        /// <param name="pBuf">Frame data byte buffer</param>
        /// <param name="uiBufSize">Length of the frame data buffer</param>
        /// <param name="pFrameData">Frame data information</param>
        /// <param name="uiChannel">Channel</param>
        private void FrameDataCallBackFun(IntPtr pBuf, uint uiBufSize, ref ST_FRAME_DATA pFrameData, uint uiChannel)
        {
            NLogEx.LoggerEx logEx = new NLogEx.LoggerEx(log);
            ST_FRAME_DATA frameData = pFrameData;

            MediaDataSender mediaDataSender = null;
            if (this.handelOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
            {
                try
                {
                    if (this.videoChannelDataSenderDic.ContainsKey(uiChannel))
                    {
                        mediaDataSender = this.videoChannelDataSenderDic[uiChannel];
                    }
                }
                finally
                {
                    this.handelOperateLock.ExitReadLock();
                }
            }

            if (mediaDataSender == null)
            {
                logEx.Warn("FrameDataCallBackFun mediaDataSender = NULL");
                return;
            }

            StreamType streamType = StreamType.VIDEO_H264;
            //Supported stream types break out of the switch; unsupported types are discarded immediately with a return
            switch (frameData.iStreamType)
            {
                //For audio, only G711A and G711U are accepted; everything else is discarded
                case (int)IvsStreamType.PAY_LOAD_TYPE_PCMU:
                    streamType = StreamType.AUDIO_G711U;
                    break;
                case (int)IvsStreamType.PAY_LOAD_TYPE_PCMA:
                    streamType = StreamType.AUDIO_G711A;
                    break;

                //Only H264 video streams are accepted
                case (int)IvsStreamType.PAY_LOAD_TYPE_H264:
                    //Standard H264 video stream; handle it as video
                    streamType = StreamType.VIDEO_H264;
                    break;
                default:
                    //Unsupported type: discard the data and return
                    logEx.Warn("FrameDataCallBackFun.iStreamType is not valid");
                    return;
            }

            if (streamType == StreamType.AUDIO_G711A || streamType == StreamType.AUDIO_G711U)
            {
                //For audio streams, check the mic state and forward audio only while the mic is on
                if (this.micOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
                {
                    try
                    {
                        if (this.cameraMicStatusDic.ContainsKey(mediaDataSender.CameraNo))
                        {
                            //If the mic is not on, do not forward the audio stream
                            if (!this.cameraMicStatusDic[mediaDataSender.CameraNo])
                            {
                                // logEx.Warn("This data is audio,but the mic is off.Chuck the data.Camera no:{0}", mediaDataSender.CameraNo);
                                return;
                            }
                        }
                        else
                        {
                            //The mic defaults to off, so if cameraMicStatusDic does not contain this camera, treat the mic as off and discard the audio stream
                            // logEx.Warn("This data is audio,but the mic is off.Chuck the data.Camera no:{0}", mediaDataSender.CameraNo);
                            return;
                        }
                    }
                    finally
                    {
                        this.micOperateLock.ExitReadLock();
                    }
                }
            }

            try
            {
                MediaData mediaData = new MediaData();

                //Copy the data out of unmanaged memory
                byte[] datagram = new byte[uiBufSize];
                Marshal.Copy(pBuf, datagram, 0, (int)uiBufSize);

                //Prepend header information to video data
                if (!(streamType == StreamType.AUDIO_G711A || streamType == StreamType.AUDIO_G711U))
                {
                    //Prepend the 4-byte start code 0x00000001 to the frame
                    byte[] newDatagram = new byte[uiBufSize + 4];
                    datagram.CopyTo(newDatagram, 4);
                    newDatagram[3] = 1;
                    mediaData.Data = newDatagram;
                    mediaData.Size = (uint)(uiBufSize + 4);
                }
                else
                {
                    mediaData.Data = datagram;
                    mediaData.Size = (uint)(uiBufSize);
                }
                //Raw elementary stream
                mediaData.DataType = MediaDataType.FRAME_DATA;
                mediaData.StreamType = streamType;


                //Convert the frame type to the gateway's unified frame type
                string name = Enum.GetName(typeof(IvsH264NaluType), frameData.iFrameDataType);
                if (!string.IsNullOrEmpty(name) && Enum.IsDefined(typeof(FrameDataType), name))
                {
                    FrameDataType frameDataType = (FrameDataType)Enum.Parse(typeof(FrameDataType), name);
                    mediaData.FrameType = frameDataType;
                }
                else
                {
                    mediaData.FrameType = FrameDataType.H264_NALU_TYPE_UNDEFINED;
                    logEx.Warn("eLTE FrameDataCallBackFun FrameType is Not Defined, FrameType:{0}", frameData.iFrameDataType);
                }

                //logEx.Debug("FrameDataCallBackFun.mediaData.DataType={0},FrameType = {1},StreamType = {2},Size = {3}", Enum.GetName(typeof(MediaDataType), mediaData.DataType),
                //             Enum.GetName(typeof(FrameDataType), mediaData.FrameType), Enum.GetName(typeof(StreamType), mediaData.StreamType), mediaData.Size);
                //Forward the stream to the callback
                mediaDataSender.SendData(mediaData, this.sender);
            }
            catch (System.Exception ex)
            {
                logEx.Error("FrameDataCallBackFun failed.Execption message:{0}", ex.Message);
            }
        }
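
Because eLTE delivers raw NAL units without the Annex-B start code, the callback above prepends the 4-byte sequence 00 00 00 01 to every video frame so that downstream H.264 consumers can find frame boundaries. The same step, pulled out as a hypothetical helper, would look like this:

//Prepend the 4-byte Annex-B start code 0x00000001 to a raw NAL unit (sketch only)
static byte[] PrependAnnexBStartCode(byte[] nalu)
{
    byte[] framed = new byte[nalu.Length + 4];
    //framed[0..2] are already zero; setting framed[3] = 1 yields 00 00 00 01
    framed[3] = 1;
    nalu.CopyTo(framed, 4);
    return framed;
}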
Code Example #11
File: VideoMonitorManage.cs Project: eSDK/esdk_Cgw
        //private FileStream fs = new FileStream(@"D:\IvsVideoStream2.264", FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);
        //private int writeCount = 0;
        /// <summary>
        /// Stream callback function registered with the lower layer
        /// </summary>
        /// <param name="cameraNo">Camera number</param>
        /// <param name="mediaData">Stream data</param>
        /// <param name="monitorId">Monitoring platform ID</param>
        private void DataCallBackFunc(string cameraNo, MediaData mediaData, string monitorId)
        {
            //Prefix the camera number with the platform's unique identifier
            cameraNo = EncodeNo(cameraNo, monitorId);

            dataCallBack(cameraNo, mediaData, monitorId);
        }
Code Example #12
        /// <summary>
        /// Send video stream data.
        /// For H264 frame data, if the monitoring platform delivers SPS, PPS and IDR in separate callbacks, they must be reassembled and forwarded as one SPS+PPS+IDR frame.
        /// </summary>
        /// <param name="mediaData"></param>
        /// <param name="sender"></param>
        public void SendVideoData(MediaData mediaData, string sender)
        {
            //For H264 frame data, if the monitoring platform delivers SPS, PPS and IDR in separate callbacks, reassemble them and send SPS+PPS+IDR together
            if (mediaData.DataType == MediaDataType.FRAME_DATA)
            {
                switch (mediaData.FrameType)
                {
                case FrameDataType.H264_SEI_NALU_TYPE:
                    //SEI packet: discard it
                    return;

                case FrameDataType.H264_SPS_NALU_TYPE:
                    //SPS frame: this starts a new cache
                    //this.CacheData = new byte[mediaData.Size];
                    //Cache the SPS data
                    this.CacheData          = mediaData.Data;
                    this.LastCacheFrameType = mediaData.FrameType;

                    //Return without sending yet
                    return;

                case FrameDataType.H264_PPS_NALU_TYPE:
                    //If the last cached frame is neither SPS nor PPS, the data is out of order; discard it
                    if ((this.LastCacheFrameType != FrameDataType.H264_SPS_NALU_TYPE) && (this.LastCacheFrameType != FrameDataType.H264_PPS_NALU_TYPE))
                    {
                        return;
                    }

                    //Append the PPS data to the cache; do not send yet
                    byte[] byteTemp = new byte[this.CacheData.Length + mediaData.Data.Length];
                    this.CacheData.CopyTo(byteTemp, 0);
                    mediaData.Data.CopyTo(byteTemp, this.CacheData.Length);
                    this.CacheData = byteTemp;

                    this.LastCacheFrameType = mediaData.FrameType;

                    //Return without sending yet
                    return;

                case FrameDataType.H264_IDR_NALU_TYPE:
                    //If the last cached frame is not PPS, the data is out of order; discard it
                    if (this.LastCacheFrameType != FrameDataType.H264_PPS_NALU_TYPE)
                    {
                        return;
                    }

                    byteTemp = new byte[this.CacheData.Length + mediaData.Data.Length];
                    this.CacheData.CopyTo(byteTemp, 0);
                    mediaData.Data.CopyTo(byteTemp, this.CacheData.Length);
                    mediaData.Data = byteTemp;
                    mediaData.Size = (uint)mediaData.Data.Length;
                    //Break out of the switch and send
                    break;

                default:
                    //Other frame types are sent directly
                    break;
                }
            }

            //if (writeCount1 < 1200)
            //{
            //    fs1.Write(mediaData.Data, 0, (int)mediaData.Size);
            //    writeCount1++;
            //}

            this.DataCallBack(this.CameraNo, mediaData, sender);
        }
Code Example #13
 /// <summary>
 /// Send audio stream data
 /// </summary>
 /// <param name="videoData"></param>
 /// <param name="sender"></param>
 public void SendAudioData(MediaData videoData, string sender)
 {
     this.DataCallBack(this.CameraNo, videoData, sender);
 }