private void _ups_Unpacked(object arg1, Nalu arg2)
{
    if (_decoder == null)
    {
        _decoder = new VideoDecoder();
        var codecId = Constants.AVCodecID.AV_CODEC_ID_H264;
        _decoder.Create(codecId);
    }

    if (arg2.Header.Type == 7 || arg2.Header.Type == 8)
    {
        // SPS/PPS parameter sets: feed them to the decoder, no frame is expected back.
        _decoder.Decode(arg2.BytesWithStartCode(), out _curWidth, out _curHeight);
    }
    else
    {
        byte[] frameData = _decoder.Decode(arg2.BytesWithStartCode(), out _curWidth, out _curHeight);
        if (frameData != null)
        {
            if (_curWidth != _width || _curHeight != _height)
            {
                _width = _curWidth;
                _height = _curHeight;
                _renderSource.SetupSurface(_curWidth, _curHeight);
            }
            renderFrame(frameData);
        }
    }
}
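All of the handlers in this listing drive the same wrapper through `Create` and `Decode`. For reference, here is a minimal sketch of the `VideoDecoder` surface they assume; the signatures are inferred from the call sites above and below, not taken from the wrapper's actual declaration, and the `Constants` stub is only a placeholder for the enum the examples reference.

using System;

// Placeholder for the codec-id enum the examples reference; in the real wrapper
// this mirrors FFmpeg's AVCodecID values.
public static class Constants
{
    public enum AVCodecID
    {
        AV_CODEC_ID_H264 = 27, // FFmpeg's value for H.264
    }
}

// Minimal sketch of the assumed decoder surface (stub bodies only).
public class VideoDecoder
{
    // Opens a decoder context for the given codec (H.264 in these examples).
    public void Create(Constants.AVCodecID codecId)
    {
        // ... allocate and open the underlying FFmpeg codec context ...
    }

    // Feeds one packet (Annex B NAL unit or stream header) to the decoder.
    // Returns the decoded frame bytes, or null when the packet yields no frame
    // (e.g. SPS/PPS parameter sets), reporting the frame size via out parameters.
    public byte[] Decode(byte[] packet, out int width, out int height)
    {
        width = 0;
        height = 0;
        // ... send packet, receive frame ...
        return null;
    }
}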
private void onStreamEvent(IStreamPacket packet)
{
    Console.WriteLine();
    Console.Write("packet {0}:", packet.Buffer.Length);
    for (int i = 0; i < Math.Min(30, packet.Buffer.Length); i++)
    {
        Console.Write("{0:X2}, ", packet.Buffer[i]);
    }
    Console.WriteLine();

    int width = 0;
    int height = 0;
    byte[] frameData = _decoder.Decode(packet.Buffer, out width, out height);
    if (frameData != null)
    {
        Console.WriteLine("Decode frame: {0}", frameData.Length);

        // Expand the decoded luma plane into a 32bpp grayscale bitmap.
        int size = width * 4 * height;
        byte[] rgb = new byte[size];
        for (int i = 0; i < width * height; i++)
        {
            rgb[i * 4] = rgb[i * 4 + 1] = rgb[i * 4 + 2] = frameData[i];
            rgb[i * 4 + 3] = 0xFF;
        }

        IntPtr ptr = Marshal.AllocHGlobal(size);
        try
        {
            Marshal.Copy(rgb, 0, ptr, size);
            using (Bitmap bmp = new Bitmap(width, height, width * 4, System.Drawing.Imaging.PixelFormat.Format32bppRgb, ptr))
            {
                onBitmap(bmp);
            }
        }
        finally
        {
            Marshal.FreeHGlobal(ptr);
        }
    }
}
private void updateFromH264(IStreamPacket packet)
{
    byte[] buffer = getVideoDecoderBuffer(packet.Buffer);
    if (buffer != null && buffer.Length > 0)
    {
        int width = 0;
        int height = 0;
        byte[] frameData = _decoder?.Decode(buffer, out width, out height);
        if (frameData != null)
        {
            onDisplay(frameData, width, height);
            MessageInfo = "Device supports converting the Hikvision stream to the Ffmpeg.VideoDecoder format";
        }
    }
}
private void onHeader(IHeaderPacket packet)
{
    var header = packet as StandardHeaderPacket;
    Console.Write("header {0}:", header.Buffer.Length);
    for (int i = 0; i < header.Buffer.Length; i++)
    {
        Console.Write("{0:X2}, ", header.Buffer[i]);
    }
    Console.WriteLine();

    _decoder = new VideoDecoder();
    _decoder.Create((Constants.AVCodecID)header.CodecID);

    // Prime the decoder with the stream header; no frame is expected from this call.
    int width = 0;
    int height = 0;
    _decoder.Decode(header.Buffer, out width, out height);
}
public VideoFrame Decode(int type, ulong pts, byte[] data)
{
    if (_decoder != null)
    {
        int width;
        int height;
        byte[] frame = _decoder.Decode(data, out width, out height);
        if (frame != null)
        {
            return new VideoFrame()
            {
                Width = width,
                Height = height,
                Pts = pts,
                Data = frame,
            };
        }
    }
    return null;
}
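A hypothetical caller for the `Decode(int type, ulong pts, byte[] data)` wrapper above might look like the following; `onNalu`, `onVideoFrame`, and the `nalu` parameter are illustrative names, not part of the original code.

private void onNalu(int type, ulong pts, byte[] nalu)
{
    // Feed one NAL unit; Decode returns null until a full frame is available.
    VideoFrame frame = Decode(type, pts, nalu);
    if (frame != null)
    {
        // Hand the decoded frame (Width/Height/Pts/Data) to the renderer.
        onVideoFrame(frame);
    }
}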
private void onHeader(IHeaderPacket packet)
{
    var header = packet as StandardHeaderPacket;
    _rtspServer.UpdateHeader(header.Buffer);

    Console.WriteLine();
    string rtspstr = $"rtsp header {header.Buffer.Length}:";
    for (int i = 0; i < header.Buffer.Length; i++)
    {
        rtspstr += string.Format("{0:X2}, ", header.Buffer[i]);
    }
    Console.WriteLine(rtspstr);

    _decoder = new VideoDecoder();
    _decoder.Create((Constants.AVCodecID)header.CodecID);

    int width = 0;
    int height = 0;
    _decoder.Decode(header.Buffer, out width, out height);
}
private void onStreamEvent(IStreamPacket packet)
{
    //savetoTxt(packet.Buffer);
    //Console.WriteLine();
    //string rtspstr = $"rtsp stream {packet.Buffer.Length}:";
    //for (int i = 0; i < Math.Min(100, packet.Buffer.Length); i++)
    //    rtspstr += string.Format("{0:X2}, ", packet.Buffer[i]);
    //Console.WriteLine(rtspstr);

    int width = 0;
    int height = 0;
    byte[] frameData = _decoder.Decode(packet.Buffer, out width, out height);
    if (frameData != null)
    {
        if (width != _width || height != _height)
        {
            _width = width;
            _height = height;
            _renderSource.SetupSurface(width, height);
        }
        renderFrame(frameData, width, height);
    }
}