/// <summary>
/// Accumulates the VP8 payload from an RTP packet into the current video frame buffer and
/// returns the complete frame once the packet carrying the RTP marker bit arrives. A single
/// VP8 frame can be split across multiple RTP packets as per
/// https://tools.ietf.org/html/rfc7741#section-4.4.
/// </summary>
/// <param name="rtpPacket">The received RTP packet carrying a VP8 payload.</param>
/// <returns>The reassembled VP8 frame when the marker-bit packet arrives, otherwise null.</returns>
public byte[] GotRtpPacket(RTPPacket rtpPacket)
{
    var payload = rtpPacket.Payload;

    if (_currVideoFramePosn + payload.Length >= _currVideoFrame.Length)
    {
        // Something has gone very wrong (lost packets or an oversized frame). Log and clear
        // the buffer rather than letting Buffer.BlockCopy throw on an out-of-range copy.
        logger.LogWarning($"Video frame buffer overflow, discarding {_currVideoFramePosn} buffered bytes.");
        _currVideoFramePosn = 0;
    }

    // New frames must have the VP8 Payload Descriptor Start bit set.
    // _currVideoFramePosn > 0 means a fragmented frame is mid-reassembly, so continuation
    // packets (Start bit clear) are still accepted in that state.
    if (_currVideoFramePosn > 0 || (payload[0] & 0x10) > 0)
    {
        RtpVP8Header vp8Header = RtpVP8Header.GetVP8Header(payload);
        int payloadLength = payload.Length - vp8Header.Length;

        Buffer.BlockCopy(payload, vp8Header.Length, _currVideoFrame, _currVideoFramePosn, payloadLength);
        _currVideoFramePosn += payloadLength;

        // The marker bit is set on the final packet of a frame.
        if (rtpPacket.Header.MarkerBit > 0)
        {
            // Bulk-copy the accumulated bytes instead of LINQ Take().ToArray(), which
            // iterates the buffer element by element.
            var frame = new byte[_currVideoFramePosn];
            Buffer.BlockCopy(_currVideoFrame, 0, frame, 0, _currVideoFramePosn);
            _currVideoFramePosn = 0;
            return frame;
        }
    }
    else
    {
        var hdr = rtpPacket.Header;
        logger.LogWarning("Discarding RTP packet, VP8 header Start bit not set.");
        logger.LogWarning($"rtp video, seqnum {hdr.SequenceNumber}, ts {hdr.Timestamp}, marker {hdr.MarkerBit}, payload {payload.Length}.");
    }

    return null;
}
/// <summary>
/// Handles RTP packets received from the remote party. Audio packets are ignored by this
/// sample. Video packets carrying VP8 payloads are reassembled into frames (a frame can span
/// multiple RTP packets, see https://tools.ietf.org/html/rfc7741#section-4.4), decoded to
/// I420, converted to BGR24 and rendered into the Windows Forms picture box.
/// </summary>
/// <param name="remoteEndPoint">The remote end point the packet was received from.</param>
/// <param name="mediaType">Whether the packet belongs to the audio or video stream.</param>
/// <param name="rtpPacket">The received RTP packet.</param>
private static void RtpSession_OnRtpPacketReceived(IPEndPoint remoteEndPoint, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
{
    if (mediaType == SDPMediaTypesEnum.audio)
    {
        // Audio packets are not rendered by this sample.
    }
    else if (mediaType == SDPMediaTypesEnum.video)
    {
        if (_currVideoFramePosn + rtpPacket.Payload.Length >= _currVideoFrame.Length)
        {
            // Something has gone wrong with reassembly (lost packets or an oversized frame).
            // Discard the partial frame rather than letting Buffer.BlockCopy throw on an
            // out-of-range copy.
            Console.WriteLine($"Video frame buffer overflow, discarding {_currVideoFramePosn} buffered bytes.");
            _currVideoFramePosn = 0;
        }

        // New frames must have the VP8 Payload Descriptor Start bit set.
        // _currVideoFramePosn > 0 means a fragmented frame is mid-reassembly, so continuation
        // packets (Start bit clear) are still accepted in that state.
        if (_currVideoFramePosn > 0 || (rtpPacket.Payload[0] & 0x10) > 0)
        {
            RtpVP8Header vp8Header = RtpVP8Header.GetVP8Header(rtpPacket.Payload);
            int payloadLength = rtpPacket.Payload.Length - vp8Header.Length;

            Buffer.BlockCopy(rtpPacket.Payload, vp8Header.Length, _currVideoFrame, _currVideoFramePosn, payloadLength);
            _currVideoFramePosn += payloadLength;

            // The marker bit is set on the final packet of a frame.
            if (rtpPacket.Header.MarkerBit == 1)
            {
                unsafe
                {
                    fixed (byte* p = _currVideoFrame)
                    {
                        uint width = 0, height = 0;
                        byte[] i420 = null;

                        int decodeResult = _vpxEncoder.Decode(p, _currVideoFramePosn, ref i420, ref width, ref height);

                        if (decodeResult != 0)
                        {
                            Console.WriteLine("VPX decode of video sample failed.");
                        }
                        else
                        {
                            fixed (byte* r = i420)
                            {
                                byte[] bmp = null;
                                int stride = 0;

                                int convRes = _imgConverter.ConvertYUVToRGB(r, VideoSubTypesEnum.I420, (int)width, (int)height, VideoSubTypesEnum.BGR24, ref bmp, ref stride);

                                if (convRes == 0)
                                {
                                    _form.BeginInvoke(new Action(() =>
                                    {
                                        fixed (byte* s = bmp)
                                        {
                                            // A Bitmap constructed over a scan0 pointer does NOT copy the
                                            // pixel data, so it must not outlive the fixed scope that pins
                                            // the buffer. Take a deep copy for the picture box and dispose
                                            // the previous image to avoid leaking a GDI handle per frame.
                                            using (var wrapper = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s))
                                            {
                                                var oldImage = _picBox.Image;
                                                _picBox.Image = new System.Drawing.Bitmap(wrapper);
                                                oldImage?.Dispose();
                                            }
                                        }
                                    }));
                                }
                                else
                                {
                                    Console.WriteLine("Pixel format conversion of decoded sample failed.");
                                }
                            }
                        }
                    }
                }

                _currVideoFramePosn = 0;
            }
        }
        else
        {
            Console.WriteLine("Discarding RTP packet, VP8 header Start bit not set.");
            Console.WriteLine($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");
        }
    }
}