Example #1
        /// <summary>
        /// This method gets called when an encoded video sample has been received from the remote call party.
        /// The sample needs to be decoded and then handed off to the UI for display.
        /// </summary>
        /// <param name="sample">The encoded video sample.</param>
        public void EncodedVideoSampleReceived(byte[] sample, int length)
        {
            IntPtr encodedBufferPtr = Marshal.AllocHGlobal(length);

            Marshal.Copy(sample, 0, encodedBufferPtr, length);

            byte[] decodedBuffer    = null;
            uint   decodedImgWidth  = 0;
            uint   decodedImgHeight = 0;

            unsafe
            {
                _vpxDecoder.Decode((byte *)encodedBufferPtr, length, ref decodedBuffer, ref decodedImgWidth, ref decodedImgHeight);
            }

            Marshal.FreeHGlobal(encodedBufferPtr);

            if (decodedBuffer != null && decodedBuffer.Length > 0)
            {
                IntPtr decodedSamplePtr = Marshal.AllocHGlobal(decodedBuffer.Length);
                Marshal.Copy(decodedBuffer, 0, decodedSamplePtr, decodedBuffer.Length);

                byte[] bmp = null;

                unsafe
                {
                    _imageConverter.ConvertYUVToRGB((byte *)decodedSamplePtr, VideoSubTypesEnum.I420, Convert.ToInt32(decodedImgWidth), Convert.ToInt32(decodedImgHeight), VideoSubTypesEnum.RGB24, ref bmp);
                }

                Marshal.FreeHGlobal(decodedSamplePtr);

                OnRemoteVideoSampleReady?.Invoke(bmp, Convert.ToInt32(decodedImgWidth), Convert.ToInt32(decodedImgHeight));
            }
        }
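
Note that the AllocHGlobal/Copy round trip above can be avoided by pinning the managed buffers with fixed, which is the pattern the later examples use. Below is a minimal sketch of the same decode path written that way, assuming the same _vpxDecoder and _imageConverter fields and the same Decode/ConvertYUVToRGB signatures as the example; it is an alternative sketch, not the original code.

        // Sketch only: the same flow as above, but pinning the managed arrays with fixed
        // instead of copying them into unmanaged memory (mirrors Examples #2 to #4).
        public void EncodedVideoSampleReceived(byte[] sample, int length)
        {
            byte[] decodedBuffer    = null;
            uint   decodedImgWidth  = 0;
            uint   decodedImgHeight = 0;

            unsafe
            {
                fixed (byte* encodedPtr = sample)
                {
                    _vpxDecoder.Decode(encodedPtr, length, ref decodedBuffer, ref decodedImgWidth, ref decodedImgHeight);
                }
            }

            if (decodedBuffer != null && decodedBuffer.Length > 0)
            {
                byte[] bmp = null;

                unsafe
                {
                    fixed (byte* decodedPtr = decodedBuffer)
                    {
                        _imageConverter.ConvertYUVToRGB(decodedPtr, VideoSubTypesEnum.I420, (int)decodedImgWidth, (int)decodedImgHeight, VideoSubTypesEnum.RGB24, ref bmp);
                    }
                }

                OnRemoteVideoSampleReady?.Invoke(bmp, (int)decodedImgWidth, (int)decodedImgHeight);
            }
        }
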
Example #2
        private static void RtpSession_OnRtpPacketReceived(SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
        {
            if (mediaType == SDPMediaTypesEnum.audio)
            {
                //Console.WriteLine($"rtp audio, seqnum {rtpPacket.Header.SequenceNumber}, payload type {rtpPacket.Header.PayloadType}, marker {rtpPacket.Header.MarkerBit}.");
            }
            else if (mediaType == SDPMediaTypesEnum.video)
            {
                //Console.WriteLine($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");

                // New frames must have the VP8 Payload Descriptor Start bit set.
                if (_currVideoFramePosn > 0 || (rtpPacket.Payload[0] & 0x10) > 0)
                {
                    // TODO: use the VP8 Payload descriptor to properly determine the VP8 header length (currently hard coded to 4).
                    Buffer.BlockCopy(rtpPacket.Payload, 4, _currVideoFrame, _currVideoFramePosn, rtpPacket.Payload.Length - 4);
                    _currVideoFramePosn += rtpPacket.Payload.Length - 4;

                    if (rtpPacket.Header.MarkerBit == 1)
                    {
                        unsafe
                        {
                            fixed(byte *p = _currVideoFrame)
                            {
                                uint width = 0, height = 0;

                                byte[] i420 = null;

                                //Console.WriteLine($"Attempting vpx decode {_currVideoFramePosn} bytes.");

                                int decodeResult = _vpxEncoder.Decode(p, _currVideoFramePosn, ref i420, ref width, ref height);

                                if (decodeResult != 0)
                                {
                                    Console.WriteLine("VPX decode of video sample failed.");
                                }
                                else
                                {
                                    //Console.WriteLine($"Video frame ready {width}x{height}.");

                                    fixed(byte *r = i420)
                                    {
                                        byte[] bmp     = null;
                                        int    stride  = 0;
                                        int    convRes = _imgConverter.ConvertYUVToRGB(r, VideoSubTypesEnum.I420, (int)width, (int)height, VideoSubTypesEnum.BGR24, ref bmp, ref stride);

                                        if (convRes == 0)
                                        {
                                            _form.BeginInvoke(new Action(() =>
                                            {
                                                fixed(byte *s = bmp)
                                                {
                                                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                                    _picBox.Image = bmpImage;
                                                }
                                            }));
                                        }
                                        else
                                        {
                                            Console.WriteLine("Pixel format conversion of decoded sample failed.");
                                        }
                                    }
                                }
                            }
                        }

                        _currVideoFramePosn = 0;
                    }
                }
                else
                {
                    Console.WriteLine("Discarding RTP packet, VP8 header Start bit not set.");
                    Console.WriteLine($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");
                }
            }
        }
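
The TODO in this example hard codes the VP8 payload descriptor length to 4 bytes. Per RFC 7741 section 4.2 the descriptor is variable length, so a sketch of deriving it from the X/I/L/T/K bits is shown below. The helper name is illustrative only; Examples #3 and #4 get the same information from the library's RtpVP8Header.GetVP8Header instead.

        // Sketch only: compute the variable-length VP8 payload descriptor size per RFC 7741
        // section 4.2 rather than hard coding it to 4. The helper name is illustrative.
        private static int GetVP8PayloadDescriptorLength(byte[] payload)
        {
            int length = 1;                                  // Mandatory first octet (X|R|N|S|R|PID).

            if ((payload[0] & 0x80) != 0)                    // X bit: extension octet present.
            {
                byte ext = payload[1];
                length = 2;

                if ((ext & 0x80) != 0)                       // I bit: PictureID present.
                {
                    // If the M bit of the first PictureID octet is set the PictureID is 15 bits (two octets).
                    length += ((payload[length] & 0x80) != 0) ? 2 : 1;
                }

                if ((ext & 0x40) != 0)                       // L bit: TL0PICIDX octet present.
                {
                    length++;
                }

                if ((ext & 0x30) != 0)                       // T or K bit: TID|Y|KEYIDX octet present.
                {
                    length++;
                }
            }

            return length;
        }

The Buffer.BlockCopy offset and length in the handler would then use this value in place of the literal 4.
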
Example #3
        /// <summary>
        /// Render a video RTP packet received from a remote party.
        /// </summary>
        /// <param name="rtpPacket">The RTP packet containing the video payload.</param>
        private void RenderVideo(RTPPacket rtpPacket)
        {
            // New frames must have the VP8 Payload Descriptor Start bit set. The _currVideoFramePosn check
            // lets a frame that has been split across multiple RTP packets (RFC 7741 section 4.4) be reassembled.
            if (_currVideoFramePosn > 0 || (rtpPacket.Payload[0] & 0x10) > 0)
            {
                RtpVP8Header vp8Header = RtpVP8Header.GetVP8Header(rtpPacket.Payload);
                Buffer.BlockCopy(rtpPacket.Payload, vp8Header.Length, _currVideoFrame, _currVideoFramePosn, rtpPacket.Payload.Length - vp8Header.Length);
                _currVideoFramePosn += rtpPacket.Payload.Length - vp8Header.Length;

                if (rtpPacket.Header.MarkerBit == 1)
                {
                    unsafe
                    {
                        fixed(byte *p = _currVideoFrame)
                        {
                            uint width = 0, height = 0;

                            byte[] i420 = null;

                            //Console.WriteLine($"Attempting vpx decode {_currVideoFramePosn} bytes.");

                            int decodeResult = _vpxDecoder.Decode(p, _currVideoFramePosn, ref i420, ref width, ref height);

                            if (decodeResult != 0)
                            {
                                Console.WriteLine("VPX decode of video sample failed.");
                            }
                            else
                            {
                                if (OnVideoSampleReady != null)
                                {
                                    fixed(byte *r = i420)
                                    {
                                        byte[] bmp     = null;
                                        int    stride  = 0;
                                        int    convRes = _imgConverter.ConvertYUVToRGB(r, VideoSubTypesEnum.I420, (int)width, (int)height, VideoSubTypesEnum.BGR24, ref bmp, ref stride);

                                        if (convRes == 0)
                                        {
                                            OnVideoSampleReady(bmp, width, height, stride);
                                        }
                                        else
                                        {
                                            Log.LogWarning("Pixel format conversion of decoded sample failed.");
                                        }
                                    }
                                }
                            }
                        }
                    }

                    _currVideoFramePosn = 0;
                }
            }
            else
            {
                Log.LogWarning("Discarding RTP packet, VP8 header Start bit not set.");
                Log.LogWarning($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");
            }
        }
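
For completeness, here is a possible consumer of the OnVideoSampleReady event raised above, modelled on the rendering code in Examples #2 and #4. The handler signature is inferred from the invocation, and the _picBox field is an assumption rather than part of the original example.

        // Sketch only: renders the BGR24 buffer supplied by OnVideoSampleReady in a WinForms
        // PictureBox, following the same approach as Examples #2 and #4. _picBox is assumed.
        private void VideoSampleReadyHandler(byte[] bmp, uint width, uint height, int stride)
        {
            unsafe
            {
                _picBox.BeginInvoke(new Action(() =>
                {
                    fixed (byte* s = bmp)
                    {
                        var bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride,
                            System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                        _picBox.Image = bmpImage;
                    }
                }));
            }
        }
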
Example #4
        private static void RtpSession_OnRtpPacketReceived(IPEndPoint remoteEndPoint, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
        {
            if (mediaType == SDPMediaTypesEnum.audio)
            {
                //Console.WriteLine($"rtp audio, seqnum {rtpPacket.Header.SequenceNumber}, payload type {rtpPacket.Header.PayloadType}, marker {rtpPacket.Header.MarkerBit}.");
            }
            else if (mediaType == SDPMediaTypesEnum.video)
            {
                //Console.WriteLine($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");

                // New frames must have the VP8 Payload Descriptor Start bit set.
                // The tracking of the current video frame position is to deal with a VP8 frame being split across multiple RTP packets
                // as per https://tools.ietf.org/html/rfc7741#section-4.4.
                if (_currVideoFramePosn > 0 || (rtpPacket.Payload[0] & 0x10) > 0)
                {
                    RtpVP8Header vp8Header = RtpVP8Header.GetVP8Header(rtpPacket.Payload);

                    Buffer.BlockCopy(rtpPacket.Payload, vp8Header.Length, _currVideoFrame, _currVideoFramePosn, rtpPacket.Payload.Length - vp8Header.Length);
                    _currVideoFramePosn += rtpPacket.Payload.Length - vp8Header.Length;

                    if (rtpPacket.Header.MarkerBit == 1)
                    {
                        unsafe
                        {
                            fixed(byte *p = _currVideoFrame)
                            {
                                uint width = 0, height = 0;

                                byte[] i420 = null;

                                //Console.WriteLine($"Attempting vpx decode {_currVideoFramePosn} bytes.");

                                int decodeResult = _vpxEncoder.Decode(p, _currVideoFramePosn, ref i420, ref width, ref height);

                                if (decodeResult != 0)
                                {
                                    Console.WriteLine("VPX decode of video sample failed.");
                                }
                                else
                                {
                                    //Console.WriteLine($"Video frame ready {width}x{height}.");

                                    fixed(byte *r = i420)
                                    {
                                        byte[] bmp     = null;
                                        int    stride  = 0;
                                        int    convRes = _imgConverter.ConvertYUVToRGB(r, VideoSubTypesEnum.I420, (int)width, (int)height, VideoSubTypesEnum.BGR24, ref bmp, ref stride);

                                        if (convRes == 0)
                                        {
                                            _form.BeginInvoke(new Action(() =>
                                            {
                                                fixed(byte *s = bmp)
                                                {
                                                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                                    _picBox.Image = bmpImage;
                                                }
                                            }));
                                        }
                                        else
                                        {
                                            Console.WriteLine("Pixel format conversion of decoded sample failed.");
                                        }
                                    }
                                }
                            }
                        }

                        _currVideoFramePosn = 0;
                    }
                }
                else
                {
                    Console.WriteLine("Discarding RTP packet, VP8 header Start bit not set.");
                    Console.WriteLine($"rtp video, seqnum {rtpPacket.Header.SequenceNumber}, ts {rtpPacket.Header.Timestamp}, marker {rtpPacket.Header.MarkerBit}, payload {rtpPacket.Payload.Length}, payload[0-5] {rtpPacket.Payload.HexStr(5)}.");
                }
            }
        }
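
Examples #2 and #4 rely on class-level state that is not shown in the snippets. The declarations below are a sketch of what that state plausibly looks like, inferred purely from how the handlers use it; the types, buffer size and initialisation are assumptions, not the original code.

        // Sketch only: the class-level state the handlers above rely on, inferred from usage.
        // Types, buffer size and initialisation are assumptions.
        private const int MAX_VIDEO_FRAME_SIZE = 1024 * 1024;                    // Assumed upper bound for a reassembled VP8 frame.

        private static byte[] _currVideoFrame = new byte[MAX_VIDEO_FRAME_SIZE];  // Reassembly buffer for a VP8 frame split across RTP packets (RFC 7741 section 4.4).
        private static int _currVideoFramePosn = 0;                              // Current write position within the reassembly buffer.

        private static VpxEncoder _vpxEncoder;                                   // Wraps the VP8 codec; Decode produces an I420 buffer.
        private static ImageConvert _imgConverter;                               // Converts the I420 buffer to BGR24 for display.

        private static Form _form;                                               // Used to marshal the bitmap update onto the UI thread.
        private static PictureBox _picBox;                                       // Control the decoded video frames are displayed in.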