Exemple #1
0
        /// <summary>
        /// Background loop that clones a static test pattern image, stamps it with the current
        /// UTC time, converts it to I420, VPX encodes it and sends the encoded frame to every
        /// WebRTC session that has completed DTLS negotiation. Runs until _exit is set.
        /// </summary>
        private void SendTestPattern()
        {
            try
            {
                unsafe
                {
                    Bitmap testPattern = new Bitmap("wizard.jpeg");

                    SIPSorceryMedia.VPXEncoder vpxEncoder = new VPXEncoder();
                    vpxEncoder.InitEncoder(Convert.ToUInt32(testPattern.Width), Convert.ToUInt32(testPattern.Height));

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    byte[] sampleBuffer  = null;
                    byte[] encodedBuffer = new byte[4096];

                    while (!_exit)
                    {
                        // Only do the conversion/encode work when at least one open peer has
                        // completed DTLS negotiation.
                        if (_webRtcSessions.Any(x => x.Value.Peer.IsDtlsNegotiationComplete == true && x.Value.Peer.IsClosed == false))
                        {
                            var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;

                            try
                            {
                                AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                                sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                                bool encodeFailed = false;

                                fixed(byte *p = sampleBuffer)
                                {
                                    byte[] convertedFrame = null;
                                    colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.RGB24, testPattern.Width, testPattern.Height, VideoSubTypesEnum.I420, ref convertedFrame);

                                    fixed(byte *q = convertedFrame)
                                    {
                                        // BUG FIX: the encoder must be given the length of the converted
                                        // I420 frame, not the length of the RGB24 source buffer (the
                                        // stride-aware variant of this method does this correctly).
                                        int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                        if (encodeResult != 0)
                                        {
                                            logger.Warn("VPX encode of video sample failed.");
                                            encodeFailed = true;
                                        }
                                    }
                                }

                                if (!encodeFailed)
                                {
                                    lock (_webRtcSessions)
                                    {
                                        foreach (var session in _webRtcSessions.Where(x => x.Value.Peer.IsDtlsNegotiationComplete == true && x.Value.Peer.LocalIceCandidates.Any(y => y.RemoteRtpEndPoint != null)))
                                        {
                                            session.Value.Send(encodedBuffer);
                                        }
                                    }

                                    encodedBuffer = null;
                                }
                            }
                            finally
                            {
                                // BUG FIX: the original used "continue" on encode failure from inside
                                // the fixed blocks, which skipped Dispose (leaking the cloned bitmap)
                                // and skipped the sleep below (busy-spinning on repeated failures).
                                stampedTestPattern.Dispose();
                            }
                        }

                        Thread.Sleep(100);
                    }
                }
            }
            catch (Exception excp)
            {
                logger.Error("Exception SendTestPattern. " + excp);
            }
        }
Exemple #2
0
        /// <summary>
        /// Pulls raw frames from the supplied webcam sampler, forwards each raw bitmap to the
        /// UI via OnLocalVideoSampleReady and, when an RTP manager is present, converts the
        /// frame to I420 and VPX encodes it for network transmission. Runs until _stop is set
        /// or the cancellation source is cancelled.
        /// </summary>
        /// <param name="videoSampler">An already initialised Media Foundation video sampler.</param>
        /// <param name="videoMode">The device index and width/height being sampled.</param>
        /// <param name="cts">Cancellation source used to stop the sampling loop.</param>
        private void SampleWebCam(MFVideoSampler videoSampler, VideoMode videoMode, CancellationTokenSource cts)
        {
            try
            {
                Thread.CurrentThread.Name = "vidsampler_" + videoMode.DeviceIndex + "_" + videoMode.Width + "_" + videoMode.Height;

                var vpxEncoder = new VPXEncoder();
                // TODO: The last parameter passed to the vpx encoder init needs to be the frame stride not the width.
                vpxEncoder.InitEncoder(Convert.ToUInt32(videoMode.Width), Convert.ToUInt32(videoMode.Height), Convert.ToUInt32(videoMode.Width));

                while (!_stop && !cts.IsCancellationRequested)
                {
                    byte[] videoSample = null;
                    var    sample      = videoSampler.GetSample(ref videoSample);

                    if (sample?.HasVideoSample == true)
                    {
                        // This event sends the raw bitmap to the WPF UI.
                        OnLocalVideoSampleReady?.Invoke(videoSample, videoSampler.Width, videoSampler.Height);

                        // This event encodes the sample and forwards it to the RTP manager for network transmission.
                        if (_rtpManager != null)
                        {
                            byte[] yuv = null;

                            // Copy the managed sample into unmanaged memory for the native converter.
                            IntPtr rawSamplePtr = Marshal.AllocHGlobal(videoSample.Length);
                            try
                            {
                                Marshal.Copy(videoSample, 0, rawSamplePtr, videoSample.Length);

                                unsafe
                                {
                                    // TODO: using width instead of stride.
                                    _imageConverter.ConvertRGBtoYUV((byte *)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), Convert.ToInt32(videoMode.Width), VideoSubTypesEnum.I420, ref yuv);
                                    //_imageConverter.ConvertToI420((byte*)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), ref yuv);
                                }
                            }
                            finally
                            {
                                // BUG FIX: free the unmanaged buffer even if the conversion throws
                                // (the original leaked it on any exception).
                                Marshal.FreeHGlobal(rawSamplePtr);
                            }

                            // BUG FIX: guard against the converter leaving yuv null, which
                            // previously caused a NullReferenceException on yuv.Length.
                            if (yuv != null)
                            {
                                byte[] encodedBuffer = null;

                                IntPtr yuvPtr = Marshal.AllocHGlobal(yuv.Length);
                                try
                                {
                                    Marshal.Copy(yuv, 0, yuvPtr, yuv.Length);

                                    unsafe
                                    {
                                        vpxEncoder.Encode((byte *)yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
                                    }
                                }
                                finally
                                {
                                    Marshal.FreeHGlobal(yuvPtr);
                                }

                                _rtpManager.LocalVideoSampleReady(encodedBuffer);
                            }
                        }
                    }
                }

                videoSampler.Stop();
                vpxEncoder.Dispose();
            }
            catch (Exception excp)
            {
                logger.Error($"Exception SampleWebCam. {excp.Message}");
            }
        }
Exemple #3
0
        /// <summary>
        /// Starts two background tasks sampling the specified local video device: one pulls
        /// video frames (raw bitmaps to the UI via OnLocalVideoSampleReady; I420-converted and
        /// VPX-encoded frames to the RTP manager when one is set), the other pulls audio
        /// samples and forwards them to the audio channel when one exists. Both loops run
        /// until _stop is set or the new cancellation token fires.
        /// </summary>
        /// <param name="videoMode">The capture device index and width/height to sample at.</param>
        public void StartLocalVideo(VideoMode videoMode)
        {
            // If a previous sampling task is still running, request its cancellation first.
            if (_localVideoSamplingTask != null && !_localVideoSamplingTask.IsCompleted && _localVideoSamplingCancelTokenSource != null)
            {
                _localVideoSamplingCancelTokenSource.Cancel();
            }

            var videoSampler = new MFVideoSampler();

            videoSampler.Init(videoMode.DeviceIndex, VideoSubTypesEnum.RGB24, videoMode.Width, videoMode.Height);
            //videoSampler.InitFromFile();
            //_audioChannel = new AudioChannel();

            // Fresh token source for this run; both tasks below observe the same token.
            _localVideoSamplingCancelTokenSource = new CancellationTokenSource();
            var cancellationToken = _localVideoSamplingCancelTokenSource.Token;

            _localVideoSamplingTask = Task.Factory.StartNew(() =>
            {
                Thread.CurrentThread.Name = "vidsampler_" + videoMode.DeviceIndex + "_" + videoMode.Width + "_" + videoMode.Height;

                var vpxEncoder = new VPXEncoder();
                // NOTE(review): encoder initialised with width/height only; stride is presumably
                // assumed equal to width — confirm against the VPXEncoder implementation.
                vpxEncoder.InitEncoder(Convert.ToUInt32(videoMode.Width), Convert.ToUInt32(videoMode.Height));

                // var videoSampler = new MFVideoSampler();
                //videoSampler.Init(videoMode.DeviceIndex, videoMode.Width, videoMode.Height);
                // videoSampler.InitFromFile();

                while (!_stop && !cancellationToken.IsCancellationRequested)
                {
                    byte[] videoSample = null;
                    int result         = videoSampler.GetSample(ref videoSample);

                    if (result == NAudio.MediaFoundation.MediaFoundationErrors.MF_E_HW_MFT_FAILED_START_STREAMING)
                    {
                        // Most likely cause: the webcam is already in use by another application.
                        logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                        OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                        break;
                    }
                    else if (result != 0)
                    {
                        logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                        OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                        break;
                    }
                    else if (videoSample != null)
                    {
                        // This event sends the raw bitmap to the WPF UI.
                        if (OnLocalVideoSampleReady != null)
                        {
                            OnLocalVideoSampleReady(videoSample, videoSampler.Width, videoSampler.Height);
                        }

                        // This event encodes the sample and forwards it to the RTP manager for network transmission.
                        if (_rtpManager != null)
                        {
                            // Copy the managed sample into unmanaged memory for the native converter.
                            // NOTE(review): if ConvertRGBtoYUV throws, this buffer is never freed.
                            IntPtr rawSamplePtr = Marshal.AllocHGlobal(videoSample.Length);
                            Marshal.Copy(videoSample, 0, rawSamplePtr, videoSample.Length);

                            byte[] yuv = null;

                            unsafe
                            {
                                _imageConverter.ConvertRGBtoYUV((byte *)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), VideoSubTypesEnum.I420, ref yuv);
                                //_imageConverter.ConvertToI420((byte*)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), ref yuv);
                            }

                            Marshal.FreeHGlobal(rawSamplePtr);

                            // NOTE(review): yuv is assumed non-null here; a converter failure
                            // would throw NullReferenceException on yuv.Length.
                            IntPtr yuvPtr = Marshal.AllocHGlobal(yuv.Length);
                            Marshal.Copy(yuv, 0, yuvPtr, yuv.Length);

                            byte[] encodedBuffer = null;

                            unsafe
                            {
                                // NOTE(review): the encode return code is not checked.
                                vpxEncoder.Encode((byte *)yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
                            }

                            Marshal.FreeHGlobal(yuvPtr);

                            //if(encodedBuffer )
                            _rtpManager.LocalVideoSampleReady(encodedBuffer);
                        }
                    }
                }

                videoSampler.Stop();
                vpxEncoder.Dispose();
            }, cancellationToken);

            // Companion task pulling audio samples from the same sampler.
            _localAudioSamplingTask = Task.Factory.StartNew(() =>
            {
                Thread.CurrentThread.Name = "audsampler_" + videoMode.DeviceIndex;

                while (!_stop && !cancellationToken.IsCancellationRequested)
                {
                    byte[] audioSample = null;
                    int result         = videoSampler.GetAudioSample(ref audioSample);

                    if (result == NAudio.MediaFoundation.MediaFoundationErrors.MF_E_HW_MFT_FAILED_START_STREAMING)
                    {
                        logger.Warn("An audio sample could not be acquired from the local source. Check that it is not already in use.");
                        //OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                        break;
                    }
                    else if (result != 0)
                    {
                        logger.Warn("An audio sample could not be acquired from the local source. Check that it is not already in use. Error code: " + result);
                        //OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                        break;
                    }
                    else if (audioSample != null)
                    {
                        if (_audioChannel != null)
                        {
                            _audioChannel.AudioSampleReceived(audioSample, 0);
                        }
                    }
                }
            }, cancellationToken);
        }
Exemple #4
0
        /// <summary>
        /// Starts two background tasks sampling the specified local video device: one pulls
        /// video frames (raw bitmaps to the UI via OnLocalVideoSampleReady; YUV-converted and
        /// VPX-encoded frames to the RTP manager when one is set), the other pulls audio
        /// samples and forwards them to the audio channel when one exists. Both loops run
        /// until _stop is set or the new cancellation token fires.
        /// </summary>
        /// <param name="videoMode">The capture device index and width/height to sample at.</param>
        public void StartLocalVideo(VideoMode videoMode)
        {
            // If a previous sampling task is still running, request its cancellation first.
            if (_localVideoSamplingTask != null && !_localVideoSamplingTask.IsCompleted && _localVideoSamplingCancelTokenSource != null)
            {
                _localVideoSamplingCancelTokenSource.Cancel();
            }

            var videoSampler = new MFVideoSampler();
            videoSampler.Init(videoMode.DeviceIndex, videoMode.Width, videoMode.Height);
            //videoSampler.InitFromFile();
            //_audioChannel = new AudioChannel();

            // Fresh token source for this run; both tasks below observe the same token.
            _localVideoSamplingCancelTokenSource = new CancellationTokenSource();
            var cancellationToken = _localVideoSamplingCancelTokenSource.Token;

            _localVideoSamplingTask = Task.Factory.StartNew(() =>
            {
                Thread.CurrentThread.Name = "vidsampler_" + videoMode.DeviceIndex + "_" + videoMode.Width + "_" + videoMode.Height;

                var vpxEncoder = new VPXEncoder();
                // NOTE(review): encoder initialised with width/height only; stride is presumably
                // assumed equal to width — confirm against the VPXEncoder implementation.
                vpxEncoder.InitEncoder(Convert.ToUInt32(videoMode.Width), Convert.ToUInt32(videoMode.Height));

               // var videoSampler = new MFVideoSampler();
                //videoSampler.Init(videoMode.DeviceIndex, videoMode.Width, videoMode.Height);
               // videoSampler.InitFromFile();

                while (!_stop && !cancellationToken.IsCancellationRequested)
                {
                    byte[] videoSample = null;
                    int result = videoSampler.GetSample(ref videoSample);

                    if (result == NAudio.MediaFoundation.MediaFoundationErrors.MF_E_HW_MFT_FAILED_START_STREAMING)
                    {
                        // Most likely cause: the webcam is already in use by another application.
                        logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                        OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                        break;
                    }
                    else if (result != 0)
                    {
                        logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                        OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                        break;
                    }
                    else if (videoSample != null)
                    {
                        // This event sends the raw bitmap to the WPF UI.
                        if (OnLocalVideoSampleReady != null)
                        {
                            OnLocalVideoSampleReady(videoSample, videoSampler.Width, videoSampler.Height);
                        }

                        // This event encodes the sample and forwards it to the RTP manager for network transmission.
                        if (_rtpManager != null)
                        {
                            // Copy the managed sample into unmanaged memory for the native converter.
                            // NOTE(review): if ConvertRGBtoYUV throws, this buffer is never freed.
                            IntPtr rawSamplePtr = Marshal.AllocHGlobal(videoSample.Length);
                            Marshal.Copy(videoSample, 0, rawSamplePtr, videoSample.Length);

                            byte[] yuv = null;

                            unsafe
                            {
                                _imageConverter.ConvertRGBtoYUV((byte*)rawSamplePtr, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), ref yuv);
                            }

                            Marshal.FreeHGlobal(rawSamplePtr);

                            // NOTE(review): yuv is assumed non-null here; a converter failure
                            // would throw NullReferenceException on yuv.Length.
                            IntPtr yuvPtr = Marshal.AllocHGlobal(yuv.Length);
                            Marshal.Copy(yuv, 0, yuvPtr, yuv.Length);

                            byte[] encodedBuffer = null;

                            unsafe
                            {
                                // NOTE(review): the encode return code is not checked.
                                vpxEncoder.Encode((byte*)yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
                            }

                            Marshal.FreeHGlobal(yuvPtr);

                            _rtpManager.LocalVideoSampleReady(encodedBuffer);
                        }
                    }
                }

                videoSampler.Stop();
                vpxEncoder.Dispose();
            }, cancellationToken);

            // Companion task pulling audio samples from the same sampler.
            _localAudioSamplingTask = Task.Factory.StartNew(() =>
            {
                Thread.CurrentThread.Name = "audsampler_" + videoMode.DeviceIndex;

                while (!_stop && !cancellationToken.IsCancellationRequested)
                {
                    byte[] audioSample = null;
                    int result = videoSampler.GetAudioSample(ref audioSample);

                    if (result == NAudio.MediaFoundation.MediaFoundationErrors.MF_E_HW_MFT_FAILED_START_STREAMING)
                    {
                        logger.Warn("An audio sample could not be acquired from the local source. Check that it is not already in use.");
                        //OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                        break;
                    }
                    else if (result != 0)
                    {
                        logger.Warn("An audio sample could not be acquired from the local source. Check that it is not already in use. Error code: " + result);
                        //OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                        break;
                    }
                    else if (audioSample != null)
                    {
                        if (_audioChannel != null)
                        {
                            _audioChannel.AudioSampleReceived(audioSample, 0);
                        }
                    }
                }
            }, cancellationToken);
        }
        /// <summary>
        /// Background loop that clones the configured test pattern image, stamps it with the
        /// current UTC time, converts it to I420 (stride-aware), VPX encodes it and sends the
        /// frame via SendMedia to every qualifying test-pattern WebRTC session. Runs until
        /// _exit is set.
        /// </summary>
        private void SendTestPattern()
        {
            try
            {
                unsafe
                {
                    Bitmap testPattern = new Bitmap(_testPattermImagePath);

                    // Get the stride.
                    Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
                    System.Drawing.Imaging.BitmapData bmpData =
                        testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                             testPattern.PixelFormat);

                    // Get the address of the first line.
                    int stride = bmpData.Stride;

                    testPattern.UnlockBits(bmpData);

                    // Initialise the video codec and color converter.
                    SIPSorceryMedia.VPXEncoder vpxEncoder = new VPXEncoder();
                    vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    byte[] sampleBuffer  = null;
                    byte[] encodedBuffer = null;
                    int    sampleCount   = 0;
                    uint   rtpTimestamp  = 0;

                    while (!_exit)
                    {
                        // Only encode when a test-pattern session is open, has an RTP end point
                        // and has either completed DTLS or has encryption disabled.
                        if (_webRtcSessions.Any(x => (x.Value.Peer.IsDtlsNegotiationComplete == true || x.Value.IsEncryptionDisabled == true) &&
                                                x.Value.Peer.LocalIceCandidates.Any(y => y.RemoteRtpEndPoint != null && x.Value.MediaSource == MediaSourceEnum.TestPattern &&
                                                                                    x.Value.Peer.IsClosed == false)))
                        {
                            var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;

                            try
                            {
                                AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                                sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                                bool encodeFailed = false;

                                fixed(byte *p = sampleBuffer)
                                {
                                    byte[] convertedFrame = null;
                                    colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                                    fixed(byte *q = convertedFrame)
                                    {
                                        int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                        if (encodeResult != 0)
                                        {
                                            logger.Warn("VPX encode of video sample failed.");
                                            encodeFailed = true;
                                        }
                                    }
                                }

                                if (!encodeFailed)
                                {
                                    lock (_webRtcSessions)
                                    {
                                        foreach (var session in _webRtcSessions.Where(x => (x.Value.Peer.IsDtlsNegotiationComplete == true || x.Value.IsEncryptionDisabled == true) &&
                                                                                      x.Value.Peer.LocalIceCandidates.Any(y => y.RemoteRtpEndPoint != null) && x.Value.MediaSource == MediaSourceEnum.TestPattern))
                                        {
                                            try
                                            {
                                                session.Value.SendMedia(MediaSampleTypeEnum.VP8, rtpTimestamp, encodedBuffer);
                                            }
                                            catch (Exception sendExcp)
                                            {
                                                // A send failure is treated as fatal for that peer only.
                                                logger.Warn("Exception SendTestPattern.SendMedia. " + sendExcp.Message);
                                                session.Value.Peer.Close();
                                            }
                                        }
                                    }

                                    encodedBuffer = null;

                                    sampleCount++;
                                    rtpTimestamp += VP8_TIMESTAMP_SPACING;
                                }
                            }
                            finally
                            {
                                // BUG FIX: the original used "continue" on encode failure from inside
                                // the fixed blocks, which skipped Dispose (leaking the cloned bitmap)
                                // and skipped the sleep below (busy-spinning on repeated failures).
                                stampedTestPattern.Dispose();
                            }
                        }

                        Thread.Sleep(30);
                    }
                }
            }
            catch (Exception excp)
            {
                logger.Error("Exception SendTestPattern. " + excp);
            }
        }
Exemple #6
0
        /// <summary>
        /// Captures frames from the local webcam, converts them to I420, VPX encodes them and
        /// sends each encoded frame as a sequence of SRTP-protected VP8 RTP packets to every
        /// client that has completed its STUN exchange and DTLS negotiation. Loops forever;
        /// only exits via the outer exception handler.
        /// </summary>
        private static void SendRTPFromCamera()
        {
            try
            {
                unsafe
                {
                    SIPSorceryMedia.MFVideoSampler videoSampler = new SIPSorceryMedia.MFVideoSampler();

                    //List<VideoMode> webcamModes = new List<VideoMode>();
                    //int deviceCount = videoSampler.GetVideoDevices(ref webcamModes);
                    //foreach (var videoMode in webcamModes)
                    //{
                    //    Console.WriteLine(videoMode.DeviceFriendlyName + " " + (videoMode.VideoSubTypeFriendlyName ?? videoMode.VideoSubType.ToString()) + " " + videoMode.Width + "x" + videoMode.Height + ".");
                    //}

                    videoSampler.Init(_webcamIndex, _webcamVideoSubType, _webcamWidth, _webcamHeight);

                    SIPSorceryMedia.VPXEncoder vpxEncoder = new VPXEncoder();
                    vpxEncoder.InitEncoder(_webcamWidth, _webcamHeight);

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    // NOTE(review): pictureID is incremented and wrapped below but never written
                    // into the outgoing VP8 payload descriptor — confirm whether it is needed.
                    byte pictureID = 0x1;
                    byte[] sampleBuffer = null;
                    byte[] encodedBuffer = new byte[4096];

                    // NOTE(review): tight loop with no sleep — spins at full CPU while waiting
                    // for the first client to complete STUN/DTLS.
                    while (true)
                    {
                        if (_webRTCClients.Any(x => x.STUNExchangeComplete == true && x.IsDtlsNegotiationComplete == true))
                        {
                            int result = videoSampler.GetSample(ref sampleBuffer);
                            if (result != 0)
                            {
                                Console.WriteLine("Video sampler returned a null sample.");
                            }
                            else
                            {
                                //Console.WriteLine("Got managed sample " + sample.Buffer.Length + ", is key frame " + sample.IsKeyFrame + ".");

                                fixed (byte* p = sampleBuffer)
                                {
                                    byte[] convertedFrame = null;
                                    colorConverter.ConvertToI420(p, _webcamVideoSubType, Convert.ToInt32(_webcamWidth), Convert.ToInt32(_webcamHeight), ref convertedFrame);

                                    //int encodeResult = vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref encodedBuffer);
                                    fixed (byte* q = convertedFrame)
                                    {
                                        // NOTE(review): passes the raw sample length rather than
                                        // convertedFrame.Length — verify against VPXEncoder.Encode's
                                        // expected buffer-length semantics.
                                        int encodeResult = vpxEncoder.Encode(q, sampleBuffer.Length, 1, ref encodedBuffer);

                                        if (encodeResult != 0)
                                        {
                                            Console.WriteLine("VPX encode of video sample failed.");
                                            continue;
                                        }
                                    }
                                }

                                lock (_webRTCClients)
                                {
                                    foreach (var client in _webRTCClients.Where(x => x.STUNExchangeComplete && x.IsDtlsNegotiationComplete == true))
                                    {
                                        try
                                        {
                                            //if (client.LastRtcpSenderReportSentAt == DateTime.MinValue)
                                            //{
                                            //    logger.Debug("Sending RTCP report to " + client.SocketAddress + ".");

                                            //    // Send RTCP report.
                                            //    RTCPPacket rtcp = new RTCPPacket(client.SSRC, 0, 0, 0, 0);
                                            //    byte[] rtcpBuffer = rtcp.GetBytes();
                                            //    _webRTCReceiverClient.BeginSend(rtcpBuffer, rtcpBuffer.Length, client.SocketAddress, null, null);
                                            //    //int rtperr = client.SrtpContext.ProtectRTP(rtcpBuffer, rtcpBuffer.Length - SRTP_AUTH_KEY_LENGTH);
                                            //}

                                            //Console.WriteLine("Sending VP8 frame of " + encodedBuffer.Length + " bytes to " + client.SocketAddress + ".");

                                            // All packets of one frame share a timestamp; it only advances per frame.
                                            client.LastTimestamp = (client.LastTimestamp == 0) ? RTSPSession.DateTimeToNptTimestamp32(DateTime.Now) : client.LastTimestamp + TIMESTAMP_SPACING;

                                            // Fragment the encoded frame into RTP_MAX_PAYLOAD sized chunks.
                                            for (int index = 0; index * RTP_MAX_PAYLOAD < encodedBuffer.Length; index++)
                                            {
                                                int offset = (index == 0) ? 0 : (index * RTP_MAX_PAYLOAD);
                                                int payloadLength = (offset + RTP_MAX_PAYLOAD < encodedBuffer.Length) ? RTP_MAX_PAYLOAD : encodedBuffer.Length - offset;

                                                // 0x10 on the first packet: presumably the VP8 payload
                                                // descriptor start-of-partition bit — confirm against RFC 7741.
                                                byte[] vp8HeaderBytes = (index == 0) ? new byte[] { 0x10 } : new byte[] { 0x00 };

                                                // Payload is sized to include room for the SRTP auth tag appended by ProtectRTP.
                                                RTPPacket rtpPacket = new RTPPacket(payloadLength + SRTP_AUTH_KEY_LENGTH + vp8HeaderBytes.Length);
                                                rtpPacket.Header.SyncSource = client.SSRC;
                                                rtpPacket.Header.SequenceNumber = client.SequenceNumber++;
                                                rtpPacket.Header.Timestamp = client.LastTimestamp;
                                                rtpPacket.Header.MarkerBit = ((offset + payloadLength) >= encodedBuffer.Length) ? 1 : 0; // Set marker bit for the last packet in the frame.
                                                rtpPacket.Header.PayloadType = PAYLOAD_TYPE_ID;

                                                Buffer.BlockCopy(vp8HeaderBytes, 0, rtpPacket.Payload, 0, vp8HeaderBytes.Length);
                                                Buffer.BlockCopy(encodedBuffer, offset, rtpPacket.Payload, vp8HeaderBytes.Length, payloadLength);

                                                var rtpBuffer = rtpPacket.GetBytes();

                                                //_webRTCReceiverClient.Send(rtpBuffer, rtpBuffer.Length, _wiresharpEP);

                                                // SRTP-protect in place; the auth tag occupies the trailing SRTP_AUTH_KEY_LENGTH bytes.
                                                int rtperr = client.SrtpContext.ProtectRTP(rtpBuffer, rtpBuffer.Length - SRTP_AUTH_KEY_LENGTH);
                                                if (rtperr != 0)
                                                {
                                                    logger.Warn("SRTP packet protection failed, result " + rtperr + ".");
                                                }
                                                else
                                                {
                                                    //logger.Debug("Sending RTP, offset " + offset + ", frame bytes " + payloadLength + ", vp8 header bytes " + vp8HeaderBytes.Length + ", timestamp " + rtpPacket.Header.Timestamp + ", seq # " + rtpPacket.Header.SequenceNumber + " to " + client.SocketAddress + ".");

                                                    //_webRTCReceiverClient.Send(rtpBuffer, rtpBuffer.Length, client.SocketAddress);

                                                    _webRTCReceiverClient.BeginSend(rtpBuffer, rtpBuffer.Length, client.SocketAddress, null, null);
                                                }
                                            }
                                        }
                                        catch (Exception sendExcp)
                                        {
                                            logger.Error("SendRTP exception sending to " + client.SocketAddress + ". " + sendExcp.Message);
                                        }
                                    }
                                }

                                pictureID++;

                                // Wrap at 127 to stay within the 7-bit picture ID range.
                                if (pictureID > 127)
                                {
                                    pictureID = 1;
                                }

                                encodedBuffer = null;
                                sampleBuffer = null;
                            }
                        }
                    }
                }
            }
            catch (Exception excp)
            {
                Console.WriteLine("Exception SendRTP. " + excp);
            }
        }
        /// <summary>
        /// Background loop that clones a static test pattern image, stamps it with the current
        /// UTC time, converts it to I420, VPX encodes it and sends the encoded frame to every
        /// WebRTC session that has completed DTLS negotiation. Runs until _exit is set.
        /// </summary>
        private void SendTestPattern()
        {
            try
            {
                unsafe
                {
                    Bitmap testPattern = new Bitmap("wizard.jpeg");

                    SIPSorceryMedia.VPXEncoder vpxEncoder = new VPXEncoder();
                    vpxEncoder.InitEncoder(Convert.ToUInt32(testPattern.Width), Convert.ToUInt32(testPattern.Height));

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    byte[] sampleBuffer = null;
                    byte[] encodedBuffer = new byte[4096];

                    while (!_exit)
                    {
                        // Only do the conversion/encode work when at least one open peer has
                        // completed DTLS negotiation.
                        if (_webRtcSessions.Any(x => x.Value.Peer.IsDtlsNegotiationComplete == true && x.Value.Peer.IsClosed == false))
                        {
                            var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;

                            try
                            {
                                AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                                sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                                bool encodeFailed = false;

                                fixed (byte* p = sampleBuffer)
                                {
                                    byte[] convertedFrame = null;
                                    colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.RGB24, testPattern.Width, testPattern.Height, VideoSubTypesEnum.I420, ref convertedFrame);

                                    fixed (byte* q = convertedFrame)
                                    {
                                        // BUG FIX: the encoder must be given the length of the converted
                                        // I420 frame, not the length of the RGB24 source buffer (the
                                        // stride-aware variant of this method does this correctly).
                                        int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                        if (encodeResult != 0)
                                        {
                                            logger.Warn("VPX encode of video sample failed.");
                                            encodeFailed = true;
                                        }
                                    }
                                }

                                if (!encodeFailed)
                                {
                                    lock (_webRtcSessions)
                                    {
                                        foreach (var session in _webRtcSessions.Where(x => x.Value.Peer.IsDtlsNegotiationComplete == true && x.Value.Peer.LocalIceCandidates.Any(y => y.RemoteRtpEndPoint != null)))
                                        {
                                            session.Value.Send(encodedBuffer);
                                        }
                                    }

                                    encodedBuffer = null;
                                }
                            }
                            finally
                            {
                                // BUG FIX: the original used "continue" on encode failure from inside
                                // the fixed blocks, which skipped Dispose (leaking the cloned bitmap)
                                // and skipped the sleep below (busy-spinning on repeated failures).
                                stampedTestPattern.Dispose();
                            }
                        }

                        Thread.Sleep(100);
                    }
                }
            }
            catch (Exception excp)
            {
                logger.Error("Exception SendTestPattern. " + excp);
            }
        }