Example #1
        public static void Main(string [] args)
        {
            var e   = new VpxEncoder(VpxCodec.VP8, null, VpxCodecFlags.None);
            var buf = e.GetGlobalHeaders();

            Console.WriteLine(buf.Size);
        }
Example #2
        public void SimpleTests()
        {
            var e   = new VpxEncoder(VpxCodec.VP8, null, VpxCodecFlags.None);
            var buf = e.GetGlobalHeaders();

            Assert.AreEqual(IntPtr.Zero, buf.Buffer, "there should be no global headers.");
        }
Example #3
        private static void OnVideoResolutionChangedEvent(uint width, uint height, uint stride)
        {
            try
            {
                if (_vpxEncoder == null ||
                    (_vpxEncoder.GetWidth() != width || _vpxEncoder.GetHeight() != height || _vpxEncoder.GetStride() != stride))
                {
                    _vpxEncoderReady = false;

                    if (_vpxEncoder != null)
                    {
                        _vpxEncoder.Dispose();
                    }

                    _vpxEncoder = new VpxEncoder();
                    _vpxEncoder.InitEncoder(width, height, stride);

                    logger.LogInformation($"VPX encoder initialised with width {width}, height {height} and stride {stride}.");

                    _vpxEncoderReady = true;
                }
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception MfSampleGrabber_OnVideoResolutionChangedEvent. " + excp.Message);
            }
        }
Example #4
    public void Setup(string path, VpxEncoder encoder)
    {
        // Buffer output to file.
        if (_fs != null)
        {
            return;
        }

//		_fs = new FileStream(Application.dataPath + "/test.vp9", FileMode.Create, FileAccess.Write);
        _fs = new FileStream(Path.Combine(Application.dataPath, path), FileMode.Create, FileAccess.Write);
        // Register the encode event handler.
        encoder.OnEncoded += this.DebugWriteFile;
    }
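
The DebugWriteFile handler that Setup wires up is not shown in this example. A minimal sketch of what it might look like, assuming the OnEncoded event hands the subscriber each encoded frame as a byte[] (the delegate signature is a guess, not taken from this wrapper's API):

    // Hypothetical OnEncoded handler; the byte[] parameter is an assumption
    // about the event's delegate signature.
    private void DebugWriteFile(byte[] encodedFrame)
    {
        // Append each encoded frame to the debug file opened in Setup.
        _fs?.Write(encodedFrame, 0, encodedFrame.Length);
    }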
Example #5
        /// <summary>
        /// This class manages the different media renderers that can be included in a call, e.g. audio and video.
        /// </summary>
        /// <param name="dispatcher">A UI dispatcher is needed so tasks can be executed on the UI thread. For example this object
        /// gets created when a button is clicked and is therefore owned by the UI thread. When a call transfer completes the
        /// resources need to be closed without any UI interaction; in that case the work must be marshalled back to the UI thread.</param>
        /// <param name="useVideo">Set to true if the current call is going to be using video.</param>
        public MediaManager(Dispatcher dispatcher, bool useVideo = false)
        {
            _dispatcher = dispatcher;
            _useVideo   = useVideo;

            if (_useVideo)
            {
                _vpxDecoder = new VpxEncoder();
                _vpxDecoder.InitDecoder();

                _imageConverter = new ImageConvert();
            }
        }
Example #6
        public TestPatternVideoSource(string testPatternSource, int framesPerSecond)
        {
            _testPatternPath = testPatternSource;
            _framesPerSecond = (framesPerSecond > 0 && framesPerSecond <= DEFAULT_FRAMES_PER_SECOND) ? framesPerSecond : DEFAULT_FRAMES_PER_SECOND;
            _samplePeriod    = 1000 / _framesPerSecond;

            if (!String.IsNullOrEmpty(testPatternSource) && !File.Exists(testPatternSource))
            {
                logger.LogWarning($"Requested test pattern file could not be found {testPatternSource}.");
                _testPatternPath = null;
            }

            if (_testPatternPath == null)
            {
                if (!File.Exists(FALLBACK_TEST_PATTERN_IMAGE_PATH))
                {
                    throw new ApplicationException($"The fallback test pattern image file could not be found {FALLBACK_TEST_PATTERN_IMAGE_PATH}.");
                }
                else
                {
                    _testPatternPath = FALLBACK_TEST_PATTERN_IMAGE_PATH;
                }
            }

            logger.LogDebug($"Loading test pattern from {_testPatternPath}.");

            _testPattern = new Bitmap(_testPatternPath);

            // Get the stride.
            Rectangle rect = new Rectangle(0, 0, _testPattern.Width, _testPattern.Height);

            System.Drawing.Imaging.BitmapData bmpData =
                _testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                      _testPattern.PixelFormat);

            _width  = (uint)_testPattern.Width;
            _height = (uint)_testPattern.Height;

            // Get the address of the first line.
            _stride = (uint)bmpData.Stride;

            _testPattern.UnlockBits(bmpData);

            // Initialise the video codec and color converter.
            _vpxEncoder = new VpxEncoder();
            _vpxEncoder.InitEncoder(_width, _height, _stride);

            _colorConverter = new ImageConvert();
        }
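
The LockBits/Stride/UnlockBits dance above reappears in several of the later examples. A small helper that isolates it, a sketch using only standard System.Drawing calls (the helper name is illustrative, not part of the library):

        // Illustrative helper: returns the stride (bytes per row, including padding)
        // of a bitmap by briefly locking its bits.
        private static int GetBitmapStride(Bitmap bitmap)
        {
            var rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
            var bmpData = bitmap.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadOnly, bitmap.PixelFormat);
            try
            {
                return bmpData.Stride;
            }
            finally
            {
                bitmap.UnlockBits(bmpData);
            }
        }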
Example #7
        /// <summary>
        /// Initialises a VPX encoder for the supplied frame dimensions.
        /// </summary>
        /// <param name="width">The video frame width.</param>
        /// <param name="height">The video frame height.</param>
        /// <param name="stride">The video frame stride.</param>
        private VpxEncoder InitialiseVpxEncoder(uint width, uint height, uint stride)
        {
            try
            {
                var vpxEncoder = new VpxEncoder();
                vpxEncoder.InitEncoder(width, height, stride);

                logger.LogInformation($"VPX encoder initialised with width {width}, height {height} and stride {stride}.");

                return(vpxEncoder);
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception InitialiseVpxEncoder. " + excp.Message);
                throw;
            }
        }
Example #8
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        public RtpAVSession(AddressFamily addrFamily, AudioOptions audioOptions, VideoOptions videoOptions)
            : base(addrFamily, false, false, false)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                // RTP event support.
                int            clockRate      = pcmu.GetClockRate();
                SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                rtpEventFormat.SetFormatAttribute($"{TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List <SDPMediaFormat> {
                    pcmu, rtpEventFormat
                };

                MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
Example #9
        static void Main(string[] args)
        {
            AddConsoleLogger();

            _vpxEncoder = new VpxEncoder();
            int res = _vpxEncoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }

            _imgConverter = new ImageConvert();

            // Start web socket.
            Console.WriteLine("Starting web socket server...");
            _webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT, true);
            _webSocketServer.SslConfiguration.ServerCertificate          = new System.Security.Cryptography.X509Certificates.X509Certificate2(LOCALHOST_CERTIFICATE_PATH);
            _webSocketServer.SslConfiguration.CheckCertificateRevocation = false;
            _webSocketServer.AddWebSocketService <SDPExchange>("/", (sdpExchanger) =>
            {
                sdpExchanger.WebSocketOpened   += WebSocketOpened;
                sdpExchanger.OnMessageReceived += WebSocketMessageReceived;
            });
            _webSocketServer.Start();

            Console.WriteLine($"Waiting for browser web socket connection to {_webSocketServer.Address}:{_webSocketServer.Port}...");

            // Open a Window to display the video feed from the WebRTC peer.
            _form          = new Form();
            _form.AutoSize = true;
            _form.BackgroundImageLayout = ImageLayout.Center;
            _picBox = new PictureBox
            {
                Size     = new Size(640, 480),
                Location = new Point(0, 0),
                Visible  = true
            };
            _form.Controls.Add(_picBox);

            Application.EnableVisualStyles();
            Application.Run(_form);
        }
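
Example #9 opens the form but does not show how a decoded frame reaches _picBox. One plausible rendering step is sketched below, assuming decoded frames arrive as 24bpp BGR byte arrays with a known stride (the method name and frame format are assumptions, not part of the sample):

        // Illustrative renderer: copies a decoded BGR24 frame into a Bitmap and
        // marshals the picture box update onto the WinForms UI thread.
        private static void RenderFrame(byte[] bgr24Frame, int width, int height, int stride)
        {
            Bitmap frame;

            unsafe
            {
                fixed (byte* p = bgr24Frame)
                {
                    // Wrap the raw buffer, then copy it so the bitmap outlives the fixed block.
                    using (var wrapper = new Bitmap(width, height, stride,
                        System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)p))
                    {
                        frame = new Bitmap(wrapper);
                    }
                }
            }

            _form.BeginInvoke(new Action(() => _picBox.Image = frame));
        }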
Example #10
        /// <summary>
        /// Used when the video source is originating as bitmaps produced locally. For example
        /// the audio scope generates bitmaps in response to an audio signal. The generated bitmaps
        /// then need to be encoded and transmitted to the remote party.
        /// </summary>
        /// <param name="bmp">The locally generated bitmap to transmit to the remote party.</param>
        private void LocalBitmapAvailable(Bitmap bmp)
        {
            if (_vpxEncoder == null)
            {
                _extBmpWidth  = bmp.Width;
                _extBmpHeight = bmp.Height;
                _extBmpStride = (int)VideoUtils.GetStride(bmp);

                _vpxEncoder = new VpxEncoder();
                int res = _vpxEncoder.InitEncoder((uint)bmp.Width, (uint)bmp.Height, (uint)_extBmpStride);
                if (res != 0)
                {
                    throw new ApplicationException("VPX encoder initialisation failed.");
                }
                _imgEncConverter = new ImageConvert();
            }

            var sampleBuffer = VideoUtils.BitmapToRGB24(bmp);
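            // Despite the "RGB24" name, GDI+ 24bpp bitmap data is laid out in BGR
            // byte order, which is why the converter is passed BGR24 below.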

            unsafe
            {
                fixed(byte *p = sampleBuffer)
                {
                    byte[] convertedFrame = null;
                    _imgEncConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, _extBmpWidth, _extBmpHeight, _extBmpStride, VideoSubTypesEnum.I420, ref convertedFrame);

                    fixed(byte *q = convertedFrame)
                    {
                        byte[] encodedBuffer = null;
                        int    encodeResult  = _vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                        if (encodeResult != 0)
                        {
                            throw new ApplicationException("VPX encode of video sample failed.");
                        }

                        base.SendVp8Frame(_rtpVideoTimestampPeriod, (int)SDPMediaFormatsEnum.VP8, encodedBuffer);
                    }
                }
            }

            bmp.Dispose();
        }
Example #11
        protected virtual void Dispose(bool disposing)
        {
            if (!_isDisposing)
            {
                _isDisposing = true;

                if (disposing)
                {
                    _testPattern.Dispose();
                }

                lock (_vpxEncoder)
                {
                    // Prevent the encoder being disposed of if it's in the middle of a sample.
                    _vpxEncoder.Dispose();
                    _vpxEncoder = null;
                }

                _colorConverter = null;
            }
        }
Example #12
        private async Task ExpireConnections()
        {
            try
            {
                logger.LogDebug("Starting expire connections thread.");

                byte[] encodedBuffer = null;

                if (File.Exists(_expiredImagePath))
                {
                    Bitmap expiredImage = new Bitmap(_expiredImagePath);

                    // Get the stride.
                    Rectangle rect = new Rectangle(0, 0, expiredImage.Width, expiredImage.Height);
                    System.Drawing.Imaging.BitmapData bmpData =
                        expiredImage.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                              expiredImage.PixelFormat);

                    // Get the address of the first line.
                    int stride = bmpData.Stride;

                    expiredImage.UnlockBits(bmpData);

                    // Initialise the video codec and color converter.
                    SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
                    vpxEncoder.InitEncoder((uint)expiredImage.Width, (uint)expiredImage.Height, (uint)stride);

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    byte[] sampleBuffer = BitmapToRGB24(expiredImage);

                    unsafe
                    {
                        fixed(byte *p = sampleBuffer)
                        {
                            byte[] convertedFrame = null;
                            colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, expiredImage.Width, expiredImage.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                            fixed(byte *q = convertedFrame)
                            {
                                int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                if (encodeResult != 0)
                                {
                                    logger.LogWarning("VPX encode of expired image failed.");
                                }
                            }
                        }
                    }

                    expiredImage.Dispose();
                    vpxEncoder.Dispose();
                }

                while (!_exit)
                {
                    foreach (var conn in _webRtcConnections.Where(x => DateTime.Now.Subtract(x.Value.CreatedAt).TotalSeconds > _connectionTimeLimitSeconds).Select(x => x.Value))
                    {
                        OnMp4MediaSampleReady    -= conn.SendMedia;
                        OnTestPatternSampleReady -= conn.SendMedia;

                        if (conn.WebRtcSession.IsDtlsNegotiationComplete && !conn.WebRtcSession.IsClosed && encodedBuffer != null)
                        {
                            // Send the expired frame 3 times as a crude attempt to cope with packet loss.
                            conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                            await Task.Delay(1);

                            conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                            await Task.Delay(1);

                            conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                        }

                        conn.WebRtcSession.Close("expired");
                    }

                    await Task.Delay(1000);
                }
            }
            catch (Exception excp)
            {
                logger.LogError("Exception ExpireConnections. " + excp);
            }
        }
Example #13
        private async void SampleTestPattern()
        {
            try
            {
                logger.LogDebug("Starting test pattern sampling thread.");

                _isTestPatternSampling = true;

                Bitmap testPattern = new Bitmap(_testPatternImagePath);

                // Get the stride.
                Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
                System.Drawing.Imaging.BitmapData bmpData =
                    testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                         testPattern.PixelFormat);

                // Get the address of the first line.
                int stride = bmpData.Stride;

                testPattern.UnlockBits(bmpData);

                // Initialise the video codec and color converter.
                SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
                vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

                SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                byte[] sampleBuffer  = null;
                byte[] encodedBuffer = null;
                int    sampleCount   = 0;
                uint   rtpTimestamp  = 0;

                while (!_exit)
                {
                    if (OnTestPatternSampleReady == null)
                    {
                        logger.LogDebug("No active clients, test pattern sampling paused.");
                        break;
                    }
                    else
                    {
                        var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;
                        AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                        sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                        unsafe
                        {
                            fixed(byte *p = sampleBuffer)
                            {
                                byte[] convertedFrame = null;
                                colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                                fixed(byte *q = convertedFrame)
                                {
                                    int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                        stampedTestPattern.Dispose();
                                        continue;
                                    }
                                }
                            }

                            stampedTestPattern.Dispose();

                            OnTestPatternSampleReady?.Invoke(SDPMediaTypesEnum.video, rtpTimestamp, encodedBuffer);

                            sampleCount++;
                            rtpTimestamp += VP8_TIMESTAMP_SPACING;
                        }

                        await Task.Delay(30);
                    }
                }

                testPattern.Dispose();
                vpxEncoder.Dispose();
            }
            catch (Exception excp)
            {
                logger.LogError("Exception SampleTestPattern. " + excp);
            }
            finally
            {
                logger.LogDebug("test pattern sampling thread stopped.");
                _isTestPatternSampling = false;
            }
        }
Example #14
        /// <summary>
        /// Starts the Media Foundation sampling.
        /// </summary>
        unsafe private void SampleMp4Media()
        {
            try
            {
                logger.LogDebug("Starting mp4 media sampling thread.");

                _isMp4Sampling = true;

                VpxEncoder vpxEncoder     = null;
                uint       vp8Timestamp   = 0;
                uint       mulawTimestamp = 0;

                while (!_exit)
                {
                    if (OnMp4MediaSampleReady == null)
                    {
                        logger.LogDebug("No active clients, media sampling paused.");
                        break;
                    }
                    else
                    {
                        byte[] sampleBuffer = null;
                        var    sample       = _mediaSource.GetSample(ref sampleBuffer);

                        if (sample != null && sample.HasVideoSample)
                        {
                            if (vpxEncoder == null ||
                                (vpxEncoder.GetWidth() != sample.Width || vpxEncoder.GetHeight() != sample.Height || vpxEncoder.GetStride() != sample.Stride))
                            {
                                if (vpxEncoder != null)
                                {
                                    vpxEncoder.Dispose();
                                }

                                vpxEncoder = InitialiseVpxEncoder((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                            }

                            byte[] vpxEncodedBuffer = null;

                            unsafe
                            {
                                fixed(byte *p = sampleBuffer)
                                {
                                    int encodeResult = vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                    }
                                }
                            }

                            OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.video, vp8Timestamp, vpxEncodedBuffer);

                            //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                            vp8Timestamp += VP8_TIMESTAMP_SPACING;
                        }
                        else if (sample != null && sample.HasAudioSample)
                        {
                            uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                            byte[] mulawSample = new byte[sampleDuration];
                            int    sampleIndex = 0;

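                            // Down-convert the 16-bit PCM samples to 8-bit G.711 mu-law,
                            // one output byte for every two input bytes.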
                            for (int index = 0; index < sampleBuffer.Length; index += 2)
                            {
                                var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                                mulawSample[sampleIndex++] = ulawByte;
                            }

                            OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, mulawTimestamp, mulawSample);

                            //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                            mulawTimestamp += sampleDuration;
                        }
                    }
                }

                vpxEncoder?.Dispose();
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception SampleMp4Media. " + excp.Message);
            }
            finally
            {
                logger.LogDebug("mp4 sampling thread stopped.");
                _isMp4Sampling = false;
            }
        }
Example #15
        private void SampleWebCam(MFVideoSampler videoSampler, VideoMode videoMode, CancellationTokenSource cts)
        {
            try
            {
                Thread.CurrentThread.Name = "vidsampler_" + videoMode.DeviceIndex + "_" + videoMode.Width + "_" + videoMode.Height;

                var vpxEncoder = new VpxEncoder();
                // TODO: The last parameter passed to the vpx encoder init needs to be the frame stride not the width.
                vpxEncoder.InitEncoder(Convert.ToUInt32(videoMode.Width), Convert.ToUInt32(videoMode.Height), Convert.ToUInt32(videoMode.Width));

                // var videoSampler = new MFVideoSampler();
                //videoSampler.Init(videoMode.DeviceIndex, videoMode.Width, videoMode.Height);
                // videoSampler.InitFromFile();

                while (!_stop && !cts.IsCancellationRequested)
                {
                    byte[] videoSample = null;
                    var    sample      = videoSampler.GetSample(ref videoSample);

                    //if (result == NAudio.MediaFoundation.MediaFoundationErrors.MF_E_HW_MFT_FAILED_START_STREAMING)
                    //{
                    //    logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                    //    OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                    //    break;
                    //}
                    //else if (result != 0)
                    //{
                    //    logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                    //    OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                    //    break;
                    //}
                    //else
                    if (sample?.HasVideoSample == true)
                    {
                        // This event sends the raw bitmap to the WPF UI.
                        OnLocalVideoSampleReady?.Invoke(videoSample, videoSampler.Width, videoSampler.Height);

                        // This event encodes the sample and forwards it to the RTP manager for network transmission.
                        if (OnLocalVideoEncodedSampleReady != null)
                        {
                            IntPtr rawSamplePtr = Marshal.AllocHGlobal(videoSample.Length);
                            Marshal.Copy(videoSample, 0, rawSamplePtr, videoSample.Length);

                            byte[] yuv = null;

                            unsafe
                            {
                                // TODO: using width instead of stride.
                                _imageConverter.ConvertRGBtoYUV((byte *)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), Convert.ToInt32(videoMode.Width), VideoSubTypesEnum.I420, ref yuv);
                                //_imageConverter.ConvertToI420((byte*)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), ref yuv);
                            }

                            Marshal.FreeHGlobal(rawSamplePtr);

                            IntPtr yuvPtr = Marshal.AllocHGlobal(yuv.Length);
                            Marshal.Copy(yuv, 0, yuvPtr, yuv.Length);

                            byte[] encodedBuffer = null;

                            unsafe
                            {
                                vpxEncoder.Encode((byte *)yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
                            }

                            Marshal.FreeHGlobal(yuvPtr);

                            OnLocalVideoEncodedSampleReady(encodedBuffer);
                        }
                    }
                }

                videoSampler.Stop();
                vpxEncoder.Dispose();
            }
            catch (Exception excp)
            {
                logger.LogError($"Exception SampleWebCam. {excp.Message}");
            }
        }
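
The TODO comments above flag that the frame width is being passed where a stride is expected. For 24bpp Windows bitmap buffers the stride is the row width in bytes rounded up to a 4-byte boundary, so a hedged sketch of the correction might be (the helper name is illustrative, and it assumes the capture buffer really uses 4-byte row alignment):

        // Illustrative helper: stride of a 24bpp DIB row, padded to a 4-byte boundary.
        private static uint GetRgb24Stride(uint width)
        {
            return (width * 3 + 3) & ~3u;
        }

With such a helper, vpxEncoder.InitEncoder(width, height, GetRgb24Stride(width)) would replace the width-for-stride calls above.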
Example #16
        private static void SendTestPattern()
        {
            try
            {
                unsafe
                {
                    Bitmap testPattern = new Bitmap(TEST_PATTERN_IMAGE_PATH);

                    // Get the stride.
                    Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
                    System.Drawing.Imaging.BitmapData bmpData =
                        testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                             testPattern.PixelFormat);

                    // Get the address of the first line.
                    int stride = bmpData.Stride;

                    testPattern.UnlockBits(bmpData);

                    // Initialise the video codec and color converter.
                    SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
                    vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    byte[] sampleBuffer  = null;
                    byte[] encodedBuffer = null;
                    int    sampleCount   = 0;
                    uint   rtpTimestamp  = 0;

                    while (!_exit)
                    {
                        if (OnTestPatternSampleReady != null)
                        {
                            var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;
                            AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                            sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                            fixed(byte *p = sampleBuffer)
                            {
                                byte[] convertedFrame = null;
                                colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                                fixed(byte *q = convertedFrame)
                                {
                                    int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                        stampedTestPattern.Dispose();
                                        continue;
                                    }
                                }
                            }

                            stampedTestPattern.Dispose();
                            stampedTestPattern = null;

                            OnTestPatternSampleReady(rtpTimestamp, encodedBuffer);

                            encodedBuffer = null;

                            sampleCount++;
                            rtpTimestamp += VP8_TIMESTAMP_SPACING;
                        }

                        Thread.Sleep(30);
                    }
                }
            }
            catch (Exception excp)
            {
                logger.LogError("Exception SendTestPattern. " + excp);
            }
        }
Example #17
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        /// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
        /// and control sockets created. Generally this address does not need to be set. The default behaviour
        /// is to bind to [::] or 0.0.0.0, depending on system support, which minimises network routing
        /// causing connection issues.</param>
        /// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
        /// source, e.g. microphone.</param>
        public RtpAVSession(AudioOptions audioOptions, VideoOptions videoOptions, IPAddress bindAddress = null, bool disableExternalAudioSource = false)
            : base(false, false, false, bindAddress)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;
            _disableExternalAudioSource = disableExternalAudioSource;

            if (_audioOpts != null && _audioOpts.AudioCodecs != null &&
                _audioOpts.AudioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
            {
                throw new ApplicationException("Only PCMA, PCMU and G722 are supported for audio codec options.");
            }

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                //// RTP event support.
                //int clockRate = pcmu.GetClockRate();
                //SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                //rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                //rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List <SDPMediaFormat>();
                if (_audioOpts.AudioCodecs == null || _audioOpts.AudioCodecs.Count == 0)
                {
                    audioCapabilities.Add(pcmu);
                }
                else
                {
                    foreach (var codec in _audioOpts.AudioCodecs)
                    {
                        audioCapabilities.Add(new SDPMediaFormat(codec));
                    }
                }
                //audioCapabilities.Add(rtpEventFormat);

                if (audioCapabilities.Any(x => x.FormatCodec == SDPMediaFormatsEnum.G722))
                {
                    _g722Encode      = new G722Codec();
                    _g722EncodeState = new G722CodecState(64000, G722Flags.None);
                    _g722Decode      = new G722Codec();
                    _g722DecodeState = new G722CodecState(64000, G722Flags.None);
                }

                MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }