Example #1
        public unsafe void Roundtrip_Bitmap_719x405()
        {
            int width  = 719;
            int height = 405;

            using (Bitmap bmp = new Bitmap($"img/testpattern_{width}x{height}.bmp"))
            {
                byte[] bgr = BitmapToBuffer(bmp, out int stride);

                byte[] i420 = PixelConverter.BGRtoI420(bgr, width, height, stride);

                Assert.NotNull(i420);

                byte[] rtBgr = PixelConverter.I420toBGR(i420, width, height, out int rtStride);

                Assert.NotNull(rtBgr);

                fixed(byte *pBgr = rtBgr)
                {
                    Bitmap rtBmp = new Bitmap(width, height, rtStride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, new IntPtr(pBgr));

                    rtBmp.Save($"roundtrip_bitmap_{width}x{height}.bmp");
                    rtBmp.Dispose();
                }
            }
        }
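Each of these tests relies on a BitmapToBuffer helper that is not shown on this page. A minimal sketch of what it is assumed to do, copying the locked bitmap bytes and reporting the GDI+ stride (the project's real helper may differ):

        // Hypothetical helper assumed by the tests: copies the raw pixel bytes of a
        // System.Drawing.Bitmap into a managed array and returns the stride reported
        // by GDI+. Requires System.Drawing.Imaging and System.Runtime.InteropServices.
        private static byte[] BitmapToBuffer(Bitmap bmp, out int stride)
        {
            var rect = new Rectangle(0, 0, bmp.Width, bmp.Height);
            BitmapData data = bmp.LockBits(rect, ImageLockMode.ReadOnly, bmp.PixelFormat);
            try
            {
                stride = data.Stride;
                byte[] buffer = new byte[data.Stride * data.Height];
                Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
                return buffer;
            }
            finally
            {
                bmp.UnlockBits(data);
            }
        }

        // Overload used where the caller does not need the stride.
        private static byte[] BitmapToBuffer(Bitmap bmp) => BitmapToBuffer(bmp, out _);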
Example #2
        public unsafe void WrongSizeI420ToBGRTest()
        {
            int width  = 720;
            int height = 405;

            byte[] i420 = new byte[width * height * 3 / 2];
            Assert.Throws <ApplicationException>(() => PixelConverter.I420toBGR(i420, width, height, out _));
        }
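A plausible reading of why this throws (the exact validation inside PixelConverter is an assumption): width * height * 3 / 2 under-allocates when the height is odd, because the chroma planes round up to (height + 1) / 2 rows.

            // Assumed I420 sizing, with chroma rows rounded up for the odd height.
            int ySize      = 720 * 405;                          // 291,600
            int chromaSize = ((720 + 1) / 2) * ((405 + 1) / 2);  // 360 * 203 = 73,080
            int required   = ySize + 2 * chromaSize;             // 437,760 bytes
            int supplied   = 720 * 405 * 3 / 2;                  // 437,400 bytes, too small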
Example #3
        public unsafe void ConvertOddDimensionI420ToBGRTest()
        {
            int width  = 4;
            int height = 3;

            byte[] i420 = new byte[20];
            byte[] bgr  = PixelConverter.I420toBGR(i420, width, height, out _);

            Assert.NotNull(bgr);
            Assert.Equal(36, bgr.Length);
        }
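The same rounded-up I420 sizing accounts for both constants in this test: the odd 4x3 frame needs a 20 byte input buffer, and the 24-bit BGR output is 36 bytes.

            // Assumed I420 layout for the 4x3 frame above.
            int ySize      = 4 * 3;                        // 12 bytes
            int chromaSize = (4 / 2) * ((3 + 1) / 2);      // 2 * 2 = 4 bytes per chroma plane
            int i420Size   = ySize + 2 * chromaSize;       // 20 bytes, matches new byte[20]
            int bgrSize    = 4 * 3 * 3;                    // 36 bytes, matches the assertion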
Example #4
        public unsafe void ConvertKnownI420ToBGRTest()
        {
            int width  = 640;
            int height = 480;

            byte[] i420 = File.ReadAllBytes("img/ref-i420.yuv");
            byte[] bgr  = PixelConverter.I420toBGR(i420, width, height);

            fixed(byte *s = bgr)
            {
                Bitmap bmp = new Bitmap(width, height, width * 3, PixelFormat.Format24bppRgb, (IntPtr)s);

                bmp.Save("ConvertKnownI420ToBGRTest.bmp");
                bmp.Dispose();
            }
        }
Example #5
        public unsafe void RoundtripBgra32ToI420Test()
        {
            Bitmap bmp = new Bitmap("img/ref-bgra32.bmp");

            Assert.Equal(PixelFormat.Format32bppRgb, bmp.PixelFormat);

            byte[] buffer = BitmapToBuffer(bmp, out int stride);

            byte[] i420 = PixelConverter.RGBAtoI420(buffer, bmp.Width, bmp.Height, stride);
            byte[] bgr  = PixelConverter.I420toBGR(i420, bmp.Width, bmp.Height, out int rtStride);

            fixed(byte *s = bgr)
            {
                Bitmap roundTripBmp = new Bitmap(bmp.Width, bmp.Height, rtStride, PixelFormat.Format24bppRgb, (IntPtr)s);

                roundTripBmp.Save("RoundtripBgra32ToI420Test.bmp");
                roundTripBmp.Dispose();
            }

            bmp.Dispose();
        }
Example #6
        public unsafe void RoundtripBgr24ToI420Test()
        {
            Bitmap bmp = new Bitmap("img/ref-bgr24.bmp");

            // BGR formats get recognised as RGB but when rendered by WPF are treated as BGR.
            Assert.Equal(PixelFormat.Format24bppRgb, bmp.PixelFormat);

            byte[] buffer = BitmapToBuffer(bmp);

            byte[] i420 = PixelConverter.BGRtoI420(buffer, bmp.Width, bmp.Height);
            byte[] bgr  = PixelConverter.I420toBGR(i420, bmp.Width, bmp.Height);

            fixed(byte *s = bgr)
            {
                Bitmap roundTripBmp = new Bitmap(bmp.Width, bmp.Height, (int)bmp.Width * 3, PixelFormat.Format24bppRgb, (IntPtr)s);

                roundTripBmp.Save("RoundtripBgr24ToI420Test.bmp");
                roundTripBmp.Dispose();
            }

            bmp.Dispose();
        }
Example #7
        static async Task Main(string[] args)
        {
            Console.WriteLine("SIPSorcery Video Phone Command Line Demo");
            Console.WriteLine("Press ctrl-c to exit.");

            Log = AddConsoleLogger();
            ManualResetEvent exitMRE        = new ManualResetEvent(false);
            ManualResetEvent waitForCallMre = new ManualResetEvent(false);

            var parseResult = Parser.Default.ParseArguments <Options>(args);

            _options = (parseResult as Parsed <Options>)?.Value;

            if (parseResult.Tag != ParserResultType.NotParsed)
            {
                if (_options.ListCameras)
                {
                    #region List webcams.

                    var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();

                    if (webcams == null || webcams.Count == 0)
                    {
                        Console.WriteLine("No webcams were found.");
                    }
                    else
                    {
                        var index = 0;
                        foreach (var webcam in webcams)
                        {
                            Console.WriteLine($"{index}: \"{webcam.Name}\", use --cam={index}.");
                            index++;
                        }
                    }

                    #endregion
                }
                else if (_options.ListFormats != null)
                {
                    #region List webcam formats.

                    var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();

                    if (webcams == null || webcams.Count == 0)
                    {
                        Console.WriteLine("No webcams were found.");
                    }
                    else if (_options.ListFormats >= webcams.Count)
                    {
                        Console.WriteLine($"No webcam available for index {_options.ListFormats}.");
                    }
                    else
                    {
                        string webcamName = webcams[_options.ListFormats.Value].Name;
                        var    formats    = await WindowsVideoEndPoint.GetDeviceFrameFormats(webcamName);

                        Console.WriteLine($"Video frame formats for {webcamName}.");
                        foreach (var vidFmt in formats)
                        {
                            float  vidFps = vidFmt.MediaFrameFormat.FrameRate.Numerator / (float)vidFmt.MediaFrameFormat.FrameRate.Denominator;
                            string pixFmt = vidFmt.MediaFrameFormat.Subtype == WindowsVideoEndPoint.MF_I420_PIXEL_FORMAT ? "I420" : vidFmt.MediaFrameFormat.Subtype;
                            Console.WriteLine($"{vidFmt.Width}x{vidFmt.Height} {vidFps:0.##}fps {pixFmt}");
                        }
                    }

                    #endregion
                }
                else
                {
                    string webcamName = null;

                    if (_options.WebcamIndex != null)
                    {
                        var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();

                        if (webcams == null || webcams.Count == 0)
                        {
                            Console.WriteLine("No webcams were found.");
                            Application.Exit();
                        }
                        else if (webcams.Count < _options.WebcamIndex)
                        {
                            Console.WriteLine($"No webcam available for index {_options.WebcamIndex}.");
                            Application.Exit();
                        }
                        else
                        {
                            webcamName = webcams[_options.WebcamIndex.Value].Name;
                            Console.WriteLine($"Using webcam {webcamName}.");
                        }
                    }

                    _sipTransport = new SIPTransport();

                    if (string.IsNullOrEmpty(_options.CallDestination))
                    {
                        // We haven't been asked to place a call so we're listening.
                        IPAddress listenAddress  = (System.Net.Sockets.Socket.OSSupportsIPv6) ? IPAddress.IPv6Any : IPAddress.Any;
                        var       listenEndPoint = new IPEndPoint(listenAddress, SIP_PORT_DEFAULT);

                        try
                        {
                            SIPUDPChannel udpChannel = new SIPUDPChannel(listenEndPoint, true);
                            _sipTransport.AddSIPChannel(udpChannel);
                        }
                        catch (ApplicationException appExcp)
                        {
                            Console.WriteLine($"Failed to create UDP SIP channel on {listenEndPoint}, error {appExcp.Message}.");
                            SIPUDPChannel udpChannel = new SIPUDPChannel(new IPEndPoint(listenAddress, 0), true);
                            _sipTransport.AddSIPChannel(udpChannel);
                        }

                        var listeningEP = _sipTransport.GetSIPChannels().First().ListeningSIPEndPoint;
                        Console.WriteLine($"Listening for incoming call on {listeningEP}.");
                    }

                    EnableTraceLogs(_sipTransport);

                    // Open a window to display the video feed from the remote SIP party.
                    _form          = new Form();
                    _form.Text     = string.IsNullOrEmpty(_options.CallDestination) ? "Listener" : "Caller";
                    _form.AutoSize = true;
                    _form.BackgroundImageLayout = ImageLayout.Center;
                    _localVideoPicBox           = new PictureBox
                    {
                        Size     = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
                        Location = new Point(0, 0),
                        Visible  = true
                    };
                    _remoteVideoPicBox = new PictureBox
                    {
                        Size     = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
                        Location = new Point(0, VIDEO_FRAME_HEIGHT),
                        Visible  = true
                    };
                    _form.Controls.Add(_localVideoPicBox);
                    _form.Controls.Add(_remoteVideoPicBox);

                    var userAgent = new SIPUserAgent(_sipTransport, null, true);
                    userAgent.OnCallHungup += (dialog) => exitMRE.Set();

                    WindowsAudioEndPoint windowsAudioEndPoint = null;
                    if (!_options.NoAudio)
                    {
                        windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder());
                        windowsAudioEndPoint.RestrictFormats(x => x.Codec == AudioCodecsEnum.G722);
                    }

                    MediaEndPoints mediaEndPoints = null;

                    if (_options.TestPattern && _options.WebcamIndex == null)
                    {
                        var testPattern = new VideoTestPatternSource(new FFmpegVideoEncoder());
                        var decoderSink = new DecoderVideoSink(new FFmpegVideoEncoder());
                        //var decoderSink = new DecoderVideoSink(new VpxVideoEncoder());

                        testPattern.RestrictFormats(format => format.Codec == VIDEO_CODEC);
                        decoderSink.RestrictFormats(format => format.Codec == VIDEO_CODEC);

                        mediaEndPoints = new MediaEndPoints
                        {
                            AudioSink   = windowsAudioEndPoint,
                            AudioSource = windowsAudioEndPoint,
                            VideoSink   = decoderSink,
                            VideoSource = testPattern,
                        };
                    }
                    else
                    {
                        WindowsVideoEndPoint windowsVideoEndPoint = webcamName switch
                        {
                            null => new WindowsVideoEndPoint(new FFmpegVideoEncoder()),
                            _ => new WindowsVideoEndPoint(new FFmpegVideoEncoder(), webcamName),
                        };
                        windowsVideoEndPoint.RestrictFormats(format => format.Codec == VIDEO_CODEC);

                        mediaEndPoints = new MediaEndPoints
                        {
                            AudioSink   = windowsAudioEndPoint,
                            AudioSource = windowsAudioEndPoint,
                            VideoSink   = windowsVideoEndPoint,
                            VideoSource = windowsVideoEndPoint,
                        };
                    }

                    mediaEndPoints.VideoSource.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
                    {
                        if (_isFormActivated)
                        {
                            _form?.BeginInvoke(new Action(() =>
                            {
                                if (_form.Handle != IntPtr.Zero)
                                {
                                    int stride = width * 3;
                                    if (pixelFormat == VideoPixelFormatsEnum.I420)
                                    {
                                        sample = PixelConverter.I420toBGR(sample, width, height, out stride);
                                    }

                                    if (_localVideoPicBox.Width != width || _localVideoPicBox.Height != height)
                                    {
                                        Log.LogDebug($"Adjusting local video display from {_localVideoPicBox.Width}x{_localVideoPicBox.Height} to {width}x{height}.");
                                        _localVideoPicBox.Width  = width;
                                        _localVideoPicBox.Height = height;
                                    }

                                    unsafe
                                    {
                                        fixed(byte *s = sample)
                                        {
                                            System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                            _localVideoPicBox.Image        = bmpImage;
                                        }
                                    }
                                }
                            }));
                        }
                    };

                    Console.CancelKeyPress += delegate(object sender, ConsoleCancelEventArgs e)
                    {
                        e.Cancel = true;
                        Log.LogInformation("Exiting...");
                        waitForCallMre.Set();
                        exitMRE.Set();
                    };

                    if (string.IsNullOrEmpty(_options.CallDestination))
                    {
                        ActivateForm();

                        userAgent.OnIncomingCall += async(ua, req) =>
                        {
                            var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
                            voipMediaSession.AcceptRtpFromAny = true;
                            if (voipMediaSession.VideoLocalTrack != null)
                            {
                                voipMediaSession.VideoLocalTrack.MaximumBandwidth = MAXIMUM_VIDEO_BANDWIDTH;
                            }

                            var uas = userAgent.AcceptCall(req);
                            await userAgent.Answer(uas, voipMediaSession);

                            Console.WriteLine("Starting local video source...");
                            await mediaEndPoints.VideoSource.StartVideo().ConfigureAwait(false);

                            waitForCallMre.Set();
                        };

                        Console.WriteLine("Waiting for incoming call...");
                        waitForCallMre.WaitOne();
                    }
                    else
                    {
                        var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
                        voipMediaSession.AcceptRtpFromAny = true;
                        if (voipMediaSession.VideoLocalTrack != null)
                        {
                            voipMediaSession.VideoLocalTrack.MaximumBandwidth = MAXIMUM_VIDEO_BANDWIDTH;
                        }

                        ActivateForm();

                        Console.WriteLine("Starting local video source...");
                        await mediaEndPoints.VideoSource.StartVideo().ConfigureAwait(false);

                        // Place the call and wait for the result.
                        Task <bool> callTask = userAgent.Call(_options.CallDestination, null, null, voipMediaSession);
                        callTask.Wait(CALL_TIMEOUT_SECONDS * 1000);
                    }

                    if (userAgent.IsCallActive)
                    {
                        Log.LogInformation("Call attempt successful.");
                        mediaEndPoints.VideoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
                        {
                            if (_isFormActivated)
                            {
                                _form?.BeginInvoke(new Action(() =>
                                {
                                    if (_form.Handle != IntPtr.Zero)
                                    {
                                        unsafe
                                        {
                                            if (_remoteVideoPicBox.Width != (int)width || _remoteVideoPicBox.Height != (int)height)
                                            {
                                                Log.LogDebug($"Adjusting remote video display from {_remoteVideoPicBox.Width}x{_remoteVideoPicBox.Height} to {width}x{height}.");
                                                _remoteVideoPicBox.Width  = (int)width;
                                                _remoteVideoPicBox.Height = (int)height;
                                            }

                                            fixed(byte *s = bmp)
                                            {
                                                System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                                _remoteVideoPicBox.Image       = bmpImage;
                                            }
                                        }
                                    }
                                }));
                            }
                        };
                    }
                    else
                    {
                        Log.LogWarning("Call attempt failed.");
                        Console.WriteLine("Press ctrl-c to exit.");
                    }

                    exitMRE.WaitOne();

                    if (userAgent.IsCallActive)
                    {
                        Log.LogInformation("Hanging up.");
                        userAgent.Hangup();
                    }

                    Task.Delay(1000).Wait();

                    // Clean up.
                    if (_form.Handle != IntPtr.Zero)
                    {
                        _form.BeginInvoke(new Action(() => _form.Close()));
                    }
                    _sipTransport.Shutdown();
                }
            }
        }
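The command line switches above come from an Options class parsed with the CommandLineParser library, which is not shown here. The sketch below is a plausible reconstruction inferred from the properties the demo reads (ListCameras, ListFormats, WebcamIndex, CallDestination, NoAudio, TestPattern); apart from the --cam switch mentioned in the demo's own output text, the option names and help text are assumptions.

        // Hypothetical Options class inferred from Main above; requires the
        // CommandLineParser package (CommandLine namespace). Only --cam is taken
        // from the demo's output text, the other switch names are guesses.
        public class Options
        {
            [Option("lc", HelpText = "List the available webcams.")]
            public bool ListCameras { get; set; }

            [Option("lf", HelpText = "List the frame formats of the webcam at this index.")]
            public int? ListFormats { get; set; }

            [Option("cam", HelpText = "Index of the webcam to use, see --lc for the list.")]
            public int? WebcamIndex { get; set; }

            [Option('d', "dst", HelpText = "SIP destination to call; omit to listen for an incoming call.")]
            public string CallDestination { get; set; }

            [Option("noaudio", HelpText = "Disable the Windows audio end point.")]
            public bool NoAudio { get; set; }

            [Option("tp", HelpText = "Send a video test pattern instead of a webcam feed.")]
            public bool TestPattern { get; set; }
        }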
Example #8
        public unsafe IEnumerable <VideoSample> DecodeVideo(byte[] frame, VideoPixelFormatsEnum pixelFormat, VideoCodecsEnum codec)
        {
            lock (_decoderLock)
            {
                if (_vp8Decoder == null)
                {
                    _vp8Decoder = new vpx_codec_ctx_t();
                    vpx_codec_iface_t   algo = vp8_dx.vpx_codec_vp8_dx();
                    vpx_codec_dec_cfg_t cfg  = new vpx_codec_dec_cfg_t {
                        threads = 1
                    };
                    vpx_codec_err_t res = vpx_decoder.vpx_codec_dec_init(_vp8Decoder, algo, cfg, 0);
                }

                //logger.LogDebug($"Attempting to decode {frame.Length} bytes.");
                //Console.WriteLine(frame.HexStr());

                fixed(byte *pFrame = frame)
                {
                    var result = vpx_decoder.vpx_codec_decode(_vp8Decoder, pFrame, (uint)frame.Length, IntPtr.Zero, 0);

                    //logger.LogDebug($"VP8 decode result {result}.");
                    if (result != vpx_codec_err_t.VPX_CODEC_OK)
                    {
                        logger.LogWarning($"VP8 decode of video sample failed with {result}.");
                    }
                }

                IntPtr iter = IntPtr.Zero;
                var    img  = vpx_decoder.vpx_codec_get_frame(_vp8Decoder, iter);

                if (img == null)
                {
                    logger.LogWarning("Image could not be acquired from VP8 decoder stage.");
                }
                else
                {
                    int dwidth  = (int)img.d_w;
                    int dheight = (int)img.d_h;
                    int sz      = dwidth * dheight;

                    var yPlane = img.planes[0];
                    var uPlane = img.planes[1];
                    var vPlane = img.planes[2];

                    byte[] decodedBuffer = new byte[dwidth * dheight * 3 / 2];

                    for (uint row = 0; row < dheight; row++)
                    {
                        Marshal.Copy((IntPtr)(yPlane + row * img.stride[0]), decodedBuffer, (int)(row * dwidth), (int)dwidth);

                        if (row < dheight / 2)
                        {
                            Marshal.Copy((IntPtr)(uPlane + row * img.stride[1]), decodedBuffer, (int)(sz + row * (dwidth / 2)), (int)dwidth / 2);
                            Marshal.Copy((IntPtr)(vPlane + row * img.stride[2]), decodedBuffer, (int)(sz + sz / 4 + row * (dwidth / 2)), (int)dwidth / 2);
                        }
                    }

                    byte[] rgb = PixelConverter.I420toBGR(decodedBuffer, dwidth, dheight, out _);
                    return(new List <VideoSample> {
                        new VideoSample {
                            Width = img.d_w, Height = img.d_h, Sample = rgb
                        }
                    });
                }

                return(new List <VideoSample>());
            }
        }
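The copy loop above flattens the decoder's stride-padded planes into one contiguous I420 buffer before handing it to PixelConverter.I420toBGR. A small illustrative helper making the assumed target layout explicit (even dimensions assumed, as in the loop's sz / 4 arithmetic):

        // Illustrative only: offsets of the Y, U and V planes inside the contiguous
        // I420 buffer assembled by the loop above.
        private static (int yOffset, int uOffset, int vOffset, int length) GetI420Layout(int width, int height)
        {
            int ySize      = width * height;
            int chromaSize = ySize / 4;   // (width / 2) * (height / 2) for even dimensions
            return (0, ySize, ySize + chromaSize, ySize + 2 * chromaSize);
        }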
Example #9
        /// <summary>
        /// Consumers subscribing to the <seealso cref="OnVideoSourceRawSample"/> event will most likely want bitmap samples.
        /// This method takes the I420 buffer for the test pattern frame, converts it to BGR and fires the event.
        /// </summary>
        /// <param name="i420Buffer">The I420 buffer representing the test pattern.</param>
        private void GenerateRawSample(int width, int height, byte[] i420Buffer)
        {
            var bgr = PixelConverter.I420toBGR(i420Buffer, width, height, out _);

            OnVideoSourceRawSample?.Invoke((uint)_frameSpacing, width, height, bgr, VideoPixelFormatsEnum.Bgr);
        }
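A minimal consumer of this event might look like the sketch below, assuming a hypothetical test pattern source instance and a WinForms PictureBox named picBox; wrapping the BGR bytes in a Bitmap mirrors the handler used in the command line demo above.

        // Hypothetical subscriber: displays the BGR sample fired by GenerateRawSample.
        testPatternSource.OnVideoSourceRawSample += (uint duration, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
        {
            if (pixelFormat == VideoPixelFormatsEnum.Bgr)
            {
                unsafe
                {
                    fixed (byte* p = sample)
                    {
                        // 24-bit BGR, so the stride is assumed to be width * 3.
                        picBox.Image = new Bitmap(width, height, width * 3,
                            System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)p);
                    }
                }
            }
        };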