public static byte[] ToI420(int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            byte[] i420Buffer = null;

            switch (pixelFormat)
            {
            case VideoPixelFormatsEnum.I420:
                // No conversion needed.
                i420Buffer = sample;
                break;

            case VideoPixelFormatsEnum.Bgra:
                i420Buffer = PixelConverter.RGBAtoI420(sample, width, height);
                break;

            case VideoPixelFormatsEnum.Bgr:
                i420Buffer = PixelConverter.BGRtoI420(sample, width, height);
                break;

            case VideoPixelFormatsEnum.Rgb:
                i420Buffer = PixelConverter.RGBtoI420(sample, width, height);
                break;

            default:
                throw new ApplicationException($"Pixel format {pixelFormat} does not have an I420 conversion implemented.");
            }

            return(i420Buffer);
        }
        /// <summary>
        /// Copies pixel data from a raw sample buffer to a software bitmap.
        /// </summary>
        /// <param name="buffer">The raw pixel buffer to copy from.</param>
        /// <param name="sbmp">The software bitmap to copy the data to.</param>
        /// <param name="pixelFormat">The pixel format of the source buffer.</param>
        private void SetBitmapData(byte[] buffer, SoftwareBitmap sbmp, VideoPixelFormatsEnum pixelFormat)
        {
            using (BitmapBuffer bmpBuffer = sbmp.LockBuffer(BitmapBufferAccessMode.Write))
            {
                using (var reference = bmpBuffer.CreateReference())
                {
                    unsafe
                    {
                        byte *dataInBytes;
                        uint  capacity;
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);
                        int posn = 0;

                        // Fill in the RGBA plane.
                        BitmapPlaneDescription bufferLayout = bmpBuffer.GetPlaneDescription(0);
                        for (int i = 0; i < bufferLayout.Height; i++)
                        {
                            for (int j = 0; j < bufferLayout.Width; j++)
                            {
                                // NOTE: As with System.Drawing.Bitmap, pixel formats with "rgb" in their name,
                                // such as BitmapPixelFormat.Rgba8, actually use BGR byte ordering in the buffer.
                                // There are many StackOverflow questions on this, e.g.
                                // https://stackoverflow.com/questions/5106505/converting-gdi-pixelformat-to-wpf-pixelformat.
                                // Note the swap of the Blue and Red bytes below.
                                if (pixelFormat == VideoPixelFormatsEnum.Rgb)
                                {
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = (byte)255;
                                }
                                else if (pixelFormat == VideoPixelFormatsEnum.Bgr)
                                {
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = (byte)255;
                                }
                                //if (pixelFormat == VideoPixelFormatsEnum.Rgba)
                                //{
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = buffer[posn++];
                                //}
                                else if (pixelFormat == VideoPixelFormatsEnum.Bgra)
                                {
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = buffer[posn++];
                                }
                            }
                        }
                    }
                }
            }
        }
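The GetBuffer call above relies on the WinRT IMemoryBufferByteAccess COM interface, which has no managed projection and must be declared by hand. The standard declaration, as given in the Windows SDK documentation:

    [ComImport]
    [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    unsafe interface IMemoryBufferByteAccess
    {
        // Returns a raw pointer to the underlying buffer and its size in bytes.
        void GetBuffer(out byte* buffer, out uint capacity);
    }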
Example No. 3
    private void UpdateTexture(byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat)
    {
        //Buffer.BlockCopy(bmp, 0, _textureBytes, 0, bmp.Length);

        int posn = 0;

        // Walk the 24bpp sample back to front: reading three bytes in reverse swaps the
        // R and B channels and, as a side effect, rotates the image 180 degrees. An
        // opaque alpha byte is appended to produce a 32bpp RGBA texture.
        for (int i = bmp.Length - 1; i > 0; i -= 3)
        {
            _textureBytes[posn++] = bmp[i];
            _textureBytes[posn++] = bmp[i - 1];
            _textureBytes[posn++] = bmp[i - 2];
            _textureBytes[posn++] = 255;
        }

        //SetDummyTexture(width, height);

        _newFrameReady = true;
    }
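If the 180 degree rotation is not wanted, a stride-aware forward copy performs only the channel swap. A sketch under the same assumptions (24bpp BGR input, _textureBytes pre-allocated to width * height * 4 bytes):

    private void UpdateTextureRowWise(byte[] bmp, uint width, uint height, int stride)
    {
        int posn = 0;

        for (int row = 0; row < height; row++)
        {
            for (int col = 0; col < width; col++)
            {
                int src = row * stride + col * 3;
                _textureBytes[posn++] = bmp[src + 2]; // R (from BGR input).
                _textureBytes[posn++] = bmp[src + 1]; // G.
                _textureBytes[posn++] = bmp[src];     // B.
                _textureBytes[posn++] = 255;          // Opaque alpha.
            }
        }

        _newFrameReady = true;
    }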
        public static byte[] ToI420(int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            switch (pixelFormat)
            {
            case VideoPixelFormatsEnum.I420:
                return(sample);

            case VideoPixelFormatsEnum.Bgra:
                return(PixelConverter.RGBAtoI420(sample, width, height, width * 4));

            case VideoPixelFormatsEnum.Bgr:
                return(PixelConverter.BGRtoI420(sample, width, height, width * 3));

            case VideoPixelFormatsEnum.Rgb:
                return(PixelConverter.RGBtoI420(sample, width, height, width * 3));

            default:
                throw new ApplicationException($"Pixel format {pixelFormat} does not have an I420 conversion implemented.");
            }
        }
Example No. 5
        public byte[] EncodeVideo(int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat, VideoCodecsEnum codec)
        {
            //lock (_encoderLock)
            //{
            //    if (_vp8Encoder == null)
            //    {
            //        _vp8Encoder = new Vp8Codec();
            //        _vp8Encoder.InitialiseEncoder((uint)width, (uint)height);
            //    }

            //    var i420Buffer = PixelConverter.ToI420(width, height, sample, pixelFormat);
            //    var encodedBuffer = _vp8Encoder.Encode(i420Buffer, _forceKeyFrame);

            //    if (_forceKeyFrame)
            //    {
            //        _forceKeyFrame = false;
            //    }

            //    return encodedBuffer;
            //}

            throw new NotImplementedException("TODO: The encoder has not yet been ported.");
        }
Example No. 6
        public byte[] EncodeVideo(int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat, VideoCodecsEnum codec)
        {
            //lock (_encoderLock)
            //{
            //    if (_vp8Encoder == null)
            //    {
            //        _vp8Encoder = new Vp8Codec();
            //        _vp8Encoder.InitialiseEncoder((uint)width, (uint)height);
            //    }

            //    var i420Buffer = PixelConverter.ToI420(width, height, sample, pixelFormat);
            //    var encodedBuffer = _vp8Encoder.Encode(i420Buffer, _forceKeyFrame);

            //    if (_forceKeyFrame)
            //    {
            //        _forceKeyFrame = false;
            //    }

            //    return encodedBuffer;
            //}

            return(null);
        }
Example No. 7
        private static void MeasureTestPatternSourceFrameRate(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            if (_startTime == DateTime.MinValue)
            {
                _startTime = DateTime.Now;
            }

            _frameCount++;

            if (DateTime.Now.Subtract(_startTime).TotalSeconds > 5)
            {
                double fps = _frameCount / DateTime.Now.Subtract(_startTime).TotalSeconds;
                Console.WriteLine($"Frame rate {fps:0.##}fps.");
                _startTime  = DateTime.Now;
                _frameCount = 0;
            }
        }
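A hedged usage sketch, assuming a SIPSorcery VideoTestPatternSource whose OnVideoSourceRawSample event matches this delegate signature:

        var testPatternSource = new VideoTestPatternSource();
        testPatternSource.OnVideoSourceRawSample += MeasureTestPatternSourceFrameRate;
        await testPatternSource.StartVideo();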
Example No. 8
 public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
 throw new NotImplementedException("The test pattern video source does not offer any encoding services for external sources.");
Example No. 9
        /// <summary>
        /// Called when the active SIP client has a bitmap representing the remote video stream
        /// ready.
        /// </summary>
        /// <param name="sample">The bitmap sample.</param>
        /// <param name="width">The bitmap width.</param>
        /// <param name="height">The bitmap height.</param>
        /// <param name="stride">The bitmap stride.</param>
        /// <param name="pixelFormat">The pixel format of the bitmap sample.</param>
        /// <param name="wBmp">The writeable bitmap to copy the sample into.</param>
        /// <param name="dst">The image control that displays the bitmap.</param>
        private void VideoSampleReady(byte[] sample, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat, WriteableBitmap wBmp, System.Windows.Controls.Image dst)
        {
            if (sample != null && sample.Length > 0)
            {
                this.Dispatcher.BeginInvoke(new Action(() =>
                {
                    var bmpPixelFormat = PixelFormats.Bgr24;
                    switch (pixelFormat)
                    {
                    case VideoPixelFormatsEnum.Bgr:
                        bmpPixelFormat = PixelFormats.Bgr24;
                        break;

                    case VideoPixelFormatsEnum.Bgra:
                        bmpPixelFormat = PixelFormats.Bgra32;
                        break;

                    case VideoPixelFormatsEnum.Rgb:
                        bmpPixelFormat = PixelFormats.Rgb24;
                        break;

                    default:
                        bmpPixelFormat = PixelFormats.Bgr24;
                        break;
                    }

                    if (wBmp == null || wBmp.Width != width || wBmp.Height != height)
                    {
                        wBmp = new WriteableBitmap(
                            (int)width,
                            (int)height,
                            96,
                            96,
                            bmpPixelFormat,
                            null);

                        dst.Source = wBmp;
                    }

                    // Reserve the back buffer for updates.
                    wBmp.Lock();

                    Marshal.Copy(sample, 0, wBmp.BackBuffer, sample.Length);

                    // Specify the area of the bitmap that changed.
                    wBmp.AddDirtyRect(new Int32Rect(0, 0, (int)width, (int)height));

                    // Release the back buffer and make it available for display.
                    wBmp.Unlock();
                }), System.Windows.Threading.DispatcherPriority.Normal);
            }
        }
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            if (!_isClosed)
            {
                if (OnVideoSourceEncodedSample != null)
                {
                    var encodedBuffer = _vp8Codec.EncodeVideo(width, height, sample, pixelFormat, VideoCodecsEnum.VP8);

                    if (encodedBuffer != null)
                    {
                        uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
                        if (fps == 0)
                        {
                            // Guard against a divide by zero for frame durations longer than one second.
                            fps = 1;
                        }
                        uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                        OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                    }
                }
            }
        }
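With the conventional 90 kHz RTP video clock the timestamp arithmetic works out as follows: a 33 ms frame gives fps = 1000 / 33 = 30 (integer division), so durationRtpTS = 90000 / 30 = 3000 timestamp units per frame. A standalone sketch of the same calculation (the constant values are assumptions matching common defaults):

        // Sketch: RTP timestamp increment for a frame of the given duration.
        public static uint GetRtpTimestampDuration(uint durationMilliseconds)
        {
            const uint VIDEO_SAMPLING_RATE = 90000;    // Conventional RTP clock rate for video.
            const uint DEFAULT_FRAMES_PER_SECOND = 30; // Assumed fallback frame rate.

            uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
            if (fps == 0)
            {
                // Frame durations over one second would otherwise divide by zero.
                fps = 1;
            }

            return VIDEO_SAMPLING_RATE / fps;
        }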
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            if (!_isClosed)
            {
                if (_vp8Encoder == null)
                {
                    _vp8Encoder = new Vp8Codec();
                    _vp8Encoder.InitialiseEncoder((uint)width, (uint)height);
                }

                if (_encodeBmp == null)
                {
                    _encodeBmp = new SoftwareBitmap(BitmapPixelFormat.Rgba8, width, height);
                }

                if (OnVideoSourceEncodedSample != null)
                {
                    //byte[] i420Buffer = PixelConverter.RGBtoI420(rgb24Sample, width, height);
                    //byte[] encodedBuffer = _vp8Encoder.Encode(i420Buffer, _forceKeyFrame);

                    SetBitmapData(sample, _encodeBmp, pixelFormat);

                    var    nv12bmp    = SoftwareBitmap.Convert(_encodeBmp, BitmapPixelFormat.Nv12);
                    byte[] nv12Buffer = null;

                    using (BitmapBuffer buffer = nv12bmp.LockBuffer(BitmapBufferAccessMode.Read))
                    {
                        using (var reference = buffer.CreateReference())
                        {
                            unsafe
                            {
                                byte *dataInBytes;
                                uint  capacity;
                                ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);

                                nv12Buffer = new byte[capacity];
                                Marshal.Copy((IntPtr)dataInBytes, nv12Buffer, 0, (int)capacity);
                            }
                        }
                    }

                    byte[] encodedBuffer = _vp8Encoder.Encode(nv12Buffer, _forceKeyFrame);

                    if (encodedBuffer != null)
                    {
                        //Console.WriteLine($"encoded buffer: {encodedBuffer.HexStr()}");
                        uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
                        if (fps == 0)
                        {
                            // Guard against a divide by zero for frame durations longer than one second.
                            fps = 1;
                        }
                        uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                        OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                    }

                    if (_forceKeyFrame)
                    {
                        _forceKeyFrame = false;
                    }
                }
            }
        }
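The SoftwareBitmap.Convert call produces NV12, a semi-planar layout: a full resolution Y plane followed by a single interleaved UV plane at half resolution, width * height * 3 / 2 bytes in total. A sketch of the plane offsets, assuming even dimensions (the helper is illustrative):

        // NV12 layout: [Y plane][interleaved UV plane].
        public static (int uvOffset, int totalSize) GetNv12Layout(int width, int height)
        {
            int ySize  = width * height;     // Full resolution luma plane.
            int uvSize = width * height / 2; // Interleaved half resolution chroma.
            return (ySize, ySize + uvSize);
        }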
Example No. 12
        public unsafe IEnumerable <VideoSample> DecodeVideo(byte[] frame, VideoPixelFormatsEnum pixelFormat, VideoCodecsEnum codec)
        {
            lock (_decoderLock)
            {
                if (_vp8Decoder == null)
                {
                    _vp8Decoder = new vpx_codec_ctx_t();
                    vpx_codec_iface_t   algo = vp8_dx.vpx_codec_vp8_dx();
                    vpx_codec_dec_cfg_t cfg  = new vpx_codec_dec_cfg_t {
                        threads = 1
                    };
                    vpx_codec_err_t res = vpx_decoder.vpx_codec_dec_init(_vp8Decoder, algo, cfg, 0);
                    if (res != vpx_codec_err_t.VPX_CODEC_OK)
                    {
                        logger.LogWarning($"VP8 decoder initialisation failed with {res}.");
                    }
                }

                //logger.LogDebug($"Attempting to decode {frame.Length} bytes.");
                //Console.WriteLine(frame.HexStr());

                fixed(byte *pFrame = frame)
                {
                    var result = vpx_decoder.vpx_codec_decode(_vp8Decoder, pFrame, (uint)frame.Length, IntPtr.Zero, 0);

                    //logger.LogDebug($"VP8 decode result {result}.");
                    if (result != vpx_codec_err_t.VPX_CODEC_OK)
                    {
                        logger.LogWarning($"VP8 decode of video sample failed with {result}.");
                    }
                }

                IntPtr iter = IntPtr.Zero;
                var    img  = vpx_decoder.vpx_codec_get_frame(_vp8Decoder, iter);

                if (img == null)
                {
                    logger.LogWarning("Image could not be acquired from VP8 decoder stage.");
                }
                else
                {
                    int dwidth  = (int)img.d_w;
                    int dheight = (int)img.d_h;
                    int sz      = dwidth * dheight;

                    var yPlane = img.planes[0];
                    var uPlane = img.planes[1];
                    var vPlane = img.planes[2];

                    byte[] decodedBuffer = new byte[dwidth * dheight * 3 / 2];

                    for (uint row = 0; row < dheight; row++)
                    {
                        Marshal.Copy((IntPtr)(yPlane + row * img.stride[0]), decodedBuffer, (int)(row * dwidth), (int)dwidth);

                        if (row < dheight / 2)
                        {
                            Marshal.Copy((IntPtr)(uPlane + row * img.stride[1]), decodedBuffer, (int)(sz + row * (dwidth / 2)), (int)dwidth / 2);
                            Marshal.Copy((IntPtr)(vPlane + row * img.stride[2]), decodedBuffer, (int)(sz + sz / 4 + row * (dwidth / 2)), (int)dwidth / 2);
                        }
                    }

                    byte[] bgr = PixelConverter.I420toBGR(decodedBuffer, dwidth, dheight, out _);
                    return(new List <VideoSample> {
                        new VideoSample {
                            Width = img.d_w, Height = img.d_h, Sample = bgr
                        }
                        }
                    });
                }

                return(new List <VideoSample>());
            }
        }
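A hedged usage sketch for the decoder above, assuming a decoder instance exposing DecodeVideo and a received VP8 frame; the OnFrameReady callback is hypothetical:

        foreach (var decoded in decoder.DecodeVideo(vp8Frame, VideoPixelFormatsEnum.Bgr, VideoCodecsEnum.VP8))
        {
            // decoded.Sample holds a BGR24 buffer of decoded.Width x decoded.Height pixels.
            OnFrameReady?.Invoke(decoded.Sample, decoded.Width, decoded.Height);
        }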
Example No. 13
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            //if (!_isClosed)
            //{
            //    if (OnVideoSourceEncodedSample != null)
            //    {
            //        uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : Helper.DEFAULT_VIDEO_FRAME_RATE;
            //        if(fps == 0)
            //        {
            //            fps = 1;
            //        }

            //        int stride = (pixelFormat == VideoPixelFormatsEnum.Bgra) ? 4 * width : 3 * width;
            //        var i420Buffer = PixelConverter.ToI420(width, height, stride, sample, pixelFormat);
            //        byte[]? encodedBuffer = _ffmpegEncoder.Encode(FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec), i420Buffer, width, height, (int)fps, _forceKeyFrame);

            //        if (encodedBuffer != null)
            //        {
            //            //Console.WriteLine($"encoded buffer: {encodedBuffer.HexStr()}");
            //            uint durationRtpTS = Helper.VIDEO_SAMPLING_RATE / fps;

            //            // Note the event handler can be removed while the encoding is in progress.
            //            OnVideoSourceEncodedSample?.Invoke(durationRtpTS, encodedBuffer);
            //        }

            //        if (_forceKeyFrame)
            //        {
            //            _forceKeyFrame = false;
            //        }
            //    }
            //}
        }
 public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
 throw new ApplicationException("The Windows Video End Point does not support external samples. Use the video end point from SIPSorceryMedia.Encoders.");
 public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) => throw new NotImplementedException();