Example #1
0
        /// <summary>
        /// Creates a framer for the supplied video codec.
        /// </summary>
        /// <param name="codec">The codec of the frames to be framed. Only VP8 is supported.</param>
        /// <exception cref="NotSupportedException">Thrown for any codec other than VP8.</exception>
        public RtpVideoFramer(VideoCodecsEnum codec)
        {
            if (codec == VideoCodecsEnum.VP8)
            {
                _codec = codec;
            }
            else
            {
                throw new NotSupportedException("The RTP video framer currently only understands VP8 encoded frames.");
            }
        }
        /// <summary>
        /// Selects the video format for this sink. Only VP8 is accepted.
        /// </summary>
        /// <param name="videoFormat">The requested sink format.</param>
        /// <exception cref="ApplicationException">Thrown if the format is not VP8.</exception>
        public void SetVideoSinkFormat(VideoCodecsEnum videoFormat)
        {
            if (videoFormat == VideoCodecsEnum.VP8)
            {
                _selectedSinkFormat = videoFormat;
                return;
            }

            throw new ApplicationException($"The Windows Video Sink End Point does not support video codec {videoFormat}.");
        }
Example #3
0
        /// <summary>
        /// Creates a framer for the supplied video codec. VP8 and H264 are supported;
        /// for H264 a depacketiser is also created.
        /// </summary>
        /// <param name="codec">The codec of the frames to be framed.</param>
        /// <exception cref="NotSupportedException">Thrown for codecs other than VP8 or H264.</exception>
        public RtpVideoFramer(VideoCodecsEnum codec)
        {
            switch (codec)
            {
            case VideoCodecsEnum.VP8:
                _codec = codec;
                break;

            case VideoCodecsEnum.H264:
                _codec = codec;
                // H264 payloads need reassembly before a full frame is available.
                _h264Depacketiser = new H264Depacketiser();
                break;

            default:
                throw new NotSupportedException("The RTP video framer currently only understands H264 and VP8 encoded frames.");
            }
        }
Example #4
0
        /// <summary>
        /// Maps a video codec enum value to the corresponding FFmpeg codec ID.
        /// </summary>
        /// <param name="videoCodec">The codec to map. Only VP8 and H264 are supported.</param>
        /// <returns>The matching FFmpeg <c>AVCodecID</c>.</returns>
        /// <exception cref="ApplicationException">Thrown for unsupported codecs.</exception>
        public static AVCodecID GetAVCodecID(VideoCodecsEnum videoCodec)
        {
            switch (videoCodec)
            {
            case VideoCodecsEnum.VP8:
                return AVCodecID.AV_CODEC_ID_VP8;

            case VideoCodecsEnum.H264:
                return AVCodecID.AV_CODEC_ID_H264;

            default:
                throw new ApplicationException($"FFmpeg video source, selected video codec {videoCodec} is not supported.");
            }
        }
 /// <summary>
 /// Creates a new video format based on a well known codec. The codec's enum name
 /// is used as the format name in the chained constructor.
 /// </summary>
 /// <param name="codec">The well known codec for the format.</param>
 /// <param name="formatID">The identifier for the format.</param>
 /// <param name="clockRate">Optional clock rate; defaults to DEFAULT_CLOCK_RATE.</param>
 /// <param name="parameters">Optional format parameters string; may be null.</param>
 public VideoFormat(VideoCodecsEnum codec, int formatID, int clockRate = DEFAULT_CLOCK_RATE, string parameters = null)
     : this(formatID, codec.ToString(), clockRate, parameters)
 {
 }
Example #6
0
        /// <summary>
        /// Interactive console entry point. Lets the user pick a video source (camera,
        /// monitor or file), starts it and renders its frames until Q/Enter/Escape is
        /// pressed.
        /// </summary>
        static void Main(string[] args)
        {
            VideoCodecsEnum VideoCodec = VideoCodecsEnum.H264;
            IVideoSource    videoSource;

            // Initialise FFmpeg libraries
            FFmpegInit.Initialise(FfmpegLogLevelEnum.AV_LOG_FATAL, LIB_PATH);

            // Get cameras and monitors
            List <Camera>  cameras  = FFmpegCameraManager.GetCameraDevices();
            List <Monitor> monitors = FFmpegMonitorManager.GetMonitorDevices();

            char keyChar = PromptForSourceType(cameras, monitors);

            // Do we manage a camera ?
            if (keyChar == 'c')
            {
                // Only prompt when there is an actual choice to make.
                int cameraIndex = cameras.Count > 1
                    ? PromptForDeviceIndex("camera", cameras.ConvertAll(c => $"{c.Name} "))
                    : 0;
                var selectedCamera = cameras[cameraIndex];
                // Direct cast instead of 'as': an invalid cast fails here with
                // InvalidCastException rather than as a NullReferenceException later.
                videoSource = (IVideoSource)new SIPSorceryMedia.FFmpeg.FFmpegCameraSource(selectedCamera.Path);
            }
            // Do we manage a Monitor ?
            else if (keyChar == 'm')
            {
                int monitorIndex = monitors.Count > 1
                    ? PromptForDeviceIndex("Monitor", monitors.ConvertAll(m => $"{m.Name} {(m.Primary ? " PRIMARY" : "")}"))
                    : 0;
                var selectedMonitor = monitors[monitorIndex];
                videoSource = (IVideoSource)new SIPSorceryMedia.FFmpeg.FFmpegScreenSource(selectedMonitor.Path, selectedMonitor.Rect, 20);
            }
            // Do we manage a File ?
            else
            {
                videoSource = (IVideoSource)new SIPSorceryMedia.FFmpeg.FFmpegFileSource(VIDEO_FILE_PATH, false, null, true);
            }

            // Create object used to display video in Ascii
            asciiFrame = new AsciiFrame();

            videoSource.RestrictFormats(x => x.Codec == VideoCodec);
            videoSource.SetVideoSourceFormat(videoSource.GetVideoSourceFormats().Find(x => x.Codec == VideoCodec));
            videoSource.OnVideoSourceRawSampleFaster += FileSource_OnVideoSourceRawSampleFaster;
            videoSource.StartVideo();

            // Block until one of the exit keys is pressed.
            for (var loop = true; loop;)
            {
                var cki = Console.ReadKey(true);
                switch (cki.Key)
                {
                case ConsoleKey.Q:
                case ConsoleKey.Enter:
                case ConsoleKey.Escape:
                    Console.CursorVisible = true;
                    loop = false;
                    break;
                }
            }
        }

        /// <summary>
        /// Shows the source-type menu (camera/monitor/file) until a valid key is pressed.
        /// Camera and monitor options are only offered when devices were detected.
        /// </summary>
        /// <returns>'c' for camera, 'm' for monitor or 'f' for file.</returns>
        private static char PromptForSourceType(List<Camera> cameras, List<Monitor> monitors)
        {
            while (true)
            {
                Console.Clear();
                if (!(cameras?.Count > 0))
                {
                    Console.WriteLine("\nNo Camera found ...");
                }
                if (!(monitors?.Count > 0))
                {
                    Console.WriteLine("\nNo Monitor found ...");
                }

                Console.WriteLine("\nWhat do you want to use ?");
                if (cameras?.Count > 0)
                {
                    Console.Write("\n [c] - Camera ");
                }
                if (monitors?.Count > 0)
                {
                    Console.Write("\n [m] - Monitor ");
                }
                Console.Write($"\n [f] - File - Path:[{VIDEO_FILE_PATH}]");

                Console.WriteLine("\n");
                Console.Out.Flush();

                var keyConsole = Console.ReadKey();
                if (((keyConsole.KeyChar == 'c') && (cameras?.Count > 0)) ||
                    ((keyConsole.KeyChar == 'm') && (monitors?.Count > 0)) ||
                    (keyConsole.KeyChar == 'f'))
                {
                    return keyConsole.KeyChar;
                }
            }
        }

        /// <summary>
        /// Prompts the user to select one entry, by a single digit key press, from a
        /// list of device labels. Loops until a digit within range is entered.
        /// </summary>
        /// <param name="deviceKind">The word shown in the prompt, e.g. "camera".</param>
        /// <param name="labels">Display labels, one per selectable device.</param>
        /// <returns>The zero-based index of the chosen entry.</returns>
        private static int PromptForDeviceIndex(string deviceKind, List<string> labels)
        {
            while (true)
            {
                Console.Clear();
                Console.WriteLine($"\nWhich {deviceKind} do you want to use:");
                for (int index = 0; index < labels.Count; index++)
                {
                    Console.Write($"\n [{index}] - {labels[index]}");
                }
                Console.WriteLine("\n");
                Console.Out.Flush();

                var keyConsole = Console.ReadKey();
                if (int.TryParse("" + keyConsole.KeyChar, out int keyValue) && keyValue < labels.Count && keyValue >= 0)
                {
                    return keyValue;
                }
            }
        }
Example #7
0
 /// <summary>
 /// This source can only supply raw RGB bitmap samples.
 /// </summary>
 /// <param name="videoFormat">Ignored; no encoded format can be selected on this source.</param>
 /// <exception cref="NotImplementedException">Always thrown.</exception>
 public void SetVideoSourceFormat(VideoCodecsEnum videoFormat)
 {
     // Carry the reason in the exception so callers see it at runtime,
     // consistent with the expression-bodied variant of this method.
     throw new NotImplementedException("This source can only supply raw RGB bitmap samples.");
 }
 /// <summary>
 /// This source can only supply raw RGB bitmap samples, so selecting an encoded
 /// format is not supported.
 /// </summary>
 /// <exception cref="NotImplementedException">Always thrown.</exception>
 public void SetVideoSourceFormat(VideoCodecsEnum videoFormat)
 {
     throw new NotImplementedException("This source can only supply raw RGB bitmap samples.");
 }
 /// <summary>
 /// Records the video format this sink should use. No validation is performed.
 /// </summary>
 public void SetVideoSinkFormat(VideoCodecsEnum videoFormat) => _selectedSinkFormat = videoFormat;
Example #10
0
        /// <summary>
        /// Decodes an encoded frame with a lazily initialised VP8 decoder and returns
        /// the result as a single BGR video sample. Returns an empty list if the
        /// decoder could not produce an image.
        /// </summary>
        /// <param name="frame">The encoded frame bytes to decode.</param>
        /// <param name="pixelFormat">Not used by this implementation.</param>
        /// <param name="codec">Not used by this implementation; the frame is always handed to the VP8 decoder.</param>
        /// <returns>A list containing one decoded sample, or an empty list on failure.</returns>
        public unsafe IEnumerable <VideoSample> DecodeVideo(byte[] frame, VideoPixelFormatsEnum pixelFormat, VideoCodecsEnum codec)
        {
            lock (_decoderLock)
            {
                // Lazily create the libvpx VP8 decoder context on first use.
                if (_vp8Decoder == null)
                {
                    _vp8Decoder = new vpx_codec_ctx_t();
                    vpx_codec_iface_t   algo = vp8_dx.vpx_codec_vp8_dx();
                    vpx_codec_dec_cfg_t cfg  = new vpx_codec_dec_cfg_t {
                        threads = 1
                    };
                    // NOTE(review): the init result is stored but never checked; a failed
                    // init would only surface as a decode failure below.
                    vpx_codec_err_t res = vpx_decoder.vpx_codec_dec_init(_vp8Decoder, algo, cfg, 0);
                }

                //logger.LogDebug($"Attempting to decode {frame.Length} bytes.");
                //Console.WriteLine(frame.HexStr());

                // Pin the managed buffer so its address can be passed to native code.
                fixed(byte *pFrame = frame)
                {
                    var result = vpx_decoder.vpx_codec_decode(_vp8Decoder, pFrame, (uint)frame.Length, IntPtr.Zero, 0);

                    //logger.LogDebug($"VP8 decode result {result}.");
                    // A failed decode is logged but not fatal; get_frame below will
                    // simply return no image and an empty list is returned.
                    if (result != vpx_codec_err_t.VPX_CODEC_OK)
                    {
                        logger.LogWarning($"VP8 decode of video sample failed with {result}.");
                    }
                }

                IntPtr iter = IntPtr.Zero;
                var    img  = vpx_decoder.vpx_codec_get_frame(_vp8Decoder, iter);

                if (img == null)
                {
                    logger.LogWarning("Image could not be acquired from VP8 decoder stage.");
                }
                else
                {
                    // Display dimensions of the decoded image.
                    int dwidth  = (int)img.d_w;
                    int dheight = (int)img.d_h;
                    int sz      = dwidth * dheight; // byte size of the full-resolution Y plane.

                    var yPlane = img.planes[0];
                    var uPlane = img.planes[1];
                    var vPlane = img.planes[2];

                    // I420 layout: full-resolution Y plane followed by quarter-resolution
                    // U and V planes, hence width * height * 3 / 2 bytes in total.
                    byte[] decodedBuffer = new byte[dwidth * dheight * 3 / 2];

                    // Copy row by row because the decoder's plane strides can be wider
                    // than the display width.
                    for (uint row = 0; row < dheight; row++)
                    {
                        Marshal.Copy((IntPtr)(yPlane + row * img.stride[0]), decodedBuffer, (int)(row * dwidth), (int)dwidth);

                        // U and V planes have half the rows and half the columns of Y.
                        if (row < dheight / 2)
                        {
                            Marshal.Copy((IntPtr)(uPlane + row * img.stride[1]), decodedBuffer, (int)(sz + row * (dwidth / 2)), (int)dwidth / 2);
                            Marshal.Copy((IntPtr)(vPlane + row * img.stride[2]), decodedBuffer, (int)(sz + sz / 4 + row * (dwidth / 2)), (int)dwidth / 2);
                        }
                    }

                    // Convert the packed I420 buffer to BGR for the consumer.
                    byte[] rgb = PixelConverter.I420toBGR(decodedBuffer, dwidth, dheight, out _);
                    return(new List <VideoSample> {
                        new VideoSample {
                            Width = img.d_w, Height = img.d_h, Sample = rgb
                        }
                    });
                }

                return(new List <VideoSample>());
            }
        }
Example #11
0
        /// <summary>
        /// Not implemented: the VP8 encoder has not yet been ported to this library.
        /// </summary>
        /// <exception cref="NotImplementedException">Always thrown.</exception>
        public byte[] EncodeVideo(int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat, VideoCodecsEnum codec)
        {
            // Commented-out encoder scaffolding removed; recover it from version
            // control when the VP8 encoder port is completed.
            throw new NotImplementedException("TODO: The encoder has not yet been ported.");
        }
Example #12
0
 /// <summary>
 /// Indicates whether this implementation supports the supplied codec.
 /// Only VP8 is supported.
 /// </summary>
 public bool IsSupported(VideoCodecsEnum codec)
 {
     return codec == VideoCodecsEnum.VP8;
 }
Example #13
0
        /// <summary>
        /// Not implemented: the VP8 encoder has not been ported, so no encoded data is
        /// produced.
        /// </summary>
        /// <returns>Always null.</returns>
        public byte[] EncodeVideo(int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat, VideoCodecsEnum codec)
        {
            // Commented-out encoder scaffolding removed; recover it from version
            // control when the VP8 encoder port is completed.
            // NOTE(review): returning null silently drops the sample — confirm callers
            // handle a null result rather than expecting encoded bytes.
            return null;
        }