Example #1
0
        /// <summary>
        /// Integration test: records one minute of H.264 video and pipes it through
        /// ffmpeg into an AVI container, then asserts the output file exists.
        /// </summary>
        public async Task RawVideoConvert()
        {
            TestHelper.BeginTest("RawVideoConvert");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            // Using declarations keep every component alive until the end of the method.
            using var captureHandler = FFmpegCaptureHandler.RawVideoToAvi("/home/pi/videos/tests", "testing1234");
            using var encoder = new MMALVideoEncoder();
            using var preview = new MMALVideoRenderer();

            Fixture.MMALCamera.ConfigureCameraSettings();

            // H.264 / YUV420 at quality 10, 25Mb/s.
            var encoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 25000000, null);
            encoder.ConfigureOutputPort(encoderConfig, captureHandler);

            Fixture.MMALCamera.Camera.VideoPort.ConnectTo(encoder);
            Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

            // Give the sensor time to settle before capturing.
            await Task.Delay(2000);

            // Record until the one-minute token fires.
            var timeout = new CancellationTokenSource(TimeSpan.FromMinutes(1));
            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, timeout.Token);

            Fixture.CheckAndAssertFilepath("/home/pi/videos/tests/testing1234.avi");
        }
Example #2
0
        /// <summary>
        /// Streams H.264 video to an RTMP endpoint for the given duration, blocking
        /// the caller until streaming completes, then releases the camera.
        /// NOTE(review): despite the Async suffix this method is synchronous — it
        /// pumps the async pipeline via AsyncContext.Run. Name kept for compatibility.
        /// </summary>
        /// <param name="url">Base RTMP URL to stream to.</param>
        /// <param name="streamName">Stream key appended to <paramref name="url"/>.</param>
        /// <param name="duration">How long to stream before the encoder stops.</param>
        public void StartStreamingAsync(string url, string streamName, TimeSpan duration)
        {
            var camera = MMALCamera.Instance;

            AsyncContext.Run(async () =>
            {
                // Pipe raw H.264 into ffmpeg, which re-muxes it to FLV for the RTMP endpoint.
                using var captureHandler = new FFmpegCaptureHandler($"-i - -vcodec copy -an -f flv {url}{streamName}");
                using var encoder = new MMALVideoEncoder(captureHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.Add(duration));
                using var preview = new MMALVideoRenderer();

                camera.ConfigureCameraSettings();

                // H.264 / YUV420 on output port 0 at 25Mb/s.
                encoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 0, 25000000);

                camera.Camera.VideoPort.ConnectTo(encoder);
                camera.Camera.PreviewPort.ConnectTo(preview);

                // Give the sensor time to settle before streaming.
                await Task.Delay(2000);

                await camera.BeginProcessing(camera.Camera.VideoPort);
            });

            camera.Cleanup();
        }
Example #3
0
        /// <summary>
        /// Streams H.264 video to an RTMP endpoint for the given duration, blocking
        /// the caller until streaming completes, then releases the camera.
        /// </summary>
        /// <param name="url">Base RTMP URL to stream to.</param>
        /// <param name="streamName">Stream key appended to <paramref name="url"/>.</param>
        /// <param name="duration">How long to stream before the encoder stops.</param>
        public void StartStreaming(string url, string streamName, TimeSpan duration)
        {
            var camera = MMALCamera.Instance;
            var task   = Task.Run(async() =>
            {
                using (var ffCaptureHandler = new FFmpegCaptureHandler($"-i - -vcodec copy -an -f flv {url}{streamName}"))
                    using (var vidEncoder = new MMALVideoEncoder(ffCaptureHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.Add(duration)))
                        using (var renderer = new MMALVideoRenderer())
                        {
                            // Configure the camera BEFORE wiring up the pipeline, consistent
                            // with the other samples in this file (previously this ran after
                            // the ports were configured and connected).
                            camera.ConfigureCameraSettings();

                            // H.264 / YUV420 on output port 0 at 25Mb/s.
                            vidEncoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 0, 25000000);

                            camera.Camera.VideoPort.ConnectTo(vidEncoder);
                            camera.Camera.PreviewPort.ConnectTo(renderer);

                            await camera.BeginProcessing(camera.Camera.VideoPort);
                        }
            });

            // GetAwaiter().GetResult() rethrows the task's original exception instead of
            // wrapping it in an AggregateException (resolves the old TODO about error display).
            task.GetAwaiter().GetResult();
            camera.Cleanup();
        }
Example #4
0
        /// <summary>
        /// Records rotated H.264 video to a timestamped AVI file (via ffmpeg) until the
        /// token is cancelled, then releases the camera.
        /// </summary>
        /// <param name="cancellationToken">Stops the warm-up delay and the recording when cancelled.</param>
        public async Task TakeVideo(CancellationToken cancellationToken)
        {
            var cam = MMALCamera.Instance;

            // Capture the timestamp once so the logged path matches the actual filename
            // (previously DateTime.Now was evaluated twice and could differ).
            var fileName = $"Millie{DateTime.Now:s}";
            Console.WriteLine($"Video path: {_videoSavePath}{fileName}");

            try {
                using (var ffCaptureHandler = FFmpegCaptureHandler.RawVideoToAvi(_videoSavePath, fileName))
                    using (var vidEncoder = new MMALVideoEncoder(ffCaptureHandler))
                        using (var renderer = new MMALVideoRenderer())
                        {
                            // Static config must be set BEFORE ConfigureCameraSettings() so it is
                            // applied to the camera; previously Rotation was assigned afterwards
                            // and therefore had no effect.
                            MMALCameraConfig.Rotation = 270;
                            cam.ConfigureCameraSettings();

                            // H.264 standard with a YUV420 pixel format at quality 10.
                            var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, MMALVideoEncoder.MaxBitrateLevel4, null);
                            vidEncoder.ConfigureOutputPort(portConfig);

                            cam.Camera.VideoPort.ConnectTo(vidEncoder);
                            cam.Camera.PreviewPort.ConnectTo(renderer);

                            // Camera warm up time.
                            await Task.Delay(2000, cancellationToken);

                            await cam.ProcessAsync(cam.Camera.VideoPort, cancellationToken);
                        }
            }
            catch (Exception ex) {
                Console.WriteLine(ex);
            }
            finally {
                // Only call when you no longer require the camera, i.e. on app shutdown.
                cam.Cleanup();
            }
        }