Example #1
        public async Task TakeVideoAsync(string filePath)
        {
            try
            {
                // Singleton initialized lazily. Reference once in your application.
                MMALCamera cam = this.MMALSharpCameraInstance;

                // using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "avi"))
                using (var vidCaptureHandler = new VideoStreamCaptureHandler(filePath))
                {
                    var cts   = new CancellationTokenSource(TimeSpan.FromSeconds(30));
                    var split = new Split
                    {
                        Mode  = TimelapseMode.Second,
                        Value = 15,
                    };
                    await cam.TakeVideo(vidCaptureHandler, cts.Token, split);
                }

                // Cleanup disposes all unmanaged resources and unloads Broadcom library. To be called when no more processing is to be done
                // on the camera.
                // cam.Cleanup();
                Console.WriteLine($"Wrote video to: {filePath}");
            }
            catch (Exception ex)
            {
                Console.WriteLine($"{nameof(MMALSharpCamera)} {nameof(TakeVideoAsync)} Failed");
                Console.WriteLine($"{nameof(MMALSharpCamera)} {nameof(TakeVideoAsync)} {ex}");
            }
        }
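A minimal call site for the TakeVideoAsync wrapper above is sketched below; the method name RecordThirtySecondClipAsync and the output path are illustrative only and not part of the original sample.

        // Hypothetical call site, assumed to live in the same wrapper class as Example #1.
        public async Task RecordThirtySecondClipAsync()
        {
            // The wrapper above records for 30 seconds and splits the stream every 15 seconds.
            await this.TakeVideoAsync("/home/pi/videos/clip.avi");
        }
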
        public void TakeVideoSplit(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.SetConfigurationDefaults();

            MMALCameraConfig.InlineHeaders = true;

            AsyncContext.Run(async() =>
            {
                var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests/split_test", extension);

                TestHelper.CleanDirectory("/home/pi/videos/tests/split_test");

                using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler, new MMAL_RATIONAL_T(25, 1),
                                                             DateTime.Now.AddSeconds(30), new Split {
                    Mode = TimelapseMode.Second, Value = 15
                }))
                {
                    vidEncoder.ConfigureOutputPort(0, encodingType, pixelFormat, 10, 25000000);

                    //Create our component pipeline.
                    fixture.MMALCamera.Camera.VideoPort
                    .ConnectTo(vidEncoder);
                    fixture.MMALCamera.Camera.PreviewPort
                    .ConnectTo(new MMALVideoRenderer());

                    fixture.MMALCamera.ConfigureCameraSettings();

                    //2 files should be created from this test.
                    await fixture.MMALCamera.BeginProcessing(fixture.MMALCamera.Camera.VideoPort, vidEncoder);

                    Assert.True(Directory.GetFiles("/home/pi/videos/tests/split_test").Length == 2);
                }
            });
        }
Example #3
        public async Task TakeVideoAndStoreTimestamps()
        {
            TestHelper.BeginTest("Video - TakeVideoAndStoreTimestamps");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264", true))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, bitrate: MMALVideoEncoder.MaxBitrateLevel4);

                        vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                        Fixture.CheckAndAssertFilepath($"{vidCaptureHandler.Directory}/{vidCaptureHandler.CurrentFilename}.pts");
                    }
        }
Example #4
        public async Task TakeVideo(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.BeginTest("TakeVideo", encodingType.EncodingName, pixelFormat.EncodingName);
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", extension))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 25, 10, 25000000, null);

                        vidEncoder.ConfigureOutputPort(portConfig);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #5
        public static void TakeVideoManualMode()
        {
            MMALCamera cam = MMALCamera.Instance;

            AsyncContext.Run(async() =>
            {
                //using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "mjpeg"))
                using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "avi"))
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                        //using (var ffCaptureHandler = FFmpegCaptureHandler.RawVideoToAvi("/home/pi/videos/", "testing1234"))
                        //using (var vidEncoder = new MMALVideoEncoder(ffCaptureHandler))
                        using (var renderer = new MMALVideoRenderer())
                        {
                            cam.ConfigureCameraSettings();

                            // Create our component pipeline. Here we are using the MJPEG codec with a YUV420 pixel format. The video will be taken at 25Mb/s.
                            vidEncoder.ConfigureOutputPort(0, MMALEncoding.MJPEG, MMALEncoding.I420, 90, 25000000);

                            cam.Camera.VideoPort.ConnectTo(vidEncoder);
                            cam.Camera.PreviewPort.ConnectTo(renderer);

                            // Camera warm up time
                            await Task.Delay(2000);

                            var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

                            // Take video for 10 seconds.
                            await cam.ProcessAsync(cam.Camera.VideoPort, cts.Token);
                        }
            });

            cam.Cleanup();
        }
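AsyncContext.Run in the sample above comes from Nito.AsyncEx and simply provides a synchronous entry point. With C# 7.1 or later the same pipeline can be hosted in an async Main instead; the sketch below restates the body of TakeVideoManualMode under that assumption.

        // Sketch only: the same flow without Nito.AsyncEx, using an async entry point (C# 7.1+).
        public static async Task Main()
        {
            MMALCamera cam = MMALCamera.Instance;

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "avi"))
            using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
            using (var renderer = new MMALVideoRenderer())
            {
                cam.ConfigureCameraSettings();

                // MJPEG at quality 90, 25Mb/s, as in the sample above.
                vidEncoder.ConfigureOutputPort(0, MMALEncoding.MJPEG, MMALEncoding.I420, 90, 25000000);

                cam.Camera.VideoPort.ConnectTo(vidEncoder);
                cam.Camera.PreviewPort.ConnectTo(renderer);

                // Camera warm up time
                await Task.Delay(2000);

                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
                await cam.ProcessAsync(cam.Camera.VideoPort, cts.Token);
            }

            cam.Cleanup();
        }
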
Example #6
        /// <summary>
        /// Self-contained method for recording H.264 video for a specified amount of time. Records at 30fps, 25Mb/s at the highest quality.
        /// </summary>
        /// <param name="handler">The video capture handler to apply to the encoder.</param>
        /// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
        /// <param name="split">Used for Segmented video mode.</param>
        /// <returns>The awaitable Task.</returns>
        public async Task TakeVideo(VideoStreamCaptureHandler handler, CancellationToken cancellationToken, Split split = null)
        {
            if (split != null && !MMALCameraConfig.InlineHeaders)
            {
                MMALLog.Logger.Warn("Inline headers not enabled. Split mode not supported when this is disabled.");
                split = null;
            }

            using (var vidEncoder = new MMALVideoEncoder(handler, null, split))
                using (var renderer = new MMALVideoRenderer())
                {
                    this.ConfigureCameraSettings();

                    vidEncoder.ConfigureOutputPort(MMALEncoding.H264, MMALEncoding.I420, 0, MMALVideoEncoder.MaxBitrateLevel4);

                    // Create our component pipeline.
                    this.Camera.VideoPort.ConnectTo(vidEncoder);
                    this.Camera.PreviewPort.ConnectTo(renderer);

                    MMALLog.Logger.Info($"Preparing to take video. Resolution: {vidEncoder.Width} x {vidEncoder.Height}. " +
                                        $"Encoder: {vidEncoder.Outputs[0].EncodingType.EncodingName}. Pixel Format: {vidEncoder.Outputs[0].PixelFormat.EncodingName}.");

                    // Camera warm up time
                    await Task.Delay(2000);

                    await this.ProcessAsync(this.Camera.VideoPort, cancellationToken);
                }
        }
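Calling this helper needs nothing beyond a capture handler and a CancellationToken; Example #13 below shows the same pattern with additional camera configuration. A minimal sketch, assuming the default configuration is acceptable:

        public static async Task RecordTenSecondsAsync()
        {
            MMALCamera cam = MMALCamera.Instance;

            using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/", "h264"))
            {
                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
                await cam.TakeVideo(handler, cts.Token);
            }

            // Cleanup should only be called once no further camera work is planned.
            cam.Cleanup();
        }
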
Example #7
        public async Task AnnotateVideoRefreshSeconds()
        {
            TestHelper.BeginTest("Video - AnnotateVideo");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            MMALCameraConfig.Annotate             = new AnnotateImage();
            MMALCameraConfig.Annotate.RefreshRate = DateTimeTextRefreshRate.Seconds;
            MMALCameraConfig.Annotate.TimeFormat  = "HH:mm:ss";

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var vidEncoder = new MMALVideoEncoder())
                    using (var renderer = new MMALVideoRenderer())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 10, bitrate: MMALVideoEncoder.MaxBitrateLevel4);

                        // Create our component pipeline. Here we are using the H.264 standard with a YUV420 pixel format. The video will be taken at 25Mb/s.
                        vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);

                        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                        // Camera warm up time
                        await Task.Delay(2000);

                        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

                        // Take video for 30 seconds.
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #8
        public async Task RecordVideoDirectlyFromResizer()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromResizer");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var preview = new MMALVideoRenderer())
                    using (var resizer = new MMALResizerComponent())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        // Use the resizer to resize 1080p to 640x480.
                        var portConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 640, 480, 0, 0, 0, false, null);

                        resizer.ConfigureOutputPort<VideoPort>(0, portConfig, vidCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(resizer);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #9
        public async Task ChangeEncoderType()
        {
            TestHelper.BeginTest("Video - ChangeEncoderType");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.MJPEG, MMALEncoding.I420, 25, 10, 25000000, null);

                        vidEncoder.ConfigureOutputPort(portConfig);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(20));

                        // Record video for 20 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "mjpeg"))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.MJPEG, MMALEncoding.I420, 25, 10, 25000000, null);

                        vidEncoder.ConfigureOutputPort(portConfig);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(20));

                        // Record video for 20 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #10
        public async Task ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder(MMAL_PARAM_IMAGEFX_T effect, bool throwsException)
        {
            TestHelper.BeginTest($"Video - ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder - {effect}");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var preview = new MMALNullSinkComponent())
                    using (var imageFx = new MMALImageFxComponent())
                        using (var splitter = new MMALSplitterComponent())
                            using (var vidEncoder = new MMALVideoEncoder())
                            {
                                Fixture.MMALCamera.ConfigureCameraSettings();

                                var vidEncoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420);
                                var splitterConfig   = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);
                                var fxConfig         = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                                imageFx.ConfigureOutputPort<VideoPort>(0, fxConfig, null);

                                splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), imageFx.Outputs[0], null);
                                splitter.ConfigureOutputPort<VideoPort>(0, splitterConfig, null);

                                vidEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), splitter.Outputs[0], null);
                                vidEncoder.ConfigureOutputPort(0, vidEncoderConfig, vidCaptureHandler);

                                if (throwsException)
                                {
                                    Assert.Throws<MMALInvalidException>(() =>
                                    {
                                        imageFx.ImageEffect = effect;
                                    });
                                }
                                else
                                {
                                    imageFx.ImageEffect = effect;
                                }

                                // Create our component pipeline.
                                Fixture.MMALCamera.Camera.VideoPort
                                .ConnectTo(imageFx);
                                Fixture.MMALCamera.Camera.PreviewPort
                                .ConnectTo(preview);

                                imageFx.Outputs[0].ConnectTo(splitter);
                                splitter.Outputs[0].ConnectTo(vidEncoder);

                                // Camera warm up time
                                await Task.Delay(2000);

                                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));

                                await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                                Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                            }
        }
Example #11
        static async Task h264(int seconds)
        {
            var cam      = GetConfiguredCamera();
            var pathname = ramdiskPath + "capture.h264";

            using var handler = new VideoStreamCaptureHandler(pathname);
            var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(seconds));

            Console.WriteLine($"Capturing h.264: {pathname}");
            await cam.TakeVideo(handler, timeout.Token);

            cam.Cleanup();
            Console.WriteLine("Exiting.");
        }
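GetConfiguredCamera and ramdiskPath are helpers defined elsewhere in that sample. A plausible sketch is shown below, assuming the helper only sets a few MMALCameraConfig properties; the values are borrowed from Example #13 and the original helper may differ.

        private const string ramdiskPath = "/media/ramdisk/";   // assumed mount point

        private static MMALCamera GetConfiguredCamera()
        {
            // Hypothetical configuration; the original sample may use different settings.
            MMALCameraConfig.VideoResolution = new Resolution(640, 480);
            MMALCameraConfig.InlineHeaders   = true;

            return MMALCamera.Instance;
        }
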
Example #12
        public async Task RecordVideoDirectlyFromSplitter()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromSplitter");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var vidCaptureHandler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                    using (var vidCaptureHandler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                        using (var vidCaptureHandler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                            using (var preview = new MMALVideoRenderer())
                                using (var splitter = new MMALSplitterComponent())
                                {
                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 0, 0, null);

                                    // Create our component pipeline.
                                    splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
                                    splitter.ConfigureOutputPort<VideoPort>(0, splitterPortConfig, vidCaptureHandler);
                                    splitter.ConfigureOutputPort<VideoPort>(1, splitterPortConfig, vidCaptureHandler2);
                                    splitter.ConfigureOutputPort<VideoPort>(2, splitterPortConfig, vidCaptureHandler3);
                                    splitter.ConfigureOutputPort<VideoPort>(3, splitterPortConfig, vidCaptureHandler4);

                                    // Create our component pipeline.
                                    Fixture.MMALCamera.Camera.VideoPort
                                    .ConnectTo(splitter);
                                    Fixture.MMALCamera.Camera.PreviewPort
                                    .ConnectTo(preview);

                                    // Camera warm up time
                                    await Task.Delay(2000);

                                    CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                                    // Record video for 15 seconds
                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler2.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler3.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler4.GetFilepath());
                                }
        }
Example #13
        public static async Task TakeVideoAsync()
        {
            MMALCameraConfig.VideoResolution = new Resolution(640, 480);
            MMALCameraConfig.Flips           = MMAL_PARAM_MIRROR_T.MMAL_PARAM_MIRROR_VERTICAL;
            MMALCameraConfig.InlineHeaders   = true;
            MMALCameraConfig.VideoProfile    = MMAL_VIDEO_PROFILE_T.MMAL_VIDEO_PROFILE_H264_HIGH;
            // Singleton initialized lazily. Reference once in your application.
            MMALCamera cam = MMALCamera.Instance;

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "avi"))
            {
                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

                await cam.TakeVideo(vidCaptureHandler, cts.Token);
            }

            // Cleanup disposes all unmanaged resources and unloads Broadcom library. To be called when no more processing is to be done
            // on the camera.
            cam.Cleanup();
        }
Example #14
        public async Task TakeVideoSplit()
        {
            TestHelper.BeginTest("TakeVideoSplit");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests/split_test");

            MMALCameraConfig.InlineHeaders = true;

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests/split_test", "h264"))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var split = new Split
                        {
                            Mode  = TimelapseMode.Second,
                            Value = 15
                        };

                        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 25000000, null, split);

                        vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

                        // 2 files should be created from this test.
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Assert.True(Directory.GetFiles("/home/pi/videos/tests/split_test").Length == 2);
                    }
        }
        public async Task RecordVideoDirectlyFromResizerWithSplitterComponent()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromResizerWithSplitterComponent");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            // I am only using a single output here because of the disk IO performance on the Pi; ideally you need to be
            // using a faster storage medium such as a ramdisk to output to multiple files.
            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var preview = new MMALVideoRenderer())
                    using (var splitter = new MMALSplitterComponent())
                        using (var resizer = new MMALResizerComponent())
                        {
                            Fixture.MMALCamera.ConfigureCameraSettings();

                            var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420);
                            var resizerPortConfig  = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1024, height: 768, timeout: DateTime.Now.AddSeconds(15));

                            // Create our component pipeline.
                            splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), Fixture.MMALCamera.Camera.VideoPort, null);
                            splitter.ConfigureOutputPort(0, splitterPortConfig, null);

                            resizer.ConfigureOutputPort<VideoPort>(0, resizerPortConfig, vidCaptureHandler);

                            // Create our component pipeline.
                            Fixture.MMALCamera.Camera.VideoPort
                            .ConnectTo(splitter);

                            splitter.Outputs[0].ConnectTo(resizer);

                            Fixture.MMALCamera.Camera.PreviewPort
                            .ConnectTo(preview);

                            // Camera warm up time
                            await Task.Delay(2000);

                            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort);

                            Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                        }
        }
Example #16
        public async Task TakeVideoAndStoreMotionVectors(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.BeginTest("TakeVideoAndStoreMotionVectors", encodingType.EncodingName, pixelFormat.EncodingName);
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            // Ensure inline motion vectors are enabled.
            MMALCameraConfig.InlineMotionVectors = true;

            using (var motionVectorStore = File.Create("/home/pi/videos/tests/motion.dat"))
                using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", extension))
                    using (var preview = new MMALVideoRenderer())
                        using (var vidEncoder = new MMALVideoEncoder())
                        {
                            Fixture.MMALCamera.ConfigureCameraSettings();

                            var portConfig = new MMALPortConfig(encodingType, pixelFormat, 10, 25000000, null, storeMotionVectors: true);

                            vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);

                            // Initialise the motion vector stream.
                            vidCaptureHandler.InitialiseMotionStore(motionVectorStore);

                            // Create our component pipeline.
                            Fixture.MMALCamera.Camera.VideoPort
                            .ConnectTo(vidEncoder);
                            Fixture.MMALCamera.Camera.PreviewPort
                            .ConnectTo(preview);

                            // Camera warm up time
                            await Task.Delay(2000);

                            CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

                            // Record video for 10 seconds
                            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                            Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                        }
        }
        public void TakeVideo(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.SetConfigurationDefaults();

            AsyncContext.Run(async() =>
            {
                var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", extension);

                TestHelper.CleanDirectory("/home/pi/videos/tests");

                using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.AddSeconds(20)))
                {
                    fixture.MMALCamera.ConfigureCameraSettings();

                    vidEncoder.ConfigureOutputPort(0, encodingType, pixelFormat, 10, 25000000);

                    //Create our component pipeline.
                    fixture.MMALCamera.Camera.VideoPort
                    .ConnectTo(vidEncoder);
                    fixture.MMALCamera.Camera.PreviewPort
                    .ConnectTo(new MMALVideoRenderer());

                    //Camera warm up time
                    await Task.Delay(2000);

                    //Record video for 20 seconds
                    await fixture.MMALCamera.BeginProcessing(fixture.MMALCamera.Camera.VideoPort);

                    if (System.IO.File.Exists(vidCaptureHandler.GetFilepath()))
                    {
                        var length = new System.IO.FileInfo(vidCaptureHandler.GetFilepath()).Length;
                        Assert.True(length > 0);
                    }
                    else
                    {
                        Assert.True(false, $"File {vidCaptureHandler.GetFilepath()} was not created");
                    }
                }
            });
        }
Example #18
        private async Task TakeVideoManual(string extension, MMALEncoding encoding, MMALEncoding pixelFormat, int bitrate, int seconds)
        {
            using (var vidCaptureHandler = new VideoStreamCaptureHandler($"/home/pi/videos/", extension))
                using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                    using (var renderer = new MMALVideoRenderer())
                    {
                        this.Cam.ConfigureCameraSettings();

                        vidEncoder.ConfigureOutputPort(0, encoding, pixelFormat, 0, bitrate);

                        this.Cam.Camera.VideoPort.ConnectTo(vidEncoder);
                        this.Cam.Camera.PreviewPort.ConnectTo(renderer);

                        // Camera warm up time
                        await Task.Delay(2000);

                        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(seconds));

                        // Take video for the requested number of seconds.
                        await this.Cam.ProcessAsync(this.Cam.Camera.VideoPort, cts.Token);
                    }
        }
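A call site for the TakeVideoManual helper above only has to choose the container, encoding, bitrate and duration. A short sketch, assuming H.264/I420 at 25Mb/s for 30 seconds; the method name is illustrative only:

        // Hypothetical call site for the helper above.
        public Task RecordH264Async() =>
            this.TakeVideoManual("h264", MMALEncoding.H264, MMALEncoding.I420, 25000000, 30);
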
        public async Task RecordVideoDirectlyFromSplitter()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromSplitter");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            // I am only using a single output here because of the disk IO performance on the Pi; ideally you need to be
            // using a faster storage medium such as a ramdisk to output to multiple files.
            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var preview = new MMALVideoRenderer())
                    using (var splitter = new MMALSplitterComponent())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var splitterPortConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                        // Create our component pipeline.
                        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
                        splitter.ConfigureOutputPort(0, splitterPortConfig, vidCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(splitter);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
        /// <summary>
        /// Self-contained method for recording H.264 video for a specified amount of time. Records at 30fps, 25Mb/s at the highest quality.
        /// </summary>
        /// <param name="handler">The video capture handler to apply to the encoder.</param>
        /// <param name="timeout">A timeout to stop the video capture.</param>
        /// <param name="split">Used for Segmented video mode.</param>
        /// <returns>The awaitable Task.</returns>
        public async Task TakeVideo(VideoStreamCaptureHandler handler, DateTime? timeout = null, Split split = null)
        {
            if (split != null && !MMALCameraConfig.InlineHeaders)
            {
                MMALLog.Logger.Warn("Inline headers not enabled. Split mode not supported when this is disabled.");
                split = null;
            }

            using (var vidEncoder = new MMALVideoEncoder(handler, new MMAL_RATIONAL_T(30, 1), timeout, split))
                using (var renderer = new MMALVideoRenderer())
                {
                    vidEncoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 10, 25000000);

                    //Create our component pipeline.
                    this.Camera.VideoPort.ConnectTo(vidEncoder);
                    this.Camera.PreviewPort.ConnectTo(renderer);
                    this.ConfigureCameraSettings();

                    MMALLog.Logger.Info($"Preparing to take video. Resolution: {vidEncoder.Width} x {vidEncoder.Height}. " +
                                        $"Encoder: {vidEncoder.Outputs[0].EncodingType.EncodingName}. Pixel Format: {vidEncoder.Outputs[0].PixelFormat.EncodingName}.");

                    await BeginProcessing(this.Camera.VideoPort, vidEncoder);
                }
        }
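Unlike the overload in Example #6, this one stops on a wall-clock timeout rather than a CancellationToken. A minimal sketch of calling it with segmented recording, remembering that MMALCameraConfig.InlineHeaders must be enabled or the guard above discards the Split; the method name and values are illustrative only:

        public static async Task RecordSegmentedAsync()
        {
            // Required for Split mode (see the guard at the top of TakeVideo).
            MMALCameraConfig.InlineHeaders = true;

            MMALCamera cam = MMALCamera.Instance;

            using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/", "h264"))
            {
                // Record for 60 seconds, starting a new file every 15 seconds.
                await cam.TakeVideo(handler, DateTime.Now.AddSeconds(60),
                                    new Split { Mode = TimelapseMode.Second, Value = 15 });
            }

            cam.Cleanup();
        }
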
Example #21
        public void GrabVideo(TimeSpan duration)
        {
            var camera = MMALCamera.Instance;
            var task   = Task.Run(async() =>
            {
                using (var vidCaptureHandler = new VideoStreamCaptureHandler(Path.Combine(Folder, Video), "avi"))
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.Add(duration)))
                        using (var renderer = new MMALVideoRenderer())
                        {
                            vidEncoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 0, 25000000);

                            camera.Camera.VideoPort.ConnectTo(vidEncoder);
                            camera.Camera.PreviewPort.ConnectTo(renderer);
                            camera.ConfigureCameraSettings();

                            await camera.BeginProcessing(camera.Camera.VideoPort);
                        }
            });

            //TODO: Improve Woopsa error to display AggregateException
            task.Wait();
            camera.Cleanup();
            LastVideoFilename = Path.Combine(Image, GetLastFile(Path.Combine(Folder, Video)).Name);
        }
Example #22
        public void ChangeColorSpace()
        {
            TestHelper.BeginTest("ChangeColorSpace");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            MMALCameraConfig.VideoColorSpace = MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601;

            using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var handler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                    using (var handler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                        using (var handler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                            using (var splitter = new MMALSplitterComponent(null))
                                using (var vidEncoder = new MMALVideoEncoder(handler))
                                    using (var vidEncoder2 = new MMALVideoEncoder(handler2))
                                        using (var vidEncoder3 = new MMALVideoEncoder(handler3))
                                            using (var vidEncoder4 = new MMALVideoEncoder(handler4))
                                                using (var renderer = new MMALVideoRenderer())
                                                {
                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 10, 0, 13000000, null);
                                                    var portConfig1        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 10, 13000000, DateTime.Now.AddSeconds(20));
                                                    var portConfig2        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 20, 13000000, DateTime.Now.AddSeconds(15));
                                                    var portConfig3        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 30, 13000000, DateTime.Now.AddSeconds(10));
                                                    var portConfig4        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 40, 13000000, DateTime.Now.AddSeconds(10));

                                                    // Create our component pipeline.
                                                    splitter.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, Fixture.MMALCamera.Camera.VideoPort);
                                                    splitter.ConfigureOutputPort(0, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(1, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(2, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(3, splitterPortConfig);

                                                    vidEncoder.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[0]);
                                                    vidEncoder.ConfigureOutputPort(0, portConfig1);

                                                    vidEncoder2.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[1]);
                                                    vidEncoder2.ConfigureOutputPort(0, portConfig2);
                                                    vidEncoder3.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[2]);
                                                    vidEncoder3.ConfigureOutputPort(0, portConfig3);

                                                    vidEncoder4.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[3]);
                                                    vidEncoder4.ConfigureOutputPort(0, portConfig4);

                                                    Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                                                    splitter.Outputs[0].ConnectTo(vidEncoder);
                                                    splitter.Outputs[1].ConnectTo(vidEncoder2);
                                                    splitter.Outputs[2].ConnectTo(vidEncoder3);
                                                    splitter.Outputs[3].ConnectTo(vidEncoder4);

                                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                                                    // Assert that all output ports have the same video color space.
                                                    Assert.True(Fixture.MMALCamera.Camera.VideoPort.VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);

                                                    Assert.True(splitter.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(splitter.Outputs[1].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(splitter.Outputs[2].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(splitter.Outputs[3].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);

                                                    Assert.True(vidEncoder.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(vidEncoder2.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(vidEncoder3.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(vidEncoder4.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                }
        }
Example #23
        public async Task VideoSplitterComponent()
        {
            TestHelper.BeginTest("VideoSplitterComponent");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var handler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                    using (var handler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                        using (var handler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                            using (var splitter = new MMALSplitterComponent(null))
                                using (var vidEncoder = new MMALVideoEncoder(handler))
                                    using (var vidEncoder2 = new MMALVideoEncoder(handler2))
                                        using (var vidEncoder3 = new MMALVideoEncoder(handler3))
                                            using (var vidEncoder4 = new MMALVideoEncoder(handler4))
                                                using (var renderer = new MMALNullSinkComponent())
                                                {
                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 10, 0, 13000000, null);
                                                    var portConfig1        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 10, 13000000, DateTime.Now.AddSeconds(20));
                                                    var portConfig2        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 20, 13000000, DateTime.Now.AddSeconds(15));
                                                    var portConfig3        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 30, 13000000, DateTime.Now.AddSeconds(10));
                                                    var portConfig4        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 40, 13000000, DateTime.Now.AddSeconds(10));

                                                    // Create our component pipeline.
                                                    splitter.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, Fixture.MMALCamera.Camera.VideoPort);
                                                    splitter.ConfigureOutputPort(0, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(1, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(2, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(3, splitterPortConfig);

                                                    vidEncoder.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[0]);
                                                    vidEncoder.ConfigureOutputPort(0, portConfig1);

                                                    vidEncoder2.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[1]);
                                                    vidEncoder2.ConfigureOutputPort(0, portConfig2);
                                                    vidEncoder3.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[2]);
                                                    vidEncoder3.ConfigureOutputPort(0, portConfig3);

                                                    vidEncoder4.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[3]);
                                                    vidEncoder4.ConfigureOutputPort(0, portConfig4);

                                                    Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                                                    splitter.Outputs[0].ConnectTo(vidEncoder);
                                                    splitter.Outputs[1].ConnectTo(vidEncoder2);
                                                    splitter.Outputs[2].ConnectTo(vidEncoder3);
                                                    splitter.Outputs[3].ConnectTo(vidEncoder4);

                                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                                                    // Camera warm up time
                                                    await Task.Delay(2000);

                                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort);

                                                    Fixture.CheckAndAssertFilepath(handler.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(handler2.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(handler3.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(handler4.GetFilepath());
                                                }
        }
        public void VideoSplitterComponent()
        {
            TestHelper.BeginTest("VideoSplitterComponent");
            TestHelper.SetConfigurationDefaults();

            AsyncContext.Run(async() =>
            {
                TestHelper.CleanDirectory("/home/pi/videos/tests");

                using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "avi"))
                    using (var handler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "avi"))
                        using (var handler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "avi"))
                            using (var handler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "avi"))
                                using (var splitter = new MMALSplitterComponent(null))
                                    using (var vidEncoder = new MMALVideoEncoder(handler, DateTime.Now.AddSeconds(10)))
                                        using (var vidEncoder2 = new MMALVideoEncoder(handler2, DateTime.Now.AddSeconds(15)))
                                            using (var vidEncoder3 = new MMALVideoEncoder(handler3, DateTime.Now.AddSeconds(10)))
                                                using (var vidEncoder4 = new MMALVideoEncoder(handler4, DateTime.Now.AddSeconds(10)))
                                                    using (var renderer = new MMALVideoRenderer())
                                                    {
                                                        _fixture.MMALCamera.ConfigureCameraSettings();

                                                        // Create our component pipeline.
                                                        splitter.ConfigureInputPort(MMALEncoding.I420, MMALEncoding.I420, _fixture.MMALCamera.Camera.VideoPort);
                                                        splitter.ConfigureOutputPort(0, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);
                                                        splitter.ConfigureOutputPort(1, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);
                                                        splitter.ConfigureOutputPort(2, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);
                                                        splitter.ConfigureOutputPort(3, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);

                                                        vidEncoder.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[0]);
                                                        vidEncoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 10, 25000000);

                                                        vidEncoder2.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[1]);
                                                        vidEncoder2.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 20, 25000000);
                                                        vidEncoder3.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[2]);
                                                        vidEncoder3.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 30, 25000000);

                                                        vidEncoder4.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[3]);
                                                        vidEncoder4.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 40, 25000000);

                                                        _fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                                                        splitter.Outputs[0].ConnectTo(vidEncoder);
                                                        splitter.Outputs[1].ConnectTo(vidEncoder2);
                                                        splitter.Outputs[2].ConnectTo(vidEncoder3);
                                                        splitter.Outputs[3].ConnectTo(vidEncoder4);

                                                        _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                                                        // Camera warm up time
                                                        await Task.Delay(2000);

                                                        await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.VideoPort);

                                                        _fixture.CheckAndAssertFilepath(handler.GetFilepath());
                                                        _fixture.CheckAndAssertFilepath(handler2.GetFilepath());
                                                        _fixture.CheckAndAssertFilepath(handler3.GetFilepath());
                                                        _fixture.CheckAndAssertFilepath(handler4.GetFilepath());
                                                    }
            });
        }
        public void ChangeEncoderType()
        {
            TestHelper.SetConfigurationDefaults();

            AsyncContext.Run(async() =>
            {
                var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "avi");

                TestHelper.CleanDirectory("/home/pi/videos/tests");

                using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.AddSeconds(20)))
                {
                    vidEncoder.ConfigureOutputPort(0, MMALEncoding.MJPEG, MMALEncoding.I420, 10, 25000000);

                    //Create our component pipeline.
                    fixture.MMALCamera.Camera.VideoPort
                    .ConnectTo(vidEncoder);
                    fixture.MMALCamera.Camera.PreviewPort
                    .ConnectTo(new MMALVideoRenderer());

                    fixture.MMALCamera.ConfigureCameraSettings();

                    //Record video for 20 seconds
                    await fixture.MMALCamera.BeginProcessing(fixture.MMALCamera.Camera.VideoPort, vidEncoder);

                    if (System.IO.File.Exists(vidCaptureHandler.GetFilepath()))
                    {
                        var length = new System.IO.FileInfo(vidCaptureHandler.GetFilepath()).Length;
                        Assert.True(length > 0);
                    }
                    else
                    {
                        Assert.True(false, $"File {vidCaptureHandler.GetFilepath()} was not created");
                    }
                }

                vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "mjpeg");

                using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.AddSeconds(20)))
                {
                    vidEncoder.ConfigureOutputPort(0, MMALEncoding.MJPEG, MMALEncoding.I420, 90, 25000000);

                    //Create our component pipeline.
                    fixture.MMALCamera.Camera.VideoPort
                    .ConnectTo(vidEncoder);
                    fixture.MMALCamera.Camera.PreviewPort
                    .ConnectTo(new MMALVideoRenderer());

                    fixture.MMALCamera.ConfigureCameraSettings();

                    //Record video for 20 seconds
                    await fixture.MMALCamera.BeginProcessing(fixture.MMALCamera.Camera.VideoPort, vidEncoder);

                    if (System.IO.File.Exists(vidCaptureHandler.GetFilepath()))
                    {
                        var length = new System.IO.FileInfo(vidCaptureHandler.GetFilepath()).Length;
                        Assert.True(length > 0);
                    }
                    else
                    {
                        Assert.True(false, $"File {vidCaptureHandler.GetFilepath()} was not created");
                    }
                }
            });
        }
Example #26
        public async Task RecordVideoDirectlyFromResizerWithSplitterComponent()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromResizerWithSplitterComponent");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var vidCaptureHandler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                    using (var vidCaptureHandler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                        using (var vidCaptureHandler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                            using (var preview = new MMALVideoRenderer())
                                using (var splitter = new MMALSplitterComponent())
                                    using (var resizer = new MMALResizerComponent())
                                        using (var resizer2 = new MMALResizerComponent())
                                            using (var resizer3 = new MMALResizerComponent())
                                                using (var resizer4 = new MMALResizerComponent())
                                                {
                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0, 0, null);

                                                    // Create our component pipeline.
                                                    splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
                                                    splitter.ConfigureOutputPort(0, splitterPortConfig, null);
                                                    splitter.ConfigureOutputPort(1, splitterPortConfig, null);
                                                    splitter.ConfigureOutputPort(2, splitterPortConfig, null);
                                                    splitter.ConfigureOutputPort(3, splitterPortConfig, null);

                                                    var portConfig  = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 1024, 768, 0, 0, 0, false, DateTime.Now.AddSeconds(20));
                                                    var portConfig2 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 800, 600, 0, 0, 0, false, DateTime.Now.AddSeconds(20));
                                                    var portConfig3 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 640, 480, 0, 0, 0, false, DateTime.Now.AddSeconds(15));
                                                    var portConfig4 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 320, 240, 0, 0, 0, false, DateTime.Now.AddSeconds(20));

                                                    resizer.ConfigureOutputPort<VideoPort>(0, portConfig, vidCaptureHandler);
                                                    resizer2.ConfigureOutputPort<VideoPort>(0, portConfig2, vidCaptureHandler2);
                                                    resizer3.ConfigureOutputPort<VideoPort>(0, portConfig3, vidCaptureHandler3);
                                                    resizer4.ConfigureOutputPort<VideoPort>(0, portConfig4, vidCaptureHandler4);

                                                    // Create our component pipeline.
                                                    Fixture.MMALCamera.Camera.VideoPort
                                                    .ConnectTo(splitter);

                                                    splitter.Outputs[0].ConnectTo(resizer);
                                                    splitter.Outputs[1].ConnectTo(resizer2);
                                                    splitter.Outputs[2].ConnectTo(resizer3);
                                                    splitter.Outputs[3].ConnectTo(resizer4);

                                                    Fixture.MMALCamera.Camera.PreviewPort
                                                    .ConnectTo(preview);

                                                    // Camera warm up time
                                                    await Task.Delay(2000);

                                                    // Record video for 20 seconds
                                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort);

                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler2.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler3.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler4.GetFilepath());
                                                }
        }