Example 1
        /// <summary>
        /// Self-contained method for recording raw video frames directly from the camera's video port.
        /// Uses the encoding and pixel format as set in <see cref="MMALCameraConfig.VideoEncoding"/> and <see cref="MMALCameraConfig.VideoSubformat"/>.
        /// </summary>
        /// <param name="handler">The video capture handler to apply to the encoder.</param>
        /// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
        /// <returns>The awaitable Task.</returns>
        public async Task TakeRawVideo(IVideoCaptureHandler handler, CancellationToken cancellationToken)
        {
            using (var splitter = new MMALSplitterComponent())
                using (var renderer = new MMALVideoRenderer())
                {
                    this.ConfigureCameraSettings();

                    var splitterOutputConfig = new MMALPortConfig(MMALCameraConfig.VideoEncoding, MMALCameraConfig.VideoSubformat, 0);

                    // Force port type to SplitterVideoPort to prevent resolution from being set against splitter component.
                    splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterOutputConfig, handler);

                    // Create our component pipeline.
                    this.Camera.VideoPort.ConnectTo(splitter);
                    this.Camera.PreviewPort.ConnectTo(renderer);

                    MMALLog.Logger.LogInformation($"Preparing to take raw video. Resolution: {this.Camera.VideoPort.Resolution.Width} x {this.Camera.VideoPort.Resolution.Height}. " +
                                                  $"Encoder: {MMALCameraConfig.VideoEncoding.EncodingName}. Pixel Format: {MMALCameraConfig.VideoSubformat.EncodingName}.");

                    // Camera warm up time
                    await Task.Delay(2000).ConfigureAwait(false);

                    await this.ProcessAsync(this.Camera.VideoPort, cancellationToken).ConfigureAwait(false);
                }
        }
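
A minimal call-site sketch for the method above. The this.Camera / this.ProcessAsync calls suggest it lives on MMALCamera, so the sketch assumes that; the handler path and the 30-second timeout are illustrative assumptions, not from the original.

        // Hypothetical usage sketch: record raw video for ~30 seconds, then release camera resources.
        var cam = MMALCamera.Instance;

        using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/", "raw"))
        using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
        {
            await cam.TakeRawVideo(handler, cts.Token);
        }

        cam.Cleanup();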
Example 2
        public async Task ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder(MMAL_PARAM_IMAGEFX_T effect, bool throwsException)
        {
            TestHelper.BeginTest($"Video - ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder - {effect}");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var preview = new MMALNullSinkComponent())
                    using (var imageFx = new MMALImageFxComponent())
                        using (var splitter = new MMALSplitterComponent())
                            using (var vidEncoder = new MMALVideoEncoder())
                            {
                                Fixture.MMALCamera.ConfigureCameraSettings();

                                var vidEncoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420);
                                var splitterConfig   = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);
                                var fxConfig         = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                                imageFx.ConfigureOutputPort<VideoPort>(0, fxConfig, null);

                                splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), imageFx.Outputs[0], null);
                                splitter.ConfigureOutputPort<VideoPort>(0, splitterConfig, null);

                                vidEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), splitter.Outputs[0], null);
                                vidEncoder.ConfigureOutputPort(0, vidEncoderConfig, vidCaptureHandler);

                                if (throwsException)
                                {
                                    Assert.Throws<MMALInvalidException>(() =>
                                    {
                                        imageFx.ImageEffect = effect;
                                    });
                                }
                                else
                                {
                                    imageFx.ImageEffect = effect;
                                }

                                // Create our component pipeline.
                                Fixture.MMALCamera.Camera.VideoPort.ConnectTo(imageFx);
                                Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

                                imageFx.Outputs[0].ConnectTo(splitter);
                                splitter.Outputs[0].ConnectTo(vidEncoder);

                                // Camera warm up time
                                await Task.Delay(2000);

                                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));

                                await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                                Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                            }
        }
Example 3
        public async Task RecordVideoDirectlyFromSplitter()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromSplitter");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var vidCaptureHandler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                    using (var vidCaptureHandler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                        using (var vidCaptureHandler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                            using (var preview = new MMALVideoRenderer())
                                using (var splitter = new MMALSplitterComponent())
                                {
                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 0, 0, null);

                                    // Configure the splitter's input and output ports.
                                    splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
                                    splitter.ConfigureOutputPort<VideoPort>(0, splitterPortConfig, vidCaptureHandler);
                                    splitter.ConfigureOutputPort<VideoPort>(1, splitterPortConfig, vidCaptureHandler2);
                                    splitter.ConfigureOutputPort<VideoPort>(2, splitterPortConfig, vidCaptureHandler3);
                                    splitter.ConfigureOutputPort<VideoPort>(3, splitterPortConfig, vidCaptureHandler4);

                                    // Create our component pipeline.
                                    Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);
                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

                                    // Camera warm up time
                                    await Task.Delay(2000);

                                    CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                                    // Record video for 15 seconds (until the token cancels processing)
                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler2.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler3.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler4.GetFilepath());
                                }
        }
Example 4
        public async Task TakePicturesDirectlyFromSplitterComponent()
        {
            TestHelper.BeginTest("TakePicturesDirectlyFromSplitterComponent");
            TestHelper.SetConfigurationDefaults();

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "raw"))
                using (var imgCaptureHandler2 = new ImageStreamCaptureHandler("/home/pi/images/tests", "raw"))
                    using (var imgCaptureHandler3 = new ImageStreamCaptureHandler("/home/pi/images/tests", "raw"))
                        using (var imgCaptureHandler4 = new ImageStreamCaptureHandler("/home/pi/images/tests", "raw"))
                            using (var splitter = new MMALSplitterComponent())
                                using (var nullSink = new MMALNullSinkComponent())
                                {
                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                    var splitterConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                                    // Create our component pipeline.
                                    splitter.ConfigureOutputPort<SplitterStillPort>(0, splitterConfig, imgCaptureHandler);
                                    splitter.ConfigureOutputPort<SplitterStillPort>(1, splitterConfig, imgCaptureHandler2);
                                    splitter.ConfigureOutputPort<SplitterStillPort>(2, splitterConfig, imgCaptureHandler3);
                                    splitter.ConfigureOutputPort<SplitterStillPort>(3, splitterConfig, imgCaptureHandler4);

                                    Fixture.MMALCamera.Camera.StillPort.ConnectTo(splitter);
                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

                                    // Camera warm up time
                                    await Task.Delay(2000);

                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler2.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler3.GetFilepath());
                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler4.GetFilepath());
                                }
        }
Example 5
        public async Task RecordVideoDirectlyFromResizerWithSplitterComponent()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromResizerWithSplitterComponent");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            // I am only using a single output here because, due to the disk I/O performance on the Pi,
            // you ideally need a faster storage medium such as a ramdisk when outputting to multiple files.
            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var preview = new MMALVideoRenderer())
                    using (var splitter = new MMALSplitterComponent())
                        using (var resizer = new MMALResizerComponent())
                        {
                            Fixture.MMALCamera.ConfigureCameraSettings();

                            var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420);
                            var resizerPortConfig  = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1024, height: 768, timeout: DateTime.Now.AddSeconds(15));

                            // Configure the splitter and resizer ports.
                            splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), Fixture.MMALCamera.Camera.VideoPort, null);
                            splitter.ConfigureOutputPort(0, splitterPortConfig, null);

                            resizer.ConfigureOutputPort<VideoPort>(0, resizerPortConfig, vidCaptureHandler);

                            // Create our component pipeline.
                            Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                            splitter.Outputs[0].ConnectTo(resizer);

                            Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

                            // Camera warm up time
                            await Task.Delay(2000);

                            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort);

                            Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                        }
        }
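
Note that this test passes no CancellationToken to ProcessAsync; processing ends when the resizer port's timeout (DateTime.Now.AddSeconds(15)) expires. A sketch of the equivalent token-based form used by the other tests in this section:

        // Equivalent approach (sketch): omit the timeout from the port config and
        // cancel ProcessAsync after 15 seconds instead.
        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);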
Example 6
        public async Task Start(FrameSplitter frameSplitter, CancellationToken ct)
        {
            _splitter = frameSplitter;
            MMALCameraConfig.VideoStabilisation = false;

            MMALCameraConfig.SensorMode      = MMALSensorMode.Mode1;
            MMALCameraConfig.ExposureMode    = MMAL_PARAM_EXPOSUREMODE_T.MMAL_PARAM_EXPOSUREMODE_BACKLIGHT;
            MMALCameraConfig.VideoResolution = new Resolution(CapWidth, CapHeight);
            MMALCameraConfig.VideoFramerate  = new MMAL_RATIONAL_T(60, 1);

            using var vidCaptureHandler = new EmguInMemoryCaptureHandler();
            using var splitter          = new MMALSplitterComponent();
            using var renderer          = new MMALNullSinkComponent();
            _cam.ConfigureCameraSettings();
            Log.Debug("Cam mode is " + MMALCameraConfig.SensorMode);
            // Register to the event.
            vidCaptureHandler.MyEmguEvent += ProcessFrame;

            // We are instructing the splitter to do a format conversion to BGR24.
            var splitterPortConfig =
                new MMALPortConfig(MMALEncoding.BGR24, MMALEncoding.BGR24, CapWidth, CapHeight, null);

            // By default in MMALSharp, the Video port outputs using proprietary communication (Opaque) with a YUV420 pixel format.
            // Changes to this are done via MMALCameraConfig.VideoEncoding and MMALCameraConfig.VideoSubformat.
            splitter.ConfigureInputPort(
                new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, CapWidth, CapHeight, null),
                _cam.Camera.VideoPort, null);

            // Use the splitter config object constructed earlier and tell this output port to record data via our capture handler.
            splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterPortConfig, vidCaptureHandler);

            _cam.Camera.PreviewPort.ConnectTo(renderer);
            _cam.Camera.VideoPort.ConnectTo(splitter);

            await Task.Delay(2000, ct);

            await _cam.ProcessAsync(_cam.Camera.VideoPort, ct).ConfigureAwait(false);

            Log.Debug("Camera closed.");
        }
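
For buffer sizing, each BGR24 frame delivered by the splitter output configured above occupies roughly width x height x 3 bytes. A small helper sketch (hypothetical, not part of the original):

        // Rough per-frame size for a BGR24 splitter output. Assumes no row padding;
        // the firmware may align rows, so treat this as a lower bound.
        static int Bgr24FrameBytes(int width, int height) => width * height * 3;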
Example 7
        public async Task RecordVideoDirectlyFromSplitter()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromSplitter");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            // I am only using a single output here because, due to the disk I/O performance on the Pi,
            // you ideally need a faster storage medium such as a ramdisk when outputting to multiple files.
            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var preview = new MMALVideoRenderer())
                    using (var splitter = new MMALSplitterComponent())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var splitterPortConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                        // Configure the splitter's input and output ports.
                        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
                        splitter.ConfigureOutputPort(0, splitterPortConfig, vidCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);
                        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds (until the token cancels processing)
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
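
The disk I/O comment is worth quantifying: a raw I420 frame occupies roughly width x height x 1.5 bytes, so at 1920x1080 and 30 fps the splitter output above produces on the order of 93 MB/s. A small estimator sketch (hypothetical helper, assuming no alignment padding):

        // Rough estimate of raw I420 output volume: full-size Y plane plus quarter-size U and V planes.
        static long EstimateI420Bytes(int width, int height, int framerate, int seconds)
        {
            long frameBytes = (long)width * height * 3 / 2;
            return frameBytes * framerate * seconds;
        }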
Example 8
        // Motion detection without raw recording, decoupled from the onDetect event.
        static async Task motion(int totalSeconds, int recordSeconds, int sensitivity)
        {
            DeleteFiles(ramdiskPath, "*.h264");
            DeleteFiles(ramdiskPath, "*.raw");

            var cam = GetConfiguredCamera();

            // No longer cut-and-paste from the MMALSharp wiki:
            // The built-in MotionConfig "recordingTime" argument only applies to calling StartRecording
            // on the motion buffer, which is RAW (and huge). That also means the onStopDetect action
            // for cam.WithMotionDetection is not especially useful. So this variation doesn't record the
            // RAW stream and instead uses a token timeout to terminate the recording.

            // When using H.264 encoding we require key frames to be generated for the Circular buffer capture handler.
            MMALCameraConfig.InlineHeaders = true;

            Console.WriteLine("Preparing pipeline...");
            using (var splitter = new MMALSplitterComponent())
            {
                // Two capture handlers are being used here, one for motion detection and the other to record a H.264 stream.
                using var vidCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "h264");
                using var motionCircularBufferCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "raw");
                using var resizer    = new MMALIspComponent();
                using var vidEncoder = new MMALVideoEncoder();
                using var renderer   = new MMALVideoRenderer();
                cam.ConfigureCameraSettings();

                var splitterPortConfig   = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0, 0, null);
                var vidEncoderPortConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 0, MMALVideoEncoder.MaxBitrateLevel4, null);

                // The ISP resizer is being used for better performance. Frame difference motion detection will only work if using raw video data. Do not encode to H.264/MJPEG.
                // Resizing to a smaller image may improve performance, but ensure that the width/height are multiples of 32 and 16 respectively to avoid cropping.
                var resizerPortConfig = new MMALPortConfig(MMALEncoding.RGB24, MMALEncoding.RGB24, 640, 480, 0, 0, 0, false, null);

                splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), cam.Camera.VideoPort, null);
                splitter.ConfigureOutputPort(0, splitterPortConfig, null);
                splitter.ConfigureOutputPort(1, splitterPortConfig, null);

                resizer.ConfigureOutputPort<VideoPort>(0, resizerPortConfig, motionCircularBufferCaptureHandler);

                vidEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), splitter.Outputs[1], null);
                vidEncoder.ConfigureOutputPort(vidEncoderPortConfig, vidCaptureHandler);

                cam.Camera.VideoPort.ConnectTo(splitter);
                cam.Camera.PreviewPort.ConnectTo(renderer);

                splitter.Outputs[0].ConnectTo(resizer);
                splitter.Outputs[1].ConnectTo(vidEncoder);

                Console.WriteLine("Camera warmup...");
                await Task.Delay(2000);

                Console.WriteLine($"Detecting motion for {totalSeconds} seconds with sensitivity threshold {sensitivity}...");

                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(totalSeconds));

                // The recording duration doesn't matter; see notes at top of this method.
                var motionConfig = new MotionConfig(TimeSpan.FromSeconds(10), sensitivity);

                // Stephen Cleary says CTS disposal is unnecessary as long as you cancel! https://stackoverflow.com/a/19005066/152997
                var startRecordingCTS = LocalPrepareToRecord();

                await cam.WithMotionDetection(
                    motionCircularBufferCaptureHandler,
                    motionConfig,
                    // This callback will be invoked when motion has been detected.
                    () =>
                    {
                        // This has no effect if the token is already cancelled.
                        startRecordingCTS.Cancel();
                    })
                .ProcessAsync(cam.Camera.VideoPort, cts.Token);

                CancellationTokenSource LocalPrepareToRecord()
                {
                    var cts = new CancellationTokenSource();

                    cts.Token.Register(LocalStartRecording);
                    return cts;
                }

                async void LocalStartRecording()
                {
                    Console.WriteLine($"Motion detected, recording {recordSeconds} seconds...");
                    motionCircularBufferCaptureHandler.DisableMotionDetection();
                    vidCaptureHandler.StartRecording();
                    vidEncoder.RequestIFrame();

                    // Set up a token that will stop the recording after the requested duration
                    // Stephen Cleary says CTS disposal is unnecessary as long as you cancel! https://stackoverflow.com/a/19005066/152997
                    var recordingCTS = new CancellationTokenSource();

                    // When the token expires, stop recording and re-enable capture
                    recordingCTS.Token.Register(LocalEndRecording);

                    // Start the clock
                    recordingCTS.CancelAfter(recordSeconds * 1000);

                    // Record until the duration passes or the overall motion detection token expires
                    await Task.WhenAny(new Task[]
                    {
                        cts.Token.AsTask(),
                        recordingCTS.Token.AsTask()
                    });

                    if (!recordingCTS.IsCancellationRequested)
                    {
                        recordingCTS.Cancel();
                    }
                }

                void LocalEndRecording()
                {
                    Console.WriteLine("...recording stopped.");
                    startRecordingCTS = LocalPrepareToRecord();
                    motionCircularBufferCaptureHandler.EnableMotionDetection();
                    vidCaptureHandler.StopRecording();
                    vidCaptureHandler.Split();
                }
            }

            // We can't use the convenient fall-through using pattern here (otherwise MMALCamera.Cleanup
            // throws: Argument is invalid. Unable to destroy component).
            cam.Cleanup();

            Console.WriteLine("Exiting.");
        }
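
cts.Token.AsTask() above is not part of the BCL, so it is presumably an extension method defined elsewhere in this project. A minimal sketch of what such a helper might look like:

        // Hypothetical sketch of the AsTask() extension used above: returns a Task that
        // completes when the CancellationToken is cancelled.
        public static class CancellationTokenExtensions
        {
            public static Task AsTask(this CancellationToken token)
            {
                var tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
                token.Register(() => tcs.TrySetResult(null));
                return tcs.Task;
            }
        }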
Example 9
        // Motion detection as in the wiki (records a raw file).
        static async Task motion_record_raw(int totalSeconds, int recordSeconds, int sensitivity)
        {
            DeleteFiles(ramdiskPath, "*.h264");
            DeleteFiles(ramdiskPath, "*.raw");

            var cam = GetConfiguredCamera();

            // When using H.264 encoding we require key frames to be generated for the Circular buffer capture handler.
            MMALCameraConfig.InlineHeaders = true;

            Console.WriteLine("Preparing pipeline...");
            using (var splitter = new MMALSplitterComponent())
            {
                // Two capture handlers are being used here, one for motion detection and the other to record a H.264 stream.
                using var vidCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "h264");
                using var motionCircularBufferCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "raw");
                using var resizer    = new MMALIspComponent();
                using var vidEncoder = new MMALVideoEncoder();
                using var renderer   = new MMALVideoRenderer();
                cam.ConfigureCameraSettings();

                // The ISP resizer is being used for better performance. Frame difference motion detection will only work if using raw video data. Do not encode to H.264/MJPEG.
                // Resizing to a smaller image may improve performance, but ensure that the width/height are multiples of 32 and 16 respectively to avoid cropping.
                var resizerPortConfig    = new MMALPortConfig(MMALEncoding.RGB24, MMALEncoding.RGB24, 640, 480, 0, 0, 0, false, null);
                var vidEncoderPortConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 0, MMALVideoEncoder.MaxBitrateLevel4, null);
                var splitterPortConfig   = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0, 0, null);

                splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), cam.Camera.VideoPort, null);
                splitter.ConfigureOutputPort(0, splitterPortConfig, null);
                splitter.ConfigureOutputPort(1, splitterPortConfig, null);

                resizer.ConfigureOutputPort<VideoPort>(0, resizerPortConfig, motionCircularBufferCaptureHandler);

                vidEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), splitter.Outputs[1], null);
                vidEncoder.ConfigureOutputPort(vidEncoderPortConfig, vidCaptureHandler);

                cam.Camera.VideoPort.ConnectTo(splitter);
                cam.Camera.PreviewPort.ConnectTo(renderer);

                splitter.Outputs[0].ConnectTo(resizer);
                splitter.Outputs[1].ConnectTo(vidEncoder);

                Console.WriteLine("Camera warmup...");
                await Task.Delay(2000);

                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(totalSeconds));

                var motionConfig = new MotionConfig(TimeSpan.FromSeconds(recordSeconds), sensitivity);

                Console.WriteLine($"Detecting motion for {totalSeconds} seconds with sensitivity threshold {sensitivity}...");

                await cam.WithMotionDetection(
                    motionCircularBufferCaptureHandler,
                    motionConfig,
                    // This callback will be invoked when motion has been detected.
                    () =>
                {
                    Console.WriteLine($"Motion detected, recording {recordSeconds} seconds...");

                    // Stop motion detection while we are recording.
                    motionCircularBufferCaptureHandler.DisableMotionDetection();

                    // Start recording our H.264 video.
                    vidCaptureHandler.StartRecording();
                    motionCircularBufferCaptureHandler.StartRecording();

                    // Request a key frame to be immediately generated by the h.264 encoder.
                    vidEncoder.RequestIFrame();
                },
                    // Invoked when motion handler recording-time expires
                    () =>
                {
                    // We want to re-enable the motion detection.
                    motionCircularBufferCaptureHandler.EnableMotionDetection();

                    // Stop recording on our capture handlers.
                    motionCircularBufferCaptureHandler.StopRecording();
                    vidCaptureHandler.StopRecording();

                    // Optionally create new file for our next recording run (don't do the RAW file, we don't want it).
                    vidCaptureHandler.Split();

                    Console.WriteLine("...recording stopped.");
                })
                .ProcessAsync(cam.Camera.VideoPort, cts.Token);
            }

            // We can't use the convenient fall-through using pattern here (otherwise MMALCamera.Cleanup
            // throws: Argument is invalid. Unable to destroy component).
            cam.Cleanup();

            Console.WriteLine("Exiting.");
        }
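
The resizer comments above ask for a width that is a multiple of 32 and a height that is a multiple of 16. A small sketch (hypothetical helper) for rounding arbitrary dimensions up to those boundaries:

        // Round dimensions up so the width is a multiple of 32 and the height a multiple
        // of 16, avoiding unexpected cropping/padding of raw frames.
        static (int Width, int Height) AlignResolution(int width, int height)
        {
            return ((width + 31) & ~31, (height + 15) & ~15);
        }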
Example 10
        public void ChangeColorSpace()
        {
            TestHelper.BeginTest("ChangeColorSpace");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            MMALCameraConfig.VideoColorSpace = MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601;

            using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var handler2 = new VideoStreamCaptureHandler("/home/pi/video/tests", "h264"))
                    using (var handler3 = new VideoStreamCaptureHandler("/home/pi/video/tests", "h264"))
                        using (var handler4 = new VideoStreamCaptureHandler("/home/pi/video/tests", "h264"))
                            using (var splitter = new MMALSplitterComponent(null))
                                using (var vidEncoder = new MMALVideoEncoder(handler))
                                    using (var vidEncoder2 = new MMALVideoEncoder(handler2))
                                        using (var vidEncoder3 = new MMALVideoEncoder(handler3))
                                            using (var vidEncoder4 = new MMALVideoEncoder(handler4))
                                                using (var renderer = new MMALVideoRenderer())
                                                {
                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 10, 0, 13000000, null);
                                                    var portConfig1        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 10, 13000000, DateTime.Now.AddSeconds(20));
                                                    var portConfig2        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 20, 13000000, DateTime.Now.AddSeconds(15));
                                                    var portConfig3        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 30, 13000000, DateTime.Now.AddSeconds(10));
                                                    var portConfig4        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 40, 13000000, DateTime.Now.AddSeconds(10));

                                                    // Create our component pipeline.
                                                    splitter.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, Fixture.MMALCamera.Camera.VideoPort);
                                                    splitter.ConfigureOutputPort(0, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(1, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(2, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(3, splitterPortConfig);

                                                    vidEncoder.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[0]);
                                                    vidEncoder.ConfigureOutputPort(0, portConfig1);

                                                    vidEncoder2.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[1]);
                                                    vidEncoder2.ConfigureOutputPort(0, portConfig2);
                                                    vidEncoder3.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[2]);
                                                    vidEncoder3.ConfigureOutputPort(0, portConfig3);

                                                    vidEncoder4.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[3]);
                                                    vidEncoder4.ConfigureOutputPort(0, portConfig4);

                                                    Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                                                    splitter.Outputs[0].ConnectTo(vidEncoder);
                                                    splitter.Outputs[1].ConnectTo(vidEncoder2);
                                                    splitter.Outputs[2].ConnectTo(vidEncoder3);
                                                    splitter.Outputs[3].ConnectTo(vidEncoder4);

                                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                                                    // Assert that all output ports have the same video color space.
                                                    Assert.True(Fixture.MMALCamera.Camera.VideoPort.VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);

                                                    Assert.True(splitter.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(splitter.Outputs[1].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(splitter.Outputs[2].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(splitter.Outputs[3].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);

                                                    Assert.True(vidEncoder.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(vidEncoder2.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(vidEncoder3.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                    Assert.True(vidEncoder4.Outputs[0].VideoColorSpace.EncodingVal == MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT601.EncodingVal);
                                                }
        }
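
The same pattern works for other color spaces; the sketch below assumes MMALEncoding also exposes a BT.709 constant, which is worth verifying against the library before relying on it.

        // Sketch: request Rec.709 instead of BT.601 before ConfigureCameraSettings() is called.
        MMALCameraConfig.VideoColorSpace = MMALEncoding.MMAL_COLOR_SPACE_ITUR_BT709;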
Example 11
        public async Task VideoSplitterComponent()
        {
            TestHelper.BeginTest("VideoSplitterComponent");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var handler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                    using (var handler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                        using (var handler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                            using (var splitter = new MMALSplitterComponent(null))
                                using (var vidEncoder = new MMALVideoEncoder(handler))
                                    using (var vidEncoder2 = new MMALVideoEncoder(handler2))
                                        using (var vidEncoder3 = new MMALVideoEncoder(handler3))
                                            using (var vidEncoder4 = new MMALVideoEncoder(handler4))
                                                using (var renderer = new MMALNullSinkComponent())
                                                {
                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 10, 0, 13000000, null);
                                                    var portConfig1        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 10, 13000000, DateTime.Now.AddSeconds(20));
                                                    var portConfig2        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 20, 13000000, DateTime.Now.AddSeconds(15));
                                                    var portConfig3        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 30, 13000000, DateTime.Now.AddSeconds(10));
                                                    var portConfig4        = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 40, 13000000, DateTime.Now.AddSeconds(10));

                                                    // Create our component pipeline.
                                                    splitter.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, Fixture.MMALCamera.Camera.VideoPort);
                                                    splitter.ConfigureOutputPort(0, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(1, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(2, splitterPortConfig);
                                                    splitter.ConfigureOutputPort(3, splitterPortConfig);

                                                    vidEncoder.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[0]);
                                                    vidEncoder.ConfigureOutputPort(0, portConfig1);

                                                    vidEncoder2.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[1]);
                                                    vidEncoder2.ConfigureOutputPort(0, portConfig2);
                                                    vidEncoder3.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[2]);
                                                    vidEncoder3.ConfigureOutputPort(0, portConfig3);

                                                    vidEncoder4.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[3]);
                                                    vidEncoder4.ConfigureOutputPort(0, portConfig4);

                                                    Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                                                    splitter.Outputs[0].ConnectTo(vidEncoder);
                                                    splitter.Outputs[1].ConnectTo(vidEncoder2);
                                                    splitter.Outputs[2].ConnectTo(vidEncoder3);
                                                    splitter.Outputs[3].ConnectTo(vidEncoder4);

                                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                                                    // Camera warm up time
                                                    await Task.Delay(2000);

                                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort);

                                                    Fixture.CheckAndAssertFilepath(handler.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(handler2.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(handler3.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(handler4.GetFilepath());
                                                }
        }
Example 12
        public async Task Start(CancellationToken ct)
        {
            LogUtil.Write("Starting Camera...");
            MMALCameraConfig.VideoStabilisation = false;

            var sensorMode = MMALSensorMode.Mode0;

            switch (camMode)
            {
            case 1:
                sensorMode = MMALSensorMode.Mode1;
                break;

            case 2:
                sensorMode = MMALSensorMode.Mode2;
                break;

            case 3:
                sensorMode = MMALSensorMode.Mode3;
                break;

            case 4:
                sensorMode = MMALSensorMode.Mode4;
                break;

            case 5:
                sensorMode = MMALSensorMode.Mode5;
                break;

            case 6:
                sensorMode = MMALSensorMode.Mode6;
                break;

            case 7:
                sensorMode = MMALSensorMode.Mode7;
                break;
            }
            MMALCameraConfig.SensorMode      = sensorMode;
            MMALCameraConfig.ExposureMode    = MMAL_PARAM_EXPOSUREMODE_T.MMAL_PARAM_EXPOSUREMODE_BACKLIGHT;
            MMALCameraConfig.VideoResolution = new Resolution(capWidth, capHeight);
            MMALCameraConfig.VideoFramerate  = new MMAL_RATIONAL_T(60, 1);

            using var vidCaptureHandler = new EmguInMemoryCaptureHandler();
            using var splitter          = new MMALSplitterComponent();
            using var renderer          = new MMALNullSinkComponent();
            cam.ConfigureCameraSettings();
            LogUtil.Write("Cam mode is " + MMALCameraConfig.SensorMode);
            // Register to the event.
            vidCaptureHandler.MyEmguEvent += OnEmguEventCallback;

            // We are instructing the splitter to do a format conversion to BGR24.
            var splitterPortConfig = new MMALPortConfig(MMALEncoding.BGR24, MMALEncoding.BGR24, capWidth, capHeight, null);

            // By default in MMALSharp, the Video port outputs using proprietary communication (Opaque) with a YUV420 pixel format.
            // Changes to this are done via MMALCameraConfig.VideoEncoding and MMALCameraConfig.VideoSubformat.
            splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, capWidth, capHeight, null), cam.Camera.VideoPort, null);

            // Use the splitter config object constructed earlier and tell this output port to record data via our capture handler.
            splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterPortConfig, vidCaptureHandler);

            cam.Camera.PreviewPort.ConnectTo(renderer);
            cam.Camera.VideoPort.ConnectTo(splitter);

            // Camera warm up time
            LogUtil.Write("Camera is warming up...");
            await Task.Delay(2000).ConfigureAwait(false);

            LogUtil.WriteInc("Camera initialized.");
            await cam.ProcessAsync(cam.Camera.VideoPort, ct).ConfigureAwait(false);

            LogUtil.WriteDec("Camera closed.");
        }
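
If MMALSensorMode is a plain enum whose ModeN members correspond to the integers 0-7 (an assumption worth checking), the switch above collapses to a range check and a cast:

        // Alternative sketch: map camMode straight to the enum, falling back to Mode0
        // (automatic mode selection) for out-of-range values.
        var sensorMode = (camMode >= 1 && camMode <= 7)
            ? (MMALSensorMode)camMode
            : MMALSensorMode.Mode0;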
Example 13
        public async Task RecordVideoDirectlyFromResizerWithSplitterComponent()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromResizerWithSplitterComponent");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var vidCaptureHandler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                    using (var vidCaptureHandler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                        using (var vidCaptureHandler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                            using (var preview = new MMALVideoRenderer())
                                using (var splitter = new MMALSplitterComponent())
                                    using (var resizer = new MMALResizerComponent())
                                        using (var resizer2 = new MMALResizerComponent())
                                            using (var resizer3 = new MMALResizerComponent())
                                                using (var resizer4 = new MMALResizerComponent())
                                                {
                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                    var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0, 0, null);

                                                    // Configure the splitter's input and output ports.
                                                    splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
                                                    splitter.ConfigureOutputPort(0, splitterPortConfig, null);
                                                    splitter.ConfigureOutputPort(1, splitterPortConfig, null);
                                                    splitter.ConfigureOutputPort(2, splitterPortConfig, null);
                                                    splitter.ConfigureOutputPort(3, splitterPortConfig, null);

                                                    var portConfig  = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 1024, 768, 0, 0, 0, false, DateTime.Now.AddSeconds(20));
                                                    var portConfig2 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 800, 600, 0, 0, 0, false, DateTime.Now.AddSeconds(20));
                                                    var portConfig3 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 640, 480, 0, 0, 0, false, DateTime.Now.AddSeconds(15));
                                                    var portConfig4 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 320, 240, 0, 0, 0, false, DateTime.Now.AddSeconds(20));

                                                    resizer.ConfigureOutputPort<VideoPort>(0, portConfig, vidCaptureHandler);
                                                    resizer2.ConfigureOutputPort<VideoPort>(0, portConfig2, vidCaptureHandler2);
                                                    resizer3.ConfigureOutputPort<VideoPort>(0, portConfig3, vidCaptureHandler3);
                                                    resizer4.ConfigureOutputPort<VideoPort>(0, portConfig4, vidCaptureHandler4);

                                                    // Create our component pipeline.
                                                    Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                                                    splitter.Outputs[0].ConnectTo(resizer);
                                                    splitter.Outputs[1].ConnectTo(resizer2);
                                                    splitter.Outputs[2].ConnectTo(resizer3);
                                                    splitter.Outputs[3].ConnectTo(resizer4);

                                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

                                                    // Camera warm up time
                                                    await Task.Delay(2000);

                                                    // Record video until the resizer port timeouts expire (15-20 seconds)
                                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort);

                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler2.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler3.GetFilepath());
                                                    Fixture.CheckAndAssertFilepath(vidCaptureHandler4.GetFilepath());
                                                }
        }
Example 14
        public async Task UserProvidedPortName()
        {
            TestHelper.BeginTest("UserProvidedPortName");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests");

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                using (var imgCaptureHandler2 = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                    using (var imgCaptureHandler3 = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                        using (var imgCaptureHandler4 = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                            using (var imgEncoder = new MMALImageEncoder())
                                using (var imgEncoder2 = new MMALImageEncoder())
                                    using (var imgEncoder3 = new MMALImageEncoder())
                                        using (var imgEncoder4 = new MMALImageEncoder())
                                            using (var splitter = new MMALSplitterComponent())
                                                using (var isp1 = new MMALIspComponent())
                                                    using (var isp2 = new MMALIspComponent())
                                                        using (var isp3 = new MMALIspComponent())
                                                            using (var isp4 = new MMALIspComponent())
                                                                using (var nullSink = new MMALNullSinkComponent())
                                                                {
                                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                                    var portConfig  = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90, userPortName: "Image encoder 1");
                                                                    var portConfig2 = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90, userPortName: "Image encoder 2");
                                                                    var portConfig3 = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90, userPortName: "Image encoder 3");
                                                                    var portConfig4 = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90, userPortName: "Image encoder 4");

                                                                    var splitterConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                                                                    var resizeConfig  = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1280, height: 720);
                                                                    var resizeConfig2 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1024, height: 720);
                                                                    var resizeConfig3 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 640, height: 480);
                                                                    var resizeConfig4 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 620, height: 310);

                                                                    // Create our component pipeline.
                                                                    splitter.ConfigureOutputPort<SplitterStillPort>(0, splitterConfig, null);
                                                                    splitter.ConfigureOutputPort<SplitterStillPort>(1, splitterConfig, null);
                                                                    splitter.ConfigureOutputPort<SplitterStillPort>(2, splitterConfig, null);
                                                                    splitter.ConfigureOutputPort<SplitterStillPort>(3, splitterConfig, null);

                                                                    isp1.ConfigureOutputPort(resizeConfig, null);
                                                                    isp2.ConfigureOutputPort(resizeConfig2, null);
                                                                    isp3.ConfigureOutputPort(resizeConfig3, null);
                                                                    isp4.ConfigureOutputPort(resizeConfig4, null);

                                                                    imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);
                                                                    imgEncoder2.ConfigureOutputPort(portConfig2, imgCaptureHandler2);
                                                                    imgEncoder3.ConfigureOutputPort(portConfig3, imgCaptureHandler3);
                                                                    imgEncoder4.ConfigureOutputPort(portConfig4, imgCaptureHandler4);

                                                                    Fixture.MMALCamera.Camera.StillPort.ConnectTo(splitter);
                                                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

                                                                    splitter.Outputs[0].ConnectTo(isp1);
                                                                    splitter.Outputs[1].ConnectTo(isp2);
                                                                    splitter.Outputs[2].ConnectTo(isp3);
                                                                    splitter.Outputs[3].ConnectTo(isp4);

                                                                    isp1.Outputs[0].ConnectTo(imgEncoder);
                                                                    isp2.Outputs[0].ConnectTo(imgEncoder2);
                                                                    isp3.Outputs[0].ConnectTo(imgEncoder3);
                                                                    isp4.Outputs[0].ConnectTo(imgEncoder4);

                                                                    // Camera warm up time
                                                                    await Task.Delay(2000);

                                                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler2.GetFilepath());
                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler3.GetFilepath());
                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler4.GetFilepath());
                                                                }
        }
Example 15
        public async Task TakeMultiplePicturesFromSplitterComponent()
        {
            // This test relies on an ISP component being connected between a splitter component output port
            // and an image encoder input port. If the ISP component is not used as a go-between, the splitter
            // component appears to only accept 1 of its output ports being connected to an image encoder. I believe
            // this may be a firmware restriction.
            TestHelper.BeginTest("TakeMultiplePicturesFromSplitterComponent");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests");

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                using (var imgCaptureHandler2 = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                    using (var imgCaptureHandler3 = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                        using (var imgCaptureHandler4 = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                            using (var imgEncoder = new MMALImageEncoder())
                                using (var imgEncoder2 = new MMALImageEncoder())
                                    using (var imgEncoder3 = new MMALImageEncoder())
                                        using (var imgEncoder4 = new MMALImageEncoder())
                                            using (var splitter = new MMALSplitterComponent())
                                                using (var isp1 = new MMALIspComponent())
                                                    using (var isp2 = new MMALIspComponent())
                                                        using (var isp3 = new MMALIspComponent())
                                                            using (var isp4 = new MMALIspComponent())
                                                                using (var nullSink = new MMALNullSinkComponent())
                                                                {
                                                                    Fixture.MMALCamera.ConfigureCameraSettings();

                                                                    var portConfig  = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90);
                                                                    var portConfig2 = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90);
                                                                    var portConfig3 = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90);
                                                                    var portConfig4 = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90);

                                                                    var splitterConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                                                                    var resizeConfig  = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1280, height: 720);
                                                                    var resizeConfig2 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1024, height: 720);
                                                                    var resizeConfig3 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 640, height: 480);
                                                                    var resizeConfig4 = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 620, height: 310);

                                                                    // Create our component pipeline.
                                                                    splitter.ConfigureOutputPort <SplitterStillPort>(0, splitterConfig, null);
                                                                    splitter.ConfigureOutputPort <SplitterStillPort>(1, splitterConfig, null);
                                                                    splitter.ConfigureOutputPort <SplitterStillPort>(2, splitterConfig, null);
                                                                    splitter.ConfigureOutputPort <SplitterStillPort>(3, splitterConfig, null);

                                                                    isp1.ConfigureOutputPort(resizeConfig, null);
                                                                    isp2.ConfigureOutputPort(resizeConfig2, null);
                                                                    isp3.ConfigureOutputPort(resizeConfig3, null);
                                                                    isp4.ConfigureOutputPort(resizeConfig4, null);

                                                                    imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);
                                                                    imgEncoder2.ConfigureOutputPort(portConfig2, imgCaptureHandler2);
                                                                    imgEncoder3.ConfigureOutputPort(portConfig3, imgCaptureHandler3);
                                                                    imgEncoder4.ConfigureOutputPort(portConfig4, imgCaptureHandler4);

                                                                    Fixture.MMALCamera.Camera.StillPort.ConnectTo(splitter);
                                                                    Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

                                                                    splitter.Outputs[0].ConnectTo(isp1);
                                                                    splitter.Outputs[1].ConnectTo(isp2);
                                                                    splitter.Outputs[2].ConnectTo(isp3);
                                                                    splitter.Outputs[3].ConnectTo(isp4);

                                                                    isp1.Outputs[0].ConnectTo(imgEncoder);
                                                                    isp2.Outputs[0].ConnectTo(imgEncoder2);
                                                                    isp3.Outputs[0].ConnectTo(imgEncoder3);
                                                                    isp4.Outputs[0].ConnectTo(imgEncoder4);

                                                                    // Camera warm up time
                                                                    await Task.Delay(2000);

                                                                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler2.GetFilepath());
                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler3.GetFilepath());
                                                                    Fixture.CheckAndAssertFilepath(imgCaptureHandler4.GetFilepath());
                                                                }
        }
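The four-branch fan-out above can make the essential wiring hard to see. The following is a minimal sketch, not taken from the MMALSharp test suite, of a single splitter output routed through an ISP component into a JPEG encoder. It reuses the Fixture helper, component types and port types from the test above; the method name, output directory, resolution and quality values are illustrative assumptions.

        // Minimal sketch (hypothetical method, not part of the MMALSharp test suite):
        // one splitter output -> ISP component -> JPEG encoder, isolating the go-between
        // wiring described in the comment above. Paths and sizes are illustrative.
        public async Task TakeSinglePictureViaSplitterAndIsp()
        {
            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
            using (var imgEncoder = new MMALImageEncoder())
            using (var splitter = new MMALSplitterComponent())
            using (var isp = new MMALIspComponent())
            using (var nullSink = new MMALNullSinkComponent())
            {
                Fixture.MMALCamera.ConfigureCameraSettings();

                var splitterConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);
                var resizeConfig   = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1280, height: 720);
                var encoderConfig  = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90);

                // The ISP component sits between the splitter output and the encoder input.
                splitter.ConfigureOutputPort<SplitterStillPort>(0, splitterConfig, null);
                isp.ConfigureOutputPort(resizeConfig, null);
                imgEncoder.ConfigureOutputPort(encoderConfig, imgCaptureHandler);

                Fixture.MMALCamera.Camera.StillPort.ConnectTo(splitter);
                Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

                splitter.Outputs[0].ConnectTo(isp);
                isp.Outputs[0].ConnectTo(imgEncoder);

                // Camera warm up time
                await Task.Delay(2000);

                await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
            }
        }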
Example n. 16
0
        public void VideoSplitterComponent()
        {
            TestHelper.BeginTest("VideoSplitterComponent");
            TestHelper.SetConfigurationDefaults();

            AsyncContext.Run(async () =>
            {
                TestHelper.CleanDirectory("/home/pi/videos/tests");

                using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "avi"))
                    using (var handler2 = new VideoStreamCaptureHandler("/home/pi/video/tests", "avi"))
                        using (var handler3 = new VideoStreamCaptureHandler("/home/pi/video/tests", "avi"))
                            using (var handler4 = new VideoStreamCaptureHandler("/home/pi/video/tests", "avi"))
                                using (var splitter = new MMALSplitterComponent(null))
                                    using (var vidEncoder = new MMALVideoEncoder(handler, DateTime.Now.AddSeconds(10)))
                                        using (var vidEncoder2 = new MMALVideoEncoder(handler2, DateTime.Now.AddSeconds(15)))
                                            using (var vidEncoder3 = new MMALVideoEncoder(handler3, DateTime.Now.AddSeconds(10)))
                                                using (var vidEncoder4 = new MMALVideoEncoder(handler4, DateTime.Now.AddSeconds(10)))
                                                    using (var renderer = new MMALVideoRenderer())
                                                    {
                                                        _fixture.MMALCamera.ConfigureCameraSettings();

                                                        // Create our component pipeline.
                                                        splitter.ConfigureInputPort(MMALEncoding.I420, MMALEncoding.I420, _fixture.MMALCamera.Camera.VideoPort);
                                                        splitter.ConfigureOutputPort(0, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);
                                                        splitter.ConfigureOutputPort(1, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);
                                                        splitter.ConfigureOutputPort(2, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);
                                                        splitter.ConfigureOutputPort(3, MMALEncoding.OPAQUE, MMALEncoding.I420, 0);

                                                        vidEncoder.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[0]);
                                                        vidEncoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 10, 25000000);

                                                        vidEncoder2.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[1]);
                                                        vidEncoder2.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 20, 25000000);
                                                        vidEncoder3.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[2]);
                                                        vidEncoder3.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 30, 25000000);

                                                        vidEncoder4.ConfigureInputPort(MMALEncoding.OPAQUE, MMALEncoding.I420, splitter.Outputs[3]);
                                                        vidEncoder4.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 40, 25000000);

                                                        _fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);

                                                        splitter.Outputs[0].ConnectTo(vidEncoder);
                                                        splitter.Outputs[1].ConnectTo(vidEncoder2);
                                                        splitter.Outputs[2].ConnectTo(vidEncoder3);
                                                        splitter.Outputs[3].ConnectTo(vidEncoder4);

                                                        _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                                                        // Camera warm up time
                                                        await Task.Delay(2000);

                                                        await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.VideoPort);

                                                        _fixture.CheckAndAssertFilepath(handler.GetFilepath());
                                                        _fixture.CheckAndAssertFilepath(handler2.GetFilepath());
                                                        _fixture.CheckAndAssertFilepath(handler3.GetFilepath());
                                                        _fixture.CheckAndAssertFilepath(handler4.GetFilepath());
                                                    }
            });
        }
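The VideoSplitterComponent test above uses an older MMALSharp API: the capture handlers and DateTime-based record timeouts are passed to the MMALVideoEncoder constructors, and ConfigureInputPort/ConfigureOutputPort take the encodings positionally. For comparison, here is a hedged sketch, not taken from the test suite, of a two-way H.264 split written against the MMALPortConfig-style API used by the other examples on this page. The method name, file paths and the 15-second CancellationTokenSource are illustrative assumptions, and the per-encoder quality/bitrate settings of the original are omitted.

        // Sketch only (hypothetical method, not part of the MMALSharp test suite): a two-way
        // H.264 split using the MMALPortConfig-based API shown in the other examples here.
        // Requires System.Threading for CancellationTokenSource.
        public async Task VideoSplitterComponentPortConfigStyle()
        {
            using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
            using (var handler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
            using (var splitter = new MMALSplitterComponent())
            using (var vidEncoder = new MMALVideoEncoder())
            using (var vidEncoder2 = new MMALVideoEncoder())
            using (var renderer = new MMALVideoRenderer())
            {
                Fixture.MMALCamera.ConfigureCameraSettings();

                var splitterConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);
                var encoderConfig  = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420);
                var encoderConfig2 = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420);

                splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterConfig, null);
                splitter.ConfigureOutputPort<SplitterVideoPort>(1, splitterConfig, null);

                vidEncoder.ConfigureOutputPort(encoderConfig, handler);
                vidEncoder2.ConfigureOutputPort(encoderConfig2, handler2);

                Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);
                Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                splitter.Outputs[0].ConnectTo(vidEncoder);
                splitter.Outputs[1].ConnectTo(vidEncoder2);

                // Camera warm up time
                await Task.Delay(2000);

                // Record for roughly 15 seconds; the token replaces the DateTime timeouts of the older API.
                using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(15)))
                {
                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);
                }

                Fixture.CheckAndAssertFilepath(handler.GetFilepath());
                Fixture.CheckAndAssertFilepath(handler2.GetFilepath());
            }
        }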