Example #1
        public async Task TakePictureWithInMemoryHandler()
        {
            TestHelper.BeginTest("TakePictureWithInMemoryHandler");
            TestHelper.SetConfigurationDefaults();

            var imgCaptureHandler = new InMemoryCaptureHandler();

            using (var preview = new MMALNullSinkComponent())
                using (var imgEncoder = new MMALImageEncoder(true))
                {
                    Fixture.MMALCamera.ConfigureCameraSettings();

                    var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);

                    imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                    // Create our component pipeline.
                    Fixture.MMALCamera.Camera.StillPort
                    .ConnectTo(imgEncoder);
                    Fixture.MMALCamera.Camera.PreviewPort
                    .ConnectTo(preview);

                    // Camera warm up time
                    await Task.Delay(2000);

                    await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                    Assert.True(imgCaptureHandler.WorkingData.Count > 0);
                }
        }
Example #2
        public async Task TakePicturesFromVideoPortWithCustomFilename(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.BeginTest("TakePicturesFromVideoPortWithCustomFilename", encodingType.EncodingName, pixelFormat.EncodingName);
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests");

            using (var imgCaptureHandler = new ImageStreamCaptureHandler($"/home/pi/images/tests/fromVideoPort.{extension}"))
                using (var splitter = new MMALSplitterComponent())
                    using (var preview = new MMALNullSinkComponent())
                        using (var imgEncoder = new MMALImageEncoder(continuousCapture: true))
                        {
                            Fixture.MMALCamera.ConfigureCameraSettings();

                            var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

                            imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                            // Create our component pipeline.
                            Fixture.MMALCamera.Camera.VideoPort
                            .ConnectTo(splitter);
                            splitter.Outputs[0].ConnectTo(imgEncoder);
                            Fixture.MMALCamera.Camera.PreviewPort
                            .ConnectTo(preview);

                            // Camera warm up time
                            await Task.Delay(2000);

                            var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

                            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                            Fixture.CheckAndAssertDirectory(imgCaptureHandler.Directory);
                        }
        }
Example #3
        public async Task TakePictureTimeout()
        {
            TestHelper.BeginTest("TakePictureTimeout");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests/split_tests");

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests/split_tests", "jpg"))
                using (var preview = new MMALNullSinkComponent())
                    using (var imgEncoder = new MMALImageEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);

                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.StillPort
                        .ConnectTo(imgEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        var timeout = DateTime.Now.AddSeconds(10);
                        while (DateTime.Now.CompareTo(timeout) < 0)
                        {
                            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);
                        }

                        Fixture.CheckAndAssertDirectory(imgCaptureHandler.Directory);
                    }
        }
Example #4
        public async Task RecordVideoDirectlyFromResizer()
        {
            TestHelper.BeginTest("RecordVideoDirectlyFromResizer");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
                using (var preview = new MMALVideoRenderer())
                    using (var resizer = new MMALResizerComponent())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        // Use the resizer to resize 1080p to 640x480.
                        var portConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 640, 480, 0, 0, 0, false, null);

                        resizer.ConfigureOutputPort<VideoPort>(0, portConfig, vidCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(resizer);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #5
        /// <summary>
        /// Self-contained method for capturing a single image from the camera still port.
        /// An MMALImageEncoder component will be created and attached to the still port.
        /// </summary>
        /// <param name="handler">The image capture handler to apply to the encoder component.</param>
        /// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
        /// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
        /// <returns>The awaitable Task.</returns>
        public async Task TakePicture(ICaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            using (var imgEncoder = new MMALImageEncoder(handler))
                using (var renderer = new MMALNullSinkComponent())
                {
                    this.ConfigureCameraSettings();

                    var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

                    imgEncoder.ConfigureOutputPort(portConfig);

                    // Create our component pipeline.
                    this.Camera.StillPort.ConnectTo(imgEncoder);
                    this.Camera.PreviewPort.ConnectTo(renderer);

                    // Log the capture settings prior to taking the picture.
                    MMALLog.Logger.Info($"Preparing to take picture. Resolution: {this.Camera.StillPort.Resolution.Width} x {this.Camera.StillPort.Resolution.Height}. " +
                                        $"Encoder: {encodingType.EncodingName}. Pixel Format: {pixelFormat.EncodingName}.");

                    // Camera warm up time
                    await Task.Delay(2000).ConfigureAwait(false);

                    await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);
                }
        }
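A minimal usage sketch for the helper above; the output directory, file extension and the MMALCamera.Instance/Cleanup() calls follow the patterns used elsewhere in these examples, so treat the specifics as illustrative assumptions.

        public async Task TakePictureExample()
        {
            var cam = MMALCamera.Instance;

            // Write the captured JPEG to an auto-named file under the given directory.
            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/", "jpg"))
            {
                await cam.TakePicture(imgCaptureHandler, MMALEncoding.JPEG, MMALEncoding.I420);
            }

            // Release the camera's native resources once finished.
            cam.Cleanup();
        }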
Example #6
        public async Task TakePictureRawBayer(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.BeginTest("TakePictureRawBayer", encodingType.EncodingName, pixelFormat.EncodingName);
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests");

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", extension))
                using (var preview = new MMALNullSinkComponent())
                    using (var imgEncoder = new MMALImageEncoder(true))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(encodingType, pixelFormat, quality: 90);

                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.StillPort
                        .ConnectTo(imgEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                        Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
                    }
        }
Example #7
        public async Task UserProvidedBufferNumAndSize()
        {
            TestHelper.BeginTest("UserProvidedBufferNumAndSize");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests");

            MMALCameraConfig.UserBufferNum  = 10;
            MMALCameraConfig.UserBufferSize = 20000;

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                using (var preview = new MMALNullSinkComponent())
                    using (var imgEncoder = new MMALImageEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90);

                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.StillPort
                        .ConnectTo(imgEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                        Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
                    }
        }
Example #8
        private void ConfigureBitrate(int outputPort, MMALPortConfig config)
        {
            if (this.Outputs[outputPort].EncodingType == MMALEncoding.H264)
            {
                List<VideoLevel> levelList = null;

                if (MMALCameraConfig.VideoProfile == MMALParametersVideo.MMAL_VIDEO_PROFILE_T.MMAL_VIDEO_PROFILE_H264_HIGH)
                {
                    levelList = GetHighLevelLimits();
                }
                else if (MMALCameraConfig.VideoProfile == MMALParametersVideo.MMAL_VIDEO_PROFILE_T.MMAL_VIDEO_PROFILE_H264_HIGH10)
                {
                    levelList = GetHigh10LevelLimits();
                }
                else
                {
                    levelList = GetNormalLevelLimits();
                }

                var level = levelList.First(c => c.Level == MMALCameraConfig.VideoLevel);

                if (config.Bitrate > level.Maxbitrate)
                {
                    throw new PiCameraError("Bitrate requested exceeds maximum for selected Video Level and Profile");
                }
            }
            else if (this.Outputs[outputPort].EncodingType == MMALEncoding.MJPEG)
            {
                if (this.Outputs[outputPort].Bitrate > MaxBitrateMJPEG)
                {
                    MMALLog.Logger.LogWarning("Bitrate too high: Reducing to 25MBit/s");
                    config.Bitrate = MaxBitrateMJPEG;
                }
            }
        }
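A caller-side sketch of how this validation is exercised: the H.264 profile and level are set through MMALCameraConfig before the encoder output port is configured, and the requested bitrate must stay within that level's limit. The enum member names below are assumptions based on the MMAL parameter headers, and vidEncoder/vidCaptureHandler are assumed to be set up as in the other video examples.

        // Assumed enum member names; the profile value matches the one checked above.
        MMALCameraConfig.VideoProfile = MMALParametersVideo.MMAL_VIDEO_PROFILE_T.MMAL_VIDEO_PROFILE_H264_HIGH;
        MMALCameraConfig.VideoLevel   = MMALParametersVideo.MMAL_VIDEO_LEVEL_T.MMAL_VIDEO_LEVEL_H264_4;

        // MaxBitrateLevel4 (25Mbit/s) sits within the Level 4 limit; a larger value would cause
        // ConfigureBitrate to throw a PiCameraError.
        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 10, bitrate: MMALVideoEncoder.MaxBitrateLevel4);
        vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);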
Example #9
        public async Task TakePictureWithCustomConnectionCallbackHandler()
        {
            TestHelper.BeginTest("TakePictureWithCustomConnectionCallbackHandler");
            TestHelper.SetConfigurationDefaults();

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                using (var preview = new MMALNullSinkComponent())
                    using (var imgEncoder = new MMALImageEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90);

                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        // Create our component pipeline.
                        var connection = Fixture.MMALCamera.Camera.StillPort
                                         .ConnectTo(imgEncoder, 0, true);

                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Register our custom connection callback handler.
                        connection.RegisterCallbackHandler(new CustomConnectionCallbackHandler(connection));

                        // Camera warm up time
                        await Task.Delay(2000);

                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                        Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
                    }
        }
Example #10
        public async Task TakeVideoWithCircularBuffer(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.BeginTest("TakeVideoWithCircularBuffer", encodingType.EncodingName, pixelFormat.EncodingName);
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var circularBufferHandler = new CircularBufferCaptureHandler(4096, "/home/pi/videos/tests", extension))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 10, 25000000, null, storeMotionVectors: true);

                        vidEncoder.ConfigureOutputPort(portConfig, circularBufferHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

                        // Record video for 10 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        // Check that the circular buffer has stored some data during the recording operation.
                        Assert.True(circularBufferHandler.Buffer.Size > 0);
                    }
        }
Example #11
        public async Task AnnotateVideoRefreshSeconds()
        {
            TestHelper.BeginTest("Video - AnnotateVideo");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            MMALCameraConfig.Annotate             = new AnnotateImage();
            MMALCameraConfig.Annotate.RefreshRate = DateTimeTextRefreshRate.Seconds;
            MMALCameraConfig.Annotate.TimeFormat  = "HH:mm:ss";

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var vidEncoder = new MMALVideoEncoder())
                    using (var renderer = new MMALVideoRenderer())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 10, bitrate: MMALVideoEncoder.MaxBitrateLevel4);

                        // Create our component pipeline. Here we are using the H.264 standard with a YUV420 pixel format. The video will be taken at 25Mb/s.
                        vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);

                        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                        // Camera warm up time
                        await Task.Delay(2000);

                        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

                        // Take video for 30 seconds.
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #12
        public async Task TakeVideoAndStoreTimestamps()
        {
            TestHelper.BeginTest("Video - TakeVideoAndStoreTimestamps");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264", true))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, bitrate: MMALVideoEncoder.MaxBitrateLevel4);

                        vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                        Fixture.CheckAndAssertFilepath($"{vidCaptureHandler.Directory}/{vidCaptureHandler.CurrentFilename}.pts");
                    }
        }
Example #13
        public async Task JpegThumbnail()
        {
            TestHelper.BeginTest("Image - JpegThumbnail");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests");

            JpegThumbnail tm = new JpegThumbnail(true, 200, 200, 90);

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
                using (var preview = new MMALNullSinkComponent())
                    using (var imgEncoder = new MMALImageEncoder(thumbnailConfig: tm))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);

                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.StillPort
                        .ConnectTo(imgEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        imgCaptureHandler.Manipulate(context =>
                        {
                            context.StripBayerMetadata(CameraVersion.OV5647);
                        }, ImageFormat.Jpeg);

                        // Camera warm up time
                        await Task.Delay(2000);

                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);
                    }
        }
Example #14
        private async Task ResizePicture(string extension, MMALEncoding encoding, MMALEncoding pixelFormat, int width, int height)
        {
            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/", extension))
                using (var resizer = new MMALResizerComponent())
                    using (var imgEncoder = new MMALImageEncoder())
                        using (var nullSink = new MMALNullSinkComponent())
                        {
                            this.Cam.ConfigureCameraSettings();

                            await Task.Delay(2000);

                            var resizerConfig = new MMALPortConfig(pixelFormat, pixelFormat, width: width, height: height);
                            var encoderConfig = new MMALPortConfig(encoding, pixelFormat, quality: 90);

                            // Create our component pipeline.
                            resizer.ConfigureInputPort(new MMALPortConfig(MMALCameraConfig.Encoding, MMALCameraConfig.EncodingSubFormat), this.Cam.Camera.StillPort, null);
                            resizer.ConfigureOutputPort(resizerConfig, null);
                            imgEncoder.ConfigureOutputPort(encoderConfig, imgCaptureHandler);

                            this.Cam.Camera.StillPort.ConnectTo(resizer);
                            resizer.Outputs[0].ConnectTo(imgEncoder);
                            this.Cam.Camera.PreviewPort.ConnectTo(nullSink);

                            await this.Cam.ProcessAsync(this.Cam.Camera.StillPort);
                        }
        }
Example #15
        /// <inheritdoc />
        public override IDownstreamComponent ConfigureOutputPort(int outputPort, MMALPortConfig config, IOutputCaptureHandler handler)
        {
            base.ConfigureOutputPort(outputPort, config, handler);

            if (this.RawBayer)
            {
                MMALCamera.Instance.Camera.StillPort.SetRawCapture(true);
            }

            if (this.UseExif)
            {
                this.AddExifTags(this.ExifTags);
            }

            if (this.JpegThumbnailConfig != null)
            {
                var str = new MMAL_PARAMETER_THUMBNAIL_CONFIG_T(
                    new MMAL_PARAMETER_HEADER_T(
                        MMALParametersCamera.MMAL_PARAMETER_THUMBNAIL_CONFIGURATION,
                        Marshal.SizeOf<MMAL_PARAMETER_THUMBNAIL_CONFIG_T>()),
                    this.JpegThumbnailConfig.Enable, this.JpegThumbnailConfig.Width,
                    this.JpegThumbnailConfig.Height, this.JpegThumbnailConfig.Quality);

                MMALCheck(MMALPort.mmal_port_parameter_set(this.Control.Ptr, &str.Hdr), "Unable to set JPEG thumbnail config.");
            }

            return this;
        }
Example #16
        public async Task RawVideoConvert()
        {
            TestHelper.BeginTest("RawVideoConvert");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var ffCaptureHandler = FFmpegCaptureHandler.RawVideoToAvi("/home/pi/videos/tests", "testing1234"))
                using (var vidEncoder = new MMALVideoEncoder())
                    using (var renderer = new MMALVideoRenderer())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 25000000, null);

                        vidEncoder.ConfigureOutputPort(portConfig, ffCaptureHandler);

                        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                        // Camera warm up time
                        await Task.Delay(2000);

                        var cts = new CancellationTokenSource(TimeSpan.FromMinutes(1));

                        // Take video for 1 minute.
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath("/home/pi/videos/tests/testing1234.avi");
                    }
        }
Example #17
        public async Task EdgeDetectionKernelProcessor(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.BeginTest("EdgeDetectionKernelProcessor", encodingType.EncodingName, pixelFormat.EncodingName);
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/images/tests");

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", extension))
                using (var preview = new MMALNullSinkComponent())
                    using (var imgEncoder = new MMALImageEncoder())
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.StillPort
                        .ConnectTo(imgEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        imgCaptureHandler.Manipulate(context =>
                        {
                            context.Apply(new EdgeDetection(EDStrength.High));
                        }, ImageFormat.Jpeg);

                        // Camera warm up time
                        await Task.Delay(2000);

                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

                        Fixture.CheckAndAssertFilepath(imgCaptureHandler.GetFilepath());
                    }
        }
Example #18
        /// <summary>
        /// Self-contained method for recording raw video frames directly from the camera's video port.
        /// Uses the encoding and pixel format as set in <see cref="MMALCameraConfig.VideoEncoding"/> and <see cref="MMALCameraConfig.VideoSubformat"/>.
        /// </summary>
        /// <param name="handler">The video capture handler to apply to the encoder.</param>
        /// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
        /// <returns>The awaitable Task.</returns>
        public async Task TakeRawVideo(IVideoCaptureHandler handler, CancellationToken cancellationToken)
        {
            using (var splitter = new MMALSplitterComponent())
                using (var renderer = new MMALVideoRenderer())
                {
                    this.ConfigureCameraSettings();

                    var splitterOutputConfig = new MMALPortConfig(MMALCameraConfig.VideoEncoding, MMALCameraConfig.VideoSubformat, 0);

                    // Force port type to SplitterVideoPort to prevent resolution from being set against splitter component.
                    splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterOutputConfig, handler);

                    // Create our component pipeline.
                    this.Camera.VideoPort.ConnectTo(splitter);
                    this.Camera.PreviewPort.ConnectTo(renderer);

                    MMALLog.Logger.LogInformation($"Preparing to take raw video. Resolution: {this.Camera.VideoPort.Resolution.Width} x {this.Camera.VideoPort.Resolution.Height}. " +
                                                  $"Encoder: {MMALCameraConfig.VideoEncoding.EncodingName}. Pixel Format: {MMALCameraConfig.VideoSubformat.EncodingName}.");

                    // Camera warm up time
                    await Task.Delay(2000).ConfigureAwait(false);

                    await this.ProcessAsync(this.Camera.VideoPort, cancellationToken).ConfigureAwait(false);
                }
        }
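A minimal usage sketch for the helper above; the output path, "raw" extension and 30 second duration are illustrative assumptions.

        public async Task TakeRawVideoExample()
        {
            var cam = MMALCamera.Instance;

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "raw"))
            {
                // Stop capturing after 30 seconds.
                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

                await cam.TakeRawVideo(vidCaptureHandler, cts.Token);
            }

            cam.Cleanup();
        }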
Example #19
        /// <summary>
        /// Call to configure changes on a downstream component input port.
        /// </summary>
        /// <typeparam name="TPort">Input port type.</typeparam>
        /// <param name="config">User provided port configuration object.</param>
        /// <param name="handler">The input port capture handler.</param>
        /// <returns>This <see cref="MMALDownstreamComponent"/>.</returns>
        public virtual unsafe IDownstreamComponent ConfigureInputPort<TPort>(MMALPortConfig config, IInputCaptureHandler handler)
            where TPort : IInputPort
        {
            this.Inputs[0] = (IInputPort)Activator.CreateInstance(typeof(TPort), (IntPtr)(&(*this.Ptr->Input[0])), this, Guid.NewGuid());

            return this.ConfigureInputPort(config, null, handler);
        }
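A sketch of the call shape only: the generic argument decides which port wrapper is instantiated before the shared ConfigureInputPort overload applies the configuration. The concrete InputPort type and the OPAQUE/I420 formats used here are assumptions, not taken from the original examples.

        using (var vidEncoder = new MMALVideoEncoder())
        {
            // Assumed port type and formats, shown only to illustrate the generic overload.
            var inputConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420);

            vidEncoder.ConfigureInputPort<InputPort>(inputConfig, null);
        }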
Example #20
        /// <summary>
        /// Self-contained method for capturing continual images from the camera still port for a specified period of time.
        /// An MMALImageEncoder component will be created and attached to the still port.
        /// </summary>
        /// <param name="handler">The image capture handler to apply to the encoder component.</param>
        /// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
        /// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
        /// <param name="cancellationToken">A cancellationToken to trigger stop capturing.</param>
        /// <param name="burstMode">When enabled, burst mode will increase the rate at which images are taken, at the expense of quality.</param>
        /// <returns>The awaitable Task.</returns>
        public async Task TakePictureTimeout(IFileStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat, CancellationToken cancellationToken, bool burstMode = false)
        {
            if (burstMode)
            {
                MMALCameraConfig.StillBurstMode = true;
            }

            using (var imgEncoder = new MMALImageEncoder())
                using (var renderer = new MMALNullSinkComponent())
                {
                    this.ConfigureCameraSettings();

                    var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

                    imgEncoder.ConfigureOutputPort(portConfig, handler);

                    // Create our component pipeline.
                    this.Camera.StillPort.ConnectTo(imgEncoder);
                    this.Camera.PreviewPort.ConnectTo(renderer);

                    // Camera warm up time
                    await Task.Delay(2000).ConfigureAwait(false);

                    while (!cancellationToken.IsCancellationRequested)
                    {
                        await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);

                        if (!cancellationToken.IsCancellationRequested)
                        {
                            handler.NewFile();
                        }
                    }
                }
        }
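A minimal usage sketch for the helper above; the output directory and one-minute duration are assumptions. The helper calls NewFile() on the handler between captures, so each still lands in its own file.

        public async Task TakePictureTimeoutExample()
        {
            var cam = MMALCamera.Instance;

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/", "jpg"))
            {
                // Keep capturing stills until the token expires after one minute.
                var cts = new CancellationTokenSource(TimeSpan.FromMinutes(1));

                await cam.TakePictureTimeout(imgCaptureHandler, MMALEncoding.JPEG, MMALEncoding.I420, cts.Token, burstMode: true);
            }

            cam.Cleanup();
        }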
Example #21
        /// <inheritdoc />
        public override unsafe MMALDownstreamComponent ConfigureInputPort(MMALPortConfig config)
        {
            if (config.EncodingType != null)
            {
                this.Inputs[0].Ptr->Format->Encoding = config.EncodingType.EncodingVal;
            }

            if (config.PixelFormat != null)
            {
                this.Inputs[0].Ptr->Format->EncodingVariant = config.PixelFormat.EncodingVal;
            }

            this.Inputs[0].Ptr->Format->Type = MMALFormat.MMAL_ES_TYPE_T.MMAL_ES_TYPE_VIDEO;

            this.Inputs[0].Bitrate    = config.Bitrate;
            this.Inputs[0].Resolution = new Resolution(config.Width, config.Height).Pad();
            this.Inputs[0].Crop       = new Rectangle(0, 0, config.Width, config.Height);
            this.Inputs[0].FrameRate  = new MMAL_RATIONAL_T(config.Framerate, 1);
            this.Inputs[0].Ptr->Format->Es->Video.Par = new MMAL_RATIONAL_T(1, 1);

            this.Inputs[0].EncodingType = config.EncodingType;

            this.Inputs[0].Commit();

            this.Inputs[0].Ptr->BufferNum  = Math.Max(this.Inputs[0].Ptr->BufferNumMin, this.Inputs[0].Ptr->BufferNumRecommended);
            this.Inputs[0].Ptr->BufferSize = Math.Max(this.Inputs[0].Ptr->BufferSizeMin, this.Inputs[0].Ptr->BufferSizeRecommended);

            return this;
        }
Example #22
        /// <summary>
        /// Self-contained method for recording H.264 video for a specified amount of time. Records at 30fps, 25Mb/s at the highest quality.
        /// </summary>
        /// <param name="handler">The video capture handler to apply to the encoder.</param>
        /// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
        /// <param name="split">Used for Segmented video mode.</param>
        /// <returns>The awaitable Task.</returns>
        public async Task TakeVideo(IVideoCaptureHandler handler, CancellationToken cancellationToken, Split split = null)
        {
            if (split != null && !MMALCameraConfig.InlineHeaders)
            {
                MMALLog.Logger.LogWarning("Inline headers not enabled. Split mode not supported when this is disabled.");
                split = null;
            }

            using (var vidEncoder = new MMALVideoEncoder())
                using (var renderer = new MMALVideoRenderer())
                {
                    this.ConfigureCameraSettings();

                    var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, MMALVideoEncoder.MaxBitrateLevel4, null, split);

                    vidEncoder.ConfigureOutputPort(portConfig, handler);

                    // Create our component pipeline.
                    this.Camera.VideoPort.ConnectTo(vidEncoder);
                    this.Camera.PreviewPort.ConnectTo(renderer);

                    MMALLog.Logger.LogInformation($"Preparing to take video. Resolution: {this.Camera.VideoPort.Resolution.Width} x {this.Camera.VideoPort.Resolution.Height}. " +
                                                  $"Encoder: {vidEncoder.Outputs[0].EncodingType.EncodingName}. Pixel Format: {vidEncoder.Outputs[0].PixelFormat.EncodingName}.");

                    // Camera warm up time
                    await Task.Delay(2000).ConfigureAwait(false);

                    await this.ProcessAsync(this.Camera.VideoPort, cancellationToken).ConfigureAwait(false);
                }
        }
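A minimal usage sketch for the helper above; the output path and 30 second duration are assumptions. Passing a Split object instead of omitting it enables segmented recording, provided inline headers are turned on.

        public async Task TakeVideoExample()
        {
            var cam = MMALCamera.Instance;

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "h264"))
            {
                // Record H.264 for 30 seconds, then stop via the cancellation token.
                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

                await cam.TakeVideo(vidCaptureHandler, cts.Token);
            }

            cam.Cleanup();
        }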
Example #23
        public async Task TakeVideo(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
        {
            TestHelper.BeginTest("TakeVideo", encodingType.EncodingName, pixelFormat.EncodingName);
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", extension))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 25, 10, 25000000, null);

                        vidEncoder.ConfigureOutputPort(portConfig);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));

                        // Record video for 15 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #24
        /// <inheritdoc />
        public override IDownstreamComponent ConfigureInputPort(MMALPortConfig config, IInputCaptureHandler handler)
        {
            config.BufferNum = Math.Max(this.Inputs[0].BufferNumRecommended, 3);

            base.ConfigureInputPort(config, handler);

            return this;
        }
Example #25
        /// <inheritdoc />
        public override unsafe MMALDownstreamComponent ConfigureInputPort(MMALPortConfig config)
        {
            base.ConfigureInputPort(config);

            this.Inputs[0].Ptr->BufferNum = Math.Max(this.Inputs[0].Ptr->BufferNumRecommended, 3);

            return this;
        }
Example #26
        static async Task fragmp4(int seconds)
        {
            // This generates a "fragmented" MP4 which should be larger than an MP4
            // with a normal single MOOV atom trailer at the end of the file. See:
            // https://superuser.com/a/1530949/143047
            // 10 seconds of "-badmp4" is 34MB
            // 10 seconds of "-fragmp4" is 26MB regardless of the other options described in the link
            // 60 seconds (bad) is 219MB versus 208MB (frag) and again we lose 2 seconds
            // -g is keyframe rate, default is 250
            // -flush_packets 1 flushes the I/O stream after each packet, decreasing latency (apparently)
            // adding two seconds to the requested duration approximates the regular file size (10s = 33MB, 60s = 218MB)

            seconds += 2; // HACK! see above

            var cam      = GetConfiguredCamera();
            var pathname = ramdiskPath + "video.mp4";

            Directory.CreateDirectory(ramdiskPath);
            File.Delete(pathname);

            Console.WriteLine("Preparing pipeline...");
            cam.ConfigureCameraSettings();

            using (var ffmpeg = new ExternalProcessCaptureHandler(
                       new ExternalProcessCaptureHandlerOptions
            {
                Filename = "ffmpeg",
                Arguments = $"-framerate 24 -i - -b:v 2500k -c copy -movflags +frag_keyframe+separate_moof+omit_tfhd_offset+empty_moov {pathname}",
                EchoOutput = true,
                DrainOutputDelayMs = 500,                                     // default
                TerminationSignals = new[] { Signum.SIGINT, Signum.SIGQUIT }, // not the supposedly-correct SIGINT+SIGINT but this produces some exit output
            }))
            {
                // quality arg-help says set bitrate zero to use quality for VBR
                var portCfg = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 10, bitrate: 0, timeout: null);
                using var encoder  = new MMALVideoEncoder();
                using var renderer = new MMALVideoRenderer();
                encoder.ConfigureOutputPort(portCfg, ffmpeg);
                cam.Camera.VideoPort.ConnectTo(encoder);
                cam.Camera.PreviewPort.ConnectTo(renderer);

                Console.WriteLine("Camera warmup...");
                await Task.Delay(2000);

                Console.WriteLine($"Capturing MP4: {pathname}");
                var timerToken = new CancellationTokenSource(TimeSpan.FromSeconds(seconds));
                await Task.WhenAll(new Task[] {
                    ffmpeg.ManageProcessLifecycleAsync(timerToken.Token),
                    cam.ProcessAsync(cam.Camera.VideoPort, timerToken.Token),
                }).ConfigureAwait(false);
            }

            // can't use the convenient fall-through using or MMALCamera.Cleanup
            // throws: Argument is invalid. Unable to destroy component
            cam.Cleanup();

            Console.WriteLine("Exiting.");
        }
Example #27
        /// <summary>
        /// Creates a new instance of an Overlay renderer component. This component is identical to the <see cref="MMALVideoRenderer"/> class; however, it provides
        /// the ability to overlay a static source onto the render overlay.
        /// </summary>
        /// <param name="parent">The parent renderer which is being used to overlay onto the display.</param>
        /// <param name="config">The configuration for rendering a static preview overlay.</param>
        /// <param name="source">A reference to the current stream being used in the overlay.</param>
        public MMALOverlayRenderer(MMALVideoRenderer parent, PreviewOverlayConfiguration config, byte[] source)
            : base(config)
        {
            this.Source               = source;
            this.ParentRenderer       = parent;
            this.OverlayConfiguration = config;
            parent.Overlays.Add(this);

            if (config != null)
            {
                var width  = 0;
                var height = 0;

                if (config.Resolution.Width > 0 && config.Resolution.Height > 0)
                {
                    width  = config.Resolution.Width;
                    height = config.Resolution.Height;
                }
                else
                {
                    width  = parent.Inputs[0].Resolution.Width;
                    height = parent.Inputs[0].Resolution.Height;
                }

                if (config.Encoding == null)
                {
                    var sourceLength = source.Length;
                    var planeSize    = this.Inputs[0].Resolution.Pad();
                    var planeLength  = Math.Floor((double)planeSize.Width * planeSize.Height);

                    if (Math.Floor(sourceLength / planeLength) == 3)
                    {
                        config.Encoding = MMALEncoding.RGB24;
                    }
                    else if (Math.Floor(sourceLength / planeLength) == 4)
                    {
                        config.Encoding = MMALEncoding.RGBA;
                    }
                    else
                    {
                        throw new PiCameraError("Unable to determine encoding from image size.");
                    }
                }

                if (!this.AllowedEncodings.Any(c => c.EncodingVal == this.Inputs[0].NativeEncodingType))
                {
                    throw new PiCameraError($"Incompatible encoding type for use with Preview Render overlay {this.Inputs[0].NativeEncodingType.ParseEncoding().EncodingName}.");
                }

                var portConfig = new MMALPortConfig(config.Encoding, null, width, height, 0, 0, 0, false, null, 0, 0);

                this.ConfigureInputPort(portConfig, null);

                this.Control.Start();
                this.Inputs[0].Start();
            }
        }
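A minimal construction sketch for the renderer above. The property names on PreviewOverlayConfiguration, the raw-frame path, and the already-running parent renderer are assumptions; Encoding may also be left null to let the constructor infer RGB24/RGBA from the source length, as in the logic above.

        // Assumes "renderer" is an MMALVideoRenderer already connected to the camera's preview port.
        var overlayConfig = new PreviewOverlayConfiguration
        {
            FullScreen = true,
            Opacity    = 255,
            Layer      = 2,
            Encoding   = MMALEncoding.RGB24
        };

        // A raw RGB24 frame matching the parent renderer's resolution.
        var overlaySource = File.ReadAllBytes("/home/pi/overlay.raw");

        var overlay = new MMALOverlayRenderer(renderer, overlayConfig, overlaySource);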
Example #28
        public async Task ChangeEncoderType()
        {
            TestHelper.BeginTest("Video - ChangeEncoderType");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 10, 25000000, null);

                        vidEncoder.ConfigureOutputPort(portConfig);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        // Camera warm up time
                        await Task.Delay(2000);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(20));

                        // Record video for 20 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "mjpeg"))
                using (var preview = new MMALVideoRenderer())
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                    {
                        Fixture.MMALCamera.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.MJPEG, MMALEncoding.I420, 25, 10, 25000000, null);

                        vidEncoder.ConfigureOutputPort(portConfig);

                        // Create our component pipeline.
                        Fixture.MMALCamera.Camera.VideoPort
                        .ConnectTo(vidEncoder);
                        Fixture.MMALCamera.Camera.PreviewPort
                        .ConnectTo(preview);

                        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(20));

                        // Record video for 20 seconds
                        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                    }
        }
Example #29
        public async Task ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder(MMAL_PARAM_IMAGEFX_T effect, bool throwsException)
        {
            TestHelper.BeginTest($"Video - ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder - {effect}");
            TestHelper.SetConfigurationDefaults();
            TestHelper.CleanDirectory("/home/pi/videos/tests");

            using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
                using (var preview = new MMALNullSinkComponent())
                    using (var imageFx = new MMALImageFxComponent())
                        using (var splitter = new MMALSplitterComponent())
                            using (var vidEncoder = new MMALVideoEncoder())
                            {
                                Fixture.MMALCamera.ConfigureCameraSettings();

                                var vidEncoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420);
                                var splitterConfig   = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);
                                var fxConfig         = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

                                imageFx.ConfigureOutputPort<VideoPort>(0, fxConfig, null);

                                splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), imageFx.Outputs[0], null);
                                splitter.ConfigureOutputPort<VideoPort>(0, splitterConfig, null);

                                vidEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), splitter.Outputs[0], null);
                                vidEncoder.ConfigureOutputPort(0, vidEncoderConfig, vidCaptureHandler);

                                if (throwsException)
                                {
                                    Assert.Throws<MMALInvalidException>(() =>
                                    {
                                        imageFx.ImageEffect = effect;
                                    });
                                }
                                else
                                {
                                    imageFx.ImageEffect = effect;
                                }

                                // Create our component pipeline.
                                Fixture.MMALCamera.Camera.VideoPort
                                .ConnectTo(imageFx);
                                Fixture.MMALCamera.Camera.PreviewPort
                                .ConnectTo(preview);

                                imageFx.Outputs[0].ConnectTo(splitter);
                                splitter.Outputs[0].ConnectTo(vidEncoder);

                                // Camera warm up time
                                await Task.Delay(2000);

                                var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));

                                await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

                                Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
                            }
        }
Example #30
        /// <summary>
        /// Self-contained method for capturing timelapse images.
        /// An MMALImageEncoder component will be created and attached to the still port.
        /// </summary>
        /// <param name="handler">The image capture handler to apply to the encoder component.</param>
        /// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
        /// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
        /// <param name="timelapse">A Timelapse object which specifies the timeout and rate at which images should be taken.</param>
        /// <returns>The awaitable Task.</returns>
        /// <exception cref="ArgumentNullException"/>
        public async Task TakePictureTimelapse(IFileStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat, Timelapse timelapse)
        {
            int interval = 0;

            if (timelapse == null)
            {
                throw new ArgumentNullException(nameof(timelapse), "Timelapse object null. This must be initialized for Timelapse mode");
            }

            using (var imgEncoder = new MMALImageEncoder())
                using (var renderer = new MMALNullSinkComponent())
                {
                    this.ConfigureCameraSettings();

                    var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

                    imgEncoder.ConfigureOutputPort(portConfig, handler);

                    // Create our component pipeline.
                    this.Camera.StillPort.ConnectTo(imgEncoder);
                    this.Camera.PreviewPort.ConnectTo(renderer);

                    // Camera warm up time
                    await Task.Delay(2000).ConfigureAwait(false);

                    while (!timelapse.CancellationToken.IsCancellationRequested)
                    {
                        switch (timelapse.Mode)
                        {
                        case TimelapseMode.Millisecond:
                            interval = timelapse.Value;
                            break;

                        case TimelapseMode.Second:
                            interval = timelapse.Value * 1000;
                            break;

                        case TimelapseMode.Minute:
                            interval = (timelapse.Value * 60) * 1000;
                            break;
                        }

                        await Task.Delay(interval).ConfigureAwait(false);

                        MMALLog.Logger.LogInformation($"Preparing to take picture. Resolution: {MMALCameraConfig.StillResolution.Width} x {MMALCameraConfig.StillResolution.Height}. " +
                                                      $"Encoder: {encodingType.EncodingName}. Pixel Format: {pixelFormat.EncodingName}.");

                        await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);

                        if (!timelapse.CancellationToken.IsCancellationRequested)
                        {
                            handler.NewFile();
                        }
                    }
                }
        }
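A minimal usage sketch for the helper above. The interval, overall duration and the object-initializer style for Timelapse are assumptions; the Mode/Value/CancellationToken members match those read in the loop.

        public async Task TakePictureTimelapseExample()
        {
            var cam = MMALCamera.Instance;

            using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/timelapse", "jpg"))
            {
                // Stop the timelapse after five minutes.
                var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));

                var timelapse = new Timelapse
                {
                    Mode = TimelapseMode.Second,
                    Value = 10,
                    CancellationToken = cts.Token
                };

                // One JPEG every 10 seconds.
                await cam.TakePictureTimelapse(imgCaptureHandler, MMALEncoding.JPEG, MMALEncoding.I420, timelapse);
            }

            cam.Cleanup();
        }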