Example #1
        public static void TakePictureManualMode()
        {
            MMALCamera cam = MMALCamera.Instance;

            AsyncContext.Run(async() =>
            {
                using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/", "jpg"))
                    using (var imgEncoder = new MMALImageEncoder(imgCaptureHandler))
                        using (var nullSink = new MMALNullSinkComponent())
                        {
                            cam.ConfigureCameraSettings();

                            // Create our component pipeline.
                            imgEncoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);

                            cam.Camera.StillPort.ConnectTo(imgEncoder);
                            cam.Camera.PreviewPort.ConnectTo(nullSink);

                            // Camera warm up time
                            await Task.Delay(2000);
                            await cam.ProcessAsync(cam.Camera.StillPort);
                        }
            });

            cam.Cleanup();
        }
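The "manual" in TakePictureManualMode refers to wiring the pipeline by hand rather than using the library's one-line helpers. If manual exposure control is also wanted, MMALSharp exposes static MMALCameraConfig properties that are read when ConfigureCameraSettings() is called. A minimal sketch, assuming property names such as StillResolution, ISO, ShutterSpeed and AwbMode (these vary slightly between MMALSharp versions):

        // Hypothetical manual exposure setup; set before cam.ConfigureCameraSettings().
        // Property names follow MMALCameraConfig but may differ between library versions.
        MMALCameraConfig.StillResolution = new Resolution(2592, 1944); // full-sensor still
        MMALCameraConfig.ISO = 400;                                    // fixed sensitivity
        MMALCameraConfig.ShutterSpeed = 2000000;                       // 2 s exposure, in microseconds
        MMALCameraConfig.AwbMode = MMAL_PARAM_AWBMODE_T.MMAL_PARAM_AWBMODE_AUTO;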
Example #2
        public static void TakeVideoManualMode()
        {
            MMALCamera cam = MMALCamera.Instance;

            AsyncContext.Run(async() =>
            {
                //using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "mjpeg"))
                using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "avi"))
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                        //using (var ffCaptureHandler = FFmpegCaptureHandler.RawVideoToAvi("/home/pi/videos/", "testing1234"))
                        //using (var vidEncoder = new MMALVideoEncoder(ffCaptureHandler))
                        using (var renderer = new MMALVideoRenderer())
                        {
                            cam.ConfigureCameraSettings();

                            // Create our component pipeline. Here we are using the MJPEG codec with a YUV420 pixel format, at a bitrate of 25Mb/s.
                            vidEncoder.ConfigureOutputPort(0, MMALEncoding.MJPEG, MMALEncoding.I420, 90, 25000000);

                            cam.Camera.VideoPort.ConnectTo(vidEncoder);
                            cam.Camera.PreviewPort.ConnectTo(renderer);

                            // Camera warm up time
                            await Task.Delay(2000);

                            var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

                            // Record video for 10 seconds (until the token above is cancelled).
                            await cam.ProcessAsync(cam.Camera.VideoPort, cts.Token);
                        }
            });

            cam.Cleanup();
        }
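The snippet above records MJPEG; if genuine H.264 output is wanted, only the encoding and the file extension change. A hedged sketch against the same MMALSharp overloads used above (the quality argument is assumed to act as the H.264 quantisation parameter, with 0 meaning "use the bitrate"):

        public static void TakeH264VideoManualMode()
        {
            MMALCamera cam = MMALCamera.Instance;

            AsyncContext.Run(async () =>
            {
                using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "h264"))
                using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                using (var renderer = new MMALVideoRenderer())
                {
                    cam.ConfigureCameraSettings();

                    // H.264 elementary stream, YUV420 input, 25Mb/s bitrate as in the MJPEG example.
                    vidEncoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 0, 25000000);

                    cam.Camera.VideoPort.ConnectTo(vidEncoder);
                    cam.Camera.PreviewPort.ConnectTo(renderer);

                    // Camera warm up time
                    await Task.Delay(2000);

                    // Record for 10 seconds.
                    var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
                    await cam.ProcessAsync(cam.Camera.VideoPort, cts.Token);
                }
            });

            cam.Cleanup();
        }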
Example #3
        private async Task CaptureImageAsync(Stream output, CancellationToken cancellationToken)
        {
            try
            {
                this.log.LogDebug("Processing camera output...");
                this.memoryStreamHandler.CurrentStream.Clear();
                await this.camera.ProcessAsync(this.camera.Camera.StillPort, cancellationToken);

                var imageDataLengthInKiB = this.memoryStreamHandler.CurrentStream.Length / 1024;
                this.log.LogDebug($"Fetching image data ({imageDataLengthInKiB} KiB)...");
                await this.memoryStreamHandler.CurrentStream.CopyAndResetAsync(output);

                this.log.LogDebug("Image captured successfully.");
            }
            catch (Exception ex)
            {
                this.log.LogError(ex, "Capturing image failed.");
            }
        }
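Example #3 assumes a pipeline and a memory-stream capture handler that were configured once elsewhere in the class; CurrentStream.Clear() and CopyAndResetAsync also look like project-specific helpers rather than MMALSharp calls. A hedged sketch of the one-time setup those fields imply, using MMALSharp's MemoryStreamCaptureHandler and the MMALPortConfig-style API seen in Example #4 (field and method names here are illustrative, not taken from the original project):

        // Hypothetical fields assumed by CaptureImageAsync above.
        private readonly MMALCamera camera = MMALCamera.Instance;
        private readonly MemoryStreamCaptureHandler memoryStreamHandler = new MemoryStreamCaptureHandler();

        // Hypothetical one-time pipeline setup, called before the first capture. The handler
        // stays alive for the lifetime of the class so repeated captures reuse the same pipeline.
        // Disposal of the encoder and null sink is omitted for brevity.
        private void ConfigurePipeline()
        {
            this.camera.ConfigureCameraSettings();

            var imgEncoder = new MMALImageEncoder();
            imgEncoder.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90), this.memoryStreamHandler);

            this.camera.Camera.StillPort.ConnectTo(imgEncoder);
            this.camera.Camera.PreviewPort.ConnectTo(new MMALNullSinkComponent());
        }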
Example #4
        private static void TakePictureAndSave()
        {
            using (var imgCaptureHandler = new ImageStreamCaptureHandler(pathToSavePicture + "/", "jpg"))
                using (var imgEncoder = new MMALImageEncoder())
                    using (var nullSink = new MMALNullSinkComponent())
                    {
                        cam.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);

                        // Create our component pipeline.
                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        cam.Camera.StillPort.ConnectTo(imgEncoder);
                        cam.Camera.PreviewPort.ConnectTo(nullSink);

                        // Camera warm up time
                        Thread.Sleep(2000);
                        // Block until the still capture completes; the returned Task must not be discarded.
                        cam.ProcessAsync(cam.Camera.StillPort).Wait();
                    }
        }
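For a single still, MMALSharp's basic-usage helper avoids the manual port wiring and the explicit wait entirely. A brief sketch; the TakePicture signature follows the library's README example and may differ between versions:

        // Hypothetical async variant using the library's single-capture helper.
        private static async Task TakePictureAndSaveAsync()
        {
            using (var imgCaptureHandler = new ImageStreamCaptureHandler(pathToSavePicture + "/", "jpg"))
            {
                await cam.TakePicture(imgCaptureHandler, MMALEncoding.JPEG, MMALEncoding.I420);
            }
        }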
Example #5
        public async Task Start(CancellationToken ct)
        {
            LogUtil.Write("Starting Camera...");
            MMALCameraConfig.VideoStabilisation = false;

            // Map camMode to the corresponding MMAL sensor mode (default: Mode0).
            MMALCameraConfig.SensorMode = camMode switch
            {
                1 => MMALSensorMode.Mode1,
                2 => MMALSensorMode.Mode2,
                3 => MMALSensorMode.Mode3,
                4 => MMALSensorMode.Mode4,
                5 => MMALSensorMode.Mode5,
                6 => MMALSensorMode.Mode6,
                7 => MMALSensorMode.Mode7,
                _ => MMALSensorMode.Mode0
            };
            MMALCameraConfig.ExposureMode    = MMAL_PARAM_EXPOSUREMODE_T.MMAL_PARAM_EXPOSUREMODE_BACKLIGHT;
            MMALCameraConfig.VideoResolution = new Resolution(capWidth, capHeight);
            MMALCameraConfig.VideoFramerate  = new MMAL_RATIONAL_T(60, 1);

            using var vidCaptureHandler = new EmguInMemoryCaptureHandler();
            using var splitter          = new MMALSplitterComponent();
            using var renderer          = new MMALNullSinkComponent();
            cam.ConfigureCameraSettings();
            LogUtil.Write("Cam mode is " + MMALCameraConfig.SensorMode);
            // Register to the event.
            vidCaptureHandler.MyEmguEvent += OnEmguEventCallback;

            // We are instructing the splitter to do a format conversion to BGR24.
            var splitterPortConfig = new MMALPortConfig(MMALEncoding.BGR24, MMALEncoding.BGR24, capWidth, capHeight, null);

            // By default in MMALSharp, the Video port outputs using proprietary communication (Opaque) with a YUV420 pixel format.
            // Changes to this are done via MMALCameraConfig.VideoEncoding and MMALCameraConfig.VideoSubformat.
            splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, capWidth, capHeight, null), cam.Camera.VideoPort, null);

            // Use the splitter port config constructed above, and tell this output port to record data via our capture handler.
            splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterPortConfig, vidCaptureHandler);

            cam.Camera.PreviewPort.ConnectTo(renderer);
            cam.Camera.VideoPort.ConnectTo(splitter);

            // Camera warm up time
            LogUtil.Write("Camera is warming up...");
            await Task.Delay(2000).ConfigureAwait(false);

            LogUtil.WriteInc("Camera initialized.");
            await cam.ProcessAsync(cam.Camera.VideoPort, ct).ConfigureAwait(false);

            LogUtil.WriteDec("Camera closed.");
        }
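The splitter above delivers raw BGR24 frames to the user-defined EmguInMemoryCaptureHandler, which raises MyEmguEvent once per frame. A hedged sketch of a matching callback that wraps a frame for Emgu.CV processing (the EmguEventArgs shape and its ImageData property are assumptions about that user-defined handler, and the Bytes assignment assumes an unpadded buffer of capWidth * capHeight * 3 bytes):

        // Hypothetical consumer of the BGR24 frames produced by the splitter output port.
        // Requires: using Emgu.CV; using Emgu.CV.Structure;
        private void OnEmguEventCallback(object sender, EmguEventArgs args)
        {
            // Wrap the raw BGR24 buffer in an Emgu.CV image; capWidth/capHeight match the
            // splitter port configuration above.
            using (var frame = new Image<Bgr, byte>(capWidth, capHeight) { Bytes = args.ImageData })
            {
                // ...downstream per-frame processing (colour sampling, analysis, etc.) goes here...
            }
        }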