Example #1
0
        /// <summary>
        /// Captures a single JPEG still to /home/pi/images/ using a manually-wired
        /// component pipeline, then releases the camera.
        /// </summary>
        public static void TakePictureManualMode()
        {
            MMALCamera cam = MMALCamera.Instance;

            AsyncContext.Run(async () =>
            {
                using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/", "jpg"))
                using (var encoder = new MMALImageEncoder(captureHandler))
                using (var preview = new MMALNullSinkComponent())
                {
                    cam.ConfigureCameraSettings();

                    // Wire up the pipeline: JPEG/YUV420 at quality 90, preview discarded.
                    encoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);

                    cam.Camera.StillPort.ConnectTo(encoder);
                    cam.Camera.PreviewPort.ConnectTo(preview);

                    // Give the sensor time to settle before capturing.
                    await Task.Delay(2000);
                    await cam.ProcessAsync(cam.Camera.StillPort);
                }
            });

            cam.Cleanup();
        }
        /// <summary>
        /// Captures a timelapse series of JPEG pictures for <paramref name="duration"/>,
        /// waiting <paramref name="msWaitBetweenPictures"/> milliseconds between captures.
        /// Exceptions are caught and logged (best-effort; the method never throws).
        /// </summary>
        /// <param name="filename">Base filename; the indexed handler appends a running index per picture.</param>
        /// <param name="duration">Total time to keep taking pictures.</param>
        /// <param name="msWaitBetweenPictures">Delay between consecutive captures, in milliseconds.</param>
        public async Task TakePicturesAsync(string filename, TimeSpan duration, int msWaitBetweenPictures)
        {
            try
            {
                // Singleton initialized lazily. Reference once in your application.
                MMALCamera cam = this.MMALSharpCameraInstance;
                // NOTE(review): 1080x920 looks like a transposed 1920x1080 — confirm intended resolution.
                MMALCameraConfig.StillResolution = new Resolution(1080, 920);
                cam.ConfigureCameraSettings();

                using (var imgCaptureHandler = new IndexedImageStreamCaptureHandler(filename))
                // FIX: dispose the CancellationTokenSource — the timed constructor owns a timer.
                using (var cts = new CancellationTokenSource(duration))
                {
                    Console.WriteLine($"Current filename in handler: {imgCaptureHandler.CurrentFilename}");

                    var timelapse = new Timelapse
                    {
                        Mode              = TimelapseMode.Millisecond,
                        Value             = msWaitBetweenPictures,
                        CancellationToken = cts.Token
                    };
                    await cam.TakePictureTimelapse(imgCaptureHandler, MMALEncoding.JPEG, MMALEncoding.I420, timelapse);
                }

                // Cleanup disposes all unmanaged resources and unloads Broadcom library. To be called when no more processing is to be done
                // on the camera.
                Console.WriteLine($"Wrote picture to: (unknown) with running index");
            }
            catch (Exception ex)
            {
                // FIX: log the name of THIS method (was nameof(TakePictureAsync)), and drop the
                // accidental duplicate "Failed" line.
                Console.WriteLine($"{nameof(MMALSharpCamera)} {nameof(TakePicturesAsync)} Failed");
                Console.WriteLine($"{nameof(MMALSharpCamera)} {nameof(TakePicturesAsync)} {ex}");
            }
        }
Example #3
0
        /// <summary>
        /// Records 10 seconds of MJPEG video to /home/pi/videos/ through a manually-wired
        /// pipeline, rendering the preview, then releases the camera.
        /// </summary>
        public static void TakeVideoManualMode()
        {
            MMALCamera cam = MMALCamera.Instance;

            AsyncContext.Run(async() =>
            {
                //using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "mjpeg"))
                using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "avi"))
                    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
                        //using (var ffCaptureHandler = FFmpegCaptureHandler.RawVideoToAvi("/home/pi/videos/", "testing1234"))
                        //using (var vidEncoder = new MMALVideoEncoder(ffCaptureHandler))
                        using (var renderer = new MMALVideoRenderer())
                        {
                            cam.ConfigureCameraSettings();

                            // Create our component pipeline: MJPEG with a YUV420 pixel format at 25Mb/s.
                            // (FIX: the old comment claimed H.264, but the encoder is configured for MJPEG.)
                            vidEncoder.ConfigureOutputPort(0, MMALEncoding.MJPEG, MMALEncoding.I420, 90, 25000000);

                            cam.Camera.VideoPort.ConnectTo(vidEncoder);
                            cam.Camera.PreviewPort.ConnectTo(renderer);

                            // Camera warm up time
                            await Task.Delay(2000);

                            // FIX: dispose the CancellationTokenSource — the timed constructor owns a timer.
                            using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10)))
                            {
                                // Record video for 10 seconds. (FIX: the old comment said 3 minutes,
                                // but the token above cancels after 10 seconds.)
                                await cam.ProcessAsync(cam.Camera.VideoPort, cts.Token);
                            }
                        }
            });

            cam.Cleanup();
        }
Example #4
0
        /// <summary>
        /// Builds an H.264/YUV420 video pipeline that writes encoded frames to
        /// <paramref name="outputStream"/>; the preview port is discarded into a null sink.
        /// </summary>
        /// <param name="outputStream">Destination network stream for the encoded video.</param>
        /// <param name="resolution">Capture resolution (X = width, Y = height).</param>
        /// <param name="bitrate">Target encoder bitrate in bits per second.</param>
        /// <param name="frameRate">Capture framerate (frames per second).</param>
        /// <param name="quality">Encoder quality parameter.</param>
        /// <param name="frameCaptured">Optional callback invoked per captured frame.</param>
        public Camera(NetworkStream outputStream, Vector2 resolution, int bitrate = 1300000, int frameRate = 25, int quality = 0, EventHandler <byte[]> frameCaptured = null)
        {
            OnFrameCaptured = frameCaptured;

            // Global camera configuration must be set before ConfigureCameraSettings() runs.
            MMALCameraConfig.VideoResolution = new Resolution((int)resolution.X, (int)resolution.Y);
            MMALCameraConfig.VideoFramerate = new MMAL_RATIONAL_T(frameRate, 1);

            OutputHandler = new NetworkStreamCaptureHandler(outputStream);
            VideoEncoder = new MMALVideoEncoder();

            Instance = MMALCamera.Instance;
            Instance.ConfigureCameraSettings();

            // H.264 / YUV420 at the requested quality and bitrate.
            var encoderPortConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality, bitrate, null);
            VideoEncoder.ConfigureOutputPort(encoderPortConfig, OutputHandler);

            nullSink = new MMALNullSinkComponent();
            Instance.Camera.PreviewPort.ConnectTo(nullSink);
            Instance.Camera.VideoPort.ConnectTo(VideoEncoder);
        }
        /// <summary>
        /// Wires up the still-image pipeline: camera still port -> resizer (1024x768)
        /// -> JPEG encoder -> in-memory stream, with the preview port discarded into a
        /// null sink. Ends with a 2-second sensor warm-up delay.
        /// </summary>
        /// <param name="cancellationToken">Cancels the warm-up delay.</param>
        private async Task BuildCameraPipelineAsync(CancellationToken cancellationToken)
        {
            camera.ConfigureCameraSettings();

            // Components are stored in fields — presumably disposed elsewhere in this class (TODO confirm).
            this.imageEncoder        = new MMALImageEncoder();
            this.memoryStreamHandler = new MemoryStreamCaptureHandler();
            this.resizer             = new MMALResizerComponent();
            this.nullSink            = new MMALNullSinkComponent();

            // JPEG at quality 90; resizer downsamples to 1024x768 keeping I420.
            this.portConfig        = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);
            this.resizerPortConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 1024, 768, 0, 0, 0, false, null);

            // The resizer's input receives the camera's proprietary (Opaque) format and
            // converts to I420; its output feeds the encoder.
            this.imageEncoder.ConfigureOutputPort(this.portConfig, memoryStreamHandler);
            this.resizer.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), camera.Camera.StillPort, null)
            .ConfigureOutputPort(this.resizerPortConfig, null);

            camera.Camera.StillPort.ConnectTo(resizer);
            this.resizer.Outputs[0].ConnectTo(imageEncoder);
            camera.Camera.PreviewPort.ConnectTo(nullSink);

            // Camera warm-up time before the first capture.
            await Task.Delay(TimeSpan.FromSeconds(2), cancellationToken);
        }
Example #6
0
        /// <summary>
        /// Takes a single JPEG picture and saves it under <c>pathToSavePicture</c>.
        /// Blocks the calling thread until the capture has completed.
        /// </summary>
        private static void TakePictureAndSave()
        {
            using (var imgCaptureHandler = new ImageStreamCaptureHandler(pathToSavePicture + "/", "jpg"))
                using (var imgEncoder = new MMALImageEncoder())
                    using (var nullSink = new MMALNullSinkComponent())
                    {
                        cam.ConfigureCameraSettings();

                        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);

                        // Create our component pipeline.
                        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

                        cam.Camera.StillPort.ConnectTo(imgEncoder);
                        cam.Camera.PreviewPort.ConnectTo(nullSink);

                        // Camera warm up time
                        Thread.Sleep(2000);

                        // FIX: the returned Task was previously discarded (fire-and-forget), so the
                        // handlers could be disposed while the capture was still in flight, and the
                        // trailing Thread.Sleep(2000) only papered over that race. Since this method
                        // is synchronous by contract, block until processing finishes instead.
                        cam.ProcessAsync(cam.Camera.StillPort).GetAwaiter().GetResult();
                    }
        }
Example #7
0
        /// <summary>
        /// Camera settings (applies both config and hard-coded defaults).
        /// </summary>
        /// <summary>
        /// Camera settings (applies both config and hard-coded defaults), including a
        /// date/time annotation overlay refreshed every second.
        /// </summary>
        protected virtual void ConfigureCamera()
        {
            Console.WriteLine("Configuring camera.");

            // Resolution, sensor mode and framerate come from the application config.
            MMALCameraConfig.Resolution = new Resolution(AppConfig.Get.Camera.Width, AppConfig.Get.Camera.Height);
            MMALCameraConfig.SensorMode = AppConfig.Get.Camera.Mode;
            MMALCameraConfig.Framerate = AppConfig.Get.Camera.FPS;

            // Stamp the configured name plus date and time onto each frame.
            var annotation = new AnnotateImage(AppConfig.Get.Name, 30, Color.White)
            {
                ShowDateText = true,
                ShowTimeText = true,
                DateFormat = "yyyy-MM-dd",
                TimeFormat = "HH:mm:ss",
                RefreshRate = DateTimeTextRefreshRate.Seconds
            };
            MMALCameraConfig.Annotate = annotation;

            // Image quality tweaks to play with later:
            MMALCameraConfig.Sharpness = 0;             // 0 = auto, default; -100 to 100
            MMALCameraConfig.Contrast = 0;              // 0 = auto, default; -100 to 100
            MMALCameraConfig.Brightness = 50;           // 50 = default; 0 = black, 100 = white
            MMALCameraConfig.Saturation = 0;            // 0 = default; -100 to 100
            MMALCameraConfig.ExposureCompensation = 0;  // 0 = none, default; -10 to 10, lightens/darkens the image

            // Low-light tweaks which don't seem to degrade full-light recording.
            MMALCameraConfig.ExposureMode = MMAL_PARAM_EXPOSUREMODE_T.MMAL_PARAM_EXPOSUREMODE_NIGHT;
            MMALCameraConfig.ExposureMeterMode = MMAL_PARAM_EXPOSUREMETERINGMODE_T.MMAL_PARAM_EXPOSUREMETERINGMODE_MATRIX;
            MMALCameraConfig.DrcLevel = MMAL_PARAMETER_DRC_STRENGTH_T.MMAL_PARAMETER_DRC_STRENGTH_HIGH;

            // H.264 requires key frames for the circular buffer capture handler.
            MMALCameraConfig.InlineHeaders = true;

            Cam.ConfigureCameraSettings();
            Cam.EnableAnnotation();
        }
Example #8
0
        /// <summary>
        /// Configures the camera for BGR24 video capture through a splitter and streams
        /// frames to the Emgu in-memory capture handler until <paramref name="ct"/> is cancelled.
        /// </summary>
        /// <param name="ct">Stops video processing when cancelled.</param>
        public async Task Start(CancellationToken ct)
        {
            LogUtil.Write("Starting Camera...");
            MMALCameraConfig.VideoStabilisation = false;

            // Map the configured mode number onto the MMAL sensor mode
            // (any unrecognised value falls back to Mode0, as before).
            MMALCameraConfig.SensorMode = camMode switch
            {
                1 => MMALSensorMode.Mode1,
                2 => MMALSensorMode.Mode2,
                3 => MMALSensorMode.Mode3,
                4 => MMALSensorMode.Mode4,
                5 => MMALSensorMode.Mode5,
                6 => MMALSensorMode.Mode6,
                7 => MMALSensorMode.Mode7,
                _ => MMALSensorMode.Mode0,
            };
            MMALCameraConfig.ExposureMode = MMAL_PARAM_EXPOSUREMODE_T.MMAL_PARAM_EXPOSUREMODE_BACKLIGHT;
            MMALCameraConfig.VideoResolution = new Resolution(capWidth, capHeight);
            MMALCameraConfig.VideoFramerate = new MMAL_RATIONAL_T(60, 1);

            using var vidCaptureHandler = new EmguInMemoryCaptureHandler();
            using var splitter = new MMALSplitterComponent();
            using var renderer = new MMALNullSinkComponent();

            cam.ConfigureCameraSettings();
            LogUtil.Write("Cam mode is " + MMALCameraConfig.SensorMode);

            // Register to the event so decoded frames reach our callback.
            vidCaptureHandler.MyEmguEvent += OnEmguEventCallback;

            // We are instructing the splitter to do a format conversion to BGR24.
            var splitterPortConfig = new MMALPortConfig(MMALEncoding.BGR24, MMALEncoding.BGR24, capWidth, capHeight, null);

            // By default in MMALSharp, the Video port outputs using proprietary communication
            // (Opaque) with a YUV420 pixel format; declare that as the splitter's input...
            splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, capWidth, capHeight, null), cam.Camera.VideoPort, null);

            // ...and attach our capture handler to the splitter's converted output.
            splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterPortConfig, vidCaptureHandler);

            cam.Camera.PreviewPort.ConnectTo(renderer);
            cam.Camera.VideoPort.ConnectTo(splitter);

            // Camera warm up time.
            LogUtil.Write("Camera is warming up...");
            await Task.Delay(2000).ConfigureAwait(false);

            LogUtil.WriteInc("Camera initialized.");
            await cam.ProcessAsync(cam.Camera.VideoPort, ct).ConfigureAwait(false);

            LogUtil.WriteDec("Camera closed.");
        }