/// <summary>
/// Self-contained method for capturing continuous images from the camera still port for a specified period of time.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <param name="cancellationToken">A cancellationToken to trigger stop capturing.</param>
/// <param name="burstMode">When enabled, burst mode will increase the rate at which images are taken, at the expense of quality.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakePictureTimeout(IFileStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat, CancellationToken cancellationToken, bool burstMode = false)
{
    // Fix: remember the global burst setting so it can be restored afterwards.
    // Previously the 'true' value leaked into every subsequent capture on this process.
    var previousBurstMode = MMALCameraConfig.StillBurstMode;

    if (burstMode)
    {
        MMALCameraConfig.StillBurstMode = true;
    }

    try
    {
        using (var imgEncoder = new MMALImageEncoder())
        using (var renderer = new MMALNullSinkComponent())
        {
            this.ConfigureCameraSettings();

            var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

            imgEncoder.ConfigureOutputPort(portConfig, handler);

            // Create our component pipeline.
            this.Camera.StillPort.ConnectTo(imgEncoder);
            this.Camera.PreviewPort.ConnectTo(renderer);

            // Camera warm up time.
            await Task.Delay(2000).ConfigureAwait(false);

            while (!cancellationToken.IsCancellationRequested)
            {
                await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);

                // Roll over to a new file for the next frame unless we've been asked to stop.
                if (!cancellationToken.IsCancellationRequested)
                {
                    handler.NewFile();
                }
            }
        }
    }
    finally
    {
        MMALCameraConfig.StillBurstMode = previousBurstMode;
    }
}
/// <summary>Verifies a JPEG capture from the still port with an embedded thumbnail configuration.</summary>
public async Task JpegThumbnail()
{
    TestHelper.BeginTest("Image - JpegThumbnail");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests");

    var thumbnail = new JpegThumbnail(true, 200, 200, 90);

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
    using (var nullSink = new MMALNullSinkComponent())
    using (var encoder = new MMALImageEncoder(thumbnailConfig: thumbnail))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90), captureHandler);

        // Wire the still port into the encoder, with a null sink consuming the preview.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

        // Post-process the captured JPEG to remove firmware-appended bayer metadata.
        captureHandler.Manipulate(context =>
        {
            context.StripBayerMetadata(CameraVersion.OV5647);
        }, ImageFormat.Jpeg);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);
    }
}
/// <summary>
/// Captures a still image, passing it through a resizer component before encoding to the requested format.
/// </summary>
/// <param name="extension">File extension for the saved image.</param>
/// <param name="encoding">Target image encoding e.g. JPEG.</param>
/// <param name="pixelFormat">Pixel format used throughout the pipeline.</param>
/// <param name="width">Resized output width.</param>
/// <param name="height">Resized output height.</param>
private async Task ResizePicture(string extension, MMALEncoding encoding, MMALEncoding pixelFormat, int width, int height)
{
    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/", extension))
    using (var resizer = new MMALResizerComponent())
    using (var encoder = new MMALImageEncoder())
    using (var nullSink = new MMALNullSinkComponent())
    {
        this.Cam.ConfigureCameraSettings();

        // Camera warm up time.
        await Task.Delay(2000);

        // Resizer input mirrors the camera's configured still format; its output carries the new dimensions.
        resizer.ConfigureInputPort(new MMALPortConfig(MMALCameraConfig.Encoding, MMALCameraConfig.EncodingSubFormat), this.Cam.Camera.StillPort, null);
        resizer.ConfigureOutputPort(new MMALPortConfig(pixelFormat, pixelFormat, width: width, height: height), null);
        encoder.ConfigureOutputPort(new MMALPortConfig(encoding, pixelFormat, quality: 90), captureHandler);

        // Pipeline: still port -> resizer -> encoder, preview -> null sink.
        this.Cam.Camera.StillPort.ConnectTo(resizer);
        resizer.Outputs[0].ConnectTo(encoder);
        this.Cam.Camera.PreviewPort.ConnectTo(nullSink);

        await this.Cam.ProcessAsync(this.Cam.Camera.StillPort);
    }
}
/// <summary>
/// Self-contained method for capturing a single image from the camera still port.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <returns>The awaitable Task.</returns>
public async Task TakePicture(ICaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    using (var encoder = new MMALImageEncoder(handler))
    using (var nullSink = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(encodingType, pixelFormat, 90));

        // Pipeline: still port -> encoder; preview consumed by a null sink.
        this.Camera.StillPort.ConnectTo(encoder);
        this.Camera.PreviewPort.ConnectTo(nullSink);

        // Enable the image encoder output port.
        MMALLog.Logger.Info($"Preparing to take picture. Resolution: {this.Camera.StillPort.Resolution.Width} x {this.Camera.StillPort.Resolution.Height}. " +
                            $"Encoder: {encodingType.EncodingName}. Pixel Format: {pixelFormat.EncodingName}.");

        // Camera warm up time.
        await Task.Delay(2000).ConfigureAwait(false);

        await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);
    }
}
/// <summary>Verifies that a still capture routed to an in-memory handler produces data.</summary>
public async Task TakePictureWithInMemoryHandler()
{
    TestHelper.BeginTest("TakePictureWithInMemoryHandler");
    TestHelper.SetConfigurationDefaults();

    // Fix: the capture handler is IDisposable and was previously never disposed,
    // unlike every sibling test. The assertion still runs inside the using scope.
    using (var imgCaptureHandler = new InMemoryCaptureHandler())
    using (var preview = new MMALNullSinkComponent())
    using (var imgEncoder = new MMALImageEncoder(true))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);

        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

        // Create our component pipeline.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(imgEncoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

        Assert.True(imgCaptureHandler.WorkingData.Count > 0);
    }
}
/// <summary>
/// Continuously captures stills from the video port (via a splitter) into a user-supplied
/// filename for 10 seconds, then asserts that output files exist.
/// </summary>
/// <param name="extension">File extension for the saved images.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
public async Task TakePicturesFromVideoPortWithCustomFilename(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    TestHelper.BeginTest("TakePicturesFromVideoPortWithCustomFilename", encodingType.EncodingName, pixelFormat.EncodingName);
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests");

    using (var imgCaptureHandler = new ImageStreamCaptureHandler($"/home/pi/images/tests/fromVideoPort.{extension}"))
    using (var splitter = new MMALSplitterComponent())
    using (var preview = new MMALNullSinkComponent())
    using (var imgEncoder = new MMALImageEncoder(continuousCapture: true))
    // Fix: CancellationTokenSource is IDisposable and was previously leaked.
    using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10)))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

        // Create our component pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);
        splitter.Outputs[0].ConnectTo(imgEncoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

        Fixture.CheckAndAssertDirectory(imgCaptureHandler.Directory);
    }
}
/// <summary>Verifies still capture succeeds when user-supplied buffer number and size overrides are set.</summary>
public async Task UserProvidedBufferNumAndSize()
{
    TestHelper.BeginTest("UserProvidedBufferNumAndSize");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests");

    // Override the buffer allocation the library would otherwise compute.
    MMALCameraConfig.UserBufferNum = 10;
    MMALCameraConfig.UserBufferSize = 20000;

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
    using (var nullSink = new MMALNullSinkComponent())
    using (var encoder = new MMALImageEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90), captureHandler);

        // Pipeline: still port -> encoder; preview -> null sink.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

        Fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
    }
}
/// <summary>Repeatedly captures stills for a 10 second window (legacy AsyncContext-based test).</summary>
public void TakePictureTimeout()
{
    TestHelper.BeginTest("TakePictureTimeout");
    TestHelper.SetConfigurationDefaults();

    AsyncContext.Run(async () =>
    {
        TestHelper.CleanDirectory("/home/pi/images/tests/split_tests");

        using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests/split_tests", "jpg"))
        using (var nullSink = new MMALNullSinkComponent())
        using (var encoder = new MMALImageEncoder(captureHandler))
        {
            _fixture.MMALCamera.ConfigureCameraSettings();

            encoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);

            // Pipeline: still port -> encoder; preview -> null sink.
            _fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
            _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

            // Camera warm up time.
            await Task.Delay(2000);

            // Keep capturing until the deadline passes.
            var deadline = DateTime.Now.AddSeconds(10);

            while (DateTime.Now < deadline)
            {
                await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.StillPort);
            }
        }
    });
}
/// <summary>Verifies still capture when a custom connection callback handler is registered on the still-port connection.</summary>
public async Task TakePictureWithCustomConnectionCallbackHandler()
{
    TestHelper.BeginTest("TakePictureWithCustomConnectionCallbackHandler");
    TestHelper.SetConfigurationDefaults();

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
    using (var nullSink = new MMALNullSinkComponent())
    using (var encoder = new MMALImageEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90), captureHandler);

        // Create our component pipeline, keeping a reference to the still-port connection.
        var connection = Fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder, 0, true);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

        // Register our custom connection callback handler.
        connection.RegisterCallbackHandler(new CustomConnectionCallbackHandler(connection));

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

        Fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
    }
}
/// <summary>Verifies still capture with the encoder's raw bayer option enabled.</summary>
/// <param name="extension">File extension for the saved image.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
public async Task TakePictureRawBayer(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    TestHelper.BeginTest("TakePictureRawBayer", encodingType.EncodingName, pixelFormat.EncodingName);
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests");

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", extension))
    using (var nullSink = new MMALNullSinkComponent())
    using (var encoder = new MMALImageEncoder(true))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(encodingType, pixelFormat, quality: 90), captureHandler);

        // Pipeline: still port -> encoder; preview -> null sink.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

        Fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
    }
}
/// <summary>Example: takes a single JPEG still by wiring the component pipeline manually.</summary>
public static void TakePictureManualMode()
{
    MMALCamera cam = MMALCamera.Instance;

    AsyncContext.Run(async () =>
    {
        using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/", "jpg"))
        using (var encoder = new MMALImageEncoder(captureHandler))
        using (var nullSink = new MMALNullSinkComponent())
        {
            cam.ConfigureCameraSettings();

            // Build the component pipeline.
            encoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);

            cam.Camera.StillPort.ConnectTo(encoder);
            cam.Camera.PreviewPort.ConnectTo(nullSink);

            // Camera warm up time.
            await Task.Delay(2000);

            await cam.ProcessAsync(cam.Camera.StillPort);
        }
    });

    // Release native camera resources once processing has finished.
    cam.Cleanup();
}
/// <summary>Repeatedly captures stills for a 10 second window, then asserts the output directory.</summary>
public async Task TakePictureTimeout()
{
    TestHelper.BeginTest("TakePictureTimeout");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests/split_tests");

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests/split_tests", "jpg"))
    using (var nullSink = new MMALNullSinkComponent())
    using (var encoder = new MMALImageEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90), captureHandler);

        // Pipeline: still port -> encoder; preview -> null sink.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

        // Camera warm up time.
        await Task.Delay(2000);

        // Keep capturing until the deadline passes.
        var deadline = DateTime.Now.AddSeconds(10);

        while (DateTime.Now < deadline)
        {
            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);
        }

        Fixture.CheckAndAssertDirectory(captureHandler.Directory);
    }
}
/// <summary>Verifies still capture with an edge-detection kernel applied during post-processing.</summary>
/// <param name="extension">File extension for the saved image.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
public async Task EdgeDetectionKernelProcessor(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    TestHelper.BeginTest("EdgeDetectionKernelProcessor", encodingType.EncodingName, pixelFormat.EncodingName);
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests");

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", extension))
    using (var nullSink = new MMALNullSinkComponent())
    using (var encoder = new MMALImageEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(encodingType, pixelFormat, 90), captureHandler);

        // Pipeline: still port -> encoder; preview -> null sink.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

        // Apply an edge-detection convolution to the captured frame.
        captureHandler.Manipulate(context =>
        {
            context.Apply(new EdgeDetection(EDStrength.High));
        }, ImageFormat.Jpeg);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

        Fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
    }
}
/// <summary>
/// Self-contained method to capture raw image data directly from the Camera component - this method does not use an Image encoder.
/// Note: We cannot use the OPAQUE encoding format with this helper method, the capture will not fail, but will not produce valid data. For reference, RaspiStillYUV uses YUV420.
/// </summary>
/// <param name="handler">The image capture handler to use to save image.</param>
/// <returns>The awaitable Task.</returns>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="PiCameraError"/>
public async Task TakeRawPicture(IOutputCaptureHandler handler)
{
    // Raw capture requires the still port to feed the handler directly - an attached
    // encoder would consume the data instead.
    if (this.Camera.StillPort.ConnectedReference != null)
    {
        throw new PiCameraError("A connection was found to the Camera still port. No encoder should be connected to the Camera's still port for raw capture.");
    }

    if (handler == null)
    {
        throw new ArgumentNullException(nameof(handler));
    }

    using (var nullSink = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings(handler);

        this.Camera.PreviewPort.ConnectTo(nullSink);

        MMALLog.Logger.LogInformation($"Preparing to take raw picture - Resolution: {this.Camera.StillPort.Resolution.Width} x {this.Camera.StillPort.Resolution.Height}. " +
                                      $"Encoder: {MMALCameraConfig.StillEncoding.EncodingName}. Pixel Format: {MMALCameraConfig.StillSubFormat.EncodingName}.");

        // Camera warm up time.
        await Task.Delay(2000).ConfigureAwait(false);

        await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);
    }
}
/// <summary>
/// Self-contained method for capturing continuous images from the camera still port for a specified period of time.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <param name="cancellationToken">A cancellationToken to trigger stop capturing.</param>
/// <param name="burstMode">When enabled, burst mode will increase the rate at which images are taken, at the expense of quality.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakePictureTimeout(ImageStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat, CancellationToken cancellationToken, bool burstMode = false)
{
    if (burstMode)
    {
        this.Camera.StillPort.SetParameter(MMALParametersCamera.MMAL_PARAMETER_CAMERA_BURST_CAPTURE, true);
    }

    using (var imgEncoder = new MMALImageEncoder(handler))
    using (var renderer = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings();

        imgEncoder.ConfigureOutputPort(encodingType, pixelFormat, 90);

        // Create our component pipeline.
        this.Camera.StillPort.ConnectTo(imgEncoder);
        this.Camera.PreviewPort.ConnectTo(renderer);

        // Camera warm up time.
        // Fix: library code should not capture the sync context - siblings in this class
        // use ConfigureAwait(false) on every await; this method now does the same.
        await Task.Delay(2000).ConfigureAwait(false);

        while (!cancellationToken.IsCancellationRequested)
        {
            await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);
        }
    }
}
/// <summary>Captures a timelapse (one frame every 5s for 30s) and asserts the expected file count.</summary>
public void TakePictureTimelapse()
{
    TestHelper.BeginTest("TakePictureTimelapse");
    TestHelper.SetConfigurationDefaults();

    AsyncContext.Run(async () =>
    {
        TestHelper.CleanDirectory("/home/pi/images/tests/split_tests");

        using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests/split_tests", "jpg"))
        using (var nullSink = new MMALNullSinkComponent())
        using (var encoder = new MMALImageEncoder(captureHandler))
        {
            _fixture.MMALCamera.ConfigureCameraSettings();

            encoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);

            // Pipeline: still port -> encoder; preview -> null sink.
            _fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
            _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

            var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

            var timelapse = new Timelapse
            {
                Mode = TimelapseMode.Second,
                CancellationToken = cts.Token,
                Value = 5
            };

            // Camera warm up time.
            await Task.Delay(2000);

            while (!timelapse.CancellationToken.IsCancellationRequested)
            {
                await Task.Delay(timelapse.Value * 1000);
                await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.StillPort);
            }

            var info = new DirectoryInfo(captureHandler.Directory);

            if (info.Exists)
            {
                var files = info.EnumerateFiles();

                // 30s window / 5s interval -> 6 frames expected.
                Assert.True(files != null && files.Count() == 6);
            }
            else
            {
                Assert.True(false, $"File {captureHandler.GetFilepath()} was not created");
            }
        }
    });
}
/// <summary>
/// Records 5 seconds of H264 video through an image-effects component and splitter,
/// asserting either a valid output file or an MMALInvalidException for unsupported effects.
/// </summary>
/// <param name="effect">The image effect to apply.</param>
/// <param name="throwsException">Whether setting the effect is expected to throw.</param>
public async Task ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder(MMAL_PARAM_IMAGEFX_T effect, bool throwsException)
{
    TestHelper.BeginTest($"Video - ImageFxComponentFromCameraVideoPortWithSplitterAndEncoder - {effect}");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    using (var captureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
    using (var nullSink = new MMALNullSinkComponent())
    using (var imageFx = new MMALImageFxComponent())
    using (var splitter = new MMALSplitterComponent())
    using (var videoEncoder = new MMALVideoEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        // Every intermediate stage runs I420; the encoder outputs H264.
        var i420Config = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

        imageFx.ConfigureOutputPort<VideoPort>(0, i420Config, null);

        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), imageFx.Outputs[0], null);
        splitter.ConfigureOutputPort<VideoPort>(0, i420Config, null);

        videoEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), splitter.Outputs[0], null);
        videoEncoder.ConfigureOutputPort(0, new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420), captureHandler);

        if (throwsException)
        {
            Assert.Throws<MMALInvalidException>(() =>
            {
                imageFx.ImageEffect = effect;
            });
        }
        else
        {
            imageFx.ImageEffect = effect;
        }

        // Pipeline: video port -> imageFx -> splitter -> encoder; preview -> null sink.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(imageFx);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);
        imageFx.Outputs[0].ConnectTo(splitter);
        splitter.Outputs[0].ConnectTo(videoEncoder);

        // Camera warm up time.
        await Task.Delay(2000);

        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

        Fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
    }
}
/// <summary>
/// Self-contained method for capturing timelapse images.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <param name="timelapse">A Timelapse object which specifies the timeout and rate at which images should be taken.</param>
/// <returns>The awaitable Task.</returns>
/// <exception cref="ArgumentNullException"/>
public async Task TakePictureTimelapse(IFileStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat, Timelapse timelapse)
{
    int interval = 0;

    if (timelapse == null)
    {
        throw new ArgumentNullException(nameof(timelapse), "Timelapse object null. This must be initialized for Timelapse mode");
    }

    using (var imgEncoder = new MMALImageEncoder())
    using (var renderer = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

        imgEncoder.ConfigureOutputPort(portConfig, handler);

        // Create our component pipeline: still port feeds the encoder, preview feeds a null sink.
        this.Camera.StillPort.ConnectTo(imgEncoder);
        this.Camera.PreviewPort.ConnectTo(renderer);

        // Camera warm up time
        await Task.Delay(2000).ConfigureAwait(false);

        // Capture one frame per interval until the Timelapse token requests cancellation.
        while (!timelapse.CancellationToken.IsCancellationRequested)
        {
            // Convert the configured timelapse unit/value into a millisecond delay.
            // NOTE(review): an unrecognised TimelapseMode leaves interval at 0 (no delay) — confirm intended.
            switch (timelapse.Mode)
            {
                case TimelapseMode.Millisecond:
                    interval = timelapse.Value;
                    break;
                case TimelapseMode.Second:
                    interval = timelapse.Value * 1000;
                    break;
                case TimelapseMode.Minute:
                    interval = (timelapse.Value * 60) * 1000;
                    break;
            }

            await Task.Delay(interval).ConfigureAwait(false);

            MMALLog.Logger.LogInformation($"Preparing to take picture. Resolution: {MMALCameraConfig.StillResolution.Width} x {MMALCameraConfig.StillResolution.Height}. " +
                                          $"Encoder: {encodingType.EncodingName}. Pixel Format: {pixelFormat.EncodingName}.");

            await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);

            // Roll over to a new file for the next frame unless capture has been cancelled.
            if (!timelapse.CancellationToken.IsCancellationRequested)
            {
                handler.NewFile();
            }
        }
    }
}
/// <summary>
/// Self-contained method to capture raw image data directly from the Camera component - this method does not use an Image encoder.
/// Note: We cannot use the OPAQUE encoding format with this helper method, the capture will not fail, but will not produce valid data. For reference, RaspiStillYUV uses YUV420.
/// </summary>
/// <param name="handler">The image capture handler to use to save image.</param>
/// <returns>The awaitable Task.</returns>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="PiCameraError"/>
public async Task TakeRawPicture(ICaptureHandler handler)
{
    // Raw capture reads straight off the still port; an attached encoder would intercept the data.
    if (this.Camera.StillPort.ConnectedReference != null)
    {
        throw new PiCameraError("A connection was found to the Camera still port. No encoder should be connected to the Camera's still port for raw capture.");
    }

    // The camera component writes directly to this handler for raw capture.
    this.Camera.Handler = handler ?? throw new ArgumentNullException(nameof(handler));

    using (var renderer = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings();

        this.Camera.PreviewPort.ConnectTo(renderer);

        // Enable the image encoder output port.
        try
        {
            MMALLog.Logger.Info($"Preparing to take raw picture - Resolution: {MMALCameraConfig.StillResolution.Width} x {MMALCameraConfig.StillResolution.Height}. " +
                                $"Encoder: {MMALCameraConfig.StillEncoding.EncodingName}. Pixel Format: {MMALCameraConfig.StillSubFormat.EncodingName}.");

            // Camera warm up time
            await Task.Delay(2000);

            // Manually drive the still port: start the component, arm a single-count trigger,
            // begin capture, then wait for the trigger to signal that the frame is complete.
            this.Camera.Start(this.Camera.StillPort);
            this.Camera.StillPort.Trigger = new Nito.AsyncEx.AsyncCountdownEvent(1);

            this.StartCapture(this.Camera.StillPort);

            // Wait until the process is complete.
            await this.Camera.StillPort.Trigger.WaitAsync();

            // Stop capturing on the camera still port.
            this.StopCapture(this.Camera.StillPort);

            this.Camera.Stop(MMALCameraComponent.MMALCameraStillPort);

            // Close open connections and clean port pools.
            this.Camera.DisableConnections();

            this.Camera.CleanPortPools();
        }
        finally
        {
            // Always flush the handler and release it, even if capture failed part-way.
            this.Camera.Handler.PostProcess();
            this.Camera.Handler.Dispose();
            this.Camera.Handler = null;
        }
    }
}
/// <summary>Verifies the still pipeline can be torn down and rebuilt with a different encoding (JPEG then BMP).</summary>
public void ChangeEncodingType()
{
    TestHelper.BeginTest("Image - ChangeEncodingType");
    TestHelper.SetConfigurationDefaults();

    AsyncContext.Run(async () =>
    {
        TestHelper.CleanDirectory("/home/pi/images/tests");

        // First capture: JPEG.
        using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
        using (var nullSink = new MMALNullSinkComponent())
        using (var encoder = new MMALImageEncoder(captureHandler))
        {
            _fixture.MMALCamera.ConfigureCameraSettings();

            encoder.ConfigureOutputPort(MMALEncoding.JPEG, MMALEncoding.I420, 90);

            _fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
            _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

            // Camera warm up time.
            await Task.Delay(2000);

            await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.StillPort);

            _fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
        }

        // Second capture: BMP, reusing the already-configured camera.
        using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "bmp"))
        using (var nullSink = new MMALNullSinkComponent())
        using (var encoder = new MMALImageEncoder(captureHandler))
        {
            encoder.ConfigureOutputPort(MMALEncoding.BMP, MMALEncoding.I420, 90);

            _fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
            _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

            await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.StillPort);

            _fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
        }
    });
}
/// <summary>
/// Captures a raw-bayer JPEG, strips the bayer metadata in post-processing, then verifies
/// the "BRCM" marker is present at the expected offset in the saved file.
/// </summary>
public async Task StripBayerData()
{
    TestHelper.BeginTest("Image - StripBayerData");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests");

    string filepath = string.Empty;

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "raw"))
    using (var nullSink = new MMALNullSinkComponent())
    using (var encoder = new MMALImageEncoder(true))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90), captureHandler);

        // Pipeline: still port -> encoder; preview -> null sink.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

        captureHandler.Manipulate(context =>
        {
            context.StripBayerMetadata(CameraVersion.OV5647);
        }, ImageFormat.Jpeg);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

        filepath = captureHandler.GetFilepath();
    }

    var contents = File.ReadAllBytes(filepath);
    byte[] meta = new byte[4];

    // Uncomment depending on which version of the camera you're using.
    // Array.Copy(contents, contents.Length - BayerMetaProcessor.BayerMetaLengthV1, meta, 0, 4);
    Array.Copy(contents, contents.Length - BayerMetaProcessor.BayerMetaLengthV2, meta, 0, 4);

    Assert.True(Encoding.ASCII.GetString(meta) == "BRCM");
}
/// <summary>
/// Captures a still through an image-effects component, asserting either a valid output
/// file or an MMALInvalidException for unsupported effects.
/// </summary>
/// <param name="effect">The image effect to apply.</param>
/// <param name="throwsException">Whether setting the effect is expected to throw.</param>
public async Task ImageFxComponentFromCameraStillPort(MMAL_PARAM_IMAGEFX_T effect, bool throwsException)
{
    TestHelper.BeginTest($"Image - ImageFxComponentFromCameraStillPort - {effect}");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests");

    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
    using (var nullSink = new MMALNullSinkComponent())
    using (var imageFx = new MMALImageFxComponent())
    using (var encoder = new MMALImageEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        imageFx.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420), null);
        encoder.ConfigureOutputPort(new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: 90), captureHandler);

        if (throwsException)
        {
            Assert.Throws<MMALInvalidException>(() =>
            {
                imageFx.ImageEffect = effect;
            });
        }
        else
        {
            imageFx.ImageEffect = effect;
        }

        // Pipeline: still port -> imageFx -> encoder; preview -> null sink.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(imageFx);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);
        imageFx.Outputs[0].ConnectTo(encoder);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);

        Fixture.CheckAndAssertFilepath(captureHandler.GetFilepath());
    }
}
/// <summary>Captures a timelapse (one frame every 5s for 30s) and asserts the output directory.</summary>
public async Task TakePictureTimelapse()
{
    TestHelper.BeginTest("TakePictureTimelapse");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/images/tests/split_tests");

    using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests/split_tests", "jpg"))
    using (var preview = new MMALNullSinkComponent())
    using (var imgEncoder = new MMALImageEncoder())
    // Fix: CancellationTokenSource is IDisposable and was previously leaked.
    using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, 90);

        imgEncoder.ConfigureOutputPort(portConfig, imgCaptureHandler);

        // Create our component pipeline.
        Fixture.MMALCamera.Camera.StillPort.ConnectTo(imgEncoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        Timelapse tl = new Timelapse
        {
            Mode = TimelapseMode.Second,
            CancellationToken = cts.Token,
            Value = 5
        };

        while (!tl.CancellationToken.IsCancellationRequested)
        {
            int interval = tl.Value * 1000;

            await Task.Delay(interval);

            await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.StillPort);
        }

        Fixture.CheckAndAssertDirectory(imgCaptureHandler.Directory);
    }
}
/// <summary>
/// Self-contained method for capturing a single image from the camera still port.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <returns>The awaitable Task.</returns>
public async Task TakePicture(ImageStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    using (var encoder = new MMALImageEncoder(handler))
    using (var nullSink = new MMALNullSinkComponent())
    {
        encoder.ConfigureOutputPort(0, encodingType, pixelFormat, 90);

        // Pipeline: still port -> encoder; preview -> null sink.
        this.Camera.StillPort.ConnectTo(encoder);
        this.Camera.PreviewPort.ConnectTo(nullSink);

        this.ConfigureCameraSettings();

        // Enable the image encoder output port.
        MMALLog.Logger.Info($"Preparing to take picture. Resolution: {encoder.Width} x {encoder.Height}. " +
                            $"Encoder: {encodingType.EncodingName}. Pixel Format: {pixelFormat.EncodingName}.");

        await BeginProcessing(this.Camera.StillPort, encoder);
    }
}
/// <summary>
/// Captures a single JPEG into the configured image folder and records the
/// most recent file's relative path in LastImageFilename.
/// </summary>
public async Task GrabImageAsync()
{
    var camera = MMALCamera.Instance;

    using (var captureHandler = new ImageStreamCaptureHandler(Path.Combine(Folder, Image), "jpg"))
    using (var encoder = new MMALImageEncoder(captureHandler))
    using (var nullSink = new MMALNullSinkComponent())
    {
        encoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);

        // Pipeline: still port -> encoder; preview -> null sink.
        camera.Camera.StillPort.ConnectTo(encoder);
        camera.Camera.PreviewPort.ConnectTo(nullSink);

        camera.ConfigureCameraSettings();

        await camera.BeginProcessing(camera.Camera.StillPort);
    }

    camera.Cleanup();

    // Record the newest capture, relative to the image sub-folder.
    LastImageFilename = Path.Combine(Image, GetLastFile(Path.Combine(Folder, Image)).Name);
}
// Captures a single still to /home/pi/images/ using the supplied encoding,
// pixel format and file extension.
private async Task TakePictureManual(string extension, MMALEncoding encoding, MMALEncoding pixelFormat)
{
    using (var captureHandler = new ImageStreamCaptureHandler("/home/pi/images/", extension))
    using (var encoder = new MMALImageEncoder())
    using (var sink = new MMALNullSinkComponent())
    {
        this.Cam.ConfigureCameraSettings();

        // Camera warm up time.
        await Task.Delay(2000);

        // Wire up the pipeline: still port -> encoder, preview port -> null sink.
        var outputConfig = new MMALPortConfig(encoding, pixelFormat, quality: 90);
        encoder.ConfigureOutputPort(outputConfig, captureHandler);
        this.Cam.Camera.StillPort.ConnectTo(encoder);
        this.Cam.Camera.PreviewPort.ConnectTo(sink);

        await this.Cam.ProcessAsync(this.Cam.Camera.StillPort);
    }
}
// Captures a single JPEG still at the given quality and returns the encoded
// bytes from an in-memory capture handler.
public async Task<byte[]> TakePicture(int quality)
{
    using (var memoryHandler = new InMemoryCaptureHandler())
    using (var encoder = new MMALImageEncoder())
    using (var sink = new MMALNullSinkComponent())
    {
        // NOTE(review): lock(this) is an anti-pattern — external code could take the
        // same lock. A private readonly gate field would be preferable; kept as-is
        // here to preserve behavior, since the field lives outside this method.
        lock (this)
        {
            MMALCamera.Instance.ConfigureCameraSettings();
        }

        var jpegConfig = new MMALPortConfig(MMALEncoding.JPEG, MMALEncoding.I420, quality: quality);
        encoder.ConfigureOutputPort(jpegConfig, memoryHandler);

        MMALCamera.Instance.Camera.StillPort.ConnectTo(encoder);
        MMALCamera.Instance.Camera.PreviewPort.ConnectTo(sink);

        await MMALCamera.Instance.ProcessAsync(MMALCamera.Instance.Camera.StillPort);

        return memoryHandler.WorkingData.ToArray();
    }
}
// Configures the camera for 60fps video capture, routes the video port through a
// splitter that converts frames to BGR24, and raises ProcessFrame for each frame
// until the supplied CancellationToken is cancelled.
public async Task Start(FrameSplitter frameSplitter, CancellationToken ct)
{
    _splitter = frameSplitter;

    // Global camera configuration must be set before ConfigureCameraSettings().
    MMALCameraConfig.VideoStabilisation = false;
    MMALCameraConfig.SensorMode = MMALSensorMode.Mode1;
    MMALCameraConfig.ExposureMode = MMAL_PARAM_EXPOSUREMODE_T.MMAL_PARAM_EXPOSUREMODE_BACKLIGHT;
    MMALCameraConfig.VideoResolution = new Resolution(CapWidth, CapHeight);
    MMALCameraConfig.VideoFramerate = new MMAL_RATIONAL_T(60, 1);

    using var vidCaptureHandler = new EmguInMemoryCaptureHandler();
    using var splitter = new MMALSplitterComponent();
    using var renderer = new MMALNullSinkComponent();

    _cam.ConfigureCameraSettings();

    Log.Debug("Cam mode is " + MMALCameraConfig.SensorMode);

    // Register to the event.
    // NOTE(review): ProcessFrame is never unsubscribed; the handler is disposed when
    // this method returns, so this looks benign — confirm if Start can be re-entered.
    vidCaptureHandler.MyEmguEvent += ProcessFrame;

    // We are instructing the splitter to do a format conversion to BGR24.
    var splitterPortConfig = new MMALPortConfig(MMALEncoding.BGR24, MMALEncoding.BGR24, CapWidth, CapHeight, null);

    // By default in MMALSharp, the Video port outputs using proprietary communication (Opaque) with a YUV420 pixel format.
    // Changes to this are done via MMALCameraConfig.VideoEncoding and MMALCameraConfig.VideoSub format.
    splitter.ConfigureInputPort(
        new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, CapWidth, CapHeight, null),
        _cam.Camera.VideoPort,
        null);

    // We then use the splitter config object we constructed earlier. We then tell this output port to use our capture handler to record data.
    splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterPortConfig, vidCaptureHandler);

    _cam.Camera.PreviewPort.ConnectTo(renderer);
    _cam.Camera.VideoPort.ConnectTo(splitter);

    // Camera warm up time; honors cancellation.
    await Task.Delay(2000, ct);

    // Runs until ct is cancelled.
    await _cam.ProcessAsync(_cam.Camera.VideoPort, ct).ConfigureAwait(false);

    Log.Debug("Camera closed.");
}
// Integration test: takes one still with the given encoding/pixel format and
// asserts a non-empty file was written.
public void TakePicture(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    TestHelper.BeginTest("TakePicture", encodingType.EncodingName, pixelFormat.EncodingName);
    TestHelper.SetConfigurationDefaults();

    AsyncContext.Run(async () =>
    {
        TestHelper.CleanDirectory("/home/pi/images/tests");

        using (var handler = new ImageStreamCaptureHandler("/home/pi/images/tests", extension))
        using (var nullSink = new MMALNullSinkComponent())
        using (var encoder = new MMALImageEncoder(handler))
        {
            _fixture.MMALCamera.ConfigureCameraSettings();

            encoder.ConfigureOutputPort(encodingType, pixelFormat, 90);

            // Create our component pipeline.
            _fixture.MMALCamera.Camera.StillPort.ConnectTo(encoder);
            _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(nullSink);

            // Camera warm up time
            await Task.Delay(2000);

            await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.StillPort);

            var filepath = handler.GetFilepath();
            if (!System.IO.File.Exists(filepath))
            {
                Assert.True(false, $"File {filepath} was not created");
            }
            else
            {
                var length = new System.IO.FileInfo(filepath).Length;
                Assert.True(length > 0);
            }
        }
    });
}
/// <summary>
/// Builds the camera pipeline: the video port feeds an H.264 encoder (YUV420)
/// writing to the supplied network stream, and the preview port is routed to a
/// null sink.
/// </summary>
/// <param name="outputStream">Stream that receives the encoded H.264 data.</param>
/// <param name="resolution">Capture resolution (X = width, Y = height).</param>
/// <param name="bitrate">Target encoder bitrate in bits per second.</param>
/// <param name="frameRate">Capture framerate in frames per second.</param>
/// <param name="quality">Encoder quality value passed to the port config.</param>
/// <param name="frameCaptured">Optional handler assigned to OnFrameCaptured.</param>
public Camera(NetworkStream outputStream, Vector2 resolution, int bitrate = 1300000, int frameRate = 25, int quality = 0, EventHandler<byte[]> frameCaptured = null)
{
    OnFrameCaptured = frameCaptured;

    MMALCameraConfig.VideoResolution = new Resolution((int)resolution.X, (int)resolution.Y);
    MMALCameraConfig.VideoFramerate = new MMAL_RATIONAL_T(frameRate, 1);

    OutputHandler = new NetworkStreamCaptureHandler(outputStream);
    VideoEncoder = new MMALVideoEncoder();

    Instance = MMALCamera.Instance;
    Instance.ConfigureCameraSettings();

    // H.264 with a YUV420 pixel format at the requested quality and bitrate.
    var encoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality, bitrate, null);
    VideoEncoder.ConfigureOutputPort(encoderConfig, OutputHandler);

    nullSink = new MMALNullSinkComponent();
    Instance.Camera.PreviewPort.ConnectTo(nullSink);
    Instance.Camera.VideoPort.ConnectTo(VideoEncoder);
}