/// <summary>
/// Captures H.264 video for one minute and pipes it through FFmpeg into an AVI container,
/// then asserts the output file exists.
/// </summary>
public async Task RawVideoConvert()
{
    TestHelper.BeginTest("RawVideoConvert");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    using (var aviHandler = FFmpegCaptureHandler.RawVideoToAvi("/home/pi/videos/tests", "testing1234"))
    using (var encoder = new MMALVideoEncoder())
    using (var preview = new MMALVideoRenderer())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        // H.264 / YUV420 at quality 10, 25Mb/s, no timeout (cancellation token controls duration).
        var encoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 25000000, null);
        encoder.ConfigureOutputPort(encoderConfig, aviHandler);

        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record for one minute.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromMinutes(1));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Fixture.CheckAndAssertFilepath("/home/pi/videos/tests/testing1234.avi");
    }
}
/// <summary>
/// Records 15 seconds of H.264 with timestamp (PTS) output enabled and asserts that
/// both the video file and the sidecar .pts file were written.
/// </summary>
public async Task TakeVideoAndStoreTimestamps()
{
    TestHelper.BeginTest("Video - TakeVideoAndStoreTimestamps");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    // Third constructor argument enables PTS (timestamp) storage.
    using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264", true))
    using (var preview = new MMALVideoRenderer())
    using (var encoder = new MMALVideoEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var encoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, bitrate: MMALVideoEncoder.MaxBitrateLevel4);
        encoder.ConfigureOutputPort(encoderConfig, handler);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record video for 15 seconds.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(15));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Fixture.CheckAndAssertFilepath(handler.GetFilepath());
        Fixture.CheckAndAssertFilepath($"{handler.Directory}/{handler.CurrentFilename}.pts");
    }
}
/// <summary>
/// Streams H.264 video to an FLV endpoint via FFmpeg for the given duration,
/// blocking the caller until streaming completes (uses an older MMALSharp API surface).
/// </summary>
/// <param name="url">Base URL of the streaming endpoint.</param>
/// <param name="streamName">Stream name appended to the URL.</param>
/// <param name="duration">How long to stream before stopping.</param>
public void StartStreamingAsync(string url, string streamName, TimeSpan duration)
{
    var camera = MMALCamera.Instance;

    AsyncContext.Run(async () =>
    {
        using (var ffmpegHandler = new FFmpegCaptureHandler($"-i - -vcodec copy -an -f flv {url}{streamName}"))
        using (var encoder = new MMALVideoEncoder(ffmpegHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.Add(duration)))
        using (var preview = new MMALVideoRenderer())
        {
            camera.ConfigureCameraSettings();

            // H.264 / YUV420 at 25Mb/s (quality argument 0 here).
            encoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 0, 25000000);

            camera.Camera.VideoPort.ConnectTo(encoder);
            camera.Camera.PreviewPort.ConnectTo(preview);

            // Camera warm up time.
            await Task.Delay(2000);

            // Processing stops when the encoder timeout (now + duration) elapses.
            await camera.BeginProcessing(camera.Camera.VideoPort);
        }
    });

    camera.Cleanup();
}
/// <summary>
/// Records 30 seconds of H.264 video with an annotation overlay whose timestamp text
/// refreshes every second, then asserts the output file exists.
/// </summary>
public async Task AnnotateVideoRefreshSeconds()
{
    // Fix: this previously logged "Video - AnnotateVideo", mislabeling the test in
    // output and making it indistinguishable from the plain annotate test.
    TestHelper.BeginTest("Video - AnnotateVideoRefreshSeconds");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    // Annotation refreshes once per second, rendering the current HH:mm:ss.
    MMALCameraConfig.Annotate = new AnnotateImage();
    MMALCameraConfig.Annotate.RefreshRate = DateTimeTextRefreshRate.Seconds;
    MMALCameraConfig.Annotate.TimeFormat = "HH:mm:ss";

    using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
    using (var vidEncoder = new MMALVideoEncoder())
    using (var renderer = new MMALVideoRenderer())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        // H.264 / YUV420 at quality 10, level-4 maximum bitrate (25Mb/s).
        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 10, bitrate: MMALVideoEncoder.MaxBitrateLevel4);
        vidEncoder.ConfigureOutputPort(portConfig, vidCaptureHandler);

        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(vidEncoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

        // Camera warm up time.
        await Task.Delay(2000);

        // Take video for 30 seconds.
        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
    }
}
/// <summary>
/// Records raw YUV420 frames for 15 seconds directly from a resizer component
/// (1080p downscaled to 640x480), bypassing the encoder, then asserts the file exists.
/// </summary>
public async Task RecordVideoDirectlyFromResizer()
{
    TestHelper.BeginTest("RecordVideoDirectlyFromResizer");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    using (var rawHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
    using (var preview = new MMALVideoRenderer())
    using (var resizer = new MMALResizerComponent())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        // Resize the camera feed down to 640x480, keeping I420 on both sides.
        var resizerConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 640, 480, 0, 0, 0, false, null);
        resizer.ConfigureOutputPort<VideoPort>(0, resizerConfig, rawHandler);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(resizer);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record video for 15 seconds.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(15));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Fixture.CheckAndAssertFilepath(rawHandler.GetFilepath());
    }
}
/// <summary>
/// Event handler that captures a single JPEG still to /home/pi/images/.
/// Reconfigures the camera first if the config form flagged a reload.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event arguments (unused).</param>
private void TakePicture(object sender, EventArgs e)
{
    if (ReloadConfig)
    {
        this.MMALCamera.ConfigureCameraSettings();
        ConfigForm.ReloadConfig = false;
    }

    using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/", "jpg"))
    using (var imgEncoder = new MMALImageEncoder(imgCaptureHandler))
    using (var renderer = new MMALVideoRenderer())
    {
        this.MMALCamera.ConfigureCameraSettings();

        // Create our component pipeline: JPEG at quality 90, YUV420 input.
        imgEncoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);
        this.MMALCamera.Camera.StillPort.ConnectTo(imgEncoder);
        this.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

        // Fix: the original used Task.Factory.Run (which does not exist on TaskFactory)
        // and fired the capture without awaiting it, so the using blocks disposed the
        // pipeline while processing was still in flight. Block until the capture
        // completes, matching the button-press handler's use of AsyncContext.Run.
        AsyncContext.Run(async () =>
        {
            // Camera warm up time.
            await Task.Delay(5000);

            await this.MMALCamera.BeginProcessing(this.MMALCamera.Camera.StillPort);
        });
    }
}
/// <summary>
/// Records 15 seconds of video with the supplied encoding/pixel format combination
/// and asserts the output file exists.
/// </summary>
/// <param name="extension">File extension for the output file.</param>
/// <param name="encodingType">The video encoding to use.</param>
/// <param name="pixelFormat">The pixel format to use.</param>
public async Task TakeVideo(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    TestHelper.BeginTest("TakeVideo", encodingType.EncodingName, pixelFormat.EncodingName);
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", extension))
    using (var preview = new MMALVideoRenderer())
    using (var encoder = new MMALVideoEncoder(handler))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        // 25fps, quality 10, 25Mb/s, no timeout.
        var encoderConfig = new MMALPortConfig(encodingType, pixelFormat, 25, 10, 25000000, null);
        encoder.ConfigureOutputPort(encoderConfig);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record video for 15 seconds.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(15));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Fixture.CheckAndAssertFilepath(handler.GetFilepath());
    }
}
/// <summary>
/// Self-contained method for recording raw video frames directly from the camera's video port.
/// The encoding and pixel format are taken from <see cref="MMALCameraConfig.VideoEncoding"/>
/// and <see cref="MMALCameraConfig.VideoSubformat"/>.
/// </summary>
/// <param name="handler">The video capture handler that receives the raw frames.</param>
/// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakeRawVideo(IVideoCaptureHandler handler, CancellationToken cancellationToken)
{
    using (var splitter = new MMALSplitterComponent())
    using (var preview = new MMALVideoRenderer())
    {
        this.ConfigureCameraSettings();

        var splitterOutConfig = new MMALPortConfig(MMALCameraConfig.VideoEncoding, MMALCameraConfig.VideoSubformat, 0);

        // SplitterVideoPort stops the resolution being applied against the splitter component.
        splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterOutConfig, handler);

        // Wire up the pipeline.
        this.Camera.VideoPort.ConnectTo(splitter);
        this.Camera.PreviewPort.ConnectTo(preview);

        MMALLog.Logger.LogInformation($"Preparing to take raw video. Resolution: {this.Camera.VideoPort.Resolution.Width} x {this.Camera.VideoPort.Resolution.Height}. " +
            $"Encoder: {MMALCameraConfig.VideoEncoding.EncodingName}. Pixel Format: {MMALCameraConfig.VideoSubformat.EncodingName}.");

        // Camera warm up time.
        await Task.Delay(2000).ConfigureAwait(false);

        await this.ProcessAsync(this.Camera.VideoPort, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Sample: records ten seconds of MJPEG video to /home/pi/videos/ and then
/// cleans up the camera. Blocks until recording finishes.
/// </summary>
public static void TakeVideoManualMode()
{
    MMALCamera cam = MMALCamera.Instance;

    AsyncContext.Run(async () =>
    {
        using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/", "avi"))
        using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
        using (var renderer = new MMALVideoRenderer())
        {
            cam.ConfigureCameraSettings();

            // MJPEG / YUV420 at quality 90, 25Mb/s.
            vidEncoder.ConfigureOutputPort(0, MMALEncoding.MJPEG, MMALEncoding.I420, 90, 25000000);

            cam.Camera.VideoPort.ConnectTo(vidEncoder);
            cam.Camera.PreviewPort.ConnectTo(renderer);

            // Camera warm up time.
            await Task.Delay(2000);

            // Take video for 10 seconds.
            var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            await cam.ProcessAsync(cam.Camera.VideoPort, cts.Token);
        }
    });

    cam.Cleanup();
}
/// <summary>
/// Records 10 seconds of video into a circular buffer capture handler (with motion
/// vector storage enabled) and asserts the buffer accumulated data.
/// </summary>
/// <param name="extension">File extension for the output file.</param>
/// <param name="encodingType">The video encoding to use.</param>
/// <param name="pixelFormat">The pixel format to use.</param>
public async Task TakeVideoWithCircularBuffer(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    TestHelper.BeginTest("TakeVideoWithCircularBuffer", encodingType.EncodingName, pixelFormat.EncodingName);
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    // 4KB circular buffer.
    using (var ringHandler = new CircularBufferCaptureHandler(4096, "/home/pi/videos/tests", extension))
    using (var preview = new MMALVideoRenderer())
    using (var encoder = new MMALVideoEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var encoderConfig = new MMALPortConfig(encodingType, pixelFormat, 10, 25000000, null, storeMotionVectors: true);
        encoder.ConfigureOutputPort(encoderConfig, ringHandler);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record video for 10 seconds.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        // The circular buffer must have stored some data during the recording operation.
        Assert.True(ringHandler.Buffer.Size > 0);
    }
}
/// <summary>
/// Self-contained method for recording H.264 video for a specified amount of time.
/// Records at 30fps, 25Mb/s at the highest quality.
/// </summary>
/// <param name="handler">The video capture handler to apply to the encoder.</param>
/// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
/// <param name="split">Used for Segmented video mode.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakeVideo(VideoStreamCaptureHandler handler, CancellationToken cancellationToken, Split split = null)
{
    // Split mode requires inline headers; silently disable it otherwise.
    if (split != null && !MMALCameraConfig.InlineHeaders)
    {
        MMALLog.Logger.Warn("Inline headers not enabled. Split mode not supported when this is disabled.");
        split = null;
    }

    using (var encoder = new MMALVideoEncoder(handler, null, split))
    using (var preview = new MMALVideoRenderer())
    {
        this.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(MMALEncoding.H264, MMALEncoding.I420, 0, MMALVideoEncoder.MaxBitrateLevel4);

        // Wire up the pipeline.
        this.Camera.VideoPort.ConnectTo(encoder);
        this.Camera.PreviewPort.ConnectTo(preview);

        MMALLog.Logger.Info($"Preparing to take video. Resolution: {encoder.Width} x {encoder.Height}. " +
            $"Encoder: {encoder.Outputs[0].EncodingType.EncodingName}. Pixel Format: {encoder.Outputs[0].PixelFormat.EncodingName}.");

        // Camera warm up time.
        await Task.Delay(2000);

        await this.ProcessAsync(this.Camera.VideoPort, cancellationToken);
    }
}
/// <summary>
/// Records a "fragmented" MP4 via an external ffmpeg process. A fragmented MP4 stays
/// playable if recording is interrupted, unlike a normal MP4 whose single MOOV atom
/// is only written as a trailer. See https://superuser.com/a/1530949/143047
/// Observed sizes: 10s frag = 26MB vs 34MB "bad" MP4; 60s frag = 208MB vs 219MB.
/// </summary>
/// <param name="seconds">Requested capture duration in seconds.</param>
static async Task fragmp4(int seconds)
{
    // HACK: roughly two seconds are lost to startup, so pad the requested duration
    // to approximate the expected file size (10s ~= 33MB, 60s ~= 218MB).
    seconds += 2;

    var cam = GetConfiguredCamera();
    var pathname = ramdiskPath + "video.mp4";

    Directory.CreateDirectory(ramdiskPath);
    File.Delete(pathname);

    Console.WriteLine("Preparing pipeline...");
    cam.ConfigureCameraSettings();

    // -g sets keyframe rate (default 250); -flush_packets 1 flushes I/O per packet,
    // reportedly reducing latency.
    using (var ffmpeg = new ExternalProcessCaptureHandler(
        new ExternalProcessCaptureHandlerOptions
        {
            Filename = "ffmpeg",
            Arguments = $"-framerate 24 -i - -b:v 2500k -c copy -movflags +frag_keyframe+separate_moof+omit_tfhd_offset+empty_moov {pathname}",
            EchoOutput = true,
            DrainOutputDelayMs = 500, // default
            TerminationSignals = new[] { Signum.SIGINT, Signum.SIGQUIT }, // not the supposedly-correct SIGINT+SIGINT but this produces some exit output
        }))
    {
        // Per the quality arg-help: a bitrate of zero means quality drives VBR.
        var portCfg = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 10, bitrate: 0, timeout: null);

        using var encoder = new MMALVideoEncoder();
        using var renderer = new MMALVideoRenderer();
        encoder.ConfigureOutputPort(portCfg, ffmpeg);
        cam.Camera.VideoPort.ConnectTo(encoder);
        cam.Camera.PreviewPort.ConnectTo(renderer);

        Console.WriteLine("Camera warmup...");
        await Task.Delay(2000);

        Console.WriteLine($"Capturing MP4: {pathname}");
        var timerToken = new CancellationTokenSource(TimeSpan.FromSeconds(seconds));
        await Task.WhenAll(new Task[]
        {
            ffmpeg.ManageProcessLifecycleAsync(timerToken.Token),
            cam.ProcessAsync(cam.Camera.VideoPort, timerToken.Token),
        }).ConfigureAwait(false);
    }

    // Can't use the convenient fall-through using or MMALCamera.Cleanup —
    // throws: "Argument is invalid. Unable to destroy component".
    cam.Cleanup();

    Console.WriteLine("Exiting.");
}
/// <summary>
/// Verifies the video encoder can be reconfigured with a different encoding type
/// between two recording runs: first H.264, then MJPEG.
/// </summary>
public async Task ChangeEncoderType()
{
    TestHelper.BeginTest("Video - ChangeEncoderType");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    // First pass: H.264. Fix: this previously configured MMALEncoding.MJPEG,
    // contradicting the "h264" file extension and leaving both passes with the
    // same encoder type — defeating the purpose of the test.
    using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
    using (var preview = new MMALVideoRenderer())
    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 25, 10, 25000000, null);
        vidEncoder.ConfigureOutputPort(portConfig);

        // Create our component pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(vidEncoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(20));

        // Record video for 20 seconds.
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
    }

    // Second pass: MJPEG.
    using (var vidCaptureHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "mjpeg"))
    using (var preview = new MMALVideoRenderer())
    using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler))
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(MMALEncoding.MJPEG, MMALEncoding.I420, 25, 10, 25000000, null);
        vidEncoder.ConfigureOutputPort(portConfig);

        // Create our component pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(vidEncoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromSeconds(20));

        // Record video for 20 seconds.
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, cts.Token);

        Fixture.CheckAndAssertFilepath(vidCaptureHandler.GetFilepath());
    }
}
/// <summary>
/// Configures the GPIO push button: pulls the pin up and wires press/release handlers.
/// A press captures a single JPEG still; the <c>Processing</c> flag debounces
/// re-entrant presses while a capture is in flight.
/// </summary>
private void ConfigureButton()
{
    _buttonPin.PinMode = GpioPinDriveMode.Input;
    _button = new Unosquare.RaspberryIO.Peripherals.Button(_buttonPin);
    _buttonPin.InputPullMode = GpioPinResistorPullMode.PullUp;

    MMALLog.Logger.Debug("Input button configured");

    _button.Released += (s, e) =>
    {
        MMALLog.Logger.Debug("Button released");
    };

    _button.Pressed += (s, e) =>
    {
        MMALLog.Logger.Debug("Button pressed");

        // Ignore presses while a capture is already running.
        if (Processing)
        {
            return;
        }

        Processing = true;

        try
        {
            if (ReloadConfig)
            {
                this.MMALCamera.ConfigureCameraSettings();
                ConfigForm.ReloadConfig = false;
            }

            AsyncContext.Run(async () =>
            {
                using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/", "jpg"))
                using (var imgEncoder = new MMALImageEncoder(imgCaptureHandler))
                using (var renderer = new MMALVideoRenderer())
                {
                    this.MMALCamera.ConfigureCameraSettings();

                    // Create our component pipeline: JPEG at quality 90, YUV420 input.
                    imgEncoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);
                    this.MMALCamera.Camera.StillPort.ConnectTo(imgEncoder);
                    this.MMALCamera.Camera.PreviewPort.ConnectTo(renderer);

                    // Camera warm up time.
                    await Task.Delay(2000);

                    await this.MMALCamera.BeginProcessing(this.MMALCamera.Camera.StillPort);
                }
            });
        }
        finally
        {
            // Fix: previously this flag was only cleared on the success path, so any
            // exception during capture left Processing stuck true and the button
            // permanently unresponsive.
            Processing = false;
        }
    };
}
/// <summary>
/// Demonstration of why piping to ffmpeg for a plain (non-fragmented) MP4 fails:
/// ffmpeg cannot finalize the MOOV trailer when run as a child process this way.
/// Kept for reference only — the produced file is NOT a valid MP4.
/// </summary>
/// <param name="seconds">Capture duration in seconds.</param>
static async Task badmp4(int seconds)
{
    Console.WriteLine("\n\nWARNING:\nffmpeg can't create a valid MP4 when running as a child process.\nSee repository README. This code is here for reference only.\n\nPress any key...");
    Console.ReadKey(true);

    var cam = GetConfiguredCamera();
    var pathname = ramdiskPath + "video.mp4";

    Directory.CreateDirectory(ramdiskPath);
    File.Delete(pathname);

    Console.WriteLine("Preparing pipeline...");
    cam.ConfigureCameraSettings();

    using (var ffmpeg = new ExternalProcessCaptureHandler(
        new ExternalProcessCaptureHandlerOptions
        {
            Filename = "ffmpeg",
            Arguments = $"-framerate 24 -i - -b:v 2500k -c copy {pathname}",
            EchoOutput = true,
            DrainOutputDelayMs = 500, // default
            TerminationSignals = new[] { Signum.SIGINT, Signum.SIGQUIT }, // not the supposedly-correct SIGINT+SIGINT but this produces some exit output
        }))
    {
        // Per the quality arg-help: a bitrate of zero means quality drives VBR.
        var portCfg = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 10, bitrate: 0, timeout: null);

        using var encoder = new MMALVideoEncoder();
        using var renderer = new MMALVideoRenderer();
        encoder.ConfigureOutputPort(portCfg, ffmpeg);
        cam.Camera.VideoPort.ConnectTo(encoder);
        cam.Camera.PreviewPort.ConnectTo(renderer);

        Console.WriteLine("Camera warmup...");
        await Task.Delay(2000);

        Console.WriteLine($"Capturing MP4: {pathname}");
        var timerToken = new CancellationTokenSource(TimeSpan.FromSeconds(seconds));
        await Task.WhenAll(new Task[]
        {
            ffmpeg.ManageProcessLifecycleAsync(timerToken.Token),
            cam.ProcessAsync(cam.Camera.VideoPort, timerToken.Token),
        }).ConfigureAwait(false);
    }

    // Can't use the convenient fall-through using or MMALCamera.Cleanup —
    // throws: "Argument is invalid. Unable to destroy component".
    cam.Cleanup();

    Console.WriteLine("Exiting. Remember, video.mp4 is not valid.");
}
/// <summary>
/// Streams the camera as MJPEG over HTTP (port 8554) by piping H.264 into a cvlc
/// transcode pipeline for the requested number of seconds.
/// </summary>
/// <param name="seconds">How long to stream.</param>
static async Task stream(int seconds)
{
    var cam = GetConfiguredCamera();

    MMALCameraConfig.VideoResolution = new MMALSharp.Common.Utility.Resolution(640, 480);
    MMALCameraConfig.SensorMode = MMALSensorMode.Mode7; // for some reason mode 6 has a pinkish tinge
    MMALCameraConfig.VideoFramerate = new MMAL_RATIONAL_T(20, 1);

    Console.WriteLine("Preparing pipeline...");
    cam.ConfigureCameraSettings();

    // Note: cvlc requires real quotes even though the command-line equivalent used apostrophes.
    using (var vlc = new ExternalProcessCaptureHandler(
        new ExternalProcessCaptureHandlerOptions
        {
            Filename = "cvlc",
            Arguments = @"stream:///dev/stdin --sout ""#transcode{vcodec=mjpg,vb=2500,fps=20,acodec=none}:standard{access=http{mime=multipart/x-mixed-replace;boundary=7b3cc56e5f51db803f790dad720ed50a},mux=mpjpeg,dst=:8554/}"" :demux=h264",
            EchoOutput = true,
            DrainOutputDelayMs = 500, // default
            TerminationSignals = ExternalProcessCaptureHandlerOptions.signalsVLC
        }))
    {
        var portCfg = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, quality: 0, bitrate: MMALVideoEncoder.MaxBitrateMJPEG, timeout: null);

        using var encoder = new MMALVideoEncoder();
        using var renderer = new MMALVideoRenderer();
        encoder.ConfigureOutputPort(portCfg, vlc);
        cam.Camera.VideoPort.ConnectTo(encoder);
        cam.Camera.PreviewPort.ConnectTo(renderer);

        Console.WriteLine("Camera warmup...");
        await Task.Delay(2000);

        Console.WriteLine($"Streaming MJPEG for {seconds} sec to:");
        Console.WriteLine($"http://{Environment.MachineName}.local:8554/");

        var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(seconds));
        await Task.WhenAll(new Task[]
        {
            vlc.ManageProcessLifecycleAsync(timeout.Token),
            cam.ProcessAsync(cam.Camera.VideoPort, timeout.Token),
        }).ConfigureAwait(false);
    }

    // Can't use the convenient fall-through using or MMALCamera.Cleanup —
    // throws: "Argument is invalid. Unable to destroy component".
    cam.Cleanup();

    Console.WriteLine("Exiting.");
}
/// <summary>
/// Records raw video for 15 seconds simultaneously to four files via the splitter's
/// four output ports, then asserts all four files exist.
/// </summary>
public async Task RecordVideoDirectlyFromSplitter()
{
    TestHelper.BeginTest("RecordVideoDirectlyFromSplitter");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    using (var rawHandler1 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
    using (var rawHandler2 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
    using (var rawHandler3 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
    using (var rawHandler4 = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
    using (var preview = new MMALVideoRenderer())
    using (var splitter = new MMALSplitterComponent())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var splitterOutConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, 0, 0, null);

        // Feed the splitter from the camera's video port and attach one raw
        // capture handler per splitter output.
        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
        splitter.ConfigureOutputPort<VideoPort>(0, splitterOutConfig, rawHandler1);
        splitter.ConfigureOutputPort<VideoPort>(1, splitterOutConfig, rawHandler2);
        splitter.ConfigureOutputPort<VideoPort>(2, splitterOutConfig, rawHandler3);
        splitter.ConfigureOutputPort<VideoPort>(3, splitterOutConfig, rawHandler4);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record video for 15 seconds.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(15));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Fixture.CheckAndAssertFilepath(rawHandler1.GetFilepath());
        Fixture.CheckAndAssertFilepath(rawHandler2.GetFilepath());
        Fixture.CheckAndAssertFilepath(rawHandler3.GetFilepath());
        Fixture.CheckAndAssertFilepath(rawHandler4.GetFilepath());
    }
}
/// <summary>
/// Records H.264 in segmented (split) mode — a new file every 15 seconds — for a
/// total of 30 seconds, then asserts exactly two files were produced.
/// </summary>
public async Task TakeVideoSplit()
{
    TestHelper.BeginTest("TakeVideoSplit");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests/split_test");

    // Split mode requires inline headers so each segment starts decodable.
    MMALCameraConfig.InlineHeaders = true;

    using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests/split_test", "h264"))
    using (var preview = new MMALVideoRenderer())
    using (var encoder = new MMALVideoEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        // Roll over to a new file every 15 seconds.
        var segment = new Split { Mode = TimelapseMode.Second, Value = 15 };

        var encoderConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, 25000000, null, segment);
        encoder.ConfigureOutputPort(encoderConfig, handler);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // 30 seconds total -> 2 segment files expected.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(30));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Assert.True(Directory.GetFiles("/home/pi/videos/tests/split_test").Length == 2);
    }
}
/// <summary>
/// Records video through a camera -> splitter -> resizer chain (downscaled to
/// 1024x768) for 15 seconds, then asserts the output file exists.
/// A single output is used because Pi disk IO struggles with multiple
/// simultaneous files unless writing to a ramdisk.
/// </summary>
public async Task RecordVideoDirectlyFromResizerWithSplitterComponent()
{
    TestHelper.BeginTest("RecordVideoDirectlyFromResizerWithSplitterComponent");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "h264"))
    using (var preview = new MMALVideoRenderer())
    using (var splitter = new MMALSplitterComponent())
    using (var resizer = new MMALResizerComponent())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var splitterOutConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420);

        // The resizer's timeout (now + 15s) ends processing rather than a cancellation token.
        var resizerOutConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420, width: 1024, height: 768, timeout: DateTime.Now.AddSeconds(15));

        // Configure the splitter and resizer ports.
        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), Fixture.MMALCamera.Camera.VideoPort, null);
        splitter.ConfigureOutputPort(0, splitterOutConfig, null);
        resizer.ConfigureOutputPort<VideoPort>(0, resizerOutConfig, handler);

        // Wire up the pipeline: camera -> splitter -> resizer.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);
        splitter.Outputs[0].ConnectTo(resizer);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort);

        Fixture.CheckAndAssertFilepath(handler.GetFilepath());
    }
}
/// <summary>
/// Records 10 seconds of video with inline motion vectors enabled, writing the
/// vectors to a separate file, and asserts the video file exists.
/// </summary>
/// <param name="extension">File extension for the output file.</param>
/// <param name="encodingType">The video encoding to use.</param>
/// <param name="pixelFormat">The pixel format to use.</param>
public async Task TakeVideoAndStoreMotionVectors(string extension, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    TestHelper.BeginTest("TakeVideoAndStoreMotionVectors", encodingType.EncodingName, pixelFormat.EncodingName);
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    // Inline motion vectors must be enabled for the encoder to emit them.
    MMALCameraConfig.InlineMotionVectors = true;

    using (var motionStore = File.Create("/home/pi/videos/tests/motion.dat"))
    using (var handler = new VideoStreamCaptureHandler("/home/pi/videos/tests", extension))
    using (var preview = new MMALVideoRenderer())
    using (var encoder = new MMALVideoEncoder())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var encoderConfig = new MMALPortConfig(encodingType, pixelFormat, 10, 25000000, null, storeMotionVectors: true);
        encoder.ConfigureOutputPort(encoderConfig, handler);

        // Route motion vector data into the side file.
        handler.InitialiseMotionStore(motionStore);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(encoder);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record video for 10 seconds.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Fixture.CheckAndAssertFilepath(handler.GetFilepath());
    }
}
/// <summary>
/// Records video to /home/pi/videos/ with the given encoding, pixel format and
/// bitrate for the requested number of seconds.
/// </summary>
/// <param name="extension">File extension for the output file.</param>
/// <param name="encoding">The video encoding to use.</param>
/// <param name="pixelFormat">The pixel format to use.</param>
/// <param name="bitrate">Target bitrate in bits/second.</param>
/// <param name="seconds">Capture duration in seconds.</param>
private async Task TakeVideoManual(string extension, MMALEncoding encoding, MMALEncoding pixelFormat, int bitrate, int seconds)
{
    using (var handler = new VideoStreamCaptureHandler($"/home/pi/videos/", extension))
    using (var encoder = new MMALVideoEncoder(handler))
    using (var preview = new MMALVideoRenderer())
    {
        this.Cam.ConfigureCameraSettings();

        encoder.ConfigureOutputPort(0, encoding, pixelFormat, 0, bitrate);

        // Wire up the pipeline.
        this.Cam.Camera.VideoPort.ConnectTo(encoder);
        this.Cam.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record for the requested duration.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(seconds));
        await this.Cam.ProcessAsync(this.Cam.Camera.VideoPort, stopTimer.Token);
    }
}
/// <summary>
/// Streams H.264 video to an FLV endpoint via FFmpeg for the given duration,
/// blocking the caller until streaming completes (uses an older MMALSharp API surface).
/// </summary>
/// <param name="url">Base URL of the streaming endpoint.</param>
/// <param name="streamName">Stream name appended to the URL.</param>
/// <param name="duration">How long to stream before stopping.</param>
public void StartStreaming(string url, string streamName, TimeSpan duration)
{
    var camera = MMALCamera.Instance;

    var task = Task.Run(async () =>
    {
        using (var ffmpegHandler = new FFmpegCaptureHandler($"-i - -vcodec copy -an -f flv {url}{streamName}"))
        using (var encoder = new MMALVideoEncoder(ffmpegHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.Add(duration)))
        using (var preview = new MMALVideoRenderer())
        {
            // H.264 / YUV420 at 25Mb/s.
            encoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 0, 25000000);

            camera.Camera.VideoPort.ConnectTo(encoder);
            camera.Camera.PreviewPort.ConnectTo(preview);

            camera.ConfigureCameraSettings();

            // Processing stops when the encoder timeout (now + duration) elapses.
            await camera.BeginProcessing(camera.Camera.VideoPort);
        }
    });

    // TODO: Improve Woopsa error to display AggregateException.
    task.Wait();

    camera.Cleanup();
}
/// <summary>
/// Records raw video for 15 seconds from a single splitter output port and asserts the
/// file exists. A single output is used because Pi disk IO struggles with multiple
/// simultaneous files unless writing to a ramdisk.
/// </summary>
public async Task RecordVideoDirectlyFromSplitter()
{
    TestHelper.BeginTest("RecordVideoDirectlyFromSplitter");
    TestHelper.SetConfigurationDefaults();
    TestHelper.CleanDirectory("/home/pi/videos/tests");

    using (var rawHandler = new VideoStreamCaptureHandler("/home/pi/videos/tests", "raw"))
    using (var preview = new MMALVideoRenderer())
    using (var splitter = new MMALSplitterComponent())
    {
        Fixture.MMALCamera.ConfigureCameraSettings();

        var splitterOutConfig = new MMALPortConfig(MMALEncoding.I420, MMALEncoding.I420);

        // Feed the splitter from the camera's video port; capture from output 0.
        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0), Fixture.MMALCamera.Camera.VideoPort, null);
        splitter.ConfigureOutputPort(0, splitterOutConfig, rawHandler);

        // Wire up the pipeline.
        Fixture.MMALCamera.Camera.VideoPort.ConnectTo(splitter);
        Fixture.MMALCamera.Camera.PreviewPort.ConnectTo(preview);

        // Camera warm up time.
        await Task.Delay(2000);

        // Record video for 15 seconds.
        var stopTimer = new CancellationTokenSource(TimeSpan.FromSeconds(15));
        await Fixture.MMALCamera.ProcessAsync(Fixture.MMALCamera.Camera.VideoPort, stopTimer.Token);

        Fixture.CheckAndAssertFilepath(rawHandler.GetFilepath());
    }
}
/// <summary>
/// Records H.264 video (rotated 270 degrees) through FFmpeg into an AVI file named
/// "Millie{timestamp}" until the token is cancelled, then cleans up the camera.
/// </summary>
/// <param name="cancellationToken">Signals when to stop recording.</param>
public async Task TakeVideo(CancellationToken cancellationToken)
{
    var cam = MMALCamera.Instance;

    Console.WriteLine($"Video path: {_videoSavePath}Millie{DateTime.Now:s}");

    try
    {
        using (var ffCaptureHandler = FFmpegCaptureHandler.RawVideoToAvi(_videoSavePath, $"Millie{DateTime.Now:s}"))
        using (var vidEncoder = new MMALVideoEncoder(ffCaptureHandler))
        using (var renderer = new MMALVideoRenderer())
        {
            // Fix: the rotation must be set BEFORE ConfigureCameraSettings() is called;
            // previously it was assigned afterwards, so the camera was configured
            // without it and the 270-degree rotation never took effect.
            MMALCameraConfig.Rotation = 270;
            cam.ConfigureCameraSettings();

            // H.264 / YUV420 at quality 10, level-4 maximum bitrate (25Mb/s).
            var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, MMALVideoEncoder.MaxBitrateLevel4, null);
            vidEncoder.ConfigureOutputPort(portConfig);

            // Create our component pipeline.
            cam.Camera.VideoPort.ConnectTo(vidEncoder);
            cam.Camera.PreviewPort.ConnectTo(renderer);

            // Camera warm up time.
            await Task.Delay(2000, cancellationToken);

            await cam.ProcessAsync(cam.Camera.VideoPort, cancellationToken);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    finally
    {
        // Only call when you no longer require the camera, i.e. on app shutdown.
        cam.Cleanup();
    }
}
/// <summary>
/// Synchronously records an AVI clip of the requested duration and stores the
/// resulting filename in <c>LastVideoFilename</c>.
/// </summary>
/// <param name="duration">How long to record; the encoder stops at now + duration.</param>
public void GrabVideo(TimeSpan duration)
{
    var camera = MMALCamera.Instance;
    var task = Task.Run(async () =>
    {
        using (var vidCaptureHandler = new VideoStreamCaptureHandler(Path.Combine(Folder, Video), "avi"))
        using (var vidEncoder = new MMALVideoEncoder(vidCaptureHandler, new MMAL_RATIONAL_T(25, 1), DateTime.Now.Add(duration)))
        using (var renderer = new MMALVideoRenderer())
        {
            // H.264 / YUV420 at 25Mb/s, 25fps.
            vidEncoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 0, 25000000);
            camera.Camera.VideoPort.ConnectTo(vidEncoder);
            camera.Camera.PreviewPort.ConnectTo(renderer);
            camera.ConfigureCameraSettings();
            await camera.BeginProcessing(camera.Camera.VideoPort);
        }
    });

    // GetAwaiter().GetResult() blocks like Wait() but rethrows the task's ORIGINAL
    // exception instead of wrapping it in an AggregateException — this resolves the
    // TODO about Woopsa displaying an unhelpful AggregateException.
    task.GetAwaiter().GetResult();
    camera.Cleanup();

    // NOTE(review): the stored path is rooted at Image while the file is written under
    // Video — presumably intentional for how LastVideoFilename is consumed; confirm.
    LastVideoFilename = Path.Combine(Image, GetLastFile(Path.Combine(Folder, Video)).Name);
}
/// <summary>
/// Self-contained method for recording H.264 video for a specified amount of time.
/// Records at 30fps, 25Mb/s at the highest quality.
/// </summary>
/// <param name="handler">The video capture handler to apply to the encoder.</param>
/// <param name="timeout">A timeout to stop the video capture.</param>
/// <param name="split">Used for Segmented video mode.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakeVideo(VideoStreamCaptureHandler handler, DateTime? timeout = null, Split split = null)
{
    // Segmented recording depends on inline headers; fall back to a single file when disabled.
    if (split != null && !MMALCameraConfig.InlineHeaders)
    {
        MMALLog.Logger.Warn("Inline headers not enabled. Split mode not supported when this is disabled.");
        split = null;
    }

    using (var encoder = new MMALVideoEncoder(handler, new MMAL_RATIONAL_T(30, 1), timeout, split))
    using (var previewRenderer = new MMALVideoRenderer())
    {
        encoder.ConfigureOutputPort(0, MMALEncoding.H264, MMALEncoding.I420, 10, 25000000);

        // Create our component pipeline.
        this.Camera.VideoPort.ConnectTo(encoder);
        this.Camera.PreviewPort.ConnectTo(previewRenderer);
        this.ConfigureCameraSettings();

        var outputPort = encoder.Outputs[0];
        MMALLog.Logger.Info(
            $"Preparing to take video. Resolution: {encoder.Width} x {encoder.Height}. " +
            $"Encoder: {outputPort.EncodingType.EncodingName}. Pixel Format: {outputPort.PixelFormat.EncodingName}.");

        await BeginProcessing(this.Camera.VideoPort, encoder);
    }
}
// motion without raw recording, decouple from onDetect event
//
// Detects motion for totalSeconds and, on each detection, records a H.264 clip of roughly
// recordSeconds using a cancellation-token "timer" instead of the built-in MotionConfig
// recording time (see the explanatory notes below).
static async Task motion(int totalSeconds, int recordSeconds, int sensitivity)
{
    // Clear out output from previous runs on the ramdisk.
    DeleteFiles(ramdiskPath, "*.h264");
    DeleteFiles(ramdiskPath, "*.raw");

    var cam = GetConfiguredCamera();

    // No longer cut-and-paste from the MMALSharp wiki:
    // The built-in MotionConfig "recordingTime" argument only applies to calling StartRecording
    // on the motion buffer, which is RAW (and huge). That also means the onStopDetect action
    // for cam.WithMotionDetection is not especially useful. So this variation doesn't record the
    // RAW stream and instead uses a token timeout to terminate the recording.

    // When using H.264 encoding we require key frames to be generated for the Circular buffer capture handler.
    MMALCameraConfig.InlineHeaders = true;

    Console.WriteLine("Preparing pipeline...");
    using (var splitter = new MMALSplitterComponent())
    {
        // Two capture handlers are being used here, one for motion detection and the other to record a H.264 stream.
        using var vidCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "h264");
        using var motionCircularBufferCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "raw");
        using var resizer = new MMALIspComponent();
        using var vidEncoder = new MMALVideoEncoder();
        using var renderer = new MMALVideoRenderer();

        cam.ConfigureCameraSettings();

        var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0, 0, null);
        var vidEncoderPortConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 0, MMALVideoEncoder.MaxBitrateLevel4, null);

        // The ISP resizer is being used for better performance. Frame difference motion detection will only work if using raw video data. Do not encode to H.264/MJPEG.
        // Resizing to a smaller image may improve performance, but ensure that the width/height are multiples of 32 and 16 respectively to avoid cropping.
        var resizerPortConfig = new MMALPortConfig(MMALEncoding.RGB24, MMALEncoding.RGB24, 640, 480, 0, 0, 0, false, null);

        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), cam.Camera.VideoPort, null);
        splitter.ConfigureOutputPort(0, splitterPortConfig, null);
        splitter.ConfigureOutputPort(1, splitterPortConfig, null);
        resizer.ConfigureOutputPort<VideoPort>(0, resizerPortConfig, motionCircularBufferCaptureHandler);
        vidEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), splitter.Outputs[1], null);
        vidEncoder.ConfigureOutputPort(vidEncoderPortConfig, vidCaptureHandler);

        // Splitter output 0 feeds the resizer (motion detection); output 1 feeds the H.264 encoder.
        cam.Camera.VideoPort.ConnectTo(splitter);
        cam.Camera.PreviewPort.ConnectTo(renderer);
        splitter.Outputs[0].ConnectTo(resizer);
        splitter.Outputs[1].ConnectTo(vidEncoder);

        Console.WriteLine("Camera warmup...");
        await Task.Delay(2000);

        Console.WriteLine($"Detecting motion for {totalSeconds} seconds with sensitivity threshold {sensitivity}...");
        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(totalSeconds));

        // The recording duration doesn't matter; see notes at top of this method.
        var motionConfig = new MotionConfig(TimeSpan.FromSeconds(10), sensitivity);

        // Stephen Cleary says CTS disposal is unnecessary as long as you cancel! https://stackoverflow.com/a/19005066/152997
        var startRecordingCTS = LocalPrepareToRecord();

        await cam.WithMotionDetection(
            motionCircularBufferCaptureHandler,
            motionConfig,
            // This callback will be invoked when motion has been detected.
            () =>
            {
                // This has no effect if the token is already cancelled.
                startRecordingCTS.Cancel();
            })
            .ProcessAsync(cam.Camera.VideoPort, cts.Token);

        // Arms a fresh token whose cancellation triggers LocalStartRecording.
        // NOTE(review): the local 'cts' below re-uses the name of the method-level 'cts'
        // captured by LocalStartRecording — confirm the compiler accepts this as written.
        CancellationTokenSource LocalPrepareToRecord()
        {
            var cts = new CancellationTokenSource();
            cts.Token.Register(LocalStartRecording);
            return (cts);
        }

        // NOTE(review): async void — any exception thrown here is unobservable; acceptable
        // for a fire-and-forget token callback in a demo, but worth confirming.
        async void LocalStartRecording()
        {
            Console.WriteLine($"Motion detected, recording {recordSeconds} seconds...");

            // Pause motion detection while the H.264 clip is recorded.
            motionCircularBufferCaptureHandler.DisableMotionDetection();
            vidCaptureHandler.StartRecording();
            vidEncoder.RequestIFrame();

            // Prepare to record
            // Stephen Cleary says CTS disposal is unnecessary as long as you cancel! https://stackoverflow.com/a/19005066/152997
            var recordingCTS = new CancellationTokenSource();

            // When the token expires, stop recording and re-enable capture
            recordingCTS.Token.Register(LocalEndRecording);

            // Start the clock
            recordingCTS.CancelAfter(recordSeconds * 1000);

            // Record until the duration passes or the overall motion detection token expires
            await Task.WhenAny(new Task[] { cts.Token.AsTask(), recordingCTS.Token.AsTask() });
            if (!recordingCTS.IsCancellationRequested)
            {
                recordingCTS.Cancel();
            }
        }

        // Stops the current recording, splits the output file, and re-arms detection.
        void LocalEndRecording()
        {
            Console.WriteLine("...recording stopped.");
            startRecordingCTS = LocalPrepareToRecord();
            motionCircularBufferCaptureHandler.EnableMotionDetection();
            vidCaptureHandler.StopRecording();
            vidCaptureHandler.Split();
        }
    }

    // can't use the convenient fall-through using or MMALCamera.Cleanup
    // throws: Argument is invalid. Unable to destroy component
    cam.Cleanup();
    Console.WriteLine("Exiting.");
}
// Captures a small raw still, then renders it as a static preview overlay while taking a
// full-resolution JPEG, and asserts the JPEG file was written with non-zero length.
public void StaticOverlay()
{
    TestHelper.BeginTest("StaticOverlay");
    TestHelper.SetConfigurationDefaults();

    // Phase 1 config: low resolution raw I420 still to use as the overlay source.
    MMALCameraConfig.StillResolution = Resolution.As03MPixel;
    MMALCameraConfig.StillEncoding = MMALEncoding.I420;
    MMALCameraConfig.StillSubFormat = MMALEncoding.I420;

    AsyncContext.Run(async () =>
    {
        var filename = string.Empty;

        // Capture the raw frame that will later be loaded as the overlay image.
        using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests/staticoverlay", "raw"))
        {
            TestHelper.CleanDirectory("/home/pi/images/tests");
            TestHelper.CleanDirectory("/home/pi/images/tests/staticoverlay");

            await _fixture.MMALCamera.TakeRawPicture(imgCaptureHandler);
            filename = imgCaptureHandler.GetFilepath();
        }

        PreviewConfiguration previewConfig = new PreviewConfiguration
        {
            FullScreen = false,
            PreviewWindow = new Rectangle(160, 0, 640, 480),
            Layer = 2,
            Opacity = 1
        };

        // Phase 2 config: full 1080p still for the actual JPEG capture.
        MMALCameraConfig.StillResolution = Resolution.As1080p;
        MMALCameraConfig.StillEncoding = MMALEncoding.OPAQUE;

        using (var imgCaptureHandler = new ImageStreamCaptureHandler("/home/pi/images/tests", "jpg"))
        using (var imgEncoder = new MMALImageEncoder(imgCaptureHandler))
        using (var video = new MMALVideoRenderer(previewConfig))
        {
            _fixture.MMALCamera.ConfigureCameraSettings();
            video.ConfigureRenderer();

            // Overlay resolution/encoding must match the raw frame captured in phase 1.
            PreviewOverlayConfiguration overlayConfig = new PreviewOverlayConfiguration
            {
                FullScreen = true,
                PreviewWindow = new Rectangle(50, 0, 640, 480),
                Layer = 1,
                Resolution = new Resolution(640, 480),
                Encoding = MMALEncoding.I420,
                Opacity = 255
            };

            var overlay = _fixture.MMALCamera.AddOverlay(video, overlayConfig, File.ReadAllBytes(filename));
            overlay.ConfigureRenderer();
            overlay.UpdateOverlay();

            // Create our component pipeline.
            imgEncoder.ConfigureOutputPort(0, MMALEncoding.JPEG, MMALEncoding.I420, 90);
            _fixture.MMALCamera.Camera.StillPort.ConnectTo(imgEncoder);
            _fixture.MMALCamera.Camera.PreviewPort.ConnectTo(video);

            _fixture.MMALCamera.PrintPipeline();

            await _fixture.MMALCamera.ProcessAsync(_fixture.MMALCamera.Camera.StillPort);

            // Assert the JPEG exists and is non-empty.
            if (System.IO.File.Exists(imgCaptureHandler.GetFilepath()))
            {
                var length = new System.IO.FileInfo(imgCaptureHandler.GetFilepath()).Length;
                Assert.True(length > 0);
            }
            else
            {
                Assert.True(false, $"File {imgCaptureHandler.GetFilepath()} was not created");
            }
        }
    });
}
// motion as in the wiki (records raw file)
//
// Detects motion for totalSeconds; on detection it records BOTH the H.264 stream and the
// raw motion buffer for recordSeconds (the MotionConfig recording time), then re-arms.
static async Task motion_record_raw(int totalSeconds, int recordSeconds, int sensitivity)
{
    // Clear out output from previous runs on the ramdisk.
    DeleteFiles(ramdiskPath, "*.h264");
    DeleteFiles(ramdiskPath, "*.raw");

    var cam = GetConfiguredCamera();

    // When using H.264 encoding we require key frames to be generated for the Circular buffer capture handler.
    MMALCameraConfig.InlineHeaders = true;

    Console.WriteLine("Preparing pipeline...");
    using (var splitter = new MMALSplitterComponent())
    {
        // Two capture handlers are being used here, one for motion detection and the other to record a H.264 stream.
        using var vidCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "h264");
        using var motionCircularBufferCaptureHandler = new CircularBufferCaptureHandler(4000000, "/media/ramdisk", "raw");
        using var resizer = new MMALIspComponent();
        using var vidEncoder = new MMALVideoEncoder();
        using var renderer = new MMALVideoRenderer();

        cam.ConfigureCameraSettings();

        // The ISP resizer is being used for better performance. Frame difference motion detection will only work if using raw video data. Do not encode to H.264/MJPEG.
        // Resizing to a smaller image may improve performance, but ensure that the width/height are multiples of 32 and 16 respectively to avoid cropping.
        var resizerPortConfig = new MMALPortConfig(MMALEncoding.RGB24, MMALEncoding.RGB24, 640, 480, 0, 0, 0, false, null);
        var vidEncoderPortConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 0, MMALVideoEncoder.MaxBitrateLevel4, null);
        var splitterPortConfig = new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, 0, 0, null);

        splitter.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), cam.Camera.VideoPort, null);
        splitter.ConfigureOutputPort(0, splitterPortConfig, null);
        splitter.ConfigureOutputPort(1, splitterPortConfig, null);
        resizer.ConfigureOutputPort<VideoPort>(0, resizerPortConfig, motionCircularBufferCaptureHandler);
        vidEncoder.ConfigureInputPort(new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420), splitter.Outputs[1], null);
        vidEncoder.ConfigureOutputPort(vidEncoderPortConfig, vidCaptureHandler);

        // Splitter output 0 feeds the resizer (motion detection); output 1 feeds the H.264 encoder.
        cam.Camera.VideoPort.ConnectTo(splitter);
        cam.Camera.PreviewPort.ConnectTo(renderer);
        splitter.Outputs[0].ConnectTo(resizer);
        splitter.Outputs[1].ConnectTo(vidEncoder);

        Console.WriteLine("Camera warmup...");
        await Task.Delay(2000);

        var cts = new CancellationTokenSource(TimeSpan.FromSeconds(totalSeconds));

        // Here the built-in MotionConfig recording time IS used to bound each recording.
        var motionConfig = new MotionConfig(TimeSpan.FromSeconds(recordSeconds), sensitivity);

        Console.WriteLine($"Detecting motion for {totalSeconds} seconds with sensitivity threshold {sensitivity}...");

        await cam.WithMotionDetection(
            motionCircularBufferCaptureHandler,
            motionConfig,
            // This callback will be invoked when motion has been detected.
            () =>
            {
                Console.WriteLine($"Motion detected, recording {recordSeconds} seconds...");

                // Stop motion detection while we are recording.
                motionCircularBufferCaptureHandler.DisableMotionDetection();

                // Start recording our H.264 video.
                vidCaptureHandler.StartRecording();
                motionCircularBufferCaptureHandler.StartRecording();

                // Request a key frame to be immediately generated by the h.264 encoder.
                vidEncoder.RequestIFrame();
            },
            // Invoked when motion handler recording-time expires
            () =>
            {
                // We want to re-enable the motion detection.
                motionCircularBufferCaptureHandler.EnableMotionDetection();

                // Stop recording on our capture handlers.
                motionCircularBufferCaptureHandler.StopRecording();
                vidCaptureHandler.StopRecording();

                // Optionally create new file for our next recording run (don't do the RAW file, we don't want it).
                vidCaptureHandler.Split();

                Console.WriteLine("...recording stopped.");
            })
            .ProcessAsync(cam.Camera.VideoPort, cts.Token);
    }

    // can't use the convenient fall-through using or MMALCamera.Cleanup
    // throws: Argument is invalid. Unable to destroy component
    cam.Cleanup();
    Console.WriteLine("Exiting.");
}
/// <summary>
/// Creates an overlay renderer that is able to render an overlay from a static image source.
/// </summary>
/// <param name="parent">The parent renderer which is being used to overlay onto the display.</param>
/// <param name="config">The configuration for rendering a static preview overlay.</param>
/// <param name="source">A reference to the current stream being used in the overlay.</param>
/// <returns>The created <see cref="MMALOverlayRenderer"/> object.</returns>
public MMALOverlayRenderer AddOverlay(MMALVideoRenderer parent, PreviewOverlayConfiguration config, byte[] source)
{
    return new MMALOverlayRenderer(parent, config, source);
}