/// <summary>
/// C# implementation of https://github.com/robidouille/robidouille/blob/master/raspicam_cv/RaspiCamTest.c
/// Opens the Pi camera, displays frames in an OpenCV window until a key is
/// pressed, then destroys the window and releases the native capture handle.
/// </summary>
public void Run()
{
    Log.Info("Creating Window");
    var windowName = "PiCamCVSimpleTest";
    CvInvoke.NamedWindow(windowName); //Create the window using the specific name

    Log.Info("Creating capture");
    // Native raspicam_cv capture only works on the Pi; fail fast elsewhere.
    EnvironmentService.DemandUnix("OpenCV 3.0 deprecated these capture methods. Only supported with PiCamCv on Pi");

    var captureConfig = new CaptureConfig
    {
        Resolution = new Resolution(640, 480),
        Framerate = 25,
        Monochrome = true
    };
    var piConfig = PiCameraConfig.FromConfig(captureConfig);

    IntPtr capture = CvInvokeRaspiCamCV.cvCreateCameraCapture2(0, ref piConfig); // Index doesn't really matter

    do
    {
        IntPtr imagePtr = CvInvokeRaspiCamCV.cvQueryFrame(capture);
        using (var managedImage = Image<Bgr, Byte>.FromIplImagePtr(imagePtr))
        {
            CvInvoke.Imshow(windowName, managedImage);
        }
    } while (CvInvoke.WaitKey(100) < 0); // WaitKey returns -1 while no key pressed

    // FIX: previously destroyed a window named "RaspiCamTest" (copied from the
    // original C sample), which never existed here — the real window named by
    // windowName was left open. Destroy the window we actually created.
    CvInvoke.DestroyWindow(windowName);
    CvInvokeRaspiCamCV.cvReleaseCapture(ref capture);
}
/// <summary>
/// Builds a capture grabber: Pi camera on Unix, USB webcam otherwise.
/// When no config is supplied a small low-resolution default is used.
/// Logs the properties the device actually granted and sanity-checks the ROI.
/// </summary>
private static ICaptureGrab BuildCaptureGrabber(CaptureConfig config = null)
{
    var request = new CaptureRequest
    {
        Device = EnvironmentService.IsUnix ? CaptureDevice.Pi : CaptureDevice.Usb,
        // Fall back to a modest default when the caller supplied nothing.
        Config = config ?? new CaptureConfig
        {
            Resolution = new Resolution(160, 120),
            Framerate = 50,
            Monochrome = false
        }
    };

    var capture = CaptureFactory.GetCapture(request);

    // The device may not honour the requested settings — read back what we got.
    var actualConfig = capture.GetCaptureProperties();
    Log.Info($"Created capture: {actualConfig}");
    SafetyCheckRoi(_consoleOptions, actualConfig);

    return capture;
}
// Cached and compiled: the pattern never changes, and constructing a Regex
// per Parse() call is needlessly expensive. \d+ is the idiomatic form of \d{1,}.
private static readonly Regex CaptureConfigRegex =
    new Regex(@"(?<resWidth>\d+)x(?<resHeight>\d+),(?<fps>\d+)", RegexOptions.Compiled);

/// <summary>
/// Parses a capture configuration of the form '120x340,40', i.e. WIDTHxHEIGHT,FPS.
/// </summary>
/// <param name="config">Configuration string; may be null or blank.</param>
/// <returns>The parsed <see cref="CaptureConfig"/>, or null when the input is blank or does not match.</returns>
/// <exception cref="ArgumentException">Thrown when the matched numbers cannot be converted (e.g. overflow).</exception>
public static CaptureConfig Parse(string config)
{
    if (string.IsNullOrWhiteSpace(config))
    {
        return null;
    }

    var match = CaptureConfigRegex.Match(config);
    if (!match.Success)
    {
        return null;
    }

    var captureConfig = new CaptureConfig();
    try
    {
        captureConfig.Framerate = Convert.ToInt32(match.Groups["fps"].Value);
        captureConfig.Resolution.Width = Convert.ToInt32(match.Groups["resWidth"].Value);
        captureConfig.Resolution.Height = Convert.ToInt32(match.Groups["resHeight"].Value);
        captureConfig.Bitrate = 100000; // default bitrate; not part of the parsed string
    }
    catch (Exception e)
    {
        // Convert.ToInt32 can still overflow on absurdly long digit runs.
        throw new ArgumentException($"CaptureConfig='{config}' is invalid", e);
    }

    return captureConfig;
}
/// <summary>
/// Parses a capture configuration string of the form '120x340,40' (WIDTHxHEIGHT,FPS).
/// </summary>
/// <param name="config">Configuration string; may be null or blank.</param>
/// <returns>The parsed <see cref="CaptureConfig"/>, or null for blank or non-matching input.</returns>
/// <exception cref="ArgumentException">Thrown when the matched numbers cannot be converted.</exception>
public static CaptureConfig Parse(string config)
{
    if (string.IsNullOrWhiteSpace(config))
    {
        return null;
    }

    const string pattern = @"(?<resWidth>\d{1,})x(?<resHeight>\d{1,}),(?<fps>\d{1,})";
    Match match = new Regex(pattern, RegexOptions.None).Match(config);
    if (!match.Success)
    {
        return null;
    }

    var result = new CaptureConfig();
    try
    {
        result.Framerate = Convert.ToInt32(match.Groups["fps"].Value);
        result.Resolution.Width = Convert.ToInt32(match.Groups["resWidth"].Value);
        result.Resolution.Height = Convert.ToInt32(match.Groups["resHeight"].Value);
        result.Bitrate = 100000; // default bitrate; not part of the parsed string
    }
    catch (Exception e)
    {
        throw new ArgumentException($"CaptureConfig='{config}' is invalid", e);
    }

    return result;
}
/// <summary>
/// Strategy backed by a hand-measured 320x240 linear regression; derives
/// per-axis scale factors so the calibration applies at other capture resolutions.
/// </summary>
public HandCalibratedModifierStrategy(CaptureConfig captureConfig, Point target)
    : base(captureConfig, target)
{
    _regressorPair = new LinearRegressorFactory().GetHandMeasured320x240();

    // Calibration was done at 320x240 — if the capture settings differ,
    // the calibrated deltas must be scaled accordingly.
    var widthFactor = captureConfig.Resolution.Width / 320m;
    var heightFactor = captureConfig.Resolution.Height / 240m;
    Scale = new PointD(widthFactor, heightFactor);
}
/// <summary>
/// Clears the configured colour-tracking ROI when it exceeds the actual
/// capture resolution in either dimension; no-op when resolution is invalid
/// or no colour settings are present.
/// </summary>
private static void SafetyCheckRoi(ConsoleOptions options, CaptureConfig captureProperties)
{
    // Nothing to check without a valid resolution and colour settings.
    if (!captureProperties.Resolution.IsValid || options.ColourSettings == null)
    {
        return;
    }

    bool tooWide = options.ColourSettings.Roi.Width > captureProperties.Resolution.Width;
    bool tooTall = options.ColourSettings.Roi.Height > captureProperties.Resolution.Height;

    if (tooWide || tooTall)
    {
        Log.Warn("ROI is too big! Ignoring");
        options.ColourSettings.Roi = Rectangle.Empty;
    }
}
/// <summary>
/// Base strategy holding the capture configuration and the target point.
/// Scale defaults to 1:1; subclasses adjust it when their calibration
/// resolution differs from the capture resolution.
/// </summary>
protected CameraBasedModifierStrategy(CaptureConfig captureConfig, Point target)
{
    CaptureConfig = captureConfig;
    Target = target;
    Scale = new PointD(decimal.One, decimal.One);
}
/// <summary>
/// Wires up all tracking controllers when the camera feed is (re)subscribed.
/// Reads the actual capture properties, centres the reticle, builds the
/// face/colour/motion/multimode/calibration controllers, and — on the first
/// subscription only — hooks the remote hub (screen, image sender, settings
/// events). On later subscriptions the previous multimode controller is
/// unsubscribed first to avoid duplicate hub event handlers.
/// </summary>
protected override void OnSubscribe()
{
    base.OnSubscribe();

    // Use the resolution the device actually granted, not what was requested.
    _captureConfig = CameraCapture.GetCaptureProperties();
    _centre = _captureConfig.Resolution.GetCenter();
    txtReticleX.Text = _centre.X.ToString();
    txtReticleY.Text = _centre.Y.ToString();

    var screen = new TextboxScreen(txtScreen);

    //var imageTransmitter = new BsonPostImageTransmitter();
    var imageTransmitter = new BsonPostJpegTransmitter();

    var colorSettings = _colourSettingsRepo.Read();
    var motionSettings = _motionSettingsRepo.Read();

    // these should be disposed if not null
    _faceTrackingController = new FaceTrackingPanTiltController(PanTiltMechanism, _captureConfig);
    _colourTrackingController = new ColourTrackingPanTiltController(PanTiltMechanism, _captureConfig);
    _motionTrackingController = new MotionTrackingPanTiltController(PanTiltMechanism, _captureConfig, screen);

    // if haven't subscribed first time yet
    if (_multimodePanTiltController == null)
    {
        _remoteScreen = new RemoteTextboxScreen(CameraHubProxy, txtScreen);
        _remoteImageSender = new RemoteImageSender(imageTransmitter, CameraHubProxy);

        // NOTE(review): this handler is attached only once (first subscription)
        // and is never detached — appears intentional given the branch below.
        CameraHubProxy.SettingsChanged += (o, s) =>
        {
            _remoteScreen.WriteLine($"Camera received new settings: {s}");
            _remoteImageSender.SendEveryPeriod = s.TransmitImagePeriod;
            _remoteImageSender.Enabled = s.EnableImageTransmit;
            _remoteScreen.Enabled = s.EnableConsoleTransmit;
        };
    }
    else
    {
        // don't resubscribe and get duplicate events on cameraHubProxy
        _multimodePanTiltController.Unsubscribe();
    }

    // Recreated on every subscription so it picks up the fresh capture config.
    _multimodePanTiltController = new MultimodePanTiltController(
        PanTiltMechanism
        , _captureConfig
        , _remoteScreen
        , CameraHubProxy
        , _remoteImageSender);

    _calibratingPanTiltController = new CalibratingPanTiltController(PanTiltMechanism, new CalibrationReadingsRepository(), screen);

    _colourTrackingController.Settings = colorSettings;
    _calibratingPanTiltController.Settings = colorSettings;
    _motionTrackingController.Settings = motionSettings;

    _calibratingPanTiltController.GetCameraCapture = PullImage;
    _calibratingPanTiltController.WaitStep = CalibrationWaitStep;
    _calibratingPanTiltController.ColourCaptured += _calibratingPanTiltController_ColourCaptured;

    // default check states
    chkBoxColourTracking.Checked = false;
    chkBoxFaceTracker.Checked = false;
    chkBoxMotionTracking.Checked = false;
    chkMultimode.Checked = false;

    Log.InfoFormat("MotionSettings: {0}", motionSettings);
}
/// <summary>
/// Wires up the tracking controllers when the camera feed is subscribed:
/// reads actual capture properties, centres the reticle, initialises I2C,
/// then builds and configures the face/colour/motion/calibration controllers.
/// </summary>
protected override void OnSubscribe()
{
    // Use the resolution the device actually granted, not what was requested.
    _captureConfig = CameraCapture.GetCaptureProperties();
    _centre = _captureConfig.Resolution.GetCenter();
    txtReticleX.Text = _centre.X.ToString();
    txtReticleY.Text = _centre.Y.ToString();

    InitI2C();

    var screen = new TextboxScreen(txtScreen);

    var colorSettings = _colourSettingsRepo.Read();
    var motionSettings = _motionSettingsRepo.Read();

    // these should be disposed if not null
    _faceTrackingController = new FaceTrackingPanTiltController(PanTiltMechanism, _captureConfig);
    _colourTrackingController = new ColourTrackingPanTiltController(PanTiltMechanism, _captureConfig);
    _motionTrackingController = new MotionTrackingPanTiltController(PanTiltMechanism, _captureConfig, screen);
    _calibratingPanTiltController = new CalibratingPanTiltController(PanTiltMechanism, new CalibrationReadingsRepository(), screen);

    _colourTrackingController.Settings = colorSettings;
    _calibratingPanTiltController.Settings = colorSettings;
    _motionTrackingController.Settings = motionSettings;

    _calibratingPanTiltController.GetCameraCapture = PullImage;
    _calibratingPanTiltController.WaitStep = CalibrationWaitStep;
    _calibratingPanTiltController.ColourCaptured += _calibratingPanTiltController_ColourCaptured;

    Log.InfoFormat("MotionSettings: {0}", motionSettings);
}
/*
 * OpenCV capture property reference:
 * CV_CAP_PROP_POS_MSEC       Current position of the video file in milliseconds.
 * CV_CAP_PROP_POS_FRAMES     0-based index of the frame to be decoded/captured next.
 * CV_CAP_PROP_POS_AVI_RATIO  Relative position of the video file
 * CV_CAP_PROP_FRAME_WIDTH    Width of the frames in the video stream.
 * CV_CAP_PROP_FRAME_HEIGHT   Height of the frames in the video stream.
 * CV_CAP_PROP_FPS            Frame rate.
 * CV_CAP_PROP_FOURCC         4-character code of codec.
 * CV_CAP_PROP_FRAME_COUNT    Number of frames in the video file.
 * CV_CAP_PROP_FORMAT         Format of the Mat objects returned by retrieve().
 * CV_CAP_PROP_MODE           Backend-specific value indicating the current capture mode.
 * CV_CAP_PROP_BRIGHTNESS     Brightness of the image (only for cameras).
 * CV_CAP_PROP_CONTRAST       Contrast of the image (only for cameras).
 * CV_CAP_PROP_SATURATION     Saturation of the image (only for cameras).
 * CV_CAP_PROP_HUE            Hue of the image (only for cameras).
 * CV_CAP_PROP_GAIN           Gain of the image (only for cameras).
 * CV_CAP_PROP_EXPOSURE       Exposure (only for cameras).
 * CV_CAP_PROP_CONVERT_RGB    Boolean flags indicating whether images should be converted to RGB.
 * CV_CAP_PROP_WHITE_BALANCE  Currently unsupported
 * CV_CAP_PROP_RECTIFICATION  Rectification flag for stereo cameras
 *                            (note: only supported by DC1394 v 2.x backend currently)
 */

/// <summary>
/// Reads the current capture properties (resolution, framerate, monochrome flag)
/// back from the underlying capture device.
/// </summary>
/// <param name="capture">The capture to query; may be null.</param>
/// <returns>A populated <see cref="CaptureConfig"/>, or null when <paramref name="capture"/> is null.</returns>
public static CaptureConfig GetCaptureProperties(this ICaptureGrab capture)
{
    if (capture is null)
    {
        return null;
    }

    var settings = new CaptureConfig();
    settings.Resolution.Height = Convert.ToInt32(capture.GetCaptureProperty(CapProp.FrameHeight));
    settings.Resolution.Width = Convert.ToInt32(capture.GetCaptureProperty(CapProp.FrameWidth));
    settings.Framerate = Convert.ToInt32(capture.GetCaptureProperty(CapProp.Fps));
    settings.Monochrome = Convert.ToBoolean(capture.GetCaptureProperty(CapProp.Monochrome));
    return settings;
}
/// <summary>
/// Raises the UpdateCapture event with the supplied configuration,
/// doing nothing when no handlers are attached.
/// </summary>
public void InvokeUpdateCapture(CaptureConfig captureConfig)
{
    // Copy to a local so the check and the invoke see the same handler list.
    var handler = UpdateCapture;
    if (handler != null)
    {
        handler(this, captureConfig);
    }
}