/// <summary>
/// Loads a sample image, converts it to HSV, and opens a trackbar window
/// with H/S/V min-max sliders (handled by the *_Changed callbacks defined
/// elsewhere in this file, which update _mask). On exit, saves the mask.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("Hello World!");

    string inputImagePath = "Images/gauge-cage.jpg";
    _src = Cv2.ImRead(inputImagePath);

    // BUG FIX: Cv2.ImRead does not return null when the file is missing or
    // unreadable — it returns an *empty* Mat. The old `_src is null` check
    // never fired, so CvtColor below would throw on a bad path.
    if (_src is null || _src.Empty())
    {
        return;
    }

    Cv2.ImShow("src", _src);

    Cv2.CvtColor(_src, _hsv, ColorConversionCodes.BGR2HSV);
    Cv2.ImShow("hsv", _hsv);

    // Create the named window that will host the sliders.
    Cv2.NamedWindow(WINDOW_NAME);

    // Attach one slider per HSV channel bound (callbacks rebuild _mask).
    Cv2.CreateTrackbar("H_Min", WINDOW_NAME, 359, onChange: H_Min_Changed);
    Cv2.CreateTrackbar("H_Max", WINDOW_NAME, 359, onChange: H_Max_Changed);
    Cv2.CreateTrackbar("S_Min", WINDOW_NAME, 255, onChange: S_Min_Changed);
    Cv2.CreateTrackbar("S_Max", WINDOW_NAME, 255, onChange: S_Max_Changed);
    Cv2.CreateTrackbar("V_Min", WINDOW_NAME, 255, onChange: V_Min_Changed);
    Cv2.CreateTrackbar("V_Max", WINDOW_NAME, 255, onChange: V_Max_Changed);

    // Show the initial (unfiltered) image until a slider is moved.
    Cv2.ImShow(WINDOW_NAME, _src);
    Cv2.WaitKey();

    // ROBUSTNESS: _mask is only populated by the trackbar callbacks; if the
    // user never moved a slider it may still be null/empty, and ImWrite on
    // an empty Mat throws. Guard before saving.
    if (_mask is not null && !_mask.Empty())
    {
        Cv2.ImWrite("Images/output.jpg", _mask);
    }
}
/// <summary>
/// Opens the ZED 2 camera, enables positional tracking and the object
/// detection module (person class only), and wires up the CPU-side
/// image/point-cloud buffers, the 2D birds-eye tracking viewer, and the
/// OpenGL window. NOTE(review): call order matters here — Open must precede
/// EnablePositionalTracking, which must precede EnableObjectDetection.
/// </summary>
/// <param name="args">Command-line arguments, forwarded to parseArgs to override init_params.</param>
public MainWindow(string[] args)
{
    // --- Camera configuration parameters ---
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 10f;
    // NOTE(review): self-calibration is disabled here — presumably deliberate
    // for reproducibility; confirm this is intended for production use.
    init_params.cameraDisableSelfCalib = true;
    maxDepthDistance = init_params.depthMaximumDistance;
    // Command-line overrides (SVO path, IP stream, resolution, ...).
    parseArgs(args, ref init_params);

    // --- Open the camera (device index 0) ---
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        // Hard exit: nothing has been allocated that needs cleanup yet.
        Environment.Exit(-1);
    }

    // This sample's detection model requires a ZED 2; bail out otherwise.
    // NOTE(review): returning here leaves the camera open but the rest of the
    // object uninitialized — callers should treat this instance as unusable.
    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // --- Enable positional tracking (mandatory for object detection) ---
    // Identity pose: tracking starts at the camera's current position.
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // --- Enable the object detection module ---
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    // Track objects across frames instead of per-image detection.
    obj_det_params.enableObjectTracking = true;
    isTrackingON = obj_det_params.enableObjectTracking;
    obj_det_params.enable2DMask = false;
    obj_det_params.imageSync = true; // detection is synchronized to the image grab
    obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;
    if (USE_BATCHING)
    {
        // Batched (latency-buffered) trajectory output; handler buffers
        // twice the latency window, in frames-worth of seconds.
        batchParameters = new BatchParameters();
        batchParameters.latency = 2.0f;
        batchParameters.enable = true;
        batchHandler = new BatchSystemHandler((int)batchParameters.latency * 2);
        obj_det_params.batchParameters = batchParameters;
    }
    zedCamera.EnableObjectDetection(ref obj_det_params);

    // --- Object detection runtime parameters ---
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    detection_confidence = 60;
    obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
    // Class filter: only PERSON is enabled (array is int-valued; 1 = enabled).
    obj_runtime_parameters.objectClassFilter = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
    // Example: enable vehicles as well.
    //obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(true);
    // Per-class confidence threshold (only PERSON is set here).
    obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
    //obj_runtime_parameters.object_confidence_threshold[(int)sl.OBJECT_CLASS.VEHICLE] = detection_confidence;

    // --- Buffers filled in the main loop ---
    objects = new Objects();
    imageLeft = new sl.Mat();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;

    // Display resolution capped at 1280x720.
    displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
    // Birds-eye "tracks" side panel: fixed 400 px wide, same height as display.
    Resolution tracksRes = new Resolution(400, (uint)displayRes.height);
    // One global image holding [camera view | tracks view] side by side.
    globalImage = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width + (int)tracksRes.width, OpenCvSharp.MatType.CV_8UC4);
    // Sub-view over the left (camera image) region — shares globalImage memory.
    imageLeftOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect(0, 0, (int)displayRes.width, (int)displayRes.height));
    // Sub-view over the right (tracks) region — also shares globalImage memory.
    imageTrackOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect((int)displayRes.width, 0, (int)tracksRes.width, (int)tracksRes.height));
    // CPU-side sl::Mat the SDK renders into...
    imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
    // ...and an OpenCV Mat aliasing that same memory via GetPtr() (no copy).
    imageRenderLeft = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());
    // Scale factors from full camera resolution to display resolution.
    imgScale = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);

    // --- OpenGL viewer and 3D point cloud ---
    viewer = new GLViewer();
    camWorldPose = new Pose();
    camCameraPose = new Pose();
    pointCloud = new sl.Mat();
    // Point cloud at reduced resolution (≤720x404) to limit bandwidth.
    pcRes = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
    pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

    // --- 2D birds-eye track viewer ---
    trackViewGenerator = new TrackingViewer(tracksRes, (int)zedCamera.GetCameraFPS(), maxDepthDistance, 3);
    trackViewGenerator.setCameraCalibration(zedCamera.GetCalibrationParameters());

    // OpenCV window with a live confidence-threshold slider.
    window_name = "ZED| 2D View and Birds view";
    Cv2.NamedWindow(window_name, WindowMode.Normal);
    Cv2.CreateTrackbar("Confidence", window_name, ref detection_confidence, 100);

    // Create the OpenGL window (defined elsewhere in this class).
    CreateWindow();
}