// javadoc: CascadeClassifier::detectMultiScale2(image, objects, numDetections)
// Runs multi-scale cascade detection on `image`; `objects` receives the detected
// rectangles and `numDetections` the per-rectangle merged-neighbour counts
// (filled by the native call).
public void detectMultiScale2(Mat image, MatOfRect objects, MatOfInt numDetections)
{
    ThrowIfDisposed();
    if (image != null)
    {
        image.ThrowIfDisposed();
    }
    if (objects != null)
    {
        objects.ThrowIfDisposed();
    }
    if (numDetections != null)
    {
        numDetections.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat objectsMat = objects;
    Mat countsMat = numDetections;
    objdetect_CascadeClassifier_detectMultiScale2_15(nativeObj, image.nativeObj, objectsMat.nativeObj, countsMat.nativeObj);
#else
    return;
#endif
}
/**
 * Given the input frame, create input blob, run net and return result detections.
 * param classIds Class indexes in result detection.
 * param confidences A set of corresponding confidences.
 * param boxes A set of bounding boxes.
 * param frame automatically generated
 */
public void detect(Mat frame, MatOfInt classIds, MatOfFloat confidences, MatOfRect boxes)
{
    ThrowIfDisposed();
    if (frame != null)
    {
        frame.ThrowIfDisposed();
    }
    if (classIds != null)
    {
        classIds.ThrowIfDisposed();
    }
    if (confidences != null)
    {
        confidences.ThrowIfDisposed();
    }
    if (boxes != null)
    {
        boxes.ThrowIfDisposed();
    }
    Mat idsMat = classIds;
    Mat confsMat = confidences;
    Mat boxesMat = boxes;
    dnn_DetectionModel_detect_12(nativeObj, frame.nativeObj, idsMat.nativeObj, confsMat.nativeObj, boxesMat.nativeObj);
}
// javadoc: CascadeClassifier::detectMultiScale2(image, objects, numDetections, scaleFactor, minNeighbors, flags, minSize)
// Multi-scale detection with explicit pyramid/grouping parameters; forwards
// minSize as (width, height) to the native layer.
public void detectMultiScale2(Mat image, MatOfRect objects, MatOfInt numDetections, double scaleFactor, int minNeighbors, int flags, Size minSize)
{
    ThrowIfDisposed();
    if (image != null)
    {
        image.ThrowIfDisposed();
    }
    if (objects != null)
    {
        objects.ThrowIfDisposed();
    }
    if (numDetections != null)
    {
        numDetections.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat objectsMat = objects;
    Mat countsMat = numDetections;
    objdetect_CascadeClassifier_detectMultiScale2_11(nativeObj, image.nativeObj, objectsMat.nativeObj, countsMat.nativeObj, scaleFactor, minNeighbors, flags, minSize.width, minSize.height);
#else
    return;
#endif
}
//
// C++: void cv::xfeatures2d::matchLOGOS(vector_KeyPoint keypoints1, vector_KeyPoint keypoints2, vector_int nn1, vector_int nn2, vector_DMatch matches1to2)
//
/**
 * LOGOS (Local geometric support for high-outlier spatial verification) feature matching strategy described in CITE: Lowry2018LOGOSLG .
 * param keypoints1 Input keypoints of image1.
 * param keypoints2 Input keypoints of image2.
 * param nn1 Index to the closest BoW centroid for each descriptors of image1.
 * param nn2 Index to the closest BoW centroid for each descriptors of image2.
 * param matches1to2 Matches returned by the LOGOS matching strategy.
 * <b>Note:</b>
 * This matching strategy is suitable for features matching against large scale database.
 * First step consists in constructing the bag-of-words (BoW) from a representative image database.
 * Image descriptors are then represented by their closest codevector (nearest BoW centroid).
 */
public static void matchLOGOS(MatOfKeyPoint keypoints1, MatOfKeyPoint keypoints2, MatOfInt nn1, MatOfInt nn2, MatOfDMatch matches1to2)
{
    if (keypoints1 != null)
    {
        keypoints1.ThrowIfDisposed();
    }
    if (keypoints2 != null)
    {
        keypoints2.ThrowIfDisposed();
    }
    if (nn1 != null)
    {
        nn1.ThrowIfDisposed();
    }
    if (nn2 != null)
    {
        nn2.ThrowIfDisposed();
    }
    if (matches1to2 != null)
    {
        matches1to2.ThrowIfDisposed();
    }
    Mat kp1Mat = keypoints1;
    Mat kp2Mat = keypoints2;
    Mat nn1Mat = nn1;
    Mat nn2Mat = nn2;
    Mat matchesMat = matches1to2;
    xfeatures2d_Xfeatures2d_matchLOGOS_10(kp1Mat.nativeObj, kp2Mat.nativeObj, nn1Mat.nativeObj, nn2Mat.nativeObj, matchesMat.nativeObj);
}
// javadoc: CascadeClassifier::detectMultiScale3(image, objects, rejectLevels, levelWeights, scaleFactor, minNeighbors, flags, minSize)
// Multi-scale detection that additionally reports per-detection reject levels
// and level weights (filled by the native call).
public void detectMultiScale3(Mat image, MatOfRect objects, MatOfInt rejectLevels, MatOfDouble levelWeights, double scaleFactor, int minNeighbors, int flags, Size minSize)
{
    ThrowIfDisposed();
    if (image != null)
    {
        image.ThrowIfDisposed();
    }
    if (objects != null)
    {
        objects.ThrowIfDisposed();
    }
    if (rejectLevels != null)
    {
        rejectLevels.ThrowIfDisposed();
    }
    if (levelWeights != null)
    {
        levelWeights.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat objectsMat = objects;
    Mat levelsMat = rejectLevels;
    Mat weightsMat = levelWeights;
    objdetect_CascadeClassifier_detectMultiScale3_12(nativeObj, image.nativeObj, objectsMat.nativeObj, levelsMat.nativeObj, weightsMat.nativeObj, scaleFactor, minNeighbors, flags, minSize.width, minSize.height);
#else
    return;
#endif
}
// javadoc: NMSBoxesRotated(bboxes, scores, score_threshold, nms_threshold, indices)
// Non-maximum suppression over rotated boxes; `indices` receives the indexes of
// the kept boxes (filled by the native call).
public static void NMSBoxesRotated(MatOfRotatedRect bboxes, MatOfFloat scores, float score_threshold, float nms_threshold, MatOfInt indices)
{
    if (bboxes != null)
    {
        bboxes.ThrowIfDisposed();
    }
    if (scores != null)
    {
        scores.ThrowIfDisposed();
    }
    if (indices != null)
    {
        indices.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat boxesMat = bboxes;
    Mat scoresMat = scores;
    Mat keptMat = indices;
    dnn_Dnn_NMSBoxesRotated_12(boxesMat.nativeObj, scoresMat.nativeObj, score_threshold, nms_threshold, keptMat.nativeObj);
#else
    return;
#endif
}
//
// C++: void cv::dnn::Net::getMemoryConsumption(int layerId, MatShape netInputShape, size_t& weights, size_t& blobs)
//
// javadoc: Net::getMemoryConsumption(layerId, netInputShape, weights, blobs)
// The native stub returns the size_t outputs through double[1] buffers; the
// values are narrowed back to long before being stored into the caller's
// (optional) one-element result arrays.
public void getMemoryConsumption(int layerId, MatOfInt netInputShape, long[] weights, long[] blobs)
{
    ThrowIfDisposed();
    if (netInputShape != null)
    {
        netInputShape.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat shapeMat = netInputShape;
    double[] weightsBuf = new double[1];
    double[] blobsBuf = new double[1];
    dnn_Net_getMemoryConsumption_11(nativeObj, layerId, shapeMat.nativeObj, weightsBuf, blobsBuf);
    if (weights != null)
    {
        weights[0] = (long)weightsBuf[0];
    }
    if (blobs != null)
    {
        blobs[0] = (long)blobsBuf[0];
    }
#else
    return;
#endif
}
// javadoc: CascadeClassifier::detectMultiScale3(image, objects, rejectLevels, levelWeights)
// Default-parameter overload of detectMultiScale3: detection plus per-detection
// reject levels and level weights, using the native defaults for the remaining
// arguments.
public void detectMultiScale3(Mat image, MatOfRect objects, MatOfInt rejectLevels, MatOfDouble levelWeights)
{
    ThrowIfDisposed();
    if (image != null)
    {
        image.ThrowIfDisposed();
    }
    if (objects != null)
    {
        objects.ThrowIfDisposed();
    }
    if (rejectLevels != null)
    {
        rejectLevels.ThrowIfDisposed();
    }
    if (levelWeights != null)
    {
        levelWeights.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat objectsMat = objects;
    Mat levelsMat = rejectLevels;
    Mat weightsMat = levelWeights;
    objdetect_CascadeClassifier_detectMultiScale3_16(nativeObj, image.nativeObj, objectsMat.nativeObj, levelsMat.nativeObj, weightsMat.nativeObj);
#else
    return;
#endif
}
//
// C++: static Ptr_BRISK cv::BRISK::create(int thresh, int octaves, vector_float radiusList, vector_int numberList, float dMax = 5.85f, float dMin = 8.2f, vector_int indexChange = std::vector<int>())
//
// javadoc: BRISK::create(thresh, octaves, radiusList, numberList, dMax, dMin, indexChange)
// Fully-parameterized BRISK factory; wraps the returned native pointer via
// BRISK.__fromPtr__.
public static BRISK create(int thresh, int octaves, MatOfFloat radiusList, MatOfInt numberList, float dMax, float dMin, MatOfInt indexChange)
{
    if (radiusList != null)
    {
        radiusList.ThrowIfDisposed();
    }
    if (numberList != null)
    {
        numberList.ThrowIfDisposed();
    }
    if (indexChange != null)
    {
        indexChange.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat radiiMat = radiusList;
    Mat countsMat = numberList;
    Mat indexMat = indexChange;
    return BRISK.__fromPtr__(features2d_BRISK_create_10(thresh, octaves, radiiMat.nativeObj, countsMat.nativeObj, dMax, dMin, indexMat.nativeObj));
#else
    return null;
#endif
}
//
// C++: static Ptr_FREAK cv::xfeatures2d::FREAK::create(bool orientationNormalized = true, bool scaleNormalized = true, float patternScale = 22.0f, int nOctaves = 4, vector_int selectedPairs = std::vector<int>())
//
/**
 * param orientationNormalized Enable orientation normalization.
 * param scaleNormalized Enable scale normalization.
 * param patternScale Scaling of the description pattern.
 * param nOctaves Number of octaves covered by the detected keypoints.
 * param selectedPairs (Optional) user defined selected pairs indexes,
 * return automatically generated
 */
public static FREAK create(bool orientationNormalized, bool scaleNormalized, float patternScale, int nOctaves, MatOfInt selectedPairs)
{
    if (selectedPairs != null)
    {
        selectedPairs.ThrowIfDisposed();
    }
    Mat pairsMat = selectedPairs;
    return FREAK.__fromPtr__(xfeatures2d_FREAK_create_10(orientationNormalized, scaleNormalized, patternScale, nOctaves, pairsMat.nativeObj));
}
//
// C++: cv::VideoWriter::VideoWriter(String filename, int apiPreference, int fourcc, double fps, Size frameSize, vector_int _params)
//
// Constructs a writer through the native layer; frameSize is forwarded as its
// (width, height) components, and the resulting native handle is stored in
// nativeObj.
public VideoWriter(string filename, int apiPreference, int fourcc, double fps, Size frameSize, MatOfInt _params)
{
    if (_params != null)
    {
        _params.ThrowIfDisposed();
    }
    Mat paramsMat = _params;
    nativeObj = videoio_VideoWriter_VideoWriter_12(filename, apiPreference, fourcc, fps, frameSize.width, frameSize.height, paramsMat.nativeObj);
}
//
// C++: void cv::xfeatures2d::PCTSignatures::setInitSeedIndexes(vector_int initSeedIndexes)
//
/**
 * Initial seed indexes for the k-means algorithm.
 * param initSeedIndexes automatically generated
 */
public void setInitSeedIndexes(MatOfInt initSeedIndexes)
{
    ThrowIfDisposed();
    if (initSeedIndexes != null)
    {
        initSeedIndexes.ThrowIfDisposed();
    }
    Mat seedsMat = initSeedIndexes;
    xfeatures2d_PCTSignatures_setInitSeedIndexes_10(nativeObj, seedsMat.nativeObj);
}
//
// C++: bool cv::VideoWriter::open(String filename, int fourcc, double fps, Size frameSize, vector_int _params)
//
// (Re)opens the writer on an existing native object; returns the native
// success flag.
public bool open(string filename, int fourcc, double fps, Size frameSize, MatOfInt _params)
{
    ThrowIfDisposed();
    if (_params != null)
    {
        _params.ThrowIfDisposed();
    }
    Mat paramsMat = _params;
    return videoio_VideoWriter_open_15(nativeObj, filename, fourcc, fps, frameSize.width, frameSize.height, paramsMat.nativeObj);
}
//
// C++: void cv::dnn::Net::setInputShape(String inputName, MatShape shape)
//
/**
 * Specify shape of network input.
 * param inputName automatically generated
 * param shape automatically generated
 */
public void setInputShape(string inputName, MatOfInt shape)
{
    ThrowIfDisposed();
    if (shape != null)
    {
        shape.ThrowIfDisposed();
    }
    Mat shapeMat = shape;
    dnn_Net_setInputShape_10(nativeObj, inputName, shapeMat.nativeObj);
}
//
// C++: void cv::Subdiv2D::getLeadingEdgeList(vector_int& leadingEdgeList)
//
/**
 * Returns a list of the leading edge ID connected to each triangle.
 *
 * param leadingEdgeList Output vector.
 *
 * The function gives one edge ID for each triangle.
 */
public void getLeadingEdgeList(MatOfInt leadingEdgeList)
{
    ThrowIfDisposed();
    if (leadingEdgeList != null)
    {
        leadingEdgeList.ThrowIfDisposed();
    }
    Mat edgesMat = leadingEdgeList;
    imgproc_Subdiv2D_getLeadingEdgeList_10(nativeObj, edgesMat.nativeObj);
}
//
// C++: int64 cv::dnn::Net::getFLOPS(int layerId, MatShape netInputShape)
//
// Queries the native layer for the FLOPS estimate of a single layer given the
// supplied input shape.
public long getFLOPS(int layerId, MatOfInt netInputShape)
{
    ThrowIfDisposed();
    if (netInputShape != null)
    {
        netInputShape.ThrowIfDisposed();
    }
    Mat shapeMat = netInputShape;
    return dnn_Net_getFLOPS_11(nativeObj, layerId, shapeMat.nativeObj);
}
//
// C++: bool cv::imwrite(String filename, Mat img, vector_int _params = std::vector<int>())
//
/**
 * Saves an image to a specified file.
 *
 * The function imwrite saves the image to the specified file. The image format is chosen based on the
 * filename extension (see cv::imread for the list of extensions). In general, only 8-bit
 * single-channel or 3-channel (with 'BGR' channel order) images can be saved using this function,
 * with these exceptions:
 *
 * <ul>
 * <li>
 * 16-bit unsigned (CV_16U) images can be saved in the case of PNG, JPEG 2000, and TIFF formats
 * </li>
 * <li>
 * 32-bit float (CV_32F) images can be saved in PFM, TIFF, OpenEXR, and Radiance HDR formats;
 * 3-channel (CV_32FC3) TIFF images will be saved using the LogLuv high dynamic range encoding
 * (4 bytes per pixel)
 * </li>
 * <li>
 * PNG images with an alpha channel can be saved using this function. To do this, create
 * 8-bit (or 16-bit) 4-channel image BGRA, where the alpha channel goes last. Fully transparent pixels
 * should have alpha set to 0, fully opaque pixels should have alpha set to 255/65535 (see the code sample below).
 * </li>
 * <li>
 * Multiple images (vector of Mat) can be saved in TIFF format (see the code sample below).
 * </li>
 * </ul>
 *
 * If the format, depth or channel order is different, use
 * Mat::convertTo and cv::cvtColor to convert it before saving. Or, use the universal FileStorage I/O
 * functions to save the image to XML or YAML format.
 *
 * The sample below shows how to create a BGRA image, how to set custom compression parameters and save it to a PNG file.
 * It also demonstrates how to save multiple images in a TIFF file:
 * INCLUDE: snippets/imgcodecs_imwrite.cpp
 * param filename Name of the file.
 * param img (Mat or vector of Mat) Image or Images to be saved.
 * param _params automatically generated
 * return automatically generated
 */
public static bool imwrite(string filename, Mat img, MatOfInt _params)
{
    if (img != null)
    {
        img.ThrowIfDisposed();
    }
    if (_params != null)
    {
        _params.ThrowIfDisposed();
    }
    Mat paramsMat = _params;
    return imgcodecs_Imgcodecs_imwrite_10(filename, img.nativeObj, paramsMat.nativeObj);
}
//
// C++: static Ptr_FREAK cv::xfeatures2d::FREAK::create(bool orientationNormalized = true, bool scaleNormalized = true, float patternScale = 22.0f, int nOctaves = 4, vector_int selectedPairs = std::vector<int>())
//
// javadoc: FREAK::create(orientationNormalized, scaleNormalized, patternScale, nOctaves, selectedPairs)
// Platform-guarded FREAK factory; returns null on unsupported build targets.
public static FREAK create(bool orientationNormalized, bool scaleNormalized, float patternScale, int nOctaves, MatOfInt selectedPairs)
{
    if (selectedPairs != null)
    {
        selectedPairs.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat pairsMat = selectedPairs;
    return FREAK.__fromPtr__(xfeatures2d_FREAK_create_10(orientationNormalized, scaleNormalized, patternScale, nOctaves, pairsMat.nativeObj));
#else
    return null;
#endif
}
/**
 * The BRISK constructor for a custom pattern, detection threshold and octaves
 *
 * param thresh AGAST detection threshold score.
 * param octaves detection octaves. Use 0 to do single scale.
 * param radiusList defines the radii (in pixels) where the samples around a keypoint are taken (for
 * keypoint scale 1).
 * param numberList defines the number of sampling points on the sampling circle. Must be the same
 * size as radiusList.
 * param dMax threshold for the short pairings used for descriptor formation (in pixels for keypoint
 * scale 1).
 * return automatically generated
 */
public static BRISK create(int thresh, int octaves, MatOfFloat radiusList, MatOfInt numberList, float dMax)
{
    if (radiusList != null)
    {
        radiusList.ThrowIfDisposed();
    }
    if (numberList != null)
    {
        numberList.ThrowIfDisposed();
    }
    Mat radiiMat = radiusList;
    Mat countsMat = numberList;
    return BRISK.__fromPtr__(features2d_BRISK_create_12(thresh, octaves, radiiMat.nativeObj, countsMat.nativeObj, dMax));
}
/**
 * The BRISK constructor for a custom pattern
 *
 * param radiusList defines the radii (in pixels) where the samples around a keypoint are taken (for
 * keypoint scale 1).
 * param numberList defines the number of sampling points on the sampling circle. Must be the same
 * size as radiusList.
 * return automatically generated
 */
public static BRISK create(MatOfFloat radiusList, MatOfInt numberList)
{
    if (radiusList != null)
    {
        radiusList.ThrowIfDisposed();
    }
    if (numberList != null)
    {
        numberList.ThrowIfDisposed();
    }
    Mat radiiMat = radiusList;
    Mat countsMat = numberList;
    return BRISK.__fromPtr__(features2d_BRISK_create_111(radiiMat.nativeObj, countsMat.nativeObj));
}
// Groups similar rectangles in-place via the native call; `weights` receives
// the cluster sizes (filled by the native layer).
public static void groupRectangles(MatOfRect rectList, MatOfInt weights, int groupThreshold)
{
    if (rectList != null)
    {
        rectList.ThrowIfDisposed();
    }
    if (weights != null)
    {
        weights.ThrowIfDisposed();
    }
    Mat rectsMat = rectList;
    Mat weightsMat = weights;
    objdetect_Objdetect_groupRectangles_11(rectsMat.nativeObj, weightsMat.nativeObj, groupThreshold);
}
//
// C++: static Ptr_PCTSignatures cv::xfeatures2d::PCTSignatures::create(vector_Point2f initSamplingPoints, vector_int initClusterSeedIndexes)
//
/**
 * Creates PCTSignatures algorithm using pre-generated sampling points
 * and clusterization seeds indexes.
 * param initSamplingPoints Sampling points used in image sampling.
 * param initClusterSeedIndexes Indexes of initial clusterization seeds.
 * Its size must be lower or equal to initSamplingPoints.size().
 * return Created algorithm.
 */
public static PCTSignatures create(MatOfPoint2f initSamplingPoints, MatOfInt initClusterSeedIndexes)
{
    if (initSamplingPoints != null)
    {
        initSamplingPoints.ThrowIfDisposed();
    }
    if (initClusterSeedIndexes != null)
    {
        initClusterSeedIndexes.ThrowIfDisposed();
    }
    Mat pointsMat = initSamplingPoints;
    Mat seedsMat = initClusterSeedIndexes;
    return PCTSignatures.__fromPtr__(xfeatures2d_PCTSignatures_create_15(pointsMat.nativeObj, seedsMat.nativeObj));
}
//
// C++: void cv::xfeatures2d::PCTSignatures::setInitSeedIndexes(vector_int initSeedIndexes)
//
// javadoc: PCTSignatures::setInitSeedIndexes(initSeedIndexes)
// Platform-guarded setter for the initial k-means seed indexes; a no-op on
// unsupported build targets.
public void setInitSeedIndexes(MatOfInt initSeedIndexes)
{
    ThrowIfDisposed();
    if (initSeedIndexes != null)
    {
        initSeedIndexes.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat seedsMat = initSeedIndexes;
    xfeatures2d_PCTSignatures_setInitSeedIndexes_10(nativeObj, seedsMat.nativeObj);
#else
    return;
#endif
}
//
// C++: void cv::Subdiv2D::getLeadingEdgeList(vector_int& leadingEdgeList)
//
// javadoc: Subdiv2D::getLeadingEdgeList(leadingEdgeList)
// Platform-guarded variant: fills `leadingEdgeList` through the native call,
// or does nothing on unsupported build targets.
public void getLeadingEdgeList(MatOfInt leadingEdgeList)
{
    ThrowIfDisposed();
    if (leadingEdgeList != null)
    {
        leadingEdgeList.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat edgesMat = leadingEdgeList;
    imgproc_Subdiv2D_getLeadingEdgeList_10(nativeObj, edgesMat.nativeObj);
#else
    return;
#endif
}
//
// C++: int64 cv::dnn::Net::getFLOPS(int layerId, MatShape netInputShape)
//
// javadoc: Net::getFLOPS(layerId, netInputShape)
// Platform-guarded FLOPS query; returns -1 on unsupported build targets.
public long getFLOPS(int layerId, MatOfInt netInputShape)
{
    ThrowIfDisposed();
    if (netInputShape != null)
    {
        netInputShape.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat shapeMat = netInputShape;
    return dnn_Net_getFLOPS_11(nativeObj, layerId, shapeMat.nativeObj);
#else
    return -1;
#endif
}
//
// C++: bool cv::imwrite(String filename, Mat img, vector_int _params = std::vector<int>())
//
// javadoc: imwrite(filename, img, _params)
// Platform-guarded image save; returns false on unsupported build targets.
public static bool imwrite(string filename, Mat img, MatOfInt _params)
{
    if (img != null)
    {
        img.ThrowIfDisposed();
    }
    if (_params != null)
    {
        _params.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat paramsMat = _params;
    return imgcodecs_Imgcodecs_imwrite_10(filename, img.nativeObj, paramsMat.nativeObj);
#else
    return false;
#endif
}
//
// C++: void cv::Subdiv2D::getVoronoiFacetList(vector_int idx, vector_vector_Point2f& facetList, vector_Point2f& facetCenters)
//
/**
 * Returns a list of all Voronoi facets.
 *
 * param idx Vector of vertices IDs to consider. For all vertices you can pass empty vector.
 * param facetList Output vector of the Voronoi facets.
 * param facetCenters Output vector of the Voronoi facets center points.
 */
public void getVoronoiFacetList(MatOfInt idx, List <MatOfPoint2f> facetList, MatOfPoint2f facetCenters)
{
    ThrowIfDisposed();
    if (idx != null)
    {
        idx.ThrowIfDisposed();
    }
    if (facetCenters != null)
    {
        facetCenters.ThrowIfDisposed();
    }
    Mat idx_mat = idx;
    Mat facetCenters_mat = facetCenters;
    // Temporary Mat receives the nested facet data from the native layer; it is
    // released in a finally block so the native allocation is not leaked if the
    // native call or the converter throws.
    Mat facetList_mat = new Mat();
    try
    {
        imgproc_Subdiv2D_getVoronoiFacetList_10(nativeObj, idx_mat.nativeObj, facetList_mat.nativeObj, facetCenters_mat.nativeObj);
        Converters.Mat_to_vector_vector_Point2f(facetList_mat, facetList);
    }
    finally
    {
        facetList_mat.release();
    }
}
//
// C++: bool cv::imencode(String ext, Mat img, vector_uchar& buf, vector_int _params = std::vector<int>())
//
/**
 * Encodes an image into a memory buffer.
 *
 * The function imencode compresses the image and stores it in the memory buffer that is resized to fit the
 * result. See cv::imwrite for the list of supported formats and flags description.
 *
 * param ext File extension that defines the output format.
 * param img Image to be written.
 * param buf Output buffer resized to fit the compressed image.
 * param _params automatically generated
 * return automatically generated
 */
public static bool imencode(string ext, Mat img, MatOfByte buf, MatOfInt _params)
{
    if (img != null)
    {
        img.ThrowIfDisposed();
    }
    if (buf != null)
    {
        buf.ThrowIfDisposed();
    }
    if (_params != null)
    {
        _params.ThrowIfDisposed();
    }
    Mat bufMat = buf;
    Mat paramsMat = _params;
    return imgcodecs_Imgcodecs_imencode_10(ext, img.nativeObj, bufMat.nativeObj, paramsMat.nativeObj);
}
// javadoc: BRISK::create(radiusList, numberList)
// Platform-guarded two-argument BRISK factory; returns null on unsupported
// build targets.
public static BRISK create(MatOfFloat radiusList, MatOfInt numberList)
{
    if (radiusList != null)
    {
        radiusList.ThrowIfDisposed();
    }
    if (numberList != null)
    {
        numberList.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat radiiMat = radiusList;
    Mat countsMat = numberList;
    return BRISK.__fromPtr__(features2d_BRISK_create_111(radiiMat.nativeObj, countsMat.nativeObj));
#else
    return null;
#endif
}
//
// C++: static Ptr_PCTSignatures cv::xfeatures2d::PCTSignatures::create(vector_Point2f initSamplingPoints, vector_int initClusterSeedIndexes)
//
// javadoc: PCTSignatures::create(initSamplingPoints, initClusterSeedIndexes)
// Platform-guarded PCTSignatures factory; returns null on unsupported build
// targets.
public static PCTSignatures create(MatOfPoint2f initSamplingPoints, MatOfInt initClusterSeedIndexes)
{
    if (initSamplingPoints != null)
    {
        initSamplingPoints.ThrowIfDisposed();
    }
    if (initClusterSeedIndexes != null)
    {
        initClusterSeedIndexes.ThrowIfDisposed();
    }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
    Mat pointsMat = initSamplingPoints;
    Mat seedsMat = initClusterSeedIndexes;
    return PCTSignatures.__fromPtr__(xfeatures2d_PCTSignatures_create_15(pointsMat.nativeObj, seedsMat.nativeObj));
#else
    return null;
#endif
}