/// <summary>Copy constructor: builds a new native DoubleImage from an existing one
/// (ownership flag true — this wrapper releases the native object, per SWIG convention).</summary>
public DoubleImage(DoubleImage image)
    : this(VisionLabPINVOKE.new_DoubleImage__SWIG_3(DoubleImage.getCPtr(image)), true)
{
    // Surface any exception raised on the native side.
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Grabs a snapshot from this camera into <paramref name="image"/> via the native overload.</summary>
public virtual void SnapShot(DoubleImage image)
{
    VisionLabPINVOKE.Camera_Double_SnapShot__SWIG_1(swigCPtr, DoubleImage.getCPtr(image));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Validates the training bitmaps and eigen-vector images (each must match the
/// average image's size and share a common row step), then forwards the raw
/// buffers to the low-level eigen-object computation.
/// </summary>
/// <exception cref="EigenObjectException">Thrown on any size or step mismatch.</exception>
public static void CalcEigenObjects(Bitmap[] input, int maxIteration, double eps, DoubleImage[] eigVecs, double[] eigVals, DoubleImage avg)
{
    if (input.Length == 0)
    {
        return;
    }

    int objectCount = input.Length;
    int eigenCount = objectCount - 1; // one fewer eigen vector than training objects
    byte[][] objectData = new byte[objectCount][];
    double[][] eigenData = new double[eigenCount][];

    int objectStep = 0;
    int previousObjectStep = 0;
    Size objectSize = avg.Size;
    Size previousObjectSize = avg.Size;

    // Copy each bitmap's grayscale pixels, enforcing a uniform size and step.
    for (int index = 0; index < objectCount; index++)
    {
        Bitmap bitmap = input[index];
        objectData[index] = bitmap.CopyGrayscaleBitmapToByteArray(out objectStep);
        objectSize = bitmap.Size;

        if (objectSize != avg.Size || objectSize != previousObjectSize)
        {
            throw new EigenObjectException("Different sizes of objects");
        }
        if (index > 0 && objectStep != previousObjectStep)
        {
            throw new EigenObjectException("Different steps of objects");
        }

        previousObjectStep = objectStep;
        previousObjectSize = objectSize;
    }

    int eigenStep = 0;
    int previousEigenStep = 0;
    Size previousEigenSize = avg.Size;

    // Validate the output eigen images the same way and collect their buffers.
    for (int index = 0; index < eigenCount; index++)
    {
        DoubleImage eigen = eigVecs[index];
        eigenStep = eigen.Step;
        eigenData[index] = eigen.Data;

        if (eigen.Size != avg.Size || eigen.Size != previousEigenSize)
        {
            throw new EigenObjectException("Different sizes of objects");
        }
        if (index > 0 && eigenStep != previousEigenStep)
        {
            throw new EigenObjectException("Different steps of objects");
        }

        previousEigenStep = eigen.Step;
        previousEigenSize = eigen.Size;
    }

    CalcEigenObjects(objectCount, objectData, objectStep, eigenData, eigenStep, objectSize, maxIteration, eps, avg.Data, avg.Step, eigVals);
}
/// <summary>
/// Projects <paramref name="obj"/> onto the given eigen images and returns the
/// decomposition coefficients (one per eigen object).
/// </summary>
/// <param name="obj">Grayscale input bitmap; must match the average image's size.</param>
/// <param name="eigInput">Eigen images; each must match the average image's size and step.</param>
/// <param name="avg">Average image of the training set.</param>
/// <exception cref="EigenObjectException">Thrown on any size or step mismatch.</exception>
public static double[] EigenDecomposite(Bitmap obj, DoubleImage[] eigInput, DoubleImage avg)
{
    int nEigObjs = eigInput.Length;
    double[] coeffs = new double[nEigObjs];

    int obj_step;
    byte[] obj_data = obj.CopyGrayscaleBitmapToByteArray(out obj_step);
    Size obj_size = obj.Size;

    if (obj_size != avg.Size)
    {
        throw new EigenObjectException("Different sizes of objects");
    }

    double[][] eigs = new double[nEigObjs][];
    int eig_step = 0, old_step = 0;
    Size old_size = avg.Size;

    for (int i = 0; i < nEigObjs; i++)
    {
        DoubleImage eig = eigInput[i];
        eig_step = eig.Step;
        eigs[i] = eig.Data;

        // BUG FIX: the original compared a stale local ('eig_size') that was never
        // reassigned inside the loop, so the current eigen image's size was never
        // checked directly — mismatches surfaced one iteration late and the last
        // image was never validated. Compare eig.Size itself, as CalcEigenObjects does.
        if (eig.Size != avg.Size || eig.Size != old_size)
        {
            throw new EigenObjectException("Different sizes of objects");
        }
        if (i > 0 && eig_step != old_step)
        {
            throw new EigenObjectException("Different steps of objects");
        }

        old_step = eig.Step;
        old_size = eig.Size;
    }

    EigenDecomposite(obj_data, obj_step, nEigObjs, eigs, eig_step, avg.Data, avg.Step, obj_size, coeffs);
    return coeffs;
}
/// <summary>Fetches the named image into <paramref name="image"/>; returns the native success flag.</summary>
public bool GetImage(string imageName, DoubleImage image)
{
    bool result = VisionLabPINVOKE.VisLibCmdInt_GetImage__SWIG_6(swigCPtr, imageName, DoubleImage.getCPtr(image));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Classifies <paramref name="image"/>, filling <paramref name="outputTab"/> with per-class outputs; returns the native score.</summary>
public double ClassifyOutputTab(DoubleImage image, vector_ClassOutput outputTab)
{
    double result = VisionLabPINVOKE.BPN_ImageClassifier_Double_ClassifyOutputTab(swigCPtr, DoubleImage.getCPtr(image), vector_ClassOutput.getCPtr(outputTab));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Evaluates <paramref name="image"/> against expected class <paramref name="classExp"/>;
/// the resulting class, confidence and raw outputs are written through the ref/out containers.</summary>
public double EvaluateImage(DoubleImage image, int classExp, ref int classRes, ref double confidency, vector_double output)
{
    double result = VisionLabPINVOKE.BPN_ImageClassifier_Double_EvaluateImage(swigCPtr, DoubleImage.getCPtr(image), classExp, ref classRes, ref confidency, vector_double.getCPtr(output));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Runs one training step on <paramref name="image"/> for class <paramref name="classNr"/>; returns the native training score.</summary>
public double TrainImage(double learnRate, double momentum, DoubleImage image, int classNr)
{
    double result = VisionLabPINVOKE.BPN_ImageClassifier_Double_TrainImage(swigCPtr, learnRate, momentum, DoubleImage.getCPtr(image), classNr);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Adds <paramref name="image"/> to the class named <paramref name="className"/>; returns the native index/id.</summary>
public int AddImage(string className, DoubleImage image)
{
    int result = VisionLabPINVOKE.ClassImageSet_Double_AddImage(swigCPtr, className, DoubleImage.getCPtr(image));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Returns the image at <paramref name="imageIndex"/> of class <paramref name="classId"/>
/// (ownership flag false — the native side keeps the object, per SWIG convention).</summary>
public DoubleImage GetImage(int classId, int imageIndex)
{
    DoubleImage result = new DoubleImage(VisionLabPINVOKE.ClassImageSet_Double_GetImage__SWIG_1(swigCPtr, classId, imageIndex), false);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Classifies <paramref name="image"/>; writes the confidence through <paramref name="confidency"/> and returns the class number.</summary>
public int Classify(DoubleImage image, ref double confidency)
{
    int result = VisionLabPINVOKE.BPN_ImageClassifier_Double_Classify(swigCPtr, DoubleImage.getCPtr(image), ref confidency);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Extract-button handler: runs KIM watermark extraction and reports
/// the outcome in the (Russian, user-facing) status field.</summary>
private void btn_exctract_Click(object sender, EventArgs e)
{
    try
    {
        watermark_Extract = algorithm.KIMextract(initial_Extract, changed_Extract, sizeWatermark, sizeWatermark);
        txt_status_Extract.Text = "ЦВЗ был успешно извлечен. Для просмотра его необходимо сохранить.";
        btn_save_Extract.Enabled = true;
    }
    catch (Exception ex)
    {
        // Show the failure reason directly in the status field.
        txt_status_Extract.Text = ex.Message;
    }
}
/// <summary>
/// Calculates eigen objects: validates that every training bitmap and every eigen
/// image matches the average image's size and shares a common row step, then
/// forwards the raw pixel buffers to the low-level computation routine.
/// </summary>
public static void CalcEigenObjects(Bitmap[] input, int maxIteration, double eps, DoubleImage[] eigVecs, double[] eigVals, DoubleImage avg) {
    if (input.Length == 0) return;
    int nObjects = input.Length;
    int nEigens = nObjects - 1; // one fewer eigen vector than training objects
    byte[][] objs = new byte[nObjects][];
    double[][] eigs = new double[nEigens][];
    int obj_step = 0, old_step = 0;
    int eig_step = 0, oldeig_step = 0;
    Size obj_size = avg.Size, old_size = avg.Size, oldeig_size = avg.Size;
    // Copy each bitmap's grayscale pixels; all must have the same size and step.
    for (var i = 0; i < nObjects; i++) {
        Bitmap obj = input[i];
        objs[i] = obj.CopyGrayscaleBitmapToByteArray(out obj_step);
        obj_size = obj.Size;
        if (obj_size != avg.Size || obj_size != old_size) throw new EigenObjectException("Different sizes of objects");
        if (i > 0 && obj_step != old_step) throw new EigenObjectException("Different steps of objects");
        old_step = obj_step;
        old_size = obj_size;
    }
    // Validate the output eigen images the same way and collect their buffers.
    for (var i = 0; i < nEigens; i++) {
        DoubleImage eig = eigVecs[i];
        eig_step = eig.Step;
        eigs[i] = eig.Data;
        if (eig.Size != avg.Size || eig.Size != oldeig_size) throw new EigenObjectException("Different sizes of objects");
        if (i > 0 && eig_step != oldeig_step) throw new EigenObjectException("Different steps of objects");
        oldeig_step = eig.Step;
        oldeig_size = eig.Size;
    }
    CalcEigenObjects(nObjects, objs, obj_step, eigs, eig_step, obj_size, maxIteration, eps, avg.Data, avg.Step, eigVals);
}
/// <summary>Embed-button handler: runs KIM watermark embedding, times it, and
/// reports the elapsed seconds in the (Russian, user-facing) status field.</summary>
private void btn_Embed_Click(object sender, EventArgs e)
{
    try
    {
        Stopwatch timer = Stopwatch.StartNew();
        changed_Embed = algorithm.KIMembed(initial_Embed, watermark_Embed);
        timer.Stop();

        TimeSpan secs = timer.Elapsed;
        string time = string.Format("{0} секунд", secs.TotalSeconds);
        txt_Status_Embed.Text = "ЦВЗ был успешно встроен. На это потребовалось " + time + ". Сохраните полученное изображение для его просмотра.";
        btn_Save_Embed.Enabled = true;
    }
    catch (Exception ex)
    {
        // Show the failure reason directly in the status field.
        txt_Status_Embed.Text = ex.Message;
    }
}
/// <summary>
/// Calculates the eigen images for the given training bitmaps: allocates
/// <paramref name="eigenImages"/> and <paramref name="avg"/> sized like the first
/// training image and runs the eigen-object computation.
/// </summary>
/// <exception cref="ArgumentException">No training images were supplied.</exception>
public static void CalcEigenObjects(Bitmap[] trainingImages, int maxIter, double eps, out DoubleImage[] eigenImages, out DoubleImage avg)
{
    // Robustness fix: fail with a clear message instead of an
    // IndexOutOfRangeException when the training set is empty or null.
    if (trainingImages == null || trainingImages.Length == 0)
        throw new ArgumentException("At least one training image is required.", nameof(trainingImages));

    int width = trainingImages[0].Width;
    int height = trainingImages[0].Height;

    // Clamp the iteration count into (0, trainingImages.Length].
    if (maxIter <= 0 || maxIter > trainingImages.Length)
        maxIter = trainingImages.Length;
    int maxEigenObjs = maxIter;

    eigenImages = new DoubleImage[maxEigenObjs];
    for (int i = 0; i < eigenImages.Length; i++)
        eigenImages[i] = new DoubleImage(width, height);
    avg = new DoubleImage(width, height);

    // Eigen values are not needed by this caller, so null is passed.
    EigenObjects.CalcEigenObjects(trainingImages, maxIter, eps, eigenImages, null, avg);
}
/// <summary>Loads the watermarked image to extract from; enables the extract button on success.</summary>
private void btn_loadChanged_Extract_Click(object sender, EventArgs e)
{
    // FIX: OpenFileDialog is IDisposable — dispose it deterministically
    // instead of leaking the dialog's native resources.
    using (OpenFileDialog openFileDialog1 = new OpenFileDialog())
    {
        openFileDialog1.Filter = "jpg files (*.jpg)|*.jpg|png files (*.png)|*.png|All files (*.*)|*.*";
        if (openFileDialog1.ShowDialog() == DialogResult.Cancel)
        {
            return;
        }
        string filename = openFileDialog1.FileName;
        using (Bitmap imageBitmap = new Bitmap(filename))
        {
            changed_Extract = new DoubleImage(imageBitmap);
        }
    }
    btn_exctract.Enabled = true;
    txt_status_Extract.Text = "Изображение, содержащее ЦВЗ, загружено. Теперь вы можете извлечь ЦВЗ.";
}
/// <summary>Loads the watermark image to embed; enables the embed button on success.</summary>
private void btn_LoadWM_Embed_Click(object sender, EventArgs e)
{
    // FIX: OpenFileDialog is IDisposable — dispose it deterministically
    // instead of leaking the dialog's native resources.
    using (OpenFileDialog openFileDialog1 = new OpenFileDialog())
    {
        openFileDialog1.Filter = "jpg files (*.jpg)|*.jpg|png files (*.png)|*.png|All files (*.*)|*.*";
        if (openFileDialog1.ShowDialog() == DialogResult.Cancel)
        {
            return;
        }
        string filename = openFileDialog1.FileName;
        using (Bitmap imageBitmap = new Bitmap(filename))
        {
            // NOTE(review): assumes DoubleImage copies the bitmap's pixels, since
            // the bitmap is disposed right after — confirm against DoubleImage's ctor.
            DoubleImage dImage = new DoubleImage(imageBitmap);
            watermark_Embed = new Watermark(dImage);
        }
    }
    btn_Embed.Enabled = true;
    txt_Status_Embed.Text = "ЦВЗ загружен.";
}
/// <summary>Loads the cover image for embedding; enables the watermark-related buttons on success.</summary>
private void btn_LoadInit_Embed_Click(object sender, EventArgs e)
{
    // FIX: OpenFileDialog is IDisposable — dispose it deterministically
    // instead of leaking the dialog's native resources.
    using (OpenFileDialog openFileDialog1 = new OpenFileDialog())
    {
        openFileDialog1.Filter = "jpg files (*.jpg)|*.jpg|png files (*.png)|*.png|All files (*.*)|*.*";
        if (openFileDialog1.ShowDialog() == DialogResult.Cancel)
        {
            return;
        }
        string filename = openFileDialog1.FileName;
        using (Bitmap imageBitmap = new Bitmap(filename))
        {
            initial_Embed = new DoubleImage(imageBitmap);
        }
    }
    btn_LoadWM_Embed.Enabled = true;
    btn_KnowSizeWM_Embed.Enabled = true;
    btn_OptimalSizeWM_Embed.Enabled = true;
    txt_Status_Embed.Text = "Изображение загружено. Вы можете проверить максимальный и оптимальный размеры ЦВЗ.";
}
/// <summary>
/// Calculates the eigen images for the given training bitmaps: allocates the
/// eigen images and average image using the first training image's dimensions,
/// then delegates to <c>EigenObjects.CalcEigenObjects</c>.
/// </summary>
/// <exception cref="ArgumentException">No training images were supplied.</exception>
public static void CalcEigenObjects(Bitmap[] trainingImages, int maxIter, double eps, out DoubleImage[] eigenImages, out DoubleImage avg)
{
    // Robustness fix: reject an empty/null training set explicitly rather than
    // failing later with an IndexOutOfRangeException on trainingImages[0].
    if (trainingImages == null || trainingImages.Length == 0)
    {
        throw new ArgumentException("At least one training image is required.", nameof(trainingImages));
    }

    int width = trainingImages[0].Width;
    int height = trainingImages[0].Height;

    // Clamp the iteration count into (0, trainingImages.Length].
    if (maxIter <= 0 || maxIter > trainingImages.Length)
    {
        maxIter = trainingImages.Length;
    }
    int maxEigenObjs = maxIter;

    eigenImages = new DoubleImage[maxEigenObjs];
    for (int i = 0; i < eigenImages.Length; i++)
    {
        eigenImages[i] = new DoubleImage(width, height);
    }
    avg = new DoubleImage(width, height);

    // Eigen values are not needed by this caller, so null is passed.
    EigenObjects.CalcEigenObjects(trainingImages, maxIter, eps, eigenImages, null, avg);
}
/// <summary>Runs the native fast Hough search for the best <paramref name="nrLines"/> lines in <paramref name="src"/>.</summary>
public static vector_HoughLine FindFastBestLines(DoubleImage src, HLParams p, double edgeMin, int nrLines, double minR, double minPhi, int minHits)
{
    vector_HoughLine result = new vector_HoughLine(VisionLabPINVOKE.FindFastBestLines__SWIG_6(DoubleImage.getCPtr(src), HLParams.getCPtr(p), edgeMin, nrLines, minR, minPhi, minHits), true);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Extracts the given HSV <paramref name="plane"/> of <paramref name="image"/> into <paramref name="chan"/>.</summary>
public static void Extract1Channel(HSV161616Image image, HSVColor plane, DoubleImage chan)
{
    VisionLabPINVOKE.Extract1Channel__SWIG_78(HSV161616Image.getCPtr(image), (int)plane, DoubleImage.getCPtr(chan));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Native interpolation of <paramref name="src"/> into <paramref name="dest"/> at pixels matching <paramref name="value"/>.</summary>
public static void InterpolateAtSpecificPixel(DoubleImage src, DoubleImage dest, double value)
{
    VisionLabPINVOKE.InterpolateAtSpecificPixel__SWIG_6(DoubleImage.getCPtr(src), DoubleImage.getCPtr(dest), value);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Applies the native Laplacian-of-Gaussian filter (scale <paramref name="sigma"/>) to <paramref name="image"/> in place.</summary>
public static void LoGFilter(DoubleImage image, double sigma)
{
    VisionLabPINVOKE.LoGFilter__SWIG_13(DoubleImage.getCPtr(image), sigma);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Draws a circle of radius <paramref name="r"/> at <paramref name="centre"/> with the given pixel value.</summary>
public static void CircleShape(DoubleImage image, XYCoord centre, int r, double value, ZeroOrOriginal zorg)
{
    VisionLabPINVOKE.CircleShape__SWIG_12(DoubleImage.getCPtr(image), XYCoord.getCPtr(centre), r, value, (int)zorg);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Runs the native de-interlace operation on <paramref name="image"/> in place.</summary>
public static void DeInterlace(DoubleImage image)
{
    VisionLabPINVOKE.DeInterlace__SWIG_6(DoubleImage.getCPtr(image));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Fast Hough circle transform over radii [<paramref name="minR"/>, <paramref name="maxR"/>] stepped by <paramref name="deltaR"/>; results go into <paramref name="destTab"/>.</summary>
public static void FastHoughCircleT(DoubleImage src, CircleBrightness brightness, double edgeMin, double minR, double maxR, double deltaR, vector_DoubleImage destTab)
{
    VisionLabPINVOKE.FastHoughCircleT__SWIG_6(DoubleImage.getCPtr(src), (int)brightness, edgeMin, minR, maxR, deltaR, vector_DoubleImage.getCPtr(destTab));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Copies <paramref name="src"/> into <paramref name="dest"/> with a border of the given widths filled with <paramref name="value"/>.</summary>
public static void AddBorder(DoubleImage src, DoubleImage dest, int top, int left, int right, int bottom, double value)
{
    VisionLabPINVOKE.AddBorder__SWIG_6(DoubleImage.getCPtr(src), DoubleImage.getCPtr(dest), top, left, right, bottom, value);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Classifies <paramref name="image"/>, filling <paramref name="outputTab"/> with per-class outputs; returns the native score.</summary>
public double ClassifyOutputTab(DoubleImage image, vector_ClassOutput outputTab)
{
    double score = VisionLabPINVOKE.BPN_ImageClassifier_Double_ClassifyOutputTab(swigCPtr, DoubleImage.getCPtr(image), vector_ClassOutput.getCPtr(outputTab));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return score;
}
/// <summary>Returns the native handle wrapped by <paramref name="obj"/>, or a null handle when <paramref name="obj"/> is null.</summary>
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(DoubleImage obj)
{
    if (obj == null)
    {
        return new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero);
    }
    return obj.swigCPtr;
}
/// <summary>Runs one training step on <paramref name="image"/> for class <paramref name="classNr"/>; returns the native training score.</summary>
public double TrainImage(double learnRate, double momentum, DoubleImage image, int classNr)
{
    double score = VisionLabPINVOKE.BPN_ImageClassifier_Double_TrainImage(swigCPtr, learnRate, momentum, DoubleImage.getCPtr(image), classNr);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return score;
}
/// <summary>Symmetric-nearest-neighbour median filter from <paramref name="src"/> to <paramref name="dest"/> with the given radius and edge handling.</summary>
public static void SNN_MedianFilter(DoubleImage src, DoubleImage dest, double radius, FixEdge edge)
{
    VisionLabPINVOKE.SNN_MedianFilter__SWIG_6(DoubleImage.getCPtr(src), DoubleImage.getCPtr(dest), radius, (int)edge);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Non-maximum suppression of <paramref name="src"/> into <paramref name="dest"/> guided by the <paramref name="direction"/> image scaled by <paramref name="dirScale"/>.</summary>
public static void NonMaximumSuppression(DoubleImage src, DoubleImage dest, DoubleImage direction, double dirScale)
{
    VisionLabPINVOKE.NonMaximumSuppression__SWIG_6(DoubleImage.getCPtr(src), DoubleImage.getCPtr(dest), DoubleImage.getCPtr(direction), dirScale);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Normalises the hue channel of <paramref name="src"/> around <paramref name="hue"/> into <paramref name="dest"/>;
/// pixels below the value/saturation minima receive <paramref name="notNormalised"/>.</summary>
public static void NormaliseHue(HSV888Image src, byte hue, byte minVal, byte minSat, DoubleImage dest, float notNormalised)
{
    VisionLabPINVOKE.NormaliseHue__SWIG_2(HSV888Image.getCPtr(src), hue, minVal, minSat, DoubleImage.getCPtr(dest), notNormalised);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Native Hough search for the single best circle with radius in [<paramref name="minR"/>, <paramref name="maxR"/>] stepped by <paramref name="deltaR"/>.</summary>
public static HoughCircle FindBestCircle(DoubleImage src, double minR, double maxR, double deltaR)
{
    HoughCircle result = new HoughCircle(VisionLabPINVOKE.FindBestCircle__SWIG_6(DoubleImage.getCPtr(src), minR, maxR, deltaR), true);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Normalises the hue channel of the 16-bit HSV <paramref name="src"/> around <paramref name="hue"/> into <paramref name="dest"/>.</summary>
public static void NormaliseHue(HSV161616Image src, short hue, short minVal, short minSat, DoubleImage dest)
{
    VisionLabPINVOKE.NormaliseHue__SWIG_5(HSV161616Image.getCPtr(src), hue, minVal, minSat, DoubleImage.getCPtr(dest));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Returns the native handle wrapped by <paramref name="obj"/>, or a null handle when <paramref name="obj"/> is null.</summary>
internal static HandleRef getCPtr(DoubleImage obj)
{
    if (obj == null)
    {
        return new HandleRef(null, IntPtr.Zero);
    }
    return obj.swigCPtr;
}
/// <summary>Draws a repeating block pattern (height x width, repeated repeatx/repeaty times) starting at <paramref name="leftTop"/> with the given value.</summary>
public static void BlockPattern(DoubleImage image, XYCoord leftTop, int height, int width, double value, int repeatx, int repeaty)
{
    VisionLabPINVOKE.BlockPattern__SWIG_13(DoubleImage.getCPtr(image), XYCoord.getCPtr(leftTop), height, width, value, repeatx, repeaty);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Decomposes the image into eigen coefficients using the given eigen vectors.</summary>
public static double[] EigenDecomposite(Bitmap src, DoubleImage[] eigenImages, DoubleImage avg)
{
    // Thin forwarder to the EigenObjects implementation.
    return EigenObjects.EigenDecomposite(src, eigenImages, avg);
}
/// <summary>Counts the pixels in <paramref name="image"/> equal to <paramref name="value"/> via the native routine.</summary>
public static int CountPixel(DoubleImage image, double value)
{
    int count = VisionLabPINVOKE.CountPixel__SWIG_6(DoubleImage.getCPtr(image), value);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return count;
}
/// <summary>Fast Hough line transform of <paramref name="src"/> into <paramref name="dest"/> using <paramref name="p"/> and edge threshold <paramref name="edgeMin"/>.</summary>
public static void FastHoughLineT(DoubleImage src, HLParams p, double edgeMin, DoubleImage dest)
{
    VisionLabPINVOKE.FastHoughLineT__SWIG_6(DoubleImage.getCPtr(src), HLParams.getCPtr(p), edgeMin, DoubleImage.getCPtr(dest));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Draws a filled disk of radius <paramref name="r"/> at <paramref name="centre"/> with the given pixel value.</summary>
public static void DiskShape(DoubleImage image, XYCoord centre, double r, double value)
{
    VisionLabPINVOKE.DiskShape__SWIG_13(DoubleImage.getCPtr(image), XYCoord.getCPtr(centre), r, value);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Native fast Hough search for the single best line in <paramref name="src"/>.</summary>
public static HoughLine FindFastBestLine(DoubleImage src, HLParams p, double edgeMin)
{
    HoughLine result = new HoughLine(VisionLabPINVOKE.FindFastBestLine__SWIG_6(DoubleImage.getCPtr(src), HLParams.getCPtr(p), edgeMin), true);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Native Hough search for the best <paramref name="nrCircles"/> circles with radius in [<paramref name="minR"/>, <paramref name="maxR"/>].</summary>
public static vector_HoughCircle FindBestCircles(DoubleImage src, double minR, double maxR, double deltaR, int nrCircles)
{
    vector_HoughCircle result = new vector_HoughCircle(VisionLabPINVOKE.FindBestCircles__SWIG_26(DoubleImage.getCPtr(src), minR, maxR, deltaR, nrCircles), true);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Local-minimum filter from <paramref name="src"/> to <paramref name="dest"/> using the given mask, edge handling and background value.</summary>
public static void LocalMinFilter(DoubleImage src, DoubleImage dest, double backGround, FixEdge edge, Mask_Int32 mask)
{
    VisionLabPINVOKE.LocalMinFilter__SWIG_6(DoubleImage.getCPtr(src), DoubleImage.getCPtr(dest), backGround, (int)edge, Mask_Int32.getCPtr(mask));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Variance filter from <paramref name="src"/> to <paramref name="dest"/> using the given mask and edge handling.</summary>
public static void VarianceFilter(DoubleImage src, DoubleImage dest, FixEdge edge, Mask_Int32 mask)
{
    VisionLabPINVOKE.VarianceFilter__SWIG_6(DoubleImage.getCPtr(src), DoubleImage.getCPtr(dest), (int)edge, Mask_Int32.getCPtr(mask));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Native fast Hough search for the best circles of the given brightness with radius in [<paramref name="minR"/>, <paramref name="maxR"/>].</summary>
public static vector_HoughCircle FindFastBestCircles(DoubleImage src, CircleBrightness brightness, double edgeMin, double minR, double maxR, double deltaR)
{
    vector_HoughCircle result = new vector_HoughCircle(VisionLabPINVOKE.FindFastBestCircles__SWIG_27(DoubleImage.getCPtr(src), (int)brightness, edgeMin, minR, maxR, deltaR), true);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>Classifies <paramref name="image"/>; writes the confidence through <paramref name="confidency"/> and returns the class number.</summary>
public int Classify(DoubleImage image, ref double confidency)
{
    int classNumber = VisionLabPINVOKE.BPN_ImageClassifier_Double_Classify(swigCPtr, DoubleImage.getCPtr(image), ref confidency);
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return classNumber;
}
/// <summary>Marks zero crossings of <paramref name="src"/> in <paramref name="dest"/> via the native routine.</summary>
public static void ZeroCrossings(DoubleImage src, DoubleImage dest)
{
    VisionLabPINVOKE.ZeroCrossings__SWIG_13(DoubleImage.getCPtr(src), DoubleImage.getCPtr(dest));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Evaluates <paramref name="image"/> against expected class <paramref name="classExp"/>;
/// the resulting class, confidence and raw outputs are written through the ref/out containers.</summary>
public double EvaluateImage(DoubleImage image, int classExp, ref int classRes, ref double confidency, vector_double output)
{
    double score = VisionLabPINVOKE.BPN_ImageClassifier_Double_EvaluateImage(swigCPtr, DoubleImage.getCPtr(image), classExp, ref classRes, ref confidency, vector_double.getCPtr(output));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
    return score;
}
/// <summary>Hough circle transform over radii [<paramref name="minR"/>, <paramref name="maxR"/>] stepped by <paramref name="deltaR"/>; results go into <paramref name="destTab"/>.</summary>
public static void HoughCircleT(DoubleImage src, double minR, double maxR, double deltaR, vector_DoubleImage destTab)
{
    VisionLabPINVOKE.HoughCircleT__SWIG_6(DoubleImage.getCPtr(src), minR, maxR, deltaR, vector_DoubleImage.getCPtr(destTab));
    if (VisionLabPINVOKE.SWIGPendingException.Pending)
    {
        throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
    }
}