/// <summary>
/// Writes image data to a cvFileStorage file (XML or YAML).
/// </summary>
/// <param name="fileName">XML or YAML file to write to</param>
private static void SampleFileStorageWriteImage(string fileName)
{
    // cvWrite, cvWriteComment
    // Saves IplImage structure data to a file
    // (1) Load the image
    using (IplImage colorImg = new IplImage(Const.ImageLenna, LoadMode.Color))
    using (IplImage grayImg = new IplImage(colorImg.Size, BitDepth.U8, 1))
    {
        // (2) Set the ROI (top-left quadrant) and binarize within it
        colorImg.CvtColor(grayImg, ColorConversion.BgrToGray);
        CvRect roi = new CvRect(0, 0, colorImg.Width / 2, colorImg.Height / 2);
        grayImg.SetROI(roi);
        colorImg.SetROI(roi);
        grayImg.Threshold(grayImg, 90, 255, ThresholdType.Binary);
        // (3) Write out to the xml file (two separate streams)
        using (CvFileStorage fs = new CvFileStorage(fileName, null, FileStorageMode.Write))
        {
            fs.WriteComment("This is a comment line.", false);
            fs.Write("color_img", colorImg);
            fs.StartNextStream();
            fs.Write("gray_img", grayImg);
        }
        // (4) Open the written xml file
        //using (Process p = Process.Start(fileName)) {
        //    p.WaitForExit();
        //}
    }
}
/// <summary>
/// Detects contours in the car image, draws them onto the source image, and
/// shows the result in the page's image control as a base64 JPEG data URI.
/// </summary>
protected void Hough_Click(object sender, EventArgs e)
{
    using (IplImage src = Cv.LoadImage("./Images/Car.png", LoadMode.Color))
    using (IplImage gray = new IplImage(src.Size, BitDepth.U8, 1)) // was leaked in the original
    {
        // LoadMode.Color yields a 3-channel BGR image, so the conversion must be
        // BgrToGray; CV_BGRA2GRAY requires a 4-channel source and would fail.
        src.CvtColor(gray, ColorConversion.BgrToGray);

        using (IplImage bin = gray.Clone())          // was leaked
        using (CvMemStorage storage = new CvMemStorage()) // was leaked
        {
            // With ThresholdType.Otsu the threshold argument (0) is ignored.
            Cv.Threshold(gray, bin, 0, 255, ThresholdType.Binary | ThresholdType.Otsu);

            // Contour detection
            CvSeq<CvPoint> contours;
            Cv.FindContours(bin, storage, out contours, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple);

            // Draw the contours: external in red, holes in green
            Cv.DrawContours(src, contours, new CvScalar(0, 0, 255), new CvScalar(0, 255, 0), 3);
        }

        using (System.Drawing.Bitmap bmp = BitmapConverter.ToBitmap(src))
        using (MemoryStream ms = new MemoryStream())
        {
            bmp.Save(ms, ImageFormat.Jpeg);
            var base64Data = Convert.ToBase64String(ms.ToArray());
            // "image/jpeg" is the registered MIME type ("image/jpg" is non-standard).
            imgCtrl.Src = "data:image/jpeg;base64," + base64Data;
        }
    }
}
/// <summary>
/// Separates the image from the background: builds a mask that is white where
/// the pixel is NOT inside the configured background HSV range.
/// </summary>
/// <param name="source">Source image</param>
/// <param name="destinatation">Separation result (binary mask)</param>
private void separateBackground(IplImage source, IplImage destinatation)
{
    // Convert the source image to HSV.
    // NOTE(review): the conversion is RgbToHsv — confirm the source really is
    // RGB-ordered (images loaded by OpenCV are normally BGR).
    source.CvtColor(hsvImg, ColorConversion.RgbToHsv);
    // Split the image into separate H, S, V channels
    hsvImg.CvtPixToPlane(hImg, sImg, vImg, null);
    // If the hue range wraps around (consists of two segments)
    if (BackgroundRange.HMin > BackgroundRange.HMax)
    {
        hImg.InRangeS(CvScalar.RealScalar(BackgroundRange.HMin), CvScalar.RealScalar(HsvRange.MAX_H), tmpImg);
        hImg.InRangeS(CvScalar.RealScalar(HsvRange.MIN_H), CvScalar.RealScalar(BackgroundRange.HMax), hImg);
        // Union of the two hue segments
        Cv.Or(tmpImg, hImg, hImg);
    }
    // If the hue range is a single contiguous segment
    else
    {
        hImg.InRangeS(CvScalar.RealScalar(BackgroundRange.HMin), CvScalar.RealScalar(BackgroundRange.HMax), hImg);
    }
    // Bound the remaining color components (saturation and value)
    sImg.InRangeS(CvScalar.RealScalar(BackgroundRange.SMin), CvScalar.RealScalar(BackgroundRange.SMax), sImg);
    vImg.InRangeS(CvScalar.RealScalar(BackgroundRange.VMin), CvScalar.RealScalar(BackgroundRange.VMax), vImg);
    // Form the final result: AND of the three masks selects background pixels,
    // and the final NOT inverts so the foreground ends up white.
    Cv.And(hImg, sImg, destinatation);
    Cv.And(destinatation, vImg, destinatation);
    Cv.Not(destinatation, destinatation);
}
/// <summary>
/// Active-contour (snake) demo: an initial ellipse of control points is pulled
/// toward image features by one relaxation step per key press. Esc quits.
/// </summary>
public Snake()
{
    using (var src = new IplImage(FilePath.Image.Cake, LoadMode.GrayScale))
    using (var dst = new IplImage(src.Size, BitDepth.U8, 3))
    {
        var center = new CvPoint(src.Width / 2, src.Height / 2);
        const int pointCount = 100;

        // Initial contour: pointCount points on an ellipse spanning the image.
        var contour = new CvPoint[pointCount];
        for (int i = 0; i < pointCount; i++)
        {
            double angle = 2 * Math.PI * i / pointCount;
            contour[i].X = (int)(center.X * Math.Cos(angle) + center.X);
            contour[i].Y = (int)(center.Y * Math.Sin(angle) + center.Y);
        }

        Console.WriteLine("Press any key to snake\nEsc - quit");

        using (var window = new CvWindow())
        {
            for (;;)
            {
                // One snake relaxation step (alpha/beta/gamma weights, 15x15 search window).
                src.SnakeImage(contour, 0.45f, 0.35f, 0.2f, new CvSize(15, 15), new CvTermCriteria(1), true);
                src.CvtColor(dst, ColorConversion.GrayToRgb);

                // Draw the contour as a closed red polygon.
                for (int i = 0; i < pointCount; i++)
                {
                    dst.Line(contour[i], contour[(i + 1) % pointCount], new CvColor(255, 0, 0), 2);
                }

                window.Image = dst;
                if (CvWindow.WaitKey() == 27)
                {
                    break;
                }
            }
        }
    }
}
//yong's codes
/// <summary>
/// Builds a binary skin mask from a BGR image using fixed Cr/Cb thresholds.
/// </summary>
/// <param name="target">Source BGR image; no longer modified by this call.</param>
/// <returns>New single-channel mask: 255 where the pixel is in the skin range.</returns>
public static IplImage ConvertToBinaryIpl(IplImage target)
{
    IplImage retImg = new IplImage(target.Width, target.Height, BitDepth.U8, 1);
    // Convert on a private copy: the original code converted `target` in place,
    // silently corrupting the caller's image.
    using (IplImage ycrcb = target.Clone())
    {
        ycrcb.CvtColor(ycrcb, ColorConversion.BgrToCrCb);
        // Skin range: Y unrestricted (0-255), Cr 140-170, Cb 40-150.
        ycrcb.InRangeS(new CvScalar(0, 140, 40), new CvScalar(255, 170, 150), retImg);
    }
    return retImg;
}
// Grabs the current webcam frame and builds a binary skin mask from it.
void preProcessing()
{
    m_img = WebcamController.m_img;
    // Skin segmentation via the CrCb color map: pixels whose Cr/Cb values fall
    // inside the fixed skin range become white in the binary image.
    // NOTE(review): the conversion runs in place, so the shared
    // WebcamController frame is overwritten with YCrCb data — confirm intended.
    m_img.CvtColor(m_img, ColorConversion.BgrToCrCb);
    m_img.InRangeS(new CvScalar(0, 140, 40), new CvScalar(255, 170, 150), m_skinBinaryImg);
    m_retImg = m_skinBinaryImg;
}
/// <summary>
/// Returns an 8-bit grayscale copy of the whole image. Any active ROI is
/// temporarily cleared so the full frame is converted, then re-applied to the
/// source and mirrored onto the result.
/// </summary>
public static IplImage CvtToGray(this IplImage ipl)
{
    var result = new IplImage(ipl.Width, ipl.Height, BitDepth.U8, 1);
    var savedRoi = ipl.ROI;
    // Drop the ROI so CvtColor sees the entire image, not just the ROI window.
    ipl.ResetROI();
    ipl.CvtColor(result, ColorConversion.BgrToGray);
    // Restore the caller's ROI on the source and copy it onto the result.
    ipl.SetROI(savedRoi);
    result.SetROI(savedRoi);
    return result;
}
/// <summary>
/// Produces a binary mask from <paramref name="src"/>: grayscale conversion
/// (if needed), Gaussian smoothing, then a binary threshold at 90% of full scale.
/// </summary>
/// <param name="src">Input image; left unmodified.</param>
/// <returns>New single-channel binary image owned by the caller.</returns>
static IplImage GetThresholdImage(IplImage src)
{
    IplImage dst;
    if (src.ElemChannels != 1)
    {
        dst = new IplImage(src.Size, BitDepth.U8, 1);
        src.CvtColor(dst, ColorConversion.BgrToGray);
    }
    else
    {
        // The original smoothed/thresholded `src` in place and returned it,
        // mutating the caller's image; work on a copy instead.
        dst = src.Clone();
    }
    dst.Smooth(dst, SmoothType.Gaussian);
    dst.Threshold(dst, 255.0 * 0.9, 255.0, ThresholdType.Binary);
    return dst;
}
/// <summary>
/// Loads ocv02.jpg in color, builds an 8-bit grayscale copy of it, and shows
/// both images side by side until a key is pressed.
/// </summary>
private static void createAGrayScaleClone2()
{
    using (var source = new IplImage(@"..\..\images\ocv02.jpg", LoadMode.Color))
    using (var grayClone = new IplImage(source.Size, BitDepth.U8, 1))
    {
        source.CvtColor(grayClone, ColorConversion.BgrToGray);
        using (new CvWindow("src", image: source))
        using (new CvWindow("dst", image: grayClone))
        {
            Cv.WaitKey();
        }
    }
}
/// <summary>
/// Interactive brightness/contrast demo: converts the input to grayscale,
/// applies a LUT driven by two trackbars, and draws the resulting 64-bin
/// histogram. Blocks until a key is pressed.
/// </summary>
/// <param name="src_tmp">Source (color) image.</param>
/// <returns>Clone of the last rendered histogram image (also stored in DstHist).</returns>
public IplImage BuildHist(IplImage src_tmp)
{
    const int histSize = 64;
    float[] range0 = { 0, 256 };
    float[][] ranges = { range0 };
    // Allocate the working images and the histogram
    using (IplImage srcImg = new IplImage(src_tmp.Size, BitDepth.U8, 1))
    using (IplImage dstImg = new IplImage(src_tmp.Size, BitDepth.U8, 1))
    using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
    using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
    {
        src_tmp.CvtColor(srcImg, ColorConversion.BgrToGray);
        srcImg.Copy(dstImg);
        using (CvWindow windowImage = new CvWindow("변환된 이미지", WindowMode.AutoSize))
        using (CvWindow windowHist = new CvWindow("히스토그램", WindowMode.AutoSize))
        {
            // Handler invoked whenever either trackbar moves
            CvTrackbar ctBrightness = null;
            CvTrackbar ctContrast = null;
            CvTrackbarCallback callback = delegate(int pos)
            {
                // Trackbars run 0..200; shift to -100..100
                int brightness = ctBrightness.Pos - 100;
                int contrast = ctContrast.Pos - 100;
                // Apply the LUT
                byte[] lut = CalcLut(contrast, brightness);
                srcImg.LUT(dstImg, lut);
                // Draw the histogram
                CalcHist(dstImg, hist);
                DrawHist(histImg, hist, histSize);
                // Show in the windows (keep a clone as the return value)
                DstHist = histImg.Clone();
                windowImage.ShowImage(dstImg);
                windowHist.ShowImage(histImg);
                // Clear the scratch buffers for the next callback
                dstImg.Zero();
                histImg.Zero();
            };
            // Create the trackbars (brightness, contrast)
            ctBrightness = windowImage.CreateTrackbar("명도", 100, 200, callback);
            ctContrast = windowImage.CreateTrackbar("대조", 100, 200, callback);
            // Initial drawing
            callback(0);
            // Wait for key input
            Cv.WaitKey(0);
        }
        return(DstHist);
    }
}
/// <summary>
/// Finds the corners of the chessboard image previously set via SetImage.
/// </summary>
/// <param name="corners">The corners that were found</param>
/// <returns>Whether the found corners match the CornersPattern template</returns>
public bool FindCorners(out CvPoint2D32f[] corners)
{
    bool result;
    int numCorners;
    ChessboardFlag flags = ChessboardFlag.AdaptiveThresh | ChessboardFlag.NormalizeImage | ChessboardFlag.FilterQuads;
    CvTermCriteria criteria = new CvTermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, MaxIterations, Epsilon);
    // Find the corners
    result = chessBoard.FindChessboardCorners(CornersPattern, out corners, out numCorners, flags);
    // Refine the corner positions to sub-pixel accuracy (11x11 search window)
    chessBoard.CvtColor(grayChessBoard, ColorConversion.RgbToGray);
    grayChessBoard.FindCornerSubPix(corners, corners.Length, new CvSize(11, 11), new CvSize(-1, -1), criteria);
    return(result);
}
/// <summary>
/// Masks out non-skin pixels in <paramref name="target"/> (in place), applies
/// a median smooth, and returns a new grayscale copy of the result.
/// </summary>
/// <param name="target">BGR image; modified in place by the masking step.</param>
/// <returns>New single-channel grayscale image owned by the caller.</returns>
public static IplImage extractSkinAsIpl(IplImage target)
{
    // Build the skin mask from a copy and dispose the intermediates
    // (the original leaked both the clone and the mask).
    using (IplImage origin = target.Clone())
    using (IplImage maskImg = ConvertToBinaryIpl(origin))
    {
        maskImg.Not(maskImg);
        // Zero out the masked (non-skin) pixels.
        target.AndS(0, target, maskImg);
    }
    target.Smooth(target, SmoothType.Median);

    // Return a grayscale view of the skin-extracted image.
    IplImage gray = new IplImage(target.Size, BitDepth.U8, 1);
    target.CvtColor(gray, ColorConversion.BgrToGray);
    return gray;
}
/// <summary>
/// Grabs one frame from the default camera, runs Canny edge detection on its
/// grayscale version, and displays the result until a key is pressed.
/// </summary>
static void CannyCamera()
{
    using (var win = new CvWindow("Canny"))
    using (var cap = new CvCapture(0))
    {
        using (var frame = cap.QueryFrame())
        using (var edges = new IplImage(frame.Size, BitDepth.U8, 1))
        {
            frame.CvtColor(edges, ColorConversion.BgrToGray);
            // In-place Canny with equal low/high thresholds and a 3x3 aperture.
            edges.Canny(edges, 50.0, 50.0, ApertureSize.Size3);
            win.Image = edges;
        }
        CvWindow.WaitKey();
    }
}
/// <summary>
/// Interactive binary thresholding demo: a trackbar controls the threshold
/// applied to a smoothed grayscale Lenna, and the result is shown live.
/// </summary>
public Threshold()
{
    using (IplImage source = new IplImage(FilePath.Image.Lenna, LoadMode.Color))
    using (IplImage gray = new IplImage(source.Size, BitDepth.U8, 1))
    using (IplImage binary = new IplImage(source.Size, BitDepth.U8, 1))
    using (CvWindow window = new CvWindow("SampleThreshold"))
    {
        source.CvtColor(gray, ColorConversion.BgrToGray);
        gray.Smooth(gray, SmoothType.Gaussian, 5);

        const int initialThreshold = 90;

        // Re-threshold and refresh the display whenever the slider moves.
        CvTrackbarCallback onChange = delegate(int pos)
        {
            gray.Threshold(binary, pos, 255, ThresholdType.Binary);
            window.Image = binary;
        };
        window.CreateTrackbar("threshold", initialThreshold, 255, onChange);

        // Initial rendering before the user touches the slider.
        gray.Threshold(binary, initialThreshold, 255, ThresholdType.Binary);
        window.Image = binary;
        CvWindow.WaitKey();
    }
}
/// <summary>
/// Generates a rainbow strip of the given size: hue sweeps 0..180 from left to
/// right at full saturation/value, then the HSV buffer is converted to RGB in place.
/// </summary>
/// <param name="width">Strip width in pixels.</param>
/// <param name="height">Strip height in pixels.</param>
/// <returns>New 3-channel 8-bit image owned by the caller.</returns>
private static IplImage GenerateRainbowLine(int width, int height)
{
    IplImage strip = new IplImage(new CvSize(width, height), BitDepth.U8, 3);
    IntPtr data = strip.ImageData;
    for (int col = 0; col < strip.Width; col++)
    {
        // Hue grows linearly with the column (OpenCV 8-bit hue range is 0..180).
        byte hue = (byte)Math.Round(180.0 * (col + 1) / strip.Width);
        for (int row = 0; row < strip.Height; row++)
        {
            int offset = (strip.WidthStep * row) + (col * 3);
            Marshal.WriteByte(data, offset + 0, hue);
            Marshal.WriteByte(data, offset + 1, 255); // saturation
            Marshal.WriteByte(data, offset + 2, 255); // value
        }
    }
    strip.CvtColor(strip, ColorConversion.HsvToRgb);
    return strip;
}
/// <summary>
/// Builds the pre-processed images used later in the pipeline: gray
/// conversion, Gaussian smoothing, top-hat morphology, Otsu thresholding,
/// median smoothing and dilation. Results are stored in the timg/pimg fields.
/// </summary>
public void PreProcess()
{
    // Wide flat kernel (21x3) tuned to emphasize horizontal structures.
    IplConvKernel element = Cv.CreateStructuringElementEx(21, 3, 10, 2, ElementShape.Rect, null);
    timg = new IplImage(src.Size, BitDepth.U8, 1);
    IplImage temp = timg.Clone();
    IplImage dest = timg.Clone();
    try
    {
        src.CvtColor(timg, ColorConversion.RgbaToGray);
        pimg = timg.Clone();
        //Cv.Threshold(pimg, pimg, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
        Cv.Smooth(timg, timg, SmoothType.Gaussian);
        // Top-hat isolates bright detail smaller than the structuring element.
        Cv.MorphologyEx(timg, dest, temp, element, MorphologyOperation.TopHat, 1);
        Cv.Threshold(dest, timg, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
        Cv.Smooth(timg, dest, SmoothType.Median);
        Cv.Dilate(dest, dest, element, 2);
    }
    finally
    {
        // The original leaked the structuring element and leaked the scratch
        // images on exception; release everything unconditionally.
        element.Dispose();
        Cv.ReleaseImage(temp);
        Cv.ReleaseImage(dest);
    }
}
/// <summary>
/// Saves a color image and a binarized grayscale image (sharing one ROI) into
/// a single cvFileStorage file, each image in its own stream.
/// </summary>
/// <param name="fileName">Destination XML or YAML file.</param>
private static void SampleFileStorageWriteImage(string fileName)
{
    // cvWrite, cvWriteComment
    using (IplImage colorImg = new IplImage(FilePath.Image.Lenna, LoadMode.Color))
    using (IplImage grayImg = new IplImage(colorImg.Size, BitDepth.U8, 1))
    {
        colorImg.CvtColor(grayImg, ColorConversion.BgrToGray);

        // Restrict both images to the top-left quadrant before binarizing.
        var quadrant = new CvRect(0, 0, colorImg.Width / 2, colorImg.Height / 2);
        grayImg.SetROI(quadrant);
        colorImg.SetROI(quadrant);
        grayImg.Threshold(grayImg, 90, 255, ThresholdType.Binary);

        using (var fs = new CvFileStorage(fileName, null, FileStorageMode.Write))
        {
            fs.WriteComment("This is a comment line.", false);
            fs.Write("color_img", colorImg);
            fs.StartNextStream();
            fs.Write("gray_img", grayImg);
        }
    }
}
/// <summary>
/// Thresholds <paramref name="target"/> at g_thresh, extracts connected-
/// component contours, approximates them and draws them into the shared
/// g_gray buffer, which is returned (and reused across calls).
/// </summary>
public static IplImage testContours(IplImage target)
{
    // Lazily allocate the shared scratch buffers on first use.
    if (g_storage == null)
    {
        g_gray = new IplImage(target.Size, BitDepth.U8, 1);
        g_binary = new IplImage(target.Size, BitDepth.U8, 1);
        g_storage = new CvMemStorage(0);
    }
    else
    {
        g_storage.Clear();
    }

    target.CvtColor(g_gray, ColorConversion.BgrToGray);
    g_gray.Threshold(g_gray, g_thresh, 255, ThresholdType.Binary);
    g_gray.Copy(g_binary);

    CvSeq<CvPoint> found;
    g_gray.FindContours(g_storage, out found, CvContour.SizeOf, ContourRetrieval.CComp);
    g_gray.Zero();
    if (found != null)
    {
        // Simplify the polygons, then render: external edges white, holes gray.
        found.ApproxPoly(CvContour.SizeOf, g_storage, ApproxPolyMethod.DP, 3, true);
        g_gray.DrawContours(found, new CvScalar(255), new CvScalar(128), 100);
    }
    //g_gray.Dilate(g_gray, null, 2);
    //g_gray.Erode(g_gray, null, 2);
    return g_gray;
}
/// <summary>
/// Edge-detection comparison: runs Sobel, Laplace and Canny on a grayscale
/// Lenna, shows all four images, then saves each edge image as a PNG.
/// </summary>
public Edge()
{
    using (var src = new IplImage(FilePath.Image.Lenna, LoadMode.Color))
    using (var gray = new IplImage(src.Size, BitDepth.U8, 1))
    using (var temp = new IplImage(src.Size, BitDepth.S16, 1))
    using (var dstSobel = new IplImage(src.Size, BitDepth.U8, 1))
    using (var dstLaplace = new IplImage(src.Size, BitDepth.U8, 1))
    using (var dstCanny = new IplImage(src.Size, BitDepth.U8, 1))
    {
        src.CvtColor(gray, ColorConversion.BgrToGray);

        // Sobel: first-order x-derivative into the 16-bit buffer, then scaled to 8-bit.
        Cv.Sobel(gray, temp, 1, 0, ApertureSize.Size3);
        Cv.ConvertScaleAbs(temp, dstSobel);

        // Laplace: second-order derivative through the same 16-bit buffer.
        Cv.Laplace(gray, temp);
        Cv.ConvertScaleAbs(temp, dstLaplace);

        // Canny with hysteresis thresholds 50/200.
        Cv.Canny(gray, dstCanny, 50, 200, ApertureSize.Size3);

        using (new CvWindow("src", src))
        using (new CvWindow("sobel", dstSobel))
        using (new CvWindow("laplace", dstLaplace))
        using (new CvWindow("canny", dstCanny))
        {
            CvWindow.WaitKey();
        }

        dstSobel.SaveImage("sobel.png");
        dstLaplace.SaveImage("laplace.png");
        dstCanny.SaveImage("canny.png");
    }
}
/// <summary>
/// Binarizes <paramref name="image"/> with the given thresholds, converts the
/// result back to a 3-channel image and passes it to the plate analysis logic.
/// </summary>
/// <param name="image">Input frame (RGBA-ordered, per the conversion used).</param>
/// <param name="threshold1">Binary threshold cut-off.</param>
/// <param name="threshold2">Value written for pixels above the cut-off.</param>
/// <param name="method">Analysis method identifier forwarded to ImageAnalysis.</param>
/// <returns>The analysis result string.</returns>
private String ProcessThresholdForPlate(IplImage image, Int32 threshold1, Int32 threshold2, Int32 method)
{
    // `using` blocks replace the original try/finally + Cv.ReleaseImage chain;
    // disposal releases the same native images on every exit path.
    using (IplImage grayImage = new IplImage(image.Size, BitDepth.U8, 1))
    using (IplImage binImage = new IplImage(image.Size, BitDepth.U8, 1))
    using (IplImage rgbImage = new IplImage(image.Size, BitDepth.U8, 3))
    {
        image.CvtColor(grayImage, ColorConversion.RgbaToGray);
        Cv.Threshold(grayImage, binImage, threshold1, threshold2, ThresholdType.Binary);
        binImage.CvtColor(rgbImage, ColorConversion.GrayToRgb);
        return mainLogic.ImageAnalysis(rgbImage, method);
    }
}
//* MODE : RING *//
/// <summary>
/// Composites the (negated) object extracted from <paramref name="srcImg"/>
/// into the hole of the ring template image, resizing it to fit the hole.
/// </summary>
/// <param name="srcImg">Source image containing the object; its ROI is reset before returning.</param>
/// <returns>New grayscale ring image with the object composited in; owned by the caller.</returns>
private IplImage mode_Ring(IplImage srcImg)
{
    // 1. Create the ring template (grayscale copy of the embedded resource)
    IplImage temp = Properties.Resources.gr.ToIplImage();
    IplImage gr = new IplImage(temp.Size, srcImg.Depth, srcImg.NChannels);
    temp.CvtColor(gr, ColorConversion.BgrToGray);
    temp.Dispose(); // release the resource image
    // 2. Extract the object (Took3D.checkSize computes the bounding box fields)
    Took3D.checkSize(srcImg);
    int minX = Took3D.minX, minY = Took3D.minY;
    int maxX = Took3D.maxX - minX, maxY = Took3D.maxY - minY;
    srcImg.SetROI(new CvRect(minX, minY, maxX, maxY));
    IplImage src = new IplImage(maxX, maxY, srcImg.Depth, srcImg.NChannels);
    srcImg.Copy(src);
    // 3. Transform the image (negate it)
    Took3D.negativeImg(src);
    // 4. Size the object to fit the hole
    IplImage gr_hole;
    // Hole position/size within the ring template
    int roi_X = 37;
    int roi_Y = 226;
    int roi_width = 204;
    int roi_height = 175;
    //int X_wid = 240;
    //int Y_hei = 379;
    int setHeight = 0, setWidth = 0;
    if (src.Width > src.Height)
    {
        setWidth = roi_width;
        setHeight = (roi_width * src.Height) / src.Width;
        if (setHeight > roi_height)
        {
            setHeight = roi_height;
            // NOTE(review): with setHeight == roi_height this expression leaves
            // setWidth unchanged (no-op) — confirm the intended aspect clamp.
            setWidth = (roi_height * setWidth) / setHeight;
        }
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else if (src.Width < src.Height)
    {
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        if (setWidth > roi_width)
        {
            setWidth = roi_width;
            // NOTE(review): same pattern as above — with setWidth == roi_width
            // this leaves setHeight unchanged; verify the aspect correction.
            setHeight = (roi_width * setHeight) / setWidth;
        }
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else
    {
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    src.Resize(gr_hole, Interpolation.Cubic);
    // 5. Position: center the object horizontally inside the hole
    int mid_X = (roi_width / 2) - (gr_hole.Width / 2);
    gr.SetROI(roi_X + mid_X, roi_Y, gr_hole.Width, gr_hole.Height);
    // 6. Composite into the template
    gr_hole.Copy(gr);
    // 7. Clean up
    gr.ResetROI();
    srcImg.ResetROI();
    gr_hole.Dispose();
    src.Dispose();
    return(gr);
}
/// <summary>
/// Legacy blob-detection pre-processing (old variant): converts the frame to
/// grayscale, masks off border regions, applies smoothing + top-hat + Otsu
/// thresholding, labels the connected components, filters them by area and
/// renders their bounding boxes onto the input image.
/// </summary>
/// <param name="img">Input frame; also used as the blob-rendering source.</param>
/// <returns>The blobs that survived the area filter.</returns>
private CvBlobs PreProcessImage2_old(IplImage img)
{
    CvBlobs blobs = null;
    IplConvKernel element = null;
    IplImage temp = null;
    IplImage dest = null;
    IplImage tmpImage = null;
    IplImage tmpImage2 = null;
    IplImage labelImg = null;
    try
    {
        // Wide flat kernel (180x5) tuned to pick out long horizontal features.
        // NOTE(review): `element` is never released — confirm whether the
        // structuring element should be disposed in the finally block too.
        element = Cv.CreateStructuringElementEx(180, 5, 90, 1, ElementShape.Rect, null);
        tmpImage = new IplImage(img.Size, BitDepth.U8, 1);
        temp = tmpImage.Clone();
        dest = tmpImage.Clone();
        img.CvtColor(tmpImage, ColorConversion.RgbaToGray);
        // Paint white over the top third, the bottom fifth, and the right ~2/9
        // of the frame so only the region of interest survives thresholding.
        tmpImage.Rectangle(new CvPoint(0, 0), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)((tmpImage.Size.Height / 9) * 3)), new CvScalar(255, 255, 255), -1);
        tmpImage.Rectangle(new CvPoint(0, (Int32)((tmpImage.Size.Height / 5) * 4)), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
        tmpImage.Rectangle(new CvPoint((Int32)((tmpImage.Size.Width / 9) * 7), 0), new CvPoint((Int32)((tmpImage.Size.Width)), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
        Cv.Smooth(tmpImage, tmpImage, SmoothType.Gaussian);
        // Top-hat isolates bright detail smaller than the structuring element.
        Cv.MorphologyEx(tmpImage, dest, temp, element, MorphologyOperation.TopHat, 1);
        Cv.Threshold(dest, tmpImage, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
        Cv.Smooth(tmpImage, dest, SmoothType.Median);
        // Label connected components of the thresholded image into blobs.
        labelImg = new IplImage(img.Size, CvBlobLib.DepthLabel, 1);
        blobs = new CvBlobs();
        tmpImage2 = tmpImage.Clone();
        CvBlobLib.Label(tmpImage2, labelImg, blobs);
        //Cv.ReleaseImage(tmpImage);
        //tmpImage = img.Clone();
        //blobs.RenderBlobs(labelImg, img, tmpImage);
        //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsNOFiltered.png");
        // Keep only blobs within the expected area range.
        CvBlobLib.FilterByArea(blobs, 850, 4850);
        Cv.ReleaseImage(tmpImage);
        tmpImage = img.Clone();
        //CvTracks tracks = new CvTracks();
        //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5);
        //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID);
        // Draw bounding boxes and orientation for the surviving blobs.
        blobs.RenderBlobs(labelImg, img, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);
        //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsFiltered.png");
    }
    finally
    {
        if (null != temp) { Cv.ReleaseImage(temp); }
        if (null != dest) { Cv.ReleaseImage(dest); }
        if (null != tmpImage) { Cv.ReleaseImage(tmpImage); }
        if (null != tmpImage2) { Cv.ReleaseImage(tmpImage2); }
        if (null != labelImg) { Cv.ReleaseImage(labelImg); }
    }
    return(blobs);
}
/// <summary>
/// Runs the plate-detection pipeline on a frame: extracts the main sub-image,
/// pre-processes it two ways, cuts candidate plate images out of the detected
/// blob regions, and returns a working clone of the input frame.
/// </summary>
/// <param name="img">Input frame; not modified.</param>
/// <param name="plateImages">Candidate plate images found, or null when none.</param>
/// <returns>Clone of the input frame used as the working image; owned by the caller.</returns>
public IplImage ImageTreatment(IplImage img, out IplImage[] plateImages)
{
    IplImage tgray = null;
    IplImage gray = null;
    IplImage mainSubImage = null;
    IplImage tmpImage = null;
    IplImage tmpImage2 = null;
    CvBlobs blobs1 = null;
    CvBlobs blobs2 = null;
    List <IplImage> plates = null;
    CvRect subImageRect;
    plateImages = null;
    try
    {
        plates = new List <IplImage>();
        mainSubImage = ExtractSubImage(img, out subImageRect);
        tgray = new IplImage(mainSubImage.Size, BitDepth.U8, 1);
        mainSubImage.CvtColor(tgray, ColorConversion.RgbaToGray);
        // Two independent pre-processing passes, each yielding blob candidates.
        blobs1 = PreProcessImage1(mainSubImage, tgray);
        blobs2 = PreProcessImage2(mainSubImage, tgray);
        tmpImage = img.Clone();
        tmpImage2 = mainSubImage.Clone();
        tmpImage.SetROI(subImageRect);
        if (null != blobs1 && blobs1.Count > 0)
        {
            // 2.4 is the ratio parameter used for this candidate set.
            IplImage[] plateImage = GetPlates(tmpImage, tmpImage2, blobs1, 2.4);
            if (null != plateImage)
            {
                plates.AddRange(plateImage);
            }
        }
        if (null != blobs2 && blobs2.Count > 0)
        {
            // The second pass uses a different ratio (3.5).
            IplImage[] plateImage = GetPlates(tmpImage, tmpImage2, blobs2, 3.5);
            if (null != plateImage)
            {
                plates.AddRange(plateImage);
            }
        }
        tmpImage.ResetROI();
        // NOTE(review): `gray` is still null at this point, so this release is
        // a no-op at best (and may throw depending on the wrapper) — confirm.
        Cv.ReleaseImage(gray);
        // Transfer ownership: the working clone becomes the return value.
        gray = tmpImage;
    }
    finally
    {
        if (null != tmpImage2) { Cv.ReleaseImage(tmpImage2); }
        if (null != tgray) { Cv.ReleaseImage(tgray); }
        if (null != mainSubImage) { Cv.ReleaseImage(mainSubImage); }
    }
    if (plates.Count > 0)
    {
        plateImages = plates.ToArray();
    }
    return(gray);
}
/// <summary>
/// Converts the full-color image (24-bit) to a grayscale image (8-bit).
/// mImageFull --> mImageGray
/// </summary>
private void convertFullToGray()
{
    mImageGray = new IplImage(mImageFull.GetSize(), BitDepth.U8, 1);
    mImageFull.CvtColor(mImageGray, ColorConversion.BgrToGray);
}