public Text()
{
    // cvInitFont, cvPutText
    // Draws one sample text line per font face, in both the plain and the
    // italic variant, each in a random color.
    var faces = new List<FontFace>((FontFace[])Enum.GetValues(typeof(FontFace)));
    faces.Remove(FontFace.Italic);

    // (1) allocate and clear the canvas
    using (IplImage img = Cv.CreateImage(new CvSize(450, 600), BitDepth.U8, 3))
    {
        Cv.Zero(img);

        // (2) build the font structs: even index = plain, odd index = italic
        var fonts = new CvFont[faces.Count * 2];
        for (int i = 0; i < fonts.Length; i += 2)
        {
            fonts[i] = new CvFont(faces[i / 2], 1.0, 1.0);
            fonts[i + 1] = new CvFont(faces[i / 2] | FontFace.Italic, 1.0, 1.0);
        }

        // (3) draw one sample line per font
        for (int i = 0; i < fonts.Length; i++)
        {
            CvColor rcolor = CvColor.Random();
            Cv.PutText(img, "OpenCV sample code", new CvPoint(15, (i + 1) * 30), fonts[i], rcolor);
        }

        // (4) display until a key is pressed
        using (CvWindow w = new CvWindow(img))
        {
            CvWindow.WaitKey(0);
        }
    }
}
public Filter2D()
{
    // cvFilter2D
    // Filters the image with a user-defined (1x21 horizontal blur) kernel.
    // (1) load the image
    using (IplImage srcImg = new IplImage(Const.ImageFruits, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = new IplImage(srcImg.Size, srcImg.Depth, srcImg.NChannels))
    {
        // (2) normalize the kernel (L1 norm = 1) and apply the filter
        float[] data = { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
        // BUGFIX: CvMat is IDisposable and was previously never disposed
        using (CvMat kernel = new CvMat(1, 21, MatrixType.F32C1, data))
        {
            Cv.Normalize(kernel, kernel, 1.0, 0, NormType.L1);
            Cv.Filter2D(srcImg, dstImg, kernel, new CvPoint(0, 0));
        }
        // (3) show the result until a key is pressed
        using (CvWindow window = new CvWindow("Filter2D", dstImg))
        {
            Cv.WaitKey(0);
        }
    }
}
public SaveImage()
{
    // JPEG quality test: encode the same image at several quality levels,
    // then load each result back and display them side by side.
    using (IplImage img = new IplImage(Const.Image16bit, LoadMode.Color))
    {
        int[] qualities = { 0, 25, 50, 75, 100 };
        foreach (int q in qualities)
        {
            img.SaveImage(string.Format("q{0:D3}.jpg", q), new JpegEncodingParam(q));
        }

        var images = new List<IplImage>();
        var windows = new List<CvWindow>();
        try
        {
            foreach (int q in qualities)
            {
                IplImage loaded = new IplImage(string.Format("q{0:D3}.jpg", q), LoadMode.Color);
                images.Add(loaded);
                windows.Add(new CvWindow(string.Format("quality {0}", q), loaded));
            }
            Cv.WaitKey();
        }
        finally
        {
            foreach (CvWindow w in windows)
            {
                w.Dispose();
            }
            foreach (IplImage loaded in images)
            {
                loaded.Dispose();
            }
        }
    }
}
public VideoWriter()
{
    // Captures frames from the default camera, stamps a frame counter on
    // each one, writes them to "cap.avi" and previews them until ESC.
    // (1) create a capture for the camera
    using (CvCapture capture = CvCapture.FromCamera(0))
    {
        // (2) query the capture size (depends on the camera in use)
        int width = capture.FrameWidth;
        int height = capture.FrameHeight;
        double fps = 15; // capture.Fps is often unreliable for live cameras
        // (3) create the video writer
        using (CvVideoWriter writer = new CvVideoWriter("cap.avi", FourCC.Prompt, fps, new CvSize(width, height)))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.7, 0.7))
        using (CvWindow window = new CvWindow("Capture", WindowMode.AutoSize))
        {
            // (4) capture, annotate, write and display each frame
            for (int frames = 0; ; frames++)
            {
                IplImage frame = capture.QueryFrame();
                if (frame == null)
                {
                    // BUGFIX: QueryFrame can return null (camera unplugged /
                    // no frame available); previously this crashed with an NRE
                    break;
                }
                string str = string.Format("{0}[frame]", frames);
                frame.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                writer.WriteFrame(frame);
                window.ShowImage(frame);
                int key = CvWindow.WaitKey((int)(1000 / fps));
                if (key == '\x1b')
                {
                    break; // ESC quits
                }
            }
        }
    }
}
public Snake()
{
    // cvSnakeImage demo: iteratively fits an active contour ("snake"),
    // initialized as a ring of points around the image center, onto the
    // grayscale input; each key press runs one optimization step.
    using (IplImage src = new IplImage(Const.ImageCake, LoadMode.GrayScale))
    using (IplImage dst = new IplImage(src.Size, BitDepth.U8, 3))
    {
        const int pointCount = 100;
        CvPoint[] snake = new CvPoint[pointCount];
        CvPoint center = new CvPoint(src.Width / 2, src.Height / 2);
        for (int i = 0; i < pointCount; i++)
        {
            double angle = 2 * Math.PI * i / pointCount;
            snake[i].X = (int)(center.X * Math.Cos(angle) + center.X);
            snake[i].Y = (int)(center.Y * Math.Sin(angle) + center.Y);
        }
        Console.WriteLine("Press any key to snake\nEsc - quit");
        using (CvWindow w = new CvWindow())
        {
            bool running = true;
            while (running)
            {
                // one snake optimization step
                src.SnakeImage(snake, 0.45f, 0.35f, 0.2f, new CvSize(15, 15), new CvTermCriteria(1), true);
                src.CvtColor(dst, ColorConversion.GrayToRgb);
                // draw the closed contour (wraps from the last point to the first)
                for (int i = 0; i < pointCount; i++)
                {
                    dst.Line(snake[i], snake[(i + 1) % pointCount], new CvColor(255, 0, 0), 2);
                }
                w.Image = dst;
                running = CvWindow.WaitKey() != 27; // ESC quits
            }
        }
    }
}
public Undistort()
{
    // cvUndistort2
    // Corrects lens distortion using previously saved calibration data.
    // (1) load the image to be corrected
    using (IplImage srcImg = new IplImage(Const.ImageDistortion, LoadMode.Color))
    using (IplImage dstImg = srcImg.Clone())
    {
        CvMat intrinsic = null;
        CvMat distortion = null;
        try
        {
            // (2) read the camera parameters from the XML file
            using (CvFileStorage fs = new CvFileStorage(Const.XmlCamera, null, FileStorageMode.Read))
            {
                CvFileNode param = fs.GetFileNodeByName(null, "intrinsic");
                intrinsic = fs.Read<CvMat>(param);
                param = fs.GetFileNodeByName(null, "distortion");
                distortion = fs.Read<CvMat>(param);
            }
            // (3) undistort
            Cv.Undistort2(srcImg, dstImg, intrinsic, distortion);
            // (4) display until a key is pressed
            using (CvWindow w1 = new CvWindow("Distortion", WindowMode.AutoSize, srcImg))
            using (CvWindow w2 = new CvWindow("Undistortion", WindowMode.AutoSize, dstImg))
            {
                CvWindow.WaitKey(0);
            }
        }
        finally
        {
            // BUGFIX: the matrices were previously only disposed on the success
            // path; dispose them even when reading or undistortion throws
            if (intrinsic != null)
            {
                intrinsic.Dispose();
            }
            if (distortion != null)
            {
                distortion.Dispose();
            }
        }
    }
}
public Contour()
{
    // cvContourArea, cvArcLength
    // Computes the area enclosed by a contour and the contour's length,
    // and compares them with the bounding rectangle's area and perimeter.
    const int SIZE = 500;
    // (1) allocate and clear the canvas
    using (CvMemStorage storage = new CvMemStorage())
    using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 3))
    {
        img.Zero();
        // (2) generate a random star-shaped closed point sequence around the
        //     image center (20 vertices, radius jittered by `scale`)
        CvSeq<CvPoint> points = new CvSeq<CvPoint>(SeqType.PolyLine, storage);
        CvRNG rng = new CvRNG((ulong)DateTime.Now.Ticks);
        double scale = rng.RandReal() + 0.5;
        CvPoint pt0 = new CvPoint
        {
            X = (int)(Math.Cos(0) * SIZE / 4 * scale + SIZE / 2),
            Y = (int)(Math.Sin(0) * SIZE / 4 * scale + SIZE / 2)
        };
        img.Circle(pt0, 2, CvColor.Green);
        points.Push(pt0);
        for (int i = 1; i < 20; i++)
        {
            scale = rng.RandReal() + 0.5;
            CvPoint pt1 = new CvPoint
            {
                X = (int)(Math.Cos(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2),
                Y = (int)(Math.Sin(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2)
            };
            img.Line(pt0, pt1, CvColor.Green, 2);
            pt0.X = pt1.X;
            pt0.Y = pt1.Y;
            img.Circle(pt0, 3, CvColor.Green, Cv.FILLED);
            points.Push(pt0);
        }
        // close the polygon back to the first vertex
        img.Line(pt0, points.GetSeqElem(0).Value, CvColor.Green, 2);
        // (3) bounding rectangle, enclosed area and contour length
        CvRect rect = points.BoundingRect(false);
        double area = points.ContourArea();
        double length = points.ArcLength(CvSlice.WholeSeq, 1);
        // (4) draw the bounding rectangle and the measured values
        img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), CvColor.Red, 2);
        string text_area = string.Format("Area: wrect={0}, contour={1}", rect.Width * rect.Height, area);
        string text_length = string.Format("Length: rect={0}, contour={1}", 2 * (rect.Width + rect.Height), length);
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.7, 0.7, 0, 1, LineType.AntiAlias))
        {
            img.PutText(text_area, new CvPoint(10, img.Height - 30), font, CvColor.White);
            img.PutText(text_length, new CvPoint(10, img.Height - 10), font, CvColor.White);
        }
        // (5) display until a key is pressed
        using (CvWindow window = new CvWindow("BoundingRect", WindowMode.AutoSize))
        {
            window.Image = img;
            CvWindow.WaitKey(0);
        }
    }
}
// Shows `image` in a window titled "<counter><s>", saves it as
// "<counter><s>.png", then advances the shared counter.
// The window is intentionally left open so results stay visible.
static void showrite(String s, CvMat image)
{
    string name = showiteCounter.ToString() + s;
    CvWindow window = new CvWindow(name);
    window.ShowImage(image);
    image.SaveImage(name + ".png");
    showiteCounter++;
}
public Watershed()
{
    // cvWatershed
    // The user clicks in the "image" window to place circular marker (seed)
    // regions. The markers are grown along the image gradient; boundaries
    // form where the gradient is high, splitting the image into as many
    // regions as there are markers.
    // (2) load the source image, initialize the marker image, and allocate
    //     images for interactive display and for the final result
    using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    using (IplImage dspImg = srcImg.Clone())
    using (IplImage markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
    {
        markers.Zero();
        // (3) show the input image and register the mouse handler used to
        //     place the seed components
        using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize))
        {
            wImage.Image = srcImg;
            // each left click paints a circular seed region with a unique id
            int seedNum = 0;
            wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
            {
                if (ev == MouseEvent.LButtonDown)
                {
                    seedNum++;
                    CvPoint pt = new CvPoint(x, y);
                    markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                    dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                    wImage.Image = dspImg;
                }
            };
            CvWindow.WaitKey();
        }
        // (4) run the watershed segmentation
        Cv.Watershed(srcImg, markers);
        // (5) paint the watershed boundaries (marker value == -1) in red on
        //     the result image
        for (int i = 0; i < markers.Height; i++)
        {
            for (int j = 0; j < markers.Width; j++)
            {
                int idx = (int)(markers.Get2D(i, j).Val0);
                if (idx == -1)
                {
                    dstImg.Set2D(i, j, CvColor.Red);
                }
            }
        }
        using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
        {
            wDst.Image = dstImg;
            CvWindow.WaitKey();
        }
    }
}
public PixelSampling()
{
    // Pixel sampling for translation: cvGetRectSubPix
    // (1) load the image and allocate the output image
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        // (2) choose the position `center` in src_img that should become the
        //     center of dst_img (here: the bottom-right corner)
        CvPoint2D32f center = new CvPoint2D32f
        {
            X = srcImg.Width - 1,
            Y = srcImg.Height - 1
        };
        // (3) shift the whole image with GetRectSubPix so that `center`
        //     becomes the image center
        Cv.GetRectSubPix(srcImg, dstImg, center);
        // (4) show the result
        using (CvWindow wSrc = new CvWindow("src"))
        using (CvWindow wDst = new CvWindow("dst"))
        {
            wSrc.Image = srcImg;
            wDst.Image = dstImg;
            Cv.WaitKey(0);
        }
    }

    // Pixel sampling for rotation: cvGetQuadrangleSubPix
    const int angle = 45;
    // (1) load the image and allocate the output image
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        // (2) fill the 2x3 affine matrix elements (rotation by `angle`
        //     degrees plus translation to the image center) and wrap them
        //     in a CvMat
        float[] m = new float[6];
        m[0] = (float)(Math.Cos(angle * Cv.PI / 180.0));
        m[1] = (float)(-Math.Sin(angle * Cv.PI / 180.0));
        m[2] = srcImg.Width * 0.5f;
        m[3] = -m[1];
        m[4] = m[0];
        m[5] = srcImg.Height * 0.5f;
        using (CvMat mat = new CvMat(2, 3, MatrixType.F32C1, m))
        {
            // (3) rotate the whole image with GetQuadrangleSubPix using the
            //     rotation matrix above
            Cv.GetQuadrangleSubPix(srcImg, dstImg, mat);
            // (4) show the result
            using (CvWindow wSrc = new CvWindow("src"))
            using (CvWindow wDst = new CvWindow("dst"))
            {
                wSrc.Image = srcImg;
                wDst.Image = dstImg;
                Cv.WaitKey(0);
            }
        }
    }
}
public HOG()
{
    // HOG people-detector sample: runs OpenCV's default pedestrian detector
    // over the input image and draws a red box around each detection.
    CPP.Mat img = CPP.CvCpp.ImRead(Const.ImageAsahiyama, LoadMode.Color);
    /*
    if (GPU.CvGpu.IsEnabled)
    {
        GPU.GpuMat imgGpu = new GPU.GpuMat(img);
        GPU.HOGDescriptor hog = new GPU.HOGDescriptor();
        hog.SetSVMDetector(OpenCvSharp.CPlusPlus.HOGDescriptor.GetDefaultPeopleDetector());
        //bool b = hog.CheckDetectorSize();
        //b.ToString();
    }
    else
    //*/
    {
        CPP.HOGDescriptor hog = new CPP.HOGDescriptor();
        hog.SetSVMDetector(CPP.HOGDescriptor.GetDefaultPeopleDetector());
        bool b = hog.CheckDetectorSize();
        b.ToString();

        Stopwatch watch = Stopwatch.StartNew();

        // run the detector with default parameters. to get a higher hit-rate
        // (and more false alarms, respectively), decrease the hitThreshold and
        // groupThreshold (set groupThreshold to 0 to turn off the grouping completely).
        CvRect[] found = hog.DetectMultiScale(img, 0, new CvSize(8, 8), new CvSize(24, 16), 1.05, 2);

        watch.Stop();
        Console.WriteLine("Detection time = {0}ms", watch.ElapsedMilliseconds);
        Console.WriteLine("{0} region(s) found", found.Length);

        foreach (CvRect rect in found)
        {
            // the HOG detector returns slightly larger rectangles than the real objects.
            // so we slightly shrink the rectangles to get a nicer output.
            CvRect r = new CvRect
            {
                X = rect.X + (int)Math.Round(rect.Width * 0.1),
                Y = rect.Y + (int)Math.Round(rect.Height * 0.1),
                Width = (int)Math.Round(rect.Width * 0.8),
                Height = (int)Math.Round(rect.Height * 0.8)
            };
            img.Rectangle(r.TopLeft, r.BottomRight, CvColor.Red, 3, LineType.Link8, 0);
        }

        // show the annotated image fullscreen until a key is pressed
        using (CvWindow window = new CvWindow("people detector", WindowMode.None, img.ToIplImage()))
        {
            window.SetProperty(WindowProperty.Fullscreen, 1);
            Cv.WaitKey(0);
        }
    }
}
public CaptureCamera()
{
    // Live preview from the default camera; any key press (WaitKey >= 0)
    // ends the loop.
    using (CvCapture cap = CvCapture.FromCamera(0)) // device type + camera index
    using (CvWindow w = new CvWindow("SampleCapture"))
    {
        while (CvWindow.WaitKey(10) < 0)
        {
            IplImage frame = cap.QueryFrame();
            if (frame == null)
            {
                // BUGFIX: QueryFrame can return null (no frame / camera
                // disconnected); previously the null was assigned to w.Image
                break;
            }
            w.Image = frame;
        }
    }
}
private static void readImage2()
{
    // Loads the image exactly as stored on disk (LoadMode.Unchanged) and
    // displays it until any key is pressed.
    using (var img = new IplImage(@"..\..\images\ocv02.jpg", LoadMode.Unchanged))
    using (var window = new CvWindow("window"))
    {
        window.Image = img;
        Cv.WaitKey();
    }
}
private static void readImage3()
{
    // Loads via IplImage.FromFile, which uses `System.Drawing.Bitmap`
    // behind the scenes, then displays until any key is pressed.
    using (var img = IplImage.FromFile(@"..\..\images\ocv02.jpg", LoadMode.Unchanged))
    using (var window = new CvWindow("window"))
    {
        window.Image = img;
        Cv.WaitKey();
    }
}
public PixelSampling()
{
    // cvGetRectSubPix: shift the whole image so that `center` (here the
    // bottom-right corner of the source) becomes the middle of the output.
    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        var center = new CvPoint2D32f
        {
            X = srcImg.Width - 1,
            Y = srcImg.Height - 1
        };
        Cv.GetRectSubPix(srcImg, dstImg, center);
        using (CvWindow wSrc = new CvWindow("src"))
        using (CvWindow wDst = new CvWindow("dst"))
        {
            wSrc.Image = srcImg;
            wDst.Image = dstImg;
            Cv.WaitKey(0);
        }
    }

    // cvGetQuadrangleSubPix: rotate the whole image by `angle` degrees
    // using a 2x3 affine matrix (rotation + translation to image center).
    const int angle = 45;
    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        double rad = angle * Cv.PI / 180.0;
        float cos = (float)Math.Cos(rad);
        float sin = (float)Math.Sin(rad);
        float[] m =
        {
            cos, -sin, srcImg.Width * 0.5f,
            sin,  cos, srcImg.Height * 0.5f
        };
        using (CvMat mat = new CvMat(2, 3, MatrixType.F32C1, m))
        {
            Cv.GetQuadrangleSubPix(srcImg, dstImg, mat);
            using (CvWindow wSrc = new CvWindow("src"))
            using (CvWindow wDst = new CvWindow("dst"))
            {
                wSrc.Image = srcImg;
                wDst.Image = dstImg;
                Cv.WaitKey(0);
            }
        }
    }
}
public Histogram()
{
    // cvCalcHist
    // Sample that lets the user vary brightness and contrast via trackbars
    // and shows the resulting grayscale histogram live.
    const int histSize = 64;
    float[] range0 = { 0, 256 };
    float[][] ranges = { range0 };
    // load the image
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
    using (IplImage dstImg = srcImg.Clone())
    using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
    using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
    {
        using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
        using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
        {
            // invoked whenever either trackbar moves; reads both trackbar
            // positions (range 0..200 mapped to -100..100)
            CvTrackbar ctBrightness = null;
            CvTrackbar ctContrast = null;
            CvTrackbarCallback callback = delegate(int pos)
            {
                int brightness = ctBrightness.Pos - 100;
                int contrast = ctContrast.Pos - 100;
                // apply the lookup table
                byte[] lut = CalcLut(contrast, brightness);
                srcImg.LUT(dstImg, lut);
                // compute and draw the histogram
                CalcHist(dstImg, hist);
                DrawHist(histImg, hist, histSize);
                // show both windows, then clear the scratch images
                windowImage.ShowImage(dstImg);
                windowHist.ShowImage(histImg);
                dstImg.Zero();
                histImg.Zero();
            };
            // create the trackbars
            // (In OpenCV a pointer to the current position can be passed so
            // the position is updated in place, but in .NET the GC can move
            // such a pointer, so that mechanism is not used here; the current
            // value is read from CvTrackbar.Pos instead.)
            ctBrightness = windowImage.CreateTrackbar("brightness", 100, 200, callback);
            ctContrast = windowImage.CreateTrackbar("contrast", 100, 200, callback);
            // initial rendering
            callback(0);
            // wait for a key press
            Cv.WaitKey(0);
        }
    }
}
public Watershed()
{
    // cvWatershed sample: the user clicks to place circular marker (seed)
    // regions; the watershed transform then splits the image into one
    // region per marker, and the boundaries are painted red.
    using (var srcImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (var dstImg = srcImg.Clone())
    using (var dspImg = srcImg.Clone())
    using (var markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
    {
        markers.Zero();
        using (var window = new CvWindow("image", WindowMode.AutoSize))
        {
            window.Image = srcImg;
            // Mouse event: each left click paints a circular seed region
            // with a unique id into the marker image
            int seedNum = 0;
            window.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
            {
                if (ev == MouseEvent.LButtonDown)
                {
                    seedNum++;
                    CvPoint pt = new CvPoint(x, y);
                    markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                    dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                    window.Image = dspImg;
                }
            };
            CvWindow.WaitKey();
        }
        Cv.Watershed(srcImg, markers);
        // draws watershed boundaries (marker value == -1) in red
        for (int i = 0; i < markers.Height; i++)
        {
            for (int j = 0; j < markers.Width; j++)
            {
                int idx = (int)(markers.Get2D(i, j).Val0);
                if (idx == -1)
                {
                    dstImg.Set2D(i, j, CvColor.Red);
                }
            }
        }
        using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
        {
            wDst.Image = dstImg;
            CvWindow.WaitKey();
        }
    }
}
public ConvexHull()
{
    // Repeatedly scatters random points, computes their convex hull and
    // draws it as a closed green polyline, until the user presses ESC.
    using (IplImage img = Cv.CreateImage(new CvSize(500, 500), BitDepth.U8, 3))
    using (CvWindow window = new CvWindow("hull"))
    {
        Random rand = new Random();
        bool quit = false;
        while (!quit)
        {
            int count = rand.Next() % 100 + 1;

            // random point cloud clustered in the middle of the image
            CvPoint[] ptseq = new CvPoint[count];
            for (int i = 0; i < count; i++)
            {
                ptseq[i] = new CvPoint
                {
                    X = rand.Next() % (img.Width / 2) + img.Width / 4,
                    Y = rand.Next() % (img.Height / 2) + img.Height / 4
                };
            }

            // draw the points
            Cv.Zero(img);
            foreach (CvPoint pt in ptseq)
            {
                Cv.Circle(img, pt, 2, new CvColor(255, 0, 0), -1);
            }

            // compute the hull and draw it, wrapping from the last vertex
            CvPoint[] hull;
            Cv.ConvexHull2(ptseq, out hull, ConvexHullOrientation.Clockwise);
            CvPoint prev = hull.Last();
            foreach (CvPoint pt in hull)
            {
                Cv.Line(img, prev, pt, CvColor.Green);
                prev = pt;
            }

            window.ShowImage(img);
            quit = Cv.WaitKey(0) == 27; // ESC
        }
    }
}
public Moments()
{
    // cvMoments sample: compute image moments up to the 3rd order plus the
    // Hu moment invariants and draw the values onto the image.
    // (1) load the image; a 3-channel image must have its COI set
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyColor | LoadMode.AnyDepth))
    {
        if (srcImg.NChannels == 3 && srcImg.COI == 0)
        {
            srcImg.COI = 1;
        }
        // (2) compute moments up to the 3rd order
        CvMoments moments = new CvMoments(srcImg, false);
        srcImg.COI = 0;
        // (3) derive spatial / central / normalized-central moments and the
        //     Hu invariants from the CvMoments struct
        double spatialMoment = moments.GetSpatialMoment(0, 0);
        double centralMoment = moments.GetCentralMoment(0, 0);
        double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
        CvHuMoments huMoments = new CvHuMoments(moments);
        // (4) render the values as text on the image
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
        {
            string[] text = new string[10];
            text[0] = string.Format("spatial={0:F3}", spatialMoment);
            text[1] = string.Format("central={0:F3}", centralMoment);
            // BUGFIX: previously printed spatialMoment under the "norm=" label
            text[2] = string.Format("norm={0:F3}", normCMoment);
            text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
            text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
            text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
            text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
            text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
            text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
            text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);
            CvSize textSize = font.GetTextSize(text[0]);
            for (int i = 0; i < 10; i++)
            {
                srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
            }
        }
        // (5) display until a key is pressed
        using (CvWindow window = new CvWindow("Image", WindowMode.AutoSize))
        {
            window.ShowImage(srcImg);
            Cv.WaitKey(0);
        }
    }
}
// Unity lifecycle entry point: opens the default camera, creates a preview
// window, grabs one frame to learn the capture size, and stores the first
// frame as a grayscale "previous" image.
// NOTE(review): the fields (moveVec, cap, w, cols, rows, myTexture2D, prvs)
// are declared elsewhere in this class; presumably used by an optical-flow
// Update loop — verify against the rest of the file.
void Start () {
    moveVec = new Vector3(0,0,0);
    cap = CvCapture.FromCamera (0);
    w = new CvWindow("Original");
    // NOTE(review): QueryFrame can return null if the camera fails to open —
    // confirm this is acceptable here
    IplImage frame = cap.QueryFrame ();
    cols = frame.Width;
    rows = frame.Height;
    // texture at half the camera resolution — TODO confirm downscale is intended
    myTexture2D = new Texture2D(cols/2, rows/2);
    prvs = new IplImage(cols, rows, BitDepth.U8, 1);
    // convert the first frame to grayscale into prvs
    frame.CvtColor (prvs, ColorConversion.BgrToGray);
}
/// <summary>
/// Destroys the window with the given name.
/// </summary>
/// <param name="name">Name of the window to be destroyed.</param>
#else
/// <summary>
/// Destroys the window with a given name.
/// </summary>
/// <param name="name">Name of the window to be destroyed. </param>
#endif
public static void DestroyWindow(string name)
{
    if (string.IsNullOrEmpty(name))
        throw new ArgumentNullException("name");
    // Prefer disposing the managed wrapper when one is tracked under this
    // name; otherwise fall back to destroying the native window directly.
    CvWindow window = CvWindow.GetWindowByName(name);
    if (window == null)
    {
        NativeMethods.cvDestroyWindow(name);
    }
    else
    {
        window.Dispose();
    }
}
// Use this for initialization (Unity lifecycle).
// Starts the webcam texture, binds it to the UI RawImage, allocates the
// working images, and opens a preview window.
// NOTE(review): the commented-out lines suggest a disabled optical-flow
// setup (prvs/velx/vely) — confirm before re-enabling; the fields used here
// are declared elsewhere in this class.
void Start () {
    webcamTexture = new WebCamTexture(imWidth, imHeight);
    rawimage.texture = webcamTexture;
    rawimage.material.mainTexture = webcamTexture;
    webcamTexture.Play();
    // prvs = new IplImage(imWidth, imHeight, BitDepth.U8, 1);
    rez = new IplImage(imWidth, imHeight, BitDepth.U8, 1);
    src = new IplImage(imWidth, imHeight, BitDepth.U8, 3);
    // velx = Cv.CreateMat(imWidth, imHeight, MatrixType.F32C1);
    // vely = Cv.CreateMat(imWidth, imHeight, MatrixType.F32C1);
    // FromTextureToIplImage(prvs);
    // prvs.CvtColor(prvs, ColorConversion.RgbToGray);
    w = new CvWindow("Web Camera");
}
public BoundingRect()
{
    // cvBoundingRect
    // Finds the rectangle that encloses a set of points.
    // (1) allocate the image and the memory storage
    //     (the storage is only needed for the CvSeq variant below)
    using (IplImage img = new IplImage(640, 480, BitDepth.U8, 3))
    using (CvMemStorage storage = new CvMemStorage(0))
    {
        img.Zero();
        CvRNG rng = new CvRNG(DateTime.Now);
        // (2) generate the point set
        ///*
        // simple approach (plain array)
        CvPoint[] points = new CvPoint[50];
        for (int i = 0; i < 50; i++)
        {
            points[i] = new CvPoint()
            {
                X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4),
                Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4)
            };
            img.Circle(points[i], 3, new CvColor(0, 255, 0), Cv.FILLED);
        }
        //*/
        /*
        // approach matching the original OpenCV sample (uses CvSeq)
        CvSeq points = new CvSeq(SeqType.EltypePoint, CvSeq.SizeOf, CvPoint.SizeOf, storage);
        for (int i = 0; i < 50; i++)
        {
            CvPoint pt = new CvPoint();
            pt.X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4);
            pt.Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4);
            points.Push(pt);
            img.Circle(pt, 3, new CvColor(0, 255, 0), Cv.FILLED);
        }
        //*/
        // (3) compute and draw the rectangle enclosing the points
        CvRect rect = Cv.BoundingRect(points);
        img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), new CvColor(255, 0, 0), 2);
        // (4) display until a key is pressed
        using (CvWindow w = new CvWindow("BoundingRect", WindowMode.AutoSize, img))
        {
            CvWindow.WaitKey(0);
        }
    }
}
public Moments()
{
    // cvMoments sample: compute image moments and Hu invariants, then draw
    // the values onto the image. A 3-channel image must have its COI set.
    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyColor | LoadMode.AnyDepth))
    {
        if (srcImg.NChannels == 3 && srcImg.COI == 0)
        {
            srcImg.COI = 1;
        }
        CvMoments moments = new CvMoments(srcImg, false);
        srcImg.COI = 0;
        double spatialMoment = moments.GetSpatialMoment(0, 0);
        double centralMoment = moments.GetCentralMoment(0, 0);
        double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
        CvHuMoments huMoments = new CvHuMoments(moments);
        // drawing
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
        {
            string[] text = new string[10];
            text[0] = string.Format("spatial={0:F3}", spatialMoment);
            text[1] = string.Format("central={0:F3}", centralMoment);
            // BUGFIX: previously printed spatialMoment under the "norm=" label
            text[2] = string.Format("norm={0:F3}", normCMoment);
            text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
            text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
            text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
            text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
            text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
            text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
            text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);
            CvSize textSize = font.GetTextSize(text[0]);
            for (int i = 0; i < 10; i++)
            {
                srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
            }
        }
        using (var window = new CvWindow("Image", WindowMode.AutoSize))
        {
            window.ShowImage(srcImg);
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// CvAdaptiveSkinDetector sample: builds a hue-based skin mask for the
/// input image and highlights the detected skin pixels in green.
/// </summary>
public SkinDetector()
{
    using (IplImage imgSrc = new IplImage(Const.ImageBalloon, LoadMode.Color))
    using (IplImage imgHueMask = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgDst = imgSrc.Clone())
    {
        var detector = new CvAdaptiveSkinDetector(1, MorphingMethod.None);
        detector.Process(imgSrc, imgHueMask);
        DisplaySkinPoints(imgHueMask, imgDst, CvColor.Green);

        // show original and result side by side until a key is pressed
        using (CvWindow windowSrc = new CvWindow("src", imgSrc))
        using (CvWindow windowDst = new CvWindow("skin", imgDst))
        {
            Cv.WaitKey(0);
        }
    }
}
public PyrSegmentation()
{
    // cvPyrSegmentation
    // Builds an image pyramid with the given number of levels and uses it
    // to segment the image; runs levels 1 through 4 for comparison.
    const double threshold1 = 255.0;
    const double threshold2 = 50.0;
    // (1) load the image
    using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    {
        // try pyramid levels 1 to 4
        IplImage[] dstImg = new IplImage[4];
        for (int level = 0; level < dstImg.Length; level++)
        {
            // (2) set an ROI whose sides are divisible by 2^(level+1),
            //     as required by the pyramid
            CvRect roi = new CvRect()
            {
                X = 0,
                Y = 0,
                Width = srcImg.Width & -(1 << (level + 1)),
                Height = srcImg.Height & -(1 << (level + 1))
            };
            srcImg.ROI = roi;
            // (3) allocate the output image and run the segmentation
            dstImg[level] = srcImg.Clone();
            Cv.PyrSegmentation(srcImg, dstImg[level], level + 1, threshold1, threshold2);
        }
        // (4) show the input image and all segmentation results
        CvWindow wSrc = new CvWindow("src", srcImg);
        CvWindow[] wDst = new CvWindow[dstImg.Length];
        for (int i = 0; i < dstImg.Length; i++)
        {
            wDst[i] = new CvWindow("dst" + i, dstImg[i]);
        }
        CvWindow.WaitKey();
        CvWindow.DestroyAllWindows();
        // release the per-level result images
        foreach (IplImage item in dstImg)
        {
            item.Dispose();
        }
    }
}
public Delaunay()
{
    // Incremental Delaunay/Voronoi demo: inserts 200 random points one by
    // one, redrawing the triangulation and Voronoi diagram after each
    // insertion; pressing a key during the animation stops it early, then
    // the final Voronoi cells are painted.
    CvRect rect = new CvRect(0, 0, 600, 600);
    CvColor activeFacetColor = new CvColor(255, 0, 0);
    CvColor delaunayColor = new CvColor(0, 0, 0);
    CvColor voronoiColor = new CvColor(0, 180, 0);
    CvColor bkgndColor = new CvColor(255, 255, 255);
    Random rand = new Random();

    using (CvMemStorage storage = new CvMemStorage(0))
    using (IplImage img = new IplImage(rect.Size, BitDepth.U8, 3))
    using (CvWindow window = new CvWindow("delaunay"))
    {
        img.Set(bkgndColor);
        CvSubdiv2D subdiv = new CvSubdiv2D(rect, storage);
        for (int i = 0; i < 200; i++)
        {
            // random point inside the subdivision rectangle (with margin)
            CvPoint2D32f fp = new CvPoint2D32f
            {
                X = (float)rand.Next(5, rect.Width - 10),
                Y = (float)rand.Next(5, rect.Height - 10)
            };
            // highlight the facet the new point falls into
            LocatePoint(subdiv, fp, img, activeFacetColor);
            window.Image = img;
            if (CvWindow.WaitKey(100) >= 0)
            {
                break;
            }
            // insert the point and recompute the Voronoi tessellation
            subdiv.Insert(fp);
            subdiv.CalcVoronoi2D();
            img.Set(bkgndColor);
            DrawSubdiv(img, subdiv, delaunayColor, voronoiColor);
            window.Image = img;
            if (CvWindow.WaitKey(100) >= 0)
            {
                break;
            }
        }
        // final frame: filled Voronoi cells
        img.Set(bkgndColor);
        PaintVoronoi(subdiv, img);
        window.Image = img;
        CvWindow.WaitKey(0);
    }
}
public Histogram()
{
    // cvCalcHist sample: brightness/contrast trackbars drive a LUT over the
    // grayscale image, and the resulting histogram is drawn live.
    const int histSize = 64;
    float[] range0 = { 0, 256 };
    float[][] ranges = { range0 };

    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
    using (IplImage dstImg = srcImg.Clone())
    using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
    using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
    {
        using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
        using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
        {
            // invoked whenever either trackbar moves; positions 0..200 map
            // to the range -100..100
            CvTrackbar ctBrightness = null;
            CvTrackbar ctContrast = null;
            CvTrackbarCallback callback = delegate(int pos)
            {
                int brightness = ctBrightness.Pos - 100;
                int contrast = ctContrast.Pos - 100;
                // perform LUT
                byte[] lut = CalcLut(contrast, brightness);
                srcImg.LUT(dstImg, lut);
                // draws histogram
                CalcHist(dstImg, hist);
                DrawHist(histImg, hist, histSize);
                windowImage.ShowImage(dstImg);
                windowHist.ShowImage(histImg);
                // clear the scratch images for the next invocation
                dstImg.Zero();
                histImg.Zero();
            };
            ctBrightness = windowImage.CreateTrackbar("brightness", 100, 200, callback);
            ctContrast = windowImage.CreateTrackbar("contrast", 100, 200, callback);
            // initial action
            callback(0);
            Cv.WaitKey(0);
        }
    }
}
public PyrMeanShiftFiltering()
{
    // cvPyrMeanShiftFiltering
    // Segments the image using mean-shift filtering over an image pyramid.
    const int level = 2;
    // (1) load the image
    using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    {
        // BUGFIX: replaced bare `throw new Exception()` with specific,
        // self-describing exceptions
        if (srcImg.NChannels != 3)
        {
            throw new NotSupportedException("PyrMeanShiftFiltering requires a 3-channel image.");
        }
        if (srcImg.Depth != BitDepth.U8)
        {
            throw new NotSupportedException("PyrMeanShiftFiltering requires an 8-bit (U8) image.");
        }
        // (2) set an ROI whose sides are divisible by 2^level, as required
        //     by the pyramid
        CvRect roi = new CvRect
        {
            X = 0,
            Y = 0,
            Width = srcImg.Width & -(1 << level),
            Height = srcImg.Height & -(1 << level)
        };
        srcImg.ROI = roi;
        // (3) allocate the output image and run the segmentation
        using (IplImage dstImg = srcImg.Clone())
        {
            Cv.PyrMeanShiftFiltering(srcImg, dstImg, 30.0, 30.0, level, new CvTermCriteria(5, 1));
            // (4) show input and result until a key is pressed
            using (CvWindow wSrc = new CvWindow("Source", srcImg))
            using (CvWindow wDst = new CvWindow("MeanShift", dstImg))
            {
                CvWindow.WaitKey();
            }
        }
    }
}
public Threshold()
{
    // Interactive binary thresholding: a trackbar controls the threshold
    // applied to the blurred grayscale version of the input image.
    using (IplImage src = new IplImage(Const.ImageLenna, LoadMode.Color))
    using (IplImage srcGray = new IplImage(src.Size, BitDepth.U8, 1))
    using (IplImage dst = new IplImage(src.Size, BitDepth.U8, 1))
    using (CvWindow window = new CvWindow("SampleThreshold"))
    {
        src.CvtColor(srcGray, ColorConversion.BgrToGray);
        srcGray.Smooth(srcGray, SmoothType.Gaussian, 5);

        const int initialThreshold = 90;
        CvTrackbarCallback onChange = delegate(int pos)
        {
            srcGray.Threshold(dst, pos, 255, ThresholdType.Binary);
            window.Image = dst;
        };
        window.CreateTrackbar("threshold", initialThreshold, 255, onChange);

        // initial rendering before the user touches the trackbar
        srcGray.Threshold(dst, initialThreshold, 255, ThresholdType.Binary);
        window.Image = dst;
        CvWindow.WaitKey();
    }
}
public BgSubtractorMOG()
{
    // BackgroundSubtractorMOG demo: extracts the moving foreground from
    // each frame of the movie and shows source and mask side by side.
    using (CvCapture capture = new CvCapture(Const.MovieHara)) // specify your movie file
    using (BackgroundSubtractorMOG mog = new BackgroundSubtractorMOG())
    using (CvWindow windowSrc = new CvWindow("src"))
    using (CvWindow windowDst = new CvWindow("dst"))
    using (Mat imgFg = new Mat())
    {
        for (IplImage imgFrame = capture.QueryFrame(); imgFrame != null; imgFrame = capture.QueryFrame())
        {
            // learning rate 0.01 controls background-model adaptation speed
            mog.Run(new Mat(imgFrame, false), imgFg, 0.01);
            windowSrc.Image = imgFrame;
            windowDst.Image = imgFg.ToIplImage();
            Cv.WaitKey(50);
        }
    }
}
/// <summary>
/// Destroys all of the HighGUI windows.
/// </summary>
#else
/// <summary>
/// Destroys all the opened HighGUI windows.
/// </summary>
#endif
public static void DestroyAllWindows()
{
    // Tear down every tracked managed window: destroy its native window and
    // dispose its trackbars, then clear the registry and ask the native
    // layer to destroy any windows created outside the wrappers.
    foreach (KeyValuePair<string, CvWindow> wpair in Windows)
    {
        CvWindow w = wpair.Value;
        if (w == null || w.IsDisposed)
        {
            continue;
        }
        NativeMethods.cvDestroyWindow(w.name);
        foreach (KeyValuePair<string, CvTrackbar> tpair in w.trackbars)
        {
            if (tpair.Value != null)
            {
                tpair.Value.Dispose();
            }
        }
        // NOTE(review): w.Dispose() is deliberately skipped here — presumably
        // to avoid mutating Windows while enumerating it; confirm before
        // re-enabling.
        //w.Dispose();
    }
    Windows.Clear();
    NativeMethods.cvDestroyAllWindows();
}
/// <summary>
/// Destroys all of the opened HighGUI windows.
/// </summary>
#else
/// <summary>
/// Destroys all the opened HighGUI windows.
/// </summary>
#endif
public static void DestroyAllWindows()
{
    // Delegate to CvWindow.DestroyAllWindows, which also tears down the
    // managed wrappers rather than only calling the native
    // cvDestroyAllWindows.
    CvWindow.DestroyAllWindows();
}