public IplImage HoughLines_Point08(IplImage src, int canny1, int canny2, int thresh, int sideData)
{
    // Detects up to 6 line segments with the probabilistic Hough transform and
    // draws them in red on a clone of the source image.
    // Returns the annotated color image (or the Canny edge image when no line
    // was found); the result is also kept in the 'houghLine' field.
    //
    // src          : input BGR image
    // canny1/canny2: Canny hysteresis thresholds for edge pre-processing
    // thresh       : Hough accumulator threshold
    // sideData     : 0 or 2 -> minimum segment length is half the image width,
    //                otherwise half the image height

    List<CvPoint> LinePoints = new List<CvPoint>();

    // BUG FIX: the original condition was (sideData == 0 && sideData == 2),
    // which can never be true, so the width-based branch was unreachable.
    int lineMinLength;
    if (sideData == 0 || sideData == 2)
    {
        lineMinLength = src.Width / 2;
    }
    else
    {
        lineMinLength = src.Height / 2;
    }

    // (1) working copies: color clone for drawing, single-channel image for edges
    using (IplImage srcImgStd = src.Clone())
    using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
    {
        Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

        // (2) Canny edge detection as pre-processing for the Hough transform
        Cv.Canny(srcImgGray, srcImgGray, canny1, canny2, ApertureSize.Size3);
        houghLine = srcImgGray.Clone();

        using (CvMemStorage storage = new CvMemStorage())
        {
            LinePoints.Clear();

            // (3) probabilistic Hough transform.
            // BUG FIX: 'lineMinLength' was computed but never used - the
            // minimum segment length was hard-coded to 5.
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic,
                                                 1, Math.PI / 180, thresh, lineMinLength, 0);

            int limit = Math.Min(lines.Total, 6);
            for (int i = 0; i < limit; i++)
            {
                // Probabilistic output elements are endpoint pairs
                // (CvLineSegmentPoint), not polar (rho, theta) entries.
                CvLineSegmentPoint seg = lines.GetSeqElem<CvLineSegmentPoint>(i).Value;
                CvPoint pt1 = seg.P1;
                CvPoint pt2 = seg.P2;

                srcImgStd.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);

                LinePoints.Add(pt1);
                LinePoints.Add(pt2);
            }

            // BUG FIX: the original cloned srcImgStd on every loop iteration,
            // leaking all but the last clone. Clone once, after drawing.
            if (limit > 0)
            {
                houghLine = srcImgStd.Clone();
            }
        }
    }
    return houghLine;
}
//=============================================================
//
// Classic (standard) Hough transform demo
//
//=============================================================
private void HoughStd(PictureBox pbox, IplImage image)
{
    // Working images: a grayscale copy, the Canny edge map, and an RGB canvas
    // (the edge map converted back to color) that detected lines are drawn on.
    IplImage grayImg = Cv.CreateImage(image.Size, BitDepth.U8, 1);
    IplImage edgeImg = Cv.CreateImage(image.Size, BitDepth.U8, 1);
    IplImage canvas = Cv.CreateImage(image.Size, BitDepth.U8, 3);

    Cv.CvtColor(image, grayImg, ColorConversion.RgbToGray);
    Cv.Canny(grayImg, edgeImg, 50, 200);
    Cv.CvtColor(edgeImg, canvas, ColorConversion.GrayToRgb);

    // Standard Hough transform: 1 px rho step, 1 degree theta step,
    // accumulator threshold 120.
    CvMemStorage storage = new CvMemStorage();
    CvSeq lines = Cv.HoughLines2(edgeImg, storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 120);

    for (int idx = 0; idx < lines.Total; idx++)
    {
        CvLineSegmentPolar polar = lines.GetSeqElem<CvLineSegmentPolar>(idx).Value;
        float rho = polar.Rho;
        float theta = polar.Theta;

        // Turn (rho, theta) into two far-apart points on the line;
        // 10000 px in each direction guarantees the segment spans the image.
        double cosT = Math.Cos(theta);
        double sinT = Math.Sin(theta);
        double baseX = cosT * rho;
        double baseY = sinT * rho;
        CvPoint p1 = new CvPoint(Cv.Round(baseX + 10000 * (-sinT)), Cv.Round(baseY + 10000 * (cosT)));
        CvPoint p2 = new CvPoint(Cv.Round(baseX - 10000 * (-sinT)), Cv.Round(baseY - 10000 * (cosT)));

        Cv.Line(canvas, p1, p2, CvColor.Red, 1, LineType.AntiAlias, 0);
    }

    lines.Dispose();
    storage.Dispose();

    ViewBitmap(pbox, canvas);

    Cv.ReleaseImage(grayImg);
    Cv.ReleaseImage(edgeImg);
    Cv.ReleaseImage(canvas);
    pictureBox2.Invalidate();
}
/// <summary>
/// sample of C style wrapper
/// </summary>
private void SampleC()
{
    // cvHoughLines2 demo: run both the standard and the probabilistic Hough
    // transform on the same Canny edge image and display both results.
    using (IplImage grayImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.GrayScale))
    using (IplImage stdImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.Color))
    using (IplImage probImg = stdImg.Clone())
    {
        Cv.Canny(grayImg, grayImg, 50, 200, ApertureSize.Size3);

        using (CvMemStorage storage = new CvMemStorage())
        {
            // --- Standard algorithm: results are (rho, theta) pairs ---
            CvSeq lines = grayImg.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);

            int count = Math.Min(lines.Total, 10);
            for (int i = 0; i < count; i++)
            {
                CvLineSegmentPolar polar = lines.GetSeqElem<CvLineSegmentPolar>(i).Value;
                float rho = polar.Rho;
                float theta = polar.Theta;
                double cosT = Math.Cos(theta);
                double sinT = Math.Sin(theta);
                double cx = cosT * rho;
                double cy = sinT * rho;

                // Extend the infinite line 1000 px in both directions for drawing.
                CvPoint a = new CvPoint { X = Cv.Round(cx + 1000 * (-sinT)), Y = Cv.Round(cy + 1000 * (cosT)) };
                CvPoint b = new CvPoint { X = Cv.Round(cx - 1000 * (-sinT)), Y = Cv.Round(cy - 1000 * (cosT)) };
                stdImg.Line(a, b, CvColor.Red, 3, LineType.AntiAlias, 0);
            }

            // --- Probabilistic algorithm: results are finite endpoint pairs ---
            lines = grayImg.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 10);
            for (int i = 0; i < lines.Total; i++)
            {
                CvLineSegmentPoint seg = lines.GetSeqElem<CvLineSegmentPoint>(i).Value;
                probImg.Line(seg.P1, seg.P2, CvColor.Red, 3, LineType.AntiAlias, 0);
            }
        }

        using (new CvWindow("Hough_line_standard", WindowMode.AutoSize, stdImg))
        using (new CvWindow("Hough_line_probabilistic", WindowMode.AutoSize, probImg))
        {
            CvWindow.WaitKey(0);
        }
    }
}
//HoughLines2() parameter reference:
//1. CvArr* image       : image to run the transform on
//2. void* line_storage : storage for the detected lines
//3. int method         : which of the three Hough variants to use
//4. double rho / double theta : sampling granularity (rho = 1 scans per pixel,
//   theta = PI/180 scans per degree)
//5. int threshold      : accumulator threshold - a line is reported when more
//   curves than this overlap in Hough space; larger values are stricter
//6. double param1      : minimum segment length (probabilistic method only)
//7. double param2      : maximum gap between segments (probabilistic method only)
public IplImage HoughLines(IplImage src, IplImage boxImage, ref IplImage resultImage)
{
    // Detects the strongest line in 'src' with the standard Hough transform,
    // clips its endpoints to the image border, draws it in red on a clone of
    // 'boxImage' and returns that annotated clone. When no line is detected,
    // the Canny edge image is returned instead. The result is also kept in
    // the 'houghLine' field. 'resultImage' is currently unused (kept for
    // interface compatibility).

    // BUG FIX: 'orImage' was never disposed; wrap it in 'using'.
    using (IplImage orImage = boxImage.Clone())
    using (IplImage srcImgStd = src.Clone())
    using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
    {
        Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

        // Canny pre-processing for the Hough transform.
        Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);
        houghLine = srcImgGray.Clone();

        using (CvMemStorage storage = new CvMemStorage())
        {
            // Standard Hough transform (polar rho/theta output).
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);

            int limit = Math.Min(lines.Total, 10);
            for (int i = 0; i < limit; i++)
            {
                CvLineSegmentPolar elem = lines.GetSeqElem<CvLineSegmentPolar>(i).Value;
                float rho = elem.Rho;
                float theta = elem.Theta;
                double a = Math.Cos(theta);
                double b = Math.Sin(theta);
                double x0 = a * rho;
                double y0 = b * rho;

                // Two points far along the line in both directions.
                CvPoint pt1 = new CvPoint { X = Cv.Round(x0 + src.Width * (-b)), Y = Cv.Round(y0 + src.Height * (a)) };
                CvPoint pt2 = new CvPoint { X = Cv.Round(x0 - src.Width * (-b)), Y = Cv.Round(y0 - src.Height * (a)) };

                // Clip the endpoints to the image rectangle.
                if (pt1.X < 1) { pt1.X = 0; pt2.X = src.Width; }
                if (pt2.X < 1) { pt1.X = src.Width; pt2.X = 0; }
                if (pt1.Y < 1) { pt1.Y = 0; pt2.Y = src.Height; }
                if (pt2.Y < 1) { pt1.Y = src.Height; pt2.Y = 0; }

                // BUG FIX: the original cloned srcImgStd into 'houghLine'
                // and immediately overwrote the reference with orImage.Clone(),
                // leaking a native image on every call.
                orImage.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
                houghLine = orImage.Clone();
                return houghLine;   // only the strongest line is used
            }
        }
    }
    return houghLine;
}
public IplImage HoughLines_Line(IplImage src, int canny1, int canny2, int thresh)
{
    // Detects up to 3 lines with the standard Hough transform, clips their
    // endpoints to the image border and draws them in red on a clone of 'src'.
    // Returns the annotated image (or the Canny edge image when no line is
    // found); the result is also kept in the 'houghLine' field.
    //
    // canny1/canny2 : Canny hysteresis thresholds
    // thresh        : Hough accumulator threshold

    // (1) working copies: color clone for drawing, single-channel image for edges
    using (IplImage srcImgStd = src.Clone())
    using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
    {
        Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

        // (2) Canny edge detection as pre-processing for the Hough transform
        Cv.Canny(srcImgGray, srcImgGray, canny1, canny2, ApertureSize.Size3);
        houghLine = srcImgGray.Clone();

        using (CvMemStorage storage = new CvMemStorage())
        {
            // (3) standard Hough transform (polar rho/theta output)
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, thresh, 0, 0);

            int limit = Math.Min(lines.Total, 3);
            for (int i = 0; i < limit; i++)
            {
                CvLineSegmentPolar elem = lines.GetSeqElem<CvLineSegmentPolar>(i).Value;
                float rho = elem.Rho;
                float theta = elem.Theta;
                double a = Math.Cos(theta), b = Math.Sin(theta);
                double x0 = a * rho, y0 = b * rho;

                // Two points far along the line in both directions.
                CvPoint pt1, pt2;
                pt1.X = Cv.Round(x0 + srcImgStd.Width * (-b));
                pt1.Y = Cv.Round(y0 + srcImgStd.Height * (a));
                pt2.X = Cv.Round(x0 - srcImgStd.Width * (-b));
                pt2.Y = Cv.Round(y0 - srcImgStd.Height * (a));

                // Clip the endpoints to the image rectangle.
                if (pt1.X < 0) { pt1.X = 0; pt2.X = src.Width; }
                else if (pt2.X < 0) { pt1.X = src.Width; pt2.X = 0; }
                if (pt1.Y < 0) { pt1.Y = 0; pt2.Y = src.Height; }
                else if (pt2.Y < 0) { pt1.Y = src.Height; pt2.Y = 0; }

                srcImgStd.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
            }

            // BUG FIX: the original cloned srcImgStd on every loop iteration,
            // leaking all but the last clone (also removed the local point
            // list that was populated but never used).
            if (limit > 0)
            {
                houghLine = srcImgStd.Clone();
            }
        }
    }
    return houghLine;
}
public List<CvPoint> HoughLines_Point(IplImage src, int edge1, int edge2, int line1, int line2, int line3)
{
    // Detects up to 10 line segments in the ROI of 'src' with the
    // probabilistic Hough transform and returns their endpoints
    // (two consecutive CvPoints per detected segment).
    //
    // edge1/edge2 : Canny hysteresis thresholds
    // line1       : Hough accumulator threshold
    // line2       : minimum segment length
    // line3       : maximum gap between collinear segments
    //
    // On failure a message box names this method and the exception is rethrown.
    try
    {
        List<CvPoint> LinePoints = new List<CvPoint>();

        // BUG FIX: resources were disposed manually at the end of the method,
        // so an exception thrown part-way leaked all native handles.
        // 'using' guarantees cleanup on every path.
        using (IplImage srcImgStd = new IplImage(src.ROI.Size, BitDepth.U8, 3))
        using (IplImage srcImgGray = new IplImage(src.ROI.Size, BitDepth.U8, 1))
        {
            Cv.Copy(src, srcImgStd);
            Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);
            Cv.Canny(srcImgGray, srcImgGray, edge1, edge2, ApertureSize.Size3);

            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq houghLines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic,
                                                          1, Math.PI / 180, line1, line2, line3);

                int limit = Math.Min(houghLines.Total, 10);
                for (int i = 0; i < limit; i++)
                {
                    // Probabilistic output elements are endpoint pairs
                    // (the polar read in the original was unused and removed).
                    CvLineSegmentPoint seg = houghLines.GetSeqElem<CvLineSegmentPoint>(i).Value;
                    LinePoints.Add(seg.P1);
                    LinePoints.Add(seg.P2);
                }
                houghLines.Dispose();
            }
        }
        return LinePoints;
    }
    catch (Exception e)
    {
        MessageBox.Show(MethodBase.GetCurrentMethod().Name + " " + e.Message);
        throw;
    }
}
/// <summary>
/// sample of C style wrapper
/// </summary>
private void SampleC()
{
    // cvHoughLines2: detect lines with both the standard and the probabilistic
    // Hough transform. Parameter values are tuned for the sample image.

    // (1) load the images
    using (IplImage grayImg = new IplImage(Const.ImageGoryokaku, LoadMode.GrayScale))
    using (IplImage stdImg = new IplImage(Const.ImageGoryokaku, LoadMode.Color))
    using (IplImage probImg = stdImg.Clone())
    {
        // (2) pre-processing for the Hough transform
        Cv.Canny(grayImg, grayImg, 50, 200, ApertureSize.Size3);

        using (CvMemStorage storage = new CvMemStorage())
        {
            // (3) standard Hough transform: detect lines and draw them
            CvSeq lines = grayImg.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);

            int count = Math.Min(lines.Total, 10);
            for (int i = 0; i < count; i++)
            {
                CvLineSegmentPolar polar = lines.GetSeqElem<CvLineSegmentPolar>(i).Value;
                float rho = polar.Rho;
                float theta = polar.Theta;
                double cosT = Math.Cos(theta);
                double sinT = Math.Sin(theta);
                double cx = cosT * rho;
                double cy = sinT * rho;

                // extend the infinite line 1000 px in both directions for drawing
                CvPoint a = new CvPoint { X = Cv.Round(cx + 1000 * (-sinT)), Y = Cv.Round(cy + 1000 * (cosT)) };
                CvPoint b = new CvPoint { X = Cv.Round(cx - 1000 * (-sinT)), Y = Cv.Round(cy - 1000 * (cosT)) };
                stdImg.Line(a, b, CvColor.Red, 3, LineType.AntiAlias, 0);
            }

            // (4) probabilistic Hough transform: detect segments and draw them
            lines = grayImg.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 10);
            for (int i = 0; i < lines.Total; i++)
            {
                CvLineSegmentPoint seg = lines.GetSeqElem<CvLineSegmentPoint>(i).Value;
                probImg.Line(seg.P1, seg.P2, CvColor.Red, 3, LineType.AntiAlias, 0);
            }
        }

        // (5) open result windows and wait for a key press
        using (new CvWindow("Hough_line_standard", WindowMode.AutoSize, stdImg))
        using (new CvWindow("Hough_line_probabilistic", WindowMode.AutoSize, probImg))
        {
            CvWindow.WaitKey(0);
        }
    }
}