/// <summary>
/// Detects up to six line segments with the probabilistic Hough transform and
/// draws them in red on a clone of the source image.
/// </summary>
/// <param name="src">Input BGR image.</param>
/// <param name="canny1">Lower threshold for the Canny edge detector.</param>
/// <param name="canny2">Upper threshold for the Canny edge detector.</param>
/// <param name="thresh">Accumulator threshold passed to HoughLines2.</param>
/// <param name="sideData">Orientation hint: 0 or 2 selects half the image width
/// as the minimum line length, any other value half the height.</param>
/// <returns>The result image (also stored in the houghLine field).</returns>
public IplImage HoughLines_Point08(IplImage src, int canny1, int canny2, int thresh, int sideData)
{
    List<CvPoint> LinePoints = new List<CvPoint>();

    // BUG FIX: the original condition was (sideData == 0 && sideData == 2),
    // which is always false, so the width branch could never be taken.
    int lineMinLength = 0;
    if (sideData == 0 || sideData == 2)
    {
        lineMinLength = src.Width / 2;
    }
    else
    {
        lineMinLength = src.Height / 2;
    }
    // NOTE(review): lineMinLength is still not passed to HoughLines2 below
    // (param1 is the hard-coded 5) — confirm whether it was meant to be used.

    // (1) clone the source and build a grayscale working image
    using (IplImage srcImgStd = src.Clone())
    using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
    {
        Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

        // (2) Canny edge image feeds the Hough transform
        Cv.Canny(srcImgGray, srcImgGray, canny1, canny2, ApertureSize.Size3);
        houghLine = srcImgGray.Clone();

        using (CvMemStorage storage = new CvMemStorage())
        {
            LinePoints.Clear();

            // (3) probabilistic Hough transform: output elements are segments
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, thresh, 5, 0);

            int limit = Math.Min(lines.Total, 6);
            for (int i = 0; i < limit; i++)
            {
                CvPoint pt1 = lines.GetSeqElem<CvLineSegmentPoint>(i).Value.P1;
                CvPoint pt2 = lines.GetSeqElem<CvLineSegmentPoint>(i).Value.P2;

                srcImgStd.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
                LinePoints.Add(pt1);
                LinePoints.Add(pt2);
            }

            // LEAK FIX: the original cloned srcImgStd on every iteration,
            // leaking all but the last clone. A single clone after the loop
            // produces the same final image.
            if (limit > 0)
            {
                houghLine = srcImgStd.Clone();
            }
        }
    }
    return houghLine;
}
/// <summary>
/// Draws rectangle candidates (built from grouped Hough line segments) and
/// Hough circles found in the pre-processed image onto the post-processed
/// image, then returns that image.
/// </summary>
/// <param name="preProcessedImage">Edge/feature image to run detection on.</param>
/// <param name="postProcessedImage">Image the shapes are drawn onto.</param>
/// <returns>The post-processed image with rectangles and circles drawn.</returns>
public IplImage PostProcess(IplImage preProcessedImage, IplImage postProcessedImage)
{
    using (CvMemStorage storage = new CvMemStorage())
    {
        // Probabilistic Hough transform; collect every non-null segment.
        CvSeq seq = preProcessedImage.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 30, 40, 15);
        var segments = new List<CvLineSegmentPoint>();
        for (int index = 0; index < seq.Total; index++)
        {
            var element = seq.GetSeqElem<CvLineSegmentPoint>(index);
            if (element == null)
            {
                continue;
            }
            segments.Add(element.Value);
        }

        // Group the segments into rectangle candidates, filter, then draw.
        var groups = RectangleFinder.GroupSegments(segments);
        var rectangles = RectangleFinder.Convert(groups);
        RectangleFinder.Filter(rectangles);
        foreach (var rectangle in rectangles)
        {
            postProcessedImage.Rectangle(rectangle, CvColor.Red, 3, LineType.AntiAlias);
        }

        // Hough circle detection on the same input image.
        CvSeq<CvCircleSegment> circles = preProcessedImage.HoughCircles(storage, HoughCirclesMethod.Gradient, 1,
                                                                        15, 100, 30, 9, 51);
        foreach (CvCircleSegment circle in circles)
        {
            postProcessedImage.Circle(circle.Center, (int)circle.Radius, CvColor.Red, 3);
        }
    }
    return postProcessedImage;
}
/// <summary>
/// Binarizes and edge-detects the source, then detects up to 20 segments with
/// the probabilistic Hough transform and draws them in yellow on a BGR copy
/// of the edge image.
/// </summary>
/// <param name="src">Input image.</param>
/// <returns>The BGR result image (stored in the houline field).</returns>
public IplImage HoughLines(IplImage src)
{
    houline = new IplImage(src.Size, BitDepth.U8, 3);
    bin = this.Binary(src, 150);

    // Morphological cleanup before edge detection.
    Cv.Dilate(bin, bin, null, 1);
    Cv.Erode(bin, bin, null, 3);
    Cv.Dilate(bin, bin, null, 2);
    Cv.Canny(bin, bin, 0, 255);
    Cv.CvtColor(bin, houline, ColorConversion.GrayToBgr);

    // LEAK FIX: the storage was never disposed; 'using' releases the native
    // memory when the method exits.
    using (CvMemStorage Storage = new CvMemStorage())
    {
        // NOTE(review): the transform runs on the 'canny' field, not on the
        // freshly computed 'bin' edge image above — confirm this is intended.
        CvSeq lines = canny.HoughLines2(Storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 140, 50, 10);

        for (int i = 0; i < Math.Min(lines.Total, 20); i++)
        {
            CvLineSegmentPoint element = lines.GetSeqElem<CvLineSegmentPoint>(i).Value;
            houline.Line(element.P1, element.P2, CvColor.Yellow, 1, LineType.AntiAlias);
        }
    }
    return houline;
}
/// <summary>
/// Sample of the C-style wrapper: runs the standard and the probabilistic
/// Hough transforms on the Goryokaku sample image and shows both results.
/// </summary>
private void SampleC()
{
    // cvHoughLines2
    using (IplImage srcImgGray = new IplImage(FilePath.Image.Goryokaku, LoadMode.GrayScale))
    using (IplImage srcImgStd = new IplImage(FilePath.Image.Goryokaku, LoadMode.Color))
    using (IplImage srcImgProb = srcImgStd.Clone())
    {
        // Edge image shared by both transforms.
        Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);

        using (CvMemStorage storage = new CvMemStorage())
        {
            // Standard algorithm: output elements are (rho, theta) pairs.
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);

            int count = Math.Min(lines.Total, 10);
            for (int index = 0; index < count; index++)
            {
                CvLineSegmentPolar polar = lines.GetSeqElem<CvLineSegmentPolar>(index).Value;
                double cosT = Math.Cos(polar.Theta);
                double sinT = Math.Sin(polar.Theta);
                double cx = cosT * polar.Rho;
                double cy = sinT * polar.Rho;
                // Extend the infinite line 1000px in both directions so it
                // spans the whole image.
                CvPoint from = new CvPoint
                {
                    X = Cv.Round(cx + 1000 * (-sinT)),
                    Y = Cv.Round(cy + 1000 * (cosT))
                };
                CvPoint to = new CvPoint
                {
                    X = Cv.Round(cx - 1000 * (-sinT)),
                    Y = Cv.Round(cy - 1000 * (cosT))
                };
                srcImgStd.Line(from, to, CvColor.Red, 3, LineType.AntiAlias, 0);
            }

            // Probabilistic algorithm: output elements are finite segments.
            lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 10);
            for (int index = 0; index < lines.Total; index++)
            {
                CvLineSegmentPoint segment = lines.GetSeqElem<CvLineSegmentPoint>(index).Value;
                srcImgProb.Line(segment.P1, segment.P2, CvColor.Red, 3, LineType.AntiAlias, 0);
            }
        }

        // Show both results side by side until a key is pressed.
        using (new CvWindow("Hough_line_standard", WindowMode.AutoSize, srcImgStd))
        using (new CvWindow("Hough_line_probabilistic", WindowMode.AutoSize, srcImgProb))
        {
            CvWindow.WaitKey(0);
        }
    }
}
//HoughLines2()
// Detects lines / line segments via the Hough transform. Parameter reference:
// 1. CvArr* image       : the image to run the transform on.
// 2. void* line_storage : storage area the detected lines are written to.
// 3. int method         : selects one of the three Hough variants.
// 4. double rho / theta : sampling granularity (rho = 1 scans per pixel,
//                         theta = PI/180 scans per degree).
// 5. int threshold      : accumulator threshold — a line is reported when the
//                         number of overlapping curves in Hough space exceeds
//                         it; larger values make the criterion stricter.
// 6. double param1      : for Probabilistic, the minimum line length.
// 7. double param2      : the original note called this the maximum line
//                         length for Probabilistic — per the OpenCV docs it is
//                         actually the maximum gap between collinear segments.
/// <summary>
/// Detects lines with the standard Hough transform, draws the first detected
/// line on a clone of the source and on a clone of the box image, and returns
/// the latter (the edge image is returned when no line is found).
/// </summary>
/// <param name="src">Input BGR image.</param>
/// <param name="boxImage">Image a copy of which gets the line drawn on it.</param>
/// <param name="resultImage">NOTE(review): never read or assigned — confirm intent.</param>
/// <returns>The result image (stored in the houghLine field).</returns>
public IplImage HoughLines(IplImage src, IplImage boxImage, ref IplImage resultImage)
{
    // cvHoughLines2
    // NOTE(review): orImage is never disposed; lineMinLength/lineMaxLength are
    // unused; the loop returns on its very first iteration — confirm intent.
    IplImage orImage = boxImage.Clone();

    // (1) read the image
    using (IplImage srcImgStd = src.Clone())
    using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
    {
        Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

        // (2) Canny edge pre-processing for the Hough transform
        Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);
        houghLine = srcImgGray.Clone();

        int lineMinLength = srcImgStd.Width / 2;
        int lineMaxLength = srcImgStd.Width;
        using (CvMemStorage storage = new CvMemStorage())
        {
            // (3) standard Hough transform: detect lines and draw them
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);

            int limit = Math.Min(lines.Total, 10);
            for (int i = 0; i < limit; i++)
            {
                CvLineSegmentPolar elem = lines.GetSeqElem<CvLineSegmentPolar>(i).Value;
                float rho = elem.Rho;
                float theta = elem.Theta;
                double a = Math.Cos(theta);
                double b = Math.Sin(theta);
                double x0 = a * rho;
                double y0 = b * rho;
                // Project the infinite (rho, theta) line across the image.
                CvPoint pt1 = new CvPoint
                {
                    X = Cv.Round(x0 + src.Width * (-b)),
                    Y = Cv.Round(y0 + src.Height * (a))
                };
                CvPoint pt2 = new CvPoint
                {
                    X = Cv.Round(x0 - src.Width * (-b)),
                    Y = Cv.Round(y0 - src.Height * (a))
                };

                // Snap endpoints that fell outside the image back onto its borders.
                if (pt1.X < 1)
                {
                    pt1.X = 0;
                    pt2.X = src.Width;
                }
                if (pt2.X < 1)
                {
                    pt1.X = src.Width;
                    pt2.X = 0;
                }
                if (pt1.Y < 1)
                {
                    pt1.Y = 0;
                    pt2.Y = src.Height;
                }
                if (pt2.Y < 1)
                {
                    pt1.Y = src.Height;
                    pt2.Y = 0;
                }

                srcImgStd.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
                houghLine = srcImgStd.Clone();
                orImage.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
                houghLine = orImage.Clone();
                // Returns after the very first detected line.
                return(houghLine);
            }
        }
    }
    return(houghLine);
}
/// <summary>
/// Detects up to three dominant lines with the standard Hough transform,
/// projects them across the whole image, and draws them in red.
/// </summary>
/// <param name="src">Input BGR image.</param>
/// <param name="canny1">Lower Canny threshold.</param>
/// <param name="canny2">Upper Canny threshold.</param>
/// <param name="thresh">Accumulator threshold passed to HoughLines2.</param>
/// <returns>The result image (stored in the houghLine field).</returns>
public IplImage HoughLines_Line(IplImage src, int canny1, int canny2, int thresh)
{
    List<CvPoint> LinePoints = new List<CvPoint>();

    // (1) clone the source and build a grayscale working image
    using (IplImage srcImgStd = src.Clone())
    using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
    {
        Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

        // (2) Canny edge image feeds the Hough transform
        Cv.Canny(srcImgGray, srcImgGray, canny1, canny2, ApertureSize.Size3);
        houghLine = srcImgGray.Clone();

        using (CvMemStorage storage = new CvMemStorage())
        {
            LinePoints.Clear();

            // (3) standard Hough transform: output elements are (rho, theta) pairs
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, thresh, 0, 0);

            int limit = Math.Min(lines.Total, 3);
            for (int i = 0; i < limit; i++)
            {
                CvLineSegmentPolar elem = lines.GetSeqElem<CvLineSegmentPolar>(i).Value;
                float rho = elem.Rho;
                float theta = elem.Theta;
                double a = Math.Cos(theta), b = Math.Sin(theta);
                double x0 = a * rho, y0 = b * rho;

                // Project the infinite line to the image extents.
                CvPoint pt1, pt2;
                pt1.X = Cv.Round(x0 + srcImgStd.Width * (-b));
                pt1.Y = Cv.Round(y0 + srcImgStd.Height * (a));
                pt2.X = Cv.Round(x0 - srcImgStd.Width * (-b));
                pt2.Y = Cv.Round(y0 - srcImgStd.Height * (a));

                // Snap endpoints that fell outside the image back onto its borders.
                if (pt1.X < 0)
                {
                    pt1.X = 0;
                    pt2.X = src.Width;
                }
                else if (pt2.X < 0)
                {
                    pt1.X = src.Width;
                    pt2.X = 0;
                }
                if (pt1.Y < 0)
                {
                    pt1.Y = 0;
                    pt2.Y = src.Height;
                }
                else if (pt2.Y < 0)
                {
                    pt1.Y = src.Height;
                    pt2.Y = 0;
                }

                srcImgStd.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
                LinePoints.Add(pt1);
                LinePoints.Add(pt2);
            }

            // LEAK FIX: the original cloned srcImgStd on every iteration,
            // leaking all but the last clone. A single clone after the loop
            // produces the same final image.
            if (limit > 0)
            {
                houghLine = srcImgStd.Clone();
            }
        }
    }
    return houghLine;
}
/// <summary>
/// Runs the probabilistic Hough transform on the ROI of the source image and
/// returns the endpoints of up to ten detected segments (P1 then P2 per segment).
/// </summary>
/// <param name="src">Input BGR image; its ROI is processed.</param>
/// <param name="edge1">Lower Canny threshold.</param>
/// <param name="edge2">Upper Canny threshold.</param>
/// <param name="line1">Accumulator threshold passed to HoughLines2.</param>
/// <param name="line2">Minimum line length (probabilistic param1).</param>
/// <param name="line3">Maximum gap between collinear segments (probabilistic param2).</param>
/// <returns>Endpoint list with two entries per detected segment.</returns>
/// <exception cref="Exception">Rethrown after showing a message box.</exception>
public List<CvPoint> HoughLines_Point(IplImage src, int edge1, int edge2, int line1, int line2, int line3)
{
    try
    {
        List<CvPoint> LinePoints = new List<CvPoint>();

        // LEAK FIX: the original disposed these manually at the end of the
        // try block, so any exception in between leaked all of them;
        // 'using' guarantees disposal on every path.
        using (IplImage srcImgStd = new IplImage(src.ROI.Size, BitDepth.U8, 3))
        using (IplImage srcImgGray = new IplImage(src.ROI.Size, BitDepth.U8, 1))
        using (CvMemStorage storage = new CvMemStorage())
        {
            Cv.Copy(src, srcImgStd);
            Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);
            Cv.Canny(srcImgGray, srcImgGray, edge1, edge2, ApertureSize.Size3);

            using (CvSeq houghLines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, line1, line2, line3))
            {
                LinePoints.Clear();
                int limit = Math.Min(houghLines.Total, 10);
                for (int i = 0; i < limit; i++)
                {
                    // Probabilistic output stores segment endpoints.
                    CvPoint pt1 = houghLines.GetSeqElem<CvLineSegmentPoint>(i).Value.P1;
                    CvPoint pt2 = houghLines.GetSeqElem<CvLineSegmentPoint>(i).Value.P2;
                    LinePoints.Add(pt1);
                    LinePoints.Add(pt2);
                }
            }
        }
        return LinePoints;
    }
    catch (Exception e)
    {
        MessageBox.Show(MethodBase.GetCurrentMethod().Name + " " + e.Message);
        throw;
    }
}
/// <summary>
/// Lane detection: finds one yellow and one white lane line with the
/// probabilistic Hough transform, stretches the chosen segment to the lower
/// part of the frame, and draws both on <paramref name="src"/>. When a color
/// finds no candidate, the previously accepted segment is redrawn instead.
/// </summary>
/// <param name="src">Current BGR video frame; overlays are drawn on it in place.</param>
/// <returns>The same frame with the lane overlays drawn.</returns>
public IplImage HoughLines(IplImage src) // lane detection via the Hough transform
{
    flag_yellow = 0;
    flag_white = 0;

    // Pre-processing: crop the road region, then derive a yellow-lane image
    // (YellowTransform/HSV) and a white-lane image (WhiteTransform/HLS).
    slice = this.SliceImage(src);
    hsv = this.YellowTransform(slice);
    hls = this.WhiteTransform(slice);

    // ---------- Yellow lane ----------
    blur = this.BlurImage(hsv);
    bin = this.Binary(hsv, 50); // binarize (adaptive-thresholding style)
    Cv.Canny(bin, bin, 50, 200, ApertureSize.Size3);

    CvMemStorage Storage = new CvMemStorage();

    /* probabilistic Hough detection */
    CvSeq lines = bin.HoughLines2(Storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 100, 100);

    // LineAngle holds each segment's slope (degrees) in detection order;
    // LineAngle_q is a copy that gets sorted so the extreme slopes are known.
    double[] LineAngle = new double[lines.Total];
    double[] LineAngle_q = new double[lines.Total];
    // NOTE(review): 'lines' was already dereferenced above, so this null
    // check is ineffective.
    if (lines != null)
    {
        for (int i = 0; i < lines.Total; i++) // examine every detected segment
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch the segment data
            int dx = elem.P2.X - elem.P1.X; // x delta
            int dy = elem.P2.Y - elem.P1.Y; // y delta
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope in degrees
            LineAngle[i] = angle;
            LineAngle_q[i] = angle;
        }
        if (lines.Total != 0)
        {
            Quick_Sort(LineAngle_q, lines.Total);
        }
        for (int i = 0; i < lines.Total; i++)
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch the segment data
            int dx = elem.P2.X - elem.P1.X; // x delta
            int dy = elem.P2.Y - elem.P1.Y; // y delta
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope in degrees
            // Only the segments with the largest or the smallest slope qualify.
            if (LineAngle_q[lines.Total - 1] == LineAngle[i] || LineAngle_q[0] == LineAngle[i])
            {
                // Effectively P1.Y != P2.Y: skip perfectly horizontal segments.
                if (elem.P1.Y > elem.P2.Y || elem.P1.Y < elem.P2.Y) // P1: start point, P2: end point
                {
                    if (Math.Abs(angle) >= 14 && Math.Abs(angle) <= 80)
                    {
                        Cv.PutText(src, "Yellow angle : " + angle.ToString(), new CvPoint(100, 50), new CvFont(FontFace.HersheyComplex, 1, 1), CvColor.Yellow);
                        P1 = elem.P1;
                        P2 = elem.P2;
                        d_dx = Math.Abs(P2.X - P1.X);
                        d_dy = Math.Abs(P2.Y - P1.Y);
                        // Walk both endpoints along the segment direction in
                        // 1% steps until the lower endpoint reaches the bottom
                        // of the frame and the upper one reaches 5/8 height.
                        while (true)
                        {
                            if (P1.Y > P2.Y) // P1 is the lower endpoint
                            {
                                if (P1.Y < src.Height)
                                {
                                    if (P1.X < P2.X)
                                    {
                                        P1.X -= d_dx / 100;
                                        P1.Y += d_dy / 100;
                                    }
                                    else
                                    {
                                        P1.X += d_dx / 100;
                                        P1.Y += d_dy / 100;
                                    }
                                }
                                else if (P2.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X > P2.X)
                                    {
                                        P2.X -= d_dx / 100;
                                        P2.Y -= d_dy / 100;
                                    }
                                    else
                                    {
                                        P2.X += d_dx / 100;
                                        P2.Y -= d_dy / 100;
                                    }
                                }
                                else
                                {
                                    break;
                                }
                            }
                            else // P2 is the lower endpoint
                            {
                                if (P2.Y < src.Height)
                                {
                                    if (P1.X > P2.X)
                                    {
                                        P2.X -= d_dx / 100;
                                        P2.Y += d_dy / 100;
                                    }
                                    else
                                    {
                                        P2.X += d_dx / 100;
                                        P2.Y += d_dy / 100;
                                    }
                                }
                                else if (P1.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X < P2.X)
                                    {
                                        P1.X -= d_dx / 100;
                                        P1.Y -= d_dy / 100;
                                    }
                                    else
                                    {
                                        P1.X += d_dx / 100;
                                        P1.Y -= d_dy / 100;
                                    }
                                }
                                else
                                {
                                    break;
                                }
                            }
                            d_dx = Math.Abs(P2.X - P1.X);
                            d_dy = Math.Abs(P2.Y - P1.Y);
                        }
                        flag_yellow = 1;
                        prev_elemLeft.P1 = P1;
                        prev_elemLeft.P2 = P2;
                        // The endpoints could also inform the driver, e.g. a
                        // sharply changing angle suggests lane departure.
                        src.Line(P1, P2, CvColor.Yellow, 10, LineType.AntiAlias, 0);
                        break;
                    }
                }
            }
        }
    }

    // ---------- White lane (same procedure on the HLS image) ----------
    blur = this.BlurImage(hls);
    bin = this.Binary(hls, 50); // binarize (adaptive-thresholding style)
    Cv.Canny(bin, bin, 50, 150, ApertureSize.Size3);

    /* probabilistic Hough detection */
    lines = bin.HoughLines2(Storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 100, 100);
    LineAngle = new double[lines.Total];
    LineAngle_q = new double[lines.Total];
    if (lines != null)
    {
        for (int i = 0; i < lines.Total; i++) // examine every detected segment
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch the segment data
            int dx = elem.P2.X - elem.P1.X; // x delta
            int dy = elem.P2.Y - elem.P1.Y; // y delta
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope in degrees
            LineAngle[i] = angle;
            LineAngle_q[i] = angle;
        }
        if (lines.Total != 0)
        {
            Quick_Sort(LineAngle_q, lines.Total);
        }
        for (int i = 0; i < lines.Total; i++)
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch the segment data
            int dx = elem.P2.X - elem.P1.X; // x delta
            int dy = elem.P2.Y - elem.P1.Y; // y delta
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope in degrees
            // Only the segments with the largest or the smallest slope qualify.
            if (LineAngle_q[lines.Total - 1] == LineAngle[i] || LineAngle_q[0] == LineAngle[i])
            {
                // Effectively P1.Y != P2.Y: skip perfectly horizontal segments.
                if (elem.P1.Y > elem.P2.Y || elem.P1.Y < elem.P2.Y) // P1: start point, P2: end point
                {
                    if (Math.Abs(angle) >= 14 && Math.Abs(angle) <= 80)
                    {
                        // Note: "Whtie" typo kept — it is a runtime string.
                        Cv.PutText(src, "Whtie angle : " + angle.ToString(), new CvPoint(100, 100), new CvFont(FontFace.HersheyComplex, 1, 1), CvColor.White);
                        P1 = elem.P1;
                        P2 = elem.P2;
                        d_dx = Math.Abs(P2.X - P1.X);
                        d_dy = Math.Abs(P2.Y - P1.Y);
                        // Same endpoint-walking loop as for the yellow lane.
                        while (true)
                        {
                            if (P1.Y > P2.Y) // P1 is the lower endpoint
                            {
                                if (P1.Y < src.Height)
                                {
                                    if (P1.X < P2.X)
                                    {
                                        P1.X -= d_dx / 100;
                                        P1.Y += d_dy / 100;
                                    }
                                    else
                                    {
                                        P1.X += d_dx / 100;
                                        P1.Y += d_dy / 100;
                                    }
                                }
                                else if (P2.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X > P2.X)
                                    {
                                        P2.X -= d_dx / 100;
                                        P2.Y -= d_dy / 100;
                                    }
                                    else
                                    {
                                        P2.X += d_dx / 100;
                                        P2.Y -= d_dy / 100;
                                    }
                                }
                                else
                                {
                                    break;
                                }
                            }
                            else // P2 is the lower endpoint
                            {
                                if (P2.Y < src.Height)
                                {
                                    if (P1.X > P2.X)
                                    {
                                        P2.X -= d_dx / 100;
                                        P2.Y += d_dy / 100;
                                    }
                                    else
                                    {
                                        P2.X += d_dx / 100;
                                        P2.Y += d_dy / 100;
                                    }
                                }
                                else if (P1.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X < P2.X)
                                    {
                                        P1.X -= d_dx / 100;
                                        P1.Y -= d_dy / 100;
                                    }
                                    else
                                    {
                                        P1.X += d_dx / 100;
                                        P1.Y -= d_dy / 100;
                                    }
                                }
                                else
                                {
                                    break;
                                }
                            }
                            d_dx = Math.Abs(P2.X - P1.X);
                            d_dy = Math.Abs(P2.Y - P1.Y);
                        }
                        flag_white = 1;
                        prev_elemRight.P1 = P1;
                        prev_elemRight.P2 = P2;
                        // The endpoints could also inform the driver, e.g. a
                        // sharply changing angle suggests lane departure.
                        src.Line(P1, P2, CvColor.White, 10, LineType.AntiAlias, 0);
                        break;
                    }
                }
            }
        }
    }

    // Fall back to the previous frame's lane when nothing was accepted.
    if (flag_yellow == 0)
    {
        src.Line(prev_elemLeft.P1, prev_elemLeft.P2, CvColor.Yellow, 10, LineType.AntiAlias, 0);
    }
    if (flag_white == 0)
    {
        src.Line(prev_elemRight.P1, prev_elemRight.P2, CvColor.White, 10, LineType.AntiAlias, 0);
    }

    Cv.ReleaseMemStorage(Storage);
    //vehicle_detection = this.VDConvert.VehicleDetect(src);
    // Presumably releases the intermediate field images — TODO confirm what
    // this Dispose() actually frees.
    Dispose();
    return(src);
}
/// <summary>
/// Sample of the C-style wrapper: detects lines with both the standard and
/// the probabilistic Hough transform and displays the two results. The
/// parameter values are tuned for the sample image.
/// </summary>
private void SampleC()
{
    // cvHoughLines2
    // (1) load the images
    using (IplImage srcImgGray = new IplImage(Const.ImageGoryokaku, LoadMode.GrayScale))
    using (IplImage srcImgStd = new IplImage(Const.ImageGoryokaku, LoadMode.Color))
    using (IplImage srcImgProb = srcImgStd.Clone())
    {
        // (2) pre-processing for the Hough transform
        Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);

        using (CvMemStorage storage = new CvMemStorage())
        {
            // (3) standard Hough transform: draw up to ten (rho, theta) lines
            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);
            int drawCount = Math.Min(lines.Total, 10);
            for (int idx = 0; idx < drawCount; idx++)
            {
                CvLineSegmentPolar polar = lines.GetSeqElem<CvLineSegmentPolar>(idx).Value;
                double cosTheta = Math.Cos(polar.Theta);
                double sinTheta = Math.Sin(polar.Theta);
                double baseX = cosTheta * polar.Rho;
                double baseY = sinTheta * polar.Rho;
                // Stretch the line 1000px either way so it crosses the image.
                CvPoint start = new CvPoint
                {
                    X = Cv.Round(baseX + 1000 * (-sinTheta)),
                    Y = Cv.Round(baseY + 1000 * (cosTheta))
                };
                CvPoint end = new CvPoint
                {
                    X = Cv.Round(baseX - 1000 * (-sinTheta)),
                    Y = Cv.Round(baseY - 1000 * (cosTheta))
                };
                srcImgStd.Line(start, end, CvColor.Red, 3, LineType.AntiAlias, 0);
            }

            // (4) probabilistic Hough transform: draw every detected segment
            lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 10);
            for (int idx = 0; idx < lines.Total; idx++)
            {
                CvLineSegmentPoint segment = lines.GetSeqElem<CvLineSegmentPoint>(idx).Value;
                srcImgProb.Line(segment.P1, segment.P2, CvColor.Red, 3, LineType.AntiAlias, 0);
            }
        }

        // (5) open result windows and wait for a key press
        using (new CvWindow("Hough_line_standard", WindowMode.AutoSize, srcImgStd))
        using (new CvWindow("Hough_line_probabilistic", WindowMode.AutoSize, srcImgProb))
        {
            CvWindow.WaitKey(0);
        }
    }
}
/// <summary>
/// Lane-detection demo: reads frames from a video file, runs Canny plus the
/// probabilistic Hough transform on the lower half of each frame, and draws
/// segments with a large vertical extent in green until a key is pressed.
/// </summary>
static void Main()
{
    // CvCapture cap = CvCapture.FromFile("video.avi");
    CvCapture cap = CvCapture.FromFile("road_3.avi");
    CvWindow w = new CvWindow("Lane Detection");
    CvWindow canny = new CvWindow("Lane Detection_2");
    // NOTE(review): same title as 'w' — the two windows may collide.
    CvWindow hough = new CvWindow("Lane Detection");

    CvMemStorage storage = new CvMemStorage();
    CvSeq lines;

    // Only used by a removed Haar vehicle-detection experiment; kept so the
    // demo still fails fast when the cascade file is missing.
    CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_cars3.xml");

    while (CvWindow.WaitKey(10) < 0)
    {
        IplImage src = cap.QueryFrame();
        // BUG FIX: QueryFrame returns null at the end of the stream; the
        // original dereferenced it and crashed.
        if (src == null)
        {
            break;
        }

        // LEAK FIX: per-frame temporaries were allocated every iteration and
        // never disposed; 'using' releases them each pass.
        using (IplImage halfFrame = new IplImage(new CvSize(src.Size.Width / 2, src.Size.Height / 2), BitDepth.U8, 3))
        using (IplImage gray = new IplImage(src.Size, BitDepth.U8, 1))
        using (IplImage dstCanny = new IplImage(src.Size, BitDepth.U8, 1))
        {
            Cv.PyrDown(src, halfFrame, CvFilter.Gaussian5x5);

            // Crop off the top half of the frame: only the lower portion of
            // the video is interesting for lane detection.
            int halfHeight = src.Height / 2;
            src.SetROI(new CvRect(0, halfHeight - 0, src.Width - 1, src.Height - 1));
            gray.SetROI(src.GetROI());
            dstCanny.SetROI(src.GetROI());

            src.CvtColor(gray, ColorConversion.BgrToGray);
            Cv.Smooth(gray, gray, SmoothType.Gaussian, 5, 5);
            Cv.Canny(gray, dstCanny, 50, 200, ApertureSize.Size3);

            storage.Clear();
            lines = dstCanny.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 100);
            for (int i = 0; i < lines.Total; i++)
            {
                CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value;
                // Keep only segments with a vertical extent above 50px.
                if (elem.P1.Y > elem.P2.Y + 50 || elem.P1.Y < elem.P2.Y - 50)
                {
                    src.Line(elem.P1, elem.P2, CvColor.Green, 9, LineType.Link8, 0);
                }
            }

            src.ResetROI();
            storage.Clear();
            w.Image = src;
        }
    }

    // Release the long-lived native resources.
    storage.Dispose();
    cascade.Dispose();
    cap.Dispose();
    w.Dispose();
    canny.Dispose();
    hough.Dispose();
}