/// <summary>
/// Evaluation function.
/// </summary>
/// <param name="pos">Candidate position</param>
/// <param name="maxval">Blob area</param>
/// <param name="pos_pre">Previous target position</param>
/// <param name="distance0">Distance constant</param>
public static double Cal_Evaluate(CvPoint2D64f pos, double maxval, CvPoint2D64f pos_pre, double distance0)
{
    double distance = Cal_distance(pos, pos_pre);
    double eval = maxval * Math.Exp(-distance / distance0);
    return eval;
}
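A minimal usage sketch, assuming Cal_distance is the Euclidean helper shown later in this listing: the score rewards area but discounts it exponentially with distance from the previous target, so a farther blob needs roughly e^(d/d0) times the area to win.

    // Hypothetical usage: score two candidate blobs against the previous target.
    CvPoint2D64f previous = new CvPoint2D64f(320, 240);
    double d0 = 30.0; // distance constant in pixels (assumed tuning value)

    double evalNear = Cal_Evaluate(new CvPoint2D64f(325, 242), 400, previous, d0);
    double evalFar  = Cal_Evaluate(new CvPoint2D64f(500, 100), 900, previous, d0);
    Console.WriteLine(evalNear > evalFar ? "near blob wins" : "far blob wins"); // near blob wins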
/// <summary>
/// Select the best blob.
/// </summary>
/// <remarks>
/// The best blob is chosen from the blob area and the distance from the previous target.
/// </remarks>
public int mesure(CvBlobs blobs)
{
    if (blobs.Count == 0)
    {
        return 0;
    }

    CvBlob maxBlob = blobs.GreaterBlob();
    int max_label = maxBlob.Label;
    CvPoint2D64f pos_ans = maxBlob.Centroid;
    distance0 = Cal_distance_const(distance_pre);

    if (blobs.Count > 1)
    {
        // Pick the blob that maximizes the evaluation function.
        double eval, eval_max = 0;
        foreach (var item in blobs)
        {
            eval = position_mesure.Cal_Evaluate(item.Value.Centroid, item.Value.Area, pos_pre, distance0);
            if (eval > eval_max)
            {
                eval_max = eval;
                max_label = item.Key;
                pos_ans = item.Value.Centroid;
                Console.WriteLine("{0} | Centroid:{1} Area:{2} eval:{3}", item.Key, item.Value.Centroid, item.Value.Area, eval);
                //w.WriteLine("{0} {1} {2} {3} {4}", dis, dv, i, item.Key, item.Value.Area);
            }
            // Per-candidate timing.
            //sw.Stop();
            t5 = 1000.0 * sw.ElapsedTicks / Stopwatch.Frequency;
            sw.Reset();
            sw.Start();
            Console.WriteLine(" pos_ans:{0}", pos_ans);
        }
    }

    // Asymmetric smoothing of the reference distance:
    // decay gradually toward smaller distances, jump immediately to larger ones.
    double dis = Cal_distance(pos_ans, pos_pre);
    if (distance_pre > dis)
    {
        distance_pre = (1 - dist_alpha) * distance_pre + dist_alpha * dis;
    }
    else
    {
        distance_pre = dis;
    }
    pos_pre = pos_ans;
    return max_label;
}
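The tail of mesure updates the reference distance asymmetrically: it decays slowly toward smaller distances but jumps immediately to larger ones. A numeric trace, assuming dist_alpha = 0.1:

    // Hypothetical trace of the asymmetric distance update (dist_alpha = 0.1 assumed).
    double distancePre = 40.0;
    foreach (double dis in new[] { 10.0, 10.0, 60.0 })
    {
        if (distancePre > dis)
            distancePre = 0.9 * distancePre + 0.1 * dis; // decay slowly toward smaller distances
        else
            distancePre = dis;                           // jump immediately to larger ones
        Console.WriteLine(distancePre); // 37.0, 34.3, 60.0
    }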
/// <summary>
/// Draws the convexity defects.
/// </summary>
/// <param name="img">Image to draw on</param>
/// <param name="defect">Sequence of convexity defects</param>
private void DrawDefects(IplImage img, CvSeq<CvConvexityDefect> defect)
{
    int count = 0;
    foreach (CvConvexityDefect item in defect)
    {
        CvPoint p1 = item.Start, p2 = item.End;
        double dist = GetDistance(p1, p2);
        CvPoint2D64f mid = GetMidpoint(p1, p2);
        img.DrawLine(p1, p2, CvColor.White, 3);
        img.DrawCircle(item.DepthPoint, 10, CvColor.Green, -1);
        img.DrawLine(mid, item.DepthPoint, CvColor.White, 1);
        Console.WriteLine("No:{0} Depth:{1} Dist:{2}", count, item.Depth, dist);
        count++;
    }
}
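GetDistance and GetMidpoint are not shown in this listing; plausible definitions, assuming the obvious geometric meaning:

    // Assumed helpers for DrawDefects (hypothetical, not part of the original source).
    private static double GetDistance(CvPoint p1, CvPoint p2)
    {
        double dx = p2.X - p1.X, dy = p2.Y - p1.Y;
        return Math.Sqrt(dx * dx + dy * dy); // segment length
    }

    private static CvPoint2D64f GetMidpoint(CvPoint p1, CvPoint p2)
    {
        return new CvPoint2D64f((p1.X + p2.X) / 2.0, (p1.Y + p2.Y) / 2.0);
    }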
/// <summary>
/// Set central/Hu moments and centroid value from moment values (M**)
/// </summary>
public void SetMoments()
{
    // Centroid (Area equals M00 for a binary blob)
    Centroid = new CvPoint2D64f(M10 / Area, M01 / Area);

    // Central moments
    U11 = M11 - (M10 * M01) / M00;
    U20 = M20 - (M10 * M10) / M00;
    U02 = M02 - (M01 * M01) / M00;

    // Normalized central moments (for p + q = 2 the normalizer is M00^2)
    double m00Pow2 = M00 * M00;
    N11 = U11 / m00Pow2;
    N20 = U20 / m00Pow2;
    N02 = U02 / m00Pow2;

    // First two Hu invariants
    P1 = N20 + N02;
    double nn = N20 - N02;
    P2 = nn * nn + 4.0 * (N11 * N11);
}
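The same central moments also give the blob's principal-axis orientation via the standard formula theta = 0.5 * atan2(2*u11, u20 - u02). A minimal helper sketch (standard result, not part of the original class):

    // Blob orientation in radians from second-order central moments.
    public static double OrientationFromMoments(double u11, double u20, double u02)
    {
        return 0.5 * Math.Atan2(2.0 * u11, u20 - u02);
    }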
/// <summary>
/// Rotated-coordinate helper for the image display.
/// IN:  center coordinate (CvPoint2D64f), radius (double), rotation angle in degrees (double)
/// OUT: target coordinate (CvPoint2D64f)
/// </summary>
public CvPoint2D64f Rotation(CvPoint2D64f xy, double r, double theta)
{
    CvPoint2D64f ans = new CvPoint2D64f();
    // mmWest and mmEast currently use the same mapping, so the two
    // originally duplicated branches are merged into one condition.
    if (appSettings.CamPlatform == Platform.MT2 &&
        (udpkv.mt2mode == udpkv.mmWest || udpkv.mt2mode == udpkv.mmEast))
    {
        double rad = -(theta + 90) * Math.PI / 180.0;
        double sinth = Math.Sin(rad);
        double costh = Math.Cos(rad);
        ans.X = +sinth * r;
        ans.Y = -costh * r;
    }
    return ans + xy;
}
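A minimal usage sketch, mirroring how the display routine below calls this helper; img, roa, and theta are assumed stand-ins for the display image, the overlay radius, and the current rotation angle:

    // Hypothetical usage: draw one crosshair arm rotated by theta degrees
    // around the optical-center point, as the display routine below does.
    CvPoint2D64f center = new CvPoint2D64f(320, 240);
    CvPoint2D64f outer = Rotation(center, 1.3333 * roa, theta);
    CvPoint2D64f inner = Rotation(center, 0.3333 * roa, theta);
    Cv.Line(img, outer, inner, new CvColor(0, 205, 0));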
public CvPoint2D64f convertKinectToProjector(OpenCvSharp.CvPoint3D64f kinectPoint)
{
    CvPoint2D64f outp = new CvPoint2D64f();
    //Debug.Log("In: " + kinectPoint.X + " " + kinectPoint.Y + " " + kinectPoint.Z);

    // xp = (q1*xk + q2*yk + q3*zk + q4) / (q9*xk + q10*yk + q11*zk + 1)
    outp.X = ((result.Get<double>(0, 0) * kinectPoint.X) + (result.Get<double>(0, 1) * kinectPoint.Y) + (result.Get<double>(0, 2) * kinectPoint.Z) + result.Get<double>(0, 3)) /
             ((result.Get<double>(0, 8) * kinectPoint.X) + (result.Get<double>(0, 9) * kinectPoint.Y) + (result.Get<double>(0, 10) * kinectPoint.Z) + 1);

    // yp = (q5*xk + q6*yk + q7*zk + q8) / (q9*xk + q10*yk + q11*zk + 1)
    outp.Y = ((result.Get<double>(0, 4) * kinectPoint.X) + (result.Get<double>(0, 5) * kinectPoint.Y) + (result.Get<double>(0, 6) * kinectPoint.Z) + result.Get<double>(0, 7)) /
             ((result.Get<double>(0, 8) * kinectPoint.X) + (result.Get<double>(0, 9) * kinectPoint.Y) + (result.Get<double>(0, 10) * kinectPoint.Z) + 1);

    //Debug.Log(outp.X + " " + outp.Y);
    // (A column-major indexing variant, result.Get<double>(i, 0), was tried and left commented out.)
    return outp;
}
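The two quotients above form a 3D-to-2D projective map with eleven free parameters q1..q11 and q12 fixed at 1. A sketch of the equivalent homogeneous form; q, x, y, and z are hypothetical stand-ins for the result-matrix entries and the Kinect point:

    // Equivalent homogeneous form: [u v w]^T = P * [x y z 1]^T, then divide by w.
    // q packs the calibration row-major, i.e. q[i] = result.Get<double>(0, i) above.
    double[] q = new double[11]; // hypothetical: filled from the calibration result
    double x = 0, y = 0, z = 1;  // hypothetical Kinect point
    double w  = q[8] * x + q[9] * y + q[10] * z + 1.0;
    double xp = (q[0] * x + q[1] * y + q[2] * z + q[3]) / w;
    double yp = (q[4] * x + q[5] * y + q[6] * z + q[7]) / w;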
/// <summary>
/// Returns the sub-pixel value of a single-channel image. When the coordinate has no
/// fractional part, the result equals the value at that coordinate.
/// </summary>
/// <param name="mat">Single-channel image</param>
/// <param name="point">Coordinate</param>
/// <param name="invalidValue">Value treated as invalid</param>
/// <returns>Bilinearly interpolated value, or null if out of range or any neighbor is invalid.</returns>
public static double? Get2DSubPixel(CvMat mat, CvPoint2D64f point, double invalidValue)
{
    int preX = (int)Math.Floor(point.X);
    int preY = (int)Math.Floor(point.Y);
    int postX = preX == point.X ? preX : preX + 1;
    int postY = preY == point.Y ? preY : preY + 1;
    double fracX = point.X - preX;
    double fracY = point.Y - preY;
    if (postX >= mat.Cols || postY >= mat.Rows)
    {
        return null;
    }
    double depth00 = mat[preY, preX];
    double depth01 = mat[preY, postX];
    double depth10 = mat[postY, preX];
    double depth11 = mat[postY, postX];
    if (depth00 == invalidValue || depth01 == invalidValue || depth10 == invalidValue || depth11 == invalidValue)
    {
        return null;
    }
    return CalcEx.Lerp(CalcEx.Lerp(depth00, depth01, fracX), CalcEx.Lerp(depth10, depth11, fracX), fracY);
}
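Get2DSubPixel builds a bilinear interpolation out of three linear ones: two along x (one per row), then one along y. CalcEx.Lerp is not shown in this listing; a plausible definition, assuming the conventional lerp:

    // Assumed helper: linear interpolation between a and b by fraction t in [0, 1].
    public static double Lerp(double a, double b, double t)
    {
        return a + (b - a) * t;
    }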
/// <summary>
/// Image display routine (timer tick).
/// </summary>
private void timerDisplay_Tick(object sender, EventArgs e)
{
    if (this.States == STOP)
    {
        return;
    }
    //if (img_dmk3 == null) return;

    // OpenCV display routine
    if (imgdata.img != null)
    {
        // Color mode
        if (cam_color == Camera_Color.mono)
        {
            if (checkBoxDispAvg.Checked == true)
            {
                // Show the moving-average image
                double scale = 1.0;
                Cv.ConvertScale(imgAvg, img_dmk, scale);
                Cv.CvtColor(img_dmk, img_dmk3, ColorConversion.GrayToBgr);
            }
            else
            {
                Cv.CvtColor(imgdata.img, img_dmk3, ColorConversion.GrayToBgr);
            }
        }
        else
        {
            Cv.CvtColor(imgdata.img, img_dmk3, ColorConversion.BayerGbToBgr);
        }

        double k1 = 1.3333; // 4 deg
        double k2 = 0.3333; // 1 deg diameter
        double roa = appSettings.Roa;
        CvPoint2D64f OCPoint = new CvPoint2D64f(appSettings.Xoa, appSettings.Yoa);
        Cv.Circle(img_dmk3, OCPoint, (int)appSettings.Roa, new CvColor(0, 255, 0));
        CvPoint2D64f Point1;
        CvPoint2D64f Point2;
        String str;
        if (udpkv.mt2mode == udpkv.mmWest)
        {
            Point1 = Rotation(OCPoint, k1 * roa, theta_c);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(0, 205, 0));
            Cv.Circle(img_dmk3, Point1, 5, new CvColor(0, 255, 0)); // Arrow
            Point1 = Rotation(OCPoint, k1 * roa, theta_c + 90);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c + 90);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(0, 205, 0));
            Point1 = Rotation(OCPoint, k1 * roa, theta_c + 180);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c + 180);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(0, 205, 0));
            Point1 = Rotation(OCPoint, k1 * roa, theta_c + 270);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c + 270);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(230, 105, 0));
            str = String.Format("ID:{4,7:D1} W: dAz({5,6:F1},{6,6:F1}) dPix({0,6:F1},{1,6:F1})({2,6:F0})({3,0:00}), th:{7,6:F1}",
                gx, gy, max_val, max_label, id, daz, dalt, theta_c);
        }
        else
        {
            Point1 = Rotation(OCPoint, k1 * roa, theta_c);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(0, 205, 0));
            //Cv.Circle(img_dmk3, Point1, 5, new CvColor(0, 255, 0)); // Arrow
            Point1 = Rotation(OCPoint, k1 * roa, theta_c + 90);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c + 90);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(230, 105, 0));
            //Cv.Line(img_dmk3, Point1, Point2, new CvColor(0, 205, 0));
            Point1 = Rotation(OCPoint, k1 * roa, theta_c + 180);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c + 180);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(0, 205, 0));
            Cv.Circle(img_dmk3, Point1, 5, new CvColor(0, 255, 0)); // Arrow
            Point1 = Rotation(OCPoint, k1 * roa, theta_c + 270);
            Point2 = Rotation(OCPoint, k2 * roa, theta_c + 270);
            Cv.Line(img_dmk3, Point1, Point2, new CvColor(0, 205, 0));
            //Cv.Line(img_dmk3, Point1, Point2, new CvColor(230, 105, 0));
            str = String.Format("ID:{4,7:D1} E: dAz({5,6:F1},{6,6:F1}) dPix({0,6:F1},{1,6:F1})({2,6:F0})({3,0:00}), th:{7,6:F1}",
                gx, gy, max_val, max_label, id, daz, dalt, theta_c);
        }
        img_dmk3.PutText(str, new CvPoint(6, 12), font, new CvColor(0, 150, 250));
        img_dmk3.Circle(new CvPoint((int)Math.Round(gx), (int)Math.Round(gy)), 15, new CvColor(0, 100, 255));
        try
        {
            pictureBox1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(img_dmk3);
        }
        catch (System.ArgumentException)
        {
            this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, id.ToString() });
            //System.Diagnostics.Trace.WriteLine(ex.Message);
            //System.Console.WriteLine(ex.Message);
            return;
        }
        catch (System.Exception ex)
        {
            // Catch every remaining exception and show its description via an anonymous delegate.
            this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, ex.ToString() });
            //System.Diagnostics.Trace.WriteLine(ex.Message);
            //System.Console.WriteLine(ex.Message);
            return;
        }
    }

    string s = null;
    if (appSettings.CamPlatform == Platform.MT2)
    {
        //string s = string.Format("KV:[x2:{0:D6} y2:{1:D6} x2v:{2:D5} y2v:{3:D5} {4} {5}]\n", udpkv.x2pos, udpkv.y2pos, udpkv.x2v, udpkv.y2v, udpkv.binStr_status, udpkv.binStr_request);
        s = string.Format("KV:[x1:{0:D6} y1:{1:D6} Az1:{2,6:F1} Alt1:{3,6:F1}]\n", udpkv.xpos, udpkv.ypos, udpkv.az1_c, udpkv.alt1_c);
    }
    if (appSettings.CamPlatform == Platform.MT3)
    {
        s = string.Format("KV:[x2:{0:D6} y2:{1:D6} Az2:{2,6:F1} Alt2:{3,6:F1}]\n", udpkv.x2pos, udpkv.y2pos, udpkv.az2_c, udpkv.alt2_c);
    }
    label_X2Y2.Text = s;
    // label_ID.Text = max_label.ToString("00000");
    //this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, id.ToString() });
    // Status display
    //this.Invoke(new dlgSetString(ShowLabelText), new object[] { label_X2Y2, String.Format("({0},{1}", udpkv.az2_c, udpkv.alt2_c) });

    //long frame_timestamp = 0;
    //double dFramerate = 0;  // frame rate [fr/s]
    //double dExpo = 0;       // exposure [us]
    //long igain = 0;         // gain

    // Error rate
    long frame_total = 0, frame_error = 0;
    long frame_underrun = 0, frame_shoved = 0, frame_dropped = 0;
    double err_rate = 0;

    // IDS
    if (cam_maker == Camera_Maker.IDS)
    {
        cam.Timing.Framerate.GetCurrentFps(out dFramerate); // IDS
        statusRet = cam.Timing.Exposure.Get(out dExpo);     // [ms]
        dExpo *= 1000;                                      // [us]
        int ig;
        cam.Gain.Hardware.Scaled.GetMaster(out ig);
        igain = ig;
        uEye.Types.CaptureStatus captureStatus;
        cam.Information.GetCaptureStatus(out captureStatus); // IDS uEye
        frame_error = (long)captureStatus.Total;
        frame_total = (long)(imageInfo.FrameNumber - ueye_frame_number);
        //Int32 s32Value;
        //statusRet = cam.Timing.PixelClock.Get(out s32Value);
        //toolStripStatusLabelPixelClock.Text = "fr time[0.1ms]: " + 10000 * (elapsed21 - elapsed20) / (double)(Stopwatch.Frequency) + " " + 10000 * (elapsed22 - elapsed21) / (double)(Stopwatch.Frequency);
    }
    if (cam_maker == Camera_Maker.Basler)
    {
        dFramerate = m_imageProvider.GetFrameRate(); // Basler
        dExpo = GetExposureTime();
        igain = GetGain();
        frame_timestamp = m_imageProvider.GetTimestamp();
        frame_total = m_imageProvider.Get_Statistic_Total_Buffer_Count();
        frame_underrun = m_imageProvider.Get_Statistic_feature("Statistic_Buffer_Underrun_Count");
        frame_error = frame_underrun + m_imageProvider.Get_Statistic_feature("Statistic_Failed_Buffer_Count");
        //frame_dropped = m_imageProvider.Get_Statistic_feature("Statistic_Total_Packet_Count");
    }
    if (cam_maker == Camera_Maker.AVT)
    {
        try
        {
            dFramerate = StatFrameRate(); // AVT
            dExpo = ExposureTimeAbs();
        }
        catch
        {
            MessageBox.Show("error1");
        }
        igain = GainRaw();
        frame_total = StatFrameDelivered();
        frame_underrun = StatFrameUnderrun(); // AVT
        frame_shoved = StatFrameShoved();
        frame_dropped = StatFrameDropped();
        frame_error = frame_underrun + frame_dropped;
    }

    toolStripStatusLabelFramerate.Text = "Fps: " + dFramerate.ToString("000.0");
    toolStripStatusLabelExposure.Text = "Exposure: " + (dExpo / 1000.0).ToString("00.00") + "[ms]";
    toolStripStatusLabelGain.Text = "Gain: " + igain.ToString("00");
    toolStripStatusLabelFailed.Text = "Failed U:" + frame_underrun.ToString("0000") + " S:" + frame_shoved.ToString("0000") + " D:" + frame_dropped.ToString("0000");
    //label_frame_rate.Text = (1000 * lap21).ToString("0000") + "[us] " + (1000 * lap22).ToString("0000");

    //double err_rate = 100.0 * (frame_total / (double)id);
    if (frame_total > 0)
    {
        err_rate = 100.0 * (frame_error / (double)frame_total);
    }
    toolStripStatusLabelID.Text = "Frames: " + frame_total.ToString("0000") + " " + frame_error.ToString("0000") + " " + err_rate.ToString("00.00"); // + "TS:" + timestamp;

    if (this.States == SAVE)
    {
        this.buttonSave.BackColor = Color.Red;
        this.buttonSave.Enabled = false;
        this.ButtonSaveEnd.Enabled = true;
    }
    if (this.States == RUN)
    {
        this.buttonSave.BackColor = Color.FromKnownColor(KnownColor.Control);
        this.buttonSave.Enabled = true;
        this.ButtonSaveEnd.Enabled = false;
        this.ObsStart.BackColor = Color.Red;
        if (!checkBoxObsAuto.Checked)
        {
            this.ObsStart.Enabled = false;
            this.ObsEndButton.Enabled = true;
        }
    }
    if (this.States == STOP)
    {
        this.buttonSave.BackColor = Color.FromKnownColor(KnownColor.Control);
        this.buttonSave.Enabled = false;
        this.ButtonSaveEnd.Enabled = false;
        this.ObsStart.BackColor = Color.FromKnownColor(KnownColor.Control);
        this.ObsStart.Enabled = true;
        this.ObsEndButton.Enabled = false;
    }
}
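In both branches the four crosshair arms differ only by a 90-degree offset and by which arm gets the accent color, so the repeated Line calls could be folded into a loop. A sketch of that refactor, assuming the Rotation helper shown earlier; arrowArm is a hypothetical index (3 for the West branch, 1 for the East branch):

    // Sketch: draw the four crosshair arms in a loop; one arm gets the accent color.
    int arrowArm = (udpkv.mt2mode == udpkv.mmWest) ? 3 : 1; // hypothetical mapping
    for (int arm = 0; arm < 4; arm++)
    {
        double angle = theta_c + 90 * arm;
        CvPoint2D64f outer = Rotation(OCPoint, k1 * roa, angle);
        CvPoint2D64f inner = Rotation(OCPoint, k2 * roa, angle);
        CvColor color = (arm == arrowArm) ? new CvColor(230, 105, 0) : new CvColor(0, 205, 0);
        Cv.Line(img_dmk3, outer, inner, color);
    }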
/// <summary>
/// Calculates centroid.
/// Centroid will be returned and stored in the blob structure. (cvCentroid)
/// </summary>
/// <returns>Centroid.</returns>
public CvPoint2D64f CalcCentroid()
{
    Centroid = new CvPoint2D64f(M10 / Area, M01 / Area);
    return Centroid;
}
/// <summary>
/// Returns the squared distance between two vectors.
/// </summary>
/// <param name="first">First vector</param>
/// <param name="second">Second vector</param>
public static double GetDistanceSq(CvPoint2D64f first, CvPoint2D64f second)
{
    return GetLengthSq(first - second);
}

/// <summary>
/// Returns the Euclidean length of a vector.
/// </summary>
/// <param name="vector">Vector</param>
public static double GetLength(CvPoint2D64f vector)
{
    return Math.Sqrt(vector.X * vector.X + vector.Y * vector.Y);
}

/// <summary>
/// Returns the sum of squares (squared length) of a vector.
/// </summary>
/// <param name="vector">Vector</param>
public static double GetLengthSq(CvPoint2D64f vector)
{
    return vector.X * vector.X + vector.Y * vector.Y;
}
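These helpers make the usual speed trade-off explicit: squared distances order points exactly like true distances, so nearest-neighbor comparisons can skip the square root. A small usage sketch:

    // Nearest-neighbor comparison without Math.Sqrt.
    CvPoint2D64f a = new CvPoint2D64f(0, 0);
    CvPoint2D64f b = new CvPoint2D64f(3, 4);
    double dSq = GetDistanceSq(a, b); // 25
    double d = GetLength(b - a);      // 5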
/// <summary>
/// Runs the homography kernel: estimates a 3x3 homography from point
/// correspondences with a normalized DLT and writes it to model.
/// </summary>
/// <param name="m1">Source points (CV_64FC2)</param>
/// <param name="m2">Destination points (CV_64FC2)</param>
/// <param name="model">Output 3x3 homography</param>
/// <returns>1 on success, 0 on degenerate input.</returns>
public override unsafe int RunKernel(CvMat m1, CvMat m2, CvMat model)
{
    int count = m1.Rows * m1.Cols;
    CvPoint2D64f* M = (CvPoint2D64f*)m1.DataByte;
    CvPoint2D64f* m = (CvPoint2D64f*)m2.DataByte;

    double* LtL = stackalloc double[9 * 9], W = stackalloc double[9 * 9], V = stackalloc double[9 * 9];
    CvMat _LtL = new CvMat(9, 9, MatrixType.F64C1, new IntPtr(LtL));
    CvMat matW = new CvMat(9, 9, MatrixType.F64C1, new IntPtr(W));
    CvMat matV = new CvMat(9, 9, MatrixType.F64C1, new IntPtr(V));
    CvMat _H0 = new CvMat(3, 3, MatrixType.F64C1, new IntPtr(V + (9 * 8)));
    CvMat _Htemp = new CvMat(3, 3, MatrixType.F64C1, new IntPtr(V + (9 * 7)));

    // Centroids and mean absolute deviations for Hartley-style normalization.
    CvPoint2D64f cM = new CvPoint2D64f(0, 0), cm = new CvPoint2D64f(0, 0),
                 sM = new CvPoint2D64f(0, 0), sm = new CvPoint2D64f(0, 0);
    for (int i = 0; i < count; i++)
    {
        cm.X += m[i].X; cm.Y += m[i].Y;
        cM.X += M[i].X; cM.Y += M[i].Y;
    }
    cm.X /= count; cm.Y /= count;
    cM.X /= count; cM.Y /= count;

    for (int i = 0; i < count; i++)
    {
        sm.X += Math.Abs(m[i].X - cm.X); sm.Y += Math.Abs(m[i].Y - cm.Y);
        sM.X += Math.Abs(M[i].X - cM.X); sM.Y += Math.Abs(M[i].Y - cM.Y);
    }
    if (Math.Abs(sm.X) < double.Epsilon || Math.Abs(sm.Y) < double.Epsilon ||
        Math.Abs(sM.X) < double.Epsilon || Math.Abs(sM.Y) < double.Epsilon)
        return 0;
    sm.X = count / sm.X; sm.Y = count / sm.Y;
    sM.X = count / sM.X; sM.Y = count / sM.Y;

    double[] invHnorm = new double[9] { 1.0 / sm.X, 0, cm.X, 0, 1.0 / sm.Y, cm.Y, 0, 0, 1 };
    double[] Hnorm2 = new double[9] { sM.X, 0, -cM.X * sM.X, 0, sM.Y, -cM.Y * sM.Y, 0, 0, 1 };
    CvMat _invHnorm = new CvMat(3, 3, MatrixType.F64C1, invHnorm);
    CvMat _Hnorm2 = new CvMat(3, 3, MatrixType.F64C1, Hnorm2);

    // Accumulate the upper triangle of L^T L over all correspondences.
    Cv.Zero(_LtL);
    for (int i = 0; i < count; i++)
    {
        double x = (m[i].X - cm.X) * sm.X, y = (m[i].Y - cm.Y) * sm.Y;
        double X = (M[i].X - cM.X) * sM.X, Y = (M[i].Y - cM.Y) * sM.Y;
        double[] Lx = { X, Y, 1, 0, 0, 0, -x * X, -x * Y, -x };
        double[] Ly = { 0, 0, 0, X, Y, 1, -y * X, -y * Y, -y };
        int j, k;
        for (j = 0; j < 9; j++)
            for (k = j; k < 9; k++)
                LtL[j * 9 + k] += Lx[j] * Lx[k] + Ly[j] * Ly[k];
    }
    Cv.CompleteSymm(_LtL);

    // The solution is the eigenvector of L^T L with the smallest eigenvalue
    // (the last row of matV); denormalize and scale so H[2,2] = 1.
    //cvSVD( &_LtL, &matW, 0, &matV, CV_SVD_MODIFY_A + CV_SVD_V_T );
    Cv.EigenVV(_LtL, matV, matW);
    Cv.MatMul(_invHnorm, _H0, _Htemp);
    Cv.MatMul(_Htemp, _Hnorm2, _H0);
    Cv.ConvertScale(_H0, model, 1.0 / _H0.DataDouble[8]);

    return 1;
}
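In application code the same estimate is usually reached through the public API rather than the kernel. A minimal sketch, assuming OpenCvSharp's classic C-API wrapper (Cv.FindHomography) and the CvMat(rows, cols, type, data) constructor; four point pairs are the minimum for a homography:

    // Sketch: estimate a 3x3 homography from four correspondences.
    CvMat src = new CvMat(1, 4, MatrixType.F64C2, new double[] { 0, 0, 1, 0, 1, 1, 0, 1 });
    CvMat dst = new CvMat(1, 4, MatrixType.F64C2, new double[] { 10, 10, 20, 12, 22, 24, 8, 22 });
    CvMat H = new CvMat(3, 3, MatrixType.F64C1);
    Cv.FindHomography(src, dst, H);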
public IplImage HoughLines(IplImage src) // lane detection via the Hough transform
{
    flag_yellow = 0;
    flag_white = 0;
    slice = this.SliceImage(src);
    hsv = this.YellowTransform(slice);
    hls = this.WhiteTransform(slice);
    blur = this.BlurImage(hsv);
    bin = this.Binary(hsv, 50); // binarize (adaptive thresholding)
    Cv.Canny(bin, bin, 50, 200, ApertureSize.Size3);
    CvMemStorage Storage = new CvMemStorage();

    /* probabilistic detection */
    CvSeq lines = bin.HoughLines2(Storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 100, 100);
    double[] LineAngle = new double[lines.Total];
    double[] LineAngle_q = new double[lines.Total];
    if (lines != null)
    {
        for (int i = 0; i < lines.Total; i++) // examine every detected line
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch this line's data
            int dx = elem.P2.X - elem.P1.X; // difference in x
            int dy = elem.P2.Y - elem.P1.Y; // difference in y
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope angle
            LineAngle[i] = angle;
            LineAngle_q[i] = angle;
        }
        if (lines.Total != 0)
        {
            Quick_Sort(LineAngle_q, lines.Total);
        }
        for (int i = 0; i < lines.Total; i++)
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch this line's data
            int dx = elem.P2.X - elem.P1.X; // difference in x
            int dy = elem.P2.Y - elem.P1.Y; // difference in y
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope angle
            if (LineAngle_q[lines.Total - 1] == LineAngle[i] || LineAngle_q[0] == LineAngle[i])
            {
                // when P2 + 50 is still smaller than P1, or P2 - 50 is still larger than P1
                if (elem.P1.Y > elem.P2.Y || elem.P1.Y < elem.P2.Y) // P1: start point, P2: end point
                {
                    if (Math.Abs(angle) >= 14 && Math.Abs(angle) <= 80)
                    {
                        Cv.PutText(src, "Yellow angle : " + angle.ToString(), new CvPoint(100, 50), new CvFont(FontFace.HersheyComplex, 1, 1), CvColor.Yellow);
                        P1 = elem.P1;
                        P2 = elem.P2;
                        d_dx = Math.Abs(P2.X - P1.X);
                        d_dy = Math.Abs(P2.Y - P1.Y);
                        // Stretch the segment until it spans from the bottom of the
                        // frame up to 5/8 of the frame height.
                        while (true)
                        {
                            if (P1.Y > P2.Y)
                            {
                                if (P1.Y < src.Height)
                                {
                                    if (P1.X < P2.X) { P1.X -= d_dx / 100; P1.Y += d_dy / 100; }
                                    else { P1.X += d_dx / 100; P1.Y += d_dy / 100; }
                                }
                                else if (P2.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X > P2.X) { P2.X -= d_dx / 100; P2.Y -= d_dy / 100; }
                                    else { P2.X += d_dx / 100; P2.Y -= d_dy / 100; }
                                }
                                else { break; }
                            }
                            else
                            {
                                if (P2.Y < src.Height)
                                {
                                    if (P1.X > P2.X) { P2.X -= d_dx / 100; P2.Y += d_dy / 100; }
                                    else { P2.X += d_dx / 100; P2.Y += d_dy / 100; }
                                }
                                else if (P1.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X < P2.X) { P1.X -= d_dx / 100; P1.Y -= d_dy / 100; }
                                    else { P1.X += d_dx / 100; P1.Y -= d_dy / 100; }
                                }
                                else { break; }
                            }
                            d_dx = Math.Abs(P2.X - P1.X);
                            d_dy = Math.Abs(P2.Y - P1.Y);
                        }
                        flag_yellow = 1;
                        prev_elemLeft.P1 = P1;
                        prev_elemLeft.P2 = P2;
                        src.Line(P1, P2, CvColor.Yellow, 10, LineType.AntiAlias, 0);
                        // P1/P2 can be used to inform the driver (e.g., a sharply rising angle means lane departure)
                        break;
                    }
                }
            }
        }
    }

    blur = this.BlurImage(hls);
    bin = this.Binary(hls, 50); // binarize (adaptive thresholding)
    Cv.Canny(bin, bin, 50, 150, ApertureSize.Size3);

    /* probabilistic detection */
    lines = bin.HoughLines2(Storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 100, 100);
    LineAngle = new double[lines.Total];
    LineAngle_q = new double[lines.Total];
    if (lines != null)
    {
        for (int i = 0; i < lines.Total; i++) // examine every detected line
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch this line's data
            int dx = elem.P2.X - elem.P1.X; // difference in x
            int dy = elem.P2.Y - elem.P1.Y; // difference in y
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope angle
            LineAngle[i] = angle;
            LineAngle_q[i] = angle;
        }
        if (lines.Total != 0)
        {
            Quick_Sort(LineAngle_q, lines.Total);
        }
        for (int i = 0; i < lines.Total; i++)
        {
            CvLineSegmentPoint elem = lines.GetSeqElem<CvLineSegmentPoint>(i).Value; // fetch this line's data
            int dx = elem.P2.X - elem.P1.X; // difference in x
            int dy = elem.P2.Y - elem.P1.Y; // difference in y
            double angle = Math.Atan2(dy, dx) * 180 / Math.PI; // slope angle
            if (LineAngle_q[lines.Total - 1] == LineAngle[i] || LineAngle_q[0] == LineAngle[i])
            {
                // when P2 + 50 is still smaller than P1, or P2 - 50 is still larger than P1
                if (elem.P1.Y > elem.P2.Y || elem.P1.Y < elem.P2.Y) // P1: start point, P2: end point
                {
                    if (Math.Abs(angle) >= 14 && Math.Abs(angle) <= 80)
                    {
                        Cv.PutText(src, "White angle : " + angle.ToString(), new CvPoint(100, 100), new CvFont(FontFace.HersheyComplex, 1, 1), CvColor.White);
                        P1 = elem.P1;
                        P2 = elem.P2;
                        d_dx = Math.Abs(P2.X - P1.X);
                        d_dy = Math.Abs(P2.Y - P1.Y);
                        // Stretch the segment until it spans from the bottom of the
                        // frame up to 5/8 of the frame height.
                        while (true)
                        {
                            if (P1.Y > P2.Y)
                            {
                                if (P1.Y < src.Height)
                                {
                                    if (P1.X < P2.X) { P1.X -= d_dx / 100; P1.Y += d_dy / 100; }
                                    else { P1.X += d_dx / 100; P1.Y += d_dy / 100; }
                                }
                                else if (P2.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X > P2.X) { P2.X -= d_dx / 100; P2.Y -= d_dy / 100; }
                                    else { P2.X += d_dx / 100; P2.Y -= d_dy / 100; }
                                }
                                else { break; }
                            }
                            else
                            {
                                if (P2.Y < src.Height)
                                {
                                    if (P1.X > P2.X) { P2.X -= d_dx / 100; P2.Y += d_dy / 100; }
                                    else { P2.X += d_dx / 100; P2.Y += d_dy / 100; }
                                }
                                else if (P1.Y > src.Height * 5 / 8)
                                {
                                    if (P1.X < P2.X) { P1.X -= d_dx / 100; P1.Y -= d_dy / 100; }
                                    else { P1.X += d_dx / 100; P1.Y -= d_dy / 100; }
                                }
                                else { break; }
                            }
                            d_dx = Math.Abs(P2.X - P1.X);
                            d_dy = Math.Abs(P2.Y - P1.Y);
                        }
                        flag_white = 1;
                        prev_elemRight.P1 = P1;
                        prev_elemRight.P2 = P2;
                        src.Line(P1, P2, CvColor.White, 10, LineType.AntiAlias, 0);
                        // P1/P2 can be used to inform the driver (e.g., a sharply rising angle means lane departure)
                        break;
                    }
                }
            }
        }
    }

    // Reuse the previous frame's lane when nothing was detected this frame.
    if (flag_yellow == 0)
    {
        src.Line(prev_elemLeft.P1, prev_elemLeft.P2, CvColor.Yellow, 10, LineType.AntiAlias, 0);
    }
    if (flag_white == 0)
    {
        src.Line(prev_elemRight.P1, prev_elemRight.P2, CvColor.White, 10, LineType.AntiAlias, 0);
    }
    Cv.ReleaseMemStorage(Storage);
    //vehicle_detection = this.VDConvert.VehicleDetect(src);
    Dispose();
    return src;
}
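The detection core is independent of the lane-specific filtering. A stripped-down sketch using the same OpenCvSharp calls, assuming bin is an edge image prepared as above:

    // Minimal sketch of the probabilistic Hough pipeline used above.
    using (CvMemStorage storage = new CvMemStorage())
    {
        Cv.Canny(bin, bin, 50, 200, ApertureSize.Size3);
        CvSeq lines = bin.HoughLines2(storage, HoughLinesMethod.Probabilistic,
                                      1, Math.PI / 180, 50, 100, 100);
        for (int i = 0; i < lines.Total; i++)
        {
            CvLineSegmentPoint seg = lines.GetSeqElem<CvLineSegmentPoint>(i).Value;
            double angle = Math.Atan2(seg.P2.Y - seg.P1.Y, seg.P2.X - seg.P1.X) * 180 / Math.PI;
            Console.WriteLine("({0}) -> ({1}), angle {2:F1}", seg.P1, seg.P2, angle);
        }
    }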
public static double Cal_distance(CvPoint2D64f pos1, CvPoint2D64f pos2)
{
    return Math.Sqrt((pos1.X - pos2.X) * (pos1.X - pos2.X) + (pos1.Y - pos2.Y) * (pos1.Y - pos2.Y));
}
#if LANG_JP
/// <summary>
/// 指定した点と直線の距離を返す
/// </summary>
/// <param name="point"></param>
#else
/// <summary>
/// Returns the distance between this line and the specified point
/// </summary>
/// <param name="point"></param>
#endif
public double Distance(CvPoint2D64f point)
{
    return Distance(point.X, point.Y);
}
// Worker-thread processing (capture)
private void worker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    Stopwatch sw = new Stopwatch();
    string str;
    id = 0;

    // UDP for sending PID data: local port number to bind
    // FSI_PID_DATA pid_data = new FSI_PID_DATA();
    int localPort = mmFsiUdpPortMT3PV;
    System.Net.Sockets.UdpClient udpc2 = null;
    /*
    try
    {
        udpc2 = new System.Net.Sockets.UdpClient(localPort);
    }
    catch (Exception ex)
    {
        // show via an anonymous delegate
        this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, ex.ToString() });
    }
    */

    // videoInput object
    const int DeviceID = 0;    // 0; // 3 (pro), 4 (piccolo), 7 (DMK)
    const int CaptureFps = 30; // 30
    int interval = (int)(1000 / CaptureFps / 10);
    const int CaptureWidth = 640;
    const int CaptureHeight = 480;
    // number of frames to keep saving
    int mmFsiPostRec = 60;
    int save_counter = mmFsiPostRec;

    using (VideoInput vi = new VideoInput())
    {
        vi.SetIdealFramerate(DeviceID, CaptureFps);
        vi.SetupDevice(DeviceID, CaptureWidth, CaptureHeight);
        int width = vi.GetWidth(DeviceID);
        int height = vi.GetHeight(DeviceID);

        using (IplImage img = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_dark8 = Cv.LoadImage(@"C:\piccolo\MT3V_dark.bmp", LoadMode.GrayScale))
        //using (IplImage img_dark = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_mono = new IplImage(width, height, BitDepth.U8, 1))
        using (IplImage img2 = new IplImage(width, height, BitDepth.U8, 1))
        //using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format24bppRgb))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.45, 0.45))
        //using (CvWindow window0 = new CvWindow("FIFO0", WindowMode.AutoSize))
        {
            //this.Size = new Size(width + 12, height + 148);
            double min_val, max_val;
            CvPoint min_loc, max_loc;
            int size = 15;
            int size2x = size / 2;
            int size2y = size / 2;
            int crop = 20;
            double sigma = 3;
            long elapsed0 = 0, elapsed1 = 0;
            double framerate0 = 0, framerate1 = 0;
            double alfa_fr = 0.99;
            sw.Start();
            while (bw.CancellationPending == false)
            {
                if (vi.IsFrameNew(DeviceID))
                {
                    DateTime dn = DateTime.Now; // capture time
                    vi.GetPixels(DeviceID, img.ImageData, false, true);

                    // draw the capture time on the frame
                    str = String.Format("Wide ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff");
                    img.PutText(str, new CvPoint(10, 475), font, new CvColor(0, 100, 40));
                    Cv.CvtColor(img, img_mono, ColorConversion.BgrToGray);
                    Cv.Sub(img_mono, img_dark8, imgdata.img); // dark-frame subtraction
                    imgdata.id = ++id;
                    imgdata.t = dn;
                    imgdata.ImgSaveFlag = !(ImgSaveFlag != 0); // int -> bool conversion
                    if (fifo.Count == MaxFrame - 1)
                    {
                        fifo.EraseLast();
                    }
                    fifo.InsertFirst(imgdata);

                    #region Position detection 1 // MinMaxLoc
                    /* // position detection
                     * Cv.Smooth(imgdata.img, img2, SmoothType.Gaussian, size, 0, sigma, 0);
                     * CvRect rect;
                     * if (PvMode == MyDETECT)
                     * {
                     *     rect = new CvRect((int)(gx + 0.5) - size, (int)(gy + 0.5) - size, size * 2, size * 2);
                     *     Cv.SetImageROI(img2, rect);
                     *     Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                     *     Cv.ResetImageROI(img2);
                     *     max_loc.X += (int)(gx + 0.5) - size; // +1 because the reference point is (1,1)
                     *     max_loc.Y += (int)(gy + 0.5) - size;
                     * }
                     * else
                     * {
                     *     rect = new CvRect(crop, crop, width - (crop + crop), height - (crop + crop));
                     *     Cv.SetImageROI(img2, rect);
                     *     Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                     *     Cv.ResetImageROI(img2);
                     *     max_loc.X += crop; // +1 because the reference point is (1,1)
                     *     max_loc.Y += crop;
                     * }
                     * window0.ShowImage(img2);
                     *
                     * double m00, m10, m01;
                     * size2x = size2y = size / 2;
                     * if (max_loc.X - size2x < 0) size2x = max_loc.X;
                     * if (max_loc.Y - size2y < 0) size2y = max_loc.Y;
                     * if (max_loc.X + size2x >= width) size2x = width - max_loc.X - 1;
                     * if (max_loc.Y + size2y >= height) size2y = height - max_loc.Y - 1;
                     * rect = new CvRect(max_loc.X - size2x, max_loc.Y - size2y, size, size);
                     * CvMoments moments;
                     * Cv.SetImageROI(img2, rect);
                     * Cv.Moments(img2, out moments, false);
                     * Cv.ResetImageROI(img2);
                     * m00 = Cv.GetSpatialMoment(moments, 0, 0);
                     * m10 = Cv.GetSpatialMoment(moments, 1, 0);
                     * m01 = Cv.GetSpatialMoment(moments, 0, 1);
                     * gx = max_loc.X - size2x + m10 / m00;
                     * gy = max_loc.Y - size2y + m01 / m00;
                     */
                    #endregion

                    #region Position detection 2 // Blob
                    Cv.Threshold(imgdata.img, img2, threshold_blob, 255, ThresholdType.Binary); // 2 ms
                    blobs.Label(img2, imgLabel); // 1.4 ms
                    max_label = blobs.GreaterBlob();
                    elapsed1 = sw.ElapsedTicks; // 1.3 ms
                    if (blobs.Count > 1 && gx >= 0)
                    {
                        uint min_area = (uint)(threshold_min_area * blobs[max_label].Area);
                        blobs.FilterByArea(min_area, uint.MaxValue); // 0.001 ms
                        // Select the best blob (large area and small distance from the previous position).
                        double x = blobs[max_label].Centroid.X;
                        double y = blobs[max_label].Centroid.Y;
                        uint area = blobs[max_label].Area;
                        //CvRect rect;
                        distance_min = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); // Math.Sqrt()
                        foreach (var item in blobs)
                        {
                            //Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, item.Value.Centroid, item.Value.Area);
                            x = item.Value.Centroid.X;
                            y = item.Value.Centroid.Y;
                            //rect = item.Value.Rect;
                            distance = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); // Mahalanobis distance in the future
                            if (distance < distance_min)
                            {
                                d_val = (item.Value.Area) / max_area;
                                if (distance <= 25) // short range (5 px)
                                {
                                    if (d_val >= 0.4) //&& d_val <= 1.2)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                                else
                                {
                                    if (d_val >= 0.8 && d_val <= 1.5)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                            }
                            //w.WriteLine("{0} {1} {2} {3} {4}", dis, dv, i, item.Key, item.Value.Area);
                        }
                        //gx = x; gy = y; max_val = area;
                    }
                    if (max_label > 0)
                    {
                        maxBlob = blobs[max_label];
                        max_centroid = maxBlob.Centroid;
                        gx = max_centroid.X;
                        gy = max_centroid.Y;
                        max_area = maxBlob.Area;
                        if (this.States == SAVE)
                        {
                            Pid_Data_Send();
                            timerSavePostTime.Stop();
                            timerSaveMainTime.Stop();
                            timerSaveMainTime.Start();
                        }
                    }
                    else
                    {
                        gx = gy = 0;
                        max_area = 0;
                    }
                    #endregion

                    // on-screen display
                    str = String.Format("ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff")
                        + String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, xoa, yoa, max_area);
                    if (imgdata.ImgSaveFlag)
                    {
                        str += " True";
                    }
                    img.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                    img.Circle(new CvPoint((int)gx, (int)gy), 10, new CvColor(255, 255, 100));
                    bw.ReportProgress(0, img);

                    // processing speed
                    elapsed0 = sw.ElapsedTicks - elapsed1; // ticks for one frame
                    elapsed1 = sw.ElapsedTicks;
                    framerate0 = alfa_fr * framerate1 + (1 - alfa_fr) * (Stopwatch.Frequency / (double)elapsed0);
                    framerate1 = framerate0;
                    str = String.Format("fr time = {0}({1}){2:F1}", sw.Elapsed, id, framerate0); //, " ", sw.ElapsedMilliseconds);
                    // show the current time in the label via an anonymous delegate
                    this.Invoke(new dlgSetString(ShowText), new object[] { textBox1, str });
                    //img.ToBitmap(bitmap);
                    //pictureBox1.Refresh();
                }
                Application.DoEvents();
                Thread.Sleep(interval);
            }
            this.States = STOP;
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsStart, this.States });
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsEndButton, this.States });
            vi.StopDevice(DeviceID);
            //udpc2.Close();
        }
    }
}
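The hand-written gating above (the area ratio d_val and the two distance windows) plays the same role as Cal_Evaluate earlier in this listing: trade blob area against distance from the previous position. A compact sketch of that alternative, reusing the names from earlier entries; distance0 is assumed tuned for this camera:

    // Sketch: replace the nested distance/area gates with the exponential score
    // from Cal_Evaluate, using the previous centroid (gx, gy) as the reference.
    double best = 0;
    foreach (var item in blobs)
    {
        double eval = Cal_Evaluate(item.Value.Centroid, item.Value.Area,
                                   new CvPoint2D64f(gx, gy), distance0);
        if (eval > best) { best = eval; max_label = item.Key; }
    }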
public static extern void cvCalibrationMatrixValues(
    IntPtr camera_matrix, CvSize image_size,
    double aperture_width, double aperture_height,
    out double fovx, out double fovy, out double focal_length,
    out CvPoint2D64f principal_point, out double pixel_aspect_ratio);
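A hedged usage sketch, assuming this declaration is bound to OpenCV's native calib3d library and that the camera matrix is a 3x3 CV_64FC1 CvMat; the intrinsics and the 4.8 x 3.6 mm sensor size are made-up illustration values:

    // Hypothetical usage: recover field of view and principal point from intrinsics.
    CvMat K = new CvMat(3, 3, MatrixType.F64C1, new double[]
    {
        800,   0, 320,
          0, 800, 240,
          0,   0,   1,
    });
    double fovx, fovy, focal, aspect;
    CvPoint2D64f principal;
    cvCalibrationMatrixValues(K.CvPtr, new CvSize(640, 480), 4.8, 3.6,
                              out fovx, out fovy, out focal, out principal, out aspect);
    Console.WriteLine("FOV: {0:F1} x {1:F1} deg", fovx, fovy);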
// Compacts the point array in place, keeping only entries whose mask byte is
// non-zero; returns the number of points kept.
private static unsafe int icvCompressPoints(CvPoint2D64f* ptr, byte* mask, int mstep, int count)
{
    int i, j;
    for (i = j = 0; i < count; i++)
    {
        if (mask[i * mstep] != 0)
        {
            if (i > j)
                ptr[j] = ptr[i];
            j++;
        }
    }
    return j;
}
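This is the usual post-RANSAC step that packs the inlier correspondences to the front of both point arrays. A hedged sketch of the call pattern inside an unsafe context; m1, m2, mask, and count are hypothetical stand-ins (CV_64FC2 point mats, a CV_8UC1 inlier mask with 1 = inlier, and the correspondence count):

    // Compress both point sets with the same inlier mask.
    int kept1 = icvCompressPoints((CvPoint2D64f*)m1.DataByte, mask.DataByte, 1, count);
    int kept2 = icvCompressPoints((CvPoint2D64f*)m2.DataByte, mask.DataByte, 1, count);
    // kept1 == kept2: both arrays now hold only the inliers, in order.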