/// <summary>
/// Click handler: locates the target window by its title (AppName), captures its
/// contents via PrintWindow, displays the capture and the search template, and
/// runs template matching (searchIMG).
/// </summary>
private void btnImgsrch_Click(object sender, RoutedEventArgs e)
{
    IntPtr findwindow = FindWindow(null, AppName);
    if (findwindow == IntPtr.Zero)
    {
        // Target window not found; leave the UI untouched (matches original behavior).
        return;
    }

    // Player window found.
    lblState.Content = "찾았습니다.";

    // FIX: Graphics.FromHwnd wraps a GDI device context and must be disposed;
    // previously it leaked an HDC on every click.
    Rectangle rect;
    using (Graphics windowGraphics = Graphics.FromHwnd(findwindow))
    {
        // Window client size and position.
        rect = Rectangle.Round(windowGraphics.VisibleClipBounds);
    }

    // Bitmap sized to the window. NOTE(review): intentionally not disposed here —
    // printImg/searchIMG receive it and may retain it; confirm ownership before
    // adding a using.
    Bitmap bmp = new Bitmap(rect.Width, rect.Height);
    using (Graphics g = Graphics.FromImage(bmp))
    {
        // Capture the window contents (0x2 = PW_RENDERFULLCONTENT).
        IntPtr hdc = g.GetHdc();
        PrintWindow(findwindow, hdc, 0x2);
        g.ReleaseHdc(hdc);
    }

    // Show the capture and the template, then run the image search.
    printImg(bmp, imgPrint);
    printImg(srcImg, imgSrcPrint);
    searchIMG(bmp, srcImg);
}
/// <summary>Outlines <paramref name="rectangle"/> with rounded corners of the given curve radius.</summary>
public static void DrawRound(this Graphics graphics, Color color, Rectangle rectangle, int curve)
{
    using (GraphicsPath roundedOutline = rectangle.Round(curve))
    {
        using (Pen stroke = new Pen(color))
        {
            graphics.DrawPath(stroke, roundedOutline);
        }
    }
}
/// <summary>Fills a rounded rectangle with a two-color linear gradient at the given angle.</summary>
public static void FillRoundGradient(this Graphics graphics, Color color1, Color color2, Rectangle rectangle, float angle, int curve)
{
    using (GraphicsPath roundedShape = rectangle.Round(curve))
    {
        using (var gradient = new LinearGradientBrush(rectangle, color1, color2, angle))
        {
            graphics.FillPath(gradient, roundedShape);
        }
    }
}
/// <summary>Fills a rounded rectangle with a solid color.</summary>
public static void FillRound(this Graphics graphics, Color color, Rectangle rectangle, int curve)
{
    using (GraphicsPath roundedShape = rectangle.Round(curve))
    {
        using (var fill = new SolidBrush(color))
        {
            graphics.FillPath(fill, roundedShape);
        }
    }
}
/// <summary>
/// Wires up the page: the button triggers detection, and each loaded image is
/// pushed through a Mask R-CNN detector on a background task; boxes, labels and
/// alpha-blended masks are drawn onto the image, which is then displayed.
/// </summary>
public DnnPage() : base()
{
    var button = this.GetButton();
    button.Text = "Perform Mask-rcnn Detection";
    button.Clicked += OnButtonClicked;

    // async void lambda is acceptable here: it is a top-level event handler.
    OnImagesLoaded += async (sender, image) =>
    {
        // Nothing to do without an image.
        if (image == null || image[0] == null)
        {
            return;
        }
        SetMessage("Please wait...");
        SetImage(null);

        // Run detection off the UI thread; result carries (annotated image, status message, time).
        Task<Tuple<Mat, String, long>> t = new Task<Tuple<Mat, String, long>>(
            () =>
            {
                InitDetector();
                String msg = String.Empty;
                using (Mat blob = DnnInvoke.BlobFromImage(image[0]))
                using (VectorOfMat tensors = new VectorOfMat())
                {
                    _maskRcnnDetector.SetInput(blob, "image_tensor");
                    // Time only the forward pass.
                    Stopwatch watch = Stopwatch.StartNew();
                    _maskRcnnDetector.Forward(tensors, new string[] { "detection_out_final", "detection_masks" });
                    watch.Stop();
                    msg = String.Format("Mask RCNN inception completed in {0} milliseconds.", watch.ElapsedMilliseconds);

                    using (Mat boxes = tensors[0])
                    using (Mat masks = tensors[1])
                    {
                        System.Drawing.Size imgSize = image[0].Size;
                        float[,,,] boxesData = boxes.GetData(true) as float[, , ,];
                        // Dimension 2 of the boxes tensor is the detection count.
                        int numDetections = boxesData.GetLength(2);
                        for (int i = 0; i < numDetections; i++)
                        {
                            float score = boxesData[0, 0, i, 2];
                            // Confidence threshold of 0.5.
                            if (score > 0.5)
                            {
                                int classId = (int)boxesData[0, 0, i, 1];
                                String label = _labels[classId];
                                MCvScalar color = _colors[classId];
                                // Box coordinates are normalized [0,1]; scale to pixels.
                                float left = boxesData[0, 0, i, 3] * imgSize.Width;
                                float top = boxesData[0, 0, i, 4] * imgSize.Height;
                                float right = boxesData[0, 0, i, 5] * imgSize.Width;
                                float bottom = boxesData[0, 0, i, 6] * imgSize.Height;
                                RectangleF rectF = new RectangleF(left, top, right - left, bottom - top);
                                Rectangle rect = Rectangle.Round(rectF);
                                // Clip to the image so the sub-region Mat below is valid.
                                rect.Intersect(new Rectangle(Point.Empty, imgSize));
                                CvInvoke.Rectangle(image[0], rect, new MCvScalar(0, 0, 0, 0), 1);
                                CvInvoke.PutText(image[0], label, rect.Location, FontFace.HersheyComplex, 1.0, new MCvScalar(0, 0, 255), 2);

                                // Wrap this detection's mask plane without copying the tensor data.
                                int[] masksDim = masks.SizeOfDimension;
                                using (Mat mask = new Mat(
                                    masksDim[2], masksDim[3], DepthType.Cv32F, 1,
                                    masks.GetDataPointer(i, classId),
                                    masksDim[3] * masks.ElementSize))
                                using (Mat maskLarge = new Mat())
                                using (Mat maskLargeInv = new Mat())
                                using (Mat subRegion = new Mat(image[0], rect))
                                using (Mat largeColor = new Mat(subRegion.Size, Emgu.CV.CvEnum.DepthType.Cv8U, 3))
                                {
                                    // Scale the low-resolution mask up to the box size.
                                    CvInvoke.Resize(mask, maskLarge, rect.Size);
                                    // Give the mask at least 30% transparency.
                                    using (ScalarArray sa = new ScalarArray(0.7))
                                        CvInvoke.Min(sa, maskLarge, maskLarge);
                                    // Create the inverse mask (keeps the original pixels).
                                    using (ScalarArray sa = new ScalarArray(1.0))
                                        CvInvoke.Subtract(sa, maskLarge, maskLargeInv);
                                    // The mask overlay color for this class.
                                    largeColor.SetTo(color);
                                    if (subRegion.NumberOfChannels == 4)
                                    {
                                        // BlendLinear works on 3 channels; round-trip through BGR.
                                        using (Mat bgrSubRegion = new Mat())
                                        {
                                            CvInvoke.CvtColor(subRegion, bgrSubRegion, ColorConversion.Bgra2Bgr);
                                            CvInvoke.BlendLinear(largeColor, bgrSubRegion, maskLarge, maskLargeInv, bgrSubRegion);
                                            CvInvoke.CvtColor(bgrSubRegion, subRegion, ColorConversion.Bgr2Bgra);
                                        }
                                    }
                                    else
                                    {
                                        CvInvoke.BlendLinear(largeColor, subRegion, maskLarge, maskLargeInv, subRegion);
                                    }
                                }
                            }
                        }
                    }
                }
                // NOTE(review): time is always 0 here; the elapsed time is only in msg.
                long time = 0;
                return (new Tuple<Mat, String, long>(image[0], msg, time));
            });
        t.Start();
        var result = await t;
        SetImage(t.Result.Item1);
        //String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";
        SetMessage(t.Result.Item2);
    };
}
// Work executed by the BackgroundWorker.
// Touching UI elements (e.g. lstNumber.Items.Add(i)) directly from this DoWork
// handler throws InvalidOperationException, so every UI update is marshalled
// through Dispatcher.BeginInvoke.
private void myThread_DoWork(object sender, DoWorkEventArgs e)
{
    //int count = (int)e.Argument;
    int i = 0;
    int prevKeyState = 0;           // previous GetAsyncKeyState(69) result, for edge detection
    string str;
    int count = 0;
    RECT cRect, wRect;              // client rect and window rect of the target window
    int appPointX;                  // screen-space origin of the client area
    int appPointY;
    int appSizeWidth;
    int appSizeHeight;
    bool b = false;                 // "armed" flag, toggled by the E key (VK 69)

    // Poll until the worker is asked to cancel.
    while (!myThread.CancellationPending)
    {
        // Rising edge of the E key (virtual-key 69) arms the loop.
        if (GetAsyncKeyState(69) > 1 && prevKeyState <= 1)
        {
            //if (b) b = false;
            //else b = true;
            b = true;
        }

        if (b && tglRun.IsOn)
        {
            IntPtr hWnd = FindWindow(null, AppName);
            if (hWnd != IntPtr.Zero)
            {
                GetClientRect(hWnd, out cRect);
                GetWindowRect(hWnd, out wRect);
                // Derive the client-area origin from the window/client rects;
                // the half border width is assumed symmetric on all sides.
                appPointX = wRect.left + (wRect.right - wRect.left - cRect.right) / 2;
                appPointY = wRect.bottom - cRect.bottom - (wRect.right - wRect.left - cRect.right) / 2;
                appSizeWidth = cRect.right;
                appSizeHeight = cRect.bottom;

                // Grab a fixed 55x55 patch at a hard-coded client offset (825, 970)
                // and template-match two reference images against it.
                using (Bitmap bmp = new Bitmap(55, 55, PixelFormat.Format32bppArgb))
                using (Graphics gr = Graphics.FromImage(bmp))
                {
                    gr.CopyFromScreen(appPointX + 825, appPointY + 970, 0, 0, bmp.Size);
                    //printImg(bmp, imgSpeed);
                    using (Mat ScreenMat = OpenCvSharp.Extensions.BitmapConverter.ToMat(bmp))
                    using (Mat FindMat = OpenCvSharp.Extensions.BitmapConverter.ToMat(srcImg))
                    using (Mat FindMat2 = OpenCvSharp.Extensions.BitmapConverter.ToMat(srcImg2))
                    using (Mat res = ScreenMat.MatchTemplate(FindMat, TemplateMatchModes.CCoeffNormed))
                    using (Mat res2 = ScreenMat.MatchTemplate(FindMat2, TemplateMatchModes.CCoeffNormed))
                    {
                        double minval, maxval = 0;
                        double minval2, maxval2 = 0;
                        OpenCvSharp.Point minloc, maxloc;
                        OpenCvSharp.Point minloc2, maxloc2;
                        Cv2.MinMaxLoc(res, out minval, out maxval, out minloc, out maxloc);
                        Cv2.MinMaxLoc(res2, out minval2, out maxval2, out minloc2, out maxloc2);

                        // Template 2 matched: send a 'w' key-down/key-up pair over serial.
                        if (maxval2 > 0.90)
                        {
                            if (sp.IsOpen)
                            {
                                sp.Write("KDw");
                                Thread.Sleep(10);
                                sp.Write("KUw");
                            }
                        }
                        // Template 1 matched: same key pulse, then disarm the loop.
                        if (maxval > 0.90)
                        {
                            if (sp.IsOpen)
                            {
                                sp.Write("KDw");
                                Thread.Sleep(10);
                                sp.Write("KUw");
                                b = false;
                            }
                        }
                    }
                }
            }
        }

        // Reflect the armed/disarmed state in the UI (strings are user-facing Korean:
        // "실행중" = running, "정지중" = stopped).
        this.Dispatcher.BeginInvoke(DispatcherPriority.Normal, (ThreadStart)delegate ()
        {
            if (b)
            {
                lblResult.Content = "실행중";
                lblResult.Foreground = System.Windows.Media.Brushes.Blue;
            }
            else
            {
                lblResult.Content = "정지중";
                lblResult.Foreground = System.Windows.Media.Brushes.Red;
            }
        });

        // NOTE(review): the trailing '&& false' permanently disables this F10 (VK 121)
        // debug branch; the code below is dead until that is removed.
        if (GetAsyncKeyState(121) > 1 && prevKeyState <= 1 && false)
        {
            string strpos = "";
            count++;
            IntPtr hWnd = FindWindow(null, (string)e.Argument);
            if (hWnd != IntPtr.Zero)
            {
                GetClientRect(hWnd, out cRect);
                GetWindowRect(hWnd, out wRect);
                appPointX = wRect.left + (wRect.right - wRect.left - cRect.right) / 2;
                appPointY = wRect.bottom - cRect.bottom - (wRect.right - wRect.left - cRect.right) / 2;
                appSizeWidth = cRect.right;
                appSizeHeight = cRect.bottom;

                // Build a diagnostic dump of the window geometry.
                strpos += "cRect.left : " + cRect.left + "\n";
                strpos += "cRect.right : " + cRect.right + "\n";
                strpos += "cRect.top : " + cRect.top + "\n";
                strpos += "cRect.bottom : " + cRect.bottom + "\n\n";
                strpos += "wRect.left : " + wRect.left + "\n";
                strpos += "wRect.right : " + wRect.right + "\n";
                strpos += "wRect.top : " + wRect.top + "\n";
                strpos += "wRect.bottom : " + wRect.bottom + "\n\n";
                strpos += "appPointX : " + appPointX + "\n";
                strpos += "appPointY : " + appPointY + "\n";
                strpos += "appSizeWidth : " + appSizeWidth + "\n";
                strpos += "appSizeHeight : " + appSizeHeight + "\n";

                // Window found: show diagnostics and capture it on the UI thread.
                this.Dispatcher.BeginInvoke(DispatcherPriority.Normal, (ThreadStart)delegate ()
                {
                    lblPos.Content = strpos;
                    IntPtr findwindow = FindWindow(null, AppName);
                    if (findwindow != IntPtr.Zero)
                    {
                        // Player window found ("찾았습니다." = "Found it").
                        lblState.Content = "찾았습니다.";
                        // Graphics info for the found window.
                        // NOTE(review): this Graphics is never disposed — HDC leak.
                        Graphics Graphicsdata = Graphics.FromHwnd(findwindow);
                        // Window size and position.
                        Rectangle rect = Rectangle.Round(Graphicsdata.VisibleClipBounds);
                        // Bitmap sized to the window.
                        Bitmap bmp = new Bitmap(rect.Width, rect.Height);
                        using (Graphics g = Graphics.FromImage(bmp))
                        {
                            // Capture the window contents (0x2 = PW_RENDERFULLCONTENT).
                            IntPtr hdc = g.GetHdc();
                            PrintWindow(findwindow, hdc, 0x2);
                            g.ReleaseHdc(hdc);
                        }
                        // Display the capture and the template, then search.
                        printImg(bmp, imgPrint);
                        printImg(srcImg, imgSrcPrint);
                        searchIMG(bmp, srcImg);
                    }
                });
            }
            else
            {
                // Window not found ("못찾았어요" = "Could not find it").
                this.Dispatcher.BeginInvoke(DispatcherPriority.Normal, (ThreadStart)delegate ()
                {
                    lblState.Content = "못찾았어요";
                });
            }
        }

        prevKeyState = GetAsyncKeyState(69);
        // Status line (Korean labels: "정지신호" = stop signal, "F9 상태" = F9 state).
        str = i++ + "\n정지신호 : " + myThread.CancellationPending.ToString() + "\nF9 상태 : " + GetAsyncKeyState(121).ToString() + "\ncount : " + count.ToString();
        this.Dispatcher.BeginInvoke(DispatcherPriority.Normal, (ThreadStart)delegate ()
        {
            lblState.Content = str;
        });
        // Throttle the polling loop to ~20 Hz.
        Thread.Sleep(50);
    }
    // Report that the loop exited due to cancellation.
    e.Cancel = true;
}
/// <summary>
/// Wires up the page: the button triggers detection, and each loaded image is
/// run through a Mask R-CNN TensorFlow model loaded from dnn_data; boxes,
/// labels and alpha-blended masks are drawn onto the image and displayed.
/// </summary>
public DnnPage() : base()
{
    var button = this.GetButton();
    button.Text = "Perform Mask-rcnn Detection";
    button.Clicked += OnButtonClicked;

    // async void lambda is acceptable here: it is a top-level event handler.
    OnImagesLoaded += async (sender, image) =>
    {
        // Nothing to do without an image.
        if (image == null || image[0] == null)
        {
            return;
        }
        SetMessage("Please wait...");
        SetImage(null);

        Task<Tuple<Mat, String, long>> t = new Task<Tuple<Mat, String, long>>(
            () =>
            {
                String configFile = "mask_rcnn_inception_v2_coco_2018_01_28.pbtxt";
#if __ANDROID__
                String path = System.IO.Path.Combine(Android.OS.Environment.ExternalStorageDirectory.AbsolutePath, Android.OS.Environment.DirectoryDownloads, "dnn_data");
                FileInfo configFileInfo = AndroidFileAsset.WritePermanantFileAsset(Android.App.Application.Context, configFile, "dnn_data", AndroidFileAsset.OverwriteMethod.AlwaysOverwrite);
                configFile = configFileInfo.FullName;
#else
                String path = "./dnn_data/";
#endif
                String graphFile = DnnDownloadFile(path, "frozen_inference_graph.pb");
                String lookupFile = DnnDownloadFile(path, "coco-labels-paper.txt");
                string[] labels = File.ReadAllLines(lookupFile);

                String msg = String.Empty;
                // FIX: Net and the input blob wrap native resources; previously both
                // leaked on every detection run (the sibling page already disposes its blob).
                using (Emgu.CV.Dnn.Net net = Emgu.CV.Dnn.DnnInvoke.ReadNetFromTensorflow(graphFile, configFile))
                using (Mat blob = DnnInvoke.BlobFromImage(image[0]))
                using (VectorOfMat tensors = new VectorOfMat())
                {
                    net.SetInput(blob, "image_tensor");
                    // FIX: time the forward pass and return a real status message
                    // instead of null (SetMessage(null) showed nothing). Fully
                    // qualified Stopwatch avoids requiring a new using directive.
                    System.Diagnostics.Stopwatch watch = System.Diagnostics.Stopwatch.StartNew();
                    net.Forward(tensors, new string[] { "detection_out_final", "detection_masks" });
                    watch.Stop();
                    msg = String.Format("Mask RCNN inception completed in {0} milliseconds.", watch.ElapsedMilliseconds);

                    using (Mat boxes = tensors[0])
                    using (Mat masks = tensors[1])
                    {
                        System.Drawing.Size imgSize = image[0].Size;
                        float[,,,] boxesData = boxes.GetData(true) as float[, , ,];
                        // Dimension 2 of the boxes tensor is the detection count.
                        int numDetections = boxesData.GetLength(2);
                        for (int i = 0; i < numDetections; i++)
                        {
                            float score = boxesData[0, 0, i, 2];
                            // Confidence threshold of 0.5.
                            if (score > 0.5)
                            {
                                int classId = (int)boxesData[0, 0, i, 1];
                                String label = labels[classId];
                                // Box coordinates are normalized [0,1]; scale to pixels.
                                float left = boxesData[0, 0, i, 3] * imgSize.Width;
                                float top = boxesData[0, 0, i, 4] * imgSize.Height;
                                float right = boxesData[0, 0, i, 5] * imgSize.Width;
                                float bottom = boxesData[0, 0, i, 6] * imgSize.Height;
                                RectangleF rectF = new RectangleF(left, top, right - left, bottom - top);
                                Rectangle rect = Rectangle.Round(rectF);
                                // Clip to the image so the sub-region Mat below is valid.
                                rect.Intersect(new Rectangle(Point.Empty, imgSize));
                                CvInvoke.Rectangle(image[0], rect, new MCvScalar(0, 0, 0, 0), 1);
                                CvInvoke.PutText(image[0], label, rect.Location, FontFace.HersheyComplex, 1.0, new MCvScalar(0, 0, 255), 2);

                                // Wrap this detection's mask plane without copying the tensor data.
                                int[] masksDim = masks.SizeOfDimension;
                                using (Mat mask = new Mat(
                                    masksDim[2], masksDim[3], DepthType.Cv32F, 1,
                                    masks.GetDataPointer(i, classId),
                                    masksDim[3] * masks.ElementSize))
                                using (Mat maskLarge = new Mat())
                                using (Mat maskLargeInv = new Mat())
                                using (Mat subRegion = new Mat(image[0], rect))
                                using (Mat largeColor = new Mat(subRegion.Size, Emgu.CV.CvEnum.DepthType.Cv8U, 3))
                                {
                                    // Scale the low-resolution mask up to the box size.
                                    CvInvoke.Resize(mask, maskLarge, rect.Size);
                                    // Give the mask at least 30% transparency.
                                    using (ScalarArray sa = new ScalarArray(0.7))
                                        CvInvoke.Min(sa, maskLarge, maskLarge);
                                    // Create the inverse mask (keeps the original pixels).
                                    using (ScalarArray sa = new ScalarArray(1.0))
                                        CvInvoke.Subtract(sa, maskLarge, maskLargeInv);
                                    // The mask overlay color (blue in BGR order).
                                    largeColor.SetTo(new Emgu.CV.Structure.MCvScalar(255, 0, 0));
                                    if (subRegion.NumberOfChannels == 4)
                                    {
                                        // BlendLinear works on 3 channels; round-trip through BGR.
                                        using (Mat bgrSubRegion = new Mat())
                                        {
                                            CvInvoke.CvtColor(subRegion, bgrSubRegion, ColorConversion.Bgra2Bgr);
                                            CvInvoke.BlendLinear(largeColor, bgrSubRegion, maskLarge, maskLargeInv, bgrSubRegion);
                                            CvInvoke.CvtColor(bgrSubRegion, subRegion, ColorConversion.Bgr2Bgra);
                                        }
                                    }
                                    else
                                    {
                                        CvInvoke.BlendLinear(largeColor, subRegion, maskLarge, maskLargeInv, subRegion);
                                    }
                                }
                            }
                        }
                    }
                }
                long time = 0;
                return (new Tuple<Mat, String, long>(image[0], msg, time));
            });
        t.Start();
        var result = await t;
        SetImage(t.Result.Item1);
        // Removed: unused 'computeDevice' local (its value was never displayed).
        SetMessage(t.Result.Item2);
    };
}