public static List<ImagePoint> GetFeaturePoints(Image faceImage)
{
    List<ImagePoint> faceFeatureList = new List<ImagePoint>();
    try
    {
        FSDK.CImage image = new FSDK.CImage(faceImage);
        FSDK.TFacePosition facePosition = image.DetectFace();
        if (0 == facePosition.w)
            return null; // no face found in the image

        FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
        foreach (FSDK.TPoint point in facialFeatures)
        {
            faceFeatureList.Add(new ImagePoint(point.x, point.y));
        }
        return faceFeatureList;
    }
    catch (Exception)
    {
        throw; // rethrow, preserving the original exception and stack trace
    }
}
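// Usage sketch for GetFeaturePoints above (an assumption, not part of the original listing):
// it presumes the method lives on a hypothetical FaceUtils class, that FaceSDK has already been
// activated and initialized (FSDK.ActivateLibrary / FSDK.InitializeLibrary), and that the file
// name is a placeholder path. ImagePoint is the project's own point type.
static void PrintFeaturePointCount(string fileName)
{
    using (Bitmap photo = new Bitmap(fileName))
    {
        List<ImagePoint> points = FaceUtils.GetFeaturePoints(photo);
        Console.WriteLine(points == null
            ? "No face detected."
            : "Detected " + points.Count + " facial feature points.");
    }
}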
private void button1_Click(object sender, EventArgs e)
{
    this.button1.Enabled = false;

    int cameraHandle = 0;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }

    int tracker = 0;
    FSDK.CreateTracker(ref tracker);

    int err = 0; // set realtime face detection parameters
    FSDK.SetTrackerMultipleParameters(tracker,
        "RecognizeFaces=false; HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100; FaceDetectionThreshold=5;",
        ref err);

    while (!needClose)
    {
        Int32 imageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
        {
            Application.DoEvents();
            continue;
        }
        FSDK.CImage image = new FSDK.CImage(imageHandle);

        long[] IDs;
        long faceCount = 0;
        FSDK.FeedFrame(tracker, 0, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum 256 faces detected
        Array.Resize(ref IDs, (int)faceCount);

        Image frameImage = image.ToCLRImage();
        Graphics gr = Graphics.FromImage(frameImage);
        for (int i = 0; i < IDs.Length; ++i)
        {
            FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
            FSDK.GetTrackerFacePosition(tracker, 0, IDs[i], ref facePosition);

            int left = facePosition.xc - (int)(facePosition.w * 0.6);
            int top = facePosition.yc - (int)(facePosition.w * 0.5);
            gr.DrawRectangle(Pens.LightGreen, left, top, (int)(facePosition.w * 1.2), (int)(facePosition.w * 1.2));
        }

        // display current frame
        pictureBox1.Image = frameImage;
        GC.Collect(); // collect the garbage

        // make UI controls accessible
        Application.DoEvents();
    }

    FSDK.FreeTracker(tracker);
    FSDKCam.CloseVideoCamera(cameraHandle);
    FSDKCam.FinalizeCapturing();
}
private void btnOpenPhoto_Click(object sender, EventArgs e)
{
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        try
        {
            FSDK.CImage image = new FSDK.CImage(openFileDialog1.FileName);

            // resize image to fit the window width
            double ratio = System.Math.Min((pictureBox1.Width + 0.4) / image.Width,
                                           (pictureBox1.Height + 0.4) / image.Height);
            image = image.Resize(ratio);

            Image frameImage = image.ToCLRImage();
            Graphics gr = Graphics.FromImage(frameImage);

            FSDK.TFacePosition facePosition = image.DetectFace();
            if (0 == facePosition.w)
                MessageBox.Show("No faces detected", "Face Detection");
            else
            {
                int left = facePosition.xc - (int)(facePosition.w * 0.6f);
                int top = facePosition.yc - (int)(facePosition.w * 0.5f);
                gr.DrawRectangle(Pens.LightGreen, left, top, (int)(facePosition.w * 1.2), (int)(facePosition.w * 1.2));

                FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
                int i = 0;
                foreach (FSDK.TPoint point in facialFeatures)
                    gr.DrawEllipse((++i > 2) ? Pens.LightGreen : Pens.Blue, point.x, point.y, 3, 3);
                gr.Flush();
            }

            // display image
            pictureBox1.Image = frameImage;
            pictureBox1.Refresh();
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "Exception");
        }
    }
}
public void ShowDetect()
{
    int cameraHandle = 0;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }

    int tracker = 0; // creating a Tracker
    if (FSDK.FSDKE_OK != FSDK.LoadTrackerMemoryFromFile(ref tracker, TrackerMemoryFile)) // try to load saved tracker state
        FSDK.CreateTracker(ref tracker); // if it could not be loaded, create a new tracker

    int err = 0; // set realtime face detection parameters
    FSDK.SetTrackerMultipleParameters(tracker,
        "HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100; FaceDetectionThreshold=5;",
        ref err);

    while (!needClose)
    {
        Int32 imageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
        {
            Application.DoEvents();
            continue;
        }
        FSDK.CImage image = new FSDK.CImage(imageHandle);

        long[] IDs;
        long faceCount = 0;
        FSDK.FeedFrame(tracker, 0, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum of 256 faces detected
        Array.Resize(ref IDs, (int)faceCount);

        // make UI controls accessible (to find out whether the user clicked on a face)
        Application.DoEvents();

        Image frameImage = image.ToCLRImage();
        Graphics gr = Graphics.FromImage(frameImage);

        IsDetect = "False"; // no face found in this frame yet
        for (int i = 0; i < IDs.Length; ++i)
        {
            IsDetect = "True"; // at least one face was found

            FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
            FSDK.GetTrackerFacePosition(tracker, 0, IDs[i], ref facePosition);

            int left = facePosition.xc - (int)(facePosition.w * 0.6);
            int top = facePosition.yc - (int)(facePosition.w * 0.5);
            int w = (int)(facePosition.w * 1.2);

            String name;
            int res = FSDK.GetAllNames(tracker, IDs[i], out name, 65536); // maximum of 65536 characters
            if (FSDK.FSDKE_OK == res && name.Length > 0)
            {
                // draw the name
                StringFormat format = new StringFormat();
                format.Alignment = StringAlignment.Center;
                gr.DrawString(name, new System.Drawing.Font("Arial", 16),
                    new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen),
                    facePosition.xc, top + w + 5, format);
            }

            Pen pen = Pens.LightGreen;
            if (mouseX >= left && mouseX <= left + w && mouseY >= top && mouseY <= top + w)
            {
                pen = Pens.Blue;
                if (ProgramState.psRemember == programState)
                {
                    if (FSDK.FSDKE_OK == FSDK.LockID(tracker, IDs[i]))
                    {
                        // enrollment is disabled in this build; the original sample would ask for
                        // a user name here, then call FSDK.SetName and FSDK.UnlockID
                    }
                }
            }
            gr.DrawRectangle(pen, left, top, w, w);
        }

        programState = ProgramState.psRecognize;

        // display current frame
        pictureBox1.Image = frameImage;
        GC.Collect(); // collect the garbage after the deletion

        if (IsDetect == "True")
        {
            // save the current frame so another module can run the image comparison on it
            string strapppath = LiveFaceScan.CameraSetting.Drive + ":\\Kiosk_Image_Search\\imagecompare.jpg";
            if (System.IO.File.Exists(strapppath.Replace("\\", "/")))
            {
                System.IO.File.Delete(strapppath.Replace("\\", "/"));
            }
            pictureBox1.Image.Save(strapppath.Replace("\\", "/"), ImageFormat.Jpeg);
        }
    }

    FSDK.SaveTrackerMemoryToFile(tracker, TrackerMemoryFile);
    FSDK.FreeTracker(tracker);
    FSDKCam.CloseVideoCamera(cameraHandle);
    FSDKCam.FinalizeCapturing();
}
public void PopulateData()
{
    try
    {
        if (FSDK.FSDKE_OK != FSDK.ActivateLibrary("J98H0OOSi4gGwMxLZ0daeM5sCGAFl4wyClviJFdPlYpa48vaFm46LvwLq9T9L0W3vMjimMsBOFFSuTmn8S7nsWoLdS0GLiwGXHuXDJxlgYMo4ufYFVraAPrJfiDeKWaLLoxlR4ZbMIMnujLnM+t/NjixxITVxO522C0Sh8BcbAU="))
        {
            MessageBox.Show("Please run the License Key Wizard (Start - Luxand - FaceSDK - License Key Wizard)",
                "Error activating FaceSDK", MessageBoxButtons.OK, MessageBoxIcon.Error);
            Application.Exit();
        }
        FSDK.InitializeLibrary();
        FSDKCam.InitializeCapturing();

        string[] cameraList;
        int count;
        FSDKCam.GetCameraList(out cameraList, out count);
        if (0 == count)
        {
            MessageBox.Show("Please attach a camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            Application.Exit();
        }

        cameraName = LiveFaceScan.CameraSetting.CameraName;
        FSDKCam.VideoFormatInfo[] formatList;
        FSDKCam.GetVideoFormatList(ref cameraName, out formatList, out count);
        int VideoFormat = LiveFaceScan.CameraSetting.VideoFormat; // choose a video format

        // fixed kiosk layout instead of sizing the form to the video format
        pictureBox1.Location = new Point(0, 188);
        pictureBox1.Width = 800;
        pictureBox1.Height = 800;
        this.Width = 800;
        this.Height = 1224;
        pictureSearch.Location = new Point(222, 800 + 260);

        int cameraHandle = 0;
        int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
        if (r != FSDK.FSDKE_OK)
        {
            MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            Application.Exit();
        }

        int tracker = 0; // creating a Tracker
        if (FSDK.FSDKE_OK != FSDK.LoadTrackerMemoryFromFile(ref tracker, TrackerMemoryFile)) // try to load saved tracker state
            FSDK.CreateTracker(ref tracker); // if it could not be loaded, create a new tracker

        int err = 0; // set realtime face detection parameters
        FSDK.SetTrackerMultipleParameters(tracker,
            "HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100; FaceDetectionThreshold=5;",
            ref err);

        string IsDetect;
        while (!needClose)
        {
            Int32 imageHandle = 0;
            if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
            {
                Application.DoEvents();
                continue;
            }
            FSDK.CImage image = new FSDK.CImage(imageHandle);

            long[] IDs;
            long faceCount = 0;
            FSDK.FeedFrame(tracker, 0, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum of 256 faces detected
            Array.Resize(ref IDs, (int)faceCount);

            // make UI controls accessible (to find out whether the user clicked on a face)
            Application.DoEvents();

            Image frameImage = image.ToCLRImage();
            Graphics gr = Graphics.FromImage(frameImage);

            IsDetect = "False";
            for (int i = 0; i < IDs.Length; ++i)
            {
                IsDetect = "True";

                FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
                FSDK.GetTrackerFacePosition(tracker, 0, IDs[i], ref facePosition);

                int left = facePosition.xc - (int)(facePosition.w * 0.6);
                int top = facePosition.yc - (int)(facePosition.w * 0.5);
                int w = (int)(facePosition.w * 1.2);

                String name;
                int res = FSDK.GetAllNames(tracker, IDs[i], out name, 65536); // maximum of 65536 characters
                if (FSDK.FSDKE_OK == res && name.Length > 0)
                {
                    // draw the name
                    StringFormat format = new StringFormat();
                    format.Alignment = StringAlignment.Center;
                    gr.DrawString(name, new System.Drawing.Font("Arial", 16),
                        new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen),
                        facePosition.xc, top + w + 5, format);
                }

                Pen pen = Pens.LightGreen;
                if (mouseX >= left && mouseX <= left + w && mouseY >= top && mouseY <= top + w)
                {
                    pen = Pens.Blue;
                    if (ProgramState.psRemember == programState)
                    {
                        if (FSDK.FSDKE_OK == FSDK.LockID(tracker, IDs[i]))
                        {
                            // enrollment is disabled in this build
                        }
                    }
                }
                gr.DrawRectangle(pen, left, top, w, w);
            }

            programState = ProgramState.psRecognize;

            // display current frame
            pictureBox1.Image = frameImage;
            GC.Collect(); // collect the garbage after the deletion

            if (IsDetect == "True")
            {
                // the frame saving and image comparison steps are disabled in this build
                // (a similar active version appears in ShowDetect above)
            }
        }

        FSDK.SaveTrackerMemoryToFile(tracker, TrackerMemoryFile);
        FSDK.FreeTracker(tracker);
        FSDKCam.CloseVideoCamera(cameraHandle);
        FSDKCam.FinalizeCapturing();
    }
    catch (Exception)
    {
        // initialization and capture errors are silently ignored in this build
    }
}
// camera start button click handler
private void FaceDetection(object sender, EventArgs e)
{
    int motion_check = 0;
    cameraName = cmb_cam.SelectedItem.ToString();

    int cameraHandle = 0;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }

    int tracker = 0; // creating a Tracker
    if (FSDK.FSDKE_OK != FSDK.LoadTrackerMemoryFromFile(ref tracker, dataRootDir + "\\" + TrackerMemoryFile)) // try to load saved tracker state
        FSDK.CreateTracker(ref tracker); // if it could not be loaded, create a new tracker

    int err = 0; // set realtime face detection parameters
    FSDK.SetTrackerMultipleParameters(tracker,
        "HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100; FaceDetectionThreshold=5;",
        ref err);

    while (!needClose)
    {
        Int32 imageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
        {
            Application.DoEvents();
            continue;
        }
        FSDK.CImage image = new FSDK.CImage(imageHandle);

        long[] IDs;
        long faceCount = 0;
        FSDK.FeedFrame(tracker, 0, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum of 256 faces detected; also used to check whether any face is present
        Array.Resize(ref IDs, (int)faceCount);

        // make UI controls accessible (to find out whether the user clicked on a face)
        Application.DoEvents();

        Image frameImage = image.ToCLRImage();
        Graphics gr = Graphics.FromImage(frameImage);

        if (faceCount == 0)
        {
            motion_check++; // no face in this frame
        }
        else
        {
            motion_check = 0; // a face is present, reset the counter
        }
        if (motion_check > 1000)
        {
            // no face seen for a long time: switch to security mode
            System.Windows.Forms.MessageBox.Show("Security mode");
            motion_check = 0;
        }

        for (int i = 0; i < IDs.Length; ++i)
        {
            FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
            FSDK.GetTrackerFacePosition(tracker, 0, IDs[i], ref facePosition);

            int left = facePosition.xc - (int)(facePosition.w * 0.6);
            int top = facePosition.yc - (int)(facePosition.w * 0.5);
            int w = (int)(facePosition.w * 1.2);

            String name;
            int res = FSDK.GetAllNames(tracker, IDs[i], out name, 65536); // maximum of 65536 characters
            if (FSDK.FSDKE_OK == res && name.Length > 0)
            {
                // the tracked face matches a stored name: draw it
                StringFormat format = new StringFormat();
                format.Alignment = StringAlignment.Center;
                gr.DrawString("Name: " + name, new System.Drawing.Font("Arial", 16),
                    new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen),
                    facePosition.xc, top + w + 5, format);
            }
            // otherwise the face is unknown and no name is drawn

            FSDK.UnlockID(tracker, IDs[i]);
            Pen pen = Pens.LightGreen;
            gr.DrawRectangle(pen, left, top, w, w);
        }

        programState = ProgramState.psRecognize;

        // display current frame
        pictureBox1.Image = frameImage;
        GC.Collect(); // collect the garbage after the deletion
    }

    FSDK.FreeTracker(tracker);
    FSDKCam.CloseVideoCamera(cameraHandle);
    FSDKCam.FinalizeCapturing();
}
int Recongnizehandle()
{
#if TIME_DEBUG
    label2.Text = "Stopwatch REC start";
    stopwatch.Reset();
    stopwatch.Start();
#endif
    faceTemplates.Clear();
    int maxbear = 0;
    int minbear = 0;
    reset();
    recperson = -1;

    for (int count = 0; count < REPEAT_REC; count++)
    {
        Int32 ImageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(CameraHandle, ref ImageHandle))
        {
            return -1;
        }
        FSDK.CImage Image = new FSDK.CImage(ImageHandle);
        Image FrameImage = Image.ToCLRImage();
        gr = Graphics.FromImage(FrameImage);

        FSDK.TFacePosition[] FacePosition = Image.DetectMultipleFaces();
        if (FacePosition.Length == 0)
        {
            minbear++;
            // the no-face state has exceeded its tolerance
            if (minbear > MAX_MINBEAR)
            {
                recperson = -2;
#if TIME_DEBUG
                stopwatch.Stop();
                TimeSpan timespan2 = stopwatch.Elapsed;
                double milliseconds2 = timespan2.TotalMilliseconds; // total milliseconds
                label2.Text = milliseconds2.ToString() + " .REM";
#endif
                return 1;
            }
            count--;
            continue;
        }
        if (FacePosition.Length != 1)
        {
            maxbear++;
            // the multiple-face state has exceeded its tolerance
            if (maxbear > MAX_MAXBEAR)
            {
                recperson = -3;
                return 1;
            }
            count--;
            continue;
        }

        FaceTemplate Template = new FaceTemplate();
        FSDK.TPoint[] features = Image.DetectEyesInRegion(ref FacePosition[0]);
        Template.templateData = Image.GetFaceTemplateUsingEyes(ref features);

        int recnum = recongnize(Template);
        faceTemplates.Add(Template);
        who[count] = recnum;
        if (recnum != -1)
        {
            StringFormat format = new StringFormat();
            format.Alignment = StringAlignment.Center;
            gr.DrawString(userName[recnum], new System.Drawing.Font("Arial", 16),
                new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen),
                FacePosition[0].xc, FacePosition[0].yc + FacePosition[0].w * 0.55f, format);
        }
        Application.DoEvents();
    }

    recperson = vote();
    if (recperson > -1)
    {
        // add the collected templates to the recognized user's template list
        List<FaceTemplate> temp = new List<FaceTemplate>(faceTemplates.ToArray());
        int i;
        for (i = 0; i < temp.Count; i++)
        {
            UserTemplates[recperson].Add(temp[i]);
        }
    }
#if TIME_DEBUG
    stopwatch.Stop();
    TimeSpan timespan = stopwatch.Elapsed;
    double milliseconds = timespan.TotalMilliseconds; // total milliseconds
    label2.Text = milliseconds.ToString() + " .REC";
#endif
    return 0;
}
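// The recongnize(...) helper called in Recongnizehandle is not included in this listing.
// Below is a plausible sketch only, assuming UserTemplates is a List<List<FaceTemplate>> as
// used above and that a match is accepted at a threshold derived from a chosen false
// acceptance rate; FSDK.MatchFaces and FSDK.GetMatchingThresholdAtFAR are the FaceSDK
// matching calls this sketch relies on.
int RecognizeTemplateSketch(FaceTemplate probe)
{
    float threshold = 0.0f;
    FSDK.GetMatchingThresholdAtFAR(0.01f, ref threshold); // threshold for roughly 1% FAR

    int bestUser = -1;
    float bestSimilarity = 0.0f;
    for (int u = 0; u < UserTemplates.Count; u++)
    {
        foreach (FaceTemplate stored in UserTemplates[u])
        {
            float similarity = 0.0f;
            byte[] probeData = probe.templateData;
            byte[] storedData = stored.templateData;
            if (FSDK.FSDKE_OK != FSDK.MatchFaces(ref probeData, ref storedData, ref similarity))
                continue;
            if (similarity >= threshold && similarity > bestSimilarity)
            {
                bestSimilarity = similarity;
                bestUser = u; // remember the closest user above the threshold
            }
        }
    }
    return bestUser; // -1 when no stored template matches
}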
int AddFacehandle()
{
#if TIME_DEBUG
    label2.Text = "Stopwatch REC start";
    stopwatch.Reset();
    stopwatch.Start();
#endif
    faceTemplates.Clear();
    int maxbear = 0;
    int minbear = 0;

    for (int count = 0; count < REPEAT_REM; count++)
    {
        Int32 ImageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(CameraHandle, ref ImageHandle))
        {
            return -1;
        }
        FSDK.CImage Image = new FSDK.CImage(ImageHandle);
        Image FrameImage = Image.ToCLRImage();
        gr = Graphics.FromImage(FrameImage);

        // only one person is allowed in the detection area
        FSDK.TFacePosition[] FacePosition = Image.DetectMultipleFaces();
        if (FacePosition.Length == 0)
        {
            minbear++;
            // the no-face state has exceeded its tolerance
            if (minbear > MAX_MINBEAR)
            {
#if TIME_DEBUG
                stopwatch.Stop();
                TimeSpan timespan2 = stopwatch.Elapsed;
                double milliseconds2 = timespan2.TotalMilliseconds; // total milliseconds
                label2.Text = milliseconds2.ToString() + " .REM";
#endif
                faceTemplates.Clear();
                username = "";
                recperson = -2; // no people in view
                return -1;
            }
            count--;
            continue;
        }
        if (FacePosition.Length != 1)
        {
            maxbear++;
            // the multiple-face state has exceeded its tolerance
            if (maxbear > MAX_MAXBEAR)
            {
                faceTemplates.Clear();
                username = "";
                recperson = -3; // too many people in view
                return -1;
            }
            count--;
            continue;
        }

        FaceTemplate Template = new FaceTemplate();
        FSDK.TPoint[] features = Image.DetectEyesInRegion(ref FacePosition[0]);
        Template.templateData = Image.GetFaceTemplateUsingEyes(ref features);
        faceTemplates.Add(Template);
        Application.DoEvents();
    }

    if (AddNewOne)
    {
        if (btn1Click)
        {
            btn1Click = false;
            // persist the collected templates for the first enrolled user
            for (int i = 0; i < REPEAT_REM; i++)
            {
                int fnum = btn1Count * REPEAT_REM + i;
                string tpath = ".\\Users\\" + btn1UserName + fnum.ToString() + ".dat";
                MemoryStream m = new MemoryStream(faceTemplates[i].templateData);
                FileStream fs = new FileStream(tpath, FileMode.OpenOrCreate);
                m.WriteTo(fs);
                m.Close();
                fs.Close();
            }
        }
        if (btn2Click)
        {
            btn2Click = false;
            // persist the collected templates for the second enrolled user
            for (int i = 0; i < REPEAT_REM; i++)
            {
                int fnum = btn2Count * REPEAT_REM + i;
                string tpath = ".\\Users\\" + btn2UserName + fnum.ToString() + ".dat";
                MemoryStream m = new MemoryStream(faceTemplates[i].templateData);
                FileStream fs = new FileStream(tpath, FileMode.OpenOrCreate);
                m.WriteTo(fs);
                m.Close();
                fs.Close();
            }
        }

        AddNewOne = false;
        string name = username;
        username = "";
        if (namexist(name) == -1)
        {
            // new user: create a template list under the given name
            userName.Add(name);
            List<FaceTemplate> temp = new List<FaceTemplate>(faceTemplates.ToArray());
            UserTemplates.Add(temp);
        }
        else
        {
            // existing user: append the templates to the stored list
            List<FaceTemplate> temp = new List<FaceTemplate>(faceTemplates.ToArray());
            int existnum = namexist(name);
            for (int i = 0; i < temp.Count; i++)
            {
                UserTemplates[existnum].Add(temp[i]);
            }
        }
    }

    faceTemplates.Clear();
    username = "";
#if TIME_DEBUG
    stopwatch.Stop();
    TimeSpan timespan = stopwatch.Elapsed;
    double milliseconds = timespan.TotalMilliseconds; // total milliseconds
    label2.Text = milliseconds.ToString() + " .REM";
#endif
    recperson = -1;
    return 0;
}
int Normalhandle()
{
    try
    {
        Int32 ImageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(CameraHandle, ref ImageHandle))
        {
            return -1;
        }
        FSDK.CImage Image = new FSDK.CImage(ImageHandle);
        Image FrameImage = Image.ToCLRImage();
        gr = Graphics.FromImage(FrameImage);
        pictureBox1.Image = FrameImage;

        FSDK.TFacePosition[] FacePosition = Image.DetectMultipleFaces();
        for (int person = 0; person < FacePosition.Length; person++)
            draw(FacePosition[person]);

        GC.Collect(); // collect the garbage after the deletion
        return 0;
    }
    catch
    {
        return -1;
    }
}
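// The draw(...) helper used by Normalhandle and Recongnizehandle is not shown in this listing.
// A plausible sketch, reusing the bounding-box arithmetic from the other examples and the gr
// Graphics field that Normalhandle assigns; treat it as an assumption rather than the original.
void draw(FSDK.TFacePosition facePosition)
{
    int left = facePosition.xc - (int)(facePosition.w * 0.6);
    int top = facePosition.yc - (int)(facePosition.w * 0.5);
    int w = (int)(facePosition.w * 1.2);
    gr.DrawRectangle(Pens.LightGreen, left, top, w, w); // outline the detected face on the current frame
}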
private void button1_Click(object sender, EventArgs e)
{
    this.button1.Enabled = false;

    int cameraHandle = 0;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }

    int tracker = 0; // creating a Tracker
    if (FSDK.FSDKE_OK != FSDK.LoadTrackerMemoryFromFile(ref tracker, TrackerMemoryFile)) // try to load saved tracker state
        FSDK.CreateTracker(ref tracker); // if it could not be loaded, create a new tracker

    int err = 0; // set realtime face detection parameters
    FSDK.SetTrackerMultipleParameters(tracker,
        "HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100; FaceDetectionThreshold=5;",
        ref err);

    while (!needClose)
    {
        Int32 imageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
        {
            Application.DoEvents();
            continue;
        }
        FSDK.CImage image = new FSDK.CImage(imageHandle);

        long[] IDs;
        long faceCount = 0;
        FSDK.FeedFrame(tracker, 0, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum of 256 faces detected
        Array.Resize(ref IDs, (int)faceCount);

        // make UI controls accessible (to find out whether the user clicked on a face)
        Application.DoEvents();

        Image frameImage = image.ToCLRImage();
        Graphics gr = Graphics.FromImage(frameImage);

        for (int i = 0; i < IDs.Length; ++i)
        {
            FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
            FSDK.GetTrackerFacePosition(tracker, 0, IDs[i], ref facePosition);

            int left = facePosition.xc - (int)(facePosition.w * 0.6);
            int top = facePosition.yc - (int)(facePosition.w * 0.5);
            int w = (int)(facePosition.w * 1.2);

            String name;
            int res = FSDK.GetAllNames(tracker, IDs[i], out name, 65536); // maximum of 65536 characters
            if (FSDK.FSDKE_OK == res && name.Length > 0)
            {
                // draw the name
                StringFormat format = new StringFormat();
                format.Alignment = StringAlignment.Center;
                gr.DrawString(name, new System.Drawing.Font("Arial", 16),
                    new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen),
                    facePosition.xc, top + w + 5, format);
            }

            Pen pen = Pens.LightGreen;
            if (mouseX >= left && mouseX <= left + w && mouseY >= top && mouseY <= top + w)
            {
                pen = Pens.Blue;
                if (ProgramState.psRemember == programState)
                {
                    if (FSDK.FSDKE_OK == FSDK.LockID(tracker, IDs[i]))
                    {
                        // ask the user for a name and attach it to the tracked ID
                        InputName inputName = new InputName();
                        if (DialogResult.OK == inputName.ShowDialog())
                        {
                            userName = inputName.userName;
                            FSDK.SetName(tracker, IDs[i], userName ?? "");
                            FSDK.UnlockID(tracker, IDs[i]);
                        }
                    }
                }
            }
            gr.DrawRectangle(pen, left, top, w, w);
        }

        programState = ProgramState.psRecognize;

        // display current frame
        pictureBox1.Image = frameImage;
        GC.Collect(); // collect the garbage after the deletion
    }

    FSDK.SaveTrackerMemoryToFile(tracker, TrackerMemoryFile);
    FSDK.FreeTracker(tracker);
    FSDKCam.CloseVideoCamera(cameraHandle);
    FSDKCam.FinalizeCapturing();
}
public void DetectFace(Bitmap bitmap)
{
    Left = 0;
    Top = 0;
    FaceDetected = false;
    _facialFeatures = null;

    FSDK.CImage image = new FSDK.CImage(bitmap);
    FSDK.TFacePosition facePosition = image.DetectFace();
    FaceDetected = facePosition.w > 0;
    if (FaceDetected)
    {
        Left = facePosition.xc - facePosition.w / 2;
        Top = facePosition.yc - facePosition.w / 2;
        _facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
        _eyes = image.DetectEyesInRegion(ref facePosition);
    }
}
public void TrackFace(Bitmap bitmap)
{
    Left = 0;
    Top = 0;
    FaceDetected = false;
    _facialFeatures = null;

    FSDK.CImage image = new FSDK.CImage(bitmap);
    long faceCount = 0;
    long[] ids;
    if (image.ImageHandle == 0)
    {
        return;
    }
    // the ID buffer is sized for a single face (sizeof(long) * 1)
    FSDK.FeedFrame(_tracker, 0L, image.ImageHandle, ref faceCount, out ids, sizeof(long) * 1);
    Array.Resize(ref ids, (int)faceCount);

    FSDK.TFacePosition facePosition = new FSDK.TFacePosition(); // initialized instead of null before the ref call
    foreach (var id in ids)
    {
        FSDK.GetTrackerFacePosition(_tracker, 0L, id, ref facePosition);
        FaceDetected = facePosition.w > 0;
        if (FaceDetected)
        {
            Left = facePosition.xc - facePosition.w / 2;
            Top = facePosition.yc - facePosition.w / 2;
            FSDK.GetTrackerFacialFeatures(_tracker, 0, id, out _facialFeatures);
            FSDK.GetTrackerEyes(_tracker, 0, id, out _eyes);
        }
    }
}
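// TrackFace above assumes a _tracker handle held by the containing class. A minimal sketch of
// how that handle could be created and released, using only the tracker calls that appear in
// the other examples; the field and method names here are assumptions.
private int _tracker = 0;

public void InitTracker()
{
    FSDK.CreateTracker(ref _tracker);
    int err = 0;
    // lighter real-time settings, mirroring the parameters used in the tracker examples above
    FSDK.SetTrackerMultipleParameters(_tracker,
        "HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100;",
        ref err);
}

public void ReleaseTracker()
{
    if (_tracker != 0)
    {
        FSDK.FreeTracker(_tracker);
        _tracker = 0;
    }
}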
private void detect_button_Click(object sender, EventArgs e)
{
    var samples = Directory.GetFiles("C:\\Projects\\misoi\\bsuir-misoi\\Presentation.WindowsForms\\Samples");
    foreach (var sample in samples)
    {
        try
        {
            FSDK.CImage image = new FSDK.CImage(sample);

            // resize image to fit the window width
            double ratio = System.Math.Min((resultPictureBox.Width + 0.4) / image.Width,
                                           (resultPictureBox.Height + 0.4) / image.Height);
            image = image.Resize(ratio);

            FSDK.TFacePosition facePosition = image.DetectFace();
            if (0 == facePosition.w)
                MessageBox.Show("No faces detected", "Face Detection");
            else
            {
                FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
                using (var writer = File.AppendText("sasi.txt"))
                {
                    // write feature coordinates normalized by the face width, followed by the label
                    foreach (FSDK.TPoint point in facialFeatures)
                    {
                        writer.Write((double)(point.x - facePosition.xc) / facePosition.w + "," +
                                     (double)(point.y - facePosition.yc) / facePosition.w + ",");
                    }
                    writer.Write("Happy");
                    writer.WriteLine();
                }
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "Exception");
        }
    }
}
private void openBtn_Click(object sender, EventArgs e)
{
    using (var dlg = new OpenFileDialog())
    {
        dlg.Title = "Open Image";
        dlg.Filter = "Image Files(*.BMP;*.JPG;*.GIF;*.PNG)|*.BMP;*.JPG;*.GIF;*.PNG";
        if (dlg.ShowDialog() == DialogResult.OK)
        {
            sourcePictureBox.Image = new Bitmap(dlg.FileName);
            resultPictureBox.Image = null;
            try
            {
                FSDK.CImage image = new FSDK.CImage(dlg.FileName);

                // resize image to fit the window width
                double ratio = System.Math.Min((resultPictureBox.Width + 0.4) / image.Width,
                                               (resultPictureBox.Height + 0.4) / image.Height);
                image = image.Resize(ratio);

                Image frameImage = image.ToCLRImage();
                Graphics gr = Graphics.FromImage(frameImage);

                FSDK.TFacePosition facePosition = image.DetectFace();
                if (0 == facePosition.w)
                    MessageBox.Show("No faces detected", "Face Detection");
                else
                {
                    int left = facePosition.xc - facePosition.w / 2;
                    int top = facePosition.yc - facePosition.w / 2;
                    gr.DrawRectangle(Pens.LightGreen, left, top, facePosition.w, facePosition.w);

                    FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
                    int i = 0;
                    using (var writer = File.AppendText("sasi.txt"))
                    {
                        // mark the first two points (the eyes) in blue, the rest in green,
                        // and log the coordinates relative to the face center with the label
                        foreach (FSDK.TPoint point in facialFeatures)
                        {
                            gr.DrawEllipse((++i > 2) ? Pens.LightGreen : Pens.Blue, point.x, point.y, 3, 3);
                            writer.Write((point.x - facePosition.xc) + "," + (point.y - facePosition.yc) + ",");
                        }
                        writer.Write("Happy");
                        writer.WriteLine();
                    }
                    gr.Flush();
                }

                // display image
                resultPictureBox.Image = frameImage;
                resultPictureBox.Refresh();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "Exception");
            }
        }
    }
}
private void button1_Click(object sender, EventArgs e)
{
    this.button1.Enabled = false;

    int cameraHandle = 0;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }

    int tracker = 0;
    FSDK.CreateTracker(ref tracker);

    int err = 0; // set realtime face detection parameters
    FSDK.SetTrackerMultipleParameters(tracker,
        "RecognizeFaces=false; DetectGender=true; HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100; FaceDetectionThreshold=5;",
        ref err);

    while (!needClose)
    {
        Int32 imageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
        {
            Application.DoEvents();
            continue;
        }
        FSDK.CImage image = new FSDK.CImage(imageHandle);

        long[] IDs;
        long faceCount = 0;
        FSDK.FeedFrame(tracker, 0, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum 256 faces detected
        Array.Resize(ref IDs, (int)faceCount);

        Image frameImage = image.ToCLRImage();
        Graphics gr = Graphics.FromImage(frameImage);
        for (int i = 0; i < IDs.Length; ++i)
        {
            FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
            FSDK.GetTrackerFacePosition(tracker, 0, IDs[i], ref facePosition);

            int left = facePosition.xc - (int)(facePosition.w * 0.6);
            int top = facePosition.yc - (int)(facePosition.w * 0.5);
            int w = (int)(facePosition.w * 1.2);
            gr.DrawRectangle(Pens.LightGreen, left, top, w, w);

            String AttributeValues;
            if (0 == FSDK.GetTrackerFacialAttribute(tracker, 0, IDs[i], "Gender", out AttributeValues, 1024))
            {
                float ConfidenceMale = 0.0f;
                float ConfidenceFemale = 0.0f;
                FSDK.GetValueConfidence(AttributeValues, "Male", ref ConfidenceMale);
                FSDK.GetValueConfidence(AttributeValues, "Female", ref ConfidenceFemale);
                String str = (ConfidenceMale > ConfidenceFemale ? "Male" : "Female") + ", " +
                    (ConfidenceMale > ConfidenceFemale ? (int)(ConfidenceMale * 100) : (int)(ConfidenceFemale * 100)).ToString() + "%";

                StringFormat format = new StringFormat();
                format.Alignment = StringAlignment.Center;
                gr.DrawString(str, new System.Drawing.Font("Arial", 16),
                    new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen),
                    facePosition.xc, top + w + 5, format);
            }
        }

        // display current frame
        pictureBox1.Image = frameImage;
        GC.Collect(); // collect the garbage

        // make UI controls accessible
        Application.DoEvents();
    }

    FSDK.FreeTracker(tracker);
    FSDKCam.CloseVideoCamera(cameraHandle);
    FSDKCam.FinalizeCapturing();
}
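// The same gender estimate can be obtained from a single image instead of a tracker stream.
// A minimal sketch assuming the library is already activated and initialized and that
// fileName points at an existing photo; it uses only calls that appear elsewhere in this
// listing (DetectFace, DetectFacialFeaturesInRegion, DetectFacialAttributeUsingFeatures,
// GetValueConfidence).
static string EstimateGender(string fileName)
{
    FSDK.CImage image = new FSDK.CImage(fileName);
    FSDK.TFacePosition facePosition = image.DetectFace();
    if (facePosition.w == 0)
        return "no face";

    FSDK.TPoint[] features = image.DetectFacialFeaturesInRegion(ref facePosition);
    String attributeValues;
    FSDK.DetectFacialAttributeUsingFeatures(image.ImageHandle, ref features, "Gender", out attributeValues, 1024);

    float male = 0.0f, female = 0.0f;
    FSDK.GetValueConfidence(attributeValues, "Male", ref male);
    FSDK.GetValueConfidence(attributeValues, "Female", ref female);
    return male > female
        ? "Male, " + (int)(male * 100) + "%"
        : "Female, " + (int)(female * 100) + "%";
}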
public bool Recognize(ref string path, bool needCrop)
{
    FaceRectRelative = RectangleF.Empty;
    LeftEyeCenter = RightEyeCenter = LeftMouth = LeftNose = RightNose = RightMouth = Vector2.Zero;

    FSDK.TPoint[] pointFeature;
    FSDK.CImage image = new FSDK.CImage(path);
    var faceRectangle = Rectangle.Empty;

    #region Determine the face color
    if (needCrop)
    {
        var openCvImage = new Image<Bgr, byte>(path);
        var detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
        using (var skin = new Image<Gray, Byte>(image.Width, image.Height))
        {
            var color = new Bgr(0, 0, 0);
            var count = 0;
            detector.Process(openCvImage, skin);
            // average the color of all pixels classified as skin
            for (int y = 0; y < skin.Height; y++)
            {
                for (int x = 0; x < skin.Width; x++)
                {
                    byte value = skin.Data[y, x, 0];
                    if (value != 0)
                    {
                        var c = openCvImage[y, x];
                        color.Red += c.Red;
                        color.Green += c.Green;
                        color.Blue += c.Blue;
                        ++count;
                    }
                }
            }
            if (count > 0)
            {
                color.Red /= count;
                color.Green /= count;
                color.Blue /= count;
                FaceColor = new Vector4((float)color.Red / 255f, (float)color.Green / 255f, (float)color.Blue / 255f, 1.0f);
            }
            else
            {
                FaceColor = new Vector4(0.72f, 0.72f, 0.72f, 1.0f); // fall back to a neutral skin tone
            }
        }
    }
    #endregion

    FSDK.TFacePosition facePosition = image.DetectFace();
    if (0 == facePosition.w)
    {
        faceRectangle = new Rectangle(0, 0, image.Width, image.Height);
        MessageBox.Show("No faces detected", "Face Detection");
        return false;
    }
    else
    {
        pointFeature = image.DetectFacialFeaturesInRegion(ref facePosition);

        // determine gender
        String AttributeValues;
        FSDK.DetectFacialAttributeUsingFeatures(image.ImageHandle, ref pointFeature, "Gender", out AttributeValues, 1024);
        float ConfidenceMale = 0.0f;
        float ConfidenceFemale = 0.0f;
        FSDK.GetValueConfidence(AttributeValues, "Male", ref ConfidenceMale);
        FSDK.GetValueConfidence(AttributeValues, "Female", ref ConfidenceFemale);
        IsMale = ConfidenceMale > ConfidenceFemale;

        int left = facePosition.xc - (int)(facePosition.w * 0.6f);
        left = left < 0 ? 0 : left;
        // int top = facePosition.yc - (int)(facePosition.w * 0.5f); // the detector places the face top unreliably, so it is not used for cropping
        BottomFace = new Vector2(pointFeature[11].x, pointFeature[11].y);
        var distance = pointFeature[2].y - pointFeature[11].y;
        var top = pointFeature[16].y + distance - 15; // estimate the face top from the feature points (legacy heuristic)
        top = top < 0 ? 0 : top;
        var newWidth = (int)(facePosition.w * 1.2);
        newWidth = newWidth > image.Width ? image.Width : newWidth;
        faceRectangle = new Rectangle(left, top, newWidth,
            BottomFace.Y + 15 < image.Height ? (int)(BottomFace.Y + 15) - top : image.Height - top - 1);

        if (needCrop) // when creating a project, crop the photo so that only the head remains
        {
            using (var croppedImage = ImageEx.Crop(path, faceRectangle))
            {
                path = UserConfig.AppDataDir;
                FolderEx.CreateDirectory(path);
                path = Path.Combine(path, "tempHaarImage.jpg");
                croppedImage.Save(path, ImageFormat.Jpeg);
                return Recognize(ref path, false);
            }
        }

        LeftEyeCenter = new Vector2(pointFeature[0].x, pointFeature[0].y);
        RightEyeCenter = new Vector2(pointFeature[1].x, pointFeature[1].y);
        LeftMouth = new Vector2(pointFeature[3].x, pointFeature[3].y);
        RightMouth = new Vector2(pointFeature[4].x, pointFeature[4].y);
        LeftNose = new Vector2(pointFeature[45].x, pointFeature[45].y);
        RightNose = new Vector2(pointFeature[46].x, pointFeature[46].y);
        TopFace = new Vector2(pointFeature[66].x, pointFeature[66].y);
        MiddleFace1 = new Vector2(pointFeature[66].x, pointFeature[66].y);
        MiddleFace2 = new Vector2(pointFeature[5].x, pointFeature[5].y);
        RightMiddleFace1 = new Vector2(pointFeature[67].x, pointFeature[67].y);
        RightMiddleFace2 = new Vector2(pointFeature[6].x, pointFeature[6].y);

        #region Rotate the photo so that the eyes are level
        var v = new Vector2(LeftEyeCenter.X - RightEyeCenter.X, LeftEyeCenter.Y - RightEyeCenter.Y);
        v.Normalize();
        var xVector = new Vector2(1, 0);
        var xDiff = xVector.X - v.X;
        var yDiff = xVector.Y - v.Y;
        var angle = Math.Atan2(yDiff, xDiff) * 180.0 / Math.PI;
        if (Math.Abs(angle) > 1 && angleCount <= 5) // rotate tilted heads, at most five attempts
        {
            ++angleCount;
            using (var ms = new MemoryStream(File.ReadAllBytes(path)))
            {
                var originalImg = (Bitmap)Bitmap.FromStream(ms);
                path = UserConfig.AppDataDir;
                FolderEx.CreateDirectory(path);
                path = Path.Combine(path, "tempHaarImage.jpg");
                using (var ii = ImageEx.RotateImage(new Bitmap(originalImg), (float)-angle))
                    ii.Save(path, ImageFormat.Jpeg);
            }
            return Recognize(ref path, false);
        }
        #endregion

        #region Convert to relative coordinates
        LeftMouth = new Vector2(LeftMouth.X / (image.Width * 1f), LeftMouth.Y / (image.Height * 1f));
        RightMouth = new Vector2(RightMouth.X / (image.Width * 1f), RightMouth.Y / (image.Height * 1f));
        LeftEyeCenter = new Vector2(LeftEyeCenter.X / (image.Width * 1f), LeftEyeCenter.Y / (image.Height * 1f));
        RightEyeCenter = new Vector2(RightEyeCenter.X / (image.Width * 1f), RightEyeCenter.Y / (image.Height * 1f));
        LeftNose = new Vector2(LeftNose.X / (image.Width * 1f), LeftNose.Y / (image.Height * 1f));
        RightNose = new Vector2(RightNose.X / (image.Width * 1f), RightNose.Y / (image.Height * 1f));
        TopFace = new Vector2(TopFace.X / (image.Width * 1f), TopFace.Y / (image.Height * 1f));
        MiddleFace1 = new Vector2(MiddleFace1.X / (image.Width * 1f), MiddleFace1.Y / (image.Height * 1f));
        MiddleFace2 = new Vector2(MiddleFace2.X / (image.Width * 1f), MiddleFace2.Y / (image.Height * 1f));
        BottomFace = new Vector2(BottomFace.X / (image.Width * 1f), BottomFace.Y / (image.Height * 1f));
        RightMiddleFace1 = new Vector2(RightMiddleFace1.X / (image.Width * 1f), RightMiddleFace1.Y / (image.Height * 1f));
        RightMiddleFace2 = new Vector2(RightMiddleFace2.X / (image.Width * 1f), RightMiddleFace2.Y / (image.Height * 1f));

        FacialFeatures = new List<Vector2>();
        foreach (var point in pointFeature)
            FacialFeatures.Add(new Vector2(point.x / (image.Width * 1f), point.y / (image.Height * 1f)));
        #endregion

        return true;
    }
}
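// The eye-alignment step in Recognize rotates the photo until the eye line is horizontal.
// A simpler way to express the roll angle directly is the signed angle of the eye vector,
// sketched below; this is plain geometry with no FaceSDK dependency, and the formulation
// differs slightly from the vector-difference version used in Recognize above.
static double EyeRollAngleDegrees(PointF leftEye, PointF rightEye)
{
    double dx = rightEye.X - leftEye.X;
    double dy = rightEye.Y - leftEye.Y;
    // Math.Atan2 returns the signed angle of the eye line relative to the horizontal axis
    return Math.Atan2(dy, dx) * 180.0 / Math.PI;
}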