// Main processing loop: runs until NeedClose is set, dispatching on the
// UI-driven programState each iteration.
private void start()
{
    loadUsers();
    NeedClose = false;
    int CameraHandle = CameraStart();
    // Configure detection conditions, where:
    /*
     * SetFaceDetectionParameters(false, false, 100):
     *   arg 1 widens the detectable face angle from +/-15 degrees to +/-30 degrees;
     *   arg 2 decides whether the face rotation angle is determined;
     *   arg 3 is the internal resize width - larger values give higher quality.
     * Current effective distance is ~1 m, maximum ~1.3 m.
     *
     * SetFaceDetectionThreshold(3): sets sensitivity to faces. The higher the
     * value, the less sensitive detection is - only very clear faces are found.
     */
    FSDK.SetFaceDetectionParameters(true, false, 100);
    FSDK.SetFaceDetectionThreshold(1);
    while (!NeedClose)
    {
        try
        {
            switch (programState)
            {
                case ProgramState.psNormal:
                    Normalhandle();
                    break;
                case ProgramState.psAddFace:
                    AddFacehandle();
                    // Signal the thread waiting on obj that enrollment finished,
                    // then fall back to the normal state.
                    lock (obj)
                    {
                        ok = true;
                        Monitor.PulseAll(obj);
                        programState = ProgramState.psNormal;
                    }
                    break;
                case ProgramState.psRecognize:
                    Recongnizehandle();
                    // Signal the thread waiting on obj that recognition finished.
                    lock (obj)
                    {
                        ok = true;
                        Monitor.PulseAll(obj);
                        programState = ProgramState.psNormal;
                    }
                    break;
                case ProgramState.psNothing:
                    Nothinghandle();
                    break;
            }
            // Keep the UI responsive while looping.
            Application.DoEvents();
        }
        catch (Exception e)
        {
            // NOTE(review): exceptions are only written to the console and the
            // loop keeps running - confirm this best-effort policy is intended.
            Console.WriteLine(e.Message);
        }
    }
}
// Loads one image file, detects the face in it, extracts eyes and a face
// template, and appends the record to the search list and its UI views.
private void loadSubject(string fileName)
{
    // Assume vertical faces (no arbitrary rotations) at internal resize width 384.
    FSDK.SetFaceDetectionParameters(false, true, 384);
    FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);
    TFaceRecord fr = new TFaceRecord();
    fr.ImageFileName = fileName;
    fr.FacePosition = new FSDK.TFacePosition();
    fr.FacialFeatures = new FSDK.TPoint[2];
    fr.Template = new byte[FSDK.TemplateSize];
    fr.image = new FSDK.CImage(fileName);
    fr.FacePosition = fr.image.DetectFace();
    if (0 == fr.FacePosition.w) // w == 0 means no face was found
    {
        // NOTE(review): this guard compares the file-name LENGTH to 1, so the
        // message is effectively never shown for a real path; the sibling
        // enrollment code tests the number of selected FILES instead - confirm.
        if (fileName.Length <= 1)
        {
            MessageBox.Show("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.", "Enrollment error");
        }
    }
    else
    {
        // Crop a square region centred on the detected face (side = w).
        fr.faceImage = fr.image.CopyRect((int)(fr.FacePosition.xc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.xc + Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc + Math.Round(fr.FacePosition.w * 0.5)));
        try
        {
            fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
        }
        catch (Exception ex2)
        {
            MessageBox.Show(ex2.Message, "Error detecting eyes.");
        }
        try
        {
            fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision
        }
        catch (Exception ex2)
        {
            MessageBox.Show(ex2.Message, "Error retrieving face template.");
        }
        FaceSearchList.Add(fr);
        FaceSearchImageList.Images.Add(fr.faceImage.ToCLRImage());
        listView1.Items.Add((FaceSearchImageList.Images.Count - 1).ToString(), fileName.Split('\\')[fileName.Split('\\').Length - 1], FaceSearchImageList.Images.Count - 1);
        // NOTE(review): the image is disposed at the end of this using block
        // while still assigned to pictureBox1; Refresh() is synchronous so the
        // draw happens first, but any later repaint before the null assignment
        // below would hit a disposed image - confirm this is safe here.
        using (Image img = fr.image.ToCLRImage())
        {
            pictureBox1.Image = img;
            pictureBox1.Refresh();
        }
    }
    pictureBox1.Image = null;
}
private void Form1_Load(object sender, EventArgs e)
{
    // The SDK must be activated with a valid license key before any other call.
    const string licenseKey = "Jl3R1DBC1qVQonaiBAq8gK7KzetXbFb4r+OF1DLzInT3KyXHvgHNLyk2Tymk5G6GBv58/Oqn+SQeOWCQfQASTV1Mcd7RQAsrmW02oOa9lhZsMockPLoEnpsH4W1I0+zmxmUwecWKEep9j4BrYhQWuiA3QcNeQO+tfyLOHASk3+M=";
    int activationResult = FSDK.ActivateLibrary(licenseKey);
    if (activationResult != FSDK.FSDKE_OK)
    {
        MessageBox.Show(
            "Please run the License Key Wizard (Start - Luxand - FaceSDK - License Key Wizard)",
            "Error activating FaceSDK",
            MessageBoxButtons.OK,
            MessageBoxIcon.Error);
        Application.Exit();
    }

    FSDK.InitializeLibrary();
    // Handle arbitrary rotations, determine the rotation angle, resize width 384.
    FSDK.SetFaceDetectionParameters(true, true, 384);
}
private void Form1_Load(object sender, EventArgs e)
{
    // Activate the FaceSDK license; without it no SDK call is permitted.
    var status = FSDK.ActivateLibrary("aCGamccfB6Uj3vlS7eDEryPnDrTbrZQb77ZHouPl3J8Q7o+BG4PcGevchFjppkWrVa038OU6Fghhy/BJfJV1n82InviCSijl8Vbxb11fs+VrcbSEfpESqjKSJQK8OLCqU0qYDy1oRHLRAg/3CHKCBzP/6IHuamy9Y/aY/xd1E7A=");
    if (status != FSDK.FSDKE_OK)
    {
        // Point the user at the vendor's license tool, then shut down.
        MessageBox.Show(
            "Please run the License Key Wizard (Start - Luxand - FaceSDK - License Key Wizard)",
            "Error activating FaceSDK",
            MessageBoxButtons.OK,
            MessageBoxIcon.Error);
        Application.Exit();
    }

    FSDK.InitializeLibrary();
    FSDK.SetFaceDetectionParameters(true, true, 384);
}
// Activates and initialises the FaceSDK runtime.
// Returns false when the license key is rejected, true on success.
public static bool ActivateRecognition()
{
    const string licenseKey = "DWysHuomlBcczVM2MQfiz/3WraXb7r+fM0th71X5A9z+gsHn2kpGOgWrVh9D/9sQWlPXO00CFmGMvetl9A+VEr9Y5GVBIccyV32uaZutZjKYH5KB2k87NJAAw6NPkzK0DSQ5b5W7EO0yg2+x4HxpWzPogGyIIYcAHIYY11/YGsU=";

    if (FSDK.ActivateLibrary(licenseKey) != FSDK.FSDKE_OK)
    {
        return false;
    }

    FSDK.InitializeLibrary();
    // Handle arbitrary rotations, determine the rotation angle, resize width 384.
    FSDK.SetFaceDetectionParameters(true, true, 384);
    return true;
}
private void Form1_Load(object sender, EventArgs e)
{
    // Activate the FaceSDK license before any other SDK call.
    if (FSDK.FSDKE_OK != FSDK.ActivateLibrary("ql0enzjOpt6Eg/bKTjTnmV/CHacZsldzhWFqaWe54rvlnqKxP+QIBGPN6tBluefo7pprgH+pNOxaUy4ZrwJWJsSDlJWcv7N7mZn5c5+8ssAuowWDqMjAn5O9IeheV2kP3VXx0xaVLEGIXcm2p/aERbQQQesBNeoGEHidf7ew2F8="))
    {
        // FIX: corrected the misspelled "Sytem Error" / "System Erorr" strings.
        MessageBox.Show("System Error", "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }
    FSDK.InitializeLibrary();
    // Vertical faces only (no arbitrary rotations), determine angle, resize width 384.
    // (The unused local "panel_size" was removed.)
    FSDK.SetFaceDetectionParameters(false, true, 384);
}
// Handles camera selection: opens the chosen capture device, pushes the
// current detection settings to the SDK and caches the camera's frame size.
private void camera_select(object sender, EventArgs e)
{
    progressBar1.Value = 0;
    int s = camera.SelectedIndex;
    // Switching to a different camera invalidates any previous initialisation.
    if (s != Fitems.cameraN)
    {
        Fitems.init = false;
    }
    // Same camera, already initialised: nothing to do.
    if ((Fitems.cameraN == s) && Fitems.init == true)
    {
        doneLoadingCamera();
        return;
    }
    // System.Threading.Thread t = new System.Threading.Thread(new System.Threading.ThreadStart(fsdk_async_funcs));
    // t.IsBackground = true;
    // t.SetApartmentState(System.Threading.ApartmentState.STA);
    // t.Priority = System.Threading.ThreadPriority.BelowNormal;
    // t.Start();
    backgroundWorker1.RunWorkerAsync();
    string cameraName = (string)camera.SelectedItem;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref s);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening camera\nCamera Busy", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }
    int count;
    String[] cameraList;
    FSDKCam.GetCameraList(out cameraList, out count);
    // Apply the detection settings currently stored in Fitems.
    FSDK.SetFaceDetectionParameters(Fitems.facerotationB, Fitems.faceAngle, Fitems.faceWidth);
    FSDK.SetFaceDetectionThreshold(Fitems.facethreshold);
    FSDKCam.VideoFormatInfo[] formatList;
    FSDKCam.GetVideoFormatList(ref cameraName, out formatList, out count);
    // Cache the first (default) video format's frame dimensions.
    Fitems.CamX = formatList[0].Width;
    Fitems.CamY = formatList[0].Height;
    Fitems.cameraN = s;
    Application.DoEvents();
    Fitems.init = true;
}
private void PersonFrm_Load(object sender, EventArgs e)
{
    // Dock the form towards the bottom-right corner of the primary screen.
    var workArea = Screen.PrimaryScreen.WorkingArea;
    Location = new Point(workArea.Width - (Width + 200), workArea.Height - (Height + 200));

    // Default combo-box selections.
    comTitle.SelectedIndex = 0;
    comSex.SelectedIndex = 0;

    // Activate the FaceSDK license before any other SDK call.
    const string licenseKey = "vzm3vx/iIfmU4NsxPHciqHwP/fdsnVT4vo3MpwZvuI0e3oqsOjq1Gp4CeTC4m963GGJdSFwgR40MB3jdXKvT+IB9uuaFhdTS6Y5kbi/LXu4MqGkNDVHRKcP47VaP/djTvJFOsfP9gxH4qneFm/C5m0jHEzdPTc5O8tPmsC7EOoE=";
    if (FSDK.ActivateLibrary(licenseKey) != FSDK.FSDKE_OK)
    {
        MessageBox.Show(
            "Please run the License Key Wizard (Start - Luxand - FaceSDK - License Key Wizard)",
            "Error activating FaceSDK",
            MessageBoxButtons.OK,
            MessageBoxIcon.Error);
        Application.Exit();
    }

    FSDK.InitializeLibrary();
    FSDK.SetFaceDetectionParameters(true, true, 384);
}
// Detects the face in the given image file, extracts its eyes and face
// template, saves the record and resets FaceList to hold just this record.
void _is_face_in_image(string filename)
{
    try
    {
        // Assume faces are vertical (HandleArbitraryRotations = false) to speed up face detection.
        FSDK.SetFaceDetectionParameters(false, true, 384);
        FSDK.SetFaceDetectionThreshold((int)(FaceDetectionThreshold));

        fr = new TFaceRecord();
        fr.ImageFileName = filename;
        fr.FacePosition = new FSDK.TFacePosition();
        // FIX: was new FSDK.TPoint[1]; every other enrollment path in this
        // project allocates TPoint[2] (both eyes).
        fr.FacialFeatures = new FSDK.TPoint[2];
        // FIX: was new Byte[FSDK.TemplateSize - 1]; the SDK template is exactly
        // TemplateSize bytes, as used by the sibling enrollment code.
        fr.Template = new byte[FSDK.TemplateSize];
        fr.image = new FSDK.CImage(filename);
        fr.FacePosition = fr.image.DetectFace();

        if (fr.FacePosition.w == 0) // w == 0 means no face was found
        {
            MessageBox.Show("no face found, make sure you face the camera then try capturing image again...");
        }
        else
        {
            // Crop a square region centred on the detected face.
            fr.faceImage = fr.image.CopyRect(
                (int)(fr.FacePosition.xc - System.Math.Round(fr.FacePosition.w * 0.5)),
                (int)(fr.FacePosition.yc - System.Math.Round(fr.FacePosition.w * 0.5)),
                (int)(fr.FacePosition.xc + System.Math.Round(fr.FacePosition.w * 0.5)),
                (int)(fr.FacePosition.yc + System.Math.Round(fr.FacePosition.w * 0.5)));

            bool eyesDetected = false;
            try
            {
                fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
                eyesDetected = true;
            }
            catch (Exception)
            {
                MessageBox.Show("Error detecting eyes during registration!!!");
            }

            // Only extract the face template when the eyes were located.
            if (eyesDetected)
            {
                fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition);
            }

            save_records(fr);
            FaceList = new List<TFaceRecord>();
            FaceList.Add(fr);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show("Can't open image(s) with error: " + ex.Message.ToString());
    }
}
// Enrolls a face: detects its position, crops it, extracts eyes and a face
// template, and appends the record to known_faces_list. Always returns true.
private bool EnrollFace(Face a_face)
{
    // ASSUMING THAT FACES ARE VERTICAL (HANDLEARBITRARYROTATIONS IS FALSE) TO SPEED UP FACE DETECTION
    FSDK.SetFaceDetectionParameters(false, true, 384);
    FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);

    // FIX: the original called a_face.Clone() four separate times, running face
    // detection on one throw-away clone and eye/template extraction on others.
    // Clone once so position, eyes and template all come from the same image.
    var image = a_face.Clone().image;

    // GET POSITION OF FACE IN IMAGE
    a_face.face_position = FacePosition.FromFSDK(image.DetectFace());

    // Crop a square region centred on the detected face.
    a_face.face_image = image.CopyRect(
        (int)(a_face.face_position.xc - Math.Round(a_face.face_position.w * 0.5)),
        (int)(a_face.face_position.yc - Math.Round(a_face.face_position.w * 0.5)),
        (int)(a_face.face_position.xc + Math.Round(a_face.face_position.w * 0.5)),
        (int)(a_face.face_position.yc + Math.Round(a_face.face_position.w * 0.5)));

    // GET THE FACIAL FEATURES OF THE FACE
    FSDK.TFacePosition face_pos = a_face.face_position;
    a_face.facial_features = image.DetectEyesInRegion(ref face_pos);

    // GET A TEMPLATE OF THE FACE TO BE USED FOR LATER COMPARISON
    a_face.face_template = image.GetFaceTemplateInRegion(ref face_pos);

    known_faces_list.Add(a_face);
    return true;
}
public MainForm()
{
    InitializeComponent();

    // Enable double buffering to stop flickering.
    // ControlStyles is a flags enum, so the enabled styles are set in one call.
    SetStyle(ControlStyles.DoubleBuffer
           | ControlStyles.AllPaintingInWmPaint
           | ControlStyles.UserPaint
           | ControlStyles.OptimizedDoubleBuffer
           | ControlStyles.ResizeRedraw, true);
    SetStyle(ControlStyles.SupportsTransparentBackColor, false);
    SetStyle(ControlStyles.Opaque, false);

    // Activate the FaceSDK license before any other SDK call.
    const string licenseKey = "DWysHuomlBcczVM2MQfiz/3WraXb7r+fM0th71X5A9z+gsHn2kpGOgWrVh9D/9sQWlPXO00CFmGMvetl9A+VEr9Y5GVBIccyV32uaZutZjKYH5KB2k87NJAAw6NPkzK0DSQ5b5W7EO0yg2+x4HxpWzPogGyIIYcAHIYY11/YGsU=";
    if (FSDK.ActivateLibrary(licenseKey) != FSDK.FSDKE_OK)
    {
        MessageBox.Show(
            @"Please run the License Key Wizard (Start - Luxand - FaceSDK - License Key Wizard)",
            @"Error activating FaceSDK",
            MessageBoxButtons.OK,
            MessageBoxIcon.Error);
        Application.Exit();
    }

    FSDK.InitializeLibrary();
    FSDK.SetFaceDetectionParameters(true, true, 384);

    // btnImportVector.Visible = btnExportVector.Visible = btnEditPoint.Visible = false; // service buttons, not needed for the demo.
}
// Opens the camera, runs the FaceSDK tracker over live frames, looks each
// tracked face up via Fx.GetPerson, records attendance for recognised
// students and triggers a Twilio call for the last recognised name.
// Loops until needClose is set.
private void StartStreamingAndDetecting()
{
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }
    try
    {
        int tracker = 0; // creating a Tracker
        if (FSDK.FSDKE_OK != FSDK.LoadTrackerMemoryFromFile(ref tracker, TrackerMemoryFile)) // try to load saved tracker state
        {
            FSDK.CreateTracker(ref tracker); // if could not be loaded, create a new tracker
        }
        int err = 0; // set realtime face detection parameters
        FSDK.SetTrackerMultipleParameters(tracker, "HandleArbitraryRotations=false; DetermineFaceRotationAngle=true; InternalResizeWidth=100; FaceDetectionThreshold=1;", ref err);
        FSDK.SetFaceDetectionParameters(false, true, 384);
        int FrameCounter = 0;
        Pen P = new Pen(Color.AliceBlue);
        int L = 0;
        int T = 0;
        int W = 0;
        string[] Names = new string[10];
        while (!needClose)
        {
            Int32 imageHandle = 0;
            if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
            {
                Application.DoEvents();
                continue;
            }
            FSDK.CImage image = new FSDK.CImage(imageHandle);
            long[] IDs;
            long faceCount = 0;
            FSDK.FeedFrame(tracker, cameraHandle, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum of 256 faces detected
            Array.Resize(ref IDs, (int)faceCount);
            // make UI controls accessible (to find if the user clicked on a face)
            Application.DoEvents();
            Image frameImage = image.ToCLRImage();
            Graphics gr = Graphics.FromImage(frameImage);
            for (int i = 0; i < IDs.Length; ++i)
            {
                string FaceFullName = "Unknown Face"; // Name = "";
                // NOTE(review): always true here - FaceFullName was just assigned above.
                if (FaceFullName == "Unknown Face")
                {
                    FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
                    FSDK.GetTrackerFacePosition(tracker, cameraHandle, IDs[i], ref facePosition);
                    int left = facePosition.xc - (int)(facePosition.w * 0.6);
                    int top = facePosition.yc - (int)(facePosition.w * 0.5);
                    int w = (int)(facePosition.w * 1.2);
                    string name;
                    // Build a face record for the tracked region and extract its template.
                    TFaceRecord fIn = new TFaceRecord();
                    fIn.FacePosition = new FSDK.TFacePosition();
                    fIn.FacialFeatures = new FSDK.TPoint[2];
                    fIn.Template = new byte[FSDK.TemplateSize];
                    FSDK.CImage img = new FSDK.CImage(image.ImageHandle);
                    img = image.CopyRect((int)(facePosition.xc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.xc + Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc + Math.Round(facePosition.w * 0.5)));
                    fIn.image = img;
                    fIn.FacePosition = fIn.image.DetectFace();
                    fIn.faceImage = fIn.image.CopyRect((int)(fIn.FacePosition.xc - Math.Round(fIn.FacePosition.w * 0.5)), (int)(fIn.FacePosition.yc - Math.Round(fIn.FacePosition.w * 0.5)), (int)(fIn.FacePosition.xc + Math.Round(fIn.FacePosition.w * 0.5)), (int)(fIn.FacePosition.yc + Math.Round(fIn.FacePosition.w * 0.5)));
                    fIn.FacialFeatures = fIn.image.DetectEyesInRegion(ref fIn.FacePosition);
                    fIn.Template = fIn.image.GetFaceTemplateInRegion(ref fIn.FacePosition); // get template with higher precision
                    //TFaceRecord fOut = new TFaceRecord();
                    //string FaceFullName = "Unknown Face";
                    //if (FrameCounter < 9)
                    //{
                    //    FrameCounter++;
                    //    StringFormat format = new StringFormat();
                    //    format.Alignment = StringAlignment.Center;
                    //    gr.DrawString("Searching ...", new Font("Arial", 16), new SolidBrush(Color.LightGreen), facePosition.xc, top + w + 5, format);
                    //    Fx.GetPerson(fIn, out FaceFullName);
                    //    Names[FrameCounter] = FaceFullName;
                    //    name = FaceFullName;
                    //}
                    //else
                    //{
                    //FrameCounter++;
                    // Look the face up in the enrolled database.
                    Fx.GetPerson(fIn, out FaceFullName);
                    Names[FrameCounter] = FaceFullName;
                    name = FaceFullName;
                    if (FaceFullName != "")
                    {
                        IntruderFullName = FaceFullName;
                    }
                    FrameCounter = 0;
                    /*/ draw name
                     * name = (from n in Names
                     * group n by n into g
                     * select new
                     * {
                     * Key = g.Key,
                     * Count = g.Count()
                     * }).OrderByDescending(g => g.Count).ToArray()[0].ToString();
                     */
                    StringFormat format = new StringFormat();
                    format.Alignment = StringAlignment.Center;
                    gr.DrawString(name, new Font("Arial", 16), new SolidBrush(Color.LightGreen), facePosition.xc, top + w + 5, format);
                    Pen pen = Pens.LightGreen;
                    gr.DrawRectangle(pen, left, top, w, w);
                    // Highlight the face rectangle in blue when the mouse hovers over it.
                    if (mouseX >= left && mouseX <= left + w && mouseY >= top && mouseY <= top + w)
                    {
                        pen = Pens.Blue;
                    }
                    P = pen;
                    L = left;
                    T = top;
                    W = w;
                    gr.DrawRectangle(pen, left, top, w, w);
                    if (FaceFullName == "Unknown Face")
                    {
                        un++;
                        if (un >= 20)
                        {
                            //MessageBox.Show("I don't kno him");
                        }
                    }
                    else
                    {
                        kno++;
                        if (kno >= 5)
                        {
                            //MessageBox.Show("i know him");
                            //Get detected student details
                            // NOTE(review): the lookup filters by the field "Name" while the
                            // attendance query below filters by the local "name" - confirm
                            // both are meant to hold the same value.
                            var d_student = db.Students.Where(p => p.fullname == Name).FirstOrDefault();
                            var att = db.Attendances.Where(p => p.fullname == name && p.insertdate > DateTime.Now.AddDays(-1));
                            if (att.Count() == 0)
                            {
                                //save to attedance
                                db.Attendances.Add(
                                    new Attendance
                                {
                                    // regno =
                                    regno = d_student.regno,
                                    fullname = d_student.fullname,
                                    gender = d_student.gender,
                                    level = d_student.level,
                                    insertdate = DateTime.Now
                                });
                                db.SaveChanges();
                            }
                        }
                    }
                    if (MakingCall == false)
                    {
                        MakingCall = true;
                        // timer.Tick += timer_Tick;
                        //timer.Interval = 30000000;
                        // timer.Start();
                        //MessageBox.Show("UNKNOWN");
                        // NOTE(review): this blocks the streaming loop for 5 s before
                        // every call attempt - confirm this delay is intentional.
                        Thread.Sleep(5000);
                        TwilioClass call = new TwilioClass();
                        if (call.MakeCall(IntruderFullName))
                        {
                            MakingCall = false;
                        }
                    }
                    //}
                    //gr.DrawRectangle(P, L, T, W, W);
                    name = "";
                    IntruderFullName = "";
                }
            }
            //programState = ProgramState.psRecognize;
            // display current frame
            imageBox.Image = frameImage;
            GC.Collect(); // collect the garbage after the deletion
        }
        FSDKCam.CloseVideoCamera(cameraHandle);
        FSDKCam.FinalizeCapturing();
    }
    catch (Exception n)
    {
        // NOTE(review): restarting via recursion never unwinds the stack;
        // repeated camera failures will grow it without bound. A retry loop
        // would be safer - confirm intent before changing.
        StartStreamingAndDetecting();
    }
}
/// <summary>
/// Builds and uploads the 3D head model for a session photo: downloads the
/// photo, runs face recognition on it, applies aging/weight/smoothing morphs,
/// attaches hair/accessory/base/addon meshes, then saves the merged head (and
/// optionally a zip archive) to the FTP server.
/// Hair and accessory paths arrive as links to a preview image, e.g.
/// &lt;img src="http://printahead.net/printahead.online/Library/Hair/Standard/20.jpg"&gt;;
/// an .obj file with the same name must exist next to the image. It is easier
/// to resolve such links here than on the Java side.
/// </summary>
/// <param name="manTypeInt">0 = male, 1 = female, 2 = boy, 3 = girl.</param>
/// <param name="sessionID">Session id; also names the source photo on the server.</param>
/// <param name="hairPath">Hair preview-image link; empty selects a default per figure type.</param>
/// <param name="hairMaterialPath">Hair material (texture) image link.</param>
/// <param name="accessoryPath">Accessory preview-image link; empty selects a default.</param>
/// <param name="accessoryMaterialPath">Accessory material image link.</param>
/// <param name="basePath">Base (stand) preview-image link; optional.</param>
/// <param name="baseMaterialPath">Base material image link.</param>
/// <param name="addonPath1">Optional addon preview-image link (1 of up to 4).</param>
/// <param name="addonPath2">Optional addon preview-image link.</param>
/// <param name="addonPath3">Optional addon preview-image link.</param>
/// <param name="addonPath4">Optional addon preview-image link.</param>
/// <param name="addonMaterialPath">Material image link shared by the addons.</param>
/// <param name="oldMorphingValue">Aging morph amount; 20 is the neutral default.</param>
/// <param name="fatMorphingValue">Weight morph amount; 0 disables the morph.</param>
/// <param name="smoothingValue">Smoothing percentage; 0 disables smoothing.</param>
/// <param name="size">96% (3.2"), 113% (3.8"), 134% (4.5") - 1 means 3.2", 2 means 3.8", etc.</param>
/// <param name="ftpOutputName">When non-empty, results are also packed into "{name}.zip" and uploaded.</param>
public void CreateObj(int manTypeInt, string sessionID, string hairPath, string hairMaterialPath, string accessoryPath, string accessoryMaterialPath, string basePath, string baseMaterialPath, string addonPath1, string addonPath2, string addonPath3, string addonPath4, string addonMaterialPath, int oldMorphingValue, int fatMorphingValue, int smoothingValue, int size, string ftpOutputName)
{
    var manType = ManType.Male;
    switch (manTypeInt)
    {
        case 1:
            manType = ManType.Female;
            break;
        case 2:
        case 3:
            manType = ManType.Child;
            break;
    }
    #region Project creation
    WebClient client = new WebClient();
    var imagePath = "https://printahead.net/printahead.online/PrintAhead_images/" + sessionID + ".jpeg";
    byte[] imageBytes = client.DownloadData(imagePath);
    var ms = new MemoryStream(imageBytes);
    var templateImage = new Bitmap(ms);
    ProgramCore.Project = new Project(sessionID, null, null, GenesisType.Genesis2, manType, null, false, 1024, false);
    ProgramCore.Project.FrontImage = templateImage;
    //ProgramCore.Project.LoadMeshes();
    #endregion
    #region FaceSDK activation
    if (FSDK.FSDKE_OK != FSDK.ActivateLibrary("DWysHuomlBcczVM2MQfiz/3WraXb7r+fM0th71X5A9z+gsHn2kpGOgWrVh9D/9sQWlPXO00CFmGMvetl9A+VEr9Y5GVBIccyV32uaZutZjKYH5KB2k87NJAAw6NPkzK0DSQ5b5W7EO0yg2+x4HxpWzPogGyIIYcAHIYY11/YGsU="))
    {
        MessageBox.Show("Please run the License Key Wizard (Start - Luxand - FaceSDK - License Key Wizard)", "Error activating FaceSDK", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }
    FSDK.InitializeLibrary();
    FSDK.SetFaceDetectionParameters(true, true, 384);
    #endregion
    Recognize(templateImage);
    var aabb = ProgramCore.Project.RenderMainHelper.InitializeShapedotsHelper(true);
    ProgramCore.Project.RenderMainHelper.LoadProject(true, aabb, imagePath, false);
    headMeshesController.InitializeTexturing(autodotsShapeHelper.GetBaseDots(), HeadController.GetIndices());
    autodotsShapeHelper.Transform(headMeshesController.TexturingInfo.Points.ToArray());
    headController.StartAutodots();
    ProgramCore.Project.RenderMainHelper.UpdateUserCenterPositions();
    // Fill in missing textures on every mesh part with the recognised head texture.
    for (var i = 0; i < headMeshesController.RenderMesh.Parts.Count; i++)
    {
        var part = headMeshesController.RenderMesh.Parts[i];
        if (part.Texture == 0)
        {
            part.Texture = ProgramCore.Project.RenderMainHelper.HeadTextureId;
            part.TextureName = ProgramCore.Project.RenderMainHelper.GetTexturePath(part.Texture);
        }
    }
    headController.EndAutodots();
    #region Load and apply the morphs
    #region Aging
    if (oldMorphingValue != 20) // no point morphing when the value is already the default; saves load time
    {
        var intTemp = 0;
        var oldMorphingPath = "ftp://108.167.164.209/public_html/printahead.online/PrintAhead_DefaultModels/" + manType.GetObjDirPath(false) + "/Old.obj"; // load the aging morph targets
        oldMorphingPath = oldMorphingPath.Replace(@"\", "/");
        if (FTPHelper.IsFileExists(oldMorphingPath))
        {
            OldMorphing = ProgramCore.Project.RenderMainHelper.pickingController.LoadPartsMorphInfo(oldMorphingPath, headMeshesController.RenderMesh, ref intTemp);
        }
        if (OldMorphing != null)
        {
            var delta = oldMorphingValue == 20 ? 0 : oldMorphingValue / 80f;
            foreach (var m in OldMorphing)
            {
                m.Value.Delta = delta;
            }
            ProgramCore.Project.AgeCoefficient = delta;
        }
    }
    #endregion
    #region Weight
    if (fatMorphingValue != 0)
    {
        var intTemp = 0;
        var fatMorphingPath = "ftp://108.167.164.209/public_html/printahead.online/PrintAhead_DefaultModels/" + manType.GetObjDirPath(false) + "/Fat.obj"; // load the fattening morph targets
        fatMorphingPath = fatMorphingPath.Replace(@"\", "/");
        if (FTPHelper.IsFileExists(fatMorphingPath))
        {
            FatMorphing = ProgramCore.Project.RenderMainHelper.pickingController.LoadPartsMorphInfo(fatMorphingPath, headMeshesController.RenderMesh, ref intTemp);
        }
        if (FatMorphing != null)
        {
            var delta = fatMorphingValue == 0 ? 0 : fatMorphingValue / 30f;
            foreach (var m in FatMorphing)
            {
                m.Value.Delta = delta;
            }
            ProgramCore.Project.FatCoefficient = delta;
        }
    }
    #endregion
    float? k = null;
    if (smoothingValue != 0)
    {
        var delta = (100 - smoothingValue) / 100f;
        ProgramCore.Project.MorphingScale = delta;
        k = delta;
    }
    DoMorth(k);
    #endregion
    FTPHelper.UpdateAddress(@"ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + sessionID);
    ProgramCore.Project.RenderMainHelper.InitializeHairPositions();
    ProgramCore.Project.RenderMainHelper.InitializeAccessoryPositions();
    ZipOutputStream zipStream = null;
    MemoryStream outputMemStream = new MemoryStream();
    if (!string.IsNullOrEmpty(ftpOutputName))
    {
        zipStream = new ZipOutputStream(outputMemStream); // pack everything into a zip archive as we go
        zipStream.SetLevel(3);
    }
    #region Attach hair
    var hairObjPath = string.Empty;
    if (string.IsNullOrEmpty(hairPath))
    {
        // No hair selected: fall back to the default mesh for the figure type.
        switch (manTypeInt)
        {
            case 0:
                hairObjPath = @"ftp://108.167.164.209/public_html/printahead.online/Library/Hair/Standard/20.obj"; // man
                break;
            case 1:
                hairObjPath = @"ftp://108.167.164.209/public_html/printahead.online/Library/Hair/Standard/3.obj"; // woman
                break;
            case 2:
                hairObjPath = @"ftp://108.167.164.209/public_html/printahead.online/Library/Hair/Standard/10C.obj"; // boy
                break;
            case 3:
                hairObjPath = @"ftp://108.167.164.209/public_html/printahead.online/Library/Hair/Standard/12C.obj"; // girl
                break;
        }
    }
    else
    {
        hairObjPath = GetParcedHairAccessoriesLink(hairPath, manType == ManType.Child ? "C.obj" : ".obj");
    }
    if (!string.IsNullOrEmpty(hairObjPath) && FTPHelper.IsFileExists(hairObjPath))
    {
        hairMaterialPath = GetParcedHairAccessoriesLink(hairMaterialPath, "T.jpg").Replace("/Icons/", "/Materials/");
        if (string.IsNullOrEmpty(hairMaterialPath))
        {
            hairMaterialPath = "ftp://108.167.164.209/public_html/printahead.online/Library/Hair/Materials/H7T.jpg";
        }
        // Copy the texture into the session folder (and the zip), then attach.
        var temp = @"ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + sessionID + "/Textures";
        var fileName = Path.GetFileNameWithoutExtension(hairMaterialPath) + ".jpg";
        FTPHelper.CopyFromFtpToFtp(hairMaterialPath, temp, fileName, zipStream, fileName);
        hairMaterialPath = @"ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + sessionID + "/Textures/" + fileName;
        ProgramCore.Project.RenderMainHelper.AttachHair(hairObjPath, hairMaterialPath, manType);
    }
    #endregion
    #region Attach accessories
    var accessoryObjPath = string.Empty;
    if (string.IsNullOrEmpty(accessoryPath))
    {
        switch (manType)
        {
            case ManType.Child:
                accessoryObjPath = @"ftp://108.167.164.209/public_html/printahead.online/Library/Accessory/Standard/I.obj";
                break;
            case ManType.Female:
                accessoryObjPath = @"ftp://108.167.164.209/public_html/printahead.online/Library/Accessory/Standard/HF.obj";
                break;
            default:
                accessoryObjPath = @"ftp://108.167.164.209/public_html/printahead.online/Library/Accessory/Standard/HM.obj";
                break;
        }
    }
    else
    {
        accessoryObjPath = GetParcedHairAccessoriesLink(accessoryPath, manType == ManType.Child ? "C.obj" : ".obj");
    }
    if (!string.IsNullOrEmpty(accessoryObjPath) && FTPHelper.IsFileExists(accessoryObjPath))
    {
        accessoryMaterialPath = GetParcedHairAccessoriesLink(accessoryMaterialPath, "T.jpg").Replace("/Icons/", "/Materials/");;
        if (string.IsNullOrEmpty(accessoryMaterialPath))
        {
            accessoryMaterialPath = "ftp://108.167.164.209/public_html/printahead.online/Library/Accessory/Materials/lgreenT.jpg";
        }
        var temp = @"ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + sessionID + "/Textures";
        var fileName = Path.GetFileNameWithoutExtension(accessoryMaterialPath) + ".jpg";
        FTPHelper.CopyFromFtpToFtp(accessoryMaterialPath, temp, fileName, zipStream, fileName);
        accessoryMaterialPath = @"ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + sessionID + "/Textures/" + fileName;
        ProgramCore.Project.RenderMainHelper.AttachAccessory(accessoryObjPath, accessoryMaterialPath, manType);
    }
    #region Base
    var baseObjPath = string.Empty;
    if (!string.IsNullOrEmpty(basePath))
    {
        baseObjPath = GetParcedHairAccessoriesLink(basePath, manType == ManType.Child ? "C.obj" : ".obj");
    }
    if (!string.IsNullOrEmpty(baseObjPath) && FTPHelper.IsFileExists(baseObjPath))
    {
        baseMaterialPath = GetParcedHairAccessoriesLink(baseMaterialPath, "T.jpg").Replace("/Icons/", "/Materials/");
        if (string.IsNullOrEmpty(baseMaterialPath))
        {
            baseMaterialPath = "ftp://108.167.164.209/public_html/printahead.online/Library/Accessory/Materials/lgreenT.jpg";
        }
        var temp = @"ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + sessionID + "/Textures";
        var fileName = Path.GetFileNameWithoutExtension(baseMaterialPath) + ".jpg";
        FTPHelper.CopyFromFtpToFtp(baseMaterialPath, temp, fileName, zipStream, fileName);
        baseMaterialPath = @"ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + sessionID + "/Textures/" + fileName;
        ProgramCore.Project.RenderMainHelper.AttachAccessory(baseObjPath, baseMaterialPath, manType);
    }
    #endregion
    #region Addons
    var addonObjPath = GetParcedHairAccessoriesLink(addonPath1, manType == ManType.Child ? "C.obj" : ".obj");
    AttachAddon(addonObjPath, ref addonMaterialPath, sessionID, manType, ref zipStream);
    addonObjPath = GetParcedHairAccessoriesLink(addonPath2, manType == ManType.Child ? "C.obj" : ".obj");
    AttachAddon(addonObjPath, ref addonMaterialPath, sessionID, manType, ref zipStream);
    addonObjPath = GetParcedHairAccessoriesLink(addonPath3, manType == ManType.Child ? "C.obj" : ".obj");
    AttachAddon(addonObjPath, ref addonMaterialPath, sessionID, manType, ref zipStream);
    addonObjPath = GetParcedHairAccessoriesLink(addonPath4, manType == ManType.Child ? "C.obj" : ".obj");
    AttachAddon(addonObjPath, ref addonMaterialPath, sessionID, manType, ref zipStream);
    #endregion
    #endregion
    ProgramCore.Project.RenderMainHelper.SaveMergedHead(sessionID, zipStream, size);
    ProgramCore.Project.RenderMainHelper.SaveSmoothedTextures(zipStream);
    // Upload the original session photo next to the generated textures.
    var address = "ftp://108.167.164.209/public_html/printahead.online/PrintAhead_models/" + ProgramCore.Project.ProjectName + "/Textures";
    var profileImgPath = sessionID + ".jpeg";
    var ftpHelper = new FTPHelper(address);
    var stream = new MemoryStream();
    templateImage.Save(stream, ImageFormat.Jpeg);
    // NOTE(review): "stream" is not rewound before Upload - confirm
    // FTPHelper.Upload seeks to the beginning itself.
    ftpHelper.Upload(stream, profileImgPath);
    if (zipStream != null)
    {
        // Add the original photo bytes to the archive, finish it and upload.
        ms.Seek(0, SeekOrigin.Begin);
        var newEntry = new ZipEntry(profileImgPath);
        zipStream.PutNextEntry(newEntry);
        ms.CopyTo(zipStream);
        zipStream.CloseEntry();
        zipStream.IsStreamOwner = false; // False stops the Close also Closing the underlying stream.
        zipStream.Close(); // Must finish the ZipOutputStream before using outputMemStream.
        outputMemStream.Position = 0;
        address = "ftp://108.167.164.209/public_html/printahead.online/PrintAhead_output/";
        ftpHelper = new FTPHelper(address);
        ftpHelper.Upload(outputMemStream, ftpOutputName + ".zip");
    }
}
// Camera loop: grabs frames until needClose is set. In the "Remember Me" state
// it collects 10 face templates and stores the last one in the database; in the
// "Recognize" state it matches the current face against all stored templates.
private void button1_Click(object sender, EventArgs e)
{
    this.button1.Enabled = false;
    int cameraHandle = 0;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }
    // set realtime face detection parameters
    FSDK.SetFaceDetectionParameters(false, false, 100);
    FSDK.SetFaceDetectionThreshold(3);
    // Templates collected while in the "Remember Me" state.
    faceTemplates = new List<FaceTemplate>();
    while (!needClose)
    {
        Int32 imageHandle = 0;
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
        {
            Application.DoEvents();
            continue;
        }
        FSDK.CImage image = new FSDK.CImage(imageHandle);
        Image frameImage = image.ToCLRImage();
        Graphics gr = Graphics.FromImage(frameImage);
        FSDK.TFacePosition facePosition = image.DetectFace();
        // if a face is detected, we can recognize it
        if (facePosition.w != 0)
        {
            gr.DrawRectangle(Pens.LightGreen, facePosition.xc - facePosition.w / 2, facePosition.yc - facePosition.w / 2, facePosition.w, facePosition.w);
            // create a new face template
            FaceTemplate template = new FaceTemplate();
            template.templateData = new byte[FSDK.TemplateSize];
            if (programState == ProgramState.psRemember || programState == ProgramState.psRecognize)
            {
                template.templateData = image.GetFaceTemplateInRegion(ref facePosition);
            }
            switch (programState)
            {
                case ProgramState.psNormal:
                    // normal state - do nothing
                    break;
                case ProgramState.psRemember:
                    // Remember Me state - store facial templates
                    label1.Text = "Templates stored: " + faceTemplates.Count.ToString();
                    faceTemplates.Add(template);
                    if (faceTemplates.Count > 9)
                    {
                        // get the user name
                        InputName inputName = new InputName();
                        inputName.ShowDialog();
                        userName = inputName.userName;
                        // Parameterized insert - user input is never concatenated into SQL.
                        cmd = new SqlCommand("insert into facetb values(@Name,@face)", con);
                        cmd.Parameters.AddWithValue("@Name", userName);
                        cmd.Parameters.AddWithValue("@face", template.templateData);
                        con.Open();
                        cmd.ExecuteNonQuery();
                        con.Close();
                        MessageBox.Show("Record Save!");
                        programState = ProgramState.psRecognize;
                    }
                    break;
                case ProgramState.psRecognize:
                    // recognize the user
                    bool match = false;
                    // FIX: the original reused ONE FaceTemplate instance for every DB
                    // row (so all stored entries aliased the last row's bytes) and
                    // appended DB rows to the shared enrollment list every frame,
                    // desynchronising the template/name indices. Load each frame's
                    // candidates into fresh local, index-aligned lists instead.
                    var dbTemplates = new List<FaceTemplate>();
                    var dbNames = new List<string>();
                    con.Open();
                    cmd = new SqlCommand("select * from facetb ORDER BY id ASC ", con);
                    SqlDataReader dr = cmd.ExecuteReader();
                    while (dr.Read())
                    {
                        FaceTemplate stored = new FaceTemplate();
                        stored.templateData = (byte[])dr["face"];
                        dbTemplates.Add(stored);
                        dbNames.Add(dr["Name"].ToString());
                    }
                    con.Close(); // FIX: the original closed the connection a second time after matching.
                    for (int ii = 0; ii < dbTemplates.Count; ii++)
                    {
                        float similarity = 0.0f;
                        FaceTemplate t1 = dbTemplates[ii];
                        FSDK.MatchFaces(ref template.templateData, ref t1.templateData, ref similarity);
                        float threshold = 0.0f;
                        FSDK.GetMatchingThresholdAtFAR(0.01f, ref threshold); // set FAR to 1%
                        if (similarity > threshold)
                        {
                            userName = dbNames[ii];
                            label3.Text = dbNames[ii];
                            match = true;
                            break;
                        }
                    }
                    if (match)
                    {
                        StringFormat format = new StringFormat();
                        format.Alignment = StringAlignment.Center;
                        gr.DrawString(userName, new System.Drawing.Font("Arial", 16), new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen), facePosition.xc, facePosition.yc + facePosition.w * 0.55f, format);
                        send();
                    }
                    else
                    {
                        abc = 0;
                        label3.Text = "UnKnow FACE";
                    }
                    break;
            }
        }
        // display current frame
        pictureBox1.Image = frameImage;
        GC.Collect(); // collect the garbage after the deletion
        // make UI controls accessible
        Application.DoEvents();
    }
    FSDKCam.CloseVideoCamera(cameraHandle);
    FSDKCam.FinalizeCapturing();
}
private void checkBox2_CheckedChanged(object sender, EventArgs e)
{
    // Persist the new rotation-angle flag, then push the full set of
    // detection settings back to the SDK.
    bool determineAngle = checkBox2.Checked;
    Fitems.faceAngle = determineAngle;
    FSDK.SetFaceDetectionParameters(Fitems.facerotationB, determineAngle, Fitems.faceWidth);
}
private void trackBar2_Scroll(object sender, EventArgs e)
{
    // Persist the new internal resize width, re-apply all detection settings
    // and mirror the current values in the status label.
    int resizeWidth = trackBar2.Value;
    Fitems.faceWidth = resizeWidth;
    FSDK.SetFaceDetectionParameters(Fitems.facerotationB, Fitems.faceAngle, resizeWidth);
    label5.Text = string.Format(
        "Face Detection Threshold =>{0}\n Internal Resize Width =>{1}",
        Fitems.facethreshold,
        resizeWidth);
}
// Runs face detection/recognition on one captured frame (a Bitmap boxed as
// object so it can be marshalled through Invoke), draws the result onto the
// picture box and toggles timer2 depending on whether the face is registered.
public void recFace(object pppp)
{
    Bitmap ppp = (Bitmap)pppp;
    // Re-invoke on the UI thread when called from a worker thread.
    if (label1.InvokeRequired)
    {
        label1.Invoke(new System.Threading.ParameterizedThreadStart(recFace), new object[] { pppp });
        return;
    }
    bool error = false;
    try
    {
        // Hand the GDI bitmap to the SDK.
        IntPtr ff = ppp.GetHbitmap();
        FSDK.LoadImageFromHBitmap(ref cimg, ff);
        FSDK.SetFaceDetectionParameters(false, false, 500);
        FSDK.SetFaceDetectionThreshold(1);
        FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
        // Round-trip the SDK image back to a GDI bitmap we can draw on.
        IntPtr hbitmapHandle = IntPtr.Zero;
        FSDK.SaveImageToHBitmap(cimg, ref hbitmapHandle);
        Image ccimg = Image.FromHbitmap(hbitmapHandle);
        Graphics gr = Graphics.FromImage(ccimg);
        if (FSDK.FSDKE_OK == FSDK.DetectFace(cimg, ref facePosition))
        {
            gr.DrawRectangle(Pens.LightBlue, facePosition.xc - facePosition.w / 2, facePosition.yc - facePosition.w / 2, facePosition.w, facePosition.w);
            byte[] tempd = new byte[FSDK.TemplateSize];
            FSDK.GetFaceTemplateInRegion(cimg, ref facePosition, out tempd);
            // NOTE(review): the binary template is stored as an ASCII string -
            // ASCII encoding is lossy for arbitrary bytes; confirm "temp" is
            // only used for display/round-tripping that tolerates this.
            temp = System.Text.Encoding.ASCII.GetString(tempd);
            if (check_reg(tempd))
            {
                // Registered face: stop the alert timer and annotate the frame.
                timer2.Enabled = false;
                StringFormat format = new StringFormat();
                format.Alignment = StringAlignment.Center;
                gr.DrawString(_name + " | " + _matric + " | " + _room, new System.Drawing.Font("Candara", 18), new System.Drawing.SolidBrush(System.Drawing.Color.LightBlue), facePosition.xc, facePosition.yc + facePosition.w * 0.55f, format);
            }
            else
            {
                timer2.Enabled = true;
            }
        }
        else
        {
            timer2.Enabled = false;
            label1.Text = "Please Focus on the camera";
        }
        pictureBox1.Height = ccimg.Height;
        pictureBox1.Width = ccimg.Width;
        pictureBox1.Image = ccimg;
        // Release the native SDK image and GDI handle to avoid leaks.
        FSDK.FreeImage(cimg);
        DeleteObject(hbitmapHandle);
        GC.Collect();
        Application.DoEvents();
    }
    catch (NullReferenceException nul)
    {
        timer1.Enabled = false;
        timer1.Dispose();
        MessageBox.Show(nul.ToString());
        error = true;
    }
    catch (ExternalException exer)
    {
        // NOTE(review): GDI/interop errors are silently swallowed so the
        // capture loop keeps going - confirm this best-effort policy.
    }
    catch (AccessViolationException acs)
    {
        // NOTE(review): swallowed for the same reason as above - confirm.
    }
    finally
    {
        // A NullReferenceException is treated as fatal: restart the app.
        if (error)
        {
            canClose = true;
            Application.Exit();
            Application.Restart();
        }
    }
}
/// <summary>
/// Enrolls one or more images for the subject named in textBox1: detects the
/// face, extracts eyes + recognition template, updates the thumbnail UI and
/// persists each successfully enrolled face via db.SaveSubject.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    if (textBox1.Text.Length == 0)
    {
        MessageBox.Show("Please Enter the Subject Name");
        textBox1.Focus();
        return;
    }

    OpenFileDialog dlg = new OpenFileDialog();
    dlg.Filter = "JPEG (*.jpg)|*.jpg|Windows bitmap (*.bmp)|*.bmp|All files|*.*";
    dlg.Multiselect = true;
    if (dlg.ShowDialog() == DialogResult.OK)
    {
        try
        {
            // Upright faces assumed; 384 = internal resize width.
            FSDK.SetFaceDetectionParameters(false, true, 384);
            FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);
            foreach (string fn in dlg.FileNames)
            {
                FaceList.Clear(); // kept from original: only the current record stays in FaceList
                TFaceRecord fr = new TFaceRecord();
                fr.ImageFileName = fn;
                fr.suspectName = textBox1.Text.Trim();
                fr.FacePosition = new FSDK.TFacePosition();
                fr.FacialFeatures = new FSDK.TPoint[2];
                fr.Template = new byte[FSDK.TemplateSize];
                fr.image = new FSDK.CImage(fn);
                fr.FacePosition = fr.image.DetectFace();

                if (0 == fr.FacePosition.w)
                {
                    if (dlg.FileNames.Length <= 1)
                    {
                        MessageBox.Show("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.", "Enrollment error");
                    }
                    // fix: skip this file — previously a record with a null
                    // faceImage and an empty template was still saved to the DB.
                    continue;
                }

                // Crop the square face region around the detected centre.
                fr.faceImage = fr.image.CopyRect((int)(fr.FacePosition.xc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.xc + Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc + Math.Round(fr.FacePosition.w * 0.5)));
                fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
                fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision

                FaceList.Add(fr);
                imageList1.Images.Add(fr.faceImage.ToCLRImage());
                listView1.Items.Add((imageList1.Images.Count - 1).ToString(), fn.Split('\\')[fn.Split('\\').Length - 1], imageList1.Images.Count - 1);
                listView1.SelectedIndices.Clear();
                db.SaveSubject(fr, Constants.conString);
            }
            listView1.Refresh();
        }
        catch (Exception ex)
        {
            MessageBox.Show("Can't open image(s) with error: " + ex.Message.ToString(), "Error");
        }
    }
}
/// <summary>
/// Detects a face and its facial feature points in the image at <paramref name="path"/>,
/// fills this object's feature fields (eyes, mouth, nose, face outline) in
/// image-relative coordinates, and classifies the smile as open/closed.
/// May write a temporary cropped or rotated copy of the photo to AppData
/// ("tempHaarImage.jpg") and recurse on it, replacing <paramref name="path"/>.
/// </summary>
/// <param name="path">Path to the input image; REPLACED with the temp-file path
/// when the image is cropped or rotated.</param>
/// <param name="needCrop">True on the first call for a new project: crop the
/// photo to the head region, then recurse with needCrop = false.</param>
/// <returns>False if no face was detected; true on success.</returns>
public bool Recognize(ref string path, bool needCrop)
{
    var AppDataDir = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "Abalone", "TestRotator");
    // Reset all previously computed features before re-detecting.
    FaceRectRelative = RectangleF.Empty;
    LeftEyeCenter = RightEyeCenter = LeftMouth = LeftNose = RightNose = RightMouth = Vector2.Zero;
    var executablePath = Path.GetDirectoryName(Application.ExecutablePath); // unused here
    FSDK.TPoint[] pointFeature;
    var image = new FSDK.CImage(path);
    var faceRectangle = Rectangle.Empty;
    var mouthRectangle = Rectangle.Empty; // unused here
    // Strict quality threshold, arbitrary rotations allowed, 512 internal resize width.
    FSDK.SetFaceDetectionThreshold(5);
    FSDK.SetFaceDetectionParameters(true, true, 512);
    var facePosition = image.DetectFace();
    if (0 == facePosition.w)
    {
        MessageBox.Show("No faces detected", "Face Detection");
        return(false);
    }
    if (needCrop)
    {
        RotatedAngle = facePosition.angle; // the angle the head is rotated by
    }
    pointFeature = image.DetectFacialFeaturesInRegion(ref facePosition);
    // Horizontal crop bound, clamped to the image.
    var left = facePosition.xc - (int)(facePosition.w * 0.6f);
    left = left < 0 ? 0 : left;
    // int top = facePosition.yc - (int)(facePosition.w * 0.5f); // the top was detected incorrectly, so we simply don't crop there :)
    BottomFace = new Vector2(pointFeature[11].x, pointFeature[11].y);
    var distance = pointFeature[2].y - pointFeature[11].y;
    var top = pointFeature[16].y + distance - 15; // top bound via the legacy height heuristic
    top = top < 0 ? 0 : top;
    var newWidth = (int)(facePosition.w * 1.2);
    newWidth = newWidth > image.Width || newWidth == 0 ? image.Width : newWidth;
    // Height is limited by the chin (+15px margin) or by the image bottom.
    faceRectangle = new Rectangle(left, top, newWidth, BottomFace.Y + 15 < image.Height ? (int)(BottomFace.Y + 15) - top : image.Height - top - 1);
    if (needCrop) // project creation: crop the photo down to just the head
    {
        var bmpImage = new Bitmap(path);
        using (var croppedImage = ImageEx.Crop(bmpImage, faceRectangle))
        {
            path = AppDataDir;
            FolderEx.CreateDirectory(path);
            path = Path.Combine(path, "tempHaarImage.jpg");
            croppedImage.Save(path, ImageFormat.Jpeg);
            // Recurse on the cropped copy; path now points at the temp file.
            return(Recognize(ref path, false));
        }
    }
    // Pick out the named feature points (indices follow the SDK's feature layout).
    LeftEyeCenter = new Vector2(pointFeature[0].x, pointFeature[0].y);
    RightEyeCenter = new Vector2(pointFeature[1].x, pointFeature[1].y);
    LeftMouth = new Vector2(pointFeature[3].x, pointFeature[3].y);
    RightMouth = new Vector2(pointFeature[4].x, pointFeature[4].y);
    LeftNose = new Vector2(pointFeature[45].x, pointFeature[45].y);
    RightNose = new Vector2(pointFeature[46].x, pointFeature[46].y);
    TopFace = new Vector2(pointFeature[66].x, pointFeature[66].y);
    MiddleFace1 = new Vector2(pointFeature[66].x, pointFeature[66].y);
    MiddleFace2 = new Vector2(pointFeature[5].x, pointFeature[5].y);
    RightMiddleFace1 = new Vector2(pointFeature[67].x, pointFeature[67].y);
    RightMiddleFace2 = new Vector2(pointFeature[6].x, pointFeature[6].y);
    #region Rotate the photo so the eyes are level
    // Angle between the eye line and the X axis, in degrees.
    var v = new Vector2(LeftEyeCenter.X - RightEyeCenter.X, LeftEyeCenter.Y - RightEyeCenter.Y);
    v.Normalize();
    var xVector = new Vector2(1, 0);
    var xDiff = xVector.X - v.X;
    var yDiff = xVector.Y - v.Y;
    var angle = Math.Atan2(yDiff, xDiff) * 180.0 / Math.PI;
    if (Math.Abs(angle) > 1 && angleCount <= 5) // straighten tilted heads; angleCount bounds the recursion
    {
        ++angleCount;
        // NOTE(review): original comment here said "Don't use using!!" although
        // the stream IS wrapped in a using — intent unclear; behavior kept as-is.
        using (var ms = new MemoryStream(File.ReadAllBytes(path)))
        {
            var originalImg = (Bitmap)Image.FromStream(ms);
            path = AppDataDir;
            FolderEx.CreateDirectory(path);
            path = Path.Combine(path, "tempHaarImage.jpg");
            using (var ii = ImageEx.RotateImage(new Bitmap(originalImg), (float)-angle))
                ii.Save(path, ImageFormat.Jpeg);
        }
        // Recurse on the rotated copy.
        return(Recognize(ref path, false));
    }
    #endregion
    var upperUpperLip = pointFeature[54]; // top point of the upper lip
    var lowerUpperLip = pointFeature[61]; // bottom point of the upper lip
    var lowerLip = pointFeature[64]; // top point of the lower lip
    var diff2 = Math.Abs(lowerUpperLip.y - upperUpperLip.y);
    var diffX = Math.Abs(lowerLip.y - lowerUpperLip.y);
    // Empirically: the smile is ~90% open when the lip gap exceeds the upper-lip thickness.
    IsOpenSmile = diffX > diff2;
    #region Convert to relative coordinates
    LeftMouth = new Vector2(LeftMouth.X / (image.Width * 1f), LeftMouth.Y / (image.Height * 1f));
    RightMouth = new Vector2(RightMouth.X / (image.Width * 1f), RightMouth.Y / (image.Height * 1f));
    LeftEyeCenter = new Vector2(LeftEyeCenter.X / (image.Width * 1f), LeftEyeCenter.Y / (image.Height * 1f));
    RightEyeCenter = new Vector2(RightEyeCenter.X / (image.Width * 1f), RightEyeCenter.Y / (image.Height * 1f));
    LeftNose = new Vector2(LeftNose.X / (image.Width * 1f), LeftNose.Y / (image.Height * 1f));
    RightNose = new Vector2(RightNose.X / (image.Width * 1f), RightNose.Y / (image.Height * 1f));
    TopFace = new Vector2(TopFace.X / (image.Width * 1f), TopFace.Y / (image.Height * 1f));
    MiddleFace1 = new Vector2(MiddleFace1.X / (image.Width * 1f), MiddleFace1.Y / (image.Height * 1f));
    MiddleFace2 = new Vector2(MiddleFace2.X / (image.Width * 1f), MiddleFace2.Y / (image.Height * 1f));
    BottomFace = new Vector2(BottomFace.X / (image.Width * 1f), BottomFace.Y / (image.Height * 1f));
    RightMiddleFace1 = new Vector2(RightMiddleFace1.X / (image.Width * 1f), RightMiddleFace1.Y / (image.Height * 1f));
    RightMiddleFace2 = new Vector2(RightMiddleFace2.X / (image.Width * 1f), RightMiddleFace2.Y / (image.Height * 1f));
    // All feature points, relative, with per-point depths from GetPointDepths().
    FacialFeatures = new List<Vector3>();
    RealPoints = new List<Vector2>();
    int index = 0;
    var pointDepths = GetPointDepths();
    foreach (var point in pointFeature)
    {
        FacialFeatures.Add(new Vector3(point.x / (image.Width * 1f), point.y / (image.Height * 1f), pointDepths[index++]));
        RealPoints.Add(new Vector2(point.x, point.y));
    }
    ImageWidth = image.Width;
    ImageHeight = image.Height;
    #endregion
    return(true);
}
/// <summary>
/// Enrolls user-picked image files: detects all faces per image (up to 256),
/// crops the first face, extracts eye features and a recognition template,
/// and adds the face to FaceList and the thumbnail list view.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    OpenFileDialog dlg = new OpenFileDialog();
    dlg.Filter = "JPEG (*.jpg)|*.jpg|Windows bitmap (*.bmp)|*.bmp|All files|*.*";
    dlg.Multiselect = true;
    if (dlg.ShowDialog() == DialogResult.OK)
    {
        listView1.Visible = true;
        // Widen the form once to make room for the thumbnail list.
        if (thunho == false)
        {
            this.Width = this.Width + listView1.Width;
            thunho = true;
        }
        try
        {
            // Set the tilt/rotation detection parameters.
            FSDK.SetFaceDetectionParameters(false, true, 384);
            // Set the face detection threshold.
            FSDK.SetFaceDetectionThreshold((int)Form1.FaceDetectionThreshold);
            foreach (string fn in dlg.FileNames)
            {
                TFaceRecord fr = new TFaceRecord();
                fr.ImageFileName = fn;
                //fr.FacePosition = new FSDK.TFacePosition();
                fr.FacialFeatures = new FSDK.TPoint[2];
                //fr.Template = new byte[FSDK.TemplateSize];
                fr.image = new FSDK.CImage(fn);
                //textBox1.Text += "Enrolling '" + fn + "'\r\n";
                //textBox1.Refresh();
                // Get the positions of all faces in the image.
                //fr.FacePosition = fr.image.DetectFace();
                FSDK.DetectMultipleFaces(fr.image.ImageHandle, ref fr.CountFace, out fr.FacePosition, sizeof(long) * 256);
                Array.Resize(ref fr.FacePosition, fr.CountFace);
                if (fr.FacePosition.Length == 0)
                {
                    // User-facing Vietnamese messages: "no faces found".
                    if (dlg.FileNames.Length <= 1)
                    {
                        MessageBox.Show("Không có khuôn mặt nào !", "Lỗi ");
                    }
                    else
                    {
                        MessageBox.Show(fn + ": Không tìm thấy khuôn mặt nào !");
                    }
                }
                else
                {
                    // Copy the square region of the FIRST detected face.
                    fr.faceImage = fr.image.CopyRect((int)(fr.FacePosition[0].xc - Math.Round(fr.FacePosition[0].w * 0.5)), (int)(fr.FacePosition[0].yc - Math.Round(fr.FacePosition[0].w * 0.5)), (int)(fr.FacePosition[0].xc + Math.Round(fr.FacePosition[0].w * 0.5)), (int)(fr.FacePosition[0].yc + Math.Round(fr.FacePosition[0].w * 0.5)));
                    try
                    {
                        // Detect the eye feature points.
                        fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition[0]);
                    }
                    catch (Exception ex2)
                    {
                        MessageBox.Show(ex2.Message, "Lỗi không nhận diện được mắt !");
                        // A null placeholder template is appended on failure.
                        // NOTE(review): the template block below may then append a
                        // SECOND entry for this same face — verify this is intended.
                        byte[] by = new byte[FSDK.TemplateSize];
                        by = null;
                        fr.Template.Add(by);
                    }
                    try
                    {
                        // Extract the face recognition template.
                        byte[] by = new byte[FSDK.TemplateSize];
                        by =
                            fr.image.GetFaceTemplateInRegion(ref fr.FacePosition[0]); // get template with higher precision
                        fr.Template.Add(by);
                    }
                    catch (Exception ex2)
                    {
                        MessageBox.Show(ex2.Message, "Không phát hiện khuôn mặt");
                    }
                    // Add to the in-memory list and the thumbnail UI.
                    FaceList.Add(fr);
                    imageList1.Images.Add(fr.faceImage.ToCLRImage());
                    listView1.Items.Add((imageList1.Images.Count - 1).ToString(), fn.Split('\\')[fn.Split('\\').Length - 1], imageList1.Images.Count - 1);
                    //textBox1.Text += "File '" + fn + "' enrolled\r\n";
                    //textBox1.Refresh();
                    listView1.SelectedIndices.Clear();
                    listView1.SelectedIndices.Add(listView1.Items.Count - 1);
                }
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message.ToString(), "Exception");
        }
    }
}
/// <summary>
/// Enrolls the image at imgLoc: detects the face, extracts eyes + template,
/// then inserts the record (including the full image and the face crop as JPEG
/// byte arrays) into the SQL CE FaceList table.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    TFaceRecord fr = new TFaceRecord();
    FSDK.SetFaceDetectionParameters(false, true, 384);
    FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);
    fr.ImageFileName = imgLoc;
    fr.FacePosition = new FSDK.TFacePosition();
    fr.FacialFeatures = new FSDK.TPoint[2];
    fr.Template = new byte[FSDK.TemplateSize];
    fr.image = new FSDK.CImage(imgLoc);
    fr.FacePosition = fr.image.DetectFace();

    if (0 == fr.FacePosition.w)
    {
        if (imgLoc.Length <= 1)
        {
            MessageBox.Show("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.", "Enrollment error");
        }
        // fix: stop here — the code below dereferences fr.faceImage, which is
        // still null when no face was detected (previously threw a
        // NullReferenceException before anything useful was stored).
        return;
    }

    // Crop the square face region around the detected centre.
    fr.faceImage = fr.image.CopyRect((int)(fr.FacePosition.xc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.xc + Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc + Math.Round(fr.FacePosition.w * 0.5)));
    fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
    fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision
    FaceList.Add(fr);

    // Serialize the full image and the face crop to JPEG byte arrays.
    // fix: the streams and CLR images are now disposed (previously leaked).
    byte[] img_array;
    byte[] img_face_array;
    using (MemoryStream strm = new MemoryStream())
    using (Image img = fr.image.ToCLRImage())
    {
        img.Save(strm, System.Drawing.Imaging.ImageFormat.Jpeg);
        img_array = strm.ToArray();
    }
    using (MemoryStream strm_face = new MemoryStream())
    using (Image img_face = fr.faceImage.ToCLRImage())
    {
        img_face.Save(strm_face, System.Drawing.Imaging.ImageFormat.Jpeg);
        img_face_array = strm_face.ToArray();
    }

    int x;
    conn = new SqlCeConnection(stringCon);
    try
    {
        conn.Open();
        using (var cmd = new SqlCeCommand("insert into FaceList (ImageFileName,SubjectName,FacePositionXc,FacePositionYc,FacePositionW,FacePositionAngle,Eye1X,Eye1Y,Eye2X,Eye2Y,Template,Image,FaceImage) values (@IFName,@SName,@FPXc,@FPYc,@FPW,@FPA,@Eye1X,@Eye1Y,@Eye2X,@Eye2Y,@Template,@Image,@FaceImage)", conn))
        {
            // NOTE(review): parameter names are added WITHOUT the leading '@'
            // while the SQL text uses '@IFName' etc. Kept as-is since the
            // original apparently relied on the provider's matching — verify
            // against the SQL CE provider before changing.
            cmd.Parameters.Add(@"IFName", fr.ImageFileName);
            cmd.Parameters.Add(@"SName", textBox1.Text.Trim());
            cmd.Parameters.Add(@"FPXc", fr.FacePosition.xc);
            cmd.Parameters.Add(@"FPYc", fr.FacePosition.yc);
            cmd.Parameters.Add(@"FPW", fr.FacePosition.w);
            cmd.Parameters.Add(@"FPA", fr.FacePosition.angle);
            cmd.Parameters.Add(@"Eye1X", fr.FacialFeatures[0].x);
            cmd.Parameters.Add(@"Eye1Y", fr.FacialFeatures[0].y);
            cmd.Parameters.Add(@"Eye2X", fr.FacialFeatures[1].x);
            cmd.Parameters.Add(@"Eye2Y", fr.FacialFeatures[1].y);
            cmd.Parameters.Add(@"Template", fr.Template);
            cmd.Parameters.Add(@"Image", img_array);
            cmd.Parameters.Add(@"FaceImage", img_face_array);
            x = cmd.ExecuteNonQuery();
        }
    }
    finally
    {
        // fix: the connection is now released even if the insert throws.
        conn.Close();
        conn.Dispose();
    }
    MessageBox.Show(x.ToString() + "Image successfully added !!");
}
/// <summary>
/// Opens the first video camera and runs the capture/tracking loop until
/// needClose is set: grabs frames, feeds them to a FaceSDK tracker, extracts a
/// face record for each tracked face, prompts for (or auto-saves) a user name,
/// draws face rectangles, and displays each frame in imageBox.
/// </summary>
private void btnStart_Click(object sender, EventArgs e)
{
    this.btnStart.Enabled = false;
    //int FSDK.SetHTTPProxy("192.168.43.1:8080", 8080, "", "");
    //int r = FSDKCam.OpenIPVideoCamera(FSDKCam.FSDK_VIDEOCOMPRESSIONTYPE.FSDK_MJPEG, "192.168.43.1", "", "", 60, ref cameraHandle);
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        // NOTE(review): Application.Exit() does not stop this method — execution
        // falls through into the tracker setup below. Confirm whether an early
        // return was intended here.
        Application.Exit();
    }
    try
    {
        int tracker = 0; // creating a Tracker
        if (FSDK.FSDKE_OK != FSDK.LoadTrackerMemoryFromFile(ref tracker, TrackerMemoryFile)) // try to load saved tracker state
        {
            FSDK.CreateTracker(ref tracker); // if could not be loaded, create a new tracker
        }
        int err = 0; // set realtime face detection parameters
        FSDK.SetTrackerMultipleParameters(tracker, "HandleArbitraryRotations=false; DetermineFaceRotationAngle=true; InternalResizeWidth=100; FaceDetectionThreshold=1;", ref err);
        FSDK.SetFaceDetectionParameters(false, true, 384);
        while (!needClose)
        {
            Int32 imageHandle = 0;
            if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
            {
                Application.DoEvents();
                continue;
            }
            FSDK.CImage image = new FSDK.CImage(imageHandle);
            long[] IDs;
            long faceCount = 0;
            FSDK.FeedFrame(tracker, cameraHandle, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum of 256 faces detected
            Array.Resize(ref IDs, (int)faceCount);
            // make UI controls accessible (to find if the user clicked on a face)
            Application.DoEvents();
            Image frameImage = image.ToCLRImage();
            Graphics gr = Graphics.FromImage(frameImage);
            for (int i = 0; i < IDs.Length; ++i)
            {
                FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
                FSDK.GetTrackerFacePosition(tracker, cameraHandle, IDs[i], ref facePosition);
                // Rectangle to draw: slightly wider than the detected face width.
                int left = facePosition.xc - (int)(facePosition.w * 0.6);
                int top = facePosition.yc - (int)(facePosition.w * 0.5);
                int w = (int)(facePosition.w * 1.2);
                string name; // only used by the commented-out drawing code below
                // Build a full face record (crop, eyes, template) from this frame.
                TFaceRecord fIn = new TFaceRecord();
                fIn.FacePosition = new FSDK.TFacePosition();
                fIn.FacialFeatures = new FSDK.TPoint[2];
                fIn.Template = new byte[FSDK.TemplateSize];
                FSDK.CImage img = new FSDK.CImage(image.ImageHandle);
                img = image.CopyRect((int)(facePosition.xc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.xc + Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc + Math.Round(facePosition.w * 0.5)));
                fIn.image = img;
                fIn.FacePosition = fIn.image.DetectFace();
                fIn.faceImage = fIn.image.CopyRect((int)(fIn.FacePosition.xc - Math.Round(fIn.FacePosition.w * 0.5)), (int)(fIn.FacePosition.yc - Math.Round(fIn.FacePosition.w * 0.5)), (int)(fIn.FacePosition.xc + Math.Round(fIn.FacePosition.w * 0.5)), (int)(fIn.FacePosition.yc + Math.Round(fIn.FacePosition.w * 0.5)));
                fIn.FacialFeatures = fIn.image.DetectEyesInRegion(ref fIn.FacePosition);
                fIn.Template = fIn.image.GetFaceTemplateInRegion(ref fIn.FacePosition); // get template with higher precision
                TFaceRecord fOut = new TFaceRecord();
                //if (Fx.GetPerson(fIn, out fOut))
                //{
                //name = fOut.Fullname;
                // draw name
                //StringFormat format = new StringFormat();
                //format.Alignment = StringAlignment.Center;
                //lblPersons.Text = name;
                // gr.DrawString(name, new Font("Arial", 16), new SolidBrush(Color.LightGreen), facePosition.xc, top + w + 5, format);
                //}
                //else
                //{
                //lblPersons.Text = "";
                if (0 != fIn.FacePosition.w)
                {
                    //img = new FSDK.CImage(image.ImageHandle);
                    //img = fIn.image.CopyRect((int)(facePosition.xc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.xc + Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc + Math.Round(facePosition.w * 0.5)));
                    // Either auto-save the face with a generated name or ask the user.
                    if (Global.AutomaticTaining)
                    {
                        InputName inputName = new InputName(fIn);
                        inputName.AutoSave();
                        userName = inputName.userName;
                    }
                    else
                    {
                        InputName inputName = new InputName(fIn);
                        if (DialogResult.OK == inputName.ShowDialog(this))
                        {
                            userName = inputName.userName;
                            if (userName == null || userName.Length <= 0)
                            {
                                String s = "";
                            }
                            else
                            {
                            }
                        }
                    }
                    //}
                }
                //int res = FSDK.GetAllNames(tracker, IDs[i], out name, 65536); // maximum of 65536 characters
                /*/if (FSDK.FSDKE_OK == res && name.Length > 0)
                 * if (name.Length > 0)
                 * {
                 * // draw name
                 * //name = "Prince Daniel";
                 * StringFormat format = new StringFormat();
                 * format.Alignment = StringAlignment.Center;
                 *
                 * gr.DrawString(name, new Font("Arial", 16), new SolidBrush(Color.LightGreen), facePosition.xc, top + w + 5, format);
                 * }
                 * */
                // Highlight the face under the mouse cursor.
                Pen pen = Pens.LightGreen;
                if (mouseX >= left && mouseX <= left + w && mouseY >= top && mouseY <= top + w)
                {
                    pen = Pens.Blue;
                    //programState = ProgramState.psRemember;
                    if (ProgramState.psRemember == programState)
                    {
                        // "Remember" mode: re-crop the hovered face and enroll it.
                        //FSDK.CImage img = new FSDK.CImage(image.ImageHandle);
                        img = new FSDK.CImage(image.ImageHandle);
                        img = image.CopyRect((int)(facePosition.xc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc - Math.Round(facePosition.w * 0.5)), (int)(facePosition.xc + Math.Round(facePosition.w * 0.5)), (int)(facePosition.yc + Math.Round(facePosition.w * 0.5)));
                        if (Global.AutomaticTaining)
                        {
                            InputName inputName = new InputName(img);
                            inputName.AutoSave();
                            userName = inputName.userName;
                        }
                        else
                        {
                            InputName inputName = new InputName(img);
                            if (DialogResult.OK == inputName.ShowDialog(this))
                            {
                                userName = inputName.userName;
                                if (userName == null || userName.Length <= 0)
                                {
                                    String s = "";
                                }
                                else
                                {
                                }
                            }
                        }
                    }
                }
                gr.DrawRectangle(pen, left, top, w, w);
            }
            programState = ProgramState.psRecognize;
            // display current frame
            imageBox.Image = frameImage;
            GC.Collect(); // collect the garbage after the deletion
        }
        FSDKCam.CloseVideoCamera(cameraHandle);
        FSDKCam.FinalizeCapturing();
    }
    catch (Exception n)
    {
        // NOTE(review): any exception ends the capture loop silently — no
        // logging, no user feedback; the camera is also left open here.
        Application.DoEvents();
    }
}
/// <summary>
/// Enrolls user-picked image files: detects the face in each, extracts eyes +
/// recognition template, stores the face in the DB (SaveFaceInDB), and updates
/// the thumbnail list view, logging progress into textBox1.
/// </summary>
private void enrollFacesToolStripMenuItem_Click(object sender, EventArgs e)
{
    OpenFileDialog dlg = new OpenFileDialog();
    dlg.Filter = "JPEG (*.jpg)|*.jpg|Windows bitmap (*.bmp)|*.bmp|All files|*.*";
    dlg.Multiselect = true;
    if (dlg.ShowDialog() == DialogResult.OK)
    {
        try
        {
            // Assuming that faces are vertical (HandleArbitraryRotations = false) to speed up face detection
            FSDK.SetFaceDetectionParameters(false, true, 384);
            FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);
            foreach (string fn in dlg.FileNames)
            {
                TFaceRecord fr = new TFaceRecord();
                fr.ImageFileName = fn;
                fr.FacePosition = new FSDK.TFacePosition();
                fr.FacialFeatures = new FSDK.TPoint[2];
                fr.Template = new byte[FSDK.TemplateSize];
                fr.image = new FSDK.CImage(fn);
                textBox1.Text += "Enrolling '" + fn + "'\r\n";
                textBox1.Refresh();
                fr.FacePosition = fr.image.DetectFace();
                if (0 == fr.FacePosition.w)
                {
                    if (dlg.FileNames.Length <= 1)
                    {
                        MessageBox.Show("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.", "Enrollment error");
                    }
                    else
                    {
                        textBox1.Text += (fn + ": No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.\r\n");
                    }
                }
                else
                {
                    // Crop the square face region around the detected centre.
                    fr.faceImage = fr.image.CopyRect((int)(fr.FacePosition.xc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.xc + Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc + Math.Round(fr.FacePosition.w * 0.5)));
                    // fix: a failure on one image previously aborted the WHOLE
                    // batch via the outer catch; handle each step per-file the
                    // way loadSubject does, so remaining files still enroll.
                    try
                    {
                        fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
                    }
                    catch (Exception ex2)
                    {
                        MessageBox.Show(ex2.Message, "Error detecting eyes.");
                    }
                    try
                    {
                        fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision
                    }
                    catch (Exception ex2)
                    {
                        MessageBox.Show(ex2.Message, "Error retrieving face template.");
                    }
                    SaveFaceInDB(fr);
                    FaceList.Add(fr);
                    imageList1.Images.Add(fr.faceImage.ToCLRImage());
                    listView1.Items.Add((imageList1.Images.Count - 1).ToString(), fn.Split('\\')[fn.Split('\\').Length - 1], imageList1.Images.Count - 1);
                    textBox1.Text += "File '" + fn + "' enrolled\r\n";
                    textBox1.Refresh();
                }
                listView1.SelectedIndices.Clear();
                // fix: guard the selection — when the first file has no face the
                // list is empty and Add(-1) threw ArgumentOutOfRangeException.
                if (listView1.Items.Count > 0)
                {
                    listView1.SelectedIndices.Add(listView1.Items.Count - 1);
                }
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show("Can't open image(s) with error: " + ex.Message.ToString(), "Error");
        }
    }
}
/// <summary>
/// Enrolls user-picked image files, using each file's base name as the subject
/// name: detects the face, extracts eyes + recognition template, and adds the
/// face to FaceList/FaceName and the thumbnail list view.
/// </summary>
private void enrollFacesToolStripMenuItem_Click(object sender, EventArgs e)
{
    OpenFileDialog dialog = new OpenFileDialog();
    dialog.Filter = "JPEG (*.jpg)|*.jpg|Windows bitmap (*.bmp)|*.bmp|All files|*.*";
    dialog.Multiselect = true;
    if (dialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    try
    {
        // Faces are assumed upright; 384 is the internal resize width.
        FSDK.SetFaceDetectionParameters(false, true, 384);
        FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);

        foreach (string fileName in dialog.FileNames)
        {
            string subjectName = Path.GetFileNameWithoutExtension(fileName);
            TFaceRecord record = new TFaceRecord();
            record.ImageFileName = fileName;
            record.FacePosition = new FSDK.TFacePosition();
            record.FacialFeatures = new FSDK.TPoint[2];
            record.Template = new byte[FSDK.TemplateSize];
            record.image = new FSDK.CImage(fileName);
            record.FacePosition = record.image.DetectFace();

            if (record.FacePosition.w == 0)
            {
                // No face in this file: report and move on to the next one.
                if (dialog.FileNames.Length <= 1)
                {
                    MessageBox.Show("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.", "Enrollment error");
                }
                else
                {
                    add(subjectName + " face enroll failed.\nTry to lower the Minimal Face Quality parameter in the Options dialog box.");
                }
                continue;
            }

            // Square crop centred on the detected face.
            int half = (int)Math.Round(record.FacePosition.w * 0.5);
            record.faceImage = record.image.CopyRect(record.FacePosition.xc - half, record.FacePosition.yc - half, record.FacePosition.xc + half, record.FacePosition.yc + half);

            try
            {
                record.FacialFeatures = record.image.DetectEyesInRegion(ref record.FacePosition);
            }
            catch (Exception eyeError)
            {
                MessageBox.Show(eyeError.Message, "Error detecting eyes.");
            }
            try
            {
                // Template extracted from the detected region for higher precision.
                record.Template = record.image.GetFaceTemplateInRegion(ref record.FacePosition);
            }
            catch (Exception templateError)
            {
                MessageBox.Show(templateError.Message, "Error retrieving face template.");
            }

            // Register the face in memory and in the thumbnail UI.
            FaceList.Add(record);
            FaceName.Add(subjectName);
            imageList1.Images.Add(record.faceImage.ToCLRImage());
            int thumbnailIndex = imageList1.Images.Count - 1;
            listView1.Items.Add(thumbnailIndex.ToString(), subjectName, thumbnailIndex);
            add(subjectName + " enrolled");
            listView1.SelectedIndices.Clear();
            listView1.SelectedIndices.Add(listView1.Items.Count - 1);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message.ToString(), "Exception");
    }
}