// Added method: keyboard handler — R registers the current face, U unregisters it.
private void OnKeyDownHandler(object sender, KeyEventArgs e)
{
    // Added: register the face currently in view.
    if (e.Key == Key.R)
    {
        int id = rdata.RegisterUser();
        // FIX: was "Registed:" (typo in user-facing text).
        noticeInfo.Text = "Registered:" + id.ToString();
    }
    // Added: remove the current face from the recognition database.
    // The two keys are mutually exclusive, so use else-if.
    else if (e.Key == Key.U)
    {
        rdata.UnregisterUser();
        // FIX: was "UnRegisted!" (typo in user-facing text).
        noticeInfo.Text = "Unregistered!";
    }
}
/// <summary>
/// Clears the pending register flag and, when at least one face is detected,
/// registers the first detected face with the recognition module.
/// </summary>
/// <param name="faceOutput">Most recent face data snapshot.</param>
/// <exception cref="Exception">
/// Thrown when the face or its recognition data cannot be queried.
/// </exception>
private void RegisterUser(PXCMFaceData faceOutput)
{
    m_form.Register = false;

    if (faceOutput.QueryNumberOfDetectedFaces() <= 0)
        return;

    PXCMFaceData.Face firstFace = faceOutput.QueryFaceByIndex(0);
    if (firstFace == null)
        throw new Exception("PXCMFaceData.Face null");

    PXCMFaceData.RecognitionData recognition = firstFace.QueryRecognition();
    if (recognition == null)
        throw new Exception(" PXCMFaceData.RecognitionData null");

    recognition.RegisterUser();
}
// Frame-processing loop (worker thread): acquires color+depth frames, estimates
// whether the single detected face belongs to a live person (via the standard
// deviation of landmark depth values, averaged over 24 frames), and then drives
// face recognition / register / unregister logic.
private void ProcessingThread()
{
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire the color image data
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        PXCMImage depth = sample.depth;
        PXCMImage color = sample.color;
        // Map the depth image into the color camera's coordinate space.
        // NOTE(review): projection and compare_depth are created every frame and
        // never Dispose()d — looks like a native-resource leak; confirm and release.
        PXCMProjection projection = senseManager.QueryCaptureManager().QueryDevice().CreateProjection();
        PXCMImage compare_depth = projection.CreateDepthImageMappedToColor(depth, color);
        PXCMImage.ImageInfo image_info = compare_depth.QueryInfo();
        PXCMImage.PixelFormat p = image_info.format;          // unused beyond this point
        PXCMImage.ImageInfo depth_info = depth.QueryInfo();   // unused beyond this point
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Get face data
        if (faceData != null)
        {
            // Updates face data to most recent face data
            faceData.Update();
            if (faceData != null)   // redundant re-check: Update() cannot null the field
            {
                // Gets the number of faces
                numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                bool num_faces = false;
                // The liveness analysis below only supports exactly one face in view.
                if (numFacesDetected == 1)
                {
                    num_faces = true;
                }
                if (num_faces)
                {
                    if (userId == "No Users in View")
                    {
                        // NOTE(review): "Prcessing" is a typo, and the guard string
                        // differs in case from the "No users in view" set below, so
                        // this branch may never fire — confirm intent.
                        userId = "Prcessing";
                    }

                    // Get the first face detected (index 0)
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                    // Get landmark points
                    PXCMFaceData.LandmarksData landmarks = face.QueryLandmarks();
                    if (landmarks != null)
                    {
                        int total_points = landmarks.QueryNumPoints();
                        PXCMFaceData.LandmarkPoint[] v = new PXCMFaceData.LandmarkPoint[total_points];
                        landmarks.QueryPoints(out v);

                        // Mean of the landmark depth (world z) values.
                        float avg = 0;
                        for (int i = 0; i < total_points; i++)
                        {
                            avg += v[i].world.z;
                        }

                        // Found Standard Deviation of the landmark depths: a flat
                        // surface (photo/screen) produces a much smaller spread
                        // than a real, three-dimensional face.
                        float k = 0;
                        avg = avg / total_points;
                        for (int i = 0; i < total_points; i++)
                        {
                            k += (avg - v[i].world.z) * (avg - v[i].world.z);
                        }
                        double std = 0;
                        std = Math.Sqrt(k * 1 / total_points);
                        s.Add(std);   // per-frame std-dev; averaged every 24 frames below
                    }

                    // Retrieve face location data
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    if (faceDetectionData != null)
                    {
                        // Bounding Box for Face
                        // X and Y are the coordinates for the top left pixel of the rectangle,
                        // h and w are the height and width of the rectangle
                        PXCMRectI32 faceRectangle;
                        faceDetectionData.QueryBoundingRect(out faceRectangle);
                        faceRectangleHeight = faceRectangle.h;
                        faceRectangleWidth = faceRectangle.w;
                        faceRectangleX = faceRectangle.x;
                        faceRectangleY = faceRectangle.y;
                        int n = faceRectangleHeight + 5;   // dead local — never used
                    }

                    // Every 24 collected samples, average the std-devs and reset the window.
                    double avg_list = 0;
                    if (s.Count == 24)
                    {
                        avg_list = s.Average();
                        s.Clear();
                        first_process = false;
                    }

                    // Find threshold: average spread above 0.01 is treated as a live person.
                    if (avg_list > .01)
                    {
                        person = true;
                    }
                    if (s.Count < 24 && first_process)
                    {
                        userId = "Processing";
                        person = false;
                    }
                    if (avg_list > 0 && avg_list < .01)
                    {
                        userId = "Invalid";   // spread too small — likely a flat image; reject
                        person = false;
                    }

                    // Process face recognition data — only once liveness is established.
                    if (face != null && person)
                    {
                        // Retrieve the recognition data instance
                        recognitionData = face.QueryRecognition();

                        // Set the user ID and process register/unregister logic
                        // Only executes when user is registered
                        // doRegister = false
                        if (recognitionData.IsRegistered())
                        {
                            // Build a timestamped log prefix on the UI thread
                            // (reads the btnRegisterMode control).
                            this.Dispatcher.Invoke((Action)(() =>
                            {
                                if (btnRegisterMode.IsEnabled == true)
                                {
                                    date = DateTime.Today.ToString("dd-MM-yyyy");
                                    time = DateTime.Now.ToString("HH:mm:ss");
                                    log = date + " " + time + " ";
                                }
                            }));
                            // dbConnection = new SQLiteConnection("Data Source=/FlaskApp/database.db; Version=3;");
                            // dbConnection.Open();
                            userId = Convert.ToString(recognitionData.QueryUserID());
                            log += "User ID: " + userId + "Recognized";
                            System.Diagnostics.Debug.WriteLine(userId);
                            //_serialPort.Write(1.ToString());
                            if (doUnregister)
                            {
                                recognitionData.UnregisterUser();
                                doUnregister = false;
                            }
                        }
                        else
                        {
                            if (doRegister)
                            {
                                // Launch the external registration web page, then register.
                                // NOTE(review): Thread.Sleep(10000) blocks this processing
                                // thread for 10 s — presumably to give the user time on the
                                // web form; confirm this stall is acceptable.
                                Process registrationProcess = System.Diagnostics.Process.Start("http://127.0.0.1:5000/register");
                                Thread.Sleep(10000);
                                recognitionData.RegisterUser();
                                // Capture a jpg image of registered user
                                colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                                doRegister = false;
                            }
                            else
                            {
                                userId = "Unrecognized";
                            }
                        }
                    }
                }
                else if (numFacesDetected > 1)
                {
                    userId = "Too many faces in view";
                }
                else
                {
                    //_serialPort.Write(0.ToString());
                    userId = "No users in view";
                    first_process = true;   // restart the 24-sample liveness window
                }
            }

            // Display the color stream and other UI elements
            UpdateUI(colorBitmap);

            // Release resources
            colorBitmap.Dispose();
            sample.color.ReleaseAccess(colorData);
            sample.color.Dispose();

            // Release the frame
            // NOTE(review): this sits INSIDE the faceData null-check — if faceData
            // were ever null the frame would never be released; confirm faceData is
            // always initialized before this thread starts.
            senseManager.ReleaseFrame();
        }
    }
}
// Frame-processing loop (worker thread): grabs color frames, updates face
// tracking, applies pending register/unregister requests, and broadcasts the
// face rectangle + user id to connected clients via Server.sendMsg.
private void ProcessingThread()
{
    try
    {
        while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            PXCMCapture.Sample sample = senseManager.QuerySample();
            PXCMImage.ImageData colorData;
            sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
            Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

            // Get face data
            if (faceData != null)
            {
                faceData.Update();
                numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                if (numFacesDetected > 0)
                {
                    // Queries the LAST detected face (index numFacesDetected - 1),
                    // not the first — NOTE(review): confirm this is intentional.
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(numFacesDetected - 1);

                    // Retrieve face location data
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    if (faceDetectionData != null)
                    {
                        PXCMRectI32 faceRectangle;
                        faceDetectionData.QueryBoundingRect(out faceRectangle);
                        faceRectangleHeight = faceRectangle.h;
                        faceRectangleWidth = faceRectangle.w;
                        faceRectangleX = faceRectangle.x;
                        faceRectangleY = faceRectangle.y;
                    }

                    // Process face recognition data
                    if (face != null)
                    {
                        // Retrieve the recognition data instance
                        recognitionData = face.QueryRecognition();

                        // Set the user ID and process register/unregister logic
                        if (recognitionData.IsRegistered())
                        {
                            userId = Convert.ToString(recognitionData.QueryUserID());
                            //if (lastUserId == userId)
                            // Fire the LoadUser action only when the recognized id changes.
                            if (flagUserId != userId)
                            {
                                Actions.LoadUser(Convert.ToInt16(userId), 255, "userinview", true);
                                flagUserId = userId;
                            }
                            if (doUnregister)
                            {
                                recognitionData.UnregisterUser();
                                SaveDatabaseToFile();
                                doUnregister = false;
                            }
                        }
                        else
                        {
                            if (doRegister)
                            {
                                recognitionData.RegisterUser();
                                // Capture a jpg image of registered user
                                colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                                doRegister = false;
                            }
                            else
                            {
                                userId = "Unrecognized";
                            }
                        }
                    }
                }
                else
                {
                    userId = "No users in view";
                    // Sentinel that forces a LoadUser notification on the next recognition.
                    flagUserId = "modifyed";
                }
            }

            // Release resources
            colorBitmap.Dispose();
            sample.color.ReleaseAccess(colorData);
            sample.color.Dispose();
            senseManager.ReleaseFrame();

            // Broadcast the latest face rectangle and user id.
            // NOTE(review): sends the most recent stored values even on frames
            // where no face was seen — confirm clients expect that.
            coords = faceRectangleX.ToString() + " " + faceRectangleY.ToString() + " " + faceRectangleWidth.ToString() + " " + faceRectangleHeight.ToString();
            Server.sendMsg(255, "rect", coords, userId);
        }
    }
    catch
    {
        // NOTE(review): blanket catch silently ends the processing thread and
        // hides the failure cause; consider logging the exception.
        Console.WriteLine("ERRO ProcessingThread");
    }
}
/// <summary>
/// Analyzes every face detected in the given frame snapshot: builds a FaceObject,
/// checks pose/alignment, registers unrecognized aligned faces when registration
/// is requested (button-triggered or auto-register), and forwards each face to
/// the drawing/tracking pipeline.
/// </summary>
/// <param name="data">Face data snapshot for the current frame.</param>
private void Analyze(PXCMFaceData data)
{
    numFacesDetected = data.QueryNumberOfDetectedFaces();
    bool isRegisteringFace = false;
    for (int i = 0; i < numFacesDetected; i++)
    {
        // BUG FIX: the original indexed the faceData field while the face count
        // came from the 'data' parameter — both must use the same snapshot.
        PXCMFaceData.Face face = data.QueryFaceByIndex(i);
        if (face != null)
        {
            FaceObject fObj = new FaceObject();
            CreateFaceObject(face, ref fObj);

            // Check landmarks alignment
            //GetFaceLandmarks(face, out fObj.noseTip, out fObj.leftEye, out fObj.rightEye, out fObj.landmarkDetected);
            GetFacePose(face, out fObj.pitch, out fObj.yaw, out fObj.roll);
            CheckFaceAlignment(ref fObj);

            if (fObj.ID < 0) // Face is NOT recognized
            {
                // TODO: Check if the face is registerable
                if (doRegister && fObj.isAligned)
                {
                    recogData = face.QueryRecognition();
                    if (recogData != null)
                    {
                        if (isAutoRegister)
                        {
                            // Require RecogCompensation consecutive recognition
                            // failures before auto-registering, so transient
                            // mis-detections are not registered.
                            if (failedCount[i] >= RecogCompensation)
                            {
                                fObj.ID = recogData.RegisterUser();
                                isRegisteringFace = true;
                                failedCount[i] = 0;
                                Console.WriteLine("Registrering Face: " + fObj.ID);
                            }
                            else
                            {
                                failedCount[i]++;
                                Console.WriteLine("Registration Failed (" + i + "): " + failedCount[i] + "/" + RecogCompensation);
                            }
                        }
                        else // Button Triggered Registration
                        {
                            fObj.ID = recogData.RegisterUser();
                        }
                    }
                    fObj.registerTime = DateTime.Now;
                    fObj.lastSeen = DateTime.Now;
                }
            }
            else // Face is RECOGNIZED
            {
                failedCount[i] = 0;   // reset the auto-register failure counter
            }
            DrawFace(fObj);
            CheckFace(fObj);
        }
    }
    if (isRegisteringFace)
    {
        lastRegisterTime = DateTime.Now;
    }
    if (doRegister)
    {
        doRegister = false;   // the registration request is consumed once per call
    }
    CheckMissingFace();
}
/// <summary>
/// Registers the first detected face with the recognition module and opens a
/// registration dialog so the operator can attach an id/name to the new entry.
/// Also corrects the recognition database when an already-registered face is
/// re-registered (treated as fixing a misidentification).
/// </summary>
/// <param name="faceOutput">Most recent face data snapshot.</param>
/// <exception cref="Exception">
/// Thrown when the face or its recognition data cannot be queried.
/// </exception>
private void RegisterUser(PXCMFaceData faceOutput)
{
    m_form.Register = false;   // consume the pending register request
    if (faceOutput.QueryNumberOfDetectedFaces() <= 0)
    {
        return;
    }
    PXCMFaceData.Face qface = faceOutput.QueryFaceByIndex(0);
    if (qface == null)
    {
        throw new Exception("PXCMFaceData.Face null");
    }
    PXCMFaceData.RecognitionData rdata = qface.QueryRecognition();
    if (rdata == null)
    {
        throw new Exception(" PXCMFaceData.RecognitionData null");
    }
    #region 註冊視窗
    // (Registration dialog)
    bool isRegistered = rdata.IsRegistered(); // was this face already registered?
    int realSenseId = rdata.RegisterUser();
    // Locate the database entry created for this RealSense id.
    // NOTE(review): dbItem may be null if no entry matches — confirm the SDK
    // always produces one before RegisterForm reads dbItem.Image below.
    var collection = faceOutput.QueryRecognitionModule()
        .GetDatabase()
        .Where(x => x.ForeignKey == realSenseId);
    var dbItem = collection.LastOrDefault();
    if (realSenseId == -1)
    {
        // Registration failed — nothing to record.
        return;
    }
    if (isRegistered)
    {
        // The face was already registered yet got registered again — most
        // likely a correction of an earlier misidentification.
        List<RecognitionFaceData> faceData = faceOutput.QueryRecognitionModule()
            .GetDatabase().ToList();
        dbItem = faceData.Last();
        dbItem.ForeignKey = dbItem.PrimaryKey + 100; // corrected id
        faceData[faceData.Count - 1] = dbItem;
        // Clean up inconsistencies in the database.
        FaceDatabaseFile.FormatData(faceData, NameMapping);
        qrecognition.SetDatabase(faceData.ToArray());
    }
    // Show the captured face and ask the operator for an id/name.
    var registerForm = new RegisterForm() { Picture = dbItem.Image };
    if (registerForm.ShowDialog() == DialogResult.OK)
    {
        var mapping = NameMapping.Where(x => x.Id == registerForm.Id).FirstOrDefault();
        if (mapping == null)
        {
            mapping = new NameMapping() { Id = registerForm.Id, Name = registerForm.Name };
            NameMapping.Add(mapping);
        }
        if (registerForm.Name.Length > 0)
        {
            mapping.Name = registerForm.Name;
        }
        mapping.DataIds.Add(dbItem.ForeignKey);
    }
    #endregion
}
/// <summary>
/// Frame-processing loop (worker thread): acquires color frames, scans each
/// frame for barcodes, tracks the first detected face (scaled bounding box,
/// head pose, expressions), and handles face recognition register/unregister
/// requests backed by a persistent id-to-name dictionary.
/// </summary>
private void ProcessingThread()
{
    // Start AcquireFrame/ReleaseFrame loop
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire the color image data
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        int topScore = 0;
        FaceExpression expression = FaceExpression.None;

        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        try
        {
            // Detect and decode any barcode visible in the color frame.
            IBarcodeReader reader = new BarcodeReader();
            var result = reader.Decode(colorBitmap);
            if (result != null)
            {
                MessageBox.Show(result.BarcodeFormat.ToString());
                MessageBox.Show(result.Text);
            }
        }
        catch (Exception ex)
        {
            // FIX: the original swallowed the exception silently; at least trace it.
            System.Diagnostics.Debug.WriteLine(ex);
        }

        // Get face data
        if (faceData != null)
        {
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();
            if (numFacesDetected > 0)
            {
                // Get the first face detected (index 0)
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                // Retrieve face location data; the stored rectangle is enlarged
                // more aggressively the smaller the detected face is.
                PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                if (faceDetectionData != null)
                {
                    PXCMRectI32 faceRectangle;
                    faceDetectionData.QueryBoundingRect(out faceRectangle);
                    if ((faceRectangle.h > 90) || (faceRectangle.w > 90))
                    {
                        faceRectangleHeight = faceRectangle.h * 3 / 2;
                        faceRectangleWidth = faceRectangle.w * 3 / 2;
                    }
                    else if (((faceRectangle.h < 90) || (faceRectangle.w < 90)) && ((faceRectangle.h > 70) || (faceRectangle.w > 70)))
                    {
                        faceRectangleHeight = faceRectangle.h * 2;
                        faceRectangleWidth = faceRectangle.w * 2;
                    }
                    else
                    {
                        faceRectangleHeight = faceRectangle.h * 5 / 2;
                        faceRectangleWidth = faceRectangle.w * 5 / 2;
                    }
                    faceRectangleX = faceRectangle.x;
                    faceRectangleY = faceRectangle.y;
                }

                // Retrieve pose estimation data
                PXCMFaceData.PoseData facePoseData = face.QueryPose();
                if (facePoseData != null)
                {
                    PXCMFaceData.PoseEulerAngles headAngles;
                    facePoseData.QueryPoseAngles(out headAngles);
                    headRoll = headAngles.roll;
                    headPitch = headAngles.pitch;
                    headYaw = headAngles.yaw;
                }

                // Retrieve expression data
                PXCMFaceData.ExpressionsData expressionData = face.QueryExpressions();
                if (expressionData != null)
                {
                    PXCMFaceData.ExpressionsData.FaceExpressionResult score;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Kiss)] = score.intensity;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Open)] = score.intensity;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Smile)] = score.intensity;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Tongue)] = score.intensity;

                    // Determine the highest scoring expression
                    for (int i = 1; i < TotalExpressions; i++)
                    {
                        if (expressionScore[i] > topScore)
                        {
                            // BUG FIX: topScore was never updated, so 'expression'
                            // ended up being the LAST expression with a non-zero
                            // score instead of the highest-scoring one.
                            topScore = expressionScore[i];
                            expression = (FaceExpression)i;
                        }
                    }
                }

                // Process face recognition data
                if (face != null)
                {
                    // Retrieve the recognition data instance
                    recognitionData = face.QueryRecognition();

                    // Set the user ID and process register/unregister logic
                    if (recognitionData.IsRegistered())
                    {
                        userId = Convert.ToString(recognitionData.QueryUserID());
                        if (doUnregister)
                        {
                            recognitionData.UnregisterUser();
                            SaveDatabaseToFile();
                            doUnregister = false;
                            // Drop the persisted id -> name mapping for this user.
                            if (_persistentDict.ContainsKey(userId) == true)
                            {
                                _persistentDict.Remove(userId);
                            }
                        }
                    }
                    else
                    {
                        if (doRegister)
                        {
                            int uId = recognitionData.RegisterUser();
                            SaveDatabaseToFile();
                            // Persist the id -> name mapping for the new user.
                            if (newUserName != "")
                            {
                                if (_persistentDict.ContainsKey(uId.ToString()) == false)
                                {
                                    _persistentDict.Add(uId.ToString(), newUserName);
                                    _persistentDict.Flush();
                                    newUserName = "";
                                }
                            }
                            // Capture a jpg image of registered user
                            colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                            doRegister = false;
                        }
                        else
                        {
                            userId = "New User";
                        }
                    }
                }
            }
            else
            {
                userId = "No users in view";
            }
        }

        // (Removed a large block of commented-out hand-tracking/gesture code
        //  that referenced hand, handData, nodes, gestureData, etc.)

        // Display the color stream and other UI elements
        //UpdateUI(colorBitmap, expression, gesture);
        UpdateUI(colorBitmap, expression);

        // Release resources
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        sample.color.Dispose();

        // Release the frame
        senseManager.ReleaseFrame();
    }
}
// Frame-processing loop (worker thread): reads color frames, estimates head
// pose and eye/expression state for the first detected face, derives a
// 'looksForward' gaze flag from those thresholds, and handles face
// recognition register/unregister requests.
private void ProcessingThread()
{
    // Start AcquireFrame/ReleaseFrame loop
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire the color image data
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Get face data
        if (faceData != null)
        {
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();
            if (numFacesDetected > 0)
            {
                // Get the first face detected (index 0)
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                face.QueryExpressions();   // NOTE(review): result discarded; 'edata' below re-queries
                PXCMFaceData.PoseData poseData = face.QueryPose();

                // PXCMPoint3DF32 outHeadPosition = new PXCMPoint3DF32(); //F200 has added confidence into struct
                PXCMFaceData.HeadPosition outHeadPosition = new PXCMFaceData.HeadPosition();

                // Processing the head pose data to find the head center position.
                poseData.QueryHeadPosition(out outHeadPosition);
                System.Windows.Media.Media3D.Point3DCollection points = new System.Windows.Media.Media3D.Point3DCollection();
                points.Add(new System.Windows.Media.Media3D.Point3D(outHeadPosition.headCenter.x, outHeadPosition.headCenter.y, outHeadPosition.headCenter.z));
                Console.WriteLine("head center position: " + points);

                // poseData.QueryHeadPosition(out outHeadPosition);
                PXCMFaceData.PoseEulerAngles outPoseEulerAngles = new PXCMFaceData.PoseEulerAngles();
                poseData.QueryPoseAngles(out outPoseEulerAngles);
                roll = (int)outPoseEulerAngles.roll;
                pitch = (int)outPoseEulerAngles.pitch;
                yaw = (int)outPoseEulerAngles.yaw;
                // PXCMFaceData.LandmarkType.LANDMARK_EYE_LEFT_CENTER what can I do with this?

                // Threshold the Euler angles into coarse head-direction flags.
                // NOTE(review): the +12 offset biases pitch (fires when pitch > -2) —
                // presumably a camera-mounting calibration; confirm.
                if (pitch + 12 > 10) { headUp = true; } else { headUp = false; }
                if (pitch < -10) { headDown = true; } else { headDown = false; }
                if (roll > 5) { headTiltLeft = true; } else { headTiltLeft = false; }
                if (roll < -5) { headTiltRight = true; } else { headTiltRight = false; }
                if (yaw > 5) { headTurnLeft = true; } else { headTurnLeft = false; }
                if (yaw < -5) { headTurnRight = true; } else { headTurnRight = false; }
                //Console.WriteLine("Rotation: " + outPoseEulerAngles.roll + " " + outPoseEulerAngles.pitch + " " + outPoseEulerAngles.yaw);

                PXCMFaceData.ExpressionsData edata = face.QueryExpressions();

                // Retrieve the expression information.
                PXCMFaceData.ExpressionsData.FaceExpressionResult smileScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesUpScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesDownScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnLeftScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnRightScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult headTiltedLeftScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult headTurnedLeftScore;
                // PXCMFaceData.ExpressionsData.FaceExpressionResult headUpScore;
                //PXCMFaceData.ExpressionsData.FaceExpressionResult headDownScore;
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out smileScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_UP, out eyesUpScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_DOWN, out eyesDownScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_LEFT, out eyesTurnLeftScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_RIGHT, out eyesTurnRightScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TILT_LEFT, out headTiltedLeftScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TURN_LEFT, out headTurnedLeftScore);
                // edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_UP, out headUpScore);
                //edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_DOWN, out headDownScore);

                smile = smileScore.intensity;
                eyesUp = eyesUpScore.intensity;
                // Eye-direction flags fire only on full intensity (exactly 100).
                if (eyesUp == 100) { eyeIsUp = true; } else { eyeIsUp = false; }
                eyesDown = eyesDownScore.intensity;
                if (eyesDown == 100) { eyeIsDown = true; } else { eyeIsDown = false; }
                eyesTurnLeft = eyesTurnLeftScore.intensity;
                eyesTurnRight = eyesTurnRightScore.intensity;
                // headTiltLeft = headTiltedLeftScore.intensity;
                // headTurnLeft= headTurnedLeftScore.intensity;
                // headUp = headUpScore.intensity;
                //headDown = headDownScore.intensity;

                // NOTE(review): the accuracy setting is re-applied on every frame;
                // it could likely be set once at startup.
                PXCMCapture.Device device = senseManager.captureManager.device;
                device.SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_FINEST);

                // eyeIsUP= CheckFaceExpression(edata, FaceExpression.EXPRESSION_EYES_UP, 15);
                // Combine head direction and eye direction into a single gaze flag.
                // NOTE(review): uses non-short-circuit &/| on bools — same result,
                // but both sides are always evaluated.
                if ((headTiltLeft | headTurnLeft) & headUp & (eyesTurnLeft == 100) & (!eyeIsDown))
                {
                    looksForward = true;
                }
                else if ((headTiltRight | headTurnRight) & headUp & (eyesTurnRight == 100) & (!eyeIsDown))
                {
                    looksForward = true;
                }
                /* else if (headTiltRight & (headDown|headUp) & (!headTurnRight) & (eyesTurnRight==100))
                 * looksForward = true;
                 * else if (headTiltLeft & (headDown|headUp) & (!headTurnLeft) & (eyesTurnLeft == 100))
                 * looksForward = true;
                 * */
                else
                {
                    looksForward = eyeIsUp;
                }
                // headTiltLeftThreshold = CheckFaceExpression(edata, FaceExpression.EXPRESSION_HEAD_TILT_LEFT, 15);

                //csv mona — disabled per-frame CSV logging of pose/eye values:
                // var csv = new StringBuilder();
                // var newLine = string.Format("{0},{1},{2},{3},{4}{5}", DateTime.Now.ToString("dd-MM-yyyy-hh:mm:ss:fff"), roll, pitch, yaw, eyesUp, Environment.NewLine);
                // csv.Append(newLine);
                // string pathString = System.IO.Path.Combine(filePath, fileName);
                // File.AppendAllText(pathString, csv.ToString());

                // Retrieve face location data
                PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                if (faceDetectionData != null)
                {
                    PXCMRectI32 faceRectangle;
                    faceDetectionData.QueryBoundingRect(out faceRectangle);
                    faceRectangleHeight = faceRectangle.h;
                    faceRectangleWidth = faceRectangle.w;
                    faceRectangleX = faceRectangle.x;
                    faceRectangleY = faceRectangle.y;
                }

                // Process face recognition data
                if (face != null)
                {
                    // Retrieve the recognition data instance
                    recognitionData = face.QueryRecognition();
                    // Set the user ID and process register/unregister logic
                    if (recognitionData.IsRegistered())
                    {
                        userId = Convert.ToString(recognitionData.QueryUserID());
                        if (doUnregister)
                        {
                            recognitionData.UnregisterUser();
                            doUnregister = false;
                        }
                    }
                    else
                    {
                        if (doRegister)
                        {
                            recognitionData.RegisterUser();
                            // Capture a jpg image of registered user
                            colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                            doRegister = false;
                        }
                        else
                        {
                            userId = "Unrecognized";
                        }
                    }
                }
            }
            else
            {
                userId = "No users in view";
            }
        }

        // Display the color stream and other UI elements
        UpdateUI(colorBitmap);

        // Release resources
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        sample.color.Dispose();

        // Release the frame
        senseManager.ReleaseFrame();
    }
}
/// <summary>
/// Worker-thread loop: pulls color frames from the RealSense pipeline, refreshes
/// face tracking data, applies pending register/unregister requests, and hands
/// each frame to the UI before releasing it.
/// </summary>
private void ProcessingThread()
{
    // Keep pumping frames until acquisition reports an error.
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        PXCMCapture.Sample sample = senseManager.QuerySample();

        // Grab read access to the color plane and convert it to a GDI bitmap.
        PXCMImage.ImageData imageData;
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out imageData);
        Bitmap frameBitmap = imageData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        if (faceData != null)
        {
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();

            if (numFacesDetected == 0)
            {
                userId = "No users in view";
            }
            else
            {
                // Track the first detected face only.
                PXCMFaceData.Face trackedFace = faceData.QueryFaceByIndex(0);

                // Copy the face bounding box into the shared rectangle fields.
                PXCMFaceData.DetectionData detection = trackedFace.QueryDetection();
                if (detection != null)
                {
                    PXCMRectI32 rect;
                    detection.QueryBoundingRect(out rect);
                    faceRectangleHeight = rect.h;
                    faceRectangleWidth = rect.w;
                    faceRectangleX = rect.x;
                    faceRectangleY = rect.y;
                }

                if (trackedFace != null)
                {
                    recognitionData = trackedFace.QueryRecognition();

                    if (recognitionData.IsRegistered())
                    {
                        // Known user: publish the id and honor a pending unregister.
                        userId = Convert.ToString(recognitionData.QueryUserID());
                        if (doUnregister)
                        {
                            recognitionData.UnregisterUser();
                            doUnregister = false;
                        }
                    }
                    else if (doRegister)
                    {
                        // Unknown user with a pending register request:
                        // register and keep a snapshot of the new face.
                        recognitionData.RegisterUser();
                        frameBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                        doRegister = false;
                    }
                    else
                    {
                        userId = "Unrecognized";
                    }
                }
            }
        }

        // Push the frame to the UI, then tear down the per-frame resources.
        UpdateUI(frameBitmap);
        frameBitmap.Dispose();
        sample.color.ReleaseAccess(imageData);
        sample.color.Dispose();
        senseManager.ReleaseFrame();
    }
}
//---------------------------------------------------------ProcessingThread-----------------------------------------------------------------------------------------------
// Frame-processing loop (worker thread).
// AcquireFrame(true) pauses frame processing, reads the current frame and stores
// it where QuerySample() can access it; the ReleaseFrame() call further down
// unlocks the frame so processing resumes. The loop runs for as long as
// acquisition succeeds (return value >= PXCM_STATUS_NO_ERROR).
private void ProcessingThread()
{
    try
    {
        while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Acquire the color image data: query the sample captured by
            // AcquireFrame(true) (raw data).
            PXCMCapture.Sample sample = senseManager.QuerySample();

            // Structure that receives the converted image data.
            PXCMImage.ImageData colorData;

            // Convert the raw sample into ImageData via the out parameter.
            sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

            // Convert to a Bitmap.
            Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

            // Get face data
            if (faceData != null)
            {
                faceData.Update();
                numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                if (numFacesDetected > 0)
                {
                    // Get the first face detected (index 0)
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                    // Retrieve face location data
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    if (faceDetectionData != null)
                    {
                        PXCMRectI32 faceRectangle;
                        faceDetectionData.QueryBoundingRect(out faceRectangle);
                        faceRectangleHeight = faceRectangle.h;
                        faceRectangleWidth = faceRectangle.w;
                        faceRectangleX = faceRectangle.x;
                        faceRectangleY = faceRectangle.y;
                        //int faceRectangleX2 = (faceRectangleX - 510) * -1;
                    }

                    // Process face recognition data
                    if (face != null)
                    {
                        // Retrieve the recognition data instance
                        recognitionData = face.QueryRecognition();

                        // Set the user ID and process register/unregister logic
                        if (recognitionData.IsRegistered())
                        {
                            userId = Convert.ToString(recognitionData.QueryUserID());
                            // Load the user only when the recognized id changes.
                            if (flagUserId != userId)
                            {
                                Actions.LoadUser(Convert.ToInt16(userId));
                                flagUserId = userId;
                            }
                            if (doUnregister)
                            {
                                recognitionData.UnregisterUser();
                                doUnregister = false;
                            }
                        }
                        else
                        {
                            if (doRegister)
                            {
                                recognitionData.RegisterUser();
                                // Capture a jpg image of registered user
                                colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                                doRegister = false;
                            }
                            else
                            {
                                userId = "Unrecognized";
                            }
                        }
                    }
                }
                else
                {
                    userId = "No users in view";
                }
            }

            // Display the color stream and other UI elements
            UpdateUI(colorBitmap);

            // Release resources
            colorBitmap.Dispose();
            sample.color.ReleaseAccess(colorData);
            sample.color.Dispose();

            // Release the frame: unlocks the current frame so frame processing continues.
            senseManager.ReleaseFrame();

            // Broadcast the latest face rectangle and user id to connected clients.
            coords = faceRectangleX.ToString() + " " + faceRectangleY.ToString() + " " + faceRectangleWidth.ToString() + " " + faceRectangleHeight.ToString();
            Server.sendMsg(255, "rect", coords, userId);
        }
    }
    catch
    {
        // NOTE(review): blanket catch silently terminates the processing thread
        // and hides the failure cause; consider logging the exception.
        Console.WriteLine("ERRORRRRRRRRRRRRRRRRRRRRRR");
    }
}