/// <summary>
/// Unregisters the first detected face from the recognition database.
/// Clears the form's Unregister flag, then looks up face 0 and, if it is
/// currently registered, removes it. No-op when no face is in view or the
/// face is not registered.
/// </summary>
/// <param name="faceOutput">Face-tracking output for the current frame.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the SDK returns a null face or null recognition data
/// despite reporting at least one detected face.
/// </exception>
private void UnregisterUser(PXCMFaceData faceOutput)
{
    m_form.Unregister = false;

    if (faceOutput.QueryNumberOfDetectedFaces() <= 0)
    {
        return;
    }

    PXCMFaceData.Face qface = faceOutput.QueryFaceByIndex(0);
    if (qface == null)
    {
        // A positive detected-face count should guarantee index 0 exists;
        // a null here is an SDK invariant violation, not a normal miss.
        // (Was a bare 'Exception'; a specific type is kinder to callers and
        // still satisfies any existing 'catch (Exception)'.)
        throw new InvalidOperationException("PXCMFaceData.Face null");
    }

    PXCMFaceData.RecognitionData rdata = qface.QueryRecognition();
    if (rdata == null)
    {
        // Message also had a stray leading space in the original.
        throw new InvalidOperationException("PXCMFaceData.RecognitionData null");
    }

    if (!rdata.IsRegistered())
    {
        return;
    }

    rdata.UnregisterUser();
}
/// <summary>
/// Overlays the recognition status for the given face onto the shared UI
/// bitmap: "Registered ID: n" for a recognized face, "Not Registered"
/// otherwise. Skipped entirely when there is no bitmap or the Recognition
/// checkbox is off.
/// </summary>
/// <param name="face">Face to annotate; must not be null.</param>
public void DrawRecognition(PXCMFaceData.Face face)
{
    Debug.Assert(face != null);

    // Nothing to draw onto, or the feature is switched off in the UI.
    if (m_bitmap == null || !Recognition.Checked)
    {
        return;
    }

    PXCMFaceData.RecognitionData qrecognition = face.QueryRecognition();
    if (qrecognition == null)
    {
        throw new Exception(" PXCMFaceData.RecognitionData null");
    }

    // -1 is the SDK's sentinel for "no registered identity".
    int userId = qrecognition.QueryUserID();
    string recognitionText;
    if (userId == -1)
    {
        recognitionText = "Not Registered";
    }
    else
    {
        recognitionText = String.Format("Registered ID: {0}", userId);
    }

    lock (m_bitmapLock)
    {
        using (Graphics graphics = Graphics.FromImage(m_bitmap))
        using (var brush = new SolidBrush(m_faceTextOrganizer.Colour))
        using (var font = new Font(FontFamily.GenericMonospace, m_faceTextOrganizer.FontSize, FontStyle.Bold))
        {
            graphics.DrawString(recognitionText, font, brush, m_faceTextOrganizer.RecognitionLocation);
        }
    }
}
// Determines whether the face currently in view matches a registered user:
// a recognition id above 99 counts as registered (presumably because
// registered ids are assigned in that range - confirm against the
// registration code). Mirrors the result into the faceregistered field.
public bool isRegisteredDetected(PXCMFaceData.Face face)
{
    PXCMFaceData.RecognitionData qrecognition = face.QueryRecognition();
    if (qrecognition == null)
    {
        throw new Exception(" PXCMFaceData.RecognitionData null");
    }

    // Same threshold as the original branch pair: id > 99 means registered.
    bool isRegister = qrecognition.QueryUserID() > 99;
    faceregistered = isRegister;
    return isRegister;
}
/// <summary>
/// Advances the face-registration state machine by one frame and publishes
/// the current recognition id to Face.UserId.
/// States: Idle -> Requested (register) -> Working (wait for id) ->
/// Done (persist database) -> Idle.
/// </summary>
private void RecognizeFace(PXCMFaceData faceData, PXCMFaceData.Face face)
{
    var rdata = face.QueryRecognition();
    var userId = rdata.QueryUserID();

    if (_recognitionState == RecognitionState.Requested)
    {
        // Kick off registration; the SDK assigns the id on a later frame.
        rdata.RegisterUser();
        _recognitionState = RecognitionState.Working;
    }
    else if (_recognitionState == RecognitionState.Working)
    {
        // A positive id means the SDK finished registering this face.
        if (userId > 0)
        {
            _recognitionState = RecognitionState.Done;
        }
    }
    else if (_recognitionState == RecognitionState.Done)
    {
        SaveDatabase(faceData);
        _recognitionState = RecognitionState.Idle;
    }
    else if (_recognitionState != RecognitionState.Idle)
    {
        // Unknown enum value: same guard the original switch's default had.
        throw new ArgumentOutOfRangeException();
    }

    Face.UserId = userId;
}
/// <summary>
/// Per-frame loop body: pulls the tracked face and face data out of
/// <paramref name="loopObjects"/>, advances the registration state machine
/// (Idle -> Requested -> Working -> Done -> Idle), and publishes the
/// recognition id to the camera's Face model.
/// </summary>
public void Loop(LoopObjects loopObjects)
{
    PXCMFaceData.Face face = loopObjects.Get<PXCMFaceData.Face>();
    PXCMFaceData faceData = loopObjects.Get<PXCMFaceData>();

    var recognition = face.QueryRecognition();
    var userId = recognition.QueryUserID();

    if (_recognitionState == RecognitionState.Requested)
    {
        // Start registration; the id shows up on a subsequent frame.
        recognition.RegisterUser();
        _recognitionState = RecognitionState.Working;
    }
    else if (_recognitionState == RecognitionState.Working)
    {
        // A positive id means registration completed.
        if (userId > 0)
        {
            _recognitionState = RecognitionState.Done;
        }
    }
    else if (_recognitionState == RecognitionState.Done)
    {
        SaveDatabase(faceData);
        _recognitionState = RecognitionState.Idle;
    }
    else if (_recognitionState != RecognitionState.Idle)
    {
        // Same guard as the original switch's default arm.
        throw new ArgumentOutOfRangeException();
    }

    _camera.Face.UserId = userId;
}
/// <summary>
/// Populates (or replaces) a FaceObject from the current frame's detection
/// and recognition data. Leaves fObj untouched when the face carries no
/// detection data.
/// </summary>
/// <param name="face">Tracked face for this frame.</param>
/// <param name="fObj">Object to fill; may be replaced when the face is
/// recognized and a tracked object already exists for that user.</param>
private void CreateFaceObject(PXCMFaceData.Face face, ref FaceObject fObj)
{
    PXCMFaceData.DetectionData detection = face.QueryDetection();
    if (detection == null)
    {
        return;
    }

    PXCMRectI32 faceRect = new PXCMRectI32();
    detection.QueryBoundingRect(out faceRect);

    // -1 = unrecognized; only positive ids map to registered users.
    int userID = -1;
    PXCMFaceData.RecognitionData recogData = face.QueryRecognition();
    if (recogData != null)
    {
        userID = recogData.QueryUserID();
    }

    if (userID > 0)
    {
        // Recognized: reuse the tracked object for this user when we have
        // one. With a pre-loaded DB the lookup can miss, so fall back to a
        // fresh object.
        fObj = this.GetRecognizedFaceByID(userID);
        if (fObj == null)
        {
            fObj = new FaceObject();
        }
    }

    // Average depth divided by 1000 (assumes SDK reports millimetres,
    // stored as metres - confirm against SDK docs).
    float faceDistance;
    detection.QueryFaceAverageDepth(out faceDistance);
    faceDistance /= 1000;

    Rectangle rect = new Rectangle(faceRect.x, faceRect.y, faceRect.w, faceRect.h);
    //Point faceLoc = faceCamConfig.GetFaceLoc(rect);
    Point faceLoc = faceCamConfig.Location;

    fObj.ID = userID;
    fObj.boundingBox = rect;
    fObj.cellLocation = faceLoc;
    fObj.registerTime = DateTime.Now;
    fObj.lastSeen = DateTime.Now;
    fObj.distance = faceDistance;
}
/// <summary>
/// Draws the recognition result for a face onto the shared bitmap: either
/// "Not Registered", the mapped "id - name" label from the name mapping, or
/// a plain "Registered ID" fallback when the id has no mapping yet.
/// </summary>
/// <param name="face">Face to annotate; must not be null.</param>
public void DrawRecognition(PXCMFaceData.Face face)
{
    Debug.Assert(face != null);
    if (m_bitmap == null || !Recognition.Checked)
    {
        return;
    }

    PXCMFaceData.RecognitionData qrecognition = face.QueryRecognition();
    if (qrecognition == null)
    {
        throw new Exception(" PXCMFaceData.RecognitionData null");
    }

    var userId = qrecognition.QueryUserID();

    #region Build the recognition label
    string recognitionText;
    if (userId == -1)
    {
        recognitionText = "Not Registered";
    }
    else
    {
        var mapping = FaceTracking.NameMapping.Where(x => x.DataIds.Contains(userId)).FirstOrDefault();
        // Fix: a registered id with no name mapping previously left the text
        // null and passed null into Graphics.DrawString; fall back to the
        // raw id so something sensible is always drawn.
        recognitionText = mapping != null
            ? $"{mapping.Id} - {mapping.Name}"
            : $"Registered ID: {userId}";
    }
    #endregion

    lock (m_bitmapLock)
    {
        using (Graphics graphics = Graphics.FromImage(m_bitmap))
        using (var brush = new SolidBrush(m_faceTextOrganizer.Colour))
        using (var font = new Font(FontFamily.GenericMonospace, m_faceTextOrganizer.FontSize, FontStyle.Bold))
        {
            graphics.DrawString(recognitionText, font, brush, m_faceTextOrganizer.RecognitionLocation);
        }
    }
}
/// <summary>
/// Camera loop: grabs each color frame, tracks the most recently detected
/// face, handles recognition register/unregister requests, and streams the
/// face rectangle plus user id to connected clients via Server.sendMsg.
/// Runs until AcquireFrame reports an error or an exception escapes.
/// </summary>
private void ProcessingThread()
{
    try
    {
        while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            PXCMCapture.Sample sample = senseManager.QuerySample();
            PXCMImage.ImageData colorData;
            sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
            Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

            // Get face data
            if (faceData != null)
            {
                faceData.Update();
                numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                if (numFacesDetected > 0)
                {
                    // NOTE(review): despite the original "index 0" comment,
                    // this queries the LAST detected face - confirm which
                    // face is actually intended here.
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(numFacesDetected - 1);

                    // Retrieve face location data
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    if (faceDetectionData != null)
                    {
                        PXCMRectI32 faceRectangle;
                        faceDetectionData.QueryBoundingRect(out faceRectangle);
                        faceRectangleHeight = faceRectangle.h;
                        faceRectangleWidth = faceRectangle.w;
                        faceRectangleX = faceRectangle.x;
                        faceRectangleY = faceRectangle.y;
                    }

                    // Process face recognition data
                    if (face != null)
                    {
                        // Retrieve the recognition data instance
                        recognitionData = face.QueryRecognition();

                        // Set the user ID and process register/unregister logic
                        if (recognitionData.IsRegistered())
                        {
                            userId = Convert.ToString(recognitionData.QueryUserID());

                            //if (lastUserId == userId)
                            // Only reload the user when the id changed since
                            // the previous frame, avoiding repeated LoadUser
                            // calls for the same person.
                            if (flagUserId != userId)
                            {
                                Actions.LoadUser(Convert.ToInt16(userId), 255, "userinview", true);
                                flagUserId = userId;
                            }
                            if (doUnregister)
                            {
                                recognitionData.UnregisterUser();
                                SaveDatabaseToFile();
                                doUnregister = false;
                            }
                        }
                        else
                        {
                            if (doRegister)
                            {
                                recognitionData.RegisterUser();
                                // Capture a jpg image of registered user
                                colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                                doRegister = false;
                            }
                            else
                            {
                                userId = "Unrecognized";
                            }
                        }
                    }
                }
                else
                {
                    userId = "No users in view";
                    // Sentinel forces a LoadUser the next time a face appears.
                    flagUserId = "modifyed";
                }
            }

            // Release resources
            colorBitmap.Dispose();
            sample.color.ReleaseAccess(colorData);
            sample.color.Dispose();
            senseManager.ReleaseFrame();

            // Broadcast the current face rectangle and user id to clients.
            coords = faceRectangleX.ToString() + " " + faceRectangleY.ToString() + " " + faceRectangleWidth.ToString() + " " + faceRectangleHeight.ToString();
            Server.sendMsg(255, "rect", coords, userId);
        }
    }
    catch
    {
        // NOTE(review): swallows every exception and ends the thread
        // silently after this marker; consider logging the exception detail.
        Console.WriteLine("ERRO ProcessingThread");
    }
}
/// <summary>
/// Per-frame analysis: builds a FaceObject for every detected face, checks
/// pose/alignment, and registers unrecognized faces - automatically once a
/// face has failed recognition RecogCompensation frames in a row, or
/// immediately when registration was requested via the UI button.
/// </summary>
/// <param name="data">Face-tracking output for the current frame.</param>
private void Analyze(PXCMFaceData data)
{
    numFacesDetected = data.QueryNumberOfDetectedFaces();
    bool isRegisteringFace = false;

    for (int i = 0; i < numFacesDetected; i++)
    {
        // Fix: query faces from the same 'data' parameter the face count
        // came from; the original read the faceData field here, which could
        // diverge from 'data' and desynchronize index and count.
        PXCMFaceData.Face face = data.QueryFaceByIndex(i);
        if (face != null)
        {
            FaceObject fObj = new FaceObject();
            CreateFaceObject(face, ref fObj);

            //Check landmarks alignment
            //GetFaceLandmarks(face, out fObj.noseTip, out fObj.leftEye, out fObj.rightEye, out fObj.landmarkDetected);
            GetFacePose(face, out fObj.pitch, out fObj.yaw, out fObj.roll);
            CheckFaceAlignment(ref fObj);

            if (fObj.ID < 0) //Face is NOT recognized
            {
                // TODO: Check if the face is registerable
                if (doRegister && fObj.isAligned)
                {
                    recogData = face.QueryRecognition();
                    if (recogData != null)
                    {
                        if (isAutoRegister)
                        {
                            // Require several consecutive recognition misses
                            // before auto-registering, so a transient failure
                            // does not duplicate a known user.
                            if (failedCount[i] >= RecogCompensation)
                            {
                                fObj.ID = recogData.RegisterUser();
                                isRegisteringFace = true;
                                failedCount[i] = 0;
                                Console.WriteLine("Registrering Face: " + fObj.ID);
                            }
                            else
                            {
                                failedCount[i]++;
                                Console.WriteLine("Registration Failed (" + i + "): " + failedCount[i] + "/" + RecogCompensation);
                            }
                        }
                        else //Button Triggered Registration
                        {
                            fObj.ID = recogData.RegisterUser();
                        }
                    }
                    fObj.registerTime = DateTime.Now;
                    fObj.lastSeen = DateTime.Now;
                }
            }
            else // Face is RECOGNIZED
            {
                // Reset the consecutive-miss counter for this slot.
                failedCount[i] = 0;
            }
            DrawFace(fObj);
            CheckFace(fObj);
        }
    }

    if (isRegisteringFace)
    {
        lastRegisterTime = DateTime.Now;
    }
    // Button-triggered registration is a one-shot request.
    if (doRegister)
    {
        doRegister = false;
    }
    CheckMissingFace();
}
/// <summary>
/// Registers the first detected face with the RealSense recognition
/// database and links the resulting database entry to a user via the
/// registration dialog. When the face was already registered (likely a
/// misrecognition being corrected), the newest entry is re-keyed and the
/// database is normalized before showing the dialog.
/// </summary>
/// <param name="faceOutput">Face-tracking output for the current frame.</param>
private void RegisterUser(PXCMFaceData faceOutput)
{
    m_form.Register = false;
    if (faceOutput.QueryNumberOfDetectedFaces() <= 0)
    {
        return;
    }

    PXCMFaceData.Face qface = faceOutput.QueryFaceByIndex(0);
    if (qface == null)
    {
        throw new Exception("PXCMFaceData.Face null");
    }
    PXCMFaceData.RecognitionData rdata = qface.QueryRecognition();
    if (rdata == null)
    {
        throw new Exception(" PXCMFaceData.RecognitionData null");
    }

    #region Registration dialog
    bool isRegistered = rdata.IsRegistered(); // already registered before this call?
    int realSenseId = rdata.RegisterUser();

    // Find the database entry the SDK just produced for this face.
    var collection = faceOutput.QueryRecognitionModule()
                     .GetDatabase()
                     .Where(x => x.ForeignKey == realSenseId);
    var dbItem = collection.LastOrDefault();

    if (realSenseId == -1)
    {
        return;
    }

    if (isRegistered)
    {
        // The face was already registered yet the user asked to register it
        // again - most likely correcting a misrecognition. Re-key the newest
        // entry (+100 offset marks corrected ids) and normalize the database.
        List<RecognitionFaceData> faceData = faceOutput.QueryRecognitionModule()
                                             .GetDatabase().ToList();
        dbItem = faceData.Last();
        dbItem.ForeignKey = dbItem.PrimaryKey + 100; // corrected id
        faceData[faceData.Count - 1] = dbItem;

        // Clean up inconsistencies in the database.
        FaceDatabaseFile.FormatData(faceData, NameMapping);
        // Fix: the original called 'qrecognition.SetDatabase(...)' but no
        // 'qrecognition' exists in this method; write the corrected database
        // back through the recognition module it was read from.
        faceOutput.QueryRecognitionModule().SetDatabase(faceData.ToArray());
    }

    var registerForm = new RegisterForm()
    {
        Picture = dbItem.Image
    };
    if (registerForm.ShowDialog() == DialogResult.OK)
    {
        var mapping = NameMapping.Where(x => x.Id == registerForm.Id).FirstOrDefault();
        if (mapping == null)
        {
            mapping = new NameMapping()
            {
                Id = registerForm.Id,
                Name = registerForm.Name
            };
            NameMapping.Add(mapping);
        }
        if (registerForm.Name.Length > 0)
        {
            mapping.Name = registerForm.Name;
        }
        mapping.DataIds.Add(dbItem.ForeignKey);
    }
    #endregion
}
/// <summary>
/// Camera loop: for each color frame, attempts barcode decoding, tracks the
/// first detected face (location, pose, expressions), picks the dominant
/// facial expression, handles recognition register/unregister requests, and
/// refreshes the UI. Runs until AcquireFrame reports an error.
/// </summary>
private void ProcessingThread()
{
    // Start AcquireFrame/ReleaseFrame loop
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire the color image data
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        int topScore = 0;
        FaceExpression expression = FaceExpression.None;

        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        try
        {
            // Detect and decode any barcode visible in the color frame.
            IBarcodeReader reader = new BarcodeReader();
            var result = reader.Decode(colorBitmap);
            if (result != null)
            {
                MessageBox.Show(result.BarcodeFormat.ToString());
                MessageBox.Show(result.Text);
            }
        }
        catch (Exception)
        {
            // Best-effort: barcode decode failures are intentionally ignored.
        }

        // Get face data
        if (faceData != null)
        {
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();

            if (numFacesDetected > 0)
            {
                // Get the first face detected (index 0)
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                // Retrieve face location data
                PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                if (faceDetectionData != null)
                {
                    PXCMRectI32 faceRectangle;
                    faceDetectionData.QueryBoundingRect(out faceRectangle);
                    // Scale the published rectangle inversely to the detected
                    // size: smaller (more distant) faces get a larger factor.
                    if ((faceRectangle.h > 90) || (faceRectangle.w > 90))
                    {
                        faceRectangleHeight = faceRectangle.h * 3 / 2;
                        faceRectangleWidth = faceRectangle.w * 3 / 2;
                    }
                    else if (((faceRectangle.h < 90) || (faceRectangle.w < 90)) && ((faceRectangle.h > 70) || (faceRectangle.w > 70)))
                    {
                        faceRectangleHeight = faceRectangle.h * 2;
                        faceRectangleWidth = faceRectangle.w * 2;
                    }
                    else
                    {
                        faceRectangleHeight = faceRectangle.h * 5 / 2;
                        faceRectangleWidth = faceRectangle.w * 5 / 2;
                    }
                    faceRectangleX = faceRectangle.x;
                    faceRectangleY = faceRectangle.y;
                }

                // Retrieve pose estimation data
                PXCMFaceData.PoseData facePoseData = face.QueryPose();
                if (facePoseData != null)
                {
                    PXCMFaceData.PoseEulerAngles headAngles;
                    facePoseData.QueryPoseAngles(out headAngles);
                    headRoll = headAngles.roll;
                    headPitch = headAngles.pitch;
                    headYaw = headAngles.yaw;
                }

                // Retrieve expression data
                PXCMFaceData.ExpressionsData expressionData = face.QueryExpressions();
                if (expressionData != null)
                {
                    PXCMFaceData.ExpressionsData.FaceExpressionResult score;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Kiss)] = score.intensity;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Open)] = score.intensity;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Smile)] = score.intensity;
                    expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out score);
                    expressionScore[Convert.ToInt32(FaceExpression.Tongue)] = score.intensity;

                    // Determine the highest scoring expression.
                    // Fix: topScore was never updated, so the loop previously
                    // selected the LAST expression with a positive intensity
                    // instead of the one with the maximum intensity.
                    for (int i = 1; i < TotalExpressions; i++)
                    {
                        if (expressionScore[i] > topScore)
                        {
                            topScore = expressionScore[i];
                            expression = (FaceExpression)i;
                        }
                    }
                }

                // Process face recognition data
                if (face != null)
                {
                    // Retrieve the recognition data instance
                    recognitionData = face.QueryRecognition();

                    // Set the user ID and process register/unregister logic
                    if (recognitionData.IsRegistered())
                    {
                        userId = Convert.ToString(recognitionData.QueryUserID());

                        if (doUnregister)
                        {
                            recognitionData.UnregisterUser();
                            SaveDatabaseToFile();
                            doUnregister = false;
                            // Drop the persisted display name for this id.
                            if (_persistentDict.ContainsKey(userId) == true)
                            {
                                _persistentDict.Remove(userId);
                            }
                        }
                    }
                    else
                    {
                        if (doRegister)
                        {
                            int uId = recognitionData.RegisterUser();
                            SaveDatabaseToFile();

                            // Persist the display name entered for the new id.
                            if (newUserName != "")
                            {
                                if (_persistentDict.ContainsKey(uId.ToString()) == false)
                                {
                                    _persistentDict.Add(uId.ToString(), newUserName);
                                    _persistentDict.Flush();
                                    newUserName = "";
                                }
                            }

                            // Capture a jpg image of registered user
                            colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                            doRegister = false;
                        }
                        else
                        {
                            userId = "New User";
                        }
                    }
                }
            }
            else
            {
                userId = "No users in view";
            }
        }

        // (A large commented-out hand-tracking/gesture prototype was removed
        // here; recover it from version control if it is ever needed.)

        // Display the color stream and other UI elements
        UpdateUI(colorBitmap, expression);

        // Release resources
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        sample.color.Dispose();

        // Release the frame
        senseManager.ReleaseFrame();
    }
}
/// <summary>
/// Draws per-face overlays onto Image: a bounding box (red for the primary
/// face, yellow for others) and an "#index id/name" label. Also exports a
/// 128x128 crop of face #0 into FaceImage.
/// </summary>
/// <param name="moduleOutput">Face-tracking output for the current frame.</param>
public void DrawInformation(PXCMFaceData moduleOutput)
{
    for (var i = 0; i < moduleOutput.QueryNumberOfDetectedFaces(); i++)
    {
        PXCMFaceData.Face face = moduleOutput.QueryFaceByIndex(i);
        if (face == null)
        {
            continue;
        }

        #region Face tracking data
        // Face location data; skip faces that have none this frame.
        PXCMFaceData.DetectionData detection = face.QueryDetection();
        if (detection == null)
        {
            continue;
        }
        // Bounding rectangle of the face.
        PXCMRectI32 range;
        detection.QueryBoundingRect(out range);
        #endregion

        #region Overlays (standalone snapshot only for face index 0)
        if (i == 0)
        {
            lock (PicLock)
            {
                FaceImage = new Bitmap(128, 128);
                using (Graphics g = Graphics.FromImage(FaceImage))
                {
                    g.DrawImage(Image,
                        new Rectangle(0, 0, 128, 128),
                        new Rectangle(range.x, range.y, range.w, range.h),
                        GraphicsUnit.Pixel);
                }
            }
        }

        // Draw the user's bounding box and label.
        lock (PicLock)
        using (var g = Graphics.FromImage(Image))
        {
            Brush brush = i == 0 ? Brushes.Red : Brushes.Yellow;
            // Fix: the cloned Pen and the two Font instances were never
            // disposed; scope them with 'using' (and reuse a single Font).
            using (Pen pan = i == 0 ? (Pen)Pens.Red.Clone() : (Pen)Pens.Yellow.Clone())
            using (var labelFont = new Font("Arial", 14))
            {
                pan.Width = 4;
                g.DrawRectangle(
                    pan,
                    new Rectangle(
                        range.x, range.y,
                        range.w, range.h
                        ));

                // Fix: QueryRecognition() can return null (every sibling
                // method guards it); treat null as "not registered".
                var recognition = face.QueryRecognition();
                var userId = recognition != null ? recognition.QueryUserID() : -1;

                var text = "未註冊使用者"; // displayed label: "unregistered user"
                try
                {
                    if (userId != -1)
                    {
                        text = UserTable[userId];
                    }
                }
                catch
                {
                    // Unknown id (no UserTable entry): keep the default label.
                }

                var label = "#" + i + " " + text;
                var size = g.MeasureString(label, labelFont); // label extent
                g.FillRectangle(
                    brush,
                    new Rectangle(
                        range.x, range.y,
                        (int)size.Width, 20
                        ));
                g.DrawString(
                    label,
                    labelFont,
                    Brushes.Black,
                    range.x, range.y);
            }
        }
        #endregion
    }
}
/// <summary>
/// Camera loop: tracks the first detected face, derives coarse head-pose
/// and eye-direction flags from pose angles and expression intensities,
/// decides whether the user is looking forward, and handles recognition
/// register/unregister requests before refreshing the UI.
/// </summary>
private void ProcessingThread()
{
    // Start AcquireFrame/ReleaseFrame loop
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire the color image data
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Get face data
        if (faceData != null)
        {
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();
            if (numFacesDetected > 0)
            {
                // Get the first face detected (index 0)
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                face.QueryExpressions();
                PXCMFaceData.PoseData poseData = face.QueryPose();
                // PXCMPoint3DF32 outHeadPosition = new PXCMPoint3DF32(); //F200 has added confidence into struct
                PXCMFaceData.HeadPosition outHeadPosition = new PXCMFaceData.HeadPosition();
                //processing the head pose data to find the head center position
                poseData.QueryHeadPosition(out outHeadPosition);
                System.Windows.Media.Media3D.Point3DCollection points = new System.Windows.Media.Media3D.Point3DCollection();
                points.Add(new System.Windows.Media.Media3D.Point3D(outHeadPosition.headCenter.x, outHeadPosition.headCenter.y, outHeadPosition.headCenter.z));
                Console.WriteLine("head center position: " + points);

                // poseData.QueryHeadPosition(out outHeadPosition);
                PXCMFaceData.PoseEulerAngles outPoseEulerAngles = new PXCMFaceData.PoseEulerAngles();
                poseData.QueryPoseAngles(out outPoseEulerAngles);
                roll = (int)outPoseEulerAngles.roll;
                pitch = (int)outPoseEulerAngles.pitch;
                yaw = (int)outPoseEulerAngles.yaw;
                // PXCMFaceData.LandmarkType.LANDMARK_EYE_LEFT_CENTER what can I do with this?

                // Coarse head-orientation flags from the Euler angles.
                // NOTE(review): the +12 offset makes "head up" trigger for
                // raw pitch > -2 degrees - confirm this calibration.
                if (pitch + 12 > 10)
                {
                    headUp = true;
                }
                else
                {
                    headUp = false;
                }
                if (pitch < -10)
                {
                    headDown = true;
                }
                else
                {
                    headDown = false;
                }
                if (roll > 5)
                {
                    headTiltLeft = true;
                }
                else
                {
                    headTiltLeft = false;
                }
                if (roll < -5)
                {
                    headTiltRight = true;
                }
                else
                {
                    headTiltRight = false;
                }
                if (yaw > 5)
                {
                    headTurnLeft = true;
                }
                else
                {
                    headTurnLeft = false;
                }
                if (yaw < -5)
                {
                    headTurnRight = true;
                }
                else
                {
                    headTurnRight = false;
                }
                //Console.WriteLine("Rotation: " + outPoseEulerAngles.roll + " " + outPoseEulerAngles.pitch + " " + outPoseEulerAngles.yaw);

                // retrieve the expression information
                PXCMFaceData.ExpressionsData edata = face.QueryExpressions();
                PXCMFaceData.ExpressionsData.FaceExpressionResult smileScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesUpScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesDownScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnLeftScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnRightScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult headTiltedLeftScore;
                PXCMFaceData.ExpressionsData.FaceExpressionResult headTurnedLeftScore;
                // PXCMFaceData.ExpressionsData.FaceExpressionResult headUpScore;
                //PXCMFaceData.ExpressionsData.FaceExpressionResult headDownScore;
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out smileScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_UP, out eyesUpScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_DOWN, out eyesDownScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_LEFT, out eyesTurnLeftScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_RIGHT, out eyesTurnRightScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TILT_LEFT, out headTiltedLeftScore);
                edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TURN_LEFT, out headTurnedLeftScore);
                // edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_UP, out headUpScore);
                //edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_DOWN, out headDownScore);

                // An intensity of exactly 100 is treated as "fully on" below.
                smile = smileScore.intensity;
                eyesUp = eyesUpScore.intensity;
                if (eyesUp == 100)
                {
                    eyeIsUp = true;
                }
                else
                {
                    eyeIsUp = false;
                }
                eyesDown = eyesDownScore.intensity;
                if (eyesDown == 100)
                {
                    eyeIsDown = true;
                }
                else
                {
                    eyeIsDown = false;
                }
                eyesTurnLeft = eyesTurnLeftScore.intensity;
                eyesTurnRight = eyesTurnRightScore.intensity;
                // headTiltLeft = headTiltedLeftScore.intensity;
                // headTurnLeft= headTurnedLeftScore.intensity;
                // headUp = headUpScore.intensity;
                //headDown = headDownScore.intensity;

                // Use the camera's finest depth accuracy for face tracking.
                PXCMCapture.Device device = senseManager.captureManager.device;
                device.SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_FINEST);

                // eyeIsUP= CheckFaceExpression(edata, FaceExpression.EXPRESSION_EYES_UP, 15);
                // Heuristic "looking forward": a tilted/turned, raised head
                // with the eyes compensating toward the camera; otherwise
                // fall back to the eyes-up flag.
                if ((headTiltLeft | headTurnLeft) & headUp & (eyesTurnLeft == 100) & (!eyeIsDown))
                {
                    looksForward = true;
                }
                else if ((headTiltRight | headTurnRight) & headUp & (eyesTurnRight == 100) & (!eyeIsDown))
                {
                    looksForward = true;
                }
                /* else if (headTiltRight & (headDown|headUp) & (!headTurnRight) & (eyesTurnRight==100))
                 *  looksForward = true;
                 * else if (headTiltLeft & (headDown|headUp) & (!headTurnLeft) & (eyesTurnLeft == 100))
                 *  looksForward = true;
                 * */
                else
                {
                    looksForward = eyeIsUp;
                }
                // headTiltLeftThreshold = CheckFaceExpression(edata, FaceExpression.EXPRESSION_HEAD_TILT_LEFT, 15);

                //csv mona
                // var csv = new StringBuilder();
                // var newLine = string.Format("{0},{1},{2},{3},{4}{5}", DateTime.Now.ToString("dd-MM-yyyy-hh:mm:ss:fff"), roll, pitch, yaw, eyesUp, Environment.NewLine);
                // csv.Append(newLine);
                // string pathString = System.IO.Path.Combine(filePath, fileName);
                // File.AppendAllText(pathString, csv.ToString());

                // Retrieve face location data
                PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                if (faceDetectionData != null)
                {
                    PXCMRectI32 faceRectangle;
                    faceDetectionData.QueryBoundingRect(out faceRectangle);
                    faceRectangleHeight = faceRectangle.h;
                    faceRectangleWidth = faceRectangle.w;
                    faceRectangleX = faceRectangle.x;
                    faceRectangleY = faceRectangle.y;
                }

                // Process face recognition data
                if (face != null)
                {
                    // Retrieve the recognition data instance
                    recognitionData = face.QueryRecognition();

                    // Set the user ID and process register/unregister logic
                    if (recognitionData.IsRegistered())
                    {
                        userId = Convert.ToString(recognitionData.QueryUserID());
                        if (doUnregister)
                        {
                            recognitionData.UnregisterUser();
                            doUnregister = false;
                        }
                    }
                    else
                    {
                        if (doRegister)
                        {
                            recognitionData.RegisterUser();
                            // Capture a jpg image of registered user
                            colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                            doRegister = false;
                        }
                        else
                        {
                            userId = "Unrecognized";
                        }
                    }
                }
            }
            else
            {
                userId = "No users in view";
            }
        }

        // Display the color stream and other UI elements
        UpdateUI(colorBitmap);

        // Release resources
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        sample.color.Dispose();

        // Release the frame
        senseManager.ReleaseFrame();
    }
}
/// <summary>
/// Camera loop: grabs each color frame, tracks the first detected face,
/// handles recognition register/unregister requests, and refreshes the UI.
/// Runs until AcquireFrame reports an error.
/// </summary>
private void ProcessingThread()
{
    // Start AcquireFrame/ReleaseFrame loop
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire the color image data
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Get face data
        if (faceData != null)
        {
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();
            if (numFacesDetected > 0)
            {
                // Get the first face detected (index 0)
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                // Retrieve face location data
                PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                if (faceDetectionData != null)
                {
                    PXCMRectI32 faceRectangle;
                    faceDetectionData.QueryBoundingRect(out faceRectangle);
                    // Publish the rectangle via fields read by the UI.
                    faceRectangleHeight = faceRectangle.h;
                    faceRectangleWidth = faceRectangle.w;
                    faceRectangleX = faceRectangle.x;
                    faceRectangleY = faceRectangle.y;
                }

                // Process face recognition data
                if (face != null)
                {
                    // Retrieve the recognition data instance
                    recognitionData = face.QueryRecognition();

                    // Set the user ID and process register/unregister logic
                    if (recognitionData.IsRegistered())
                    {
                        userId = Convert.ToString(recognitionData.QueryUserID());
                        if (doUnregister)
                        {
                            recognitionData.UnregisterUser();
                            doUnregister = false;
                        }
                    }
                    else
                    {
                        if (doRegister)
                        {
                            recognitionData.RegisterUser();
                            // Capture a jpg image of registered user
                            colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                            doRegister = false;
                        }
                        else
                        {
                            userId = "Unrecognized";
                        }
                    }
                }
            }
            else
            {
                userId = "No users in view";
            }
        }

        // Display the color stream and other UI elements
        UpdateUI(colorBitmap);

        // Release resources
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        sample.color.Dispose();

        // Release the frame
        senseManager.ReleaseFrame();
    }
}
// Per-frame face update: refreshes the face-tracking data, moves the overlay
// rectangle for each detected face, and shows a recognition-id label.
private void updateFaceFrame()
{
    // Grab the current frame data.
    PXCMCapture.Sample sample = senceManager.QuerySample();
    UpdateColorImage(sample.color);

    // Update the SenseManager module's face data.
    faceData.Update();

    // Number of faces detected in this frame.
    int numFaces = faceData.QueryNumberOfDetectedFaces();
    // NOTE(review): senceManager is already dereferenced above, so this null
    // check comes too late to protect QuerySample().
    if (senceManager != null)
    {
        // Gather info and draw overlays for each face.
        for (int i = 0; i < numFaces; ++i)
        {
            // Face information for this index.
            PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

            // Face position, from the depth-based detector.
            var detection = face.QueryDetection();
            int face_x = 0;
            int face_y = 0;
            if (detection != null)
            {
                PXCMRectI32 faceRect;
                detection.QueryBoundingRect(out faceRect);

                // Keep the face position for label placement below.
                face_x = faceRect.x;
                face_y = faceRect.y;

                // Move and resize the overlay rectangle to match the face.
                TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                rect[i].Width = faceRect.w;
                rect[i].Height = faceRect.h;
                rect[i].Stroke = Brushes.Blue;
                rect[i].StrokeThickness = 3;
                rect[i].RenderTransform = transform;
            }

            // Recognition result for this face.
            rdata = face.QueryRecognition();
            if (rdata.IsRegistered())
            {
                // Check whether this is a known (registered) id.
                int uid = rdata.QueryUserID();
                if (uid != -1)
                {
                    {
                        faceID_tb[i].Text = "Recognition:" + uid;
                        faceID_tb[i].RenderTransform = new TranslateTransform(face_x, face_y - 30);
                    }
                }
                else
                {
                    {
                        faceID_tb[i].Text = "Recognition:" + "NO";
                        faceID_tb[i].RenderTransform = new TranslateTransform(face_x, face_y - 30);
                    }
                }
            }
        }
    }
}
//---------------------------------------------------------ProcessingThread-----------------------------------------------------------------------------------------------
/* Camera loop: acquires each color frame, tracks the first detected face,
 * handles recognition register/unregister requests, and streams the face
 * rectangle plus user id to clients via Server.sendMsg. */
private void ProcessingThread()
{
    /* Start AcquireFrame/ReleaseFrame loop.
     * AcquireFrame(true) pauses frame processing, reads the current frame
     * and stores it where QuerySample() can access it; processing resumes
     * when ReleaseFrame() is called further below. */
    try
    {
        while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            /* Acquire the color image data: query the sample saved by
             * AcquireFrame(true) and keep it in 'sample' (raw form). */
            PXCMCapture.Sample sample = senseManager.QuerySample();

            /* Structure that receives the image converted from the raw
             * sample by color.AcquireAccess(). */
            PXCMImage.ImageData colorData;

            /* Convert the raw image into ImageData through the last
             * parameter of color.AcquireAccess(out colorData). */
            sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

            /* Convert to Bitmap. */
            Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

            // Get face data
            if (faceData != null)
            {
                faceData.Update();
                numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                if (numFacesDetected > 0)
                {
                    // Get the first face detected (index 0)
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                    // Retrieve face location data
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    if (faceDetectionData != null)
                    {
                        PXCMRectI32 faceRectangle;
                        faceDetectionData.QueryBoundingRect(out faceRectangle);
                        faceRectangleHeight = faceRectangle.h;
                        faceRectangleWidth = faceRectangle.w;
                        faceRectangleX = faceRectangle.x;
                        faceRectangleY = faceRectangle.y;
                        //int faceRectangleX2 = (faceRectangleX - 510) * -1;
                    }

                    // Process face recognition data
                    if (face != null)
                    {
                        // Retrieve the recognition data instance
                        recognitionData = face.QueryRecognition();

                        // Set the user ID and process register/unregister logic
                        if (recognitionData.IsRegistered())
                        {
                            userId = Convert.ToString(recognitionData.QueryUserID());
                            // Only load the user when the id changed since
                            // the previous frame.
                            if (flagUserId != userId)
                            {
                                Actions.LoadUser(Convert.ToInt16(userId));
                                flagUserId = userId;
                            }
                            if (doUnregister)
                            {
                                recognitionData.UnregisterUser();
                                doUnregister = false;
                            }
                        }
                        else
                        {
                            if (doRegister)
                            {
                                recognitionData.RegisterUser();
                                // Capture a jpg image of registered user
                                colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                                doRegister = false;
                            }
                            else
                            {
                                userId = "Unrecognized";
                            }
                        }
                    }
                }
                else
                {
                    userId = "No users in view";
                }
            }

            // Display the color stream and other UI elements
            UpdateUI(colorBitmap);

            // Release resources
            colorBitmap.Dispose();
            sample.color.ReleaseAccess(colorData);
            sample.color.Dispose();

            /* Release the frame: ReleaseFrame releases the lock on the
             * current frame so frame processing continues. */
            senseManager.ReleaseFrame();

            // Broadcast the face rectangle and user id to clients.
            coords = faceRectangleX.ToString() + " " + faceRectangleY.ToString() + " " + faceRectangleWidth.ToString() + " " + faceRectangleHeight.ToString();
            Server.sendMsg(255, "rect", coords, userId);
        }
    }
    catch
    {
        // NOTE(review): swallows every exception; the processing thread dies
        // silently after printing this marker.
        Console.WriteLine("ERRORRRRRRRRRRRRRRRRRRRRRR");
    }
}