/// <summary>
/// Processes a face frame: reads the latest face detection results and shows them in the UI.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments carrying the face frame reference</param>
private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    // Retrieve the face reference
    FaceFrameReference faceRef = e.FrameReference;

    if (faceRef == null)
    {
        return;
    }

    // Acquire the face frame
    using (FaceFrame faceFrame = faceRef.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Retrieve the face frame result
        FaceFrameResult frameResult = faceFrame.FaceFrameResult;

        // FIX: FaceFrameResult is null while no face is being tracked; the original
        // dereferenced it unconditionally and threw a NullReferenceException.
        if (frameResult == null)
        {
            return;
        }

        // Display the detection result for each tracked face property.
        HappyResult.Text = frameResult.FaceProperties[FaceProperty.Happy].ToString();
        EngagedResult.Text = frameResult.FaceProperties[FaceProperty.Engaged].ToString();
        GlassesResult.Text = frameResult.FaceProperties[FaceProperty.WearingGlasses].ToString();
        LeftEyeResult.Text = frameResult.FaceProperties[FaceProperty.LeftEyeClosed].ToString();
        RightEyeResult.Text = frameResult.FaceProperties[FaceProperty.RightEyeClosed].ToString();
        MouthOpenResult.Text = frameResult.FaceProperties[FaceProperty.MouthOpen].ToString();
        MouthMovedResult.Text = frameResult.FaceProperties[FaceProperty.MouthMoved].ToString();
        LookingAwayResult.Text = frameResult.FaceProperties[FaceProperty.LookingAway].ToString();
    }
}
/// <summary>
/// Handles the face frame data arriving from the sensor: stores valid results
/// and draws the face bounding box converted to a FaceRectangle.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame != null)
        {
            // get the index of the face source from the face source array
            int index = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

            // check if this face frame has valid face frame results
            if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
            {
                // store this face frame result to draw later
                this.faceFrameResults[index] = faceFrame.FaceFrameResult;

                RectI oldFace = faceFrame.FaceFrameResult.FaceBoundingBoxInColorSpace;
                FaceRectangle newface = new FaceRectangle();
                newface.Left = oldFace.Left;
                newface.Top = oldFace.Top;

                // FIX: in color-space coordinates Bottom > Top and Right > Left, so the
                // original (Top - Bottom) and (Left - Right) produced negative sizes.
                newface.Height = (oldFace.Bottom - oldFace.Top);
                newface.Width = (oldFace.Right - oldFace.Left);

                DrawRect(newface);
            }
            else
            {
                // indicates that the latest face frame result from this reader is invalid
                this.faceFrameResults[index] = null;
            }
        }
    }
}
/// <summary>
/// Handles face frame arrival: copies the Yes/No detection results into the
/// shared face state and notifies subscribers of the change.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnFaceReaderFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        FaceFrameResult result = frame.FaceFrameResult;
        if (result == null)
        {
            return;
        }

        // A property counts as "true" only on a definite Yes detection.
        _faceState.IsHappy = result.FaceProperties[FaceProperty.Happy] == DetectionResult.Yes;
        _faceState.IsLeftEyeClosed = result.FaceProperties[FaceProperty.LeftEyeClosed] == DetectionResult.Yes;
        _faceState.IsRightEyeClosed = result.FaceProperties[FaceProperty.RightEyeClosed] == DetectionResult.Yes;
        _faceState.IsMouthMoved = result.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Yes;
        _faceState.IsMouthOpen = result.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Yes;
        _faceState.IsWearingGlasses = result.FaceProperties[FaceProperty.WearingGlasses] == DetectionResult.Yes;

        // FIX: snapshot the delegate before invoking; the original check-then-call
        // pattern can throw if the last subscriber detaches between the two reads.
        var handler = this.OnFaceChanged;
        if (handler != null)
        {
            handler(sender, _faceState);
        }
    }
}
/// <summary>
/// Handles face frames from this reader's own face source and caches mouth
/// open-state plus mouth geometry (in infrared space) for later use.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Only handle frames produced by our own face source.
        if (faceFrameSource != faceFrame.FaceFrameSource)
        {
            return;
        }

        // store this face frame result
        FaceFrameResult faceFrameResult = faceFrame.FaceFrameResult;
        if (faceFrameResult == null || faceFrameResult.FaceProperties == null)
        {
            return;
        }

        // FIX: the original compared against (DetectionResult.Yes | DetectionResult.Maybe),
        // a bitwise OR of non-flags enum members that only matched by accident.
        // Treat both Yes and Maybe as "open", matching the commented-out `!= No` intent.
        DetectionResult mouthState = faceFrameResult.FaceProperties[FaceProperty.MouthOpen];
        isMouthOpen = mouthState == DetectionResult.Yes || mouthState == DetectionResult.Maybe;

        mouthCornerLeft = faceFrameResult.FacePointsInInfraredSpace[FacePointType.MouthCornerLeft];
        mouthCornerRight = faceFrameResult.FacePointsInInfraredSpace[FacePointType.MouthCornerRight];

        // Derive a mouth rectangle centered between the two corners; the height
        // is approximated as half the corner-to-corner width.
        mouthCenterY = (int)((mouthCornerLeft.Y + mouthCornerRight.Y) / 2f);
        mouthLeft = (int)mouthCornerLeft.X;
        mouthWidth = (int)(mouthCornerRight.X - mouthCornerLeft.X);
        mouthHeight = mouthWidth / 2;
        mouthTop = mouthCenterY - mouthHeight / 2;
    }
}
/// <summary>
/// Handles arrival of a face frame and feeds any non-null result to the trackers.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    FaceFrameReference reference = e.FrameReference;
    if (reference == null)
    {
        return;
    }

    using (FaceFrame frame = reference.AcquireFrame())
    {
        if (frame != null)
        {
            FaceFrameResult result = frame.FaceFrameResult;
            if (result != null)
            {
                // Forward the fresh result to the tracking logic.
                UpdateTrackers(result);
            }
        }
    }
}
/// <summary>
/// Stores the latest tracked face frame result in the slot matching its face
/// source; on failure shows the error and closes the window.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FaceFrameReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    try
    {
        using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
        {
            // Only keep frames that exist and still have a valid tracking id.
            if (faceFrame != null && faceFrame.IsTrackingIdValid)
            {
                int slot = GetFaceSourceIndex(faceFrame.FaceFrameSource);
                faceFrameResults[slot] = faceFrame.FaceFrameResult;
            }
        }
    }
    catch (Exception exception)
    {
        // Surface the failure to the user, then shut the window down.
        MessageBox.Show(exception.Message);
        Close();
    }
}
/// <summary>
/// Reads the latest face frame and streams the nose position (color space)
/// over the serial port as "x,y".
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Get the face frame result
        FaceFrameResult result = frame.FaceFrameResult;
        if (result == null)
        {
            return;
        }

        // Nose landmark mapped into the color space.
        var nose = result.FacePointsInColorSpace[FacePointType.Nose];

        // Transmit the coordinates to the attached device.
        SerialPortHelper.SendBytesOverCom(sp, nose.X.ToString() + ',' + nose.Y.ToString());
    }
}
/// <summary>
/// Handles the event raised when a FaceFrame becomes available: updates the
/// face-property strings, the rotation quaternion, and redraws the face points.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Ignore frames whose tracking id is no longer valid.
        if (!faceFrame.IsTrackingIdValid)
        {
            return;
        }

        var result = faceFrame.FaceFrameResult;
        if (result == null)
        {
            return;
        }

        // Expression/state results exposed as bindable string properties.
        this.Happy = result.FaceProperties[FaceProperty.Happy].ToString();
        this.FaceEngagement = result.FaceProperties[FaceProperty.Engaged].ToString();
        this.Glasses = result.FaceProperties[FaceProperty.WearingGlasses].ToString();
        this.LeftEyeClosed = result.FaceProperties[FaceProperty.LeftEyeClosed].ToString();
        this.RightEyeClosed = result.FaceProperties[FaceProperty.RightEyeClosed].ToString();
        this.MouthOpen = result.FaceProperties[FaceProperty.MouthOpen].ToString();
        this.MouthMoved = result.FaceProperties[FaceProperty.MouthMoved].ToString();
        this.LookingAway = result.FaceProperties[FaceProperty.LookingAway].ToString();

        // Face orientation.
        this.FaceRotation = result.FaceRotationQuaternion;

        // FIX: dispose the DrawingContext deterministically (Dispose closes it);
        // the original leaked the open context if an exception was thrown before Close().
        using (var drawContext = drawVisual.RenderOpen())
        {
            // Draw each detected face point onto the visual.
            foreach (var point in result.FacePointsInColorSpace)
            {
                if (point.Key == FacePointType.None)
                {
                    continue;
                }

                drawContext.DrawEllipse(facePointColor[(int)point.Key], null, new Point(point.Value.X, point.Value.Y), 5, 5);
            }
        }

        // Re-render the bitmap and notify binding clients.
        _FacePointBitmap.Clear();
        _FacePointBitmap.Render(drawVisual);
        OnPropertyChanged("FacePointBitmap");
    }
}
/// <summary>
/// Caches the most recently acquired face frame result.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        _faceResult = frame.FaceFrameResult;
    }
}
/// <summary>
/// Queues the incoming face frame event for deferred processing.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    if (e == null)
    {
        return;
    }

    // The queue is shared with a consumer elsewhere, so guard the enqueue.
    lock (faceFrameArrivedEvents)
    {
        faceFrameArrivedEvents.Enqueue(e);
    }
}
/// <summary>
/// Forwards each acquired face frame to the expression evaluator.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FaceFrameReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        evaluateFaceExpression(frame);
    }
}
/// <summary>
/// Handles the face frame data arriving from the sensor: keeps the latest valid
/// face frame result, or clears it when the frame carries no usable data.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // check if this face frame has valid face frame results
        // (The original also built a per-property status string and a "happy"
        // flag, but every consumer of those was commented out, so that dead
        // computation and the commented-out code were removed.)
        if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
        {
            this.faceFrameResult = faceFrame.FaceFrameResult;
        }
        else
        {
            // indicates that the latest face frame result from this reader is invalid
            this.faceFrameResult = null;
        }
    }
}
/// <summary>
/// Face frame event: caches the latest result. The face points, mapped in the
/// color space, are consumed in Posetures.cs.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void _faceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var face_frame = e.FrameReference.AcquireFrame())
    {
        if (face_frame == null)
        {
            return;
        }

        face_result = face_frame.FaceFrameResult;
    }
}
/// <summary>
/// Stores the newest face frame result, keyed by the frame's tracking id.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void faceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        faceFrameResult[faceFrame.TrackingId] = faceFrame.FaceFrameResult;
    }
}
/// <summary>
/// Records the latest result in the slot belonging to the face source that
/// produced this frame.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Face_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        int slot = GetFaceSourceIndex(faceFrame.FaceFrameSource);
        faceFrameResults[slot] = faceFrame.FaceFrameResult;
    }
}
/// <summary>
/// Keeps a reference to the most recently acquired face frame result.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    if (e.FrameReference == null)
    {
        return;
    }

    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame != null)
        {
            _CurrentFaceFrameResult = faceFrame.FaceFrameResult;
        }
    }
}
/// <summary>
/// Processes a normal-resolution face frame: records the data, refreshes the
/// diagnostic text, and positions/toggles the eye ellipses on the canvas.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void NormalFaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        FaceFrameResult result = frame.FaceFrameResult;
        if (result == null)
        {
            return;
        }

        // Record the frame data and refresh the on-screen dump.
        var faceNormal = new FaceNormal(result);
        _faceData.addNormalData(faceNormal);
        infoNormal.Text = faceNormal.dump_str();
        infoNormal.Text += _faceData.dump_str();

        // Eye positions, mapped in the color space.
        var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
        var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];

        // Center each ellipse on its eye.
        Canvas.SetLeft(ellipseEyeLeft, eyeLeft.X - ellipseEyeLeft.Width / 2.0);
        Canvas.SetTop(ellipseEyeLeft, eyeLeft.Y - ellipseEyeLeft.Height / 2.0);
        Canvas.SetLeft(ellipseEyeRight, eyeRight.X - ellipseEyeRight.Width / 2.0);
        Canvas.SetTop(ellipseEyeRight, eyeRight.Y - ellipseEyeRight.Height / 2.0);

        // Hide an ellipse whenever its eye is (possibly) closed.
        bool leftClosed = faceNormal.eyeLeftClosed == DetectionResult.Yes || faceNormal.eyeLeftClosed == DetectionResult.Maybe;
        bool rightClosed = faceNormal.eyeRightClosed == DetectionResult.Yes || faceNormal.eyeRightClosed == DetectionResult.Maybe;
        ellipseEyeLeft.Visibility = leftClosed ? Visibility.Collapsed : Visibility.Visible;
        ellipseEyeRight.Visibility = rightClosed ? Visibility.Collapsed : Visibility.Visible;
    }
}
/// <summary>
/// Raises FaceResultAcquired for every tracked face frame, pairing the source's
/// tracking id with the frame result.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame frame = e.FrameReference.AcquireFrame())
    {
        // Skip missing frames and frames whose tracking id is no longer valid.
        if (frame == null || !frame.IsTrackingIdValid)
        {
            return;
        }

        var handler = this.FaceResultAcquired;
        if (handler != null)
        {
            handler(this, new FaceFrameResultEventArgs(this.frameSource.TrackingId, frame.FaceFrameResult));
        }
    }
}
/// <summary>
/// Kinects the service face frame arrived: stores a non-null face frame result
/// in the slot belonging to the originating face source.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The <see cref="FaceFrameArrivedEventArgs"/> instance containing the event data.</param>
private void KinectServiceFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // get the index of the face source from the face source array
        int slot = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

        // Only frames carrying a result are stored; stale slots stay untouched.
        if (faceFrame.FaceFrameResult != null)
        {
            this.faceFrameResults[slot] = faceFrame.FaceFrameResult;
        }
    }
}
/// <summary>
/// Stores the latest face frame result for the originating face source,
/// clearing the slot when the tracking id is no longer valid.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
static void _faceFrameReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        // FIX: the original dereferenced faceFrame.FaceFrameSource BEFORE the
        // null check, throwing NullReferenceException whenever AcquireFrame
        // returned null (which is routine for face readers).
        if (faceFrame == null)
        {
            return;
        }

        // get the index of the face source from the face source array
        int index = GetFaceSourceIndex(faceFrame.FaceFrameSource);

        if (!faceFrame.IsTrackingIdValid)
        {
            _faceFrameResults[index] = null;
            return;
        }

        // store this face frame result
        _faceFrameResults[index] = faceFrame.FaceFrameResult;
    }
}
/// <summary>
/// Handles the face frame data arriving from the sensor, keeping only results
/// that pass box/point validation.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null || !faceFrame.IsTrackingIdValid)
        {
            return;
        }

        // Store the validated result; null indicates that the latest face frame
        // result from this reader is invalid.
        this.faceFrameResults = this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult)
            ? faceFrame.FaceFrameResult
            : null;
    }
}
/// <summary>
/// Pulls the face frame out of the event args and stores its result in the
/// slot matching the originating face source.
/// </summary>
/// <param name="e">event arguments carrying the face frame reference</param>
void UpdateFaceFrame(FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        // Skip missing frames and frames whose tracking id is no longer valid.
        if (faceFrame == null || !faceFrame.IsTrackingIdValid)
        {
            return;
        }

        int slot = GetFaceSourceIndex(faceFrame.FaceFrameSource);
        faceFrameResults[slot] = faceFrame.FaceFrameResult;
    }
}
/// <summary>
/// Callback for an arriving face: validates the bounding box and stores
/// (or clears) the result for the originating face source.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Face_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // index of the source of this face
        int slot = GetFaceSourceIndex(faceFrame.FaceFrameSource);

        // Store the validated result of the frame, or null for an invalid one.
        _faceFrameResults[slot] = ValidateFaceBoundingBox(faceFrame.FaceFrameResult)
            ? faceFrame.FaceFrameResult
            : null;
    }
}
/// <summary>
/// Handler invoked when a face frame arrives: caches the result when the frame
/// contains valid box/point data, and clears it otherwise.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Validate the frame before keeping its result; null marks the cached
        // result as invalid.
        this.faceFrameResult = this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult)
            ? faceFrame.FaceFrameResult
            : null;
    }
}
/// <summary>
/// Handles HD face frames: drives the avatar's left-eye blend shape (index 1)
/// from the LeftEyeClosed detection result.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void HdFaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    // Retrieve the face reference
    FaceFrameReference faceRef = e.FrameReference;
    if (faceRef == null)
    {
        return;
    }

    // Acquire the face frame
    using (FaceFrame faceFrame = faceRef.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Retrieve the face frame result
        FaceFrameResult frameResult = faceFrame.FaceFrameResult;

        // FIX: FaceFrameResult is null while no face is being tracked; the
        // original dereferenced it unconditionally and threw a NullReferenceException.
        if (frameResult == null)
        {
            return;
        }

        // FIX: compare the enum value directly instead of the fragile
        // `.ToString() == "Yes"` string comparison.
        if (frameResult.FaceProperties[FaceProperty.LeftEyeClosed] == DetectionResult.Yes)
        {
            skinnedMeshRenderer.SetBlendShapeWeight(1, 100);
        }
        else
        {
            skinnedMeshRenderer.SetBlendShapeWeight(1, 0);
        }

        // NOTE(review): leftover debug trace kept to preserve behavior — consider removing.
        Debug.Log("hui");
    }
}
/// <summary>
/// Extracts the five face landmarks (eyes, nose, mouth corners) in color space
/// into integer fields and flags whether a face was detected this frame.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void faceFrameReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Count every processed frame.
        ++idx;

        FaceFrameResult result = faceFrame.FaceFrameResult;
        if (result == null)
        {
            faceDetected = false;
            return;
        }

        var points = result.FacePointsInColorSpace;
        var eyeLeft = points[FacePointType.EyeLeft];
        var eyeRight = points[FacePointType.EyeRight];
        var nose = points[FacePointType.Nose];
        var mouthLeft = points[FacePointType.MouthCornerLeft];
        var mouthRight = points[FacePointType.MouthCornerRight];

        // Truncate each landmark coordinate into the integer fields.
        eyeLX = (int)eyeLeft.X;
        eyeLY = (int)eyeLeft.Y;
        eyeRX = (int)eyeRight.X;
        eyeRY = (int)eyeRight.Y;
        noseX = (int)nose.X;
        noseY = (int)nose.Y;
        mouthLX = (int)mouthLeft.X;
        mouthLY = (int)mouthLeft.Y;
        mouthRX = (int)mouthRight.X;
        mouthRY = (int)mouthRight.Y;

        faceDetected = true;
    }
}
/// <summary>
/// Crops the face region out of the matching color frame and raises the
/// face-frame-arrived event with the pixels, bounding box, and tracking id.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    if (e.FrameReference == null)
    {
        return;
    }

    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        FaceFrameResult frameResult = faceFrame.FaceFrameResult;
        if (frameResult == null)
        {
            return;
        }

        using (ColorFrame colorFrame = faceFrame.ColorFrameReference.AcquireFrame())
        {
            if (colorFrame == null)
            {
                return;
            }

            // Expand the detected bounding box before cropping the color image.
            RectF faceBox = frameResult.FaceBoundingBoxInColorSpace.Offset(0.40f, 0.50f, 0.70f, colorFrame.FrameDescription.Width, colorFrame.FrameDescription.Height);

            // Copy the frame pixels, converting when the raw format differs
            // from the target format.
            if (colorFrame.RawColorImageFormat == ImageFormat)
            {
                colorFrame.CopyRawFrameDataToArray(_ColorPixels);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(_ColorPixels, ImageFormat);
            }

            // Crop the face region out of the full color buffer.
            byte[] facePixels = _ColorPixels.ExtractPixelsFromImage(
                Convert.ToInt32(faceBox.Y),
                Convert.ToInt32(faceBox.X),
                Convert.ToInt32(faceBox.Width),
                Convert.ToInt32(faceBox.Height),
                _ColorFrameDesc.Width,
                Convert.ToInt32(_ColorFrameDesc.BytesPerPixel));

            RaiseFaceFrameArrivedEvent(facePixels, faceBox, faceFrame.TrackingId);
        }
    }
}
/// <summary>
/// Handles the face frame data arriving from the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Slot of the face source this frame belongs to.
        int slot = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

        // Keep validated results; null marks the latest result from this
        // reader as invalid.
        this.faceFrameResults[slot] = this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult)
            ? faceFrame.FaceFrameResult
            : null;
    }
}
/// <summary>
/// Scales, rotates, and positions the overlay image between the user's eyes
/// so it appears anchored to the face in 3D.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        FaceFrameResult result = frame.FaceFrameResult;
        if (result == null)
        {
            return;
        }

        // Eye positions, mapped in the color space.
        Point eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
        Point eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];

        // Midpoint between the eyes — the anchor for the image.
        Point center = new Point((eyeRight.X + eyeLeft.X) / 2.0, (eyeRight.Y + eyeLeft.Y) / 2.0);

        // Inter-eye distance drives the scale, giving the illusion of depth.
        double eyeDistance = Math.Sqrt(Math.Pow(eyeLeft.X - eyeRight.X, 2) + Math.Pow(eyeLeft.Y - eyeRight.Y, 2));

        // Scale the image proportionally to the measured eye distance.
        double width = ORIGINAL_IMAGE_WIDTH * eyeDistance / ORIGINAL_DISTANCE_EYES;
        double height = width * ORIGINAL_IMAGE_HEIGHT / ORIGINAL_IMAGE_WIDTH;

        // Head roll angle, from the eye-to-eye slope.
        double angle = Math.Atan2(eyeRight.Y - eyeLeft.Y, eyeRight.X - eyeLeft.X) * 180.0 / Math.PI;

        // Apply size and rotation.
        image.Width = width;
        transform.Angle = angle;

        // Anchor the image on the midpoint.
        Canvas.SetLeft(image, center.X - width / 2.0);
        Canvas.SetTop(image, center.Y - height / (ORIGINAL_IMAGE_HEIGHT / ORIGINAL_DISTANCE_EYES_TOP));
    }
}
/// <summary>
/// Collects face frame data for later drawing and fatigue detection.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame == null)
        {
            return;
        }

        // Slot of the face source that produced this frame.
        int slot = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

        // Validate the frame before keeping it.
        if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
        {
            // Store for later drawing and fatigue detection.
            this.faceFrameResults[slot] = faceFrame.FaceFrameResult;
        }
        // NOTE(review): on an invalid frame the previous result is deliberately
        // left in place (the null-out was commented out in the original) —
        // confirm this retention is intended.
    }
}