Example #1
    /*private IEnumerator GetPersonFromImageAsync(Texture2D imageBytes)
     * {
     *  var personGroups =  PersonGroup.GetGroupListAsync();
     *  for (var i = 0; i < personGroups.Length; i++)
     *  {
     *      labelText.text = "Searching Group: " + personGroups[i].name;
     *
     *      // try to detect the faces in the image.
     *      FaceInfo[] faces =  Face.DetectAsync(imageBytes);
     *
     *      if (faces != null)
     *      {
     *          if (faces.Length == 0)
     *          {
     *              yield return new WaitForUpdate();
     *              labelText.text = "No Faces Found!";
     *              return;
     *          }
     *
     *          // if faces are found, assign a GUID
     *          var faceIds = new string[faces.Length];
     *          for (int j = 0; j < faces.Length; j++)
     *          {
     *              faceIds[j] = faces[j].faceId;
     *          }
     *
     *          // try to identify the face found in the image by
     *          // retrieving a series of candidates from the queried group.
     *          var idResults =  Face.IdentifyAsync(faceIds, personGroups[i].personGroupId);
     *
     *          for (var j = 0; j < idResults.Length; j++)
     *          {
     *              double bestConfidence = 0f;
     *              string personId = null;
     *
     *              // try to match the candidate to the face found
     *              // in the image using a confidence value.
     *              for (var k = 0; k < idResults[j].candidates.Length; k++)
     *              {
     *                  var candidate = idResults[j].candidates[k];
     *
     *                  if (bestConfidence > candidate.confidence) { continue; }
     *
     *                  bestConfidence = candidate.confidence;
     *                  personId = candidate.personId;
     *              }
     *
     *              if (string.IsNullOrEmpty(personId))
     *              {
     *                  yield return new WaitForUpdate();
     *                  labelText.text = "No Faces Found!";
     *                  continue;
     *              }
     *
     *              // display the candidate with the highest confidence
     *              var person =  Person.GetPersonAsync(personGroups[i].personGroupId, personId);
     *              labelText.text = person.name;
     *              return;
     *          }
     *      }
     *  }
     *
     *  yield return new WaitForUpdate();
     *  labelText.text = "No Faces Found!";
     * }*/

    // performs face detection
    private IEnumerator DoFaceDetection(Texture2D texImage)
    {
        // get the face manager instance
        CloudFaceManager faceManager = CloudFaceManager.Instance;

        if (texImage && faceManager)
        {
            Texture2D texFlipped = FlipTextureV(texImage);
            yield return(faceManager.DetectFaces(texFlipped));

            if (faceManager.faces != null && faceManager.faces.Length > 0)
            {
                Face face = faceManager.faces[0];

                // set user data
                userGender = face.faceAttributes.gender;
                userAge    = face.faceAttributes.age;
                userSmile  = face.faceAttributes.smile;
                userTempID = face.faceId;

                string sMessage = string.Format("{0}, Age: {1:F1}", userGender.ToUpper(), userAge);
                Debug.Log("CloudFaceDetector found " + sMessage);
                Debug.Log("@@@@ GAILE FACE ID: " + userTempID);
                if (infoText != null)
                {
                    infoText.text = sMessage;
                }

                // convert to gender enum
                UserGender gender = userGender.ToLower() == "male" ? UserGender.Male : UserGender.Female;

                // invoke UserDataDetected() of the category selector(s) related to the same user
                MonoBehaviour[] monoScripts = FindObjectsOfType(typeof(MonoBehaviour)) as MonoBehaviour[];
                foreach (MonoBehaviour monoScript in monoScripts)
                {
                    if ((monoScript is CloudFaceListenerInterface) && monoScript.enabled)
                    {
                        CloudFaceListenerInterface userFaceListener = (CloudFaceListenerInterface)monoScript;
                        userFaceListener.UserFaceDetected(playerIndex, gender, userAge, userSmile);
                    }
                }
            }
        }

        yield return(null);
    }
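
Example #1 broadcasts the detection result to every enabled component that implements CloudFaceListenerInterface. The interface itself is not part of the snippet; the sketch below is only an assumption of what the contract and a minimal consumer could look like, based on the UserFaceDetected() call and the UserGender enum used above. The float parameter types and the GreetingListener class are illustrative, not taken from the original project.

using UnityEngine;

// Hypothetical sketch of the listener contract assumed by Example #1.
// The real interface and enum may declare additional members.
public enum UserGender { Male, Female }

public interface CloudFaceListenerInterface
{
    // invoked once per detected user, after the cloud face detection completes
    void UserFaceDetected(int playerIndex, UserGender gender, float age, float smile);
}

// Illustrative consumer: any enabled MonoBehaviour implementing the interface
// is picked up by the FindObjectsOfType() loop in Example #1.
public class GreetingListener : MonoBehaviour, CloudFaceListenerInterface
{
    public void UserFaceDetected(int playerIndex, UserGender gender, float age, float smile)
    {
        Debug.Log(string.Format("Player {0}: {1}, age {2:F1}, smile {3:P0}",
            playerIndex, gender, age, smile));
    }
}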
Example #2
    // performs face detection
    private IEnumerator DoFaceDetection()
    {
        // get the image to detect
        faces = null;
        Texture2D texCamShot = null;

        if (cameraShot)
        {
            texCamShot = (Texture2D)cameraShot.GetComponent<Renderer>().material.mainTexture;

            //SetHintText("Wait...");
        }

        // get the face manager instance
        CloudFaceManager faceManager = CloudFaceManager.Instance;

        if (texCamShot && faceManager)
        {
            // detect the faces on the camera shot, then process the results
            yield return(faceManager.DetectFaces(texCamShot));

            faces = faceManager.faces;

            if (faces != null && faces.Length > 0)
            {
                //if(displayFaceRectangles)
                {
                    //faceManager.DrawFaceRects(texCamShot, faces, FaceDetectionUtils.FaceColors, this.displayHeadDirection);
                    // add the most prominent face to the face list
                    faceManager.AddFaceToList(texCamShot, faces);
                }

                //SetHintText("Click on the camera image to make a shot");
            }
            else
            {
                //SetHintText("No faces detected.");
            }
        }
        else
        {
            //SetHintText("Check if the FaceManager component exists in the scene.");
        }

        yield return(null);
    }
Example #3
    // performs face detection
    private IEnumerator DoFaceDetection()
    {
        // get the image to detect
        Face[]    faces      = null;
        Texture2D texCamShot = null;

        if (cameraShot)
        {
            texCamShot = (Texture2D)cameraShot.material.mainTexture;
            SetHintText("Wait...");
        }

        // get the face manager instance
        CloudFaceManager faceManager = CloudFaceManager.Instance;

        if (!faceManager)
        {
            SetHintText("Check if the FaceManager component exists in the scene.");
        }
        else if (texCamShot)
        {
            yield return(faceManager.DetectFaces(texCamShot));

            faces = faceManager.faces;

            if (faces != null && faces.Length > 0)
            {
                //if(displayFaceRectangles)
                {
                    faceManager.DrawFaceRects(texCamShot, faces, FaceDetectionUtils.FaceColors, displayHeadDirection);
                    faceManager.AddFaceToList(texCamShot, faces);
                }

                SetHintText(hintMessage);
                SetResultText(faces);
            }
            else
            {
                SetHintText("No face(s) detected.");
            }
        }

        yield return(null);
    }
Example #4
    // performs face detection
    private IEnumerator DoFaceDetection()
    {
        // get the image to detect
        faces = null;
        Texture2D texCamShot = null;

        if (cameraShot)
        {
            texCamShot = (Texture2D)cameraShot.GetComponent<Renderer>().material.mainTexture;
            SetHintText("Wait...");
        }

        // get the face manager instance
        CloudFaceManager faceManager = CloudFaceManager.Instance;

        if (texCamShot && faceManager)
        {
            yield return(faceManager.DetectFaces(texCamShot));

            faces = faceManager.faces;
            // Debug.Log("Zeroth FaceID: " + faces[0].faceId);
            //yield return faceManager.matchingFace(faces[0].faceId);

            if (faces != null && faces.Length > 0)
            {
                //if(displayFaceRectangles)
                {
                    faceManager.DrawFaceRects(texCamShot, faces, FaceDetectionUtils.FaceColors, this.displayHeadDirection);
                    StartCoroutine(isTarget());
                }
                SetHintText("Click on the camera image to make a shot");
            }
            else
            {
                SetHintText("No faces detected.");
            }
        }
        else
        {
            SetHintText("Check if the FaceManager component exists in the scene.");
        }

        yield return(null);
    }
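
None of the examples so far show how DoFaceDetection() is started. In a typical Unity setup it is launched as a coroutine from Update() or a click handler, matching the "Click on the camera image to make a shot" hint. A minimal, hypothetical driver is sketched below; only StartCoroutine() and the DoFaceDetection/cameraShot names come from the examples, the input handling itself is an illustrative assumption.

    // Hypothetical driver, assumed to live in the same MonoBehaviour as
    // DoFaceDetection() above; the click test is illustrative only.
    void Update()
    {
        // re-run the detection coroutine when the user clicks the camera image
        if (Input.GetMouseButtonDown(0) && cameraShot != null)
        {
            StartCoroutine(DoFaceDetection());
        }
    }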
Example #5
    // performs face detection
    private IEnumerator DoFaceDetection()
    {
        // get the image to detect
        Face[]    faces      = null;
        Texture2D texCamShot = null;

        if (cameraShot)
        {
            texCamShot = (Texture2D)cameraShot.texture;
            SetHintText("Wait...");
        }

        // get the face manager instance
        CloudFaceManager faceManager = CloudFaceManager.Instance;

        if (!faceManager)
        {
            SetHintText("Check if the FaceManager component exists in the scene.");
        }
        else if (texCamShot)
        {
            byte[] imageBytes = texCamShot.EncodeToJPG();
            yield return(null);

            //faces = faceManager.DetectFaces(texCamShot);
            AsyncTask<Face[]> taskFace = new AsyncTask<Face[]>(() => {
                return(faceManager.DetectFaces(imageBytes));
            });

            taskFace.Start();
            yield return(null);

            while (taskFace.State == TaskState.Running)
            {
                yield return(null);
            }

            if (string.IsNullOrEmpty(taskFace.ErrorMessage))
            {
                faces = taskFace.Result;

                if (faces != null && faces.Length > 0)
                {
                    // stick to detected face rectangles
                    FaceRectangle[] faceRects = new FaceRectangle[faces.Length];

                    for (int i = 0; i < faces.Length; i++)
                    {
                        faceRects[i] = faces[i].faceRectangle;
                    }

                    yield return(null);

                    // get the emotions of the faces
                    if (recognizeEmotions)
                    {
                        //Emotion[] emotions = faceManager.RecognizeEmotions(texCamShot, faceRects);
                        AsyncTask<Emotion[]> taskEmot = new AsyncTask<Emotion[]>(() => {
                            return(faceManager.RecognizeEmotions(imageBytes, faceRects));
                        });

                        taskEmot.Start();
                        yield return(null);

                        while (taskEmot.State == TaskState.Running)
                        {
                            yield return(null);
                        }

                        if (string.IsNullOrEmpty(taskEmot.ErrorMessage))
                        {
                            Emotion[] emotions = taskEmot.Result;
                            int       matched  = faceManager.MatchEmotionsToFaces(ref faces, ref emotions);

                            if (matched != faces.Length)
                            {
                                Debug.Log(string.Format("Matched {0}/{1} emotions to {2} faces.", matched, emotions.Length, faces.Length));
                            }
                        }
                        else
                        {
                            SetHintText(taskEmot.ErrorMessage);
                        }
                    }

                    CloudFaceManager.DrawFaceRects(texCamShot, faces, FaceDetectionUtils.FaceColors);
                    //SetHintText("Click on the camera image to make a shot");
                    SetHintText(hintMessage);
                    SetResultText(faces);
                }
                else
                {
                    SetHintText("No face(s) detected.");
                }
            }
            else
            {
                SetHintText(taskFace.ErrorMessage);
            }
        }

        yield return(null);
    }
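
Example #5 differs from the earlier ones: instead of yielding on faceManager.DetectFaces() directly, it pushes the blocking REST calls into an AsyncTask<T> and polls its State from the coroutine. The wrapper itself is not shown; the sketch below is one possible implementation, inferred only from the Start()/State/Result/ErrorMessage members used above, and is not the actual class shipped with the asset.

using System;
using System.Threading;

// Minimal sketch of an AsyncTask<T>-style wrapper (assumed, not the original):
// it runs a blocking delegate on a pool thread so the Unity main thread can
// keep yielding while it polls State, then reads Result or ErrorMessage.
public enum TaskState { Created, Running, Done }

public class AsyncTask<T>
{
    private readonly Func<T> work;

    public TaskState State { get; private set; }
    public T Result { get; private set; }
    public string ErrorMessage { get; private set; }

    public AsyncTask(Func<T> work)
    {
        this.work = work;
        State = TaskState.Created;
    }

    public void Start()
    {
        State = TaskState.Running;

        ThreadPool.QueueUserWorkItem(_ =>
        {
            try { Result = work(); }
            catch (Exception ex) { ErrorMessage = ex.Message; }
            finally { State = TaskState.Done; }
        });
    }
}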
Example #6
    /// <summary>
    /// Identifies the users on the image.
    /// </summary>
    /// <returns><c>true</c>, if identification completed successfully, <c>false</c> otherwise.</returns>
    /// <param name="imageBytes">Image bytes.</param>
    /// <param name="faces">Array of faces.</param>
    /// <param name="results">Array of identification results.</param>
    public bool IdentifyUsers(byte[] imageBytes, ref Face[] faces, ref IdentifyResult[] results)
    {
        // create the user-group if needed
        if (userGroupId != initedGroupId)
        {
            GetOrGreateUserGroup();
        }
        if (userGroupId != initedGroupId)
        {
            return(false);
        }

        // detect and identify user faces
        faces   = null;
        results = null;

        if (faceManager != null)
        {
            faces = faceManager.DetectFaces(imageBytes);

            // get the training status
            TrainingStatus training    = faceManager.GetPersonGroupTrainingStatus(userGroupId);
            bool           bEmptyGroup = false;

            if (training != null)
            {
                if (training.status == Status.Failed)
                {
                    // check if there are persons in this group
                    List<Person> listPersons = GetUsersList();

                    if (listPersons.Count > 0)
                    {
                        // retrain the group
                        faceManager.TrainPersonGroup(userGroupId);
                    }
                    else
                    {
                        // empty group - always returns 'training failed'
                        training.status = Status.Succeeded;
                        bEmptyGroup     = true;
                    }
                }
                else if (training.status == Status.Succeeded && training.message.StartsWith("There is no person"))
                {
                    // the group exists but it's empty
                    bEmptyGroup = true;
                }
            }

            DateTime waitTill = DateTime.Now.AddSeconds(5);
            while ((training == null || training.status != Status.Succeeded) && (DateTime.Now < waitTill))
            {
                // wait for training to succeed
                System.Threading.Thread.Sleep(1000);
                training = faceManager.GetPersonGroupTrainingStatus(userGroupId);
            }

            if (bEmptyGroup)
            {
                // nothing to check
                return(true);
            }

            if (faces != null && faces.Length > 0)
            {
                results = faceManager.IdentifyFaces(userGroupId, ref faces, 1);
                faceManager.MatchCandidatesToFaces(ref faces, ref results, userGroupId);
                return(true);
            }
        }

        return(false);
    }
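
IdentifyUsers() in Example #6 is a blocking call: it can sleep for up to five seconds while polling the person-group training status, so calling it directly on the Unity main thread would freeze the UI. Below is a hedged usage sketch that reuses the AsyncTask<T> polling pattern from Example #5; the IdentifyFromCameraShot name, the userManager field and the result handling are assumptions for illustration only.

    // Hypothetical caller, assumed to live in a MonoBehaviour that holds a
    // reference (userManager) to the component exposing IdentifyUsers().
    private IEnumerator IdentifyFromCameraShot(Texture2D texCamShot)
    {
        byte[] imageBytes = texCamShot.EncodeToJPG();

        Face[] faces = null;
        IdentifyResult[] results = null;

        // run the blocking detect/train/identify sequence off the main thread
        AsyncTask<bool> task = new AsyncTask<bool>(() =>
            userManager.IdentifyUsers(imageBytes, ref faces, ref results));
        task.Start();

        while (task.State == TaskState.Running)
        {
            yield return null;
        }

        if (string.IsNullOrEmpty(task.ErrorMessage) && task.Result && faces != null)
        {
            Debug.Log("Identified " + faces.Length + " face(s) in the camera shot.");
        }
        else
        {
            Debug.Log("Identification failed: " + task.ErrorMessage);
        }
    }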