Code Example #1
File: SpeechHelper.cs Project: nagyistge/See4Me
        public static string GetFaceMessage(FaceResult face)
        {
            // Create the face description text to be spoken.
            string faceMessage = null;
            string personMessage;

            if (!string.IsNullOrWhiteSpace(face.Name))
            {
                // A person name has been identified.
                personMessage = $"{face.Name} ";
            }
            else
            {
                var ageDescription = GetAgeDescription(face.Age, face.Gender);
                personMessage = string.Format(GetString(Constants.PersonAgeMessage, face.Gender), ageDescription, face.Age);
            }

            if (face.Emotion != Emotion.Neutral)
            {
                var emotion        = GetString(face.Emotion.ToString(), face.Gender);
                var lookingMessage = string.Format(GetString(Constants.LookingMessage, face.Gender), emotion);
                faceMessage = $"{personMessage} {lookingMessage}";
            }
            else
            {
                // No emotion was recognized, so include only the person's name or age in the message.
                faceMessage = personMessage;
            }

            faceMessage = $"{faceMessage} {Constants.SentenceEnd} ";
            return(faceMessage);
        }
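A minimal usage sketch for the helper above; the property values and the speech call are illustrative assumptions, not part of the original project:

        // Usage sketch (values and the speech call are assumptions for illustration):
        var face = new FaceResult { Name = "Anna", Age = 30, Gender = Gender.Female, Emotion = Emotion.Happiness };
        string message = SpeechHelper.GetFaceMessage(face);
        await speechService.SpeakAsync(message); // speechService is a hypothetical TTS wrapper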
Code Example #2
        private bool CalcFaceTowards_NotClaimed(ref FaceResult bestResult, DirectionWorld target, DirectionWorld faceDirection, float azimuthDelta)
        {
            if (Math.Abs(azimuthDelta) > m_requiredAccuracyRadians)
            {
                return(false);
            }

            float elevationDelta, elevationAccuracy;

            if (CalcElevation(target, faceDirection, azimuthDelta, out elevationDelta, out elevationAccuracy))
            {
                float accSq = azimuthDelta * azimuthDelta + elevationAccuracy * elevationAccuracy;
                if (accSq > m_requiredAccuracyRadians * m_requiredAccuracyRadians)
                {
                    return(false);
                }
                float sumDeltaMag = Math.Abs(elevationDelta);
                Log.TraceLog("Best: " + bestResult + ", current: " + new FaceResult()
                {
                    AccuracySquared = accSq, SumDeltaMag = sumDeltaMag, DeltaElevation = elevationDelta, DeltaAzimuth = 0f
                });
                if (bestResult.ReplaceBy(accSq, sumDeltaMag))
                {
                    bestResult.AccuracySquared = accSq;
                    bestResult.SumDeltaMag     = sumDeltaMag;
                    bestResult.DeltaAzimuth    = 0f;
                    bestResult.DeltaElevation  = elevationDelta;
                }
                return(true);
            }
            return(false);
        }
Code Example #3
 private void vpFace_Paint(object sender, PaintEventArgs e)
 {
     if (this.aiFace != null)
     {
         FaceResult fs = this.aiFace.FaceResult;
         this.DrawDetectRectangle(e.Graphics, fs.Rectangle);
         e.Graphics.DrawString(fs.ID, this.Font, Brushes.White, fs.Rectangle.Left, fs.Rectangle.Top - 20);
     }
 }
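The handler above only runs when the control repaints; a sketch of the producing side, assuming the control is invalidated whenever a new FaceResult arrives (the event method name is hypothetical):

 // Sketch: push the latest result and force a repaint (hypothetical producer).
 private void OnFaceDetected(FaceResult latest)
 {
     this.aiFace.FaceResult = latest; // assumes the FaceResult property is settable
     this.vpFace.Invalidate();        // triggers vpFace_Paint above
 }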
Code Example #4
 private bool CalcFaceTowards(DirectionWorld targetWorldSpace, out FaceResult bestResult)
 {
     if (StatorOk(StatorAz))
     {
         return(CalcFaceTowards_AzimuthOk(targetWorldSpace, out bestResult));
     }
     else
     {
         return(CalcFaceTowards_NoAzimuth(targetWorldSpace, out bestResult));
     }
 }
Code Example #5
        private static FaceResult GetFaceResult(Face face, string blobName)
        {
            var result = new FaceResult
            {
                PartitionKey = DateTime.Today.AddMinutes((int)DateTime.Now.TimeOfDay.TotalMinutes).ToString("o"),
                RowKey       = $"{blobName}-{Guid.NewGuid().ToString()}",
                BlobName     = blobName,
                Age          = (int)face.FaceAttributes.Age,
            };

            if (!string.IsNullOrEmpty(face.FaceAttributes.Gender))
            {
                if (face.FaceAttributes.Gender == "male")
                {
                    result.Gender = Gender.Male;
                }
                else
                {
                    result.Gender = Gender.Female;
                }
            }

            if (face.FaceAttributes.FacialHair != null)
            {
                result.HasBeard     = face.FaceAttributes.FacialHair.Beard > 0.5;
                result.HasMoustache = face.FaceAttributes.FacialHair.Moustache > 0.5;
            }

            if (face.FaceAttributes.Hair != null)
            {
                result.IsBald        = face.FaceAttributes.Hair.Bald > 0.5;
                result.MainHairColor = face.FaceAttributes.Hair.HairColor?.OrderByDescending(h => h.Confidence).FirstOrDefault()?.Color ?? "none";
            }

            if (face.FaceAttributes.Emotion != null)
            {
                var emotions = new Dictionary <string, float> {
                    { "Fear", face.FaceAttributes.Emotion.Fear },
                    { "Anger", face.FaceAttributes.Emotion.Anger },
                    { "Comptent", face.FaceAttributes.Emotion.Contempt },
                    { "Disgust", face.FaceAttributes.Emotion.Disgust },
                    { "Happiness", face.FaceAttributes.Emotion.Happiness },
                    { "Neutral", face.FaceAttributes.Emotion.Neutral },
                    { "Sadness", face.FaceAttributes.Emotion.Sadness },
                    { "Surprise", face.FaceAttributes.Emotion.Surprise }
                };
                result.Emotion = emotions.OrderByDescending(e => e.Value).First().Key;
            }

            return(result);
        }
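A hedged usage sketch for the mapper above; the detection call follows the Microsoft Face SDK pattern also used in Code Example #14 below, while the client, stream, attribute list, and blob name are illustrative assumptions:

        // Usage sketch inside the same class (faceClient, imageStream, attributes,
        // and blobName are assumptions for illustration):
        var faces = await faceClient.Face.DetectWithStreamAsync(imageStream, returnFaceAttributes: attributes);
        foreach (var face in faces)
        {
            FaceResult entity = GetFaceResult(face, blobName);
            // persist entity, e.g. to Azure Table Storage
        }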
Code Example #6
        public ActionResult Face(string url)
        {
            FaceResult fr = new FaceResult();

            fr.ImageUrl = url;
            fr.Result   = FaceService.Detect(url, returnFaceAttributes: new List <FaceAttributeType>()
            {
                FaceAttributeType.Age,
                FaceAttributeType.FacialHair,
                FaceAttributeType.HeadPose,
                FaceAttributeType.Gender,
                FaceAttributeType.Glasses,
                FaceAttributeType.Smile
            }, returnFaceLandmarks: true);

            return(View("Face", fr));
        }
Code Example #7
 private void vpFace_Paint(object sender, PaintEventArgs e)
 {
     if (this.aiFace != null)
     {
         FaceResult fs = this.aiFace.FaceResult;
         this.DrawDetectRectangle(e.Graphics, fs.Rectangle);
         if (this.aiFace.IsDetected)
         {
             e.Graphics.DrawString(fs.ID, this.Font, Brushes.White, fs.Rectangle.Left, fs.Rectangle.Top - 20);
             Image successImage = Resources.detect_success_64;
             e.Graphics.DrawImage(successImage, new Point(this.vpFace.Width - successImage.Width, 0));
         }
         else
         {
             e.Graphics.DrawString("正在识别中...", this.Font, Brushes.ForestGreen, fs.Rectangle.Left, fs.Rectangle.Top - 20);
         }
     }
 }
Code Example #8
        /// <summary>
        /// Runs the Cloud Vision face detector on the given files and returns a list
        /// showing how many faces were detected in each file with at least the given
        /// confidence (70% by default).
        /// </summary>
        /// <param name="files"></param>
        /// <param name="client"></param>
        /// <param name="confidenceThreshold"></param>
        /// <returns></returns>
        public List <FaceResult> GetFaceResults(string[] files, ImageAnnotatorClient client, double confidenceThreshold = .7)
        {
            var faceResults = new List <FaceResult>();

            foreach (var file in files)
            {
                var response = GetFaceAnnotationResponse(client, file);

                var faceResult = new FaceResult
                {
                    Name  = Path.GetFileName(file),
                    Count = response.Count(a => a.DetectionConfidence >= confidenceThreshold)
                };
                faceResults.Add(faceResult);
            }

            return(faceResults);
        }
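A hedged usage sketch for the batch helper above (Google.Cloud.Vision.V1; the directory and the detector instance of the containing class are illustrative assumptions):

        // Usage sketch (the directory and the detector instance of the
        // containing class are assumptions for illustration):
        var client  = ImageAnnotatorClient.Create();
        var files   = Directory.GetFiles(@"C:\photos", "*.jpg");
        var results = detector.GetFaceResults(files, client, confidenceThreshold: 0.8);
        foreach (var r in results)
        {
            Console.WriteLine($"{r.Name}: {r.Count} face(s)");
        }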
Code Example #9
File: SpeechHelper.cs Project: nzigel/See4Me
        public static FaceResultMessage GetFaceMessage(FaceResult face)
        {
            var result = new FaceResultMessage();

            // Create the face description text to be spoken.
            string faceMessage = null;
            string personMessage;

            if (!string.IsNullOrWhiteSpace(face.Name))
            {
                // A person name has been identified.
                personMessage = $"{face.Name} ";

                if (settings.ShowRecognitionConfidence)
                {
                    personMessage = $"{personMessage} ({Math.Round(face.IdentifyConfidence, 2)})";
                }

                result.ContainsFace = true;
            }
            else
            {
                var ageDescription = GetAgeDescription(face.Age, face.Gender);
                personMessage = string.Format(GetString(Constants.PersonAgeMessage, face.Gender), ageDescription, face.Age);
            }

            if (face.Emotion != Emotion.Neutral)
            {
                var emotion        = GetString(face.Emotion.ToString(), face.Gender).ToLower();
                var lookingMessage = string.Format(GetString(Constants.LookingMessage, face.Gender), emotion);
                faceMessage = $"{personMessage} {lookingMessage}";
            }
            else
            {
                // No emotion was recognized, so include only the person's name or age in the message.
                faceMessage = personMessage;
            }

            faceMessage    = $"{faceMessage} {Constants.SentenceEnd} ";
            result.Message = faceMessage;

            return(result);
        }
Code Example #10
        private bool CalcFaceTowards_Claimed(ref FaceResult bestResult, DirectionWorld target, DirectionWorld faceDirection, float azimuthDelta)
        {
            float azimuthAccuracy;

            if (WithinLimits(StatorAz, azimuthDelta))
            {
                azimuthAccuracy = 0f;
            }
            else
            {
                float clamped = ClampToLimits(StatorAz, azimuthDelta);
                azimuthAccuracy = Math.Abs(azimuthDelta - clamped);
                if (azimuthAccuracy > m_requiredAccuracyRadians)
                {
                    return(false);
                }
                azimuthDelta = clamped;
            }

            float elevationDelta, elevationAccuracy;

            if (CalcElevation(target, faceDirection, azimuthDelta, out elevationDelta, out elevationAccuracy))
            {
                float accSq       = azimuthAccuracy * azimuthAccuracy + elevationAccuracy * elevationAccuracy;
                float sumDeltaMag = Math.Abs(azimuthDelta) + Math.Abs(elevationDelta);
                Log.TraceLog("Best: " + bestResult + ", current: " + new FaceResult()
                {
                    AccuracySquared = accSq, SumDeltaMag = sumDeltaMag, DeltaElevation = elevationDelta, DeltaAzimuth = azimuthDelta
                });
                if (bestResult.ReplaceBy(accSq, sumDeltaMag))
                {
                    bestResult.AccuracySquared = accSq;
                    bestResult.SumDeltaMag     = sumDeltaMag;
                    bestResult.DeltaAzimuth    = azimuthDelta;
                    bestResult.DeltaElevation  = elevationDelta;
                }
                return(true);
            }
            return(false);
        }
Code Example #11
        public static async Task OnQueueMessageReceivedAsync(
            [QueueTrigger("stats-calc-queue")] FaceResult uploadedPicture,
            [Table("UploadedTable")] CloudTable uploadTable,
            [Table("stats")] CloudTable statsTable,
            [SignalR(HubName = "NotifyHub")] IAsyncCollector <SignalRMessage> hubClient)
        {
            //Load data from Table Storage
            var allData = await uploadTable.QueryEntitiesAsync <FaceResultEntity>();

            //Calculate statistics
            var globalStats = new StatEntity(allData, "global");

            //Update stats in Table Storage
            await statsTable.InsertOrReplaceAsync(globalStats);

            //Push message to SignalR
            await hubClient.AddAsync(new SignalRMessage
            {
                Target    = "NotifyStatChanged",
                Arguments = new object[] { globalStats }
            });
        }
Code Example #12
        private bool CalcFaceTowards_NoAzimuth(DirectionWorld targetWorldSpace, out FaceResult bestResult)
        {
            bestResult = FaceResult.Default;

            foreach (var direction in FaceBlock.FaceDirections())
            {
                DirectionWorld faceDirection = new DirectionWorld()
                {
                    vector = FaceBlock.WorldMatrix.GetDirectionVector(direction)
                };
                CalcFaceTowards_NotClaimed(ref bestResult, targetWorldSpace, faceDirection, MathHelper.TwoPi);
            }

            if (bestResult.AccuracySquared != float.PositiveInfinity)
            {
                Log.TraceLog("Best: " + bestResult);
                return(true);
            }

            Log.TraceLog("Cannot rotate to target");
            return(false);
        }
Code Example #13
        private bool CalcFaceTowards_AzimuthOk(DirectionWorld targetWorldSpace, out FaceResult bestResult)
        {
            bestResult = FaceResult.Default;
            Vector3 target = targetWorldSpace.ToBlock(StatorAz);

            foreach (var direction in FaceBlock.FaceDirections())
            {
                DirectionWorld faceDirection = new DirectionWorld()
                {
                    vector = FaceBlock.WorldMatrix.GetDirectionVector(direction)
                };
                Vector3 current = faceDirection.ToBlock(StatorAz);

                float firstDelta, secondDelta;
                CalcDelta(current, target, out firstDelta, out secondDelta);
                if (m_claimedAzimuth)
                {
                    // azimuth has been claimed, check limits
                    if (CalcFaceTowards_Claimed(ref bestResult, targetWorldSpace, faceDirection, firstDelta))
                    {
                        Log.TraceLog("First azimuth delta is reachable: " + firstDelta);
                    }
                    else if (CalcFaceTowards_Claimed(ref bestResult, targetWorldSpace, faceDirection, secondDelta))
                    {
                        Log.TraceLog("Second azimuth delta is reachable: " + secondDelta);
                    }

                    if (bestResult.AccuracySquared > 0.1f)
                    {
                        // try flipped
                        float clamped = ClampToLimits(StatorAz, firstDelta);
                        if (clamped > 0f)
                        {
                            firstDelta  = clamped - MathHelper.Pi;
                            secondDelta = clamped + MathHelper.Pi;
                        }
                        else
                        {
                            firstDelta  = clamped + MathHelper.Pi;
                            secondDelta = clamped - MathHelper.Pi;
                        }

                        if (CalcFaceTowards_Claimed(ref bestResult, targetWorldSpace, faceDirection, firstDelta))
                        {
                            Log.TraceLog("First flipped azimuth delta is reachable: " + firstDelta);
                        }
                        else if (CalcFaceTowards_Claimed(ref bestResult, targetWorldSpace, faceDirection, secondDelta))
                        {
                            Log.TraceLog("Second flipped azimuth delta is reachable: " + secondDelta);
                        }
                    }
                }
                else if (CalcFaceTowards_NotClaimed(ref bestResult, targetWorldSpace, faceDirection, firstDelta))                 // azimuth has not been claimed, check that current azimuth is close enough
                {
                    Log.TraceLog("Azimuth is within tolerance: " + firstDelta);
                }
                else
                {
                    Log.TraceLog("Azimuth is outside tolerance: " + firstDelta);
                }
            }

            if (bestResult.AccuracySquared != float.PositiveInfinity)
            {
                Log.TraceLog("Best: " + bestResult);
                return(true);
            }

            Log.TraceLog("Cannot rotate to target");
            return(false);
        }
Code Example #14
File: FaceService.cs Project: meijeran/TravelMonkey
        public async Task <FaceResult> GetFaceResult(Stream imageStream)
        {
            try
            {
                var result =
                    await _faceClient.Face.DetectWithStreamAsync(imageStream, true, true,
                                                                 new List <FaceAttributeType> {
                    FaceAttributeType.Emotion, FaceAttributeType.Gender, FaceAttributeType.Age
                });

                var faceResult = new FaceResult();
                foreach (var face in result)
                {
                    var emotionType  = string.Empty;
                    var emotionValue = 0.0;
                    var emotion      = face.FaceAttributes.Emotion;
                    if (emotion.Anger > emotionValue)
                    {
                        emotionValue = emotion.Anger; emotionType = "Anger";
                    }
                    if (emotion.Contempt > emotionValue)
                    {
                        emotionValue = emotion.Contempt; emotionType = "Contempt";
                    }
                    if (emotion.Disgust > emotionValue)
                    {
                        emotionValue = emotion.Disgust; emotionType = "Disgust";
                    }
                    if (emotion.Fear > emotionValue)
                    {
                        emotionValue = emotion.Fear; emotionType = "Fear";
                    }
                    if (emotion.Happiness > emotionValue)
                    {
                        emotionValue = emotion.Happiness; emotionType = "Happiness";
                    }
                    if (emotion.Neutral > emotionValue)
                    {
                        emotionValue = emotion.Neutral; emotionType = "Neutral";
                    }
                    if (emotion.Sadness > emotionValue)
                    {
                        emotionValue = emotion.Sadness; emotionType = "Sadness";
                    }
                    if (emotion.Surprise > emotionValue)
                    {
                        emotionValue = emotion.Surprise; emotionType = "Surprise";
                    }

                    var faceDetails = new FaceDetails
                    {
                        Gender  = face.FaceAttributes.Gender.ToString(),
                        Emotion = emotionType
                    };
                    faceResult.Details.Add(faceDetails);
                }

                return(faceResult);
            }
            catch (Exception)
            {
                // On any failure, fall back to an empty result.
                return(new FaceResult());
            }
        }
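The if-chain above could be collapsed using the dictionary pattern from Code Example #5; a sketch, assuming the same Emotion property names:

                    // Alternative sketch: same "strongest emotion wins" selection,
                    // using the dictionary approach from Code Example #5.
                    var scores = new Dictionary<string, double>
                    {
                        { "Anger", emotion.Anger },         { "Contempt", emotion.Contempt },
                        { "Disgust", emotion.Disgust },     { "Fear", emotion.Fear },
                        { "Happiness", emotion.Happiness }, { "Neutral", emotion.Neutral },
                        { "Sadness", emotion.Sadness },     { "Surprise", emotion.Surprise }
                    };
                    var emotionType = scores.OrderByDescending(kv => kv.Value).First().Key;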
Code Example #15
    public int read_from_buffer(int exec_func, int data_len, byte[] data)
    {
        var cur = 0;

        var body_count = data[cur];
        var hand_count = data[cur + 1];
        var face_count = data[cur + 2];

        cur += 4; // skip reserved bytes

        // Human body detection
        for (var i = 0; i < body_count; i++)
        {
            var x    = BitConverter.ToInt16(data, cur);
            var y    = BitConverter.ToInt16(data, cur + 2);
            var size = BitConverter.ToInt16(data, cur + 4);
            var conf = BitConverter.ToInt16(data, cur + 6);

            var res = new DetectionResult(x, y, size, conf);

            this.bodies.Add(res);

            cur += 8;
        }

        // Hand detection
        for (var i = 0; i < hand_count; i++)
        {
            var x    = BitConverter.ToInt16(data, cur);
            var y    = BitConverter.ToInt16(data, cur + 2);
            var size = BitConverter.ToInt16(data, cur + 4);
            var conf = BitConverter.ToInt16(data, cur + 6);

            var res = new DetectionResult(x, y, size, conf);

            this.hands.Add(res);
            cur += 8;
        }

        // Face detection
        for (var i = 0; i < face_count; i++)
        {
            var x    = BitConverter.ToInt16(data, cur);
            var y    = BitConverter.ToInt16(data, cur + 2);
            var size = BitConverter.ToInt16(data, cur + 4);
            var conf = BitConverter.ToInt16(data, cur + 6);

            var res = new FaceResult(x, y, size, conf);

            cur += 8;

            // Face direction
            if ((exec_func & p2def.EX_DIRECTION) == p2def.EX_DIRECTION)
            {
                var LR             = BitConverter.ToInt16(data, cur);
                var UD             = BitConverter.ToInt16(data, cur + 2);
                var roll           = BitConverter.ToInt16(data, cur + 4);
                var direction_conf = BitConverter.ToInt16(data, cur + 6);

                res.direction = new DirectionResult(LR, UD, roll, direction_conf);
                cur          += 8;
            }

            // Age estimation
            if ((exec_func & p2def.EX_AGE) == p2def.EX_AGE)
            {
                var age      = data[cur];
                var age_conf = BitConverter.ToInt16(data, cur + 1);
                res.age = new AgeResult(age, age_conf);
                cur    += 3;
            }

            // Gender estimation
            if ((exec_func & p2def.EX_GENDER) == p2def.EX_GENDER)
            {
                var gen      = data[cur];
                var gen_conf = BitConverter.ToInt16(data, cur + 1);
                res.gender = new GenderResult(gen, gen_conf);
                cur       += 3;
            }

            // Gaze estimation
            if ((exec_func & p2def.EX_GAZE) == p2def.EX_GAZE)
            {
                int Gaze_LR = data[cur];
                int Gaze_UD = data[cur + 1];

                if (Gaze_LR > 127)
                {
                    Gaze_LR -= 256;
                }

                if (Gaze_UD > 127)
                {
                    Gaze_UD -= 256;
                }

                res.gaze = new GazeResult(Gaze_LR, Gaze_UD);
                cur     += 2;
            }

            // Blink estimation
            if ((exec_func & p2def.EX_BLINK) == p2def.EX_BLINK)
            {
                var L = BitConverter.ToInt16(data, cur);
                var R = BitConverter.ToInt16(data, cur + 2);
                res.blink = new BlinkResult(L, R);
                cur      += 4;
            }

            // Expression estimation
            if ((exec_func & p2def.EX_EXPRESSION) == p2def.EX_EXPRESSION)
            {
                var neu = data[cur];
                var hap = data[cur + 1];
                var sur = data[cur + 2];
                var ang = data[cur + 3];
                var sad = data[cur + 4];
                int neg = data[cur + 5];

                if (neg > 127)
                {
                    neg -= 256;
                }
                res.expression = new ExpressionResult(neu, hap, sur, ang, sad, neg);
                cur           += 6;
            }

            // Face recognition
            if ((exec_func & p2def.EX_RECOGNITION) == p2def.EX_RECOGNITION)
            {
                var uid   = BitConverter.ToInt16(data, cur);
                var score = BitConverter.ToInt16(data, cur + 2);
                res.recognition = new RecognitionResult(uid, score);
                cur            += 4;
            }

            this.faces.Add(res);
        }

        return(cur);
    }
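A sketch of driving the parser above; the transport helper and the result instance name are hypothetical, while the p2def flags and the faces list follow the snippet:

        // Usage sketch (ReceiveResponsePayload and hvc are hypothetical;
        // the flags and the faces list follow the snippet above):
        byte[] payload = ReceiveResponsePayload();
        int execFunc   = p2def.EX_DIRECTION | p2def.EX_AGE | p2def.EX_GENDER;
        int consumed   = hvc.read_from_buffer(execFunc, payload.Length, payload);
        foreach (var face in hvc.faces)
        {
            // inspect face.direction, face.age, face.gender, ...
        }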
Code Example #16
 public override void Init(object initData)
 {
     FaceResult = initData as FaceResult;
 }