Example #1
        // Persists one recognized face to the MySQL "user" table, storing the
        // grayscale face image as a BMP blob.
        private void SaveToDB(RecognitionResult.Face face)
        {
            callcount++;
            try
            {
                using (MemoryStream saveStream = new MemoryStream())
                {
                    face.GrayFace.Save(saveStream, ImageFormat.Bmp);

                    // The connection (cnn) is assumed to be opened elsewhere.
                    //cnn.Open();
                    using (MySqlCommand cmd = new MySqlCommand(
                        "INSERT INTO user VALUES(@Uid, @Name, @Phone_number, @FaceFront, @Restriction_type)", cnn))
                    {
                        cmd.Parameters.AddWithValue("@Uid", Guid.NewGuid());
                        cmd.Parameters.AddWithValue("@Name", this.NameField.Text);
                        cmd.Parameters.AddWithValue("@Phone_number", "222");
                        cmd.Parameters.AddWithValue("@FaceFront", saveStream.ToArray());
                        cmd.Parameters.AddWithValue("@Restriction_type", 2);

                        int rows = cmd.ExecuteNonQuery();
                    }
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show("Counter = " + callcount);
                MessageBox.Show(string.Format("Failed to save to DB! {0}", ex.Message));
            }
        }
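
The positional INSERT above depends on a user table whose column order matches the five parameters. Below is a minimal sketch of a compatible schema created through the same MySql.Data connector; the column names come from the parameters, but the helper name and the column types are assumptions, not taken from the original project:

        // Hypothetical helper: creates a "user" table compatible with SaveToDB.
        // Column types are illustrative assumptions only.
        private void EnsureUserTable(MySqlConnection cnn)
        {
            const string ddl =
                "CREATE TABLE IF NOT EXISTS user (" +
                "Uid CHAR(36) PRIMARY KEY, " +      // Guid serialized as text
                "Name VARCHAR(100), " +
                "Phone_number VARCHAR(20), " +
                "FaceFront MEDIUMBLOB, " +          // BMP bytes from the MemoryStream
                "Restriction_type INT)";

            using (var cmd = new MySqlCommand(ddl, cnn))
            {
                cmd.ExecuteNonQuery();
            }
        }
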
        /// <summary>
        /// Handles recognition complete events
        /// </summary>
        private void Engine_RecognitionComplete(object sender, RecognitionResult e)
        {
            RecognitionResult.Face face = null;

            if (e.Faces != null)
            {
                face = e.Faces.FirstOrDefault();
            }

            if (face != null)
            {
                if (!string.IsNullOrEmpty(face.Key))
                {
                    // Write the key on the image...
                    using (var g = Graphics.FromImage(e.ProcessedBitmap))
                    using (var font = new Font("Arial", 20))
                    {
                        var rect = face.TrackingResults.FaceRect;
                        g.DrawString(face.Key, font, Brushes.Pink, new System.Drawing.Point(rect.Left, rect.Top - 25));
                    }
                }

                // A training image was requested; capture this frame.
                if (this.takeTrainingImage)
                {
                    this.targetFaces.Add(new BitmapSourceTargetFace
                    {
                        Image = (Bitmap)face.GrayFace.Clone(),
                        Key   = this.NameField.Text
                    });

                    // Append the key to the face-database index, then save the
                    // grayscale face image next to it.
                    using (var file = new System.IO.StreamWriter(@"FaceDB\FaceKeys.txt", true))
                    {
                        file.Write(this.NameField.Text + ",");
                    }

                    face.GrayFace.Save(@"FaceDB\" + this.NameField.Text + ".bmp");

                    this.takeTrainingImage = false;

                    // Bump the trailing index in the name field (e.g. "User1" -> "User2")
                    // so the next training image gets a distinct key.
                    this.NameField.Text = this.NameField.Text.Replace(this.targetFaces.Count.ToString(), (this.targetFaces.Count + 1).ToString());

                    // The engine is only (re)trained once there is more than one target face.
                    if (this.targetFaces.Count > 1)
                    {
                        this.engine.SetTargetFaces(this.targetFaces);
                    }
                }
            }

            this.Video.Source = LoadBitmap(e.ProcessedBitmap);
        }
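
Nothing in this example reads the FaceDB folder back. Here is a minimal sketch of how the keys and bitmaps written above could be reloaded at startup; the method name RestoreTargetFaces is hypothetical, and it assumes the trailing-comma format of FaceKeys.txt shown above:

        // Hypothetical loader for the files written in the handler above.
        private void RestoreTargetFaces()
        {
            string indexPath = System.IO.Path.Combine("FaceDB", "FaceKeys.txt");
            if (!System.IO.File.Exists(indexPath))
            {
                return;
            }

            // FaceKeys.txt holds "key1,key2,..." with a trailing comma.
            foreach (string key in System.IO.File.ReadAllText(indexPath)
                         .Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
            {
                string bmpPath = System.IO.Path.Combine("FaceDB", key + ".bmp");
                if (System.IO.File.Exists(bmpPath))
                {
                    this.targetFaces.Add(new BitmapSourceTargetFace
                    {
                        Image = new Bitmap(bmpPath),
                        Key   = key
                    });
                }
            }

            if (this.targetFaces.Count > 1)
            {
                this.engine.SetTargetFaces(this.targetFaces);
            }
        }
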
Example #3
        /// <summary>
        /// Handles recognition complete events
        /// </summary>
        private void Engine_RecognitionComplete(object sender, RecognitionResult e)
        {
            RecognitionResult.Face face = null;

            if (AlertAVI.SaveVideo && AlertAVI.SaveEnabled)
            {
                // Recording is active: buffer this frame for the alert video.
                AlertAVI.BuildArray(e.OriginalBitmap);
            }
            else if (!AlertAVI.SaveVideo && !AlertAVI.SaveEnabled)
            {
                // Not recording and not armed: set the arm button back to blue.
                ArmButton.Background = System.Windows.Media.Brushes.Blue;
            }

            if (AlertAVI.SaveVideo && !AlertAVI.SaveEnabled)
            {
                AlertAVI.SaveVideo = false; // User wasn't ready; must reset
            }

            if (e.Faces == null) // Face lost: reset the countdown and the last known key
            {
                MaxUnknownFaceTime = 5;
                LastKnownKey       = "";
            }

            if (e.Faces != null)
            {
                face = e.Faces.FirstOrDefault();
            }

            if (face != null)
            {
                if (!string.IsNullOrEmpty(face.Key))
                {
                    // A known face is in view: stop the unknown-face countdown and reset it.
                    if (FaceTimer.IsEnabled)
                    {
                        FaceTimer.Stop();
                    }

                    MaxUnknownFaceTime = 5;

                    // Write the key on the image...
                    using (var g = Graphics.FromImage(e.ProcessedBitmap))
                    using (var font = new Font("Arial", 30))
                    {
                        var rect = face.TrackingResults.FaceRect;
                        g.DrawString(face.Key, font, Brushes.Red, new System.Drawing.Point(rect.Left, rect.Top - 25));
                        LastKnownKey = face.Key;
                    }
                }

                if (string.IsNullOrEmpty(face.Key))
                {
                    // Unrecognized face: start the countdown.
                    if (!FaceTimer.IsEnabled)
                    {
                        FaceTimer.Start();
                    }

                    // Label the face with the last known key and the remaining countdown.
                    using (var g = Graphics.FromImage(e.ProcessedBitmap))
                    using (var font = new Font("Arial", 30))
                    {
                        var rect = face.TrackingResults.FaceRect;
                        g.DrawString(LastKnownKey + " " + MaxUnknownFaceTime.ToString(), font, Brushes.Red, new System.Drawing.Point(rect.Left, rect.Top - 25));
                    }
                }

                // Capture a training image when one has been requested.
                if (this.takeTrainingImage)
                {
                    MaxUnknownFaceTime     = 5;
                    this.takeTrainingImage = false;
                    this.targetFaces.Add(new BitmapSourceTargetFace
                    {
                        Image = (Bitmap)face.GrayFace.Clone(),
                        Key   = this.NameField.Text
                    });
                    // Keep a PNG copy of the face in the result buffer.
                    using (MemoryStream stream = new MemoryStream())
                    {
                        face.GrayFace.Save(stream, ImageFormat.Png);
                        result = stream.ToArray();
                    }

                    SaveToDB(face);

                    // Bump the trailing index in the name field for the next training image.
                    this.NameField.Text = this.NameField.Text.Replace(this.targetFaces.Count.ToString(), (this.targetFaces.Count + 1).ToString());

                    if (this.targetFaces.Count > 1)
                    {
                        this.engine.SetTargetFaces(this.targetFaces);
                    }
                }
            }
            this.Video.Source = LoadBitmap(e.ProcessedBitmap);
        }
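
The FaceTimer tick handler is not shown in this example. Here is a minimal sketch of what it might look like, assuming FaceTimer is a one-second WPF DispatcherTimer that counts MaxUnknownFaceTime down; whether hitting zero is what arms the alert recording is an assumption, since the handler above only reads AlertAVI.SaveVideo and AlertAVI.SaveEnabled:

        // Hypothetical tick handler (assumes a one-second DispatcherTimer).
        private void FaceTimer_Tick(object sender, EventArgs e)
        {
            MaxUnknownFaceTime--;

            if (MaxUnknownFaceTime <= 0)
            {
                // Assumption: an unknown face that lingers too long triggers
                // the alert-video capture handled in Engine_RecognitionComplete.
                AlertAVI.SaveVideo = true;

                MaxUnknownFaceTime = 5;
                FaceTimer.Stop();
            }
        }
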
Example #4
        public async Task<RecognitionResult.Face[]> DetectEmotions(FaceCropResult.Face[] faces)
        {
            AnnotationRequest request = new AnnotationRequest();

            request.Requests = new AnnotationRequest.AnnotateImageRequest[faces.Length];

            for (int i = 0; i < faces.Length; i++)
            {
                FaceCropResult.Face face = faces[i];

                var r = new AnnotationRequest.AnnotateImageRequest
                {
                    ImageData = new AnnotationRequest.AnnotateImageRequest.Image
                    {
                        Content = face.ImageBase64
                    },
                    Features = new AnnotationRequest.AnnotateImageRequest.Feature[]
                    {
                        new AnnotationRequest.AnnotateImageRequest.Feature
                        {
                            Type       = "FACE_DETECTION",
                            MaxResults = 5
                        }
                    }
                };

                request.Requests[i] = r;
            }

            try
            {
                // Note: in production an HttpClient should be reused rather than created
                // per call; the Vision API also expects a JSON content type.
                HttpClient  client  = new HttpClient();
                HttpContent content = new StringContent(
                    JsonConvert.SerializeObject(request), System.Text.Encoding.UTF8, "application/json");

                var httpResponse = await client.PostAsync("https://vision.googleapis.com/v1/images:annotate?key=" + _apiKey, content);

                string responseData = await httpResponse.Content.ReadAsStringAsync();

                if (httpResponse.IsSuccessStatusCode)
                {
                    AnnotationResponse response = JsonConvert.DeserializeObject<AnnotationResponse>(responseData);

                    List<RecognitionResult.Face> faceResults = new List<RecognitionResult.Face>();

                    for (int i = 0; i < response.Responses.Length && i < faces.Length; i++)
                    {
                        var annotationData = response.Responses[i].FaceAnnotationData;
                        if (annotationData == null || annotationData.Length == 0)
                        {
                            continue; // The API found no face in this crop.
                        }

                        AnnotationResponse.AnnotateImageResponse.FaceAnnotations faceAnnotations = annotationData[0];
                        RecognitionResult.Face faceResult = new RecognitionResult.Face
                        {
                            Emotion = new RecognitionResult.Emotion
                            {
                                Anger     = FromLikelyhood(faceAnnotations.Anger),
                                Happiness = FromLikelyhood(faceAnnotations.Joy),
                                Neutral   = 0.0,
                                Sadness   = FromLikelyhood(faceAnnotations.Sorrow),
                                Surprise  = FromLikelyhood(faceAnnotations.Surprise)
                            }
                        };

                        faceResults.Add(faceResult);
                    }

                    return faceResults.ToArray();
                }
            }
            catch (Exception ex)
            {
                // Swallow the failure and fall through to the null return below,
                // but at least record it for debugging.
                System.Diagnostics.Debug.WriteLine(ex);
            }

            return null;
        }
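
FromLikelyhood is not shown in this example. The Vision API reports each emotion as a likelihood enum (UNKNOWN, VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY); here is a plausible sketch of the helper, assuming the annotation properties deserialize as those raw strings and that the mapping onto a 0..1 score is linear (both assumptions, not taken from the original project):

        // Plausible implementation of the FromLikelyhood helper used above.
        // The numeric scale is an illustrative assumption.
        private static double FromLikelyhood(string likelihood)
        {
            switch (likelihood)
            {
                case "VERY_LIKELY":   return 1.0;
                case "LIKELY":        return 0.75;
                case "POSSIBLE":      return 0.5;
                case "UNLIKELY":      return 0.25;
                case "VERY_UNLIKELY": return 0.0;
                default:              return 0.0; // "UNKNOWN" or unexpected value
            }
        }

Neutral is hard-coded to 0.0 in DetectEmotions because the Vision API reports no neutral likelihood to map from.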