Ejemplo n.º 1
0
        /// <summary>
        /// Maps a text rectangle from original-image coordinates into the UI space
        /// produced by letterboxing the image into a maxSize x maxSize square.
        /// Mutates and returns the supplied <paramref name="face"/> binding object.
        /// </summary>
        private Face CalculateTextRectangleForRendering(Face face, int maxSize, Tuple <int, int> imageInfo, int left, int top, int height, int width)
        {
            var imageWidth = imageInfo.Item1;
            var imageHeight = imageInfo.Item2;
            float aspect = (float)imageWidth / imageHeight;

            int renderWidth;
            int renderHeight;
            if (aspect > 1.0)
            {
                // Landscape: width fills the square, height shrinks proportionally.
                renderWidth = maxSize;
                renderHeight = (int)(maxSize / aspect);
            }
            else
            {
                // Portrait or square: height fills the square, width shrinks.
                renderHeight = maxSize;
                renderWidth = (int)(aspect * renderHeight);
            }

            // Center the rendered image inside the square, then scale the rectangle.
            int offsetX = (maxSize - renderWidth) / 2;
            int offsetY = (maxSize - renderHeight) / 2;
            float scaleFactor = (float)renderWidth / imageWidth;

            face.Left = (int)((left * scaleFactor) + offsetX);
            face.Top = (int)((top * scaleFactor) + offsetY);
            face.Height = (int)(height * scaleFactor);
            face.Width = (int)(width * scaleFactor);

            return face;
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Builds one UI-scaled rectangle (carried in a Face binding object) per
        /// OCR region; the region's line count is surfaced via ScoredEmotion.
        /// </summary>
        public ObservableCollection <Face> OcrGetFramesRectanglesForRecognizedText(OcrResults ocrResults, string selectedFile)
        {
            var imageInfo = GetImageInfoForRendering(selectedFile);
            var frames = new ObservableCollection<Face>();

            foreach (var region in ocrResults.Regions)
            {
                var rect = region.Rectangle;
                var frame = CalculateTextRectangleForRendering(new Face(), MaxImageSize, imageInfo,
                    rect.Left, rect.Top, rect.Height, rect.Width);
                frame.ScoredEmotion = $"lines {region.Lines.Length}";
                frames.Add(frame);
            }

            return frames;
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Detects faces in the given image file via the Face API and returns a
        /// tuple of (detected faces with attributes, UI-scaled face rectangles for
        /// rendering). Best-effort: any exception is logged and the partially
        /// filled (possibly empty) collections are still returned.
        /// </summary>
        /// <param name="selectedFile">Path of the image file to analyze.</param>
        /// <param name="analyzeEmotion">When true, emotion scores are fetched and copied onto the matching rectangles.</param>
        public async Task <Tuple <ObservableCollection <Face>, ObservableCollection <Face> > > StartFaceDetection(string selectedFile, bool analyzeEmotion)
        {
            var detectedFaces = new ObservableCollection <Face>();
            var facesRect     = new ObservableCollection <Face>();

            Debug.WriteLine("Request: Detecting {0}", selectedFile);

            using (var fileStreamImage = File.OpenRead(selectedFile))
            {
                try
                {
                    var client = new FaceServiceClient(_subscriptionKeyFace);
                    var faces  = await client.DetectAsync(fileStreamImage, false, true, true);

                    Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
                    var imageInfo = GetImageInfoForRendering(selectedFile);
                    Debug.WriteLine("{0} face(s) has been detected", faces.Length);

                    foreach (var face in faces)
                    {
                        var detectedFace = new Face()
                        {
                            ImagePath = selectedFile,
                            Left      = face.FaceRectangle.Left,
                            Top       = face.FaceRectangle.Top,
                            Width     = face.FaceRectangle.Width,
                            Height    = face.FaceRectangle.Height,
                            FaceId    = face.FaceId,
                            Gender    = face.Attributes.Gender,
                            // "0" (not "#") so an age rounding to zero still renders a
                            // digit instead of the empty string (" years old").
                            Age       = $"{face.Attributes.Age:0} years old",
                        };
                        detectedFaces.Add(detectedFace);
                    }

                    // Convert detection result into UI binding object for rendering
                    foreach (var face in CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                    {
                        facesRect.Add(face);
                    }

                    // update emotions
                    if (analyzeEmotion)
                    {
                        detectedFaces = await UpdateEmotions(selectedFile, detectedFaces);

                        // Copy emotion scores onto the matching UI rectangle by FaceId.
                        foreach (var faceRect in facesRect)
                        {
                            foreach (var detectedFace in detectedFaces.Where(detectedFace => faceRect.FaceId == detectedFace.FaceId))
                            {
                                faceRect.Scores = detectedFace.Scores;
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Deliberate swallow: log and fall through so callers still get
                    // the collections rather than an exception.
                    Debug.WriteLine(ex.ToString());
                }
                var returnData = new Tuple <ObservableCollection <Face>, ObservableCollection <Face> >(detectedFaces, facesRect);
                return(returnData);
            }
        }
        /// <summary>
        /// Projects each recognized OCR region into a rectangle scaled for the UI,
        /// reusing the Face binding object; ScoredEmotion carries the line count.
        /// </summary>
        public ObservableCollection<Face> OcrGetFramesRectanglesForRecognizedText(OcrResults ocrResults, string selectedFile)
        {
            var renderInfo = GetImageInfoForRendering(selectedFile);
            var result = new ObservableCollection<Face>();

            foreach (var region in ocrResults.Regions)
            {
                var scaled = CalculateTextRectangleForRendering(
                    new Face(), MaxImageSize, renderInfo,
                    region.Rectangle.Left, region.Rectangle.Top,
                    region.Rectangle.Height, region.Rectangle.Width);
                scaled.ScoredEmotion = $"lines {region.Lines.Length}";
                result.Add(scaled);
            }

            return result;
        }
        /// <summary>
        /// Detects faces in the given image file via the Face API and returns a
        /// tuple of (detected faces with attributes, UI-scaled face rectangles for
        /// rendering). Best-effort: any exception is logged and the partially
        /// filled (possibly empty) collections are still returned.
        /// </summary>
        /// <param name="selectedFile">Path of the image file to analyze.</param>
        /// <param name="analyzeEmotion">When true, emotion scores are fetched and copied onto the matching rectangles.</param>
        public async Task<Tuple<ObservableCollection<Face>, ObservableCollection<Face>>> StartFaceDetection(string selectedFile, bool analyzeEmotion)
        {
            var detectedFaces = new ObservableCollection<Face>();
            var facesRect = new ObservableCollection<Face>();

            Debug.WriteLine("Request: Detecting {0}", selectedFile);

            using (var fileStreamImage = File.OpenRead(selectedFile))
            {
                try
                {
                    var client = new FaceServiceClient(_subscriptionKeyFace);
                    var faces = await client.DetectAsync(fileStreamImage, false, true, true);
                    Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
                    var imageInfo = GetImageInfoForRendering(selectedFile);
                    Debug.WriteLine("{0} face(s) has been detected", faces.Length);

                    foreach (var face in faces)
                    {
                        var detectedFace = new Face()
                        {
                            ImagePath = selectedFile,
                            Left = face.FaceRectangle.Left,
                            Top = face.FaceRectangle.Top,
                            Width = face.FaceRectangle.Width,
                            Height = face.FaceRectangle.Height,
                            FaceId = face.FaceId,
                            Gender = face.Attributes.Gender,
                            // "0" (not "#") so an age rounding to zero still renders a
                            // digit instead of the empty string (" years old").
                            Age = $"{face.Attributes.Age:0} years old",
                        };
                        detectedFaces.Add(detectedFace);
                    }

                    // Convert detection result into UI binding object for rendering
                    foreach (var face in CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                    {
                        facesRect.Add(face);
                    }

                    // update emotions
                    if (analyzeEmotion)
                    {
                        detectedFaces = await UpdateEmotions(selectedFile, detectedFaces);

                        // Copy emotion scores onto the matching UI rectangle by FaceId.
                        foreach (var faceRect in facesRect)
                        {
                            foreach (var detectedFace in detectedFaces.Where(detectedFace => faceRect.FaceId == detectedFace.FaceId))
                            {
                                faceRect.Scores = detectedFace.Scores;
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Deliberate swallow: log and fall through so callers still get
                    // the collections rather than an exception.
                    Debug.WriteLine(ex.ToString());
                }
                var returnData = new Tuple<ObservableCollection<Face>, ObservableCollection<Face>>(detectedFaces, facesRect);
                return returnData;
            }
        }
        /// <summary>
        /// Maps a text rectangle from original-image coordinates into the UI space
        /// produced by letterboxing the image into a maxSize x maxSize square.
        /// Mutates and returns the supplied <paramref name="face"/> binding object.
        /// </summary>
        private Face CalculateTextRectangleForRendering(Face face, int maxSize, Tuple<int, int> imageInfo, int left, int top, int height, int width)
        {
            var sourceWidth = imageInfo.Item1;
            var sourceHeight = imageInfo.Item2;
            float aspect = (float)sourceWidth / sourceHeight;

            int fittedWidth;
            int fittedHeight;
            if (aspect > 1.0)
            {
                // Landscape: width fills the square, height shrinks proportionally.
                fittedWidth = maxSize;
                fittedHeight = (int)(maxSize / aspect);
            }
            else
            {
                // Portrait or square: height fills the square, width shrinks.
                fittedHeight = maxSize;
                fittedWidth = (int)(aspect * fittedHeight);
            }

            // Center the fitted image inside the square, then scale the rectangle.
            int xShift = (maxSize - fittedWidth) / 2;
            int yShift = (maxSize - fittedHeight) / 2;
            float scaling = (float)fittedWidth / sourceWidth;

            face.Left = (int)((left * scaling) + xShift);
            face.Top = (int)((top * scaling) + yShift);
            face.Height = (int)(height * scaling);
            face.Width = (int)(width * scaling);

            return face;
        }