public void Evaluate(int SpreadMax)
        {
            if (this.FOutLayer[0] == null) { this.FOutLayer[0] = new DX11Resource<DX11Layer>(); }

            if (this.FEnabled[0])
            {
                this.FLayerIn.Sync();
            }

            if (rectangles == null || rectangles.Length != SpreadMax)
            {
                rectangles = new System.Drawing.Rectangle[SpreadMax];
            }

            for (int i = 0; i < SpreadMax; i++)
            {
                //truncate the float pin values to integer pixel coordinates
                int px = (int)FInPosition[i].X;
                int py = (int)FInPosition[i].Y;
                int sx = (int)FInSize[i].X;
                int sy = (int)FInSize[i].Y;

                rectangles[i] = new System.Drawing.Rectangle(px, py, sx, sy);
            }
        }
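
For context, Evaluate() relies on input pins and a rectangle buffer declared elsewhere in the node class. A minimal sketch of those declarations, assuming the usual VVVV plugin-interfaces pin attributes and SlimDX Vector2 pins (the attribute wiring and pin types are assumptions, not taken from the original):

        [Input("Position")]
        protected ISpread<Vector2> FInPosition;

        [Input("Size")]
        protected ISpread<Vector2> FInSize;

        [Input("Enabled", IsSingle = true)]
        protected ISpread<bool> FEnabled;

        [Input("Layer In")]
        protected Pin<DX11Resource<DX11Layer>> FLayerIn;

        [Output("Layer Out")]
        protected ISpread<DX11Resource<DX11Layer>> FOutLayer;

        //scissor rectangles, rebuilt whenever the spread count changes
        private System.Drawing.Rectangle[] rectangles = new System.Drawing.Rectangle[0];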
        private void CompositionTarget_Rendering(object sender, EventArgs e)
        {
            _status.Fill = _rd; //reset the status brush (presumably red) until a face is detected

            #region Recognition
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            gray = currentFrame.Convert<Gray, Byte>();

            var size = new System.Drawing.Size(20, 20);
            var window = new System.Drawing.Size(grabber.Width, grabber.Height);

            _rects = _faceClassifier.DetectMultiScale(gray, 1.2, 10, size, window);

            foreach (var f in _rects)
            {
                result = currentFrame.Copy(f).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                _status.Fill = new SolidColorBrush(Colors.Green);
                currentFrame.Draw(f, new Bgr(System.Drawing.Color.Red), 2);

                //run recognition only when a training set exists
                if (CommonData.TrainingImages.Count > 0)
                {
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer; note it is rebuilt for every frame and face,
                    //which is costly - cache it if the training set does not change
                    recognizer = new EigenObjectRecognizer(
                        CommonData.TrainingImages.ToArray(),
                        CommonData.Names.ToArray(),
                        3000, //eigen distance threshold
                        ref termCrit);

                    string name = recognizer.Recognize(result);
                    currentFrame.Draw(name, ref font, new System.Drawing.Point(f.X - 2, f.Y - 2),
                        new Bgr(System.Drawing.Color.LightGreen));
                }

            }

            //finally draw the annotated frame, once per tick
            _imgCamera.Source = ImageHelper.ToBitmapSource(currentFrame);
            #endregion
        }
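
The handler above assumes a webcam capture, a Haar cascade, and a font initialized before WPF's Rendering event fires. A minimal setup sketch, assuming the Emgu CV 2.x API and a standard cascade file name (both are assumptions):

        private Capture grabber;
        private CascadeClassifier _faceClassifier;
        private MCvFont font;

        private void InitRecognition()
        {
            grabber = new Capture(); //default webcam
            _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml"); //assumed file name
            font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);

            //run detection/recognition on every WPF render tick
            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }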
Example #3
        private async void CompositionTarget_Rendering(object sender, EventArgs e)
        {
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);            

            if (_rects == null || _rects.Length == 0)
            {
                gray = currentFrame.Convert<Gray, Byte>();

                var size = new System.Drawing.Size(20, 20);
                var window = new System.Drawing.Size(grabber.Width, grabber.Height);
                _rects = _faceClassifier.DetectMultiScale(gray, 1.2, 10, size, window);

                _vmodel.PersonRecognized = _rects.Length;

                if (_rects.Length > 0)
                {                    
                    /*
                    1) save the current rendered faces
                    2) upload the current frame to detect
                    3) verify with trained images
                    */
                    //file name built from the current timestamp (locale dependent)
                    string snapshot = CommonData.TARGET_SNAPSHOT_PAHT
                        + DateTime.Now.ToString().Replace('/', '_').Replace(':', '_')
                        + "." + CommonData.IMAGE_EXT;
                    currentFrame.Save(snapshot);

                    _progressRec.IsIndeterminate = true; //show busy state while the Face API calls run
                    _progressRec.IsEnabled = true;

                    //assumed to populate _detectedFaceFromAI used below (see sketch after the handler)
                    var fr = await UploadAndDetectFaces(snapshot);

                    //run detection over every trained image; dispose each stream promptly
                    foreach (var trainedFile in CommonData.ImageFiles)
                    {
                        using (var fileStream = File.OpenRead(trainedFile))
                        {
                            var f = await faceServiceClient.DetectAsync(fileStream);
                            _trainedFacesAI.AddRange(f);
                        }
                    }

                    //verify each detected face against every trained face;
                    //assumption: one face per training image, so i maps 1:1 onto ImageFiles/Names
                    foreach (var face in _detectedFaceFromAI)
                    {
                        int i = 0; //reset per detected face so the index stays in range
                        foreach (var secondFace in _trainedFacesAI)
                        {
                            var res = await faceServiceClient.VerifyAsync(face.FaceId, secondFace.FaceId);

                            PictureModel _model = new PictureModel();
                            _model.ImgSource = new WriteableBitmap(new BitmapImage(new Uri(CommonData.ImageFiles[i])));
                            _model.AIID = face.FaceId.ToString();

                            if (res.IsIdentical)
                            {
                                _model.Name = CommonData.Names[i];
                                _model.ID = i;
                                _model.IsVerified = true;
                                _model.Confidence = res.Confidence;
                            }
                            else
                            {
                                _model.Name = "Unkonwn";
                                _model.IsVerified = false;
                                _model.Confidence = res.Confidence;
                            }

                            //add only if this face is not already in the list
                            if (!_vmodel.Pictures.Any(x => x.AIID == _model.AIID))
                            {
                                _vmodel.Pictures.Add(_model);
                            }

                            i++;
                        }
                    }

                    //verification done: stop and hide the progress indicator
                    _progressRec.IsIndeterminate = false;
                    _progressRec.IsEnabled = false;
                }                
            }            

            _imgSource.Source = ImageHelper.ToBitmapSource(currentFrame);
        }
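
The handler awaits UploadAndDetectFaces() without showing its body. A plausible sketch, assuming the Microsoft.ProjectOxford.Face client used elsewhere in the snippet; the body and the side effect on _detectedFaceFromAI are assumptions:

        private async Task<Face[]> UploadAndDetectFaces(string imageFilePath)
        {
            using (var imageFileStream = File.OpenRead(imageFilePath))
            {
                //upload the snapshot and get back the faces found in it
                var faces = await faceServiceClient.DetectAsync(imageFileStream);

                //assumption: these detections feed the verification loop above
                _detectedFaceFromAI = faces.ToList();
                return faces;
            }
        }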