public void SaveNewDetectedFace(string name, Image<Gray, byte> detectedFace)
        {
            // Persists a freshly detected grayscale face image as a new User row
            // (nickname + BMP-encoded pixel data) in the face-recognition database.
            if (detectedFace == null)
            {
                throw new ArgumentNullException(nameof(detectedFace));
            }
            if (string.IsNullOrWhiteSpace(name))
            {
                throw new ArgumentException("A non-empty nickname is required.", nameof(name));
            }

            using (var context = new FaceRecognitionContext())
            {
                // Serialize the face image to an in-memory BMP so it can be stored
                // in the PixelData byte[] column.
                byte[] pixelData;
                using (var ms = new MemoryStream())
                {
                    detectedFace.Bitmap.Save(ms, ImageFormat.Bmp);
                    pixelData = ms.ToArray();
                }

                var recognizedFace = new RecognizedFace
                {
                    Height    = detectedFace.Height,
                    Width     = detectedFace.Width,
                    PixelData = pixelData
                };
                var user = new User
                {
                    Face     = recognizedFace,
                    NickName = name
                };

                context.Users.Add(user);
                context.SaveChanges();
            }
        }
    private void SimulateFacialRecognitionData()
    {
        // Fabricates one face-detection result — centered in the frame, at unit
        // depth, 100x100 px — and appends it to the shared recognizedFaces list
        // so downstream code can be exercised without a live camera feed.
        var simulatedFace = new RecognizedFace
        {
            position2D = new Vector2(0.5f, 0.5f),
            position3D = new Vector3(0, 0, 1.0f),
            size       = new Vector2(100, 100)
        };

        recognizedFaces.Add(simulatedFace);
    }
        private Stream CropFaceImage(RecognizedFace face, Image originalImage)
        {
            // Crops the detected face rectangle out of the full frame and returns
            // it as a JPEG-encoded stream, rewound so the caller can read from
            // position 0. The stream comes from the pooled stream manager; the
            // caller owns and must dispose it.
            var width  = face.Position.X2 - face.Position.X1;
            var height = face.Position.Y2 - face.Position.Y1;

            var stream = _memoryStreamManager.GetStream("face_crop_stream");
            try
            {
                // Clone() allocates a new Image — dispose it, or every crop leaks
                // a full decoded copy of the frame.
                using (var cropped = originalImage.Clone(
                    ctx => ctx.Crop(new Rectangle(face.Position.X1, face.Position.Y1, width, height))))
                {
                    cropped.SaveAsJpeg(stream);
                }

                // SaveAsJpeg leaves the position at EOF; rewind for the consumer.
                stream.Position = 0;
                return stream;
            }
            catch
            {
                // Don't leak the pooled stream if cropping or encoding fails.
                stream.Dispose();
                throw;
            }
        }
// Exemplo n.º 4 ("Example No. 4" — scraped snippet separator; the stray "0" was a vote count from the aggregation site)
    void _threadProcess()
    {
        // Background worker loop. First call lazily builds the native OpenCV
        // detector (and, if enabled in config, the recognizer); then it blocks on
        // _signalEvent until a new frame (_img) is published, runs detection and
        // optional recognition on it, and repeats until _isDone is set.
        if (!_inited)
        {
            _inited = true;
            if (_config.EnableRecognizer)
            {
                _recognizer = new UnityOpenCVPersonRecognizerAPI(_config.trainingPath);
            }
            _detector = new UnityOpenCVFaceDetectorAPI(_config.cascadePath, _config.scaler, _config.minNeighbors, _config.minSize, _config.maxSize);
        }

        while (!_isDone)
        {
            _signalEvent.WaitOne();
            _signalEvent.Reset();
            if (!_newImage)
            {
                continue;
            }

            _detecting = true;
            try
            {
                _detector.BindImage(_img);
                _faces = _detector.DetectFaces();
                TriggerFaceDetected(_detector.DetectedFaces);
                Debug.Log("Face Detected : " + _faces.Count.ToString());

                if (_recognizer != null)
                {
                    _recognizedFaces.Clear();
                    for (int i = 0; i < _detector.DetectedFaces.Count; ++i)
                    {
                        _recognizer.BindImage(_img);
                        RecognizedFace f = new RecognizedFace();
                        f.ID   = _recognizer.RecognizeFace(_detector.DetectedFaces[i], ref f.distance);
                        f.rect = _detector.DetectedFaces[i];
                        _recognizedFaces.Add(f);
                        //if(conf<60)
                        Debug.Log("Face Detected with label: " + f.ID.ToString() + " with distance:" + f.distance.ToString());
                    }
                    TriggerFaceRecognized(_recognizedFaces);
                }
            }
            catch (Exception ex)
            {
                // Was an empty catch: keep the worker alive on per-frame failures,
                // but surface the error instead of swallowing it silently.
                Debug.LogException(ex);
            }
            finally
            {
                // Always clear the busy / new-frame flags, even if processing threw.
                _detecting = false;
                _newImage  = false;
            }
        }
    }