public void NoFacePictureTest()
        {
            // Verifies the detector reports zero faces for an image that contains none.
            string imagePath = Path.Combine(imageFolder, "Taiwan.png");

            byte[] imageBytes;

            // Dispose both the Bitmap and the MemoryStream (the original leaked the stream).
            using (Bitmap image = new Bitmap(imagePath))
            using (MemoryStream memoryStream = new MemoryStream())
            {
                // Re-encode the PNG as JPEG before handing the bytes to the detector.
                image.Save(memoryStream, ImageFormat.Jpeg);
                imageBytes = memoryStream.ToArray();
            }

            var faces = faceDetector.Detect(imageBytes);

            // Assert.AreEqual takes (expected, actual): the expected count of 0 goes first,
            // so a failure message reads correctly (the original had the arguments reversed).
            Assert.AreEqual(0, faces.ToArray().Length);
        }
Beispiel #2
0
        private void ProcessFrame(object sender, EventArgs e)
        {
            // Per-frame handler: detect faces, classify mask/no-mask, draw boxes, display.
            if (capture == null)
            {
                return;
            }

            using (Mat frame = new Mat())
            {
                capture.Retrieve(frame, 0);

                IEnumerable <IReadOnlyDictionary <string, int> > faces = faceDetection.Detect(frame);

                // Rectangle's constructor is (x, y, width, height); x is the LEFT edge and
                // y the TOP edge, so "left" must be passed first — the original had
                // new Rectangle(face["top"], face["left"], ...) with the axes swapped.
                IEnumerable <Rectangle> facesRectangle = faces.Select(face => new Rectangle(face["left"], face["top"], face["width"], face["height"]));

                foreach (Rectangle faceRectangle in facesRectangle)
                {
                    // Dispose the cropped face Mat so native memory is not leaked each frame.
                    using (Mat face = FaceDetector.GetFaceImage(frame, faceRectangle))
                    {
                        bool maskDetected = faceMaskDetector.Detect(BitmapExtension.ToBitmap(face));

                        // Green box = wearing a mask, yellow box = no mask.
                        if (maskDetected)
                        {
                            CvInvoke.Rectangle(frame, faceRectangle, new Bgr(Color.Green).MCvScalar, 2);
                        }
                        else
                        {
                            CvInvoke.Rectangle(frame, faceRectangle, new Bgr(Color.Yellow).MCvScalar, 2);
                        }
                    }
                }

                imageBox1.Image = frame.ToImage <Bgr, byte>();
            }
        }
        public override SparseArray Detect(Frame frame)
        {
            try
            {
                // Copy the grayscale buffer up front — the camera pipeline may recycle
                // the frame's buffer before the background task below runs.
                var _framebuff = frame.GrayscaleImageData.Duplicate();

                var _frametimestamp = frame.GetMetadata().TimestampMillis;

                var detected = _detector.Detect(frame);

                // Read metadata NOW rather than inside the closure: by the time the task
                // runs, `frame` may no longer be valid.
                var metadata = frame.GetMetadata();
                int width    = metadata.Width;
                int height   = metadata.Height;

                // Queue the frame data for compression; the task list lets callers
                // observe/await completion elsewhere.
                _compressDataTasks.Add(Task.Run(() => Utils.AddConvertByteBuffer(ref _allFrameData, _framebuff, _frametimestamp, detected, width, height, _compressquality)));

                return(detected);
            }
            catch (Exception)
            {
                // Best-effort fallback: if the buffering/bookkeeping path failed, still
                // return a detection result so the caller keeps working.
                // NOTE(review): this runs the detector a second time; consider moving
                // detection outside the try block so it only happens once.
                return(_detector.Detect(frame));
            }
        }
Beispiel #4
0
        private void ProcessCameraPicture()
        {
            // Decodes the captured photo, runs face detection, draws boxes/landmarks
            // onto a copy of the bitmap and reports per-face probabilities as text.
            Bitmap bitmap = DecodeBitmapUri(this, imageUri);

            if (detector.IsOperational && bitmap != null)
            {
                editedBitmap = Bitmap.CreateBitmap(bitmap.Width, bitmap.Height, bitmap.GetConfig());
                float scale = Resources.DisplayMetrics.Density;
                Paint paint = new Paint(PaintFlags.AntiAlias);
                paint.Color    = Color.Green;
                paint.TextSize = (int)(16 * scale);
                paint.SetShadowLayer(1f, 0f, 1f, Color.White);
                paint.SetStyle(Paint.Style.Stroke);
                paint.StrokeWidth = 6f;
                Canvas canvas = new Canvas(editedBitmap);
                canvas.DrawBitmap(bitmap, 0, 0, paint);
                Frame       frame = new Frame.Builder().SetBitmap(editedBitmap).Build();
                SparseArray faces = detector.Detect(frame);
                string      text  = "";
                for (int index = 0; index < faces.Size(); ++index)
                {
                    // Guard the cast — ValueAt returns Java.Lang.Object, and the original
                    // `as Face` with no null check could throw NullReferenceException.
                    if (!(faces.ValueAt(index) is Face face))
                    {
                        continue;
                    }
                    // Draw the bounding box around this face.
                    canvas.DrawRect(
                        face.Position.X,
                        face.Position.Y,
                        face.Position.X + face.Width,
                        face.Position.Y + face.Height, paint);
                    text += "Cara " + (index + 1) + "\n";
                    text += "Probilidad de una sonrisa:" + " " + face.IsSmilingProbability * 100 + "\n";
                    text += "Probilidad que el ojo izquierdo este abierto : " + " " + face.IsLeftEyeOpenProbability * 100 + "\n";
                    text += "Probilidad que el ojo derecho este abierto: " + " " + face.IsRightEyeOpenProbability * 100 + "\n";
                    foreach (Landmark landmark in face.Landmarks)
                    {
                        // Draw a circle on each facial landmark (eyes, nose, mouth, ...).
                        int cx = (int)(landmark.Position.X);
                        int cy = (int)(landmark.Position.Y);
                        canvas.DrawCircle(cx, cy, 8, paint);
                    }
                }
                if (faces.Size() == 0)
                {
                    // BUG FIX: the original set this failure message and then immediately
                    // overwrote it with the (empty) detail text below; keep it instead.
                    txtDescription.Text = "Scaneo fallido";
                }
                else
                {
                    _imageView.SetImageBitmap(editedBitmap);
                    text += "\n\n" + "Numero de caras detectadas: " + " " + faces.Size().ToString() + "\n\n";
                    txtDescription.Text = text;
                }
            }
            else
            {
                txtDescription.Text = "No se pudo configurar el detector!";
            }
        }
Beispiel #5
0
        static void Main(string[] args)
        {
            // Load the input image; dispose it when done (the original leaked it).
            using (Bitmap bmp = new Bitmap("./test.bmp"))
            {
                // Haar-cascade face detector; 2.0 is the detection scale factor.
                FaceDetector fd     = new FaceDetector("./haarcascade_frontalface_alt2.xml");
                var          result = fd.Detect(bmp, 2.0);

                // Make sure the output directory exists — Bitmap.Save throws
                // DirectoryNotFoundException otherwise.
                System.IO.Directory.CreateDirectory("./result");

                // Save each detected face crop as its own bitmap.
                for (int i = 0; i < result.Count; i++)
                {
                    result[i].Save("./result/result_" + i + ".bmp");
                }
            }
        }
        // On "Capture" click: save images from both cameras, run the external
        // authentication command on the selected capture, and append the result to a CSV log.
        private void BtnCapture_Click(object sender, EventArgs e)
        {
            // Timestamp used in the capture file names; equivalent to the original's
            // per-component concatenation ("0000" year + "00" month + ... = yyyyMMddHHmmss).
            dt        = DateTime.Now;
            captureDt = dt.ToString("yyyyMMddHHmmss");

            Cv2.ImWrite("C:\\record\\capture\\" + captureDt + "_0.bmp", GetCapture(0));
            Cv2.ImWrite("C:\\record\\capture\\" + captureDt + "_1.bmp", GetCapture(1));

            PicPicture1.ImageLocation = "C:\\record\\capture\\" + captureDt + "_0.bmp";
            PicPicture2.ImageLocation = "C:\\record\\capture\\" + captureDt + "_1.bmp";

            // Show the capture time on screen.
            LblCapture.Text = dt + " に撮影しました。";

            // Load the capture from the currently selected camera for face detection.
            // NOTE(review): bmp is not disposed here, matching the original; confirm the
            // detector does not hold a reference before adding a using block.
            Bitmap bmp = new Bitmap("C:\\record\\capture\\" + captureDt + "_" + cmbCamera.SelectedIndex + ".bmp");

            FaceDetectTools.FaceDetector fd = new FaceDetector("./haarcascade_frontalface_alt2.xml");

            var result = fd.Detect(bmp, 2.0);

            output = "";
            try {
                // Save each detected face crop.
                for (int i = 0; i < result.Count; i++)
                {
                    result[i].Save("./result/result_" + i + ".bmp");
                }

                // Run the external authentication command and capture its stdout.
                // Process is IDisposable — the original never disposed it.
                using (Process p = new Process())
                {
                    string wkFileName  = string.Format(Properties.Settings.Default.FormatAuthFileName, Properties.Settings.Default.PathExe);
                    string wkArguments = string.Format(Properties.Settings.Default.FormatAuthArguments, Properties.Settings.Default.PathImg);
                    p.StartInfo.FileName               = wkFileName;
                    p.StartInfo.Arguments              = wkArguments;
                    p.StartInfo.CreateNoWindow         = true;   // do not open a console window
                    p.StartInfo.UseShellExecute        = false;  // required for output redirection
                    p.StartInfo.RedirectStandardOutput = true;
                    p.Start();

                    output          = p.StandardOutput.ReadToEnd(); // blocks until the process closes stdout
                    LblMessage.Text = output;
                }
            }
            catch {
                // No face was detected (or the auth command failed) — record that instead.
                LblMessage.Text = "顔が検出されませんでした";
                output          = "顔が検出できませんでした";
            }

            // Append a CSV record: capture time, file name, auth output.
            // using ensures the writer is flushed and closed even if WriteLine throws.
            Encoding enc = Encoding.GetEncoding("Shift_JIS");
            using (StreamWriter writer = new StreamWriter("C:\\record\\Record.csv", true, enc))
            {
                writer.WriteLine(Convert.ToString(dt) + "," + captureDt + "_" + cmbCamera.SelectedIndex + ".bmp" + "," + output);
            }
        }
        private async Task FaceDetectAsync(FaceDetector detector, MediaCapture capture, CancellationToken token)
        {
            // Grabs one preview frame, runs face detection on its raw BGRA8 bytes,
            // then draws the results (and the fps) on the UI thread.
            //
            // CancellationToken is a struct and can never be null, so only the
            // reference-type arguments are validated — the original's "token == null"
            // check always evaluated to false.
            if (detector == null)
            {
                throw new ArgumentNullException(nameof(detector));
            }
            if (capture == null)
            {
                throw new ArgumentNullException(nameof(capture));
            }

            var previewProperties = capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

            int width  = (int)previewProperties.Width;
            int height = (int)previewProperties.Height;

            FaceDetectResult result = null;

            var stopWatch = Stopwatch.StartNew();

            // using guarantees the frame/bitmap are disposed even on exception or
            // cancellation — the original only disposed videoFrame on the success path.
            using (var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, width, height))
                using (var currentFrame = await capture.GetPreviewFrameAsync(videoFrame))
                    using (var softwareBitmap = currentFrame.SoftwareBitmap)
                    {
                        if (softwareBitmap == null)
                        {
                            return;
                        }

                        // SoftwareBitmap -> byte array (BGRA8 = 4 bytes per pixel)
                        var buffer = new byte[4 * width * height];
                        softwareBitmap.CopyToBuffer(buffer.AsBuffer());

                        token.ThrowIfCancellationRequested();

                        // Detect face
                        result = detector.Detect(buffer, width, height);

                        token.ThrowIfCancellationRequested();
                    }

            stopWatch.Stop();

            // Draw result to Canvas (must run on the UI thread)
            await Dispatcher.RunAsync(CoreDispatcherPriority.High, () =>
            {
                FaceDrawCanvas.Width  = width;
                FaceDrawCanvas.Height = height;

                // Draw fps — clamp to >= 1 ms so a sub-millisecond detection cannot
                // cause a DivideByZeroException.
                FpsTextBlock.Text = (1000 / Math.Max(1, stopWatch.ElapsedMilliseconds)) + "fps";

                // Draw face rectangles and landmark points
                if (_faceDrawer != null && result != null)
                {
                    List <FaceDetectData> faces = new List <FaceDetectData>();
                    foreach (var f in result.Faces)
                    {
                        FaceDetectData data = new FaceDetectData();
                        data.FaceRect       = f.FaceRect;

                        foreach (var p in f.FacePoints)
                        {
                            data.FaceLandmarks.Add(p);
                        }

                        faces.Add(data);
                    }

                    _faceDrawer.DrawToCanvas(FaceDrawCanvas, faces);
                }
            });
        }