Example #1
    void MatchFoundCallback(
        VideoWorker.MatchFoundCallbackData data,
        Object userdata)
    {
        MAssert.Check(data != null);
        MAssert.Check(userdata != null);

        int       stream_id = (int)data.stream_id;
        int       frame_id  = (int)data.frame_id;
        RawSample sample    = data.sample;
        float     quality   = (float)data.quality;
        Template  templ     = data.templ;

        VideoWorker.SearchResult[] search_results = data.search_results;

        // userdata is supposed to be a reference to this Worker,
        // so cast it back
        Worker worker = (Worker)(userdata);

        // we care only about the worker._stream_id source
        // so just ignore any others
        if (stream_id != worker._stream_id)
        {
            return;
        }

        MAssert.Check(sample != null);
        MAssert.Check(templ != null);
        MAssert.Check(search_results.Length > 0);

        // just print the distances to the console
        Console.WriteLine("stream {0} match track {1} : ", stream_id, sample.getID());

        for (int i = 0; i < search_results.Length; ++i)
        {
            ulong element_id = search_results[i].element_id;

            if (element_id == (ulong)VideoWorker.MATCH_NOT_FOUND_ID)
            {
                MAssert.Check(i == 0);
                Console.WriteLine("  {0}: MATCH NOT FOUND", i);
            }
            else
            {
                MAssert.Check(element_id < (ulong)worker._database.names.Count);
                Console.WriteLine("  {0}:  with '{1}' distance: {2}",
                                  i,
                                  worker._database.names[(int)element_id],
                                  search_results[i].match_result.distance);
            }
        }
        Console.WriteLine("");

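        // search results are expected to come ordered by distance,
        // so element 0 is the best match (or MATCH_NOT_FOUND_ID)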
        ulong match_element_id = search_results[0].element_id;

        if (match_element_id != (ulong)VideoWorker.MATCH_NOT_FOUND_ID)
        {
            MAssert.Check((int)match_element_id < worker._database.thumbnails.Count);

            // set the match info in the worker._drawing_data.faces
            worker._drawing_data_mutex.WaitOne();

            FaceData face = worker._drawing_data.faces[sample.getID()];

            MAssert.Check(!face.lost);

            face.match_database_index = (int)match_element_id;

            worker._drawing_data_mutex.ReleaseMutex();
        }
    }
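For context, the callback above is normally subscribed on the VideoWorker with the Worker instance passed as userdata, which is what justifies the cast at the top of the callback. A minimal sketch, assuming a subscription method along the lines of addMatchFoundCallbackU and a matching remove call (the exact names depend on the Face SDK version in use):

    // hedged sketch, not part of the original sample: subscribing the callback.
    // video_worker and worker are assumed to exist already; the method name
    // addMatchFoundCallbackU may differ between Face SDK versions.
    int match_found_callback_id = video_worker.addMatchFoundCallbackU(
        MatchFoundCallback,   // the callback defined above
        worker);              // recovered from userdata inside the callback

    // ... run the recognition pipeline ...

    // unsubscribe when done
    video_worker.removeMatchFoundCallback(match_found_callback_id);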
Example #2
    public void work(OpenCvSharp.Mat frame)
    {
        // send the frame to the capturer (_tracker)
        byte[] data = new byte[frame.Total() * frame.Type().Channels];
        Marshal.Copy(frame.DataStart, data, 0, (int)data.Length);
        RawImage         ri_frame = new RawImage(frame.Width, frame.Height, RawImage.Format.FORMAT_BGR, data);
        List <RawSample> samples  = _tracker.capture(ri_frame);

        // clone the frame for drawing on it
        OpenCvSharp.Mat draw_image = frame.Clone();
        // handle each face on the frame separately
        for (int i = 0; i < samples.Count; ++i)
        {
            RawSample sample = samples[i];

            // get a face rectangle
            RawSample.Rectangle rectangle = sample.getRectangle();

            // set a point to place information for this face
            OpenCvSharp.Point2f text_point = new OpenCvSharp.Point2f(
                rectangle.x + rectangle.width + 3,
                rectangle.y + 10);

            const float text_line_height = 22;

            // draw facial points:
            // red for all regular points,
            // green for the left eye,
            // yellow for the right eye
            // (yes, the left and right eyes are mixed up in the face_sdk api,
            // but fixing it now would break compatibility with previous versions)
            if (_flag_points)
            {
                List <Point> points = sample.getLandmarks();

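                // j == -2 draws the left eye, j == -1 the right eye,
                // j >= 0 the regular landmark points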
                for (int j = -2; j < points.Count; ++j)
                {
                    Point p =
                        j == -2 ?
                        sample.getLeftEye() :
                        j == -1 ?
                        sample.getRightEye() :
                        points[j];

                    OpenCvSharp.Scalar color =
                        j == -2 ?
                        new OpenCvSharp.Scalar(50, 255, 50) :
                        j == -1 ?
                        new OpenCvSharp.Scalar(50, 255, 255) :
                        new OpenCvSharp.Scalar(50, 50, 255);


                    OpenCvSharp.Cv2.Circle(
                        draw_image,
                        new OpenCvSharp.Point2f(p.x, p.y),
                        j < 0 ? 4 : 2,
                        color,
                        -1,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw rectangle
            if (_flag_positions)
            {
                OpenCvSharp.Cv2.Rectangle(
                    draw_image,
                    new OpenCvSharp.Rect(
                        rectangle.x,
                        rectangle.y,
                        rectangle.width,
                        rectangle.height),
                    new OpenCvSharp.Scalar(50, 50, 255),
                    2,
                    OpenCvSharp.LineTypes.AntiAlias);
            }

            // draw age and gender
            if (_flag_age_gender)
            {
                AgeGenderEstimator.AgeGender age_gender = _age_gender_estimator.estimateAgeGender(sample);

                string age_text = "age: ";

                switch (age_gender.age)
                {
                    case AgeGenderEstimator.Age.AGE_KID:    age_text += "kid    "; break;
                    case AgeGenderEstimator.Age.AGE_YOUNG:  age_text += "young  "; break;
                    case AgeGenderEstimator.Age.AGE_ADULT:  age_text += "adult  "; break;
                    case AgeGenderEstimator.Age.AGE_SENIOR: age_text += "senior "; break;
                }

                age_text += string.Format("years: {0:G3}", age_gender.age_years);

                puttext(
                    draw_image,
                    age_text,
                    text_point);
                text_point.Y += text_line_height;

                puttext(
                    draw_image,
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_FEMALE ? "gender: female" :
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_MALE ? "gender: male" : "?",
                    text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw emotions
            if (_flag_emotions)
            {
                List <EmotionsEstimator.EmotionConfidence> emotions =
                    _emotions_estimator.estimateEmotions(sample);

                for (int j = 0; j < emotions.Count; ++j)
                {
                    EmotionsEstimator.Emotion emotion = emotions[j].emotion;
                    float confidence = emotions[j].confidence;

                    OpenCvSharp.Cv2.Rectangle(
                        draw_image,
                        new OpenCvSharp.Rect(
                            (int)text_point.X,
                            (int)text_point.Y - (int)text_line_height / 2,
                            (int)(100 * confidence),
                            (int)text_line_height),
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? new OpenCvSharp.Scalar(255, 0, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? new OpenCvSharp.Scalar(0, 255, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? new OpenCvSharp.Scalar(0, 0, 255) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? new OpenCvSharp.Scalar(0, 255, 255) :
                        new OpenCvSharp.Scalar(0, 0, 0),
                        -1);

                    puttext(
                        draw_image,
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? "neutral" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? "happy" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? "angry" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? "surprise" : "?",
                        text_point + new OpenCvSharp.Point2f(100, 0));

                    text_point.Y += text_line_height;

                    text_point.Y += text_line_height / 3;
                }
            }


            // draw angles text
            if (_flag_angles)
            {
                string yaw, pitch, roll;
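                // round each angle to one decimal place for display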
                yaw   = string.Format("yaw: {0}",   0.1f * (int)(10 * sample.getAngles().yaw   + 0.5f));
                pitch = string.Format("pitch: {0}", 0.1f * (int)(10 * sample.getAngles().pitch + 0.5f));
                roll  = string.Format("roll: {0}",  0.1f * (int)(10 * sample.getAngles().roll  + 0.5f));

                puttext(draw_image, yaw, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, pitch, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, roll, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw angles vectors
            if (_flag_angles_vectors)
            {
                RawSample.Angles angles = sample.getAngles();

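                // precompute sines and cosines of the head rotation angles
                // (angles come in degrees, hence the conversion to radians)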
                float cos_a = (float)Math.Cos(angles.yaw * OpenCvSharp.Cv2.PI / 180);
                float sin_a = (float)Math.Sin(angles.yaw * OpenCvSharp.Cv2.PI / 180);

                float cos_b = (float)Math.Cos(angles.pitch * OpenCvSharp.Cv2.PI / 180);
                float sin_b = (float)Math.Sin(angles.pitch * OpenCvSharp.Cv2.PI / 180);

                float cos_c = (float)Math.Cos(angles.roll * OpenCvSharp.Cv2.PI / 180);
                float sin_c = (float)Math.Sin(angles.roll * OpenCvSharp.Cv2.PI / 180);

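                // approximate axes of the head coordinate system,
                // i.e. rows of the head rotation matrix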
                OpenCvSharp.Point3f[] xyz =
                {
                    new OpenCvSharp.Point3f(cos_a * cos_c,        -sin_c, -sin_a),
                    new OpenCvSharp.Point3f(sin_c,         cos_b * cos_c, -sin_b),
                    new OpenCvSharp.Point3f(sin_a,         sin_b,         cos_a * cos_b)
                };

                OpenCvSharp.Point2f center = new OpenCvSharp.Point2f(
                    (sample.getLeftEye().x + sample.getRightEye().x) * 0.5f,
                    (sample.getLeftEye().y + sample.getRightEye().y) * 0.5f);

                float length = (rectangle.width + rectangle.height) * 0.3f;

                for (int c = 0; c < 3; ++c)
                {
                    OpenCvSharp.Cv2.Line(
                        draw_image,
                        center,
                        center + new OpenCvSharp.Point2f(xyz[c].X, -xyz[c].Y) * length,
                        c == 0 ? new OpenCvSharp.Scalar(50, 255, 255) :
                        c == 1 ? new OpenCvSharp.Scalar(50, 255, 50) :
                        c == 2 ? new OpenCvSharp.Scalar(50, 50, 255) : new OpenCvSharp.Scalar(),
                        2,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw quality text
            if (_flag_quality)
            {
                QualityEstimator.Quality q =
                    _quality_estimator.estimateQuality(sample);

                string lighting, noise, sharpness, flare;

                lighting = "lighting: " + q.lighting.ToString();
                puttext(draw_image, lighting, text_point);
                text_point.Y += text_line_height;

                noise = "noise: " + q.noise.ToString();
                puttext(draw_image, noise, text_point);
                text_point.Y += text_line_height;

                sharpness = "sharpness: " + q.sharpness.ToString();
                puttext(draw_image, sharpness, text_point);
                text_point.Y += text_line_height;

                flare = "flare: " + q.flare.ToString();
                puttext(draw_image, flare, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw liveness text
            if (_flag_liveness)
            {
                // get or create the liveness estimator that works with this face
                int id = sample.getID();
                if (!id2le.ContainsKey(id))
                {
                    id2le[id] = _service.createLivenessEstimator();
                }

                LivenessEstimator le = id2le[id];

                // add information to the estimator
                le.addSample(sample);

                // get result
                LivenessEstimator.Liveness liveness = le.estimateLiveness();

                puttext(
                    draw_image,
                    "liveness: " + (
                        liveness == LivenessEstimator.Liveness.REAL ? "real" :
                        liveness == LivenessEstimator.Liveness.FAKE ? "fake" :
                        liveness == LivenessEstimator.Liveness.NOT_ENOUGH_DATA ? "not enough data" : "??"),
                    text_point);

                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face quality
            if (_flag_face_quality)
            {
                float quality = _face_quality_estimator.estimateQuality(sample);

                string ss = "face quality: " + quality.ToString();
                puttext(draw_image, ss, text_point);
                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face cuts
            for (int cut_i = 0; cut_i < 3; ++cut_i)
            {
                if ((cut_i == 0 && !_flag_cutting_base) ||
                    (cut_i == 1 && !_flag_cutting_full) ||
                    (cut_i == 2 && !_flag_cutting_token))
                {
                    continue;
                }

                puttext(
                    draw_image,
                    cut_i == 0 ? "base cut:" :
                    cut_i == 1 ? "full cut:" :
                    cut_i == 2 ? "token cut:" : "?? cut",
                    text_point);
                text_point.Y += text_line_height / 2;

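                // encode the selected face cut into an in-memory BMP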
                MemoryStream obuf = new MemoryStream();
                sample.cutFaceImage(
                    obuf,
                    RawSample.ImageFormat.IMAGE_FORMAT_BMP,
                    cut_i == 0 ? RawSample.FaceCutType.FACE_CUT_BASE :
                    cut_i == 1 ? RawSample.FaceCutType.FACE_CUT_FULL_FRONTAL :
                    cut_i == 2 ? RawSample.FaceCutType.FACE_CUT_TOKEN_FRONTAL :
                    (RawSample.FaceCutType) 999);

                byte[] sbuf = obuf.ToArray();


                OpenCvSharp.Mat img = OpenCvSharp.Cv2.ImDecode(sbuf, OpenCvSharp.ImreadModes.Unchanged);

                OpenCvSharp.Cv2.Resize(img, img, OpenCvSharp.Size.Zero, 0.3, 0.3);


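                // clip the cut image so the pasted region stays inside
                // both img and draw_image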
                int img_rect_x = (int)Math.Max(0, -text_point.X);
                int img_rect_y = (int)Math.Max(0, -text_point.Y);

                int img_rect_width = (int)Math.Min(
                    img.Cols - img_rect_x,
                    draw_image.Cols - Math.Max(0, text_point.X));

                int img_rect_height = (int)Math.Min(
                    img.Rows - img_rect_y,
                    draw_image.Rows - Math.Max(0, text_point.Y));

                if (img_rect_width <= 0 || img_rect_height <= 0)
                {
                    continue;
                }

                OpenCvSharp.Rect img_rect = new OpenCvSharp.Rect(img_rect_x, img_rect_y, img_rect_width, img_rect_height);

                img[img_rect].CopyTo(
                    draw_image[new OpenCvSharp.Rect(
                                   (int)Math.Max(0, text_point.X),
                                   (int)Math.Max(0, text_point.Y),
                                   img_rect.Width,
                                   img_rect.Height)]);

                text_point.Y += text_line_height / 2;
                text_point.Y += img.Rows;


                text_point.Y += text_line_height / 3;
            }
        }
        // draw checkboxes
        for (int i = 0; i < flags_count; ++i)
        {
            OpenCvSharp.Rect rect  = flag_rect(i);
            OpenCvSharp.Rect rect2 = new OpenCvSharp.Rect(rect.X + 5, rect.Y + 5, rect.Width - 10, rect.Height - 10);

            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(255), -1);
            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(0), 2, OpenCvSharp.LineTypes.AntiAlias);

            if (get_flag(i))
            {
                OpenCvSharp.Cv2.Rectangle(draw_image, rect2, OpenCvSharp.Scalar.All(0), -1, OpenCvSharp.LineTypes.AntiAlias);
            }

            puttext(
                draw_image,
                flag_name(i),
                new OpenCvSharp.Point2f(rect.X + rect.Width + 3, rect.Y + rect.Height - 3));
        }


        // show the image with the drawn information
        OpenCvSharp.Cv2.ImShow("demo", draw_image);

        // register a callback for mouse events
        OpenCvSharp.Cv2.SetMouseCallback("demo", (OpenCvSharp.CvMouseCallback)onMouse);
    }
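For reference, work() is usually driven by a plain OpenCvSharp capture loop. A minimal sketch, assuming a worker object exposing the method above; the camera index and the Esc key handling are illustrative:

    // hedged sketch, not part of the original sample: feed camera frames to work()
    using (OpenCvSharp.VideoCapture capture = new OpenCvSharp.VideoCapture(0))
    using (OpenCvSharp.Mat frame = new OpenCvSharp.Mat())
    {
        for (;;)
        {
            if (!capture.Read(frame) || frame.Empty())
                break;

            // draws all overlays and shows the result in the "demo" window
            worker.work(frame);

            // 27 == Esc: stop the demo
            if (OpenCvSharp.Cv2.WaitKey(10) == 27)
                break;
        }
    }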