Example #1
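Draws the tracking results over a video frame: facial landmarks, the face rectangle and, depending on the enabled flags, age/gender, emotions, head angles, image quality, 2D liveness and face cuts, plus the on-screen flag checkboxes; the annotated frame is shown in an OpenCV window.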
    public void work(OpenCvSharp.Mat frame)
    {
        // send the frame to the capturer (_tracker)
        // pbio::CVRawImage cvri_frame;
        byte[] data = new byte[frame.Total() * frame.Type().Channels];
        Marshal.Copy(frame.DataStart, data, 0, (int)data.Length);
        RawImage         ri_frame = new RawImage(frame.Width, frame.Height, RawImage.Format.FORMAT_BGR, data);
        List <RawSample> samples  = _tracker.capture(ri_frame);

        // clone the frame for drawing on it
        OpenCvSharp.Mat draw_image = frame.Clone();
        // handle each face on the frame separately
        for (int i = 0; i < samples.Count; ++i)
        {
            RawSample sample = samples[i];

            // get a face rectangle
            RawSample.Rectangle rectangle = sample.getRectangle();

            // set a point to place information for this face
            OpenCvSharp.Point2f text_point = new OpenCvSharp.Point2f(
                rectangle.x + rectangle.width + 3,
                rectangle.y + 10);

            const float text_line_height = 22;

            // draw facial points
            // red color for all points
            // green for left eye
            // yellow for right eye
            // (yes, there is a mess with left and right eyes in face_sdk api,
            // but if we fix it now we will lose compatibility with previous versions)
            if (_flag_points)
            {
                List <Point> points = sample.getLandmarks();

                for (int j = -2; j < points.Count; ++j)
                {
                    Point p =
                        j == -2 ?
                        sample.getLeftEye() :
                        j == -1 ?
                        sample.getRightEye() :
                        points[j];

                    OpenCvSharp.Scalar color =
                        j == -2 ?
                        new OpenCvSharp.Scalar(50, 255, 50) :
                        j == -1 ?
                        new OpenCvSharp.Scalar(50, 255, 255) :
                        new OpenCvSharp.Scalar(50, 50, 255);


                    OpenCvSharp.Cv2.Circle(
                        draw_image,
                        new OpenCvSharp.Point2f(p.x, p.y),
                        j < 0 ? 4 : 2,
                        color,
                        -1,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw rectangle
            if (_flag_positions)
            {
                OpenCvSharp.Cv2.Rectangle(
                    draw_image,
                    new OpenCvSharp.Rect(
                        rectangle.x,
                        rectangle.y,
                        rectangle.width,
                        rectangle.height),
                    new OpenCvSharp.Scalar(50, 50, 255),
                    2,
                    OpenCvSharp.LineTypes.AntiAlias);
            }

            // draw age and gender
            if (_flag_age_gender)
            {
                AgeGenderEstimator.AgeGender age_gender = _age_geder_estimator.estimateAgeGender(sample);

                string age_text = "age: ";

                switch (age_gender.age)
                {
                case AgeGenderEstimator.Age.AGE_KID: age_text += "kid    "; break;

                case AgeGenderEstimator.Age.AGE_YOUNG: age_text += "young  "; break;

                case AgeGenderEstimator.Age.AGE_ADULT: age_text += "adult  "; break;

                case AgeGenderEstimator.Age.AGE_SENIOR: age_text += "senior "; break;
                }

                age_text += string.Format("years: {0:G3}", age_gender.age_years);

                puttext(
                    draw_image,
                    age_text,
                    text_point);
                text_point.Y += text_line_height;

                puttext(
                    draw_image,
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_FEMALE ? "gender: female" :
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_MALE ? "gender: male" : "?",
                    text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw emotions
            if (_flag_emotions)
            {
                List <EmotionsEstimator.EmotionConfidence> emotions =
                    _emotions_estimator.estimateEmotions(sample);

                for (int j = 0; j < emotions.Count; ++j)
                {
                    EmotionsEstimator.Emotion emotion = emotions[j].emotion;
                    float confidence = emotions[j].confidence;

                    OpenCvSharp.Cv2.Rectangle(
                        draw_image,
                        new OpenCvSharp.Rect(
                            (int)text_point.X,
                            (int)text_point.Y - (int)text_line_height / 2,
                            (int)(100 * confidence),
                            (int)text_line_height),
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? new OpenCvSharp.Scalar(255, 0, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? new OpenCvSharp.Scalar(0, 255, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? new OpenCvSharp.Scalar(0, 0, 255) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? new OpenCvSharp.Scalar(0, 255, 255) :
                        new OpenCvSharp.Scalar(0, 0, 0),
                        -1);

                    puttext(
                        draw_image,
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? "neutral" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? "happy" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? "angry" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? "surprise" : "?",
                        text_point + new OpenCvSharp.Point2f(100, 0));

                    text_point.Y += text_line_height;

                    text_point.Y += text_line_height / 3;
                }
            }


            // draw angles text
            if (_flag_angles)
            {
                string yaw, pitch, roll;
                // round each angle to one decimal place before printing
                yaw   = string.Format("yaw: {0}", 0.1f * (int)(10 * sample.getAngles().yaw + 0.5f));
                pitch = string.Format("pitch: {0}", 0.1f * (int)(10 * sample.getAngles().pitch + 0.5f));
                roll  = string.Format("roll: {0}", 0.1f * (int)(10 * sample.getAngles().roll + 0.5f));

                puttext(draw_image, yaw, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, pitch, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, roll, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw angles vectors
            if (_flag_angles_vectors)
            {
                RawSample.Angles angles = sample.getAngles();

                float cos_a = (float)Math.Cos(angles.yaw * OpenCvSharp.Cv2.PI / 180);
                float sin_a = (float)Math.Sin(angles.yaw * OpenCvSharp.Cv2.PI / 180);

                float cos_b = (float)Math.Cos(angles.pitch * OpenCvSharp.Cv2.PI / 180);
                float sin_b = (float)Math.Sin(angles.pitch * OpenCvSharp.Cv2.PI / 180);

                float cos_c = (float)Math.Cos(angles.roll * OpenCvSharp.Cv2.PI / 180);
                float sin_c = (float)Math.Sin(angles.roll * OpenCvSharp.Cv2.PI / 180);

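                // approximate unit direction vectors of the rotated head axes (x, y, z), used to draw the pose arrows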
                OpenCvSharp.Point3f[] xyz =
                {
                    new OpenCvSharp.Point3f(cos_a * cos_c,        -sin_c, -sin_a),
                    new OpenCvSharp.Point3f(sin_c,         cos_b * cos_c, -sin_b),
                    new OpenCvSharp.Point3f(sin_a,         sin_b,         cos_a * cos_b)
                };

                OpenCvSharp.Point2f center = new OpenCvSharp.Point2f(
                    (sample.getLeftEye().x + sample.getRightEye().x) * 0.5f,
                    (sample.getLeftEye().y + sample.getRightEye().y) * 0.5f);

                float length = (rectangle.width + rectangle.height) * 0.3f;

                for (int c = 0; c < 3; ++c)
                {
                    OpenCvSharp.Cv2.Line(
                        draw_image,
                        center,
                        center + new OpenCvSharp.Point2f(xyz[c].X, -xyz[c].Y) * length,
                        c == 0 ? new OpenCvSharp.Scalar(50, 255, 255) :
                        c == 1 ? new OpenCvSharp.Scalar(50, 255, 50) :
                        c == 2 ? new OpenCvSharp.Scalar(50, 50, 255) : new OpenCvSharp.Scalar(),
                        2,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw quality text
            if (_flag_quality)
            {
                QualityEstimator.Quality q =
                    _quality_estimator.estimateQuality(sample);

                string lighting, noise, sharpness, flare;

                lighting = "lighting: " + q.lighting.ToString();
                puttext(draw_image, lighting, text_point);
                text_point.Y += text_line_height;

                noise = "noise: " + q.noise.ToString();
                puttext(draw_image, noise, text_point);
                text_point.Y += text_line_height;

                sharpness = "sharpness: " + q.sharpness.ToString();
                puttext(draw_image, sharpness, text_point);
                text_point.Y += text_line_height;

                flare = "flare: " + q.flare.ToString();
                puttext(draw_image, flare, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw liveness text
            if (_flag_liveness)
            {
                Liveness2DEstimator.Liveness liveness_2d_result = _liveness_2d_estimator.estimateLiveness(sample);

                puttext(
                    draw_image,
                    "liveness: " + (
                        liveness_2d_result == Liveness2DEstimator.Liveness.REAL ? "real" :
                        liveness_2d_result == Liveness2DEstimator.Liveness.FAKE ? "fake" :
                        liveness_2d_result == Liveness2DEstimator.Liveness.NOT_ENOUGH_DATA ? "not enough data" : "??"),
                    text_point);

                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face quality
            if (_flag_face_quality)
            {
                float quality = _face_quality_estimator.estimateQuality(sample);

                string ss = "face quality: " + quality.ToString();
                puttext(draw_image, ss, text_point);
                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face cuts
            for (int cut_i = 0; cut_i < 3; ++cut_i)
            {
                if ((cut_i == 0 && !_flag_cutting_base) ||
                    (cut_i == 1 && !_flag_cutting_full) ||
                    (cut_i == 2 && !_flag_cutting_token))
                {
                    continue;
                }

                puttext(
                    draw_image,
                    cut_i == 0 ? "base cut:" :
                    cut_i == 1 ? "full cut:" :
                    cut_i == 2 ? "token cut:" : "?? cut",
                    text_point);
                text_point.Y += text_line_height / 2;

                MemoryStream obuf = new MemoryStream();
                sample.cutFaceImage(
                    obuf,
                    RawSample.ImageFormat.IMAGE_FORMAT_BMP,
                    cut_i == 0 ? RawSample.FaceCutType.FACE_CUT_BASE :
                    cut_i == 1 ? RawSample.FaceCutType.FACE_CUT_FULL_FRONTAL :
                    cut_i == 2 ? RawSample.FaceCutType.FACE_CUT_TOKEN_FRONTAL :
                    (RawSample.FaceCutType) 999);

                byte[] sbuf = obuf.ToArray();

                // const OpenCvSharp.Mat_<uchar> cvbuf(1, sbuf.length(), (uchar*) sbuf.c_str());

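                // decode the BMP buffer into a Mat and shrink it to 30% before overlaying it on the frame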
                OpenCvSharp.Mat img = OpenCvSharp.Cv2.ImDecode(sbuf, OpenCvSharp.ImreadModes.Unchanged);

                OpenCvSharp.Cv2.Resize(img, img, OpenCvSharp.Size.Zero, 0.3, 0.3);


                int img_rect_x = (int)Math.Max(0, -text_point.X);
                int img_rect_y = (int)Math.Max(0, -text_point.Y);

                int img_rect_width = (int)Math.Min(
                    img.Cols - img_rect_x,
                    draw_image.Cols - Math.Max(0, text_point.X));

                int img_rect_height = (int)Math.Min(
                    img.Rows - img_rect_y,
                    draw_image.Rows - Math.Max(0, text_point.Y));

                if (img_rect_width <= 0 || img_rect_height <= 0)
                {
                    continue;
                }

                OpenCvSharp.Rect img_rect = new OpenCvSharp.Rect(img_rect_x, img_rect_y, img_rect_width, img_rect_height);

                img[img_rect].CopyTo(
                    draw_image[new OpenCvSharp.Rect(
                                   (int)Math.Max(0, text_point.X),
                                   (int)Math.Max(0, text_point.Y),
                                   img_rect.Width,
                                   img_rect.Height)]);

                text_point.Y += text_line_height / 2;
                text_point.Y += img.Rows;


                text_point.Y += text_line_height / 3;
            }
        }
        // draw checkboxes
        for (int i = 0; i < flags_count; ++i)
        {
            OpenCvSharp.Rect rect  = flag_rect(i);
            OpenCvSharp.Rect rect2 = new OpenCvSharp.Rect(rect.X + 5, rect.Y + 5, rect.Width - 10, rect.Height - 10);

            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(255), -1);
            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(0), 2, OpenCvSharp.LineTypes.AntiAlias);

            if (get_flag(i))
            {
                OpenCvSharp.Cv2.Rectangle(draw_image, rect2, OpenCvSharp.Scalar.All(0), -1, OpenCvSharp.LineTypes.AntiAlias);
            }

            puttext(
                draw_image,
                flag_name(i),
                new OpenCvSharp.Point2f(rect.X + rect.Width + 3, rect.Y + rect.Height - 3));
        }


        // show the image with the drawn information
        OpenCvSharp.Cv2.ImShow("demo", draw_image);

        // register callback on mouse events
        OpenCvSharp.Cv2.SetMouseCallback("demo", (OpenCvSharp.CvMouseCallback)onMouse);
    }
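
A minimal sketch (not part of the sample) of a frame loop that could drive work() above, assuming a hypothetical Worker class that owns _tracker, the estimators and the flag fields used in the snippet; the Worker name and camera index 0 are illustrative only:

    static void RunDemoLoop(Worker worker)
    {
        // grab frames from the default camera and hand each one to work()
        using (OpenCvSharp.VideoCapture capture = new OpenCvSharp.VideoCapture(0))
        using (OpenCvSharp.Mat frame = new OpenCvSharp.Mat())
        {
            for (;;)
            {
                if (!capture.Read(frame) || frame.Empty())
                    break;                                  // stream ended or camera lost

                worker.work(frame);                         // detect faces, draw, ImShow("demo", ...)

                if (OpenCvSharp.Cv2.WaitKey(10) == 27)      // Esc exits
                    break;
            }
        }
    }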
Example #2
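Composes an output image from the color frame, a false-colored depth map and a thumbnail stripe, then draws a circle, age/gender, emotions and the active liveness status for every tracked face, and fills the stripe with thumbnails from the frame and from the database.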
    OpenCvSharp.Mat Draw(
        DrawingData data,
        Database database)
    {
        // drawing parameters of the stripe with the current faces
        int stripe_width = thumbnail_size * 2 + draw_border * 2;

        int stripe_height =
            (thumbnail_size + draw_border) * max_count_in_stripe - draw_border;

        // image to draw the frame and the stripe on
        OpenCvSharp.Mat result = new OpenCvSharp.Mat(
            Math.Max(data.frame.Rows + data.depth.Rows, stripe_height),
            Math.Max(data.frame.Cols, data.depth.Cols) + stripe_width,
            OpenCvSharp.MatType.CV_8UC3,
            OpenCvSharp.Scalar.All(0));

        // copy the frame
        int frame_y_offset = (result.Rows - data.frame.Rows - data.depth.Rows) / 2;
        int depth_y_offset = frame_y_offset + data.frame.Rows;

        data.frame.CopyTo(
            result[
                new OpenCvSharp.Rect(
                    0,
                    frame_y_offset,
                    data.frame.Cols,
                    data.frame.Rows)]);

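        // false-color the 16-bit depth map into the BGR result below the color frame
        // (zero depth stays black, near pixels look reddish, far pixels saturate towards cyan)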
        for (int i = 0; i < data.depth.Rows; ++i)
        {
            unsafe
            {
                UInt16 *src_ptr = (UInt16 *)data.depth.Ptr(i);
                byte *  dst_ptr = (byte *)result.Ptr(depth_y_offset + i);

                for (int j = 0; j < data.depth.Cols; ++j, ++src_ptr, dst_ptr += 3)
                {
                    UInt16 depth = *src_ptr;
                    byte * bgr   = dst_ptr;

                    bgr[0] = bgr[1] = bgr[2] = 0;

                    if (depth == 0)
                    {
                        continue;
                    }

                    if (depth < 256)
                    {
                        bgr[2] = (byte)(256 - depth);
                    }

                    if (depth < 256 * 2)
                    {
                        bgr[1] = (byte)(depth / 2);
                    }
                    else
                    {
                        bgr[1] = (byte)(255);
                    }

                    if (depth < 256 * 4)
                    {
                        bgr[0] = (byte)(depth / 4);
                    }
                    else
                    {
                        bgr[0] = (byte)(255);
                    }
                }
            }
        }

        // clone faces data for random access
        List <Tuple <int, FaceData> > faces = new List <Tuple <int, FaceData> >();

        foreach (KeyValuePair <int, FaceData> pair in data.faces)
        {
            faces.Add(new Tuple <int, FaceData>(pair.Key, pair.Value));
        }



        // make order with recognized first
        // pair<match_database_index, index in faces>
        List <Tuple <int, int> > order = new List <Tuple <int, int> >();

        for (int i = 0; i < faces.Count; ++i)
        {
            order.Add(new Tuple <int, int>(faces[i].Item2.match_database_index, i));
        }

        order.Sort();

        // draw alive faces
        for (int order_i = 0; order_i < order.Count; ++order_i)
        {
            FaceData face = faces[order[order_i].Item2].Item2;

            // draw circles for faces that appeared in this frame
            if (face.frame_id == data.frame_id && !face.lost)
            {
                // get points
                List <Point> points      = face.sample.getLandmarks();
                List <Point> iris_points = face.sample.getIrisLandmarks();

                // compute center
                OpenCvSharp.Point2f center = new OpenCvSharp.Point2f(0, 0);

                for (int j = 0; j < points.Count; ++j)
                {
                    center.X += points[j].x;
                    center.Y += points[j].y;
                }
                center *= 1.0 / points.Count;


                // compute radius
                double radius = 0;

                for (int j = 0; j < points.Count; ++j)
                {
                    radius += OpenCvSharp.Point2f.Distance(new OpenCvSharp.Point2f(points[j].x, points[j].y), center);
                }

                radius *= 1.5 / points.Count;

                radius *= 2;

                RawSample.Rectangle rectangle = face.sample.getRectangle();

                // set a point to place information for this face
                OpenCvSharp.Point2f text_point = new OpenCvSharp.Point2f(
                    rectangle.x + rectangle.width + 3,
                    rectangle.y + 10);
                const float text_line_height = 22;

                // choose color
                OpenCvSharp.Scalar color =
                    face.match_database_index < 0 ?
                    new OpenCvSharp.Scalar(0, 0, 255) :                              // red color for unrecognized
                    new OpenCvSharp.Scalar(0, 255, 0);                               // green color for recognized

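                // draw the circle on both the color frame (k == 0) and, if a depth map is present, the depth view (k == 1)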
                for (int k = 0; k < (data.depth.Empty() ? 1 : 2); ++k)
                {
                    int y_offset = (k == 0 ? frame_y_offset : depth_y_offset);

                    // draw a dashed circle for weak face samples
                    if (face.weak)
                    {
                        // draw n short arcs spread around the full circle
                        int n = 8;
                        for (int i = 0; i < n; ++i)
                        {
                            OpenCvSharp.Cv2.Ellipse(
                                result,
                                (OpenCvSharp.Point)(center + new OpenCvSharp.Point2f(0f, y_offset)),
                                new OpenCvSharp.Size(radius, radius),
                                (face.frame_id * 2) % 360,
                                (i * 2 + 0) * 180 / n,
                                (i * 2 + 1) * 180 / n,
                                color,
                                3,
                                OpenCvSharp.LineTypes.AntiAlias);
                        }
                    }
                    else
                    {
                        OpenCvSharp.Cv2.Circle(
                            result,
                            (OpenCvSharp.Point)(center + new OpenCvSharp.Point2f(0f, y_offset)),
                            (int)radius,
                            color,
                            3,
                            OpenCvSharp.LineTypes.AntiAlias);
                    }
                }
                if (face.age_gender_set)
                {
                    // draw age and gender
                    AgeGenderEstimator.AgeGender age_gender = face.age_gender;

                    string age_text = "age: ";

                    switch (age_gender.age)
                    {
                    case AgeGenderEstimator.Age.AGE_KID: age_text += "kid    "; break;

                    case AgeGenderEstimator.Age.AGE_YOUNG: age_text += "young  "; break;

                    case AgeGenderEstimator.Age.AGE_ADULT: age_text += "adult  "; break;

                    case AgeGenderEstimator.Age.AGE_SENIOR: age_text += "senior "; break;
                    }

                    age_text += string.Format("years: {0:G3}", age_gender.age_years);

                    puttext(
                        result,
                        age_text,
                        text_point);

                    text_point.Y += text_line_height;

                    puttext(
                        result,
                        age_gender.gender == AgeGenderEstimator.Gender.GENDER_FEMALE ? "gender: female" :
                        age_gender.gender == AgeGenderEstimator.Gender.GENDER_MALE ? "gender: male" : "?",
                        text_point);
                    text_point.Y += text_line_height;

                    text_point.Y += text_line_height / 3;

                    // Console.WriteLine(face.age_gender.age_years);
                }
                if (face.emotion_set)
                {
                    // draw emotions
                    List <EmotionsEstimator.EmotionConfidence> emotions = face.emotion_confidence;

                    for (int j = 0; j < emotions.Count; ++j)
                    {
                        EmotionsEstimator.Emotion emotion = emotions[j].emotion;
                        float confidence = emotions[j].confidence;

                        OpenCvSharp.Cv2.Rectangle(
                            result,
                            new OpenCvSharp.Rect(
                                (int)text_point.X,
                                (int)text_point.Y - (int)text_line_height / 2,
                                (int)(100 * confidence),
                                (int)text_line_height),
                            emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ? new OpenCvSharp.Scalar(255, 0, 0) :
                            emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ? new OpenCvSharp.Scalar(0, 255, 0) :
                            emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ? new OpenCvSharp.Scalar(0, 0, 255) :
                            emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? new OpenCvSharp.Scalar(0, 255, 255) :
                            new OpenCvSharp.Scalar(0, 0, 0), -1);

                        puttext(
                            result,
                            emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ? "neutral" :
                            emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ? "happy" :
                            emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ? "angry" :
                            emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? "surprise" : "?",
                            text_point + new OpenCvSharp.Point2f(100, 0));

                        text_point.Y += text_line_height;

                        text_point.Y += text_line_height / 3;
                    }
                }
                if (face.active_liveness_status.verdict != ActiveLiveness.Liveness.NOT_COMPUTED)
                {
                    string active_liveness = "";

                    if (face.active_liveness_status.verdict == ActiveLiveness.Liveness.WAITING_FACE_ALIGN)
                    {
                        active_liveness += face.active_liveness_status.verdict.ToString();
                    }
                    else
                    {
                        active_liveness += face.active_liveness_status.check_type.ToString();
                        active_liveness += ": ";
                        active_liveness += face.active_liveness_status.verdict.ToString();
                        active_liveness += " " + face.active_liveness_status.progress_level.ToString();
                    }
                    puttext(result, active_liveness, text_point);

                    text_point.Y += text_line_height;

                    text_point.Y += text_line_height / 3;
                }

//				// draw iris points
//				for(int j = 0; j < iris_points.Count; ++j)
//				{
//					int ms = 1;
//					OpenCvSharp.Scalar icolor = new OpenCvSharp.Scalar(50, 255, 50);
//					int oi = j - 20 * Convert.ToInt32(j >= 20);
//					Point pt1 = iris_points[j];
//					Point pt2 = iris_points[(oi < 19 ? j : j - 15) + 1];
//					OpenCvSharp.Point2f cv_pt1 = new OpenCvSharp.Point2f(pt1.x, frame_y_offset + pt1.y);
//					OpenCvSharp.Point2f cv_pt2 = new OpenCvSharp.Point2f(pt2.x, frame_y_offset + pt2.y);
//
//					if(oi < 5)
//					{
//						icolor = new OpenCvSharp.Scalar(0, 165, 255);
//						if(oi == 0)
//						{
//							double iradius = Math.Sqrt(Math.Pow(pt1.x - pt2.x, 2) + Math.Pow(pt1.y - pt2.y, 2));
//							OpenCvSharp.Cv2.Circle(
//								result,
//								cv_pt1,
//								(int) iradius,
//								icolor,
//								ms,
//								OpenCvSharp.LineTypes.AntiAlias);
//						}
//					}else
//					{
//						OpenCvSharp.Cv2.Line(
//							result,
//							cv_pt1,
//							cv_pt2,
//							icolor,
//							ms,
//							OpenCvSharp.LineTypes.AntiAlias);
//					}
//
//					OpenCvSharp.Cv2.Circle(
//						result,
//						cv_pt1,
//						ms,
//						color,
//						-1,
//						OpenCvSharp.LineTypes.AntiAlias);
//				}
            }

            // draw this face's entry in the thumbnail stripe
            if (order_i < max_count_in_stripe)
            {
                // place for thumbnail from the frame
                OpenCvSharp.Rect sample_rect = new OpenCvSharp.Rect(
                    data.frame.Cols + draw_border,
                    (thumbnail_size + draw_border) * order_i,
                    thumbnail_size,
                    thumbnail_size);

                // place for thumbnail from the database
                OpenCvSharp.Rect match_rect = new OpenCvSharp.Rect(
                    data.frame.Cols + draw_border * 2 + thumbnail_size,
                    (thumbnail_size + draw_border) * order_i,
                    thumbnail_size,
                    thumbnail_size);

                // make thumbnail from the frame
                Database.makeThumbnail(face.sample).CopyTo(result[sample_rect]);

                // fade if image is lost
                if (face.draw_multilier < 1)
                {
                    result[sample_rect] *= face.draw_multilier;
                }

                if (face.match_database_index < 0)
                {
                    // gray color for unrecognized persons
                    result[match_rect].SetTo(128 * face.draw_multilier);
                }
                else
                {
                    // thumbnail from the database for recognized persons
                    database.thumbnails[face.match_database_index].CopyTo(result[match_rect]);

                    // fade if image is lost
                    if (face.draw_multilier < 1)
                    {
                        result[match_rect] *= face.draw_multilier;
                    }
                }
            }
        }

        return(result);
    }
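
The depth colorization loop above can be read as a small standalone helper; the sketch below (the name ColorizeDepth is illustrative, not part of the sample) applies the same mapping to a CV_16UC1 depth Mat using the safe At/Set accessors instead of raw pointers:

    static OpenCvSharp.Mat ColorizeDepth(OpenCvSharp.Mat depth)
    {
        // output image, same size as the depth map, initially black
        OpenCvSharp.Mat bgr = new OpenCvSharp.Mat(
            depth.Rows, depth.Cols, OpenCvSharp.MatType.CV_8UC3, OpenCvSharp.Scalar.All(0));

        for (int i = 0; i < depth.Rows; ++i)
        {
            for (int j = 0; j < depth.Cols; ++j)
            {
                ushort d = depth.At<ushort>(i, j);

                if (d == 0)
                    continue;                               // no measurement - keep black

                byte b = (byte)(d < 256 * 4 ? d / 4 : 255); // blue ramps over values 0..1023
                byte g = (byte)(d < 256 * 2 ? d / 2 : 255); // green ramps over values 0..511
                byte r = (byte)(d < 256 ? 256 - d : 0);     // red only for the closest values

                bgr.Set(i, j, new OpenCvSharp.Vec3b(b, g, r));
            }
        }

        return bgr;
    }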