Example #1
        private void bnRecognizeB_Click(object sender, EventArgs e)
        {
            double[] task = RawSample.BitmapToInputs(bmpB, sqareSideLengthB, sqareSideLengthB);
            double[] result = RecognizeBtoA(task);

            ClearField(ref bmpA);
            digitizedA = result;

            labelRound.Text = string.Format("Stabilized in {0} rounds.", round);
            pictureBox1.Refresh();
        }
Example #2
        private void bnRecognize_Click(object sender, EventArgs e)
        {
            double[] task = RawSample.BitmapToInputs(bmp, nx, ny);

            double[] result = charNet.FeedForward(task);

            char[] ans = chars.ToCharArray();
            Array.Sort(result, ans);

            txtAnswer.Text = ans[ans.Length - 1].ToString();
            labelSure.Text = (result[ans.Length - 1] * 100).ToString("F5") + "%";
            //Debug.Print(result.ToString());
        }
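The trick in this handler is the two-array overload of Array.Sort: it sorts the scores ascending and reorders the label array in lockstep, so the best-scoring character ends up at the last index. A minimal standalone sketch of the idiom (the scores and labels below are made up for illustration):

        using System;

        class SortDemo
        {
            static void Main()
            {
                // hypothetical network outputs and the labels they belong to
                double[] scores = { 0.10, 0.75, 0.15 };
                char[]   labels = { 'A', 'B', 'C' };

                // sorts scores ascending and reorders labels in lockstep
                Array.Sort(scores, labels);

                // the best guess is now at the last index: B (75.00000%)
                Console.WriteLine("best: {0} ({1:F5}%)",
                                  labels[labels.Length - 1],
                                  scores[scores.Length - 1] * 100);
            }
        }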
Example #3
    void TrackingLostCallback(
        VideoWorker.TrackingLostCallbackData data,
        Object userdata)
    {
        MAssert.Check(data != null);
        MAssert.Check(userdata != null);

        int       stream_id             = (int)data.stream_id;
        int       track_id              = (int)data.track_id;
        int       first_frame_id        = (int)data.first_frame_id;
        int       last_frame_id         = (int)data.last_frame_id;
        float     best_quality          = (float)data.best_quality;
        int       best_quality_frame_id = (int)data.best_quality_frame_id;
        RawSample best_quality_sample   = data.best_quality_sample;
        Template  best_quality_templ    = data.best_quality_templ;

        // userdata is supposed to be a pointer to this Worker,
        // so cast it
        Worker worker = (Worker)(userdata);

        // we care only about the worker._stream_id source
        // so just ignore any others
        if (stream_id != worker._stream_id)
        {
            return;
        }

        // here we just remember the time when the face was lost,
        // but the real purpose of this callback is to store faces
        // for further matching or processing

        {
            worker._drawing_data_mutex.WaitOne();

            FaceData face = worker._drawing_data.faces[track_id];

            MAssert.Check(!face.lost);

            face.lost      = true;
            face.lost_time = new TimeSpan(DateTime.Now.Ticks);
            if (best_quality_sample != null)
            {
                face.sample = best_quality_sample;
            }

            worker._drawing_data_mutex.ReleaseMutex();
        }
    }
Example #4
        private void bnAdd_Click(object sender, EventArgs e)
        {
            // TODO check if sample is valid

            // create a new element for the samples list
            RawSample rs = new RawSample();

            rs.bitmap = (Bitmap)bmp.Clone();
            rs.index  = rawSamples.Count;
            rs.letter = comboBox1.Text[0];
            rawSamples.Add(rs);
            rawSamplesIndex = rawSamples.Count - 1;

            comboRefresh();
            labelCounter.Text = string.Format("{0} / {1}", rawSamplesIndex + 1, rawSamples.Count);

            ClearField();
        }
Example #5
    public static void Main(string[] args)
    {
        if (args.Contains("--debug"))
        {
            Console.WriteLine($"Ready for debugger to attach. Process ID: {Process.GetCurrentProcess().Id}");
            Console.Write("Press ENTER to Continue");
            Console.ReadLine();
            args = args.Except(new[] { "--debug" }).ToArray();
        }

        var app = new CommandLineApplication();

        app.FullName    = "SignalR Client Samples";
        app.Description = "Client Samples for SignalR";

        RawSample.Register(app);
        HubSample.Register(app);
        StreamingSample.Register(app);
        UploadSample.Register(app);

        app.Command("help", cmd =>
        {
            cmd.Description = "Get help for the application, or a specific command";

            var commandArgument = cmd.Argument("<COMMAND>", "The command to get help for");
            cmd.OnExecute(() =>
            {
                app.ShowHelp(commandArgument.Value);
                return(0);
            });
        });

        app.OnExecute(() =>
        {
            app.ShowHelp();
            return(0);
        });

        app.Execute(args);
    }
Example #6
        private void pictureBox1_Paint(object sender, PaintEventArgs e)
        {
            if (showDigitized)
            {
                SolidBrush br = new SolidBrush(Color.White); // Brushes.White is immutable, so its Color can't be changed
                int        dx = pictureBox1.Width / nx;
                int        dy = pictureBox1.Height / ny;

                if (digitized == null)
                {
                    digitized = RawSample.BitmapToInputs(bmp, nx, ny);
                }

                try
                {
                    e.Graphics.DrawImageUnscaled(bmp, 0, 0);
                }
                catch { }

                for (int i = 0; i < nx; i++)
                {
                    for (int j = 0; j < ny; j++)
                    {
                        int brightness = (int)(digitized[i * ny + j] * 254.0); // stride by ny so the index stays in range when nx != ny
                        br.Color = Color.FromArgb(brightness, 255, 255, 255); // brightness as alpha over white
                        e.Graphics.FillRectangle(br, i * dx, j * dy, dx, dy);
                    }
                }
            }
            else
            {
                try
                {
                    e.Graphics.DrawImageUnscaled(bmp, 0, 0);
                }
                catch { }
            }
        }
Example #7
        private void pbPaint(PictureBox sender, Bitmap bmp, int n, ref double[] digitized, Graphics g)
        {
            if (showDigitized)
            {
                SolidBrush br = new SolidBrush(Color.White); // Brushes.White is immutable, so its Color can't be changed
                int        dx = sender.Width / n;
                int        dy = sender.Height / n;

                if (digitized == null)
                {
                    digitized = RawSample.BitmapToInputs(bmp, n, n);
                }

                try
                {
                    g.DrawImageUnscaled(bmp, 0, 0);
                }
                catch { }

                for (int i = 0; i < n; i++)
                {
                    for (int j = 0; j < n; j++)
                    {
                        int brightness = (int)(digitized[i * n + j] * 125.0 + 125.0); // map [-1, 1] to [0, 250]
                        br.Color = Color.FromArgb(brightness, 255, 255, 255); // brightness as alpha over white
                        g.FillRectangle(br, i * dx, j * dy, dx, dy);
                    }
                }
            }
            else
            {
                try
                {
                    g.DrawImageUnscaled(bmp, 0, 0);
                }
                catch { }
            }
        }
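Note that the two paint routines appear to assume different network output ranges: Example #6 maps a value in [0, 1] to an alpha byte by scaling by 254, while this overload maps [-1, 1] through value * 125 + 125. A minimal sketch of the two mappings side by side (helper names are hypothetical):

        // map a [0, 1] activation to an alpha byte, as in Example #6
        static int AlphaFromUnit(double v)   { return (int)(v * 254.0); }

        // map a [-1, 1] activation to an alpha byte, as in Example #7
        static int AlphaFromSigned(double v) { return (int)(v * 125.0 + 125.0); }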
Example #8
        private void bnAdd_Click(object sender, EventArgs e)
        {
            // TODO check if sample is valid

            // create a new element for the samples list
            RawSample rs = new RawSample();

            rs.index   = rawSamples.Count;
            rs.bitmapA = (Bitmap)bmpA.Clone();
            rs.bitmapB = (Bitmap)bmpB.Clone();
            rawSamples.Add(rs);
            rawSamplesIndex = rawSamples.Count - 1;

            //comboRefresh();
            labelCounter.Text = string.Format("{0} / {1}", rawSamplesIndex + 1, rawSamples.Count);

            ClearField(ref bmpA);
            ClearField(ref bmpB);
            digitizedA = null;
            digitizedB = null;

            pictureBox1.Refresh();
            pictureBox2.Refresh();
        }
Example #9
    void MatchFoundCallback(
        VideoWorker.MatchFoundCallbackData data,
        Object userdata)
    {
        MAssert.Check(data != null);
        MAssert.Check(userdata != null);

        int       stream_id = (int)data.stream_id;
        int       frame_id  = (int)data.frame_id;
        RawSample sample    = data.sample;
        float     quality   = (float)data.quality;
        Template  templ     = data.templ;

        VideoWorker.SearchResult[] search_results = data.search_results;

        // userdata is supposed to be a pointer to this Worker,
        // so cast it
        Worker worker = (Worker)(userdata);

        // we care only about the worker._stream_id source
        // so just ignore any others
        if (stream_id != worker._stream_id)
        {
            return;
        }

        MAssert.Check(sample != null);
        MAssert.Check(templ != null);
        MAssert.Check(search_results.Length > 0);

        // just print distances in the console
        Console.WriteLine("stream {0} match track {1} : ", stream_id, sample.getID());

        for (int i = 0; i < search_results.Length; ++i)
        {
            ulong element_id = search_results[i].element_id;

            if (element_id == (ulong)VideoWorker.MATCH_NOT_FOUND_ID)
            {
                MAssert.Check(i == 0);
                Console.WriteLine("  {0}: MATCH NOT FOUND", i);
            }
            else
            {
                MAssert.Check(element_id < (UInt64)worker._database.names.Count);
                Console.WriteLine("  {0}:  with '{1}' distance: {2}",
                                  i,
                                  worker._database.names[(int)element_id],
                                  search_results[i].match_result.distance);
            }
        }
        Console.WriteLine("");

        ulong match_element_id = search_results[0].element_id;

        if (match_element_id != (ulong)VideoWorker.MATCH_NOT_FOUND_ID)
        {
            MAssert.Check((int)match_element_id < worker._database.thumbnails.Count);

            // set the match info in the worker._drawing_data.faces
            worker._drawing_data_mutex.WaitOne();

            FaceData face = worker._drawing_data.faces[sample.getID()];

            MAssert.Check(!face.lost);

            face.match_database_index = (int)match_element_id;

            worker._drawing_data_mutex.ReleaseMutex();
        }
    }
Example #10
    // create the database
    public Database(
        string databaseDirPath,
        Recognizer recognizer,
        Capturer capturer,
        float distanceThreshold)
    {
        vwElements = new List <VideoWorker.DatabaseElement>();
        samples    = new List <RawSample>();
        thumbnails = new List <OpenCvSharp.Mat>();
        names      = new List <string>();
        // check paths
        MAssert.Check(Directory.Exists(databaseDirPath), "database not found");

        // get directory content
        List <string> path_l1 = new List <string>(Directory.EnumerateDirectories(databaseDirPath));

        // check every element in that directory

        ulong element_id_counter = 0;

        for (int il1 = 0; il1 < path_l1.Count; ++il1)
        {
            // ignore files
            if (!Directory.Exists(path_l1[il1]))
            {
                continue;
            }
            // so path_l1[il1] is supposed to be the path to the person directory

            // get the files inside it
            List <string> path_l2 = new List <string>(Directory.EnumerateFiles(path_l1[il1]));
            string        name    = string.Empty;

            // search for the name.txt file

            for (int il2 = 0; il2 < path_l2.Count; ++il2)
            {
                if (Path.GetFileName(path_l2[il2]) == "name.txt")
                {
                    // read the file content into name

                    using (StreamReader sr = new StreamReader(path_l2[il2]))
                    {
                        name = sr.ReadToEnd();
                    }
                }
            }

            // try to open each file as an image
            for (int il2 = 0; il2 < path_l2.Count; ++il2)
            {
                if (Path.GetFileName(path_l2[il2]) == "name.txt")
                {
                    continue;
                }

                Console.WriteLine("processing '{0}' name: '{1}'", path_l2[il2], name);

                // read image with opencv

                OpenCvSharp.Mat readed_image = OpenCvSharp.Cv2.ImRead(path_l2[il2]);

                if (readed_image.Empty() || readed_image.Type() != OpenCvSharp.MatType.CV_8UC3)
                {
                    Console.WriteLine("\n\nWARNING: can't read image '{0}'\n\n", path_l2[il2]);
                    continue;
                }

                byte[] data = new byte[readed_image.Total() * readed_image.Type().Channels];
                Marshal.Copy(readed_image.DataStart, data, 0, (int)data.Length);
                RawImage image = new RawImage(readed_image.Width, readed_image.Height, RawImage.Format.FORMAT_BGR, data);

                // capture the face
                List <RawSample> capturedSamples = capturer.capture(image);

                if (capturedSamples.Count != 1)
                {
                    Console.WriteLine("\n\nWARNING: detected {0} faces on '{1}' image instead of one, image ignored \n\n", capturedSamples.Count, path_l2[il2]);
                    continue;
                }

                RawSample sample = capturedSamples[0];

                // make template
                Template templ = recognizer.processing(sample);

                // prepare data for VideoWorker
                VideoWorker.DatabaseElement vwElement = new VideoWorker.DatabaseElement(element_id_counter++, (ulong)il1, templ, distanceThreshold);

                vwElements.Add(vwElement);

                samples.Add(sample);

                thumbnails.Add(makeThumbnail(sample, name));

                names.Add(name);
            }
        }

        MAssert.Check((int)element_id_counter == vwElements.Count);
        MAssert.Check((int)element_id_counter == samples.Count);
        MAssert.Check((int)element_id_counter == thumbnails.Count);
        MAssert.Check((int)element_id_counter == names.Count);
    }
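The constructor implies a fixed on-disk layout: one subdirectory per person under databaseDirPath, each holding a name.txt with the display name plus one or more photos that each contain exactly one face. A sketch of the expected structure (directory and file names are illustrative):

        database/
            person1/
                name.txt     <- display name used in match output
                photo1.jpg   <- must contain exactly one face
                photo2.jpg
            person2/
                name.txt
                photo1.jpg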
Example #11
    // make a thumbnail of a sample
    public static OpenCvSharp.Mat makeThumbnail(
        RawSample sample,
        string name = "")
    {
        int thumbnail_size = Worker.thumbnail_size;

        // buffer for the cut image
        MemoryStream stream = new MemoryStream();

        // make the cut in BMP format
        // so we don't waste time encoding/decoding the image;
        // it's just copied a few times, which is cheap
        sample.cutFaceImage(
            stream,
            RawSample.ImageFormat.IMAGE_FORMAT_BMP,
            RawSample.FaceCutType.FACE_CUT_BASE);

        OpenCvSharp.Mat temp = OpenCvSharp.Mat.ImDecode(stream.ToArray(), OpenCvSharp.ImreadModes.Color);

        // so we got an image

        // check it
        MAssert.Check(!temp.Empty());
        MAssert.Check(temp.Type() == OpenCvSharp.MatType.CV_8UC3);


        // and resize to the thumbnail_size

        OpenCvSharp.Rect resRect;

        if (temp.Rows >= temp.Cols)
        {
            resRect.Height = thumbnail_size;
            resRect.Width  = temp.Cols * thumbnail_size / temp.Rows;
        }
        else
        {
            resRect.Width  = thumbnail_size;
            resRect.Height = temp.Rows * thumbnail_size / temp.Cols;
        }

        resRect.X = (thumbnail_size - resRect.Width) / 2;
        resRect.Y = (thumbnail_size - resRect.Height) / 2;

        OpenCvSharp.Mat result = new OpenCvSharp.Mat(
            thumbnail_size,
            thumbnail_size,
            OpenCvSharp.MatType.CV_8UC3,
            OpenCvSharp.Scalar.All(0));

        OpenCvSharp.Cv2.Resize(
            temp,
            result[resRect],
            resRect.Size);

        if (!string.IsNullOrEmpty(name))
        {
            result[new OpenCvSharp.Rect(0, result.Rows - 27, result.Cols, 27)] = result.RowRange(result.Rows - 27, result.Rows) * 0.5f;

            OpenCvSharp.Cv2.PutText(
                result,
                name,
                new OpenCvSharp.Point(0, result.Rows - 7),
                OpenCvSharp.HersheyFonts.HersheyDuplex,
                0.7,
                OpenCvSharp.Scalar.All(255),
                1,
                OpenCvSharp.LineTypes.AntiAlias);
        }

        return(result);
    }
Example #12
    public void work(OpenCvSharp.Mat frame)
    {
        // send the frame to the capturer (_tracker)
        byte[] data = new byte[frame.Total() * frame.Type().Channels];
        Marshal.Copy(frame.DataStart, data, 0, (int)data.Length);
        RawImage         ri_frame = new RawImage(frame.Width, frame.Height, RawImage.Format.FORMAT_BGR, data);
        List <RawSample> samples  = _tracker.capture(ri_frame);

        // clone the frame for drawing on it
        OpenCvSharp.Mat draw_image = frame.Clone();
        // handle each face on the frame separately
        for (int i = 0; i < samples.Count; ++i)
        {
            RawSample sample = samples[i];

            // get a face rectangle
            RawSample.Rectangle rectangle = sample.getRectangle();

            // set a point to place information for this face
            OpenCvSharp.Point2f text_point = new OpenCvSharp.Point2f(
                rectangle.x + rectangle.width + 3,
                rectangle.y + 10);

            const float text_line_height = 22;

            // draw facial points
            // red color for all points
            // green for left eye
            // yellow for right eye
            // (yes, there is a mess with left and right eyes in face_sdk api,
            // but if we fix it now we will lose compatibility with previous versions)
            if (_flag_points)
            {
                List <Point> points = sample.getLandmarks();

                for (int j = -2; j < points.Count; ++j)
                {
                    Point p =
                        j == -2 ?
                        sample.getLeftEye() :
                        j == -1 ?
                        sample.getRightEye() :
                        points[j];

                    OpenCvSharp.Scalar color =
                        j == -2 ?
                        new OpenCvSharp.Scalar(50, 255, 50) :
                        j == -1 ?
                        new OpenCvSharp.Scalar(50, 255, 255) :
                        new OpenCvSharp.Scalar(50, 50, 255);


                    OpenCvSharp.Cv2.Circle(
                        draw_image,
                        new OpenCvSharp.Point2f(p.x, p.y),
                        j < 0 ? 4 : 2,
                        color,
                        -1,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw rectangle
            if (_flag_positions)
            {
                OpenCvSharp.Cv2.Rectangle(
                    draw_image,
                    new OpenCvSharp.Rect(
                        rectangle.x,
                        rectangle.y,
                        rectangle.width,
                        rectangle.height),
                    new OpenCvSharp.Scalar(50, 50, 255),
                    2,
                    OpenCvSharp.LineTypes.AntiAlias);
            }

            // draw age and gender
            if (_flag_age_gender)
            {
                AgeGenderEstimator.AgeGender age_gender = _age_geder_estimator.estimateAgeGender(sample);

                string age_text = "age: ";

                switch (age_gender.age)
                {
                case AgeGenderEstimator.Age.AGE_KID: age_text += "kid    "; break;

                case AgeGenderEstimator.Age.AGE_YOUNG: age_text += "young  "; break;

                case AgeGenderEstimator.Age.AGE_ADULT: age_text += "adult  "; break;

                case AgeGenderEstimator.Age.AGE_SENIOR: age_text += "senior "; break;
                }

                age_text += string.Format("years: {0:G3}", age_gender.age_years);

                puttext(
                    draw_image,
                    age_text,
                    text_point);
                text_point.Y += text_line_height;

                puttext(
                    draw_image,
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_FEMALE ? "gender: female" :
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_MALE ? "gender: male" : "?",
                    text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw emotions
            if (_flag_emotions)
            {
                List <EmotionsEstimator.EmotionConfidence> emotions =
                    _emotions_estimator.estimateEmotions(sample);

                for (int j = 0; j < emotions.Count; ++j)
                {
                    EmotionsEstimator.Emotion emotion = emotions[j].emotion;
                    float confidence = emotions[j].confidence;

                    OpenCvSharp.Cv2.Rectangle(
                        draw_image,
                        new OpenCvSharp.Rect(
                            (int)text_point.X,
                            (int)text_point.Y - (int)text_line_height / 2,
                            (int)(100 * confidence),
                            (int)text_line_height),
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? new OpenCvSharp.Scalar(255, 0, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? new OpenCvSharp.Scalar(0, 255, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? new OpenCvSharp.Scalar(0, 0, 255) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? new OpenCvSharp.Scalar(0, 255, 255) :
                        new OpenCvSharp.Scalar(0, 0, 0),
                        -1);

                    puttext(
                        draw_image,
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? "neutral" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? "happy" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? "angry" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? "surprise" : "?",
                        text_point + new OpenCvSharp.Point2f(100, 0));

                    text_point.Y += text_line_height;

                    text_point.Y += text_line_height / 3;
                }
            }


            // draw angles text
            if (_flag_angles)
            {
                string yaw, pitch, roll;
                // round each angle to one decimal place before formatting
                yaw   = string.Format("yaw: {0}", 0.1f * (int)(10 * sample.getAngles().yaw + 0.5f));
                pitch = string.Format("pitch: {0}", 0.1f * (int)(10 * sample.getAngles().pitch + 0.5f));
                roll  = string.Format("roll: {0}", 0.1f * (int)(10 * sample.getAngles().roll + 0.5f));

                puttext(draw_image, yaw, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, pitch, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, roll, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw angles vectors
            if (_flag_angles_vectors)
            {
                RawSample.Angles angles = sample.getAngles();

                float cos_a = (float)Math.Cos(angles.yaw * OpenCvSharp.Cv2.PI / 180);
                float sin_a = (float)Math.Sin(angles.yaw * OpenCvSharp.Cv2.PI / 180);

                float cos_b = (float)Math.Cos(angles.pitch * OpenCvSharp.Cv2.PI / 180);
                float sin_b = (float)Math.Sin(angles.pitch * OpenCvSharp.Cv2.PI / 180);

                float cos_c = (float)Math.Cos(angles.roll * OpenCvSharp.Cv2.PI / 180);
                float sin_c = (float)Math.Sin(angles.roll * OpenCvSharp.Cv2.PI / 180);

                OpenCvSharp.Point3f[] xyz =
                {
                    new OpenCvSharp.Point3f(cos_a * cos_c,        -sin_c, -sin_a),
                    new OpenCvSharp.Point3f(sin_c,         cos_b * cos_c, -sin_b),
                    new OpenCvSharp.Point3f(sin_a,         sin_b,         cos_a * cos_b)
                };

                OpenCvSharp.Point2f center = new OpenCvSharp.Point2f(
                    (sample.getLeftEye().x + sample.getRightEye().x) * 0.5f,
                    (sample.getLeftEye().y + sample.getRightEye().y) * 0.5f);

                float length = (rectangle.width + rectangle.height) * 0.3f;

                for (int c = 0; c < 3; ++c)
                {
                    OpenCvSharp.Cv2.Line(
                        draw_image,
                        center,
                        center + new OpenCvSharp.Point2f(xyz[c].X, -xyz[c].Y) * length,
                        c == 0 ? new OpenCvSharp.Scalar(50, 255, 255) :
                        c == 1 ? new OpenCvSharp.Scalar(50, 255, 50) :
                        c == 2 ? new OpenCvSharp.Scalar(50, 50, 255) : new OpenCvSharp.Scalar(),
                        2,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw quality text
            if (_flag_quality)
            {
                QualityEstimator.Quality q =
                    _quality_estimator.estimateQuality(sample);

                string lighting, noise, sharpness, flare;

                lighting = "lighting: " + q.lighting.ToString();
                puttext(draw_image, lighting, text_point);
                text_point.Y += text_line_height;

                noise = "noise: " + q.noise.ToString();
                puttext(draw_image, noise, text_point);
                text_point.Y += text_line_height;

                sharpness = "sharpness: " + q.sharpness.ToString();
                puttext(draw_image, sharpness, text_point);
                text_point.Y += text_line_height;

                flare = "flare: " + q.flare.ToString();
                puttext(draw_image, flare, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw liveness text
            if (_flag_liveness)
            {
                Liveness2DEstimator.Liveness liveness_2d_result = _liveness_2d_estimator.estimateLiveness(sample);

                puttext(
                    draw_image,
                    "liveness: " + (
                        liveness_2d_result == Liveness2DEstimator.Liveness.REAL ? "real" :
                        liveness_2d_result == Liveness2DEstimator.Liveness.FAKE ? "fake" :
                        liveness_2d_result == Liveness2DEstimator.Liveness.NOT_ENOUGH_DATA ? "not enough data" : "??"),
                    text_point);

                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face quality
            if (_flag_face_quality)
            {
                float quality = _face_quality_estimator.estimateQuality(sample);

                string ss = "face quality: " + quality.ToString();
                puttext(draw_image, ss, text_point);
                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face cuts
            for (int cut_i = 0; cut_i < 3; ++cut_i)
            {
                if ((cut_i == 0 && !_flag_cutting_base) ||
                    (cut_i == 1 && !_flag_cutting_full) ||
                    (cut_i == 2 && !_flag_cutting_token))
                {
                    continue;
                }

                puttext(
                    draw_image,
                    cut_i == 0 ? "base cut:" :
                    cut_i == 1 ? "full cut:" :
                    cut_i == 2 ? "token cut:" : "?? cut",
                    text_point);
                text_point.Y += text_line_height / 2;

                MemoryStream obuf = new MemoryStream();
                sample.cutFaceImage(
                    obuf,
                    RawSample.ImageFormat.IMAGE_FORMAT_BMP,
                    cut_i == 0 ? RawSample.FaceCutType.FACE_CUT_BASE :
                    cut_i == 1 ? RawSample.FaceCutType.FACE_CUT_FULL_FRONTAL :
                    cut_i == 2 ? RawSample.FaceCutType.FACE_CUT_TOKEN_FRONTAL :
                    (RawSample.FaceCutType) 999);

                byte[] sbuf = obuf.ToArray();

                OpenCvSharp.Mat img = OpenCvSharp.Cv2.ImDecode(sbuf, OpenCvSharp.ImreadModes.Unchanged);

                OpenCvSharp.Cv2.Resize(img, img, OpenCvSharp.Size.Zero, 0.3, 0.3);


                int img_rect_x = (int)Math.Max(0, -text_point.X);
                int img_rect_y = (int)Math.Max(0, -text_point.Y);

                int img_rect_width = (int)Math.Min(
                    img.Cols - img_rect_x,
                    draw_image.Cols - Math.Max(0, text_point.X));

                int img_rect_height = (int)Math.Min(
                    img.Rows - img_rect_y,
                    draw_image.Rows - Math.Max(0, text_point.Y));

                if (img_rect_width <= 0 || img_rect_height <= 0)
                {
                    continue;
                }

                OpenCvSharp.Rect img_rect = new OpenCvSharp.Rect(img_rect_x, img_rect_y, img_rect_width, img_rect_height);

                img[img_rect].CopyTo(
                    draw_image[new OpenCvSharp.Rect(
                                   (int)Math.Max(0, text_point.X),
                                   (int)Math.Max(0, text_point.Y),
                                   img_rect.Width,
                                   img_rect.Height)]);

                text_point.Y += text_line_height / 2;
                text_point.Y += img.Rows;


                text_point.Y += text_line_height / 3;
            }
        }
        // draw checkboxes
        for (int i = 0; i < flags_count; ++i)
        {
            OpenCvSharp.Rect rect  = flag_rect(i);
            OpenCvSharp.Rect rect2 = new OpenCvSharp.Rect(rect.X + 5, rect.Y + 5, rect.Width - 10, rect.Height - 10);

            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(255), -1);
            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(0), 2, OpenCvSharp.LineTypes.AntiAlias);

            if (get_flag(i))
            {
                OpenCvSharp.Cv2.Rectangle(draw_image, rect2, OpenCvSharp.Scalar.All(0), -1, OpenCvSharp.LineTypes.AntiAlias);
            }

            puttext(
                draw_image,
                flag_name(i),
                new OpenCvSharp.Point2f(rect.X + rect.Width + 3, rect.Y + rect.Height - 3));
        }


        // show the image with the drawn information
        OpenCvSharp.Cv2.ImShow("demo", draw_image);

        // register callback on mouse events
        OpenCvSharp.Cv2.SetMouseCallback("demo", (OpenCvSharp.CvMouseCallback)onMouse);
    }
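A typical driver for work() pulls frames from a camera and feeds them in until the user quits; a minimal sketch, assuming a fully constructed Worker instance named worker and camera index 0:

        using (OpenCvSharp.VideoCapture capture = new OpenCvSharp.VideoCapture(0))
        {
            OpenCvSharp.Mat frame = new OpenCvSharp.Mat();

            // grab, process and display frames until ESC is pressed
            while (capture.Read(frame) && !frame.Empty())
            {
                worker.work(frame);

                if (OpenCvSharp.Cv2.WaitKey(30) == 27)
                {
                    break;
                }
            }
        }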
Example #13
    public double[] BitmapToInputs(int nx, int ny)
    {
        return(RawSample.BitmapToInputs(this.bitmap, nx, ny));
    }
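This wrapper just forwards to the static converter used by the recognition handlers above; a minimal usage sketch, assuming a populated rawSamples list as in Example #4 and grid dimensions nx and ny:

        // digitize the stored bitmap of the first sample into an nx-by-ny input vector
        RawSample rs = rawSamples[0];
        double[] inputs = rs.BitmapToInputs(nx, ny);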