Example #1
    public OpencvSource(string cam_or_url)
    {
        MAssert.Check(cam_or_url != string.Empty);

        // treat cam_or_url as a stream URL if it contains any non-digit character
        bool stream = false;

        for (int i = 0; i < cam_or_url.Length; ++i)
        {
            stream = stream ||
                     (cam_or_url[i] < '0') ||
                     (cam_or_url[i] > '9');
        }

        if (stream)
        {
            // open stream
            Console.WriteLine("opening stream '{0}'", cam_or_url);
            capturer = new OpenCvSharp.VideoCapture(cam_or_url);
            MAssert.Check(capturer.IsOpened(), "stream not opened");
        }
        else
        {
            // convert to integer
            int cam_id = Convert.ToInt32(cam_or_url, 10);
            MAssert.Check(cam_id >= 0, "wrong webcam id");

            // open webcam
            Console.WriteLine("opening webcam {0}", cam_id);
            capturer = new OpenCvSharp.VideoCapture(cam_id);
            MAssert.Check(capturer.IsOpened(), "webcam not opened");

            // set resolution
            capturer.Set(OpenCvSharp.CaptureProperty.FrameWidth, 1280);
            capturer.Set(OpenCvSharp.CaptureProperty.FrameHeight, 720);
        }

        // the first few frames can be empty even if the camera is fine,
        // so skip a few frames
        for (int i = 0; i < 10; ++i)
        {
            capturer.RetrieveMat();
        }
        // check first two frames
        OpenCvSharp.Mat image1 = new OpenCvSharp.Mat(), image2 = new OpenCvSharp.Mat();
        capturer.Read(image1);
        capturer.Read(image2);
        Console.WriteLine("image1 size: {0}", image1.Size());
        Console.WriteLine("image1 size: {0}", image2.Size());

        MAssert.Check(
            !image1.Empty() &&
            !image2.Empty() &&
            image1.Size() == image2.Size() &&
            image1.Type() == OpenCvSharp.MatType.CV_8UC3 &&
            image2.Type() == OpenCvSharp.MatType.CV_8UC3,
            "error opening webcam or stream");
    }
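
Usage note: a minimal sketch of driving this source, assuming OpencvSource derives from ImageAndDepthSource and exposes Get() the way RealSenseSource does in Example #9, and that ImageAndDepth.image is initialized to an empty Mat:

    ImageAndDepthSource source = new OpencvSource("0");  // first webcam; a URL such as "rtsp://..." selects the stream branch
    ImageAndDepth frame = source.Get();
    if (!frame.image.Empty())
    {
        OpenCvSharp.Cv2.ImShow("preview", frame.image);  // "preview" is an arbitrary window name
        OpenCvSharp.Cv2.WaitKey(0);
    }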
Example #2
    public DepthMapRaw depth_opts;      // all fields except depth_data_ptr and depth_data_stride_in_bytes must be set by the caller

    public DepthMapRaw make_dmr()
    {
        MAssert.Check(!depth.Empty());

        DepthMapRaw r = depth_opts;

        r.depth_data_ptr             = depth.Data;
        r.depth_data_stride_in_bytes = (int)depth.Step();

        return(r);
    }
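
The comment on depth_opts means the caller fills all the metadata fields first; make_dmr() only attaches the pixel pointer and stride. A hedged sketch, assuming these members live on the ImageAndDepth class whose fields Example #9 populates, and that depth is a plain OpenCvSharp.Mat:

    ImageAndDepth d = new ImageAndDepth();
    d.depth_opts.depth_map_rows            = 480;   // example metadata values
    d.depth_opts.depth_map_cols            = 640;
    d.depth_opts.depth_unit_in_millimeters = 1;     // assumption: 1 raw unit == 1 mm
    // ... remaining depth_opts fields as in Example #9 ...
    d.depth = new OpenCvSharp.Mat(480, 640, OpenCvSharp.MatType.CV_16UC1, OpenCvSharp.Scalar.All(0));
    DepthMapRaw dmr = d.make_dmr();   // depth_data_ptr and stride now reference d.depth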
Example #3
    void TrackingLostCallback(
        VideoWorker.TrackingLostCallbackData data,
        Object userdata)
    {
        MAssert.Check(data != null);
        MAssert.Check(userdata != null);

        int       stream_id             = (int)data.stream_id;
        int       track_id              = (int)data.track_id;
        int       first_frame_id        = (int)data.first_frame_id;
        int       last_frame_id         = (int)data.last_frame_id;
        float     best_quality          = (float)data.best_quality;
        int       best_quality_frame_id = (int)data.best_quality_frame_id;
        RawSample best_quality_sample   = data.best_quality_sample;
        Template  best_quality_templ    = data.best_quality_templ;

        // userdata is supposed to be a reference to this Worker,
        // so cast it back
        Worker worker = (Worker)(userdata);

        // we only care about frames from the worker._stream_id source,
        // so just ignore any others
        if (stream_id != worker._stream_id)
        {
            return;
        }

        // here we just remember the time when the face was lost,
        // but the real purpose of this callback is to store faces
        // for further matching or processing

        {
            worker._drawing_data_mutex.WaitOne();

            FaceData face = worker._drawing_data.faces[track_id];

            MAssert.Check(!face.lost);

            face.lost      = true;
            face.lost_time = new TimeSpan(DateTime.Now.Ticks);
            if (best_quality_sample != null)
            {
                face.sample = best_quality_sample;
            }

            worker._drawing_data_mutex.ReleaseMutex();
        }
    }
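
For context: this callback is registered by the Worker constructor in Example #4, which passes the Worker itself as userdata; that registration is what makes the cast above safe:

    _tracking_lost_callback_id =
        video_worker.addTrackingLostCallbackU(
            TrackingLostCallback,
            this);   // 'this' is what arrives here as userdata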
Example #4
    public Worker(
        Database database,
        VideoWorker video_worker,
        ImageAndDepthSource source,
        int stream_id,
        Mutex draw_image_mutex,
        OpenCvSharp.Mat draw_image,
        float frame_fps_limit)
    {
        _draw_image_mutex   = draw_image_mutex;
        _draw_image         = draw_image;
        _video_worker       = video_worker;
        _source             = source;
        _frame_min_delay_ms = 1000f / frame_fps_limit;
        _stream_id          = stream_id;
        _database           = database;

        // check arguments
        MAssert.Check(video_worker != null);
        MAssert.Check(source != null);
        MAssert.Check(stream_id >= 0 && stream_id < video_worker.getStreamsCount());

        // set callbacks arguments
        _tracking_callback_id =
            video_worker.addTrackingCallbackU(
                TrackingCallback,
                this);

        _tracking_lost_callback_id =
            video_worker.addTrackingLostCallbackU(
                TrackingLostCallback,
                this);

        _match_found_callback_id =
            video_worker.addMatchFoundCallbackU(
                MatchFoundCallback,
                this);

        // start threads
        _shutdown = false;

        _drawing_thread = new Thread(DrawingThreadFunc);
        _camera_thread  = new Thread(CameraThreadFunc);
        _drawing_thread.Start();
        _camera_thread.Start();
    }
Example #5
    public RealSenseSource()
    {
        using (Context ctx = new Context())
        {
            DeviceList devices = ctx.QueryDevices();

            Console.WriteLine("There are {0} connected RealSense devices.", devices.Count);

            MAssert.Check(devices.Count > 0, "no RealSense devices found");

            for (int i = 0; i < devices.Count; ++i)
            {
                Console.WriteLine(
                    "Device {0}: name {1}, serial {2}, firmware version: {3}",
                    i,
                    devices[i].Info[CameraInfo.Name],
                    devices[i].Info[CameraInfo.SerialNumber],
                    devices[i].Info[CameraInfo.FirmwareVersion]);
            }
        }

        Config config = new Config();

        config.EnableStream(Stream.Depth, 640, 480, Format.Z16);
        config.EnableStream(Stream.Color, 640, 480, Format.Bgr8);

        pipeline = new Pipeline();

        PipelineProfile pipeline_profile = pipeline.Start(config);

        depth_scale = pipeline_profile.Device.Sensors[0].DepthScale * 1000;

        MAssert.Check(depth_scale > 0);

        align_to_color = new Align(Stream.Color);
    }
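
A hedged capture-loop sketch for this source: Get() (Example #9) only refreshes image and depth when new frames arrive, so the image is checked before display (assumption: ImageAndDepth initializes image to an empty Mat):

    RealSenseSource src = new RealSenseSource();
    for (;;)
    {
        ImageAndDepth frame = src.Get();
        if (!frame.image.Empty())
        {
            OpenCvSharp.Cv2.ImShow("color", frame.image);
        }
        if (OpenCvSharp.Cv2.WaitKey(1) == 27)   // Esc
        {
            break;
        }
    }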
Example #6
    void MatchFoundCallback(
        VideoWorker.MatchFoundCallbackData data,
        Object userdata)
    {
        MAssert.Check(data != null);
        MAssert.Check(userdata != null);

        int       stream_id = (int)data.stream_id;
        int       frame_id  = (int)data.frame_id;
        RawSample sample    = data.sample;
        float     quality   = (float)data.quality;
        Template  templ     = data.templ;

        VideoWorker.SearchResult[] search_results = data.search_results;

        // userdata is supposed to be a reference to this Worker,
        // so cast it back
        Worker worker = (Worker)(userdata);

        // we only care about frames from the worker._stream_id source,
        // so just ignore any others
        if (stream_id != worker._stream_id)
        {
            return;
        }

        MAssert.Check(sample != null);
        MAssert.Check(templ != null);
        MAssert.Check(search_results.Length > 0);

        // just print the distances to the console
        Console.WriteLine("stream {0} match track {1} : ", stream_id, sample.getID());

        for (int i = 0; i < search_results.Length; ++i)
        {
            ulong element_id = search_results[i].element_id;

            if (element_id == (ulong)VideoWorker.MATCH_NOT_FOUND_ID)
            {
                MAssert.Check(i == 0);
                Console.WriteLine("  {0}: MATCH NOT FOUND", i);
            }
            else
            {
                MAssert.Check(element_id < (UInt64)worker._database.names.Count);
                Console.WriteLine("  {0}:  with '{1}' distance: {2}",
                                  i,
                                  worker._database.names[(int)element_id],
                                  search_results[i].match_result.distance);
            }
        }
        Console.WriteLine("");

        ulong match_element_id = search_results[0].element_id;

        if (match_element_id != (ulong)VideoWorker.MATCH_NOT_FOUND_ID)
        {
            MAssert.Check((int)match_element_id < worker._database.thumbnails.Count);

            // set the match info in the worker._drawing_data.faces
            worker._drawing_data_mutex.WaitOne();

            FaceData face = worker._drawing_data.faces[sample.getID()];

            MAssert.Check(!face.lost);

            face.match_database_index = (int)match_element_id;

            worker._drawing_data_mutex.ReleaseMutex();
        }
    }
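
A hedged sketch of consuming the stored index later (for instance in a drawing thread), assuming FaceData initializes match_database_index to a negative value for unmatched faces:

    worker._drawing_data_mutex.WaitOne();
    foreach (FaceData face in worker._drawing_data.faces.Values)
    {
        if (face.match_database_index >= 0)   // assumption: negative means no match yet
        {
            string          name  = worker._database.names[face.match_database_index];
            OpenCvSharp.Mat thumb = worker._database.thumbnails[face.match_database_index];
            // draw the name and thumbnail next to the face here
        }
    }
    worker._drawing_data_mutex.ReleaseMutex();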
Example #7
    void TrackingCallback(
        VideoWorker.TrackingCallbackData data,
        Object userdata)
    {
        // check arguments
        MAssert.Check(data != null);
        MAssert.Check(userdata != null);

        int stream_id = (int)data.stream_id;
        int frame_id  = (int)data.frame_id;

        RawSample[] samples         = data.samples;
        bool[]      samples_weak    = data.samples_weak;
        float[]     samples_quality = data.samples_quality;

        MAssert.Check(samples.Length == samples_weak.Length);
        MAssert.Check(samples.Length == samples_quality.Length);

        // userdata is supposed to be a reference to this Worker,
        // so cast it back
        Worker worker = (Worker)(userdata);

        // we only care about frames from the worker._stream_id source,
        // so just ignore any others
        if (stream_id != worker._stream_id)
        {
            return;
        }

        // get the frame with frame_id id
        ImageAndDepth frame = new ImageAndDepth();

        {
            worker._frames_mutex.WaitOne();

            // searching in worker._frames
            for (;;)
            {
                // there already must be some frames
                MAssert.Check(worker._frames.Count > 0);

                // this frame_id can't be from the future
                MAssert.Check(worker._frames.Peek().Item1 <= frame_id);

                if (worker._frames.Peek().Item1 == frame_id)
                {
                    // we found it
                    frame = worker._frames.Dequeue().Item2;
                    break;
                }
                else
                {
                    // this frame (i.e. the head of worker._frames) was skipped by tracking
                    Console.WriteLine("skipped {0}:{1}", stream_id, worker._frames.Dequeue().Item1);
                }
            }
            worker._frames_mutex.ReleaseMutex();
        }

        // update the data
        {
            worker._drawing_data_mutex.WaitOne();

            // frame

            worker._drawing_data.frame    = frame.image;
            worker._drawing_data.depth    = frame.depth;
            worker._drawing_data.frame_id = frame_id;
            worker._drawing_data.updated  = true;

            // and samples
            for (int i = 0; i < samples.Length; ++i)
            {
                if (!worker._drawing_data.faces.ContainsKey(samples[i].getID()))
                {
                    worker._drawing_data.faces[samples[i].getID()] = new FaceData();
                }
                FaceData face = worker._drawing_data.faces[samples[i].getID()];
                face.frame_id = samples[i].getFrameID();                  // it can differ from the frame_id
                face.lost     = false;
                face.weak     = samples_weak[i];
                face.sample   = samples[i];
            }
            worker._drawing_data_mutex.ReleaseMutex();
        }
    }
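
The queue searched above implies a producer on the camera thread. A hedged sketch of that side, assuming _frames is a Queue<Tuple<int, ImageAndDepth>> (as Peek().Item1 / Dequeue().Item2 suggest) and frame_id is the id the SDK assigned when the frame was submitted:

    // camera thread (hedged sketch): remember each submitted frame under
    // the id the SDK assigned to it, so TrackingCallback can find it later
    _frames_mutex.WaitOne();
    _frames.Enqueue(new Tuple<int, ImageAndDepth>(frame_id, frame));
    _frames_mutex.ReleaseMutex();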
Example #8
File: Main.cs Project: 3DiVi/face-sdk
    static int Main(string[] args)
    {
        try
        {
            // print usage
            Console.WriteLine("Usage: dotnet csharp_video_recognition_demo.dll [OPTIONS] <video_source>...");
            Console.WriteLine("Examples:");
            Console.WriteLine("    Webcam:  dotnet csharp_video_recognition_demo.dll --config_dir ../../../conf/facerec 0");
            Console.WriteLine("    RTSP stream:  dotnet csharp_video_recognition_demo.dll --config_dir ../../../conf/facerec rtsp://localhost:8554/");
            Console.WriteLine("");

            // parse arguments
            bool    error   = false;
            Options options = new Options();
            CommandLine.Parser.Default.ParseArguments <Options>(args)
            .WithParsed <Options>(opts => options  = opts)
            .WithNotParsed <Options>(errs => error = true);

            // exit on argument parsing error
            if (error)
            {
                return(1);
            }

            // print values of arguments
            Console.WriteLine("Arguments:");
            foreach (var opt in options.GetType().GetProperties())
            {
                if (opt.Name == "video_sources")
                {
                    Console.Write("video sources = ");
                    foreach (string vs in options.video_sources)
                    {
                        Console.Write(vs + " ");
                    }
                    Console.WriteLine();
                }
                else
                {
                    Console.WriteLine("--{0} = {1}", opt.Name, opt.GetValue(options, null));
                }
            }
            Console.WriteLine("\n");

            // parse parameters
            string        config_dir    = options.config_dir;
            string        license_dir   = options.license_dir;
            string        database_dir  = options.database_dir;
            string        method_config = options.method_config;
            float         recognition_distance_threshold = options.recognition_distance_threshold;
            float         frame_fps_limit = options.frame_fps_limit;
            List <string> video_sources   = new List <string>(options.video_sources);

            // check params
            MAssert.Check(config_dir != string.Empty, "Error! config_dir is empty.");
            MAssert.Check(database_dir != string.Empty, "Error! database_dir is empty.");
            MAssert.Check(method_config != string.Empty, "Error! method_config is empty.");
            MAssert.Check(recognition_distance_threshold > 0, "Error! recognition_distance_threshold must be positive.");

            List <ImageAndDepthSource> sources = new List <ImageAndDepthSource>();
            List <string> sources_names        = new List <string>();


            MAssert.Check(video_sources.Count > 0, "Error! video_sources is empty.");

            for (int i = 0; i < video_sources.Count; i++)
            {
                sources_names.Add(string.Format("OpenCvS source {0}", i));
                sources.Add(new OpencvSource(video_sources[i]));
            }


            MAssert.Check(sources_names.Count == sources.Count);

            // print sources
            Console.WriteLine("\n{0} sources: ", sources.Count);

            for (int i = 0; i < sources_names.Count; ++i)
            {
                Console.WriteLine("  {0}", sources_names[i]);
            }
            Console.WriteLine("");

            // create facerec service
            FacerecService service =
                FacerecService.createService(
                    config_dir,
                    license_dir);

            Console.WriteLine("Library version: {0}\n", service.getVersion());

            // create database
            Recognizer recognizer = service.createRecognizer(method_config, true, false, false);
            Capturer   capturer   = service.createCapturer("common_capturer4_lbf_singleface.xml");
            Database   database   = new Database(
                database_dir,
                recognizer,
                capturer,
                recognition_distance_threshold);
            recognizer.Dispose();
            capturer.Dispose();

            FacerecService.Config vw_config = new FacerecService.Config("video_worker_fdatracker_blf_fda.xml");
            // vw_config.overrideParameter("single_match_mode", 1);
            vw_config.overrideParameter("search_k", 10);
            vw_config.overrideParameter("not_found_match_found_callback", 1);
            vw_config.overrideParameter("downscale_rawsamples_to_preferred_size", 0);

            //ActiveLiveness.CheckType[] checks = new ActiveLiveness.CheckType[3]
            //{
            //	ActiveLiveness.CheckType.BLINK,
            //			ActiveLiveness.CheckType.TURN_RIGHT,
            //			ActiveLiveness.CheckType.SMILE
            //};


            // create one VideoWorker
            VideoWorker video_worker =
                service.createVideoWorker(
                    new VideoWorker.Params()
                    .video_worker_config(vw_config)
                    .recognizer_ini_file(method_config)
                    .streams_count(sources.Count)
                    //.age_gender_estimation_threads_count(sources.Count)
                    //.emotions_estimation_threads_count(sources.Count)
                    //.active_liveness_checks_order(checks)
                    .processing_threads_count(sources.Count)
                    .matching_threads_count(sources.Count));

            // set database
            video_worker.setDatabase(database.vwElements, Recognizer.SearchAccelerationType.SEARCH_ACCELERATION_1);

            for (int i = 0; i < sources_names.Count; ++i)
            {
                OpenCvSharp.Window window = new OpenCvSharp.Window(sources_names[i]);

                OpenCvSharp.Cv2.ImShow(sources_names[i], new OpenCvSharp.Mat(100, 100, OpenCvSharp.MatType.CV_8UC3, OpenCvSharp.Scalar.All(0)));
            }

            // prepare buffers to store the drawn results
            Mutex draw_images_mutex            = new Mutex();
            List <OpenCvSharp.Mat> draw_images = new List <OpenCvSharp.Mat>(sources.Count);


            // create one worker per one source
            List <Worker> workers = new List <Worker>();

            for (int i = 0; i < sources.Count; ++i)
            {
                draw_images.Add(new OpenCvSharp.Mat(100, 100, OpenCvSharp.MatType.CV_8UC3, OpenCvSharp.Scalar.All(0)));
                workers.Add(new Worker(
                                database,
                                video_worker,
                                sources[i],
                                i,          // stream_id
                                draw_images_mutex,
                                draw_images[i],
                                frame_fps_limit
                                ));
            }


            // draw results until Escape is pressed
            for (;;)
            {
                {
                    draw_images_mutex.WaitOne();
                    for (int i = 0; i < draw_images.Count; ++i)
                    {
                        OpenCvSharp.Mat drawed_im = workers[i]._draw_image;
                        if (!drawed_im.Empty())
                        {
                            OpenCvSharp.Cv2.ImShow(sources_names[i], drawed_im);
                            draw_images[i] = new OpenCvSharp.Mat();
                        }
                    }
                    draw_images_mutex.ReleaseMutex();
                }

                int key = OpenCvSharp.Cv2.WaitKey(20);
                if (27 == key)
                {
                    foreach (Worker w in workers)
                    {
                        w.Dispose();
                    }
                    break;
                }

                if (' ' == key)
                {
                    Console.WriteLine("enable processing 0");
                    video_worker.enableProcessingOnStream(0);
                }

                if (13 == key)
                {
                    Console.WriteLine("disable processing 0");
                    video_worker.disableProcessingOnStream(0);
                }


                if ('r' == key)
                {
                    Console.WriteLine("reset trackerOnStream");
                    video_worker.resetTrackerOnStream(0);
                }


                // check exceptions in callbacks
                video_worker.checkExceptions();
            }

            // force-free the resources,
            // otherwise a license error may occur
            // the next time an SDK object is created
            service.Dispose();
            video_worker.Dispose();
        }
        catch (Exception e)
        {
            Console.WriteLine("video_recognition_show exception catched: '{0}'", e.ToString());
            return(1);
        }

        return(0);
    }
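
To feed the pipeline from a RealSense camera instead of OpenCV sources, the source-building loop above could be replaced as follows (a hedged sketch, assuming RealSenseSource from Examples #5 and #9 also derives from ImageAndDepthSource):

    sources_names.Add("RealSense source");
    sources.Add(new RealSenseSource());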
Example #9
    public override ImageAndDepth Get()
    {
        ImageAndDepth res = new ImageAndDepth();

        using (FrameSet frameset = pipeline.WaitForFrames())
        {
            DepthFrame depth_frame = frameset.FirstOrDefault <DepthFrame>(Stream.Depth, Format.Z16).DisposeWith(frameset);

            if (depth_frame != null && ((long)depth_frame.Number) > prev_depth_frame_number)
            {
                prev_depth_frame_number = (long)depth_frame.Number;

                FrameSet registered = align_to_color.Process(frameset).As <FrameSet>().DisposeWith(frameset);

                depth_frame = registered.FirstOrDefault <DepthFrame>(Stream.Depth, Format.Z16).DisposeWith(frameset);

                MAssert.Check(depth_frame != null);
                MAssert.Check(depth_frame.BitsPerPixel == 16);
                MAssert.Check(depth_frame.Stride >= depth_frame.Width * 2);

                float[] fov = depth_frame.Profile.As <VideoStreamProfile>().GetIntrinsics().FOV;

                res.depth_opts.horizontal_fov            = fov[0];
                res.depth_opts.vertical_fov              = fov[1];
                res.depth_opts.depth_unit_in_millimeters = depth_scale;


                res.depth_opts.depth_map_2_image_offset_x = 0;
                res.depth_opts.depth_map_2_image_offset_y = 0;
                res.depth_opts.depth_map_2_image_scale_x  = 1;
                res.depth_opts.depth_map_2_image_scale_y  = 1;

                res.depth_opts.depth_map_rows = depth_frame.Height;
                res.depth_opts.depth_map_cols = depth_frame.Width;

                res.depth_timestamp_microsec = (ulong)depth_frame.Timestamp * 1000;

                res.depth = new OpenCvSharp.MatOfUShort(
                    depth_frame.Height,
                    depth_frame.Width,
                    depth_frame.Data,
                    depth_frame.Stride).Clone();
            }


            VideoFrame video_frame = frameset.FirstOrDefault <VideoFrame>(Stream.Color, Format.Bgr8).DisposeWith(frameset);

            if (video_frame != null && ((long)video_frame.Number > prev_video_frame_number))
            {
                prev_video_frame_number = (long)video_frame.Number;

                MAssert.Check(video_frame.BitsPerPixel == 24);
                MAssert.Check(video_frame.Stride >= video_frame.Width * 3);

                res.image = new OpenCvSharp.Mat(
                    video_frame.Height,
                    video_frame.Width,
                    OpenCvSharp.MatType.CV_8UC3,
                    video_frame.Data,
                    video_frame.Stride).Clone();

                res.image_timestamp_microsec = (ulong)video_frame.Timestamp * 1000;
            }
        }

        return(res);
    }
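
The returned 16-bit depth map is not directly displayable. A hedged visualization sketch; the 4000 mm clipping range is an arbitrary choice and assumes depth_unit_in_millimeters is about 1:

    ImageAndDepth f = src.Get();          // src: a RealSenseSource (Example #5)
    if (!f.depth.Empty())
    {
        OpenCvSharp.Mat vis = new OpenCvSharp.Mat();
        f.depth.ConvertTo(vis, OpenCvSharp.MatType.CV_8UC1, 255.0 / 4000.0);  // map ~4 m to full range
        OpenCvSharp.Cv2.ImShow("depth", vis);
        OpenCvSharp.Cv2.WaitKey(1);
    }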
Example #10
    // create the database
    public Database(
        string databaseDirPath,
        Recognizer recognizer,
        Capturer capturer,
        float distanceThreshold)
    {
        vwElements = new List <VideoWorker.DatabaseElement>();
        samples    = new List <RawSample>();
        thumbnails = new List <OpenCvSharp.Mat>();
        names      = new List <string>();
        // check paths
        MAssert.Check(Directory.Exists(databaseDirPath), "database not found");

        // get directory content
        List <string> path_l1 = new List <string>(Directory.EnumerateDirectories(databaseDirPath));

        // check every element in that directory

        ulong element_id_counter = 0;

        for (int il1 = 0; il1 < path_l1.Count; ++il1)
        {
            // ignore files
            if (!Directory.Exists(path_l1[il1]))
            {
                continue;
            }
            // so path_l1[il1] is supposed to be the path to the person directory

            // get the files inside this directory
            List <string> path_l2 = new List <string>(Directory.EnumerateFiles(path_l1[il1]));
            string        name    = string.Empty;

            // search for the name.txt file

            for (int il2 = 0; il2 < path_l2.Count; ++il2)
            {
                if (Path.GetFileName(path_l2[il2]) == "name.txt")
                {
                    // put file content in the name

                    using (StreamReader sr = new StreamReader(path_l2[il2]))
                    {
                        name = sr.ReadToEnd();
                    }
                }
            }

            // try to open each file as an image
            for (int il2 = 0; il2 < path_l2.Count; ++il2)
            {
                if (Path.GetFileName(path_l2[il2]) == "name.txt")
                {
                    continue;
                }

                Console.WriteLine("processing '{0}' name: '{1}'", path_l2[il2], name);

                // read image with opencv

                OpenCvSharp.Mat readed_image = OpenCvSharp.Cv2.ImRead(path_l2[il2]);

                if (readed_image.Empty() || readed_image.Type() != OpenCvSharp.MatType.CV_8UC3)
                {
                    Console.WriteLine("\n\nWARNING: can't read image '{0}'\n\n", path_l2[il2]);
                    continue;
                }

                byte[] data = new byte[readed_image.Total() * readed_image.Type().Channels];
                Marshal.Copy(readed_image.DataStart, data, 0, (int)data.Length);
                RawImage image = new RawImage(readed_image.Width, readed_image.Height, RawImage.Format.FORMAT_BGR, data);

                // capture the face
                List <RawSample> capturedSamples = capturer.capture(image);

                if (capturedSamples.Count != 1)
                {
                    Console.WriteLine("\n\nWARNING: detected {0} faces on '{1}' image instead of one, image ignored \n\n", capturedSamples.Count, path_l2[il2]);
                    continue;
                }

                RawSample sample = capturedSamples[0];

                // make template
                Template templ = recognizer.processing(sample);

                // prepare data for VideoWorker
                VideoWorker.DatabaseElement vwElement = new VideoWorker.DatabaseElement(element_id_counter++, (ulong)il1, templ, distanceThreshold);

                vwElements.Add(vwElement);

                samples.Add(sample);

                thumbnails.Add(makeThumbnail(sample, name));

                names.Add(name);
            }
        }

        MAssert.Check((int)element_id_counter == vwElements.Count);
        MAssert.Check((int)element_id_counter == samples.Count);
        MAssert.Check((int)element_id_counter == thumbnails.Count);
        MAssert.Check((int)element_id_counter == names.Count);
    }
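
The directory walk above implies the on-disk layout sketched below (one directory per person, an optional name.txt, and face photos); the database is then installed into the VideoWorker as Example #8 does. The 7000f threshold is a placeholder value:

    // expected layout (inferred from the loops above):
    //   databaseDirPath/
    //       person_a/
    //           name.txt      <- display name
    //           photo1.jpg    <- exactly one face per image
    //       person_b/
    //           ...
    Database database = new Database("database_dir", recognizer, capturer, 7000f);
    video_worker.setDatabase(database.vwElements, Recognizer.SearchAccelerationType.SEARCH_ACCELERATION_1);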
Example #11
    // make a thumbnail of a sample
    public static OpenCvSharp.Mat makeThumbnail(
        RawSample sample,
        string name = "")
    {
        int thumbnail_size = Worker.thumbnail_size;

        // buffer for the cropped face image
        MemoryStream stream = new MemoryStream();

        // cut out the face in BMP format
        // so we don't waste time encoding/decoding the image;
        // it is just copied a few times, which is negligible
        sample.cutFaceImage(
            stream,
            RawSample.ImageFormat.IMAGE_FORMAT_BMP,
            RawSample.FaceCutType.FACE_CUT_BASE);

        OpenCvSharp.Mat temp = OpenCvSharp.Mat.ImDecode(stream.ToArray(), OpenCvSharp.ImreadModes.Color);

        // so we got an image

        // check it
        MAssert.Check(!temp.Empty());
        MAssert.Check(temp.Type() == OpenCvSharp.MatType.CV_8UC3);


        // and resize to the thumbnail_size

        OpenCvSharp.Rect resRect;

        if (temp.Rows >= temp.Cols)
        {
            resRect.Height = thumbnail_size;
            resRect.Width  = temp.Cols * thumbnail_size / temp.Rows;
        }
        else
        {
            resRect.Width  = thumbnail_size;
            resRect.Height = temp.Rows * thumbnail_size / temp.Cols;
        }

        resRect.X = (thumbnail_size - resRect.Width) / 2;
        resRect.Y = (thumbnail_size - resRect.Height) / 2;

        OpenCvSharp.Mat result = new OpenCvSharp.Mat(
            thumbnail_size,
            thumbnail_size,
            OpenCvSharp.MatType.CV_8UC3,
            OpenCvSharp.Scalar.All(0));

        OpenCvSharp.Cv2.Resize(
            temp,
            result[resRect],
            resRect.Size);

        if (!string.IsNullOrEmpty(name))
        {
            result[new OpenCvSharp.Rect(0, result.Rows - 27, result.Cols, 27)] = result.RowRange(result.Rows - 27, result.Rows) * 0.5f;

            OpenCvSharp.Cv2.PutText(
                result,
                name,
                new OpenCvSharp.Point(0, result.Rows - 7),
                OpenCvSharp.HersheyFonts.HersheyDuplex,
                0.7,
                OpenCvSharp.Scalar.All(255),
                1,
                OpenCvSharp.LineTypes.AntiAlias);
        }

        return(result);
    }
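
A hedged usage sketch, assuming makeThumbnail is a static member of the Database class from Example #10 (where it is called unqualified); the "Alice" label is arbitrary:

    List <RawSample> found = capturer.capture(image);                 // as in Example #10
    OpenCvSharp.Mat thumb = Database.makeThumbnail(found[0], "Alice");
    OpenCvSharp.Cv2.ImShow("thumbnail", thumb);
    OpenCvSharp.Cv2.WaitKey(0);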