Example #1
        public void FramesManagerResizeGrayImageTest()
        {
            Image <Gray, byte> image = new Image <Gray, byte>(new Bitmap(100, 100));

            image = FramesManager.ResizeGrayImage(image, new Size(10, 10));
            Assert.IsNotNull(image);
        }
Example #2
        private void PerpetratorDetails_Load(object sender, EventArgs e)
        {
            //resize and display one of the images of the perpetrator
            Size face_size = new Size(perpetrator_picture_box.Width, perpetrator_picture_box.Height);

            perpetrator_picture_box.Image = FramesManager.ResizeBitmap(perpetrator.faces[0].ToBitmap(), face_size);
        }
        protected override void RecognizeFace(Image <Bgr, byte> face)
        {
            if (students.Length != 0)
            {
                //RESIZE THE FACE TO BE RECOGNIZED SO IT MATCHES THE SIZE OF THE FACES ALREADY IN THE TRAINING SET
                int width  = 120;
                int height = 120;

                face = FramesManager.ResizeColoredImage(face, new Size(width, height));

                //ATTEMPT TO RECOGNIZE THE PERPETRATOR
                face_recognition_result = faces_manager.MatchFace(face);

                //IF A VALID ID IS RETURNED
                if (face_recognition_result.match_was_found)
                {
                    //GET STUDENT ASSOCIATED WITH ID
                    foreach (var student in students)
                    {
                        if (student.id == face_recognition_result.id)
                        {
                            face_recognition_result.identified_student = student;
                            break;
                        }
                    }
                }

                return;
            }
        }
Example #4
 public void FramesManagerCropSelectedFaceTest()
 {
     Rectangle[] faces = FramesManager.DetectFacesInFrame(Singleton.FACE_PIC, Singleton.HAARCASCADE);
     //Image<Bgr, byte> image = Singleton.FACE_PIC.Clone();
     //Image<Bgr,byte> image_result=FramesManager.CropSelectedFace(faces[0], Singleton.FACE_PIC);
     //Assert.IsNotNull(image_result);
 }
Example #5
        public void FramesManagerDetectFacesInFrameTest()
        {
            Rectangle[] faces = FramesManager.DetectFacesInFrame(Singleton.FACE_PIC, Singleton.HAARCASCADE);

            //THE METHOD MUST DETECT A FACE IN THE PROVIDED PIC
            Assert.IsTrue(faces.Length >= 1);
        }
Example #6
        public void FramesManagerResizeBitmapTest()
        {
            Bitmap image = new Bitmap(100, 100);

            image = FramesManager.ResizeBitmap(image, new Size(10, 10));
            Assert.IsNotNull(image);
        }
        //DETECTS FACES IN THE CURRENT FRAME
        public bool DetectFacesInFrame()
        {
            //try to get a frame from the shared datastore for captured frames
            sucessfull = Singleton.REVIEW_FRAMES_TO_BE_PROCESSED.TryDequeue(out current_frame);


            //if ok
            if (sucessfull)
            {
                //detect faces in frame
                detected_faces = FramesManager.DetectFacesInFrame(current_frame.Clone(), haarcascade);
                return(true);
            }
            //IF NO FRAMES IN DATA STORE
            else
            {
                //IF OUTPUT GRABBER THREAD IS DONE THEN IT MEANS THE FRAMES ARE DONE
                //TERMINATE THIS THREAD AND SIGNAL TO OTHERS THAT IT IS DONE
                if (VideoFromFileThread.WORK_DONE)
                {
                    WORK_DONE = true;
                    running   = false;
                }
                return(false);
            }
        }
    /// <summary>
    /// Handles the Load event of the Page control.
    /// </summary>
    protected void Page_Load(object sender, EventArgs e)
    {
        CurrentMaster.BodyClass += " Buttons";

        // set button text
        btnOk.Text     = GetString("general.saveandclose");
        btnApply.Text  = GetString("general.apply");
        btnCancel.Text = GetString("general.cancel");

        chkRefresh.Text = GetString("Widget.Properties.Refresh");

        if (inline)
        {
            btnApply.Visible   = false;
            chkRefresh.Visible = false;
        }

        ltlScript.Text += ScriptHelper.GetScript(
            @"function GetRefreshStatus() {
    var refresh = document.getElementById('" + chkRefresh.ClientID + @"');
    if (refresh != null) {
        return refresh.checked;
    }
    return false;         
}");

        btnCancel.OnClientClick = FramesManager.GetCancelScript();
        btnApply.OnClientClick  = FramesManager.GetApplyScript();
        btnOk.OnClientClick     = FramesManager.GetOKScript();
    }
Example #9
        public bool DrawFaceRectangles()
        {
            if (current_frame == null)
            {
                throw new NullReferenceException();
            }

            try
            {
                if (detected_faces_in_frame != null)
                {
                    Bitmap current_frame_bitmap = current_frame.ToBitmap();

                    Graphics graphics = Graphics.FromImage(current_frame_bitmap);

                    Parallel.ForEach(detected_faces_in_frame, detected_face =>
                    {
                        Bitmap a_frame = FramesManager.DrawShapeOnTransparentBackGround(detected_face, current_frame.Width, current_frame.Height);
                        lock (current_frame)
                        {
                            FramesManager.OverLayBitmapToFormNewImage(a_frame, graphics);
                        }
                    });

                    current_frame = new Image <Bgr, byte>(current_frame_bitmap);
                    return(true);
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.Message);
            }
            return(false);
        }
Example #10
        //JUMPS FORWARD OR BACKWARDS IN THE VIDEO PLAYING
        public bool RewindOrForwardVideo(double millisecond_to_jump_to)
        {
            //SETS THE POINTER TO THE FRAME BEFORE THE SPECIFIED MILLISECOND
            bool sucess = FramesManager.PerformSeekOperationInVideo(millisecond_to_jump_to, video_file.video_capture);

            return(sucess);
        }
Example #11
        //ADDS A CAPTURED FRAME TO A THREAD-SAFE QUEUE FOR EASY ACCESS WHEN THE FRAME IS LATER PROCESSED BY MULTIPLE THREADS
        public bool AddNextFrameToQueueForProcessing()
        {
            using (current_frame = FramesManager.GetNextFrame(camera.camera_capture))
            {
                if (current_frame != null)
                {
                    int  width    = Singleton.MAIN_WINDOW.GetControl(MainWindow.MainWindowControls.review_image_box).Width;
                    int  height   = Singleton.MAIN_WINDOW.GetControl(MainWindow.MainWindowControls.review_image_box).Height;
                    Size new_size = new Size(width, height);

                    Singleton.REVIEW_FRAMES_TO_BE_PROCESSED.Enqueue(FramesManager.ResizeColoredImage(current_frame.Clone(), new_size));

                    return(true);
                }

                //FRAME IS NULL
                //MEANING END OF FILE IS REACHED
                else
                {
                    //ADD BLACK FRAME TO DATASTORE AND TERMINATE THREAD
                    //ALSO SIGNAL TO OTHERS THAT THIS THREAD IS DONE
                    WORK_DONE = true;
                    running   = false;
                    Debug.WriteLine("Terminating video from file");
                    return(false);
                }
            }
        }
Example #12
        protected override void RecognizeFace(Image <Bgr, byte> face)
        {
            if (active_perpetrators.Length != 0)
            {
                //RESIZE THE FACE TO BE RECOGNIZED SO IT MATCHES THE SIZE OF THE FACES ALREADY IN THE TRAINING SET
                int width  = 120;
                int height = 120;

                face = FramesManager.ResizeColoredImage(face, new Size(width, height));

                //GET ID OF MOST SIMILAR PERPETRATOR
                FaceRecognitionResult result = faces_manager.MatchFace(face);

                //IF A VALID RESULT IS RETURNED
                if (result != null)
                {
                    //IF A VALID ID IS RETURNED
                    if (result.match_was_found)
                    {
                        //GET PERPETRATOR ASSOCIATED WITH ID
                        foreach (var perp in active_perpetrators)
                        {
                            if (perp.id == result.id)
                            {
                                result.identified_perpetrator = perp;
                                break;
                            }
                        }
                    }
                    Singleton.FACE_RECOGNITION_RESULTS.Enqueue(result);
                }
                return;
            }
        }
        private void AddDetectedFacesToListViewPanel()
        {
            try
            {
                if (its_time_to_pick_perpetrator_faces)
                {
                    if (detected_faces != null && current_frame != null)
                    {
                        ImageListView image_list_view = Singleton.SELECT_PERP_FACES.GetImageListView();

                        for (int i = 0; i < detected_faces.Length; i++)
                        {
                            //get face
                            Image <Bgr, byte> face = FramesManager.CropSelectedFace(detected_faces[i], current_frame.Clone());

                            //resize face
                            face = FramesManager.ResizeColoredImage(face, new Size(120, 120));

                            //add face to image list
                            Singleton.SELECT_PERP_FACES.suspect_faces.TryAdd(count, face);

                            //add face to image list view
                            image_list_view.Invoke(new AddImage(Singleton.SELECT_PERP_FACES.AddImage), new object[] { "face " + count, "face " + count, face.ToBitmap() });

                            //increment id counter
                            count++;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.Message);
            }
        }
Example #14
        public void FramesManagerDrawShapeAroundDetectedFacesTest()
        {
            Rectangle[] faces = FramesManager.DetectFacesInFrame(Singleton.FACE_PIC, Singleton.HAARCASCADE);
            bool        sucess;

            FramesManager.DrawShapeAroundDetectedFaces(faces[0], Singleton.FACE_PIC, out sucess);
            Assert.IsTrue(sucess);
        }
Example #15
        public void FramesManagerResizeImageTest()
        {
            Image <Bgr, byte> image = new Image <Bgr, byte>(new Bitmap(100, 100));
            Size new_size           = new Size(10, 10);

            image = FramesManager.ResizeColoredImage(image, new_size);
            Assert.IsNotNull(image);
        }
        public SelectPerpetratorForm(Image <Bgr, byte> perpetrator_frame)
        {
            InitializeComponent();
            Size new_size = new Size(perpetrator_frame_picture_box.Width, perpetrator_frame_picture_box.Height);

            perpetrator_frame_picture_box.Image = FramesManager.ResizeBitmap(perpetrator_frame.ToBitmap(), new_size);
            this.frame_with_perpetrator         = perpetrator_frame;
        }
Example #17
        public void FramesManagerDrawShapeOnTransparentBackGroundTest()
        {
            Point     starting_cordinate = new Point(10, 10);
            Size      size   = new Size(10, 10);
            Rectangle rec    = new Rectangle(starting_cordinate, size);
            Bitmap    bitmap = FramesManager.DrawShapeOnTransparentBackGround(rec, 100, 100);

            Assert.IsNotNull(bitmap);
        }
Example #18
        public void FaceDrawingThreadFaceDrawingThreadTest()
        {
            Image <Bgr, byte> frame       = Singleton.FACE_PIC;
            HaarCascade       haarcascade = Singleton.HAARCASCADE;

            Rectangle[]       faces  = FramesManager.DetectFacesInFrame(frame, haarcascade);
            FaceDrawingThread thread = new FaceDrawingThread(frame, faces, 1, 0);

            Assert.IsNotNull(thread);
        }
Example #19
        public void FaceDrawingThreadAddImageToQueueForDisplayTest()
        {
            HaarCascade haarcascade = Singleton.HAARCASCADE;

            Rectangle[]       faces  = FramesManager.DetectFacesInFrame(Singleton.FACE_PIC, haarcascade);
            FaceDrawingThread thread = new FaceDrawingThread(Singleton.FACE_PIC, faces, 1, 0);
            bool sucess = thread.AddImageToQueueForDisplay();

            Assert.IsTrue(sucess);
        }
Example #20
        private void HandleFilesHashesMessage(FilesHashesMessage message)
        {
            Application.Current.Dispatcher.Invoke(async () =>
            {
                // Updater
                if (await this.ShowUpdatesAsync(message.FilesHashes))
                {
                    Environment.Exit(0);
                    return;
                }

                // Loading
                try
                {
                    var controller = await this.ShowProgressAsync(LanguageManager.Translate("483"), Randomize.GetRandomLoadingText());

                    await Task.Run(async () =>
                    {
                        Protocol.Messages.MessagesBuilder.Initialize();
                        controller.SetProgress(0.14);
                        await Task.Delay(200);

                        TypesBuilder.Initialize();
                        controller.SetProgress(0.28);
                        await Task.Delay(200);

                        DataManager.Initialize(DTConstants.AssetsVersion, GlobalConfiguration.Instance.Lang);
                        controller.SetProgress(0.42);
                        await Task.Delay(200);

                        MapsManager.Initialize(DTConstants.AssetsVersion);
                        controller.SetProgress(0.56);
                        await Task.Delay(200);

                        FramesManager.Initialize();
                        controller.SetProgress(0.70);
                        await Task.Delay(200);

                        CommandsHandler.Initialize();

                        BreedsUtility.Initialize();
                        controller.SetProgress(1);
                        await Task.Delay(200);

                        LuaScriptManager.Initialize();
                    });

                    await controller.CloseAsync();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                }
            });
        }
Example #21
        public void FramesManagerOverLayBitmapToFormNewImageTest()
        {
            Point     starting_cordinate = new Point(10, 10);
            Size      size     = new Size(10, 10);
            Rectangle rec      = new Rectangle(starting_cordinate, size);
            Bitmap    bitmap   = FramesManager.DrawShapeOnTransparentBackGround(rec, 100, 100);
            Graphics  graphics = Graphics.FromImage(bitmap);
            bool      sucess   = FramesManager.OverLayBitmapToFormNewImage(bitmap, graphics);

            Assert.IsTrue(sucess);
        }
Example #22
        public void FaceDrawingThreadDoWorkTest()
        {
            Image <Bgr, byte> frame       = Singleton.FACE_PIC;
            HaarCascade       haarcascade = Singleton.HAARCASCADE;

            Rectangle[]       faces  = FramesManager.DetectFacesInFrame(frame, haarcascade);
            FaceDrawingThread thread = new FaceDrawingThread(frame, faces, 1, 0);

            thread.StartWorking();
            Assert.IsTrue(thread.IsRunning());
            thread.RequestStop();
        }
        private void ShowFaceRecognitionProgress()
        {
            //THIS KEEPS TRACK OF PROGRESS
            double progress_decimal = 1;

            //DISPLAY EACH OF THE STUDENTS' FACES IN THE PICTURE BOX FOR A FLEETING MOMENT; REPEAT UNTIL ALL FACES ARE DONE
            foreach (var student in students)
            {
                for (int i = 0; i < student.photos.Length; i++)
                {
                    //GET THE PERCENTAGE OF WORK DONE (students.Length * 5 BECAUSE EACH STUDENT HAS A MINIMUM OF 5 FACES)
                    int percentage_completed = (int)(((progress_decimal / (students.Length * 5) * 100)));

                    //RESIZE THE FACE SO IT MATCHES THE SIZE OF THE FACES ALREADY IN THE TRAINING SET
                    int width  = 120;
                    int height = 120;

                    student.photos[i] = FramesManager.ResizeColoredImage(student.photos[i], new Size(width, height));

                    //DISPLAY STUDENT FACE
                    SetControlPropertyThreadSafe(perpetrators_pictureBox, "Image", student.photos[i].ToBitmap());

                    if (percentage_completed >= 100)
                    {
                        if (face_recognition_result.match_was_found)
                        {
                            //UPDATE PROGRESS LABEL
                            progress_label.ForeColor = Color.Purple;
                            SetControlPropertyThreadSafe(progress_label, "Text", "Match\nFound");

                            //DISPLAY IDENTIFIED STUDENT FACE
                            SetControlPropertyThreadSafe(perpetrators_pictureBox, "Image", face_recognition_result.identified_student.photos[0].ToBitmap());
                        }
                        else
                        {
                            //UPDATE PROGRESS LABEL
                            progress_label.ForeColor = Color.Red;
                            SetControlPropertyThreadSafe(progress_label, "Text", "No\nMatch\nFound");
                        }
                    }
                    else
                    {
                        //UPDATE PROGRESS LABEL
                        SetControlPropertyThreadSafe(progress_label, "Text", "" + percentage_completed + "%");
                    }

                    //LET THE THREAD SLEEP
                    Thread.Sleep(SLEEP_TIME);

                    progress_decimal++;
                }
            }
        }
Example #24
        //ADDS A CAPTURED FRAME TO THREAD SAFE QUEUES
        //FOR EASY ACCESS WHEN THE FRAME IS PROCESSED BY MULTIPLE THREADS LATER
        public bool AddNextFrameToQueuesForProcessing()
        {
            //get next frame from camera
            current_frame = FramesManager.GetNextFrame(camera_capture);

            if (current_frame != null)
            {
                int  new_width  = Singleton.MAIN_WINDOW.GetControl(MainWindow.MainWindowControls.live_stream_image_box1).Width;
                int  new_height = Singleton.MAIN_WINDOW.GetControl(MainWindow.MainWindowControls.live_stream_image_box1).Height;
                Size new_size   = new Size(new_width, new_height);

                //add frame to queue for display
                Singleton.LIVE_FRAMES_TO_BE_DISPLAYED.Enqueue(FramesManager.ResizeColoredImage(current_frame.Clone(), new_size));

                //add frame to queue for storage
                Singleton.FRAMES_TO_BE_STORED.Enqueue(current_frame.Clone());

                //resize frame to save on memory and improve performance
                int width  = Singleton.MAIN_WINDOW.GetControl(MainWindow.MainWindowControls.review_image_box).Width;
                int height = Singleton.MAIN_WINDOW.GetControl(MainWindow.MainWindowControls.review_image_box).Height;

                Size size = new Size(width, height);

                current_frame = FramesManager.ResizeColoredImage(current_frame, size);

                //add frame to queue for face detection and recognition
                Singleton.LIVE_FRAMES_TO_BE_PROCESSED.Enqueue(current_frame.Clone());

                //return
                return(true);
            }

            //FRAME IS NULL
            //MEANING END OF FILE IS REACHED
            else
            {
                //ADD BLACK FRAME TO DATASTORE AND TERMINATE THREAD
                //ALSO SIGNAL TO OTHERS THAT THIS THREAD IS DONE
                WORK_DONE = true;
                running   = false;

                Debug.WriteLine("Terminating camera output");
                return(false);
            }
        }
    protected void Page_Load(object sender, EventArgs e)
    {
        CurrentMaster.BodyClass += " Buttons";

        // set button text
        btnOk.Text     = GetString("general.ok");
        btnApply.Text  = GetString("general.apply");
        btnCancel.Text = GetString("general.cancel");

        btnCancel.OnClientClick = FramesManager.GetCancelScript();
        btnApply.OnClientClick  = FramesManager.GetApplyScript();
        btnOk.OnClientClick     = FramesManager.GetOKScript();

        if (inline)
        {
            btnApply.Visible = false;
        }
    }
Example #26
        //DETECTS FACES IN THE CURRENT FRAME
        public bool DetectFacesInFrame()
        {
            try
            {
                //try to get a frame from the shared datastore for captured frames
                bool sucessfull = Singleton.LIVE_FRAMES_TO_BE_PROCESSED.TryDequeue(out current_frame);


                //if ok
                if (sucessfull)
                {
                    //detect faces in frame
                    detected_faces = FramesManager.DetectFacesInFrame(current_frame.Clone(), haarcascade);

                    if (detected_faces != null)
                    {
                        //for each face we have detected in the frame
                        foreach (var detected_face in detected_faces)
                        {
                            //get the face
                            Image <Bgr, byte> face = FramesManager.CropSelectedFace(detected_face, current_frame.Clone());

                            //add face to shared datastore so face recog thread can access it
                            Singleton.FACES_TO_RECOGNIZE.Enqueue(face);
                        }
                        detected_faces = null;
                    }
                    return(true);
                }

                //IF NO FRAMES ARE IN THE DATA STORE, CHECK WHETHER THE SUPPLIER THREAD IS STILL ALIVE
                else
                {
                    CheckForTerminationOfThisThread();
                    return(true);
                }
            }
            catch (Exception)
            {
                return(false);
            }
        }
    /// <summary>
    /// Load event handler
    /// </summary>
    protected override void OnLoad(EventArgs e)
    {
        base.OnLoad(e);

        chkRefresh.Text = GetString("WebpartProperties.Refresh");

        ltlScript.Text += ScriptHelper.GetScript("function GetRefreshStatus() { return document.getElementById('" + chkRefresh.ClientID + "').checked; }");

        // Set button texts
        btnOk.Text     = GetString("general.saveandclose");
        btnApply.Text  = GetString("general.apply");
        btnCancel.Text = GetString("general.cancel");

        // Set button click events
        btnCancel.OnClientClick = FramesManager.GetCancelScript();
        btnApply.OnClientClick  = FramesManager.GetApplyScript();
        btnOk.OnClientClick     = FramesManager.GetOKScript();

        string action = QueryHelper.GetString("tab", "properties");

        switch (action)
        {
        case "properties":
            break;

        case "code":
            break;

        case "binding":
            chkRefresh.Visible = false;
            btnApply.Visible   = false;
            btnOk.Visible      = false;
            btnCancel.Text     = GetString("WebpartProperties.Close");
            break;
        }
    }
Example #28
        public void FramesManagerGetNextFrameTest()
        {
            Image <Bgr, byte> image = FramesManager.GetNextFrame(new Capture());

            Assert.IsNotNull(image);
        }
Example #29
        public void FramesManagerPerformSeekOperationInVideoTest()
        {
            bool sucess = FramesManager.PerformSeekOperationInVideo(100, new Capture(Singleton.VIDEO_FILE_PATH));

            Assert.IsTrue(sucess);
        }
Example #30
        public bool DisplayFaceRecognitionProgress(Image <Bgr, byte> face)
        {
            if (face != null)
            {
                //IF THERE ARE PERPETRATORS TO COMPARE AGAINST
                if (active_perpetrators.Length != 0)
                {
                    //RESIZE THE FACE TO BE RECOGNIZED SO IT MATCHES THE SIZE OF THE FACES ALREADY IN THE TRAINING SET
                    int width  = 120;
                    int height = 120;

                    face = FramesManager.ResizeColoredImage(face, new Size(width, height));

                    //CLEAR PANEL IF ITEMS ARE TOO MANY
                    ClearPanelIfItemsAreMany();

                    //CREATE PICTURE BOX FOR FACE TO BE RECOGNIZED
                    unknown_face_pictureBox             = new MyPictureBox();
                    unknown_face_pictureBox.Location    = new Point(10, 10);
                    unknown_face_pictureBox.Size        = new Size(120, 120);
                    unknown_face_pictureBox.BorderStyle = BorderStyle.FixedSingle;
                    unknown_face_pictureBox.Image       = face.ToBitmap();

                    //CREATE PICTURE BOX FOR PERPETRATORS
                    perpetrators_pictureBox             = new MyPictureBox();
                    perpetrators_pictureBox.Location    = new Point(185, 10);
                    perpetrators_pictureBox.Size        = new Size(120, 120);
                    perpetrators_pictureBox.BorderStyle = BorderStyle.FixedSingle;

                    //CREATE PROGRESS LABEL
                    progress_label           = new Label();
                    progress_label.Location  = new Point(143, 60);
                    progress_label.ForeColor = Color.Green;
                    progress_label.Text      = "0%";

                    //CREATE PANEL CONTAINER FOR THE ABOVE CONTROLS
                    Panel panel = new Panel();
                    panel.AutoSize    = true;
                    panel.Location    = new Point(x, y);
                    panel.BorderStyle = BorderStyle.FixedSingle;
                    panel.Padding     = new Padding(10);

                    panel.Controls.AddRange(new Control[] { unknown_face_pictureBox, perpetrators_pictureBox, progress_label });

                    //SINCE THIS THREAD IS STARTED OFF THE GUI THREAD THEN INVOKES MAY BE REQUIRED
                    if (panel_live_stream.InvokeRequired)
                    {
                        //ADD GUI CONTROLS USING INVOKES
                        Action action = () => panel_live_stream.Controls.Add(panel);
                        panel_live_stream.Invoke(action);
                    }

                    //IF NO INVOKES ARE NEEDED THEN
                    else
                    {
                        //JUST ADD THE CONTROLS
                        panel_live_stream.Controls.Add(panel);
                    }

                    //CREATE A NEW PROGRESS THREAD TO SHOW FACE RECOG PROGRESS
                    ShowFaceRecognitionProgress();

                    //INCREASE THE GLOBAL Y SO NEXT PIC BOXES ARE DRAWN BELOW THIS ONE
                    y += 145;
                    return(true);
                }
            }
            return(false);
        }