Example 1
        public static string getAOIsofFile_OpenFace(Constants.FilesForAOIDetection filesforAOIDetection, ImageConversion imageConversion)
        {
            string line_features, line_raw_data;

            char[] delimiterChars = { ' ', '\t', ',' };

            string[] words_features;
            int      frame_num, success;

            string[] words_eye_tracker_generated_file;
            string   fixation_type = "", raw_x = "", raw_y = "";
            int      frame_num_raw_data = 0;

            string AOIs            = "";
            string writeToFile_str = "";


            if (filesforAOIDetection.file_2d_landmarks != null)
            {
                filesforAOIDetection.file_2d_landmarks.ReadLine(); // skip the header row
            }
            while ((line_features = filesforAOIDetection.file_2d_landmarks.ReadLine()) != null)
            {
                words_features = line_features.Split(delimiterChars, StringSplitOptions.RemoveEmptyEntries);
                frame_num      = Convert.ToInt32(words_features[0]);

                line_raw_data = filesforAOIDetection.file_gaze_raw_data.ReadLine();


                Dictionary <int, string> manually_tracked_dict = null;
                string letter = "";
                // load the manually labelled AOIs (frame number -> label) when a label file was provided
                if (!String.IsNullOrEmpty(filesforAOIDetection.file_manuallyLabelledAOIs_path) && !String.IsNullOrWhiteSpace(filesforAOIDetection.file_manuallyLabelledAOIs_path))
                {
                    string[] manually_tracked_lines = File.ReadAllLines(filesforAOIDetection.file_manuallyLabelledAOIs_path);

                    manually_tracked_dict = new Dictionary <int, string>();
                    foreach (string line in manually_tracked_lines)
                    {
                        if (!String.IsNullOrEmpty(line) && !String.IsNullOrWhiteSpace(line))
                        {
                            int frameNo = Convert.ToInt32(line.Split(' ')[0]);
                            letter = line.Split(' ')[1];
                            UtilityFunctions.addOrUpdateDictionary(manually_tracked_dict, frameNo, letter);
                        }
                    }
                }

                bool         gaze_raw_data_empty = false, face_detection_empty = false;
                List <Point> face = new List <Point>();
                List <Point> nose_rect  = new List <Point>();
                List <Point> eye_rect   = new List <Point>();
                List <Point> mouth_rect = new List <Point>();
                if (line_raw_data != null)
                {
                    words_eye_tracker_generated_file = line_raw_data.Split(delimiterChars, StringSplitOptions.RemoveEmptyEntries);

                    frame_num_raw_data = Convert.ToInt32(words_eye_tracker_generated_file[0]);
                    while (frame_num_raw_data > frame_num)
                    {
                        writeToFile_str += frame_num + " " + "\n";
                        line_features    = filesforAOIDetection.file_2d_landmarks.ReadLine();
                        if (line_features == null)
                        {
                            break; // the landmarks file ended early; the frame-count check below reports the mismatch
                        }
                        words_features   = line_features.Split(delimiterChars, StringSplitOptions.RemoveEmptyEntries);
                        frame_num        = Convert.ToInt32(words_features[0]);
                    }
                    if (frame_num != frame_num_raw_data)
                    {
                        throw new Exception("There is a problem in the size of 2dLandmarks file and eye tracker generated raw gaze data file. Please be sure you import the accurate files!!");
                    }

                    fixation_type       = words_eye_tracker_generated_file[1];
                    success             = Convert.ToInt32(words_features[3]);
                    gaze_raw_data_empty = false; face_detection_empty = false;
                    if ((fixation_type == Constants.EyeTrackerNotEmptyLineTxt))
                    {
                        raw_x = words_eye_tracker_generated_file[2];
                        raw_y = words_eye_tracker_generated_file[3];
                    }
                    else
                    {
                        gaze_raw_data_empty = true;
                    }

                    // count landmark points reported as (0, 0); four or more means the face was effectively not detected
                    int total_num_of_empty = 0;
                    for (int i = 4; i < 31; i++)
                    {
                        face.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));

                        if ((int)Convert.ToDouble(words_features[i + 68]) == 0 && (int)Convert.ToDouble(words_features[i]) == 0)
                        {
                            total_num_of_empty++;
                        }
                    }

                    if (total_num_of_empty >= 4)
                    {
                        face_detection_empty = true;
                    }
                }

                bool addedtoAOIsFrame = false;
                if (manually_tracked_dict != null && manually_tracked_dict.Count > 0 && manually_tracked_dict.TryGetValue(frame_num, out letter)) // manually labelled AOIs override the face-tracking outcome
                {
                    if (letter.Equals("NULL"))                                                                                                    //while manual tracking, when user does not decide what is the appropriate AOI to assign, s/he press 0  and it assign Null to the related frame, most probably because of the empty gaze row data
                    {
                        string newAOIs = "";
                        newAOIs = assignTheProblematicIssueLabel(frame_num, AOIs, gaze_raw_data_empty, face_detection_empty);
                        if (String.IsNullOrEmpty(newAOIs)) // if NULL was assigned manually but there was in fact no face-detection or empty-gaze-data problem, assignTheProblematicIssueLabel does not append a line; in that case the frame's AOI is determined below from the detected face and raw gaze data
                        {
                            addedtoAOIsFrame = false;
                        }
                        else
                        {
                            AOIs             = newAOIs;
                            addedtoAOIsFrame = true;
                        }
                    }

                    else
                    {
                        AOIs            += frame_num + " " + letter + "\n";
                        addedtoAOIsFrame = true;
                    }
                }
                if (!addedtoAOIsFrame && line_raw_data != null)
                {
                    if (!gaze_raw_data_empty && !face_detection_empty)
                    {
                        // collect the eye, nose and mouth landmark points (x at column i, y at column i + 68 in the OpenFace output)
                        eye_rect.Add(new Point((int)Convert.ToDouble(words_features[4]), (int)Convert.ToDouble(words_features[4 + 68])));
                        eye_rect.Add(new Point((int)Convert.ToDouble(words_features[20]), (int)Convert.ToDouble(words_features[20 + 68])));
                        nose_rect.Add(new Point((int)Convert.ToDouble(words_features[5]), (int)Convert.ToDouble(words_features[5 + 68])));
                        nose_rect.Add(new Point((int)Convert.ToDouble(words_features[6]), (int)Convert.ToDouble(words_features[6 + 68])));
                        nose_rect.Add(new Point((int)Convert.ToDouble(words_features[19]), (int)Convert.ToDouble(words_features[19 + 68])));
                        nose_rect.Add(new Point((int)Convert.ToDouble(words_features[18]), (int)Convert.ToDouble(words_features[18 + 68])));

                        for (int i = 8; i < 19; i++)
                        {
                            mouth_rect.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));
                        }

                        for (int i = 21; i < 31; i++)
                        {
                            eye_rect.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));
                        }
                        for (int i = 31; i < 40; i++)
                        {
                            if (i == 31 || i == 32)
                            {
                                eye_rect.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));
                            }
                            else
                            {
                                nose_rect.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));
                            }
                        }
                        for (int i = 40; i < 46; i++)
                        {
                            eye_rect.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));
                        }
                        for (int i = 46; i < 52; i++)
                        {
                            eye_rect.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));
                        }
                        for (int i = 52; i < 72; i++)
                        {
                            mouth_rect.Add(new Point((int)Convert.ToDouble(words_features[i]), (int)Convert.ToDouble(words_features[i + 68])));
                        }



                        Dictionary <string, List <Point> > allFeaturePointLists = new Dictionary <string, List <Point> >();

                        allFeaturePointLists.Add(Constants.face, face);
                        allFeaturePointLists.Add(Constants.nose_rect, nose_rect);
                        allFeaturePointLists.Add(Constants.eye_rect, eye_rect);
                        allFeaturePointLists.Add(Constants.mouth_rect, mouth_rect);


                        string AOI = UtilityFunctions.determineAOIForFeatures(raw_x, raw_y, allFeaturePointLists, imageConversion);
                        AOIs += frame_num + " " + AOI + "\n";
                    }

                    else
                    {
                        AOIs = assignTheProblematicIssueLabel(frame_num, AOIs, gaze_raw_data_empty, face_detection_empty);
                    }
                }
            }

            return(AOIs);
        }
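
getAOIsofFile_OpenFace builds its result as plain text, one "frame label" pair per line (the label may carry extra rectangle information appended by determineAOIForFeatures in Example 2). The snippet below is a minimal sketch of how a caller could read that string back into a frame-indexed dictionary; parseAOIs is a hypothetical helper, not part of the original code, and it assumes the same usings (System, System.Collections.Generic) as the examples here.

        // Hedged sketch: parse the "frame label" lines produced by getAOIsofFile_OpenFace.
        // parseAOIs is a hypothetical helper (not part of the original code); it assumes the
        // first token of each line is the frame number and the rest of the line is the label.
        public static Dictionary <int, string> parseAOIs(string AOIs)
        {
            Dictionary <int, string> result = new Dictionary <int, string>();
            foreach (string line in AOIs.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries))
            {
                string trimmed = line.Trim();
                if (trimmed.Length == 0)
                {
                    continue;
                }
                int    space = trimmed.IndexOf(' ');
                int    frame = Convert.ToInt32(space < 0 ? trimmed : trimmed.Substring(0, space));
                string label = space < 0 ? "" : trimmed.Substring(space + 1);
                result[frame] = label; // later entries for the same frame overwrite earlier ones
            }
            return result;
        }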
Example 2
        public static string determineAOIForFeatures(string raw_x, string raw_y, Dictionary <string, List <Point> > allFeaturePointLists, ImageConversion imageConversion)
        {
            try
            {
                List <Point> convexhull_of_face = Geometry.MakeConvexHull(allFeaturePointLists[Constants.face]);
                bool         insideFace         = false;

                int x = Convert.ToInt32(raw_x);
                int y = Convert.ToInt32(raw_y);

                if (imageConversion.error_x != 0 || imageConversion.error_y != 0)
                {
                    List <Point> list_p = imageConversion.eyeTrackerToFaceTrackingFrameworkWithErrors(x, y); //holds A,B,C,D points of rectangle sequentially
                    //check each point A->B; A->C; C->D; B->D
                    Point A = list_p.ElementAt(0);
                    Point B = list_p.ElementAt(1);
                    Point C = list_p.ElementAt(2);
                    Point D = list_p.ElementAt(3);


                    foreach (Point p in list_p)
                    { // check whether any vertex of the error rectangle lies inside the face polygon
                        insideFace = Geometry.IsPointInPolygon(convexhull_of_face.ToArray(), p);
                        if (insideFace)
                        {
                            x = p.X;
                            y = p.Y;
                            break;
                        }
                    }


                    if (!insideFace)
                    { // otherwise check whether any face-polygon vertex lies inside the error rectangle
                        Rectangle rec = new Rectangle(A.X, A.Y, (B.X - A.X), (C.Y - A.Y));
                        foreach (Point p in convexhull_of_face.ToArray())
                        {
                            if (rec.Contains(p))
                            {
                                insideFace = true;
                                x          = p.X;
                                y          = p.Y;
                                break;
                            }
                        }
                    }
                }

                else
                {
                    Point p = imageConversion.eyeTrackerToFaceTrackingFramework(x, y);
                    x          = p.X;
                    y          = p.Y;
                    insideFace = Geometry.IsPointInPolygon(convexhull_of_face.ToArray(), new Point(x, y));
                }



                // Geometry.PointInPolygon is available as an alternative:
                // bool insideFace = Geometry.PointInPolygon(convexhull_of_face.ToArray(), new Point(x, y));

                string AOI = "";
                string whichpart_of_face = "";
                if (insideFace)
                {
                    AOI = Constants.AOI_e;

                    bool insideEye_rect   = Geometry.minBoundingRecNotRotated(allFeaturePointLists[Constants.eye_rect]).Contains(x, y);
                    bool insideNose_rect  = Geometry.minBoundingRecNotRotated(allFeaturePointLists[Constants.nose_rect]).Contains(x, y);
                    bool insideMouth_rect = Geometry.minBoundingRecNotRotated(allFeaturePointLists[Constants.mouth_rect]).Contains(x, y);
                    if (insideEye_rect)
                    {
                        whichpart_of_face += Constants.eye_rect;
                    }
                    else if (insideMouth_rect)
                    {
                        whichpart_of_face += Constants.mouth_rect;
                    }
                    else // points inside the face but outside the eye and mouth rectangles are treated as the nose region
                    {
                        whichpart_of_face += Constants.nose_rect;
                    }

                    Rectangle minBoundingBox = Geometry.minBoundingRecNotRotated(allFeaturePointLists[Constants.face]);
                    int       rec_x          = minBoundingBox.X;
                    int       rec_y          = minBoundingBox.Y;
                    int       rec_width      = minBoundingBox.Width;
                    int       rec_height     = minBoundingBox.Height;
                    string    min_rect       = " " + rec_x + "," + rec_y + "," + rec_width + "," + rec_height;
                    whichpart_of_face += min_rect;
                }
                else
                {
                    Rectangle minBoundingBox = Geometry.minBoundingRecNotRotated(allFeaturePointLists[Constants.face]);
                    int       rec_x          = minBoundingBox.X;
                    int       rec_y          = minBoundingBox.Y;
                    int       rec_width      = minBoundingBox.Width;
                    int       rec_height     = minBoundingBox.Height;
                    string    min_rect       = " " + rec_x + "," + rec_y + "," + rec_width + "," + rec_height;

                    // the face is closer to an oval than a rectangle; this branch labels points that fall inside the bounding box but outside the convex hull
                    if (minBoundingBox.Contains(x, y))
                    {
                        if (x <= (rec_width / 2 + rec_x))
                        {
                            AOI = Constants.AOI_d;
                        }
                        else
                        {
                            AOI = Constants.AOI_f;
                        }
                    }

                    else if (x >= rec_x && x <= (rec_x + rec_width))
                    {
                        if (y > (rec_y + rec_height))
                        {
                            AOI = Constants.AOI_h;
                        }
                        else if (y < rec_y)
                        {
                            AOI = Constants.AOI_b;
                        }
                    }
                    else if (y >= rec_y && y <= (rec_y + rec_height))
                    {
                        if (x < rec_x)
                        {
                            AOI = Constants.AOI_d;
                        }
                        else if (x > (rec_x + rec_width))
                        {
                            AOI = Constants.AOI_f;
                        }
                    }
                    else if (x < rec_x && y < rec_y)
                    {
                        AOI = Constants.AOI_a;
                    }
                    else if ((x > (rec_x + rec_width)) && (y > (rec_y + rec_height)))
                    {
                        AOI = Constants.AOI_i;
                    }
                    else if (x < rec_x && y > (rec_y + rec_height))
                    {
                        AOI = Constants.AOI_g;
                    }
                    else if ((x > (rec_x + rec_width)) && (y < rec_y))
                    {
                        AOI = Constants.AOI_c;
                    }



                    AOI += min_rect + " " + x + "," + y;
                }

                return(AOI + " " + whichpart_of_face);
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }
        }
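
When the gaze point lies outside the face convex hull, determineAOIForFeatures classifies it against the face's axis-aligned bounding box into a 3x3 grid of labels: AOI_a, AOI_b, AOI_c above the face, AOI_d and AOI_f to its left and right (AOI_e is the face itself), and AOI_g, AOI_h, AOI_i below it. The following is a minimal sketch of just that grid classification, assuming System.Drawing.Rectangle as in the original; the letters "a".."i" stand in for the Constants.AOI_* values, and the bounding-box/coordinate string the original appends to the label is omitted.

        // Hedged sketch of the 3x3 grid classification used for gaze points outside the face.
        // "a".."i" stand in for Constants.AOI_a..Constants.AOI_i (row-major: a b c / d e f / g h i,
        // with "e" reserved for points inside the convex hull and therefore never returned here).
        private static string classifyOutsideFace(Rectangle faceBox, int x, int y)
        {
            if (faceBox.Contains(x, y))
            {
                // inside the bounding box but outside the convex hull: left half -> "d", right half -> "f"
                return x <= faceBox.X + faceBox.Width / 2 ? "d" : "f";
            }

            int col = x < faceBox.X ? 0 : (x > faceBox.X + faceBox.Width  ? 2 : 1);
            int row = y < faceBox.Y ? 0 : (y > faceBox.Y + faceBox.Height ? 2 : 1);

            string[,] grid = { { "a", "b", "c" }, { "d", "", "f" }, { "g", "h", "i" } };
            return grid[row, col]; // the centre cell is only reachable on boundary points and stays empty, as in the original
        }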
Example 3
        private void btn_analyse_faceAsROI_Click(object sender, EventArgs e)
        {
            bool   error          = false;
            string outputFileName = controls.txt_outputFile_faceAsROI.Text;

            if (String.IsNullOrEmpty(outputFileName) || String.IsNullOrWhiteSpace(outputFileName))
            {
                controls.errorProvider_outputFile_faceAsROI.SetError(controls.txt_outputFile_faceAsROI, Constants.MESSAGE_SELECT_OUTPUT_FILE);
                error = true;
            }
            else
            {
                controls.errorProvider_outputFile_faceAsROI.Clear();
                controls.errorProvider_outputFile_faceAsROI.SetError(controls.txt_outputFile_faceAsROI, "");
            }

            string rawGazeDataFileName = controls.txt_rawGazeDataFile_faceAsROI.Text;

            if (String.IsNullOrEmpty(rawGazeDataFileName) || String.IsNullOrWhiteSpace(rawGazeDataFileName) || !File.Exists(rawGazeDataFileName))
            {
                controls.errorProvider_rawGazeDataFile_faceAsROI.SetError(controls.txt_rawGazeDataFile_faceAsROI, Constants.MESSAGE_SELECT_RAW_GAZE_DATA_FILE);
                error = true;
            }
            else
            {
                controls.errorProvider_rawGazeDataFile_faceAsROI.Clear();
                controls.errorProvider_rawGazeDataFile_faceAsROI.SetError(controls.txt_rawGazeDataFile_faceAsROI, "");
            }

            string landmarkFileName = controls.txt_2dlandmark_faceAsROI.Text;

            if (String.IsNullOrEmpty(landmarkFileName) || String.IsNullOrWhiteSpace(landmarkFileName) || !File.Exists(landmarkFileName))
            {
                controls.errorProvider_2dlandmark_faceAsROI.SetError(controls.txt_2dlandmark_faceAsROI, Constants.MESSAGE_SELECT_2dLANDMARK_FILE);
                error = true;
            }
            else
            {
                controls.errorProvider_2dlandmark_faceAsROI.Clear();
                controls.errorProvider_2dlandmark_faceAsROI.SetError(controls.txt_2dlandmark_faceAsROI, "");
            }

            string imageSizeTrackingFrameworkFileName = controls.txt_imageSizeTrakingFramework_faceAsROI.Text;

            if (String.IsNullOrEmpty(imageSizeTrackingFrameworkFileName) || String.IsNullOrWhiteSpace(imageSizeTrackingFrameworkFileName) || !File.Exists(imageSizeTrackingFrameworkFileName))
            {
                controls.errorProvider_imageSizeTrakingFramework_faceAsROI.SetError(controls.txt_imageSizeTrakingFramework_faceAsROI, Constants.MESSAGE_SELECT_IMAGE_SIZE_TRACKING_FRAMEWORK_FILE);
                error = true;
            }
            else
            {
                controls.errorProvider_imageSizeTrakingFramework_faceAsROI.Clear();
                controls.errorProvider_imageSizeTrakingFramework_faceAsROI.SetError(controls.txt_imageSizeTrakingFramework_faceAsROI, "");
            }

            string eyetrackerImageWidth = controls.txt_imageSizeEyeTrackerWidth_faceAsROI.Text;
            string eyetrackerImageHeight = controls.txt_imageSizeEyeTrackerHeight_faceAsROI.Text;
            int    eye_tracker_width = 0, eye_tracker_height = 0;

            if (String.IsNullOrEmpty(eyetrackerImageWidth) || String.IsNullOrWhiteSpace(eyetrackerImageWidth) || (!int.TryParse(eyetrackerImageWidth, out eye_tracker_width)) ||
                String.IsNullOrEmpty(eyetrackerImageHeight) || String.IsNullOrWhiteSpace(eyetrackerImageHeight) || (!int.TryParse(eyetrackerImageHeight, out eye_tracker_height)))
            {
                controls.errorProvider_imageSizeEyeTracker_faceAsROI.SetError(controls.txt_imageSizeEyeTrackerHeight_faceAsROI, Constants.MESSAGE_ENTER_EYE_TRACKER_IMAGE_RESOLUTION);
                error = true;
            }
            else
            {
                controls.errorProvider_imageSizeEyeTracker_faceAsROI.Clear();
                controls.errorProvider_imageSizeEyeTracker_faceAsROI.SetError(controls.txt_imageSizeEyeTrackerHeight_faceAsROI, "");
            }

            string eyetrackerImageErrorWidth = controls.txt_errorSizeEyeTrackerWidth_faceAsROI.Text;
            string eyetrackerImageErrorHeight = controls.txt_errorSizeEyeTrackerHeight_faceAsROI.Text;
            double eye_tracker_error_width = -1, eye_tracker_error_height = -1;

            bool error_in_errorValue = false;

            if (!String.IsNullOrEmpty(eyetrackerImageErrorWidth) && !String.IsNullOrWhiteSpace(eyetrackerImageErrorWidth))
            {
                if (!double.TryParse(eyetrackerImageErrorWidth, out eye_tracker_error_width))
                {
                    controls.errorProvider_errorSizeEyeTracker_visualizeTracking.SetError(controls.txt_errorSizeEyeTrackerHeight_faceAsROI, Constants.MESSAGE_ENTER_DOUBLE_VALUE);
                    error = true;
                    error_in_errorValue = true;
                }
            }

            if (!String.IsNullOrEmpty(eyetrackerImageErrorHeight) && !String.IsNullOrWhiteSpace(eyetrackerImageErrorHeight))
            {
                if (!double.TryParse(eyetrackerImageErrorHeight, out eye_tracker_error_height))
                {
                    controls.errorProvider_errorSizeEyeTracker_visualizeTracking.SetError(controls.txt_errorSizeEyeTrackerHeight_faceAsROI, Constants.MESSAGE_ENTER_DOUBLE_VALUE);
                    error = true;
                    error_in_errorValue = true;
                }
            }
            if (!error_in_errorValue)
            {
                controls.errorProvider_errorSizeEyeTracker_visualizeTracking.Clear();
                controls.errorProvider_errorSizeEyeTracker_visualizeTracking.SetError(controls.txt_errorSizeEyeTrackerHeight_faceAsROI, "");
            }

            if (error)
            {
                return;
            }


            System.IO.StreamReader file_2d_landmarks  = new System.IO.StreamReader(landmarkFileName);
            System.IO.StreamReader file_gaze_raw_data = new System.IO.StreamReader(rawGazeDataFileName);
            System.IO.StreamReader file_image_size_trackingframework = new System.IO.StreamReader(imageSizeTrackingFrameworkFileName);

            file_image_size_trackingframework.ReadLine(); // skip the header row
            string[] tracking_framework_image_size;
            char[]   delimiterChars = { ' ', '\t', ',' };
            tracking_framework_image_size = file_image_size_trackingframework.ReadLine().Split(delimiterChars, StringSplitOptions.RemoveEmptyEntries);
            ImageConversion imageConversion = new ImageConversion(eye_tracker_width, eye_tracker_height, Convert.ToInt32(tracking_framework_image_size[0]), Convert.ToInt32(tracking_framework_image_size[1]));

            if (eye_tracker_error_width != -1 || eye_tracker_error_height != -1)
            {
                imageConversion.set_errors_of_eye_tracker(eye_tracker_error_width <= 0 ? 0 : eye_tracker_error_width, eye_tracker_error_height <= 0 ? 0 : eye_tracker_error_height);
            }

            Constants.FilesForAOIDetection filesForAOIDetection = new Constants.FilesForAOIDetection(file_2d_landmarks, file_gaze_raw_data);
            string AOIs = UtilityFunctions.getAOIsofFile_OpenFace(filesForAOIDetection, imageConversion);

            file_2d_landmarks.Close();
            file_gaze_raw_data.Close();
            file_image_size_trackingframework.Close();

            System.IO.File.WriteAllText(outputFileName, AOIs);
            MessageBox.Show("Successfully done!");
        }
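
One caveat in the handler above: the three StreamReaders are closed only after getAOIsofFile_OpenFace returns, so they stay open if that call throws (for example on the frame-count mismatch in Example 1). Below is a minimal sketch of the same read/analyse/write section wrapped in using blocks, reusing the locals already validated above (landmarkFileName, rawGazeDataFileName, imageSizeTrackingFrameworkFileName, outputFileName, eye_tracker_width, eye_tracker_height and the error values); it is a suggested variant, not the original implementation.

            // Hedged sketch: same analysis section as above, with the readers wrapped in using
            // blocks so they are disposed even if getAOIsofFile_OpenFace throws.
            char[] delimiterChars = { ' ', '\t', ',' };
            string AOIs;
            using (var file_2d_landmarks = new System.IO.StreamReader(landmarkFileName))
            using (var file_gaze_raw_data = new System.IO.StreamReader(rawGazeDataFileName))
            using (var file_image_size_trackingframework = new System.IO.StreamReader(imageSizeTrackingFrameworkFileName))
            {
                file_image_size_trackingframework.ReadLine(); // skip the header row
                string[] tracking_framework_image_size = file_image_size_trackingframework.ReadLine().Split(delimiterChars, StringSplitOptions.RemoveEmptyEntries);
                ImageConversion imageConversion = new ImageConversion(eye_tracker_width, eye_tracker_height, Convert.ToInt32(tracking_framework_image_size[0]), Convert.ToInt32(tracking_framework_image_size[1]));

                if (eye_tracker_error_width != -1 || eye_tracker_error_height != -1)
                {
                    imageConversion.set_errors_of_eye_tracker(eye_tracker_error_width <= 0 ? 0 : eye_tracker_error_width, eye_tracker_error_height <= 0 ? 0 : eye_tracker_error_height);
                }

                Constants.FilesForAOIDetection filesForAOIDetection = new Constants.FilesForAOIDetection(file_2d_landmarks, file_gaze_raw_data);
                AOIs = UtilityFunctions.getAOIsofFile_OpenFace(filesForAOIDetection, imageConversion);
            }

            System.IO.File.WriteAllText(outputFileName, AOIs);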