Exemplo n.º 1
0
        //convertMilliSecondToEyeTrackerFrameNo
        /// <summary>
        /// Scans the experiment-interval file for the row belonging to <paramref name="participantNo"/>
        /// and converts its millisecond interval to eye-tracker frame numbers.
        /// Columns [1],[2] hold the first participant's start/end ms; columns [3],[4] the second's.
        /// </summary>
        /// <param name="fileName_experimentInterval">Path to the interval file (first line is a header).</param>
        /// <param name="participantNo">Participant id matched against column 0.</param>
        /// <param name="participantInfo">Selects which column pair to read (First vs Second).</param>
        /// <param name="frequency">Eye-tracker sampling frequency in Hz.</param>
        /// <param name="startingFrameNo">Out: first frame of the interval (unchanged if the participant row is not found).</param>
        /// <param name="endingFrameNo">Out: last frame of the interval (unchanged if the participant row is not found).</param>
        public static void findStartingEndingFrameNo(string fileName_experimentInterval, int participantNo, Constants.ParticipantInfo participantInfo, int frequency, ref int startingFrameNo, ref int endingFrameNo)
        {
            char[] delimiterChars = { ' ', '\t', ',' };

            // using-statement guarantees the reader is closed, even on the early return below
            // (the original leaked the StreamReader on every call).
            using (System.IO.StreamReader file_experiment_interval = new System.IO.StreamReader(fileName_experimentInterval))
            {
                file_experiment_interval.ReadLine(); //skip header

                string line_experiment_interval;
                while ((line_experiment_interval = file_experiment_interval.ReadLine()) != null)
                {
                    string[] words_experiment_interval = line_experiment_interval.Split(delimiterChars, StringSplitOptions.RemoveEmptyEntries);
                    if (Convert.ToInt32(words_experiment_interval[0]) != participantNo)
                    {
                        continue;
                    }

                    // Both branches of the original did the same computation on different columns;
                    // pick the column pair once instead of duplicating the logic.
                    int startColumn = (participantInfo == Constants.ParticipantInfo.Second) ? 3 : 1;
                    int startMs     = Convert.ToInt32(words_experiment_interval[startColumn]);
                    int endMs       = Convert.ToInt32(words_experiment_interval[startColumn + 1]);

                    //when calculate ending frame by converting ending millisecond value to frame number as in finding the starting frame number case, it might cause different number of frame numbers in a pair, because of conversion issues.
                    //thus ending frame number is calculated based on difference value which is absolutely same in a pair.
                    startingFrameNo = convertMilliSecondToEyeTrackerFrameNo(frequency, startMs); //round up, for instance for 30 Hz eye tracker 0-30 ms refers first frame not 0
                    endingFrameNo   = startingFrameNo + convertMilliSecondToEyeTrackerFrameNo(frequency, endMs - startMs) - 1;
                    return;
                }
            }
        }
Exemplo n.º 2
0
        /// <summary>
        /// Reads the speech-annotation file and fills <paramref name="dict_speech_annotation"/>
        /// (frame number -> "actor speechAct" string, merging when a frame already has an entry),
        /// plus the base/reference AOI dictionaries from the AOI summary files.
        /// </summary>
        /// <param name="dict_speech_annotation">Filled with one entry per annotated frame.</param>
        /// <param name="dict_base_AOI">Replaced with the base participant's AOI dictionary.</param>
        /// <param name="dict_ref_AOI">If non-null on entry, replaced with the reference participant's
        /// AOI dictionary; a null value selects single-participant mode.</param>
        /// <returns>One "start end" frame-interval string per participant.</returns>
        private List <string> fillDictionaries(ref Dictionary <int, string> dict_speech_annotation, ref Dictionary <int, string> dict_base_AOI, ref Dictionary <int, string> dict_ref_AOI)
        {
            List <string> starting_endingFrames = new List <string>();
            char[]        delimiterChars        = { ' ', '\t' };

            int interval_pair_1_starting = 0, interval_pair_1_ending = 0;
            int interval_pair_2_starting = 0, interval_pair_2_ending = 0;

            Constants.ParticipantInfo participantInfo = Constants.ParticipantInfo.First;
            UtilityFunctions.findStartingEndingFrameNo(fileName_expInterval, id, participantInfo, frequency, ref interval_pair_1_starting, ref interval_pair_1_ending);

            if (dict_ref_AOI != null)
            {
                participantInfo = Constants.ParticipantInfo.Second;
                UtilityFunctions.findStartingEndingFrameNo(fileName_expInterval, id, participantInfo, frequency, ref interval_pair_2_starting, ref interval_pair_2_ending);
            }

            //set starting ending frame values (second interval only when a pair is selected)
            starting_endingFrames.Add(interval_pair_1_starting + " " + interval_pair_1_ending);
            if (dict_ref_AOI != null)
            {
                starting_endingFrames.Add(interval_pair_2_starting + " " + interval_pair_2_ending);
            }

            // using-statement closes the annotation file on every exit path
            // (the original leaked all three StreamReaders).
            using (System.IO.StreamReader file_speechAnnotation = new System.IO.StreamReader(fileName_speechAnnotation))
            {
                bool   firstLine = true;
                string line_sa;

                while ((line_sa = file_speechAnnotation.ReadLine()) != null)
                {
                    string[] words_speech_annotation = line_sa.Split(delimiterChars, StringSplitOptions.RemoveEmptyEntries);
                    if (words_speech_annotation.Length <= 3)
                    {
                        continue; // skip malformed/short lines
                    }

                    string starting_time_str = words_speech_annotation[0];
                    int    starting_time     = Convert.ToInt32(starting_time_str);
                    int    ending_time       = Convert.ToInt32(words_speech_annotation[1]);

                    // "actor " followed by the speech act with internal spaces removed
                    string extended_speech_act = words_speech_annotation[2] + " ";
                    for (int i = 3; i < words_speech_annotation.Length; i++)
                    {
                        extended_speech_act += words_speech_annotation[i];
                    }

                    // On the first data line, decide which AOI summary file belongs to the base
                    // participant and load the dictionaries. (&& fixes the original's non-short-circuit &.)
                    if (firstLine && dict_ref_AOI != null)
                    {
                        string fileName_base, fileName_ref;
                        int    time_offset_between_participants;

                        if (UtilityFunctions.convertMilliSecondToEyeTrackerFrameNo(frequency, starting_time) == interval_pair_1_starting)
                        {
                            fileName_base = fileName_AOI_file_summary_first_pair;
                            fileName_ref  = fileName_AOI_file_summary_second_pair;
                            time_offset_between_participants = interval_pair_1_starting - interval_pair_2_starting;
                            baseisFirst = true;
                        }
                        else
                        {
                            fileName_base = fileName_AOI_file_summary_second_pair;
                            fileName_ref  = fileName_AOI_file_summary_first_pair;
                            time_offset_between_participants = interval_pair_2_starting - interval_pair_1_starting;
                            baseisFirst = false;
                        }

                        //diff_timeOffset_frameNo = UtilityFunctions.convertMilliSecondToEyeTrackerFrameNo(frequency, time_offset_between_participants);
                        diff_timeOffset_frameNo = time_offset_between_participants;

                        using (System.IO.StreamReader file_base = new System.IO.StreamReader(fileName_base))
                        {
                            dict_base_AOI = fill_AOI_dict(file_base);
                        }
                        using (System.IO.StreamReader file_ref = new System.IO.StreamReader(fileName_ref))
                        {
                            dict_ref_AOI = fill_AOI_dict(file_ref);
                        }

                        firstLine = false;
                    }
                    else if (firstLine) // single participant selected
                    {
                        using (System.IO.StreamReader file_base = new System.IO.StreamReader(fileName_AOI_file_summary_single))
                        {
                            dict_base_AOI = fill_AOI_dict(file_base);
                        }
                        // BUG FIX: the original never cleared firstLine here, so the AOI summary
                        // file was reopened and the dictionary refilled for every annotation line.
                        firstLine = false;
                    }

                    //when calculate ending frame by converting ending millisecond value to frame number as in finding the starting frame number case, it might cause different number of frame numbers in a pair, because of conversion issues.
                    //thus ending frame number is calculated based on difference value which is absolutely same in a pair.
                    int starting_frameNo = UtilityFunctions.convertnextMilliSecondToEyeTrackerFrameNo(frequency, starting_time_str);
                    int ending_frameNo   = starting_frameNo + UtilityFunctions.convertMilliSecondToEyeTrackerFrameNo(frequency, ending_time - starting_time) - 1;

                    for (int i = starting_frameNo; i <= ending_frameNo; i++)
                    {
                        string val;

                        if (dict_speech_annotation.TryGetValue(i, out val))
                        {
                            // A previous annotation already covers this frame: merge actor and
                            // speech act, adding the new value only when not already present.
                            string[] outputLine_words = val.Split(delimiterChars);

                            string updatedLine = "";

                            string actor      = outputLine_words[0];
                            string speech_act = "";
                            for (int index = 1; index < outputLine_words.Length; index++)
                            {
                                // BUG FIX: the original tested the frame counter (i == 1) instead of
                                // the word index, adding a stray leading space on most frames.
                                speech_act += (index == 1 ? "" : " ") + outputLine_words[index];
                            }

                            string actor_newline      = words_speech_annotation[2];
                            string speech_act_newline = "";
                            for (int index = 3; index < words_speech_annotation.Length; index++)
                            {
                                speech_act_newline += words_speech_annotation[index]; //remove spaces inside speechAct, as did we before
                            }

                            if (!actor.Contains(actor_newline))
                            {
                                updatedLine += actor + "," + actor_newline + " ";
                            }
                            else
                            {
                                updatedLine += actor_newline + " ";
                            }

                            if (!speech_act.Contains(speech_act_newline))
                            {
                                updatedLine += speech_act + "," + speech_act_newline;
                            }
                            else
                            {
                                updatedLine += speech_act_newline;
                            }

                            dict_speech_annotation[i] = updatedLine;
                        }
                        else
                        {
                            dict_speech_annotation[i] = extended_speech_act;
                        }
                    }
                }
            }

            return(starting_endingFrames);
        }