/// <summary>
/// Processes one captured audio buffer through the NMS COM analysis engine. When the engine
/// reports a completed analysis segment, this overload patches the extracted feature vector
/// into line 24 of two ARFF template files, runs the external "classify.bat" classifier,
/// parses a one-digit valence and arousal out of its output, appends them to
/// "LogFeatureVector.csv" and "..\..\voiceOutput.txt", and logs segment data back into the
/// COM engine. At the end it blocks until voiceOutput.txt disappears (consumed by a reader
/// process, presumably "Alex" — TODO confirm the consumer).
/// </summary>
/// <param name="inpBuf">Raw audio samples, passed by ref to the COM engine.</param>
/// <param name="segmentID">Analysis-segment counter; incremented when a segment completes.</param>
/// <param name="cEndPosSec">Segment end position; advanced by 2 on every call.</param>
/// <param name="cStartPosSec">Segment start position; rewound or reset per engine result.</param>
private void ProcessBuffer(ref Array inpBuf, ref int segmentID, ref int cEndPosSec, ref int cStartPosSec)
{
    /* Read the initial time (accumulated into ProcDuration below). */
    startTime = DateTime.Now;
    //bufSize = (short)(bufferSize - 1);
    bufSize = bufferSize;
    Console.WriteLine("Size of the buffer is: " + bufSize.ToString());
    // Every argument is in/out; the engine fills emoValsArray, aIres, bStr, testbuf, brderS.
    processBufferResult = nmsCOMcallee.nmsProcessBuffer(ref inpBuf, ref bufSize, ref emoValsArray, ref aIres, ref bStr, ref testbuf, ref testBufLeng, ref brderS);
    cEndPosSec += 2; // each buffer presumably spans 2 seconds — TODO confirm
    /* If Analysis is ready */
    Console.WriteLine("Sound captured and processed");
    Console.WriteLine(processBufferResult);
    if (processBufferResult == NMS_PROCESS_ANALYSISREADY)
    {
        silenceCount = 0;
        emoVals = CopyValuesFromEmoArrayIntoEmotionsStructure(emoValsArray, aIres);
        String fvStr = CopyValuesFromEmoArrayIntoString(emoValsArray, aIres);
        Console.WriteLine("Features extracted!");
        Console.WriteLine(fvStr);
        // Patch the fresh feature vector into line 24 of both ARFF templates
        // (assumes each template has at least 24 lines — TODO confirm files).
        string[] lines = System.IO.File.ReadAllLines("FeatureVectorValence.arff");
        lines[23] = fvStr;
        System.IO.File.WriteAllLines("FeatureVectorValence.arff", lines);
        lines = System.IO.File.ReadAllLines("FeatureVectorArousal.arff");
        lines[23] = fvStr;
        System.IO.File.WriteAllLines("FeatureVectorArousal.arff", lines);
        //Run command prompt command
        // Start the child process.
        System.Diagnostics.Process p = new System.Diagnostics.Process();
        // Redirect the output stream of the child process.
        p.StartInfo.UseShellExecute = false;
        p.StartInfo.RedirectStandardOutput = true;
        p.StartInfo.FileName = "classify.bat";
        p.Start();
        // Do not wait for the child process to exit before
        // reading to the end of its redirected stream.
        // p.WaitForExit();
        // Read the output stream first and then wait
        // (reading before WaitForExit avoids the redirected-stdout deadlock).
        string output = p.StandardOutput.ReadToEnd();
        p.WaitForExit();
        p.Close();
        //System.Console.WriteLine(output);
        string[] tokens = output.Split(':');
        //for(int i=0; i<tokens.Length; i++)
        //{
        //    Console.WriteLine("#"+i+"-"+tokens[i]);
        //}
        //Parse out the valence and arousal
        // Only the sign plus the leading digit of each value is kept; assumes the classifier
        // output has ':'-separated fields with valence in tokens[3] and arousal in tokens[6]
        // — TODO confirm the classify.bat output format (IndexOutOfRange if it changes).
        if (tokens[3][0] == '-') valence = "-" + tokens[3][1].ToString();
        else valence = tokens[3][0].ToString();
        if (tokens[6][0] == '-') arousal = "-" + tokens[6][1].ToString();
        else arousal = tokens[6][0].ToString();
        Console.WriteLine("Valence: " + valence);
        Console.WriteLine("Arousal: " + arousal);
        // Append feature vector + classification to the running CSV log.
        log = File.AppendText("LogFeatureVector.csv");
        log.WriteLine(dataArrivedTime + "," + fvStr + "," + valence + "," + arousal);
        log.Close();
        // Publish the pair for the external consumer; the wait loop below blocks
        // until this file is picked up (deleted).
        voiceOutput = File.AppendText("..\\..\\voiceOutput.txt");
        voiceOutput.WriteLine(valence.ToString() + "," + arousal.ToString());
        voiceOutput.Close();
        nmsCOMcallee.nmsQA_CollectAgentScoreData();
        // Grow the cache in chunks of 101 entries so the index below is always valid.
        if (segmentID >= lioNetResultsCache.Count)
        {
            for (int i = 0; i <= 100; i++) lioNetResultsCache.Add(string.Empty);
        }
        lioNetResultsCache[segmentID] = bStr;
        nmsCOMcallee.nmsQA_Logdata(ref segmentID, ref cStartPosSec, ref cEndPosSec);
        nmsCOMcallee.nmsSD_LogData();
        nmsCOMcallee.nmsCollectProfiler();
        cStartPosSec = 0;
        segmentID ++;
        /* Read the time. */
        stopTime = DateTime.Now;
        ProcDuration += stopTime - startTime;
    }
    /* Voice Detected: rewind the end mark and start the segment here. */
    else if (processBufferResult == NMS_PROCESS_VOICEDETECTED && count < countNum && (cStartPosSec == 0))
    {
        cEndPosSec -= 2;
        cStartPosSec = cEndPosSec;
    }
    /* The QA5Core fail to identify the buffer */
    else if (processBufferResult == -1 && count < countNum)
    {
        cStartPosSec = 0;
    }
    /* Silence Detected */
    else if (processBufferResult == NMS_PROCESS_SILENCE && count < countNum)
    {
        cStartPosSec = 0;
        silenceCount++;
    }
    /* Reset silenceCount if no voice was detected */
    if (shortTermCount == 0 && silenceCount == 2) silenceCount = 0;
    /* Return the Dominant Emotion after two non sequential silences */
    if (silenceCount == 2 && processBufferResult == NMS_PROCESS_SILENCE)
    {
        /* Read the end time */
        DateTime stoptime = DateTime.Now; // NOTE(review): assigned but unused beyond this scope
        /* The processing duration */
        Console.Write("Processing Time: ");
        Console.WriteLine(ProcDuration);
        ProcDuration = TimeSpan.Zero;
    }
    /* If Program is running with determined time: tear down audio I/O. */
    if (count == countNum)
    {
        cEndPosSec = 0;
        cStartPosSec = 0;
        /* Stop audio output */
        if ((m_Player != null))
            try { m_Player.Dispose(); } finally { m_Player = null; }
        /* Stop audio input */
        if ((m_Recorder != null))
            try { m_Recorder.Dispose(); } finally { m_Recorder = null; }
        /* Clear All Pending Data */
        m_Fifo.Flush();
    }
    /* Running during defined time */
    count++;
    // Block until the consumer removes voiceOutput.txt; polling every 100 ms.
    while (File.Exists(@"..\..\voiceOutput.txt"))
    {
        Console.WriteLine("Waiting for Alex...");
        Thread.Sleep(100);
    }
    Console.WriteLine("Start timer...");
    // Wall-clock timestamp packed as HHmmssfff into an int (e.g. 14:03:05.250 -> 140305250).
    oldRecordedTime = Int32.Parse(DateTime.Now.ToString("HHmmssfff"));
}
/// <summary>
/// Processes one captured audio buffer through the NMS COM analysis engine and accumulates
/// per-segment emotion values into running sums. After two silences (with at least one
/// analyzed segment in between), it classifies the dominant emotion from the accumulated
/// sums, writes it to "HumanEmo.txt" for the main controller, and appends a
/// "mm:ss~mm:ss affect" line to "HumanAffectHistory.txt".
/// </summary>
/// <param name="inpBuf">Raw audio samples, passed by ref to the COM engine.</param>
/// <param name="segmentID">Analysis-segment counter; incremented when a segment completes.</param>
/// <param name="cEndPosSec">Segment end position; advanced by 2 on every call.</param>
/// <param name="cStartPosSec">Segment start position; rewound or reset per engine result.</param>
private void ProcessBuffer(ref Array inpBuf, ref int segmentID, ref int cEndPosSec, ref int cStartPosSec)
//private void ProcessBuffer(ref int segmentID,
//                           ref int cEndPosSec,
//                           ref int cStartPosSec)
{
    /* Read the initial time. */
    startTime = DateTime.Now;
    // NOTE(review): these null-guards are immediately made redundant by the
    // unconditional re-allocations a few lines below.
    if (emoValsArray == null)
    {
        Console.WriteLine("ENTERED emovalsarray");
        emoValsArray = new short[21];
    }
    if (testbuf == null)
    {
        Console.WriteLine("ENTERED testbuf");
        testbuf = new short[22051];
    }
    // Re-initialize every in/out argument before handing it to the COM engine.
    bufSize = (short)(bufferSize - 1);
    emoValsArray = new short[21];
    aIres = "";
    bStr = "";
    testbuf = new short[22051];
    testBufLeng = 22050;
    brderS = -1.0;
    // A separate array receives the engine's emotion values; the field emoValsArray
    // is NOT what gets read back in this overload.
    Array emoValsArray2 = new short[21];
    processBufferResult = nmsCOMcallee.nmsProcessBuffer(
        ref inpBuf, ref bufSize, ref emoValsArray2, ref aIres, ref bStr,
        ref testbuf, ref testBufLeng, ref brderS);
    cEndPosSec += 2; // each buffer presumably spans 2 seconds — TODO confirm
    /* If Analysis is ready */
    Console.WriteLine("Is analysis ready? .... {0}", processBufferResult);
    if (processBufferResult == NMS_PROCESS_ANALYSISREADY && bStr != null && aIres != null)
    {
        silenceCount = 0;
        emoVals = CopyValuesFromEmoArrayIntoEmotionsStructure(emoValsArray2, aIres);
        //csv
        //string newLine = CopyValuesFromEmoArrayIntoString(emoValsArray, aIres);
        // Create csv file to write it in
        //File.AppendAllText(resultsPath, newLine);
        //add values to the sum array
        CopyValuesFromEmoArrayIntoEmotionSums(emoValsArray2, aIres);
        successSegNum++;
        /* Store values of the current buffer into the running sums used by the
           dominant-emotion classifier below. */
        energysum += emoVals.Energy;
        contentsum += emoVals.content;
        upsetSum += emoVals.upset;
        angrySum += emoVals.angry;
        stressSum += emoVals.stress;
        concentrationSum += emoVals.concentration_level;
        intenThinkSum += emoVals.intensive_thinking;
        safsum += emoVals.saf;
        excitSum += emoVals.excitement;
        atmossum += emoVals.Atmos;
        emocogsum += emoVals.EmoCogRatio;
        embarrasSum += emoVals.embarrassment;
        hesitatSum += emoVals.hesitation;
        imag_actSum += emoVals.imagination_activity;
        extr_statSum += emoVals.extremeState;
        uncertSum += emoVals.uncertainty;
        brainpowSum += emoVals.BrainPower;
        max_volSum += emoVals.maxAmpVol;
        voice_energSum += emoVals.VoiceEnergy;
        shortTermCount++; // number of analyzed segments since the last reset
        nmsCOMcallee.nmsQA_CollectAgentScoreData();
        // Grow the cache in chunks of 101 entries so the index below is always valid.
        if (segmentID >= lioNetResultsCache.Count)
        {
            for (int i = 0; i <= 100; i++) lioNetResultsCache.Add(string.Empty);
        }
        lioNetResultsCache[segmentID] = bStr;
        nmsCOMcallee.nmsQA_Logdata(ref segmentID, ref cStartPosSec, ref cEndPosSec);
        nmsCOMcallee.nmsSD_LogData();
        nmsCOMcallee.nmsCollectProfiler();
        cStartPosSec = 0;
        segmentID ++;
        /* Read the time. */
        stopTime = DateTime.Now;
        ProcDuration += stopTime - startTime;
    }
    /* Voice Detected: rewind the end mark and start the segment here. */
    else if (processBufferResult == NMS_PROCESS_VOICEDETECTED && count < countNum && (cStartPosSec == 0))
    {
        ncount += 0.5; // ncount tracks elapsed analysis time — presumably in seconds; TODO confirm units
        cEndPosSec -= 2;
        cStartPosSec = cEndPosSec;
    }
    /* The QA5Core fail to identify the buffer */
    else if (processBufferResult == -1 && count < countNum)
    {
        ncount += 0.5;
        cStartPosSec = 0;
    }
    /* Silence Detected */
    else if (processBufferResult == NMS_PROCESS_SILENCE && count < countNum)
    {
        cStartPosSec = 0;
        silenceCount++;
    }
    /* Reset silenceCount if silence keeps being detected */
    if (shortTermCount == 0 && silenceCount == 2) silenceCount = 0;
    /* Return the Dominant Emotion after two non sequential silences */
    if (silenceCount == 2 && processBufferResult == NMS_PROCESS_SILENCE)
    {
        /* Dominant Human Emotion (1..6) from the accumulated sums. */
        dominantTypeOnly = ClassifierLogisticModel();
        //dominantTypeOnly = KNN_EmotionModel();
        /* Store results: histogram of dominant emotions seen so far. */
        if (dominantTypeOnly == 1) emt[0]++;
        else if (dominantTypeOnly == 2) emt[1]++;
        else if (dominantTypeOnly == 3) emt[2]++;
        else if (dominantTypeOnly == 4) emt[3]++;
        else if (dominantTypeOnly == 5) emt[4]++;
        else if (dominantTypeOnly == 6) emt[5]++;
        /* Reset all the stored values for the next accumulation window. */
        energysum = 0;
        contentsum = 0;
        upsetSum = 0;
        angrySum = 0;
        stressSum = 0;
        concentrationSum = 0;
        intenThinkSum = 0;
        safsum = 0;
        excitSum = 0;
        atmossum = 0;
        emocogsum = 0;
        embarrasSum = 0;
        hesitatSum = 0;
        imag_actSum = 0;
        extr_statSum = 0;
        uncertSum = 0;
        brainpowSum = 0;
        max_volSum = 0;
        voice_energSum = 0;
        shortTermCount = 0;
        /* Write file to send emotion to main controller */
        TextWriter UpdateHA = new StreamWriter("HumanEmo.txt");
        UpdateHA.Write(dominantTypeOnly);
        UpdateHA.Close();
        /* Read the end time */
        DateTime stoptime = DateTime.Now; // NOTE(review): assigned but unused beyond this scope
        /* The processing duration */
        Console.Write("Processing Time: ");
        Console.WriteLine(ProcDuration);
        ProcDuration = TimeSpan.Zero;
        /* Append to history files: "mm:ss~mm:ss <affect>" spanning the analyzed window. */
        HA = File.AppendText("HumanAffectHistory.txt");
        int min1 = Math.Abs((int)(SecCount-ncount) / 60),
            min2 = Math.Abs((int)(SecCount) / 60),
            sec1 = Math.Abs((int)(SecCount-ncount) % 60),
            sec2 = Math.Abs((int)(SecCount) % 60);
        // Zero-padded two-digit rendering of each component.
        string smin1 = String.Format("{0:D2}",min1),
               smin2 = String.Format("{0:D2}",min2),
               ssec1 = String.Format("{0:D2}",sec1),
               ssec2 = String.Format("{0:D2}",sec2);
        HA.Write(smin1);
        HA.Write(":");
        HA.Write(ssec1);
        HA.Write("~");
        HA.Write(smin2);
        HA.Write(":");
        HA.Write(ssec2);
        HA.Write(" ");
        HA.WriteLine(dominantTypeOnly);
        HA.Close();
        /* Show in cmd window */
        Console.Write("Time: {0:D2}:{1:D2}~{2:D2}:{3:D2}", min1, sec1, min2, sec2);
        Console.Write("Human Affect: ");
        Console.WriteLine(dominantTypeOnly);
        ncount = 0.0;
    }
}
/// <summary>
/// Processes one captured audio buffer through the NMS COM analysis engine. When a segment
/// completes, this overload patches the feature vector into line 24 of "FeatureVector.arff",
/// runs the external "classify.bat" classifier, maps its textual label (angry/neutral/
/// sadness/happy) to hard-coded valence/arousal pairs printed to the console, and appends
/// the full emotion readout to "VoiceAnalysisResults.txt" before logging segment data back
/// into the COM engine.
/// </summary>
/// <param name="inpBuf">Raw audio samples, passed by ref to the COM engine.</param>
/// <param name="segmentID">Analysis-segment counter; incremented when a segment completes.</param>
/// <param name="cEndPosSec">Segment end position; advanced by 2 on every call.</param>
/// <param name="cStartPosSec">Segment start position; rewound or reset per engine result.</param>
private void ProcessBuffer(ref Array inpBuf, ref int segmentID, ref int cEndPosSec, ref int cStartPosSec)
{
    /* Read the initial time (accumulated into ProcDuration below). */
    startTime = DateTime.Now;
    bufSize = (short)(bufferSize - 1);
    // Every argument is in/out; the engine fills emoValsArray, aIres, bStr, testbuf, brderS.
    processBufferResult = nmsCOMcallee.nmsProcessBuffer(ref inpBuf, ref bufSize, ref emoValsArray, ref aIres, ref bStr, ref testbuf, ref testBufLeng, ref brderS);
    cEndPosSec += 2; // each buffer presumably spans 2 seconds — TODO confirm
    /* If Analysis is ready */
    if (processBufferResult == NMS_PROCESS_ANALYSISREADY)
    {
        silenceCount = 0;
        emoVals = CopyValuesFromEmoArrayIntoEmotionsStructure(emoValsArray, aIres);
        String fvStr = CopyValuesFromEmoArrayIntoString(emoValsArray, aIres);
        Console.WriteLine("Features extracted!");
        Console.WriteLine(fvStr);
        // Patch the fresh feature vector into line 24 of the ARFF template
        // (assumes the file has at least 24 lines — TODO confirm template).
        string[] lines = System.IO.File.ReadAllLines("FeatureVector.arff");
        Console.WriteLine(lines.Length);
        lines[23] = fvStr;
        System.IO.File.WriteAllLines("FeatureVector.arff", lines);
        //Run command prompt command
        //string strCmdText;
        //strCmdText = "/C java -cp weka.jar weka.classifiers.functions.Logistic -T FeatureVector.arff -l logistic_dmd.model -p 0";
        //System.Diagnostics.Process.Start(@"C:\Windows\System32\cmd.exe", strCmdText);
        // Start the child process.
        // NOTE(review): p is never Closed/Disposed in this overload — handle leak per segment.
        System.Diagnostics.Process p = new System.Diagnostics.Process();
        // Redirect the output stream of the child process.
        p.StartInfo.UseShellExecute = false;
        p.StartInfo.RedirectStandardOutput = true;
        p.StartInfo.FileName = "classify.bat";
        p.Start();
        // Do not wait for the child process to exit before
        // reading to the end of its redirected stream.
        // p.WaitForExit();
        // Read the output stream first and then wait
        // (reading before WaitForExit avoids the redirected-stdout deadlock).
        string output = p.StandardOutput.ReadToEnd();
        p.WaitForExit();
        System.Console.WriteLine(output);
        // Map the classifier's textual label to fixed valence/arousal coordinates
        // (console only; nothing is persisted from this mapping).
        if (output.Contains("angry"))
        {
            Console.WriteLine("Angry");
            Console.WriteLine("Valence: -0.40");
            Console.WriteLine("Arousal: 0.79");
        }
        else if (output.Contains("neutral"))
        {
            Console.WriteLine("Neutral");
            Console.WriteLine("Valence: 0.0");
            Console.WriteLine("Arousal: 0.0");
        }
        else if (output.Contains("sadness"))
        {
            Console.WriteLine("Sad");
            Console.WriteLine("Valence: -0.81");
            Console.WriteLine("Arousal: -0.40");
        }
        else if (output.Contains("happy"))
        {
            Console.WriteLine("Happy");
            Console.WriteLine("Valence: 0.89");
            Console.WriteLine("Arousal: 0.17");
        }
        // Append the complete per-segment emotion readout to the results log.
        tw = File.AppendText("VoiceAnalysisResults.txt");
        tw.WriteLine("---------- Start Segment ----------");
        tw.Write("Energy "); tw.WriteLine(emoVals.Energy);
        tw.Write("Content "); tw.WriteLine(emoVals.content);
        tw.Write("Upset "); tw.WriteLine(emoVals.upset);
        tw.Write("Angry "); tw.WriteLine(emoVals.angry);
        tw.Write("Stress "); tw.WriteLine(emoVals.stress);
        tw.Write("Concentration "); tw.WriteLine(emoVals.concentration_level);
        tw.Write("Intensive Thinking "); tw.WriteLine(emoVals.intensive_thinking);
        tw.Write("SAF "); tw.WriteLine(emoVals.saf);
        tw.Write("Excitement "); tw.WriteLine(emoVals.excitement);
        tw.Write("Atmosphere "); tw.WriteLine(emoVals.Atmos);
        tw.Write("EmoCog Ratio "); tw.WriteLine(emoVals.EmoCogRatio);
        tw.Write("Embarrassment "); tw.WriteLine(emoVals.embarrassment);
        tw.Write("Hesitation "); tw.WriteLine(emoVals.hesitation);
        tw.Write("Imagination "); tw.WriteLine(emoVals.imagination_activity);
        tw.Write("Extreme State "); tw.WriteLine(emoVals.extremeState);
        tw.Write("Uncertainty "); tw.WriteLine(emoVals.uncertainty);
        tw.Write("Brain Power "); tw.WriteLine(emoVals.BrainPower);
        tw.Write("Max Volume "); tw.WriteLine(emoVals.maxAmpVol);
        tw.Write("Voice Energy "); tw.WriteLine(emoVals.VoiceEnergy);
        tw.WriteLine("---------- End Segment ----------");
        tw.Close();
        nmsCOMcallee.nmsQA_CollectAgentScoreData();
        // Grow the cache in chunks of 101 entries so the index below is always valid.
        if (segmentID >= lioNetResultsCache.Count)
        {
            for (int i = 0; i <= 100; i++) lioNetResultsCache.Add(string.Empty);
        }
        lioNetResultsCache[segmentID] = bStr;
        nmsCOMcallee.nmsQA_Logdata(ref segmentID, ref cStartPosSec, ref cEndPosSec);
        nmsCOMcallee.nmsSD_LogData();
        nmsCOMcallee.nmsCollectProfiler();
        cStartPosSec = 0;
        segmentID ++;
        /* Read the time. */
        stopTime = DateTime.Now;
        ProcDuration += stopTime - startTime;
    }
    /* Voice Detected: rewind the end mark and start the segment here. */
    else if (processBufferResult == NMS_PROCESS_VOICEDETECTED && count < countNum && (cStartPosSec == 0))
    {
        cEndPosSec -= 2;
        cStartPosSec = cEndPosSec;
    }
    /* The QA5Core fail to identify the buffer */
    else if (processBufferResult == -1 && count < countNum)
    {
        cStartPosSec = 0;
    }
    /* Silence Detected */
    else if (processBufferResult == NMS_PROCESS_SILENCE && count < countNum)
    {
        cStartPosSec = 0;
        silenceCount++;
    }
    /* Reset silenceCount if no voice was detected */
    if (shortTermCount == 0 && silenceCount == 2) silenceCount = 0;
    /* Return the Dominant Emotion after two non sequential silences */
    if (silenceCount == 2 && processBufferResult == NMS_PROCESS_SILENCE)
    {
        /* Read the end time */
        DateTime stoptime = DateTime.Now; // NOTE(review): assigned but unused beyond this scope
        /* The processing duration */
        Console.Write("Processing Time: ");
        Console.WriteLine(ProcDuration);
        ProcDuration = TimeSpan.Zero;
    }
    /* If Program is running with determined time: tear down audio I/O. */
    if ((count == countNum))
    {
        cEndPosSec = 0;
        cStartPosSec = 0;
        /* Stop audio output */
        if ((m_Player != null))
            try { m_Player.Dispose(); } finally { m_Player = null; }
        /* Stop audio input */
        if ((m_Recorder != null))
            try { m_Recorder.Dispose(); } finally { m_Recorder = null; }
        /* Clear All Pending Data */
        m_Fifo.Flush();
    }
    /* Running during defined time */
    count++;
}
/// <summary>
/// Evaluates the current segment's emotion values against the UI-configured alarm
/// thresholds (anger trend, stress trend, energy high/low) using rotating averages,
/// updates the corresponding UI indicator fields, and raises the aggregate
/// "call is out of acceptable levels" flag when any alarm trips.
/// Any exception is reported to the UI and swallowed (best-effort update).
/// </summary>
/// <param name="EmoVals">Emotion values for the segment just analyzed.</param>
/// <param name="segmentID">Index of the current segment; energy trending starts after segment 5.</param>
private void UpdateAlarms(EmotionResults EmoVals, int segmentID)
{
    try
    {
        int rotateCycle;
        double rotatingAvrg = 0;
        bool callIsOutOfAcceptableLevels = false;
        if (EmoVals.upset > 0)
        {
            upsetSegments++;
        }
        /* Anger-trend alarm: rotating average of 'angry' over the last 4 segments. */
        if (ui.AlarmIfAngerTrendIsRaisingAndAbove)
        {
            ui.HighlightAngerTrendLevel = false;
            double angry = EmoVals.angry;
            rotateCycle = 4;
            rotatingAvrg = trenders[1].regNewValRotatingAvrg(ref angry, ref rotateCycle);
            // FIX: dropped the redundant re-check of AlarmIfAngerTrendIsRaisingAndAbove —
            // we are already inside that guard, so the inner test was always true.
            if (rotatingAvrg > ui.MaxAngerTrandLevel)
            {
                callIsOutOfAcceptableLevels = true;
                onlineFlag = "angry " + rotatingAvrg.ToString("F02");
                ui.HighlightAngerTrendLevel = true;
            }
            ui.AngerTrendLevel = rotatingAvrg.ToString("F02");
        }
        /* Stress-trend alarm: rotating average of 'stress' over the last 4 segments. */
        if (ui.AlarmIfStressTrendIsRaisingAndAbove)
        {
            ui.HighlightStressTrendLevel = false;
            double stress = EmoVals.stress;
            rotateCycle = 4;
            rotatingAvrg = trenders[0].regNewValRotatingAvrg(ref stress, ref rotateCycle);
            if (rotatingAvrg > ui.MaxStressTrendLevel)
            {
                ui.NotifyStressLevelIsRaising = true;
                ui.HighlightStressTrendLevel = true;
                onlineFlag = "STRESS " + rotatingAvrg.ToString("F02");
                stressSegments++;
            }
            // NOTE(review): this hysteresis band compares the STRESS average against
            // MaxAngerTrandLevel — possibly a copy/paste of the anger threshold;
            // confirm whether MaxStressTrendLevel was intended.
            else if (rotatingAvrg < (ui.MaxAngerTrandLevel - 1.0))
            {
                ui.NotifyStressLevelIsRaising = false;
                ui.HighlightStressTrendLevel = false;
            }
            else
            {
                ui.HighlightStressTrendLevel = false;
            }
            ui.StressTrendLevel = rotatingAvrg.ToString("F02");
        }
        /* Energy alarms: sustained-high energy (anger proxy) and low energy (tiredness). */
        if (ui.AlarmIfStressTrendIsRaisingAndAbove || ui.AlarmIfStressTrendIsLow)
        {
            ui.HighlightEnergyTrendIsRaisingFor = false;
            ui.HighlightEnergyLevelBelow = false;
            double difEnrgy = 0;
            // Energy trending needs a few segments of history before it is meaningful.
            if (segmentID > 5)
            {
                rotateCycle = 8;
                double energy = EmoVals.Energy;
                rotatingAvrg = trenders[2].regNewValRotatingAvrg(ref energy, ref rotateCycle);
                // Baseline = earliest values in the rotation window, offset by 3.0;
                // difEnrgy is twice the rise above that baseline.
                double num6 = trenders[2].getFirstValuesRotating() + 3.0;
                difEnrgy = (rotatingAvrg - num6) * 2.0;
                ui.EnergyDifference = difEnrgy.ToString("F02");
                // Count consecutive segments where the energy rise stays at/above 7.0.
                if (difEnrgy < 7.0)
                {
                    enrgyStaysHighSegments = 0;
                }
                else
                {
                    enrgyStaysHighSegments++;
                }
                ui.EnrgyHighSegments = enrgyStaysHighSegments.ToString("");
                ui.SetEnergyLevel(difEnrgy, (difEnrgy >= 7.0));
            }
            if (ui.AlarmIfStressTrendIsRaisingAndAbove)
            {
                // Sustained high energy with zero 'content' is treated as anger.
                if (enrgyStaysHighSegments > ui.LimitForEnergyTrendIsRaisingFor && (EmoVals.content == 0))
                {
                    callIsOutOfAcceptableLevels = true;
                    ui.HighlightEnergyTrendIsRaisingFor = true;
                    onlineFlag = "ANGRY " + rotatingAvrg.ToString("F02");
                    angerSegments++;
                    ui.HighlightEnergyLevel = true;
                }
                else
                {
                    ui.HighlightEnergyTrendIsRaisingFor = false;
                    if (difEnrgy > 0.0)
                    {
                        midEnergySegments++;
                    }
                }
                ui.EnergyTrendIsRaisingFor = enrgyStaysHighSegments.ToString();
            }
            // NOTE(review): when segmentID <= 5, rotatingAvrg here is leftover from the
            // stress/anger computations above, not an energy average — confirm intent.
            if (ui.AlarmIfStressTrendIsLow && (rotatingAvrg <= ui.LimitForEnergyLevelBelow))
            {
                callIsOutOfAcceptableLevels = true;
                ui.HighlightEnergyLevelBelow = true;
                lowEnergySegments++;
                ui.NotifySpeakerIsTired = true;
            }
            else
            {
                ui.NotifySpeakerIsTired = false;
            }
            ui.EnergyLevelBelow = rotatingAvrg.ToString("F02");
        }
        ui.NotifyCallIsOutOfAcceptableLevels = callIsOutOfAcceptableLevels;
    }
    catch (Exception exception)
    {
        // Best-effort: surface the problem in the UI but keep the capture loop alive.
        ui.ShowMessage(exception.Message);
    }
}
/// <summary>
/// Mirrors one analyzed segment into the UI segments list and appends the same data,
/// comma-separated, as one row of the CSV output stream.
/// </summary>
/// <param name="segCount">Segment number (first CSV column).</param>
/// <param name="bstring">Engine result string; written as the last column.</param>
/// <param name="sPos">Segment start position in hundredths of a second.</param>
/// <param name="fPos">Segment end position in hundredths of a second.</param>
/// <param name="emotionResults">Emotion values for the segment.</param>
/// <param name="comment">Free-text comment column.</param>
private void UpdateSegmentsListAndCsv(int segCount, string bstring, int sPos, int fPos, EmotionResults emotionResults, string comment)
{
    // Keep the on-screen list in sync with what goes to the CSV.
    ui.AddSegmentToList(segCount, bstring, sPos, fPos, emotionResults, onlineFlag, comment);

    // Positions arrive in hundredths of a second; render as seconds with two decimals.
    string startSeconds = (sPos / 100.0).ToString("F02");
    string endSeconds = (fPos / 100.0).ToString("F02");

    // Column order must match the header/list layout exactly.
    object[] row =
    {
        segCount,
        startSeconds,
        endSeconds,
        emotionResults.Energy,
        emotionResults.content,
        emotionResults.upset,
        emotionResults.angry,
        emotionResults.stress,
        emotionResults.uncertainty,
        emotionResults.excitement,
        emotionResults.concentration_level,
        emotionResults.EmoCogRatio,
        emotionResults.hesitation,
        emotionResults.BrainPower,
        emotionResults.embarrassment,
        emotionResults.intensive_thinking,
        emotionResults.imagination_activity,
        emotionResults.extremeState,
        emotionResults.saf,
        emotionResults.Atmos,
        onlineFlag,
        emotionResults.AIres,
        emotionResults.maxAmpVol,
        comment,
        bstring
    };

    cSvWriter.WriteLine(string.Join(",", row));
}
/// <summary>
/// Pushes the latest emotion values into the three history-bar controls.
/// No-op when real-time UI updates are disabled; marshals itself onto the
/// UI thread when invoked from a worker thread.
/// </summary>
/// <param name="emoVals">Emotion values for the most recent segment.</param>
public void UpdateHistoryBars(EmotionResults emoVals)
{
    // Real-time refreshes can be switched off entirely.
    if (NoRealTimeUpdates) return;

    // Re-dispatch on the UI thread when called from elsewhere.
    if (InvokeRequired)
    {
        Invoke(new UpdateHistoryBarsDelegate(UpdateHistoryBars), new object[] { emoVals });
        return;
    }

    nmsHSAhistoryBar1.addNewValues(emoVals.content, emoVals.upset, emoVals.Atmos);
    // Scale the raw values into each bar's expected range.
    nmsHSAhistoryBar2.addNewValues(
        (short) (emoVals.EmoCogRatio / 20),
        (short) (emoVals.BrainPower / 100),
        (short) (emoVals.Energy * 2));
    nmsAShistoryBar1.addNewValues(
        (short) (emoVals.stress * 2),
        (short) (emoVals.angry * 2));
    // Let the message pump repaint immediately.
    Application.DoEvents();
}
/// <summary>
/// Appends one analyzed segment as a row in the segments ListView.
/// Marshals itself onto the UI thread when invoked from a worker thread.
/// Column order must stay in sync with the CSV writer.
/// </summary>
/// <param name="segCount">Segment number (first column).</param>
/// <param name="bstring">Engine result string (last column).</param>
/// <param name="sPos">Segment start position in hundredths of a second.</param>
/// <param name="fPos">Segment end position in hundredths of a second.</param>
/// <param name="emoVals">Emotion values for the segment.</param>
/// <param name="onlineFlag">Alarm flag text produced during analysis.</param>
/// <param name="comment">Free-text comment column.</param>
public void AddSegmentToList(int segCount, string bstring, int sPos, int fPos, EmotionResults emoVals, string onlineFlag, string comment)
{
    // Re-dispatch on the UI thread when called from elsewhere.
    if (InvokeRequired)
    {
        Invoke(new AddSegmentToListDelegate(AddSegmentToList),
               new object[] { segCount, bstring, sPos, fPos, emoVals, onlineFlag, comment });
        return;
    }

    ListViewItem row = segmentsList.Items.Add(segCount.ToString());

    // Cell order mirrors the CSV column order; positions are converted from
    // hundredths of a second to seconds with two decimals.
    string[] cells =
    {
        (sPos / 100.0).ToString("F02"),
        (fPos / 100.0).ToString("F02"),
        emoVals.Energy.ToString(),
        emoVals.content.ToString(),
        emoVals.upset.ToString(),
        emoVals.angry.ToString(),
        emoVals.stress.ToString(),
        emoVals.uncertainty.ToString(),
        emoVals.excitement.ToString(),
        emoVals.concentration_level.ToString(),
        emoVals.EmoCogRatio.ToString(),
        emoVals.hesitation.ToString(),
        emoVals.BrainPower.ToString(),
        emoVals.embarrassment.ToString(),
        emoVals.intensive_thinking.ToString(),
        emoVals.imagination_activity.ToString(),
        emoVals.extremeState.ToString(),
        emoVals.saf.ToString(),
        emoVals.Atmos.ToString(),
        onlineFlag,
        emoVals.AIres,
        emoVals.maxAmpVol.ToString(),
        comment,
        bstring
    };
    foreach (string cell in cells)
    {
        row.SubItems.Add(cell);
    }

    segmentsList.Columns[0].Width = 60;
}