/// <summary>
/// Stops the main loop: clears the running flag, discards any queued
/// method calls, wakes the loop thread, and notifies subscribers.
/// </summary>
public void Stop()
{
    m_running = false;
    m_methodQueue.Clear();
    m_syncEvent.Set(); // wake the loop so it can observe m_running == false

    // Send a stop message to subscriber (invoke the callback).
    // Copy the delegate to a local first: checking m_StopEvent and then
    // invoking the field directly races with unsubscription on another
    // thread and can throw NullReferenceException.
    var stopHandler = m_StopEvent;
    if (stopHandler != null)
    {
        stopHandler();
    }
}
/// <summary>
/// Advances to the next record in the stream.
/// </summary>
/// <returns>true if a multi-entry record was read into the queue; false at
/// end of stream, on a non-multi entry, or on an I/O error.</returns>
public bool MoveNext()
{
    try
    {
        var stream = _reader.BaseStream;

        // End of stream: nothing left to read.
        if (stream.Position == stream.Length)
        {
            return false;
        }

        // Only multi-entry records are consumable here.
        if ((EntryContent)_reader.ReadByte() != EntryContent.Multi)
        {
            return false;
        }

        _entryQueue.Clear();
        int count = _reader.ReadInt16();
        for (int i = 0; i < count; i++)
        {
            ReadEntry();
        }
        return true;
    }
    catch (IOException)
    {
        // Treat read failures as "no more records".
        return false;
    }
}
/// <summary>
/// Clears queue.
/// </summary>
public void Clear()
{
    // NOTE(review): lock(this) is discouraged — external code could lock the
    // same instance and deadlock. A private readonly lock object would be
    // safer, but that field must be declared at class level, outside this method.
    lock (this)
    {
        dataQueue.Clear();
        // The queue is now empty, so drop the signal; queueHasData is
        // presumably the "data available" event consumers wait on — confirm
        // against the enqueue path.
        queueHasData.Reset();
    }
}
/// <summary>
/// Empties the message queue and resets the associated wait handles.
/// </summary>
public void Clear()
{
    // Decrement the free count. We do not expect it to ever get big, so we won't go
    // through this loop many times.
    // (Each non-blocking WaitOne(0, false) consumes one count from
    // messagesEnqueued — presumably a semaphore released once per enqueued
    // message; TODO confirm against the enqueue path.)
    // NOTE(review): no lock is taken here — a message enqueued concurrently
    // between the drain and messageQueue.Clear() would leave counts and
    // queue contents out of sync; confirm callers serialize access.
    while (messagesEnqueued.WaitOne(0, false)) { }
    downKeystrokeWasEnqueued.Reset();
    messageQueue.Clear();
}
/// <summary>
/// Clears the command queue under the mutex.
/// </summary>
public void Clear()
{
    mutex.WaitOne();
    try
    {
        cmdq.Clear();
    }
    finally
    {
        // Always release, even if Clear() throws: a mutex that is never
        // released is abandoned, and later waiters would block forever or
        // observe AbandonedMutexException.
        mutex.ReleaseMutex();
    }
}
/// <summary>
/// Discards all pending keystrokes and resets both enqueue signals.
/// </summary>
public void Clear()
{
    keystrokesQueue.Clear();
    // Drop the signals so waiters are not woken for a queue that is now empty.
    // NOTE(review): no synchronization here — if a producer can enqueue
    // concurrently, a keystroke added between Clear() and the Reset() calls
    // would be stranded with its event cleared; confirm single-threaded use.
    keystrokeWasEnqueued.Reset();
    downKeystrokeWasEnqueued.Reset();
}
/// <summary>
/// Removes all items from the wrapped collection.
/// </summary>
public void Clear() => inner.Clear();
/// <summary>
/// Writes buffered text to file. In Append style every queued entry is
/// written; otherwise only the most recent entry is written (the existing
/// file having been deleted first for Overwrite style). The queue is
/// cleared after a successful write.
/// </summary>
/// <returns>true if the write succeeded; false if any step failed.</returns>
public bool WriteToFile()
{
    //Write text to output file
    bool result = true;
    System.IO.StreamWriter file = null;
    try
    {
        //Delete existing file if required
        if (OutputStyle == WriteStyle.Overwrite)
        {
            if (System.IO.File.Exists(OutputFileName))
            {
                // Best-effort delete: if it fails (e.g. file locked) we still
                // proceed; the StreamWriter open below surfaces a real error.
                try
                {
                    System.IO.File.Delete(OutputFileName);
                }
                catch { }
            }
        }

        //Create output folder if required. GetDirectoryName returns an empty
        //string for a bare file name, which CreateDirectory would reject.
        string outputFolder = System.IO.Path.GetDirectoryName(OutputFileName);
        if (!string.IsNullOrEmpty(outputFolder) && !System.IO.Directory.Exists(outputFolder))
        {
            System.IO.Directory.CreateDirectory(outputFolder);
        }

        //Open file in append mode (Overwrite style already deleted the file)
        file = new System.IO.StreamWriter(OutputFileName, true);

        //Get sync locked queue and snapshot its contents.
        //NOTE(review): entries enqueued between ToArray() and Clear() below
        //are silently discarded — confirm this is acceptable to callers.
        System.Collections.Queue syncQueue = Synchronized(this);
        object[] textList = syncQueue.ToArray();

        //Construct output text
        System.Text.StringBuilder outputText = new StringBuilder();
        if (OutputStyle == WriteStyle.Append)
        {
            //Add each text string in queue
            foreach (string textEntry in textList)
            {
                outputText.Append(textEntry);
            }
        }
        else if (textList.Length > 0)
        {
            //Add last text string in queue. The length guard prevents an
            //IndexOutOfRangeException when the queue is empty.
            outputText.Append(textList[textList.Length - 1]);
        }

        //Write to file
        file.Write(outputText.ToString());

        //Empty queue
        syncQueue.Clear();
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
        result = false;
    }
    finally
    {
        if (file != null)
        {
            file.Dispose();
        }
    }
    return result;
}
/// <summary>
/// Process Trial Related Data: creates a "TrialStart" node under
/// DisplayFileStartNode and fills it with per-timestamp "Time" nodes, each
/// carrying "obj" elements (name/x/y) merged from two sources — gaze samples
/// parsed from the EDF file and object positions parsed from the GE log file.
/// </summary>
/// <param name="geLogFileName">Path to the GE log file consumed by ObjectDataWorker.</param>
/// <param name="DisplayFileStartNode">Parent node under which "TrialStart" is created.</param>
/// <param name="xmlDoc">Document used to create the output nodes.</param>
/// <param name="edfFileName">Path to the EDF file consumed by GazeDataWorker.</param>
/// <returns>The created "TrialStart" node.</returns>
/// <exception cref="ATLogicException">Thrown on any failure.
/// NOTE(review): the caught exception is not attached as an inner exception,
/// so the root cause and stack trace are lost.</exception>
public static XmlNode processTrialStart(String geLogFileName, XmlNode DisplayFileStartNode, XmlDocument xmlDoc, String edfFileName)
{
    XmlNode trialStartNode = null;
    long prevGazeTimeStamp = 0;
    // Last GE-log sample seen; replayed when the GE queue empties before the EDF queue.
    ObjPositionData gePrevPositionData = null;
    try
    {
        log.WriteLine("Processing Trial Start");
        log.Flush();
        // Matches lines starting with a numeric timestamp followed by two
        // numeric fields — presumably "<timestamp> <x> <y>"; TODO confirm
        // against the EDF ASCII format.
        Regex gazePostionRegex = new Regex(@"^(\d+)\s*(\d.*)\s*(\d.*)\s");
        trialStartNode = processSingleNode("TrialStart", DisplayFileStartNode, xmlDoc);
        // The workers populate the class-level queues (edfTrialDataQueue /
        // geLogTrialDataQueue) as a side effect of doWork; clear leftovers first.
        GazeDataWorker gazeWorker = new GazeDataWorker(edfFileName, gazePostionRegex);
        ObjectDataWorker objectWorker = new ObjectDataWorker(geLogFileName);
        ReplayTrial.timeStampDataQueue.Clear();
        geLogTrialDataQueue.Clear();
        Thread gazeWorkerThread = new Thread(gazeWorker.doWork);
        Thread objectWorkerThread = new Thread(objectWorker.doWork);
        log.WriteLine("created threads");
        log.Flush();
        try
        {
            // Run both parsers in parallel and wait for both to finish.
            gazeWorkerThread.Start();
            objectWorkerThread.Start();
            gazeWorkerThread.Join();
            objectWorkerThread.Join();
        }
        catch (ThreadInterruptedException e)
        {
            log.WriteLine(" Thread Interrupted Exception " + e.StackTrace);
        }
        catch (ThreadAbortException e)
        {
            log.WriteLine(" ThreadAbortException : " + e.StackTrace);
        }
        catch (SystemException e)
        {
            log.WriteLine("SystemException : " + e.StackTrace);
        }
        catch (Exception)
        {
            log.WriteLine("Exception with Threads");
        }
        log.WriteLine(" threads completed job");
        log.Flush();
        int edfCount = edfTrialDataQueue.Count;
        int geCount = geLogTrialDataQueue.Count;
        // NOTE(review): relativeDifference is computed below but never used.
        long relativeDifference = 0;
        long gelogFirstTimeStamp = 0;
        long gazeFirstTimeStamp = 0;
        // Peek the first timestamp from each source (0 if that source is empty).
        if (edfCount > 0)
        {
            ObjPositionData firstTimeStampData = (ObjPositionData)edfTrialDataQueue.Peek();
            gazeFirstTimeStamp = firstTimeStampData.timeStamp;
        }
        if (geCount > 0)
        {
            ObjPositionData geFirstData = (ObjPositionData)geLogTrialDataQueue.Peek();
            gelogFirstTimeStamp = geFirstData.timeStamp;
        }
        if (gazeFirstTimeStamp != gelogFirstTimeStamp)
        {
            if (gazeFirstTimeStamp > gelogFirstTimeStamp)
            {
                relativeDifference = gazeFirstTimeStamp - gelogFirstTimeStamp;
            }
        }
        if (edfCount != geCount)
        {
            log.WriteLine("WARNING : Time Stamps Obtained from EDFFile does not match the number of timeStamps in GELog ");
            log.WriteLine("WARNING : TimeStamp Count :: edf :" + edfCount + " , GELog : " + geCount);
            log.Flush();
        }
        XmlNode timeStartNode = null;
        // NOTE(review): gePrevTimeStamp and count are written but never read.
        long gePrevTimeStamp = 0;
        int count = 0;
        // Drive the merge off the EDF queue; the GE queue is drained in step,
        // with the last GE sample repeated once it runs dry.
        while (edfTrialDataQueue.Count > 0) //&& geLogTrialDataQueue.Count > 0
        {
            long edfTimeStamp = -1;
            long gazeTimeStamp = 0;
            if (edfTrialDataQueue.Count > 0)
            {
                ObjPositionData data = null;
                // Skip EDF samples until the timestamp expected from the previous
                // iteration is reached, down-sampling the gaze stream to the
                // display refresh interval; the first iteration takes the first sample.
                do
                {
                    data = (ObjPositionData)edfTrialDataQueue.Dequeue();
                    edfTimeStamp = data.timeStamp;
                    if (prevGazeTimeStamp == 0)
                    {
                        prevGazeTimeStamp = edfTimeStamp;
                    }
                } while ((edfTimeStamp != prevGazeTimeStamp) && (edfTrialDataQueue.Count > 0));
                // Obtain the Refresh rate and calculate number of times data would have been recorded.
                // 1000 / REFRESH_RATE is presumably the frame interval in
                // milliseconds, rounded down to an even value — TODO confirm units.
                int tmpRefreshRate = 1;
                int refreshRateFactor = 1000 / REFRESH_RATE;
                if ((refreshRateFactor % 2) == 0)
                {
                    tmpRefreshRate = refreshRateFactor;
                }
                else
                {
                    tmpRefreshRate = refreshRateFactor - 1;
                }
                prevGazeTimeStamp = edfTimeStamp + (tmpRefreshRate);
                timeStartNode = processSingleNode("Time", "stamp", edfTimeStamp.ToString(), trialStartNode, xmlDoc);
                // Emit one <obj name="" x="" y=""> element per object in the gaze sample.
                LinkedList <ObjectData> gazedataList = data.getobjList();
                foreach (ObjectData objData in gazedataList)
                {
                    String name = objData.objName;
                    String xCoord = objData.xPos.ToString();
                    String yCoord = objData.yPos.ToString();
                    XmlElement objNode = addElement("obj", timeStartNode, xmlDoc);
                    addAttribute("name", name, objNode);
                    addAttribute("x", xCoord, objNode);
                    addAttribute("y", yCoord, objNode);
                    log.WriteLine(" objname : " + name + " , x : " + xCoord + " , Y :" + yCoord);
                    log.Flush();
                }
                edfCount--;
            }
            log.WriteLine(" dequeing gelog data ");
            log.Flush();
            long geTimeStamp = -1;
            ObjPositionData geData = null;
            // for (int index = 0; index < speedControl * 5; index++)
            // {
            if (geLogTrialDataQueue.Count > 0)
            {
                long geNewTimeStamp = 0;
                geData = (ObjPositionData)geLogTrialDataQueue.Dequeue();
                geTimeStamp = geData.timeStamp;
                geCount--;
            }
            else if (gePrevPositionData != null)
            {
                // GE queue exhausted: repeat the last known object positions.
                geData = gePrevPositionData;
                geTimeStamp = geData.timeStamp;
            }
            if (geData != null)
            {
                gePrevPositionData = geData;
                // Append this GE sample's objects to the SAME <Time> node
                // created from the EDF sample above.
                LinkedList <ObjectData> objdataList = geData.getobjList();
                foreach (ObjectData objData in objdataList)
                {
                    String name = objData.objName;
                    String xCoord = objData.xPos.ToString();
                    String yCoord = objData.yPos.ToString();
                    XmlElement objNode = addElement("obj", timeStartNode, xmlDoc);
                    addAttribute("name", name, objNode);
                    addAttribute("x", xCoord, objNode);
                    addAttribute("y", yCoord, objNode);
                    log.WriteLine(" objname : " + name + " , x : " + xCoord + " , Y :" + yCoord);
                    log.Flush();
                }
                count++;
            }
        } //end of while loop
        geLogTrialDataQueue.Clear();
    }
    catch (Exception e)
    {
        // NOTE(review): 'e' is discarded — consider passing it as the inner
        // exception so the root cause is preserved.
        throw new ATLogicException(" Error while processing TrialStart parameters . gecount : " + geLogTrialDataQueue.Count + " , edf count : " + edfTrialDataQueue.Count);
    }
    return(trialStartNode);
}