/// <summary>Queues an ADD operation carrying the given item.</summary>
/// <param name="item">The item to be added when the operation queue is drained.</param>
public void Add(T item)
{
    var op = new Operation
    {
        Type = TYPE.ADD,
        Item = item
    };
    _Operations.Enqueue(op);
}
/// <summary>
/// Disable hook: when a report target is attached, records a "Disabled" data point
/// and detaches this reporter from the target.
/// </summary>
protected virtual void OnDisable()
{
    if (!reportTo)
    {
        return;
    }
    // Record the disable event before detaching from the report target.
    var disabledPoint = new DataPoint(reportingID + "Disabled", TimeStamp(), new Dictionary<string, object>());
    eventQueue.Enqueue(disabledPoint);
    reportTo.RemoveReporter(this);
}
/// <summary>
/// Runs on a dedicated thread: drains raw byte payloads from _untreatedInbox,
/// converts each one into a Message stored in _treatedInbox, and notifies listeners
/// through MainCluster.GotNewMessage.
/// </summary>
private void CastingMessages()
{
    while (true)
    {
        // BUG FIX: the original spun on `Count == 0` and then busy-waited inside
        // `while (!TryDequeue) { }`, burning 100% of a core whenever the inbox was
        // empty. A single TryDequeue plus a short sleep yields the CPU instead.
        byte[] receivedBytes;
        if (!_untreatedInbox.TryDequeue(out receivedBytes))
        {
            Thread.Sleep(1);
            continue;
        }
        try
        {
            // receivedBytes is intentionally unused for now.
            // TODO: Serialization of messages to better decoding — until then a
            // placeholder Failure message is enqueued instead of the decoded payload.
            _treatedInbox.Enqueue(new Message(Perfomative.Failure));
            MainCluster.GotNewMessage.Invoke(null, null);
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
        }
    }
}
/// <summary>
/// For every stored line that contains <paramref name="currentWord"/> (case-insensitive),
/// collects the word following it, then enqueues the 7 most frequent "currentWord next"
/// pairs into Finded and marks the word processed via Finished.
/// </summary>
/// <param name="currentWord">Word whose successors are being counted.</param>
/// <param name="indexArray">Index of the word in the driving array (used only for logging).</param>
public static void ProcessWord(string currentWord, int indexArray)
{
    List<string> newWord = new List<string>();
    int found = 0;
    // Case-insensitive comparison via StringComparison instead of ToLower() allocations.
    foreach (var item in wordInLines.Where(x => x.Count > 1 && x.Any(c => string.Equals(c, currentWord, StringComparison.OrdinalIgnoreCase))))
    {
        // BUG FIX: the original used the case-sensitive List.IndexOf after a
        // case-insensitive filter; a line matching only by letter case returned
        // index == -1, slipped past the bounds check (-1 + 1 == 0), and wrongly
        // recorded item[0] as the successor word.
        var index = item.FindIndex(c => string.Equals(c, currentWord, StringComparison.OrdinalIgnoreCase));
        if (index >= 0 && index + 1 <= item.Count - 1)
        {
            found++;
            newWord.Add(currentWord + " " + item[index + 1]);
        }
    }
    // Top 7 successor pairs by frequency (keys of length <= 1 filtered out).
    var statistics = newWord.GroupBy(word => word)
                            .ToDictionary(kvp => kvp.Key, kvp => kvp.Count())
                            .Where(x => x.Key.Length > 1)
                            .OrderByDescending(x => x.Value)
                            .Take(7)
                            .ToList();
    foreach (var key in statistics)
    {
        Finded.Enqueue($"{key.Key}");//: {key.Value}");
    }
    Finished.Enqueue(currentWord);
    Console.WriteLine($"index word {indexArray} word {currentWord} found {found}");// Console.WriteLine($" ");
}
/// <summary>
/// Appends an item to the buffer under the lock and advances the wrap-around
/// write counter (counter runs 1.._size inclusive, then wraps back to 1).
/// </summary>
/// <param name="item">Item to enqueue.</param>
public void put(string item)
{
    lock (_lockObject)
    {
        var next = _last + 1;
        _last = next > _size ? 1 : next;
        _buffer.Enqueue(item);
    }
}
/// <summary>
/// Adds one print record to the cache, then blocks the caller while the cache
/// holds 8192 or more entries — back-pressure on the producing thread.
/// </summary>
/// <param name="data">Record to cache for printing.</param>
public void AddPrintData(MyStruct data)
{
    const int capacityLimit = 8192;
    _cachList.Enqueue(data);
    //LogWriter.WriteLog(new string[] { string.Format("[ReceiveDataThread] CachList.Count = {0}", CachList.Count) }, true);
    // Throttle: wait for the consumer to drain the queue below the limit.
    while (_cachList.Count >= capacityLimit)
    {
        Thread.Sleep(10);
    }
}
/// <summary>
/// Queues a buffered send request and returns the pending task handle so the
/// caller can track its completion.
/// </summary>
/// <param name="buffer">Source byte buffer.</param>
/// <param name="offset">Start offset within the buffer.</param>
/// <param name="count">Number of bytes to send.</param>
public Task Send(byte[] buffer, int offset, int count)
{
    var pending = new Task
    {
        Buffer = buffer,
        Offset = offset,
        Count = count
    };
    _SendTasks.Enqueue(pending);
    return pending;
}
/// <summary>
/// Query worker loop: repeatedly issues a SPARQL "friends of a random person" query
/// against the store until _endQueries is set, recording (elapsed ms, row count) per
/// query into _queryTimes; failed queries are recorded as (-1, 0).
/// </summary>
private void QueryData()
{
    try
    {
        const string queryTemplate = "SELECT ?friend WHERE {{ {{<{0}> <http://purl.org/foaf/0.1/knows> ?friend }} UNION {{ ?friend <http://purl.org/foaf/0.1/knows> <{0}> }} }}";
        var rng = new Random();
        var stopwatch = new Stopwatch();
        do
        {
            try
            {
                // Pick a random subject URI for this iteration.
                var randomPerson = String.Format("http://example.org/person/{0}", rng.Next(25000));
                stopwatch.Restart();
                var query = String.Format(queryTemplate, randomPerson);
                int resultCount;
                using (var stream = _client.ExecuteQuery(_storeName, query))
                {
                    resultCount = XDocument.Load(stream).SparqlResultRows().Count();
                }
                stopwatch.Stop();
                _queryTimes.Enqueue(new Tuple<long, int>(stopwatch.ElapsedMilliseconds, resultCount));
            }
            catch (Exception ex)
            {
                Console.ForegroundColor = ConsoleColor.Red;
                Console.WriteLine("Query failed: " + ex.Message);
                Console.ResetColor();
                // Sentinel entry so failures still show up in the timing data.
                _queryTimes.Enqueue(new Tuple<long, int>(-1, 0));
            }
        } while (!_endQueries);
    }
    catch (Exception ex)
    {
        Console.Error.WriteLine(ex);
    }
}
/// <summary>
/// Constructor: pre-allocates one large receive buffer and one large send buffer,
/// slices them into fixed-size per-client segments, and fills the buffer pool with
/// one UserBuffer per allowed client.
/// </summary>
/// <param name="maxClients">Maximum number of simultaneous connections.</param>
/// <param name="receiveBufferSize">Receive buffer size per connection, in bytes.</param>
/// <param name="sendBufferSize">Send buffer size per connection, in bytes.</param>
/// <param name="sendQueueSize">Maximum number of pending outbound packets per connection.</param>
/// <param name="commandReader">Processor used to decode incoming buffers.</param>
public SessionFactory(int maxClients, int receiveBufferSize, int sendBufferSize, int sendQueueSize, IBufferProcessor commandReader)
{
    m_receiveBufferSize = receiveBufferSize;
    m_sendBufferSize = sendBufferSize;
    m_bufferPool = new System.Collections.Concurrent.ConcurrentQueue<UserBuffer>();
    // One contiguous backing array per direction; segments below index into them.
    m_receivebuffer = new byte[maxClients * receiveBufferSize];
    m_sendbuffer = new byte[maxClients * sendBufferSize];
    m_process = commandReader;
    for (int slot = 0; slot < maxClients; slot++)
    {
        var rxSegment = new Sinan.Collections.BytesSegment(m_receivebuffer, m_receiveBufferSize * slot, m_receiveBufferSize);
        var txSegment = new Sinan.Collections.BytesSegment(m_sendbuffer, m_sendBufferSize * slot, m_sendBufferSize);
        m_bufferPool.Enqueue(new UserBuffer(slot, this, rxSegment, txSegment, sendQueueSize));
    }
}
/// <summary>
/// Background bridge loop: forwards commands from the test bench to the pressure
/// transmitter and translates transmitter responses ("P"/"F") back into
/// "Pass"/"Fail" for the test bench. Runs until cancellation is requested.
/// </summary>
private void Communicator_DoWork(object sender, DoWorkEventArgs e)
{
    try
    {
        while (true)
        {
            if (Communicator.CancellationPending)
            {
                return;
            }
            // BUG FIX: the original dereferenced pressureTransmitterManager
            // (CommandQ.Enqueue) BEFORE its null check. Check both devices up
            // front, and sleep briefly instead of hot-spinning while waiting.
            if (HoseLeakTestBench == null || pressureTransmitterManager == null)
            {
                Thread.Sleep(10);
                continue;
            }
            string data;
            if (HoseLeakTestBench.CommandQ.TryDequeue(out data))
            {
                // Strings are immutable in .NET; the original String.Copy was unnecessary.
                pressureTransmitterManager.CommandQ.Enqueue(data);
            }
            if (pressureTransmitterManager.ResponseQ.TryDequeue(out data))
            {
                if (data == "P")
                {
                    HoseLeakTestBench.ResponseQ.Enqueue("Pass");
                }
                else if (data == "F")
                {
                    HoseLeakTestBench.ResponseQ.Enqueue("Fail");
                }
            }
        }
    }
    catch (Exception ex)
    {
        TestBenchLogQ.Enqueue(ex.Message);
    }
}
/// <summary>
/// Enqueues a message into the priority-appropriate queue and kicks off processing.
/// Null items and calls made after disposal are silently ignored.
/// </summary>
/// <param name="item">Message to enqueue; routed by its MPriority.</param>
public void Enqueue(T item)
{
    if (item == null)
    {
        return;
    }
    // Atomic read of the disposed flag; non-zero means the queue is shut down.
    if (Interlocked.CompareExchange(ref _isDisposed, 1, 1) != 0)
    {
        return;
    }
    // "Hign" is the enum member's spelling; high-priority messages get their own queue.
    if (item.MPriority == MessagePriority.Hign)
    {
        mq_high.Enqueue(item);
    }
    else
    {
        mq_low.Enqueue(item);
    }
    StartProcess();
}
/// <summary>
/// Window constructor: wires up the logger and communicator background workers,
/// reads serial-port settings from app configuration, and creates the test bench
/// and pressure transmitter devices. Any startup failure is logged to TestBenchLogQ.
/// </summary>
public MainWindow()
{
    try
    {
        InitializeComponent();
        TestBenchLogQ = new System.Collections.Concurrent.ConcurrentQueue<string>();
        TransmitterLogQ = new System.Collections.Concurrent.ConcurrentQueue<string>();
        Logger.DoWork += Logger_DoWork;
        Logger.WorkerSupportsCancellation = true;
        Logger.RunWorkerAsync();
        Communicator.DoWork += Communicator_DoWork;
        Communicator.WorkerSupportsCancellation = true;
        String testbenchport = ConfigurationManager.AppSettings.Get("TestBenchPort");
        String testPressure = ConfigurationManager.AppSettings.Get("TestPressure");
        // Early validation of the configured pressure: a malformed value throws
        // here so it is logged by the catch below rather than failing mid-test.
        Convert.ToDouble(testPressure);
        HoseLeakTestBench = new TestBench(testbenchport, 9600, Parity.None, 8, StopBits.One);
        HoseLeakTestBench.LogQ = TestBenchLogQ;
        String transmitterport = ConfigurationManager.AppSettings.Get("PressureTransmitterPort");
        pressureTransmitterManager = new PressureTransmitterManager(transmitterport, 9600, Parity.None, 8, StopBits.One, 1);
        pressureTransmitterManager.LogQ = TransmitterLogQ;
        Communicator.RunWorkerAsync();
    }
    catch (Exception e)
    {
        // BUG FIX: if InitializeComponent (or the queue construction itself) threw,
        // TestBenchLogQ was still null and the original Enqueue crashed a second time.
        TestBenchLogQ?.Enqueue(e.Message);
    }
}
/// <summary>Captures one serial-port traffic event into the port event queue.</summary>
/// <param name="port_name">Name of the port that produced the event.</param>
/// <param name="event_time">Timestamp of the event.</param>
/// <param name="direction">Traffic direction (in/out) of the data.</param>
/// <param name="data">Raw bytes observed on the port.</param>
private void OnPortEvent(string port_name, DateTime event_time, SerialPortWrapper.TrafficDirection direction, byte[] data)
{
    var portEvent = new PortEvent
    {
        port_name = port_name,
        event_time = event_time,
        direction = direction,
        data = data
    };
    port_events_queue.Enqueue(portEvent);
}
/// <summary>
/// Benchmark driver: loads the binary database, builds a list of index-construction
/// actions from the parameter grids in <paramref name="setup"/>, runs them (serially
/// or via LongParallel depending on setup.SPAWN), then passes the collected result-file
/// argument queue to Commands.Check when setup.ExecuteSearch is enabled.
/// </summary>
/// <param name="nick">Short tag used to build output/result file names.</param>
/// <param name="setup">Parameter grids selecting which indexes to build and their settings.</param>
/// <param name="prepare_db">Callback that must leave the database ready to load.</param>
public static void ExecuteMain(string nick, IndexArgumentSetup setup, Action prepare_db)
{
    var dbname = String.Format ("DB.{0}", Path.GetFileName(setup.DATABASE));
    setup.BINARY_DATABASE = dbname;
    prepare_db ();
    // It is required to be already on memory at this point. The reason is to avoid
    // the loading of several instances of the same database
    SpaceGenericIO.Load (setup.BINARY_DATABASE);
    // Concurrent queue: the actions added below may enqueue results from parallel workers.
    var arglist = new System.Collections.Concurrent.ConcurrentQueue<String> ();
    arglist.Enqueue ("--save");
    arglist.Enqueue (String.Format ("Tab.ApproxIndexes.{0}.{1}.qarg={2}.json", nick, Path.GetFileName (setup.QUERIES), setup.QARG));
    /*var arglist = new List<string> () { "--save", String.Format("Tab.{0}.{1}.qarg={2}.json", nick, Path.GetFileName(setup.QUERIES), setup.QARG) */
    if (setup.ExecuteSequential) {
        arglist.Enqueue (Indexes.ExecuteSeq (setup, nick));
    }
    var actionlist = new List<Action> ();
    // arglist.Add (Indexes.ExecuteSATApprox (setup, nick));
    // arglist.Add (Indexes.ExecuteSATForest (setup, nick));
    // NOTE: every grid loop below copies its loop variables into locals (_x = x) so
    // each closure captures its own value rather than the shared loop variable.
    foreach (var max_instances in setup.NeighborhoodHash_MaxInstances) {
        foreach (var expected_recall in setup.NeighborhoodHash_ExpectedRecall) {
            var _max_instances = max_instances;
            var _expected_recall = expected_recall;
            actionlist.Add (() => {
                var reslist = Indexes.ExecuteMultiNeighborhoodHash (setup, nick, _expected_recall, _max_instances);
                foreach (var res in reslist) {
                    arglist.Enqueue(res);
                }
            });
        }
    }
    // KNR grid: one action per (numrefs, k, maxcand_ratio) combination.
    foreach (var numrefs in setup.KNR_NUMREFS) {
        foreach (var k in setup.KNR_KBUILD) {
            foreach (var maxcand_ratio in setup.KNR_MAXCANDRATIO) {
                var _numrefs = numrefs;
                var _k = k;
                var _maxcand_ratio = maxcand_ratio;
                actionlist.Add (() => {
                    var reslist = Indexes.ExecuteKNRSEQ (setup, nick, _numrefs, _k, _maxcand_ratio);
                    foreach (var res in reslist) {
                        arglist.Enqueue(res);
                    }
                });
            }
        }
    }
    // actionlist.Add (() => {
    //     var resname = Indexes.ExecuteAPG_OptTabuSatNeighborhood (setup, nick);
    //     arglist.Enqueue(resname);
    // });
    //
    // actionlist.Add (() => {
    //     var resname = Indexes.ExecuteAPG_OptTabuSatNeighborhoodMontecarloStart(setup, nick);
    //     arglist.Enqueue(resname);
    // });
    // Optimized-search grid: several index variants per neighborhood size.
    foreach (var neighbors in setup.OPTSEARCH_NEIGHBORS) {
        // arglist.Add (Indexes.ExecuteLocalSearchRestarts (setup, nick, dbname, setup.QUERIES, neighbors));
        // arglist.Add (Indexes.ExecuteLocalSearchBestFirst (setup, nick, dbname, setup.QUERIES, neighbors));
        var _neighbors = neighbors;
        actionlist.Add (() => {
            var resname = Indexes.ExecuteApproxGraphOptRestartsIS(setup, nick, _neighbors);
            arglist.Enqueue(resname);
        });
        actionlist.Add (() => {
            var resname = Indexes.ExecuteApproxGraphOptRandomRestarts(setup, nick, _neighbors);
            arglist.Enqueue(resname);
        });
        // actionlist.Add (() => {
        //     var resname = Indexes.ExecuteApproxGraphOptSimplerOptRandomRestarts(setup, nick, _neighbors);
        //     arglist.Enqueue(resname);
        // });
        actionlist.Add (() => {
            var resname = Indexes.ExecuteMetricGraphGreedy(setup, nick, _neighbors);
            arglist.Enqueue(resname);
        });
        foreach (var restarts in setup.OPTSEARCH_RESTARTS) {
            var _restarts = restarts;
            actionlist.Add (() => {
                var resname = Indexes.ExecuteApproxGraphIS(setup, nick, _neighbors, _restarts);
                arglist.Enqueue(resname);
            });
            // actionlist.Add (() => {
            //     var resname = Indexes.ExecuteApproxGraph(setup, nick, _neighbors, _restarts);
            //     arglist.Enqueue(resname);
            // });
        }
        actionlist.Add (() => {
            var resname = Indexes.ExecuteLocalSearchGallopingBeam(setup, nick, _neighbors);
            arglist.Enqueue(resname);
        });
        foreach (var beamsize in setup.OPTSEARCH_BEAMSIZE) {
            var _beamsize = beamsize;
            actionlist.Add (() => {
                var resname = Indexes.ExecuteLocalSearchBeam(setup, nick, _beamsize, _neighbors);
                arglist.Enqueue(resname);
            });
            // actionlist.Add (() => {
            //     var resname = Indexes.ExecuteLocalSearchMontecarloBeam(setup, nick, _beamsize, _neighbors);
            //     arglist.Enqueue(resname);
            // });
        }
    }
    // LSH grid: one action per (numInstances, numSamples) combination.
    foreach (var numInstances in setup.LSHFloatVector_INDEXES) {
        foreach (var numSamples in setup.LSHFloatVector_SAMPLES) {
            var _numInstances = numInstances;
            var _numSamples = numSamples;
            actionlist.Add (() => {
                var resname = Indexes.ExecuteLSHFloatVector (setup, nick, _numInstances, _numSamples);
                arglist.Enqueue(resname);
            });
        }
    }
    // SPAWN == 1 runs the actions one at a time in-process; otherwise fan out.
    if (setup.SPAWN == 1) {
        foreach (var action in actionlist) {
            action.Invoke ();
        }
    } else {
        LongParallel.ForEach (actionlist, (a) => a.Invoke (), setup.SPAWN);
    }
    if (setup.ExecuteSearch) {
        Commands.Check (arglist);
    }
}
/// <summary>Returns a frame buffer to the pool so it can be reused.</summary>
/// <param name="buffer">The buffer being recycled.</param>
public virtual void FrameRecovery(byte[] buffer)
{
    mBuffers.Enqueue(buffer);
}
// Reads buffered XML fragments from XMLStream one node at a time, rebuilding each
// top-level element; every completed top-level element is enqueued into Documents
// and a GotData event is raised so consumers can pick it up.
void InnerXMLRead()
{
    if (!XMLStream.HasData) {
        return;
    }
    Log(4, "Innerxml read has data");
    XmlReaderSettings Settings = new XmlReaderSettings();
    Settings.ValidationType = ValidationType.None;
    // Fragment: the stream carries a sequence of top-level elements, not a single document.
    Settings.ConformanceLevel = ConformanceLevel.Fragment;
    XmlReader Reader = XmlReader.Create(XMLStream, Settings);
    XElement RootNode = null;       // top-level element currently being assembled
    XElement CurrentElement = null; // insertion point within RootNode
    Log(4, "InnerXmlRead");
    while (Reader.Read()) {
        Log(4, "InnerXmlRead: NodeType: {0}, Name: {1}", Reader.NodeType, Reader.Name);
        switch (Reader.NodeType) {
        case XmlNodeType.Attribute:
            break;
        case XmlNodeType.CDATA: {
            // 'continue' jumps to the next Reader.Read() iteration of the outer while loop.
            if (CurrentElement == null) {
                continue;
            }
            XCData PureData = new XCData(Reader.Value);
            CurrentElement.Add(PureData);
        }
        break;
        case XmlNodeType.Comment: break;
        case XmlNodeType.Document: break;
        case XmlNodeType.DocumentFragment: break;
        case XmlNodeType.DocumentType: break;
        case XmlNodeType.Element: {
            bool SelfClosing = Reader.IsEmptyElement;
            XNamespace Namespace = Reader.NamespaceURI;
            XElement NewElement = new XElement(Namespace + Reader.Name);
            LoadAttributesFromReaderToElement(Reader, NewElement);
            if (RootNode == null) {
                // Starting a brand-new top-level element.
                RootNode = NewElement;
                CurrentElement = RootNode;
                if (SelfClosing) {
                    // A self-closing root is already complete: publish immediately.
                    // NOTE(review): unlike the EndElement path below, this path does NOT
                    // call TotalXmlDocumentsEnqueueud.Inc1() — confirm whether intended.
                    if (DebugLevel >= 2) {
                        Log(2, "Enqueing document: {0}", RootNode.ToString());
                    }
                    Documents.Enqueue(RootNode);
                    RootNode = CurrentElement = null;
                    NewEvents.Enqueue(new Events(Events.EventType.GotData));
                }
            } else {
                CurrentElement.Add(NewElement);
                if (!SelfClosing) {
                    // Descend only into elements that have a body.
                    CurrentElement = NewElement;
                }
            }
            break;
        }
        case XmlNodeType.EndElement: {
            if (CurrentElement == null) {
                continue;
            }
            if (CurrentElement == RootNode) {
                // The top-level element just closed: publish it and reset state.
                if (DebugLevel >= 2) {
                    Log(2, "Enqueing document: {0}", RootNode.ToString());
                }
                TotalXmlDocumentsEnqueueud.Inc1();
                Documents.Enqueue(RootNode);
                RootNode = CurrentElement = null;
                NewEvents.Enqueue(new Events(Events.EventType.GotData));
            } else {
                // Pop back up one nesting level.
                CurrentElement = CurrentElement.Parent;
            }
        }
        break;
        case XmlNodeType.EndEntity: break;
        case XmlNodeType.Entity: break;
        case XmlNodeType.EntityReference: break;
        case XmlNodeType.None: break;
        case XmlNodeType.ProcessingInstruction: break;
        case XmlNodeType.SignificantWhitespace: break;
        case XmlNodeType.Text: {
            if (CurrentElement == null) {
                continue;
            }
            XText PureText = new XText(Reader.Value);
            CurrentElement.Add(PureText);
        }
        break;
        case XmlNodeType.Whitespace: break;
        case XmlNodeType.XmlDeclaration: break;
        }
    }
}
/// <summary>Adds a data point to the reporting event queue.</summary>
/// <param name="data">The data point to queue.</param>
public void QueuePoint(DataPoint data)
{
    eventQueue.Enqueue(data);
}