/// <summary>
/// Background worker that drains queued terrain patches into the in-memory store,
/// skipping patches whose serial number has not changed since the last store.
/// </summary>
protected override void StorageTerrainThread()
{
    Thread.CurrentThread.Name = "Storage Terrain Thread: " + m_RegionID.ToString();

    // Last serial number written per extended patch id, used to suppress redundant stores.
    var storedSerials = new C5.TreeDictionary<uint, uint>();

    // Keep draining until a stop is requested AND the queue has been fully flushed.
    while (!m_StopStorageThread || m_StorageTerrainRequestQueue.Count != 0)
    {
        LayerPatch patch;
        try
        {
            patch = m_StorageTerrainRequestQueue.Dequeue(1000);
        }
        catch
        {
            // Dequeue timed out; re-evaluate the loop condition.
            continue;
        }

        uint serial = patch.Serial;
        bool alreadyStored = storedSerials.Contains(patch.ExtendedPatchID) &&
                             storedSerials[patch.ExtendedPatchID] == serial;
        if (!alreadyStored)
        {
            m_Data[m_RegionID][patch.ExtendedPatchID] = patch.Serialization;
            storedSerials[patch.ExtendedPatchID] = serial;
        }
    }
}
/// <summary>
/// Determines whether any region's [start, end] interval in <paramref name="regions"/>
/// lies entirely inside [<paramref name="regionStart"/>, <paramref name="regionEnd"/>].
/// </summary>
/// <param name="regions">Sorted map of region start => region end.</param>
/// <param name="regionStart">Inclusive lower bound of the containing interval.</param>
/// <param name="regionEnd">Inclusive upper bound of the containing interval.</param>
/// <returns>true if at least one contained region exists; otherwise false.</returns>
private static bool HasIntervalThatIsSubsetOfInterval(
    C5.TreeDictionary <ulong, ulong> regions, ulong regionStart, ulong regionEnd)
{
    // TryWeakSuccessor yields the first region whose start is >= regionStart; we then walk
    // successors until a start exceeds regionEnd, after which containment is impossible.
    C5.KeyValuePair <ulong, ulong> range;
    if (!regions.TryWeakSuccessor(regionStart, out range))
    {
        // BUGFIX: previously fell through here with range == default, making the loop
        // below scan every region starting from key 0 even though no region can match.
        return false;
    }

    // Invariant from here on: range.Key >= regionStart (weak-successor guarantee).
    while (range.Key <= regionEnd)
    {
        if (range.Value <= regionEnd)
        {
            return true;
        }
        if (!regions.TrySuccessor(range.Key, out range))
        {
            break;
        }
    }
    return false;
}
/// <summary>
/// Provides Dictionary-style TryGetValue semantics for C5's TreeDictionary.
/// </summary>
/// <param name="dictionary">The dictionary to search.</param>
/// <param name="key">The key to look up.</param>
/// <param name="value">The found value, or default(V) when the key is absent.</param>
/// <returns>true if the key exists; otherwise false.</returns>
public static bool TryGetValue <K, V>(this C5.TreeDictionary <K, V> dictionary, K key, out V value)
{
    // PERF: C5's Find does a single tree search; the previous Contains-then-indexer
    // pattern performed two O(log n) lookups per call.
    if (dictionary.Find(ref key, out value))
    {
        return true;
    }
    value = default(V);
    return false;
}
// Constructors

/// <summary>
/// Creates an empty order book: no price levels on either side, zero order
/// counters, a fresh matcher, and no registered observers.
/// </summary>
public Orderbook()
{
    // Each side keeps two synchronized views: a hash map for O(1) price lookup
    // and a sorted tree for ordered traversal.
    _bidsDict = new Dictionary <double, IList <IOrder_Mutable> >();
    _asksDict = new Dictionary <double, IList <IOrder_Mutable> >();
    _bids = new C5.TreeDictionary <double, IList <IOrder_Mutable> >();
    _asks = new C5.TreeDictionary <double, IList <IOrder_Mutable> >();

    _numBids = 0;
    _numAsks = 0;
    _matcher = new Matcher();
    _obsList = new List <IOrderbookObserver>();
}
/// <summary>
/// Creates an empty trajectory with the given identity and sampling parameters.
/// </summary>
/// <param name="name">Display name of the trajectory.</param>
/// <param name="timeQuantum">Minimum temporal granularity between reported samples.</param>
/// <param name="historicalBias">Weight applied to historical values when adjusting.</param>
/// <param name="burnin">Burn-in period before values are considered stable.</param>
public Trajectory(string name, double timeQuantum, double historicalBias, double burnin)
{
    _name = name;
    _temporalGranularityThreshold = timeQuantum;
    HISTORICAL_BIAS = historicalBias;
    _burnin = burnin;

    // No samples yet: extrema and "last seen" trackers start at their sentinels.
    _mint = double.MaxValue;
    _maxt = double.MinValue;
    _lastTimestampOutput = double.MinValue;
    _lastTimestampReported = double.MinValue;
    _lastHistoricallyAdjustedValue = 0.0;
    _virgin = true;
    _values = new C5.TreeDictionary <double, double>();
}
/// <summary>
/// Deep-copy constructor: clones every resting order on both sides of
/// <paramref name="orig"/>, copies the order counters, reuses (and resets) the
/// original's matcher, and copies the observer list.
/// </summary>
/// <param name="orig">The order book to copy.</param>
private Orderbook(Orderbook orig)
{
    _bidsDict = new Dictionary <double, IList <IOrder_Mutable> >();
    _bids = new C5.TreeDictionary <double, IList <IOrder_Mutable> >();
    CopySide(orig._bidsDict, _bidsDict, _bids);

    _asksDict = new Dictionary <double, IList <IOrder_Mutable> >();
    _asks = new C5.TreeDictionary <double, IList <IOrder_Mutable> >();
    // CONSISTENCY FIX: the asks loop previously iterated orders as IOrder while the
    // bids loop used IOrder_Mutable; both sides now share one strongly-typed helper.
    CopySide(orig._asksDict, _asksDict, _asks);

    _numBids = orig._numBids;
    _numAsks = orig._numAsks;

    // NOTE(review): the matcher instance is shared with the original and reset here;
    // confirm that resetting the original's matcher is intended.
    _matcher = orig._matcher;
    _matcher.reset();

    _obsList = new List <IOrderbookObserver>();
    foreach (IOrderbookObserver obs in orig._obsList)
    {
        _obsList.Add(obs);
    }
}

// Clones one side of the book: each price level's order list is deep-copied, and the
// same cloned list instance is registered in both the hash view and the sorted view.
private static void CopySide(
    Dictionary <double, IList <IOrder_Mutable> > source,
    Dictionary <double, IList <IOrder_Mutable> > destDict,
    C5.TreeDictionary <double, IList <IOrder_Mutable> > destTree)
{
    foreach (double price in source.Keys)
    {
        IList <IOrder_Mutable> clones = new List <IOrder_Mutable>();
        foreach (IOrder_Mutable order in source[price])
        {
            clones.Add(order.clone());
        }
        destDict.Add(price, clones);
        destTree.Add(price, clones);
    }
}
/// <summary>
/// Initializes a new instance of the Mapper class with empty forward and
/// reverse maps.
/// </summary>
public Mapper()
{
    // Reverse map is a plain hash lookup; forward map is kept sorted by key.
    TToKMap = new HashDictionary <T, K>();
    KToTMap = new TreeDictionary <K, T>();
}
/// <summary>
/// Background worker that drains queued terrain patches and batch-writes them to the
/// MySQL "terrains" table via multi-row REPLACE INTO statements. Registers itself in
/// m_TerrainListenerThreads for its lifetime.
/// </summary>
protected override void StorageTerrainThread()
{
    try
    {
        m_TerrainListenerThreads.Add(this);
        Thread.CurrentThread.Name = "Storage Terrain Thread: " + RegionID.ToString();
        // Last serial number stored per extended patch id; unchanged patches are skipped.
        var knownSerialNumbers = new C5.TreeDictionary <uint, uint>();
        // SQL prefix is built lazily from the first request's field names.
        string replaceIntoTerrain = string.Empty;
        // Pending "(value, ...)" tuples awaiting the next batch flush.
        var updateRequests = new List <string>();
        // Run until a stop is requested AND the queue is fully drained.
        while (!m_StopStorageThread || m_StorageTerrainRequestQueue.Count != 0)
        {
            LayerPatch req;
            try
            {
                req = m_StorageTerrainRequestQueue.Dequeue(1000);
            }
            catch
            {
                // Dequeue timed out; re-check the loop condition.
                continue;
            }
            uint serialNumber = req.Serial;
            if (!knownSerialNumbers.Contains(req.ExtendedPatchID) || knownSerialNumbers[req.ExtendedPatchID] != req.Serial)
            {
                var data = new Dictionary <string, object>
                {
                    ["RegionID"] = RegionID,
                    ["PatchID"] = req.ExtendedPatchID,
                    ["TerrainData"] = req.Serialization
                };
                if (replaceIntoTerrain.Length == 0)
                {
                    replaceIntoTerrain = "REPLACE INTO terrains (" + MySQLUtilities.GenerateFieldNames(data) + ") VALUES ";
                }
                // NOTE(review): values are inlined via MySQLUtilities.GenerateValues rather
                // than bound parameters — presumably that helper escapes them; verify.
                updateRequests.Add("(" + MySQLUtilities.GenerateValues(data) + ")");
                knownSerialNumbers[req.ExtendedPatchID] = serialNumber;
            }
            // Flush when the queue is momentarily empty or 256 rows have accumulated.
            if ((m_StorageTerrainRequestQueue.Count == 0 && updateRequests.Count > 0) || updateRequests.Count >= 256)
            {
                string elems = string.Join(",", updateRequests);
                try
                {
                    using (var conn = new MySqlConnection(m_ConnectionString))
                    {
                        conn.Open();
                        using (var cmd = new MySqlCommand(replaceIntoTerrain + elems, conn))
                        {
                            cmd.ExecuteNonQuery();
                        }
                    }
                    updateRequests.Clear();
                    // NOTE(review): incremented once per flushed batch, not once per patch —
                    // confirm m_ProcessedPatches is meant to count batches.
                    Interlocked.Increment(ref m_ProcessedPatches);
                }
                catch (Exception e)
                {
                    // On failure the pending rows are retained and retried on a later flush.
                    m_Log.Error("Terrain store failed", e);
                }
            }
        }
    }
    finally
    {
        m_TerrainListenerThreads.Remove(this);
    }
}
/**
 * Replaces the game's rule set.
 *
 * @param rules The rules used by the game, keyed by relation id.
 */
public void SetRules(C5.TreeDictionary<int, List<Implication>> rules)
{
    _rules = rules;
}
/**
 * Replaces the game's relation table.
 *
 * @param relations The relations to set, keyed by relation id.
 */
public void SetRelations(C5.TreeDictionary<int, RelationInfo> relations)
{
    _relations = relations;
}
/// <summary>
/// Creates an empty range tree whose entries are indexed by the keys produced
/// by the supplied boundary selectors.
/// </summary>
/// <param name="getRangeStart">Extracts the start key of a value's range.</param>
/// <param name="getRangeEnd">Extracts the end key of a value's range.</param>
public IndexedRangeTreeDictionary(Func <V, K> getRangeStart, Func <V, K> getRangeEnd)
{
    _getRangeStart = getRangeStart;
    _getRangeEnd = getRangeEnd;
    _dictionary = new C5.TreeDictionary <K, V>();
}
/// <summary>
/// Background worker that drains queued terrain patches and batch-writes them to the
/// PostgreSQL "terrains" table, using ON CONFLICT upserts when available and an
/// UPDATE+conditional-INSERT fallback otherwise. A null request is a sentinel that
/// snapshots the region's terrain into "defaultterrains" inside a transaction.
/// Registers itself in m_TerrainListenerThreads for its lifetime.
/// </summary>
protected override void StorageTerrainThread()
{
    try
    {
        m_TerrainListenerThreads.Add(this);
        Thread.CurrentThread.Name = "Storage Terrain Thread: " + RegionID.ToString();
        // Last serial number stored per extended patch id; unchanged patches are skipped.
        var knownSerialNumbers = new C5.TreeDictionary <uint, uint>();
        // Pending parameters keyed "PatchID<i>" / "TerrainData<i>" awaiting the next flush.
        Dictionary <string, object> updateRequestData = new Dictionary <string, object>();
        int updateRequestCount = 0;
        // Run until a stop is requested AND the queue is fully drained.
        while (!m_StopStorageThread || m_StorageTerrainRequestQueue.Count != 0)
        {
            LayerPatch req;
            try
            {
                req = m_StorageTerrainRequestQueue.Dequeue(1000);
            }
            catch
            {
                // Dequeue timed out; re-check the loop condition.
                continue;
            }
            if (req == null)
            {
                // Sentinel request: replace the stored default terrain with a fresh
                // copy of the current terrain, atomically.
                using (var connection = new NpgsqlConnection(m_ConnectionString))
                {
                    connection.Open();
                    connection.InsideTransaction((transaction) =>
                    {
                        using (var cmd = new NpgsqlCommand("DELETE FROM defaultterrains WHERE RegionID=@regionid", connection)
                        {
                            Transaction = transaction
                        })
                        {
                            // NOTE(review): parameter added as "@RegionID" but referenced as
                            // "@regionid" in the SQL — presumably relies on case-insensitive
                            // parameter matching; verify against the Npgsql version in use.
                            cmd.Parameters.AddParameter("@RegionID", RegionID);
                            cmd.ExecuteNonQuery();
                        }
                        using (var cmd = new NpgsqlCommand("INSERT INTO defaultterrains (RegionID, PatchID, TerrainData) SELECT RegionID, PatchID, TerrainData FROM terrains WHERE RegionID=@regionid", connection)
                        {
                            Transaction = transaction
                        })
                        {
                            cmd.Parameters.AddParameter("@RegionID", RegionID);
                            cmd.ExecuteNonQuery();
                        }
                    });
                }
            }
            else
            {
                uint serialNumber = req.Serial;
                if (!knownSerialNumbers.Contains(req.ExtendedPatchID) || knownSerialNumbers[req.ExtendedPatchID] != req.Serial)
                {
                    updateRequestData.Add("PatchID" + updateRequestCount, req.ExtendedPatchID);
                    updateRequestData.Add("TerrainData" + updateRequestCount, req.Serialization);
                    ++updateRequestCount;
                    knownSerialNumbers[req.ExtendedPatchID] = serialNumber;
                }
                // Flush when the queue is momentarily empty or 256 patches have accumulated.
                if ((m_StorageTerrainRequestQueue.Count == 0 && updateRequestCount > 0) || updateRequestCount >= 256)
                {
                    StringBuilder updateCmd = new StringBuilder();
                    try
                    {
                        using (NpgsqlConnection conn = new NpgsqlConnection(m_ConnectionString))
                        {
                            conn.Open();
                            if (conn.HasOnConflict() && m_EnableOnConflict)
                            {
                                // Native upsert path (PostgreSQL 9.5+).
                                for (int i = 0; i < updateRequestCount; ++i)
                                {
                                    updateCmd.AppendFormat("INSERT INTO terrains (\"RegionID\", \"PatchID\", \"TerrainData\") VALUES (@regionid, @patchid{0}, @terraindata{0}) ON CONFLICT(\"RegionID\", \"PatchID\") DO UPDATE SET \"TerrainData\"= @terraindata{0};", i);
                                }
                            }
                            else
                            {
                                // Fallback upsert: UPDATE first, then INSERT only if no row exists.
                                for (int i = 0; i < updateRequestCount; ++i)
                                {
                                    updateCmd.AppendFormat("UPDATE terrains SET \"TerrainData\"=@terraindata{0} WHERE \"RegionID\" = @regionid AND \"PatchID\" = @patchid{0};", i);
                                    updateCmd.AppendFormat("INSERT INTO terrains (\"RegionID\", \"PatchID\", \"TerrainData\") SELECT @regionid, @patchid{0}, @terraindata{0} WHERE NOT EXISTS " +
                                                           "(SELECT 1 FROM terrains WHERE \"RegionID\" = @regionid AND \"PatchID\" = @patchid{0});", i);
                                }
                            }
                            using (NpgsqlCommand cmd = new NpgsqlCommand(updateCmd.ToString(), conn))
                            {
                                cmd.Parameters.AddParameter("@regionid", RegionID);
                                // NOTE(review): keys are "PatchID<i>"/"TerrainData<i>" (no '@',
                                // different casing) while the SQL uses @patchid<i>/@terraindata<i>
                                // — presumably AddParameter/Npgsql normalizes this; verify.
                                foreach (KeyValuePair <string, object> kvp in updateRequestData)
                                {
                                    cmd.Parameters.AddParameter(kvp.Key, kvp.Value);
                                }
                                cmd.ExecuteNonQuery();
                            }
                        }
                        updateRequestData.Clear();
                        updateRequestCount = 0;
                        // NOTE(review): incremented once per flushed batch, not once per patch —
                        // confirm m_ProcessedPatches is meant to count batches.
                        Interlocked.Increment(ref m_ProcessedPatches);
                    }
                    catch (Exception e)
                    {
                        // On failure the pending parameters are retained and retried later.
                        m_Log.Error("Terrain store failed", e);
                    }
                }
            }
        }
    }
    finally
    {
        m_TerrainListenerThreads.Remove(this);
    }
}
/// <summary>
/// Merges the facets from every accessor in <c>this.list</c> (summing hit counts for
/// facets with equal values) and returns them ordered according to <c>fspec.OrderBy</c>:
/// value-ascending via the sorted tree map, or hits-descending via an explicit sort.
/// </summary>
/// <returns>The merged, ordered facets.</returns>
public virtual IEnumerable<BrowseFacet> GetFacets()
{
    C5.IDictionary<object, BrowseFacet> facetMap;
    if (FacetSpec.FacetSortSpec.OrderValueAsc.Equals(fspec.OrderBy))
    {
        // Tree map keeps facets sorted by value for the ascending-value order.
        facetMap = new C5.TreeDictionary<object, BrowseFacet>();
    }
    else
    {
        facetMap = new C5.HashDictionary<object, BrowseFacet>();
    }
    foreach (IFacetAccessible facetAccessor in this.list)
    {
        IEnumerator<BrowseFacet> iter = facetAccessor.GetFacets().GetEnumerator();
        if (facetMap.Count == 0)
        {
            // First accessor: bulk-insert, no merging needed.
            while (iter.MoveNext())
            {
                BrowseFacet facet = iter.Current;
                facetMap.Add(facet.Value, facet);
            }
        }
        else
        {
            while (iter.MoveNext())
            {
                BrowseFacet facet = iter.Current;
                // BUGFIX: C5's indexer throws NoSuchItemException for a missing key
                // (unlike Java's Map.get, which returns null), so the previous
                // "facetMap[facet.Value] == null" check crashed on any new facet value.
                if (facetMap.Contains(facet.Value))
                {
                    BrowseFacet existing = facetMap[facet.Value];
                    existing.HitCount = existing.HitCount + facet.HitCount;
                }
                else
                {
                    facetMap.Add(facet.Value, facet);
                }
            }
        }
    }
    List<BrowseFacet> list = new List<BrowseFacet>(facetMap.Values);
    // FIXME: we need to reorganize all that stuff with comparators
    Comparer comparer = new Comparer(System.Globalization.CultureInfo.InvariantCulture);
    if (FacetSpec.FacetSortSpec.OrderHitsDesc.Equals(fspec.OrderBy))
    {
        list.Sort(
            delegate(BrowseFacet f1, BrowseFacet f2)
            {
                // Descending by hit count; ties broken by descending value order.
                int val = f2.HitCount - f1.HitCount;
                if (val == 0)
                {
                    val = -(comparer.Compare(f1.Value, f2.Value));
                }
                return val;
            }
        );
    }
    return list;
}
/// <summary>
/// Constructs a C5 TreeDictionary and discards it (construction-cost exercise).
/// </summary>
public void C5_TreeDictionary()
{
    _ = new C5.TreeDictionary <Char, String>();
}
/// <summary>
/// Mirrors Dictionary.ContainsKey for C5's TreeDictionary, whose membership
/// test is named Contains.
/// </summary>
/// <param name="dictionary">The dictionary to search.</param>
/// <param name="key">The key to look up.</param>
/// <returns>true if the key exists; otherwise false.</returns>
public static bool ContainsKey <K, V>(this C5.TreeDictionary <K, V> dictionary, K key)
{
    return dictionary.Contains(key);
}
/**
 * Replaces the game's ground facts.
 *
 * @param groundFacts The game's ground facts, keyed by relation id.
 */
public void SetGroundFacts(C5.TreeDictionary<int, List<GroundFact>> groundFacts)
{
    _groundFacts = groundFacts;
}
/// <summary>
/// Builds the facet data cache for this reader: scans every document, fetches its facet
/// value, groups doc ids per value in a sorted map (choosing a comparator based on the
/// first value's runtime type), and flattens the result into the parallel order/term/
/// freq/minID/maxID arrays. Slot 0 is reserved for documents with a null facet value.
/// </summary>
/// <param name="reader">The index reader to scan.</param>
/// <returns>The populated facet data cache.</returns>
public override FacetDataCache Load(BoboIndexReader reader)
{
    int doc = -1;
    // value -> list of doc ids carrying that value; created lazily on the first non-null value.
    C5.TreeDictionary <object, List <int> > dataMap = null;
    List <int> docList = null;
    // Tracking for documents whose facet value is null (stored in slot 0).
    int nullMinId = -1;
    int nullMaxId = -1;
    int nullFreq = 0;
    // TermDocs(null) iterates all documents in the reader.
    TermDocs termDocs = reader.TermDocs(null);
    try
    {
        while (termDocs.Next())
        {
            doc = termDocs.Doc;
            object val = _facetDataFetcher.Fetch(reader, doc);
            if (val == null)
            {
                if (nullMinId < 0)
                {
                    nullMinId = doc;
                }
                nullMaxId = doc;
                ++nullFreq;
                continue;
            }
            if (dataMap == null)
            {
                // Initialize.
                // Comparator choice is fixed by the runtime type of the FIRST non-null value.
                if (val is long[])
                {
                    if (_termListFactory == null)
                    {
                        _termListFactory = new TermFixedLengthLongArrayListFactory(
                            ((long[])val).Length);
                    }
                    dataMap = new C5.TreeDictionary <object, List <int> >(new VirtualSimpleFacetHandlerLongArrayComparator());
                }
                else if (val is IComparable)
                {
                    // NOTE: In .NET 3.5, the default constructor doesn't work in this case. We therefore have a custom type
                    // that converts the objects to IComparable before comparing them, falling back to a string comparison
                    // if they don't convert. This differs from the Java implementation that uses the default constructor.
                    dataMap = new C5.TreeDictionary <object, List <int> >(new VirtualSimpleFacetHandlerComparableComparator());
                }
                else
                {
                    dataMap = new C5.TreeDictionary <object, List <int> >(new VirtualSimpleFacetHandlerObjectComparator());
                }
            }
            // Contains-then-indexer avoids C5's throwing indexer on a missing key.
            if (dataMap.Contains(val))
            {
                docList = dataMap[val];
            }
            else
            {
                docList = null;
            }
            if (docList == null)
            {
                docList = new List <int>();
                dataMap[val] = docList;
            }
            // termDocs iterates in ascending doc order, so each docList stays sorted.
            docList.Add(doc);
        }
    }
    finally
    {
        termDocs.Dispose();
    }
    _facetDataFetcher.Cleanup(reader);
    int maxDoc = reader.MaxDoc;
    // +1 reserves slot 0 for the null-value bucket.
    int size = dataMap == null ? 1 : (dataMap.Count + 1);
    BigSegmentedArray order = new BigIntArray(maxDoc);
    ITermValueList list = _termListFactory == null ?
        new TermStringList(size) :
        _termListFactory.CreateTermList(size);
    int[] freqs = new int[size];
    int[] minIDs = new int[size];
    int[] maxIDs = new int[size];
    // Slot 0: null-value statistics.
    list.Add(null);
    freqs[0] = nullFreq;
    minIDs[0] = nullMinId;
    maxIDs[0] = nullMaxId;
    if (dataMap != null)
    {
        int i = 1;
        int?docId;
        // Tree iteration yields values in comparator order, so term indexes are sorted.
        foreach (var entry in dataMap)
        {
            list.Add(list.Format(entry.Key));
            docList = entry.Value;
            freqs[i] = docList.Count;
            // NOTE(review): Get/Poll are not List<int> members — presumably project
            // extension methods (peek-with-default / dequeue-with-sentinel); verify.
            minIDs[i] = docList.Get(0, int.MinValue);
            while ((docId = docList.Poll(int.MinValue)) != int.MinValue)
            {
                doc = (int)docId;
                order.Add(doc, i);
            }
            // After draining, doc holds the last (largest) id polled for this value.
            maxIDs[i] = doc;
            ++i;
        }
    }
    list.Seal();
    FacetDataCache dataCache = new FacetDataCache(order, list, freqs, minIDs,
        maxIDs, TermCountSize.Large);
    return(dataCache);
}