/// <summary>
/// Converts a duration given in seconds into a localized time string
/// (day/hour/minute/second parts, zero parts omitted).
/// </summary>
/// <param name="mss">Duration in seconds.</param>
/// <returns>Concatenation of the localized text for each non-zero part.</returns>
public static String formatDuring(double mss)
{
    // Break the duration into calendar-style components.
    int dayPart    = (int)(mss / (60 * 60 * 24));
    int hourPart   = (int)((mss % (60 * 60 * 24)) / (60 * 60));
    int minutePart = (int)((mss % (60 * 60)) / 60);
    int secondPart = (int)(mss % 60);

    // Keyed by the UI-string id for each unit; only non-zero parts are kept.
    DictionaryEx <string, int> parts = new DictionaryEx <string, int>();
    if (dayPart != 0)
    {
        parts.Add("days", dayPart);
    }
    if (hourPart != 0)
    {
        parts.Add("hours", hourPart);
    }
    if (minutePart != 0)
    {
        parts.Add("minutes", minutePart);
    }
    if (secondPart != 0)
    {
        parts.Add("seconds", secondPart);
    }

    // mList preserves insertion order, so days always precede hours, etc.
    string result = string.Empty;
    for (int index = 0; index < parts.mList.Count; ++index)
    {
        string unitKey = parts.mList[index].ToString();
        result += string.Format(TextManager.GetUIString(unitKey), parts[parts.mList[index]]);
    }
    parts.Clear();
    return(result);
}
/// <summary>
/// Micro-benchmark comparing class <c>Tuple</c> keys against <c>StructTuple</c>
/// keys for dictionary add-or-update. Returns an accumulated counter so the
/// measured work is observably used and cannot be eliminated by the JIT.
/// </summary>
static int MeasureTupleVsKeyValuePairInDictionaryLookup()
{
    const int iterations = 1000000;
    int ran = 0; // incremented inside the timed lambdas; returned as the "result"

    // Pre-populate both maps with 200 distinct keys.
    var tupleMap = new DictionaryEx <Tuple <int, int, int, DateTime>, int>(200);
    var structMap = new DictionaryEx <StructTuple <int, int, int, DateTime>, int>(200);
    for (int i = 200 - 1; i >= 0; i--)
    {
        tupleMap.Add(new Tuple <int, int, int, DateTime>(400 - i, i + 1, i - 1, new DateTime(1990 + i % 10, 5, 2)), 0);
        structMap.Add(new StructTuple <int, int, int, DateTime>(400 - i, i + 1, i - 1, new DateTime(1990 + i % 10, 5, 2)), 0);
    }

    // Tuple-key lookup variant kept for reference; currently disabled.
    //CodeTimer.Time(true, "Tuple lookup",
    //               iterations,
    //               () =>
    //               {
    //                   int value;
    //                   if (tupleMap.TryGetValue(new Tuple<int, int, int, DateTime>(390, 11, 9, new DateTime(1990, 5, 2)), out value))
    //                       ran += 1;
    //               });

    // Variant 1: TryGetValue followed by indexer write or Add (two lookups).
    CodeTimer.Time(true, "Struct lookup outer add or update",
                   iterations,
                   () =>
    {
        var key = new StructTuple <int, int, int, DateTime>(390, 11, 9, new DateTime(1990, 5, 2));
        int a = 1;
        int value;
        if (structMap.TryGetValue(key, out value))
        {
            structMap[key] = value + a;
        }
        else
        {
            structMap.Add(key, a);
        }
        ran += 1;
    });

    // Variant 2: single-call AddOrUpdate with a merge delegate (one lookup).
    CodeTimer.Time(true, "Struct lookup AddOrUpdate",
                   iterations,
                   () =>
    {
        int a = 1;
        structMap.AddOrUpdate(
            new StructTuple <int, int, int, DateTime>(390, 11, 9, new DateTime(1990, 5, 2)),
            a,
            (k, cv, nv) => cv + nv);
        ran += 1;
    });

    return(ran);
}
// Accepts one authoritative frame from the server, rescales its id into the
// client frame-rate domain, and queues it for lock-step execution.
private void AddServerFrame(FSPFrame frame)
{
    // Non-positive ids are control frames: execute immediately, never buffered.
    if (frame.frameId <= 0)
    {
        ExecuteFrame(frame.frameId, frame);
        return;
    }

    // One server frame covers clientFrameRateMultiple client frames.
    int scaledFrameId = frame.frameId * m_Param.clientFrameRateMultiple;
    frame.frameId = scaledFrameId;

    // The client may advance up to the last client frame covered by this server frame.
    m_ClientLockedFrame = scaledFrameId + m_Param.clientFrameRateMultiple - 1;

    m_FrameBuffer.Add(scaledFrameId, frame);
    m_FrameCtrl.AddFrameId(scaledFrameId);
}
/// <summary>
/// Adds a named record to the record set.
/// </summary>
/// <param name="name">Key for the record; must be non-null and non-empty.</param>
/// <param name="record">Record to store; must be non-null.</param>
/// <returns>
/// true when the record was added; false when validation fails, the name is
/// already present, or an exception occurred (the exception is logged).
/// </returns>
public bool AddRecord2Set(string name, ref GameRecord record)
{
    try
    {
        // Idiomatic replacement for "name == null || name.Length == 0".
        if (string.IsNullOrEmpty(name))
        {
            LogSystem.Log("name is null");
            return(false);
        }
        if (record == null)
        {
            LogSystem.Log("record is null");
            return(false);
        }
        // Duplicate names are rejected silently (caller checks the return value).
        if (mRecordSet.ContainsKey(name))
        {
            return(false);
        }
        mRecordSet.Add(name, record);
    }
    catch (Exception ex)
    {
        LogSystem.Log("Exception:", ex.ToString());
        return(false);
    }
    return(true);
}
/// <summary>
/// Instantiates a connection of the given concrete type, registers it under
/// <paramref name="connId"/>, and starts connecting to the endpoint.
/// </summary>
/// <param name="connId">Map key for the new connection.</param>
/// <param name="type">Concrete type implementing IConnection, created via its assembly.</param>
/// <param name="ip">Remote address.</param>
/// <param name="port">Remote port.</param>
/// <returns>The newly created connection.</returns>
/// <exception cref="ArgumentException">
/// Thrown when <paramref name="type"/> cannot be instantiated as an IConnection.
/// </exception>
public IConnection CreateConnection(uint connId, Type type, string ip, int port)
{
    IConnection conn = type.Assembly.CreateInstance(type.FullName) as IConnection;
    // Bug fix: CreateInstance returns null for an unknown type, and the "as"
    // cast yields null for a non-IConnection type. Previously a null was
    // silently registered in m_mapConnection and the code crashed with an NRE
    // at conn.Connect, leaving a corrupt map entry behind. Fail fast instead.
    if (conn == null)
    {
        throw new ArgumentException("Type does not resolve to an IConnection instance.", nameof(type));
    }
    m_mapConnection.Add(connId, conn);
    conn.Connect(ip, port);
    return(conn);
}
// Lazily creates the per-thread Instance keyed by the managed thread id.
// NOTE(review): ContainsKey + Add is not atomic — assumes single-threaded
// access to instanceList or external synchronization; confirm at call sites.
internal static void ThreadInit()
{
    int threadId = Thread.CurrentThread.ManagedThreadId;
    if (instanceList.ContainsKey(threadId))
    {
        return; // already initialized for this thread
    }
    instanceList.Add(threadId, new Instance());
}
/// <summary>
/// Returns a recyclable object to the idle pool for its recycle type,
/// creating the per-type stack on first use.
/// </summary>
/// <param name="obj">Object to recycle; keyed by GetRecycleType().</param>
public void Push(IRecyclableObject obj)
{
    string type = obj.GetRecycleType();
    // Bug fix: the original read m_poolIdleObject[type] before checking for
    // existence — with a standard dictionary that throws KeyNotFoundException,
    // and a subsequent Add on an auto-added null entry would throw a duplicate
    // key error. TryGetValue plus an indexer write handles both cases.
    Stack <IRecyclableObject> stackIdleObject;
    if (!m_poolIdleObject.TryGetValue(type, out stackIdleObject) || stackIdleObject == null)
    {
        stackIdleObject = new Stack <IRecyclableObject>();
        m_poolIdleObject[type] = stackIdleObject;
    }
    stackIdleObject.Push(obj);
}
/// <summary>
/// Returns a stable color for the given id, generating and caching a random
/// color on first request.
/// </summary>
/// <param name="colorId">Identifier to associate with a color.</param>
/// <returns>The cached or newly generated color.</returns>
public Color GetUniqueColor(int colorId)
{
    // Single lookup via TryGetValue instead of ContainsKey + indexer
    // (the original hashed the key twice on the hit path).
    Color c;
    if (m_mapColor.TryGetValue(colorId, out c))
    {
        return(c);
    }
    c = new Color(random.Rnd(), random.Rnd(), random.Rnd());
    m_mapColor.Add(colorId, c);
    return(c);
}
/// <summary>
/// Adds a key/value pair. A null key is supported and stored in a dedicated
/// slot; all other keys are delegated to the wrapped dictionary.
/// </summary>
/// <param name="key">Key to add; may be null.</param>
/// <param name="value">Value to associate with the key.</param>
/// <exception cref="ArgumentException">
/// Thrown when a value for the null key has already been stored.
/// </exception>
public void Add(TKey key, TValue value)
{
    // Non-null keys take the normal path (duplicate handling is _wrapped's).
    if (key != null)
    {
        _wrapped.Add(key, value);
        return;
    }
    // Null key: only one entry may occupy the dedicated null slot.
    if (_hasNull)
    {
        throw new ArgumentException("An element for the null key already exists.", nameof(key));
    }
    SetForNull(value);
}
/// <summary>
/// Parses an INI file into a section → (key → value) cache and wraps it in an
/// IniFile. Returns null when <paramref name="fileName"/> is null or empty.
/// </summary>
/// <param name="fileName">Path of the INI file to load.</param>
/// <returns>The parsed IniFile, or null for a missing file name.</returns>
public static IniFile LoadFile(string fileName)
{
    // Validate before doing any work (the original allocated the cache first).
    if (string.IsNullOrEmpty(fileName))
    {
        return(null);
    }

    var tempCache = new DictionaryEx <string, DictionaryEx <string, string> >();
    tempCache.IsAutoAddKeyPair = true;

    string contents = FileManager.GetFileContents(fileName);

    // SectionReg matches "[Section]\r\n" headers; Split yields the body text
    // following each header (index 0 is the preamble before the first header,
    // hence the counter starting at 1).
    string[] sections = SectionReg.Split(contents);
    MatchCollection sectionHeaders = SectionReg.Matches(contents);

    // Hoisted out of the loops — these separators never change.
    string[] lineSeparator = { "\r\n" };
    string[] pairSeparator = { "=" };

    int counter = 1;
    foreach (Match sectionHeader in sectionHeaders)
    {
        string[] items = sections[counter].Split(lineSeparator, StringSplitOptions.RemoveEmptyEntries);
        var sectionValues = new DictionaryEx <string, string>();
        sectionValues.IsAutoAddKeyPair = true;
        foreach (string item in items)
        {
            // Split each line once (the original split it twice). Lines without
            // an '=' are skipped instead of throwing IndexOutOfRangeException.
            // Note: only the first '='-delimited segment after the key is kept,
            // matching the original behavior for values containing '='.
            string[] pair = item.Split(pairSeparator, StringSplitOptions.None);
            if (pair.Length < 2)
            {
                continue;
            }
            sectionValues.Add(pair[0], pair[1]);
        }
        tempCache.Add(sectionHeader.Value.Replace("[", "").Replace("]\r\n", ""), sectionValues);
        ++counter;
    }
    return(new IniFile(tempCache) { FilePath = fileName });
}
/// <summary>
/// Builds a name-to-value map for every member of <typeparamref name="TEnum"/>.
/// </summary>
/// <param name="ignoreCase">When true, names are compared case-insensitively.</param>
/// <returns>Read-only map from member name to member value.</returns>
private static IReadOnlyDictionary <string, TEnum> GetNameValuesCore(bool ignoreCase)
{
    // Newer frameworks use the BCL Dictionary; older ones fall back to DictionaryEx.
#if NET45_OR_GREATER || TARGETS_NETSTANDARD || TARGETS_NETCOREAPP
    var result = new Dictionary <string, TEnum>(ignoreCase ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal);
#else
    var result = new DictionaryEx <string, TEnum>(ignoreCase ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal);
#endif
    // GetNames and GetValuesCore are index-aligned: names[i] maps to values[i].
    var names = Enum.GetNames(typeof(TEnum));
    var values = GetValuesCore();
    for (var i = 0; i < names.Length; i++)
    {
        // DONTTOUCH: case may be ignored
        // With a case-insensitive comparer, differently-cased duplicate names
        // are skipped, so the first occurrence wins.
        if (result.ContainsKey(names[i]))
        {
            continue;
        }
        result.Add(names[i], values[i]);
    }
    return(result);
}
/// <summary>
/// Builds a name-to-value map for every member of <typeparamref name="TEnum"/>.
/// </summary>
/// <param name="ignoreCase">When true, names are compared case-insensitively.</param>
/// <returns>Read-only map from member name to member value.</returns>
private static IReadOnlyDictionary <string, TEnum> GetNameValuesCore(bool ignoreCase)
{
    // Pre-4.5 frameworks use the DictionaryEx fallback; otherwise the BCL Dictionary.
    var result =
#if LESSTHAN_NET45
        new DictionaryEx <string, TEnum>(ignoreCase ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal);
#else
        new Dictionary <string, TEnum>(ignoreCase ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal);
#endif
    // GetNames and GetValues are index-aligned: names[i] maps to values[i].
    var names = Enum.GetNames(typeof(TEnum));
    var values = (TEnum[])Enum.GetValues(typeof(TEnum));
    for (var i = 0; i < names.Length; i++)
    {
        // DONTTOUCH: case may be ignored
        // With a case-insensitive comparer, differently-cased duplicate names
        // are skipped, so the first occurrence wins.
        if (result.ContainsKey(names[i]))
        {
            continue;
        }
        result.Add(names[i], values[i]);
    }
    return(result);
}
/// <summary>
/// Expands the current collection of obscuring <see cref="Graph"/> nodes with all neighbors
/// of the specified node, within maximum world distance from the source node.</summary>
/// <param name="node">
/// The <see cref="Graph"/> node whose neighbors to examine.</param>
/// <remarks><para>
/// <b>FindObscuringNodes</b> recursively visits all directly connected nodes, and adds them
/// to an internal collection of obscuring nodes if they are opaque. Nodes which are fully
/// obscured by other obscuring nodes are removed from the collection.
/// </para><para>
/// <b>FindObscuringNodes</b> never revisits nodes that were already examined. All visited
/// nodes are added to <see cref="NodeArcs"/> for later processing by <see
/// cref="FindVisibleNodes"/>.</para></remarks>
private void FindObscuringNodes(T node)
{
    // get valid neighbors of current node
    IList <T> neighbors = Graph.GetNeighbors(node);

    // recurse into all valid neighbors
    for (int i = 0; i < neighbors.Count; i++)
    {
        T neighbor = neighbors[i];

        // skip source and previously visited nodes
        if (ComparerCache <T> .EqualityComparer.Equals(_source, neighbor) ||
            _nodeArcs.ContainsKey(neighbor))
        {
            continue;
        }

        // compute tangential arc and source distance
        NodeArc arc = CreateNodeArc(neighbor);

        // skip nodes beyond maximum distance (zero _distance disables the limit)
        if (_distance > 0 && arc.Distance > _distance)
        {
            continue;
        }

        // record visited node with tangential arc
        _nodeArcs.Add(neighbor, arc);

        // nothing else to do for transparent nodes, but still recurse into them
        if (!_isOpaque(neighbor))
        {
            goto nextNeighbor;
        }

        /*
         * Try adding current opaque node to list of all obscuring nodes recorded so far.
         *
         * If any single recorded node completely obscures the current node, we skip it.
         * If the current node completely obscures any recorded nodes, we delete those.
         *
         * We also clear the VisiblityFraction for all completely obscured nodes (current
         * or recorded) so we won't waste time testing them again in FindVisibleNodes.
         */

        foreach (var pair in _obscuringNodes)
        {
            int result = arc.IsObscured(pair.Value);
            // result < 0: a recorded node fully obscures the new arc — discard it
            if (result < 0)
            {
                arc._visibleFraction = 0;
                goto nextNeighbor;
            }
            // result > 0: the new arc fully obscures a recorded node — queue removal
            // (deferred, since _obscuringNodes cannot be mutated while enumerating)
            if (result > 0)
            {
                pair.Value._visibleFraction = 0;
                _removeNodes.Add(pair.Key);
            }
        }

        // remove obscuring nodes that were themselves obscured
        for (int j = 0; j < _removeNodes.Count; j++)
        {
            _obscuringNodes.Remove(_removeNodes[j]);
        }
        _removeNodes.Clear();

        // add neighbor to obscuring nodes
        _obscuringNodes.Add(neighbor, arc);

nextNeighbor:
        // depth-first expansion from the neighbor just processed
        FindObscuringNodes(neighbor);
    }
}
/// <summary>
/// Benchmarks repeated DictionaryEx&lt;string, string&gt; fill plus explicit
/// Resize (rehash) cycles and prints the elapsed time. A running checksum over
/// Count/BucketCount is printed so the work cannot be optimized away.
/// </summary>
static void dictionary_rehash2_benchmark()
{
    Console.Write("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=\n");
    Console.Write("  dictionary_rehash2_benchmark()\n");
    Console.Write("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=\n");
    Console.Write("\n");

    int kHeaderFieldSize = header_fields.Length;
    // Fewer repeats in DEBUG so the benchmark stays fast without optimizations.
#if DEBUG
    int kRepeatTimes = 200;
#else
    int kRepeatTimes = (kIterations / kHeaderFieldSize / 2);
#endif

    // Materialize key/value strings up front so string creation is not timed.
    string[] field_str = new string[kHeaderFieldSize];
    string[] index_str = new string[kHeaderFieldSize];
    for (int i = 0; i < kHeaderFieldSize; ++i)
    {
        field_str[i] = header_fields[i];
        index_str[i] = string.Format("{0}", i);
    }

    {
        long checksum = 0;
        int buckets = 128;
        double totalTime = 0.0;
        Stopwatch sw = new Stopwatch();

        sw.Restart();
        for (int i = 0; i < kRepeatTimes; ++i)
        {
            // Fresh dictionary per repeat; fill, then force 8 rehash passes.
            DictionaryEx <string, string> dict = new DictionaryEx <string, string>();
            for (int j = 0; j < kHeaderFieldSize; ++j)
            {
                if (!dict.ContainsKey(field_str[j]))
                {
                    dict.Add(field_str[j], index_str[j]);
                }
            }
            checksum += dict.Count();
            checksum += dict.BucketCount;

            // Resize to 128, then double seven times (up to 16384 buckets).
            buckets = 128;
            dict.Resize(buckets);
            checksum += dict.BucketCount;
            for (int j = 0; j < 7; ++j)
            {
                buckets *= 2;
                dict.Resize(buckets);
                checksum += dict.BucketCount;
            }
        }
        sw.Stop();

        totalTime += sw.Elapsed.TotalMilliseconds;

        Console.Write("-------------------------------------------------------------------------\n");
        Console.Write(" {0,-28}  ", "DictionaryEx<string, string>");
        Console.Write("sum = {0,-10:g}  time: {1,8:f} ms\n", checksum, totalTime);
        Console.Write("-------------------------------------------------------------------------\n");
        Console.Write("\n");
    }
}
/// <summary>
/// Benchmarks repeated DictionaryEx&lt;String, String&gt; fill plus explicit
/// Resize (rehash) cycles, optionally with interned key strings, and prints
/// the elapsed time with a checksum that keeps the work observable.
/// </summary>
static void hashtable_rehash2_benchmark_impl()
{
    int kHeaderFieldSize = header_fields.Length;
    // Fewer repeats in DEBUG so the benchmark stays fast without optimizations.
#if DEBUG
    int kRepeatTimes = 200;
#else
    int kRepeatTimes = (kIterations / kHeaderFieldSize / 2);
#endif

    // Materialize key/value strings up front so string creation is not timed.
    String[] field_str = new String[kHeaderFieldSize];
    String[] index_str = new String[kHeaderFieldSize];
    for (int i = 0; i < kHeaderFieldSize; ++i)
    {
        // NO_STRING_INTERNING forces fresh (non-interned) string instances so
        // reference-equality shortcuts inside the dictionary cannot help.
#if NO_STRING_INTERNING
        field_str[i] = String.Format("{0}", header_fields[i].ToCharArray());
        index_str[i] = String.Format("{0}", i);
#else
        field_str[i] = String.Intern(header_fields[i]);
        index_str[i] = String.Intern(String.Format("{0}", i));
#endif
    }

    {
        long checksum = 0;
        int buckets = 128;
        double totalTime = 0.0;
        Stopwatch sw = new Stopwatch();

        sw.Restart();
        for (int i = 0; i < kRepeatTimes; ++i)
        {
            // Fresh dictionary per repeat; fill, then force 8 rehash passes.
            DictionaryEx <String, String> dict = new DictionaryEx <String, String>();
            for (int j = 0; j < kHeaderFieldSize; ++j)
            {
                if (!dict.ContainsKey(field_str[j]))
                {
                    dict.Add(field_str[j], index_str[j]);
                }
            }
            checksum += dict.Count();
            checksum += dict.BucketCount;

            // Resize to 128, then double seven times (up to 16384 buckets).
            buckets = 128;
            dict.Resize(buckets);
            checksum += dict.BucketCount;
            for (int j = 0; j < 7; ++j)
            {
                buckets *= 2;
                dict.Resize(buckets);
                checksum += dict.BucketCount;
            }
        }
        sw.Stop();

        totalTime += sw.Elapsed.TotalMilliseconds;

        //Console.Write("-------------------------------------------------------------------------\n");
        Console.Write(" {0,-28}  ", "DictionaryEx<String, String>");
        Console.Write("sum = {0,-10:g}  time: {1,8:f} ms\n", checksum, totalTime);
        //Console.Write("-------------------------------------------------------------------------\n");
        Console.Write("\n");
    }
}
/// <summary>
/// Micro-benchmark comparing class <c>Tuple</c> keys against <c>StructTuple</c>
/// keys for dictionary add-or-update. Returns an accumulated counter so the
/// measured work is observably used and cannot be eliminated by the JIT.
/// </summary>
static int MeasureTupleVsKeyValuePairInDictionaryLookup()
{
    const int iterations = 1000000;
    int ran = 0; // incremented inside the timed lambdas; returned as the "result"

    // Pre-populate both maps with 200 distinct keys.
    var tupleMap = new DictionaryEx<Tuple<int, int, int, DateTime>, int>(200);
    var structMap = new DictionaryEx<StructTuple<int, int, int, DateTime>, int>(200);
    for (int i = 200 - 1; i >= 0; i--)
    {
        tupleMap.Add(new Tuple<int, int, int, DateTime>(400 - i, i + 1, i - 1, new DateTime(1990 + i % 10, 5, 2)), 0);
        structMap.Add(new StructTuple<int, int, int, DateTime>(400 - i, i + 1, i - 1, new DateTime(1990 + i % 10, 5, 2)), 0);
    }

    // Tuple-key lookup variant kept for reference; currently disabled.
    //CodeTimer.Time(true, "Tuple lookup",
    //               iterations,
    //               () =>
    //               {
    //                   int value;
    //                   if (tupleMap.TryGetValue(new Tuple<int, int, int, DateTime>(390, 11, 9, new DateTime(1990, 5, 2)), out value))
    //                       ran += 1;
    //               });

    // Variant 1: TryGetValue followed by indexer write or Add (two lookups).
    CodeTimer.Time(true, "Struct lookup outer add or update",
                   iterations,
                   () =>
    {
        var key = new StructTuple<int, int, int, DateTime>(390, 11, 9, new DateTime(1990, 5, 2));
        int a = 1;
        int value;
        if (structMap.TryGetValue(key, out value))
            structMap[key] = value + a;
        else
            structMap.Add(key, a);
        ran += 1;
    });

    // Variant 2: single-call AddOrUpdate with a merge delegate (one lookup).
    CodeTimer.Time(true, "Struct lookup AddOrUpdate",
                   iterations,
                   () =>
    {
        int a = 1;
        structMap.AddOrUpdate(
            new StructTuple<int, int, int, DateTime>(390, 11, 9, new DateTime(1990, 5, 2)),
            a,
            (k, cv, nv) => cv + nv);
        ran += 1;
    });

    return ran;
}