public static void PopulateCollections25_25_50PctUnique(int maxN, out int[] uniqueArray, out int[] mixedArray,
    SCG.HashSet<int> h, FastHashSet<int> f = null, C5.HashSet<int> c5 = null, SCG.SortedSet<int> sortedSet = null, SCG.List<int> lst = null)
{
    uniqueArray = new int[maxN];
    mixedArray = new int[maxN];

    Random rand = new Random(89);

    BenchUtil.PopulateIntArray(uniqueArray, rand, int.MinValue, int.MaxValue, 1.0); // uniqueArray should have 100% unique values

    // this should produce a mixedArray where 50% of the values are unique (the other 50% are duplicates),
    // all drawn from uniqueArray, so 1, 1, 2, 2 would be an example of this
    int uniqueValuesCount = maxN / 2;
    if (uniqueValuesCount == 0)
    {
        uniqueValuesCount = 1;
    }
    BenchUtil.PopulateIntArrayFromUniqueArray(mixedArray, rand, uniqueArray, uniqueValuesCount);
    BenchUtil.PopulateIntArrayAtRandomIndices(mixedArray, rand, int.MinValue, int.MaxValue, maxN - uniqueValuesCount);

    if (h != null)
    {
        for (int i = 0; i < maxN; i++)
        {
            h.Add(uniqueArray[i]);
        }
    }

    if (f != null)
    {
        for (int i = 0; i < maxN; i++)
        {
            f.Add(uniqueArray[i]);
        }
    }

    if (c5 != null)
    {
        for (int i = 0; i < maxN; i++)
        {
            c5.Add(uniqueArray[i]);
        }
    }

    if (sortedSet != null)
    {
        for (int i = 0; i < maxN; i++)
        {
            sortedSet.Add(uniqueArray[i]);
        }
    }

    if (lst != null)
    {
        for (int i = 0; i < maxN; i++)
        {
            lst.Add(uniqueArray[i]);
        }
        lst.Sort();
    }
}
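// A minimal usage sketch (not from the original source): how a benchmark might call the
// populate helper above. FastHashSet<T> and the SCG alias (System.Collections.Generic) come
// from this codebase; the method and local names below are illustrative only.
public static void ExamplePopulateUsage()
{
    var hashSet = new SCG.HashSet<int>();
    var fastSet = new FastHashSet<int>();

    PopulateCollections25_25_50PctUnique(10000, out int[] uniqueArray, out int[] mixedArray, hashSet, fastSet);

    // both sets now hold the values of uniqueArray; mixedArray can serve as probe data
    // where roughly half of the lookups should hit
}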
private async Task InitializeAsync(int[] IDs)
{
    try
    {
        Logger.Verbose("FilterLists Initialization Started.");

        var Lists = await GetFilterListsAsync(IDs);

        Logger.Information("FilterLists Initialization Started with {@Count} Selected Lists.", $"{Lists.Count:n0}");

        string File = null;
        string[] Domains = null;

        foreach (var List in Lists)
        {
            try
            {
                File = await WebClient.DownloadStringTaskAsync(List.ViewUrl);
                Domains = Parse(File);

                foreach (var Domain in Domains)
                {
                    if (!Filter.Contains(Domain))
                    {
                        Filter.Add(Domain);
                    }
                }

                Logger.Verbose("{@FilterList} Download Completed.", List);
            }
            catch (Exception Error)
            {
                Logger.Error("{@Error} While Downloading {@FilterList}.", Error, List);
            }
        }

        Filter.TrimExcess();

        File = null;
        Domains = null;
        Lists = null;

        Logger.Information("FilterLists Initialization Completed with {@Count} Domains.", $"{Filter.Count:n0}");
    }
    catch (Exception Error)
    {
        Logger.Fatal("Fatal {@Error} Occurred While Initializing FilterLists.", Error);
    }
}
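// A hedged sketch (an assumption, not the project's actual Parse): one plausible shape for the
// Parse helper called above, treating the downloaded text as a hosts-style filter list - split
// into lines, skip comments and blanks, and take the last whitespace-separated token as the
// domain. Requires using System; and using System.Linq;.
private static string[] Parse(string File)
{
    return File
        .Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)
        .Select(Line => Line.Trim())
        .Where(Line => Line.Length > 0 && !Line.StartsWith("#") && !Line.StartsWith("!"))
        .Select(Line => Line.Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries).Last())
        .ToArray();
}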
static void Main(string[] args)
{
    // cmd line params variables
    string dbConnStr = null;
    int runID = 0;
    int benchmarkMethodID = 0;
    int n;
    int maxN;

    try
    {
        DateTime startTime = DateTime.Now;

        //Console.WriteLine($"Args Count:" + args.Length.ToString());
        //foreach (string s in args)
        //{
        //    Console.WriteLine(s);
        //}
        //Console.ReadKey();

        string errMsg = PerfUtil.GetCmdLineParams_DbNAndMaxN(args, out dbConnStr, out runID, out benchmarkMethodID, out n, out maxN);
        //if (errMsg != null)
        //{
        //    Console.WriteLine(errMsg);
        //}

        //Console.WriteLine($"Args: {dbConnStr}; {runID.ToString()}; {benchmarkMethodID.ToString()}; {n.ToString()}; {maxN.ToString()}");
        //Console.ReadKey();

        int[] a = new int[n];
        int[] a2 = new int[n];

        Random rand = new Random(89);
        for (int i = 0; i < a.Length; i++)
        {
            a[i] = rand.Next();
            a2[i] = rand.Next();
        }

        FastHashSet<SmallStruct> setWarmup = new FastHashSet<SmallStruct>();
        setWarmup.Add(new SmallStruct(1, 2));

        FastHashSet<SmallStruct> set = new FastHashSet<SmallStruct>();

        double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds();
        PerfUtil.DoGCCollect();

        int iterations = 1;
        long startTicks;
        long endTicks;
        double ticks;

        // this is enough to jit things and not put everything in the cache
        //bool isContained = set.Contains(0);

        iterations = 1;

        startTicks = Stopwatch.GetTimestamp();
        for (int i = 0; i < a.Length; i++)
        {
            set.Add(new SmallStruct(a[i], a2[i]));
        }
        endTicks = Stopwatch.GetTimestamp();
        ticks = (double)(endTicks - startTicks);

        double nanoSecs = PerfUtil.GetNanoSecondsFromTicks(ticks, Stopwatch.Frequency) - overheadNanoSecs;
        PerfDb.InsertMeasurement(dbConnStr, runID, benchmarkMethodID, n, iterations, nanoSecs, startTime, DateTime.Now);
    }
    catch (Exception ex)
    {
        Console.Write(ex.ToString());

        if (!string.IsNullOrEmpty(dbConnStr))
        {
            // write error to db
            PerfDb.InsertRunError(dbConnStr, runID, benchmarkMethodID, ex);
        }
        else
        {
            // log error to file
        }
    }
}
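// A hedged sketch (an assumption, not the actual PerfUtil source): the tick-to-nanosecond
// conversion used above is presumably the standard Stopwatch scaling, where frequency is
// ticks per second, so one tick lasts 1e9 / frequency nanoseconds.
public static double GetNanoSecondsFromTicks(double ticks, long frequency)
{
    return ticks * 1000000000.0 / frequency;
}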
static void Main(string[] args)
{
    // cmd line params variables
    string dbConnStr = null;
    int runID = 0;
    int benchmarkMethodID = 0;
    int n;
    int maxN;

    try
    {
        DateTime startTime = DateTime.Now;

        //Console.WriteLine($"Args Count:" + args.Length.ToString());
        //foreach (string s in args)
        //{
        //    Console.WriteLine(s);
        //}
        //Console.ReadKey();

        string errMsg = PerfUtil.GetCmdLineParams_DbNAndMaxN(args, out dbConnStr, out runID, out benchmarkMethodID, out n, out maxN);
        //if (errMsg != null)
        //{
        //    Console.WriteLine(errMsg);
        //}

        //Console.WriteLine($"Args: {dbConnStr}; {runID.ToString()}; {benchmarkMethodID.ToString()}; {n.ToString()}; {maxN.ToString()}");
        //Console.ReadKey();

        int[] a;
        int[] c;

        FastHashSet<int> set = new FastHashSet<int>();

        BenchUtil.PopulateArrays25_25_50PctUnique(maxN, out a, out c);

        // in a real-world scenario we will probably have recently added the items to the set,
        // so there is no need to try to clear the cache or anything
        for (int i = 0; i < maxN; i++)
        {
            set.Add(a[i]);
        }

        double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds();
        PerfUtil.DoGCCollect();

        int iterations = 1;
        long startTicks;
        long endTicks;
        double ticks;

        // this is enough to jit things and not put everything in the cache
        bool isContained = set.Contains(0);

        if (maxN <= 1000)
        {
            iterations = 1;

            // the time for just n lookups at these small sizes is too short to measure reliably,
            // so we probe all maxN elements and scale the elapsed ticks by n / maxN to estimate
            // the time for n lookups
            startTicks = Stopwatch.GetTimestamp();
            for (int i = 0; i < maxN; i++)
            {
                set.Contains(c[i]);
            }
            endTicks = Stopwatch.GetTimestamp();
            ticks = ((endTicks - startTicks) * n) / (double)maxN;
        }
        else
        {
            iterations = 1;

            startTicks = Stopwatch.GetTimestamp();
            for (int i = 0; i < n; i++) // loop overhead is OK because we assume there will be some loop in a real-world scenario
            {
                set.Contains(c[i]);
            }
            endTicks = Stopwatch.GetTimestamp();
            ticks = (double)(endTicks - startTicks);
        }

        double nanoSecs = PerfUtil.GetNanoSecondsFromTicks(ticks, Stopwatch.Frequency) - overheadNanoSecs;
        PerfDb.InsertMeasurement(dbConnStr, runID, benchmarkMethodID, n, iterations, nanoSecs, startTime, DateTime.Now);
    }
    catch (Exception ex)
    {
        Console.Write(ex.ToString());

        if (!string.IsNullOrEmpty(dbConnStr))
        {
            // write error to db
            PerfDb.InsertRunError(dbConnStr, runID, benchmarkMethodID, ex);
        }
        else
        {
            // log error to file
        }
    }
}
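// Worked example of the maxN <= 1000 scaling above (illustrative numbers, not measured data):
// the loop probes all maxN elements, but the measurement recorded in the database is for n
// lookups, so the elapsed ticks are scaled by n / maxN. With maxN = 1000, n = 250, and an
// elapsed time of 8000 ticks, the recorded value is 8000 * 250 / 1000.0 = 2000 ticks - the
// estimated cost of n lookups before conversion to nanoseconds.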
public HashSetPerf()
{
    var allKeys = new Key[]
    {
        _key1, _key2, _key3, _key4, _key5, _key6, _key7, _key8, _key9, _key10,
        _key11, _key12, _key13, _key14, _key15, _key16, _key17, _key18, _key19, _key20,
        _key21, _key22, _key23, _key24, _key25, _key26
    };

    for (var i = 0; i < 26; i++)
    {
        var str = ((char)('A' + i)).ToString();

        _stringDict.Add(str, str);
        _stringSet.Add(str);
        _stringFastSet.Add(str);

        _intDict.Add(i, i);
        _intSet.Add(i);
        _intFastSet.Add(i);

        var key = allKeys[i];

        _objDict.Add(key, key);
        _objSet.Add(key);
        _objFastSet.Add(key);

        _customDict.Add(key, key);
        _customSet.Add(key);
        _customFastSet.Add(key);
    }
}
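// A hedged sketch (an assumption, not the benchmark's actual types): the _custom* collections
// above presumably pass a custom IEqualityComparer<Key> to their constructors. KeyComparer and
// its reference-identity behavior are hypothetical. Requires using System.Collections.Generic;
// and using System.Runtime.CompilerServices;.
private sealed class KeyComparer : IEqualityComparer<Key>
{
    public bool Equals(Key x, Key y) => ReferenceEquals(x, y);

    public int GetHashCode(Key obj) => RuntimeHelpers.GetHashCode(obj);
}

// e.g. private readonly Dictionary<Key, Key> _customDict = new Dictionary<Key, Key>(new KeyComparer());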