public void AddIn()
{
    // Add(in T) must accept read-only references and store each distinct
    // value exactly once.
    FastHashSet<int> set = new FastHashSet<int>();

    for (int value = 1; value <= 12; value++)
    {
        int local = value;
        set.Add(in local);
    }

    Assert.Contains(1, set);
    Assert.True(set.Count == 12);
}
// Builds two benchmark input arrays and seeds any supplied collections with
// the unique values.  uniqueArray is 100% unique; mixedArray is ~50% values
// drawn from uniqueArray (guaranteed duplicates, e.g. 1, 1, 2, 2) and ~50%
// fresh random values.
public static void PopulateCollections25_25_50PctUnique(int maxN, out int[] uniqueArray, out int[] mixedArray, SCG.HashSet<int> h, FastHashSet<int> f = null, C5.HashSet<int> c5 = null, SCG.SortedSet<int> sortedSet = null, SCG.List<int> lst = null)
{
    uniqueArray = new int[maxN];
    mixedArray = new int[maxN];

    // Fixed seed keeps the benchmark data reproducible across runs.
    Random rnd = new Random(89);

    BenchUtil.PopulateIntArray(uniqueArray, rnd, int.MinValue, int.MaxValue, 1.0);

    // Half of mixedArray comes from uniqueArray; guard against maxN < 2.
    int uniqueValuesCount = maxN / 2;
    if (uniqueValuesCount == 0)
    {
        uniqueValuesCount = 1;
    }

    BenchUtil.PopulateIntArrayFromUniqueArray(mixedArray, rnd, uniqueArray, uniqueValuesCount);
    BenchUtil.PopulateIntArrayAtRandomIndices(mixedArray, rnd, int.MinValue, int.MaxValue, maxN - uniqueValuesCount);

    // Seed whichever collections the caller provided with the unique values.
    if (h != null)
    {
        foreach (int v in uniqueArray)
        {
            h.Add(v);
        }
    }

    if (f != null)
    {
        foreach (int v in uniqueArray)
        {
            f.Add(v);
        }
    }

    if (c5 != null)
    {
        foreach (int v in uniqueArray)
        {
            c5.Add(v);
        }
    }

    if (sortedSet != null)
    {
        foreach (int v in uniqueArray)
        {
            sortedSet.Add(v);
        }
    }

    if (lst != null)
    {
        foreach (int v in uniqueArray)
        {
            lst.Add(v);
        }
        lst.Sort();
    }
}
public void ContainsIn2()
{
    // A value added via Add(in T) must be found by Contains(in T).
    FastHashSet<int> set = new FastHashSet<int>();
    int value = 1;

    set.Add(in value);

    Assert.True(set.Contains(in value));
}
public void AddIn2()
{
    // Adding a single value by read-only reference makes it a member.
    FastHashSet<int> set = new FastHashSet<int>();
    int value = 1;

    set.Add(in value);

    Assert.Contains(1, set);
}
// Creates the cache with a pre-allocated pool of poolSize entries so that
// later lookups never have to allocate.
public HeightMapCache(int poolSize)
{
    mSectors = new FastHashSet<int>(16);

    mSectorsPool = new HeightMapInfoPoolEntry[poolSize];
    int i = 0;
    while (i < mSectorsPool.Length)
    {
        mSectorsPool[i] = new HeightMapInfoPoolEntry();
        i++;
    }

    mLastSector = -1; // -1 marks "no sector cached yet"
}
// Wires up the filter-list middleware: stores the logger, creates the download
// client and an (initially empty) filter set, subscribes to option changes, and
// starts the initial list download.
// NOTE(review): "_ = InitializeAsync(...)" is fire-and-forget; any exception it
// throws is unobserved — confirm this is intentional.
// NOTE(review): WebClient is deprecated in favor of HttpClient/IHttpClientFactory —
// consider migrating when the field type can change.
public FilterListsMiddleware(IOptionsMonitor<FilterListsMiddlewareOptions> Options, ILogger Logger) : base()
{
    this.Logger = Logger;
    WebClient = new WebClient();
    Filter = new FastHashSet<string>();
    // Re-run configuration whenever the monitored options change.
    Options.OnChange(OptionsOnChange);
    // Kick off initialization with the currently configured list IDs.
    _ = InitializeAsync(Options.CurrentValue.IDs);
}
// XOR-folds the result of removing every previously located item from a fresh
// copy of the set; copying keeps _hashSet intact between invocations.
public bool Remove()
{
    var itemsToRemove = _found;
    var workingCopy = new FastHashSet<T>(_hashSet);
    var result = false;

    foreach (var item in itemsToRemove)
    {
        result ^= workingCopy.Remove(item);
    }

    return result;
}
public void RemoveIf()
{
    // Populate 1..12 by read-only reference.
    FastHashSet<int> set = new FastHashSet<int>();
    for (int value = 1; value <= 12; value++)
    {
        int local = value;
        set.Add(in local);
    }

    int one = 1;
    int two = 2;

    // Predicate matches (1 < 2): the item is removed.
    bool isRemoved = set.RemoveIf(in one, n => n < 2);
    Assert.True(isRemoved);
    Assert.DoesNotContain(1, set);

    // Predicate fails (2 < 2): the item stays.
    isRemoved = set.RemoveIf(two, n => n < 2);
    Assert.False(isRemoved);
    Assert.Contains(two, set);

    // Re-add 1; predicate fails (1 > 2): the item stays.
    set.Add(in one);
    bool isRemoved2 = set.RemoveIf(in one, n => n > 2);
    Assert.False(isRemoved2);
    Assert.Contains(1, set);
}
public void RemoveIf2()
{
    FastHashSet<int> set = new FastHashSet<int>();
    int value = 1;
    set.Add(in value);

    // Matching predicate (1 < 2) removes the element.
    bool removedWhenMatch = set.RemoveIf(in value, n => n < 2);
    Assert.True(removedWhenMatch);
    Assert.DoesNotContain(1, set);

    // Non-matching predicate (1 > 2) leaves the element in place.
    set.Add(in value);
    bool removedWhenNoMatch = set.RemoveIf(in value, n => n > 2);
    Assert.False(removedWhenNoMatch);
    Assert.Contains(1, set);
}
public void ContainsIn()
{
    // Add 1..12 by read-only reference, remove 12, then check membership of
    // every remaining value (and absence of the removed one) via Contains(in T).
    FastHashSet<int> set = new FastHashSet<int>();
    for (int value = 1; value <= 12; value++)
    {
        int local = value;
        set.Add(in local);
    }

    set.Remove(12);

    for (int value = 1; value <= 11; value++)
    {
        int local = value;
        Assert.True(set.Contains(in local));
    }

    int removed = 12;
    Assert.False(set.Contains(in removed));
}
// Benchmark driver: times Contains() over mixed (50%-hit) data for
// SCG.HashSet, FastHashSet, and C5.HashSet across a range of N values,
// then writes per-N summaries to the output file with SCG as the baseline.
static void Main(string[] args)
{
    //string outputFileName = @"e:\\proj\\summary.tsv";
    //int minN = 100_000;
    //int maxN = 1_000_000;
    //int incrementNBy = 10_000;

    string errMsg = PerfUtil.GetCmdLineParams_OutputFileAndMinMaxIncN(args, out int minN, out int maxN, out int incrementNBy, out string outputFileName);

    // Build the list of N values to benchmark: minN, minN+inc, ..., maxN.
    int nCount = ((maxN - minN) / incrementNBy) + 1;
    int[] nArray = new int[nCount];
    int idx = 0;
    for (int n = minN; n <= maxN; n += incrementNBy, idx++)
    {
        nArray[idx] = n;
    }

    const int LoopUnrollCount = 1;
    const int IterartionCount = 512;
    const int IterartionWarmupCount = 16;

    // One raw tick sample per (N, iteration, unroll) for each collection type.
    long[] ticksH = new long[nArray.Length * IterartionCount * LoopUnrollCount];
    int ticksIdxForH = 0;
    long[] ticksF = new long[nArray.Length * IterartionCount * LoopUnrollCount];
    int ticksIdxForF = 0;
    long[] ticksC = new long[nArray.Length * IterartionCount * LoopUnrollCount];
    int ticksIdxForC = 0;

    long startTicks;
    // Overhead of a GetTimestamp call pair; subtracted later during summarization.
    double overheadTicks = PerfUtil.GetTimestampOverheadInNanoSeconds();

    int[] a;
    int[] c;
    SCG.HashSet<int> h = new HashSet<int>();
    FastHashSet<int> f = new FastHashSet<int>();
    C5.HashSet<int> c5 = new C5.HashSet<int>();
    // c gets ~50% values present in the sets, so Contains hits about half the time.
    HashSetBench.BenchUtil.PopulateCollections25_25_50PctUnique(maxN, out a, out c, h, f, c5);

    // not sure if we should run benchmark 1 and then benchmark 2 separately so
    // that the presence of the one doesn't effect the other???
    // in practice they will probably not be run together one after the other
    PerfUtil.DoGCCollect();

    int N;
    for (int j = 0; j < nArray.Length; j++)
    {
        N = nArray[j];

        // Warmup: runs the same loops untimed so JIT cost is excluded from the
        // measured iterations.
        // NOTE(review): warming up also populates CPU caches with exactly this
        // data, which may be unrealistically favorable; JIT time does happen in
        // real running code — trade-off acknowledged by the original author.
        for (int iterationIdx = 0; iterationIdx < IterartionWarmupCount; iterationIdx++)
        {
            // SCG_Contains
            for (int i = 0; i < N; i++)
            {
                h.Contains(c[i]);
            }
            // Fast_Contains
            for (int i = 0; i < N; i++)
            {
                f.Contains(c[i]);
            }
            for (int i = 0; i < N; i++)
            {
                c5.Contains(c[i]);
            }
        }

        // Timed iterations.  The loop bookkeeping sits outside the timestamp
        // pair; the residual GetTimestamp overhead itself is corrected for via
        // overheadTicks at summarization time.
        for (int iterationIdx = 0; iterationIdx < IterartionCount; iterationIdx++)
        {
            // 1
            startTicks = Stopwatch.GetTimestamp();
            for (int i = 0; i < N; i++)
            {
                h.Contains(c[i]);
            }
            ticksH[ticksIdxForH++] = Stopwatch.GetTimestamp() - startTicks;

            startTicks = Stopwatch.GetTimestamp();
            for (int i = 0; i < N; i++)
            {
                f.Contains(c[i]);
            }
            ticksF[ticksIdxForF++] = Stopwatch.GetTimestamp() - startTicks;

            startTicks = Stopwatch.GetTimestamp();
            for (int i = 0; i < N; i++)
            {
                c5.Contains(c[i]);
            }
            ticksC[ticksIdxForC++] = Stopwatch.GetTimestamp() - startTicks;
        }
    }

    // summarize and output the data
    BenchmarkSummaries summaries = new BenchmarkSummaries();
    summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadTicks, nArray, IterartionCount * LoopUnrollCount, ticksH), "SCG_Contains");
    summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadTicks, nArray, IterartionCount * LoopUnrollCount, ticksF), "Fast_Contains");
    summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadTicks, nArray, IterartionCount * LoopUnrollCount, ticksC), "C5_Contains");
    // The last argument names the baseline series for comparison in the output.
    summaries.OutputSummariesToFile(outputFileName, "SCG_Contains");
}
// Benchmark driver: times FastHashSet.Contains over mixed (50%-hit) data and
// records the measurement (or any error) in the performance database.
static void Main(string[] args)
{
    // cmd line params variables
    string dbConnStr = null;
    int runID = 0;
    int benchmarkMethodID = 0;
    int n;
    int maxN;
    try
    {
        DateTime startTime = DateTime.Now;
        //Console.WriteLine($"Args Count:" + args.Length.ToString());
        //foreach (string s in args)
        //{
        //    Console.WriteLine(s);
        //}
        //Console.ReadKey();
        string errMsg = PerfUtil.GetCmdLineParams_DbNAndMaxN(args, out dbConnStr, out runID, out benchmarkMethodID, out n, out maxN);
        //if (errMsg != null)
        //{
        //    Console.WriteLine(errMsg);
        //}
        //Console.WriteLine($"Args: {dbConnStr}; {runID.ToString()}; {benchmarkMethodID.ToString()}; {n.ToString()}; {maxN.ToString()}");
        //Console.ReadKey();

        int[] a;
        int[] c;
        FastHashSet<int> set = new FastHashSet<int>();
        BenchUtil.PopulateArrays25_25_50PctUnique(maxN, out a, out c);

        // in a real world scenario, we will have probably have recently added
        // the items into the set, so no need to try to clear the cache or anything
        for (int i = 0; i < maxN; i++)
        {
            set.Add(a[i]);
        }

        double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds();
        PerfUtil.DoGCCollect();

        int iterations = 1;
        long startTicks;
        long endTicks;
        double ticks;

        // this is enough to jit things and not put everything in the cache
        bool isContained = set.Contains(0);

        if (maxN <= 1000)
        {
            iterations = 1;
            // The time for fewer than ~1000 lookups is too small to measure, so
            // time maxN lookups and scale the result down to n lookups below.
            startTicks = Stopwatch.GetTimestamp();
            for (int i = 0; i < maxN; i++)
            {
                set.Contains(c[i]);
            }
            endTicks = Stopwatch.GetTimestamp();
            // Scale total ticks for maxN lookups to the equivalent for n lookups.
            ticks = ((endTicks - startTicks) * n) / (double)maxN;
        }
        else
        {
            iterations = 1;
            startTicks = Stopwatch.GetTimestamp();
            for (int i = 0; i < n; i++) // loop overhead is ok because we assume there will be some loop in real-world scenario
            {
                set.Contains(c[i]);
            }
            endTicks = Stopwatch.GetTimestamp();
            ticks = (double)(endTicks - startTicks);
        }

        double nanoSecs = PerfUtil.GetNanoSecondsFromTicks(ticks, Stopwatch.Frequency) - overheadNanoSecs;
        PerfDb.InsertMeasurement(dbConnStr, runID, benchmarkMethodID, n, iterations, nanoSecs, startTime, DateTime.Now);
    }
    catch (Exception ex)
    {
        Console.Write(ex.ToString());
        if (!string.IsNullOrEmpty(dbConnStr))
        {
            // write error to db
            PerfDb.InsertRunError(dbConnStr, runID, benchmarkMethodID, ex);
        }
        else
        {
            // log error to file
        }
    }
}
// Hash-code support for the comparer is intentionally unimplemented.
public int GetHashCode([DisallowNull] FastHashSet<T> obj) =>
    throw new NotImplementedException();
// Set-to-set equality for the comparer is intentionally unimplemented.
public bool Equals([AllowNull] FastHashSet<T> x, [AllowNull] FastHashSet<T> y) =>
    throw new NotImplementedException();
public void FindOrAdd() { FastHashSet <IDCount> set = new FastHashSet <IDCount>(); IDCount x = new IDCount(1, 1); ref IDCount xRef = ref set.FindOrAdd(in x, out bool isFound);
// Benchmark driver: times adding n two-int SmallStruct values to a FastHashSet
// and records the measurement (or any error) in the performance database.
static void Main(string[] args)
{
    // cmd line params variables
    string dbConnStr = null;
    int runID = 0;
    int benchmarkMethodID = 0;
    int n;
    int maxN;
    try
    {
        DateTime startTime = DateTime.Now;
        //Console.WriteLine($"Args Count:" + args.Length.ToString());
        //foreach (string s in args)
        //{
        //    Console.WriteLine(s);
        //}
        //Console.ReadKey();
        string errMsg = PerfUtil.GetCmdLineParams_DbNAndMaxN(args, out dbConnStr, out runID, out benchmarkMethodID, out n, out maxN);
        //if (errMsg != null)
        //{
        //    Console.WriteLine(errMsg);
        //}
        //Console.WriteLine($"Args: {dbConnStr}; {runID.ToString()}; {benchmarkMethodID.ToString()}; {n.ToString()}; {maxN.ToString()}");
        //Console.ReadKey();

        // Two parallel arrays of random ints form the SmallStruct(x, y) inputs.
        // Fixed seed keeps runs reproducible.
        int[] a = new int[n];
        int[] a2 = new int[n];
        Random rand = new Random(89);
        for (int i = 0; i < a.Length; i++)
        {
            a[i] = rand.Next();
            a2[i] = rand.Next();
        }

        // Warmup set JITs Add(SmallStruct) without touching the measured set.
        FastHashSet<SmallStruct> setWarmup = new FastHashSet<SmallStruct>();
        setWarmup.Add(new SmallStruct(1, 2));

        FastHashSet<SmallStruct> set = new FastHashSet<SmallStruct>();

        double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds();
        PerfUtil.DoGCCollect();

        int iterations = 1;
        long startTicks;
        long endTicks;
        double ticks;

        // this is enough to jit things and not put everything in the cache
        //bool isContained = set.Contains(0);

        iterations = 1;
        startTicks = Stopwatch.GetTimestamp();
        for (int i = 0; i < a.Length; i++)
        {
            set.Add(new SmallStruct(a[i], a2[i]));
        }
        endTicks = Stopwatch.GetTimestamp();
        ticks = (double)(endTicks - startTicks);

        double nanoSecs = PerfUtil.GetNanoSecondsFromTicks(ticks, Stopwatch.Frequency) - overheadNanoSecs;
        PerfDb.InsertMeasurement(dbConnStr, runID, benchmarkMethodID, n, iterations, nanoSecs, startTime, DateTime.Now);
    }
    catch (Exception ex)
    {
        Console.Write(ex.ToString());
        if (!string.IsNullOrEmpty(dbConnStr))
        {
            // write error to db
            PerfDb.InsertRunError(dbConnStr, runID, benchmarkMethodID, ex);
        }
        else
        {
            // log error to file
        }
    }
}
public void SetUp()
{
    // Fixture setup: remember the generated items and build the set under
    // test from that same collection.
    var items = GetItems();
    _found = items;
    _hashSet = new FastHashSet<T>(items);
}