static void Main(string[] args)
        {
            // cmd line params variables
            string dbConnStr         = null;
            int    runID             = 0;
            int    benchmarkMethodID = 0;
            int    n;
            int    maxN;

            try
            {
                DateTime startTime = DateTime.Now;
                //Console.WriteLine($"Args Count:" + args.Length.ToString());
                //foreach (string s in args)
                //{
                //	Console.WriteLine(s);
                //}
                //Console.ReadKey();

                string errMsg = PerfUtil.GetCmdLineParams_DbNAndMaxN(args, out dbConnStr, out runID, out benchmarkMethodID, out n, out maxN);
                if (errMsg != null)
                {
                    Console.WriteLine(errMsg);
                }
                //Console.WriteLine($"Args: {dbConnStr}; {runID.ToString()}; {benchmarkMethodID.ToString()}; {n.ToString()}; {maxN.ToString()}");
                //Console.ReadKey();

                int[] a;
                int[] c;

                HashSet<int> set = new HashSet<int>();

                BenchUtil.PopulateArrays25_25_50PctUnique(maxN, out a, out c);
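                // (assumption: per its name, PopulateArrays25_25_50PctUnique fills 'a' with the values to add to the set and
                // 'c' with the values to look up, with roughly a 25/25/50 percent mix of unique/duplicate values; its exact
                // behavior is defined elsewhere in the benchmark project)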

                // in a real-world scenario we will probably have recently added the items to the set, so there is no need to try to clear the cache or anything
                for (int i = 0; i < maxN; i++)
                {
                    set.Add(a[i]);
                }

                double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds();

                PerfUtil.DoGCCollect();

                int    iterations = 1;
                long   startTicks;
                long   endTicks;
                double ticks;

                // one call is enough to JIT the Contains path without pulling everything into the cache
                bool isContained = set.Contains(0);

                if (maxN <= 1000)
                {
                    iterations = 1;

                    // the amount of time taken by these operations is too small to measure with just one iteration - so we measure
                    // multiple operations in a loop and take the total time; the mean time is this total time / iterations

                    // for really small operations, like a single Contains on a hash set that has 8 items, you would probably want to
                    // call at least a few hundred Contains and average those - maxN wouldn't work too well if it was just 8

                    startTicks = Stopwatch.GetTimestamp();

                    for (int i = 0; i < maxN; i++)
                    {
                        set.Contains(c[i]);
                    }

                    endTicks = Stopwatch.GetTimestamp();

                    ticks = ((endTicks - startTicks) * n) / (double)maxN;
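                    // e.g. (illustrative numbers only) if maxN = 1000, n = 250, and the loop above took 4000 ticks,
                    // the scaled value is 4000 * 250 / 1000 = 1000 ticks - an estimate of the cost of n lookups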
                }
                else
                {
                    iterations = 1;

                    startTicks = Stopwatch.GetTimestamp();
                    for (int i = 0; i < n; i++)                     // loop overhead is acceptable because we assume there will be some loop in a real-world scenario too
                    {
                        set.Contains(c[i]);
                    }
                    endTicks = Stopwatch.GetTimestamp();

                    ticks = (double)(endTicks - startTicks);
                }

                double nanoSecs = PerfUtil.GetNanoSecondsFromTicks(ticks, Stopwatch.Frequency) - overheadNanoSecs;
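                // (GetNanoSecondsFromTicks presumably applies the standard Stopwatch conversion, ticks * 1e9 / Stopwatch.Frequency,
                // since Stopwatch.Frequency is the number of timestamp ticks per second)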

                PerfDb.InsertMeasurement(dbConnStr, runID, benchmarkMethodID, n, iterations, nanoSecs, startTime, DateTime.Now);
            }
            catch (Exception ex)
            {
                Console.Write(ex.ToString());
                if (!string.IsNullOrEmpty(dbConnStr))
                {
                    // write error to db
                    PerfDb.InsertRunError(dbConnStr, runID, benchmarkMethodID, ex);
                }
                else
                {
                    // log error to file
                }
            }
        }
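The measurement above subtracts a timestamp overhead obtained from PerfUtil.GetTimestampOverheadInNanoSeconds, which is defined elsewhere in the project. A minimal sketch of how such an overhead could be measured (an assumed implementation, not the project's actual one) is to time many back-to-back Stopwatch.GetTimestamp() pairs and average them:

using System.Diagnostics;

static class TimestampOverheadSketch
{
    // Hypothetical stand-in for PerfUtil.GetTimestampOverheadInNanoSeconds:
    // the average cost of taking a pair of timestamps, expressed in nanoseconds.
    public static double MeasureOverheadNanoSeconds(int samples = 10_000)
    {
        long totalTicks = 0;
        for (int i = 0; i < samples; i++)
        {
            long start = Stopwatch.GetTimestamp();
            long end   = Stopwatch.GetTimestamp();
            totalTicks += end - start;
        }

        double avgTicks = totalTicks / (double)samples;

        // Stopwatch.Frequency is ticks per second, so ticks * 1e9 / Frequency gives nanoseconds
        return avgTicks * 1_000_000_000.0 / Stopwatch.Frequency;
    }
}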
Example #2
        static void Main(string[] args)
        {
            // cmd line params variables
            string dbConnStr         = null;
            int    runID             = 0;
            int    benchmarkMethodID = 0;
            int    n;
            int    maxN;

            try
            {
                DateTime startTime = DateTime.Now;
                //Console.WriteLine($"Args Count:" + args.Length.ToString());
                //foreach (string s in args)
                //{
                //	Console.WriteLine(s);
                //}
                //Console.ReadKey();

                string errMsg = PerfUtil.GetCmdLineParams_DbNAndMaxN(args, out dbConnStr, out runID, out benchmarkMethodID, out n, out maxN);
                if (errMsg != null)
                {
                    Console.WriteLine(errMsg);
                }
                //Console.WriteLine($"Args: {dbConnStr}; {runID.ToString()}; {benchmarkMethodID.ToString()}; {n.ToString()}; {maxN.ToString()}");
                //Console.ReadKey();

                int[] a  = new int[n];
                int[] a2 = new int[n];

                Random rand = new Random(89);
                for (int i = 0; i < a.Length; i++)
                {
                    a[i]  = rand.Next();
                    a2[i] = rand.Next();
                }

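                // warm up FastHashSet<SmallStruct>.Add once so the JIT cost isn't included in the timed loop below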
                FastHashSet<SmallStruct> setWarmup = new FastHashSet<SmallStruct>();
                setWarmup.Add(new SmallStruct(1, 2));

                FastHashSet<SmallStruct> set = new FastHashSet<SmallStruct>();

                double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds();

                PerfUtil.DoGCCollect();

                int    iterations = 1;
                long   startTicks;
                long   endTicks;
                double ticks;

                // JIT warmup for FastHashSet<SmallStruct>.Add was already done above via setWarmup,
                // so no separate Contains warmup is needed for this benchmark

                iterations = 1;

                startTicks = Stopwatch.GetTimestamp();
                for (int i = 0; i < a.Length; i++)
                {
                    set.Add(new SmallStruct(a[i], a2[i]));
                }

                endTicks = Stopwatch.GetTimestamp();

                ticks = (double)(endTicks - startTicks);

                double nanoSecs = PerfUtil.GetNanoSecondsFromTicks(ticks, Stopwatch.Frequency) - overheadNanoSecs;

                PerfDb.InsertMeasurement(dbConnStr, runID, benchmarkMethodID, n, iterations, nanoSecs, startTime, DateTime.Now);
            }
            catch (Exception ex)
            {
                Console.Write(ex.ToString());
                if (!string.IsNullOrEmpty(dbConnStr))
                {
                    // write error to db
                    PerfDb.InsertRunError(dbConnStr, runID, benchmarkMethodID, ex);
                }
                else
                {
                    // log error to file
                }
            }
        }
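This example depends on a SmallStruct type that is defined elsewhere in the benchmark project. A plausible minimal definition, shown here only as an assumption about its shape (a small value type with value equality, matching the two-int constructor used above), would be:

using System;

public readonly struct SmallStruct : IEquatable<SmallStruct>
{
    public readonly int A;
    public readonly int B;

    public SmallStruct(int a, int b)
    {
        A = a;
        B = b;
    }

    public bool Equals(SmallStruct other) => A == other.A && B == other.B;

    public override bool Equals(object obj) => obj is SmallStruct other && Equals(other);

    // combine the two fields; the real type may use a different hash
    public override int GetHashCode() => (A * 397) ^ B;
}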
Example #3
        static void Main(string[] args)
        {
            //string outputFileName = @"e:\\proj\\summary.tsv";
            //int minN = 100_000;
            //int maxN = 1_000_000;

            //int incrementNBy = 10_000;
            string errMsg = PerfUtil.GetCmdLineParams_OutputFileAndMinMaxIncN(args, out int minN, out int maxN, out int incrementNBy, out string outputFileName);
            if (errMsg != null)
            {
                Console.WriteLine(errMsg);
            }

            int nCount = ((maxN - minN) / incrementNBy) + 1;
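            // e.g. with the defaults commented out above (minN = 100_000, maxN = 1_000_000, incrementNBy = 10_000),
            // nCount = ((1_000_000 - 100_000) / 10_000) + 1 = 91 distinct values of N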

            int[] nArray = new int[nCount];

            int idx = 0;

            for (int n = minN; n <= maxN; n += incrementNBy, idx++)
            {
                nArray[idx] = n;
            }

            const int LoopUnrollCount       = 1;
            const int IterationCount       = 512;
            const int IterationWarmupCount = 16;

            long[] ticksH       = new long[nArray.Length * IterationCount * LoopUnrollCount];
            int    ticksIdxForH = 0;

            long[] ticksF       = new long[nArray.Length * IterationCount * LoopUnrollCount];
            int    ticksIdxForF = 0;

            long[] ticksC       = new long[nArray.Length * IterationCount * LoopUnrollCount];
            int    ticksIdxForC = 0;

            long startTicks;

            double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds();

            int[] a;
            int[] c;

            SCG.HashSet<int> h = new SCG.HashSet<int>();
            FastHashSet<int> f = new FastHashSet<int>();

            C5.HashSet<int> c5 = new C5.HashSet<int>();

            HashSetBench.BenchUtil.PopulateCollections25_25_50PctUnique(maxN, out a, out c, h, f, c5);

            // not sure if we should run benchmark 1 and then benchmark 2 separately so that the presence of one doesn't affect the other???
            // in practice they will probably not be run together one after the other

            PerfUtil.DoGCCollect();

            int N;

            for (int j = 0; j < nArray.Length; j++)
            {
                N = nArray[j];

                // not really sure what running the warmup really does - it can put things in the cache that maybe shouldn't be there, because they won't be there in a real application???
                // still, if we execute the same code with the same data in a loop a lot of times, this will populate the cache unrealistically anyway
                // also, if we do a warmup the JIT times are removed, but does this represent reality? - JIT times do happen in real running code???

                for (int iterationIdx = 0; iterationIdx < IterationWarmupCount; iterationIdx++)
                {
                    // SCG_Contains
                    for (int i = 0; i < N; i++)
                    {
                        h.Contains(c[i]);
                    }

                    // Fast_Contains
                    for (int i = 0; i < N; i++)
                    {
                        f.Contains(c[i]);
                    }

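                    // C5_Contains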
                    for (int i = 0; i < N; i++)
                    {
                        c5.Contains(c[i]);
                    }
                }

                for (int iterationIdx = 0; iterationIdx < IterationCount; iterationIdx++)
                {
                    // to minimize the effects of the loop code on the measurement, each benchmark could be unrolled multiple times
                    // (LoopUnrollCount is 1 here, so no unrolling is actually done) and the order could be alternated randomly to
                    // minimize any ordering effects
                    // not sure what effect loop unrolling has, since the loop code isn't included in the stopwatch interval -
                    // there might still be some residual effects on CPU registers? - not really sure

                    // 1

                    // there is some overhead that should be removed - the time spent returning from GetTimestamp and setting startTicks,
                    // plus the time in the closing GetTimestamp call up to the point where its return value is obtained
                    // this overhead is measured above by PerfUtil.GetTimestampOverheadInNanoSeconds and passed to the summary step below so it can be subtracted out
                    startTicks = Stopwatch.GetTimestamp();
                    for (int i = 0; i < N; i++)
                    {
                        h.Contains(c[i]);
                    }
                    ticksH[ticksIdxForH++] = Stopwatch.GetTimestamp() - startTicks;

                    startTicks = Stopwatch.GetTimestamp();
                    for (int i = 0; i < N; i++)
                    {
                        f.Contains(c[i]);
                    }
                    ticksF[ticksIdxForF++] = Stopwatch.GetTimestamp() - startTicks;

                    startTicks = Stopwatch.GetTimestamp();
                    for (int i = 0; i < N; i++)
                    {
                        c5.Contains(c[i]);
                    }
                    ticksC[ticksIdxForC++] = Stopwatch.GetTimestamp() - startTicks;
                }
            }

            // summarize and output the data

            BenchmarkSummaries summaries = new BenchmarkSummaries();

            summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadNanoSecs, nArray, IterationCount * LoopUnrollCount, ticksH), "SCG_Contains");

            summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadNanoSecs, nArray, IterationCount * LoopUnrollCount, ticksF), "Fast_Contains");

            summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadNanoSecs, nArray, IterationCount * LoopUnrollCount, ticksC), "C5_Contains");

            summaries.OutputSummariesToFile(outputFileName, "SCG_Contains");
        }