// Runs a throughput benchmark over every loaded prediction engine.
//
// Parameters:
//   atndEngines / amznEngines - per-model batch prediction engines, keyed by model path.
//   atndBatches / amznBatches - pre-parsed input batches for each model family.
//   numExecutors              - number of executors to create; executor i gets capacity 4 << i.
//   numRequestors / numRepeat - forwarded to RunInParallel (currently unused, see NOTE below).
//   numThreadsPerExecutor     - threads per FixedThreadPoolExecutor (default 1).
//
// NOTE(review): the measurement loops are commented out below, so
// totalPredictionTime stays 0.0 and the printed total is always zero.
// Presumably disabled during experimentation — confirm before trusting
// this method's output, and note the engine/batch parameters are unused
// while the loops remain disabled.
private static void RunThroughput(
    IDictionary<string, BatchPredictionEngine<AttendeeData, AttendeeResult>> atndEngines,
    List<AttendeeData[]> atndBatches,
    IDictionary<string, BatchPredictionEngine<AmazonData, AmazonResult>> amznEngines,
    List<AmazonData[]> amznBatches,
    int numExecutors,
    int numRequestors,
    int numRepeat,
    int numThreadsPerExecutor = 1)
{
    // One executor per slot; queue capacity grows geometrically (4, 8, 16, ...).
    // NOTE(review): 4 << i overflows int for i >= 29 — harmless for small
    // numExecutors, but unchecked here.
    IExecutor[] executors = new IExecutor[numExecutors];
    for (int i = 0; i < numExecutors; i++)
    {
        executors[i] = new FixedThreadPoolExecutor(numThreadsPerExecutor, 4 << i);
    }

    var totalPredictionTime = 0.0;
    /*
     * foreach (var engine in atndEngines)
     * {
     *  totalPredictionTime += RunInParallel(engine.Value, atndBatches, executors, numThreadsPerExecutor, numRequestors, numRepeat);
     * }
     * foreach (var engine in amznEngines)
     * {
     *  totalPredictionTime += RunInParallel(engine.Value, amznBatches, executors, numThreadsPerExecutor, numRequestors, numRepeat);
     * }
     */
    System.Console.WriteLine("Total time is: " + totalPredictionTime);
}
// Loads every model listed in the metadata file (one model path per line,
// first whitespace-separated token) and runs a throughput benchmark against
// each model as it is loaded, reporting per-model load time, process memory,
// and the total load time for all models.
//
// Parameters:
//   env         - ML host environment used to create prediction engines.
//   atndEngines - out: dictionary for Attendee engines keyed by model path.
//   amznEngines - out: dictionary for Amazon engines keyed by model path.
//
// NOTE(review): both out-dictionaries are currently returned EMPTY — the
// Add calls are commented out below and engines are only used thread-locally
// inside RunInParallel. Confirm callers do not expect populated dictionaries.
private static void LoadModels(
    IHostEnvironment env,
    out IDictionary<string, BatchPredictionEngine<AttendeeData, AttendeeResult>> atndEngines,
    out IDictionary<string, BatchPredictionEngine<AmazonData, AmazonResult>> amznEngines)
{
    int batchSize = 1024;

    // Pre-parse the input record files once; each list entry is one batch of rows.
    var atndBatches = AttendeeModel.Parse(Environment.ExpandEnvironmentVariables(_atndInputRecordFile), batchSize);
    var amznBatches = AmazonModel.Parse(Environment.ExpandEnvironmentVariables(_amznInputRecordFile), batchSize);

    var numModels = 0;
    var totalStartTime = Stopwatch.GetTimestamp();
    atndEngines = new Dictionary<string, BatchPredictionEngine<AttendeeData, AttendeeResult>>();
    amznEngines = new Dictionary<string, BatchPredictionEngine<AmazonData, AmazonResult>>();

    // Single-threaded executors with geometrically growing capacity (4 << i).
    IExecutor[] executors = new IExecutor[_numExecutors];
    for (int i = 0; i < _numExecutors; i++)
    {
        executors[i] = new FixedThreadPoolExecutor(1, 4 << i);
    }

    using (var reader = new StreamReader(Environment.ExpandEnvironmentVariables(_metadataFile)))
    {
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            // FIX: skip blank/whitespace-only metadata lines. Previously a
            // blank line produced an empty model path, printed bogus
            // load/memory stats, and was incorrectly counted in numModels.
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            string[] splits = line.Split();
            var modelPath = Environment.ExpandEnvironmentVariables(splits[0]);
            var startTime = Stopwatch.GetTimestamp();
            if (modelPath.Contains("Attendee"))
            {
                // Engine creation is deferred per-thread so each worker
                // inside RunInParallel gets its own engine instance.
                var engine = new ThreadLocal<BatchPredictionEngine<AttendeeData, AttendeeResult>>(() =>
                {
                    return AttendeeModel.CreateEngine(env, Environment.ExpandEnvironmentVariables(modelPath));
                });

                // Throughput
                RunInParallel(engine, atndBatches, executors, _numStageHandlerThreads, _numRequestHandler, _numIterations);

                // Latency
                //RunLatencySingle(modelPath, engine.Value, atndBatches.Take(10).ToList(), null, amznBatches.Take(10).ToList(), false);
                //RunLatencySingle(modelPath, engine, atndBatches, null, amznBatches);
                //atndEngines.Add(modelPath, engine);
            }
            else if (modelPath.Contains("Amazon"))
            {
                var engine = new ThreadLocal<BatchPredictionEngine<AmazonData, AmazonResult>>(() =>
                {
                    return AmazonModel.CreateEngine(env, Environment.ExpandEnvironmentVariables(modelPath));
                });

                RunInParallel(engine, amznBatches, executors, _numStageHandlerThreads, _numRequestHandler, _numIterations);
                // RunLatencySingle(modelPath, null, atndBatches.Take(10).ToList(), engine, amznBatches.Take(10).ToList(), false);
                // RunLatencySingle(modelPath, null, atndBatches, engine, amznBatches);
                //amznEngines.Add(modelPath, engine);
            }

            // Stopwatch ticks -> milliseconds. Note this interval includes
            // the RunInParallel benchmark, not just engine construction.
            var loadingTime = (Stopwatch.GetTimestamp() - startTime) * 1000.0 / Stopwatch.Frequency;
            using (Process proc = Process.GetCurrentProcess())
            {
                var memory = proc.PrivateMemorySize64;
                System.Console.WriteLine("Load {0} in {1} ms", modelPath, loadingTime);
                System.Console.WriteLine("Memory after loading {0} {1}", modelPath, memory);
            }
            // TODO: Other type of models
            numModels++;
        }
    }

    var totalLoadingTime = (Stopwatch.GetTimestamp() - totalStartTime) * 1000.0 / Stopwatch.Frequency;
    System.Console.WriteLine("Load {0} models in {1} ms", numModels, totalLoadingTime);
}