private async Task runBatchMiner(BenchmarkRequest request, IServerStreamWriter<BenchmarkReply> responseStream, ServerCallContext context)
{
    Console.WriteLine("run batch miner " + request.UseTopK);

    await Task.Run(async () =>
    {
        BatchMiner miner = new BatchMiner();
        var batchStopwatch = Stopwatch.StartNew();
        long totalTimeElapsed = 0;
        //int numBlocks = (int)Math.Ceiling(request.DBSize / (double)request.SampleSize);

        await miner.ProcessCSVFile(
            request.File,
            request.Support,
            request.K,
            request.UseTopK,
            request.SampleSize,
            request.DBSize,
            request.ErrorTolerance,
            // invoked once per processed block with the frequent sequence patterns mined so far
            async delegate (List<Sequence> frequentSequencePatterns, double error, int iteration)
            {
                batchStopwatch.Stop();
                totalTimeElapsed += batchStopwatch.ElapsedMilliseconds;

                var reply = new BenchmarkReply
                {
                    NrProcessedRecords = miner.NumTransactionsProcessed,
                    ReplyType = ReplyType.Batch,
                    Iteration = iteration,
                    Error = error,
                    BatchRuntimeInMillis = batchStopwatch.ElapsedMilliseconds,
                    TotalRuntimeInMillis = totalTimeElapsed,
                    PrevBlockFileReadingTime = miner.PrevBlockFileReadingTime,
                    PrevBlockPreProcessingRuntime = miner.Algorithm.PrevBlockPreProcessingRuntime,
                    PrevBlockPrefixSpanRuntime = miner.Algorithm.PrevBlockPrefixSpanRuntime,
                    PrevBlockSubsequenceMatchingRuntime = miner.Algorithm.PrevBlockSubsequenceMatchingRuntime
                };

                reply.SequencesInJson = JsonConvert.SerializeObject(frequentSequencePatterns, _jsonSettings);
                await responseStream.WriteAsync(reply);

                batchStopwatch.Restart();
            },
            long.MaxValue);

        // signal the client that the benchmark run has finished
        await responseStream.WriteAsync(new BenchmarkReply
        {
            ReplyType = ReplyType.Complete,
            TotalRuntimeInMillis = totalTimeElapsed
        });
    });
}
private async Task runPrefixSpan(BenchmarkRequest request, IServerStreamWriter<BenchmarkReply> responseStream, ServerCallContext context)
{
    Console.WriteLine("run prefix span");

    await Task.Run(async () =>
    {
        Stopwatch stopwatch = Stopwatch.StartNew(); // create and start the stopwatch
        List<Sequence> sequenceList = new List<Sequence>();

        // read the input file line by line; each line encodes one sequence
        using (var reader = new StreamReader(File.OpenRead(request.File)))
        {
            while (!reader.EndOfStream)
            {
                string line = reader.ReadLine();

                // split the line at the itemset separator " -1 "
                string[] transactions = line.Split(new string[] { " -1 " }, StringSplitOptions.None);
                List<Transaction> tr = new List<Transaction>(transactions.Length);

                // skip the last split token, which holds the end-of-sequence terminator rather than items
                for (int i = 0; i < transactions.Length - 1; i++)
                {
                    string[] items = transactions[i].Split(' ');
                    Transaction trans = new Transaction(items);
                    tr.Add(trans);
                }

                sequenceList.Add(new Sequence(tr));
            }
        }

        PrefixSpan algorithm = new PrefixSpan(sequenceList);
        List<Sequence> frequentSequences = algorithm.MinSupportFrequentSequences(request.Support);

        // stop the stopwatch once the frequent patterns have been returned
        stopwatch.Stop();

        var reply = new BenchmarkReply
        {
            NrProcessedRecords = request.DBSize,
            ReplyType = ReplyType.Batch,
            Iteration = 0,
            Error = 0,
            BatchRuntimeInMillis = stopwatch.ElapsedMilliseconds,
            TotalRuntimeInMillis = stopwatch.ElapsedMilliseconds
        };
        reply.SequencesInJson = JsonConvert.SerializeObject(frequentSequences, _jsonSettings);
        await responseStream.WriteAsync(reply);

        // signal completion
        await responseStream.WriteAsync(new BenchmarkReply
        {
            ReplyType = ReplyType.Complete,
            TotalRuntimeInMillis = stopwatch.ElapsedMilliseconds
        });
    });
}