/// <summary>
/// Worker thread that reads a single test file via the supplied helper.
/// The thread name encodes the worker id and the file it operates on.
/// </summary>
internal ReadWorker(TestParallelReadUtil.TestFileInfo testInfo, int id, TestParallelReadUtil.ReadWorkerHelper helper)
    : base("ReadWorker-" + id + "-" + testInfo.filepath.ToString())
{
    this.helper = helper;
    this.testInfo = testInfo;
    // Sanity-check the fixture: the already-open stream must cover exactly the
    // authentic data recorded for this file (assumes testInfo.dis was opened by
    // the caller before construction — confirm against RunParallelRead).
    fileSize = testInfo.dis.GetFileLength();
    NUnit.Framework.Assert.AreEqual(fileSize, testInfo.authenticData.Length);
    bytesRead = 0;
    error = false;
}
/// <summary>Start the parallel read with the given parameters.</summary>
/// <param name="nFiles">number of distinct test files to create and read</param>
/// <param name="nWorkerEach">number of reader threads spawned per file</param>
/// <param name="helper">read strategy shared by all workers</param>
/// <returns>true iff no worker recorded an error</returns>
/// <exception cref="System.IO.IOException"/>
internal virtual bool RunParallelRead(int nFiles, int nWorkerEach, TestParallelReadUtil.ReadWorkerHelper helper)
{
    TestParallelReadUtil.ReadWorker[] workers = new TestParallelReadUtil.ReadWorker[nFiles * nWorkerEach];
    TestParallelReadUtil.TestFileInfo[] testInfoArr = new TestParallelReadUtil.TestFileInfo[nFiles];
    // Prepare the files and workers
    int nWorkers = 0;
    for (int i = 0; i < nFiles; ++i)
    {
        TestParallelReadUtil.TestFileInfo testInfo = new TestParallelReadUtil.TestFileInfo(this);
        testInfoArr[i] = testInfo;
        testInfo.filepath = new Path("/TestParallelRead.dat." + i);
        testInfo.authenticData = util.WriteFile(testInfo.filepath, FileSizeK);
        testInfo.dis = dfsClient.Open(testInfo.filepath.ToString(), dfsClient.GetConf().ioBufferSize, verifyChecksums);
        for (int j = 0; j < nWorkerEach; ++j)
        {
            // C# evaluates the element index (nWorkers++) before the right-hand
            // side, so the constructor sees the incremented value: slots are
            // 0-based while worker ids are 1-based. Preserved from the original.
            workers[nWorkers++] = new TestParallelReadUtil.ReadWorker(testInfo, nWorkers, helper);
        }
    }
    // Start the workers and wait
    long starttime = Time.MonotonicNow();
    foreach (TestParallelReadUtil.ReadWorker worker in workers)
    {
        worker.Start();
    }
    foreach (TestParallelReadUtil.ReadWorker worker_1 in workers)
    {
        try
        {
            worker_1.Join();
        }
        catch (Exception e)
        {
            // FIX: this used to be an empty catch. An interrupted join means the
            // worker may still be running when its stream is closed below, so at
            // least leave a trace for whoever debugs the resulting failure.
            Log.Info("Interrupted while joining " + worker_1.GetName() + ": " + e);
        }
    }
    long endtime = Time.MonotonicNow();
    // Cleanup: close every stream opened above, even for failed workers.
    foreach (TestParallelReadUtil.TestFileInfo testInfo_1 in testInfoArr)
    {
        testInfo_1.dis.Close();
    }
    // Report per-worker and aggregate throughput; any worker error fails the run.
    bool res = true;
    long totalRead = 0;
    foreach (TestParallelReadUtil.ReadWorker worker_2 in workers)
    {
        long nread = worker_2.GetBytesRead();
        // Integer division: the per-read average is truncated, matching the original.
        Log.Info("--- Report: " + worker_2.GetName() + " read " + nread + " B; " + "average "
            + nread / TestParallelReadUtil.ReadWorker.NIterations + " B per read");
        totalRead += nread;
        if (worker_2.HasError())
        {
            res = false;
        }
    }
    double timeTakenSec = (endtime - starttime) / 1000.0;
    long totalReadKB = totalRead / 1024;
    Log.Info("=== Report: " + nWorkers + " threads read " + totalReadKB + " KB (across "
        + nFiles + " file(s)) in " + timeTakenSec + "s; average " + totalReadKB / timeTakenSec + " KB/s");
    return res;
}