/// <summary>
/// Verifies cache-capacity enforcement: caching the first n-1 files (sized so n files
/// exceed total capacity) succeeds, caching the nth file fails and increments the
/// blocks-failed-to-cache metric, and uncaching the n-1 files drains usage back down.
/// </summary>
/// <exception cref="System.Exception"/>
public virtual void TestFilesExceedMaxLockedMemory()
{
    Log.Info("beginning testFilesExceedMaxLockedMemory");
    // Create some test files that will exceed total cache capacity:
    // each file is capacity/(n-1) bytes, so n files cannot all fit.
    int numFiles = 5;
    long fileSize = CacheCapacity / (numFiles - 1);
    Path[] testFiles = new Path[numFiles];
    HdfsBlockLocation[][] fileLocs = new HdfsBlockLocation[numFiles][];
    long[] fileSizes = new long[numFiles];
    for (int i = 0; i < numFiles; i++)
    {
        testFiles[i] = new Path("/testFilesExceedMaxLockedMemory-" + i);
        // 0xDFAL is just a fixed seed for deterministic file contents.
        DFSTestUtil.CreateFile(fs, testFiles[i], fileSize, (short)1, unchecked((long)(0xDFAL)));
        fileLocs[i] = (HdfsBlockLocation[])fs.GetFileBlockLocations(testFiles[i], 0, fileSize);
        // Get the file size (sum of blocks)
        long[] sizes = GetBlockSizes(fileLocs[i]);
        for (int j = 0; j < sizes.Length; j++)
        {
            fileSizes[i] += sizes[j];
        }
    }
    // Cache the first n-1 files and verify usage grows by the rounded file size
    // (4 cached blocks per file — assumes each file occupies 4 blocks; confirm
    // against CacheCapacity/BlockSize if this test is resized).
    long total = 0;
    DFSTestUtil.VerifyExpectedCacheUsage(0, 0, fsd);
    for (int i_1 = 0; i_1 < numFiles - 1; i_1++)
    {
        SetHeartbeatResponse(CacheBlocks(fileLocs[i_1]));
        total = DFSTestUtil.VerifyExpectedCacheUsage(
            rounder.Round(total + fileSizes[i_1]), 4 * (i_1 + 1), fsd);
    }
    // nth file should hit a capacity exception; watch the root logger for it.
    LogVerificationAppender appender = new LogVerificationAppender();
    Logger logger = Logger.GetRootLogger();
    logger.AddAppender(appender);
    SetHeartbeatResponse(CacheBlocks(fileLocs[numFiles - 1]));
    GenericTestUtils.WaitFor(new _Supplier_351(appender), 500, 30000);
    // Also check the metrics for the failure
    NUnit.Framework.Assert.IsTrue("Expected more than 0 failed cache attempts",
        fsd.GetNumBlocksFailedToCache() > 0);
    // Uncache the n-1 files; 16 = 4 files * 4 blocks cached above.
    int curCachedBlocks = 16;
    for (int i_2 = 0; i_2 < numFiles - 1; i_2++)
    {
        SetHeartbeatResponse(UncacheBlocks(fileLocs[i_2]));
        long uncachedBytes = rounder.Round(fileSizes[i_2]);
        total -= uncachedBytes;
        // Explicit narrowing: block counts fit comfortably in an int here.
        curCachedBlocks -= (int)(uncachedBytes / BlockSize);
        DFSTestUtil.VerifyExpectedCacheUsage(total, curCachedBlocks, fsd);
    }
    Log.Info("finishing testFilesExceedMaxLockedMemory");
}
/// <summary>
/// Returns the on-disk length of each block referenced by the given locations,
/// measured by opening the block's input stream and asking its channel for its size.
/// </summary>
/// <exception cref="System.Exception"/>
private static long[] GetBlockSizes(HdfsBlockLocation[] locs)
{
    long[] blockSizes = new long[locs.Length];
    for (int idx = 0; idx < locs.Length; idx++)
    {
        // Hoist the shared accessor chain once per location.
        var located = locs[idx].GetLocatedBlock().GetBlock();
        string poolId = located.GetBlockPoolId();
        Block localBlock = located.GetLocalBlock();
        ExtendedBlock extBlock = new ExtendedBlock(poolId, localBlock);
        FileInputStream stream = null;
        FileChannel channel = null;
        try
        {
            stream = (FileInputStream)fsd.GetBlockInputStream(extBlock, 0);
            channel = stream.GetChannel();
            blockSizes[idx] = channel.Size();
        }
        finally
        {
            // Always release the channel and stream, even if Size() throws.
            IOUtils.Cleanup(Log, channel, stream);
        }
    }
    return blockSizes;
}
/// <summary>
/// Convenience overload: builds the uncache command set for a single block location
/// by delegating to <see cref="UncacheBlocks"/> with a one-element array.
/// </summary>
private static DatanodeCommand[] UncacheBlock(HdfsBlockLocation loc)
{
    HdfsBlockLocation[] single = { loc };
    return UncacheBlocks(single);
}