/// <summary>
/// Starts a NameNode from the given configuration, verifies that the
/// previously created "/test" directory survived the image round trip,
/// then saves a fresh checkpoint (using whatever compression settings are
/// in <paramref name="conf"/>) and shuts the NameNode down.
/// </summary>
/// <param name="conf">configuration to start the NameNode with; its image
/// compression keys control how the new fsimage is written</param>
/// <exception cref="System.IO.IOException"/>
private void CheckNameSpace(Configuration conf)
{
    NameNode namenode = new NameNode(conf);
    try
    {
        NamenodeProtocols nnRpc = namenode.GetRpcServer();
        NUnit.Framework.Assert.IsTrue(nnRpc.GetFileInfo("/test").IsDir());
        // SaveNamespace requires the NameNode to be in safe mode.
        nnRpc.SetSafeMode(HdfsConstants.SafeModeAction.SafemodeEnter, false);
        nnRpc.SaveNamespace();
    }
    finally
    {
        // Shut down even when an assertion or the save throws; otherwise the
        // NameNode (and its ports/dirs) leak and later restarts in the same
        // test fail.
        namenode.Stop();
        namenode.Join();
    }
}
/// <summary>
/// Exercises fsimage compression round trips: writes an uncompressed image,
/// re-saves it compressed with the default codec, then with GzipCodec, then
/// uncompressed again, checking after each save that the namespace is intact.
/// </summary>
public virtual void TestCompression()
{
    Log.Info("Test compressing image.");
    Configuration config = new Configuration();
    FileSystem.SetDefaultUri(config, "hdfs://localhost:0");
    config.Set(DFSConfigKeys.DfsNamenodeHttpAddressKey, "127.0.0.1:0");
    FilePath baseDir = new FilePath(PathUtils.GetTestDir(GetType()), "dfs/");
    config.Set(DFSConfigKeys.DfsNamenodeNameDirKey, new FilePath(baseDir, "name").GetPath());
    config.SetBoolean(DFSConfigKeys.DfsPermissionsEnabledKey, false);
    DFSTestUtil.FormatNameNode(config);
    // Round 1: start a NameNode, create /test, and save an uncompressed image.
    Log.Info("Create an uncompressed fsimage");
    NameNode nameNode = new NameNode(config);
    PermissionStatus ownerPerm = new PermissionStatus("hairong", null, FsPermission.GetDefault());
    nameNode.GetNamesystem().Mkdirs("/test", ownerPerm, true);
    NamenodeProtocols rpc = nameNode.GetRpcServer();
    NUnit.Framework.Assert.IsTrue(rpc.GetFileInfo("/test").IsDir());
    // Safe mode is required before SaveNamespace.
    rpc.SetSafeMode(HdfsConstants.SafeModeAction.SafemodeEnter, false);
    rpc.SaveNamespace();
    nameNode.Stop();
    nameNode.Join();
    // Round 2: reload the uncompressed image, save it with the default codec.
    Log.Info("Read an uncomressed image and store it compressed using default codec.");
    config.SetBoolean(DFSConfigKeys.DfsImageCompressKey, true);
    CheckNameSpace(config);
    // Round 3: reload the default-codec image, save it with GzipCodec.
    Log.Info("Read a compressed image and store it using a different codec.");
    config.Set(DFSConfigKeys.DfsImageCompressionCodecKey, "org.apache.hadoop.io.compress.GzipCodec");
    CheckNameSpace(config);
    // Round 4: reload the gzip image, save it uncompressed.
    Log.Info("Read a compressed image and store it as uncompressed.");
    config.SetBoolean(DFSConfigKeys.DfsImageCompressKey, false);
    CheckNameSpace(config);
    // Round 5: reload the uncompressed image, save it uncompressed again.
    Log.Info("Read an uncompressed image and store it as uncompressed.");
    CheckNameSpace(config);
}
/// <summary>
/// End-to-end test of DistributedFileSystem.Concat: creates a target file and
/// ten equally sized source files, verifies a non-privileged user is rejected,
/// concatenates the (shuffled) sources into the target, then checks the
/// resulting length, block count, content, source removal, and finally
/// concatenates one additional sub-block file.
/// </summary>
public virtual void TestConcat()
{
    int numFiles = 10;
    long fileLen = blockSize * 3;
    HdfsFileStatus status;
    FSDataInputStream inStream;
    string trg = "/trg";
    Path trgPath = new Path(trg);
    // Create the concat target and record its length and block count.
    DFSTestUtil.CreateFile(dfs, trgPath, fileLen, ReplFactor, 1);
    status = nn.GetFileInfo(trg);
    long targetLen = status.GetLen();
    long targetBlocks = nn.GetBlockLocations(trg, 0, targetLen).LocatedBlockCount();
    Path[] files = new Path[numFiles];
    // bytes[0] holds the target's content; bytes[1..numFiles] hold the sources'.
    byte[][] bytes = new byte[numFiles + 1][];
    for (int b = 0; b <= numFiles; b++)
    {
        bytes[b] = new byte[(int)fileLen];
    }
    LocatedBlocks[] blockLists = new LocatedBlocks[numFiles];
    long[] srcLens = new long[numFiles];
    inStream = dfs.Open(trgPath);
    inStream.ReadFully(0, bytes[0]);
    inStream.Close();
    // Create the source files, each with distinct content (seeded by index).
    for (int k = 0; k < files.Length; k++)
    {
        files[k] = new Path("/file" + k);
        Path path = files[k];
        System.Console.Out.WriteLine("Creating file " + path);
        DFSTestUtil.CreateFile(dfs, path, fileLen, ReplFactor, k);
        status = nn.GetFileInfo(path.ToUri().GetPath());
        srcLens[k] = status.GetLen();
        // Every source must have the same length as the target.
        NUnit.Framework.Assert.AreEqual(targetLen, srcLens[k]);
        blockLists[k] = nn.GetBlockLocations(path.ToUri().GetPath(), 0, srcLens[k]);
        inStream = dfs.Open(path);
        inStream.ReadFully(0, bytes[k + 1]);
        inStream.Close();
    }
    // Permission check: concat as a user without write access must fail.
    UserGroupInformation user1 = UserGroupInformation.CreateUserForTesting("theDoctor", new string[] { "tardis" });
    DistributedFileSystem hdfs = (DistributedFileSystem)DFSTestUtil.GetFileSystemAs(user1, conf);
    try
    {
        hdfs.Concat(trgPath, files);
        NUnit.Framework.Assert.Fail("Permission exception expected");
    }
    catch (IOException ie)
    {
        // expected
        System.Console.Out.WriteLine("Got expected exception for permissions:" + ie.GetLocalizedMessage());
    }
    // Snapshot the parent's file count before the real concat.
    ContentSummary cBefore = dfs.GetContentSummary(trgPath.GetParent());
    // Reverse the source array so INode ids are no longer in sorted order,
    // swapping the matching content buffers (offset by one for the target).
    for (int j = 0; j < files.Length / 2; j++)
    {
        Path swapPath = files[j];
        files[j] = files[files.Length - 1 - j];
        files[files.Length - 1 - j] = swapPath;
        byte[] swapBytes = bytes[1 + j];
        bytes[1 + j] = bytes[files.Length - j];
        bytes[files.Length - j] = swapBytes;
    }
    // The real concatenation.
    dfs.Concat(trgPath, files);
    // The sources are absorbed into the target, so the file count drops by
    // exactly files.Length.
    ContentSummary cAfter = dfs.GetContentSummary(trgPath.GetParent());
    NUnit.Framework.Assert.AreEqual(cBefore.GetFileCount(), cAfter.GetFileCount() + files.Length);
    // Accumulate the expected post-concat length and block count.
    long totalLen = targetLen;
    long totalBlocks = targetBlocks;
    for (int k = 0; k < files.Length; k++)
    {
        totalLen += srcLens[k];
        totalBlocks += blockLists[k].LocatedBlockCount();
    }
    System.Console.Out.WriteLine("total len=" + totalLen + "; totalBlocks=" + totalBlocks);
    status = nn.GetFileInfo(trg);
    targetLen = status.GetLen();
    // new length
    // Read back the whole concatenated file.
    inStream = dfs.Open(trgPath);
    byte[] byteFileConcat = new byte[(int)targetLen];
    inStream.ReadFully(0, byteFileConcat);
    inStream.Close();
    targetBlocks = nn.GetBlockLocations(trg, 0, targetLen).LocatedBlockCount();
    // 1. number of blocks matches the sum of target + sources.
    NUnit.Framework.Assert.AreEqual(targetBlocks, totalBlocks);
    // 2. file length matches the sum of target + sources.
    NUnit.Framework.Assert.AreEqual(targetLen, totalLen);
    // 3. each source file was removed; its name is reusable afterwards.
    foreach (Path p in files)
    {
        status = nn.GetFileInfo(p.ToUri().GetPath());
        NUnit.Framework.Assert.IsNull("File " + p + " still exists", status);
        DFSTestUtil.CreateFile(dfs, p, fileLen, ReplFactor, 1);
    }
    // 4. content equals the (reordered) concatenation of the buffers.
    CheckFileContent(byteFileConcat, bytes);
    // Finally, concat a file smaller than one block onto the target.
    Path smallFile = new Path("/sfile");
    int sFileLen = 10;
    DFSTestUtil.CreateFile(dfs, smallFile, sFileLen, ReplFactor, 1);
    dfs.Concat(trgPath, new Path[] { smallFile });
    status = nn.GetFileInfo(trg);
    targetLen = status.GetLen();
    // new length: one more block, sFileLen more bytes.
    targetBlocks = nn.GetBlockLocations(trg, 0, targetLen).LocatedBlockCount();
    NUnit.Framework.Assert.AreEqual(targetBlocks, totalBlocks + 1);
    NUnit.Framework.Assert.AreEqual(targetLen, totalLen + sFileLen);
}