/// <exception cref="System.IO.IOException"/>
public virtual int Run(Configuration conf, IList<string> args) {
    if (!args.IsEmpty()) {
        System.Console.Error.WriteLine("Can't understand argument: " + args[0]);
        return 1;
    }
    DistributedFileSystem dfs = AdminHelper.GetDFS(conf);
    try {
        TableListing listing = new TableListing.Builder()
            .AddField(string.Empty)
            .AddField(string.Empty, true)
            .WrapWidth(AdminHelper.MaxLineWidth)
            .HideHeaders()
            .Build();
        RemoteIterator<EncryptionZone> it = dfs.ListEncryptionZones();
        while (it.HasNext()) {
            EncryptionZone ez = it.Next();
            listing.AddRow(ez.GetPath(), ez.GetKeyName());
        }
        System.Console.Out.WriteLine(listing.ToString());
    } catch (IOException e) {
        System.Console.Error.WriteLine(PrettifyException(e));
        return 2;
    }
    return 0;
}
public virtual void TestSymbolicLinks() {
    WriteFile(fc, File1, FileLen);
    WriteFile(fc, File2, FileLen);
    WriteFile(fc, File3, FileLen);
    Path dir4 = new Path(TestDir, "dir4");
    Path dir5 = new Path(dir4, "dir5");
    Path file4 = new Path(dir4, "file4");
    fc.CreateSymlink(Dir1, dir5, true);
    fc.CreateSymlink(File1, file4, true);
    // Recursive listing follows the directory symlink into Dir1 (File2, File3)
    // and resolves the file symlink to File1.
    RemoteIterator<LocatedFileStatus> itor = fc.Util().ListFiles(dir4, true);
    LocatedFileStatus stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File2), stat.GetPath());
    stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File3), stat.GetPath());
    stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File1), stat.GetPath());
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    // Non-recursive listing only sees the file symlink, resolved to File1.
    itor = fc.Util().ListFiles(dir4, false);
    stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File1), stat.GetPath());
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
}
public virtual void TestListLocatedStatus() {
    string testHarPath = this.GetType().GetResource("/test.har").AbsolutePath;
    URI uri = new URI("har://" + testHarPath);
    HarFileSystem hfs = new HarFileSystem(localFileSystem);
    hfs.Initialize(uri, new Configuration());
    // test.har has the following contents:
    //   dir1/1.txt
    //   dir1/2.txt
    ICollection<string> expectedFileNames = new HashSet<string>();
    expectedFileNames.AddItem("1.txt");
    expectedFileNames.AddItem("2.txt");
    // List contents of dir, and ensure we find all expected files
    Path path = new Path("dir1");
    RemoteIterator<LocatedFileStatus> fileList = hfs.ListLocatedStatus(path);
    while (fileList.HasNext()) {
        string fileName = fileList.Next().GetPath().GetName();
        Assert.True(fileName + " not in expected files list", expectedFileNames.Contains(fileName));
        expectedFileNames.Remove(fileName);
    }
    Assert.Equal("Didn't find all of the expected file names: " + expectedFileNames, 0, expectedFileNames.Count);
}
/// <exception cref="System.Exception"/>
public virtual LocatedFileStatusFetcher.ProcessInputDirCallable.Result Call() {
    LocatedFileStatusFetcher.ProcessInputDirCallable.Result result =
        new LocatedFileStatusFetcher.ProcessInputDirCallable.Result();
    result.fs = fs;
    if (fileStatus.IsDirectory()) {
        RemoteIterator<LocatedFileStatus> iter = fs.ListLocatedStatus(fileStatus.GetPath());
        while (iter.HasNext()) {
            LocatedFileStatus stat = iter.Next();
            if (inputFilter.Accept(stat.GetPath())) {
                if (recursive && stat.IsDirectory()) {
                    result.dirsNeedingRecursiveCalls.AddItem(stat);
                } else {
                    result.locatedFileStatuses.AddItem(stat);
                }
            }
        }
    } else {
        result.locatedFileStatuses.AddItem(fileStatus);
    }
    return result;
}
public _RemoteIterator_422(ViewFileSystem _enclosing, RemoteIterator<LocatedFileStatus> statusIter,
    InodeTree.ResolveResult<FileSystem> res, Path f) {
    this._enclosing = _enclosing;
    this.statusIter = statusIter;
    this.res = res;
    this.f = f;
}
public virtual void TestTargetDeletionForListLocatedStatus() {
    Log.Info("Test Target Delete For listLocatedStatus");
    RemoteIterator<LocatedFileStatus> itor = fs.ListLocatedStatus(TestPath);
    itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
}
/// <exception cref="System.IO.IOException"/>
private IList<FileStatus> SingleThreadedListStatus(JobContext job, Path[] dirs, PathFilter inputFilter, bool recursive) {
    IList<FileStatus> result = new AList<FileStatus>();
    IList<IOException> errors = new AList<IOException>();
    for (int i = 0; i < dirs.Length; ++i) {
        Path p = dirs[i];
        FileSystem fs = p.GetFileSystem(job.GetConfiguration());
        FileStatus[] matches = fs.GlobStatus(p, inputFilter);
        if (matches == null) {
            errors.AddItem(new IOException("Input path does not exist: " + p));
        } else if (matches.Length == 0) {
            errors.AddItem(new IOException("Input Pattern " + p + " matches 0 files"));
        } else {
            foreach (FileStatus globStat in matches) {
                if (globStat.IsDirectory()) {
                    RemoteIterator<LocatedFileStatus> iter = fs.ListLocatedStatus(globStat.GetPath());
                    while (iter.HasNext()) {
                        LocatedFileStatus stat = iter.Next();
                        if (inputFilter.Accept(stat.GetPath())) {
                            if (recursive && stat.IsDirectory()) {
                                AddInputPathRecursively(result, fs, stat.GetPath(), inputFilter);
                            } else {
                                result.AddItem(stat);
                            }
                        }
                    }
                } else {
                    result.AddItem(globStat);
                }
            }
        }
    }
    if (!errors.IsEmpty()) {
        throw new InvalidInputException(errors);
    }
    return result;
}
public virtual void TestDirectory() {
    fs.Mkdirs(Dir1);
    // test empty directory
    RemoteIterator<LocatedFileStatus> itor = fs.ListFiles(Dir1, true);
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    itor = fs.ListFiles(Dir1, false);
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    // testing directory with 1 file
    WriteFile(fs, File2, FileLen);
    itor = fs.ListFiles(Dir1, true);
    LocatedFileStatus stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    Assert.True(stat.IsFile());
    Assert.Equal(FileLen, stat.GetLen());
    Assert.Equal(fs.MakeQualified(File2), stat.GetPath());
    Assert.Equal(1, stat.GetBlockLocations().Length);
    itor = fs.ListFiles(Dir1, false);
    stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    Assert.True(stat.IsFile());
    Assert.Equal(FileLen, stat.GetLen());
    Assert.Equal(fs.MakeQualified(File2), stat.GetPath());
    Assert.Equal(1, stat.GetBlockLocations().Length);
    // test more complicated directory
    WriteFile(fs, File1, FileLen);
    WriteFile(fs, File3, FileLen);
    ICollection<Path> filesToFind = new HashSet<Path>();
    filesToFind.AddItem(fs.MakeQualified(File1));
    filesToFind.AddItem(fs.MakeQualified(File2));
    filesToFind.AddItem(fs.MakeQualified(File3));
    itor = fs.ListFiles(TestDir, true);
    stat = itor.Next();
    Assert.True(stat.IsFile());
    Assert.True("Path " + stat.GetPath() + " unexpected", filesToFind.Remove(stat.GetPath()));
    stat = itor.Next();
    Assert.True(stat.IsFile());
    Assert.True("Path " + stat.GetPath() + " unexpected", filesToFind.Remove(stat.GetPath()));
    stat = itor.Next();
    Assert.True(stat.IsFile());
    Assert.True("Path " + stat.GetPath() + " unexpected", filesToFind.Remove(stat.GetPath()));
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    Assert.True(filesToFind.IsEmpty());
    itor = fs.ListFiles(TestDir, false);
    stat = itor.Next();
    Assert.True(stat.IsFile());
    Assert.Equal(fs.MakeQualified(File1), stat.GetPath());
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    fs.Delete(TestDir, true);
}
/// <summary>
/// Override this so that we don't set the targetTestRoot to any path under the
/// root of the FS, and so that we don't try to delete the test dir, but rather
/// only its contents.
/// </summary>
/// <exception cref="System.IO.IOException"/>
internal override void InitializeTargetTestRoot() {
    targetTestRoot = fc.MakeQualified(new Path("/"));
    RemoteIterator<FileStatus> dirContents = fc.ListStatus(targetTestRoot);
    while (dirContents.HasNext()) {
        fc.Delete(dirContents.Next().GetPath(), true);
    }
}
/// <exception cref="System.IO.IOException"/>
private int CountPaths(RemoteIterator<Path> iter) {
    int i = 0;
    while (iter.HasNext()) {
        Log.Info("PATH: " + iter.Next().ToUri().GetPath());
        i++;
    }
    return i;
}
/// <exception cref="System.IO.FileNotFoundException"/>
/// <exception cref="System.IO.IOException"/>
protected internal override RemoteIterator<LocatedFileStatus> ListLocatedStatus(Path f, PathFilter filter) {
    InodeTree.ResolveResult<FileSystem> res = fsState.Resolve(GetUriPath(f), true);
    RemoteIterator<LocatedFileStatus> statusIter = res.targetFileSystem.ListLocatedStatus(res.remainingPath);
    if (res.IsInternalDir()) {
        return statusIter;
    }
    return new _RemoteIterator_422(this, statusIter, res, f);
}
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
/// <exception cref="System.TypeLoadException"/>
public virtual void TestRandomWriter() {
    Log.Info("\n\n\nStarting testRandomWriter().");
    if (!(new FilePath(MiniMRYarnCluster.Appjar)).Exists()) {
        Log.Info("MRAppJar " + MiniMRYarnCluster.Appjar + " not found. Not running test.");
        return;
    }
    RandomTextWriterJob randomWriterJob = new RandomTextWriterJob();
    mrCluster.GetConfig().Set(RandomTextWriterJob.TotalBytes, "3072");
    mrCluster.GetConfig().Set(RandomTextWriterJob.BytesPerMap, "1024");
    Job job = randomWriterJob.CreateJob(mrCluster.GetConfig());
    Path outputDir = new Path(OutputRootDir, "random-output");
    FileOutputFormat.SetOutputPath(job, outputDir);
    job.SetSpeculativeExecution(false);
    job.AddFileToClassPath(AppJar); // The AppMaster jar itself.
    job.SetJarByClass(typeof(RandomTextWriterJob));
    job.SetMaxMapAttempts(1); // speed up failures
    job.Submit();
    string trackingUrl = job.GetTrackingURL();
    string jobId = job.GetJobID().ToString();
    bool succeeded = job.WaitForCompletion(true);
    NUnit.Framework.Assert.IsTrue(succeeded);
    NUnit.Framework.Assert.AreEqual(JobStatus.State.Succeeded, job.GetJobState());
    NUnit.Framework.Assert.IsTrue("Tracking URL was " + trackingUrl + " but didn't Match Job ID " + jobId,
        trackingUrl.EndsWith(Sharpen.Runtime.Substring(jobId, jobId.LastIndexOf("_")) + "/"));
    // Make sure there are three files in the output-dir
    RemoteIterator<FileStatus> iterator = FileContext.GetFileContext(mrCluster.GetConfig()).ListStatus(outputDir);
    int count = 0;
    while (iterator.HasNext()) {
        FileStatus file = iterator.Next();
        if (!file.GetPath().GetName().Equals(FileOutputCommitter.SucceededFileName)) {
            count++;
        }
    }
    NUnit.Framework.Assert.AreEqual("Number of part files is wrong!", 3, count);
    VerifyRandomWriterCounters(job);
}
/// <exception cref="System.IO.IOException"/>
public virtual int Run(Configuration conf, IList<string> args) {
    string path = StringUtils.PopOptionWithArgument("-path", args);
    if (path == null) {
        System.Console.Error.WriteLine("You must specify a path with -path.");
        return 1;
    }
    if (!args.IsEmpty()) {
        System.Console.Error.WriteLine("Can't understand argument: " + args[0]);
        System.Console.Error.WriteLine("Usage is " + GetShortUsage());
        return 1;
    }
    int exitCode = 0;
    try {
        DistributedFileSystem dfs = AdminHelper.GetDFS(conf);
        RemoteIterator<CacheDirectiveEntry> iter = dfs.ListCacheDirectives(
            new CacheDirectiveInfo.Builder().SetPath(new Path(path)).Build());
        while (iter.HasNext()) {
            CacheDirectiveEntry entry = iter.Next();
            try {
                dfs.RemoveCacheDirective(entry.GetInfo().GetId());
                System.Console.Out.WriteLine("Removed cache directive " + entry.GetInfo().GetId());
            } catch (IOException e) {
                System.Console.Error.WriteLine(AdminHelper.PrettifyException(e));
                exitCode = 2;
            }
        }
    } catch (IOException e) {
        System.Console.Error.WriteLine(AdminHelper.PrettifyException(e));
        exitCode = 2;
    }
    if (exitCode == 0) {
        System.Console.Out.WriteLine("Removed every cache directive with path " + path);
    }
    return exitCode;
}
/// <exception cref="System.IO.IOException"/>
private static IList<FileStatus> RemoteIterToList(RemoteIterator<FileStatus> rIter) {
    IList<FileStatus> fsList = new List<FileStatus>();
    if (rIter == null) {
        return fsList;
    }
    while (rIter.HasNext()) {
        fsList.AddItem(rIter.Next());
    }
    return fsList;
}
/// <exception cref="System.IO.IOException"/>
private int NumOfUsercacheDELDirs(string localDir) {
    int count = 0;
    RemoteIterator<FileStatus> fileStatus = localFS.ListStatus(new Path(localDir));
    while (fileStatus.HasNext()) {
        FileStatus status = fileStatus.Next();
        if (status.GetPath().GetName().Matches(".*" + ContainerLocalizer.Usercache + "_DEL_.*")) {
            count++;
        }
    }
    return count;
}
public virtual void TestDirectory() {
    fc.Mkdir(Dir1, FsPermission.GetDefault(), true);
    // test empty directory
    RemoteIterator<LocatedFileStatus> itor = fc.Util().ListFiles(Dir1, true);
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    itor = fc.Util().ListFiles(Dir1, false);
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    // testing directory with 1 file
    WriteFile(fc, File2, FileLen);
    itor = fc.Util().ListFiles(Dir1, true);
    LocatedFileStatus stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(FileLen, stat.GetLen());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File2), stat.GetPath());
    NUnit.Framework.Assert.AreEqual(1, stat.GetBlockLocations().Length);
    itor = fc.Util().ListFiles(Dir1, false);
    stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(FileLen, stat.GetLen());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File2), stat.GetPath());
    NUnit.Framework.Assert.AreEqual(1, stat.GetBlockLocations().Length);
    // test more complicated directory
    WriteFile(fc, File1, FileLen);
    WriteFile(fc, File3, FileLen);
    itor = fc.Util().ListFiles(TestDir, true);
    stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File2), stat.GetPath());
    stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File3), stat.GetPath());
    stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File1), stat.GetPath());
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    itor = fc.Util().ListFiles(TestDir, false);
    stat = itor.Next();
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File1), stat.GetPath());
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
}
public virtual void TestListStatusOnFile() {
    FileStatus[] stats = fs.ListStatus(file1);
    NUnit.Framework.Assert.AreEqual(1, stats.Length);
    FileStatus status = stats[0];
    NUnit.Framework.Assert.IsFalse(file1 + " should be a file", status.IsDirectory());
    NUnit.Framework.Assert.AreEqual(blockSize, status.GetBlockSize());
    NUnit.Framework.Assert.AreEqual(1, status.GetReplication());
    NUnit.Framework.Assert.AreEqual(fileSize, status.GetLen());
    NUnit.Framework.Assert.AreEqual(file1.MakeQualified(fs.GetUri(), fs.GetWorkingDirectory()).ToString(), status.GetPath().ToString());
    RemoteIterator<FileStatus> itor = fc.ListStatus(file1);
    status = itor.Next();
    NUnit.Framework.Assert.AreEqual(stats[0], status);
    NUnit.Framework.Assert.IsFalse(file1 + " should be a file", status.IsDirectory());
}
/// <exception cref="System.IO.IOException"/>
private FileStatus[] ListStatusInternal(bool located, Path dataPath) {
    FileStatus[] dirPaths = new FileStatus[0];
    if (located) {
        RemoteIterator<LocatedFileStatus> statIter = fsView.ListLocatedStatus(dataPath);
        AList<LocatedFileStatus> tmp = new AList<LocatedFileStatus>(10);
        while (statIter.HasNext()) {
            tmp.AddItem(statIter.Next());
        }
        dirPaths = Collections.ToArray(tmp, dirPaths);
    } else {
        dirPaths = fsView.ListStatus(dataPath);
    }
    return dirPaths;
}
public virtual void TestFile() {
    fc.Mkdir(TestDir, FsPermission.GetDefault(), true);
    WriteFile(fc, File1, FileLen);
    RemoteIterator<LocatedFileStatus> itor = fc.Util().ListFiles(File1, true);
    LocatedFileStatus stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(FileLen, stat.GetLen());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File1), stat.GetPath());
    NUnit.Framework.Assert.AreEqual(1, stat.GetBlockLocations().Length);
    itor = fc.Util().ListFiles(File1, false);
    stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    NUnit.Framework.Assert.IsTrue(stat.IsFile());
    NUnit.Framework.Assert.AreEqual(FileLen, stat.GetLen());
    NUnit.Framework.Assert.AreEqual(fc.MakeQualified(File1), stat.GetPath());
    NUnit.Framework.Assert.AreEqual(1, stat.GetBlockLocations().Length);
}
/// <summary>Add files in the input path recursively into the results.</summary>
/// <param name="result">The List to store all files.</param>
/// <param name="fs">The FileSystem.</param>
/// <param name="path">The input path.</param>
/// <param name="inputFilter">The input filter that can be used to filter files/dirs.</param>
/// <exception cref="System.IO.IOException"/>
protected internal virtual void AddInputPathRecursively(IList<FileStatus> result, FileSystem fs, Path path, PathFilter inputFilter) {
    RemoteIterator<LocatedFileStatus> iter = fs.ListLocatedStatus(path);
    while (iter.HasNext()) {
        LocatedFileStatus stat = iter.Next();
        if (inputFilter.Accept(stat.GetPath())) {
            if (stat.IsDirectory()) {
                AddInputPathRecursively(result, fs, stat.GetPath(), inputFilter);
            } else {
                result.AddItem(stat);
            }
        }
    }
}
public virtual void TestFile() {
    fs.Mkdirs(TestDir);
    WriteFile(fs, File1, FileLen);
    RemoteIterator<LocatedFileStatus> itor = fs.ListFiles(File1, true);
    LocatedFileStatus stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    Assert.True(stat.IsFile());
    Assert.Equal(FileLen, stat.GetLen());
    Assert.Equal(fs.MakeQualified(File1), stat.GetPath());
    Assert.Equal(1, stat.GetBlockLocations().Length);
    itor = fs.ListFiles(File1, false);
    stat = itor.Next();
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    Assert.True(stat.IsFile());
    Assert.Equal(FileLen, stat.GetLen());
    Assert.Equal(fs.MakeQualified(File1), stat.GetPath());
    Assert.Equal(1, stat.GetBlockLocations().Length);
    fs.Delete(File1, true);
}
/// <exception cref="System.IO.IOException"/>
public virtual int Run(Configuration conf, IList<string> args) {
    CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
    string pathFilter = StringUtils.PopOptionWithArgument("-path", args);
    if (pathFilter != null) {
        builder.SetPath(new Path(pathFilter));
    }
    string poolFilter = StringUtils.PopOptionWithArgument("-pool", args);
    if (poolFilter != null) {
        builder.SetPool(poolFilter);
    }
    bool printStats = StringUtils.PopOption("-stats", args);
    string idFilter = StringUtils.PopOptionWithArgument("-id", args);
    if (idFilter != null) {
        builder.SetId(long.Parse(idFilter));
    }
    if (!args.IsEmpty()) {
        System.Console.Error.WriteLine("Can't understand argument: " + args[0]);
        return 1;
    }
    TableListing.Builder tableBuilder = new TableListing.Builder()
        .AddField("ID", TableListing.Justification.Right)
        .AddField("POOL", TableListing.Justification.Left)
        .AddField("REPL", TableListing.Justification.Right)
        .AddField("EXPIRY", TableListing.Justification.Left)
        .AddField("PATH", TableListing.Justification.Left);
    if (printStats) {
        tableBuilder.AddField("BYTES_NEEDED", TableListing.Justification.Right)
            .AddField("BYTES_CACHED", TableListing.Justification.Right)
            .AddField("FILES_NEEDED", TableListing.Justification.Right)
            .AddField("FILES_CACHED", TableListing.Justification.Right);
    }
    TableListing tableListing = tableBuilder.Build();
    try {
        DistributedFileSystem dfs = AdminHelper.GetDFS(conf);
        RemoteIterator<CacheDirectiveEntry> iter = dfs.ListCacheDirectives(builder.Build());
        int numEntries = 0;
        while (iter.HasNext()) {
            CacheDirectiveEntry entry = iter.Next();
            CacheDirectiveInfo directive = entry.GetInfo();
            CacheDirectiveStats stats = entry.GetStats();
            IList<string> row = new List<string>();
            row.AddItem(string.Empty + directive.GetId());
            row.AddItem(directive.GetPool());
            row.AddItem(string.Empty + directive.GetReplication());
            string expiry;
            // This is effectively never, round for nice printing
            if (directive.GetExpiration().GetMillis() > CacheDirectiveInfo.Expiration.MaxRelativeExpiryMs / 2) {
                expiry = "never";
            } else {
                expiry = directive.GetExpiration().ToString();
            }
            row.AddItem(expiry);
            row.AddItem(directive.GetPath().ToUri().GetPath());
            if (printStats) {
                row.AddItem(string.Empty + stats.GetBytesNeeded());
                row.AddItem(string.Empty + stats.GetBytesCached());
                row.AddItem(string.Empty + stats.GetFilesNeeded());
                row.AddItem(string.Empty + stats.GetFilesCached());
            }
            tableListing.AddRow(Sharpen.Collections.ToArray(row, new string[row.Count]));
            numEntries++;
        }
        System.Console.Out.Write(string.Format("Found %d entr%s%n", numEntries, numEntries == 1 ? "y" : "ies"));
        if (numEntries > 0) {
            System.Console.Out.Write(tableListing);
        }
    } catch (IOException e) {
        System.Console.Error.WriteLine(AdminHelper.PrettifyException(e));
        return 2;
    }
    return 0;
}
public virtual void TestGetFileStatusOnDir() {
    // Create the directory
    Path dir = new Path("/test/mkdirs");
    NUnit.Framework.Assert.IsTrue("mkdir failed", fs.Mkdirs(dir));
    NUnit.Framework.Assert.IsTrue("mkdir failed", fs.Exists(dir));
    // test getFileStatus on an empty directory
    FileStatus status = fs.GetFileStatus(dir);
    NUnit.Framework.Assert.IsTrue(dir + " should be a directory", status.IsDirectory());
    NUnit.Framework.Assert.IsTrue(dir + " should be zero size ", status.GetLen() == 0);
    NUnit.Framework.Assert.AreEqual(dir.MakeQualified(fs.GetUri(), fs.GetWorkingDirectory()).ToString(), status.GetPath().ToString());
    // test listStatus on an empty directory
    FileStatus[] stats = fs.ListStatus(dir);
    NUnit.Framework.Assert.AreEqual(dir + " should be empty", 0, stats.Length);
    NUnit.Framework.Assert.AreEqual(dir + " should be zero size ", 0, fs.GetContentSummary(dir).GetLength());
    NUnit.Framework.Assert.AreEqual(dir + " should be zero size using hftp", 0, hftpfs.GetContentSummary(dir).GetLength());
    RemoteIterator<FileStatus> itor = fc.ListStatus(dir);
    NUnit.Framework.Assert.IsFalse(dir + " should be empty", itor.HasNext());
    itor = fs.ListStatusIterator(dir);
    NUnit.Framework.Assert.IsFalse(dir + " should be empty", itor.HasNext());
    // create another file that is smaller than a block.
    Path file2 = new Path(dir, "filestatus2.dat");
    WriteFile(fs, file2, 1, blockSize / 4, blockSize);
    CheckFile(fs, file2, 1);
    // verify file attributes
    status = fs.GetFileStatus(file2);
    NUnit.Framework.Assert.AreEqual(blockSize, status.GetBlockSize());
    NUnit.Framework.Assert.AreEqual(1, status.GetReplication());
    file2 = fs.MakeQualified(file2);
    NUnit.Framework.Assert.AreEqual(file2.ToString(), status.GetPath().ToString());
    // Create another file in the same directory
    Path file3 = new Path(dir, "filestatus3.dat");
    WriteFile(fs, file3, 1, blockSize / 4, blockSize);
    CheckFile(fs, file3, 1);
    file3 = fs.MakeQualified(file3);
    // Verify that the size of the directory increased by the size
    // of the two files
    int expected = blockSize / 2;
    NUnit.Framework.Assert.AreEqual(dir + " size should be " + expected, expected, fs.GetContentSummary(dir).GetLength());
    NUnit.Framework.Assert.AreEqual(dir + " size should be " + expected + " using hftp", expected, hftpfs.GetContentSummary(dir).GetLength());
    // Test listStatus on a non-empty directory
    stats = fs.ListStatus(dir);
    NUnit.Framework.Assert.AreEqual(dir + " should have two entries", 2, stats.Length);
    NUnit.Framework.Assert.AreEqual(file2.ToString(), stats[0].GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), stats[1].GetPath().ToString());
    itor = fc.ListStatus(dir);
    NUnit.Framework.Assert.AreEqual(file2.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.IsFalse("Unexpected additional file", itor.HasNext());
    itor = fs.ListStatusIterator(dir);
    NUnit.Framework.Assert.AreEqual(file2.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.IsFalse("Unexpected additional file", itor.HasNext());
    // Test iterative listing. Now dir has 2 entries, create one more.
    Path dir3 = fs.MakeQualified(new Path(dir, "dir3"));
    fs.Mkdirs(dir3);
    dir3 = fs.MakeQualified(dir3);
    stats = fs.ListStatus(dir);
    NUnit.Framework.Assert.AreEqual(dir + " should have three entries", 3, stats.Length);
    NUnit.Framework.Assert.AreEqual(dir3.ToString(), stats[0].GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file2.ToString(), stats[1].GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), stats[2].GetPath().ToString());
    itor = fc.ListStatus(dir);
    NUnit.Framework.Assert.AreEqual(dir3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file2.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.IsFalse("Unexpected additional file", itor.HasNext());
    itor = fs.ListStatusIterator(dir);
    NUnit.Framework.Assert.AreEqual(dir3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file2.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.IsFalse("Unexpected additional file", itor.HasNext());
    // Now dir has 3 entries, create two more
    Path dir4 = fs.MakeQualified(new Path(dir, "dir4"));
    fs.Mkdirs(dir4);
    dir4 = fs.MakeQualified(dir4);
    Path dir5 = fs.MakeQualified(new Path(dir, "dir5"));
    fs.Mkdirs(dir5);
    dir5 = fs.MakeQualified(dir5);
    stats = fs.ListStatus(dir);
    NUnit.Framework.Assert.AreEqual(dir + " should have five entries", 5, stats.Length);
    NUnit.Framework.Assert.AreEqual(dir3.ToString(), stats[0].GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(dir4.ToString(), stats[1].GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(dir5.ToString(), stats[2].GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file2.ToString(), stats[3].GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), stats[4].GetPath().ToString());
    itor = fc.ListStatus(dir);
    NUnit.Framework.Assert.AreEqual(dir3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(dir4.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(dir5.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file2.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    itor = fs.ListStatusIterator(dir);
    NUnit.Framework.Assert.AreEqual(dir3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(dir4.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(dir5.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file2.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.AreEqual(file3.ToString(), itor.Next().GetPath().ToString());
    NUnit.Framework.Assert.IsFalse(itor.HasNext());
    {
        // test permission error on hftp
        fs.SetPermission(dir, new FsPermission((short)0));
        try {
            string username = UserGroupInformation.GetCurrentUser().GetShortUserName() + "1";
            HftpFileSystem hftp2 = cluster.GetHftpFileSystemAs(username, conf, 0, "somegroup");
            hftp2.GetContentSummary(dir);
            NUnit.Framework.Assert.Fail();
        } catch (IOException ioe) {
            FileSystem.Log.Info("GOOD: getting an exception", ioe);
        }
    }
    fs.Delete(dir, true);
}
/// <summary>test listCorruptFileBlocks in DistributedFileSystem</summary>
/// <exception cref="System.Exception"/>
public virtual void TestlistCorruptFileBlocksDFS() {
    Configuration conf = new Configuration();
    conf.SetLong(DFSConfigKeys.DfsBlockreportIntervalMsecKey, 1000);
    conf.SetInt(DFSConfigKeys.DfsDatanodeDirectoryscanIntervalKey, 1); // datanode scans directories
    FileSystem fs = null;
    MiniDFSCluster cluster = null;
    try {
        cluster = new MiniDFSCluster.Builder(conf).Build();
        cluster.WaitActive();
        fs = cluster.GetFileSystem();
        DistributedFileSystem dfs = (DistributedFileSystem)fs;
        DFSTestUtil util = new DFSTestUtil.Builder().SetName("testGetCorruptFiles").SetNumFiles(3).SetMaxLevels(1).SetMaxSize(1024).Build();
        util.CreateFiles(fs, "/corruptData");
        RemoteIterator<Path> corruptFileBlocks = dfs.ListCorruptFileBlocks(new Path("/corruptData"));
        int numCorrupt = CountPaths(corruptFileBlocks);
        NUnit.Framework.Assert.IsTrue(numCorrupt == 0);
        // delete the blocks
        string bpid = cluster.GetNamesystem().GetBlockPoolId();
        // For loop through number of data directories per datanode (2)
        for (int i = 0; i < 2; i++) {
            FilePath storageDir = cluster.GetInstanceStorageDir(0, i);
            FilePath data_dir = MiniDFSCluster.GetFinalizedDir(storageDir, bpid);
            IList<FilePath> metadataFiles = MiniDFSCluster.GetAllBlockMetadataFiles(data_dir);
            if (metadataFiles == null) {
                continue;
            }
            // assertTrue("Blocks do not exist in data-dir", (blocks != null) && (blocks.length > 0));
            foreach (FilePath metadataFile in metadataFiles) {
                FilePath blockFile = Block.MetaToBlockFile(metadataFile);
                Log.Info("Deliberately removing file " + blockFile.GetName());
                NUnit.Framework.Assert.IsTrue("Cannot remove file.", blockFile.Delete());
                Log.Info("Deliberately removing file " + metadataFile.GetName());
                NUnit.Framework.Assert.IsTrue("Cannot remove file.", metadataFile.Delete());
            }
        }
        // break;
        int count = 0;
        corruptFileBlocks = dfs.ListCorruptFileBlocks(new Path("/corruptData"));
        numCorrupt = CountPaths(corruptFileBlocks);
        while (numCorrupt < 3) {
            Sharpen.Thread.Sleep(1000);
            corruptFileBlocks = dfs.ListCorruptFileBlocks(new Path("/corruptData"));
            numCorrupt = CountPaths(corruptFileBlocks);
            count++;
            if (count > 30) {
                break;
            }
        }
        // Validate we get all the corrupt files
        Log.Info("Namenode has bad files. " + numCorrupt);
        NUnit.Framework.Assert.IsTrue(numCorrupt == 3);
        util.Cleanup(fs, "/corruptData");
        util.Cleanup(fs, "/goodData");
    } finally {
        if (cluster != null) {
            cluster.Shutdown();
        }
    }
}
/// <summary>
/// Verify a DN remains in DECOMMISSION_INPROGRESS state if it is marked
/// as dead before decommission has completed.
/// </summary>
/// <remarks>
/// Verify a DN remains in DECOMMISSION_INPROGRESS state if it is marked
/// as dead before decommission has completed. That will allow DN to resume
/// the replication process after it rejoins the cluster.
/// </remarks>
/// <exception cref="System.Exception"/>
public virtual void TestDecommissionStatusAfterDNRestart() {
    DistributedFileSystem fileSys = (DistributedFileSystem)cluster.GetFileSystem();
    // Create a file with one block. That block has one replica.
    Path f = new Path("decommission.dat");
    DFSTestUtil.CreateFile(fileSys, f, fileSize, fileSize, fileSize, (short)1, seed);
    // Find the DN that owns the only replica.
    RemoteIterator<LocatedFileStatus> fileList = fileSys.ListLocatedStatus(f);
    BlockLocation[] blockLocations = fileList.Next().GetBlockLocations();
    string dnName = blockLocations[0].GetNames()[0];
    // Decommission the DN.
    FSNamesystem fsn = cluster.GetNamesystem();
    DatanodeManager dm = fsn.GetBlockManager().GetDatanodeManager();
    DecommissionNode(fsn, localFileSys, dnName);
    dm.RefreshNodes(conf);
    // Stop the DN when decommission is in progress.
    // Given DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY is set to 1 and the size of
    // the block, it will take much longer than the test timeout value for
    // the decommission to complete. So when stopDataNode is called,
    // decommission should be in progress.
    MiniDFSCluster.DataNodeProperties dataNodeProperties = cluster.StopDataNode(dnName);
    IList<DatanodeDescriptor> dead = new AList<DatanodeDescriptor>();
    while (true) {
        dm.FetchDatanodes(null, dead, false);
        if (dead.Count == 1) {
            break;
        }
        Sharpen.Thread.Sleep(1000);
    }
    // Force removal of the dead node's blocks.
    BlockManagerTestUtil.CheckHeartbeat(fsn.GetBlockManager());
    // Force DatanodeManager to check decommission state.
    BlockManagerTestUtil.RecheckDecommissionState(dm);
    // Verify that the DN remains in DECOMMISSION_INPROGRESS state.
    NUnit.Framework.Assert.IsTrue("the node should be DECOMMISSION_IN_PROGRESSS", dead[0].IsDecommissionInProgress());
    // Check DatanodeManager#getDecommissionNodes, make sure it returns
    // the node as decommissioning, even if it's dead
    IList<DatanodeDescriptor> decomlist = dm.GetDecommissioningNodes();
    NUnit.Framework.Assert.IsTrue("The node should be be decommissioning", decomlist.Count == 1);
    // Delete the under-replicated file, which should let the
    // DECOMMISSION_IN_PROGRESS node become DECOMMISSIONED
    CleanupFile(fileSys, f);
    BlockManagerTestUtil.RecheckDecommissionState(dm);
    NUnit.Framework.Assert.IsTrue("the node should be decommissioned", dead[0].IsDecommissioned());
    // Add the node back
    cluster.RestartDataNode(dataNodeProperties, true);
    cluster.WaitActive();
    // Call refreshNodes on FSNamesystem with empty exclude file.
    // This will remove the datanodes from decommissioning list and
    // make them available again.
    WriteConfigFile(localFileSys, excludeFile, null);
    dm.RefreshNodes(conf);
}
/// <summary>Looks for the dirs to clean.</summary>
/// <remarks>
/// Looks for the dirs to clean. The folder structure is YYYY/MM/DD/Serial so
/// we can use that to more efficiently find the directories to clean by
/// comparing the cutoff timestamp with the timestamp from the folder
/// structure.
/// </remarks>
/// <param name="fc">done dir FileContext</param>
/// <param name="root">folder for completed jobs</param>
/// <param name="cutoff">The cutoff for the max history age</param>
/// <returns>The list of directories for cleaning</returns>
/// <exception cref="System.IO.IOException"/>
public static IList<FileStatus> GetHistoryDirsForCleaning(FileContext fc, Path root, long cutoff) {
    IList<FileStatus> fsList = new AList<FileStatus>();
    Calendar cCal = Calendar.GetInstance();
    cCal.SetTimeInMillis(cutoff);
    int cYear = cCal.Get(Calendar.Year);
    int cMonth = cCal.Get(Calendar.Month) + 1;
    int cDate = cCal.Get(Calendar.Date);
    RemoteIterator<FileStatus> yearDirIt = fc.ListStatus(root);
    while (yearDirIt.HasNext()) {
        FileStatus yearDir = yearDirIt.Next();
        try {
            int year = System.Convert.ToInt32(yearDir.GetPath().GetName());
            if (year <= cYear) {
                RemoteIterator<FileStatus> monthDirIt = fc.ListStatus(yearDir.GetPath());
                while (monthDirIt.HasNext()) {
                    FileStatus monthDir = monthDirIt.Next();
                    try {
                        int month = System.Convert.ToInt32(monthDir.GetPath().GetName());
                        // If we only checked the month here, then something like 07/2013
                        // would incorrectly not pass when the cutoff is 06/2014
                        if (year < cYear || month <= cMonth) {
                            RemoteIterator<FileStatus> dateDirIt = fc.ListStatus(monthDir.GetPath());
                            while (dateDirIt.HasNext()) {
                                FileStatus dateDir = dateDirIt.Next();
                                try {
                                    int date = System.Convert.ToInt32(dateDir.GetPath().GetName());
                                    // If we only checked the date here, then something like
                                    // 07/21/2013 would incorrectly not pass when the cutoff is
                                    // 08/20/2013 or 07/20/2012
                                    if (year < cYear || month < cMonth || date <= cDate) {
                                        Sharpen.Collections.AddAll(fsList, RemoteIterToList(fc.ListStatus(dateDir.GetPath())));
                                    }
                                } catch (FormatException) {
                                    // the directory didn't fit the format we're looking for so skip the dir
                                }
                            }
                        }
                    } catch (FormatException) {
                        // the directory didn't fit the format we're looking for so skip the dir
                    }
                }
            }
        } catch (FormatException) {
            // the directory didn't fit the format we're looking for so skip the dir
        }
    }
    return fsList;
}
public virtual void TestListStatus() {
    string hPrefix = "test/hadoop";
    string[] dirs = new string[] { hPrefix + "/a", hPrefix + "/b", hPrefix + "/c",
        hPrefix + "/1", hPrefix + "/#@#@", hPrefix + "/&*#$#$@234" };
    AList<Path> testDirs = new AList<Path>();
    foreach (string d in dirs) {
        if (!IsTestableFileNameOnPlatform(d)) {
            continue;
        }
        testDirs.AddItem(QualifiedPath(d, fc2));
    }
    NUnit.Framework.Assert.IsFalse(FileContextTestHelper.Exists(fc1, testDirs[0]));
    foreach (Path path in testDirs) {
        fc1.Mkdir(path, FsPermission.GetDefault(), true);
    }
    // test listStatus that returns an array of FileStatus
    FileStatus[] paths = fc1.Util().ListStatus(QualifiedPath("test", fc1));
    Assert.Equal(1, paths.Length);
    Assert.Equal(QualifiedPath(hPrefix, fc1), paths[0].GetPath());
    paths = fc1.Util().ListStatus(QualifiedPath(hPrefix, fc1));
    Assert.Equal(testDirs.Count, paths.Length);
    for (int i = 0; i < testDirs.Count; i++) {
        bool found = false;
        for (int j = 0; j < paths.Length; j++) {
            if (QualifiedPath(testDirs[i].ToString(), fc1).Equals(paths[j].GetPath())) {
                found = true;
            }
        }
        Assert.True(testDirs[i] + " not found", found);
    }
    paths = fc1.Util().ListStatus(QualifiedPath(dirs[0], fc1));
    Assert.Equal(0, paths.Length);
    // test listStatus that returns an iterator of FileStatus
    RemoteIterator<FileStatus> pathsItor = fc1.ListStatus(QualifiedPath("test", fc1));
    Assert.Equal(QualifiedPath(hPrefix, fc1), pathsItor.Next().GetPath());
    NUnit.Framework.Assert.IsFalse(pathsItor.HasNext());
    pathsItor = fc1.ListStatus(QualifiedPath(hPrefix, fc1));
    int dirLen = 0;
    for (; pathsItor.HasNext(); dirLen++) {
        bool found = false;
        FileStatus stat = pathsItor.Next();
        for (int j = 0; j < dirs.Length; j++) {
            if (QualifiedPath(dirs[j], fc1).Equals(stat.GetPath())) {
                found = true;
                break;
            }
        }
        Assert.True(stat.GetPath() + " not found", found);
    }
    Assert.Equal(testDirs.Count, dirLen);
    pathsItor = fc1.ListStatus(QualifiedPath(dirs[0], fc1));
    NUnit.Framework.Assert.IsFalse(pathsItor.HasNext());
}
public virtual void TestOperationsThroughMountLinks() {
    // Create file
    fileContextTestHelper.CreateFileNonRecursive(fcView, "/user/foo");
    Assert.True("Create file should be file", FileContextTestHelper.IsFile(fcView, new Path("/user/foo")));
    Assert.True("Target of created file should be type file", FileContextTestHelper.IsFile(fcTarget, new Path(targetTestRoot, "user/foo")));
    // Delete the created file
    Assert.True("Delete should succeed", fcView.Delete(new Path("/user/foo"), false));
    NUnit.Framework.Assert.IsFalse("File should not exist after delete", FileContextTestHelper.Exists(fcView, new Path("/user/foo")));
    NUnit.Framework.Assert.IsFalse("Target File should not exist after delete", FileContextTestHelper.Exists(fcTarget, new Path(targetTestRoot, "user/foo")));
    // Create file with a 2 component dirs
    fileContextTestHelper.CreateFileNonRecursive(fcView, "/internalDir/linkToDir2/foo");
    Assert.True("Created file should be type file", FileContextTestHelper.IsFile(fcView, new Path("/internalDir/linkToDir2/foo")));
    Assert.True("Target of created file should be type file", FileContextTestHelper.IsFile(fcTarget, new Path(targetTestRoot, "dir2/foo")));
    // Delete the created file
    Assert.True("Delete should succeed", fcView.Delete(new Path("/internalDir/linkToDir2/foo"), false));
    NUnit.Framework.Assert.IsFalse("File should not exist after deletion", FileContextTestHelper.Exists(fcView, new Path("/internalDir/linkToDir2/foo")));
    NUnit.Framework.Assert.IsFalse("Target should not exist after deletion", FileContextTestHelper.Exists(fcTarget, new Path(targetTestRoot, "dir2/foo")));
    // Create file with a 3 component dirs
    fileContextTestHelper.CreateFileNonRecursive(fcView, "/internalDir/internalDir2/linkToDir3/foo");
    Assert.True("Created file should be of type file", FileContextTestHelper.IsFile(fcView, new Path("/internalDir/internalDir2/linkToDir3/foo")));
    Assert.True("Target of created file should also be type file", FileContextTestHelper.IsFile(fcTarget, new Path(targetTestRoot, "dir3/foo")));
    // Recursive Create file with missing dirs
    fileContextTestHelper.CreateFile(fcView, "/internalDir/linkToDir2/missingDir/miss2/foo");
    Assert.True("Created file should be of type file", FileContextTestHelper.IsFile(fcView, new Path("/internalDir/linkToDir2/missingDir/miss2/foo")));
    Assert.True("Target of created file should also be type file", FileContextTestHelper.IsFile(fcTarget, new Path(targetTestRoot, "dir2/missingDir/miss2/foo")));
    // Delete the created file
    Assert.True("Delete should succeed", fcView.Delete(new Path("/internalDir/internalDir2/linkToDir3/foo"), false));
    NUnit.Framework.Assert.IsFalse("Deleted File should not exist", FileContextTestHelper.Exists(fcView, new Path("/internalDir/internalDir2/linkToDir3/foo")));
    NUnit.Framework.Assert.IsFalse("Target of deleted file should not exist", FileContextTestHelper.Exists(fcTarget, new Path(targetTestRoot, "dir3/foo")));
    // mkdir
    fcView.Mkdir(fileContextTestHelper.GetTestRootPath(fcView, "/user/dirX"), FileContext.DefaultPerm, false);
    Assert.True("New dir should be type dir", FileContextTestHelper.IsDir(fcView, new Path("/user/dirX")));
    Assert.True("Target of new dir should be of type dir", FileContextTestHelper.IsDir(fcTarget, new Path(targetTestRoot, "user/dirX")));
    fcView.Mkdir(fileContextTestHelper.GetTestRootPath(fcView, "/user/dirX/dirY"), FileContext.DefaultPerm, false);
    Assert.True("New dir should be type dir", FileContextTestHelper.IsDir(fcView, new Path("/user/dirX/dirY")));
    Assert.True("Target of new dir should be of type dir", FileContextTestHelper.IsDir(fcTarget, new Path(targetTestRoot, "user/dirX/dirY")));
    // Delete the created dir
    Assert.True("Delete should succeed", fcView.Delete(new Path("/user/dirX/dirY"), false));
    NUnit.Framework.Assert.IsFalse("Deleted File should not exist", FileContextTestHelper.Exists(fcView, new Path("/user/dirX/dirY")));
    NUnit.Framework.Assert.IsFalse("Deleted Target should not exist", FileContextTestHelper.Exists(fcTarget, new Path(targetTestRoot, "user/dirX/dirY")));
    Assert.True("Delete should succeed", fcView.Delete(new Path("/user/dirX"), false));
    NUnit.Framework.Assert.IsFalse("Deleted File should not exist", FileContextTestHelper.Exists(fcView, new Path("/user/dirX")));
    NUnit.Framework.Assert.IsFalse("Deleted Target should not exist", FileContextTestHelper.Exists(fcTarget, new Path(targetTestRoot, "user/dirX")));
    // Rename a file
    fileContextTestHelper.CreateFile(fcView, "/user/foo");
    fcView.Rename(new Path("/user/foo"), new Path("/user/fooBar"));
    NUnit.Framework.Assert.IsFalse("Renamed src should not exist", FileContextTestHelper.Exists(fcView, new Path("/user/foo")));
    NUnit.Framework.Assert.IsFalse(FileContextTestHelper.Exists(fcTarget, new Path(targetTestRoot, "user/foo")));
    Assert.True(FileContextTestHelper.IsFile(fcView, fileContextTestHelper.GetTestRootPath(fcView, "/user/fooBar")));
    Assert.True(FileContextTestHelper.IsFile(fcTarget, new Path(targetTestRoot, "user/fooBar")));
    fcView.Mkdir(new Path("/user/dirFoo"), FileContext.DefaultPerm, false);
    fcView.Rename(new Path("/user/dirFoo"), new Path("/user/dirFooBar"));
    NUnit.Framework.Assert.IsFalse("Renamed src should not exist", FileContextTestHelper.Exists(fcView, new Path("/user/dirFoo")));
    NUnit.Framework.Assert.IsFalse("Renamed src should not exist in target", FileContextTestHelper.Exists(fcTarget, new Path(targetTestRoot, "user/dirFoo")));
    Assert.True("Renamed dest should exist as dir", FileContextTestHelper.IsDir(fcView, fileContextTestHelper.GetTestRootPath(fcView, "/user/dirFooBar")));
    Assert.True("Renamed dest should exist as dir in target", FileContextTestHelper.IsDir(fcTarget, new Path(targetTestRoot, "user/dirFooBar")));
    // Make a directory under a directory that's mounted from the root of another FS
    fcView.Mkdir(new Path("/targetRoot/dirFoo"), FileContext.DefaultPerm, false);
    Assert.True(FileContextTestHelper.Exists(fcView, new Path("/targetRoot/dirFoo")));
    bool dirFooPresent = false;
    RemoteIterator<FileStatus> dirContents = fcView.ListStatus(new Path("/targetRoot/"));
    while (dirContents.HasNext()) {
        FileStatus fileStatus = dirContents.Next();
        if (fileStatus.GetPath().GetName().Equals("dirFoo")) {
            dirFooPresent = true;
        }
    }
    Assert.True(dirFooPresent);
}
/// <exception cref="System.IO.IOException"/>
public virtual int Run(Configuration conf, IList<string> args) {
    string name = StringUtils.PopFirstNonOption(args);
    bool printStats = StringUtils.PopOption("-stats", args);
    if (!args.IsEmpty()) {
        System.Console.Error.Write("Can't understand arguments: " + Joiner.On(" ").Join(args) + "\n");
        System.Console.Error.WriteLine("Usage is " + GetShortUsage());
        return 1;
    }
    DistributedFileSystem dfs = AdminHelper.GetDFS(conf);
    TableListing.Builder builder = new TableListing.Builder()
        .AddField("NAME", TableListing.Justification.Left)
        .AddField("OWNER", TableListing.Justification.Left)
        .AddField("GROUP", TableListing.Justification.Left)
        .AddField("MODE", TableListing.Justification.Left)
        .AddField("LIMIT", TableListing.Justification.Right)
        .AddField("MAXTTL", TableListing.Justification.Right);
    if (printStats) {
        builder.AddField("BYTES_NEEDED", TableListing.Justification.Right)
            .AddField("BYTES_CACHED", TableListing.Justification.Right)
            .AddField("BYTES_OVERLIMIT", TableListing.Justification.Right)
            .AddField("FILES_NEEDED", TableListing.Justification.Right)
            .AddField("FILES_CACHED", TableListing.Justification.Right);
    }
    TableListing listing = builder.Build();
    int numResults = 0;
    try {
        RemoteIterator<CachePoolEntry> iter = dfs.ListCachePools();
        while (iter.HasNext()) {
            CachePoolEntry entry = iter.Next();
            CachePoolInfo info = entry.GetInfo();
            List<string> row = new List<string>();
            if (name == null || info.GetPoolName().Equals(name)) {
                row.AddItem(info.GetPoolName());
                row.AddItem(info.GetOwnerName());
                row.AddItem(info.GetGroupName());
                row.AddItem(info.GetMode() != null ? info.GetMode().ToString() : null);
                long limit = info.GetLimit();
                string limitString;
                if (limit != null && limit.Equals(CachePoolInfo.LimitUnlimited)) {
                    limitString = "unlimited";
                } else {
                    limitString = string.Empty + limit;
                }
                row.AddItem(limitString);
                long maxTtl = info.GetMaxRelativeExpiryMs();
                string maxTtlString = null;
                if (maxTtl != null) {
                    if (maxTtl == CachePoolInfo.RelativeExpiryNever) {
                        maxTtlString = "never";
                    } else {
                        maxTtlString = DFSUtil.DurationToString(maxTtl);
                    }
                }
                row.AddItem(maxTtlString);
                if (printStats) {
                    CachePoolStats stats = entry.GetStats();
                    row.AddItem(System.Convert.ToString(stats.GetBytesNeeded()));
                    row.AddItem(System.Convert.ToString(stats.GetBytesCached()));
                    row.AddItem(System.Convert.ToString(stats.GetBytesOverlimit()));
                    row.AddItem(System.Convert.ToString(stats.GetFilesNeeded()));
                    row.AddItem(System.Convert.ToString(stats.GetFilesCached()));
                }
                listing.AddRow(Sharpen.Collections.ToArray(row, new string[row.Count]));
                ++numResults;
                if (name != null) {
                    break;
                }
            }
        }
    } catch (IOException e) {
        System.Console.Error.WriteLine(AdminHelper.PrettifyException(e));
        return 2;
    }
    System.Console.Out.Write(string.Format("Found %d result%s.%n", numResults, (numResults == 1 ? string.Empty : "s")));
    if (numResults > 0) {
        System.Console.Out.Write(listing);
    }
    // If list pools succeed, we return 0 (success exit code)
    return 0;
}