コード例 #1
0
        /// <summary>
        /// Runs the given FsShell command line, then verifies that "/data" exists
        /// on the namenode's file system and removes it again.
        /// </summary>
        private void Execute(string[] args, string namenode)
        {
            FsShell shell = new FsShell();
            FileSystem fs = null;
            try
            {
                ToolRunner.Run(shell, args);
                // Connect to the target namenode with the configuration the shell built.
                fs = FileSystem.Get(NameNode.GetUri(NameNode.GetAddress(namenode)), shell.GetConf());
                NUnit.Framework.Assert.IsTrue("Directory does not get created", fs.IsDirectory(new Path("/data")));
                fs.Delete(new Path("/data"), true);
            }
            catch (Exception e)
            {
                System.Console.Error.WriteLine(e.Message);
                Sharpen.Runtime.PrintStackTrace(e);
            }
            finally
            {
                if (fs != null)
                {
                    try
                    {
                        fs.Close();
                    }
                    catch (IOException)
                    {
                        // Best-effort close; a failure here does not affect the result.
                    }
                }
            }
        }
コード例 #2
0
ファイル: InputSampler.cs プロジェクト: orf53975/hadoop.net
        /// <summary>Command-line entry point: runs the input sampler via ToolRunner.</summary>
        /// <param name="args">command line arguments forwarded to the tool</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // The constructor must repeat the generic type arguments: a bare
            // "new InputSampler(...)" does not compile against the generic
            // InputSampler<K, V> type being assigned here.
            InputSampler<object, object> sampler = new InputSampler<object, object>(new Configuration());
            int res = ToolRunner.Run(sampler, args);

            System.Environment.Exit(res);
        }
コード例 #3
0
        /// <summary>Entry point for fsck: prints usage, handles -help, or runs DFSck.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // -files option is also used by GenericOptionsParser
            // Make sure that is not the first argument for fsck
            int res = -1;
            if (args.Length == 0 || "-files".Equals(args[0]))
            {
                PrintUsage(System.Console.Error);
                ToolRunner.PrintGenericCommandUsage(System.Console.Error);
            }
            else if (DFSUtil.ParseHelpArgument(args, Usage, System.Console.Out, true))
            {
                res = 0;
            }
            else
            {
                res = ToolRunner.Run(new Org.Apache.Hadoop.Hdfs.Tools.DFSck(new HdfsConfiguration()), args);
            }
            System.Environment.Exit(res);
        }
コード例 #4
0
ファイル: RMAdminCLI.cs プロジェクト: orf53975/hadoop.net
        /// <summary>Entry point: runs the RM admin CLI and exits with its result code.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // Delegate argument parsing and execution to ToolRunner, then propagate
            // the tool's exit status to the calling process.
            System.Environment.Exit(ToolRunner.Run(new Org.Apache.Hadoop.Yarn.Client.Cli.RMAdminCLI(), args));
        }
コード例 #5
0
        /// <summary>
        /// Runs CredentialsTestJob against the mini cluster and verifies the
        /// distributed job finishes with exit code 0.
        /// </summary>
        public virtual void Test()
        {
            // make sure JT starts
            Configuration jobConf = new JobConf(mrCluster.GetConfig());
            // provide namenodes names for the job to get the delegation tokens for
            //String nnUri = dfsCluster.getNameNode().getUri(namenode).toString();
            NameNode nn = dfsCluster.GetNameNode();
            URI nnUri = NameNode.GetUri(nn.GetNameNodeAddress());

            // String concatenation already stringifies the URI; the explicit
            // ".ToString()" on the second operand was redundant.
            jobConf.Set(JobContext.JobNamenodes, nnUri + "," + nnUri);
            jobConf.Set("mapreduce.job.credentials.json", "keys.json");
            // using argument to pass the file name
            string[] args = new string[] { "-m", "1", "-r", "1", "-mt", "1", "-rt", "1" };
            int res = -1;

            try
            {
                res = ToolRunner.Run(jobConf, new CredentialsTestJob(), args);
            }
            catch (Exception e)
            {
                System.Console.Out.WriteLine("Job failed with" + e.GetLocalizedMessage());
                Sharpen.Runtime.PrintStackTrace(e, System.Console.Out);
                NUnit.Framework.Assert.Fail("Job failed");
            }
            // AreEqual takes (message, expected, actual); the original passed the
            // actual value in the expected slot (cf. the sibling TestTokenCacheFile).
            NUnit.Framework.Assert.AreEqual("dist job res is not 0", 0, res);
        }
コード例 #6
0
        /// <summary>Entry point: runs GetGroupsForTesting with a fresh YARN configuration.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] argv)
        {
            var tool = new Org.Apache.Hadoop.Yarn.Client.GetGroupsForTesting(new YarnConfiguration());
            int exitCode = ToolRunner.Run(tool, argv);
            System.Environment.Exit(exitCode);
        }
コード例 #7
0
ファイル: Join.cs プロジェクト: orf53975/hadoop.net
        /// <summary>Entry point: runs the Join example and exits with its status code.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            Org.Apache.Hadoop.Examples.Join tool = new Org.Apache.Hadoop.Examples.Join();
            System.Environment.Exit(ToolRunner.Run(new Configuration(), tool, args));
        }
コード例 #8
0
        /// <summary>
        /// Verifies that a SleepJob launched with a binary token cache file
        /// (-tokenCacheFile) completes with exit code 0.
        /// </summary>
        public virtual void TestTokenCacheFile()
        {
            Configuration conf = mrCluster.GetConfig();
            CreateBinaryTokenFile(conf);
            // provide namenodes names for the job to get the delegation tokens for
            string nnUri = dfsCluster.GetURI(0).ToString();
            conf.Set(MRJobConfig.JobNamenodes, nnUri + "," + nnUri);
            // using argument to pass the file name
            string[] jobArgs =
            {
                "-tokenCacheFile", binaryTokenFileName.ToString(),
                "-m", "1", "-r", "1", "-mt", "1", "-rt", "1"
            };
            int exitCode = -1;
            try
            {
                exitCode = ToolRunner.Run(conf, new SleepJob(), jobArgs);
            }
            catch (Exception e)
            {
                System.Console.Out.WriteLine("Job failed with " + e.GetLocalizedMessage());
                Sharpen.Runtime.PrintStackTrace(e, System.Console.Out);
                NUnit.Framework.Assert.Fail("Job failed");
            }
            NUnit.Framework.Assert.AreEqual("dist job res is not 0:", 0, exitCode);
        }
コード例 #9
0
ファイル: TestMover.cs プロジェクト: orf53975/hadoop.net
        /// <summary>
        /// HDFS-8147: after its retry budget is exhausted (2 attempts here), the
        /// mover must give up and report ExitStatus.IoException when a block file
        /// has been deleted out from under it.
        /// </summary>
        public virtual void TestMoverFailedRetry()
        {
            // HDFS-8147
            Configuration conf = new HdfsConfiguration();
            conf.Set(DFSConfigKeys.DfsMoverRetryMaxAttemptsKey, "2");
            // Three datanodes, each with one DISK and one ARCHIVE volume.
            MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).NumDataNodes(3).StorageTypes(
                new StorageType[][]
                {
                    new StorageType[] { StorageType.Disk, StorageType.Archive },
                    new StorageType[] { StorageType.Disk, StorageType.Archive },
                    new StorageType[] { StorageType.Disk, StorageType.Archive }
                }).Build();
            try
            {
                cluster.WaitActive();
                DistributedFileSystem dfs = cluster.GetFileSystem();
                string file = "/testMoverFailedRetry";
                // write to DISK
                FSDataOutputStream @out = dfs.Create(new Path(file), (short)2);
                @out.WriteChars("testMoverFailedRetry");
                @out.Close();
                // Delete block file so, block move will fail with FileNotFoundException
                LocatedBlock lb = dfs.GetClient().GetLocatedBlocks(file, 0).Get(0);
                cluster.CorruptBlockOnDataNodesByDeletingBlockFile(lb.GetBlock());
                // move to ARCHIVE
                dfs.SetStoragePolicy(new Path(file), "COLD");
                // "file" is already a string; the original called ToString() on it redundantly.
                int rc = ToolRunner.Run(conf, new Mover.Cli(), new string[] { "-p", file });
                NUnit.Framework.Assert.AreEqual("Movement should fail after some retry",
                                                ExitStatus.IoException.GetExitCode(), rc);
            }
            finally
            {
                cluster.Shutdown();
            }
        }
コード例 #10
0
ファイル: TestTeraSort.cs プロジェクト: orf53975/hadoop.net
 /// <summary>Runs TeraValidate over the sort output and asserts it exits with 0.</summary>
 /// <param name="job">configuration to run the validator under</param>
 /// <param name="sortOutput">directory produced by TeraSort</param>
 /// <param name="valOutput">directory for the validator's report</param>
 /// <exception cref="System.Exception"/>
 private void RunTeraValidator(Configuration job, Path sortOutput, Path valOutput)
 {
     string[] svArgs = new string[] { sortOutput.ToString(), valOutput.ToString() };
     // Run Tera-Validator.  AreEqual expects (expected, actual); the original
     // passed the actual exit code in the expected slot, which garbles the
     // failure message.
     NUnit.Framework.Assert.AreEqual(0, ToolRunner.Run(job, new TeraValidate(), svArgs));
 }
コード例 #11
0
        /// <summary>Main program</summary>
        /// <param name="args">command line arguments</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // Run the MR load generator and exit with whatever code it reports.
            var tool = new Org.Apache.Hadoop.FS.LoadGenerator.LoadGeneratorMR();
            System.Environment.Exit(ToolRunner.Run(new Configuration(), tool, args));
        }
コード例 #12
0
        /// <summary>main() runs the offline edits viewer using ToolRunner</summary>
        /// <param name="argv">Command line parameters.</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] argv)
        {
            var viewer = new Org.Apache.Hadoop.Hdfs.Tools.OfflineEditsViewer.OfflineEditsViewer();
            int exitCode = ToolRunner.Run(viewer, argv);
            System.Environment.Exit(exitCode);
        }
コード例 #13
0
ファイル: TestTools.cs プロジェクト: orf53975/hadoop.net
        /// <summary>
        /// Each dfsadmin command invoked with invalid usage (via FillArgs) must
        /// return -1, while "-help" with an extra token must still return 0; also
        /// checks the usage banner for "-cancel"/"-renew".
        /// </summary>
        public virtual void TestDFSAdminInvalidUsageHelp()
        {
            ImmutableSet<string> args = ImmutableSet.Of("-report", "-saveNamespace", "-rollEdits",
                "-restoreFailedStorage", "-refreshNodes", "-finalizeUpgrade", "-metasave",
                "-refreshUserToGroupsMappings", "-printTopology", "-refreshNamenodes",
                "-deleteBlockPool", "-setBalancerBandwidth", "-fetchImage");
            try
            {
                foreach (string arg in args)
                {
                    // AreEqual reports the actual exit code on failure, unlike the
                    // original IsTrue(x == -1) which only says "expected True".
                    NUnit.Framework.Assert.AreEqual(-1, ToolRunner.Run(new DFSAdmin(), FillArgs(arg)));
                }
                NUnit.Framework.Assert.AreEqual(0, ToolRunner.Run(new DFSAdmin(),
                                                                  new string[] { "-help", "-some" }));
            }
            catch (Exception e)
            {
                NUnit.Framework.Assert.Fail("testDFSAdminHelp error" + e);
            }
            string pattern = "Usage: hdfs dfsadmin";

            CheckOutput(new string[] { "-cancel", "-renew" }, pattern, System.Console.Error,
                        typeof(DFSAdmin));
        }
コード例 #14
0
        /// <param name="args">arguments</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // Run the throughput benchmark tool and exit with its status code.
            int exitCode = ToolRunner.Run(new HdfsConfiguration(), new BenchmarkThroughput(), args);
            System.Environment.Exit(exitCode);
        }
コード例 #15
0
ファイル: TestTeraSort.cs プロジェクト: orf53975/hadoop.net
 /// <summary>Runs TeraSort on the given input and asserts a zero exit code.</summary>
 /// <param name="conf">configuration to run the sort under</param>
 /// <param name="sortInput">input directory to sort</param>
 /// <param name="sortOutput">directory receiving the sorted output</param>
 /// <exception cref="System.Exception"/>
 private void RunTeraSort(Configuration conf, Path sortInput, Path sortOutput)
 {
     // Setup command-line arguments to 'sort'
     string[] sortArgs = new string[] { sortInput.ToString(), sortOutput.ToString() };
     // Run Sort.  AreEqual expects (expected, actual); the original reversed
     // them, which garbles the failure message.
     NUnit.Framework.Assert.AreEqual(0, ToolRunner.Run(conf, new TeraSort(), sortArgs));
 }
コード例 #16
0
        /// <summary>Runs the RPC benchmark with the "writable" engine and expects success.</summary>
        /// <exception cref="System.Exception"/>
        public virtual void TestBenchmarkWithWritable()
        {
            string[] benchArgs =
            {
                "--clientThreads", "30", "--serverThreads", "30", "--time", "5",
                "--serverReaderThreads", "4", "--messageSize", "1024", "--engine", "writable"
            };
            int rc = ToolRunner.Run(new RPCCallBenchmark(), benchArgs);

            Assert.Equal(0, rc);
        }
コード例 #17
0
ファイル: HSAdmin.cs プロジェクト: orf53975/hadoop.net
        /// <summary>Entry point: runs the history-server admin tool over a fresh JobConf.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            JobConf conf = new JobConf();
            var admin = new Org.Apache.Hadoop.Mapreduce.V2.HS.Client.HSAdmin(conf);
            System.Environment.Exit(ToolRunner.Run(admin, args));
        }
コード例 #18
0
        /// <summary>"-ls" must succeed against a FileSystem that has no ACL support.</summary>
        public virtual void TestLsAclsUnsupported()
        {
            // Point fs.defaultFS at the stub file system registered below.
            Configuration conf = new Configuration();
            conf.Set(CommonConfigurationKeys.FsDefaultNameKey, "stubfs:///");
            conf.SetClass("fs.stubfs.impl", typeof(TestAclCommands.StubFileSystem),
                          typeof(FileSystem));
            int rc = ToolRunner.Run(conf, new FsShell(), new string[] { "-ls", "/" });
            Assert.Equal("ls must succeed even if FileSystem does not implement ACLs.", 0, rc);
        }
コード例 #19
0
ファイル: SliveTest.cs プロジェクト: orf53975/hadoop.net
        /// <summary>The main program entry point.</summary>
        /// <remarks>
        /// The main program entry point. Sets up and parses the command line options,
        /// then merges those options and then dumps those options and the runs the
        /// corresponding map/reduce job that those operations represent and then
        /// writes the report for the output of the run that occurred.
        /// </remarks>
        /// <param name="args">command line options</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // Load defaults eagerly (the "true" flag) before constructing the test.
            Configuration startCfg = new Configuration(true);
            var runner = new Org.Apache.Hadoop.FS.Slive.SliveTest(startCfg);
            System.Environment.Exit(ToolRunner.Run(runner, args));
        }
コード例 #20
0
ファイル: GetConf.cs プロジェクト: orf53975/hadoop.net
        /// <summary>Entry point for getconf: handles -help, otherwise runs the tool.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // "-help" style arguments short-circuit before the tool is constructed.
            if (DFSUtil.ParseHelpArgument(args, Usage, System.Console.Out, true))
            {
                System.Environment.Exit(0);
            }
            int exitCode = ToolRunner.Run(new GetConf(new HdfsConfiguration()), args);
            System.Environment.Exit(exitCode);
        }
コード例 #21
0
        /// <summary>Entry point: wires the queue CLI to the console, runs it, and exits.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            QueueCLI cli = new QueueCLI();
            // Route the CLI's output streams to the real console before running.
            cli.SetSysOutPrintStream(System.Console.Out);
            cli.SetSysErrPrintStream(System.Console.Error);
            int exitCode = ToolRunner.Run(cli, args);
            cli.Stop();
            System.Environment.Exit(exitCode);
        }
コード例 #22
0
ファイル: GetGroups.cs プロジェクト: orf53975/hadoop.net
        /// <summary>Entry point for getGroups: handles -help, otherwise runs the tool.</summary>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] argv)
        {
            // "-help" style arguments short-circuit before the tool is constructed.
            if (DFSUtil.ParseHelpArgument(argv, Usage, System.Console.Out, true))
            {
                System.Environment.Exit(0);
            }
            var tool = new Org.Apache.Hadoop.Hdfs.Tools.GetGroups(new HdfsConfiguration());
            System.Environment.Exit(ToolRunner.Run(tool, argv));
        }
コード例 #23
0
        /// <summary>"-ls" must succeed even when the getAclStatus RPC is unavailable.</summary>
        public virtual void TestLsNoRpcForGetAclStatus()
        {
            // Point fs.defaultFS at the stub file system and disable its ACL RPC.
            Configuration conf = new Configuration();
            conf.Set(CommonConfigurationKeys.FsDefaultNameKey, "stubfs:///");
            conf.SetClass("fs.stubfs.impl", typeof(TestAclCommands.StubFileSystem),
                          typeof(FileSystem));
            conf.SetBoolean("stubfs.noRpcForGetAclStatus", true);
            int rc = ToolRunner.Run(conf, new FsShell(), new string[] { "-ls", "/" });
            Assert.Equal("ls must succeed even if getAclStatus RPC does not exist.", 0, rc);
        }
コード例 #24
0
ファイル: TestTools.cs プロジェクト: orf53975/hadoop.net
 /// <summary>
 /// Runs DFSAdmin with the given arguments purely for its printed output;
 /// the test fails only if the run throws.
 /// </summary>
 /// <param name="args">command line handed to DFSAdmin</param>
 private void ExpectDfsAdminPrint(string[] args)
 {
     try
     {
         ToolRunner.Run(new DFSAdmin(), args);
     }
     catch (Exception ex)
     {
         // The original failure message was copy-pasted from another helper
         // ("expectDelegationTokenFetcherExit"); name this method instead.
         NUnit.Framework.Assert.Fail("expectDfsAdminPrint ex error " + ex);
     }
 }
コード例 #25
0
        /// <summary>
        /// Adding as part of jira HDFS-5343
        /// Test for checking the cat command on snapshot path it
        /// cannot read a file beyond snapshot file length
        /// </summary>
        /// <exception cref="System.Exception"/>
        public virtual void TestSnapshotFileLengthWithCatCommand()
        {
            FSDataInputStream fis        = null;
            FileStatus        fileStatus = null;
            int bytesRead;

            // Buffer is deliberately larger than the file ever grows, so a single
            // Read() call can consume everything the stream will hand out.
            byte[] buffer = new byte[Blocksize * 8];
            hdfs.Mkdirs(sub);
            Path file1 = new Path(sub, file1Name);

            // Create a one-block file, snapshot its directory, then append a second
            // block so the live file is longer than the snapshotted version.
            DFSTestUtil.CreateFile(hdfs, file1, Blocksize, Replication, Seed);
            hdfs.AllowSnapshot(sub);
            hdfs.CreateSnapshot(sub, snapshot1);
            DFSTestUtil.AppendFile(hdfs, file1, Blocksize);
            // Make sure we can read the entire file via its non-snapshot path.
            fileStatus = hdfs.GetFileStatus(file1);
            NUnit.Framework.Assert.AreEqual("Unexpected file length", Blocksize * 2, fileStatus
                                            .GetLen());
            fis       = hdfs.Open(file1);
            bytesRead = fis.Read(buffer, 0, buffer.Length);
            NUnit.Framework.Assert.AreEqual("Unexpected # bytes read", Blocksize * 2, bytesRead
                                            );
            fis.Close();
            Path file1snap1 = SnapshotTestHelper.GetSnapshotPath(sub, snapshot1, file1Name);

            // Through the snapshot path the file must expose only its pre-append
            // length (a single block), both via status and via an actual read.
            fis        = hdfs.Open(file1snap1);
            fileStatus = hdfs.GetFileStatus(file1snap1);
            NUnit.Framework.Assert.AreEqual(fileStatus.GetLen(), Blocksize);
            // Make sure we can only read up to the snapshot length.
            bytesRead = fis.Read(buffer, 0, buffer.Length);
            NUnit.Framework.Assert.AreEqual("Unexpected # bytes read", Blocksize, bytesRead);
            fis.Close();
            // Redirect stdout/stderr into an in-memory sink so the byte volume that
            // "-cat" emits can be measured; originals are restored in the finally.
            TextWriter            outBackup = System.Console.Out;
            TextWriter            errBackup = System.Console.Error;
            ByteArrayOutputStream bao       = new ByteArrayOutputStream();

            Runtime.SetOut(new TextWriter(bao));
            Runtime.SetErr(new TextWriter(bao));
            // Make sure we can cat the file upto to snapshot length
            FsShell shell = new FsShell();

            try
            {
                // NOTE(review): the hard-coded path assumes sub resolves to
                // "/TestSnapshotFileLength/sub1" and snapshot1 == "snapshot1" —
                // confirm against the fixture setup elsewhere in this class.
                ToolRunner.Run(conf, shell, new string[] { "-cat", "/TestSnapshotFileLength/sub1/.snapshot/snapshot1/file1" });
                NUnit.Framework.Assert.AreEqual("Unexpected # bytes from -cat", Blocksize, bao.Size
                                                    ());
            }
            finally
            {
                Runtime.SetOut(outBackup);
                Runtime.SetErr(errBackup);
            }
        }
コード例 #26
0
        /// <summary>End-to-end slive MR run: exit code 0 and a result file on disk.</summary>
        public virtual void TestMRFlow()
        {
            ConfigExtractor extractor = GetTestConfig(false);
            SliveTest sliveTest = new SliveTest(GetBaseConfig());
            int exitCode = ToolRunner.Run(sliveTest, GetTestArgs(false));
            NUnit.Framework.Assert.IsTrue(exitCode == 0);
            // The run must have left its report behind.
            string resultPath = extractor.GetResultFile();
            FilePath resultFile = new FilePath(resultPath);
            NUnit.Framework.Assert.IsTrue(resultFile.Exists());
        }
コード例 #27
0
ファイル: TestHAFsck.cs プロジェクト: orf53975/hadoop.net
        /// <summary>Runs fsck on "/" with -files and verifies both test files are listed.</summary>
        /// <exception cref="System.Exception"/>
        internal static void RunFsck(Configuration conf)
        {
            // Capture fsck's report in memory so it can be inspected afterwards.
            ByteArrayOutputStream bStream = new ByteArrayOutputStream();
            TextWriter @out = new TextWriter(bStream, true);
            int errCode = ToolRunner.Run(new DFSck(conf, @out), new string[] { "/", "-files" });
            string result = bStream.ToString();
            System.Console.Out.WriteLine("output from fsck:\n" + result);
            NUnit.Framework.Assert.AreEqual(0, errCode);
            NUnit.Framework.Assert.IsTrue(result.Contains("/test1"));
            NUnit.Framework.Assert.IsTrue(result.Contains("/test2"));
        }
コード例 #28
0
        /// <summary>
        /// Runs fsck with the given paths, optionally asserting the exit code,
        /// and returns everything the tool printed.
        /// </summary>
        /// <exception cref="System.Exception"/>
        internal static string RunFsck(Configuration conf, int expectedErrCode, bool checkErrorCode
                                       , params string[] path)
        {
            // Capture fsck's report in memory so it can be returned to the caller.
            ByteArrayOutputStream bStream = new ByteArrayOutputStream();
            TextWriter @out = new TextWriter(bStream, true);
            int errCode = ToolRunner.Run(new DFSck(conf, @out), path);
            if (checkErrorCode)
            {
                NUnit.Framework.Assert.AreEqual(expectedErrCode, errCode);
            }
            return bStream.ToString();
        }
コード例 #29
0
        /// <summary>WordMedian's computed median must match an independent reader's value.</summary>
        public virtual void TestGetTheMedian()
        {
            string[] jobArgs = { Input, MedianOutput };
            WordMedian wm = new WordMedian();
            ToolRunner.Run(new Configuration(), wm, jobArgs);
            double median = wm.GetMedian();
            // outputs MUST match
            TestWordStats.WordMedianReader wr = new TestWordStats.WordMedianReader();
            NUnit.Framework.Assert.AreEqual(median, wr.Read(Input), 0.0);
        }
コード例 #30
0
        /// <summary>WordStandardDeviation's result must match an independent reader's value.</summary>
        public virtual void TestGetTheStandardDeviation()
        {
            string[] jobArgs = { Input, StddevOutput };
            WordStandardDeviation wsd = new WordStandardDeviation();
            ToolRunner.Run(new Configuration(), wsd, jobArgs);
            double stddev = wsd.GetStandardDeviation();
            // outputs MUST match
            TestWordStats.WordStdDevReader wr = new TestWordStats.WordStdDevReader();
            NUnit.Framework.Assert.AreEqual(stddev, wr.Read(Input), 0.0);
        }