Code example #1
        private void Execute(string[] args, string namenode)
        {
            FsShell    shell = new FsShell();
            FileSystem fs    = null;

            try
            {
                ToolRunner.Run(shell, args);
                fs = FileSystem.Get(NameNode.GetUri(NameNode.GetAddress(namenode)), shell.GetConf());
                NUnit.Framework.Assert.IsTrue("Directory does not get created", fs.IsDirectory(new Path("/data")));
                fs.Delete(new Path("/data"), true);
            }
            catch (Exception e)
            {
                System.Console.Error.WriteLine(e.Message);
                Sharpen.Runtime.PrintStackTrace(e);
            }
            finally
            {
                if (fs != null)
                {
                    try
                    {
                        fs.Close();
                    }
                    catch (IOException)
                    {
                    }
                }
            }
        }
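
Every example on this page follows the same contract: ToolRunner.Run takes an object that implements the Tool interface (usually by extending Configured, which supplies GetConf/SetConf) and returns the tool's exit code, which Main then passes to System.Environment.Exit. Below is a minimal sketch of that contract against the Sharpen-converted hadoop.net API used in these examples; HelloTool itself is hypothetical and exists only to illustrate the pattern.

using Org.Apache.Hadoop.Conf;
using Org.Apache.Hadoop.Util;

// Hypothetical minimal Tool (not part of hadoop.net); it only illustrates the
// contract that ToolRunner.Run relies on throughout this page.
public class HelloTool : Configured, Tool
{
    // ToolRunner.Run invokes this with the arguments left over after
    // GenericOptionsParser has consumed the generic options (-D, -conf, -fs, ...).
    public virtual int Run(string[] args)
    {
        Configuration conf = GetConf(); // populated by ToolRunner via Configured
        System.Console.Out.WriteLine("remaining args: " + string.Join(" ", args));
        return 0; // becomes the process exit code in Main below
    }

    /// <exception cref="System.Exception"/>
    public static void Main(string[] args)
    {
        int res = ToolRunner.Run(new Configuration(), new HelloTool(), args);
        System.Environment.Exit(res);
    }
}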
Code example #2
File: InputSampler.cs  Project: orf53975/hadoop.net
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            InputSampler<object, object> sampler = new InputSampler<object, object>(new Configuration());
            int res = ToolRunner.Run(sampler, args);

            System.Environment.Exit(res);
        }
Code example #3
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            // -files option is also used by GenericOptionsParser
            // Make sure that is not the first argument for fsck
            int res = -1;

            if ((args.Length == 0) || ("-files".Equals(args[0])))
            {
                PrintUsage(System.Console.Error);
                ToolRunner.PrintGenericCommandUsage(System.Console.Error);
            }
            else
            {
                if (DFSUtil.ParseHelpArgument(args, Usage, System.Console.Out, true))
                {
                    res = 0;
                }
                else
                {
                    res = ToolRunner.Run(new Org.Apache.Hadoop.Hdfs.Tools.DFSck(new HdfsConfiguration()), args);
                }
            }
            System.Environment.Exit(res);
        }
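
The "-files" guard above works because ToolRunner.Run first routes the argument vector through GenericOptionsParser, which consumes the standard generic options (-conf, -D, -fs, -jt, -files, -libjars, -archives) and applies them to the Configuration before the tool's Run method sees whatever is left; an unguarded leading "-files" would therefore be swallowed as a generic option instead of reaching fsck. A short sketch of that behavior, assuming the same hadoop.net API; GenericOptionsDemo and the URI in the -D value are illustrative only.

using Org.Apache.Hadoop.Conf;
using Org.Apache.Hadoop.FS;
using Org.Apache.Hadoop.Util;

public class GenericOptionsDemo
{
    /// <exception cref="System.Exception"/>
    public static void Main(string[] args)
    {
        Configuration conf = new Configuration();
        // GenericOptionsParser strips the leading "-D" pair and applies it to
        // conf; FsShell.Run itself only receives { "-ls", "/" }.
        string[] argv = new string[] { "-D", "fs.defaultFS=hdfs://localhost:8020", "-ls", "/" };
        int res = ToolRunner.Run(conf, new FsShell(), argv);
        System.Environment.Exit(res);
    }
}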
Code example #4
File: RMAdminCLI.cs  Project: orf53975/hadoop.net
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            int result = ToolRunner.Run(new Org.Apache.Hadoop.Yarn.Client.Cli.RMAdminCLI(), args);

            System.Environment.Exit(result);
        }
Code example #5
File: SCMAdmin.cs  Project: orf53975/hadoop.net
        private static void PrintHelp(string cmd)
        {
            string summary = "scmadmin is the command to execute shared cache manager" + "administrative commands.\n"
                             + "The full syntax is: \n\n" + "hadoop scmadmin" + " [-runCleanerTask]" + " [-help [cmd]]\n";
            string runCleanerTask = "-runCleanerTask: Run cleaner task right away.\n";
            string help           = "-help [cmd]: \tDisplays help for the given command or all commands if none\n"
                                    + "\t\tis specified.\n";

            if ("runCleanerTask".Equals(cmd))
            {
                System.Console.Out.WriteLine(runCleanerTask);
            }
            else
            {
                if ("help".Equals(cmd))
                {
                    System.Console.Out.WriteLine(help);
                }
                else
                {
                    System.Console.Out.WriteLine(summary);
                    System.Console.Out.WriteLine(runCleanerTask);
                    System.Console.Out.WriteLine(help);
                    System.Console.Out.WriteLine();
                    ToolRunner.PrintGenericCommandUsage(System.Console.Out);
                }
            }
        }
Code example #6
File: TestTeraSort.cs  Project: orf53975/hadoop.net
 /// <exception cref="System.Exception"/>
 private void RunTeraValidator(Configuration job, Path sortOutput, Path valOutput)
 {
     string[] svArgs = new string[] { sortOutput.ToString(), valOutput.ToString() };
     // Run Tera-Validator
     NUnit.Framework.Assert.AreEqual(ToolRunner.Run(job, new TeraValidate(), svArgs), 0);
 }
Code example #7
File: TestMover.cs  Project: orf53975/hadoop.net
        public virtual void TestMoverFailedRetry()
        {
            // HDFS-8147
            Configuration conf = new HdfsConfiguration();

            conf.Set(DFSConfigKeys.DfsMoverRetryMaxAttemptsKey, "2");
            MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).NumDataNodes(3)
                                         .StorageTypes(new StorageType[][] {
                                             new StorageType[] { StorageType.Disk, StorageType.Archive },
                                             new StorageType[] { StorageType.Disk, StorageType.Archive },
                                             new StorageType[] { StorageType.Disk, StorageType.Archive }
                                         }).Build();

            try
            {
                cluster.WaitActive();
                DistributedFileSystem dfs = cluster.GetFileSystem();
                string file = "/testMoverFailedRetry";
                // write to DISK
                FSDataOutputStream @out = dfs.Create(new Path(file), (short)2);
                @out.WriteChars("testMoverFailedRetry");
                @out.Close();
                // Delete block file so, block move will fail with FileNotFoundException
                LocatedBlock lb = dfs.GetClient().GetLocatedBlocks(file, 0).Get(0);
                cluster.CorruptBlockOnDataNodesByDeletingBlockFile(lb.GetBlock());
                // move to ARCHIVE
                dfs.SetStoragePolicy(new Path(file), "COLD");
                int rc = ToolRunner.Run(conf, new Mover.Cli(), new string[] { "-p", file.ToString() });
                NUnit.Framework.Assert.AreEqual("Movement should fail after some retry",
                                                ExitStatus.IoException.GetExitCode(), rc);
            }
            finally
            {
                cluster.Shutdown();
            }
        }
Code example #8
        public virtual void TestTokenCacheFile()
        {
            Configuration conf = mrCluster.GetConfig();

            CreateBinaryTokenFile(conf);
            // provide namenodes names for the job to get the delegation tokens for
            string nnUri = dfsCluster.GetURI(0).ToString();

            conf.Set(MRJobConfig.JobNamenodes, nnUri + "," + nnUri);
            // using argument to pass the file name
            string[] args = new string[] { "-tokenCacheFile", binaryTokenFileName.ToString(),
                                           "-m", "1", "-r", "1", "-mt", "1", "-rt", "1" };
            int res = -1;

            try
            {
                res = ToolRunner.Run(conf, new SleepJob(), args);
            }
            catch (Exception e)
            {
                System.Console.Out.WriteLine("Job failed with " + e.GetLocalizedMessage());
                Sharpen.Runtime.PrintStackTrace(e, System.Console.Out);
                NUnit.Framework.Assert.Fail("Job failed");
            }
            NUnit.Framework.Assert.AreEqual("dist job res is not 0:", 0, res);
        }
Code example #9
 internal static int PrintUsage()
 {
     System.Console.Out.WriteLine("randomtextwriter " + "[-outFormat <output format class>] "
                                  + "<output>");
     ToolRunner.PrintGenericCommandUsage(System.Console.Out);
     return(2);
 }
Code example #10
File: Grep.cs  Project: orf53975/hadoop.net
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            int res = ToolRunner.Run(new Configuration(), new Org.Apache.Hadoop.Examples.Grep(), args);

            System.Environment.Exit(res);
        }
Code example #11
        /// <summary>main() runs the offline edits viewer using ToolRunner</summary>
        /// <param name="argv">Command line parameters.</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] argv)
        {
            int res = ToolRunner.Run(new Org.Apache.Hadoop.Hdfs.Tools.OfflineEditsViewer.OfflineEditsViewer(), argv);

            System.Environment.Exit(res);
        }
Code example #12
File: KeyShell.cs  Project: orf53975/hadoop.net
 public override bool Validate()
 {
     this.provider = this.GetKeyProvider();
     if (this.provider == null)
     {
         this._enclosing.@out.WriteLine("There are no valid KeyProviders configured. Nothing\n"
                                        + "was deleted. Use the -provider option to specify a provider.");
         return(false);
     }
     if (this.keyName == null)
     {
         this._enclosing.@out.WriteLine("There is no keyName specified. Please specify a "
                                        + "<keyname>. See the usage description with -help.");
         return(false);
     }
     if (this._enclosing.interactive)
     {
         try
         {
             this.cont = ToolRunner.ConfirmPrompt("You are about to DELETE all versions of "
                                                  + "key " + this.keyName + " from KeyProvider " + this.provider + ". Continue? ");
             if (!this.cont)
             {
                 this._enclosing.@out.WriteLine(this.keyName + " has not been deleted.");
             }
             return(this.cont);
         }
         catch (IOException e)
         {
             this._enclosing.@out.WriteLine(this.keyName + " will not be deleted.");
             Runtime.PrintStackTrace(e, this._enclosing.err);
         }
     }
     return(true);
 }
Code example #13
File: LoadGenerator.cs  Project: orf53975/hadoop.net
        /// <summary>Main program</summary>
        /// <param name="args">command line arguments</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            int res = ToolRunner.Run(new Configuration(), new Org.Apache.Hadoop.FS.LoadGenerator.LoadGenerator(), args);

            System.Environment.Exit(res);
        }
Code example #14
        /// <exception cref="System.Exception"/>
        public static void Main(string[] argv)
        {
            int res = ToolRunner.Run(new Org.Apache.Hadoop.Yarn.Client.GetGroupsForTesting(new YarnConfiguration()), argv);

            System.Environment.Exit(res);
        }
Code example #15
        public virtual void Test()
        {
            // make sure JT starts
            Configuration jobConf = new JobConf(mrCluster.GetConfig());
            // provide namenodes names for the job to get the delegation tokens for
            //String nnUri = dfsCluster.getNameNode().getUri(namenode).toString();
            NameNode nn    = dfsCluster.GetNameNode();
            URI      nnUri = NameNode.GetUri(nn.GetNameNodeAddress());

            jobConf.Set(JobContext.JobNamenodes, nnUri + "," + nnUri.ToString());
            jobConf.Set("mapreduce.job.credentials.json", "keys.json");
            // using argument to pass the file name
            string[] args = new string[] { "-m", "1", "-r", "1", "-mt", "1", "-rt", "1" };
            int      res  = -1;

            try
            {
                res = ToolRunner.Run(jobConf, new CredentialsTestJob(), args);
            }
            catch (Exception e)
            {
                System.Console.Out.WriteLine("Job failed with" + e.GetLocalizedMessage());
                Sharpen.Runtime.PrintStackTrace(e, System.Console.Out);
                NUnit.Framework.Assert.Fail("Job failed");
            }
            NUnit.Framework.Assert.AreEqual("dist job res is not 0", res, 0);
        }
Code example #16
File: TestTeraSort.cs  Project: orf53975/hadoop.net
 /// <exception cref="System.Exception"/>
 private void RunTeraSort(Configuration conf, Path sortInput, Path sortOutput)
 {
     // Setup command-line arguments to 'sort'
     string[] sortArgs = new string[] { sortInput.ToString(), sortOutput.ToString() };
     // Run Sort
     NUnit.Framework.Assert.AreEqual(ToolRunner.Run(conf, new TeraSort(), sortArgs), 0);
 }
Code example #17
File: HSAdmin.cs  Project: orf53975/hadoop.net
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            JobConf conf   = new JobConf();
            int     result = ToolRunner.Run(new Org.Apache.Hadoop.Mapreduce.V2.HS.Client.HSAdmin(conf), args);

            System.Environment.Exit(result);
        }
Code example #18
File: Sort.cs  Project: orf53975/hadoop.net
 internal static int PrintUsage()
 {
     System.Console.Out.WriteLine("sort [-r <reduces>] " + "[-inFormat <input format class>] "
                                  + "[-outFormat <output format class>] " + "[-outKey <output key class>] " + "[-outValue <output value class>] "
                                  + "[-totalOrder <pcnt> <num samples> <max splits>] " + "<input> <output>");
     ToolRunner.PrintGenericCommandUsage(System.Console.Out);
     return(2);
 }
Code example #19
        public void VersionRegexDoesNotErrorOnNullOrNonMatch(string pattern)
        {
            var regex         = pattern is null ? null : new Regex(pattern);
            var processResult = new ProcessResult(true, 0, "", error: "\nabcdef_v123defg\n", null);
            var version       = ToolRunner.GetVersion(regex, processResult);

            Assert.Equal(None, version);
        }
Code example #20
File: Join.cs  Project: orf53975/hadoop.net
 internal static int PrintUsage()
 {
     System.Console.Out.WriteLine("join [-r <reduces>] " + "[-inFormat <input format class>] "
                                  + "[-outFormat <output format class>] " + "[-outKey <output key class>] " + "[-outValue <output value class>] "
                                  + "[-joinOp <inner|outer|override>] " + "[input]* <input> <output>");
     ToolRunner.PrintGenericCommandUsage(System.Console.Out);
     return(2);
 }
Code example #21
        /// <exception cref="System.Exception"/>
        public virtual void TestBenchmarkWithWritable()
        {
            int rc = ToolRunner.Run(new RPCCallBenchmark(), new string[] {
                "--clientThreads", "30", "--serverThreads", "30", "--time", "5",
                "--serverReaderThreads", "4", "--messageSize", "1024", "--engine", "writable" });

            Assert.Equal(0, rc);
        }
Code example #22
File: Util.cs  Project: orf53975/hadoop.net
 /// <summary>Print usage messages</summary>
 public static int PrintUsage(string[] args, string usage)
 {
     err.WriteLine("args = " + Arrays.AsList(args));
     err.WriteLine();
     err.WriteLine("Usage: java " + usage);
     err.WriteLine();
     ToolRunner.PrintGenericCommandUsage(err);
     return(-1);
 }
Code example #23
File: HAAdmin.cs  Project: orf53975/hadoop.net
 /// <exception cref="System.IO.IOException"/>
 private bool ConfirmForceManual()
 {
     return(ToolRunner.ConfirmPrompt("You have specified the --" + Forcemanual + " flag. This flag is "
                                     + "dangerous, as it can induce a split-brain scenario that WILL "
                                     + "CORRUPT your HDFS namespace, possibly irrecoverably.\n\n"
                                     + "It is recommended not to use this flag, but instead to shut down the "
                                     + "cluster and disable automatic failover if you prefer to manually "
                                     + "manage your HA state.\n\n"
                                     + "You may abort safely by answering 'n' or hitting ^C now.\n\n"
                                     + "Are you sure you want to continue?"));
 }
Code example #24
File: GetConf.cs  Project: orf53975/hadoop.net
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            if (DFSUtil.ParseHelpArgument(args, Usage, System.Console.Out, true))
            {
                System.Environment.Exit(0);
            }
            int res = ToolRunner.Run(new GetConf(new HdfsConfiguration()), args);

            System.Environment.Exit(res);
        }
Code example #25
        protected static void RegisterDXWinSdk(string dxWinSdkDirectoryPath)
        {
            var batchFileRunner = new ToolRunner();

            foreach (var registrationBatchFilePath in FileSystemHelper.FindBatchFiles(dxWinSdkDirectoryPath))
            {
                batchFileRunner.SetContext(registrationBatchFilePath, string.Empty, (int)TimeSpan.FromMinutes(5).TotalSeconds);
                ExecuteExternalTool(batchFileRunner, null, "[DX WIN SDK REGISTRATION]");
            }
        }
Code example #26
File: SliveTest.cs  Project: orf53975/hadoop.net
        /// <summary>The main program entry point.</summary>
        /// <remarks>
        /// The main program entry point. Sets up and parses the command line options,
        /// then merges those options, dumps them, runs the map/reduce job that
        /// those operations represent, and finally writes a report for the run
        /// that occurred.
        /// </remarks>
        /// <param name="args">command line options</param>
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            Configuration startCfg = new Configuration(true);

            Org.Apache.Hadoop.FS.Slive.SliveTest runner = new Org.Apache.Hadoop.FS.Slive.SliveTest(startCfg);
            int ec = ToolRunner.Run(runner, args);

            System.Environment.Exit(ec);
        }
Code example #27
        public virtual void TestLsAclsUnsupported()
        {
            Configuration conf = new Configuration();

            conf.Set(CommonConfigurationKeys.FsDefaultNameKey, "stubfs:///");
            conf.SetClass("fs.stubfs.impl", typeof(TestAclCommands.StubFileSystem), typeof(FileSystem
                                                                                           ));
            Assert.Equal("ls must succeed even if FileSystem does not implement ACLs."
                         , 0, ToolRunner.Run(conf, new FsShell(), new string[] { "-ls", "/" }));
        }
Code example #28
        /// <summary>This is the main routine for launching a distributed random write job.</summary>
        /// <remarks>
        /// This is the main routine for launching a distributed random write job.
        /// It runs 10 maps/node and each node writes 1 gig of data to a DFS file.
        /// The reduce doesn't do anything.
        /// </remarks>
        /// <exception cref="System.IO.IOException"></exception>
        /// <exception cref="System.Exception"/>
        public virtual int Run(string[] args)
        {
            if (args.Length == 0)
            {
                System.Console.Out.WriteLine("Usage: writer <out-dir>");
                ToolRunner.PrintGenericCommandUsage(System.Console.Out);
                return(2);
            }
            Path          outDir                = new Path(args[0]);
            Configuration conf                  = GetConf();
            JobClient     client                = new JobClient(conf);
            ClusterStatus cluster               = client.GetClusterStatus();
            int           numMapsPerHost        = conf.GetInt(MapsPerHost, 10);
            long          numBytesToWritePerMap = conf.GetLong(BytesPerMap, 1 * 1024 * 1024 * 1024);

            if (numBytesToWritePerMap == 0)
            {
                System.Console.Error.WriteLine("Cannot have" + BytesPerMap + " set to 0");
                return(-2);
            }
            long totalBytesToWrite = conf.GetLong(TotalBytes,
                                                  numMapsPerHost * numBytesToWritePerMap * cluster.GetTaskTrackers());
            int numMaps = (int)(totalBytesToWrite / numBytesToWritePerMap);

            if (numMaps == 0 && totalBytesToWrite > 0)
            {
                numMaps = 1;
                conf.SetLong(BytesPerMap, totalBytesToWrite);
            }
            conf.SetInt(MRJobConfig.NumMaps, numMaps);
            Job job = Job.GetInstance(conf);

            job.SetJarByClass(typeof(RandomWriter));
            job.SetJobName("random-writer");
            FileOutputFormat.SetOutputPath(job, outDir);
            job.SetOutputKeyClass(typeof(BytesWritable));
            job.SetOutputValueClass(typeof(BytesWritable));
            job.SetInputFormatClass(typeof(RandomWriter.RandomInputFormat));
            job.SetMapperClass(typeof(RandomWriter.RandomMapper));
            job.SetReducerClass(typeof(Reducer));
            job.SetOutputFormatClass(typeof(SequenceFileOutputFormat));
            System.Console.Out.WriteLine("Running " + numMaps + " maps.");
            // reducer NONE
            job.SetNumReduceTasks(0);
            DateTime startTime = new DateTime();

            System.Console.Out.WriteLine("Job started: " + startTime);
            int      ret     = job.WaitForCompletion(true) ? 0 : 1;
            DateTime endTime = new DateTime();

            System.Console.Out.WriteLine("Job ended: " + endTime);
            System.Console.Out.WriteLine("The job took " + (endTime.GetTime() - startTime.GetTime
                                                                ()) / 1000 + " seconds.");
            return(ret);
        }
Code example #29
        /// <exception cref="System.Exception"/>
        public static void Main(string[] args)
        {
            QueueCLI cli = new QueueCLI();

            cli.SetSysOutPrintStream(System.Console.Out);
            cli.SetSysErrPrintStream(System.Console.Error);
            int res = ToolRunner.Run(cli, args);

            cli.Stop();
            System.Environment.Exit(res);
        }
Code example #30
File: GetGroups.cs  Project: orf53975/hadoop.net
        /// <exception cref="System.Exception"/>
        public static void Main(string[] argv)
        {
            if (DFSUtil.ParseHelpArgument(argv, Usage, System.Console.Out, true))
            {
                System.Environment.Exit(0);
            }
            int res = ToolRunner.Run(new Org.Apache.Hadoop.Hdfs.Tools.GetGroups(new HdfsConfiguration()), argv);

            System.Environment.Exit(res);
        }