コード例 #1
0
        /// <summary>
        /// Connects to the local HDFS namenode over WebHDFS and returns the
        /// connected user's home directory path.
        /// </summary>
        /// <returns>The home directory reported by the cluster.</returns>
        private string GetMapData()
        {
            // Job configuration is built here but never used by the WebHDFS
            // call below; kept for parity with the original sample.
            HadoopJobConfiguration myConfig = new HadoopJobConfiguration();
            myConfig.InputPath    = "/demo/simple/in";
            myConfig.OutputFolder = "/demo/simple/out";

            //connect to cluster
            Uri myUri = new Uri("hdfs://0.0.0.0:19000");

            // Null user name lets the client fall back to its default identity.
            string userName = null;

            //Microsoft.Hadoop.WebHDFS.WebHDFSClient.MapReduce.IHadoop myCluster = Microsoft.Hadoop.MapReduce.Hadoop.Connect(myUri, userName, passWord);
            Microsoft.Hadoop.WebHDFS.WebHDFSClient client = new Microsoft.Hadoop.WebHDFS.WebHDFSClient(myUri, userName);

            // Original code wrapped this in catch (Exception ex) { throw ex; },
            // which only destroyed the stack trace and made the trailing
            // "return string.Empty;" unreachable; letting the exception
            // propagate naturally is equivalent and preserves diagnostics.
            // NOTE: .Result blocks synchronously on the async call — acceptable
            // in this sample, but prefer await in async-capable callers.
            return client.GetHomeDirectory().Result;
        }
コード例 #2
0
ファイル: Program.cs プロジェクト: prilutskiy/SPOLKS
        /// <summary>
        /// Runs the simple mapper/reducer demo job against the WSB-201 cluster,
        /// timing the execution and reporting the exit code on the console.
        /// </summary>
        public static void Main(string[] args)
        {
            // Job configuration: fixed demo input/output paths.
            var myConfig = new HadoopJobConfiguration
            {
                InputPath    = "/demo/simple/in",
                OutputFolder = "/demo/simple/out"
            };

            // Connect to the cluster (password is null in this sample).
            var myUri = new Uri("http://WSB-201:10001");
            string userName = "******";
            string passWord = null;
            IHadoop myCluster = Hadoop.Connect(myUri, userName, passWord);

            // Execute the map/reduce job and measure wall-clock time.
            var startedAt = DateTime.Now;
            MapReduceResult jobResult = myCluster.MapReduceJob.Execute<MySimpleMapper, MySimpleReducer>(myConfig);
            var elapsed = DateTime.Now - startedAt;
            Console.WriteLine($"{elapsed.TotalMilliseconds} ms");

            // Report the job outcome, e.g. "Exit Code = 0 (Success)".
            int exitCode = jobResult.Info.ExitCode;
            string exitStatus = exitCode == 0 ? "Success" : "Failure";
            exitStatus = exitCode + " (" + exitStatus + ")";
            Console.WriteLine();
            Console.Write("Exit Code = " + exitStatus);
            Console.Read();
        }
コード例 #3
0
ファイル: HiveController.cs プロジェクト: netspdev/SPSamples
        /// <summary>
        /// Connects to the local HDFS namenode over WebHDFS and returns the
        /// connected user's home directory path.
        /// </summary>
        /// <returns>The home directory reported by the cluster.</returns>
        private string GetMapData()
        {
            // Job configuration is built here but never used by the WebHDFS
            // call below; kept for parity with the original sample.
            HadoopJobConfiguration myConfig = new HadoopJobConfiguration();
            myConfig.InputPath = "/demo/simple/in";
            myConfig.OutputFolder = "/demo/simple/out";

            //connect to cluster
            Uri myUri = new Uri("hdfs://0.0.0.0:19000");

            // Null user name lets the client fall back to its default identity.
            string userName = null;

            //Microsoft.Hadoop.WebHDFS.WebHDFSClient.MapReduce.IHadoop myCluster = Microsoft.Hadoop.MapReduce.Hadoop.Connect(myUri, userName, passWord);
            Microsoft.Hadoop.WebHDFS.WebHDFSClient client = new Microsoft.Hadoop.WebHDFS.WebHDFSClient(myUri, userName);

            // Original code wrapped this in catch (Exception ex) { throw ex; },
            // which only destroyed the stack trace and made the trailing
            // "return string.Empty;" unreachable; letting the exception
            // propagate naturally is equivalent and preserves diagnostics.
            // NOTE: .Result blocks synchronously on the async call — acceptable
            // in this sample, but prefer await in async-capable callers.
            return client.GetHomeDirectory().Result;
        }
コード例 #4
0
        /// <summary>
        /// Runs the simple mapper/reducer demo job against a local cluster and
        /// reports the exit code on the console.
        /// </summary>
        static void Main(string[] args)
        {
            //establish job configuration
            HadoopJobConfiguration myConfig = new HadoopJobConfiguration();
            myConfig.InputPath = "/demo/simple/in";
            myConfig.OutputFolder = "/demo/simple/out";

            //connect to cluster (password is null for this sample)
            Uri myUri = new Uri("http://localhost");
            string userName = "******";
            string passWord = null;
            IHadoop myCluster = Hadoop.Connect(myUri, userName, passWord);

            //execute mapreduce job
            MapReduceResult jobResult = myCluster.MapReduceJob.Execute<MySimpleMapper, MySimpleReducer>(myConfig);

            //write job result to console, e.g. "Exit Code = 0 (Success)"
            int exitCode = jobResult.Info.ExitCode;
            string exitStatus;
            if (exitCode == 0)
            {
                exitStatus = "Success";
            }
            else
            {
                exitStatus = "Failure";
            }
            exitStatus = exitCode + " (" + exitStatus + ")";

            Console.WriteLine();
            Console.Write("Exit Code = " + exitStatus);
            Console.Read();
        }
コード例 #5
0
        /// <summary>
        /// Builds the job configuration for the km-per-vehicle job: reads the
        /// deliveries TSV data and writes under output/kmPerVehicle.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            return new HadoopJobConfiguration
            {
                InputPath    = "Deliveries-tsv",
                OutputFolder = "output/kmPerVehicle"
            };
        }
コード例 #6
0
            /// <summary>
            /// Builds the job configuration for the square-root job using
            /// fixed Input/sqrt and Output/sqrt paths.
            /// </summary>
            public override HadoopJobConfiguration Configure(ExecutorContext context)
            {
                return new HadoopJobConfiguration
                {
                    InputPath    = "Input/sqrt",
                    OutputFolder = "Output/sqrt"
                };
            }
コード例 #7
0
        /// <summary>
        /// Builds the job configuration mapping input/rawdata to output/data.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            return new HadoopJobConfiguration
            {
                InputPath    = "input/rawdata",
                OutputFolder = "output/data"
            };
        }
コード例 #8
0
            /// <summary>
            /// Builds the job configuration for the tweet job, reading and
            /// writing Azure Storage Vault (asv://) paths.
            /// </summary>
            public override HadoopJobConfiguration Configure(ExecutorContext context)
            {
                return new HadoopJobConfiguration
                {
                    InputPath    = "asv://[email protected]/tweet/input/furious7.txt",
                    OutputFolder = "asv://[email protected]/tweet/output"
                };
            }
コード例 #9
0
        /// <summary>
        /// Executes the TMapper/TReducer job described by
        /// <paramref name="config"/> on the cluster from the factory.
        /// </summary>
        /// <param name="config">Job configuration to run.</param>
        /// <param name="statusCode">Receives the job's exit code mapped to a status.</param>
        /// <returns>The raw map/reduce result.</returns>
        public MapReduceResult Work(HadoopJobConfiguration config, out StatusResult statusCode)
        {
            var cluster = ClusterFactory.GetCluster();
            var result  = cluster.MapReduceJob.Execute <TMapper, TReducer>(config);

            statusCode = result.Info.ExitCode.ToStatusResult();
            return result;
        }
コード例 #10
0
        /// <summary>
        /// Builds the job configuration mapping /input/ to /output/.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            var config = new HadoopJobConfiguration
            {
                // Kept from the original: output-folder deletion stays disabled.
                // config.DeleteOutputFolder = true;
                InputPath    = "/input/",
                OutputFolder = "/output/"
            };

            return config;
        }
コード例 #11
0
        /// <summary>
        /// Builds the job configuration for the square-root job using the
        /// input/SqrtJob and output/SqrtJob paths.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            return new HadoopJobConfiguration
            {
                InputPath    = "input/SqrtJob",
                OutputFolder = "output/SqrtJob"
            };
        }
コード例 #12
0
        /// <summary>
        /// Builds the job configuration reading wasb:///test2.txt and writing
        /// to wasb:///myresclust.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            var config = new HadoopJobConfiguration();

            config.InputPath    = "wasb:///test2.txt";
            config.OutputFolder = "wasb:///myresclust";
            return config;
        }
コード例 #13
0
        /// <summary>
        /// Builds the job configuration for the square-root example, rooting
        /// both paths at the shared WASB location in <c>Constants</c>.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            var config = new HadoopJobConfiguration();

            config.InputPath    = Constants.wasbPath + "/example/data/Numbers.txt";
            config.OutputFolder = Constants.wasbPath + "/example/data/SqaureRootOutput";
            return config;
        }
コード例 #14
0
ファイル: Program.cs プロジェクト: ds112/hbase-on-windows
            /// <summary>
            /// Builds a verbose job configuration over the class's HDFS paths
            /// and pins the map-task count via a generic hadoop argument.
            /// </summary>
            public override HadoopJobConfiguration Configure(ExecutorContext context)
            {
                var config = new HadoopJobConfiguration
                {
                    Verbose      = true,
                    InputPath    = _input1HDFS,
                    OutputFolder = s_outputFolderHDFS
                };

                // example of controlling arbitrary hadoop options.
                config.AdditionalGenericArguments.Add("-D \"mapred.map.tasks=3\"");
                return config;
            }
コード例 #15
0
        /// <summary>
        /// Builds the lab-2 job configuration: consumes lab 1's part-00000
        /// output and writes lab 2's output, both under the storage vault root.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            var configuration = new HadoopJobConfiguration();

            configuration.InputPath    = MyConstants.AzureStorageVaultRoot + "/handsonlabs/lab1/output/part-00000";
            configuration.OutputFolder = MyConstants.AzureStorageVaultRoot + "/handsonlabs/lab2/output";
            return configuration;
        }
コード例 #16
0
        /// <summary>
        /// Builds the job configuration mapping /in to /out.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            var jobConf = new HadoopJobConfiguration();

            jobConf.InputPath    = "/in";
            jobConf.OutputFolder = "/out";
            return jobConf;
        }
コード例 #17
0
        /// <summary>
        /// Builds the job configuration reading the lab-1 results log and
        /// writing to a unique (GUID-suffixed) output folder so repeated runs
        /// never collide.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            var configuration = new HadoopJobConfiguration();

            configuration.InputPath    = MyConstants.AzureStorageVaultRoot + "/handsonlabs/lab1Results.log";
            configuration.OutputFolder = MyConstants.AzureStorageVaultRoot + "/handsonlabs/output/newUserPurchasesByContinent/" + Guid.NewGuid().ToString("N");
            return configuration;
        }
コード例 #18
0
ファイル: Program.cs プロジェクト: ds112/hbase-on-windows
        /// <summary>
        /// Creates the NASA access-log job configuration and echoes the chosen
        /// paths to the console.
        /// </summary>
        private static HadoopJobConfiguration GetHadoopConfiguration()
        {
            var config = new HadoopJobConfiguration
            {
                InputPath    = "/Data/NASA_Access_Log",
                OutputFolder = "/output"
            };

            Console.WriteLine("\n\n\nInput Path :" + config.InputPath);
            Console.WriteLine("\nOutput Folder :" + config.OutputFolder);
            return config;
        }
コード例 #19
0
ファイル: Program.cs プロジェクト: Alfredovec/HadoopSandbox
            /// <summary>
            /// Builds the job configuration over WASB container paths, keeping
            /// any previous output folder (DeleteOutputFolder = false).
            /// </summary>
            public override HadoopJobConfiguration Configure(ExecutorContext context)
            {
                var config = new HadoopJobConfiguration();

                config.InputPath          = "wasb://[email protected]/input";
                config.OutputFolder       = "wasb://[email protected]/output";
                config.DeleteOutputFolder = false;
                return config;
            }
コード例 #20
0
        /// <summary>
        /// Builds the lab-5 job configuration: reads the ua.base ratings data
        /// and writes under the lab-5 output folder, both rooted at the
        /// storage vault.
        /// </summary>
        public override HadoopJobConfiguration Configure(ExecutorContext context)
        {
            var configuration = new HadoopJobConfiguration();

            configuration.InputPath = MyConstants.AzureStorageVaultRoot + "/handsonlabs/data/ua.base";
            configuration.OutputFolder = string.Format("{0}{1}", MyConstants.AzureStorageVaultRoot, "/handsonlabs/lab5/output");
            return configuration;
        }
コード例 #21
0
ファイル: Program.cs プロジェクト: divyanshmalik/sa2014
        /// <summary>
        /// Runs the Mapper/Reducer demo pair on the win8-dev-pc cluster with
        /// fixed /demo/in and /demo/out paths.
        /// </summary>
        static void Main(string[] args)
        {
            var conf = new HadoopJobConfiguration();
            conf.InputPath = "/demo/in";
            conf.OutputFolder = "/demo/out";

            // Connect and execute in one chained call; the result is discarded.
            IHadoop cluster = Hadoop.Connect(new Uri("http://win8-dev-pc/"), "hadoop", "");
            cluster.MapReduceJob.Execute<Mapper, Reducer>(conf);
        }
コード例 #22
0
ファイル: Program.cs プロジェクト: jawaharrajan/RaleighCrime
        /// <summary>
        /// Interactive crime-counter demo: prompts for a report type, runs the
        /// matching map/reduce job over the Raleigh police data, and prints
        /// the job's exit status.
        /// </summary>
        static void Main(string[] args)
        {
            /* To create these locations on your own drive, open the Hadoop
             * console and then type in the following commands:
             *      hadoop fs -mkdir /user/OpenData
             *      hadoop fs -mkdir /user/OpenData/Police
             *      hadoop fs -copyFromLocal C:\Temp\Datasets\Police.csv /user/OpenData/Police/
             *      hadoop fs -mkdir /user/OpenData/Output
             */
            var config = new HadoopJobConfiguration
            {
                InputPath    = "/user/OpenData/Police",
                OutputFolder = "/user/OpenData/Output"
            };

            //Replace the URI with your local machine name.
            //Note that password is ignored for the HDInsight emulator, so that can be whatever you want.
            Uri     clusterURI = new Uri("http://yourmachine");
            string  username   = "******";
            string  password   = null;
            IHadoop cluster    = Hadoop.Connect(clusterURI, username, password);

            Console.WriteLine("Crime Counter.  Select an option to continue:");
            Console.WriteLine("1) Raw count by crime");
            Console.WriteLine("2) Count by coordinates (4 spots after decimal)");

            var input = Console.ReadLine();

            MapReduceResult jobResult;

            if (input == "1")
            {
                jobResult = cluster.MapReduceJob.Execute<CrimeCount, TotalCrimeCount>(config);
            }
            else if (input == "2")
            {
                //Quick note:  if we just wanted to spit out all areas regardless of
                //number of crimes, we could just use the TotalCrimeCount class
                //and would not need to generate a new Reduce class.
                jobResult = cluster.MapReduceJob.Execute<CrimeLocation, TopCrimeLocations>(config);
            }
            else
            {
                // Any other choice (including EOF) exits without running a job.
                return;
            }

            int    exitcode   = jobResult.Info.ExitCode;
            string exitstatus = exitcode == 0 ? "Success" : "Failure";

            Console.WriteLine();
            Console.WriteLine("Exit Code = " + exitstatus);
            Console.Read();
        }
コード例 #23
0
ファイル: Program.cs プロジェクト: nitingautam/sa2014
        /// <summary>
        /// Connects to the win8-dev-pc cluster and runs the Mapper/Reducer
        /// pair over /demo/in, writing results to /demo/out.
        /// </summary>
        static void Main(string[] args)
        {
            // Fixed demo paths.
            var conf = new HadoopJobConfiguration();
            conf.InputPath = "/demo/in";
            conf.OutputFolder = "/demo/out";

            IHadoop cluster = Hadoop.Connect(new Uri("http://win8-dev-pc/"), "hadoop", "");
            cluster.MapReduceJob.Execute<Mapper, Reducer>(conf);
        }
コード例 #24
0
ファイル: Program.cs プロジェクト: jawaharrajan/RaleighCrime
        /// <summary>
        /// Interactive crime-counter demo: asks which report to run, executes
        /// the matching map/reduce job over the police data, and reports the
        /// exit status on the console.
        /// </summary>
        static void Main(string[] args)
        {
            /* To create these locations on your own drive, open the Hadoop
             console and then type in the following commands:
                hadoop fs -mkdir /user/OpenData
                hadoop fs -mkdir /user/OpenData/Police
                hadoop fs -copyFromLocal C:\Temp\Datasets\Police.csv /user/OpenData/Police/
                hadoop fs -mkdir /user/OpenData/Output
             */
            var config = new HadoopJobConfiguration
            {
                InputPath = "/user/OpenData/Police",
                OutputFolder = "/user/OpenData/Output"
            };

            //Replace the URI with your local machine name.
            //Note that password is ignored for the HDInsight emulator, so that can be whatever you want.
            Uri clusterURI = new Uri("http://yourmachine");
            string username = "******";
            string password = null;
            IHadoop cluster = Hadoop.Connect(clusterURI, username, password);

            Console.WriteLine("Crime Counter.  Select an option to continue:");
            Console.WriteLine("1) Raw count by crime");
            Console.WriteLine("2) Count by coordinates (4 spots after decimal)");

            var input = Console.ReadLine();

            MapReduceResult jobResult;
            switch (input)
            {
                case "1":
                    jobResult = cluster.MapReduceJob.Execute<CrimeCount, TotalCrimeCount>(config);
                    break;
                case "2":
                    //Quick note:  if we just wanted to spit out all areas regardless of
                    //number of crimes, we could just use the TotalCrimeCount class
                    //and would not need to generate a new Reduce class.
                    jobResult = cluster.MapReduceJob.Execute<CrimeLocation, TopCrimeLocations>(config);
                    break;
                default:
                    // Anything else exits without running a job.
                    return;
            }

            int exitcode = jobResult.Info.ExitCode;
            string exitstatus;
            if (exitcode == 0)
            {
                exitstatus = "Success";
            }
            else
            {
                exitstatus = "Failure";
            }

            Console.WriteLine();
            Console.WriteLine("Exit Code = " + exitstatus);
            Console.Read();
        }
コード例 #25
0
ファイル: Program.cs プロジェクト: abhaymise/Hadoop-Analysis
        /// <summary>
        /// Runs the World Bank indicator-extraction map/reduce job and prints
        /// its exit status. A second (chained) job configuration is prepared
        /// but its execution is left disabled.
        /// </summary>
        static void Main(string[] args)
        {
            // Job 1: extract indicator values from the raw World Bank data.
            HadoopJobConfiguration myConfig = new HadoopJobConfiguration();
            myConfig.InputPath = "/world/in/worldbank";
            myConfig.OutputFolder = "/world/out";

            // NOTE(review): the original source was mangled by scraping here —
            // the cluster URI and the userName/passWord declarations were
            // censored and merged into the HADOOP_HOME assignment. The values
            // below are a plausible reconstruction; confirm against the
            // original project before relying on them.
            Uri myUri = new Uri("http://localhost:50070");
            string userName = "hadoop";
            string passWord = null;
            Environment.SetEnvironmentVariable("HADOOP_HOME", @"c:\hadoop");
            Environment.SetEnvironmentVariable("Java_HOME", @"c:\hadoop\jvm");

            IHadoop myCluster = Microsoft.Hadoop.MapReduce.Hadoop.Connect(myUri, userName, passWord);

            MapReduceResult jobResult = myCluster.MapReduceJob.Execute<ExtractValuesForIndicatorsMapper, IndicatorsReducer>(myConfig);

            // Job 2 would consume job 1's output; kept configured but disabled.
            HadoopJobConfiguration myConfig2 = new HadoopJobConfiguration();
            myConfig2.InputPath = "/world/out";
            myConfig2.OutputFolder = "/world/out2";
            //MapReduceResult jobResult2 = myCluster.MapReduceJob.Execute<GroupValuesMapper, GroupValuesReducer>(myConfig2);

            // Report the job outcome, e.g. "Exit Code = 0 (Success)".
            int exitCode = jobResult.Info.ExitCode;
            string exitStatus = exitCode == 0 ? "Success" : "Failure";
            exitStatus = exitCode + " (" + exitStatus + ")";

            Console.WriteLine();
            Console.Write("Exit Code = " + exitStatus);
        }
コード例 #26
0
ファイル: Program.cs プロジェクト: mreeddev/Hadoop-Analysis
        /// <summary>
        /// Runs the World Bank indicator-extraction map/reduce job and prints
        /// its exit status. A second (chained) job configuration is prepared
        /// but its execution is left disabled.
        /// </summary>
        static void Main(string[] args)
        {
            // Job 1: extract indicator values from the raw World Bank data.
            HadoopJobConfiguration myConfig = new HadoopJobConfiguration();

            myConfig.InputPath    = "/world/in/worldbank";
            myConfig.OutputFolder = "/world/out";

            // NOTE(review): the original source was mangled by scraping here —
            // the cluster URI and the userName/passWord declarations were
            // censored and merged into the HADOOP_HOME assignment. The values
            // below are a plausible reconstruction; confirm against the
            // original project before relying on them.
            Uri myUri = new Uri("http://localhost:50070");
            string userName = "hadoop";
            string passWord = null;
            Environment.SetEnvironmentVariable("HADOOP_HOME", @"c:\hadoop");
            Environment.SetEnvironmentVariable("Java_HOME", @"c:\hadoop\jvm");

            IHadoop myCluster = Microsoft.Hadoop.MapReduce.Hadoop.Connect(myUri, userName, passWord);

            MapReduceResult jobResult = myCluster.MapReduceJob.Execute <ExtractValuesForIndicatorsMapper, IndicatorsReducer>(myConfig);

            // Job 2 would consume job 1's output; kept configured but disabled.
            HadoopJobConfiguration myConfig2 = new HadoopJobConfiguration();

            myConfig2.InputPath    = "/world/out";
            myConfig2.OutputFolder = "/world/out2";
            //MapReduceResult jobResult2 = myCluster.MapReduceJob.Execute<GroupValuesMapper, GroupValuesReducer>(myConfig2);

            // Report the job outcome, e.g. "Exit Code = 0 (Success)".
            int exitCode = jobResult.Info.ExitCode;

            string exitStatus = exitCode == 0 ? "Success" : "Failure";
            exitStatus = exitCode + " (" + exitStatus + ")";

            Console.WriteLine();
            Console.Write("Exit Code = " + exitStatus);
        }
コード例 #27
0
        /// <summary>
        /// Copies input data to the cluster, runs the simple mapper/reducer
        /// job over /in, and prints the exit status.
        /// </summary>
        static void Main(string[] args)
        {
            HadoopJobConfiguration myconfig = new HadoopJobConfiguration();
            myconfig.InputPath = "/in";
            myconfig.OutputFolder = "/out";

            //connect to cluster
            Uri myUri = new Uri("http://127.0.0.1:50070");

            // Stage local data into HDFS before running the job.
            Copy(myUri, "/input");

            string userName = "******";
            string passWord = null;

            // Fix: the original declared passWord but passed a null literal to
            // Connect, leaving the variable unused; pass the variable instead
            // (its value is still null, so behavior is unchanged).
            IHadoop myCluster = Microsoft.Hadoop.MapReduce.Hadoop.Connect(myUri, userName, passWord);

            //execute mapreduce job
            MapReduceResult jobResult = myCluster.MapReduceJob.Execute<MySimpleMapper, MySimpleReducer>(myconfig);

            // Report the job outcome, e.g. "Exit Code = 0 (Success)".
            int exitCode = jobResult.Info.ExitCode;
            string exitStatus = exitCode == 0 ? "Success" : "Failure";
            exitStatus = exitCode + " (" + exitStatus + ")";

            Console.WriteLine();
            Console.Write("Exit Code = " + exitStatus);
        }
コード例 #28
0
        /// <summary>
        /// Runs three NHS analysis jobs in sequence — practice counts,
        /// prescription filtering, and the combined spend report — each with
        /// its own input/output paths, then waits for a key press.
        /// </summary>
        static void Main(string[] args)
        {
            // One job configuration per NHS dataset loaded into HDInsight;
            // each run clears its previous output.
            var practicesConfig = new HadoopJobConfiguration
            {
                InputPath = "/user/Jayakaran/Input/NHS/Practices",
                OutputFolder = "/user/Jayakaran/Output/NHS/Practices",
                DeleteOutputFolder = true
            };
            var prescriptionConfig = new HadoopJobConfiguration
            {
                InputPath = "/user/Jayakaran/Input/NHS/Prescription",
                OutputFolder = "/user/Jayakaran/Output/NHS/Prescription",
                DeleteOutputFolder = true
            };
            var combinedConfig = new HadoopJobConfiguration
            {
                InputPath = "/user/Jayakaran/Input/NHS/Combined",
                OutputFolder = "/user/Jayakaran/Output/NHS/Combined",
                DeleteOutputFolder = true
            };

            // Question 1 How many practices are in London?
            Hadoop.Connect().MapReduceJob.Execute<PracticesCountDataMapper, PracticesCountDataReducer>(practicesConfig);

            // Question 2 What was the average actual cost of all peppermint oil prescriptions?
            Hadoop.Connect().MapReduceJob.Execute<PrescriptionFilterDataMapper, PrescriptionFilterDataReducer>(prescriptionConfig);

            // Question 3 Which 5 post codes have the highest actual spend, and how much did each spend in total?
            Hadoop.Connect().MapReduceJob.Execute<CombinedDataMapper, CombinedDataReducer>(combinedConfig);

            System.Console.Read();  //using to catch console
        }
コード例 #29
0
ファイル: Program.cs プロジェクト: constructor-igor/TechSugar
        /// <summary>
        /// Runs the simple mapper/reducer demo job on the local cluster and
        /// reports the exit code on the console.
        /// </summary>
        static void Main(string[] args)
        {
            //establish job configuration
            var myConfig = new HadoopJobConfiguration();
            myConfig.InputPath = "/demo/simple/in";
            myConfig.OutputFolder = "/demo/simple/out";

            //connect to cluster (password is null for this sample)
            var myUri = new Uri("http://localhost");
            string userName = "******";
            string passWord = null;
            IHadoop myCluster = Hadoop.Connect(myUri, userName, passWord);

            //execute mapreduce job
            MapReduceResult jobResult = myCluster.MapReduceJob.Execute<MySimpleMapper, MySimpleReducer>(myConfig);

            //write job result to console, e.g. "Exit Code = 0 (Success)"
            int exitCode = jobResult.Info.ExitCode;
            string exitStatus;
            if (exitCode == 0)
            {
                exitStatus = "Success";
            }
            else
            {
                exitStatus = "Failure";
            }
            exitStatus = exitCode + " (" + exitStatus + ")";

            Console.WriteLine();
            Console.Write("Exit Code = " + exitStatus);
            Console.Read();
        }
コード例 #30
0
        /// <summary>
        /// Runs three NHS analysis jobs in sequence — practice counts,
        /// prescription filtering, and the combined spend report — each with
        /// its own input/output paths, then waits for a key press.
        /// </summary>
        static void Main(string[] args)
        {
            // One job configuration per NHS dataset; each run clears its
            // previous output folder.
            HadoopJobConfiguration practicesJob = new HadoopJobConfiguration();
            practicesJob.InputPath = "/user/Jayakaran/Input/NHS/Practices";
            practicesJob.OutputFolder = "/user/Jayakaran/Output/NHS/Practices";
            practicesJob.DeleteOutputFolder = true;

            HadoopJobConfiguration prescriptionJob = new HadoopJobConfiguration();
            prescriptionJob.InputPath = "/user/Jayakaran/Input/NHS/Prescription";
            prescriptionJob.OutputFolder = "/user/Jayakaran/Output/NHS/Prescription";
            prescriptionJob.DeleteOutputFolder = true;

            HadoopJobConfiguration combinedJob = new HadoopJobConfiguration();
            combinedJob.InputPath = "/user/Jayakaran/Input/NHS/Combined";
            combinedJob.OutputFolder = "/user/Jayakaran/Output/NHS/Combined";
            combinedJob.DeleteOutputFolder = true;

            // Question 1 How many practices are in London?
            Hadoop.Connect().MapReduceJob.Execute <PracticesCountDataMapper, PracticesCountDataReducer>(practicesJob);

            // Question 2 What was the average actual cost of all peppermint oil prescriptions?
            Hadoop.Connect().MapReduceJob.Execute <PrescriptionFilterDataMapper, PrescriptionFilterDataReducer>(prescriptionJob);

            // Question 3 Which 5 post codes have the highest actual spend, and how much did each spend in total?
            Hadoop.Connect().MapReduceJob.Execute <CombinedDataMapper, CombinedDataReducer>(combinedJob);

            System.Console.Read();  //using to catch console
        }
コード例 #31
0
 /// <summary>
 /// Extension helper: adds the threading framework assembly to the job's
 /// file set and returns the same configuration for chaining.
 /// </summary>
 private static HadoopJobConfiguration Required(this HadoopJobConfiguration config)
 {
     config.FilesToInclude.Add("Microsoft.WindowsAzure.Management.Framework.Threading.dll");
     return config;
 }