public void Run()
{
    IHadoop hadoop = Hadoop.Connect();
    hadoop.MapReduceJob.ExecuteJob<HelloWorldJob>();
    Console.Read();
}
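The parameterless ExecuteJob<T>() overload above expects a job class that bundles its own configuration. What follows is a minimal sketch of what HelloWorldJob could look like against the Microsoft.Hadoop.MapReduce base classes; the paths and mapper logic are illustrative assumptions, not the original sample's code.

using Microsoft.Hadoop.MapReduce;

// Hypothetical reconstruction: a map-only job whose configuration travels
// with the job class, which is what ExecuteJob<HelloWorldJob>() relies on.
public class HelloWorldJob : HadoopJob<HelloWorldMapper>
{
    public override HadoopJobConfiguration Configure(ExecutorContext context)
    {
        return new HadoopJobConfiguration
        {
            InputPath = "/demo/hello/in",      // assumed HDFS input path
            OutputFolder = "/demo/hello/out",  // assumed HDFS output folder
            DeleteOutputFolder = true
        };
    }
}

public class HelloWorldMapper : MapperBase
{
    public override void Map(string inputLine, MapperContext context)
    {
        // Illustrative map step: key each line by its length.
        context.EmitKeyValue(inputLine.Length.ToString(), inputLine);
    }
}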
static void Main(string[] args)
{
    try
    {
        RunUsingStreamingUnit();

        IHadoop hadoop = Hadoop.Connect(MyConstants.AzureCluster,
                                        MyConstants.AzureUserName,
                                        MyConstants.HadoopUserName,
                                        MyConstants.AzurePassword,
                                        MyConstants.AzureStorageAccount,
                                        MyConstants.AzureStorageKey,
                                        MyConstants.AzureStorageContainer,
                                        false);

        var result = hadoop.MapReduceJob.ExecuteJob<MovieLensJob>();

        Console.WriteLine();
        Console.WriteLine("Job Run Information");
        Console.WriteLine();
        Console.WriteLine("Job Id: {0}", result.Id);
        Console.WriteLine("Exit Code: {0}", result.Info.ExitCode);
        Console.WriteLine("Standard Out");
        Console.WriteLine(result.Info.StandardOut);
        Console.WriteLine();
        Console.WriteLine("Standard Err");
        Console.WriteLine(result.Info.StandardError);
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
        Console.ReadLine();
    }
}
static void Main(string[] args)
{
    // Establish the job configuration.
    var myConfig = new HadoopJobConfiguration
    {
        InputPath = "/demo/simple/in",
        OutputFolder = "/demo/simple/out"
    };

    // Connect to the cluster.
    var myUri = new Uri("http://localhost");
    const string userName = "******";
    string passWord = null;
    IHadoop myCluster = Hadoop.Connect(myUri, userName, passWord);

    // Execute the MapReduce job.
    MapReduceResult jobResult = myCluster.MapReduceJob.Execute<MySimpleMapper, MySimpleReducer>(myConfig);

    // Write the job result to the console.
    int exitCode = jobResult.Info.ExitCode;
    string exitStatus = exitCode == 0 ? "Success" : "Failure";
    exitStatus = exitCode + " (" + exitStatus + ")";
    Console.WriteLine();
    Console.Write("Exit Code = " + exitStatus);
    Console.Read();
}
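Unlike ExecuteJob<T>(), the Execute<TMapper, TReducer>(config) form used here takes the configuration externally and the mapper/reducer types directly. The bodies of MySimpleMapper and MySimpleReducer are not shown in the snippet, so this is a hedged sketch that assumes a simple even/odd bucketing job.

using System.Collections.Generic;
using Microsoft.Hadoop.MapReduce;

public class MySimpleMapper : MapperBase
{
    public override void Map(string inputLine, MapperContext context)
    {
        // Assumed logic: bucket each integer input line as "even" or "odd".
        int value = int.Parse(inputLine);
        context.EmitKeyValue(value % 2 == 0 ? "even" : "odd", "1");
    }
}

public class MySimpleReducer : ReducerCombinerBase
{
    public override void Reduce(string key, IEnumerable<string> values,
                                ReducerCombinerContext context)
    {
        // Assumed logic: count how many lines fell into each bucket.
        int count = 0;
        foreach (var v in values) count++;
        context.EmitKeyValue(key, count.ToString());
    }
}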
private static IHadoop ConnectToLocalPseudoCluster()
{
    LoadInpuFiles();
    IHadoop myCluster = Hadoop.Connect();
    return myCluster;
}
public void Run()
{
    Uri uri = new Uri(@"https://10.0.0.4");
    //IHadoop hdp = Hadoop.Connect(uri, "cloudera", "cloudera");
    IHadoop hdp = Hadoop.Connect();
    hdp.MapReduceJob.ExecuteJob<WCJobConf>();
    Console.Read();
}
// Run a custom MapReduce job.
public static void DoCustomMapReduce()
{
    Console.WriteLine("Starting MapReduce job. Remote login to your Name Node and check progress from JobTracker portal with the returned JobID...");
    IHadoop hadoop = Hadoop.Connect(Constants.azureClusterUri,
                                    Constants.clusterUser,
                                    Constants.hadoopUser,
                                    Constants.clusterPassword,
                                    Constants.storageAccount,
                                    Constants.storageAccountKey,
                                    Constants.container,
                                    true);
    //IHadoop hadoop = Hadoop.Connect();
    var output = hadoop.MapReduceJob.ExecuteJob<SquareRootJob>();
}
private static IHadoop ConnectToRemoteCluster()
{
    LoadInpuFiles();
    Uri uri = new Uri("http://" + ActiveNameNode);
    IHadoop myCluster = Hadoop.Connect(uri);
    return myCluster;
}
public static void DoCustomMapReduce() { Console.WriteLine("Starting MapReduce job..."); IHadoop hadoop = Hadoop.Connect(Constants.azureClusterUri, Constants.clusterUser, Constants.hadoopUser, Constants.clusterPassword, Constants.storageAccount, Constants.storageAccountKey, Constants.container, true); var output = hadoop.MapReduceJob.ExecuteJob <RootJob>(); }
private static IHadoop ConnectToSecuredRemoteCluster()
{
    LoadInpuFiles();
    Uri uri = new Uri("http://" + ActiveNameNode);
    Uri uri2 = new Uri("http://" + credentials.ActiveDirectoryIp);
    IHadoop myCluster = Hadoop.Connect(uri, uri2, credentials.Username, credentials.Password);
    return myCluster;
}
static void Main(string[] args)
{
    /* To create these locations on your own drive, open the Hadoop
     * console and then type in the following commands:
     * hadoop fs -mkdir /user/OpenData
     * hadoop fs -mkdir /user/OpenData/Police
     * hadoop fs -copyFromLocal C:\Temp\Datasets\Police.csv /user/OpenData/Police/
     * hadoop fs -mkdir /user/OpenData/Output
     */
    HadoopJobConfiguration config = new HadoopJobConfiguration();
    config.InputPath = "/user/OpenData/Police";
    config.OutputFolder = "/user/OpenData/Output";

    // Replace the URI with your local machine name.
    // Note that the password is ignored by the HDInsight emulator, so it can be anything.
    Uri clusterURI = new Uri("http://yourmachine");
    string username = "******";
    string password = null;
    IHadoop cluster = Hadoop.Connect(clusterURI, username, password);

    Console.WriteLine("Crime Counter. Select an option to continue:");
    Console.WriteLine("1) Raw count by crime");
    Console.WriteLine("2) Count by coordinates (4 spots after decimal)");
    var input = Console.ReadLine();

    MapReduceResult jobResult;
    switch (input)
    {
        case "1":
            jobResult = cluster.MapReduceJob.Execute<CrimeCount, TotalCrimeCount>(config);
            break;
        case "2":
            // Quick note: if we just wanted to spit out all areas regardless of
            // the number of crimes, we could just use the TotalCrimeCount class
            // and would not need to generate a new Reduce class.
            jobResult = cluster.MapReduceJob.Execute<CrimeLocation, TopCrimeLocations>(config);
            break;
        default:
            return;
    }

    int exitcode = jobResult.Info.ExitCode;
    string exitstatus = exitcode == 0 ? "Success" : "Failure";
    Console.WriteLine();
    Console.WriteLine("Exit Code = " + exitstatus);
    Console.Read();
}
public static IHadoop connectAzure()
{
    Environment.SetEnvironmentVariable("HADOOP_HOME", @"c:\hadoop"); // Workaround for environment variable issue
    return Hadoop.Connect(
        new Uri("https://TAnalyser.azurehdinsight.net"),
        "admin",
        "admin1",
        "ABC!123abc",
        "tweetbase.blob.core.windows.net",
        "QfrZs8Jwyeu1DDoxZ+0aVoUUsmH02ssTAH0BN8JPUq8sZZF5KowBREU9icS506DEFiuZIafLMg2RsUo3tWq2XA==",
        "tanalyser",
        true);
}
static void Main(string[] args)
{
    HadoopJobConfiguration conf = new HadoopJobConfiguration
    {
        InputPath = "/demo/in",
        OutputFolder = "/demo/out"
    };

    Hadoop.Connect(new Uri("http://win8-dev-pc/"), "hadoop", "")
          .MapReduceJob
          .Execute<Mapper, Reducer>(conf);
}
static void Main(string[] args)
{
    // Run Map/Reduce jobs.
    var hadoop = Hadoop.Connect();
    Console.WriteLine("Running KmPerVehicle");
    var result = hadoop.MapReduceJob.ExecuteJob<KmPerVehicle.Job>();

    // Wait for the user to quit the program.
    Console.WriteLine("Done. Press Enter to quit");
    Console.ReadLine();
}
public static void DoCustomMapReduce()
{
    // The credentials below are dummy values. Enter valid credentials before submitting jobs.
    Environment.SetEnvironmentVariable("HADOOP_HOME", @"C:\Syncfusion\BigData\3.2.0.20\BigDataSDK\SDK\Hadoop");
    Environment.SetEnvironmentVariable("JAVA_HOME", @"C:\Syncfusion\BigData\3.2.0.20\BigDataSDK\Java\jdk1.7.0_51");

    // Pass the cluster name.
    string clusterName = "https://{clustername}.azurehdinsight.net";
    Uri azureCluster = new Uri(clusterName);
    string clusterUserName = "******"; // default: admin
    string clusterPassword = "******";

    // This is the name of the account under which Hadoop will execute jobs.
    // Normally this is just "Hadoop".
    string hadoopUserName = "******";

    // Azure Storage information.
    string azureStorageAccount = "{storagename}.blob.core.windows.net";
    string azureStorageKey = "{storagekey}";
    string azureStorageContainer = "{storagecontainer}";

    //Console.WriteLine("Starting MapReduce job. Remote login to your Name Node and check progress from JobTracker portal with the returned JobID...");

    // Connect to the HDInsight cluster.
    IHadoop hadoop = Hadoop.Connect(azureCluster, clusterUserName, hadoopUserName, clusterPassword,
                                    azureStorageAccount, azureStorageKey, azureStorageContainer, true);

    // Upload the input file to the cluster's storage, overwriting any existing content.
    var fileContents = File.ReadAllText(@"..\..\data\NASA_Access_Log");
    hadoop.StorageSystem.WriteAllText(FindReplace._input1HDFS, fileContents);
    Console.WriteLine("Input file uploaded.......\n\n");

    Console.WriteLine("Find and Replace Operation.\n\nImplementation of Find and Replace operations in native MapReduce through C#");
    Console.WriteLine("Execution begins......\n");

    MapReduceResult result = hadoop.MapReduceJob.ExecuteJob<FindReplace>();

    Console.WriteLine();
    Console.WriteLine("Job Run Information");
    Console.WriteLine();
    Console.WriteLine("Job Id: {0}", result.Id);
    Console.WriteLine("Exit Code: {0}", result.Info.ExitCode);
    Console.WriteLine("Standard Out");
    Console.WriteLine(result.Info.StandardOut);
    Console.WriteLine();
    Console.WriteLine("Standard Err");
    Console.WriteLine(result.Info.StandardError);
    Console.ReadKey();
}
static void Main(string[] args)
{
    var azureCluster = new Uri("https://aziza.azurehdinsight.net");
    //var hadoop = Hadoop.Connect(azureCluster, clusterUserName, hadoopUserName, clusterPassword, azureStorageAccount, azureStorageKey, azureStorageContainer, createContinerIfNotExist);
    var hadoop = Hadoop.Connect();

    Console.WriteLine("Starting: {0} ", DateTime.Now);
    var result = hadoop.MapReduceJob.ExecuteJob<TwitExtractionJob>();
    var info = result.Info;
    Console.WriteLine("Done: {0} ", DateTime.Now);
    Console.Read();
}
private static void Main(string[] args)
{
    IHadoop hadoop = Hadoop.Connect(MyConstants.AzureCluster,
                                    MyConstants.AzureUserName,
                                    MyConstants.HadoopUserName,
                                    MyConstants.AzurePassword,
                                    MyConstants.AzureStorageAccount,
                                    MyConstants.AzureStorageKey,
                                    MyConstants.AzureStorageContainer,
                                    false);

    //var result = hadoop.MapReduceJob.ExecuteJob<AggregatePurchaseJob>();
    //Console.WriteLine("Aggregate Purchase Job complete: {0}", result.Id);

    var chainedResult = hadoop.MapReduceJob.ExecuteJob<NewUserPurchasesByContinentJob>();
    Console.WriteLine("New User Purchases By Continent job complete: {0}", chainedResult.Id);
    Console.ReadLine();
}
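The commented-out AggregatePurchaseJob and the "chained" naming suggest NewUserPurchasesByContinentJob composes more than one pass. One way to express such chaining with this SDK is to point the second job's InputPath at the first job's OutputFolder. The sketch below is purely illustrative: the type names, paths, CSV layout, and the tab-separated intermediate format are all assumptions, not the original project's code.

using System.Collections.Generic;
using Microsoft.Hadoop.MapReduce;

public class StageOneMapper : MapperBase
{
    public override void Map(string inputLine, MapperContext context)
    {
        // Hypothetical: key CSV purchase records by their first column.
        context.EmitKeyValue(inputLine.Split(',')[0], "1");
    }
}

public class CountReducer : ReducerCombinerBase
{
    public override void Reduce(string key, IEnumerable<string> values,
                                ReducerCombinerContext context)
    {
        // Sum the counts emitted for each key.
        int total = 0;
        foreach (var v in values) total += int.Parse(v);
        context.EmitKeyValue(key, total.ToString());
    }
}

public class StageTwoMapper : MapperBase
{
    public override void Map(string inputLine, MapperContext context)
    {
        // Hypothetical: consume the first stage's "key<TAB>count" lines
        // and re-key everything under a single total.
        var parts = inputLine.Split('\t');
        context.EmitKeyValue("total", parts[1]);
    }
}

public static class ChainedJobRunner
{
    public static void Run(IHadoop hadoop)
    {
        var first = hadoop.MapReduceJob.Execute<StageOneMapper, CountReducer>(
            new HadoopJobConfiguration
            {
                InputPath = "/data/purchases/in",        // assumed paths
                OutputFolder = "/data/purchases/stage1",
                DeleteOutputFolder = true
            });

        if (first.Info.ExitCode != 0) return;

        // The second stage reads the first stage's output folder.
        hadoop.MapReduceJob.Execute<StageTwoMapper, CountReducer>(
            new HadoopJobConfiguration
            {
                InputPath = "/data/purchases/stage1",
                OutputFolder = "/data/purchases/out",
                DeleteOutputFolder = true
            });
    }
}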
static void Main(string[] args)
{
    Environment.SetEnvironmentVariable("HADOOP_HOME", @"c:\hadoop");
    Environment.SetEnvironmentVariable("JAVA_HOME", @"c:\hadoop\jvm");

    var hadoop = Hadoop.Connect(new Uri("https://simonellistonball.azurehdinsight.net"),
                                "simonellistonball",
                                "bob",
                                "bD9/6+WoP0d:eI%25lT'",
                                "sebhdinsight",
                                "XygTMN16161vt63tTyvQbMvpoNTDUi8HYyP91Mz6Vll4i4e71s2c29QZs7FVte4jUXuAkq8/wD8KL1CTh5kkxA==",
                                "cluster",
                                false);

    var result = hadoop.MapReduceJob.ExecuteJob<LoveCleanStreetsAverageLinqJob>();
    if (result.Info.ExitCode != 0)
    {
        Console.WriteLine("Returned with unexpected exit code of " + result.Info.ExitCode);
    }
    Console.ReadLine();
}
private static void Main(string[] args)
{
    Environment.SetEnvironmentVariable("HADOOP_HOME", "abc");
    Environment.SetEnvironmentVariable("JAVA_HOME", "abc");
    //Environment.SetEnvironmentVariable("HADOOP_HOME", @"C:\apps1\dist\hadoop-2.7.1.2.3.3.1-25");
    //Environment.SetEnvironmentVariable("JAVA_HOME", @"C:\apps1\dist\java");

    var hadoop = Hadoop.MakeAzure(
        new Uri("https://sergeyrud.azurehdinsight.net"),
        "sergeyrud",
        "sergeyrud",
        "Password1!",
        "sergeyrud.blob.core.windows.net",
        "D8P7p1KoEvb/aMtfNmYvVwZ2h3p2JW9GMUao2G7y/hKILCAziAWEQ3uw28kISa5TEs/cO+fI9iE6a3AnP6Hkbw==",
        "sergeyrud",
        false);

    try
    {
        hadoop.MapReduceJob.ExecuteJob<ProcessingJob>();
    }
    catch (AggregateException e) when (e.InnerExceptions.SingleOrDefault() is HttpRequestException)
    {
        Console.WriteLine("Master, it happened. Again.");
    }
}
// Main method: configure and run the three NHS analysis jobs.
static void Main(string[] args)
{
    // Job configurations for the data loaded into HDInsight.
    HadoopJobConfiguration practice_jobConfig = new HadoopJobConfiguration
    {
        InputPath = "/user/Jayakaran/Input/NHS/Practices",
        OutputFolder = "/user/Jayakaran/Output/NHS/Practices",
        DeleteOutputFolder = true
    };
    HadoopJobConfiguration prescription_jobConfig = new HadoopJobConfiguration
    {
        InputPath = "/user/Jayakaran/Input/NHS/Prescription",
        OutputFolder = "/user/Jayakaran/Output/NHS/Prescription",
        DeleteOutputFolder = true
    };
    HadoopJobConfiguration combined_jobConfig = new HadoopJobConfiguration
    {
        InputPath = "/user/Jayakaran/Input/NHS/Combined",
        OutputFolder = "/user/Jayakaran/Output/NHS/Combined",
        DeleteOutputFolder = true
    };

    // Call the jobs.
    // Question 1: How many practices are in London?
    Hadoop.Connect().MapReduceJob.Execute<PracticesCountDataMapper, PracticesCountDataReducer>(practice_jobConfig);
    // Question 2: What was the average actual cost of all peppermint oil prescriptions?
    Hadoop.Connect().MapReduceJob.Execute<PrescriptionFilterDataMapper, PrescriptionFilterDataReducer>(prescription_jobConfig);
    // Question 3: Which 5 post codes have the highest actual spend, and how much did each spend in total?
    Hadoop.Connect().MapReduceJob.Execute<CombinedDataMapper, CombinedDataReducer>(combined_jobConfig);

    System.Console.Read(); // Keep the console open.
}
public static IHadoop GetCluster()
{
    return Hadoop.Connect(new Uri("http://localhost"), "hadoop", null);
}
public string SerializeJobDetails(Hadoop.Client.JobDetails jobDetails)
{
    var result = new PassthroughResponse();
    if (jobDetails.ErrorCode.IsNotNullOrEmpty() || jobDetails.HttpStatusCode != HttpStatusCode.Accepted)
    {
        result.Error = new PassthroughErrorResponse
        {
            StatusCode = jobDetails.HttpStatusCode,
            ErrorId = jobDetails.ErrorCode
        };
    }

    var details = new Microsoft.ClusterServices.RDFEProvider.ResourceExtensions.JobSubmission.Models.JobDetails
    {
        ErrorOutputPath = jobDetails.ErrorOutputPath,
        ExitCode = jobDetails.ExitCode,
        LogicalOutputPath = jobDetails.LogicalOutputPath,
        Name = jobDetails.Name,
        PhysicalOutputPath = new Uri(jobDetails.PhysicalOutputPath),
        Query = jobDetails.Query,
        SubmissionTime = jobDetails.SubmissionTime.Ticks.ToString()
    };

    Microsoft.ClusterServices.RDFEProvider.ResourceExtensions.JobSubmission.Models.JobStatusCode statusCode;
    Assert.IsTrue(Enum.TryParse(jobDetails.StatusCode.ToString(), out statusCode));
    details.StatusCode = statusCode;
    result.Data = details;

    return this.SerializeJobDetails(result);
}
static void Main(string[] args)
{
    var hadoop = Hadoop.Connect();
    var result = hadoop.MapReduceJob.ExecuteJob<SqrtJob>();
}
static void Main(string[] args)
{
    var hadoop = Hadoop.Connect();
    hadoop.MapReduceJob.ExecuteJob<FirstJob>();
}