static void Main(string[] args)
{
    try
    {
        // Run the StreamingUnit-based local validation first.
        RunUsingStreamingUnit();

        // Connect to the Azure HDInsight cluster using the shared constants.
        IHadoop cluster = Hadoop.Connect(
            MyConstants.AzureCluster,
            MyConstants.AzureUserName,
            MyConstants.HadoopUserName,
            MyConstants.AzurePassword,
            MyConstants.AzureStorageAccount,
            MyConstants.AzureStorageKey,
            MyConstants.AzureStorageContainer,
            false);

        // Submit the MovieLens job and dump the run diagnostics.
        var jobResult = cluster.MapReduceJob.ExecuteJob<MovieLensJob>();

        Console.WriteLine();
        Console.WriteLine("Job Run Information");
        Console.WriteLine();
        Console.WriteLine("Job Id: {0}", jobResult.Id);
        Console.WriteLine("Exit Code: {0}", jobResult.Info.ExitCode);
        Console.WriteLine("Standard Out");
        Console.WriteLine(jobResult.Info.StandardOut);
        Console.WriteLine();
        Console.WriteLine("Standard Err");
        Console.WriteLine(jobResult.Info.StandardError);
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        // Surface any failure and keep the console window open.
        Console.WriteLine(ex);
        Console.ReadLine();
    }
}
public void Run()
{
    // Connect with default settings and submit the HelloWorld job,
    // then wait for a keypress so the console stays visible.
    var cluster = Hadoop.Connect();
    cluster.MapReduceJob.ExecuteJob<HelloWorldJob>();
    Console.Read();
}
static void Main(string[] args)
{
    // Job configuration: HDFS input path and output folder.
    var myConfig = new HadoopJobConfiguration
    {
        InputPath = "/demo/simple/in",
        OutputFolder = "/demo/simple/out"
    };

    // Connect to the cluster (password intentionally null for this sample).
    var myUri = new Uri("http://localhost");
    const string userName = "******";
    string passWord = null;
    IHadoop myCluster = Hadoop.Connect(myUri, userName, passWord);

    // Execute the map/reduce pair.
    MapReduceResult jobResult = myCluster.MapReduceJob.Execute<MySimpleMapper, MySimpleReducer>(myConfig);

    // Report "<code> (Success|Failure)" to the console.
    int exitCode = jobResult.Info.ExitCode;
    string exitStatus = exitCode + " (" + (exitCode == 0 ? "Success" : "Failure") + ")";
    Console.WriteLine();
    Console.Write("Exit Code = " + exitStatus);
    Console.Read();
}
//Run Custom Map Reduce
public static void DoCustomMapReduce()
{
    Console.WriteLine("Starting MapReduce job. Remote login to your Name Node and check progress from JobTracker portal with the returned JobID...");

    // Connect to the Azure HDInsight cluster described by the Constants class.
    IHadoop hadoop = Hadoop.Connect(
        Constants.azureClusterUri,
        Constants.clusterUser,
        Constants.hadoopUser,
        Constants.clusterPassword,
        Constants.storageAccount,
        Constants.storageAccountKey,
        Constants.container,
        true);
    //IHadoop hadoop = Hadoop.Connect();

    // Fix: the job result was previously captured in an unused local
    // ("var output = ..."); the dead assignment has been removed.
    hadoop.MapReduceJob.ExecuteJob<SquareRootJob>();
}
public void Run()
{
    // Fix: the original built "Uri uri = new Uri(@"https://10.0.0.4");" into a
    // local that was never used — it belonged to the commented-out remote
    // connect below — so the dead local has been removed.
    //IHadoop hdp = Hadoop.Connect(new Uri(@"https://10.0.0.4"), "cloudera", "cloudera");
    IHadoop hdp = Hadoop.Connect();
    hdp.MapReduceJob.ExecuteJob<WCJobConf>();
    Console.Read();
}
public static void DoCustomMapReduce()
{
    Console.WriteLine("Starting MapReduce job...");

    // Connect to the Azure HDInsight cluster described by the Constants class.
    IHadoop hadoop = Hadoop.Connect(
        Constants.azureClusterUri,
        Constants.clusterUser,
        Constants.hadoopUser,
        Constants.clusterPassword,
        Constants.storageAccount,
        Constants.storageAccountKey,
        Constants.container,
        true);

    // Fix: the job result was previously captured in an unused local
    // ("var output = ..."); the dead assignment has been removed.
    hadoop.MapReduceJob.ExecuteJob<RootJob>();
}
static void Main(string[] args)
{
    /* HDFS setup — run these from the Hadoop console first:
     *   hadoop fs -mkdir /user/OpenData
     *   hadoop fs -mkdir /user/OpenData/Police
     *   hadoop fs -copyFromLocal C:\Temp\Datasets\Police.csv /user/OpenData/Police/
     *   hadoop fs -mkdir /user/OpenData/Output
     */
    var jobConfig = new HadoopJobConfiguration
    {
        InputPath = "/user/OpenData/Police",
        OutputFolder = "/user/OpenData/Output"
    };

    // Replace the URI with your local machine name. The HDInsight emulator
    // ignores the password, so it can be anything (null here).
    var clusterUri = new Uri("http://yourmachine");
    string username = "******";
    string password = null;
    IHadoop hadoopCluster = Hadoop.Connect(clusterUri, username, password);

    Console.WriteLine("Crime Counter. Select an option to continue:");
    Console.WriteLine("1) Raw count by crime");
    Console.WriteLine("2) Count by coordinates (4 spots after decimal)");
    var choice = Console.ReadLine();

    MapReduceResult result;
    switch (choice)
    {
        case "1":
            result = hadoopCluster.MapReduceJob.Execute<CrimeCount, TotalCrimeCount>(jobConfig);
            break;
        case "2":
            // If we wanted every area regardless of crime count, we could reuse
            // TotalCrimeCount instead of the dedicated TopCrimeLocations reducer.
            result = hadoopCluster.MapReduceJob.Execute<CrimeLocation, TopCrimeLocations>(jobConfig);
            break;
        default:
            // Any other input: exit without running a job.
            return;
    }

    // Report success/failure based on the job's exit code.
    string status = result.Info.ExitCode == 0 ? "Success" : "Failure";
    Console.WriteLine();
    Console.WriteLine("Exit Code = " + status);
    Console.Read();
}
public static void DoCustomMapReduce()
{
    // The credentials below are dummy values — substitute valid ones before submitting jobs.
    Environment.SetEnvironmentVariable("HADOOP_HOME", @"C:\Syncfusion\BigData\3.2.0.20\BigDataSDK\\SDK\Hadoop");
    Environment.SetEnvironmentVariable("JAVA_HOME", @"C:\Syncfusion\BigData\3.2.0.20\BigDataSDK\\Java\jdk1.7.0_51");

    // Cluster endpoint and login.
    string clusterName = "https://{clustername}.azurehdinsight.net:";
    var azureCluster = new Uri(clusterName);
    string clusterUserName = "******"; // default - admin
    string clusterPassword = "******";

    // Account under which Hadoop executes jobs (normally just "Hadoop").
    string hadoopUserName = "******";

    // Azure Storage settings.
    string azureStorageAccount = "{storagename}.blob.core.windows.net";
    string azureStorageKey = "{storagekey}";
    string azureStorageContainer = "{storagecontainer}";

    //Console.WriteLine("Starting MapReduce job. Remote login to your Name Node and check progress from JobTracker portal with the returned JobID...");
    IHadoop hadoop = Hadoop.Connect(
        azureCluster,
        clusterUserName,
        hadoopUserName,
        clusterPassword,
        azureStorageAccount,
        azureStorageKey,
        azureStorageContainer,
        true);

    // Upload the sample log into HDFS, then run the FindReplace job.
    var fileStream = File.ReadAllText(@"..//..//data/NASA_Access_Log");
    hadoop.StorageSystem.WriteAllText(FindReplace._input1HDFS, fileStream);
    Console.WriteLine("Input file uploaded.......\n\n");
    Console.WriteLine("Find and Replace Operation.\n\nImplementation of Find and Replace operations in native MapReduce through C#");
    Console.WriteLine("Execution begins......\n");

    MapReduceResult result = hadoop.MapReduceJob.ExecuteJob<FindReplace>();

    // Dump the run diagnostics.
    Console.WriteLine();
    Console.WriteLine("Job Run Information");
    Console.WriteLine();
    Console.WriteLine("Job Id: {0}", result.Id);
    Console.WriteLine("Exit Code: {0}", result.Info.ExitCode);
    Console.WriteLine("Standard Out");
    Console.WriteLine(result.Info.StandardOut);
    Console.WriteLine();
    Console.WriteLine("Standard Err");
    Console.WriteLine(result.Info.StandardError);
    Console.ReadKey();
}
private static void DoMapReduce()
{
    try
    {
        SetEnvironment();
        UserInteraction();
        var config = GetHadoopConfiguration();

        // Pick a connection strategy based on the user's choices.
        IHadoop myCluster = null;
        if (IsRemote)
        {
            if (!CreateConfigurationDirectory())
            {
                Console.WriteLine("Failed to create configuration directory");
            }
            else
            {
                myCluster = IsSecured
                    ? ConnectToSecuredRemoteCluster()
                    : ConnectToRemoteCluster();
            }
        }
        else
        {
            myCluster = ConnectToLocalPseudoCluster();
        }

        if (myCluster != null)
        {
            //execute mapreduce job
            Console.WriteLine("\n\nExecution begins......\n");
            //passing the Mapper and Reducer
            var jobResult = myCluster.MapReduceJob.Execute<SimpleMapper, SimpleReducer>(config);
            UpdateExecutionStatus(jobResult.Info.ExitCode);
        }
        else
        {
            // Fix: UpdateExecutionStatus(1) previously ran UNCONDITIONALLY after
            // the if-block, overwriting the real exit status even on success.
            // It now reports failure only when no cluster connection was made.
            UpdateExecutionStatus(1);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        Console.Read();
    }
}
public HDInsightRepository()
{
    Environment.SetEnvironmentVariable("HADOOP_HOME", @"c:\hadoop");
    // Fix: was "Java_HOME"; the conventional spelling — required on
    // case-sensitive platforms and used elsewhere in this codebase — is "JAVA_HOME".
    Environment.SetEnvironmentVariable("JAVA_HOME", @"c:\hadoop\jvm");

    // Connect to the HDInsight cluster; the angle-bracket values are
    // placeholders to be replaced with real cluster/storage details.
    hadoop = Hadoop.Connect(
        new Uri("<clusterurl>"),
        "<username>",
        "<hadoopuser>",
        "<pass>",
        "<storageaccount>",
        "<storagekey>",
        "default",
        true);
}
static void Main(string[] args)
{
    // Job 1: extract indicator values from the World Bank data set.
    HadoopJobConfiguration myConfig = new HadoopJobConfiguration();
    myConfig.InputPath = "/world/in/worldbank";
    myConfig.OutputFolder = "/world/out";

    // Fix: credential scrubbing mangled this section into
    //   new Uri("http://*****:*****@"c:\hadoop");
    // which did not compile and destroyed the userName/passWord declarations
    // used by Hadoop.Connect below. Reconstructed with placeholder values —
    // substitute your real cluster URI and credentials before running.
    Uri myUri = new Uri("http://localhost");
    string userName = "hadoop";
    string passWord = null;

    Environment.SetEnvironmentVariable("HADOOP_HOME", @"c:\hadoop");
    Environment.SetEnvironmentVariable("Java_HOME", @"c:\hadoop\jvm");

    IHadoop myCluster = Microsoft.Hadoop.MapReduce.Hadoop.Connect(myUri, userName, passWord);
    MapReduceResult jobResult = myCluster.MapReduceJob.Execute<ExtractValuesForIndicatorsMapper, IndicatorsReducer>(myConfig);

    // Job 2 (currently disabled): group the values produced by job 1.
    HadoopJobConfiguration myConfig2 = new HadoopJobConfiguration();
    myConfig2.InputPath = "/world/out";
    myConfig2.OutputFolder = "/world/out2";
    //MapReduceResult jobResult2 = myCluster.MapReduceJob.Execute<GroupValuesMapper, GroupValuesReducer>(myConfig2);

    // Report "<code> (Success|Failure)" to the console.
    int exitCode = jobResult.Info.ExitCode;
    string exitStatus = "Failure";
    if (exitCode == 0)
    {
        exitStatus = "Success";
    }
    exitStatus = exitCode + " (" + exitStatus + ")";
    Console.WriteLine();
    Console.Write("Exit Code = " + exitStatus);
}
private static void Main(string[] args)
{
    // Connect to the Azure HDInsight cluster using the shared constants.
    var hadoop = Hadoop.Connect(
        MyConstants.AzureCluster,
        MyConstants.AzureUserName,
        MyConstants.HadoopUserName,
        MyConstants.AzurePassword,
        MyConstants.AzureStorageAccount,
        MyConstants.AzureStorageKey,
        MyConstants.AzureStorageContainer,
        false);

    //var result = hadoop.MapReduceJob.ExecuteJob<AggregatePurchaseJob>();
    //Console.WriteLine("Aggregate Purchase Job complete: {0}", result.Id);

    // Run the chained job and report its id, then wait before exiting.
    var chainedResult = hadoop.MapReduceJob.ExecuteJob<NewUserPurchasesByContinentJob>();
    Console.WriteLine("New User Purchases By Continent job complete: {0}", chainedResult.Id);
    Console.ReadLine();
}