public static void SubmitJobToCluster(HDInsightJobManagementClient hdiJobManagementClient, HiveJobSubmissionParameters job)
        {
            // Submit the Hive job, block until the cluster reports completion,
            // then dump stdout (on success) or stderr (on failure) to the console.
            System.Console.WriteLine("Submitting the Hive job to the cluster...");
            var submission = hdiJobManagementClient.JobManagement.SubmitHiveJob(job);
            var id         = submission.JobSubmissionJsonResponse.Id;

            System.Console.WriteLine("Response status code is " + submission.StatusCode);
            System.Console.WriteLine("JobId is " + id);

            System.Console.WriteLine("Waiting for the job completion ...");

            // Poll once per second until the job is marked complete.
            var detail = hdiJobManagementClient.JobManagement.GetJob(id).JobDetail;

            while (!detail.Status.JobComplete)
            {
                Thread.Sleep(1000);
                detail = hdiJobManagementClient.JobManagement.GetJob(id).JobDetail;
            }

            // Output lives in the cluster's default storage account.
            // NOTE(review): storageAccount/storageKey/storageContainer are fields
            // declared elsewhere in this class.
            var storageAccess = new AzureStorageAccess(storageAccount, storageKey,
                                                       storageContainer);

            // Exit value 0 means success -> read stdout; otherwise read stderr.
            Stream output;
            if (detail.ExitValue == 0)
            {
                output = hdiJobManagementClient.JobManagement.GetJobOutput(id, storageAccess);
            }
            else
            {
                output = hdiJobManagementClient.JobManagement.GetJobErrorLogs(id, storageAccess);
            }

            System.Console.WriteLine("Job output is: ");

            using (var reader = new StreamReader(output, Encoding.UTF8))
            {
                System.Console.WriteLine(reader.ReadToEnd());
            }
        }
        /// <summary>
        /// Submits a .NET Core streaming MapReduce word-count job to the cluster,
        /// waits for it to finish, and prints stdout (success) or stderr (failure).
        /// Uses the class-level <c>_hdiJobManagementClient</c> and default storage settings.
        /// </summary>
        private static void SubmitMRJob()
        {
            // Streaming job: the mapper/reducer are .NET Core assemblies launched
            // via "dotnet"; /example/coreapp must already contain them in storage.
            var paras = new MapReduceStreamingJobSubmissionParameters
            {
                Files = new List <string>()
                {
                    "/example/coreapp",
                },
                Mapper  = "dotnet coreapp/NetCoreMapper.dll",
                Reducer = "dotnet coreapp/NetCoreReducer.dll",
                Input   = "/example/data/gutenberg/davinci.txt",
                Output  = "/example/data/StreamingOutput/wc.txt"
            };

            Console.WriteLine("Submitting the MR job to the cluster...");
            var jobResponse = _hdiJobManagementClient.JobManagement.SubmitMapReduceStreamingJob(paras);
            var jobId       = jobResponse.JobSubmissionJsonResponse.Id;

            Console.WriteLine("Response status code is " + jobResponse.StatusCode);
            Console.WriteLine("JobId is " + jobId);

            Console.WriteLine("Waiting for the job completion ...");

            // Poll once per second until the cluster marks the job complete.
            var jobDetail = _hdiJobManagementClient.JobManagement.GetJob(jobId).JobDetail;

            while (!jobDetail.Status.JobComplete)
            {
                Thread.Sleep(1000);
                jobDetail = _hdiJobManagementClient.JobManagement.GetJob(jobId).JobDetail;
            }

            // Job output is written to the cluster's default storage container.
            var storageAccess = new AzureStorageAccess(DefaultStorageAccountName, DefaultStorageAccountKey,
                                                       DefaultStorageContainerName);
            var output = (jobDetail.ExitValue == 0)
                ? _hdiJobManagementClient.JobManagement.GetJobOutput(jobId, storageAccess)     // fetch stdout output in case of success
                : _hdiJobManagementClient.JobManagement.GetJobErrorLogs(jobId, storageAccess); // fetch stderr output in case of failure

            Console.WriteLine("Job output is: ");

            using (var reader = new StreamReader(output, Encoding.UTF8))
            {
                string value = reader.ReadToEnd();
                Console.WriteLine(value);
            }
        }
// Example #3
        /// <summary>
        /// Inserts <paramref name="photo"/> into the Hive "Photo" table, waits for the
        /// job to complete, and returns "success" or the job's stderr text on failure.
        /// </summary>
        /// <param name="photo">Photo whose Id, Title, and Url are inserted.</param>
        /// <returns>"success" when the job exits with 0; otherwise the error log text.</returns>
        private string AddNewPhotoHiveJob(Photo photo)
        {
            Dictionary <string, string> defines = new Dictionary <string, string> {
                { "hive.execution.engine", "tez" }, { "hive.exec.reducers.max", "1" }
            };
            // Pass the values as --hiveconf variables so the query template stays fixed.
            // NOTE(review): Title/Url are interpolated verbatim — values containing
            // quotes could break the quoted Hive literals; confirm inputs are sanitized.
            List <string> args = new List <string> {
                { "--hiveconf" },
                { $"Id={photo.Id}" },
                { "--hiveconf" },
                { $"Title={photo.Title}" },
                { "--hiveconf" },
                { $"Url={photo.Url}" }
            };
            var parameters = new HiveJobSubmissionParameters
            {
                Query     = "INSERT INTO TABLE Photo VALUES(${hiveconf:Id}, '${hiveconf:Title}' , '${hiveconf:Url}');",
                Defines   = defines,
                Arguments = args
            };
            var jobResponse = _jobClient.JobManagement.SubmitHiveJob(parameters);
            var jobId       = jobResponse.JobSubmissionJsonResponse.Id;

            // Poll once per second until the cluster marks the job complete.
            var jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;

            while (!jobDetail.Status.JobComplete)
            {
                Thread.Sleep(1000);
                jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;
            }

            // Error logs live in the cluster's default storage container.
            var storageAccess = new AzureStorageAccess(DefaultStorageAccountName, DefaultStorageAccountKey,
                                                       DefaultStorageContainerName);
            var result = "";

            if (jobDetail.ExitValue == 0)
            {
                result = "success";
            }
            else
            {
                // Non-zero exit: surface the job's stderr so the caller can see why it failed.
                Stream output = _jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                using (var reader = new StreamReader(output, Encoding.UTF8))
                {
                    result = reader.ReadToEnd();
                }
            }

            return result;
        }
// Example #4
        /// <summary>
        /// Builds a recalculation job: keeps the supplied configuration, creates
        /// storage access for the cluster's default container, and opens an
        /// HDInsight job-management client with basic-auth cluster credentials.
        /// </summary>
        public RecalculationJob(StorageConfiguration storageConfiguration, HDInsightConfiguration configuration)
        {
            _storageConfiguration = storageConfiguration;
            _configuration        = configuration;

            _storageAccess = new AzureStorageAccess(
                configuration.DefaultStorageAccountName,
                configuration.DefaultStorageAccountKey,
                configuration.DefaultStorageContainerName);

            var credentials = new BasicAuthenticationCloudCredentials
            {
                Username = configuration.ExistingClusterUsername,
                Password = configuration.ExistingClusterPassword
            };

            _hdiJobManagementClient = new HDInsightJobManagementClient(configuration.ExistingClusterUri, credentials);
        }
        /// <summary>
        /// End-to-end test: submits a streaming MapReduce word-count job (with the
        /// Files parameter), waits for completion, and verifies that stdout is
        /// non-empty on success. A non-zero exit value fails the test after
        /// capturing the error logs (fetched only when recording HTTP traffic).
        /// </summary>
        public void SubmitMapReduceStreamingJobWithFilesParam()
        {
            using (var context = UndoContext.Current)
            {
                context.Start();

                var username = TestUtils.WinUserName;
                var password = TestUtils.WinPassword;
                var clustername = TestUtils.WinClusterName;

                var credentials = new BasicAuthenticationCloudCredentials
                {
                    Username = username,
                    Password = password
                };

                var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

                var parameters = new MapReduceStreamingJobSubmissionParameters
                {
                    Mapper = "cat.exe",
                    Reducer = "wc.exe",
                    Input = "/example/data/gutenberg/davinci.txt",
                    Output = "/example/data/gutenberg/wcount",
                    Files = new List<string> { "/example/apps/wc.exe", "/example/apps/cat.exe" }
                };

                var response = client.JobManagement.SubmitMapReduceStreamingJob(parameters);
                Assert.NotNull(response);
                // xUnit convention: expected value first, actual second.
                Assert.Equal(HttpStatusCode.OK, response.StatusCode);

                var jobId = response.JobSubmissionJsonResponse.Id;
                Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

                var jobStatus = GetJobFinalStatus(client, jobId);

                var storageAccess = new AzureStorageAccess(TestUtils.WinStorageAccountName, TestUtils.WinStorageAccountKey, TestUtils.WinDefaultContainer);

                if (jobStatus.JobDetail.ExitValue == 0)
                {
                    // Storage access is only available while recording, not in playback.
                    if (HttpMockServer.Mode == HttpRecorderMode.Record)
                    {
                        // Retrieve Job Output
                        var output = client.JobManagement.GetJobOutput(jobId, storageAccess);
                        string textOutput = Convert(output);
                        Assert.True(textOutput.Length > 0);
                    }
                }
                else
                {
                    if (HttpMockServer.Mode == HttpRecorderMode.Record)
                    {
                        var output = client.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                        string errorTextOutput = Convert(output);
                        Assert.NotNull(errorTextOutput);
                    }

                    // Explicit message instead of a bare Assert.True(false).
                    Assert.True(false, "MapReduce streaming job failed with exit value " + jobStatus.JobDetail.ExitValue);
                }
            }
        }
// Example #6
        /// <summary>
        /// Runs a Hive query for the first 10 Photo rows and returns them as JSON.
        /// On job failure, returns the job's stderr text; on exception, returns the
        /// exception wrapped in a HandledJsonResult.
        /// </summary>
        public JsonResult GetTop10Photo()
        {
            try
            {
                Dictionary <string, string> defines = new Dictionary <string, string> {
                    { "hive.execution.engine", "tez" }, { "hive.exec.reducers.max", "1" }
                };
                var parameters = new HiveJobSubmissionParameters
                {
                    Query     = "SELECT * FROM Photo LIMIT 10;",
                    Defines   = defines,
                    Arguments = null
                };

                var jobResponse = _jobClient.JobManagement.SubmitHiveJob(parameters);
                var jobId       = jobResponse.JobSubmissionJsonResponse.Id;

                // Poll once per second until the cluster marks the job complete.
                var jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;
                while (!jobDetail.Status.JobComplete)
                {
                    Thread.Sleep(1000);
                    jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;
                }

                // Job output lives in the cluster's default storage container.
                var storageAccess = new AzureStorageAccess(DefaultStorageAccountName, DefaultStorageAccountKey,
                                                           DefaultStorageContainerName);
                IList <Photo> listPhoto = new List <Photo>();
                Stream        output;
                if (jobDetail.ExitValue == 0)
                {
                    output = _jobClient.JobManagement.GetJobOutput(jobId, storageAccess);
                    using (var reader = new StreamReader(output, Encoding.UTF8))
                    {
                        // Hive emits one row per line, tab-separated: Id \t Title \t Url.
                        string[] lines = reader.ReadToEnd().Split("\n".ToCharArray());
                        foreach (var line in lines)
                        {
                            if (string.IsNullOrEmpty(line))
                            {
                                continue;
                            }

                            string[] splitContent = line.Split("\t".ToCharArray());
                            // Guard against malformed/truncated rows instead of
                            // throwing IndexOutOfRangeException.
                            if (splitContent.Length < 3)
                            {
                                continue;
                            }

                            Photo photo = new Photo();
                            int   id;
                            // A non-numeric Id falls back to 0 (TryParse's default).
                            int.TryParse(splitContent[0], out id);
                            photo.Id    = id;
                            photo.Title = splitContent[1];
                            photo.Url   = splitContent[2];
                            listPhoto.Add(photo);
                        }
                    }
                    return(Json(new HandledJsonResult {
                        Data = listPhoto
                    }));
                }
                else
                {
                    // Non-zero exit: surface the job's stderr to the caller.
                    string message;
                    output = _jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                    using (var reader = new StreamReader(output, Encoding.UTF8))
                    {
                        message = reader.ReadToEnd();
                    }
                    return(Json(new HandledJsonResult {
                        Data = message
                    }));
                }
            }
            catch (Exception ex)
            {
                return(Json(new HandledJsonResult(ex)));
            }
        }
        /// <summary>
        /// End-to-end test: submits a streaming MapReduce word-count job (with the
        /// Files parameter), waits for completion, and verifies that stdout is
        /// non-empty on success. A non-zero exit value fails the test after
        /// capturing the error logs (fetched only when recording HTTP traffic).
        /// </summary>
        public void SubmitMapReduceStreamingJobWithFilesParam()
        {
            using (var context = UndoContext.Current)
            {
                context.Start();

                var username    = TestUtils.WinUserName;
                var password    = TestUtils.WinPassword;
                var clustername = TestUtils.WinClusterName;

                var credentials = new BasicAuthenticationCloudCredentials
                {
                    Username = username,
                    Password = password
                };

                var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

                var parameters = new MapReduceStreamingJobSubmissionParameters
                {
                    Mapper  = "cat.exe",
                    Reducer = "wc.exe",
                    Input   = "/example/data/gutenberg/davinci.txt",
                    Output  = "/example/data/gutenberg/wcount",
                    Files   = new List <string> {
                        "/example/apps/wc.exe", "/example/apps/cat.exe"
                    }
                };

                var response = client.JobManagement.SubmitMapReduceStreamingJob(parameters);
                Assert.NotNull(response);
                // xUnit convention: expected value first, actual second.
                Assert.Equal(HttpStatusCode.OK, response.StatusCode);

                var jobId = response.JobSubmissionJsonResponse.Id;
                Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

                var jobStatus = GetJobFinalStatus(client, jobId);

                var storageAccess = new AzureStorageAccess(TestUtils.WinStorageAccountName, TestUtils.WinStorageAccountKey, TestUtils.WinDefaultContainer);

                if (jobStatus.JobDetail.ExitValue == 0)
                {
                    // Storage access is only available while recording, not in playback.
                    if (HttpMockServer.Mode == HttpRecorderMode.Record)
                    {
                        // Retrieve Job Output
                        var    output     = client.JobManagement.GetJobOutput(jobId, storageAccess);
                        string textOutput = Convert(output);
                        Assert.True(textOutput.Length > 0);
                    }
                }
                else
                {
                    if (HttpMockServer.Mode == HttpRecorderMode.Record)
                    {
                        var    output          = client.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                        string errorTextOutput = Convert(output);
                        Assert.NotNull(errorTextOutput);
                    }

                    // Explicit message instead of a bare Assert.True(false).
                    Assert.True(false, "MapReduce streaming job failed with exit value " + jobStatus.JobDetail.ExitValue);
                }
            }
        }