// [Fact]
public void GetHiveJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;

        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new HiveJobSubmissionParameters
        {
            UserName = username,
            Query = "SHOW TABLES;"
        };

        var jobid = client.JobManagement.SubmitHiveJob(parameters).JobSubmissionJsonResponse.Id;
        var response = client.JobManagement.GetJob(jobid);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);
    }
}
public void SubmitHiveJobError()
{
    using (var context = MockContext.Start(this.GetType()))
    using (var client = this.CommonData.GetHDInsightJobClient(context))
    {
        var parameters = new HiveJobSubmissionParameters
        {
            Query = "FAKEQUERY;",
            StatusDir = "jobstatus"
        };

        var response = client.Job.SubmitHiveJob(parameters);
        Assert.NotNull(response);

        var jobId = response.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var jobStatus = client.Job.WaitForJobCompletionAsync(jobId, this.CommonData.JobWaitInterval, this.CommonData.JobPollInterval).Result;
        Assert.True(jobStatus.ExitValue > 0);

        var storageAccess = GetStorageAccessObject();
        if (HttpMockServer.GetCurrentMode() == HttpRecorderMode.Record)
        {
            var output = client.Job.GetJobErrorLogs(jobId, storageAccess);
            Assert.NotNull(output);
            Assert.True(output.Length > 0);

            string errorTextOutput = Convert(output);
            Assert.True(!string.IsNullOrEmpty(errorTextOutput));
        }
    }
}
private void SubmitHiveJob(string query)
{
    var parameters = new HiveJobSubmissionParameters { Query = query };

    // submitting the Hive job to the cluster
    JobSubmissionResponse jobResponse = _hdiJobManagementClient.JobManagement.SubmitHiveJob(parameters);
    string jobId = jobResponse.JobSubmissionJsonResponse.Id;

    // wait for job completion
    JobDetailRootJsonObject jobDetail = _hdiJobManagementClient.JobManagement.GetJob(jobId).JobDetail;
    while (!jobDetail.Status.JobComplete)
    {
        Thread.Sleep(1000);
        jobDetail = _hdiJobManagementClient.JobManagement.GetJob(jobId).JobDetail;
    }

    // get job output
    Stream output = jobDetail.ExitValue == 0
        ? _hdiJobManagementClient.JobManagement.GetJobOutput(jobId, _storageAccess)
        : _hdiJobManagementClient.JobManagement.GetJobErrorLogs(jobId, _storageAccess);

    // handle output
    using (var reader = new StreamReader(output, Encoding.UTF8))
    {
        string value = reader.ReadToEnd();
        if (!string.IsNullOrEmpty(value))
        {
            throw new BatchViewCalculationException(value);
        }
    }
}
public void SubmitHiveJob()
{
    var parameters = new HiveJobSubmissionParameters
    {
        Query = @"select querydwelltime+2 from hivesampletable where clientid = 8"
    };

    SubmitHiveJobAndValidateOutput(parameters, "15.92");
}
public void SubmitHiveJobAsync_Windows()
{
    var parameters = new HiveJobSubmissionParameters
    {
        Query = @"select querydwelltime+2 from hivesampletable where clientid = 8"
    };

    SubmitHiveJobAndValidateOutput(parameters, "15.92", runAyncAPI: true, isWindowsCluster: true);
}
public void SubmitHiveJobAndValidateOutput(HiveJobSubmissionParameters parameters, string expectedOutputPart)
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;

        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var jobId = response.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var jobStatus = GetJobFinalStatus(client, jobId);
        var storageAccess = GetStorageAccessObject();

        if (jobStatus.JobDetail.ExitValue == 0)
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                // Retrieve Job Output
                var output = client.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Contains(expectedOutputPart));
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = client.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(output);
                Assert.NotNull(errorTextOutput);
            }

            Assert.True(false);
        }
    }
}
private string AddNewPhotoHiveJob(Photo photo)
{
    Dictionary<string, string> defines = new Dictionary<string, string>
    {
        { "hive.execution.engine", "tez" },
        { "hive.exec.reducers.max", "1" }
    };

    List<string> args = new List<string>
    {
        "--hiveconf", $"Id={photo.Id}",
        "--hiveconf", $"Title={photo.Title}",
        "--hiveconf", $"Url={photo.Url}"
    };

    var parameters = new HiveJobSubmissionParameters
    {
        Query = "INSERT INTO TABLE Photo VALUES(${hiveconf:Id}, '${hiveconf:Title}' , '${hiveconf:Url}');",
        //"INSERT INTO TABLE Photo VALUES('${hiveconf:Id}', '${hiveconf:Title}' , '${hiveconf:Url}');",
        Defines = defines,
        Arguments = args
    };

    var jobResponse = _jobClient.JobManagement.SubmitHiveJob(parameters);
    var jobId = jobResponse.JobSubmissionJsonResponse.Id;

    // Wait for job completion
    var jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;
    while (!jobDetail.Status.JobComplete)
    {
        Thread.Sleep(1000);
        jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;
    }

    // Get job output
    var storageAccess = new AzureStorageAccess(DefaultStorageAccountName, DefaultStorageAccountKey, DefaultStorageContainerName);

    IList<Photo> listPhoto = new List<Photo>();
    var result = "";
    Stream output;

    if (jobDetail.ExitValue == 0)
    {
        result = "success";
    }
    else
    {
        output = _jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
        using (var reader = new StreamReader(output, Encoding.UTF8))
        {
            result = reader.ReadToEnd();
        }
    }

    return result;
}
public virtual JobSubmissionResponse SubmitHiveJob(AzureHDInsightHiveJobDefinition hiveJobDef)
{
    var hiveJobParams = new HiveJobSubmissionParameters
    {
        Arguments = hiveJobDef.Arguments,
        Defines = hiveJobDef.Defines,
        File = hiveJobDef.File,
        Files = hiveJobDef.Files,
        Query = hiveJobDef.Query,
        StatusDir = hiveJobDef.StatusFolder
    };

    return HdInsightJobManagementClient.JobManagement.SubmitHiveJob(hiveJobParams);
}
public virtual JobSubmissionResponse SubmitHiveJob(AzureHDInsightHiveJobDefinition hiveJobDef)
{
    var hiveJobParams = new HiveJobSubmissionParameters
    {
        Arguments = ConvertListToString(hiveJobDef.Arguments, "arg"),
        Defines = ConvertDefinesToString(hiveJobDef.Defines),
        File = hiveJobDef.File,
        Files = ConvertListToString(hiveJobDef.Files, "file"),
        Query = hiveJobDef.Query,
        StatusDir = hiveJobDef.StatusFolder,
        UserName = HdInsightJobManagementClient.Credentials.Username
    };

    return HdInsightJobManagementClient.JobManagement.SubmitHiveJob(hiveJobParams);
}
static void Main(string[] args)
{
    var key = new SecureString();
    foreach (char c in secretKey)
    {
        key.AppendChar(c);
    }

    var tokenCreds = GetTokenCloudCredentials(tenantId, appId, key);

    var resourceManagementClient = new ResourceManagementClient(new TokenCloudCredentials(subId.ToString(), tokenCreds.Token));
    resourceManagementClient.Providers.Register("Microsoft.HDInsight");

    var hdiManagementClient = new HDInsightManagementClient(new TokenCredentials(tokenCreds.Token));
    hdiManagementClient.SubscriptionId = subId.ToString();

    var clusterName = GetClusterName(hdiManagementClient);
    Console.ReadLine();

    var hdiJobManagementClient = new HDInsightJobManagementClient(
        clusterName + ".azurehdinsight.net",
        new BasicAuthenticationCloudCredentials { Username = user, Password = password });

    Dictionary<string, string> defines = new Dictionary<string, string>
    {
        { "hive.execution.engine", "tez" },
        { "hive.exec.reducers.max", "1" }
    };
    List<string> hadoopArgs = new List<string> { "argA", "argB" };

    var job = new HiveJobSubmissionParameters
    {
        Query = "SHOW TABLES",
        Defines = defines,
        Arguments = hadoopArgs
    };

    SubmitJobToCluster(hdiJobManagementClient, job);
    Console.ReadLine();
}
public void SubmitHiveJobError()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;

        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new HiveJobSubmissionParameters
        {
            Query = "FAKEQUERY;",
            StatusDir = "jobstatus"
        };

        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var jobId = response.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var jobStatus = GetJobFinalStatus(client, jobId);
        Assert.True(jobStatus.JobDetail.ExitValue > 0);

        var storageAccess = GetStorageAccessObject();
        if (HttpMockServer.Mode == HttpRecorderMode.Record)
        {
            var output = client.JobManagement.GetJobErrorLogs(jobId, storageAccess);
            Assert.NotNull(output);
            Assert.True(output.Length > 0);

            string errorTextOutput = Convert(output);
            Assert.True(!string.IsNullOrEmpty(errorTextOutput));
        }
    }
}
public void SubmitHiveJobLargeQuery()
{
    var query = string.Empty;

    // Maximum input string size Uri.EscapeDataString can accept is 65520.
    while (query.Length < 65520)
    {
        query += "select 1.0000000001 + 0.00001 limit 1;";
    }

    var parameters = new HiveJobSubmissionParameters { Query = query };
    SubmitHiveJobAndValidateOutput(parameters, "1.0000100001");
}
public void SubmitHiveJobAndValidateOutput(HiveJobSubmissionParameters parameters, string expectedOutputPart, bool runAyncAPI = false, bool isWindowsCluster = false)
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var client = TestUtils.GetHDInsightJobManagementClient(isWindowsCluster);

        var response = runAyncAPI
            ? client.JobManagement.SubmitHiveJobAsync(parameters).Result
            : client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var jobId = response.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        WaitForJobCompletion(client, jobId, TestUtils.JobPollInterval, TestUtils.JobWaitInterval);

        var jobStatus = client.JobManagement.GetJob(jobId);
        var storageAccess = GetStorageAccessObject(isWindowsCluster);

        if (jobStatus.JobDetail.ExitValue == 0)
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                // Retrieve Job Output
                var output = client.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Contains(expectedOutputPart));
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = client.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(output);
                Assert.NotNull(errorTextOutput);
            }

            Assert.True(false);
        }
    }
}
public void SubmitHiveJob_Defines()
{
    var defines = new Dictionary<string, string>
    {
        { "hive.execution.engine", "ravi" },
        { "hive.exec.reducers.max", "1" },
        { "time", "10" },
        { "rows", "20" }
    };

    var parameters = new HiveJobSubmissionParameters
    {
        Query = @"select * from hivesampletable where querydwelltime > ${hiveconf:time} limit ${hiveconf:rows}",
        Defines = defines
    };

    SubmitHiveJobAndValidateOutput(parameters, "Massachusetts United States");
}
public void SubmitHiveJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;

        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var defines = new Dictionary<string, string>
        {
            { "hive.execution.engine", "ravi" },
            { "hive.exec.reducers.max", "1" }
        };
        var args = new List<string> { "argA", "argB" };

        var parameters = new HiveJobSubmissionParameters
        {
            UserName = username,
            Query = "SHOW TABLES;",
            Defines = ConvertDefinesToString(defines),
            Arguments = ConvertArgsToString(args)
        };

        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);
        Assert.Contains("job_", response.JobSubmissionJsonResponse.Id, StringComparison.InvariantCulture);
    }
}
public void GetJobsPagination()
{
    using (var context = MockContext.Start(this.GetType()))
    using (var client = this.CommonData.GetHDInsightJobClient(context))
    {
        var parameters = new HiveJobSubmissionParameters
        {
            Query = @"select querydwelltime+2 from hivesampletable where clientid = 8"
        };

        // Prepare job data
        var submitResponse = client.Job.SubmitHiveJob(parameters);
        Assert.NotNull(submitResponse);
        submitResponse = client.Job.SubmitHiveJob(parameters);
        Assert.NotNull(submitResponse);
        submitResponse = client.Job.SubmitHiveJob(parameters);
        Assert.NotNull(submitResponse);
        submitResponse = client.Job.SubmitHiveJob(parameters);
        Assert.NotNull(submitResponse);

        var allJobs = client.Job.List();
        Assert.True(allJobs.Count > 0);

        int numOfEntries = 3;
        int index = 0;
        string jobid = string.Empty;

        while (true)
        {
            var t = client.Job.ListAfterJobId(jobid, numOfEntries);
            jobid = t.Last().Id;
            index += t.Count;

            var expectedJobId = allJobs.ElementAt(index - 1).Id;
            Assert.Equal(expectedJobId, jobid);

            if (t.Count != numOfEntries || allJobs.Count <= index)
            {
                break;
            }
        }
    }
}
private void SubmitHiveJobAndValidateOutput(HiveJobSubmissionParameters parameters, string expectedOutputPart, bool runAyncAPI = false)
{
    using (var context = MockContext.Start(this.GetType()))
    using (var client = this.CommonData.GetHDInsightJobClient(context))
    {
        var response = runAyncAPI
            ? client.Job.SubmitHiveJobAsync(parameters).Result
            : client.Job.SubmitHiveJob(parameters);
        Assert.NotNull(response);

        var jobId = response.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var jobStatus = client.Job.WaitForJobCompletion(jobId, this.CommonData.JobWaitInterval, this.CommonData.JobPollInterval);
        var storageAccess = GetStorageAccessObject();

        if (jobStatus.ExitValue == 0)
        {
            if (HttpMockServer.GetCurrentMode() == HttpRecorderMode.Record)
            {
                // Retrieve Job Output
                var output = client.Job.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.Contains(expectedOutputPart, textOutput);
            }
        }
        else
        {
            if (HttpMockServer.GetCurrentMode() == HttpRecorderMode.Record)
            {
                var output = client.Job.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(output);
                Assert.NotNull(errorTextOutput);
            }

            Assert.True(false);
        }
    }
}
public void SubmitHiveJobError()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var client = TestUtils.GetHDInsightJobManagementClient();

        var parameters = new HiveJobSubmissionParameters
        {
            Query = "FAKEQUERY;",
            StatusDir = "jobstatus"
        };

        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var jobId = response.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        WaitForJobCompletion(client, jobId, TestUtils.JobPollInterval, TestUtils.JobWaitInterval);

        var jobStatus = client.JobManagement.GetJob(jobId);
        Assert.True(jobStatus.JobDetail.ExitValue > 0);

        var storageAccess = GetStorageAccessObject();
        if (HttpMockServer.Mode == HttpRecorderMode.Record)
        {
            var output = client.JobManagement.GetJobErrorLogs(jobId, storageAccess);
            Assert.NotNull(output);
            Assert.True(output.Length > 0);

            string errorTextOutput = Convert(output);
            Assert.True(!string.IsNullOrEmpty(errorTextOutput));
        }
    }
}
public void ListJobs()
{
    using (var context = MockContext.Start(this.GetType()))
    using (var client = this.CommonData.GetHDInsightJobClient(context))
    {
        var response = client.Job.List();
        Assert.NotNull(response);
        int originalJobCount = response.Count;

        var parameters = new HiveJobSubmissionParameters
        {
            Query = @"select querydwelltime+2 from hivesampletable where clientid = 8"
        };

        var submitResponse = client.Job.SubmitHiveJob(parameters);
        Assert.NotNull(submitResponse);

        response = client.Job.List();
        Assert.NotNull(response);
        Assert.Equal(originalJobCount + 1, response.Count);
    }
}
//[Fact]
public void GetJobError()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;

        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new HiveJobSubmissionParameters
        {
            UserName = username,
            Query = "FAKEQUERY;",
            StatusDir = "jobstatus"
        };

        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var job = client.JobManagement.GetJob(response.JobSubmissionJsonResponse.Id);
        var output = client.JobManagement.GetJobErrorLogs(job.JobDetail.Id, TestUtils.StorageAccountName, TestUtils.StorageAccountKey, TestUtils.DefaultContainer);
        Assert.NotNull(output);
        Assert.True(output.Length > 0);

        var outputStr = Convert(output);
        Assert.NotNull(outputStr);
    }
}
/// <summary>
/// Submits a Hive job to an HDInsight cluster.
/// </summary>
/// <param name='parameters'>
/// Required. Hive job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public async Task<JobSubmissionResponse> SubmitHiveJobAsync(HiveJobSubmissionParameters parameters)
{
    return await SubmitHiveJobAsync(
        new JobSubmissionParameters { Content = parameters.GetJobPostRequestContent() },
        CancellationToken.None);
}
/// <summary>
/// Submits a Hive job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the Microsoft.Azure.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Hive job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static async Task<JobSubmissionJsonResponse> SubmitHiveJobAsync(this IJobOperations operations, HiveJobSubmissionParameters parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    using (var _result = await operations.SubmitHiveJobWithHttpMessagesAsync(parameters, null, cancellationToken).ConfigureAwait(false))
    {
        return _result.Body;
    }
}
/// <summary>
/// Submits a Hive job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the Microsoft.Azure.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Hive job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static JobSubmissionJsonResponse SubmitHiveJob(this IJobOperations operations, HiveJobSubmissionParameters parameters)
{
    return SubmitHiveJobAsync(operations, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Submits a Hive job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the Microsoft.Azure.Management.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Hive job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static JobSubmissionResponse SubmitHiveJob(this IJobOperations operations, HiveJobSubmissionParameters parameters)
{
    return Task.Factory.StartNew(
        (object s) => ((IJobOperations)s).SubmitHiveJobAsync(parameters),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Submits a Hive job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the Microsoft.Azure.Management.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Hive job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static Task<JobSubmissionResponse> SubmitHiveJobAsync(this IJobOperations operations, HiveJobSubmissionParameters parameters)
{
    return operations.SubmitHiveJobAsync(parameters, CancellationToken.None);
}
public static void SubmitJobToCluster(HDInsightJobManagementClient hdiJobManagementClient, HiveJobSubmissionParameters job)
{
    System.Console.WriteLine("Submitting the Hive job to the cluster...");

    var jobResponse = hdiJobManagementClient.JobManagement.SubmitHiveJob(job);
    var jobId = jobResponse.JobSubmissionJsonResponse.Id;
    System.Console.WriteLine("Response status code is " + jobResponse.StatusCode);
    System.Console.WriteLine("JobId is " + jobId);

    System.Console.WriteLine("Waiting for the job completion ...");

    // Wait for job completion
    var jobDetail = hdiJobManagementClient.JobManagement.GetJob(jobId).JobDetail;
    while (!jobDetail.Status.JobComplete)
    {
        Thread.Sleep(1000);
        jobDetail = hdiJobManagementClient.JobManagement.GetJob(jobId).JobDetail;
    }

    // Get job output
    var storageAccess = new AzureStorageAccess(storageAccount, storageKey, storageContainer);
    var output = (jobDetail.ExitValue == 0)
        ? hdiJobManagementClient.JobManagement.GetJobOutput(jobId, storageAccess)       // fetch stdout output in case of success
        : hdiJobManagementClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);   // fetch stderr output in case of failure

    System.Console.WriteLine("Job output is: ");

    using (var reader = new StreamReader(output, Encoding.UTF8))
    {
        string value = reader.ReadToEnd();
        System.Console.WriteLine(value);
    }
}
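A minimal usage sketch for the SubmitJobToCluster helper above, assuming the same client construction as in the Main method shown earlier. The cluster name, username, and password literals are placeholders, and the storageAccount, storageKey, and storageContainer fields the helper reads are assumed to be defined elsewhere in the class.

// Usage sketch only; "myclustername", "admin", and the password are placeholder values,
// not taken from the original samples.
var jobClient = new HDInsightJobManagementClient(
    "myclustername.azurehdinsight.net",
    new BasicAuthenticationCloudCredentials { Username = "admin", Password = "<cluster-password>" });

var hiveJob = new HiveJobSubmissionParameters
{
    Query = "SHOW TABLES",
    Defines = new Dictionary<string, string> { { "hive.execution.engine", "tez" } }
};

// Submits the job, polls until completion, and prints stdout or stderr as in the helper above.
SubmitJobToCluster(jobClient, hiveJob);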
public JsonResult GetTop10Photo()
{
    try
    {
        Dictionary<string, string> defines = new Dictionary<string, string>
        {
            { "hive.execution.engine", "tez" },
            { "hive.exec.reducers.max", "1" }
        };

        var parameters = new HiveJobSubmissionParameters
        {
            Query = "SELECT * FROM Photo LIMIT 10;",
            Defines = defines,
            Arguments = null
        };

        var jobResponse = _jobClient.JobManagement.SubmitHiveJob(parameters);
        var jobId = jobResponse.JobSubmissionJsonResponse.Id;

        // Wait for job completion
        var jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;
        while (!jobDetail.Status.JobComplete)
        {
            Thread.Sleep(1000);
            jobDetail = _jobClient.JobManagement.GetJob(jobId).JobDetail;
        }

        // Get job output
        var storageAccess = new AzureStorageAccess(DefaultStorageAccountName, DefaultStorageAccountKey, DefaultStorageContainerName);

        IList<Photo> listPhoto = new List<Photo>();
        Stream output;

        if (jobDetail.ExitValue == 0)
        {
            output = _jobClient.JobManagement.GetJobOutput(jobId, storageAccess);
            using (var reader = new StreamReader(output, Encoding.UTF8))
            {
                string[] lines = reader.ReadToEnd().Split("\n".ToCharArray());
                foreach (var line in lines)
                {
                    if (!string.IsNullOrEmpty(line))
                    {
                        string[] splitContent = line.Split("\t".ToCharArray());

                        Photo photo = new Photo();
                        int id;
                        int.TryParse(splitContent[0], out id);
                        photo.Id = id;
                        photo.Title = splitContent[1];
                        photo.Url = splitContent[2];
                        listPhoto.Add(photo);
                    }
                }
            }

            return Json(new HandledJsonResult { Data = listPhoto });
        }
        else
        {
            string message;
            output = _jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
            using (var reader = new StreamReader(output, Encoding.UTF8))
            {
                message = reader.ReadToEnd();
            }

            return Json(new HandledJsonResult { Data = message });
        }
    }
    catch (Exception ex)
    {
        return Json(new HandledJsonResult(ex));
    }
}
/// <summary>
/// Submits a Hive job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the Microsoft.Azure.Management.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Hive job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static JobSubmissionResponse SubmitHiveJob(this IJobOperations operations, HiveJobSubmissionParameters parameters)
{
    return operations.SubmitHiveJob(new JobSubmissionParameters { Content = parameters.GetJobPostRequestContent() });
}
/// <summary>
/// Submits a Hive job to an HDInsight cluster.
/// </summary>
/// <param name='parameters'>
/// Required. Hive job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public async Task<AzureOperationResponse<JobSubmissionJsonResponse>> SubmitHiveJobWithHttpMessagesAsync(HiveJobSubmissionParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    using (var requestContents = new MemoryStream(Encoding.UTF8.GetBytes(parameters.GetJobPostRequestContent())))
    {
        return await SubmitHiveJobWithHttpMessagesAsync(requestContents, customHeaders, cancellationToken).ConfigureAwait(false);
    }
}
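A hedged sketch of calling the async submission path from the examples above (SubmitHiveJobAsync plus WaitForJobCompletionAsync). It assumes the client exposing the Job operations group is the HDInsightJobClient used by the MockContext-based tests, and that the wait/poll intervals are TimeSpan values; those assumptions, the method name, and the query are illustrative rather than taken from the original listing.

// Hypothetical async caller; the client type and interval types are assumptions noted above.
public static async Task RunHiveQueryAsync(HDInsightJobClient client, TimeSpan jobWaitInterval, TimeSpan jobPollInterval)
{
    var parameters = new HiveJobSubmissionParameters { Query = "SHOW TABLES;" };

    // Submit the Hive job and wait for it to finish, mirroring the test code above.
    var response = await client.Job.SubmitHiveJobAsync(parameters);
    var jobDetail = await client.Job.WaitForJobCompletionAsync(response.Id, jobWaitInterval, jobPollInterval);

    Console.WriteLine("Hive job {0} finished with exit value {1}", response.Id, jobDetail.ExitValue);
}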