/// <summary>
/// Submits a Pig job definition to the HDInsight cluster.
/// </summary>
/// <param name='pigJobDef'>The Pig job definition to translate and submit.</param>
/// <returns>The job submission response returned by the service.</returns>
public virtual JobSubmissionResponse SubmitPigJob(AzureHDInsightPigJobDefinition pigJobDef)
{
    // Map the PowerShell-facing definition onto the SDK submission parameters.
    var parameters = new PigJobSubmissionParameters
    {
        Query = pigJobDef.Query,
        File = pigJobDef.File,
        StatusDir = pigJobDef.StatusFolder,
        Files = pigJobDef.Files,
        Arguments = pigJobDef.Arguments
    };
    return HdInsightJobManagementClient.JobManagement.SubmitPigJob(parameters);
}
/// <summary>
/// Submits a Pig job definition to the HDInsight cluster on behalf of the
/// authenticated user.
/// </summary>
/// <param name='pigJobDef'>The Pig job definition to translate and submit.</param>
/// <returns>The job submission response returned by the service.</returns>
public virtual JobSubmissionResponse SubmitPigJob(AzureHDInsightPigJobDefinition pigJobDef)
{
    // List-valued properties are flattened into the delimited string form the
    // Templeton endpoint expects ("arg" / "file" entries).
    var submissionParams = new PigJobSubmissionParameters
    {
        UserName = HdInsightJobManagementClient.Credentials.Username,
        Query = pigJobDef.Query,
        File = pigJobDef.File,
        StatusDir = pigJobDef.StatusFolder,
        Files = ConvertListToString(pigJobDef.Files, "file"),
        Arguments = ConvertListToString(pigJobDef.Arguments, "arg")
    };
    return HdInsightJobManagementClient.JobManagement.SubmitPigJob(submissionParams);
}
/// <summary>
/// Scenario test: submits a Pig job that aggregates log levels from a sample
/// log file, waits for completion, and validates the output (or, on failure,
/// that error logs are retrievable) when running in record mode.
/// </summary>
public void SubmitPigJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var creds = new BasicAuthenticationCloudCredentials()
        {
            Username = TestUtils.UserName,
            Password = TestUtils.Password
        };
        var client = TestUtils.GetHDInsightJobManagementClient(TestUtils.ClusterName, creds);

        var pigParams = new PigJobSubmissionParameters()
        {
            Query = "LOGS = LOAD 'wasb:///example/data/sample.log';" +
                    "LEVELS = foreach LOGS generate REGEX_EXTRACT($0, '(TRACE|DEBUG|INFO|WARN|ERROR|FATAL)', 1) as LOGLEVEL;" +
                    "FILTEREDLEVELS = FILTER LEVELS by LOGLEVEL is not null;" +
                    "GROUPEDLEVELS = GROUP FILTEREDLEVELS by LOGLEVEL;" +
                    "FREQUENCIES = foreach GROUPEDLEVELS generate group as LOGLEVEL, COUNT(FILTEREDLEVELS.LOGLEVEL) as COUNT;" +
                    "RESULT = order FREQUENCIES by COUNT desc;" +
                    "DUMP RESULT;"
        };

        var submission = client.JobManagement.SubmitPigJob(pigParams);
        Assert.NotNull(submission);
        Assert.Equal(submission.StatusCode, HttpStatusCode.OK);

        var jobId = submission.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var finalStatus = GetJobFinalStatus(client, jobId);
        var storage = GetStorageAccessObject();

        if (finalStatus.JobDetail.ExitValue != 0)
        {
            // Only hit live storage for error logs while recording; then fail the test.
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var errStream = client.JobManagement.GetJobErrorLogs(jobId, storage);
                string errorText = Convert(errStream);
                Assert.NotNull(errorText);
            }
            Assert.True(false);
        }
        else if (HttpMockServer.Mode == HttpRecorderMode.Record)
        {
            // Job succeeded: verify the expected log-level tuple appears in stdout.
            var outStream = client.JobManagement.GetJobOutput(jobId, storage);
            string outputText = Convert(outStream);
            Assert.True(outputText.Contains("(DEBUG,"));
        }
    }
}
/// <summary>
/// Submits a Pig job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Pig job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static Task<JobSubmissionResponse> SubmitPigJobAsync(this IJobOperations operations, PigJobSubmissionParameters parameters)
{
    // Convenience overload: delegate to the cancellable overload with no cancellation.
    return operations.SubmitPigJobAsync(parameters, CancellationToken.None);
}
/// <summary>
/// Submits a Pig job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Pig job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static JobSubmissionResponse SubmitPigJob(this IJobOperations operations, PigJobSubmissionParameters parameters)
{
    // Sync-over-async bridge: start the async call on the thread pool (avoiding
    // a potential synchronization-context deadlock) and block for its result.
    return Task.Factory.StartNew((object s) =>
    {
        return ((IJobOperations)s).SubmitPigJobAsync(parameters);
    }
    , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Submits a Pig job to an HDInsight cluster.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.HDInsight.Job.IJobOperations.
/// </param>
/// <param name='parameters'>
/// Required. Pig job parameters.
/// </param>
/// <returns>
/// The Create Job operation response.
/// </returns>
public static JobSubmissionResponse SubmitPigJob(this IJobOperations operations, PigJobSubmissionParameters parameters)
{
    // Serialize the strongly-typed Pig parameters into the generic POST body.
    var request = new JobSubmissionParameters
    {
        Content = parameters.GetJobPostRequestContent()
    };
    return operations.SubmitPigJob(request);
}
/// <summary>
/// Scenario test: submits a Pig job and verifies that the job can then be
/// fetched by its id via GetJob.
/// </summary>
public void GetPigJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var user = TestUtils.UserName;
        var creds = new BasicAuthenticationCloudCredentials
        {
            Username = user,
            Password = TestUtils.Password
        };
        var client = TestUtils.GetHDInsightJobManagementClient(TestUtils.ClusterName, creds);

        var pigParams = new PigJobSubmissionParameters()
        {
            UserName = user,
            Query = "records = LOAD '/example/pig/sahara-paleo-fauna.txt' AS (DateBP:int, Loc:chararray, Coordinates:chararray, Samples:chararray, Country:chararray, Laboratory:chararray);" +
                    "filtered_records = FILTER records by Country == 'Egypt' OR Country == 'Morocco';" +
                    "grouped_records = GROUP filtered_records BY Country;" +
                    "DUMP grouped_records;"
        };

        var submittedJobId = client.JobManagement.SubmitPigJob(pigParams).JobSubmissionJsonResponse.Id;

        var getResponse = client.JobManagement.GetJob(submittedJobId);
        Assert.NotNull(getResponse);
        Assert.Equal(getResponse.StatusCode, HttpStatusCode.OK);
    }
}
/// <summary>
/// Shared test helper: submits the given Pig job (via the sync or async API),
/// waits for completion, and validates output on success or error logs on
/// failure when running in record mode.
/// </summary>
/// <param name='parameters'>The Pig job parameters to submit.</param>
/// <param name='runAyncAPI'>When true, exercises the async submission API.</param>
/// <param name='isWindowsCluster'>When true, targets a Windows-based cluster.</param>
public void SubmitPigJobAndValidateOutput(PigJobSubmissionParameters parameters, bool runAyncAPI = false, bool isWindowsCluster = false)
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var client = TestUtils.GetHDInsightJobManagementClient(isWindowsCluster);

        // Exercise whichever submission surface the caller asked for.
        var submission = runAyncAPI
            ? client.JobManagement.SubmitPigJobAsync(parameters).Result
            : client.JobManagement.SubmitPigJob(parameters);
        Assert.NotNull(submission);
        Assert.Equal(submission.StatusCode, HttpStatusCode.OK);

        var jobId = submission.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var finalStatus = client.JobManagement.WaitForJobCompletion(jobId, TestUtils.JobWaitInterval, TestUtils.JobPollInterval);
        var storage = GetStorageAccessObject(isWindowsCluster);

        if (finalStatus.JobDetail.ExitValue != 0)
        {
            // Only hit live storage for error logs while recording; then fail the test.
            if (HttpMockServer.GetCurrentMode() == HttpRecorderMode.Record)
            {
                var errStream = client.JobManagement.GetJobErrorLogs(jobId, storage);
                string errorText = Convert(errStream);
                Assert.NotNull(errorText);
            }
            Assert.True(false);
        }
        else if (HttpMockServer.GetCurrentMode() == HttpRecorderMode.Record)
        {
            // Job succeeded: verify the expected log-level tuple appears in stdout.
            var outStream = client.JobManagement.GetJobOutput(jobId, storage);
            string outputText = Convert(outStream);
            Assert.True(outputText.Contains("(DEBUG,"));
        }
    }
}