public virtual void WaitForJobWithId()
        {
            // Scenario: create a Hive job definition, start it on an HTTP-enabled cluster,
            // then wait on it by job id and verify it reaches the "Completed" state.
            var hiveJobDefinition = new HiveJobCreateParameters()
            {
                JobName = "show tables jobDetails",
                Query = "show tables"
            };

            var cluster = CmdletScenariosTestCaseBase.GetHttpAccessEnabledCluster();
            using (var runspace = this.GetPowerShellRunspace())
            {
                var results = runspace.NewPipeline()
                                      .AddCommand(CmdletConstants.NewAzureHDInsightHiveJobDefinition)
                                      .WithParameter(CmdletConstants.JobName, hiveJobDefinition.JobName)
                                      .WithParameter(CmdletConstants.Query, hiveJobDefinition.Query)
                                      .AddCommand(CmdletConstants.StartAzureHDInsightJob)
                                      .WithParameter(CmdletConstants.Cluster, cluster.ConnectionUrl)
                                      .WithParameter(CmdletConstants.Credential, IntegrationTestBase.GetPSCredential(cluster.HttpUserName, cluster.HttpPassword))
                                      .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                var job = results.Results.First().BaseObject as Microsoft.WindowsAzure.Management.HDInsight.Cmdlet.DataObjects.AzureHDInsightJob;
                // Fail with a clear message instead of a NullReferenceException if the
                // pipeline returned an unexpected object type.
                Assert.IsNotNull(job, "Start-AzureHDInsightJob did not return an AzureHDInsightJob object.");

                results = runspace.NewPipeline()
                                      .AddCommand(CmdletConstants.WaitAzureHDInsightJob)
                                      .WithParameter(CmdletConstants.Credential, IntegrationTestBase.GetPSCredential(cluster.HttpUserName, cluster.HttpPassword))
                                      .WithParameter(CmdletConstants.JobId, job.JobId)
                                      .WithParameter(CmdletConstants.Cluster, job.Cluster)
                                      .Invoke();
                var completedJob = results.Results.ToEnumerable<AzureHDInsightJob>().FirstOrDefault();
                Assert.IsNotNull(completedJob);
                Assert.AreEqual(job.JobId, completedJob.JobId);
                Assert.AreEqual("Completed", completedJob.State);
            }
        }
        public void ICanCallThe_New_HDInsightHiveJobDefinitionCmdlet_WithArguments()
        {
            // Verifies the New-AzureHDInsightHiveJobDefinition cmdlet round-trips
            // job name, query and the supplied argument list.
            // (Locals renamed to camelCase per C# naming conventions.)
            var hiveJobDefinition = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables" };
            hiveJobDefinition.Arguments.Add("arg 1");
            hiveJobDefinition.Arguments.Add("arg 2");

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightHiveJobDefinition)
                            .WithParameter(CmdletConstants.JobName, hiveJobDefinition.JobName)
                            .WithParameter(CmdletConstants.Query, hiveJobDefinition.Query)
                            .WithParameter(CmdletConstants.HiveArgs, hiveJobDefinition.Arguments)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightHiveJobDefinition hiveJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightHiveJobDefinition>().First();

                Assert.AreEqual(hiveJobDefinition.JobName, hiveJobFromPowershell.JobName);
                Assert.AreEqual(hiveJobDefinition.Query, hiveJobFromPowershell.Query);

                // Every argument passed in must be present in the cmdlet's output.
                foreach (string args in hiveJobDefinition.Arguments)
                {
                    Assert.IsTrue(
                        hiveJobFromPowershell.Arguments.Any(arg => string.Equals(args, arg)),
                        "Unable to find argument '{0}' in value returned from powershell",
                        args);
                }
            }
        }
// --- Example 3 ---
        public static void CreateTables(JobSubmissionCertificateCredential creds, AzureSettings settings)
        {
            // Builds wasb:// paths to the Tweets and Identifiers folders and submits a Hive job
            // that (re)creates the two external tables over them, then prints the job output.
            // NOTE(review): the path uses ClusterName as the blob container and StorageAccount
            // as the account — confirm that matches the storage layout used elsewhere.
            var storagePath = String.Format("wasb://{0}@{1}.blob.core.windows.net/", settings.ClusterName, settings.StorageAccount);
            var TweetInPath = storagePath + @"/Tweets";
            var IdentifiersInPath = storagePath + @"/Identifiers";

            var hiveJobDefinition = new HiveJobCreateParameters()
            {
                JobName = "Create external tables",
                StatusFolder = "/AAACreateTables",

                Query = "DROP TABLE tweets; CREATE EXTERNAL TABLE tweets( id_str string, created_at string, retweet_count string, tweetText string, userName string, userId string, screenName string, countryCode string, placeType string, placeName string, placeType1 string, coordinates array<string>)" +
                "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ',' STORED AS TEXTFILE location '" + TweetInPath + "';" +
                "DROP TABLE identifiers; CREATE EXTERNAL TABLE identifiers(identifier string)" +
                "ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE location '" + IdentifiersInPath + "';"
            };

            // Submit the Hive job
            var jobClient = JobSubmissionClientFactory.Connect(creds);
            var jobResults = jobClient.CreateHiveJob(hiveJobDefinition);

            WaitForJobCompletion(jobResults, jobClient);

            // Print the Hive job output; dispose the stream and reader (the original leaked both).
            using (var stream = jobClient.GetJobOutput(jobResults.JobId))
            using (var reader = new StreamReader(stream))
            {
                Console.WriteLine(reader.ReadToEnd());
            }
        }
        public void DoesNotAddDefineIfJobNameAbsent()
        {
            // A Hive job created without a JobName must not emit the "define job name"
            // field in the serialized WebHCat payload.
            var jobParameters = new HiveJobCreateParameters { Query = "show tables" };

            var serialized = new PayloadConverterBase().SerializeHiveRequest("hadoop", jobParameters);

            var jobNameDefine = Uri.EscapeDataString(string.Format("{0}={1}", WebHCatConstants.DefineJobName, jobParameters.JobName));
            Assert.IsFalse(serialized.Contains(jobNameDefine));
        }
        public void CanCreateNewHiveJob_StartJob()
        {
            // Builds a Hive job definition through the command object and then starts it.
            var expected = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables" };

            var commandFactory = ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>();
            INewAzureHDInsightHiveJobDefinitionCommand command = commandFactory.CreateNewHiveDefinition();
            command.JobName = expected.JobName;
            command.Query = expected.Query;
            command.EndProcessing();

            AzureHDInsightHiveJobDefinition definition = command.Output.ElementAt(0);
            TestJobStart(definition);
        }
// --- Example 6 ---
        /// <summary>
        /// Submits a Hive job (or fetches the output of an existing job when <paramref name="jobid"/>
        /// is supplied), waits for completion, and passes the job output stream to
        /// <paramref name="submitCallback"/>. Returns the callback's result, or "error" on failure.
        /// </summary>
        public static string SubmitJob(HiveJobCreateParameters hiveJobDefinition, Func<Stream, string> submitCallback, string jobid = "")
        {
            string msg = string.Empty;
            var start = DateTime.Now;
            Console.WriteLine("开始提交job:" + hiveJobDefinition.JobName);
            UBA.Common.LogHelperNet.Info("开始提交job:" + hiveJobDefinition.JobName, null);
            string pfx = AppDomain.CurrentDomain.BaseDirectory + "ubaClient.pfx";
            // ConfigurationManager replaces the obsolete ConfigurationSettings API
            // (same AppSettings values; the rest of this file already uses ConfigurationManager).
            string subscriptionid = System.Configuration.ConfigurationManager.AppSettings["Subscriptionid"];
            string clustername = System.Configuration.ConfigurationManager.AppSettings["Clustername"];
            System.IO.Stream stream = null;
            try
            {
                // SECURITY(review): certificate password is hardcoded; move it to protected configuration.
                X509Certificate2 cert = new X509Certificate2(pfx, "1");//c8321a5a-6f7e-4f2e-a0c8-7b19f076877a
                JobSubmissionCertificateCredential creds = new JobSubmissionCertificateCredential(new Guid(subscriptionid), cert, clustername, new Uri("https://management.core.chinacloudapi.cn"));
                // Submit the Hive job
                var jobClient = JobSubmissionClientFactory.Connect(creds);
                if (!string.IsNullOrEmpty(jobid))
                {
                    // An existing job id was supplied: just fetch its output.
                    stream = jobClient.GetJobOutput(jobid);
                }
                else
                {
                    JobCreationResults jobResults = jobClient.CreateHiveJob(hiveJobDefinition);
                    msg = string.Format("提交job成功,耗时{0}秒\r\n开始处理job", DateTime.Now.Subtract(start).TotalMilliseconds / 1000);
                    Console.WriteLine(msg);
                    UBA.Common.LogHelperNet.Info(msg, null);
                    start = DateTime.Now;
                    //// Wait for the job to complete
                    WaitForJobCompletion(jobResults, jobClient);
                    stream = jobClient.GetJobOutput(jobResults.JobId);
                }

            }
            catch (Exception ex)
            {
                Console.WriteLine("提交job失败:" + ex.Message);
                UBA.Common.LogHelperNet.Error("提交job失败:", ex);
                return "error";
            }
            msg = string.Format("处理完成job,耗时{0}秒", DateTime.Now.Subtract(start).TotalMilliseconds / 1000);
            Console.WriteLine(msg);
            UBA.Common.LogHelperNet.Info(msg, null);
            // Dispose the output stream after the callback has consumed it (the original leaked it).
            try
            {
                return submitCallback(stream);
            }
            finally
            {
                if (stream != null)
                {
                    stream.Dispose();
                }
            }
        }
        public void ICanCallThe_New_HDInsightHiveJobDefinitionCmdlet()
        {
            // Verifies the New-AzureHDInsightHiveJobDefinition cmdlet round-trips
            // job name and query. (Locals renamed to camelCase per C# naming conventions.)
            var hiveJobDefinition = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables" };

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightHiveJobDefinition)
                            .WithParameter(CmdletConstants.JobName, hiveJobDefinition.JobName)
                            .WithParameter(CmdletConstants.Query, hiveJobDefinition.Query)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightHiveJobDefinition hiveJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightHiveJobDefinition>().First();

                Assert.AreEqual(hiveJobDefinition.JobName, hiveJobFromPowershell.JobName);
                Assert.AreEqual(hiveJobDefinition.Query, hiveJobFromPowershell.Query);
            }
        }
        public virtual void ICanCallThe_NewHiveJob_Then_Start_HDInsightJobsCmdlet()
        {
            // Creates a Hive job definition, runs it on a live cluster, then fetches
            // the job back by id and verifies the history entry matches.
            var hiveJobDefinition = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables" };

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightHiveJobDefinition)
                            .WithParameter(CmdletConstants.JobName, hiveJobDefinition.JobName)
                            .WithParameter(CmdletConstants.Query, hiveJobDefinition.Query)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightHiveJobDefinition hiveJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightHiveJobDefinition>().First();
                ClusterDetails testCluster = CmdletScenariosTestCaseBase.GetHttpAccessEnabledCluster();
                AzureHDInsightJob jobCreationDetails = RunJobInPowershell(runspace, hiveJobFromPowershell, testCluster);
                AzureHDInsightJob jobHistoryResult = GetJobsCmdletTests.GetJobWithID(runspace, jobCreationDetails.JobId, testCluster);
                // The original fetched the history entry but never asserted on it,
                // so the lookup could silently fail.
                Assert.IsNotNull(jobHistoryResult);
                Assert.AreEqual(jobCreationDetails.JobId, jobHistoryResult.JobId);
            }
        }
// --- Example 9 ---
        public static void LoadTweets(JobSubmissionCertificateCredential creds, AzureSettings settings)
        {
            // Submits a Hive job that joins tweet word n-grams against the identifiers
            // table to count tweets per identifier, then prints the job output.
            var hiveJobDefinition = new HiveJobCreateParameters()
            {
                JobName = "Load tweets to external table",
                StatusFolder = "/AAALoadTweets",

                Query = "select identifiers.identifier, Z.X.estfrequency as tweetCount  from (select explode(word_map) as X from ( SELECT context_ngrams(sentences(lower(tweetText)), array(null), 1000) as word_map FROM tweets ) struct) Z join identifiers on identifiers.identifier = Z.X.ngram[0]"
            };

            // Submit the Hive job
            var jobClient = JobSubmissionClientFactory.Connect(creds);
            var jobResults = jobClient.CreateHiveJob(hiveJobDefinition);

            WaitForJobCompletion(jobResults, jobClient);

            // Print the Hive job output; dispose the stream and reader (the original leaked both).
            using (var stream = jobClient.GetJobOutput(jobResults.JobId))
            using (var reader = new StreamReader(stream))
            {
                Console.WriteLine(reader.ReadToEnd());
            }
        }
        public Task<JobCreationResults> CreateHiveJobAsync(HiveJobCreateParameters hiveJobCreateParameters)
        {
            // When no inline query is supplied, a file must be given; a file with a
            // protocol must use the wasb scheme.
            if (hiveJobCreateParameters.Query.IsNullOrEmpty())
            {
                hiveJobCreateParameters.File.ArgumentNotNullOrEmpty("File");
                bool hasProtocol = hiveJobCreateParameters.File.Contains("://");
                bool isWasb = hiveJobCreateParameters.File.StartsWith("wasb", StringComparison.OrdinalIgnoreCase);
                if (hasProtocol && !isWasb)
                {
                    throw new InvalidOperationException("Invalid file protocol : " + hiveJobCreateParameters.File);
                }
            }

            // Fabricate a successful job-creation result from the request details.
            var jobDetails = new JobDetails
            {
                Name = hiveJobCreateParameters.JobName,
                Query = hiveJobCreateParameters.Query,
                StatusDirectory = hiveJobCreateParameters.StatusFolder
            };
            JobCreationResults successResult = this.CreateJobSuccessResult(jobDetails, hiveJobCreateParameters.JobName);
            return TaskEx2.FromResult(successResult);
        }
        public void CannotCreateNewHiveJob_WithRestrictedCharacters_StartJob()
        {
            // Starting a Hive job whose query contains the restricted '%' character
            // must fail with an InvalidOperationException naming that character.
            var restrictedQueryJob = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables %" };

            var commandFactory = ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>();
            INewAzureHDInsightHiveJobDefinitionCommand command = commandFactory.CreateNewHiveDefinition();
            command.JobName = restrictedQueryJob.JobName;
            command.Query = restrictedQueryJob.Query;
            command.EndProcessing();

            AzureHDInsightHiveJobDefinition definition = command.Output.ElementAt(0);
            try
            {
                TestJobStart(definition);
                Assert.Fail();
            }
            catch (AggregateException aggregateException)
            {
                var baseException = aggregateException.GetBaseException() as InvalidOperationException;
                Assert.IsNotNull(baseException);
                Assert.IsTrue(baseException.Message.Contains("Query contains restricted character :'%'"), "Exception not thrown for special character");
            }
        }
        public void CannotCreateNewHiveJob_WithRestrictedCharacters_StartJob()
        {
            // Starting a Hive job whose query contains the restricted '%' character
            // must fail with the exact guidance message about using the -File parameter.
            var restrictedQueryJob = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables %" };

            var commandFactory = ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>();
            INewAzureHDInsightHiveJobDefinitionCommand command = commandFactory.CreateNewHiveDefinition();
            command.JobName = restrictedQueryJob.JobName;
            command.Query = restrictedQueryJob.Query;
            command.EndProcessing();

            AzureHDInsightHiveJobDefinition definition = command.Output.ElementAt(0);
            try
            {
                TestJobStart(definition);
                Assert.Fail();
            }
            catch (AggregateException aggregateException)
            {
                var baseException = aggregateException.GetBaseException() as InvalidOperationException;
                Assert.IsNotNull(baseException);
                Assert.AreEqual("Query text contains restricted character '%', please upload the query to a file in storage and re-submit the job using the -File parameter",
                    baseException.Message);
            }
        }
        public void ICanCallThe_New_HDInsightHiveJobDefinitionCmdlet_WithParameters()
        {
            // Verifies the New-AzureHDInsightHiveJobDefinition cmdlet round-trips
            // job name, query and the supplied defines (key/value parameters).
            // (Locals renamed to camelCase per C# naming conventions.)
            var hiveJobDefinition = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables" };

            hiveJobDefinition.Defines.Add("map.input.tasks", "1000");
            hiveJobDefinition.Defines.Add("map.input.reducers", "1000");

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightHiveJobDefinition)
                            .WithParameter(CmdletConstants.JobName, hiveJobDefinition.JobName)
                            .WithParameter(CmdletConstants.Query, hiveJobDefinition.Query)
                            .WithParameter(CmdletConstants.Parameters, hiveJobDefinition.Defines)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightHiveJobDefinition hiveJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightHiveJobDefinition>().First();

                Assert.AreEqual(hiveJobDefinition.JobName, hiveJobFromPowershell.JobName);
                Assert.AreEqual(hiveJobDefinition.Query, hiveJobFromPowershell.Query);

                // Every define passed in must be present (key and value) in the cmdlet's output.
                foreach (var parameter in hiveJobDefinition.Defines)
                {
                    Assert.IsTrue(
                        hiveJobFromPowershell.Defines.Any(arg => string.Equals(parameter.Key, arg.Key) && string.Equals(parameter.Value, arg.Value)),
                        "Unable to find parameter '{0}' in value returned from powershell",
                        parameter.Key);
                }
            }
        }
        public void PayloadHasEnableLogsFalseByDefault()
        {
            // By default the serialized Hive payload must carry enablelog=false.
            var jobParameters = new HiveJobCreateParameters()
            {
                Query = "show tables",
                StatusFolder = "/showtableslocation"
            };

            var serialized = new PayloadConverterBase().SerializeHiveRequest("hadoop", jobParameters);

            var expectedField = string.Format("{0}={1}", HadoopRemoteRestConstants.EnableLogging, "false");
            Assert.IsTrue(serialized.Contains(expectedField));
        }
        public void CanSerializeValidHiveJobRequest()
        {
            // The serialized payload must contain the escaped query under the execute field.
            var jobParameters = new HiveJobCreateParameters() { Query = "show tables" };

            var serialized = new PayloadConverterBase().SerializeHiveRequest("hadoop", jobParameters);

            var expectedField = string.Format("{0}={1}", WebHCatConstants.Execute, Uri.EscapeDataString(jobParameters.Query));
            Assert.IsTrue(serialized.Contains(expectedField));
        }
        public void CanSerializeValidHiveJobRequest_WithFile()
        {
            // A file-based Hive request must carry the escaped file path in the payload.
            var jobParameters = new HiveJobCreateParameters()
            {
                File = Constants.WabsProtocolSchemeName + "filepath.hql"
            };

            var serialized = new PayloadConverterBase().SerializeHiveRequest("hadoop", jobParameters);

            var expectedField = string.Format("{0}={1}", WebHCatConstants.File, Uri.EscapeDataString(jobParameters.File));
            Assert.IsTrue(serialized.Contains(expectedField));
        }
        /// <summary>
        /// Downloads YARN application logs to <paramref name="targetDirectory"/> by running
        /// "yarn logs" on the cluster via a Hive job and copying the resulting stdout file.
        /// </summary>
        /// <param name="applicationId">The YARN application id whose logs are wanted.</param>
        /// <param name="applicationUser">The user that owns the application.</param>
        /// <param name="containerId">Optional container id; requires <paramref name="nodeId"/>.</param>
        /// <param name="nodeId">Node address for the container; required if a container id is given.</param>
        /// <param name="targetDirectory">Existing local directory to write the log file into.</param>
        private async Task DownloadApplicationLogsAsync(string applicationId, string applicationUser, string containerId, string nodeId, string targetDirectory)
        {
            applicationId.ArgumentNotNullOrEmpty("applicationId");
            applicationUser.ArgumentNotNullOrEmpty("applicationUser");
            targetDirectory.ArgumentNotNullOrEmpty("targetDirectory");

            if (!string.IsNullOrEmpty(containerId) && string.IsNullOrEmpty(nodeId))
            {
                throw new ArgumentException("NodeId was null or empty. If container id is specified, node id should also be specified");
            }

            if (!Directory.Exists(targetDirectory))
            {
                throw new ArgumentException(string.Format(CultureInfo.InvariantCulture, "The specified directory {0} does not exist.", targetDirectory));
            }

            var jobSubmissionClient = JobSubmissionClientFactory.Connect(this.HttpCredentials, this.customUserAgent);
            var storageClient = ServiceLocator.Instance.Locate<IWabStorageAbstractionFactory>().Create(this.DefaultStorageCredentials);

            // Check existence of application logs in the default storage account
            Uri appLogContainer = new Uri(string.Format(CultureInfo.InvariantCulture, "{0}{1}@{2}/app-logs/{3}/logs/{4}", Constants.WabsProtocolSchemeName, this.DefaultStorageCredentials.ContainerName, this.DefaultStorageCredentials.Name, applicationUser, applicationId));

            var logFiles = await storageClient.List(appLogContainer, false);
            if (!logFiles.Any())
            {
                throw new InvalidOperationException(string.Format("No logs found for application id {0}, user {1}, on cluster {2} at location {3}", applicationId, applicationUser, this.Cluster.Name, appLogContainer.AbsoluteUri));
            }

            // Application logs exist!
            // Convert them to plain text by running YARN CLI
            string jobName = string.Format("yarnlogs-{0}", Guid.NewGuid());
            string statusFolderName = string.Format("/{0}", jobName);
            string optionalContainerArguments = !string.IsNullOrEmpty(containerId) ? string.Format(" -containerId {0} -nodeAddress {1}", containerId, nodeId) : string.Empty;
            string command = string.Format("!cmd.exe /c yarn logs -applicationId {0} -appOwner {1}{2};", applicationId, applicationUser, optionalContainerArguments);
            string queryFileName = string.Format("/{0}.hql", jobName);

            Uri queryFileUri = new Uri(string.Format(CultureInfo.InvariantCulture, "{0}{1}@{2}{3}", Constants.WabsProtocolSchemeName, this.DefaultStorageCredentials.ContainerName, this.DefaultStorageCredentials.Name, queryFileName));

            Uri statusFolderUri = new Uri(string.Format(CultureInfo.InvariantCulture, "{0}{1}@{2}{3}", Constants.WabsProtocolSchemeName, this.DefaultStorageCredentials.ContainerName, this.DefaultStorageCredentials.Name, statusFolderName));

            try
            {
                // Upload the generated HQL command file to storage.
                var bytes = Encoding.UTF8.GetBytes(command);
                using (var memoryStream = new MemoryStream(bytes))
                {
                    await storageClient.Write(queryFileUri, memoryStream);
                }

                HiveJobCreateParameters hiveJobDefinition = new HiveJobCreateParameters()
                {
                    JobName = jobName,
                    StatusFolder = statusFolderName,
                    File = queryFileName
                };

                JobCreationResults jobResults = jobSubmissionClient.CreateHiveJob(hiveJobDefinition);
                WaitForJobCompletion(jobSubmissionClient, jobResults);

                // The yarn logs output lands in the job's stdout file under the status folder.
                Uri logContentsFileUri = new Uri(string.Format("{0}/stdout", statusFolderUri.AbsoluteUri));

                if (await storageClient.Exists(logContentsFileUri))
                {
                    // create local file in the targetdirectory.
                    var localFilePath = Path.Combine(targetDirectory, string.Format("{0}_{1}.txt", this.Cluster.Name, string.IsNullOrEmpty(containerId) ? applicationId : containerId));
                    await storageClient.DownloadToFile(logContentsFileUri, localFilePath);
                }
                else
                {
                    throw new InvalidOperationException(string.Format(
                        CultureInfo.InvariantCulture,
                        "Could not retrieve logs for application id {0}, user {1} on cluster {2} at location {3}.",
                        applicationId,
                        applicationUser,
                        this.Cluster.Name,
                        appLogContainer.AbsoluteUri));
                }
            }
            finally
            {
                // Cleanup what we created. Await the storage calls instead of blocking with
                // WaitForResult() (sync-over-async in an async method risks deadlock), and
                // observe the Delete tasks instead of firing and forgetting them.
                if (await storageClient.Exists(queryFileUri))
                {
                    await storageClient.Delete(queryFileUri);
                }

                if (await storageClient.Exists(statusFolderUri))
                {
                    await storageClient.Delete(statusFolderUri);
                }
            }
        }
        public void ICannotCallThe_New_HDInsightHiveJobDefinitionCmdlet_WithoutFileOrQueryParameter()
        {
            // Invoking the cmdlet with neither -File nor -Query must raise a
            // PSArgumentException with the documented message.
            // (Local renamed to camelCase per C# naming conventions.)
            var hiveJobDefinition = new HiveJobCreateParameters
            {
                JobName = "show tables jobDetails",
                File = TestConstants.WabsProtocolSchemeName + "filepath.hql"
            };

            try
            {
                using (IRunspace runspace = this.GetPowerShellRunspace())
                {
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightHiveJobDefinition)
                            .WithParameter(CmdletConstants.JobName, hiveJobDefinition.JobName)
                            .Invoke();
                    Assert.Fail("test failed.");
                }
            }
            catch (CmdletInvocationException invokeException)
            {
                var psArgumentException = invokeException.GetBaseException() as PSArgumentException;
                Assert.IsNotNull(psArgumentException);
                Assert.AreEqual("Either File or Query should be specified for Hive jobs.", psArgumentException.Message);
            }
        }
        public HiveJobCreateParameters DeserializeHiveJobCreationDetails(string content)
        {
            // Rebuilds a Hive job creation request from its serialized payload,
            // copying the defines and the standard properties across.
            var payload = this.DeserializePayload(content);

            var hiveParameters = new HiveJobCreateParameters()
            {
                JobName = payload.JobName,
                StatusFolder = payload.OutputStorageLocation,
                Query = payload.Query
            };

            foreach (var parameter in payload.Parameters)
            {
                hiveParameters.Defines.Add(parameter.Key, parameter.Value.ToString());
            }

            this.SetStandardProperties(payload, hiveParameters);
            return hiveParameters;
        }
 public Task<JobCreationResults> SubmitHiveJob(HiveJobCreateParameters details)
 {
     // Test double: record that the call happened and hand back a canned result.
     this.SubmitHiveJobCalled = true;
     var job = new JobCreationResults() { JobId = JobId };
     // Task.FromResult avoids the pointless thread-pool hop Task.Run incurred
     // for a value that is already available.
     return Task.FromResult(job);
 }
 private static JobCreationResults RunHiveJob(HiveJobCreateParameters job)
 {
     // Submit the Hive job and poll until it reaches a terminal state.
     JobCreationResults creationResult = jobSubmissionClient.CreateHiveJob(job);

     JobDetails currentState = jobSubmissionClient.GetJob(creationResult.JobId);
     bool isTerminal = currentState.StatusCode == JobStatusCode.Completed || currentState.StatusCode == JobStatusCode.Failed;
     while (!isTerminal)
     {
         currentState = jobSubmissionClient.GetJob(currentState.JobId);
         Thread.Sleep(TimeSpan.FromMilliseconds(IHadoopClientExtensions.GetPollingInterval()));
         isTerminal = currentState.StatusCode == JobStatusCode.Completed || currentState.StatusCode == JobStatusCode.Failed;
     }

     Assert.IsNull(creationResult.ErrorCode, "Should not fail hive jobDetails submission");
     Assert.IsNotNull(creationResult.JobId, "Should have a non-null jobDetails id");
     return creationResult;
 }
 /// <inheritdoc />
 public async Task<JobCreationResults> SubmitHiveJob(HiveJobCreateParameters details)
 {
     // Build a remote Hadoop submission client from the stored credentials and
     // delegate the Hive submission to it.
     var clientFactory = ServiceLocator.Instance.Locate<IRemoteHadoopJobSubmissionPocoClientFactory>();
     var pocoClient = clientFactory.Create(this.remoteCreds, this.context, this.ignoreSslErrors, this.GetUserAgentString());
     return await pocoClient.SubmitHiveJob(details);
 }
// --- Example 23 ---
        /// <summary>
        /// Performs HQL query and returns the query results.
        /// </summary>
        /// <param name="jobParams">The query parameters.</param>
        /// <returns>The query result, or an empty string if the job could not be created or read.</returns>
        public string Query(HiveJobCreateParameters jobParams)
        {
            // Assign status folder
            jobParams.StatusFolder = RootDirectory + "/status";

            JobCreationResults jobDetails = null;

            try
            {
                // Create Hive job
                jobDetails = this.job.CreateHiveJob(jobParams);
            }
            catch (Exception e)
            {
                AvroHdiSample.ReportError("Error while creating a Hive job\n" + e);
            }

            // If creation failed and ReportError did not terminate the process, bail out
            // instead of dereferencing a null jobDetails below (original would NRE here).
            if (jobDetails == null)
            {
                return string.Empty;
            }

            JobDetails jobInProgress = null;

            try
            {
                // Get job status
                jobInProgress = this.job.GetJob(jobDetails.JobId);
            }
            catch (Exception e)
            {
                AvroHdiSample.ReportError("Error while getting Hive job status\n" + e);
            }


            // If job is not finished then sleep until the next client polling interval
            while (jobInProgress.StatusCode != JobStatusCode.Completed
                   && jobInProgress.StatusCode != JobStatusCode.Failed)
            {
                try
                {
                    // Get job status
                    jobInProgress = this.job.GetJob(jobDetails.JobId);
                }
                catch (Exception e)
                {
                    AvroHdiSample.ReportError("Error while getting Hive job status\n" + e);
                }

                Thread.Sleep(this.client.PollingInterval);
            }

            try
            {
                // Job is finished; get its output stream, read it, and return the value.
                // Dispose the reader (and the underlying stream) — the original leaked both.
                using (var reader = new StreamReader(this.job.GetJobOutput(jobDetails.JobId)))
                {
                    return reader.ReadToEnd();
                }
            }
            catch (Exception e)
            {
                AvroHdiSample.ReportError("Error while reading Hibe job result\n" + e);
            }

            return string.Empty;
        }
        public Task<JobCreationResults> SubmitHiveJob(HiveJobCreateParameters hiveJob)
        {
            // Without an inline query a file is mandatory, and a file carrying a
            // protocol must use the wasb scheme.
            if (hiveJob.Query.IsNullOrEmpty())
            {
                hiveJob.File.ArgumentNotNullOrEmpty("File");
                bool hasProtocol = hiveJob.File.Contains("://");
                bool isWasb = hiveJob.File.StartsWith(Constants.WabsProtocol, StringComparison.OrdinalIgnoreCase);
                if (hasProtocol && !isWasb)
                {
                    throw new InvalidOperationException("Invalid file protocol : " + hiveJob.File);
                }
            }

            // Fabricate a successful creation result mirroring the request.
            var jobDetails = new JobDetails()
            {
                Name = hiveJob.JobName,
                Query = hiveJob.Query,
                StatusDirectory = hiveJob.StatusFolder
            };
            var successResult = this.CreateJobSuccessResult(jobDetails, hiveJob.JobName);
            return Task.FromResult(successResult);
        }
        private static void SubmitJobs()
        {
            // Reads cluster settings from configuration, then submits a Pig job
            // followed by a Hive job and waits for each to complete.

            // Get HDInsight cluster configuration settings
            string clusterName = ConfigurationManager.AppSettings["ClusterName"];
            string userName = ConfigurationManager.AppSettings["UserName"];
            string password = ConfigurationManager.AppSettings["Password"];

            // Create basic authentication credential for cluster
            BasicAuthCredential bcred = new BasicAuthCredential();
            bcred.Server = new Uri("https://" + clusterName + ".azurehdinsight.net");
            bcred.UserName = userName;
            bcred.Password = password;

            // One client suffices for both submissions (the original created two
            // identical clients from the same credential).
            var jobClient = JobSubmissionClientFactory.Connect(bcred);

            // Create and submit Pig job
            PigJobCreateParameters pigJob = new PigJobCreateParameters()
            {
                StatusFolder = "/data/racecar/scripts/processdatastatus",
                File = "/data/racecar/scripts/processdata.pig"
            };
            JobCreationResults pigJobResults = jobClient.CreatePigJob(pigJob);
            WaitForJobCompletion(pigJobResults, jobClient);

            // Create and submit Hive job
            HiveJobCreateParameters hiveJob = new HiveJobCreateParameters()
            {
                JobName = "Create Hive tables",
                StatusFolder = "/data/racecar/scripts/createtablestatus",
                File = "/data/racecar/scripts/createtables.hql"
            };
            JobCreationResults hiveJobResults = jobClient.CreateHiveJob(hiveJob);
            WaitForJobCompletion(hiveJobResults, jobClient);

        }
 public JobCreationResults CreateHiveJob(HiveJobCreateParameters hiveJobCreateParameters)
 {
     // Synchronous wrapper: prepare the query job, then block on the async submission.
     this.PrepareQueryJob(hiveJobCreateParameters);
     var creationTask = this.CreateHiveJobAsync(hiveJobCreateParameters);
     return creationTask.WaitForResult();
 }
 public JobCreationResults CreateHiveJob(HiveJobCreateParameters hiveJobCreateParameters)
 {
     // Synchronous wrapper around the async Hive job creation.
     var creationTask = this.CreateHiveJobAsync(hiveJobCreateParameters);
     return creationTask.WaitForResult();
 }
        public virtual void WaitForJob()
        {
            // Scenario: pipe New -> Start -> Wait in a single pipeline and verify the
            // job finishes in the "Completed" state. (Removed the commented-out
            // polling-interval override left over from debugging.)
            var hiveJobDefinition = new HiveJobCreateParameters { JobName = "show tables jobDetails", Query = "show tables" };

            ClusterDetails cluster = CmdletScenariosTestCaseBase.GetHttpAccessEnabledCluster();
            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightHiveJobDefinition)
                            .WithParameter(CmdletConstants.JobName, hiveJobDefinition.JobName)
                            .WithParameter(CmdletConstants.Query, hiveJobDefinition.Query)
                            .AddCommand(CmdletConstants.StartAzureHDInsightJob)
                            .WithParameter(CmdletConstants.Cluster, cluster.ConnectionUrl)
                            .WithParameter(CmdletConstants.Credential, GetPSCredential(cluster.HttpUserName, cluster.HttpPassword))
                            .AddCommand(CmdletConstants.WaitAzureHDInsightJob)
                            .WithParameter(CmdletConstants.Credential, GetPSCredential(cluster.HttpUserName, cluster.HttpPassword))
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                Assert.AreEqual("Completed", results.Results.ToEnumerable<AzureHDInsightJob>().First().State);
            }
        }
 public void GivenIHaveAHiveJobRequestObject()
 {
     // Seed the scenario with an empty Hive job request as the transfer object.
     this.transferObject = new HiveJobCreateParameters
     {
         JobName = string.Empty,
         Query = string.Empty,
         StatusFolder = string.Empty
     };
 }