// [Fact] — test attribute intentionally commented out (test disabled).
// FIX: in the collapsed single-line form the "// [Fact]" prefix commented out
// the ENTIRE method body; restored to multi-line form so only the attribute
// is disabled and the method still compiles.
/// <summary>
/// Submits a "SHOW TABLES;" Hive job and verifies GetJob returns OK for it.
/// </summary>
public void GetHiveJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new HiveJobSubmissionParameters { UserName = username, Query = "SHOW TABLES;" };
        var jobid = client.JobManagement.SubmitHiveJob(parameters).JobSubmissionJsonResponse.Id;

        var response = client.JobManagement.GetJob(jobid);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);
    }
}
/// <summary>
/// Submits a MapReduce streaming word-count job (cat/wc) and checks that
/// the service accepts it and returns a well-formed "job_"-prefixed id.
/// </summary>
public void SubmitMapReduceStreamingJob()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var submissionParams = new MapReduceStreamingJobSubmissionParameters
        {
            UserName = user,
            Mapper = "cat.exe",
            Reducer = "wc.exe",
            Input = "/example/data/gutenberg/davinci.txt",
            Output = "/example/data/gutenberg/wcout"
        };

        var submitResponse = jobClient.JobManagement.SubmitMapReduceStreamingJob(submissionParams);

        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);
        // Templeton job ids always carry the "job_" prefix.
        Assert.Contains("job_", submitResponse.JobSubmissionJsonResponse.Id, StringComparison.InvariantCulture);
    }
}
/// <summary>
/// Creates the HDInsight management, job management and command runtime
/// mocks used by the data-plane cmdlet tests; credentials are placeholders.
/// </summary>
public virtual void SetupTestsForData()
{
    hdinsightManagementMock = new Mock<AzureHdInsightManagementClient>();

    var dummyCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = "******",
        Password = "******"
    };
    hdinsightJobManagementMock = new Mock<AzureHdInsightJobManagementClient>(ClusterName, dummyCredentials);

    commandRuntimeMock = new Mock<ICommandRuntime>();
}
// [Fact] — test attribute intentionally commented out (test disabled).
// FIX: in the collapsed single-line form the "// [Fact]" prefix commented out
// the ENTIRE method body; restored to multi-line form so only the attribute
// is disabled and the method still compiles.
/// <summary>
/// Submits a small grouping Pig job and verifies GetJob returns OK for it.
/// </summary>
public void GetPigJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new PigJobSubmissionParameters()
        {
            UserName = username,
            Query = "records = LOAD '/example/pig/sahara-paleo-fauna.txt' AS (DateBP:int, Loc:chararray, Coordinates:chararray, Samples:chararray, Country:chararray, Laboratory:chararray);" +
                    "filtered_records = FILTER records by Country == 'Egypt' OR Country == 'Morocco';" +
                    "grouped_records = GROUP filtered_records BY Country;" +
                    "DUMP grouped_records;"
        };
        var jobid = client.JobManagement.SubmitPigJob(parameters).JobSubmissionJsonResponse.Id;

        var response = client.JobManagement.GetJob(jobid);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);
    }
}
/// <summary>
/// Submits a streaming word-count job, waits for completion and — when
/// recording against a live cluster — validates the job output, or dumps
/// the error logs and fails if the job exited non-zero.
/// </summary>
public void SubmitMapReduceStreamingJob()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var submissionParams = new MapReduceStreamingJobSubmissionParameters
        {
            Mapper = "cat",
            Reducer = "wc",
            Input = "/example/data/gutenberg/davinci.txt",
            Output = "/example/data/gutenberg/wcount"
        };

        var submitResponse = jobClient.JobManagement.SubmitMapReduceStreamingJob(submissionParams);
        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);

        var jobId = submitResponse.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var finalStatus = GetJobFinalStatus(jobClient, jobId);
        var storageAccess = GetStorageAccessObject();

        if (finalStatus.JobDetail.ExitValue == 0)
        {
            // Storage can only be reached while recording, not during playback.
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = jobClient.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Length > 0);
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var errorLogs = jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(errorLogs);
                Assert.NotNull(errorTextOutput);
            }
            // A non-zero exit value means the job itself failed.
            Assert.True(false);
        }
    }
}
/// <summary>
/// Submits a "SHOW TABLES;" Hive job carrying extra arguments/defines and
/// verifies GetJob returns OK for the new job id.
/// </summary>
public void GetHiveJob()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var submissionParams = new HiveJobSubmissionParameters
        {
            UserName = user,
            Query = "SHOW TABLES;",
            Arguments = "arg1",
            Defines = "def1"
        };
        var newJobId = jobClient.JobManagement.SubmitHiveJob(submissionParams).JobSubmissionJsonResponse.Id;

        var getJobResponse = jobClient.JobManagement.GetJob(newJobId);
        Assert.NotNull(getJobResponse);
        Assert.Equal(getJobResponse.StatusCode, HttpStatusCode.OK);
    }
}
/// <summary>
/// Creates the job management and command runtime mocks used by the
/// data-plane cmdlet tests; credentials are placeholders.
/// </summary>
public virtual void SetupTestsForData()
{
    var dummyCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = "******",
        Password = "******"
    };

    hdinsightJobManagementMock = new Mock<AzureHdInsightJobManagementClient>(ClusterName, dummyCredentials);
    commandRuntimeMock = new Mock<ICommandRuntime>();
}
/// <summary>
/// Builds a job management client for either the Windows or the Linux test
/// cluster, using the matching stored test credentials.
/// </summary>
/// <param name='isWindowsCluster'>True to target the Windows test cluster.</param>
public static HDInsightJobManagementClient GetHDInsightJobManagementClient(bool isWindowsCluster = false)
{
    string user;
    string pass;
    string cluster;

    if (isWindowsCluster)
    {
        user = WinUserName;
        pass = WinPassword;
        cluster = WinClusterName;
    }
    else
    {
        user = UserName;
        pass = Password;
        cluster = ClusterName;
    }

    var credentials = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
    return TestUtils.GetHDInsightJobManagementClient(cluster, credentials);
}
/// <summary>
/// Wires up the photo controller: stores the hosting environment and
/// creates an HDInsight job client for the configured cluster.
/// </summary>
/// <param name='hostingEnv'>Hosting environment injected by ASP.NET.</param>
public InsightPhotoController(IHostingEnvironment hostingEnv)
{
    _env = hostingEnv;

    var basicCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = ExistingClusterUsername,
        Password = ExistingClusterPassword
    };
    _jobClient = new HDInsightJobManagementClient(ExistingClusterUri, basicCredentials);
}
/// <summary>
/// Initializes a new instance of the HDInsightJobManagementClient class.
/// </summary>
/// <param name='clusterDnsName'>
/// Required. The cluster dns name against which the job management is
/// to be performed.
/// </param>
/// <param name='credentials'>
/// Required. Basic authentication credentials for job submission.
/// </param>
/// <param name='retryPolicy'>
/// Optional. Retry Policy for Http Transient errors.
/// </param>
public HDInsightJobManagementClient(string clusterDnsName, BasicAuthenticationCloudCredentials credentials, RetryPolicy retryPolicy = null)
    : this(clusterDnsName, credentials)
{
    // Fall back to the library's default retry policy when none was supplied.
    this.SetRetryPolicy(retryPolicy ?? HDInsightJobManagementClient.HDInsightRetryPolicy);
}
/// <summary>
/// Submits the given Hive job, waits for completion and — when recording
/// against a live cluster — asserts the job output contains the expected
/// fragment, or dumps error logs and fails on a non-zero exit value.
/// </summary>
/// <param name='parameters'>Hive job to submit.</param>
/// <param name='expectedOutputPart'>Substring expected in the job output.</param>
public void SubmitHiveJobAndValidateOutput(HiveJobSubmissionParameters parameters, string expectedOutputPart)
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var submitResponse = jobClient.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);

        var jobId = submitResponse.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var finalStatus = GetJobFinalStatus(jobClient, jobId);
        var storageAccess = GetStorageAccessObject();

        if (finalStatus.JobDetail.ExitValue == 0)
        {
            // Storage can only be reached while recording, not during playback.
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = jobClient.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Contains(expectedOutputPart));
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var errorLogs = jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(errorLogs);
                Assert.NotNull(errorTextOutput);
            }
            // A non-zero exit value means the job itself failed.
            Assert.True(false);
        }
    }
}
/// <summary>
/// Creates the mocks used by the data-plane cmdlet tests. A real
/// ClientFactory is installed because job tests go through the factory,
/// while scenario tests install their own MockClientFactory instead.
/// </summary>
public virtual void SetupTestsForData()
{
    AzureSession.Instance.ClientFactory = new ClientFactory();

    hdinsightManagementMock = new Mock<AzureHdInsightManagementClient>();

    var dummyCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = "******",
        Password = "******"
    };
    hdinsightJobManagementMock = new Mock<AzureHdInsightJobManagementClient>(ClusterName, dummyCredentials);

    commandRuntimeMock = new Mock<ICommandRuntime>();
}
/// <summary>
/// Entry point: builds the HDInsight job client from the configured
/// cluster settings, submits the MapReduce job, then waits for ENTER.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("The application is running ...");

    var basicCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = ExistingClusterUsername,
        Password = ExistingClusterPassword
    };
    _hdiJobManagementClient = new HDInsightJobManagementClient(ExistingClusterUri, basicCredentials);

    SubmitMRJob();

    Console.WriteLine("Press ENTER to continue ...");
    Console.ReadLine();
}
/// <summary>
/// Submits a long-running pi MapReduce job and verifies it can be killed;
/// the final job state must be "KILLED".
/// </summary>
public void KillMapReduceStreamingJob()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        // Generous attempts/timeout keep the job alive long enough to kill it.
        var jobDefines = new Dictionary<string, string>
        {
            { "mapreduce.map.maxattempts", "10" },
            { "mapreduce.reduce.maxattempts", "10" },
            { "mapreduce.task.timeout", "60000" }
        };
        var jobArgs = new List<string> { "10", "1000" };

        var submissionParams = new MapReduceJobSubmissionParameters
        {
            UserName = user,
            JarFile = "/example/jars/hadoop-mapreduce-examples.jar",
            JarClass = "pi",
            Defines = ConvertDefinesToString(jobDefines),
            Arguments = ConvertArgsToString(jobArgs)
        };

        var newJobId = jobClient.JobManagement.SubmitMapReduceJob(submissionParams).JobSubmissionJsonResponse.Id;
        var getJobResponse = jobClient.JobManagement.GetJob(newJobId);
        Assert.NotNull(getJobResponse);
        Assert.Equal(getJobResponse.StatusCode, HttpStatusCode.OK);

        var killedJob = jobClient.JobManagement.KillJob(newJobId);
        Assert.NotNull(killedJob);
        Assert.Equal(killedJob.JobDetail.Status.State, "KILLED");
    }
}
/// <summary>
/// Builds the recalculation job: keeps the configurations, prepares storage
/// access for the default container and creates the HDInsight job client.
/// </summary>
/// <param name='storageConfiguration'>Storage settings for the job.</param>
/// <param name='configuration'>HDInsight cluster settings.</param>
public RecalculationJob(StorageConfiguration storageConfiguration, HDInsightConfiguration configuration)
{
    _storageConfiguration = storageConfiguration;
    _configuration = configuration;

    _storageAccess = new AzureStorageAccess(
        _configuration.DefaultStorageAccountName,
        _configuration.DefaultStorageAccountKey,
        _configuration.DefaultStorageContainerName);

    var basicCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = _configuration.ExistingClusterUsername,
        Password = _configuration.ExistingClusterPassword
    };
    _hdiJobManagementClient = new HDInsightJobManagementClient(_configuration.ExistingClusterUri, basicCredentials);
}
/// <summary>
/// Initializes a new instance of the HDInsightJobManagementClient class
/// bound to the given cluster, credentials and HTTP client.
/// </summary>
/// <param name='clusterDnsName'>
/// Required. The cluster dns name against which the job management is
/// to be performed.
/// </param>
/// <param name='credentials'>
/// Required. Basic authentication credentials for job submission.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public HDInsightJobManagementClient(string clusterDnsName, BasicAuthenticationCloudCredentials credentials, HttpClient httpClient)
    : this(httpClient)
{
    // Validate before touching any state; order matters for which
    // exception a caller sees when both arguments are null.
    if (clusterDnsName == null)
    {
        throw new ArgumentNullException("clusterDnsName");
    }
    if (credentials == null)
    {
        throw new ArgumentNullException("credentials");
    }

    this._clusterDnsName = clusterDnsName;
    this._credentials = credentials;
    this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the HDInsightJobManagementClient class.
/// </summary>
/// <param name='clusterDnsName'>
/// Required. The cluster dns name against which the job management is
/// to be performed.
/// </param>
/// <param name='credentials'>
/// Required. Basic authentication credentials for job submission.
/// </param>
/// <param name='retryPolicy'>
/// Optional. Retry Policy for Http Transient errors.
/// </param>
public HDInsightJobManagementClient(string clusterDnsName, BasicAuthenticationCloudCredentials credentials, RetryPolicy retryPolicy = null)
    : this(clusterDnsName, credentials)
{
    bool useDefaultPolicy = retryPolicy == null;
    if (useDefaultPolicy)
    {
        retryPolicy = HDInsightJobManagementClient.HDInsightRetryPolicy;
        // An HttpClient timeout equal to MaxBackOff alone still produced
        // TaskCancellation exceptions, so allow MaxBackOff + 2 minutes for
        // the HDInsight gateway plus 1 extra minute of buffer.
        this.HttpClient.Timeout = MaxBackOff.Add(TimeSpan.FromMinutes(3));
    }
    this.SetRetryPolicy(retryPolicy);
}
/// <summary>
/// Submits a deliberately invalid Hive query, verifies the submission
/// itself succeeds but the job exits non-zero, and (while recording)
/// checks that non-empty error logs can be retrieved.
/// </summary>
public void SubmitHiveJobError()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var submissionParams = new HiveJobSubmissionParameters
        {
            Query = "FAKEQUERY;",
            StatusDir = "jobstatus"
        };

        var submitResponse = jobClient.JobManagement.SubmitHiveJob(submissionParams);
        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);

        var jobId = submitResponse.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        // The bogus query must make the job fail.
        var finalStatus = GetJobFinalStatus(jobClient, jobId);
        Assert.True(finalStatus.JobDetail.ExitValue > 0);

        var storageAccess = GetStorageAccessObject();
        if (HttpMockServer.Mode == HttpRecorderMode.Record)
        {
            var errorLogs = jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
            Assert.NotNull(errorLogs);
            Assert.True(errorLogs.Length > 0);

            string errorTextOutput = Convert(errorLogs);
            Assert.True(!string.IsNullOrEmpty(errorTextOutput));
        }
    }
}
/// <summary>
/// Starts a pi MapReduce job configured to run long enough to be killed,
/// then kills it and asserts the resulting job state is "KILLED".
/// </summary>
public void KillMapReduceStreamingJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var credentials = new BasicAuthenticationCloudCredentials
        {
            Username = TestUtils.UserName,
            Password = TestUtils.Password
        };
        var client = TestUtils.GetHDInsightJobManagementClient(TestUtils.ClusterName, credentials);

        // High retry counts and a long task timeout keep the job running.
        var hadoopDefines = new Dictionary<string, string>
        {
            { "mapreduce.map.maxattempts", "10" },
            { "mapreduce.reduce.maxattempts", "10" },
            { "mapreduce.task.timeout", "60000" }
        };
        var piArguments = new List<string> { "10", "1000" };

        var parameters = new MapReduceJobSubmissionParameters
        {
            UserName = TestUtils.UserName,
            JarFile = "/example/jars/hadoop-mapreduce-examples.jar",
            JarClass = "pi",
            Defines = ConvertDefinesToString(hadoopDefines),
            Arguments = ConvertArgsToString(piArguments)
        };

        var jobid = client.JobManagement.SubmitMapReduceJob(parameters).JobSubmissionJsonResponse.Id;

        var response = client.JobManagement.GetJob(jobid);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var job = client.JobManagement.KillJob(jobid);
        Assert.NotNull(job);
        Assert.Equal(job.JobDetail.Status.State, "KILLED");
    }
}
/// <summary>
/// Initializes a new instance of the HDInsightJobManagementClient class.
/// </summary>
/// <param name='clusterDnsName'>
/// Required. The cluster dns name against which the job management is
/// to be performed.
/// </param>
/// <param name='credentials'>
/// Required. Basic authentication credentials for job submission.
/// </param>
internal HDInsightJobManagementClient(string clusterDnsName, BasicAuthenticationCloudCredentials credentials)
    : this()
{
    if (clusterDnsName == null)
    {
        throw new ArgumentNullException("clusterDnsName");
    }
    if (credentials == null)
    {
        throw new ArgumentNullException("credentials");
    }

    this._clusterDnsName = clusterDnsName;
    this._credentials = credentials;
    this.Credentials.InitializeServiceClient(this);

    // NOTE(review): the user name is lower-cased with the *current* culture;
    // for a machine identifier InvariantCulture would normally be expected.
    // Kept as-is because CheckValidJobUserName asserts exactly this behavior.
    this._userName = credentials.Username.ToLower(CultureInfo.CurrentCulture);
}
/// <summary>
/// Sample: submits a Pig job to the hard-coded cluster and prints the
/// response details and job list.
/// FIX: the collapsed single-line form let the first inline "//" comment
/// swallow the rest of the method (including the closing brace of the
/// initializer); restored to multi-line form so each comment only covers
/// its own line and the method compiles again.
/// NOTE(review): every Query/File value below is commented out, so the
/// submitted PigJobSubmissionParameters is empty and the job will almost
/// certainly fail server-side — restore a real Query or File before use.
/// </summary>
public static void SubmitPigJob()
{
    const string ExistingClusterName = "sparkdheetest";
    const string ExistingClusterUri = ExistingClusterName + ".azurehdinsight.net";
    const string ExistingClusterUsername = "******";
    const string ExistingClusterPassword = "******";

    var clusterCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = ExistingClusterUsername,
        Password = ExistingClusterPassword
    };
    HDInsightJobManagementClient _hdiJobManagementClient = new HDInsightJobManagementClient(ExistingClusterUri, clusterCredentials);
    ////SubmitPigJob();

    var parameters = new PigJobSubmissionParameters
    {
        //A = LOAD 'wasb://clustercreatetemplate.blob.core.windows.net/clustercreatetemplate/information.txt' using PigStorage ('\t') as (FName: chararray, LName: chararray, MobileNo: chararray, City: chararray, Profession: chararray);
        // B = FOREACH A generate FName,MobileNo,Profession;
        //DUMP B;
        //wasb://clustercreatetemplate.blob.core.windows.net/clustercreatetemplate/sample.pig
        //Query = @"PigSampleIn = LOAD 'wasb://clustercreatetemplate.blob.core.windows.net/clustercreatetemplate/input.txt' USING PigStorage(',') AS (ProfileID:chararray, SessionStart:chararray, Duration:int, SrcIPAddress:chararray, GameType:chararray);
        // GroupProfile = Group PigSampleIn all;
        // PigSampleOut = Foreach GroupProfile Generate PigSampleIn.ProfileID, SUM(PigSampleIn.Duration);
        // Store PigSampleOut into 'wasb://clustercreatetemplate.blob.core.windows.net/clustercreatetemplate/output.txt' USING PigStorage (',');"
        // File =
    };

    Console.WriteLine("Submitting the Pig job to the cluster...");
    var response = _hdiJobManagementClient.JobManagement.SubmitPigJob(parameters);

    Console.WriteLine("Validating that the response is as expected...");
    Console.WriteLine("Response status code is " + response.StatusCode);
    Console.WriteLine("Validating the response object...");
    Console.WriteLine("JobId is " + response.JobSubmissionJsonResponse.Id);

    var jobs = _hdiJobManagementClient.JobManagement.ListJobs();
    foreach (var job in jobs)
    {
        //var details = _hdiJobManagementClient.JobManagement.GetJobOutput(job.Id)
    }

    Console.WriteLine("Press ENTER to continue ...");
    Console.ReadLine();
}
/// <summary>
/// Initializes a new instance of the WebSiteExtensionsClient class for the
/// given site, defaulting the base URI to the site's kudu scm endpoint.
/// </summary>
/// <param name='siteName'>
/// Required. The site name.
/// </param>
/// <param name='credentials'>
/// Required. TBD.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public WebSiteExtensionsClient(string siteName, BasicAuthenticationCloudCredentials credentials, HttpClient httpClient)
    : this(httpClient)
{
    if (siteName == null)
    {
        throw new ArgumentNullException("siteName");
    }
    if (credentials == null)
    {
        throw new ArgumentNullException("credentials");
    }

    this._siteName = siteName;
    this._credentials = credentials;
    // Default kudu endpoint derived from the site name.
    this._baseUri = TypeConversion.TryParseUri("https://" + SiteName + ".scm.azurewebsites.net:443");
    this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Verifies that page-wise listing (ListJobsAfterJobId) walks the same
/// jobs, in the same order, as a single full ListJobs call.
/// FIX: removed an exact duplicate of the NotNull/StatusCode assertion
/// pair that appeared twice in a row in the original.
/// </summary>
public void GetJobsPagination()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var allJobsResp = client.JobManagement.ListJobs();
        Assert.NotNull(allJobsResp);
        Assert.Equal(allJobsResp.StatusCode, HttpStatusCode.OK);
        Assert.True(allJobsResp.JobList.Count > 0);

        int numOfEntries = 3;
        int index = -1;
        string jobid = string.Empty;
        while (true)
        {
            // Fetch the next page and check that its last entry lines up with
            // the corresponding position in the full listing.
            var t = client.JobManagement.ListJobsAfterJobId(jobid, numOfEntries);
            jobid = t.JobList.Last().Id;
            index += t.JobList.Count;
            var expectedJobId = allJobsResp.JobList.ElementAt(index).Id;
            Assert.Equal(expectedJobId, jobid);
            // A short page, or running past the full list, means we are done.
            if (t.JobList.Count != numOfEntries || allJobsResp.JobList.Count <= index)
            {
                break;
            }
        }
    }
}
/// <summary>
/// Verifies that both client constructors expose the credential user name
/// lower-cased (current culture) via the UserName property.
/// </summary>
public void CheckValidJobUserName()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var creds = new BasicAuthenticationCloudCredentials { Username = "******", Password = "" };
        var expectedUserName = creds.Username.ToLower(CultureInfo.CurrentCulture);

        var jobClient = new HDInsightJobManagementClient("TestCluster", creds);
        Assert.Equal(expectedUserName, jobClient.UserName);

        // The HttpClient overload must normalize the user name identically.
        jobClient = new HDInsightJobManagementClient("TestCluster", creds, new HttpClient());
        Assert.Equal(expectedUserName, jobClient.UserName);
    }
}
/// <summary>
/// Submits a "SHOW TABLES;" Hive job with extra defines/arguments and
/// checks the submission is accepted with a "job_"-prefixed id.
/// </summary>
public void SubmitHiveJob()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        // NOTE(review): "ravi" is not a standard hive.execution.engine value
        // (mr/tez/spark) — presumably intentional for this recorded test; confirm.
        var hiveDefines = new Dictionary<string, string>
        {
            { "hive.execution.engine", "ravi" },
            { "hive.exec.reducers.max", "1" }
        };
        var hiveArgs = new List<string> { "argA", "argB" };

        var submissionParams = new HiveJobSubmissionParameters
        {
            UserName = user,
            Query = "SHOW TABLES;",
            Defines = ConvertDefinesToString(hiveDefines),
            Arguments = ConvertArgsToString(hiveArgs)
        };

        var submitResponse = jobClient.JobManagement.SubmitHiveJob(submissionParams);
        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);
        Assert.Contains("job_", submitResponse.JobSubmissionJsonResponse.Id, StringComparison.InvariantCulture);
    }
}
/// <summary>
/// Initializes a new instance of the WebSiteExtensionsClient class with an
/// explicit base URI for all kudu requests.
/// </summary>
/// <param name='siteName'>
/// Required. The site name.
/// </param>
/// <param name='credentials'>
/// Required. TBD.
/// </param>
/// <param name='baseUri'>
/// Required. The URI used as the base for all kudu requests.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public WebSiteExtensionsClient(string siteName, BasicAuthenticationCloudCredentials credentials, Uri baseUri, HttpClient httpClient)
    : this(httpClient)
{
    if (siteName == null)
    {
        throw new ArgumentNullException("siteName");
    }
    if (credentials == null)
    {
        throw new ArgumentNullException("credentials");
    }
    if (baseUri == null)
    {
        throw new ArgumentNullException("baseUri");
    }

    this._siteName = siteName;
    this._credentials = credentials;
    this._baseUri = baseUri;
    this.Credentials.InitializeServiceClient(this);
}
// [Fact] — test attribute intentionally commented out (test disabled).
// FIX: in the collapsed single-line form the "// [Fact]" prefix commented out
// the ENTIRE method body; restored to multi-line form so only the attribute
// is disabled and the method still compiles.
/// <summary>
/// Submits a failing Hive query and verifies non-empty error logs can be
/// fetched for the job from the configured storage account.
/// </summary>
public void GetJobError()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new HiveJobSubmissionParameters { UserName = username, Query = "FAKEQUERY;", StatusDir = "jobstatus" };
        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var job = client.JobManagement.GetJob(response.JobSubmissionJsonResponse.Id);
        var output = client.JobManagement.GetJobErrorLogs(job.JobDetail.Id, TestUtils.StorageAccountName, TestUtils.StorageAccountKey, TestUtils.DefaultContainer);
        Assert.NotNull(output);
        Assert.True(output.Length > 0);

        var outputStr = Convert(output);
        Assert.NotNull(outputStr);
    }
}
//[Fact] — test attribute intentionally commented out (test disabled).
// FIX: in the collapsed single-line form the "//[Fact]" prefix commented out
// the ENTIRE method body; restored to multi-line form so only the attribute
// is disabled and the method still compiles.
// NOTE(review): this appears to be an exact duplicate of the other disabled
// GetJobError variant in this source — presumably from a parallel SDK branch;
// confirm before consolidating.
/// <summary>
/// Submits a failing Hive query and verifies non-empty error logs can be
/// fetched for the job from the configured storage account.
/// </summary>
public void GetJobError()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new HiveJobSubmissionParameters { UserName = username, Query = "FAKEQUERY;", StatusDir = "jobstatus" };
        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);

        var job = client.JobManagement.GetJob(response.JobSubmissionJsonResponse.Id);
        var output = client.JobManagement.GetJobErrorLogs(job.JobDetail.Id, TestUtils.StorageAccountName, TestUtils.StorageAccountKey, TestUtils.DefaultContainer);
        Assert.NotNull(output);
        Assert.True(output.Length > 0);

        var outputStr = Convert(output);
        Assert.NotNull(outputStr);
    }
}
// [Fact] — test attribute intentionally commented out (test disabled).
// FIX: in the collapsed single-line form the "// [Fact]" prefix commented out
// the ENTIRE method body; restored to multi-line form so only the attribute
// is disabled and the method still compiles.
/// <summary>
/// Lists all jobs on the cluster and verifies an OK response.
/// </summary>
public void ListJobs()
{
    using (var context = UndoContext.Current)
    {
        context.Start();

        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var response = client.JobManagement.ListJobs();
        Assert.NotNull(response);
        Assert.Equal(response.StatusCode, HttpStatusCode.OK);
    }
}
/// <summary>
/// Sample: submits the named Pig script (from wasbs:///user/root/) to the
/// hard-coded cluster and prints the resulting job id.
/// </summary>
/// <param name='filename'>Script name, without the ".pig" extension.</param>
public static void SubmitPigJob(string filename)
{
    const string ExistingClusterName = "orionml";
    const string ExistingClusterUri = ExistingClusterName + ".azurehdinsight.net";
    const string ExistingClusterUsername = "******";
    const string ExistingClusterPassword = "******";

    var clusterCredentials = new BasicAuthenticationCloudCredentials
    {
        Username = ExistingClusterUsername,
        Password = ExistingClusterPassword
    };
    HDInsightJobManagementClient _hdiJobManagementClient = new HDInsightJobManagementClient(ExistingClusterUri, clusterCredentials);

    var parameters = new PigJobSubmissionParameters
    {
        File = "wasbs:///user/root/" + filename + ".pig"
    };

    Console.WriteLine("Submitting the Pig job with file name [" + filename + "] to the cluster...");
    var response = _hdiJobManagementClient.JobManagement.SubmitPigJob(parameters);
    Console.WriteLine("JobId is " + response.JobSubmissionJsonResponse.Id);
}
/// <summary>
/// Lists all jobs on the test cluster and verifies an OK response.
/// </summary>
public void ListJobs()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var creds = new BasicAuthenticationCloudCredentials
        {
            Username = TestUtils.UserName,
            Password = TestUtils.Password
        };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(TestUtils.ClusterName, creds);

        var listResponse = jobClient.JobManagement.ListJobs();
        Assert.NotNull(listResponse);
        Assert.Equal(listResponse.StatusCode, HttpStatusCode.OK);
    }
}
/// <summary>
/// Factory: builds a WebSiteExtensionsClient from a settings dictionary.
/// "SiteName" is required; "BaseUri" is optional and, when present,
/// overrides the default kudu endpoint.
/// </summary>
/// <param name='settings'>Required. Configuration settings bag.</param>
public static WebSiteExtensionsClient Create(IDictionary<string, object> settings)
{
    if (settings == null)
    {
        throw new ArgumentNullException("settings");
    }

    BasicAuthenticationCloudCredentials credentials = ConfigurationHelper.GetCredentials<BasicAuthenticationCloudCredentials>(settings);
    Uri baseUri = ConfigurationHelper.GetUri(settings, "BaseUri", false);
    string siteName = ConfigurationHelper.GetString(settings, "SiteName", false);

    if (baseUri != null && siteName != null)
    {
        return new WebSiteExtensionsClient(siteName, credentials, baseUri);
    }
    else if (siteName != null)
    {
        return new WebSiteExtensionsClient(siteName, credentials);
    }

    // FIX: the original threw a parameterless ArgumentNullException, which
    // hid which setting was missing. Reaching this point means the required
    // "SiteName" setting was absent; the exception type is unchanged so
    // existing catch clauses still work.
    throw new ArgumentNullException("SiteName");
}
/// <summary>
/// Submits a streaming word-count job to the Windows cluster, shipping the
/// mapper/reducer binaries via the Files parameter, then validates the
/// output (or error logs) while recording.
/// </summary>
public void SubmitMapReduceStreamingJobWithFilesParam()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.WinUserName;
        var pass = TestUtils.WinPassword;
        var cluster = TestUtils.WinClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var submissionParams = new MapReduceStreamingJobSubmissionParameters
        {
            Mapper = "cat.exe",
            Reducer = "wc.exe",
            Input = "/example/data/gutenberg/davinci.txt",
            Output = "/example/data/gutenberg/wcount",
            // Executables shipped alongside the job.
            Files = new List<string> { "/example/apps/wc.exe", "/example/apps/cat.exe" }
        };

        var submitResponse = jobClient.JobManagement.SubmitMapReduceStreamingJob(submissionParams);
        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);

        var jobId = submitResponse.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var finalStatus = GetJobFinalStatus(jobClient, jobId);
        var storageAccess = new AzureStorageAccess(TestUtils.WinStorageAccountName, TestUtils.WinStorageAccountKey, TestUtils.WinDefaultContainer);

        if (finalStatus.JobDetail.ExitValue == 0)
        {
            // Storage can only be reached while recording, not during playback.
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = jobClient.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Length > 0);
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var errorLogs = jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(errorLogs);
                Assert.NotNull(errorTextOutput);
            }
            // A non-zero exit value means the job itself failed.
            Assert.True(false);
        }
    }
}
/// <summary>
/// Submits a Pig job that counts log lines per log level and, while
/// recording, asserts the output contains a DEBUG tuple (or dumps error
/// logs and fails on a non-zero exit value).
/// </summary>
public void SubmitPigJob()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials() { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var submissionParams = new PigJobSubmissionParameters()
        {
            Query = "LOGS = LOAD 'wasb:///example/data/sample.log';" +
                    "LEVELS = foreach LOGS generate REGEX_EXTRACT($0, '(TRACE|DEBUG|INFO|WARN|ERROR|FATAL)', 1) as LOGLEVEL;" +
                    "FILTEREDLEVELS = FILTER LEVELS by LOGLEVEL is not null;" +
                    "GROUPEDLEVELS = GROUP FILTEREDLEVELS by LOGLEVEL;" +
                    "FREQUENCIES = foreach GROUPEDLEVELS generate group as LOGLEVEL, COUNT(FILTEREDLEVELS.LOGLEVEL) as COUNT;" +
                    "RESULT = order FREQUENCIES by COUNT desc;" +
                    "DUMP RESULT;"
        };

        var submitResponse = jobClient.JobManagement.SubmitPigJob(submissionParams);
        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);

        var jobId = submitResponse.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var finalStatus = GetJobFinalStatus(jobClient, jobId);
        var storageAccess = GetStorageAccessObject();

        if (finalStatus.JobDetail.ExitValue == 0)
        {
            // Storage can only be reached while recording, not during playback.
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = jobClient.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Contains("(DEBUG,"));
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var errorLogs = jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(errorLogs);
                Assert.NotNull(errorTextOutput);
            }
            // A non-zero exit value means the job itself failed.
            Assert.True(false);
        }
    }
}
/// <summary>
/// Submits the pi example MapReduce job and, while recording, asserts the
/// computed output mentions "Estimated value of Pi is 3.14" (or dumps
/// error logs and fails on a non-zero exit value).
/// </summary>
public void SubmitMapReduceJob()
{
    using (var undoContext = UndoContext.Current)
    {
        undoContext.Start();

        var user = TestUtils.UserName;
        var pass = TestUtils.Password;
        var cluster = TestUtils.ClusterName;
        var creds = new BasicAuthenticationCloudCredentials { Username = user, Password = pass };
        var jobClient = TestUtils.GetHDInsightJobManagementClient(cluster, creds);

        var hadoopDefines = new Dictionary<string, string>
        {
            { "mapreduce.map.maxattempts", "10" },
            { "mapreduce.reduce.maxattempts", "10" },
            { "mapreduce.task.timeout", "60000" }
        };
        var piArguments = new List<string> { "10", "1000" };

        var submissionParams = new MapReduceJobSubmissionParameters
        {
            JarFile = "/example/jars/hadoop-mapreduce-examples.jar",
            JarClass = "pi",
            Defines = hadoopDefines,
            Arguments = piArguments
        };

        var submitResponse = jobClient.JobManagement.SubmitMapReduceJob(submissionParams);
        Assert.NotNull(submitResponse);
        Assert.Equal(submitResponse.StatusCode, HttpStatusCode.OK);

        var jobId = submitResponse.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var finalStatus = GetJobFinalStatus(jobClient, jobId);
        var storageAccess = GetStorageAccessObject();

        if (finalStatus.JobDetail.ExitValue == 0)
        {
            // Storage can only be reached while recording, not during playback.
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = jobClient.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Contains("Estimated value of Pi is 3.14"));
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var errorLogs = jobClient.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(errorLogs);
                Assert.NotNull(errorTextOutput);
            }
            // A non-zero exit value means the job itself failed.
            Assert.True(false);
        }
    }
}
// Creates an HDInsight job management client for the given cluster DNS name
// and wraps it with the test mock handler before returning it.
public static HDInsightJobManagementClient GetHDInsightJobManagementClient(string dnsName, BasicAuthenticationCloudCredentials creds)
{
    var jobClient = new HDInsightJobManagementClient(dnsName, creds);
    return AddMockHandler(ref jobClient);
}
// Submits a Sqoop job, then verifies the submitted job can be retrieved by id.
public void GetSqoopJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();
        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new SqoopJobSubmissionParameters
        {
            UserName = username,
            Command = "some command",
            StatusDir = "sqoopstatus",
        };

        var jobid = client.JobManagement.SubmitSqoopJob(parameters).JobSubmissionJsonResponse.Id;
        var response = client.JobManagement.GetJob(jobid);
        Assert.NotNull(response);
        // xUnit convention: expected value first, actual value second.
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);
    }
}
// Builds the wrapped HDInsight job management client through the shared
// Azure session client factory for the given cluster and credentials.
public AzureHdInsightJobManagementClient(string clusterName, BasicAuthenticationCloudCredentials credential)
{
    var jobClient = AzureSession.ClientFactory.CreateCustomClient<HDInsightJobManagementClient>(clusterName, credential);
    HdInsightJobManagementClient = jobClient;
}
// Submits the configured Hive query to the cluster selected via
// Use-AzureHDInsightCluster, waits for completion, and writes the job's
// stdout (or, on failure, its error log) to the pipeline.
public override void ExecuteCmdlet()
{
    // Session state written by Use-AzureHDInsightCluster.
    var endpointValue = SessionState.PSVariable.Get(UseAzureHDInsightClusterCommand.ClusterEndpoint).Value;
    var clusterCred = (PSCredential)SessionState.PSVariable.Get(UseAzureHDInsightClusterCommand.ClusterCred).Value;
    var resourceGroupValue = SessionState.PSVariable.Get(UseAzureHDInsightClusterCommand.CurrentResourceGroup).Value;

    // Validate BEFORE dereferencing: the original called ToString()/ConvertToString()
    // first, so a missing session variable produced a bare NRE and this guard
    // with its helpful message could never fire.
    if (endpointValue == null || clusterCred == null || resourceGroupValue == null)
    {
        throw new NullReferenceException(
            "The cluster or resource group specified is null. Please use the Use-AzureHDInsightCluster command to connect to a cluster.");
    }

    var clusterConnection = endpointValue.ToString();
    var resourceGroup = resourceGroupValue.ToString();

    _clusterName = clusterConnection;
    _credential = new BasicAuthenticationCloudCredentials
    {
        Username = clusterCred.UserName,
        Password = clusterCred.Password.ConvertToString()
    };

    // Build and submit the Hive job.
    var hivejob = hiveJobDefinitionCommand.GetHiveJob();
    WriteObject("Submitting hive query...");
    var startJobCommand = new StartAzureHDInsightJobCommand
    {
        ClusterName = clusterConnection,
        ResourceGroupName = resourceGroup,
        JobDefinition = hivejob,
        ClusterCredential = clusterCred
    };
    var jobCreationResult = startJobCommand.SubmitJob();
    var jobid = jobCreationResult.JobSubmissionJsonResponse.Id;
    WriteObject(string.Format("Submitted Hive query with jobDetails Id : {0}", jobid));

    // Block until the job reaches a terminal state.
    WriteProgress(new ProgressRecord(0, "Waiting for job to complete", "In Progress"));
    var waitJobCommand = new WaitAzureHDInsightJobCommand
    {
        ClusterCredential = clusterCred,
        ResourceGroupName = resourceGroup,
        ClusterName = clusterConnection,
        JobId = jobid
    };
    var job = waitJobCommand.WaitJob();

    string output;
    if (job.ExitValue == 0)
    {
        // Success: fetch the job's stdout from default storage.
        var getOutputCommand = new GetAzureHDInsightJobOutputCommand
        {
            ClusterCredential = clusterCred,
            ResourceGroupName = resourceGroup,
            ClusterName = clusterConnection,
            DefaultContainer = DefaultContainer,
            DefaultStorageAccountName = DefaultStorageAccountName,
            DefaultStorageAccountKey = DefaultStorageAccountKey,
            JobId = jobid
        };
        output = getOutputCommand.GetJobOutput();
    }
    else
    {
        // Failure: surface the job's error log instead.
        output = Convert(HDInsightJobClient.GetJobError(jobid, DefaultStorageAccountName, DefaultStorageAccountKey, DefaultContainer));
    }

    WriteObject(output);
}
// Submits a Sqoop import job driven by an options file staged in cluster
// storage, then verifies its output (or surfaces error logs on failure).
public void SubmitSqoopJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();
        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        // Before we run this test in Record mode, we should run following commands on cluster
        // hdfs dfs -mkdir /user/hcat/lib
        // hadoop fs -copyFromLocal -f /usr/share/java/sqljdbc_4.1/enu/sqljdbc41.jar /user/hcat/lib
        // Generate sqoopcommand.txt using content
        // --connect
        // <Connection string to DB which has table dept.>
        // --table
        // dept
        // Keep these in separate lines otherwise, sqoop command will fail. Copy the sqoopcommand.txt
        // hdfs dfs -mkdir /example/data/sqoop/
        // hadoop fs -copyFromLocal -f sqoopcommand.txt /example/data/sqoop/
        var parameters = new SqoopJobSubmissionParameters
        {
            LibDir = "/user/hcat/lib",
            Files = new List<string> { "/example/data/sqoop/sqoopcommand.txt" },
            Command = "import --options-file sqoopcommand.txt --hive-import -m 1",
            StatusDir = "sqoopstatus",
        };

        var response = client.JobManagement.SubmitSqoopJob(parameters);
        Assert.NotNull(response);
        // xUnit convention: expected value first, actual value second.
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var jobId = response.JobSubmissionJsonResponse.Id;
        Assert.Contains("job_", jobId, StringComparison.InvariantCulture);

        var jobStatus = GetJobFinalStatus(client, jobId);
        var storageAccess = GetStorageAccessObject();

        if (jobStatus.JobDetail.ExitValue == 0)
        {
            // Job output lives in cluster storage, so it is only reachable in Record mode.
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = client.JobManagement.GetJobOutput(jobId, storageAccess);
                string textOutput = Convert(output);
                Assert.True(textOutput.Length > 0);
            }
        }
        else
        {
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var output = client.JobManagement.GetJobErrorLogs(jobId, storageAccess);
                string errorTextOutput = Convert(output);
                Assert.NotNull(errorTextOutput);
            }

            // Fail with a diagnostic message instead of a bare Assert.True(false).
            Assert.True(false, string.Format("Sqoop job {0} failed with exit value {1}.", jobId, jobStatus.JobDetail.ExitValue));
        }
    }
}
// Submits the configured Hive query to the cluster selected via
// Use-AzureRmHDInsightCluster, waits for completion, and writes the job's
// stdout (or, on failure, its error log) to the pipeline.
public override void ExecuteCmdlet()
{
    // Session state written by Use-AzureRmHDInsightCluster.
    var endpointValue = SessionState.PSVariable.Get(UseAzureHDInsightClusterCommand.ClusterEndpoint).Value;
    var clusterCred = (PSCredential)SessionState.PSVariable.Get(UseAzureHDInsightClusterCommand.ClusterCred).Value;
    var resourceGroupValue = SessionState.PSVariable.Get(UseAzureHDInsightClusterCommand.CurrentResourceGroup).Value;

    // Validate BEFORE dereferencing: the original called ToString()/ConvertToString()
    // first, so a missing session variable produced a bare NRE and this guard
    // with its helpful message could never fire.
    if (endpointValue == null || clusterCred == null || resourceGroupValue == null)
    {
        throw new NullReferenceException(
            "The cluster or resource group specified is null. Please use the Use-AzureRmHDInsightCluster command to connect to a cluster.");
    }

    var clusterConnection = endpointValue.ToString();
    var resourceGroup = resourceGroupValue.ToString();

    _credential = new BasicAuthenticationCloudCredentials
    {
        Username = clusterCred.UserName,
        Password = clusterCred.Password.ConvertToString()
    };

    // Build and submit the Hive job.
    var hivejob = hiveJobDefinitionCommand.GetHiveJob();
    WriteObject("Submitting hive query...");
    var startJobCommand = new StartAzureHDInsightJobCommand
    {
        ClusterName = clusterConnection,
        ResourceGroupName = resourceGroup,
        JobDefinition = hivejob,
        HttpCredential = clusterCred
    };
    var jobCreationResult = startJobCommand.SubmitJob();
    var jobid = jobCreationResult.JobSubmissionJsonResponse.Id;
    WriteObject(string.Format("Submitted Hive query with jobDetails Id : {0}", jobid));

    // Block until the job reaches a terminal state.
    WriteProgress(new ProgressRecord(0, "Waiting for job to complete", "In Progress"));
    var waitJobCommand = new WaitAzureHDInsightJobCommand
    {
        HttpCredential = clusterCred,
        ResourceGroupName = resourceGroup,
        ClusterName = clusterConnection,
        JobId = jobid
    };
    var job = waitJobCommand.WaitJob();

    // The short cluster name is the endpoint's first DNS label. Guard against
    // an endpoint with no dot: IndexOf returns -1 and Substring would throw.
    var dotIndex = clusterConnection.IndexOf('.');
    _clusterName = dotIndex >= 0 ? clusterConnection.Substring(0, dotIndex) : clusterConnection;

    var getOutputCommand = new GetAzureHDInsightJobOutputCommand
    {
        HttpCredential = clusterCred,
        ResourceGroupName = resourceGroup,
        ClusterName = clusterConnection,
        DefaultContainer = DefaultContainer,
        DefaultStorageAccountName = DefaultStorageAccountName,
        DefaultStorageAccountKey = DefaultStorageAccountKey,
        JobId = jobid
    };
    var storageAccess = getOutputCommand.GetDefaultStorageAccess(resourceGroup, _clusterName);

    string output;
    if (job.ExitValue == 0)
    {
        // Success: fetch the job's stdout from default storage.
        output = getOutputCommand.GetJobOutput(storageAccess);
    }
    else
    {
        // Failure: surface the job's error log instead.
        output = getOutputCommand.GetJobError(storageAccess);
    }

    WriteObject(output);
}
// Submits a Pig job, then verifies the submitted job can be retrieved by id.
public void GetPigJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();
        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        var parameters = new PigJobSubmissionParameters
        {
            UserName = username,
            Query = "records = LOAD '/example/pig/sahara-paleo-fauna.txt' AS (DateBP:int, Loc:chararray, Coordinates:chararray, Samples:chararray, Country:chararray, Laboratory:chararray);" +
                    "filtered_records = FILTER records by Country == 'Egypt' OR Country == 'Morocco';" +
                    "grouped_records = GROUP filtered_records BY Country;" +
                    "DUMP grouped_records;"
        };

        var jobid = client.JobManagement.SubmitPigJob(parameters).JobSubmissionJsonResponse.Id;
        var response = client.JobManagement.GetJob(jobid);
        Assert.NotNull(response);
        // xUnit convention: expected value first, actual value second.
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);
    }
}
// Builds the wrapped HDInsight job management client through the shared
// Azure session client factory for the given cluster and credentials.
public AzureHdInsightJobManagementClient(string clusterName, BasicAuthenticationCloudCredentials credential)
{
    var jobClient = AzureSession.ClientFactory.CreateCustomClient<HDInsightJobManagementClient>(clusterName, credential);
    HdInsightJobManagementClient = jobClient;
}
// Submits a Hive query with defines and arguments, then verifies the
// submission response carries a Hadoop job id.
public void SubmitHiveJob()
{
    using (var context = UndoContext.Current)
    {
        context.Start();
        var username = TestUtils.UserName;
        var password = TestUtils.Password;
        var clustername = TestUtils.ClusterName;
        var credentials = new BasicAuthenticationCloudCredentials { Username = username, Password = password };
        var client = TestUtils.GetHDInsightJobManagementClient(clustername, credentials);

        // NOTE(review): "ravi" is not a real Hive execution engine (mr/tez/spark);
        // presumably a deliberate dummy value for this recording — confirm.
        var defines = new Dictionary<string, string>
        {
            {"hive.execution.engine", "ravi"},
            {"hive.exec.reducers.max", "1"}
        };
        var args = new List<string> { "argA", "argB" };

        var parameters = new HiveJobSubmissionParameters
        {
            UserName = username,
            Query = "SHOW TABLES;",
            Defines = ConvertDefinesToString(defines),
            Arguments = ConvertArgsToString(args)
        };

        var response = client.JobManagement.SubmitHiveJob(parameters);
        Assert.NotNull(response);
        // xUnit convention: expected value first, actual value second.
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);
        Assert.Contains("job_", response.JobSubmissionJsonResponse.Id, StringComparison.InvariantCulture);
    }
}