internal HDInsightJobSubmissionPocoClient(BasicAuthCredential credentials, IAbstractionContext context, bool ignoreSslErrors, string userAgentString)
{
    this.remoteCreds = credentials;
    this.context = context;
    this.ignoreSslErrors = ignoreSslErrors;
    this.userAgentString = userAgentString ?? string.Empty;
}
public HadoopJobSubmissionPocoSimulatorClient(BasicAuthCredential connectionCredentials, IAbstractionContext context, string userAgentString)
{
    this.credentials = connectionCredentials;
    var server = connectionCredentials.Server.Host.Split('.')[0];
    this.cluster = HDInsightManagementRestSimulatorClient.GetCloudServiceInternal(server);
    this.context = context;
    this.userAgentString = userAgentString;
    this.InitializeSimulator();
}
public IHadoopApplicationHistoryRestClient Create(BasicAuthCredential credentials, IAbstractionContext context, bool ignoreSslErrors)
{
    IHadoopApplicationHistoryRestReadClient readProxy = new HadoopApplicationHistoryRestReadClient(
        credentials.Server,
        new HttpRestClientConfiguration(
            new HadoopRestWebRequestHandler(credentials, ignoreSslErrors),
            new[] { new HttpLoggingHandler(context.Logger) },
            new HttpRestClientRetryPolicy(RetryPolicyFactory.CreateExponentialRetryPolicy())));

    return new HadoopApplicationHistoryRestClient(readProxy);
}
public HadoopRestWebRequestHandler(BasicAuthCredential credentials, bool ignoreSslErrors)
{
    var byteArray = Encoding.ASCII.GetBytes(credentials.UserName + ":" + credentials.Password);
    this.base64AuthorizationCreds = Convert.ToBase64String(byteArray);
    if (ignoreSslErrors)
    {
        ServicePointManager.ServerCertificateValidationCallback += (s, cert, chain, ssl) => true;
    }
}
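The constructor above only precomputes the Basic credentials; a minimal sketch of how such a handler would typically attach them to outgoing requests is shown below, assuming the class derives from System.Net.Http.DelegatingHandler (the override itself is illustrative, not taken from the original source).

// Hypothetical sketch, not from the original handler: attach the precomputed
// Basic credentials to every outgoing request in a DelegatingHandler override.
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
    // "Basic " + base64("user:password") is the standard HTTP Basic scheme expected by the cluster gateway.
    request.Headers.Authorization = new AuthenticationHeaderValue("Basic", this.base64AuthorizationCreds);
    return base.SendAsync(request, cancellationToken);
}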
internal RemoteHadoopClient(BasicAuthCredential credentials, string userAgentString)
{
    this.credentials = credentials;
    userAgentString = userAgentString ?? string.Empty;
    var assemblyVersion = "NA";
    try
    {
        assemblyVersion = Assembly.GetExecutingAssembly().GetName().Version.ToString(4);
    }
    catch
    {
    }

    this.userAgentString = string.Format(
        CultureInfo.InvariantCulture, SDKVersionUserAgentString, assemblyVersion, userAgentString);
}
private static void SubmitJobs()
{
    // Get HDInsight cluster configuration settings
    string clusterName = ConfigurationManager.AppSettings["ClusterName"];
    string userName = ConfigurationManager.AppSettings["UserName"];
    string password = ConfigurationManager.AppSettings["Password"];

    // Create basic authentication credential for cluster
    BasicAuthCredential bcred = new BasicAuthCredential();
    bcred.Server = new Uri("https://" + clusterName + ".azurehdinsight.net");
    bcred.UserName = userName;
    bcred.Password = password;

    // Create and submit Pig job
    PigJobCreateParameters pigJob = new PigJobCreateParameters()
    {
        StatusFolder = "/data/racecar/scripts/processdatastatus",
        File = "/data/racecar/scripts/processdata.pig"
    };
    var pigJobClient = JobSubmissionClientFactory.Connect(bcred);
    JobCreationResults pigJobResults = pigJobClient.CreatePigJob(pigJob);
    WaitForJobCompletion(pigJobResults, pigJobClient);

    // Create and submit Hive job
    HiveJobCreateParameters hiveJob = new HiveJobCreateParameters()
    {
        JobName = "Create Hive tables",
        StatusFolder = "/data/racecar/scripts/createtablestatus",
        File = "/data/racecar/scripts/createtables.hql"
    };
    var hiveJobClient = JobSubmissionClientFactory.Connect(bcred);
    JobCreationResults hiveJobResults = hiveJobClient.CreateHiveJob(hiveJob);
    WaitForJobCompletion(hiveJobResults, hiveJobClient);
}
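The WaitForJobCompletion helper called above is not shown in this snippet; one common shape of it is sketched below, assuming the job client exposes GetJob(jobId) returning a JobDetails whose StatusCode can be polled (the polling interval and structure are illustrative, not taken from the original source).

// Hypothetical sketch of the missing helper: poll the submitted job until it
// reaches a terminal state (Completed or Failed).
private static void WaitForJobCompletion(JobCreationResults jobResults, IJobSubmissionClient client)
{
    JobDetails jobInProgress = client.GetJob(jobResults.JobId);
    while (jobInProgress.StatusCode != JobStatusCode.Completed && jobInProgress.StatusCode != JobStatusCode.Failed)
    {
        // Re-query the job status every ten seconds until it finishes.
        Thread.Sleep(TimeSpan.FromSeconds(10));
        jobInProgress = client.GetJob(jobInProgress.JobId);
    }
}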
public void DisableHttpServices_GetJobHistory()
{
    var testCluster = GetHttpAccessDisabledCluster();
    var connectionCredentials = new BasicAuthCredential()
    {
        Server = new Uri(testCluster.ConnectionUrl),
        Password = testCluster.HttpPassword,
        UserName = testCluster.HttpUserName
    };
    var jobSubmissionClient = new HDInsightJobSubmissionPocoClient(connectionCredentials, GetAbstractionContext(), false, customUserAgent);
    jobSubmissionClient.ListJobs().WaitForResult();
}
public void ValidMapReduceJobSubmissionTest()
{
    var remoteConnectionCredentials = new BasicAuthCredential()
    {
        UserName = IntegrationTestBase.TestCredentials.AzureUserName,
        Password = IntegrationTestBase.TestCredentials.AzurePassword,
        Server = new Uri(IntegrationTestBase.TestCredentials.WellKnownCluster.Cluster)
    };
    var hadoopClient = JobSubmissionClientFactory.Connect(remoteConnectionCredentials);
    var mapReduceJob = new MapReduceJobCreateParameters()
    {
        ClassName = "pi",
        JobName = "pi estimation jobDetails",
        JarFile = "/example/hadoop-examples.jar",
        StatusFolder = "/piresults"
    };
    mapReduceJob.Arguments.Add("16");
    mapReduceJob.Arguments.Add("10000");

    var jobCreationDetails = hadoopClient.CreateMapReduceJob(mapReduceJob);
    Assert.IsNull(jobCreationDetails.ErrorCode, "Should not fail mr jobDetails submission");
    Assert.IsNotNull(jobCreationDetails.JobId, "Should have a non-null jobDetails id");
}
public void ICanSubmitASqoopJob()
{
    var factory = new MockRemotePocoLayerFactory();
    var pocoMock = new MockRemotePoco();
    pocoMock.JobId = "54321";
    factory.Mock = pocoMock;
    ServiceLocator.Instance.Locate<IServiceLocationIndividualTestManager>().Override<IRemoteHadoopJobSubmissionPocoClientFactory>(factory);

    // var creds = new JobSubmissionCertificateCredential(Guid.NewGuid(), null, "someCluster");
    var creds = new BasicAuthCredential() { Password = "******", Server = new Uri("http://somewhere"), UserName = "******" };
    var poco = new HDInsightJobSubmissionPocoClient(creds, GetAbstractionContext(), false, pocoMock.GetUserAgentString());
    var task = poco.SubmitSqoopJob(new SqoopJobCreateParameters() { Command = "load remote;" });
    task.Wait();

    Assert.AreEqual("54321", task.Result.JobId);
    Assert.IsTrue(pocoMock.SubmitSqoopJobCalled);
}
internal BasicAuthCredential GetRemoteHadoopCredential()
{
    var azureTestCredentials = GetCredentials("hadoop");
    var credentials = new BasicAuthCredential()
    {
        Server = new Uri(azureTestCredentials.WellKnownCluster.Cluster),
        UserName = azureTestCredentials.AzureUserName,
        Password = azureTestCredentials.AzurePassword
    };
    return credentials;
}
public AzureHDInsightClusterConfigurationAccessor(BasicAuthCredential connectionCredentials) { this.credentials = connectionCredentials; }
public AzureHdInsightConfigurationRestClient(BasicAuthCredential credentials) { this.credentials = credentials; }
public AzureHDInsightClusterConfigurationAccessorSimulator(IJobSubmissionClientCredential credentials)
{
    var remoteCreds = credentials as BasicAuthCredential;
    Assert.IsNotNull(remoteCreds);
    this.credentials = remoteCreds;
}
internal static void ValidateClusterConfiguration(ClusterDetails testCluster, ClusterCreateParameters cluster)
{
    var remoteCreds = new BasicAuthCredential()
    {
        Server = GatewayUriResolver.GetGatewayUri(new Uri(testCluster.ConnectionUrl).Host),
        UserName = testCluster.HttpUserName,
        Password = testCluster.HttpPassword
    };

    var configurationAccessor = ServiceLocator.Instance.Locate<IAzureHDInsightClusterConfigurationAccessorFactory>().Create(remoteCreds);

    var coreConfiguration = configurationAccessor.GetCoreServiceConfiguration().WaitForResult();
    ValidateConfiguration(cluster.CoreConfiguration, coreConfiguration);

    var hdfsConfiguration = configurationAccessor.GetHdfsServiceConfiguration().WaitForResult();
    ValidateConfiguration(cluster.HdfsConfiguration, hdfsConfiguration);

    var mapReduceConfiguration = configurationAccessor.GetMapReduceServiceConfiguration().WaitForResult();
    ValidateConfiguration(cluster.MapReduceConfiguration.ConfigurationCollection, mapReduceConfiguration);

    var hiveConfiguration = configurationAccessor.GetHiveServiceConfiguration().WaitForResult();
    ValidateConfiguration(cluster.HiveConfiguration.ConfigurationCollection, hiveConfiguration);

    var oozieConfiguration = configurationAccessor.GetOozieServiceConfiguration().WaitForResult();
    ValidateConfiguration(cluster.OozieConfiguration.ConfigurationCollection, oozieConfiguration);
}
public void ICanNotSubmitAJobWithTheIncorectCredintials()
{
    IHDInsightCertificateCredential hdInsightCredentials = IntegrationTestBase.GetValidCredentials();
    var client = ServiceLocator.Instance.Locate<IHDInsightClientFactory>()
                               .Create(new HDInsightCertificateCredential(hdInsightCredentials.SubscriptionId, hdInsightCredentials.Certificate));
    var manager = ServiceLocator.Instance.Locate<IHDInsightManagementPocoClientFactory>();
    var pocoClient = manager.Create(hdInsightCredentials, GetAbstractionContext(), false);
    var clusterDetails = GetRandomCluster();
    client.CreateCluster(clusterDetails);

    try
    {
        ClusterDetails cluster = pocoClient.ListContainer(clusterDetails.Name).WaitForResult();
        BasicAuthCredential hadoopCredentials = new BasicAuthCredential()
        {
            Server = GatewayUriResolver.GetGatewayUri(cluster.ConnectionUrl),
            UserName = clusterDetails.UserName,
            Password = clusterDetails.Password
        };
        var hadoopClient = JobSubmissionClientFactory.Connect(hadoopCredentials);
        var mapReduceJob = new MapReduceJobCreateParameters()
        {
            ClassName = "pi",
            JobName = "pi estimation jobDetails",
            JarFile = "/example/hadoop-examples.jar",
            StatusFolder = "/piresults"
        };
        mapReduceJob.Arguments.Add("16");
        mapReduceJob.Arguments.Add("10000");
        var jobCreationDetails = hadoopClient.CreateMapReduceJob(mapReduceJob);

        var id = pocoClient.DisableHttp(clusterDetails.Name, clusterDetails.Location).WaitForResult();
        while (!pocoClient.IsComplete(cluster.Name, cluster.Location, id).WaitForResult())
        {
            Thread.Sleep(500);
        }

        // now add a user
        string userName = "******";
        string password = GetRandomValidPassword();
        id = pocoClient.EnableHttp(clusterDetails.Name, clusterDetails.Location, userName, password).WaitForResult();
        while (!pocoClient.IsComplete(cluster.Name, cluster.Location, id).WaitForResult())
        {
            Thread.Sleep(500);
        }

        jobCreationDetails = hadoopClient.CreateMapReduceJob(mapReduceJob);
        Assert.Fail("This test expected an exception but did not receive one.");
    }
    catch (UnauthorizedAccessException ex)
    {
        Help.DoNothing(ex);
    }
    finally
    {
        // delete the cluster
        client.DeleteCluster(clusterDetails.Name);
    }
}
public static IJobSubmissionClientCredential GetJobSubmissionClientCredentials(
    this IAzureHDInsightJobCommandCredentialsBase command,
    AzureSubscription currentSubscription,
    AzureEnvironment environment,
    string cluster,
    AzureProfile profile)
{
    IJobSubmissionClientCredential clientCredential = null;
    if (command.Credential != null)
    {
        clientCredential = new BasicAuthCredential
        {
            Server = GatewayUriResolver.GetGatewayUri(cluster),
            UserName = command.Credential.UserName,
            Password = command.Credential.GetCleartextPassword()
        };
    }
    else if (currentSubscription.IsNotNull())
    {
        var subscriptionCredentials = GetSubscriptionCredentials(command, currentSubscription, environment, profile);
        var asCertificateCredentials = subscriptionCredentials as HDInsightCertificateCredential;
        var asTokenCredentials = subscriptionCredentials as HDInsightAccessTokenCredential;
        if (asCertificateCredentials.IsNotNull())
        {
            clientCredential = new JobSubmissionCertificateCredential(asCertificateCredentials, cluster);
        }
        else if (asTokenCredentials.IsNotNull())
        {
            clientCredential = new JobSubmissionAccessTokenCredential(asTokenCredentials, cluster);
        }
    }

    return clientCredential;
}
private static BasicAuthCredential GetClusterHttpCredentials(ClusterDetails cluster)
{
    if (string.IsNullOrEmpty(cluster.ConnectionUrl))
    {
        throw new InvalidOperationException("Unable to connect to cluster as connection url is missing or empty.");
    }

    if (string.IsNullOrEmpty(cluster.HttpUserName) || string.IsNullOrEmpty(cluster.HttpPassword))
    {
        throw new InvalidOperationException("Unable to connect to cluster as cluster username and/or password are missing or empty.");
    }

    BasicAuthCredential clusterCreds = new BasicAuthCredential()
    {
        Server = new Uri(cluster.ConnectionUrl),
        UserName = cluster.HttpUserName,
        Password = cluster.HttpPassword
    };
    return clusterCreds;
}
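A short usage sketch follows, assuming a ClusterDetails instance named "cluster" has already been retrieved elsewhere (the variable names are illustrative, not from the original source).

// Hypothetical usage of the helper above: build the Basic credential,
// connect a job submission client, and list the cluster's jobs.
var clusterCreds = GetClusterHttpCredentials(cluster);
var jobClient = JobSubmissionClientFactory.Connect(clusterCreds);
var runningJobs = jobClient.ListJobs();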
public void ICanListJobs()
{
    var expectedJob = new JobDetails()
    {
        ExitCode = 12,
        Name = "some jobDetails",
        StatusCode = JobStatusCode.Completed,
        JobId = "2345"
    };
    var factory = new MockRemotePocoLayerFactory();
    var pocoMock = new MockRemotePoco { JobId = string.Empty, JobDetails = expectedJob };
    factory.Mock = pocoMock;
    ServiceLocator.Instance.Locate<IServiceLocationIndividualTestManager>().Override<IRemoteHadoopJobSubmissionPocoClientFactory>(factory);

    // var creds = new JobSubmissionCertificateCredential(Guid.NewGuid(), null, "someCluster");
    var creds = new BasicAuthCredential() { Password = "******", Server = new Uri("http://somewhere"), UserName = "******" };
    var poco = new HDInsightJobSubmissionPocoClient(creds, GetAbstractionContext(), false, pocoMock.GetUserAgentString());
    var task = poco.ListJobs();
    task.Wait();

    Assert.IsNotNull(task.Result);
    Assert.IsTrue(pocoMock.ListJobsCalled);
    Assert.AreEqual(1, task.Result.Jobs.Count);
    var job = task.Result.Jobs.First();
    Assert.AreEqual("2345", job.JobId);
    Assert.AreEqual(expectedJob, job);
}
public void EnableHttpServices_GetJobHistory()
{
    var testCluster = GetHttpAccessEnabledCluster(false);
    var connectionCredentials = new BasicAuthCredential()
    {
        Server = new Uri(testCluster.ConnectionUrl),
        Password = testCluster.HttpPassword,
        UserName = testCluster.HttpUserName
    };
    var jobSubmissionClient = new HDInsightJobSubmissionPocoClient(connectionCredentials, GetAbstractionContext(), false, customUserAgent);
    var jobHistory = jobSubmissionClient.ListJobs().WaitForResult();
    var expectedJobHistory = SyncClientScenarioTests.GetJobHistory(connectionCredentials.Server.OriginalString);
    Assert.AreEqual(jobHistory.Jobs.Count, expectedJobHistory.Jobs.Count);
}