/// <summary>
/// Converts this PowerShell object into the equivalent SDK type.
/// </summary>
/// <returns>An SDK MapReduce job creation parameters object.</returns>
internal MapReduceJobCreateParameters ToMapReduceJobCreateParameters()
{
    var sdkParameters = new MapReduceJobCreateParameters
    {
        ClassName = this.ClassName,
        JarFile = this.JarFile,
        JobName = this.JobName,
        StatusFolder = this.StatusFolder
    };

    // Each collection is optional; copy it over only when it was supplied.
    if (this.Arguments.IsNotNull())
    {
        sdkParameters.Arguments.AddRange(this.Arguments);
    }

    if (this.Defines.IsNotNull())
    {
        sdkParameters.Defines.AddRange(this.Defines);
    }

    if (this.Files.IsNotNull())
    {
        sdkParameters.Files.AddRange(this.Files);
    }

    if (this.LibJars.IsNotNull())
    {
        sdkParameters.LibJars.AddRange(this.LibJars);
    }

    return sdkParameters;
}
/// <summary>
/// Verifies that the New-AzureHDInsightMapReduceJobDefinition cmdlet
/// round-trips the job name, class name, jar file and lib jars.
/// </summary>
public void ICanCallThe_New_HDInsightMapReduceJobDefinitionCmdlet()
{
    var expected = new MapReduceJobCreateParameters
    {
        JobName = "pi estimation jobDetails",
        ClassName = "pi",
        JarFile = TestConstants.WabsProtocolSchemeName + "container@hostname/examples.jar",
    };
    expected.LibJars.Add("some.jarfile.jar");

    using (IRunspace runspace = this.GetPowerShellRunspace())
    {
        IPipelineResult results =
            runspace.NewPipeline()
                    .AddCommand(CmdletConstants.NewAzureHDInsightMapReduceJobDefinition)
                    .WithParameter(CmdletConstants.JobName, expected.JobName)
                    .WithParameter(CmdletConstants.JarFile, expected.JarFile)
                    .WithParameter(CmdletConstants.ClassName, expected.ClassName)
                    .WithParameter(CmdletConstants.LibJars, expected.LibJars)
                    .Invoke();

        // Exactly one job definition object should come back from the pipeline.
        Assert.AreEqual(1, results.Results.Count);
        AzureHDInsightMapReduceJobDefinition actual =
            results.Results.ToEnumerable<AzureHDInsightMapReduceJobDefinition>().First();

        Assert.AreEqual(expected.JobName, actual.JobName);
        Assert.AreEqual(expected.ClassName, actual.ClassName);
        Assert.AreEqual(expected.JarFile, actual.JarFile);
        Assert.AreEqual(expected.LibJars.Count, actual.LibJars.Count);
        Assert.AreEqual(expected.LibJars.First(), actual.LibJars.First());
    }
}
/// <summary>
/// Verifies that a MapReduce job definition produced by the cmdlet can be
/// started via the job-submission pipeline.
/// </summary>
public virtual void ICanCallThe_NewMapReduceJob_Then_Start_HDInsightJobsCmdlet()
{
    var definition = new MapReduceJobCreateParameters
    {
        JobName = "pi estimation jobDetails",
        ClassName = "pi",
        JarFile = "/example/hadoop-examples.jar"
    };
    definition.Arguments.Add("16");
    definition.Arguments.Add("10000");

    using (IRunspace runspace = this.GetPowerShellRunspace())
    {
        IPipelineResult results =
            runspace.NewPipeline()
                    .AddCommand(CmdletConstants.NewAzureHDInsightMapReduceJobDefinition)
                    .WithParameter(CmdletConstants.JobName, definition.JobName)
                    .WithParameter(CmdletConstants.JarFile, definition.JarFile)
                    .WithParameter(CmdletConstants.ClassName, definition.ClassName)
                    .WithParameter(CmdletConstants.Arguments, definition.Arguments)
                    .Invoke();

        Assert.AreEqual(1, results.Results.Count);
        AzureHDInsightMapReduceJobDefinition jobFromPowershell =
            results.Results.ToEnumerable<AzureHDInsightMapReduceJobDefinition>().First();

        // Submit the definition and exercise the job lifecycle.
        RunJobInPowershell(runspace, jobFromPowershell);
    }
}
/// <summary>
/// Verifies that the New-MapReduce-Definition command preserves job
/// arguments in the definition it outputs.
/// </summary>
public void CanCreateNewMapReduceDefinition_WithArguments()
{
    var expected = new MapReduceJobCreateParameters
    {
        JobName = "pi estimation jobDetails",
        ClassName = "pi",
        JarFile = TestConstants.WabsProtocolSchemeName + "container@hostname/examples.jar"
    };
    expected.Arguments.Add("16");
    expected.Arguments.Add("10000");

    INewAzureHDInsightMapReduceJobDefinitionCommand command =
        ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewMapReduceDefinition();
    command.JobName = expected.JobName;
    command.JarFile = expected.JarFile;
    command.ClassName = expected.ClassName;
    command.Arguments = expected.Arguments.ToArray();
    command.EndProcessing();

    AzureHDInsightMapReduceJobDefinition actual = command.Output.ElementAt(0);
    Assert.AreEqual(expected.JobName, actual.JobName);
    Assert.AreEqual(expected.ClassName, actual.ClassName);
    Assert.AreEqual(expected.JarFile, actual.JarFile);

    // Every argument supplied to the command must appear in the output.
    foreach (string argument in expected.Arguments)
    {
        Assert.IsTrue(
            actual.Arguments.Any(arg => string.Equals(argument, arg)),
            "Unable to find argument '{0}' in value returned from command",
            argument);
    }
}
/// <inheritdoc />
public async Task<JobCreationResults> CreateMapReduceJobAsync(MapReduceJobCreateParameters mapReduceJobCreateParameters)
{
    // Build a poco client bound to the current credentials/context and delegate
    // the actual submission to it.
    var pocoClient = ServiceLocator.Instance
        .Locate<IRemoteHadoopJobSubmissionPocoClientFactory>()
        .Create(this.credentials, this.Context, this.IgnoreSslErrors, this.userAgentString);
    return await pocoClient.SubmitMapReduceJob(mapReduceJobCreateParameters);
}
/// <summary>
/// Verifies that the serialized MapReduce request contains the job name
/// as an escaped define parameter.
/// </summary>
public void CanSerializeValidJobRequest_JobName()
{
    var job = new MapReduceJobCreateParameters { JobName = "pi estimation jobDetails" };

    var converter = new PayloadConverterBase();
    var payload = converter.SerializeMapReduceRequest("hadoop", job);

    var expectedDefine = Uri.EscapeDataString(
        string.Format("{0}={1}", WebHCatConstants.DefineJobName, job.JobName));
    Assert.IsTrue(payload.Contains(expectedDefine));
}
/// <summary>
/// Verifies that a MapReduce request with a callback URL serializes both
/// the job name define and the escaped callback parameter.
/// </summary>
public void CanSerializeValidJobRequestWithCallback()
{
    var job = new MapReduceJobCreateParameters
    {
        JobName = "pi estimation jobDetails",
        Callback = "http://someball.com/$jobid/notvalid"
    };

    var converter = new PayloadConverterBase();
    var payload = converter.SerializeMapReduceRequest("hadoop", job);

    var expectedDefine = Uri.EscapeDataString(
        string.Format("{0}={1}", WebHCatConstants.DefineJobName, job.JobName));
    Assert.IsTrue(payload.Contains(expectedDefine));

    // The callback key and value are escaped independently in the payload.
    var expectedCallback = string.Format(
        "{0}={1}",
        Uri.EscapeDataString(WebHCatConstants.Callback),
        Uri.EscapeDataString(job.Callback));
    Assert.IsTrue(payload.Contains(expectedCallback));
}
/// <summary>
/// Verifies that a definition built by the New-MapReduce-Definition
/// command can be run through the full job lifecycle (start and stop).
/// </summary>
public void CanCreateNewMapReduceJob_StartJob_StopJob()
{
    var expected = new MapReduceJobCreateParameters
    {
        JobName = "pi estimation job",
        ClassName = "pi",
        JarFile = TestConstants.WabsProtocolSchemeName + "container@hostname/examples.jar"
    };

    INewAzureHDInsightMapReduceJobDefinitionCommand command =
        ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewMapReduceDefinition();
    command.JobName = expected.JobName;
    command.JarFile = expected.JarFile;
    command.ClassName = expected.ClassName;
    command.EndProcessing();

    AzureHDInsightMapReduceJobDefinition jobFromCommand = command.Output.ElementAt(0);
    TestJobLifecycle(jobFromCommand);
}
/// <summary>
/// Verifies that the New-AzureHDInsightMapReduceJobDefinition cmdlet
/// preserves the supplied job arguments in its output.
/// </summary>
public void ICanCallThe_New_HDInsightMapReduceJobDefinitionCmdlet_WithArguments()
{
    var expected = new MapReduceJobCreateParameters
    {
        JobName = "pi estimation jobDetails",
        ClassName = "pi",
        JarFile = TestConstants.WabsProtocolSchemeName + "container@hostname/examples.jar"
    };
    expected.Arguments.Add("16");
    expected.Arguments.Add("10000");

    using (IRunspace runspace = this.GetPowerShellRunspace())
    {
        IPipelineResult results =
            runspace.NewPipeline()
                    .AddCommand(CmdletConstants.NewAzureHDInsightMapReduceJobDefinition)
                    .WithParameter(CmdletConstants.JobName, expected.JobName)
                    .WithParameter(CmdletConstants.JarFile, expected.JarFile)
                    .WithParameter(CmdletConstants.ClassName, expected.ClassName)
                    .WithParameter(CmdletConstants.Arguments, expected.Arguments)
                    .Invoke();

        Assert.AreEqual(1, results.Results.Count);
        AzureHDInsightMapReduceJobDefinition actual =
            results.Results.ToEnumerable<AzureHDInsightMapReduceJobDefinition>().First();

        Assert.AreEqual(expected.JobName, actual.JobName);
        Assert.AreEqual(expected.ClassName, actual.ClassName);
        Assert.AreEqual(expected.JarFile, actual.JarFile);

        // Each supplied argument must survive the cmdlet round trip.
        foreach (string argument in expected.Arguments)
        {
            Assert.IsTrue(
                actual.Arguments.Any(arg => string.Equals(argument, arg)),
                "Unable to find argument '{0}' in value returned from powershell",
                argument);
        }
    }
}
/// <summary>
/// Stores a MapReduce job request transfer object whose string
/// properties are all set to empty values.
/// </summary>
public void GivenIHaveAMapReduceJobRequestObject()
{
    this.transferObject = new MapReduceJobCreateParameters
    {
        ClassName = string.Empty,
        JarFile = string.Empty,
        JobName = string.Empty,
        StatusFolder = string.Empty,
    };
}
/// <summary>
/// Simulates MapReduce job submission, recording that the call was made
/// and returning a canned result with the configured job id.
/// </summary>
/// <param name="details">The job parameters (ignored by this test double).</param>
/// <returns>A completed task carrying the fake job creation results.</returns>
public Task<JobCreationResults> SubmitMapReduceJob(MapReduceJobCreateParameters details)
{
    this.SubmitMapReduceJobCalled = true;
    var job = new JobCreationResults() { JobId = JobId };
    // Task.FromResult returns an already-completed task; the original
    // Task.Run(() => job) needlessly scheduled a thread-pool work item
    // just to produce a value that is already available.
    return Task.FromResult(job);
}
/// <inheritdoc />
public async Task<JobCreationResults> SubmitMapReduceJob(MapReduceJobCreateParameters details)
{
    // Resolve the factory first, then bind a client to this instance's
    // credentials and context before delegating the submission.
    var factory = ServiceLocator.Instance.Locate<IRemoteHadoopJobSubmissionPocoClientFactory>();
    var remoteClient = factory.Create(this.remoteCreds, this.context, this.ignoreSslErrors, this.GetUserAgentString());
    return await remoteClient.SubmitMapReduceJob(details);
}
/// <summary>
/// Handles a job-submission message: connects to blob storage, builds a
/// MapReduce job definition for the user's input file, updates a status
/// blob, submits the job to the HDInsight cluster, and blocks until the
/// job completes. Any failure is logged and rethrown.
/// </summary>
/// <param name="sender">The event source (unused).</param>
/// <param name="e">Message carrying the user ids and the input file name.</param>
public void ServiceHost_JobSubmissionRecieved(object sender, JobSubmissionMessage e)
{
    Trace.WriteLine("JobSubmissionRecieved Recieved User Id : " + e.idUsuario, "Warning");
    try
    {
        // Get the storage account (used below to update metadata blobs).
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(
            CloudConfigurationManager.GetSetting("StorageConnectionString"));
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference(VariablesConfiguracion.containerName);

        // Read the settings needed to connect to the cluster.
        // NOTE(review): certFriendlyName is read but never used in this method.
        string subscriptionID = VariablesConfiguracion.subscriptionID;
        string certFriendlyName = VariablesConfiguracion.certFriendlyName;
        string clusterName = VariablesConfiguracion.clusterName;

        // Define the MapReduce job; the status folder is unique per user/sub-id.
        MapReduceJobCreateParameters mrJobDefinition = new MapReduceJobCreateParameters()
        {
            JarFile = "wasb:///CienciaCelularMR.jar",
            ClassName = "Main",
            StatusFolder = "wasb:///scicluster/test/status-" + e.idUsuario + "." + e.subIdUsuario,
        };

        // args[0] = input blob, args[1] = per-user output folder.
        mrJobDefinition.Arguments.Add("wasb:///" + e.nomEntrada);
        mrJobDefinition.Arguments.Add("wasb:///scicluster/test/output-" + e.idUsuario + "." + e.subIdUsuario);

        // Look up the management certificate by thumbprint in the current
        // user's store and build certificate credentials for the cluster.
        X509Store store = new X509Store();
        store.Open(OpenFlags.ReadOnly);
        X509Certificate2 cert = FindCertificate(StoreLocation.CurrentUser, StoreName.My, X509FindType.FindByThumbprint, VariablesConfiguracion.thumbprint);
        JobSubmissionCertificateCredential creds = new JobSubmissionCertificateCredential(new Guid(subscriptionID), cert, clusterName);

        // Create a Hadoop client to talk to HDInsight.
        var jobClient = JobSubmissionClientFactory.Connect(creds);

        // Update metadata: mark the user's simulation as starting.
        CloudBlockBlob infoSimulation = container.GetBlockBlobReference(VariablesConfiguracion.infoSimulation + "-" + e.idUsuario);
        infoSimulation.UploadText(VariablesConfiguracion.JOB_STARTING);

        // Launch the MapReduce job (synchronous submission call).
        JobCreationResults mrJobResults = jobClient.CreateMapReduceJob(mrJobDefinition);

        // Block until the job has finished running.
        WaitForJobCompletion(mrJobResults, jobClient, e.idUsuario, e.subIdUsuario);
    }
    catch (Exception ex)
    {
        // Log and rethrow (throw; preserves the stack trace).
        Trace.TraceError(ex.Message);
        throw;
    }
}
/// <summary>
/// Simulated MapReduce submission: a well-known job name triggers a
/// server-style failure; otherwise the request is validated and a
/// successful creation result is returned.
/// </summary>
public Task<JobCreationResults> SubmitMapReduceJob(MapReduceJobCreateParameters mapReduceJob)
{
    // Sentinel job name used by tests to simulate a bad request from the server.
    if (mapReduceJob.JobName == "1456577")
    {
        throw new HttpLayerException(
            HttpStatusCode.BadRequest,
            "{ \"error\": \"File /example/files/WordCount.jar does not exist.\"}");
    }

    mapReduceJob.JarFile.ArgumentNotNullOrEmpty("JarFile");
    mapReduceJob.ClassName.ArgumentNotNullOrEmpty("ClassName");

    var successResult = this.CreateJobSuccessResult(mapReduceJob, mapReduceJob.JobName);
    return Task.FromResult(successResult);
}
/// <summary>
/// Verifies that the jar file and class name are serialized as escaped
/// parameters in the MapReduce request payload.
/// </summary>
public void CanSerializeValidMapReduceJobRequest()
{
    var job = new MapReduceJobCreateParameters
    {
        JarFile = "/example/hadoop-examples.jar",
        ClassName = "pi"
    };

    var converter = new PayloadConverterBase();
    var payload = converter.SerializeMapReduceRequest("hadoop", job);

    var expectedJar = string.Format("{0}={1}", WebHCatConstants.Jar, Uri.EscapeDataString(job.JarFile));
    var expectedClass = string.Format("{0}={1}", WebHCatConstants.Class, Uri.EscapeDataString(job.ClassName));
    Assert.IsTrue(payload.Contains(expectedJar));
    Assert.IsTrue(payload.Contains(expectedClass));
}
/// <summary>
/// Deserializes a serialized payload back into MapReduce job creation
/// parameters, copying arguments, defines and standard properties.
/// </summary>
/// <param name="content">The serialized job request payload.</param>
/// <returns>The reconstructed MapReduce job creation parameters.</returns>
public MapReduceJobCreateParameters DeserializeMapReduceJobCreationDetails(string content)
{
    var request = this.DeserializePayload(content);

    var details = new MapReduceJobCreateParameters()
    {
        ClassName = request.ApplicationName,
        JarFile = request.JarFile,
        JobName = request.JobName,
        StatusFolder = request.OutputStorageLocation,
    };

    details.Arguments.AddRange(request.Arguments);

    // Request parameters map onto Hadoop "defines" (key/value config pairs).
    foreach (var parameter in request.Parameters)
    {
        details.Defines.Add(parameter.Key, parameter.Value.ToString());
    }

    this.SetStandardProperties(request, details);
    return details;
}
/// <summary>
/// Verifies that both arguments and defines appear in the serialized
/// MapReduce request payload.
/// </summary>
public void CanSerializeValidJobRequest_Arguments_And_Defines()
{
    var piJob = new MapReduceJobCreateParameters { JarFile = "hadoop-examples.jar" };
    piJob.Arguments.Add("16");
    piJob.Arguments.Add("10000");
    piJob.Defines.Add("map.red.tasks", "1000");
    piJob.Defines.Add("other.tasks", "1000");

    var converter = new PayloadConverterBase();
    var payload = converter.SerializeMapReduceRequest("hadoop", piJob);

    // Each argument is serialized as an "arg=<value>" pair.
    foreach (var argument in piJob.Arguments)
    {
        Assert.IsTrue(payload.Contains(string.Format("{0}={1}", WebHCatConstants.Arg, argument)));
    }

    // Each define is serialized as an escaped "key=value" pair.
    int defineCounter = 0;
    foreach (var define in piJob.Defines)
    {
        defineCounter++;
        var escapedDefine = Uri.EscapeDataString(string.Format("{0}={1}", define.Key, define.Value));
        Assert.IsTrue(payload.Contains(escapedDefine));
    }
    Assert.AreEqual(piJob.Defines.Count, defineCounter);
}
/// <summary>
/// Verifies that every define key/value pair is serialized (escaped) into
/// the MapReduce request payload.
/// </summary>
public void CanSerializeValidJobRequest_Defines()
{
    var job = new MapReduceJobCreateParameters { JobName = "Define counter test" };
    job.Defines.Add(new KeyValuePair<string, string>("map.input.tasks", "1000"));
    job.Defines.Add(new KeyValuePair<string, string>("map.input.mappers", "6"));
    job.Defines.Add(new KeyValuePair<string, string>("map.input.reducers", "16"));

    var converter = new PayloadConverterBase();
    var payload = converter.SerializeMapReduceRequest("hadoop", job);

    int defineCounter = 0;
    foreach (var define in job.Defines)
    {
        defineCounter++;
        var escapedDefine = Uri.EscapeDataString(string.Format("{0}={1}", define.Key, define.Value));
        Assert.IsTrue(payload.Contains(escapedDefine));
    }
    Assert.AreEqual(job.Defines.Count, defineCounter);
}
/// <summary>
/// Integration test: after a job has been submitted successfully, HTTP
/// access on the cluster is disabled and then re-enabled with DIFFERENT
/// credentials; resubmitting with the stale client must then fail with
/// UnauthorizedAccessException. The cluster is always deleted on exit.
/// </summary>
public void ICanNotSubmitAJobWithTheIncorectCredintials()
{
    IHDInsightCertificateCredential hdInsightCredentials = IntegrationTestBase.GetValidCredentials();
    var client = ServiceLocator.Instance.Locate<IHDInsightClientFactory>().Create(new HDInsightCertificateCredential(hdInsightCredentials.SubscriptionId, hdInsightCredentials.Certificate));
    var manager = ServiceLocator.Instance.Locate<IHDInsightManagementPocoClientFactory>();
    var pocoClient = manager.Create(hdInsightCredentials, GetAbstractionContext(), false);
    var clusterDetails = GetRandomCluster();
    client.CreateCluster(clusterDetails);
    try
    {
        ClusterDetails cluster = pocoClient.ListContainer(clusterDetails.Name).WaitForResult();
        // Build basic-auth credentials against the cluster gateway using the
        // original user/password the cluster was created with.
        BasicAuthCredential hadoopCredentials = new BasicAuthCredential()
        {
            Server = GatewayUriResolver.GetGatewayUri(cluster.ConnectionUrl),
            UserName = clusterDetails.UserName,
            Password = clusterDetails.Password
        };
        var hadoopClient = JobSubmissionClientFactory.Connect(hadoopCredentials);
        var mapReduceJob = new MapReduceJobCreateParameters()
        {
            ClassName = "pi",
            JobName = "pi estimation jobDetails",
            JarFile = "/example/hadoop-examples.jar",
            StatusFolder = "/piresults"
        };
        mapReduceJob.Arguments.Add("16");
        mapReduceJob.Arguments.Add("10000");
        // First submission with valid credentials is expected to succeed.
        var jobCreationDetails = hadoopClient.CreateMapReduceJob(mapReduceJob);
        // Disable HTTP access and poll until the operation completes.
        var id = pocoClient.DisableHttp(clusterDetails.Name, clusterDetails.Location).WaitForResult();
        while (!pocoClient.IsComplete(cluster.Name, cluster.Location, id).WaitForResult())
        {
            Thread.Sleep(500);
        }
        // now add a user (different from the one hadoopClient still holds)
        string userName = "******";
        string password = GetRandomValidPassword();
        id = pocoClient.EnableHttp(clusterDetails.Name, clusterDetails.Location, userName, password).WaitForResult();
        while (!pocoClient.IsComplete(cluster.Name, cluster.Location, id).WaitForResult())
        {
            Thread.Sleep(500);
        }
        // Resubmitting with the stale credentials must now be rejected.
        jobCreationDetails = hadoopClient.CreateMapReduceJob(mapReduceJob);
        Assert.Fail("This test expected an exception but did not receive one.");
    }
    catch (UnauthorizedAccessException ex)
    {
        // Expected outcome: the stale credentials were rejected.
        Help.DoNothing(ex);
    }
    finally
    {
        // delete the cluster
        client.DeleteCluster(clusterDetails.Name);
    }
}
/// <inheritdoc />
public JobCreationResults CreateMapReduceJob(MapReduceJobCreateParameters mapReduceJobCreateParameters)
{
    // Synchronous facade over the async implementation.
    return this.CreateMapReduceJobAsync(mapReduceJobCreateParameters).WaitForResult();
}
/// <summary>
/// Verifies that the New-MapReduce-Definition command preserves define
/// (parameter) key/value pairs in the definition it outputs.
/// </summary>
public void CanCreateNewMapReduceDefinition_WithParameters()
{
    var expected = new MapReduceJobCreateParameters
    {
        JobName = "pi estimation jobDetails",
        ClassName = "pi",
        JarFile = TestConstants.WabsProtocolSchemeName + "container@hostname/examples.jar"
    };
    expected.Defines.Add("map.input.tasks", "1000");
    expected.Defines.Add("map.input.reducers", "1000");

    INewAzureHDInsightMapReduceJobDefinitionCommand command =
        ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewMapReduceDefinition();
    command.JobName = expected.JobName;
    command.JarFile = expected.JarFile;
    command.ClassName = expected.ClassName;
    foreach (var define in expected.Defines)
    {
        command.Defines.Add(define.Key, define.Value);
    }
    command.EndProcessing();

    AzureHDInsightMapReduceJobDefinition actual = command.Output.ElementAt(0);
    Assert.AreEqual(expected.JobName, actual.JobName);
    Assert.AreEqual(expected.ClassName, actual.ClassName);
    Assert.AreEqual(expected.JarFile, actual.JarFile);

    // Every define pair supplied to the command must appear in the output.
    foreach (var parameter in expected.Defines)
    {
        Assert.IsTrue(
            actual.Defines.Any(arg => string.Equals(parameter.Key, arg.Key) && string.Equals(parameter.Value, arg.Value)),
            "Unable to find parameter '{0}' in value returned from command",
            parameter.Key);
    }
}
/// <summary>
/// Submits a pi-estimation MapReduce job to the well-known test cluster
/// using basic-auth credentials and verifies the submission succeeds.
/// </summary>
public void ValidMapReduceJobSubmissionTest()
{
    var credentials = new BasicAuthCredential()
    {
        UserName = IntegrationTestBase.TestCredentials.AzureUserName,
        Password = IntegrationTestBase.TestCredentials.AzurePassword,
        Server = new Uri(IntegrationTestBase.TestCredentials.WellKnownCluster.Cluster)
    };
    var hadoopClient = JobSubmissionClientFactory.Connect(credentials);

    var mapReduceJob = new MapReduceJobCreateParameters()
    {
        ClassName = "pi",
        JobName = "pi estimation jobDetails",
        JarFile = "/example/hadoop-examples.jar",
        StatusFolder = "/piresults"
    };
    mapReduceJob.Arguments.Add("16");
    mapReduceJob.Arguments.Add("10000");

    var jobCreationDetails = hadoopClient.CreateMapReduceJob(mapReduceJob);

    Assert.IsNull(jobCreationDetails.ErrorCode, "Should not fail mr jobDetails submission");
    Assert.IsNotNull(jobCreationDetails.JobId, "Should have a non-null jobDetails id");
}
/// <summary>
/// Synchronous wrapper that blocks on the async MapReduce job creation.
/// </summary>
/// <param name="mapReduceJobCreateParameters">The job creation parameters.</param>
/// <returns>The job creation results.</returns>
public JobCreationResults CreateMapReduceJob(MapReduceJobCreateParameters mapReduceJobCreateParameters)
{
    var pendingJob = this.CreateMapReduceJobAsync(mapReduceJobCreateParameters);
    return pendingJob.WaitForResult();
}