/// <summary>
/// Common entry point for RunJobFlow calls: marshalls the typed request,
/// wraps the async bookkeeping and dispatches through Invoke.
/// </summary>
IAsyncResult invokeRunJobFlow(RunJobFlowRequest runJobFlowRequest, AsyncCallback callback, object state, bool synchronized)
{
    // Translate the typed request into its wire-level representation.
    IRequest marshalledRequest = new RunJobFlowRequestMarshaller().Marshall(runJobFlowRequest);
    var responseUnmarshaller = RunJobFlowResponseUnmarshaller.GetInstance();

    var asyncResult = new AsyncResult(marshalledRequest, callback, state, synchronized, signer, responseUnmarshaller);
    Invoke(asyncResult);
    return asyncResult;
}
/// <summary>
/// Synchronous bridge over <see cref="RunJobFlowAsync"/>: blocks on the task and
/// unwraps the AggregateException so callers observe the original fault.
/// </summary>
internal RunJobFlowResponse RunJobFlow(RunJobFlowRequest request)
{
    try
    {
        return RunJobFlowAsync(request).Result;
    }
    catch (AggregateException e)
    {
        // Rethrow the underlying exception with its original stack trace intact.
        ExceptionDispatchInfo.Capture(e.InnerException).Throw();
        return null; // unreachable: Throw() never returns
    }
}
/// <summary>
/// Sends a RunJobFlow request to the EMR service to start and configure a new job.
/// </summary>
/// <param name="emrClient">EMR client used to call the Amazon EMR service.</param>
/// <param name="settings">Settings used to replace placeholders in the job-flow template.</param>
/// <param name="jobFlowId">Existing job-flow id; may be null for a new job (not used by this sender).</param>
/// <returns>The new job-flow id; null when the service response is not OK.</returns>
public override async Task <string> SendAsync(IAmazonElasticMapReduce emrClient, IBuilderSettings settings, string jobFlowId)
{
    var request = new RunJobFlowRequestBuilder(settings).Build(this.jobFlow);
    var response = await emrClient.RunJobFlowAsync(request);
    return this.IsOk(response) ? response.JobFlowId : null;
}
/// <summary>
/// Creates a flow job via a synchronous RunJobFlow call.
/// </summary>
/// <param name="req"><see cref="RunJobFlowRequest"/></param>
/// <returns><see cref="RunJobFlowResponse"/></returns>
public RunJobFlowResponse RunJobFlowSync(RunJobFlowRequest req)
{
    JsonResponseModel<RunJobFlowResponse> rsp = null;
    try
    {
        // Issue the request and deserialize the JSON envelope in one step.
        rsp = JsonConvert.DeserializeObject<JsonResponseModel<RunJobFlowResponse>>(
            this.InternalRequestSync(req, "RunJobFlow"));
    }
    catch (JsonSerializationException e)
    {
        throw new TencentCloudSDKException(e.Message);
    }
    return rsp.Response;
}
/// <summary>
/// Builds a RunJobFlowRequest from the given job configuration and submits it to EMR.
/// </summary>
/// <param name="emrClient">Client used to call the Amazon EMR service.</param>
/// <param name="configuration">Cluster, instance, bootstrap and step settings copied onto the request.</param>
/// <returns>The RunJobFlow response returned by the service.</returns>
public static async Task <RunJobFlowResponse> RunJob(IAmazonElasticMapReduce emrClient, JobConfiguration configuration)
{
    // Map every configuration section onto the request with object initializers.
    var request = new RunJobFlowRequest
    {
        Name = configuration.ClusterName,
        ReleaseLabel = configuration.ReleaseLabel,
        Applications = configuration.Applications
            .Select(app => new Amazon.ElasticMapReduce.Model.Application { Name = app.Name })
            .ToList(),
        Instances = new JobFlowInstancesConfig
        {
            Ec2KeyName = configuration.EC2KeyName,
            InstanceCount = configuration.InstanceCount,
            KeepJobFlowAliveWhenNoSteps = configuration.KeepJobAlive,
            MasterInstanceType = configuration.MasterInstanceType,
            SlaveInstanceType = configuration.SlaveInstanceType
        },
        ServiceRole = configuration.ServiceRole,
        JobFlowRole = configuration.JobFlowRole,
        BootstrapActions = configuration.BootstrapActions
            .Select(bootstrap => new Amazon.ElasticMapReduce.Model.BootstrapActionConfig
            {
                Name = bootstrap.Name,
                ScriptBootstrapAction = new ScriptBootstrapActionConfig
                {
                    Path = bootstrap.ScriptBootstrapAction.Path,
                    Args = new List<string>(bootstrap.ScriptBootstrapAction.Args)
                }
            })
            .ToList(),
        Steps = configuration.StepConfigurations
            .Select(s => new Amazon.ElasticMapReduce.Model.StepConfig
            {
                Name = s.Name,
                ActionOnFailure = ParseActionOnFailure(s.ActionOnFailure),
                HadoopJarStep = new HadoopJarStepConfig
                {
                    Jar = s.HadoopJarStep.Jar,
                    Args = new List<string>(s.HadoopJarStep.Args)
                }
            })
            .ToList(),
        LogUri = configuration.LogUri
    };
    return await emrClient.RunJobFlowAsync(request);
}
/// <summary>
/// Verifies that BuildRequestVisitor resolves every {placeholder} token in the
/// JobFlow and emits a RunJobFlowRequest plus a JobFlowInstancesConfig that the
/// subscriber can observe.
/// </summary>
public void VisitJobFlow()
{
    //Init args: every string property carries a {placeholder} token for the visitor to resolve.
    JobFlow jobFlow = new JobFlow();
    jobFlow.Name = "testName-{jobFlowId}";
    jobFlow.LogUri = "{myBucket}/logs/";
    jobFlow.JobFlowRole = "arn:{myRole}";
    jobFlow.AmiVersion = "{amiVersion}";
    jobFlow.AdditionalInfo = "{ name: \"name1\", contact: \"{contact}\" }";
    jobFlow.Ec2KeyName = "{ec2Key}";
    jobFlow.HadoopVersion = "{hadoopVersion}";
    jobFlow.KeepJobFlowAliveWhenNoSteps = true;
    jobFlow.TerminationProtected = true;
    jobFlow.MasterInstanceType = "{masterInstanceType}";
    jobFlow.SlaveInstanceType = "{slaveInstanceType}";
    jobFlow.InstanceCount = 34;

    //Init visitor: the subscriber records every object the visitor emits.
    BuildRequestVisitor visitor = new BuildRequestVisitor(BuildRequestVisitorTest.GetSettings());
    VisitorSubscriber visitorSubscriber = new VisitorSubscriber(visitor);

    //Action
    jobFlow.Accept(visitor);

    //Verify: exactly two objects expected (the request and the instances config).
    Assert.AreEqual(2, visitorSubscriber.TotalObjCount, "Unexpected number of objects created");

    // Request-level fields with placeholders substituted.
    RunJobFlowRequest actualJobFlowRequest = visitorSubscriber.jobFlowRequestList[0];
    Assert.AreEqual("testName-j-111AAABBBNJ2I", actualJobFlowRequest.Name, "Unexpected Name");
    Assert.AreEqual("s3://myBucket/logs/", actualJobFlowRequest.LogUri, "Unexpected LogUri");
    Assert.AreEqual("arn:SupperSlonic", actualJobFlowRequest.JobFlowRole, "Unexpected JobFlowRole");
    Assert.AreEqual("3.0.3", actualJobFlowRequest.AmiVersion, "Unexpected AmiVersion");
    Assert.AreEqual("{ name: \"name1\", contact: \"supperslonic.com\" }", actualJobFlowRequest.AdditionalInfo, "Unexpected AdditionalInfo");

    // Instance-config fields; booleans and the count are copied through unchanged.
    JobFlowInstancesConfig actualJobFlowInstancesConfig = visitorSubscriber.instanceConfigList[0];
    Assert.AreEqual("testEC2Key", actualJobFlowInstancesConfig.Ec2KeyName, "Unexpected Ec2KeyName");
    Assert.AreEqual("2.2.0", actualJobFlowInstancesConfig.HadoopVersion, "Unexpected HadoopVersion");
    Assert.IsTrue(actualJobFlowInstancesConfig.KeepJobFlowAliveWhenNoSteps, "Unexpected KeepJobFlowAliveWhenNoSteps");
    Assert.IsTrue(actualJobFlowInstancesConfig.TerminationProtected, "Unexpected TerminationProtected");
    Assert.AreEqual("m1.medium", actualJobFlowInstancesConfig.MasterInstanceType, "Unexpected MasterInstanceType");
    Assert.AreEqual("m3.2xlarge", actualJobFlowInstancesConfig.SlaveInstanceType, "Unexpected SlaveInstanceType");
    Assert.AreEqual(34, actualJobFlowInstancesConfig.InstanceCount, "Unexpected InstanceCount");
}
/// <summary>
/// Converts a <see cref="RunJobFlowRequest"/> into the flat name/value pairs
/// expected by the EMR query API (e.g. "Steps.member.1.Name"). All list
/// indices in the generated keys are 1-based.
/// </summary>
private static IDictionary <string, string> ConvertRunJobFlow(RunJobFlowRequest request)
{
    IDictionary <string, string> parameters = new Dictionary <string, string>();
    parameters["Action"] = "RunJobFlow";

    // Top-level scalar properties (only emitted when explicitly set).
    if (request.IsSetName()) { parameters["Name"] = request.Name; }
    if (request.IsSetLogUri()) { parameters["LogUri"] = request.LogUri; }
    if (request.IsSetAdditionalInfo()) { parameters["AdditionalInfo"] = request.AdditionalInfo; }

    // Instance configuration, flattened under the "Instances." prefix.
    if (request.IsSetInstances())
    {
        JobFlowInstancesConfig runJobFlowRequestInstances = request.Instances;
        if (runJobFlowRequestInstances.IsSetMasterInstanceType())
        {
            parameters[String.Concat("Instances", ".", "MasterInstanceType")] = runJobFlowRequestInstances.MasterInstanceType;
        }
        if (runJobFlowRequestInstances.IsSetSlaveInstanceType())
        {
            parameters[String.Concat("Instances", ".", "SlaveInstanceType")] = runJobFlowRequestInstances.SlaveInstanceType;
        }
        if (runJobFlowRequestInstances.IsSetInstanceCount())
        {
            parameters[String.Concat("Instances", ".", "InstanceCount")] = runJobFlowRequestInstances.InstanceCount.ToString();
        }
        if (runJobFlowRequestInstances.IsSetEc2KeyName())
        {
            parameters[String.Concat("Instances", ".", "Ec2KeyName")] = runJobFlowRequestInstances.Ec2KeyName;
        }
        if (runJobFlowRequestInstances.IsSetPlacement())
        {
            PlacementType instancesPlacement = runJobFlowRequestInstances.Placement;
            if (instancesPlacement.IsSetAvailabilityZone())
            {
                parameters[String.Concat("Instances", ".", "Placement", ".", "AvailabilityZone")] = instancesPlacement.AvailabilityZone;
            }
        }
        if (runJobFlowRequestInstances.IsSetKeepJobFlowAliveWhenNoSteps())
        {
            // Booleans are serialized as lower-case "true"/"false".
            parameters[String.Concat("Instances", ".", "KeepJobFlowAliveWhenNoSteps")] = runJobFlowRequestInstances.KeepJobFlowAliveWhenNoSteps.ToString().ToLower();
        }
        if (runJobFlowRequestInstances.IsSetHadoopVersion())
        {
            parameters[String.Concat("Instances", ".", "HadoopVersion")] = runJobFlowRequestInstances.HadoopVersion;
        }
    }

    // Steps -> "Steps.member.N." entries.
    List <StepConfig> runJobFlowRequestStepsList = request.Steps;
    int runJobFlowRequestStepsListIndex = 1;
    foreach (StepConfig runJobFlowRequestSteps in runJobFlowRequestStepsList)
    {
        if (runJobFlowRequestSteps.IsSetName())
        {
            parameters[String.Concat("Steps", ".member.", runJobFlowRequestStepsListIndex, ".", "Name")] = runJobFlowRequestSteps.Name;
        }
        if (runJobFlowRequestSteps.IsSetActionOnFailure())
        {
            parameters[String.Concat("Steps", ".member.", runJobFlowRequestStepsListIndex, ".", "ActionOnFailure")] = runJobFlowRequestSteps.ActionOnFailure;
        }
        if (runJobFlowRequestSteps.IsSetHadoopJarStep())
        {
            HadoopJarStepConfig stepsHadoopJarStep = runJobFlowRequestSteps.HadoopJarStep;
            // Jar-step key/value properties.
            List <KeyValue> hadoopJarStepPropertiesList = stepsHadoopJarStep.Properties;
            int hadoopJarStepPropertiesListIndex = 1;
            foreach (KeyValue hadoopJarStepProperties in hadoopJarStepPropertiesList)
            {
                if (hadoopJarStepProperties.IsSetKey())
                {
                    parameters[String.Concat("Steps", ".member.", runJobFlowRequestStepsListIndex, ".", "HadoopJarStep", ".", "Properties", ".member.", hadoopJarStepPropertiesListIndex, ".", "Key")] = hadoopJarStepProperties.Key;
                }
                if (hadoopJarStepProperties.IsSetValue())
                {
                    parameters[String.Concat("Steps", ".member.", runJobFlowRequestStepsListIndex, ".", "HadoopJarStep", ".", "Properties", ".member.", hadoopJarStepPropertiesListIndex, ".", "Value")] = hadoopJarStepProperties.Value;
                }
                hadoopJarStepPropertiesListIndex++;
            }
            if (stepsHadoopJarStep.IsSetJar())
            {
                parameters[String.Concat("Steps", ".member.", runJobFlowRequestStepsListIndex, ".", "HadoopJarStep", ".", "Jar")] = stepsHadoopJarStep.Jar;
            }
            if (stepsHadoopJarStep.IsSetMainClass())
            {
                parameters[String.Concat("Steps", ".member.", runJobFlowRequestStepsListIndex, ".", "HadoopJarStep", ".", "MainClass")] = stepsHadoopJarStep.MainClass;
            }
            // Jar-step command-line arguments.
            List <string> hadoopJarStepArgsList = stepsHadoopJarStep.Args;
            int hadoopJarStepArgsListIndex = 1;
            foreach (string hadoopJarStepArgs in hadoopJarStepArgsList)
            {
                parameters[String.Concat("Steps", ".member.", runJobFlowRequestStepsListIndex, ".", "HadoopJarStep", ".", "Args", ".member.", hadoopJarStepArgsListIndex)] = hadoopJarStepArgs;
                hadoopJarStepArgsListIndex++;
            }
        }
        runJobFlowRequestStepsListIndex++;
    }

    // Bootstrap actions -> "BootstrapActions.member.N." entries.
    List <BootstrapActionConfig> runJobFlowRequestBootstrapActionsList = request.BootstrapActions;
    int runJobFlowRequestBootstrapActionsListIndex = 1;
    foreach (BootstrapActionConfig runJobFlowRequestBootstrapActions in runJobFlowRequestBootstrapActionsList)
    {
        if (runJobFlowRequestBootstrapActions.IsSetName())
        {
            parameters[String.Concat("BootstrapActions", ".member.", runJobFlowRequestBootstrapActionsListIndex, ".", "Name")] = runJobFlowRequestBootstrapActions.Name;
        }
        if (runJobFlowRequestBootstrapActions.IsSetScriptBootstrapAction())
        {
            ScriptBootstrapActionConfig bootstrapActionsScriptBootstrapAction = runJobFlowRequestBootstrapActions.ScriptBootstrapAction;
            if (bootstrapActionsScriptBootstrapAction.IsSetPath())
            {
                parameters[String.Concat("BootstrapActions", ".member.", runJobFlowRequestBootstrapActionsListIndex, ".", "ScriptBootstrapAction", ".", "Path")] = bootstrapActionsScriptBootstrapAction.Path;
            }
            // Bootstrap script arguments.
            List <string> scriptBootstrapActionArgsList = bootstrapActionsScriptBootstrapAction.Args;
            int scriptBootstrapActionArgsListIndex = 1;
            foreach (string scriptBootstrapActionArgs in scriptBootstrapActionArgsList)
            {
                parameters[String.Concat("BootstrapActions", ".member.", runJobFlowRequestBootstrapActionsListIndex, ".", "ScriptBootstrapAction", ".", "Args", ".member.", scriptBootstrapActionArgsListIndex)] = scriptBootstrapActionArgs;
                scriptBootstrapActionArgsListIndex++;
            }
        }
        runJobFlowRequestBootstrapActionsListIndex++;
    }
    return (parameters);
}
/// <summary>
/// Run Job Flow
/// </summary>
/// <param name="request">Run Job Flow request</param>
/// <returns>Run Job Flow Response from the service</returns>
public RunJobFlowResponse RunJobFlow(RunJobFlowRequest request)
{
    // Flatten the request into query parameters, then dispatch through the
    // shared Invoke pipeline.
    IDictionary<string, string> parameters = ConvertRunJobFlow(request);
    return Invoke<RunJobFlowResponse>(parameters);
}
/// <summary>
/// Provisions a new EMR job flow for the add-on and returns its JobFlowId as
/// the connection data.
/// </summary>
/// <param name="request">Provisioning request carrying the manifest and raw developer options.</param>
/// <returns>
/// A <see cref="ProvisionAddOnResult"/> whose ConnectionData holds the new job-flow id on
/// success; on failure, EndUserMessage describes the problem.
/// </returns>
public override ProvisionAddOnResult Provision(AddonProvisionRequest request)
{
    // NOTE(review): IsSuccess starts true and, as in the original code, is not
    // reset on the early failure returns below — confirm callers key off
    // EndUserMessage rather than IsSuccess for those paths.
    var provisionResult = new ProvisionAddOnResult("") { IsSuccess = true };
    AddonManifest manifest = request.Manifest;
    string developerOptions = request.DeveloperOptions;
    try
    {
        IAmazonElasticMapReduce client;
        EMRDeveloperOptions devOptions;
        var parseOptionsResult = ParseDevOptions(developerOptions, out devOptions);
        if (!parseOptionsResult.IsSuccess)
        {
            provisionResult.EndUserMessage = parseOptionsResult.EndUserMessage;
            return provisionResult;
        }
        // Fix: reuse the options parsed above instead of re-parsing the raw string.
        var establishClientResult = EstablishClient(manifest, devOptions, out client);
        if (!establishClientResult.IsSuccess)
        {
            provisionResult.EndUserMessage = establishClientResult.EndUserMessage;
            return provisionResult;
        }

        var stepFactory = new StepFactory();
        var installHive = new StepConfig
        {
            Name = "Install Hive",
            ActionOnFailure = "TERMINATE_JOB_FLOW",
            HadoopJarStep = stepFactory.NewInstallHiveStep()
        };
        var instanceConfig = new JobFlowInstancesConfig
        {
            Ec2KeyName = devOptions.Ec2KeyName,
            HadoopVersion = "0.20",
            InstanceCount = devOptions.InstanceCount,
            // Important: the EMR job flow must be kept alive for the
            // application to see it during provisioning.
            KeepJobFlowAliveWhenNoSteps = true,
            MasterInstanceType = devOptions.MasterInstanceType,
            SlaveInstanceType = devOptions.SlaveInstanceType
        };
        var _request = new RunJobFlowRequest
        {
            Name = devOptions.JobFlowName,
            Steps = { installHive },
            LogUri = "s3://myawsbucket", // revisit: bucket should come from options
            Instances = instanceConfig
        };
        // Fix: the original pre-populated Steps with a null entry when debugging
        // was disabled, and inserted the debugging step a second time when it was
        // enabled. Add it exactly once, at the front, only when requested.
        if (devOptions.EnableDebugging)
        {
            _request.Steps.Insert(0, new StepConfig
            {
                Name = "Enable debugging",
                ActionOnFailure = "TERMINATE_JOB_FLOW",
                HadoopJarStep = stepFactory.NewEnableDebuggingStep()
            });
        }

        var result = client.RunJobFlow(_request);
        // Fix: the original busy-waited (Thread.Sleep loop) on a response object
        // whose JobFlowId never changes — either a no-op or an infinite hang.
        // Treat a missing id as a failure instead.
        if (result.JobFlowId == null)
        {
            provisionResult.EndUserMessage = "RunJobFlow did not return a JobFlowId.";
            return provisionResult;
        }
        provisionResult.IsSuccess = true;
        // Fix: pass the id through directly instead of string.Format-ing it,
        // which would throw if the id ever contained '{'.
        provisionResult.ConnectionData = result.JobFlowId;
    }
    catch (Exception e)
    {
        provisionResult.EndUserMessage = e.Message;
    }
    return provisionResult;
}
/// <summary>
/// Converts a <see cref="RunJobFlowRequest"/> into the flat name/value dictionary
/// used by the EMR query API; list indices in the generated keys are 1-based.
/// (Decompiled variant — num/config2-style names come from the decompiler.)
/// </summary>
private static IDictionary <string, string> ConvertRunJobFlow(RunJobFlowRequest request)
{
    IDictionary <string, string> dictionary = new Dictionary <string, string>();
    dictionary["Action"] = "RunJobFlow";
    // Top-level scalar properties (only emitted when explicitly set).
    if (request.IsSetName()) { dictionary["Name"] = request.Name; }
    if (request.IsSetLogUri()) { dictionary["LogUri"] = request.LogUri; }
    if (request.IsSetAdditionalInfo()) { dictionary["AdditionalInfo"] = request.AdditionalInfo; }
    // Instance configuration, flattened under the "Instances." prefix.
    if (request.IsSetInstances())
    {
        JobFlowInstancesConfig instances = request.Instances;
        if (instances.IsSetMasterInstanceType()) { dictionary["Instances" + "." + "MasterInstanceType"] = instances.MasterInstanceType; }
        if (instances.IsSetSlaveInstanceType()) { dictionary["Instances" + "." + "SlaveInstanceType"] = instances.SlaveInstanceType; }
        if (instances.IsSetInstanceCount()) { dictionary["Instances" + "." + "InstanceCount"] = instances.InstanceCount.ToString(); }
        if (instances.IsSetEc2KeyName()) { dictionary["Instances" + "." + "Ec2KeyName"] = instances.Ec2KeyName; }
        if (instances.IsSetPlacement())
        {
            PlacementType placement = instances.Placement;
            if (placement.IsSetAvailabilityZone()) { dictionary["Instances" + "." + "Placement" + "." + "AvailabilityZone"] = placement.AvailabilityZone; }
        }
        // Booleans are serialized as lower-case "true"/"false".
        if (instances.IsSetKeepJobFlowAliveWhenNoSteps()) { dictionary["Instances" + "." + "KeepJobFlowAliveWhenNoSteps"] = instances.KeepJobFlowAliveWhenNoSteps.ToString().ToLower(); }
        if (instances.IsSetHadoopVersion()) { dictionary["Instances" + "." + "HadoopVersion"] = instances.HadoopVersion; }
    }
    // Steps -> "Steps.member.N." entries (num is the 1-based step index).
    List <StepConfig> steps = request.Steps;
    int num = 1;
    foreach (StepConfig config2 in steps)
    {
        if (config2.IsSetName()) { dictionary[string.Concat(new object[] { "Steps", ".member.", num, ".", "Name" })] = config2.Name; }
        if (config2.IsSetActionOnFailure()) { dictionary[string.Concat(new object[] { "Steps", ".member.", num, ".", "ActionOnFailure" })] = config2.ActionOnFailure; }
        if (config2.IsSetHadoopJarStep())
        {
            HadoopJarStepConfig hadoopJarStep = config2.HadoopJarStep;
            // Jar-step key/value properties (num2 is the 1-based property index).
            List <KeyValue> properties = hadoopJarStep.Properties;
            int num2 = 1;
            foreach (KeyValue value2 in properties)
            {
                if (value2.IsSetKey()) { dictionary[string.Concat(new object[] { "Steps", ".member.", num, ".", "HadoopJarStep", ".", "Properties", ".member.", num2, ".", "Key" })] = value2.Key; }
                if (value2.IsSetValue()) { dictionary[string.Concat(new object[] { "Steps", ".member.", num, ".", "HadoopJarStep", ".", "Properties", ".member.", num2, ".", "Value" })] = value2.Value; }
                num2++;
            }
            if (hadoopJarStep.IsSetJar()) { dictionary[string.Concat(new object[] { "Steps", ".member.", num, ".", "HadoopJarStep", ".", "Jar" })] = hadoopJarStep.Jar; }
            if (hadoopJarStep.IsSetMainClass()) { dictionary[string.Concat(new object[] { "Steps", ".member.", num, ".", "HadoopJarStep", ".", "MainClass" })] = hadoopJarStep.MainClass; }
            // Jar-step command-line arguments (num3 is 1-based).
            List <string> args = hadoopJarStep.Args;
            int num3 = 1;
            foreach (string str in args)
            {
                dictionary[string.Concat(new object[] { "Steps", ".member.", num, ".", "HadoopJarStep", ".", "Args", ".member.", num3 })] = str;
                num3++;
            }
        }
        num++;
    }
    // Bootstrap actions -> "BootstrapActions.member.N." entries (num4 is 1-based).
    List <BootstrapActionConfig> bootstrapActions = request.BootstrapActions;
    int num4 = 1;
    foreach (BootstrapActionConfig config4 in bootstrapActions)
    {
        if (config4.IsSetName()) { dictionary[string.Concat(new object[] { "BootstrapActions", ".member.", num4, ".", "Name" })] = config4.Name; }
        if (config4.IsSetScriptBootstrapAction())
        {
            ScriptBootstrapActionConfig scriptBootstrapAction = config4.ScriptBootstrapAction;
            if (scriptBootstrapAction.IsSetPath()) { dictionary[string.Concat(new object[] { "BootstrapActions", ".member.", num4, ".", "ScriptBootstrapAction", ".", "Path" })] = scriptBootstrapAction.Path; }
            // Bootstrap script arguments (num5 is 1-based).
            List <string> list5 = scriptBootstrapAction.Args;
            int num5 = 1;
            foreach (string str2 in list5)
            {
                dictionary[string.Concat(new object[] { "BootstrapActions", ".member.", num4, ".", "ScriptBootstrapAction", ".", "Args", ".member.", num5 })] = str2;
                num5++;
            }
        }
        num4++;
    }
    return (dictionary);
}
/// <summary>
/// Initiates the asynchronous execution of the RunJobFlow operation.
/// <seealso cref="Amazon.ElasticMapReduce.AmazonElasticMapReduce.RunJobFlow"/>
/// </summary>
/// <param name="runJobFlowRequest">Container for the necessary parameters to execute the RunJobFlow operation on AmazonElasticMapReduce.</param>
/// <param name="callback">An AsyncCallback delegate invoked when the operation completes.</param>
/// <param name="state">A user-defined state object passed to the callback; retrieve it via the AsyncState property.</param>
/// <returns>An IAsyncResult used to poll or wait for results; also required by EndRunJobFlow.</returns>
public IAsyncResult BeginRunJobFlow(RunJobFlowRequest runJobFlowRequest, AsyncCallback callback, object state)
{
    // Delegate to the shared invoke path in asynchronous (non-blocking) mode.
    const bool synchronousCall = false;
    return invokeRunJobFlow(runJobFlowRequest, callback, state, synchronousCall);
}
/// <summary>
/// <para>RunJobFlow creates and starts running a new job flow, executing the supplied
/// steps. When the job flow completes, the cluster is stopped and the HDFS partition is
/// lost; to keep data, have the last step write results to Amazon S3. Setting the
/// JobFlowInstancesConfig <c>KeepJobFlowAliveWhenNoSteps</c> parameter to <c>TRUE</c>
/// makes the job flow transition to WAITING instead of shutting down after the steps
/// finish.</para>
/// <para>Set the JobFlowInstancesConfig <c>TerminationProtected</c> parameter to
/// <c>TRUE</c> to lock the job flow against termination by API call, user intervention,
/// or job flow error.</para>
/// <para>A maximum of 256 steps are allowed per job flow. Long-running or complex flows
/// can bypass that limit, e.g. by connecting to the master node over SSH and submitting
/// queries directly to Hive/Hadoop; see "Add More than 256 Steps to a Job Flow" in the
/// <i>Amazon Elastic MapReduce Developer's Guide</i>. For long-running job flows,
/// periodically store your results.</para>
/// </summary>
/// <param name="runJobFlowRequest">Container for the necessary parameters to execute the
/// RunJobFlow service method on AmazonElasticMapReduce.</param>
/// <returns>The response from the RunJobFlow service method, as returned by
/// AmazonElasticMapReduce.</returns>
/// <exception cref="InternalServerErrorException"/>
public RunJobFlowResponse RunJobFlow(RunJobFlowRequest runJobFlowRequest)
{
    // Drive the Begin/End pair synchronously (synchronized = true blocks until done).
    return EndRunJobFlow(invokeRunJobFlow(runJobFlowRequest, null, null, true));
}
// Event handler invoked when the visitor creates a RunJobFlowRequest:
// captures the request into this.result so it can be inspected later.
private void OnRunJobFlowRequestCreated(object sender, RunJobFlowRequest jobFlowRequest)
{
    this.result = jobFlowRequest;
}
/// <summary>
/// End-to-end builder test: loads a job-flow template from XML and verifies every
/// section of the resulting RunJobFlowRequest — main properties, instance config,
/// tags, bootstrap actions and steps — against the expected template values.
/// </summary>
public void CanBuild()
{
    //Input
    XmlDocument jobFlowXml = new XmlDocument();
    jobFlowXml.Load("TestData/JobFlowTemplate.xml");
    JobFlow jobFlow = JobFlow.GetRecord(jobFlowXml.OuterXml);

    //Action
    RunJobFlowRequestBuilder builder = new RunJobFlowRequestBuilder(RunJobFlowRequestBuilderTest.GetSettings());
    RunJobFlowRequest actual = builder.Build(jobFlow);

    //Verify
    //Main properties
    Assert.AreEqual("Name1", actual.Name, "Unexpected Name");
    Assert.AreEqual("s3://myBucket/logs", actual.LogUri, "Unexpected LogUri");
    Assert.AreEqual("test job flow role", actual.JobFlowRole, "Unexpected JobFlowRole");
    Assert.AreEqual("3.0.3", actual.AmiVersion, "Unexpected AmiVersion");
    Assert.AreEqual("{ test: \"lala\", \"key\" : \"value\"}", actual.AdditionalInfo, "Unexpected AdditionalInfo");

    //JobFlowInstancesConfig
    Assert.AreEqual("testEC2Key", actual.Instances.Ec2KeyName, "Unexpected Ec2KeyName");
    Assert.AreEqual("2.2.0", actual.Instances.HadoopVersion, "Unexpected HadoopVersion");
    Assert.IsTrue(actual.Instances.KeepJobFlowAliveWhenNoSteps, "Unexpected KeepJobFlowAliveWhenNoSteps");
    Assert.IsTrue(actual.Instances.TerminationProtected, "Unexpected TerminationProtected");
    Assert.AreEqual("m1.medium", actual.Instances.MasterInstanceType, "Unexpected MasterInstanceType");
    Assert.AreEqual("m3.2xlarge", actual.Instances.SlaveInstanceType, "Unexpected SlaveInstanceType");
    Assert.AreEqual(34, actual.Instances.InstanceCount, "Unexpected InstanceCount");

    //Tags
    Assert.AreEqual(2, actual.Tags.Count, "Unexpected amount of tags");
    Assert.AreEqual("Contact", actual.Tags[0].Key, "Unexpected Key");
    Assert.AreEqual("Supperslonic.com", actual.Tags[0].Value, "Unexpected Value");
    Assert.AreEqual("Environment", actual.Tags[1].Key, "Unexpected Key");
    Assert.AreEqual("test", actual.Tags[1].Value, "Unexpected Value");

    //BootstrapActions (verified in template order via the running index)
    int index = 0;
    BootstrapActionConfig bootstrap;
    Assert.AreEqual(6, actual.BootstrapActions.Count, "Unexpected amount of bootstrapActions");

    bootstrap = actual.BootstrapActions[index++];
    Assert.AreEqual("Configure Hadoop", bootstrap.Name, "Unexpected Name");
    Assert.AreEqual("s3://elasticmapreduce/bootstrap-actions/configure-hadoop", bootstrap.ScriptBootstrapAction.Path, "Unexpected ScriptBootstrapAction.Path");
    Assert.IsTrue(new List <string>() { "-s", "mapreduce.map.memory.mb=8192", "-s", "mapreduce.user.classpath.first=true" }.SequenceEqual(bootstrap.ScriptBootstrapAction.Args), "Unexpected args list");

    bootstrap = actual.BootstrapActions[index++];
    Assert.AreEqual("Install HBase", bootstrap.Name, "Unexpected Name");
    Assert.AreEqual("s3://elasticmapreduce/bootstrap-actions/setup-hbase", bootstrap.ScriptBootstrapAction.Path, "Unexpected ScriptBootstrapAction.Path");
    Assert.IsNull(bootstrap.ScriptBootstrapAction.Args, "Unexpected args list");

    bootstrap = actual.BootstrapActions[index++];
    Assert.AreEqual("Configure HBase", bootstrap.Name, "Unexpected Name");
    Assert.AreEqual("s3://elasticmapreduce/bootstrap-actions/configure-hbase", bootstrap.ScriptBootstrapAction.Path, "Unexpected ScriptBootstrapAction.Path");
    Assert.IsTrue(new List <string>() { "--site-config-file", "s3://myBucket/hBase/config.xml" }.SequenceEqual(bootstrap.ScriptBootstrapAction.Args), "Unexpected args list");

    bootstrap = actual.BootstrapActions[index++];
    Assert.AreEqual("Configure HBase Daemons", bootstrap.Name, "Unexpected Name");
    Assert.AreEqual("s3://elasticmapreduce/bootstrap-actions/configure-hbase-daemons", bootstrap.ScriptBootstrapAction.Path, "Unexpected ScriptBootstrapAction.Path");
    Assert.IsTrue(new List <string>() { "--hbase-master-opts=-Xmx6140M -XX:NewSize=64m", "--regionserver-opts=-XX:MaxNewSize=64m -XX:+HeapDumpOnOutOfMemoryError" }.SequenceEqual(bootstrap.ScriptBootstrapAction.Args), "Unexpected args list");

    bootstrap = actual.BootstrapActions[index++];
    Assert.AreEqual("bootstrap action 1", bootstrap.Name, "Unexpected Name");
    Assert.AreEqual("s3://myBucket/bootstrap/UploadLibraries.sh", bootstrap.ScriptBootstrapAction.Path, "Unexpected ScriptBootstrapAction.Path");
    Assert.IsNull(bootstrap.ScriptBootstrapAction.Args, "Unexpected args list");

    bootstrap = actual.BootstrapActions[index++];
    Assert.AreEqual("bootstrap action 2", bootstrap.Name, "Unexpected Name");
    Assert.AreEqual("s3://elasticmapreduce/bootstrap-actions/install-ganglia", bootstrap.ScriptBootstrapAction.Path, "Unexpected ScriptBootstrapAction.Path");
    Assert.IsTrue(new List <string>() { "true", "4" }.SequenceEqual(bootstrap.ScriptBootstrapAction.Args), "Unexpected args list");

    //Steps (verified in template order via the running index)
    index = 0;
    StepConfig step;
    Assert.AreEqual(6, actual.Steps.Count, "Unexpected amount of steps");

    step = actual.Steps[index++];
    Assert.AreEqual("Start debugging", step.Name, "Unexpected Name");
    Assert.AreEqual(ActionOnFailure.CONTINUE, step.ActionOnFailure, "Unexpected ActionOnFailure");
    Assert.AreEqual("s3://elasticmapreduce/libs/script-runner/script-runner.jar", step.HadoopJarStep.Jar, "Unexpected Jar");
    Assert.IsNull(step.HadoopJarStep.MainClass, "Unexpected MainClass");
    Assert.IsTrue(new List <string>() { "s3://elasticmapreduce/libs/state-pusher/0.1/fetch" }.SequenceEqual(step.HadoopJarStep.Args), "Unexpected args list");

    step = actual.Steps[index++];
    Assert.AreEqual("Start HBase", step.Name, "Unexpected Name");
    Assert.AreEqual(ActionOnFailure.TERMINATE_JOB_FLOW, step.ActionOnFailure, "Unexpected ActionOnFailure");
    Assert.AreEqual("/home/hadoop/lib/hbase-0.94.7.jar", step.HadoopJarStep.Jar, "Unexpected Jar");
    Assert.AreEqual("emr.hbase.backup.Main", step.HadoopJarStep.MainClass, "Unexpected MainClass");
    Assert.IsTrue(new List <string>() { "--start-master" }.SequenceEqual(step.HadoopJarStep.Args), "Unexpected args list");

    step = actual.Steps[index++];
    Assert.AreEqual("Restore HBase", step.Name, "Unexpected Name");
    Assert.AreEqual(ActionOnFailure.TERMINATE_JOB_FLOW, step.ActionOnFailure, "Unexpected ActionOnFailure");
    Assert.AreEqual("/home/hadoop/lib/hbase-0.94.7.jar", step.HadoopJarStep.Jar, "Unexpected Jar");
    Assert.AreEqual("emr.hbase.backup.Main", step.HadoopJarStep.MainClass, "Unexpected MainClass");
    Assert.IsTrue(new List <string>() { "--restore", "--backup-dir", "s3://myBucket/hBaseRestore" }.SequenceEqual(step.HadoopJarStep.Args), "Unexpected args list");

    step = actual.Steps[index++];
    Assert.AreEqual("step 1", step.Name, "Unexpected Name");
    Assert.AreEqual(ActionOnFailure.CANCEL_AND_WAIT, step.ActionOnFailure, "Unexpected ActionOnFailure");
    Assert.AreEqual("s3://myBucket/jars/test.jar", step.HadoopJarStep.Jar, "Unexpected Jar");
    Assert.AreEqual("com.supperslonic.emr.Step1Driver", step.HadoopJarStep.MainClass, "Unexpected MainClass");
    Assert.IsTrue(new List <string>() { "true", "12.34", "hello" }.SequenceEqual(step.HadoopJarStep.Args), "Unexpected args list");

    step = actual.Steps[index++];
    Assert.AreEqual("Backup HBase", step.Name, "Unexpected Name");
    Assert.AreEqual(ActionOnFailure.TERMINATE_JOB_FLOW, step.ActionOnFailure, "Unexpected ActionOnFailure");
    Assert.AreEqual("/home/hadoop/lib/hbase-0.94.7.jar", step.HadoopJarStep.Jar, "Unexpected Jar");
    Assert.AreEqual("emr.hbase.backup.Main", step.HadoopJarStep.MainClass, "Unexpected MainClass");
    Assert.IsTrue(new List <string>() { "--backup", "--backup-dir", "s3://myBucket/hBaseBackup" }.SequenceEqual(step.HadoopJarStep.Args), "Unexpected args list");

    step = actual.Steps[index++];
    Assert.AreEqual("step 2", step.Name, "Unexpected Name");
    Assert.IsNull(step.ActionOnFailure, "Unexpected ActionOnFailure");
    Assert.AreEqual("s3://myBucket/jars/test2.jar", step.HadoopJarStep.Jar, "Unexpected Jar");
    Assert.IsNull(step.HadoopJarStep.MainClass, "Unexpected MainClass");
    Assert.IsNull(step.HadoopJarStep.Args, "Unexpected args list");
}
/// <summary>
/// Validates the mandatory JobFlow fields, then builds a RunJobFlowRequest and a
/// JobFlowInstancesConfig (resolving placeholders via the settings) and raises
/// the corresponding "created" events for each.
/// </summary>
/// <param name="jobFlow">Job-flow description to translate.</param>
/// <exception cref="InvalidOperationException">A mandatory field is missing or InstanceCount is not positive.</exception>
public void Visit(JobFlow jobFlow)
{
    // Guard clauses: fail fast on any missing mandatory value (fixed order).
    if (String.IsNullOrEmpty(jobFlow.Name))
    {
        throw new InvalidOperationException(Resources.E_JobFlowNameIsMissing);
    }
    if (String.IsNullOrEmpty(jobFlow.LogUri))
    {
        throw new InvalidOperationException(Resources.E_JobFlowLogUriIsMissing);
    }
    if (String.IsNullOrEmpty(jobFlow.AmiVersion))
    {
        throw new InvalidOperationException(Resources.E_JobFlowAmiVersionIsMissing);
    }
    if (String.IsNullOrEmpty(jobFlow.Ec2KeyName))
    {
        throw new InvalidOperationException(Resources.E_JobFlowEc2KeyNameIsMissing);
    }
    if (String.IsNullOrEmpty(jobFlow.HadoopVersion))
    {
        throw new InvalidOperationException(Resources.E_JobFlowHadoopVersionIsMissing);
    }
    if (String.IsNullOrEmpty(jobFlow.MasterInstanceType))
    {
        throw new InvalidOperationException(Resources.E_JobFlowMasterInstanceTypeIsMissing);
    }
    if (String.IsNullOrEmpty(jobFlow.SlaveInstanceType))
    {
        throw new InvalidOperationException(Resources.E_JobFlowSlaveInstanceTypeIsMissing);
    }
    if (jobFlow.InstanceCount <= 0)
    {
        throw new InvalidOperationException(Resources.E_JobFlowInstanceCountShouldBePositive);
    }

    // Build the request, resolving placeholders in every textual field.
    RunJobFlowRequest jobFlowRequest = new RunJobFlowRequest
    {
        Name = this.settings.FillPlaceHolders(jobFlow.Name),
        LogUri = this.settings.FillPlaceHolders(jobFlow.LogUri),
        JobFlowRole = this.settings.FillPlaceHolders(jobFlow.JobFlowRole),
        AmiVersion = this.settings.FillPlaceHolders(jobFlow.AmiVersion),
        AdditionalInfo = this.settings.FillPlaceHolders(jobFlow.AdditionalInfo)
    };
    if (this.OnRunJobFlowRequestCreated != null)
    {
        this.OnRunJobFlowRequestCreated(this, jobFlowRequest);
    }

    // Build the instances config; booleans and the instance count are copied verbatim.
    JobFlowInstancesConfig instancesConfig = new JobFlowInstancesConfig
    {
        Ec2KeyName = this.settings.FillPlaceHolders(jobFlow.Ec2KeyName),
        HadoopVersion = this.settings.FillPlaceHolders(jobFlow.HadoopVersion),
        KeepJobFlowAliveWhenNoSteps = jobFlow.KeepJobFlowAliveWhenNoSteps,
        TerminationProtected = jobFlow.TerminationProtected,
        MasterInstanceType = this.settings.FillPlaceHolders(jobFlow.MasterInstanceType),
        SlaveInstanceType = this.settings.FillPlaceHolders(jobFlow.SlaveInstanceType),
        InstanceCount = jobFlow.InstanceCount
    };
    if (this.OnJobFlowInstancesConfigCreated != null)
    {
        this.OnJobFlowInstancesConfigCreated(this, instancesConfig);
    }
}
// Subscriber callback: validates the event sender via baseCheck, then records
// the created RunJobFlowRequest for later assertions.
void visitor_OnRunJobFlowRequestCreated(object sender, RunJobFlowRequest e)
{
    this.baseCheck(sender);
    this.jobFlowRequestList.Add(e);
}