        // Verifies that a Pig job definition created from a query file also round-trips the supplied arguments through the cmdlet.
        public void ICanCallThe_New_HDInsightPigJobDefinitionCmdlet_WithArguments()
        {
            var pigJobDefinition = new PigJobCreateParameters { File = TestConstants.WabsProtocolSchemeName + "container@accountname/pigquery.q" };

            pigJobDefinition.Arguments.Add("map.input.tasks=1000");
            pigJobDefinition.Arguments.Add("map.input.reducers=1000");

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightPigJobDefinition)
                            .WithParameter(CmdletConstants.File, pigJobDefinition.File)
                            .WithParameter(CmdletConstants.Arguments, pigJobDefinition.Arguments)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightPigJobDefinition pigJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightPigJobDefinition>().First();

                Assert.AreEqual(pigJobDefinition.File, pigJobFromPowershell.File);

                foreach (string argument in pigJobDefinition.Arguments)
                {
                    Assert.IsTrue(
                        pigJobFromPowershell.Arguments.Any(arg => string.Equals(argument, arg)),
                        string.Format("Unable to find parameter '{0}' in value returned from PowerShell", argument));
                }
            }
        }
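
        // Verifies that a Pig job definition built through the command factory preserves both the query text and the supplied arguments.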
        public void CanCreateNewPigDefinition_WithArguments()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load 'passwd' using PigStorage(':'); B = foreach A generate $0 as id;" };

            pigJobDefinition.Arguments.Add("map.input.tasks=1000");
            pigJobDefinition.Arguments.Add("map.input.reducers=1000");

            INewAzureHDInsightPigJobDefinitionCommand newPigJobDefinitionCommand =
                ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewPigJobDefinition();
            newPigJobDefinitionCommand.Query = pigJobDefinition.Query;
            newPigJobDefinitionCommand.Arguments = pigJobDefinition.Arguments.ToArray();
            newPigJobDefinitionCommand.EndProcessing();

            AzureHDInsightPigJobDefinition pigJobFromCommand = newPigJobDefinitionCommand.Output.ElementAt(0);

            Assert.AreEqual(pigJobDefinition.Query, pigJobFromCommand.Query);

            foreach (string parameter in pigJobDefinition.Arguments)
            {
                Assert.IsTrue(
                    pigJobFromCommand.Arguments.Any(arg => string.Equals(parameter, arg)),
                    "Unable to find parameter '{0}' in value returned from command",
                    parameter);
            }
        }
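
        // Verifies that a Pig job definition built through the command factory preserves the query text.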
        public void CanCreateNewPigDefinition()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load 'passwd' using PigStorage(':'); B = foreach A generate $0 as id;" };

            INewAzureHDInsightPigJobDefinitionCommand newPigJobDefinitionCommand =
                ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewPigJobDefinition();
            newPigJobDefinitionCommand.Query = pigJobDefinition.Query;
            newPigJobDefinitionCommand.EndProcessing();

            AzureHDInsightPigJobDefinition pigJobFromCommand = newPigJobDefinitionCommand.Output.ElementAt(0);

            Assert.AreEqual(pigJobDefinition.Query, pigJobFromCommand.Query);
        }
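
        // Creates a Pig job definition via the cmdlet pipeline and then submits it with the shared RunJobInPowershell helper.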
        public virtual void ICanCallThe_NewPigJob_Then_Start_HDInsightJobsCmdlet()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load table from 'A'" };

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightPigJobDefinition)
                            .WithParameter(CmdletConstants.Query, pigJobDefinition.Query)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightPigJobDefinition pigJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightPigJobDefinition>().First();

                RunJobInPowershell(runspace, pigJobFromPowershell);
            }
        }
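
        // Verifies that the New-AzureHDInsightPigJobDefinition cmdlet round-trips the query text.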
        public void ICanCallThe_New_HDInsightPigJobDefinitionCmdlet()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load 'passwd' using PigStorage(':'); B = foreach A generate $0 as id;" };

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightPigJobDefinition)
                            .WithParameter(CmdletConstants.Query, pigJobDefinition.Query)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightPigJobDefinition pigJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightPigJobDefinition>().First();

                Assert.AreEqual(pigJobDefinition.Query, pigJobFromPowershell.Query);
            }
        }
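
        // Verifies that the serialized WebHCat payload contains the URL-encoded query keyed by WebHCatConstants.Execute.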
        public void CanSerializeValidPigJobRequest()
        {
            var pigJob = new PigJobCreateParameters()
            {
                Query = "show tables"
            };

            var payloadConverter = new PayloadConverterBase();
            var payload = payloadConverter.SerializePigRequest("hadoop", pigJob);

            Assert.IsTrue(payload.Contains(string.Format("{0}={1}", WebHCatConstants.Execute, Uri.EscapeDataString(pigJob.Query))));
        }
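
        // Pig requests should not carry a job name, so the serializer must not emit the WebHCatConstants.DefineJobName field.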
        public void ShouldNotSerializePigJobName()
        {
            var pigJob = new PigJobCreateParameters();
            pigJob.Arguments.Add("16");
            pigJob.Query = "show tables";
            pigJob.Arguments.Add("10000");

            var payloadConverter = new PayloadConverterBase();
            var payload = payloadConverter.SerializePigRequest("hadoop", pigJob);
            Assert.IsFalse(payload.Contains(WebHCatConstants.DefineJobName));
        }
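
        // Verifies that every argument on the Pig job parameters appears in the serialized payload keyed by WebHCatConstants.Arg.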
        public void CanSerializeValidJobRequest_Arguments()
        {
            var pigJob = new PigJobCreateParameters();
            pigJob.Arguments.Add("16");
            pigJob.Query = "show tables";
            pigJob.Arguments.Add("10000");

            var payloadConverter = new PayloadConverterBase();
            var payload = payloadConverter.SerializePigRequest("hadoop", pigJob);
            foreach (var argument in pigJob.Arguments)
            {
                Assert.IsTrue(payload.Contains(string.Format("{0}={1}", WebHCatConstants.Arg, argument)));
            }
        }
 /// <inheritdoc />
 public async Task<JobCreationResults> SubmitPigJob(PigJobCreateParameters pigJobCreateParameters)
 {
     var remoteClient = ServiceLocator.Instance.Locate<IRemoteHadoopJobSubmissionPocoClientFactory>().Create(this.remoteCreds, this.context, this.ignoreSslErrors, this.GetUserAgentString());
     return await remoteClient.SubmitPigJob(pigJobCreateParameters);
 }
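
 // Stub: Pig job submission is not implemented on this client.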
 public Task<JobCreationResults> SubmitPigJob(PigJobCreateParameters pigJobCreateParameters)
 {
     throw new System.NotImplementedException();
 }
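
        // Verifies that file resources passed to the cmdlet surface on the resulting job definition.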
        public void ICanCallThe_New_HDInsightPigJobDefinitionCmdlet_WithResources()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load 'passwd' using PigStorage(':'); B = foreach A generate $0 as id;" };
            pigJobDefinition.Files.Add("pidata.txt");
            pigJobDefinition.Files.Add("pidate2.txt");

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightPigJobDefinition)
                            .WithParameter(CmdletConstants.Query, pigJobDefinition.Query)
                            .WithParameter(CmdletConstants.Files, pigJobDefinition.Files)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightPigJobDefinition pigJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightPigJobDefinition>().First();

                Assert.AreEqual(pigJobDefinition.Query, pigJobFromPowershell.Query);

                foreach (string file in pigJobDefinition.Files)
                {
                    Assert.IsTrue(
                        pigJobFromPowershell.Files.Any(arg => string.Equals(file, arg)),
                        "Unable to find File '{0}' in value returned from PowerShell",
                        file);
                }
            }
        }
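
        // Verifies that the cmdlet accepts a blob-storage query file (with arguments) in place of inline query text.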
        public void ICanCallThe_New_HDInsightPigJobDefinitionCmdlet_WithQueryFile()
        {
            var pigJobDefinition = new PigJobCreateParameters { File = TestConstants.WabsProtocolSchemeName + "container@accountname/pigquery.q" };

            pigJobDefinition.Arguments.Add("map.input.tasks=1000");
            pigJobDefinition.Arguments.Add("map.input.reducers=1000");

            using (IRunspace runspace = this.GetPowerShellRunspace())
            {
                IPipelineResult results =
                    runspace.NewPipeline()
                            .AddCommand(CmdletConstants.NewAzureHDInsightPigJobDefinition)
                            .WithParameter(CmdletConstants.File, pigJobDefinition.File)
                            .WithParameter(CmdletConstants.Arguments, pigJobDefinition.Arguments)
                            .Invoke();
                Assert.AreEqual(1, results.Results.Count);
                AzureHDInsightPigJobDefinition pigJobFromPowershell = results.Results.ToEnumerable<AzureHDInsightPigJobDefinition>().First();

                Assert.AreEqual(pigJobDefinition.File, pigJobFromPowershell.File);
            }
        }
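
        // Validates the parameters and returns a synthesized success result; CreateJobSuccessResult indicates this is a simulation/test client rather than a live submission path.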
        public Task<JobCreationResults> CreatePigJobAsync(PigJobCreateParameters pigJobCreateParameters)
        {
            if (pigJobCreateParameters == null)
            {
                throw new ArgumentNullException("pigJobCreateParameters");
            }

            this.PrepareQueryJob(pigJobCreateParameters);
            JobCreationResults retval =
                this.CreateJobSuccessResult(
                    new JobDetails { Query = pigJobCreateParameters.Query, StatusDirectory = pigJobCreateParameters.StatusFolder }, string.Empty);
            return TaskEx2.FromResult(retval);
        }
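
 // Synchronous wrapper: prepares the query job and then blocks on CreatePigJobAsync.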
 public JobCreationResults CreatePigJob(PigJobCreateParameters pigJobCreateParameters)
 {
     this.PrepareQueryJob(pigJobCreateParameters);
     return this.CreatePigJobAsync(pigJobCreateParameters).WaitForResult();
 }
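
 // Synchronous wrapper that blocks on CreatePigJobAsync.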
 public JobCreationResults CreatePigJob(PigJobCreateParameters pigJobCreateParameters)
 {
     return this.CreatePigJobAsync(pigJobCreateParameters).WaitForResult();
 }
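
        // Verifies that a file-based Pig job serializes the URL-encoded file path keyed by WebHCatConstants.File.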
        public void CanSerializeValidPigJobRequest_WithFile()
        {
            var pigJob = new PigJobCreateParameters()
            {
                File = Constants.WabsProtocolSchemeName + "filepath.hql"
            };

            var payloadConverter = new PayloadConverterBase();
            var payload = payloadConverter.SerializePigRequest("hadoop", pigJob);

            Assert.IsTrue(payload.Contains(string.Format("{0}={1}", WebHCatConstants.File, Uri.EscapeDataString(pigJob.File))));
        }
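
        // Validates the parameters and returns a synthesized success result built from the query and status folder (simulation/test path).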
        public Task<JobCreationResults> SubmitPigJob(PigJobCreateParameters pigJobCreateParameters)
        {
            if (pigJobCreateParameters == null)
            {
                throw new ArgumentNullException("pigJobCreateParameters");
            }

            var retval = this.CreateJobSuccessResult(new JobDetails()
            {
                Query = pigJobCreateParameters.Query,
                StatusDirectory = pigJobCreateParameters.StatusFolder
            },
            string.Empty);
            return Task.FromResult(retval);
        }
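
        // Builds a Pig job definition through the command factory and starts it with the shared TestJobStart helper.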
        public void CanCreateNewPigJob_StartJob()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load table from 'A'" };

            INewAzureHDInsightPigJobDefinitionCommand newPigJobDefinitionCommand =
                ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewPigJobDefinition();
            newPigJobDefinitionCommand.Query = pigJobDefinition.Query;
            newPigJobDefinitionCommand.EndProcessing();

            AzureHDInsightPigJobDefinition pigJobFromCommand = newPigJobDefinitionCommand.Output.ElementAt(0);

            TestJobStart(pigJobFromCommand);
        }
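
        // A query containing a restricted character ('%') must fail at job start with an InvalidOperationException carrying the full guidance message.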
        public void CannotCreateNewPigJob_WithRestrictedCharacters_StartJob()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load table from 'A' %" };

            INewAzureHDInsightPigJobDefinitionCommand newPigJobDefinitionCommand =
                ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewPigJobDefinition();
            newPigJobDefinitionCommand.Query = pigJobDefinition.Query;
            newPigJobDefinitionCommand.EndProcessing();

            AzureHDInsightPigJobDefinition pigJobFromCommand = newPigJobDefinitionCommand.Output.ElementAt(0);
            try
            {
                TestJobStart(pigJobFromCommand);
                Assert.Fail();
            }
            catch (AggregateException aggregateException)
            {
                var invalidOperationException = aggregateException.GetBaseException() as InvalidOperationException;
                Assert.IsNotNull(invalidOperationException);
                Assert.AreEqual("Query text contains restricted character '%', please upload the query to a file in storage and re-submit the job using the -File parameter",
                    invalidOperationException.Message);
            }
        }
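
        // End-to-end sample: reads cluster settings from app.config, then submits a Pig job followed by a Hive job using basic-auth credentials, waiting for each to complete.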
        private static void SubmitJobs()
        {
            // Get HDInsight cluster configuration settings
            string clusterName = ConfigurationManager.AppSettings["ClusterName"];
            string userName = ConfigurationManager.AppSettings["UserName"];
            string password = ConfigurationManager.AppSettings["Password"];

            // Create basic authentication credential for cluster
            BasicAuthCredential bcred = new BasicAuthCredential();
            bcred.Server = new Uri("https://" + clusterName + ".azurehdinsight.net");
            bcred.UserName = userName;
            bcred.Password = password;

            // Create and submit Pig job
            PigJobCreateParameters pigJob = new PigJobCreateParameters()
            {
                StatusFolder = "/data/racecar/scripts/processdatastatus",
                File = "/data/racecar/scripts/processdata.pig"
            };
            var pigJobClient = JobSubmissionClientFactory.Connect(bcred);
            JobCreationResults pigJobResults = pigJobClient.CreatePigJob(pigJob);
            WaitForJobCompletion(pigJobResults, pigJobClient);

            // Create and submit Hive job
            HiveJobCreateParameters hiveJob = new HiveJobCreateParameters()
            {
                JobName = "Create Hive tables",
                StatusFolder = "/data/racecar/scripts/createtablestatus",
                File = "/data/racecar/scripts/createtables.hql"
            };
            var hiveJobClient = JobSubmissionClientFactory.Connect(bcred);
            JobCreationResults hiveJobResults = hiveJobClient.CreateHiveJob(hiveJob);
            WaitForJobCompletion(hiveJobResults, hiveJobClient);

        }
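
        // Verifies that file resources assigned to the command surface on the resulting job definition.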
        public void CanCreateNewPigDefinition_WithResources()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load 'passwd' using PigStorage(':'); B = foreach A generate $0 as id;" };
            pigJobDefinition.Files.Add("pidata.txt");
            pigJobDefinition.Files.Add("pidate2.txt");

            INewAzureHDInsightPigJobDefinitionCommand newPigJobDefinitionCommand =
                ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewPigJobDefinition();
            newPigJobDefinitionCommand.Query = pigJobDefinition.Query;
            newPigJobDefinitionCommand.Files = pigJobDefinition.Files.ToArray();
            newPigJobDefinitionCommand.EndProcessing();

            AzureHDInsightPigJobDefinition pigJobFromCommand = newPigJobDefinitionCommand.Output.ElementAt(0);

            Assert.AreEqual(pigJobDefinition.Query, pigJobFromCommand.Query);

            foreach (string resource in pigJobDefinition.Files)
            {
                Assert.IsTrue(
                    pigJobFromCommand.Files.Any(arg => string.Equals(resource, arg)),
                    "Unable to find File '{0}' in value returned from command",
                    resource);
            }
        }
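
        // Variant of the restricted-character test that only checks for the restricted-character fragment in the exception message.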
        public void CannotCreateNewPigJob_WithRestrictedCharacters_StartJob()
        {
            var pigJobDefinition = new PigJobCreateParameters { Query = "load table from 'A' %" };

            INewAzureHDInsightPigJobDefinitionCommand newPigJobDefinitionCommand =
                ServiceLocator.Instance.Locate<IAzureHDInsightCommandFactory>().CreateNewPigJobDefinition();
            newPigJobDefinitionCommand.Query = pigJobDefinition.Query;
            newPigJobDefinitionCommand.EndProcessing();

            AzureHDInsightPigJobDefinition pigJobFromCommand = newPigJobDefinitionCommand.Output.ElementAt(0);
            try
            {
                TestJobStart(pigJobFromCommand);
                Assert.Fail();
            }
            catch (AggregateException aggregateException)
            {
                var invalidOperationException = aggregateException.GetBaseException() as InvalidOperationException;
                Assert.IsNotNull(invalidOperationException);
                Assert.IsTrue(invalidOperationException.Message.Contains("Query contains restricted character :'%'"), "Exception not thrown for special character");
            }
        }