public async Task TestDeleteSparkJob()
        {
            // Arrange: create a client plus a throw-away Spark job definition to delete.
            var client = CreateClient();
            SparkJobDefinitionResource resource = await DisposableSparkJobDefinition.CreateResource(client, Recording, TestEnvironment.StorageFileSystemName, TestEnvironment.StorageAccountName);

            // Act + assert: start the long-running delete and require it to finish successfully.
            SparkJobDefinitionDeleteSparkJobDefinitionOperation deleteOperation = await client.StartDeleteSparkJobDefinitionAsync(resource.Name);
            await deleteOperation.WaitAndAssertSuccessfulCompletion();
        }
            /// <summary>
            /// Creates a Spark job definition on the service and returns the completed resource.
            /// </summary>
            /// <param name="client">Client used to issue the create call.</param>
            /// <param name="recording">Test recording used to generate a stable, unique job name.</param>
            /// <param name="storageFileSystemName">ADLS Gen2 file system that holds the sample job file.</param>
            /// <param name="storageAccountName">Storage account that hosts the file system.</param>
            /// <returns>The created <see cref="SparkJobDefinitionResource"/> once the operation completes.</returns>
            public static async ValueTask<SparkJobDefinitionResource> CreateResource(SparkJobDefinitionClient client, TestRecording recording, string storageFileSystemName, string storageAccountName)
            {
                // Unique name so concurrent/recorded test runs do not collide.
                string jobName = recording.GenerateId("SparkJobDefinition", 16);

                // Points at the pre-uploaded wordcount sample in the test storage account.
                string file = string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/wordcount.zip", storageFileSystemName, storageAccountName);

                var jobProperties = new SparkJobProperties(file, "28g", 4, "28g", 4, 2);
                var jobDefinition = new SparkJobDefinition(new BigDataPoolReference(BigDataPoolReferenceType.BigDataPoolReference, "sparkchhamosyna"), jobProperties);
                var resource = new SparkJobDefinitionResource(jobDefinition);

                // Start the create-or-update LRO and hand back the finished resource.
                SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOperation createOperation = await client.StartCreateOrUpdateSparkJobDefinitionAsync(jobName, resource);
                return await createOperation.WaitForCompletionAsync();
            }
        public async Task TestGetSparkJob()
        {
            var client = CreateClient();

            // Guarantee at least one job definition exists for the duration of the test.
            await using DisposableSparkJobDefinition sparkJobDefinition = await DisposableSparkJobDefinition.Create(client, Recording, TestEnvironment.StorageFileSystemName, TestEnvironment.StorageAccountName);

            IList<SparkJobDefinitionResource> jobs = await client.GetSparkJobDefinitionsByWorkspaceAsync().ToListAsync();
            Assert.GreaterOrEqual(jobs.Count, 1);

            // Every listed job must also be retrievable individually, with matching identity.
            foreach (SparkJobDefinitionResource listedJob in jobs)
            {
                SparkJobDefinitionResource fetchedJob = await client.GetSparkJobDefinitionAsync(listedJob.Name);

                Assert.AreEqual(listedJob.Name, fetchedJob.Name);
                Assert.AreEqual(listedJob.Id, fetchedJob.Id);
            }
        }
        /// <summary>
        /// Creates or updates a Spark job definition in the target Synapse workspace from a
        /// JSON definition file, honoring -WhatIf/-Confirm via ShouldProcess.
        /// </summary>
        public override void ExecuteCmdlet()
        {
            // A piped-in workspace object overrides/supplies the workspace name.
            if (this.IsParameterBound(c => c.WorkspaceObject))
            {
                this.WorkspaceName = this.WorkspaceObject.Name;
            }

            // Guard clause: bail out early when the user declines the operation.
            if (!this.ShouldProcess(this.WorkspaceName, String.Format(Resources.SettingSynapseSparkJobDefinition, this.Name, this.WorkspaceName)))
            {
                return;
            }

            // Load and deserialize the definition file supplied by the caller.
            string rawJsonContent = SynapseAnalyticsClient.ReadJsonFileContent(this.TryResolvePath(DefinitionFile));
            var resource = new SparkJobDefinitionResource(JsonConvert.DeserializeObject<SparkJobDefinition>(rawJsonContent));

            // Optionally file the definition under a workspace folder.
            if (this.IsParameterBound(c => c.FolderPath))
            {
                resource.Properties.Folder = new SparkJobDefinitionFolder { Name = FolderPath };
            }

            WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.CreateOrUpdateSparkJobDefinition(this.Name, resource)));
        }
        public async Task TestRenameSparkJob()
        {
            // Arrange: create a job definition to rename.
            var client = CreateClient();
            SparkJobDefinitionResource resource = await DisposableSparkJobDefinition.CreateResource(client, this.Recording, TestEnvironment.StorageFileSystemName, TestEnvironment.StorageAccountName);
            string newSparkJobName = Recording.GenerateId("Pipeline2", 16);

            // Act: rename and wait for the long-running operation to complete.
            var renameRequest = new ArtifactRenameRequest { NewName = newSparkJobName };
            SparkJobDefinitionRenameSparkJobDefinitionOperation renameOperation = await client.StartRenameSparkJobDefinitionAsync(resource.Name, renameRequest);
            await renameOperation.WaitForCompletionAsync();

            // Assert: the definition is now retrievable under its new name.
            SparkJobDefinitionResource sparkJob = await client.GetSparkJobDefinitionAsync(newSparkJobName);
            Assert.AreEqual(newSparkJobName, sparkJob.Name);

            // Clean up the renamed definition explicitly.
            SparkJobDefinitionDeleteSparkJobDefinitionOperation deleteOperation = await client.StartDeleteSparkJobDefinitionAsync(newSparkJobName);
            await deleteOperation.WaitForCompletionAsync();
        }
// Example #6 (separator left by code-sample aggregation; translated from "示例#6")
        /// <summary>
        /// Starts a long-running debug operation for the given Spark job definition resource.
        /// </summary>
        /// <param name="sparkJobDefinitionAzureResource">The Spark job definition resource to debug. Must not be null.</param>
        /// <param name="cancellationToken">Token used to cancel the initial service call.</param>
        /// <returns>An operation object that can be polled for completion.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="sparkJobDefinitionAzureResource"/> is null.</exception>
        public virtual SparkJobDefinitionDebugSparkJobDefinitionOperation StartDebugSparkJobDefinition(SparkJobDefinitionResource sparkJobDefinitionAzureResource, CancellationToken cancellationToken = default)
        {
            if (sparkJobDefinitionAzureResource is null)
            {
                throw new ArgumentNullException(nameof(sparkJobDefinitionAzureResource));
            }

            using var scope = _clientDiagnostics.CreateScope("SparkJobDefinitionClient.StartDebugSparkJobDefinition");
            scope.Start();
            try
            {
                // Issue the initial service call, then wrap its response in an operation object.
                // The request is re-created here so the operation has access to the original
                // request that produced the response.
                var response = RestClient.DebugSparkJobDefinition(sparkJobDefinitionAzureResource, cancellationToken);
                var request = RestClient.CreateDebugSparkJobDefinitionRequest(sparkJobDefinitionAzureResource).Request;
                return new SparkJobDefinitionDebugSparkJobDefinitionOperation(_clientDiagnostics, _pipeline, request, response);
            }
            catch (Exception e)
            {
                // Record the failure in diagnostics, then preserve the original stack trace.
                scope.Failed(e);
                throw;
            }
        }
// Example #7 (separator left by code-sample aggregation; translated from "示例#7")
        /// <summary>
        /// Starts a long-running create-or-update operation for a Spark job definition.
        /// </summary>
        /// <param name="sparkJobDefinitionName">Name of the Spark job definition. Must not be null.</param>
        /// <param name="sparkJobDefinition">The resource payload to create or update. Must not be null.</param>
        /// <param name="ifMatch">Optional ETag for optimistic concurrency; null to apply unconditionally.</param>
        /// <param name="cancellationToken">Token used to cancel the initial service call.</param>
        /// <returns>An operation object that can be polled for completion.</returns>
        /// <exception cref="ArgumentNullException">When a required argument is null.</exception>
        public virtual SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOperation StartCreateOrUpdateSparkJobDefinition(string sparkJobDefinitionName, SparkJobDefinitionResource sparkJobDefinition, string ifMatch = null, CancellationToken cancellationToken = default)
        {
            if (sparkJobDefinitionName is null)
            {
                throw new ArgumentNullException(nameof(sparkJobDefinitionName));
            }
            if (sparkJobDefinition is null)
            {
                throw new ArgumentNullException(nameof(sparkJobDefinition));
            }

            using var scope = _clientDiagnostics.CreateScope("SparkJobDefinitionClient.StartCreateOrUpdateSparkJobDefinition");
            scope.Start();
            try
            {
                // Issue the initial service call, then wrap its response in an operation object
                // alongside a re-created copy of the original request.
                var response = RestClient.CreateOrUpdateSparkJobDefinition(sparkJobDefinitionName, sparkJobDefinition, ifMatch, cancellationToken);
                var request = RestClient.CreateCreateOrUpdateSparkJobDefinitionRequest(sparkJobDefinitionName, sparkJobDefinition, ifMatch).Request;
                return new SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOperation(_clientDiagnostics, _pipeline, request, response);
            }
            catch (Exception e)
            {
                // Record the failure in diagnostics, then preserve the original stack trace.
                scope.Failed(e);
                throw;
            }
        }
 /// <summary>
 /// Captures a created Spark job definition together with the client that created it,
 /// so the resource can be cleaned up when this wrapper is disposed.
 /// </summary>
 /// <param name="client">Client used for later cleanup calls.</param>
 /// <param name="resource">The created Spark job definition resource.</param>
 private DisposableSparkJobDefinition(SparkJobDefinitionClient client, SparkJobDefinitionResource resource)
 {
     Resource = resource;
     _client = client;
 }
        /// <summary>
        /// Deserializes a Spark job definition from raw JSON and creates or updates it on the
        /// service, blocking until the long-running operation completes.
        /// </summary>
        /// <param name="SparkJobDefinitionName">Name under which to create or update the definition.</param>
        /// <param name="rawJsonContent">Raw JSON payload describing the Spark job definition.</param>
        /// <returns>The resource returned by the service after the operation finishes.</returns>
        public SparkJobDefinitionResource CreateOrUpdateSparkJobDefinition(string SparkJobDefinitionName, string rawJsonContent)
        {
            // Local renamed from "SparkJobDefinition": a local that exactly shadows the type
            // name is confusing and violates camelCase local-variable convention. (The
            // PascalCase parameter name is kept for backward compatibility with named arguments.)
            SparkJobDefinitionResource resource = new SparkJobDefinitionResource(JsonConvert.DeserializeObject<SparkJobDefinition>(rawJsonContent));

            // Start the LRO and synchronously poll it to completion.
            return _sparkJobDefinitionClient.StartCreateOrUpdateSparkJobDefinition(SparkJobDefinitionName, resource).Poll().Value;
        }
 /// <summary>
 /// Synchronously creates or updates a Spark job definition, recording the call in
 /// client diagnostics.
 /// </summary>
 /// <param name="sparkJobDefinitionName">Name of the Spark job definition.</param>
 /// <param name="sparkJobDefinition">The resource payload to create or update.</param>
 /// <param name="ifMatch">Optional ETag for optimistic concurrency; null to apply unconditionally.</param>
 /// <param name="cancellationToken">Token used to cancel the service call.</param>
 /// <returns>The service response containing the resulting resource.</returns>
 public virtual Response<SparkJobDefinitionResource> CreateOrUpdateSparkJobDefinition(string sparkJobDefinitionName, SparkJobDefinitionResource sparkJobDefinition, string ifMatch = null, CancellationToken cancellationToken = default)
 {
     using var scope = _clientDiagnostics.CreateScope("SparkJobDefinitionClient.CreateOrUpdateSparkJobDefinition");
     scope.Start();
     try
     {
         Response<SparkJobDefinitionResource> response = RestClient.CreateOrUpdateSparkJobDefinition(sparkJobDefinitionName, sparkJobDefinition, ifMatch, cancellationToken);
         return response;
     }
     catch (Exception e)
     {
         // Record the failure in diagnostics, then preserve the original stack trace.
         scope.Failed(e);
         throw;
     }
 }
        /// <summary>
        /// Creates or updates a Spark job definition and blocks until the long-running
        /// operation completes, returning the final resource.
        /// </summary>
        /// <param name="SparkJobDefinitionName">Name of the Spark job definition.</param>
        /// <param name="SparkJobDefinition">The resource payload to create or update.</param>
        /// <returns>The resource returned by the service once polling finishes.</returns>
        public SparkJobDefinitionResource CreateOrUpdateSparkJobDefinition(string SparkJobDefinitionName, SparkJobDefinitionResource SparkJobDefinition)
        {
            return _sparkJobDefinitionClient
                .StartCreateOrUpdateSparkJobDefinition(SparkJobDefinitionName, SparkJobDefinition)
                .Poll()
                .Value;
        }
 /// <summary>
 /// Wraps a service-side <see cref="SparkJobDefinitionResource"/> for PowerShell output.
 /// </summary>
 /// <param name="sparkJobDefinition">The resource to wrap. Must not be null.</param>
 /// <exception cref="ArgumentNullException">When <paramref name="sparkJobDefinition"/> is null.</exception>
 public PSSparkJobDefinitionResource(SparkJobDefinitionResource sparkJobDefinition)
     : base(
         // Fix: the original used "sparkJobDefinition?.Properties" in the body, implying null
         // is tolerated, but this base call dereferenced the argument unguarded and would have
         // thrown NullReferenceException first. Fail fast with a clear ArgumentNullException.
         (sparkJobDefinition ?? throw new ArgumentNullException(nameof(sparkJobDefinition))).Id,
         sparkJobDefinition.Name,
         sparkJobDefinition.Type,
         sparkJobDefinition.Etag)
 {
     // Non-null is guaranteed by the guard above, so no "?." is needed here.
     Properties = sparkJobDefinition.Properties != null ? new PSSparkJobDefinition(sparkJobDefinition.Properties) : null;
 }