/// <summary>
/// Initializes artifact clients (pipelines, notebooks, triggers, datasets, data flows,
/// Spark pools/jobs, SQL scripts) for the given Synapse workspace endpoint.
/// </summary>
/// <param name="workspaceName">Synapse workspace name; used as the host prefix of the endpoint URI.</param>
/// <param name="context">Azure context supplying the environment endpoint suffix and session credentials.</param>
/// <exception cref="AzPSInvalidOperationException">Thrown when <paramref name="context"/> is null.</exception>
public SynapseAnalyticsArtifactsClient(string workspaceName, IAzureContext context)
{
    if (context == null)
    {
        throw new AzPSInvalidOperationException(Resources.InvalidDefaultSubscription);
    }

    _context = context;
    string suffix = context.Environment.GetEndpoint(AzureEnvironment.ExtendedEndpoint.AzureSynapseAnalyticsEndpointSuffix);
    Uri uri = new Uri("https://" + workspaceName + "." + suffix);
    _endpoint = uri;

    // Reuse one credential for all clients instead of allocating a new
    // AzureSessionCredential per client (same behavior, less garbage).
    var credential = new AzureSessionCredential(context);
    _pipelineClient = new PipelineClient(uri, credential);
    _pipelineRunClient = new PipelineRunClient(uri, credential);
    _linkedServiceClient = new LinkedServiceClient(uri, credential);
    _notebookClient = new NotebookClient(uri, credential);
    _triggerClient = new TriggerClient(uri, credential);
    _triggerRunClient = new TriggerRunClient(uri, credential);
    _datasetClient = new DatasetClient(uri, credential);
    _dataFlowClient = new DataFlowClient(uri, credential);
    _dataFlowDebugSessionClient = new DataFlowDebugSessionClient(uri, credential);
    _bigDataPoolsClient = new BigDataPoolsClient(uri, credential);
    _sparkJobDefinitionClient = new SparkJobDefinitionClient(uri, credential);
    _sqlScriptClient = new SqlScriptClient(uri, credential);
}
/// <summary>
/// Creates a Spark job definition, then verifies that the delete
/// long-running operation completes successfully.
/// </summary>
public async Task TestDeleteSparkJob()
{
    SparkJobDefinitionClient client = CreateClient();

    // Create a raw resource (not the disposable wrapper) because this test
    // deletes it explicitly and asserts on the delete operation itself.
    SparkJobDefinitionResource created = await DisposableSparkJobDefinition.CreateResource(
        client,
        Recording,
        TestEnvironment.StorageFileSystemName,
        TestEnvironment.StorageAccountName);

    SparkJobDefinitionDeleteSparkJobDefinitionOperation operation =
        await client.StartDeleteSparkJobDefinitionAsync(created.Name);
    await operation.WaitAndAssertSuccessfulCompletion();
}
/// <summary>
/// Starts a debug run for a freshly created Spark job definition and
/// verifies the operation completes and yields a batch job.
/// </summary>
public async Task TestDebug()
{
    SparkJobDefinitionClient client = CreateClient();
    await using DisposableSparkJobDefinition sparkJobDefinition = await DisposableSparkJobDefinition.Create(
        client,
        Recording,
        TestEnvironment.StorageFileSystemName,
        TestEnvironment.StorageAccountName);

    SparkJobDefinitionDebugSparkJobDefinitionOperation debugOperation =
        await client.StartDebugSparkJobDefinitionAsync(sparkJobDefinition.Resource);
    SparkBatchJob job = await debugOperation.WaitForCompletionAsync();

    // The original test discarded the result; assert it so the test
    // actually verifies the debug run produced a batch job.
    Assert.NotNull(job);
}
/// <summary>
/// Lists Spark job definitions in the workspace and verifies each one can
/// be fetched individually with matching name and id.
/// </summary>
public async Task TestGetSparkJob()
{
    SparkJobDefinitionClient client = CreateClient();
    await using DisposableSparkJobDefinition sparkJobDefinition = await DisposableSparkJobDefinition.Create(
        client,
        Recording,
        TestEnvironment.StorageFileSystemName,
        TestEnvironment.StorageAccountName);

    IList<SparkJobDefinitionResource> listed = await client.GetSparkJobDefinitionsByWorkspaceAsync().ToListAsync();

    // At minimum the definition we just created must be present.
    Assert.GreaterOrEqual(listed.Count, 1);

    foreach (SparkJobDefinitionResource expected in listed)
    {
        SparkJobDefinitionResource fetched = await client.GetSparkJobDefinitionAsync(expected.Name);
        Assert.AreEqual(expected.Name, fetched.Name);
        Assert.AreEqual(expected.Id, fetched.Id);
    }
}
/// <summary>
/// Creates a Spark job definition, renames it, verifies the new name is
/// visible via a point read, and deletes the renamed definition.
/// </summary>
public async Task TestRenameSparkJob()
{
    SparkJobDefinitionClient client = CreateClient();

    // Use `Recording` (not `this.Recording`) for consistency with the
    // other tests in this fixture.
    SparkJobDefinitionResource resource = await DisposableSparkJobDefinition.CreateResource(
        client,
        Recording,
        TestEnvironment.StorageFileSystemName,
        TestEnvironment.StorageAccountName);

    string newSparkJobName = Recording.GenerateId("Pipeline2", 16);

    SparkJobDefinitionRenameSparkJobDefinitionOperation renameOperation =
        await client.StartRenameSparkJobDefinitionAsync(resource.Name, new ArtifactRenameRequest() { NewName = newSparkJobName });
    await renameOperation.WaitForCompletionAsync();

    SparkJobDefinitionResource sparkJob = await client.GetSparkJobDefinitionAsync(newSparkJobName);
    Assert.AreEqual(newSparkJobName, sparkJob.Name);

    // Clean up under the new name; the disposable wrapper was not used here
    // because the rename changes the name it would try to delete.
    SparkJobDefinitionDeleteSparkJobDefinitionOperation deleteOperation =
        await client.StartDeleteSparkJobDefinitionAsync(newSparkJobName);
    await deleteOperation.WaitForCompletionAsync();
}
/// <summary>
/// Creates a Spark job definition resource pointing at the sample wordcount
/// job in the given storage account and waits for the create operation to finish.
/// </summary>
/// <param name="client">Client used to issue the create operation.</param>
/// <param name="recording">Test recording used to generate a deterministic job name.</param>
/// <param name="storageFileSystemName">ADLS Gen2 file system containing the sample job file.</param>
/// <param name="storageAccountName">Storage account hosting the file system.</param>
/// <param name="sparkPoolName">Big data pool to target; defaults to the pool this test environment provisions.</param>
/// <returns>The created <see cref="SparkJobDefinitionResource"/>.</returns>
public static async ValueTask<SparkJobDefinitionResource> CreateResource(
    SparkJobDefinitionClient client,
    TestRecording recording,
    string storageFileSystemName,
    string storageAccountName,
    string sparkPoolName = "sparkchhamosyna")
{
    string jobName = recording.GenerateId("SparkJobDefinition", 16);

    // abfss URI of the sample wordcount job payload.
    string file = string.Format(
        "abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/wordcount.zip",
        storageFileSystemName,
        storageAccountName);

    // Driver: 28g / 4 cores; executors: 28g / 4 cores, 2 executors.
    SparkJobProperties jobProperties = new SparkJobProperties(file, "28g", 4, "28g", 4, 2);
    SparkJobDefinition jobDefinition = new SparkJobDefinition(
        new BigDataPoolReference(BigDataPoolReferenceType.BigDataPoolReference, sparkPoolName),
        jobProperties);
    SparkJobDefinitionResource resource = new SparkJobDefinitionResource(jobDefinition);

    SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOperation createOperation =
        await client.StartCreateOrUpdateSparkJobDefinitionAsync(jobName, resource);
    return await createOperation.WaitForCompletionAsync();
}
/// <summary>
/// Creates a Spark job definition and wraps it in a
/// <see cref="DisposableSparkJobDefinition"/> that deletes it on disposal.
/// </summary>
public static async ValueTask<DisposableSparkJobDefinition> Create(SparkJobDefinitionClient client, TestRecording recording, string storageFileSystemName, string storageAccountName)
{
    SparkJobDefinitionResource resource = await CreateResource(client, recording, storageFileSystemName, storageAccountName);
    return new DisposableSparkJobDefinition(client, resource);
}
/// <summary>
/// Wraps an already-created Spark job definition resource; private so
/// instances only come from the <c>Create</c> factory.
/// </summary>
/// <param name="client">Client used later to delete the resource.</param>
/// <param name="resource">The created Spark job definition resource.</param>
private DisposableSparkJobDefinition(SparkJobDefinitionClient client, SparkJobDefinitionResource resource)
{
    Resource = resource;
    _client = client;
}