/// <summary>
/// Verifies that the workspace reports at least one Spark (big data) pool.
/// </summary>
public async Task ListPools()
{
    BigDataPoolsClient client = CreateClient();

    BigDataPoolResourceInfoListResult pools = await client.ListAsync();

    // NUnit's Assert.GreaterOrEqual(arg1, arg2) asserts arg1 >= arg2.
    // The original call had the arguments reversed (1 >= Count), which
    // only passes when the workspace has at most one pool — the opposite
    // of the intended "at least one pool" check.
    Assert.GreaterOrEqual(pools.Value.Count, 1);
}
/// <summary>
/// Creates the artifact sub-clients for a Synapse workspace, all pointed at the
/// workspace's dev endpoint ("https://{workspaceName}.{synapseSuffix}").
/// </summary>
/// <param name="workspaceName">Name of the Synapse workspace; forms the endpoint host.</param>
/// <param name="context">Azure context supplying the environment endpoint suffix and credentials.</param>
/// <exception cref="AzPSInvalidOperationException">Thrown when <paramref name="context"/> is null.</exception>
public SynapseAnalyticsArtifactsClient(string workspaceName, IAzureContext context)
{
    if (context == null)
    {
        throw new AzPSInvalidOperationException(Resources.InvalidDefaultSubscription);
    }

    _context = context;

    string suffix = context.Environment.GetEndpoint(AzureEnvironment.ExtendedEndpoint.AzureSynapseAnalyticsEndpointSuffix);
    Uri uri = new Uri("https://" + workspaceName + "." + suffix);
    _endpoint = uri;

    // One credential instance is sufficient for every sub-client; the original
    // constructed a fresh AzureSessionCredential twelve times.
    var credential = new AzureSessionCredential(context);

    _pipelineClient = new PipelineClient(uri, credential);
    _pipelineRunClient = new PipelineRunClient(uri, credential);
    _linkedServiceClient = new LinkedServiceClient(uri, credential);
    _notebookClient = new NotebookClient(uri, credential);
    _triggerClient = new TriggerClient(uri, credential);
    _triggerRunClient = new TriggerRunClient(uri, credential);
    _datasetClient = new DatasetClient(uri, credential);
    _dataFlowClient = new DataFlowClient(uri, credential);
    _dataFlowDebugSessionClient = new DataFlowDebugSessionClient(uri, credential);
    _bigDataPoolsClient = new BigDataPoolsClient(uri, credential);
    _sparkJobDefinitionClient = new SparkJobDefinitionClient(uri, credential);
    _sqlScriptClient = new SqlScriptClient(uri, credential);
}
/// <summary>
/// Fetches a known Spark pool by name and checks the returned resource's name matches.
/// </summary>
public async Task GetPool()
{
    const string PoolName = "sparkchhamosyna";

    BigDataPoolsClient poolsClient = CreateClient();
    BigDataPoolResourceInfo result = await poolsClient.GetAsync(PoolName);

    Assert.AreEqual(PoolName, result.Name);
}