/// <summary>
/// Verifies a file system can be created and that its properties are retrievable.
/// </summary>
public async Task CreateFileSystemAsync()
{
    var name = GetNewFileSystemName();
    DataLakeServiceClient service = DataLakeClientBuilder.GetServiceClient_Hns();
    bool created = false;
    try
    {
        DataLakeFileSystemClient fileSystem = InstrumentClient((await service.CreateFileSystemAsync(name)).Value);
        created = true;

        Response<FileSystemProperties> properties = await fileSystem.GetPropertiesAsync();
        Assert.IsNotNull(properties.Value);
    }
    finally
    {
        // Only delete when creation succeeded; otherwise DeleteFileSystemAsync throws
        // FileSystemNotFound from the finally block and masks the original failure.
        if (created)
        {
            await service.DeleteFileSystemAsync(name);
        }
    }
}
/// <summary>
/// Lists file systems and checks the service client's account name is cached correctly.
/// </summary>
public async Task GetFileSystemsAsync()
{
    // Arrange
    DataLakeServiceClient service = DataLakeClientBuilder.GetServiceClient_Hns();

    // Guarantee the account holds at least one file system before listing.
    await using (await GetNewFileSystem(service: service))
    {
        // Act
        IList<FileSystemItem> fileSystems = await service.GetFileSystemsAsync().ToListAsync();

        // Assert
        Assert.IsTrue(fileSystems.Count >= 1);

        string expectedAccountName = new DataLakeUriBuilder(service.Uri).AccountName;
        TestHelper.AssertCacheableProperty(expectedAccountName, () => service.AccountName);
    }
}
/// <summary>
/// Demonstrates uploading a local file's contents to a DataLake file with Upload().
/// </summary>
public void Upload()
{
    // Create a temporary Lorem Ipsum file on disk that we can upload
    // (comment fixed: only one file is created, not three)
    int contentLength = 10;
    string sampleFileContent = CreateTempFile(SampleFileContent.Substring(0, contentLength));

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-append" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
    filesystem.Create();
    try
    {
        // Get a reference to a file named "sample-file" in a filesystem
        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

        // Create the file
        file.Create();

        // Verify we created one file
        Assert.AreEqual(1, filesystem.GetPaths().Count());

        // Upload content to the file. When using the Upload API, you don't need to create the file first.
        // If the file already exists, it will be overwritten.
        // For larger files, Upload() will upload the file in multiple sequential requests.
        // The FileStream is disposed once the upload completes (the original leaked it).
        using (FileStream stream = File.OpenRead(sampleFileContent))
        {
            file.Upload(stream);
        }

        // Verify the contents of the file
        PathProperties properties = file.GetProperties();
        Assert.AreEqual(contentLength, properties.ContentLength);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
/// <summary>
/// Creates a new file system for a test and returns a <c>DisposingFileSystem</c> that
/// deletes it when disposed. Works around a service-side caching issue by retrying
/// creation once on ContainerAlreadyExists; if the retry still fails, the test is
/// marked inconclusive rather than failed.
/// </summary>
/// <param name="clientBuilder">Builder supplying service clients and recording support.</param>
/// <param name="service">Optional service client; defaults to HNS or non-HNS per <paramref name="hnsEnabled"/>.</param>
/// <param name="fileSystemName">Optional name; a new random name is generated when omitted.</param>
/// <param name="metadata">Optional metadata applied at creation.</param>
/// <param name="publicAccessType">Optional access level; when omitted, premium accounts get None, others FileSystem.</param>
/// <param name="premium">Whether the target account is premium (affects the default access level).</param>
/// <param name="hnsEnabled">Selects the hierarchical-namespace service client when no client is supplied.</param>
public static async Task<DisposingFileSystem> GetNewFileSystem(
    this DataLakeClientBuilder clientBuilder,
    DataLakeServiceClient service = default,
    string fileSystemName = default,
    IDictionary<string, string> metadata = default,
    PublicAccessType? publicAccessType = default,
    bool premium = default,
    bool hnsEnabled = true)
{
    fileSystemName ??= clientBuilder.GetNewFileSystemName();
    service ??= hnsEnabled ? clientBuilder.GetServiceClient_Hns() : clientBuilder.GetServiceClient_NonHns();
    if (publicAccessType == default)
    {
        // Premium accounts do not support public access; others default to container-level access.
        publicAccessType = premium ? PublicAccessType.None : PublicAccessType.FileSystem;
    }
    DataLakeFileSystemClient fileSystem = clientBuilder.AzureCoreRecordedTestBase.InstrumentClient(service.GetFileSystemClient(fileSystemName));

    // due to a service issue, if the initial container creation request times out, subsequent requests
    // can return a ContainerAlreadyExists code even though the container doesn't really exist.
    // we delay until after the service cache timeout and then attempt to create the container one more time.
    // If this attempt still fails, we mark the test as inconclusive.
    // TODO Remove this handling after the service bug is fixed https://github.com/Azure/azure-sdk-for-net/issues/9399
    try
    {
        await StorageTestBase<DataLakeTestEnvironment>.RetryAsync(
            clientBuilder.AzureCoreRecordedTestBase.Recording.Mode,
            async () => await fileSystem.CreateAsync(metadata: metadata, publicAccessType: publicAccessType.Value),
            ex => ex.ErrorCode == Blobs.Models.BlobErrorCode.ContainerAlreadyExists,
            retryDelay: TestConstants.DataLakeRetryDelay,
            retryAttempts: 1);
    }
    catch (RequestFailedException storageRequestFailedException)
        when (storageRequestFailedException.ErrorCode == Blobs.Models.BlobErrorCode.ContainerAlreadyExists)
    {
        // if we still get this error after retrying, mark the test as inconclusive
        TestContext.Out.WriteLine(
            $"{TestContext.CurrentContext.Test.Name} is inconclusive due to hitting " +
            $"the DataLake service bug described in https://github.com/Azure/azure-sdk-for-net/issues/9399");
        Assert.Inconclusive(); // passing the message in Inconclusive call doesn't show up in Console output.
    }
    return (new DisposingFileSystem(fileSystem));
}
/// <summary>
/// Verifies that pageSizeHint limits a listing page to one item.
/// </summary>
public async Task GetFileSystemsAsync_MaxResults()
{
    DataLakeServiceClient service = GetServiceClient_SharedKey();

    // Ensure at least two file systems exist so a single-item page is guaranteed.
    // Both are disposed; the original leaked the first DisposingFileSystem.
    await using DisposingFileSystem extra = await GetNewFileSystem(service: service);
    await using DisposingFileSystem test = await GetNewFileSystem(service: service);

    // Act
    Page<FileSystemItem> page = await service.GetFileSystemsAsync()
        .AsPages(pageSizeHint: 1)
        .FirstAsync();

    // Assert
    Assert.AreEqual(1, page.Values.Count());
}
/// <summary>
/// Verifies that pageSizeHint limits a listing page to one item.
/// </summary>
public async Task GetFileSystemsAsync_MaxResults()
{
    DataLakeServiceClient service = GetServiceClient_SharedKey();

    // Create two file systems so the service definitely has more than one page.
    using (GetNewFileSystem(out _, service: service))
    using (GetNewFileSystem(out DataLakeFileSystemClient _, service: service))
    {
        // Act
        Page<FileSystemItem> firstPage = await service.GetFileSystemsAsync()
            .AsPages(pageSizeHint: 1)
            .FirstAsync();

        // Assert
        Assert.AreEqual(1, firstPage.Values.Count());
    }
}
/// <summary>
/// Verifies that listing with FileSystemTraits.Metadata returns metadata on items.
/// </summary>
public async Task GetFileSystemsAsync_Metadata()
{
    DataLakeServiceClient service = GetServiceClient_SharedKey();

    // Ensure at least one container.
    // The out parameter type is DataLakeFileSystemClient to match the
    // GetNewFileSystem(out DataLakeFileSystemClient, ...) helper signature
    // (the original "FileSystemClient" type was inconsistent with its siblings).
    using (GetNewFileSystem(out DataLakeFileSystemClient fileSystem, service: service))
    {
        // Arrange
        IDictionary<string, string> metadata = BuildMetadata();
        await fileSystem.SetMetadataAsync(metadata);

        // Act
        FileSystemItem first = await service.GetFileSystemsAsync(FileSystemTraits.Metadata).FirstAsync();

        // Assert
        Assert.IsNotNull(first.Metadata);
    }
}
/// <summary>
/// Verifies that listing with a prefix returns only matching file systems.
/// </summary>
public async Task GetFileSystemsAsync_Prefix()
{
    DataLakeServiceClient service = GetServiceClient_SharedKey();
    const string prefix = "aaa";
    string fileSystemName = prefix + GetNewFileSystemName();

    // Create one file system carrying the prefix so the filtered listing is non-empty.
    await using DisposingFileSystem test = await GetNewFileSystem(service: service, fileSystemName: fileSystemName);

    // Act
    AsyncPageable<FileSystemItem> fileSystems = service.GetFileSystemsAsync(prefix: prefix);
    IList<FileSystemItem> items = await fileSystems.ToListAsync();

    // Assert
    Assert.AreNotEqual(0, items.Count);
    Assert.IsTrue(items.All(item => item.Name.StartsWith(prefix)));
    Assert.IsNotNull(items.Single(item => item.Name == fileSystemName));
}
/// <summary>
/// Demonstrates downloading a DataLake file directly to a local path with ReadTo().
/// </summary>
public void ReadTo()
{
    // Create a temporary Lorem Ipsum file on disk that we can upload
    string originalPath = CreateTempFile(SampleFileContent);

    // Get a temporary path on disk where we can download the file
    string downloadPath = CreateTempPath();

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-read" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-read"));
    filesystem.Create();
    try
    {
        // Get a reference to a file named "sample-file" in a filesystem
        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

        // First upload something the DataLake file so we have something to download.
        // The FileStream is disposed after the upload (the original leaked it).
        using (FileStream stream = File.OpenRead(originalPath))
        {
            file.Upload(stream);
        }

        // Download the DataLake file's directly to a file.
        // For larger files, ReadTo() will download the file in multiple sequential requests.
        #region Snippet:SampleSnippetDataLakeFileClient_ReadTo
        file.ReadTo(downloadPath);
        #endregion Snippet:SampleSnippetDataLakeFileClient_ReadTo

        // Verify the contents
        Assert.AreEqual(SampleFileContent, File.ReadAllText(downloadPath));
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
// Downloads the named file from the configured Data Lake storage account and wraps
// its content stream in a FileModel; returns null when the read yields no content.
public async Task<FileModel> GetFile(string fileName)
{
    return (await Execute(async (sharedKeyCredential) =>
    {
        var serviceClient = new DataLakeServiceClient(new Uri(_storageConfiguration.Url), sharedKeyCredential);
        // NOTE(review): the storage Url is passed here as the *file system name* —
        // this looks wrong; presumably a FileSystemName/container setting was intended.
        // TODO confirm against the configuration schema before relying on this path.
        var filesystem = serviceClient.GetFileSystemClient(_storageConfiguration.Url);
        var file = filesystem.GetFileClient(fileName);
        Response<FileDownloadInfo> fileContents = await file.ReadAsync();
        if (fileContents.Value != null)
        {
            // Hand the raw content stream to the caller; the caller owns its disposal.
            return new FileModel
            {
                FileStream = fileContents.Value.Content, Name = fileName
            };
        }
        return null;
    }));
}
// </Snippet_SetACLRecursively>
#endregion

#region Update ACLs recursively
// <Snippet_UpdateACLsRecursively>
/// <summary>
/// Recursively updates the ACL of a directory tree, granting rwx to one user,
/// optionally as a default-scope entry.
/// </summary>
public async Task UpdateACLsRecursively(DataLakeServiceClient serviceClient, bool isDefaultScope)
{
    DataLakeFileSystemClient fileSystemClient = serviceClient.GetFileSystemClient("my-container");
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient("my-parent-directory");

    var accessControlListUpdate = new List<PathAccessControlItem>
    {
        new PathAccessControlItem(
            AccessControlType.User,
            RolePermissions.Read | RolePermissions.Write | RolePermissions.Execute,
            isDefaultScope,
            entityId: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"),
    };

    await directoryClient.UpdateAccessControlRecursiveAsync(accessControlListUpdate, null);
}
/// <summary>
/// Verifies a DataLakeServiceClient built from a connection string can round-trip
/// a file system create/delete.
/// </summary>
public async Task Ctor_ConnectionString_RoundTrip()
{
    // Arrange (typo "Arrage" fixed)
    string connectionString = $"DefaultEndpointsProtocol=https;AccountName={TestConfigHierarchicalNamespace.AccountName};AccountKey={TestConfigHierarchicalNamespace.AccountKey};EndpointSuffix=core.windows.net";
    DataLakeServiceClient serviceClient = InstrumentClient(new DataLakeServiceClient(connectionString, GetOptions()));
    DataLakeFileSystemClient fileSystem = InstrumentClient(serviceClient.GetFileSystemClient(GetNewFileSystemName()));

    bool created = false;
    // Act
    try
    {
        await fileSystem.CreateAsync();
        created = true;
    }
    // Cleanup
    finally
    {
        // Delete only after a successful create; otherwise DeleteAsync throws
        // FileSystemNotFound and masks the original create failure.
        if (created)
        {
            await fileSystem.DeleteAsync();
        }
    }
}
/// <summary>
/// Verifies paging with a continuation marker enumerates each file system exactly once
/// and includes the freshly created one.
/// </summary>
public async Task GetFileSystemsAsync_Marker()
{
    DataLakeServiceClient service = GetServiceClient_SharedKey();

    // Guarantee at least one file system exists before listing.
    await using DisposingFileSystem test = await GetNewFileSystem(service: service);

    string continuationToken = default;
    var collected = new List<FileSystemItem>();
    await foreach (Page<FileSystemItem> page in service.GetFileSystemsAsync().AsPages(continuationToken))
    {
        collected.AddRange(page.Values);
    }

    Assert.AreNotEqual(0, collected.Count);
    // No duplicates across pages.
    Assert.AreEqual(collected.Count, collected.Select(item => item.Name).Distinct().Count());
    // The file system created above shows up in the listing.
    Assert.IsTrue(collected.Any(item => test.FileSystem.Uri == InstrumentClient(service.GetFileSystemClient(item.Name)).Uri));
}
/// <summary>
/// Verifies a user-delegation directory SAS built for a deeply nested directory
/// authorizes an ExistsAsync call on that directory.
/// </summary>
public async Task DataLakeSasBuilder_DirectoryDepth_Exists()
{
    // Arrange
    DataLakeServiceClient oauthService = GetServiceClient_OAuth();
    string fileSystemName = GetNewFileSystemName();
    await using DisposingFileSystem test = await GetNewFileSystem(service: oauthService, fileSystemName: fileSystemName);

    // Build a four-level directory chain with a file at the bottom.
    DataLakeDirectoryClient level1 = await test.FileSystem.CreateDirectoryAsync(GetNewDirectoryName());
    DataLakeDirectoryClient level2 = await level1.CreateSubDirectoryAsync(GetNewDirectoryName());
    DataLakeDirectoryClient level3 = await level2.CreateSubDirectoryAsync(GetNewDirectoryName());
    DataLakeDirectoryClient level4 = await level3.CreateSubDirectoryAsync(GetNewDirectoryName());
    DataLakeFileClient file = await level4.CreateFileAsync(GetNewFileName());

    Response<UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
        startsOn: null,
        expiresOn: Recording.UtcNow.AddHours(1));

    var sasBuilder = new DataLakeSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        FileSystemName = test.FileSystem.Name,
        Path = level4.Path,
        IsDirectory = true
    };
    sasBuilder.SetPermissions(DataLakeSasPermissions.All);

    var uriBuilder = new DataLakeUriBuilder(level4.Uri)
    {
        Sas = sasBuilder.ToSasQueryParameters(userDelegationKey, test.FileSystem.AccountName)
    };

    DataLakeDirectoryClient sasDirectoryClient = InstrumentClient(new DataLakeDirectoryClient(uriBuilder.ToUri(), GetOptions()));

    // Act — succeeds only if the SAS authorizes the call.
    await sasDirectoryClient.ExistsAsync();
}
/// <summary>
/// Verifies that listing with FileSystemTraits.Metadata returns the metadata
/// previously set on a file system.
/// </summary>
public async Task GetFileSystemsAsync_Metadata()
{
    DataLakeServiceClient service = GetServiceClient_SharedKey();

    // Ensure at least one container
    await using DisposingFileSystem test = await GetNewFileSystem(service: service);

    // Arrange
    IDictionary<string, string> metadata = BuildMetadata();
    await test.FileSystem.SetMetadataAsync(metadata);

    // Act
    IList<FileSystemItem> items = await service.GetFileSystemsAsync(FileSystemTraits.Metadata).ToListAsync();

    // Assert
    // Guard against FirstOrDefault returning null, which previously surfaced as a
    // NullReferenceException instead of a meaningful assertion failure.
    FileSystemItem item = items.FirstOrDefault(i => i.Name == test.FileSystem.Name);
    Assert.IsNotNull(item);
    AssertDictionaryEquality(metadata, item.Properties.Metadata);
}
/// <summary>
/// Demonstrates setting and reading back an access control list on a DataLake file.
/// </summary>
public void SetGetAcls()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = NamespaceStorageAccountName;
    string storageAccountKey = NamespaceStorageAccountKey;
    Uri serviceUri = NamespaceBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-acl" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-acl"));
    filesystem.Create();
    try
    {
        #region Snippet:SampleSnippetDataLakeFileClient_SetAcls
        // Create a DataLake file so we can set the Access Controls on the files
        DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
        fileClient.Create();

        // Set Access Control List
        IList<PathAccessControlItem> accessControlList
            = PathAccessControlExtensions.ParseAccessControlList("user::rwx,group::r--,mask::rwx,other::---");
        fileClient.SetAccessControlList(accessControlList);
        #endregion Snippet:SampleSnippetDataLakeFileClient_SetAcls

        #region Snippet:SampleSnippetDataLakeFileClient_GetAcls
        // Get Access Control List
        PathAccessControl accessControlResponse = fileClient.GetAccessControl();
        #endregion Snippet:SampleSnippetDataLakeFileClient_GetAcls

        // Round-trip check: what we read back matches what we set.
        string expected = PathAccessControlExtensions.ToAccessControlListString(accessControlList);
        string actual = PathAccessControlExtensions.ToAccessControlListString(accessControlResponse.AccessControlList.ToList());
        Assert.AreEqual(expected, actual);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
/// <summary>
/// Verifies a user-delegation SAS built from a raw (normalized) permissions string
/// authorizes listing paths in a file system.
/// </summary>
public async Task DataLakeSasBuilderRawPermissions_2020_02_10(string permissionsString)
{
    // Arrange
    DataLakeServiceClient oauthService = GetServiceClient_OAuth();
    string fileSystemName = GetNewFileSystemName();
    string directoryName = GetNewDirectoryName();
    await using DisposingFileSystem test = await GetNewFileSystem(service: oauthService, fileSystemName: fileSystemName);

    // Seed the file system with a directory and a file so listing returns something.
    DataLakeDirectoryClient directory = await test.FileSystem.CreateDirectoryAsync(directoryName);
    DataLakeFileClient file = await directory.CreateFileAsync(GetNewFileName());

    Response<UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
        startsOn: null,
        expiresOn: Recording.UtcNow.AddHours(1));

    var sasBuilder = new DataLakeSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        FileSystemName = test.FileSystem.Name
    };
    sasBuilder.SetPermissions(
        rawPermissions: permissionsString,
        normalize: true);

    var uriBuilder = new DataLakeUriBuilder(test.FileSystem.Uri)
    {
        Sas = sasBuilder.ToSasQueryParameters(userDelegationKey, test.FileSystem.AccountName)
    };

    DataLakeFileSystemClient sasFileSystemClient = InstrumentClient(new DataLakeFileSystemClient(uriBuilder.ToUri(), GetOptions()));

    // Act — enumerating must succeed; the items themselves are not inspected.
    await foreach (PathItem pathItem in sasFileSystemClient.GetPathsAsync())
    {
        // Just make sure the call succeeds.
    }
}
/// <summary>
/// Verifies analytics logging settings round-trip through Set/GetPropertiesAsync,
/// restoring the original settings afterwards even when an assertion fails.
/// </summary>
public async Task SetPropertiesAsync_Logging()
{
    // Arrange
    DataLakeServiceClient service = GetServiceClient_SharedKey();
    DataLakeServiceProperties properties = await service.GetPropertiesAsync();
    DataLakeAnalyticsLogging originalLogging = properties.Logging;
    properties.Logging = new DataLakeAnalyticsLogging
    {
        Version = "1.0",
        Delete = true,
        Read = true,
        Write = true,
        RetentionPolicy = new DataLakeRetentionPolicy
        {
            Enabled = true,
            Days = 1
        }
    };

    try
    {
        // Act
        await service.SetPropertiesAsync(properties);

        // Assert
        properties = await service.GetPropertiesAsync();
        Assert.AreEqual("1.0", properties.Logging.Version);
        Assert.IsTrue(properties.Logging.Delete);
        Assert.IsTrue(properties.Logging.Read);
        Assert.IsTrue(properties.Logging.Write);
        Assert.IsTrue(properties.Logging.RetentionPolicy.Enabled);
        Assert.AreEqual(1, properties.Logging.RetentionPolicy.Days);
    }
    finally
    {
        // Cleanup must run even when an assertion throws, otherwise the shared
        // account is left with the test's logging configuration.
        properties = await service.GetPropertiesAsync();
        properties.Logging = originalLogging;
        await service.SetPropertiesAsync(properties);
    }

    // Verify the restore took effect.
    properties = await service.GetPropertiesAsync();
    Assert.AreEqual(originalLogging.RetentionPolicy.Days, properties.Logging.RetentionPolicy.Days);
}
/// <summary>
/// Creates a new file system for a test and returns a DisposingFileSystem that
/// deletes it on dispose.
/// </summary>
public async Task<DisposingFileSystem> GetNewFileSystem(
    DataLakeServiceClient service = default,
    string fileSystemName = default,
    IDictionary<string, string> metadata = default,
    PublicAccessType publicAccessType = PublicAccessType.None,
    bool premium = default)
{
    string name = fileSystemName ?? GetNewFileSystemName();
    DataLakeServiceClient client = service ?? GetServiceClient_SharedKey();

    // A caller-supplied None is promoted to FileSystem access on non-premium accounts.
    if (publicAccessType == PublicAccessType.None && !premium)
    {
        publicAccessType = PublicAccessType.FileSystem;
    }

    DataLakeFileSystemClient fileSystem = InstrumentClient(client.GetFileSystemClient(name));
    await fileSystem.CreateAsync(metadata: metadata, publicAccessType: publicAccessType);
    return new DisposingFileSystem(fileSystem);
}
/// <summary>
/// Event Grid trigger: mirrors a blob-created event by creating the corresponding
/// directory in the "backup" file system.
/// </summary>
public static async Task OnDirectoryCreated([EventGridTrigger] EventGridEvent eventGridEvent, ILogger log)
{
    try
    {
        var createdEvent = ((JObject)eventGridEvent.Data).ToObject<StorageBlobCreatedEventData>();
        DataLakeServiceClient serviceClient = new DataLakeServiceClient(BLOB_OUTPUT_CONNECTION_STRING);
        var fileSystemClient = serviceClient.GetFileSystemClient("backup");
        var directoryName = GetBlobPathFromUrl(createdEvent.Url);
        await fileSystemClient.CreateDirectoryAsync(directoryName);
    }
    catch (Exception ex)
    {
        // Log at error level with the full exception so the stack trace is preserved;
        // LogInformation(ex.Message) discarded it and used the wrong severity.
        log.LogError(ex, ex.Message);
        throw;
    }
}
/// <summary>
/// Demonstrates reading path properties from a DataLake directory and file.
/// </summary>
public void GetProperties()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem"));
    filesystem.Create();
    try
    {
        // Create a DataLake directory we can inspect
        DataLakeDirectoryClient directoryClient = filesystem.GetDirectoryClient(Randomize("sample-directory"));
        directoryClient.Create();

        #region Snippet:SampleSnippetDataLakeDirectoryClient_GetProperties
        // Get Properties on a Directory
        PathProperties directoryPathProperties = directoryClient.GetProperties();
        #endregion Snippet:SampleSnippetDataLakeDirectoryClient_GetProperties

        // Create a DataLake file we can inspect
        DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
        fileClient.Create();

        #region Snippet:SampleSnippetDataLakeFileClient_GetProperties
        // Get Properties on a File
        PathProperties filePathProperties = fileClient.GetProperties();
        #endregion Snippet:SampleSnippetDataLakeFileClient_GetProperties
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
/// <summary>
/// Demonstrates listing the paths (directories) of a DataLake file system.
/// </summary>
public void List()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-list" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-list"));
    filesystem.Create();
    try
    {
        // Seed the filesystem with a few directories so the listing is non-trivial
        filesystem.CreateDirectory("sample-directory1");
        filesystem.CreateDirectory("sample-directory2");
        filesystem.CreateDirectory("sample-directory3");

        // Collect every path name the service reports
        var names = new List<string>();
        #region Snippet:SampleSnippetDataLakeFileClient_List
        foreach (PathItem pathItem in filesystem.ListPaths())
        {
            names.Add(pathItem.Name);
        }
        #endregion Snippet:SampleSnippetDataLakeFileClient_List

        // All three directories — and nothing else — should be listed
        Assert.AreEqual(3, names.Count);
        Assert.Contains("sample-directory1", names);
        Assert.Contains("sample-directory2", names);
        Assert.Contains("sample-directory3", names);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
/// <summary>
/// Demonstrates appending data to a DataLake file and flushing it.
/// </summary>
public void Append_Simple()
{
    // Create Sample File to read content from
    string sampleFilePath = CreateTempFile(SampleFileContent);

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-append" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
    filesystem.Create();
    try
    {
        #region Snippet:SampleSnippetDataLakeFileClient_Append
        // Create a file
        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));
        file.Create();

        // Append data to the DataLake File.
        // The FileStream is disposed after the append (the original leaked it).
        using (FileStream stream = File.OpenRead(sampleFilePath))
        {
            file.Append(stream, 0);
        }
        file.Flush(SampleFileContent.Length);
        #endregion Snippet:SampleSnippetDataLakeFileClient_Append

        // Verify the contents of the file
        PathProperties properties = file.GetProperties();
        Assert.AreEqual(SampleFileContent.Length, properties.ContentLength);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
/// <summary>
/// Verifies a shared-key directory SAS built for a deeply nested directory
/// authorizes an ExistsAsync call on that directory.
/// </summary>
public async Task DataLakeSasBuilder_DirectoryDepth_SharedKey()
{
    // Arrange
    DataLakeServiceClient oauthService = GetServiceClient_OAuth();
    string fileSystemName = GetNewFileSystemName();
    await using DisposingFileSystem test = await GetNewFileSystem(service: oauthService, fileSystemName: fileSystemName);

    // Build a four-level directory chain with a file at the bottom.
    DataLakeDirectoryClient level1 = await test.FileSystem.CreateDirectoryAsync(GetNewDirectoryName());
    DataLakeDirectoryClient level2 = await level1.CreateSubDirectoryAsync(GetNewDirectoryName());
    DataLakeDirectoryClient level3 = await level2.CreateSubDirectoryAsync(GetNewDirectoryName());
    DataLakeDirectoryClient level4 = await level3.CreateSubDirectoryAsync(GetNewDirectoryName());
    DataLakeFileClient file = await level4.CreateFileAsync(GetNewFileName());

    var sasBuilder = new DataLakeSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        FileSystemName = test.FileSystem.Name,
        Path = level4.Path,
        IsDirectory = true
    };
    sasBuilder.SetPermissions(DataLakeSasPermissions.All);

    var sharedKeyCredential = new StorageSharedKeyCredential(
        TestConfigHierarchicalNamespace.AccountName,
        TestConfigHierarchicalNamespace.AccountKey);

    var uriBuilder = new DataLakeUriBuilder(level4.Uri)
    {
        Sas = sasBuilder.ToSasQueryParameters(sharedKeyCredential)
    };

    DataLakeDirectoryClient sasDirectoryClient = InstrumentClient(new DataLakeDirectoryClient(uriBuilder.ToUri(), GetOptions()));

    // Act — succeeds only if the SAS authorizes the call.
    await sasDirectoryClient.ExistsAsync();
}
/// <summary>
/// Verifies that a soft-deleted file system is surfaced by listing with
/// FileSystemStates.Deleted, including deletion metadata.
/// </summary>
public async Task GetFileSystemsAsync_Deleted()
{
    // Arrange
    DataLakeServiceClient service = GetServiceClient_SharedKey();
    string fileSystemName = GetNewFileSystemName();
    DataLakeFileSystemClient fileSystemClient = InstrumentClient(service.GetFileSystemClient(fileSystemName));
    await fileSystemClient.CreateAsync();
    await fileSystemClient.DeleteAsync();

    // Act
    IList<FileSystemItem> fileSystems = await service.GetFileSystemsAsync(states: FileSystemStates.Deleted).ToListAsync();
    FileSystemItem fileSystemItem = fileSystems.FirstOrDefault(c => c.Name == fileSystemName);

    // Assert
    // Guard against the item being absent; the original dereferenced the
    // FirstOrDefault result and would fail with a NullReferenceException.
    Assert.IsNotNull(fileSystemItem);
    Assert.IsTrue(fileSystemItem.IsDeleted);
    Assert.IsNotNull(fileSystemItem.VersionId);
    Assert.IsNotNull(fileSystemItem.Properties.DeletedOn);
    Assert.IsNotNull(fileSystemItem.Properties.RemainingRetentionDays);
}
/// <summary>
/// Demonstrates setting POSIX-style permissions on a DataLake file and reading them back.
/// </summary>
public void SetPermissions()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = NamespaceStorageAccountName;
    string storageAccountKey = NamespaceStorageAccountKey;
    Uri serviceUri = NamespaceBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-acl" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-per"));
    filesystem.Create();
    try
    {
        #region Snippet:SampleSnippetDataLakeFileClient_SetPermissions
        // Create a DataLake file so we can set the Access Controls on the files
        DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
        fileClient.Create();

        // Set the Permissions of the file
        PathPermissions pathPermissions = PathPermissions.ParseSymbolicPermissions("rwxrwxrwx");
        fileClient.SetPermissions(permissions: pathPermissions);
        #endregion Snippet:SampleSnippetDataLakeFileClient_SetPermissions

        // Get Access Control List
        PathAccessControl accessControlResponse = fileClient.GetAccessControl();

        // Round-trip check in both symbolic and octal representations
        Assert.AreEqual(
            pathPermissions.ToSymbolicPermissions(),
            accessControlResponse.Permissions.ToSymbolicPermissions());
        Assert.AreEqual(
            pathPermissions.ToOctalPermissions(),
            accessControlResponse.Permissions.ToOctalPermissions());
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
/// <summary>
/// Verifies that a user-delegation SAS carrying an unknown AgentObjectId is rejected
/// by the service with a RequestFailedException.
/// </summary>
public async Task DataLakeSasBuilder_AgentObjectId_Error()
{
    // Arrange
    DataLakeServiceClient oauthService = GetServiceClient_OAuth();
    string fileSystemName = GetNewFileSystemName();
    string directoryName = GetNewDirectoryName();
    await using DisposingFileSystem test = await GetNewFileSystem(service: oauthService, fileSystemName: fileSystemName);

    // Seed the file system with a directory and a file.
    DataLakeDirectoryClient directory = await test.FileSystem.CreateDirectoryAsync(directoryName);
    DataLakeFileClient file = await directory.CreateFileAsync(GetNewFileName());

    Response<UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
        startsOn: null,
        expiresOn: Recording.UtcNow.AddHours(1));

    var sasBuilder = new DataLakeSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        FileSystemName = test.FileSystem.Name,
        // A random GUID that does not correspond to any real agent identity.
        AgentObjectId = Recording.Random.NewGuid().ToString()
    };
    sasBuilder.SetPermissions(DataLakeSasPermissions.All);

    var uriBuilder = new DataLakeUriBuilder(test.FileSystem.Uri)
    {
        Sas = sasBuilder.ToSasQueryParameters(userDelegationKey, test.FileSystem.AccountName)
    };

    DataLakeFileSystemClient sasFileSystemClient = InstrumentClient(new DataLakeFileSystemClient(uriBuilder.ToUri(), GetOptions()));

    // Act — the service must reject the request.
    await TestHelper.AssertExpectedExceptionAsync<RequestFailedException>(
        sasFileSystemClient.ExistsAsync(),
        e => Assert.IsNotNull(e.ErrorCode));
}
/// <summary>
/// Creates a new file system for a test and returns a <c>DisposingFileSystem</c>
/// that deletes it on dispose. Retries creation once on ContainerAlreadyExists to
/// work around a service-side cache issue; if the retry reports the same error,
/// it is deliberately swallowed because the container then genuinely exists.
/// </summary>
/// <param name="service">Optional service client; defaults to the shared-key client.</param>
/// <param name="fileSystemName">Optional name; a new random name is generated when omitted.</param>
/// <param name="metadata">Optional metadata applied at creation.</param>
/// <param name="publicAccessType">Access level; None is promoted to FileSystem for non-premium accounts.</param>
/// <param name="premium">Whether the target account is premium (affects the default access level).</param>
public async Task<DisposingFileSystem> GetNewFileSystem(
    DataLakeServiceClient service = default,
    string fileSystemName = default,
    IDictionary<string, string> metadata = default,
    PublicAccessType publicAccessType = PublicAccessType.None,
    bool premium = default)
{
    fileSystemName ??= GetNewFileSystemName();
    service ??= GetServiceClient_SharedKey();
    if (publicAccessType == PublicAccessType.None)
    {
        // Premium accounts do not support public access; others default to container-level access.
        publicAccessType = premium ? PublicAccessType.None : PublicAccessType.FileSystem;
    }
    DataLakeFileSystemClient fileSystem = InstrumentClient(service.GetFileSystemClient(fileSystemName));

    // due to a service issue, if the initial container creation request times out, subsequent requests
    // can return a ContainerAlreadyExists code even though the container doesn't really exist.
    // we delay until after the service cache timeout and then attempt to create the container one more time.
    // If this attempt fails, we let the exception propagate.
    // TODO Note this issue will be fixed in the 72 rollout. After that time, this try/catch can be removed.
    try
    {
        await RetryAsync(
            async () => await fileSystem.CreateAsync(metadata: metadata, publicAccessType: publicAccessType),
            ex => ex.ErrorCode == Constants.Blob.Container.AlreadyExists,
            retryDelay: TestConstants.DataLakeRetryDelay,
            retryAttempts: 1);
    }
    catch (RequestFailedException storageRequestFailedException)
        when (storageRequestFailedException.ErrorCode == Constants.Blob.Container.AlreadyExists)
    {
        // if we get this error after retrying once, that means the container really does
        // exist, since the retry attempt occurred after the cache timeout.
        // so we just swallow this error and continue on in the test.
    }
    return (new DisposingFileSystem(fileSystem));
}
/// <summary>
/// Demonstrates anonymous access: a file in a publicly accessible file system
/// can be read through a credential-less client.
/// </summary>
public async Task AnonymousAuthAsync()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string accountName = StorageAccountName;
    string accountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);

    // Get a reference to a service Client
    DataLakeServiceClient service = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem"
    DataLakeFileSystemClient filesystem = service.GetFileSystemClient(Randomize("sample-filesystem"));
    try
    {
        // Create a filesystem whose contents can be accessed publicly
        await filesystem.CreateAsync(PublicAccessType.FileSystem);

        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));
        await file.CreateAsync();

        // Append and flush some content so there is something to fetch anonymously
        string fileContent = "File Content";
        await file.AppendAsync(new MemoryStream(Encoding.UTF8.GetBytes(fileContent)), 0);
        await file.FlushAsync(fileContent.Length);

        // Build a credential-less client from the file's URI
        Uri endpoint = file.Uri;
        DataLakeFileClient anonymous = new DataLakeFileClient(endpoint);

        // Make a service request to verify we've succesfully authenticated
        await anonymous.GetPropertiesAsync();
    }
    finally
    {
        await filesystem.DeleteAsync();
    }
}
/// <summary>
/// Builds an uninstantiated-on-service DisposingFileSystem and hands back the
/// instrumented client through <paramref name="fileSystem"/>.
/// </summary>
public IDisposable GetNewFileSystem(
    out DataLakeFileSystemClient fileSystem,
    DataLakeServiceClient service = default,
    string fileSystemName = default,
    IDictionary<string, string> metadata = default,
    Models.PublicAccessType publicAccessType = Models.PublicAccessType.None,
    bool premium = default)
{
    string name = fileSystemName ?? GetNewFileSystemName();
    DataLakeServiceClient client = service ?? GetServiceClient_SharedKey();
    fileSystem = InstrumentClient(client.GetFileSystemClient(name));

    // A caller-supplied None is promoted to Container access on non-premium accounts.
    if (publicAccessType == Models.PublicAccessType.None && !premium)
    {
        publicAccessType = Models.PublicAccessType.Container;
    }

    IDictionary<string, string> effectiveMetadata =
        metadata ?? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

    return new DisposingFileSystem(fileSystem, effectiveMetadata, publicAccessType);
}