// Moving and renaming are the same operation for an ADLS Gen2 directory:
// both resolve to a server-side rename of the source path to the destination path.
public void Move(string sourceDirName, string destDirName)
{
    var source = GetDirectoryClient(sourceDirName);
    var destinationPortions = _azureBlobAdapter.ExtractContainerBlobPortions(destDirName);
    var destinationPath = NormalizeToBlobPath(destinationPortions.Item2);
    source.Rename(destinationPath);
}
/// <summary>
/// Ensures the standard processing directories ("Incoming", "Ok", "Fail", "Report")
/// exist in the given container, creating any that are missing.
/// </summary>
/// <param name="containerName">Container (file system) to initialise.</param>
/// <param name="log">Logger for progress and failure messages.</param>
/// <exception cref="RequestFailedException">Re-thrown after logging when any storage call fails.</exception>
public static async Task InitContainerAsync(string containerName, ILogger log)
{
    log.LogInformation($"Start to init container: {containerName}");

    // NOTE(review): Data Lake operations usually target the ".dfs.core.windows.net"
    // endpoint — confirm the blob endpoint is intended here.
    string serviceEndpoint = string.Format("https://{0}.blob.core.windows.net/", Common.GetEnvironmentVariable("SA_NAME"));
    List<string> directories = new List<string> { "Incoming", "Ok", "Fail", "Report" };
    try
    {
        // Fix: create the service and file-system clients once, instead of
        // constructing a fresh DataLakeServiceClient on every loop iteration.
        DataLakeServiceClient dataLakeServiceClient = new DataLakeServiceClient(new Uri(serviceEndpoint), new DefaultAzureCredential());
        DataLakeFileSystemClient dataLakeFileSystemClient = dataLakeServiceClient.GetFileSystemClient(containerName);

        foreach (var directory in directories)
        {
            DataLakeDirectoryClient dataLakeDirectoryClient = dataLakeFileSystemClient.GetDirectoryClient(directory);
            await dataLakeDirectoryClient.CreateIfNotExistsAsync();
        }

        // Fix: log completion once after all directories exist
        // (previously logged once per directory, before initialisation finished).
        log.LogInformation($"Initialisation complete: {containerName}");
    }
    catch (RequestFailedException)
    {
        log.LogInformation($"Failed to complete container initialisation operation: {containerName}");
        throw;
    }
}
public async Task DataLakeSasBuilder_DirectoryDepth(string directoryName)
{
    // Arrange: an OAuth-backed service client and a user delegation key to sign the SAS.
    DataLakeServiceClient oauthService = GetServiceClient_OAuth();
    string fileSystemName = GetNewFileSystemName();
    await using DisposingFileSystem test = await GetNewFileSystem(service: oauthService, fileSystemName: fileSystemName);
    DataLakeDirectoryClient directory = test.FileSystem.GetDirectoryClient(directoryName);

    Response<UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
        startsOn: null,
        expiresOn: Recording.UtcNow.AddHours(1));

    // A directory-scoped SAS for the given path.
    DataLakeSasBuilder sasBuilder = new DataLakeSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        FileSystemName = test.FileSystem.Name,
        Path = directoryName,
        IsDirectory = true
    };
    sasBuilder.SetPermissions(DataLakeSasPermissions.All);

    // Act
    DataLakeSasQueryParameters sas = sasBuilder.ToSasQueryParameters(userDelegationKey, test.FileSystem.AccountName);

    // The expected depth is the number of '/'-separated segments, discounting the
    // empty segments produced by a leading and/or trailing slash.
    int expectedDepth = directoryName.Split('/').Length;
    if (expectedDepth > 0)
    {
        if (directoryName.ElementAt(0) == '/')
        {
            expectedDepth--;
        }
        if (directoryName.ElementAt(directoryName.Length - 1) == '/')
        {
            expectedDepth--;
        }
    }

    // Assert
    Assert.AreEqual(expectedDepth, sas.DirectoryDepth);
}
/// <summary>
/// Connect and upload the data as file to the Azure Data Lake.
/// Creates a uniquely named "data-{guid}.json" file and writes the payload to it.
/// </summary>
/// <param name="storageAccountName">Azure storage account name</param>
/// <param name="storageAccountKey">Azure storage account key</param>
/// <param name="dataLakeUri">Azure Data Lake URI</param>
/// <param name="directoryName">Azure Data Lake directory name</param>
/// <param name="content">Upload data content</param>
/// <returns>true when the upload succeeded; false when any exception occurred (logged, not re-thrown).</returns>
public async Task <bool> UploadData(string storageAccountName, string storageAccountKey, string dataLakeUri, string directoryName, string content)
{
    try
    {
        Uri serviceUri = new Uri(dataLakeUri);
        StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

        // Create DataLakeServiceClient using StorageSharedKeyCredentials
        DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

        // NOTE(review): directoryName is used as BOTH the file system name (here) and the
        // directory name (next line) — confirm this conflation is intentional.
        DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(directoryName);
        DataLakeDirectoryClient directoryClient = filesystem.GetDirectoryClient(directoryName);

        // Create a uniquely named JSON file, then append the payload and flush to commit it.
        DataLakeFileClient fileClient = await directoryClient.CreateFileAsync(string.Format("data-{0}.json", Guid.NewGuid().ToString()));
        // NOTE(review): ASCII encoding will mangle any non-ASCII characters in content —
        // presumably the payload is ASCII-only; verify, or consider UTF-8.
        using (MemoryStream memoryStream = new MemoryStream(System.Text.Encoding.ASCII.GetBytes(content)))
        {
            await fileClient.AppendAsync(memoryStream, offset : 0);
            await fileClient.FlushAsync(position : memoryStream.Length);
        }
        return(true);
    }
    catch (Exception exception)
    {
        // NOTE(review): only the stack trace is logged — the exception message is lost.
        logger.Error(exception.StackTrace);
        return(false);
    }
}
// </Snippet_RemoveACLRecursively>
#endregion

#region Resume with event token
// <Snippet_ResumeContinuationToken>
public async Task <string> ResumeAsync(DataLakeServiceClient serviceClient,
    DataLakeDirectoryClient directoryClient,
    List <PathAccessControlItem> accessControlList,
    string continuationToken)
{
    try
    {
        // Resume the recursive ACL change from where the previous attempt stopped.
        var changeResult = await directoryClient.SetAccessControlRecursiveAsync(
            accessControlList, continuationToken : continuationToken, null);

        // Only pick up the newer token when some paths still failed; otherwise the
        // caller's token is handed back unchanged.
        if (changeResult.Value.Counters.FailedChangesCount > 0)
        {
            continuationToken = changeResult.Value.ContinuationToken;
        }

        return continuationToken;
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
        return continuationToken;
    }
}
public void CreateFileClient_Directory()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;

    #region Snippet:SampleSnippetDataLakeFileSystemClient_Create
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Create a DataLake Filesystem
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem"));
    filesystem.Create();
    #endregion Snippet:SampleSnippetDataLakeFileSystemClient_Create

    #region Snippet:SampleSnippetDataLakeFileClient_Create_Directory
    // Create a DataLake Directory
    // Fix: the original called filesystem.CreateDirectory(...) AND directory.Create(),
    // creating the directory twice; get a reference first and create it once.
    DataLakeDirectoryClient directory = filesystem.GetDirectoryClient(Randomize("sample-directory"));
    directory.Create();

    // Create a DataLake File using a DataLake Directory
    DataLakeFileClient file = directory.GetFileClient(Randomize("sample-file"));
    file.Create();
    #endregion Snippet:SampleSnippetDataLakeFileClient_Create_Directory

    // Verify we created one file
    Assert.AreEqual(1, filesystem.ListPaths().Count());

    // Cleanup
    filesystem.Delete();
}
// NOTE(review): the method header (access modifiers and return type, e.g.
// "public async Task<DataLakeDirectoryClient>") appears truncated before this point —
// the body awaits RenameAsync, so this must be an async method.
RenameDirectory(DataLakeFileSystemClient fileSystemClient)
{
    // Renames "my-directory/my-subdirectory" to "my-directory/my-subdirectory-renamed"
    // within the same file system and returns the result of the rename call.
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient("my-directory/my-subdirectory");
    return(await directoryClient.RenameAsync("my-directory/my-subdirectory-renamed"));
}
// </Snippet_MoveDirectory>
#endregion

#region Delete a directory
// ---------------------------------------------------------
// Delete a directory
//----------------------------------------------------------
// <Snippet_DeleteDirectory>
public void DeleteDirectory(DataLakeFileSystemClient fileSystemClient)
{
    // Resolve the "my-directory" directory and delete it from the file system.
    fileSystemClient
        .GetDirectoryClient("my-directory")
        .Delete();
}
// </Snippet_DownloadFromDirectory>
#endregion

#region Download a binary file from a directory
// ---------------------------------------------------------
// Download file from directory (binary)
//----------------------------------------------------------
// <Snippet_DownloadBinaryFromDirectory>
public async Task DownloadFile(DataLakeFileSystemClient fileSystemClient)
{
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient("my-directory");
    DataLakeFileClient fileClient = directoryClient.GetFileClient("my-image.png");

    Response <FileDownloadInfo> downloadResponse = await fileClient.ReadAsync();

    // Fix: both streams are now disposed deterministically even on failure —
    // previously the BinaryReader was never disposed and the FileStream was
    // only Close()d on the happy path.
    using (BinaryReader reader = new BinaryReader(downloadResponse.Value.Content))
    using (FileStream fileStream = File.OpenWrite("C:\\Users\\contoso\\my-image-downloaded.png"))
    {
        // Copy the download in 4 KiB chunks.
        int bufferSize = 4096;
        byte[] buffer = new byte[bufferSize];
        int count;
        while ((count = reader.Read(buffer, 0, buffer.Length)) != 0)
        {
            fileStream.Write(buffer, 0, count);
        }
        await fileStream.FlushAsync();
    }
}
/// <summary>
/// Write a datalake gen2 folder to output.
/// </summary>
internal void WriteDataLakeGen2Item(IStorageBlobManagement channel, DataLakeDirectoryClient dirClient)
{
    var item = new AzureDataLakeGen2Item(dirClient)
    {
        Context = channel.StorageContext
    };
    WriteObject(item);
}
/// <summary>
/// Azure DataLakeGen2 Item constructor
/// </summary>
/// <param name="directoryClient">DataLake directory client the item wraps</param>
public AzureDataLakeGen2Item(DataLakeDirectoryClient directoryClient)
{
    Name = directoryClient.Name;
    Path = directoryClient.Path;
    Directory = directoryClient;
    IsDirectory = true;

    // Fix: the original condition used '||' (Path != "/" || IsNullOrEmpty(Path)),
    // which still called GetProperties() when Path was null/empty — also a root
    // representation. Fetch properties only for a genuine non-root directory;
    // GetProperties() fails on the root directory.
    if (directoryClient.Path != "/" && !string.IsNullOrEmpty(directoryClient.Path))
    {
        try
        {
            Properties = directoryClient.GetProperties();
            Length = Properties.ContentLength;
            ContentType = Properties.ContentType;
            LastModified = Properties.LastModified;
        }
        catch (global::Azure.RequestFailedException e) when(e.Status == 403 || e.Status == 404)
        {
            // skip get dir properties if don't have read permission
        }
    }
    try
    {
        AccessControl = directoryClient.GetAccessControl();
        Permissions = AccessControl.Permissions;
        ACL = PSPathAccessControlEntry.ParsePSPathAccessControlEntrys(AccessControl.AccessControlList);
        Owner = AccessControl.Owner;
        Group = AccessControl.Group;
    }
    catch (global::Azure.RequestFailedException e) when(e.Status == 403 || e.Status == 404)
    {
        // skip get dir ACL if don't have read permission
    }
}
public async Task CreateDirectoryClientAsync()
{
    // Shared-key credential for authenticating the service client.
    string accountName = StorageAccountName;
    string accountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    var sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    var serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-append" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
    filesystem.Create();

    // Create a single directory inside the new filesystem.
    DataLakeDirectoryClient directory = filesystem.GetDirectoryClient(Randomize("sample-file"));
    await directory.CreateAsync();

    // Verify we created one directory
    AsyncPageable <PathItem> response = filesystem.GetPathsAsync();
    IList <PathItem> paths = await response.ToListAsync();
    Assert.AreEqual(1, paths.Count);

    // Cleanup
    await filesystem.DeleteAsync();
}
// </Snippet_UpdateACLsRecursively>
#endregion

#region Remove ACL entry
// ---------------------------------------------------------
// Remove directory-level ACL entry
//----------------------------------------------------------
// <Snippet_RemoveACLEntry>
public async Task RemoveDirectoryACLEntry (DataLakeFileSystemClient fileSystemClient)
{
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient("");
    PathAccessControl directoryAccessControl = await directoryClient.GetAccessControlAsync();

    // Fix: copy into a new list instead of down-casting the returned IList —
    // the cast throws InvalidCastException if the implementation is not List<T>.
    List <PathAccessControlItem> accessControlListUpdate = new List <PathAccessControlItem>(directoryAccessControl.AccessControlList);

    // Locate the entry for the given entity id (null when absent).
    PathAccessControlItem entryToRemove = null;
    foreach (var item in accessControlListUpdate)
    {
        if (item.EntityId == "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx")
        {
            entryToRemove = item;
            break;
        }
    }

    if (entryToRemove != null)
    {
        accessControlListUpdate.Remove(entryToRemove);
        // Fix: await the async overload instead of calling the blocking
        // SetAccessControlList from within an async method.
        await directoryClient.SetAccessControlListAsync(accessControlListUpdate);
    }
}
// </Snippet_FileACL>
#endregion

#region Update ACL
// ---------------------------------------------------------
// Update directory-level ACLs
//----------------------------------------------------------
// <Snippet_UpdateACL>
public async Task UpdateDirectoryACLs(DataLakeFileSystemClient fileSystemClient)
{
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient("");
    PathAccessControl directoryAccessControl = await directoryClient.GetAccessControlAsync();

    // Fix: copy into a new list instead of down-casting the returned IList —
    // the cast throws InvalidCastException if the implementation is not List<T>.
    List <PathAccessControlItem> accessControlListUpdate = new List <PathAccessControlItem>(directoryAccessControl.AccessControlList);

    // Find the "other" entry and replace it with read + execute permissions.
    int index = accessControlListUpdate.FindIndex(item => item.AccessControlType == AccessControlType.Other);
    if (index > -1)
    {
        accessControlListUpdate[index] = new PathAccessControlItem(AccessControlType.Other,
            RolePermissions.Read | RolePermissions.Execute);
        // Fix: await the async overload instead of calling the blocking
        // SetAccessControlList from within an async method.
        await directoryClient.SetAccessControlListAsync(accessControlListUpdate);
    }
}
// </Snippet_UpdateACL>
#endregion

#region Set ACLs recursively
// <Snippet_SetACLRecursively>
public async Task SetACLRecursively(DataLakeServiceClient serviceClient, bool isDefaultScope)
{
    DataLakeFileSystemClient fileSystemClient = serviceClient.GetFileSystemClient("my-container");
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient("my-parent-directory");

    // owning user: rwx, owning group: r-x, other: ---, plus a named user with r-x.
    var accessControlList = new List <PathAccessControlItem>
    {
        new PathAccessControlItem(AccessControlType.User,
            RolePermissions.Read | RolePermissions.Write | RolePermissions.Execute, isDefaultScope),
        new PathAccessControlItem(AccessControlType.Group,
            RolePermissions.Read | RolePermissions.Execute, isDefaultScope),
        new PathAccessControlItem(AccessControlType.Other,
            RolePermissions.None, isDefaultScope),
        new PathAccessControlItem(AccessControlType.User,
            RolePermissions.Read | RolePermissions.Execute, isDefaultScope,
            entityId: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"),
    };

    await directoryClient.SetAccessControlRecursiveAsync(accessControlList, null);
}
public async Task PathClient_CanGetParentDirectoryClient_WithContainerSAS()
{
    // Arrange: a file nested one directory below the root, reached through a
    // container-scoped SAS.
    var parentDirName = DataLakeClientBuilder.GetNewDirectoryName();
    await using DisposingFileSystem test = await DataLakeClientBuilder.GetNewFileSystem();
    var fileName = DataLakeClientBuilder.GetNewFileName();

    DataLakeFileClient fileClient = InstrumentClient(
        GetServiceClient_DataLakeServiceSas_FileSystem(test.Container.Name)
        .GetFileSystemClient(test.FileSystem.Name)
        .GetRootDirectoryClient()
        .GetSubDirectoryClient(parentDirName)
        .GetFileClient(fileName));
    await fileClient.CreateAsync();

    // Act
    DataLakeDirectoryClient parentDirClient = fileClient.GetParentDirectoryClient();

    // make sure that client is functional
    var childPaths = await parentDirClient.GetPathsAsync().ToListAsync();

    // Assert
    Assert.AreEqual(fileClient.Path.GetParentPath(), parentDirClient.Path);
    Assert.AreEqual(fileClient.AccountName, parentDirClient.AccountName);
    Assert.IsNotNull(childPaths);
}
/// <summary>
/// HTTP-triggered function: opens a 7-Zip archive stored in ADLS Gen2 and streams
/// the contents of every non-directory entry back in the response body.
/// </summary>
public static async Task <IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = "SevenZip/{account}/{container}/{directory}/(unknown)")] HttpRequest req,
    string account,
    string container,
    string directory,
    string filename,
    ILogger log)
{
    //Retrieve File from storage
    var lakeClient = GetDataLakeServiceClient(HttpUtility.UrlDecode(account));
    var fileSystemClient = lakeClient.GetFileSystemClient(HttpUtility.UrlDecode(container));
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient(HttpUtility.UrlDecode(directory));
    var downloadFile = directoryClient.GetFileClient(HttpUtility.UrlDecode(filename));
    var readStream = await downloadFile.OpenReadAsync();

    var response = req.HttpContext.Response;
    response.StatusCode = 200;
    response.ContentType = "application/json-data-stream";

    using (var archive = SevenZipArchive.Open(readStream, null))
    {
        // Fix: the original nested a second loop over ALL entries inside the
        // filtered loop, writing every entry once per non-directory entry and
        // duplicating the output N times. Write each non-directory entry once.
        foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
        {
            entry.WriteTo(response.Body);
        }
    }

    return(new EmptyResult());
}
#pragma warning disable CA1806 // Do not ignore method results
public override void Run(CancellationToken cancellationToken)
{
    // Smoke-test every client constructor overload: bare URI, token credential,
    // and shared-key credential. Only the first client of each family is kept
    // for the child-client calls below; the rest exist purely for coverage.
    var serviceClient = new DataLakeServiceClient(s_testEnvironment.DataLakeServiceUri);
    _ = new DataLakeServiceClient(s_testEnvironment.DataLakeServiceUri, s_tokenCredential);
    _ = new DataLakeServiceClient(s_testEnvironment.DataLakeServiceUri, s_testEnvironment.DataLakeCredential);

    var fileSystemClient = new DataLakeFileSystemClient(s_fileSystemUri);
    _ = new DataLakeFileSystemClient(s_fileSystemUri, s_tokenCredential);
    _ = new DataLakeFileSystemClient(s_fileSystemUri, s_testEnvironment.DataLakeCredential);

    var directoryClient = new DataLakeDirectoryClient(s_directoryUri);
    _ = new DataLakeDirectoryClient(s_directoryUri, s_tokenCredential);
    _ = new DataLakeDirectoryClient(s_directoryUri, s_testEnvironment.DataLakeCredential);

    _ = new DataLakeFileClient(s_fileUri);
    _ = new DataLakeFileClient(s_fileUri, s_tokenCredential);
    _ = new DataLakeFileClient(s_fileUri, s_testEnvironment.DataLakeCredential);

    _ = new DataLakePathClient(s_fileUri);
    _ = new DataLakePathClient(s_fileUri, s_tokenCredential);
    _ = new DataLakePathClient(s_fileUri, s_testEnvironment.DataLakeCredential);

    // Child-client factory methods.
    serviceClient.GetFileSystemClient("foo");
    fileSystemClient.GetDirectoryClient("foo");
    fileSystemClient.GetFileClient("foo");
    directoryClient.GetFileClient("foo");
}
public void CreateDirectoryClient()
{
    // Shared-key credential for authenticating the service client.
    string accountName = StorageAccountName;
    string accountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;

    #region Snippet:SampleSnippetDataLakeDirectoryClient_Create
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-append" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
    filesystem.Create();

    // Create a single directory inside the new filesystem.
    DataLakeDirectoryClient directory = filesystem.GetDirectoryClient(Randomize("sample-file"));
    directory.Create();
    #endregion Snippet:SampleSnippetDataLakeDirectoryClient_Create

    // Verify we created one directory
    Assert.AreEqual(1, filesystem.ListPaths().Count());

    // Cleanup
    filesystem.Delete();
}
public async Task DataLakeSasBuilder_BothObjectId_Error()
{
    // Arrange
    DataLakeServiceClient oauthService = GetServiceClient_OAuth();
    string fileSystemName = GetNewFileSystemName();
    string directoryName = GetNewDirectoryName();
    await using DisposingFileSystem test = await GetNewFileSystem(service: oauthService, fileSystemName: fileSystemName);

    // A directory and file exist in the filesystem, but the failure under test
    // happens entirely client-side, at SAS-signing time.
    DataLakeDirectoryClient directory = await test.FileSystem.CreateDirectoryAsync(directoryName);
    DataLakeFileClient file = await directory.CreateFileAsync(GetNewFileName());

    Response <UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
        startsOn: null,
        expiresOn: Recording.UtcNow.AddHours(1));

    // A builder that illegally sets BOTH agent object-id fields.
    var sasBuilder = new DataLakeSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        FileSystemName = test.FileSystem.Name,
        PreauthorizedAgentObjectId = Recording.Random.NewGuid().ToString(),
        AgentObjectId = Recording.Random.NewGuid().ToString()
    };
    sasBuilder.SetPermissions(DataLakeSasPermissions.All);

    // Act / Assert: signing must fail because the two object ids are mutually exclusive.
    TestHelper.AssertExpectedException <InvalidOperationException>(
        () => sasBuilder.ToSasQueryParameters(userDelegationKey, test.FileSystem.AccountName),
        new InvalidOperationException("SAS cannot have the following parameters specified in conjunction: PreauthorizedAgentObjectId, AgentObjectId"));
}
// </Snippet_CreateDirectory>
#endregion

#region Get a directory
// ---------------------------------------------------------
// Get a directory
//----------------------------------------------------------
// <Snippet_GetDirectory>
public DataLakeDirectoryClient GetDirectory (DataLakeFileSystemClient fileSystemClient, string directoryName)
{
    // Resolve a client for the named directory within the given file system.
    return fileSystemClient.GetDirectoryClient(directoryName);
}
/// <summary>
/// Creates a fresh file system plus a directory inside it for test use, returning the
/// disposable that tears the file system down and the directory client via 'out'.
/// </summary>
public IDisposable GetNewDirectory(out DataLakeDirectoryClient directory, DataLakeServiceClient service = default, string fileSystemName = default, string directoryName = default)
{
    IDisposable disposingFileSystem = GetNewFileSystem(out DataLakeFileSystemClient fileSystem, service, fileSystemName);
    directory = InstrumentClient(fileSystem.GetDirectoryClient(directoryName ?? GetNewDirectoryName()));

    // Fix: ".Result" wraps any failure in an AggregateException; GetAwaiter().GetResult()
    // blocks the same way but surfaces the original exception. (Sync-over-async is
    // unavoidable here because of the 'out' parameter.)
    directory.CreateAsync().GetAwaiter().GetResult();

    return disposingFileSystem;
}
public async Task DeleteDirectory(string path, CancellationToken token = default)
{
    // Prefix the caller's path with the configured base path before resolving it.
    path = WithBasePath(path);
    _logger.LogTrace($"Deleting directory {path}");

    DataLakeDirectoryClient client = _fileSystem.GetDirectoryClient(path);
    await client.DeleteAsync(cancellationToken: token);
}
public async Task DeleteDirectory(string path, CancellationToken token)
{
    // Guard: an empty path would target the file-system root.
    path.VerifyNotEmpty(nameof(path));
    _logger.LogTrace($"{nameof(DeleteDirectory)} {path}");

    DataLakeDirectoryClient client = _fileSystem.GetDirectoryClient(path);
    await client.DeleteAsync(cancellationToken: token);
}
/// <summary>
/// execute command
/// </summary>
public override void ExecuteCmdlet()
{
    IStorageBlobManagement localChannel = Channel;

    // Resolve the source item (file or directory) either from the explicit
    // FileSystem/Path parameters or from a pipeline-supplied InputObject.
    bool foundAFolder = false;
    DataLakeFileClient srcBlob = null;
    DataLakeDirectoryClient srcBlobDir = null;
    if (ParameterSetName == ManualParameterSet)
    {
        DataLakeFileSystemClient fileSystem = GetFileSystemClientByName(localChannel, this.FileSystem);
        foundAFolder = GetExistDataLakeGen2Item(fileSystem, this.Path, out srcBlob, out srcBlobDir);
    }
    else //BlobParameterSet
    {
        if (!InputObject.IsDirectory)
        {
            srcBlob = InputObject.File;
        }
        else
        {
            srcBlobDir = InputObject.Directory;
            foundAFolder = true;
        }
    }

    if (foundAFolder)
    {
        // Directory move: rename the source directory into the destination file
        // system (DestFileSystem falls back to the source FileSystem when unset).
        if (ShouldProcess(GetDataLakeItemUriWithoutSas(srcBlobDir), "Move Directory: "))
        {
            DataLakeFileSystemClient destFileSystem = GetFileSystemClientByName(localChannel, this.DestFileSystem != null ? this.DestFileSystem : this.FileSystem);
            DataLakeDirectoryClient destBlobDir = destFileSystem.GetDirectoryClient(this.DestPath);
            // Proceed when -Force is given, when the destination does not exist,
            // or after the user confirms the overwrite.
            if (this.Force || !destBlobDir.Exists() || ShouldContinue(string.Format("Overwrite destination {0}", GetDataLakeItemUriWithoutSas(destBlobDir)), ""))
            {
                destBlobDir = srcBlobDir.Rename(this.DestPath, this.DestFileSystem).Value;
                WriteDataLakeGen2Item(localChannel, destBlobDir);
            }
        }
    }
    else
    {
        // File move: same confirmation flow as directories, using the file client.
        if (ShouldProcess(GetDataLakeItemUriWithoutSas(srcBlob), "Move File: "))
        {
            DataLakeFileSystemClient destFileSystem = GetFileSystemClientByName(localChannel, this.DestFileSystem != null ? this.DestFileSystem : this.FileSystem);
            DataLakeFileClient destFile = destFileSystem.GetFileClient(this.DestPath);
            if (this.Force || !destFile.Exists() || ShouldContinue(string.Format("Overwrite destination {0}", GetDataLakeItemUriWithoutSas(destFile)), ""))
            {
                destFile = srcBlob.Rename(this.DestPath, this.DestFileSystem).Value;
                WriteDataLakeGen2Item(localChannel, destFile);
            }
        }
    }
}
public DataLakeDirectoryClient GetDirectoryClient(string directoryFullPath)
{
    // Split the full path into its container and blob portions, then resolve the
    // file system from the container portion and the directory from the blob portion.
    var portions = _azureBlobAdapter.ExtractContainerBlobPortions(directoryFullPath);
    var fileSystemName = _azureBlobAdapter.ExtractContainerName(portions.Item1);
    return GetFileSystemClient(fileSystemName).GetDirectoryClient(portions.Item2);
}
// </Snippet_GetContainer>
#endregion

#region create a directory
// ---------------------------------------------------------
// Create directory
//----------------------------------------------------------
// <Snippet_CreateDirectory>
public async Task <DataLakeDirectoryClient> CreateDirectory (DataLakeServiceClient serviceClient, string fileSystemName)
{
    // Create "my-directory" at the root, then a "my-subdirectory" child inside it,
    // returning the client for the child.
    DataLakeFileSystemClient fileSystemClient = serviceClient.GetFileSystemClient(fileSystemName);
    DataLakeDirectoryClient parent = await fileSystemClient.CreateDirectoryAsync("my-directory");
    DataLakeDirectoryClient child = await parent.CreateSubDirectoryAsync("my-subdirectory");
    return child;
}
/// <summary>
/// A Task() which we will use for uploading a million blobs in parallel
/// </summary>
public async Task UploadFile(DataLakeDirectoryClient directoryClient, int numTasks, int taskId, int numFilesPerTask)
{
    for (int fileIndex = 0; fileIndex < numFilesPerTask; fileIndex++)
    {
        // Path layout: sub-directory keyed by the file index, file name keyed by the
        // task/index combination.
        // NOTE(review): the combined number is taskId * numTasks + fileIndex — presumably
        // intended to be unique per task; confirm numTasks (not numFilesPerTask) is right.
        string fileName = $"Dir_{fileIndex:D3}/{(taskId * numTasks) + fileIndex:D6}";

        // Send request to create the file
        Console.WriteLine("Creating file = {0}.", fileName);
        DataLakeFileClient fileClient = await directoryClient.CreateFileAsync(fileName);
    }
}
// </Snippet_UploadFile>
#endregion

#region Upload files to a directory in bulk
// ---------------------------------------------------------
// Upload files to the directory - bulk uploads
//----------------------------------------------------------
// <Snippet_UploadFileBulk>
public async Task UploadFileBulk(DataLakeFileSystemClient fileSystemClient)
{
    DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient("my-directory");
    DataLakeFileClient fileClient = directoryClient.GetFileClient("uploaded-file.txt");

    // Fix: dispose the local FileStream once the upload completes
    // (it was previously never closed).
    using (FileStream fileStream = File.OpenRead("C:\\Users\\contoso\\file-to-upload.txt"))
    {
        await fileClient.UploadAsync(fileStream);
    }
}
private static void SetupADLSClient()
{
    // Build the dfs endpoint for the configured account and wire up the
    // service -> file-system -> working-directory client chain.
    _ADLSBaseURL = "https://" + _ADLSaccountName + ".dfs.core.windows.net";
    var credential = new StorageSharedKeyCredential(_ADLSaccountName, _ADLSaccountKey);
    _ADLSClient = new DataLakeServiceClient(new Uri(_ADLSBaseURL), credential);
    _ADLSFileSystemClient = _ADLSClient.GetFileSystemClient(_ADLSfileSystemName);
    _ADLSDirectoryClient = _ADLSFileSystemClient.GetDirectoryClient(_ADLSworkingFolder);

    // Only materialise the working folder when uploads are enabled in configuration.
    if (_configuration.GetValue <bool>("UploadToADLSGen2"))
    {
        _ADLSDirectoryClient.Create();
    }
}