public void CreateFileClient_Directory()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;

    #region Snippet:SampleSnippetDataLakeFileSystemClient_Create
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Create a DataLake Filesystem
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem"));
    filesystem.Create();
    #endregion Snippet:SampleSnippetDataLakeFileSystemClient_Create

    #region Snippet:SampleSnippetDataLakeFileClient_Create_Directory
    // Create a DataLake Directory
    DataLakeDirectoryClient directory = filesystem.GetDirectoryClient(Randomize("sample-directory"));
    directory.Create();

    // Create a DataLake File using a DataLake Directory
    DataLakeFileClient file = directory.GetFileClient(Randomize("sample-file"));
    file.Create();
    #endregion Snippet:SampleSnippetDataLakeFileClient_Create_Directory

    // Verify the filesystem contains a single path (the directory) at its root
    Assert.AreEqual(1, filesystem.GetPaths().Count());

    // Cleanup
    filesystem.Delete();
}
/// <summary>
/// Upload a file with the DataLake API
/// </summary>
internal virtual async Task UploadDataLakeFile(long taskId, DataLakeFileClient fileClient, string filePath)
{
    if (this.Force.IsPresent || !fileClient.Exists() || ShouldContinue(string.Format(Resources.OverwriteConfirmation, GetDataLakeItemUriWithoutSas(fileClient)), null))
    {
        // Set Item Properties and MetaData
        PathHttpHeaders pathHttpHeaders = SetDatalakegen2ItemProperties(fileClient, BlobProperties, setToServer: false);
        IDictionary<string, string> metadata = SetDatalakegen2ItemMetaData(fileClient, BlobMetadata, setToServer: false);

        fileClient.Create(pathHttpHeaders,
            metadata,
            this.Permission,
            this.Umask != null ? DataLakeModels.PathPermissions.ParseSymbolicPermissions(this.Umask).ToOctalPermissions() : null);

        long fileSize = new FileInfo(ResolvedFileName).Length;

        string activity = string.Format(Resources.SendAzureBlobActivity, this.Source, this.Path, this.FileSystem);
        string status = Resources.PrepareUploadingBlob;
        ProgressRecord pr = new ProgressRecord(OutputStream.GetProgressId(taskId), activity, status);

        IProgress<long> progressHandler = new Progress<long>((finishedBytes) =>
        {
            if (pr != null)
            {
                // The source file might be empty; when it is, treat the progress as 100 percent.
                pr.PercentComplete = 0 == fileSize ? 100 : (int)(finishedBytes * 100 / fileSize);
                pr.StatusDescription = string.Format(CultureInfo.CurrentCulture, Resources.FileTransmitStatus, pr.PercentComplete);
                this.OutputStream.WriteProgress(pr);
            }
        });

        using (FileStream stream = File.OpenRead(ResolvedFileName))
        {
            await fileClient.AppendAsync(stream, 0, progressHandler: progressHandler, cancellationToken: CmdletCancellationToken).ConfigureAwait(false);
        }

        // Appended data is not committed until it is flushed to the final length of the file.
        await fileClient.FlushAsync(fileSize, cancellationToken: CmdletCancellationToken).ConfigureAwait(false);

        WriteDataLakeGen2Item(Channel, fileClient, taskId: taskId);
    }
}
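// A standalone sketch of the append-then-flush pattern the cmdlet helper above relies on, with
// progress reporting.  The method name, local path, and token parameters are illustrative
// assumptions; only the DataLakeFileClient calls (CreateAsync, AppendAsync, FlushAsync) come
// from the SDK itself.
internal static async Task AppendAndFlushWithProgressAsync(DataLakeFileClient fileClient, string localPath, CancellationToken token)
{
    long fileSize = new FileInfo(localPath).Length;

    // Report percent complete as bytes are appended; a zero-length source is treated as 100 percent.
    IProgress<long> progress = new Progress<long>(finishedBytes =>
        Console.WriteLine($"{(fileSize == 0 ? 100 : (int)(finishedBytes * 100 / fileSize))}% transferred"));

    // Create (or overwrite) the path, append the local file's contents, then flush to commit them.
    await fileClient.CreateAsync(cancellationToken: token).ConfigureAwait(false);
    using (FileStream stream = File.OpenRead(localPath))
    {
        await fileClient.AppendAsync(stream, 0, progressHandler: progress, cancellationToken: token).ConfigureAwait(false);
    }
    await fileClient.FlushAsync(fileSize, cancellationToken: token).ConfigureAwait(false);
}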
public Response<PathInfo> Upload(
    Stream content,
    PathHttpHeaders httpHeaders,
    DataLakeRequestConditions conditions,
    IProgress<long> progressHandler,
    CancellationToken cancellationToken)
{
    _client.Create(
        httpHeaders: httpHeaders,
        conditions: conditions,
        cancellationToken: cancellationToken);

    // After the file is created, the lease ID is the only valid request condition.
    conditions = new DataLakeRequestConditions
    {
        LeaseId = conditions?.LeaseId
    };

    // If we can compute the size and it's small enough
    if (PartitionedUploadExtensions.TryGetLength(content, out long contentLength) && contentLength < _singleUploadThreshold)
    {
        // Upload it in a single request
        _client.Append(
            content,
            offset: 0,
            leaseId: conditions?.LeaseId,
            progressHandler: progressHandler,
            cancellationToken: cancellationToken);

        // Calculate flush position
        long flushPosition = contentLength;

        return _client.Flush(
            position: flushPosition,
            httpHeaders: httpHeaders,
            conditions: conditions,
            cancellationToken: cancellationToken);
    }

    // If the caller provided an explicit block size, we'll use it.  Otherwise we'll
    // adjust dynamically based on the size of the content.
    int blockSize =
        _blockSize != null ? _blockSize.Value :
        contentLength < Constants.LargeUploadThreshold ?
            Constants.DefaultBufferSize :
            Constants.LargeBufferSize;

    // Otherwise stage individual blocks one at a time.  It's not as fast as a parallel
    // upload, but you get the benefit of the retry policy working on a single block
    // instead of the entire stream.
    return UploadInSequence(
        content,
        blockSize,
        httpHeaders,
        conditions,
        progressHandler,
        cancellationToken);
}
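// A partitioned helper like the one above underlies the public DataLakeFileClient.Upload call:
// the path is created, then small content is appended and flushed in one pass while larger
// content is staged in sequential blocks.  A minimal sketch of the equivalent public-API usage,
// assuming an existing DataLakeFileClient and local file path (both illustrative):
public void UploadViaFileClient(DataLakeFileClient file, string localPath)
{
    using (FileStream stream = File.OpenRead(localPath))
    {
        // Overwrite the file if it already exists; Append/Flush are handled internally.
        file.Upload(stream, overwrite: true);
    }
}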
public void Append()
{
    // Create three temporary Lorem Ipsum files on disk that we can upload
    int contentLength = 10;
    string sampleFileContentPart1 = CreateTempFile(SampleFileContent.Substring(0, contentLength));
    string sampleFileContentPart2 = CreateTempFile(SampleFileContent.Substring(contentLength, contentLength));
    string sampleFileContentPart3 = CreateTempFile(SampleFileContent.Substring(contentLength * 2, contentLength));

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-append" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
    filesystem.Create();
    try
    {
        // Get a reference to a file named "sample-file" in a filesystem
        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

        // Create the file
        file.Create();

        // Verify we created one file
        Assert.AreEqual(1, filesystem.GetPaths().Count());

        // Append data to an existing DataLake File.  Append is currently limited to 4000 MB per call.
        // To upload a large file all at once, consider using Upload() instead.
        file.Append(File.OpenRead(sampleFileContentPart1), 0);
        file.Append(File.OpenRead(sampleFileContentPart2), contentLength);
        file.Append(File.OpenRead(sampleFileContentPart3), contentLength * 2);
        file.Flush(contentLength * 3);

        // Verify the contents of the file
        PathProperties properties = file.GetProperties();
        Assert.AreEqual(contentLength * 3, properties.ContentLength);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
public void Rename()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-rename" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-rename"));
    filesystem.Create();
    try
    {
        // Create a DataLake Directory to rename it later
        DataLakeDirectoryClient directoryClient = filesystem.GetDirectoryClient(Randomize("sample-directory"));
        directoryClient.Create();

        // Rename the directory with a new path/name and verify by making a service call (e.g. GetProperties)
        #region Snippet:SampleSnippetDataLakeFileClient_RenameDirectory
        DataLakeDirectoryClient renamedDirectoryClient = directoryClient.Rename("sample-directory2");
        #endregion Snippet:SampleSnippetDataLakeFileClient_RenameDirectory
        PathProperties directoryPathProperties = renamedDirectoryClient.GetProperties();

        // Delete the sample directory using the new path/name
        filesystem.DeleteDirectory("sample-directory2");

        // Create a DataLake file.
        DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
        fileClient.Create();

        // Rename the file with a new path/name and verify by making a service call (e.g. GetProperties)
        #region Snippet:SampleSnippetDataLakeFileClient_RenameFile
        DataLakeFileClient renamedFileClient = fileClient.Rename("sample-file2");
        #endregion Snippet:SampleSnippetDataLakeFileClient_RenameFile
        PathProperties filePathProperties = renamedFileClient.GetProperties();

        // Delete the sample file using the new path/name
        filesystem.DeleteFile("sample-file2");
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
public void Read()
{
    // Create a temporary Lorem Ipsum file on disk that we can upload
    string originalPath = CreateTempFile(SampleFileContent);

    // Get a temporary path on disk where we can download the file
    string downloadPath = CreateTempPath();

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-read" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-read"));
    filesystem.Create();
    try
    {
        // Get a reference to a file named "sample-file" in a filesystem
        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

        // First upload something to the DataLake file so we have something to download
        file.Create();
        file.Append(File.OpenRead(originalPath), 0);
        file.Flush(SampleFileContent.Length);

        // Download the DataLake file's contents and save it to a file
        #region Snippet:SampleSnippetDataLakeFileClient_Read
        Response<FileDownloadInfo> fileContents = file.Read();
        #endregion Snippet:SampleSnippetDataLakeFileClient_Read
        using (FileStream stream = File.OpenWrite(downloadPath))
        {
            fileContents.Value.Content.CopyTo(stream);
        }

        // Verify the contents
        Assert.AreEqual(SampleFileContent, File.ReadAllText(downloadPath));
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
public void Upload()
{
    // Create a temporary Lorem Ipsum file on disk that we can upload
    int contentLength = 10;
    string sampleFileContent = CreateTempFile(SampleFileContent.Substring(0, contentLength));

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-append" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
    filesystem.Create();
    try
    {
        // Get a reference to a file named "sample-file" in a filesystem
        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

        // Create the file
        file.Create();

        // Verify we created one file
        Assert.AreEqual(1, filesystem.GetPaths().Count());

        // Upload content to the file.  When using the Upload API, you don't need to create the file first.
        // If the file already exists, it will be overwritten.
        // For larger files, Upload() will upload the file in multiple sequential requests.
        file.Upload(File.OpenRead(sampleFileContent), true);

        // Verify the contents of the file
        PathProperties properties = file.GetProperties();
        Assert.AreEqual(contentLength, properties.ContentLength);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
private static void SetAclAndGetFileProperties(DataLakeFileSystemClient client)
{
    DataLakeFileClient fileClient = client.GetFileClient("sample.txt");
    fileClient.Create();

    // Set Access Control List
    IList<PathAccessControlItem> accessControlList = PathAccessControlExtensions.ParseAccessControlList("user::rwx,group::r--,mask::rwx,other::---");
    fileClient.SetAccessControlList(accessControlList);

    PathAccessControl accessControlResponse = fileClient.GetAccessControl();
    Console.WriteLine($"User: {accessControlResponse.Owner}");
    Console.WriteLine($"Group: {accessControlResponse.Group}");
    Console.WriteLine($"Permissions: {accessControlResponse.Permissions}");
}
public void SetGetAcls()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = NamespaceStorageAccountName;
    string storageAccountKey = NamespaceStorageAccountKey;
    Uri serviceUri = NamespaceBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-acl" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-acl"));
    filesystem.Create();
    try
    {
        #region Snippet:SampleSnippetDataLakeFileClient_SetAcls
        // Create a DataLake file so we can set the Access Controls on the files
        DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
        fileClient.Create();

        // Set Access Control List
        IList<PathAccessControlItem> accessControlList = PathAccessControlExtensions.ParseAccessControlList("user::rwx,group::r--,mask::rwx,other::---");
        fileClient.SetAccessControlList(accessControlList);
        #endregion Snippet:SampleSnippetDataLakeFileClient_SetAcls

        #region Snippet:SampleSnippetDataLakeFileClient_GetAcls
        // Get Access Control List
        PathAccessControl accessControlResponse = fileClient.GetAccessControl();
        #endregion Snippet:SampleSnippetDataLakeFileClient_GetAcls

        // Check Access Control permissions
        Assert.AreEqual(
            PathAccessControlExtensions.ToAccessControlListString(accessControlList),
            PathAccessControlExtensions.ToAccessControlListString(accessControlResponse.AccessControlList.ToList()));
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
public void GetProperties()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem"));
    filesystem.Create();
    try
    {
        // Create a DataLake Directory
        DataLakeDirectoryClient directoryClient = filesystem.GetDirectoryClient(Randomize("sample-directory"));
        directoryClient.Create();

        #region Snippet:SampleSnippetDataLakeDirectoryClient_GetProperties
        // Get Properties on a Directory
        PathProperties directoryPathProperties = directoryClient.GetProperties();
        #endregion Snippet:SampleSnippetDataLakeDirectoryClient_GetProperties

        // Create a DataLake file
        DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
        fileClient.Create();

        #region Snippet:SampleSnippetDataLakeFileClient_GetProperties
        // Get Properties on a File
        PathProperties filePathProperties = fileClient.GetProperties();
        #endregion Snippet:SampleSnippetDataLakeFileClient_GetProperties
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
public void Append_Simple()
{
    // Create a sample file to read content from
    string sampleFilePath = CreateTempFile(SampleFileContent);

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-append" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
    filesystem.Create();
    try
    {
        #region Snippet:SampleSnippetDataLakeFileClient_Append
        // Create a file
        DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));
        file.Create();

        // Append data to the DataLake File
        file.Append(File.OpenRead(sampleFilePath), 0);
        file.Flush(SampleFileContent.Length);
        #endregion Snippet:SampleSnippetDataLakeFileClient_Append

        // Verify the contents of the file
        PathProperties properties = file.GetProperties();
        Assert.AreEqual(SampleFileContent.Length, properties.ContentLength);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
public void SetPermissions()
{
    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = NamespaceStorageAccountName;
    string storageAccountKey = NamespaceStorageAccountKey;
    Uri serviceUri = NamespaceBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-per" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-per"));
    filesystem.Create();
    try
    {
        #region Snippet:SampleSnippetDataLakeFileClient_SetPermissions
        // Create a DataLake file so we can set the Access Controls on the files
        DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
        fileClient.Create();

        // Set the Permissions of the file
        PathPermissions pathPermissions = PathPermissions.ParseSymbolicPermissions("rwxrwxrwx");
        fileClient.SetPermissions(permissions: pathPermissions);
        #endregion Snippet:SampleSnippetDataLakeFileClient_SetPermissions

        // Get Access Control List
        PathAccessControl accessControlResponse = fileClient.GetAccessControl();

        // Check Access Control permissions
        Assert.AreEqual(pathPermissions.ToSymbolicPermissions(), accessControlResponse.Permissions.ToSymbolicPermissions());
        Assert.AreEqual(pathPermissions.ToOctalPermissions(), accessControlResponse.Permissions.ToOctalPermissions());
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}
public virtual Response<DataLakeFileClient> CreateFile(
    string fileName,
    PathHttpHeaders httpHeaders = default,
    Metadata metadata = default,
    string permissions = default,
    string umask = default,
    DataLakeRequestConditions conditions = default,
    CancellationToken cancellationToken = default)
{
    DataLakeFileClient fileClient = GetFileClient(fileName);

    Response<PathInfo> response = fileClient.Create(
        httpHeaders,
        metadata,
        permissions,
        umask,
        conditions,
        cancellationToken);

    return Response.FromValue(
        fileClient,
        response.GetRawResponse());
}
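// A minimal usage sketch for the CreateFile convenience method above, assuming the containing
// client is a DataLakeFileSystemClient (a DataLakeDirectoryClient exposes the same shape); the
// method name, file name, and metadata values are illustrative.
public void CreateFile_UsageSketch(DataLakeFileSystemClient filesystem)
{
    // Create the file and get back a client for it in a single call.
    DataLakeFileClient file = filesystem.CreateFile(
        "sample-data.txt",
        metadata: new Dictionary<string, string> { { "origin", "sample" } });

    // The returned client can be used immediately, e.g. to inspect or append to the new file.
    Console.WriteLine(file.Uri);
}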
public void Traverse()
{
    // Create a temporary Lorem Ipsum file on disk that we can upload
    string originalPath = CreateTempFile(SampleFileContent);

    // Make StorageSharedKeyCredential to pass to the serviceClient
    string storageAccountName = StorageAccountName;
    string storageAccountKey = StorageAccountKey;
    Uri serviceUri = StorageAccountBlobUri;
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

    // Create DataLakeServiceClient using StorageSharedKeyCredentials
    DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

    // Get a reference to a filesystem named "sample-filesystem-traverse" and then create it
    DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-traverse"));
    filesystem.Create();
    try
    {
        // Create a bunch of directories and files within the directories
        DataLakeDirectoryClient first = filesystem.CreateDirectory("first");
        first.CreateSubDirectory("a");
        first.CreateSubDirectory("b");
        DataLakeDirectoryClient second = filesystem.CreateDirectory("second");
        second.CreateSubDirectory("c");
        second.CreateSubDirectory("d");
        filesystem.CreateDirectory("third");
        DataLakeDirectoryClient fourth = filesystem.CreateDirectory("fourth");
        DataLakeDirectoryClient deepest = fourth.CreateSubDirectory("e");

        // Upload a DataLake file named "file"
        DataLakeFileClient file = deepest.GetFileClient("file");
        file.Create();
        using (FileStream stream = File.OpenRead(originalPath))
        {
            file.Append(stream, 0);
        }

        // Keep track of all the names we encounter
        List<string> names = new List<string>();
        foreach (PathItem pathItem in filesystem.GetPaths(recursive: true))
        {
            names.Add(pathItem.Name);
        }

        // Verify we've seen everything
        Assert.AreEqual(10, names.Count);
        Assert.Contains("first", names);
        Assert.Contains("second", names);
        Assert.Contains("third", names);
        Assert.Contains("fourth", names);
        Assert.Contains("first/a", names);
        Assert.Contains("first/b", names);
        Assert.Contains("second/c", names);
        Assert.Contains("second/d", names);
        Assert.Contains("fourth/e", names);
        Assert.Contains("fourth/e/file", names);
    }
    finally
    {
        // Clean up after the test when we're finished
        filesystem.Delete();
    }
}