Example #1
        public void Append()
        {
            // Create three temporary Lorem Ipsum files on disk that we can upload
            int    contentLength          = 10;
            string sampleFileContentPart1 = CreateTempFile(SampleFileContent.Substring(0, contentLength));
            string sampleFileContentPart2 = CreateTempFile(SampleFileContent.Substring(contentLength, contentLength));
            string sampleFileContentPart3 = CreateTempFile(SampleFileContent.Substring(contentLength * 2, contentLength));

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-append" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));

            filesystem.Create();
            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // Create the file
                file.Create();

                // Verify we created one file
                Assert.AreEqual(1, filesystem.GetPaths().Count());

                // Append data to an existing DataLake File.  Append is currently limited to 4000 MB per call.
                // To upload a large file all at once, consider using Upload() instead.
                file.Append(File.OpenRead(sampleFileContentPart1), 0);
                file.Append(File.OpenRead(sampleFileContentPart2), contentLength);
                file.Append(File.OpenRead(sampleFileContentPart3), contentLength * 2);
                file.Flush(contentLength * 3);

                // Verify the contents of the file
                PathProperties properties = file.GetProperties();
                Assert.AreEqual(contentLength * 3, properties.ContentLength);
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
        public async Task ReadAsync()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            string originalPath = CreateTempFile(SampleFileContent);

            // Get a temporary path on disk where we can download the file
            string downloadPath = CreateTempPath();

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-readasync" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-read"));
            await filesystem.CreateAsync();

            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // First upload something to the DataLake file so we have something to download
                await file.CreateAsync();

                await file.AppendAsync(File.OpenRead(originalPath), 0);

                await file.FlushAsync(SampleFileContent.Length);

                // Download the DataLake file's contents and save it to a file
                Response <FileDownloadInfo> fileContents = await file.ReadAsync();

                using (FileStream stream = File.OpenWrite(downloadPath))
                {
                    fileContents.Value.Content.CopyTo(stream);
                }

                // Verify the contents
                Assert.AreEqual(SampleFileContent, File.ReadAllText(downloadPath));
            }
            finally
            {
                // Clean up after the test when we're finished
                await filesystem.DeleteAsync();
            }
        }
Example #3
        /// <summary>
        /// Initializes a new instance of DataLakeStoreService.
        /// </summary>
        /// <param name="configuration">Context configuration.</param>
        /// <param name="httpContextAccessor">Http context.</param>
        public DataLakeStoreService(IOptions <DavContextConfig> configuration, IHttpContextAccessor httpContextAccessor)
        {
            var token = httpContextAccessor.HttpContext.User.Claims.FirstOrDefault(x => x.Type == "access_token")?.Value;

            if (token == null)
            {
                notAuthorized = true;
            }
            DavContextConfig config = configuration.Value;
            var credential          = new DataLakeTokenCredential(token, DateTimeOffset.MaxValue);
            var dfsUri = "https://" + config.AzureStorageAccountName + ".dfs.core.windows.net";

            dataLakeClient = new DataLakeServiceClient(new Uri(dfsUri), credential).GetFileSystemClient(config.DataLakeContainerName);
        }
        /// <summary>
        /// Function to list one million blobs in random order.
        /// Parallelization can be controlled via 'numTasks'.
        /// </summary>
        public static void ListMillionBlobsRandomOrder(DataLakeFileSystemClient fileSystemClient, string directoryName)
        {
            // Assuming that listing is being done on a directory that has
            // million blobs which have been partitioned [into subdirectories]

            // Step 1- Perform a non recursive listing on the directory
            // and capture all those directories in a List<>
            Helper helper = new Helper();

            Console.WriteLine("Performing a non recursive listing on top level.");
            helper.ListBlobs(fileSystemClient, directoryName, false, true).GetAwaiter().GetResult();

            // Step 2- Chunk the above list into buckets of 'numTasks' and let
            // each bucket print its listing output into a unique directory
            int           numTasks = 1000;
            List <string> temp     = new List <string>();

            // Step 3- Initiate recursive listing on all the buckets in parallel
            Console.WriteLine("Initiating {0} parallel tasks to provide randomness in listing.",
                              numTasks);

            int i = 0;

            while (i < helper.directoryList.Count)
            {
                List <TaskAwaiter> allTasks = new List <TaskAwaiter>();

                for (int j = 0; j < numTasks; j++)
                {
                    allTasks.Add(helper.ListBlobs(fileSystemClient, helper.directoryList[i],
                                                  true, false).GetAwaiter());;
                    i++;
                }

                // Wait on all tasks to get finished
                for (int j = 0; j < allTasks.Count; j++)
                {
                    try
                    {
                        allTasks[j].GetResult();
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Task Id {0}, failed with -\n", j);
                        Console.WriteLine(e.Message);
                    }
                }
            }
        }
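A minimal calling sketch for the helper above; the account, key, filesystem, and directory names below are placeholders, and the directory is assumed to already contain the partitioned layout described in the comments:

        public static void RunListingSample()
        {
            // Hypothetical account details; substitute real values before running.
            var credential    = new StorageSharedKeyCredential("<account-name>", "<account-key>");
            var serviceClient = new DataLakeServiceClient(
                new Uri("https://<account-name>.dfs.core.windows.net"), credential);

            DataLakeFileSystemClient fileSystemClient =
                serviceClient.GetFileSystemClient("<filesystem-name>");

            // Kick off the randomized, parallel listing over the partitioned directory.
            ListMillionBlobsRandomOrder(fileSystemClient, "<partitioned-directory>");
        }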
        public void Read()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            string originalPath = CreateTempFile(SampleFileContent);

            // Get a temporary path on disk where we can download the file
            string downloadPath = CreateTempPath();

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-read" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-read"));

            filesystem.Create();
            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // First upload something to the DataLake file so we have something to download
                file.Upload(File.OpenRead(originalPath));

                // Download the DataLake file's contents and save it to a file
                // The Read() API downloads the file in a single request.
                // For large files, it may be faster to call ReadTo()
                #region Snippet:SampleSnippetDataLakeFileClient_Read
                Response <FileDownloadInfo> fileContents = file.Read();
                #endregion Snippet:SampleSnippetDataLakeFileClient_Read
                using (FileStream stream = File.OpenWrite(downloadPath))
                {
                    fileContents.Value.Content.CopyTo(stream);
                }

                // Verify the contents
                Assert.AreEqual(SampleFileContent, File.ReadAllText(downloadPath));
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
        public void Rename()
        {
            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-rename" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-rename"));

            filesystem.Create();
            try
            {
                // Create a DataLake Directory to rename it later
                DataLakeDirectoryClient directoryClient = filesystem.GetDirectoryClient(Randomize("sample-directory"));
                directoryClient.Create();

                // Rename directory with new path/name and verify by making a service call (e.g. GetProperties)
                #region Snippet:SampleSnippetDataLakeFileClient_RenameDirectory
                DataLakeDirectoryClient renamedDirectoryClient = directoryClient.Rename("sample-directory2");
                #endregion Snippet:SampleSnippetDataLakeFileClient_RenameDirectory
                PathProperties directoryPathProperties = renamedDirectoryClient.GetProperties();

                // Delete the sample directory using the new path/name
                filesystem.DeleteDirectory("sample-directory2");

                // Create a DataLake file.
                DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
                fileClient.Create();

                // Rename file with new path/name and verify by making a service call (e.g. GetProperties)
                #region Snippet:SampleSnippetDataLakeFileClient_RenameFile
                DataLakeFileClient renamedFileClient = fileClient.Rename("sample-file2");
                #endregion Snippet:SampleSnippetDataLakeFileClient_RenameFile
                PathProperties filePathProperties = renamedFileClient.GetProperties();

                // Delete the sample file using the new path/name
                filesystem.DeleteFile("sample-file2");
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
        public async Task <dynamic> DownloadJsonData(DataLakeFileSystemClient fileSystemClient, string directory, string filePath)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient(directory);

            DataLakeFileClient fileClient =
                directoryClient.GetFileClient(filePath);

            Response <FileDownloadInfo> downloadResponse = await fileClient.ReadAsync();

            var streamContent = new StreamContent(downloadResponse.Value.Content);
            var stringContent = await streamContent.ReadAsStringAsync();

            return(JsonConvert.DeserializeObject <dynamic>(stringContent));
        }
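A hedged usage sketch for DownloadJsonData; the directory and file names are placeholders, and the JSON document is assumed to contain an "id" field:

        public async Task PrintJsonIdSample(DataLakeFileSystemClient fileSystemClient)
        {
            // Hypothetical directory/file names for illustration only.
            dynamic data = await DownloadJsonData(fileSystemClient, "my-directory", "sample-data.json");

            // Json.NET exposes the deserialized document via dynamic member access.
            Console.WriteLine((string)data.id);
        }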
Example #8
        /// <summary>
        /// execute command
        /// </summary>
        public override void ExecuteCmdlet()
        {
            IStorageBlobManagement localChannel   = Channel;
            BlobRequestOptions     requestOptions = RequestOptions;

            bool foundAFolder = false;

            DataLakeFileClient      fileClient = null;
            DataLakeDirectoryClient dirClient  = null;

            if (ParameterSetName == ManualParameterSet)
            {
                DataLakeFileSystemClient fileSystem = GetFileSystemClientByName(localChannel, this.FileSystem);
                foundAFolder = GetExistDataLakeGen2Item(fileSystem, this.Path, out fileClient, out dirClient);
            }
            else //BlobParameterSet
            {
                if (!InputObject.IsDirectory)
                {
                    fileClient = InputObject.File;
                }
                else
                {
                    dirClient    = InputObject.Directory;
                    foundAFolder = true;
                }
            }

            if (foundAFolder)
            {
                if (force || ShouldContinue(string.Format("Remove Directory: {0}", GetDataLakeItemUriWithoutSas(dirClient)), ""))
                {
                    dirClient.Delete(true);
                }
            }
            else
            {
                if (force || ShouldContinue(string.Format("Remove File: {0}", GetDataLakeItemUriWithoutSas(fileClient)), ""))
                {
                    fileClient.Delete();
                }
            }

            if (PassThru)
            {
                WriteObject(true);
            }
        }
Example #9
        // Bulk upload and download
        private static void RunFileTransfer(DataLakeFileSystemClient client)
        {
            Directory.CreateDirectory(localFileTransferPath);
            var fileName = localFileTransferPath + @"\testUploadFile.txt";

            Console.WriteLine("Creating the test file to upload:");
            using (var stream = new StreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite)))
            {
                stream.WriteLine("Hello I am the first line of upload.");
                stream.WriteLine("Hello I am the second line of upload.");
            }
            var destFile            = remoteFileTransferPath + "/testremoteUploadFile.txt";
            DataLakeFileClient file = client.GetFileClient(destFile);

            Console.WriteLine("Upload of the file:");
            file.Upload(fileName); // Upload the local file to the DataLake file
            Response <FileDownloadInfo> fileContents = file.Read();
            MemoryStream fileContentDown             = new MemoryStream();

            fileContents.Value.Content.CopyTo(fileContentDown);

            // The response stream has been consumed by the copy above, so rewind the in-memory
            // copy and print the downloaded content from it. The reader is deliberately not
            // disposed so that fileContentDown stays open for the copy to disk below.
            fileContentDown.Position = 0;
            var readStream = new StreamReader(fileContentDown);
            string downloadedLine;
            while ((downloadedLine = readStream.ReadLine()) != null)
            {
                Console.WriteLine(downloadedLine);
            }
            var localDestFile = localFileTransferPath + @"\testlocalDownloadFile.txt";

            Console.WriteLine("Download of the uploaded file:");

            // Rewind the buffered content again before writing it to the local file.
            fileContentDown.Position = 0;
            using (FileStream stream = File.OpenWrite(localDestFile))
            {
                fileContentDown.CopyTo(stream);
            }
            fileContentDown.Close();
            using (var stream = new StreamReader(File.OpenRead(localDestFile)))
            {
                string line;
                while ((line = stream.ReadLine()) != null)
                {
                    Console.WriteLine(line);
                }
            }
            Directory.Delete(localFileTransferPath, true);
            client.DeleteDirectory(remoteFileTransferPath);
        }
        // </Snippet_ListFilesInDirectory>

        #endregion

        #region Upload files to a directory

        // ---------------------------------------------------------
        // Upload files to the directory.
        //----------------------------------------------------------

        // <Snippet_UploadFile>
        public async Task UploadFile(DataLakeFileSystemClient fileSystemClient)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient("my-directory");

            DataLakeFileClient fileClient = await directoryClient.CreateFileAsync("uploaded-file.txt");

            FileStream fileStream =
                File.OpenRead("C:\\Users\\contoso\\Temp\\file-to-upload.txt");

            long fileSize = fileStream.Length;

            await fileClient.AppendAsync(fileStream, offset : 0);

            await fileClient.FlushAsync(position : fileSize);
        }
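As a hedged alternative sketch, DataLakeFileClient.UploadAsync can replace the explicit Create/Append/Flush sequence (and splits larger files into multiple requests on its own); the directory and file paths below mirror the sample above:

        public async Task UploadFileWithUploadAsync(DataLakeFileSystemClient fileSystemClient)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient("my-directory");

            DataLakeFileClient fileClient = directoryClient.GetFileClient("uploaded-file.txt");

            // UploadAsync creates (or overwrites) the file and flushes it when the copy completes.
            using (FileStream fileStream =
                File.OpenRead("C:\\Users\\contoso\\Temp\\file-to-upload.txt"))
            {
                await fileClient.UploadAsync(fileStream, overwrite: true);
            }
        }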
Example #11
            public async ValueTask DisposeAsync()
            {
                if (FileSystem != null)
                {
                    try
                    {
                        await FileSystem.DeleteAsync();

                        FileSystem = null;
                    }
                    catch
                    {
                        // swallow the exception to avoid hiding another test failure
                    }
                }
            }
        public async Task UploadJsonData(DataLakeFileSystemClient fileSystemClient, string directory, string jsonData, string dataId)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient(directory);

            DataLakeFileClient fileClient = await directoryClient.CreateFileAsync(dataId + ".json");

            byte[]       byteArray = Encoding.ASCII.GetBytes(jsonData);
            MemoryStream stream    = new MemoryStream(byteArray);

            long fileSize = stream.Length;

            await fileClient.AppendAsync(stream, offset : 0);

            await fileClient.FlushAsync(position : fileSize);
        }
Example #13
        public async Task PathClient_CanGetParentContainerClient()
        {
            // Arrange
            await using DisposingFileSystem test = await DataLakeClientBuilder.GetNewFileSystem();

            DataLakeFileClient fileClient = InstrumentClient(test.Container.GetRootDirectoryClient().GetFileClient(DataLakeClientBuilder.GetNewFileName()));

            // Act
            DataLakeFileSystemClient filesystemClient = fileClient.GetParentFileSystemClient();
            // make sure that client is functional
            var containerProperties = await filesystemClient.GetPropertiesAsync();

            // Assert
            Assert.AreEqual(fileClient.FileSystemName, filesystemClient.Name);
            Assert.AreEqual(fileClient.AccountName, filesystemClient.AccountName);
            Assert.IsNotNull(containerProperties);
        }
Example #14
        private static void SetAclAndGetFileProperties(DataLakeFileSystemClient client)
        {
            DataLakeFileClient fileClient = client.GetFileClient("sample.txt");

            fileClient.Create();

            // Set Access Control List
            IList <PathAccessControlItem> accessControlList
                = PathAccessControlExtensions.ParseAccessControlList("user::rwx,group::r--,mask::rwx,other::---");

            fileClient.SetAccessControlList(accessControlList);
            PathAccessControl accessControlResponse = fileClient.GetAccessControl();

            Console.WriteLine($"User: {accessControlResponse.Owner}");
            Console.WriteLine($"Group: {accessControlResponse.Group}");
            Console.WriteLine($"Permissions: {accessControlResponse.Permissions}");
        }
        /// <summary>
        /// Remove ACL recursively (async function)
        /// </summary>
        protected override async Task OperationAclResusive(long taskId)
        {
            IStorageBlobManagement localChannel = Channel;

            progressRecord    = GetProgressRecord("Remove", taskId);
            continuationToken = this.ContinuationToken;

            bool foundAFolder = false;

            DataLakeFileClient      fileClient = null;
            DataLakeDirectoryClient dirClient  = null;

            DataLakeFileSystemClient fileSystem = GetFileSystemClientByName(localChannel, this.FileSystem);

            foundAFolder = GetExistDataLakeGen2Item(fileSystem, this.Path, out fileClient, out dirClient);


            if (foundAFolder)
            {
                if (ShouldProcess(dirClient.Uri.ToString(), "Remove Acl recursively on Directory: "))
                {
                    WriteWarning("To find the ACL Entry to remove, will only compare AccessControlType, DefaultScope and EntityId, will omit Permission.");
                    await dirClient.RemoveAccessControlRecursiveAsync(PSPathAccessControlEntry.ParseRemoveAccessControls(this.Acl),
                                                                      continuationToken,
                                                                      GetAccessControlChangeOptions(taskId),
                                                                      CmdletCancellationToken).ConfigureAwait(false);

                    SetProgressComplete();
                    WriteResult(taskId);
                }
            }
            else
            {
                if (ShouldProcess(fileClient.Uri.ToString(), "Remove Acl recursively on File: "))
                {
                    WriteWarning("To find the ACL Entry to remove, will only compare AccessControlType, DefaultScope and EntityId, will omit Permission.");
                    await fileClient.RemoveAccessControlRecursiveAsync(PSPathAccessControlEntry.ParseRemoveAccessControls(this.Acl),
                                                                       continuationToken,
                                                                       GetAccessControlChangeOptions(taskId),
                                                                       CmdletCancellationToken).ConfigureAwait(false);

                    SetProgressComplete();
                    WriteResult(taskId);
                }
            }
        }
Example #16
        public async Task CreateFileSystemAsync()
        {
            var name = GetNewFileSystemName();
            DataLakeServiceClient service = GetServiceClient_SharedKey();

            try
            {
                DataLakeFileSystemClient        fileSystem = InstrumentClient((await service.CreateFileSystemAsync(name)).Value);
                Response <FileSystemProperties> properties = await fileSystem.GetPropertiesAsync();

                Assert.IsNotNull(properties.Value);
            }
            finally
            {
                await service.DeleteFileSystemAsync(name);
            }
        }
        public void Upload()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            int    contentLength     = 10;
            string sampleFileContent = CreateTempFile(SampleFileContent.Substring(0, contentLength));

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-append" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));

            filesystem.Create();
            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // Create the file
                file.Create();

                // Verify we created one file
                Assert.AreEqual(1, filesystem.GetPaths().Count());

                // Upload content to the file.  When using the Upload API, you don't need to create the file first.
                // If the file already exists, it will be overwritten.
                // For larger files, Upload() will upload the file in multiple sequential requests.
                file.Upload(File.OpenRead(sampleFileContent), true);

                // Verify the contents of the file
                PathProperties properties = file.GetProperties();
                Assert.AreEqual(contentLength, properties.ContentLength);
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
Example #18
        private static async Task CreateFoldersAndFiles(DataLakeFileSystemClient ADLSFileSystemClient, string workingPath, int level)
        {
            Console.WriteLine("START-folder: " + workingPath);

            // Unclear behavior: when the files already exist, only ADLSFileSystemClient.GetPaths is called.
            // This is sync and the tasks do not parallelize. But if I add Task.Delay() then tasks run in parallel as expected.
            // ... investigate...
            await Task.Delay(1);

            var           ADLSDirectoryClient = ADLSFileSystemClient.GetDirectoryClient(workingPath);
            List <string> existingItems       = ADLSFileSystemClient.GetPaths(workingPath, recursive: false).Select(x => x.Name).ToList();

            // create files (if required)
            for (int i = 0; i < _filesPerFolder; i++)
            {
                Interlocked.Increment(ref _fileCounter);
                var adlsFileClient = ADLSDirectoryClient.GetFileClient(i.ToString("0000") + ".txt");
                if (!existingItems.Contains(adlsFileClient.Path))
                {
                    await adlsFileClient.UploadAsync(new MemoryStream(Encoding.UTF8.GetBytes("fake data " + DateTime.UtcNow.ToString("O"))), true);

                    Console.Write(".");
                }
            }

            // create sub-folders
            if (level > 0)
            {
                List <string> subFolders = Enumerable.Range(0, 10).Select(i => workingPath + "/" + i.ToString("000")).ToList();
                foreach (var item in subFolders)
                {
                    Interlocked.Increment(ref _folderCounter);
                    if (!existingItems.Contains(item))
                    {
                        ADLSFileSystemClient.GetDirectoryClient(item).Create();
                    }
                }

                // start the sub-tasks and let them run in parallel
                var tasks = subFolders.Select(subFolder => CreateFoldersAndFiles(ADLSFileSystemClient, subFolder, level - 1));
                await Task.WhenAll(tasks);
            }

            Console.WriteLine("END-folder: " + workingPath);
        }
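A minimal driver sketch for the recursive helper above, assuming the static fields it references (_filesPerFolder, _fileCounter, _folderCounter) live on the same class; the connection details and root folder name are placeholders:

        private static async Task RunCreateFoldersAndFilesSample()
        {
            // Hypothetical connection details for illustration only.
            var serviceClient = new DataLakeServiceClient(
                new Uri("https://<account-name>.dfs.core.windows.net"),
                new StorageSharedKeyCredential("<account-name>", "<account-key>"));

            DataLakeFileSystemClient fileSystemClient = serviceClient.GetFileSystemClient("<filesystem-name>");

            // Make sure the root folder exists before the helper lists it.
            await fileSystemClient.GetDirectoryClient("root").CreateIfNotExistsAsync();

            // Build a tree two levels deep under "root".
            await CreateFoldersAndFiles(fileSystemClient, "root", level: 2);

            Console.WriteLine($"Visited {_folderCounter} folders and {_fileCounter} files.");
        }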
Example #19
        public static async Task <DisposingFileSystem> GetNewFileSystem(
            this DataLakeClientBuilder clientBuilder,
            DataLakeServiceClient service         = default,
            string fileSystemName                 = default,
            IDictionary <string, string> metadata = default,
            PublicAccessType?publicAccessType     = default,
            bool premium    = default,
            bool hnsEnabled = true)
        {
            fileSystemName ??= clientBuilder.GetNewFileSystemName();
            service ??= hnsEnabled ? clientBuilder.GetServiceClient_Hns() : clientBuilder.GetServiceClient_NonHns();

            if (publicAccessType == default)
            {
                publicAccessType = premium ? PublicAccessType.None : PublicAccessType.FileSystem;
            }

            DataLakeFileSystemClient fileSystem = clientBuilder.AzureCoreRecordedTestBase.InstrumentClient(service.GetFileSystemClient(fileSystemName));

            // due to a service issue, if the initial container creation request times out, subsequent requests
            // can return a ContainerAlreadyExists code even though the container doesn't really exist.
            // we delay until after the service cache timeout and then attempt to create the container one more time.
            // If this attempt still fails, we mark the test as inconclusive.
            // TODO Remove this handling after the service bug is fixed https://github.com/Azure/azure-sdk-for-net/issues/9399
            try
            {
                await StorageTestBase <DataLakeTestEnvironment> .RetryAsync(
                    clientBuilder.AzureCoreRecordedTestBase.Recording.Mode,
                    async() => await fileSystem.CreateAsync(metadata: metadata, publicAccessType: publicAccessType.Value),
                    ex => ex.ErrorCode == Blobs.Models.BlobErrorCode.ContainerAlreadyExists,
                    retryDelay : TestConstants.DataLakeRetryDelay,
                    retryAttempts : 1);
            }
            catch (RequestFailedException storageRequestFailedException)
                when(storageRequestFailedException.ErrorCode == Blobs.Models.BlobErrorCode.ContainerAlreadyExists)
                {
                    // if we still get this error after retrying, mark the test as inconclusive
                    TestContext.Out.WriteLine(
                        $"{TestContext.CurrentContext.Test.Name} is inconclusive due to hitting " +
                        $"the DataLake service bug described in https://github.com/Azure/azure-sdk-for-net/issues/9399");
                    Assert.Inconclusive(); // passing the message in Inconclusive call doesn't show up in Console output.
                }

            return(new DisposingFileSystem(fileSystem));
        }
Example #20
        private static DataLakeFileSystemClient GetDataLakeClient(DataLakeConfig settings, ILogger log)
        {
            // This works as long as the identity doing the accessing (managed identity or Visual Studio user) has both of the following IAM permissions on the storage account:
            // - Reader
            // - Storage Blob Data Reader
            var credential = new DefaultAzureCredential();

            log.LogInformation($"Using credential Type: {credential.GetType().Name}");

            var client = new DataLakeFileSystemClient(new Uri(settings.BaseUrl), credential);

            if (!client.Exists())
            {
                return(null);
            }

            return(client);
        }
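A hedged usage sketch for the factory above; DataLakeConfig and the logger come from the surrounding function app, and only the top level of the file system is listed:

        private static void ListTopLevelPaths(DataLakeConfig settings, ILogger log)
        {
            DataLakeFileSystemClient client = GetDataLakeClient(settings, log);

            if (client == null)
            {
                log.LogWarning("File system does not exist or is not reachable.");
                return;
            }

            // Enumerate only the top level of the file system.
            foreach (PathItem path in client.GetPaths(recursive: false))
            {
                log.LogInformation($"Path: {path.Name}");
            }
        }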
        public void ReadTo()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            string originalPath = CreateTempFile(SampleFileContent);

            // Get a temporary path on disk where we can download the file
            string downloadPath = CreateTempPath();

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-read" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-read"));

            filesystem.Create();
            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // First upload something to the DataLake file so we have something to download
                file.Upload(File.OpenRead(originalPath));

                // Download the DataLake file directly to a local file.
                // For larger files, ReadTo() will download the file in multiple sequential requests.
                #region Snippet:SampleSnippetDataLakeFileClient_ReadTo
                file.ReadTo(downloadPath);
                #endregion Snippet:SampleSnippetDataLakeFileClient_ReadTo

                // Verify the contents
                Assert.AreEqual(SampleFileContent, File.ReadAllText(downloadPath));
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
Example #22
        public async Task Ctor_ConnectionString_RoundTrip()
        {
            // Arrange
            string connectionString = $"DefaultEndpointsProtocol=https;AccountName={TestConfigHierarchicalNamespace.AccountName};AccountKey={TestConfigHierarchicalNamespace.AccountKey};EndpointSuffix=core.windows.net";
            DataLakeServiceClient    serviceClient = InstrumentClient(new DataLakeServiceClient(connectionString, GetOptions()));
            DataLakeFileSystemClient fileSystem    = InstrumentClient(serviceClient.GetFileSystemClient(GetNewFileSystemName()));

            // Act
            try
            {
                await fileSystem.CreateAsync();
            }

            // Cleanup
            finally
            {
                await fileSystem.DeleteAsync();
            }
        }
        public void SetGetAcls()
        {
            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = NamespaceStorageAccountName;
            string storageAccountKey  = NamespaceStorageAccountKey;
            Uri    serviceUri         = NamespaceBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-acl" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-acl"));

            filesystem.Create();
            try
            {
                #region Snippet:SampleSnippetDataLakeFileClient_SetAcls
                // Create a DataLake file so we can set the Access Controls on the files
                DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
                fileClient.Create();

                // Set Access Control List
                IList <PathAccessControlItem> accessControlList
                    = PathAccessControlExtensions.ParseAccessControlList("user::rwx,group::r--,mask::rwx,other::---");
                fileClient.SetAccessControlList(accessControlList);
                #endregion Snippet:SampleSnippetDataLakeFileClient_SetAcls
                #region Snippet:SampleSnippetDataLakeFileClient_GetAcls
                // Get Access Control List
                PathAccessControl accessControlResponse = fileClient.GetAccessControl();
                #endregion Snippet:SampleSnippetDataLakeFileClient_GetAcls

                // Check Access Control permissions
                Assert.AreEqual(
                    PathAccessControlExtensions.ToAccessControlListString(accessControlList),
                    PathAccessControlExtensions.ToAccessControlListString(accessControlResponse.AccessControlList.ToList()));
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
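For comparison, a hedged sketch that builds a similar ACL from PathAccessControlItem objects instead of parsing the string form; the file client is assumed to already exist, and the mask entry is omitted here:

        public void SetAclsFromItems(DataLakeFileClient fileClient)
        {
            // Equivalent to "user::rwx,group::r--,other::---" built item by item.
            IList<PathAccessControlItem> accessControlList = new List<PathAccessControlItem>
            {
                new PathAccessControlItem(
                    AccessControlType.User,
                    RolePermissions.Read | RolePermissions.Write | RolePermissions.Execute),
                new PathAccessControlItem(AccessControlType.Group, RolePermissions.Read),
                new PathAccessControlItem(AccessControlType.Other, RolePermissions.None)
            };

            fileClient.SetAccessControlList(accessControlList);
        }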
Example #24
        // </Snippet_GetFileSystem>

        #endregion

        #region Get and set directory ACLs

        // ---------------------------------------------------------
        // Get and set directory-level ACLs
        //----------------------------------------------------------

        // <Snippet_ACLDirectory>
        public async Task ManageDirectoryACLs(DataLakeFileSystemClient fileSystemClient)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient("");

            PathAccessControl directoryAccessControl =
                await directoryClient.GetAccessControlAsync();

            foreach (var item in directoryAccessControl.AccessControlList)
            {
                Console.WriteLine(item.ToString());
            }

            IList <PathAccessControlItem> accessControlList
                = PathAccessControlExtensions.ParseAccessControlList
                      ("user::rwx,group::r-x,other::rw-");

            directoryClient.SetAccessControlList(accessControlList);
        }
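A hedged follow-up sketch: when the same entries should apply to everything under the directory, SetAccessControlRecursiveAsync pushes them down to all child paths; the directory name is a placeholder:

        public async Task SetDirectoryACLsRecursively(DataLakeFileSystemClient fileSystemClient)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient("my-directory");

            IList <PathAccessControlItem> accessControlList
                = PathAccessControlExtensions.ParseAccessControlList
                      ("user::rwx,group::r-x,other::rw-");

            // Apply the ACL to the directory and every path underneath it.
            await directoryClient.SetAccessControlRecursiveAsync(accessControlList);
        }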
        public async Task DataLakeSasBuilderRawPermissions_2020_02_10(string permissionsString)
        {
            // Arrange
            DataLakeServiceClient oauthService = GetServiceClient_OAuth();
            string fileSystemName = GetNewFileSystemName();
            string directoryName  = GetNewDirectoryName();

            await using DisposingFileSystem test = await GetNewFileSystem(service : oauthService, fileSystemName : fileSystemName);

            // Arrange
            DataLakeDirectoryClient directory = await test.FileSystem.CreateDirectoryAsync(directoryName);

            DataLakeFileClient file = await directory.CreateFileAsync(GetNewFileName());

            Response <UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
                startsOn : null,
                expiresOn : Recording.UtcNow.AddHours(1));

            DataLakeSasBuilder dataLakeSasBuilder = new DataLakeSasBuilder
            {
                StartsOn       = Recording.UtcNow.AddHours(-1),
                ExpiresOn      = Recording.UtcNow.AddHours(1),
                FileSystemName = test.FileSystem.Name
            };

            dataLakeSasBuilder.SetPermissions(
                rawPermissions: permissionsString,
                normalize: true);

            DataLakeUriBuilder dataLakeUriBuilder = new DataLakeUriBuilder(test.FileSystem.Uri)
            {
                Sas = dataLakeSasBuilder.ToSasQueryParameters(userDelegationKey, test.FileSystem.AccountName)
            };

            DataLakeFileSystemClient sasFileSystemClient = InstrumentClient(new DataLakeFileSystemClient(dataLakeUriBuilder.ToUri(), GetOptions()));

            // Act
            await foreach (PathItem pathItem in sasFileSystemClient.GetPathsAsync())
            {
                // Just make sure the call succeeds.
            }
        }
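A hedged variation of the SAS flow above for when a StorageSharedKeyCredential is available instead of a user delegation key; DataLakeSasBuilder has a matching ToSasQueryParameters overload:

        public Uri GetReadSasForFileSystem(DataLakeFileSystemClient fileSystem, StorageSharedKeyCredential sharedKeyCredential)
        {
            DataLakeSasBuilder sasBuilder = new DataLakeSasBuilder
            {
                StartsOn       = DateTimeOffset.UtcNow.AddHours(-1),
                ExpiresOn      = DateTimeOffset.UtcNow.AddHours(1),
                FileSystemName = fileSystem.Name
            };

            // Grant read and list access to the whole file system.
            sasBuilder.SetPermissions(DataLakeFileSystemSasPermissions.Read | DataLakeFileSystemSasPermissions.List);

            DataLakeUriBuilder uriBuilder = new DataLakeUriBuilder(fileSystem.Uri)
            {
                Sas = sasBuilder.ToSasQueryParameters(sharedKeyCredential)
            };

            return uriBuilder.ToUri();
        }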
        /// <summary>
        /// Initializes a new instance of the <see cref="AzureBlobContainerClient"/> class for local tests
        /// since MI auth is not supported by the local emulator.
        /// </summary>
        /// <param name="connectionString">Storage connection string.</param>
        /// <param name="containerName">Container name.</param>
        /// <param name="logger">logger.</param>
        public AzureBlobContainerClient(
            string connectionString,
            string containerName,
            ILogger <AzureBlobContainerClient> logger)
        {
            EnsureArg.IsNotNull(connectionString, nameof(connectionString));
            EnsureArg.IsNotNull(containerName, nameof(containerName));
            EnsureArg.IsNotNull(logger, nameof(logger));

            _logger = logger;

            _blobContainerClient = new BlobContainerClient(
                connectionString,
                containerName);
            _dataLakeFileSystemClient = new DataLakeFileSystemClient(
                connectionString,
                containerName);

            InitializeBlobContainerClient();
        }
Example #27
        private async Task GetFiles(DataLakeFileSystemClient client, string folderPath, List <string> paths)
        {
            var e = client.GetPathsAsync(folderPath).GetAsyncEnumerator();

            while (true)
            {
                if (!await e.MoveNextAsync())
                {
                    break;
                }
                else if (!e.Current.IsDirectory ?? false)
                {
                    paths.Add(e.Current.Name);
                }
                else
                {
                    await GetFiles(client, e.Current.Name, paths);
                }
            }
        }
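As a hedged alternative sketch, GetPathsAsync can flatten the hierarchy itself with recursive: true, which avoids the manual recursion used above:

        private async Task GetFilesFlat(DataLakeFileSystemClient client, string folderPath, List <string> paths)
        {
            // Let the service walk the hierarchy; only file entries are collected.
            await foreach (PathItem item in client.GetPathsAsync(folderPath, recursive: true))
            {
                if (!(item.IsDirectory ?? false))
                {
                    paths.Add(item.Name);
                }
            }
        }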
Example #28
        public async Task <DisposingFileSystem> GetNewFileSystem(
            DataLakeServiceClient service         = default,
            string fileSystemName                 = default,
            IDictionary <string, string> metadata = default,
            PublicAccessType publicAccessType     = PublicAccessType.None,
            bool premium = default)
        {
            fileSystemName ??= GetNewFileSystemName();
            service ??= GetServiceClient_SharedKey();

            if (publicAccessType == PublicAccessType.None)
            {
                publicAccessType = premium ? PublicAccessType.None : PublicAccessType.FileSystem;
            }

            DataLakeFileSystemClient fileSystem = InstrumentClient(service.GetFileSystemClient(fileSystemName));
            await fileSystem.CreateAsync(metadata : metadata, publicAccessType : publicAccessType);

            return(new DisposingFileSystem(fileSystem));
        }
        public override void ExecuteCmdlet()
        {
            IStorageBlobManagement localChannel = Channel;

            DataLakeFileSystemClient fileSystem = GetFileSystemClientByName(localChannel, this.FileSystem);

            DataLakeFileClient      fileClient;
            DataLakeDirectoryClient dirClient;

            if (GetExistDataLakeGen2Item(fileSystem, this.Path, out fileClient, out dirClient))
            {
                // Directory
                WriteDataLakeGen2Item(localChannel, dirClient);
            }
            else
            {
                //File
                WriteDataLakeGen2Item(Channel, fileClient);
            }
        }
        public void GetProperties()
        {
            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-rename" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem"));

            filesystem.Create();
            try
            {
                // Create a DataLake Directory
                DataLakeDirectoryClient directoryClient = filesystem.GetDirectoryClient(Randomize("sample-directory"));
                directoryClient.Create();

                #region Snippet:SampleSnippetDataLakeDirectoryClient_GetProperties
                // Get Properties on a Directory
                PathProperties directoryPathProperties = directoryClient.GetProperties();
                #endregion Snippet:SampleSnippetDataLakeDirectoryClient_GetProperties

                // Create a DataLake file
                DataLakeFileClient fileClient = filesystem.GetFileClient(Randomize("sample-file"));
                fileClient.Create();

                #region Snippet:SampleSnippetDataLakeFileClient_GetProperties
                // Get Properties on a File
                PathProperties filePathProperties = fileClient.GetProperties();
                #endregion Snippet:SampleSnippetDataLakeFileClient_GetProperties
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
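A short hedged follow-up sketch showing a few of the properties returned above; the file client is assumed to point at an existing file:

        public void PrintFileProperties(DataLakeFileClient fileClient)
        {
            PathProperties properties = fileClient.GetProperties();

            // ContentLength, LastModified and ContentType are populated by the service.
            Console.WriteLine($"Length: {properties.ContentLength}");
            Console.WriteLine($"Last modified: {properties.LastModified}");
            Console.WriteLine($"Content type: {properties.ContentType}");
        }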