public Response<PathInfo> Upload(
            Stream content,
            PathHttpHeaders httpHeaders,
            DataLakeRequestConditions conditions,
            IProgress<long> progressHandler,
            CancellationToken cancellationToken)
        {
            _client.Create(
                httpHeaders: httpHeaders,
                conditions: conditions,
                cancellationToken: cancellationToken);

            // After the file is created, the lease ID is the only request condition that remains valid.
            conditions = new DataLakeRequestConditions {
                LeaseId = conditions?.LeaseId
            };

            // If we can compute the size and it's small enough
            if (PartitionedUploadExtensions.TryGetLength(content, out long contentLength) &&
                contentLength < _singleUploadThreshold)
            {
                // Upload it in a single request
                _client.Append(
                    content,
                    offset: 0,
                    leaseId: conditions?.LeaseId,
                    progressHandler: progressHandler,
                    cancellationToken: cancellationToken);

                // Calculate flush position
                long flushPosition = contentLength;

                return _client.Flush(
                    position: flushPosition,
                    httpHeaders: httpHeaders,
                    conditions: conditions,
                    cancellationToken: cancellationToken);
            }

            // If the caller provided an explicit block size, we'll use it.
            // Otherwise we'll adjust dynamically based on the size of the
            // content.
            int blockSize =
                _blockSize != null ? _blockSize.Value :
                contentLength < Constants.LargeUploadThreshold ?
                Constants.DefaultBufferSize :
                Constants.LargeBufferSize;

            // Otherwise stage individual blocks one at a time.  It's not as
            // fast as a parallel upload, but you get the benefit of the retry
            // policy working on a single block instead of the entire stream.
            return UploadInSequence(
                content,
                blockSize,
                httpHeaders,
                conditions,
                progressHandler,
                cancellationToken);
        }
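        // UploadInSequence is not included in this snippet. As a rough sketch (an assumption
        // based on the Append/Flush pattern above, not the SDK's actual implementation), it
        // appends one buffered block at a time at a running offset and commits with one Flush:
        private Response<PathInfo> UploadInSequence(
            Stream content,
            int blockSize,
            PathHttpHeaders httpHeaders,
            DataLakeRequestConditions conditions,
            IProgress<long> progressHandler,
            CancellationToken cancellationToken)
        {
            byte[] buffer = new byte[blockSize];
            long   offset = 0;
            int    read;

            // Stage one block at a time at a running offset
            while ((read = content.Read(buffer, 0, buffer.Length)) > 0)
            {
                _client.Append(
                    new MemoryStream(buffer, 0, read, writable: false),
                    offset: offset,
                    leaseId: conditions?.LeaseId,
                    progressHandler: progressHandler,
                    cancellationToken: cancellationToken);
                offset += read;
            }

            // Commit everything that was appended in a single Flush
            return _client.Flush(
                position: offset,
                httpHeaders: httpHeaders,
                conditions: conditions,
                cancellationToken: cancellationToken);
        }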
        public void Append()
        {
            // Create three temporary Lorem Ipsum files on disk that we can upload
            int    contentLength          = 10;
            string sampleFileContentPart1 = CreateTempFile(SampleFileContent.Substring(0, contentLength));
            string sampleFileContentPart2 = CreateTempFile(SampleFileContent.Substring(contentLength, contentLength));
            string sampleFileContentPart3 = CreateTempFile(SampleFileContent.Substring(contentLength * 2, contentLength));

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-append" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));

            filesystem.Create();
            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // Create the file
                file.Create();

                // Verify we created one file
                Assert.AreEqual(1, filesystem.GetPaths().Count());

                // Append data to an existing DataLake File.  Append is currently limited to 4000 MB per call.
                // To upload a large file all at once, consider using Upload() instead.
                file.Append(File.OpenRead(sampleFileContentPart1), 0);
                file.Append(File.OpenRead(sampleFileContentPart2), contentLength);
                file.Append(File.OpenRead(sampleFileContentPart3), contentLength * 2);
                file.Flush(contentLength * 3);

                // Verify the contents of the file
                PathProperties properties = file.GetProperties();
                Assert.AreEqual(contentLength * 3, properties.ContentLength);
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
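        // As the comment in Append() notes, a single Upload call can replace the manual
        // Append/Flush bookkeeping when writing a whole file at once. A minimal sketch,
        // reusing the temp-file helpers from the sample above:
        public void Upload_WholeFile(DataLakeFileSystemClient filesystem)
        {
            // Create a temporary file to upload (same helper as the Append sample above)
            string sampleFilePath = CreateTempFile(SampleFileContent);

            DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file-upload"));
            using (FileStream stream = File.OpenRead(sampleFilePath))
            {
                // Upload creates (or overwrites) the file and commits the content in one call
                file.Upload(stream, overwrite: true);
            }
        }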
        private static void PerformWriteFlushReadSeek(DataLakeFileSystemClient client)
        {
            string fileName = "/Test/dir1/testFilename1.txt";

            DataLakeFileClient file = client.GetFileClient(fileName);

            // Create the file
            Stream stream = BinaryData.FromString("This is the first line.\nThis is the second line.\n").ToStream();
            long   length = stream.Length;

            file.Upload(stream, true);

            // Append to the file
            stream = BinaryData.FromString("This is the third line.\nThis is the fourth line.\n").ToStream();
            file.Append(stream, length);
            file.Flush(length + stream.Length);

            // Read the file
            using (var readStream = file.OpenRead())
            {
                byte[] readData = new byte[1024];

                // Read up to 40 bytes into the buffer at offset 25 (the offset applies to the
                // buffer, not the file; the read starts at the stream's current position)
                int readBytes = readStream.Read(readData, 25, 40);
                Console.WriteLine("Read " + readBytes + " bytes into buffer offset 25: " + Encoding.UTF8.GetString(readData, 25, readBytes));
            }
        }
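        // PerformWriteFlushReadSeek above only reads from the start of the file. A minimal
        // sketch of seeking to an arbitrary file offset before reading (this assumes the
        // stream returned by OpenRead is seekable, which it is for DataLakeFileClient):
        private static void SeekAndRead(DataLakeFileClient file)
        {
            using (var readStream = file.OpenRead())
            {
                // Position the stream at byte 25 of the file, then read the next 40 bytes
                readStream.Seek(25, SeekOrigin.Begin);

                byte[] readData  = new byte[40];
                int    readBytes = readStream.Read(readData, 0, readData.Length);
                Console.WriteLine("Read " + readBytes + " bytes from file offset 25: " + Encoding.UTF8.GetString(readData, 0, readBytes));
            }
        }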
        private static string tenantId      = "FILL-IN-HERE";     // the tenant (directory) ID of the Azure AD application
        private static string applicationId = "FILL-IN-HERE";     // the application (client) ID of the Azure AD application
        private static string clientSecret  = "FILL-IN-HERE";     // a client secret for the Azure AD application
        private static string serviceUri    = "FILL-IN-HERE";     // full account FQDN, not just the account name - it should look like https://{ACCOUNTNAME}.dfs.core.windows.net/

        public static void Main(string[] args)
        {
            // Create Client Secret Credential
            var creds = new ClientSecretCredential(tenantId, applicationId, clientSecret);

            // Create data lake file service client object
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(new Uri(serviceUri), creds);
            var name = "sample-filesystem" + Guid.NewGuid().ToString("n").Substring(0, 8);
            // Create data lake file system client object
            DataLakeFileSystemClient filesystemclient = serviceClient.GetFileSystemClient(name);

            filesystemclient.CreateIfNotExists();

            try
            {
                long               length;
                string             fileName = "/Test/testFilename1.txt";
                DataLakeFileClient file     = filesystemclient.GetFileClient(fileName);

                // Upload a file - automatically creates any parent directories that don't exist
                Stream initialContent = BinaryData.FromString("This is test data to write.\r\nThis is the second line.\r\n").ToStream();
                length = initialContent.Length;
                file.Upload(initialContent, true);

                // Append another line at the current end of the file, then flush to commit it
                Stream appendedContent = BinaryData.FromString("This is the added line.\r\n").ToStream();
                file.Append(appendedContent, length);
                file.Flush(length + appendedContent.Length);

                // Read the file contents
                Response<FileDownloadInfo> fileContents = file.Read();

                Console.WriteLine(BinaryData.FromStream(fileContents.Value.Content).ToString());

                // Get the properties of the file
                PathProperties pathProperties = file.GetProperties();
                PrintDirectoryEntry(pathProperties);

                // Rename a file
                string destFilePath = "/Test/testRenameDest3.txt";
                file.Rename(destFilePath);
                file = filesystemclient.GetFileClient(destFilePath);
                Console.WriteLine("The file URI is " + file.Uri);

                // Enumerate directory
                foreach (var pathItem in filesystemclient.GetPaths("/Test"))
                {
                    PrintDirectoryEntry(pathItem);
                }

                // Delete a directory and all its subdirectories and files
                filesystemclient.DeleteDirectory("/Test");
            }
            finally
            {
                filesystemclient.Delete();
            }

            Console.WriteLine("Done. Press ENTER to continue ...");
            Console.ReadLine();
        }
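        // PrintDirectoryEntry is referenced above but not included in this snippet. A plausible
        // sketch (an assumption, not the sample's actual helper) with one overload per type:
        private static void PrintDirectoryEntry(PathProperties properties)
        {
            Console.WriteLine("Length: " + properties.ContentLength + ", LastModified: " + properties.LastModified);
        }

        private static void PrintDirectoryEntry(PathItem pathItem)
        {
            Console.WriteLine("Name: " + pathItem.Name + ", IsDirectory: " + pathItem.IsDirectory + ", LastModified: " + pathItem.LastModified);
        }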
        public void Read()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            string originalPath = CreateTempFile(SampleFileContent);

            // Get a temporary path on disk where we can download the file
            string downloadPath = CreateTempPath();

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-read" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-read"));

            filesystem.Create();
            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // First upload something to the DataLake file so we have something to download
                file.Create();
                file.Append(File.OpenRead(originalPath), 0);
                file.Flush(SampleFileContent.Length);

                // Download the DataLake file's contents and save it to a file
                #region Snippet:SampleSnippetDataLakeFileClient_Read
                Response<FileDownloadInfo> fileContents = file.Read();
                #endregion Snippet:SampleSnippetDataLakeFileClient_Read
                using (FileStream stream = File.OpenWrite(downloadPath))
                {
                    fileContents.Value.Content.CopyTo(stream);
                }

                // Verify the contents
                Assert.AreEqual(SampleFileContent, File.ReadAllText(downloadPath));
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
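        // An alternative sketch for the download step in Read(): stream the file straight to
        // disk with OpenRead and CopyTo instead of handling the Response<FileDownloadInfo>:
        private static void DownloadToFile(DataLakeFileClient file, string downloadPath)
        {
            using (Stream dataLakeStream = file.OpenRead())
            using (FileStream localFile = File.Create(downloadPath))
            {
                dataLakeStream.CopyTo(localFile);
            }
        }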
        public void Append_Simple()
        {
            // Create Sample File to read content from
            string sampleFilePath = CreateTempFile(SampleFileContent);

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-append" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));

            filesystem.Create();
            try
            {
                #region Snippet:SampleSnippetDataLakeFileClient_Append
                // Create a file
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));
                file.Create();

                // Append data to the DataLake File
                file.Append(File.OpenRead(sampleFilePath), 0);
                file.Flush(SampleFileContent.Length);
                #endregion Snippet:SampleSnippetDataLakeFileClient_Append

                // Verify the contents of the file
                PathProperties properties = file.GetProperties();
                Assert.AreEqual(SampleFileContent.Length, properties.ContentLength);
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
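        // The same create/append/flush flow is also available through the async APIs
        // (CreateAsync, AppendAsync, FlushAsync). A minimal sketch, reusing the helpers
        // from Append_Simple above:
        public async Task Append_SimpleAsync(DataLakeFileSystemClient filesystem)
        {
            string sampleFilePath = CreateTempFile(SampleFileContent);

            DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));
            await file.CreateAsync();

            // Append the data, then flush to commit it
            using (FileStream stream = File.OpenRead(sampleFilePath))
            {
                await file.AppendAsync(stream, offset: 0);
            }
            await file.FlushAsync(position: SampleFileContent.Length);
        }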
        public void Traverse()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            string originalPath = CreateTempFile(SampleFileContent);

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-traverse" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-traverse"));

            filesystem.Create();
            try
            {
                // Create a bunch of directories and files within the directories
                DataLakeDirectoryClient first = filesystem.CreateDirectory("first");
                first.CreateSubDirectory("a");
                first.CreateSubDirectory("b");
                DataLakeDirectoryClient second = filesystem.CreateDirectory("second");
                second.CreateSubDirectory("c");
                second.CreateSubDirectory("d");
                filesystem.CreateDirectory("third");
                DataLakeDirectoryClient fourth  = filesystem.CreateDirectory("fourth");
                DataLakeDirectoryClient deepest = fourth.CreateSubDirectory("e");

                // Upload a DataLake file named "file"
                DataLakeFileClient file = deepest.GetFileClient("file");
                file.Create();
                using (FileStream stream = File.OpenRead(originalPath))
                {
                    file.Append(stream, 0);
                }

                // Keep track of all the names we encounter
                List<string> names = new List<string>();
                foreach (PathItem pathItem in filesystem.GetPaths(recursive: true))
                {
                    names.Add(pathItem.Name);
                }

                // Verify we've seen everything
                Assert.AreEqual(10, names.Count);
                Assert.Contains("first", names);
                Assert.Contains("second", names);
                Assert.Contains("third", names);
                Assert.Contains("fourth", names);
                Assert.Contains("first/a", names);
                Assert.Contains("first/b", names);
                Assert.Contains("second/c", names);
                Assert.Contains("second/d", names);
                Assert.Contains("fourth/e", names);
                Assert.Contains("fourth/e/file", names);
            }
            finally
            {
                // Clean up after the test when we're finished
                filesystem.Delete();
            }
        }
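        // When traversing, PathItem.IsDirectory separates directories from files. A small
        // sketch based on the recursive listing in Traverse() above:
        private static void SplitPaths(DataLakeFileSystemClient filesystem)
        {
            List<string> directoryNames = new List<string>();
            List<string> fileNames      = new List<string>();

            foreach (PathItem pathItem in filesystem.GetPaths(recursive: true))
            {
                if (pathItem.IsDirectory == true)
                {
                    directoryNames.Add(pathItem.Name);
                }
                else
                {
                    fileNames.Add(pathItem.Name);
                }
            }

            Console.WriteLine(directoryNames.Count + " directories, " + fileNames.Count + " files");
        }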