public async Task AppendAsync()
        {
            // Create three temporary Lorem Ipsum files on disk that we can upload
            int    contentLength          = 10;
            string sampleFileContentPart1 = CreateTempFile(SampleFileContent.Substring(0, contentLength));
            string sampleFileContentPart2 = CreateTempFile(SampleFileContent.Substring(contentLength, contentLength));
            string sampleFileContentPart3 = CreateTempFile(SampleFileContent.Substring(contentLength * 2, contentLength));

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Get a reference to a FileSystemClient
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-append" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
            await filesystem.CreateAsync();

            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // Create the file
                await file.CreateAsync();

                // Verify we created one file
                AsyncPageable<PathItem> response = filesystem.GetPathsAsync();
                IList<PathItem> paths = await response.ToListAsync();

                Assert.AreEqual(1, paths.Count);

                // Append data to an existing DataLake File.  Append is currently limited to 4000 MB per call.
                // To upload a large file all at once, consider using UploadAsync() instead.
                await file.AppendAsync(File.OpenRead(sampleFileContentPart1), 0);

                await file.AppendAsync(File.OpenRead(sampleFileContentPart2), contentLength);

                await file.AppendAsync(File.OpenRead(sampleFileContentPart3), contentLength * 2);

                await file.FlushAsync(contentLength * 3);

                // Verify the contents of the file
                PathProperties properties = await file.GetPropertiesAsync();

                Assert.AreEqual(contentLength * 3, properties.ContentLength);
            }
            finally
            {
                // Clean up after the test when we're finished
                await filesystem.DeleteAsync();
            }
        }
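        // The comment above suggests UploadAsync() for uploading a whole file in one call.
        // A minimal sketch of that alternative, assuming the same DataLakeFileClient and a
        // sample file path (the method and parameter names here are illustrative):
        public async Task UploadInsteadOfAppendAsync(DataLakeFileClient file, string sampleFilePath)
        {
            using (FileStream stream = File.OpenRead(sampleFilePath))
            {
                // UploadAsync creates (or overwrites) the file, appends the content, and flushes it in one call
                await file.UploadAsync(stream, overwrite: true);
            }
        }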
        public async Task<Response<PathInfo>> UploadAsync(
            Stream content,
            PathHttpHeaders httpHeaders,
            DataLakeRequestConditions conditions,
            IProgress<long> progressHandler,
            CancellationToken cancellationToken)
        {
            await _client.CreateAsync(
                httpHeaders: httpHeaders,
                conditions: conditions,
                cancellationToken: cancellationToken).ConfigureAwait(false);

            // After the file is created, the lease ID is the only valid request condition.
            conditions = new DataLakeRequestConditions
            {
                LeaseId = conditions?.LeaseId
            };

            // If we can compute the size and it's small enough
            if (PartitionedUploadExtensions.TryGetLength(content, out long contentLength) &&
                contentLength < _singleUploadThreshold)
            {
                // Append data
                await _client.AppendAsync(
                    content,
                    offset: 0,
                    leaseId: conditions?.LeaseId,
                    progressHandler: progressHandler,
                    cancellationToken: cancellationToken).ConfigureAwait(false);

                // Flush data
                return await _client.FlushAsync(
                    position: contentLength,
                    httpHeaders: httpHeaders,
                    conditions: conditions)
                    .ConfigureAwait(false);
            }

            // If the caller provided an explicit block size, we'll use it.
            // Otherwise we'll adjust dynamically based on the size of the
            // content.
            int blockSize =
                _blockSize != null ? _blockSize.Value :
                contentLength < Constants.LargeUploadThreshold ?
                Constants.DefaultBufferSize :
                Constants.LargeBufferSize;

            // Otherwise stage individual blocks in parallel
            return await UploadInParallelAsync(
                content,
                blockSize,
                httpHeaders,
                conditions,
                progressHandler,
                cancellationToken).ConfigureAwait(false);
        }
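        // The single-shot branch above depends on being able to compute the stream's length
        // up front. A minimal sketch of that guard, assuming semantics similar to
        // PartitionedUploadExtensions.TryGetLength (illustrative, not the SDK's own code):
        private static bool TryGetStreamLength(Stream content, out long length)
        {
            if (content != null && content.CanSeek)
            {
                // Only the bytes from the current position onward will be uploaded
                length = content.Length - content.Position;
                return true;
            }

            // Non-seekable streams fall through to the parallel, block-based path
            length = 0;
            return false;
        }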
        /// <summary>
        /// Upload File with Datalake API
        /// </summary>
        internal virtual async Task UploadDataLakeFile(long taskId, DataLakeFileClient fileClient, string filePath)
        {
            if (this.Force.IsPresent || !fileClient.Exists() || ShouldContinue(string.Format(Resources.OverwriteConfirmation, GetDataLakeItemUriWithoutSas(fileClient)), null))
            {
                // Set Item Properties and MetaData
                PathHttpHeaders pathHttpHeaders = SetDatalakegen2ItemProperties(fileClient, BlobProperties, setToServer: false);
                IDictionary<string, string> metadata = SetDatalakegen2ItemMetaData(fileClient, BlobMetadata, setToServer: false);

                fileClient.Create(pathHttpHeaders,
                                  metadata,
                                  this.Permission,
                                  this.Umask != null ? DataLakeModels.PathPermissions.ParseSymbolicPermissions(this.Umask).ToOctalPermissions() : null);

                long             fileSize        = new FileInfo(ResolvedFileName).Length;
                string           activity        = String.Format(Resources.SendAzureBlobActivity, this.Source, this.Path, this.FileSystem);
                string           status          = Resources.PrepareUploadingBlob;
                ProgressRecord   pr              = new ProgressRecord(OutputStream.GetProgressId(taskId), activity, status);
                IProgress<long> progressHandler = new Progress<long>((finishedBytes) =>
                {
                    if (pr != null)
                    {
                        // Size of the source file might be 0, when it is, directly treat the progress as 100 percent.
                        pr.PercentComplete   = 0 == fileSize ? 100 : (int)(finishedBytes * 100 / fileSize);
                        pr.StatusDescription = string.Format(CultureInfo.CurrentCulture, Resources.FileTransmitStatus, pr.PercentComplete);
                        this.OutputStream.WriteProgress(pr);
                    }
                });

                using (FileStream stream = File.OpenRead(ResolvedFileName))
                {
                    await fileClient.AppendAsync(stream, 0, progressHandler: progressHandler, cancellationToken: CmdletCancellationToken).ConfigureAwait(false);
                }
                WriteDataLakeGen2Item(Channel, fileClient, taskId: taskId);
            }
        }
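        // Appended bytes are only committed to the DataLake file once they are flushed.
        // If the cmdlet does not flush elsewhere, a minimal sketch of the commit step,
        // reusing the fileSize computed above (illustrative, not the shipped cmdlet code):
        internal virtual async Task CommitDataLakeFile(DataLakeFileClient fileClient, long fileSize)
        {
            // Flush at the total number of appended bytes to make the content visible
            await fileClient.FlushAsync(position: fileSize, cancellationToken: CmdletCancellationToken).ConfigureAwait(false);
        }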
Example #4
        public static async Task <IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "post", Route = null)] HttpRequest req,
            ILogger log)
        {
            //connect to account
            string accountName    = Common.GetEnvironmentVariable("DataLakeAccountName");
            string accountKey     = Common.GetEnvironmentVariable("DataLakeAccountKey");
            string fileSystemName = Common.GetEnvironmentVariable("DataLakeFileSystemName");

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);

            string dfsUri = "https://" + accountName + ".dfs.core.windows.net";

            DataLakeServiceClient dataLakeServiceClient = new DataLakeServiceClient(new Uri(dfsUri), sharedKeyCredential);

            //upload file
            DataLakeFileSystemClient fileSystemClient = dataLakeServiceClient.GetFileSystemClient(fileSystemName);

            string             fileName   = Guid.NewGuid().ToString() + ".json";
            DataLakeFileClient fileClient = await fileSystemClient.CreateFileAsync(fileName);

            long fileSize = req.Body.Length;

            await fileClient.AppendAsync(req.Body, offset: 0);

            await fileClient.FlushAsync(position: fileSize);

            return (ActionResult)new OkResult();
        }
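        // req.Body.Length is only reliable when the request stream is seekable. A hedged
        // variant that buffers the body first so the append offset and flush position are
        // always known (illustrative; buffering trades memory for a dependable length):
        private static async Task UploadRequestBodyAsync(HttpRequest req, DataLakeFileClient fileClient)
        {
            using (var buffer = new MemoryStream())
            {
                // Copy the request body into memory so Length is well defined
                await req.Body.CopyToAsync(buffer);
                buffer.Position = 0;

                await fileClient.AppendAsync(buffer, offset: 0);
                await fileClient.FlushAsync(position: buffer.Length);
            }
        }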
Example #5
        public async Task Append(string path, byte[] data, CancellationToken token = default)
        {
            path = WithBasePath(path);
            _logger.LogTrace($"Appending to {path}, data.Length={data.Length}");

            data
                .VerifyNotNull(nameof(data))
                .VerifyAssert(x => x.Length > 0, $"{nameof(data)} length must be greater than 0");

            using var memoryBuffer = new MemoryStream(data.ToArray());

            try
            {
                DatalakePathProperties properties = await GetPathProperties(path, token);

                DataLakeFileClient file = _fileSystem.GetFileClient(path);

                await file.AppendAsync(memoryBuffer, properties.ContentLength, cancellationToken : token);

                await file.FlushAsync(properties.ContentLength + data.Length);
            }
            catch (RequestFailedException ex) when(ex.ErrorCode == "PathNotFound" || ex.ErrorCode == "BlobNotFound")
            {
                await Write(path, data, true, token : token);
            }
            catch (TaskCanceledException) { }
        }
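        // A hypothetical usage sketch for the Append helper above; the path and payload
        // are illustrative and not part of the original class:
        public async Task AppendUsageExample(CancellationToken token = default)
        {
            byte[] record = Encoding.UTF8.GetBytes("{\"event\":\"ping\"}\n");

            // Appends to the existing file, or falls back to Write when the path does not exist yet
            await Append("telemetry/2021/events.json", record, token);
        }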
Example #6
        /// <summary>
        /// Connect and upload the data as file to the Azure Data Lake.
        /// </summary>
        /// <param name="storageAccountName">Azure storage account name</param>
        /// <param name="storageAccountKey">Azure storage account key</param>
        /// <param name="dataLakeUri">Azure Data Lake URI</param>
        /// <param name="directoryName">Azure Data Lake directory name</param>
        /// <param name="content">Upload data content</param>
        public async Task <bool> UploadData(string storageAccountName, string storageAccountKey, string dataLakeUri, string directoryName, string content)
        {
            try
            {
                Uri serviceUri = new Uri(dataLakeUri);

                StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);
                // Create DataLakeServiceClient using StorageSharedKeyCredentials
                DataLakeServiceClient    serviceClient   = new DataLakeServiceClient(serviceUri, sharedKeyCredential);
                DataLakeFileSystemClient filesystem      = serviceClient.GetFileSystemClient(directoryName);
                DataLakeDirectoryClient  directoryClient =
                    filesystem.GetDirectoryClient(directoryName);
                DataLakeFileClient fileClient = await directoryClient.CreateFileAsync(string.Format("data-{0}.json", Guid.NewGuid().ToString()));

                using (MemoryStream memoryStream = new MemoryStream(System.Text.Encoding.ASCII.GetBytes(content)))
                {
                    await fileClient.AppendAsync(memoryStream, offset: 0);

                    await fileClient.FlushAsync(position: memoryStream.Length);
                }
                return true;
            }
            catch (Exception exception)
            {
                logger.Error(exception.StackTrace);
                return false;
            }
        }
        public async Task ReadAsync()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            string originalPath = CreateTempFile(SampleFileContent);

            // Get a temporary path on disk where we can download the file
            string downloadPath = CreateTempPath();

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-read" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-read"));
            await filesystem.CreateAsync();

            try
            {
                // Get a reference to a file named "sample-file" in a filesystem
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));

                // First upload something to the DataLake file so we have something to download
                await file.CreateAsync();

                await file.AppendAsync(File.OpenRead(originalPath), 0);

                await file.FlushAsync(SampleFileContent.Length);

                // Download the DataLake file's contents and save it to a file
                Response <FileDownloadInfo> fileContents = await file.ReadAsync();

                using (FileStream stream = File.OpenWrite(downloadPath))
                {
                    fileContents.Value.Content.CopyTo(stream);
                }

                // Verify the contents
                Assert.AreEqual(SampleFileContent, File.ReadAllText(downloadPath));
            }
            finally
            {
                // Clean up after the test when we're finished
                await filesystem.DeleteAsync();
            }
        }
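        // A hedged variant of the download step above: read the DataLake file's contents
        // straight into a string instead of saving it to disk (illustrative only):
        public async Task<string> ReadToStringAsync(DataLakeFileClient file)
        {
            Response<FileDownloadInfo> download = await file.ReadAsync();

            using (StreamReader reader = new StreamReader(download.Value.Content))
            {
                // Content is the response stream; ReadToEndAsync drains it into memory
                return await reader.ReadToEndAsync();
            }
        }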
        // </Snippet_ListFilesInDirectory>

        #endregion

        #region Upload files to a directory

        // ---------------------------------------------------------
        // Upload files to the directory.
        //----------------------------------------------------------

        // <Snippet_UploadFile>
        public async Task UploadFile(DataLakeFileSystemClient fileSystemClient)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient("my-directory");

            DataLakeFileClient fileClient = await directoryClient.CreateFileAsync("uploaded-file.txt");

            FileStream fileStream =
                File.OpenRead("C:\\Users\\contoso\\Temp\\file-to-upload.txt");

            long fileSize = fileStream.Length;

            await fileClient.AppendAsync(fileStream, offset: 0);

            await fileClient.FlushAsync(position: fileSize);
        }
        public async Task UploadJsonData(DataLakeFileSystemClient fileSystemClient, string directory, string jsonData, string dataId)
        {
            DataLakeDirectoryClient directoryClient =
                fileSystemClient.GetDirectoryClient(directory);

            DataLakeFileClient fileClient = await directoryClient.CreateFileAsync(dataId + ".json");

            byte[]       byteArray = Encoding.ASCII.GetBytes(jsonData);
            MemoryStream stream    = new MemoryStream(byteArray);

            long fileSize = stream.Length;

            await fileClient.AppendAsync(stream, offset: 0);

            await fileClient.FlushAsync(position: fileSize);
        }
Example #10
        public async Task <string> UploadFile(IBrowserFile WebFile, Models.File CirrusFile)
        {
            var DirectoryClient           = FileSystemClient.GetDirectoryClient(CirrusFile.UserId);
            DataLakeFileClient FileClient = null;

            foreach (Category Category in CirrusFile.Categories)
            {
                DataLakeDirectoryClient subDirectory = await DirectoryClient.CreateSubDirectoryAsync(Category.CategoryName);
                FileClient = await subDirectory.CreateFileAsync(CirrusFile.FileName);
                using (var MS = new MemoryStream())
                {
                    await WebFile.OpenReadStream(10485760).CopyToAsync(MS);

                    MS.Position = 0;
                    await FileClient.AppendAsync(MS, 0);

                    await FileClient.FlushAsync(position: MS.Length);
                }
            }
            return FileClient == null ? null : FileClient.Path + "/" + CirrusFile.FileName;
        }
        public async Task AppendAsync_Simple()
        {
            // Create Sample File to read content from
            string sampleFilePath = CreateTempFile(SampleFileContent);

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials
            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-append" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-append"));
            await filesystem.CreateAsync();

            try
            {
                // Create a file
                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));
                await file.CreateAsync();

                // Append data to the DataLake File
                await file.AppendAsync(File.OpenRead(sampleFilePath), 0);

                await file.FlushAsync(SampleFileContent.Length);

                // Verify the contents of the file
                PathProperties properties = await file.GetPropertiesAsync();

                Assert.AreEqual(SampleFileContent.Length, properties.ContentLength);
            }
            finally
            {
                // Clean up after the test when we're finished
                await filesystem.DeleteAsync();
            }
        }
Example #12
        public async Task AnonymousAuthAsync()
        {
            // Make StorageSharedKeyCredential to pass to the serviceClient
            string accountName = StorageAccountName;
            string accountKey  = StorageAccountKey;
            Uri    serviceUri  = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);

            // Get a reference to a service Client
            DataLakeServiceClient service = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem"
            DataLakeFileSystemClient filesystem = service.GetFileSystemClient(Randomize("sample-filesystem"));

            try
            {
                // Create a filesystem whose contents can be accessed publicly
                await filesystem.CreateAsync(PublicAccessType.FileSystem);

                DataLakeFileClient file = filesystem.GetFileClient(Randomize("sample-file"));
                await file.CreateAsync();

                // Append data to the file
                string fileContent = "File Content";
                await file.AppendAsync(new MemoryStream(Encoding.UTF8.GetBytes(fileContent)), 0);

                await file.FlushAsync(fileContent.Length);

                // Anonymously access a blob given its URI
                Uri endpoint = file.Uri;
                DataLakeFileClient anonymous = new DataLakeFileClient(endpoint);

                // Make a service request to verify we've successfully authenticated
                await anonymous.GetPropertiesAsync();
            }
            finally
            {
                await filesystem.DeleteAsync();
            }
        }
        public async Task TraverseAsync()
        {
            // Create a temporary Lorem Ipsum file on disk that we can upload
            string originalPath = CreateTempFile(SampleFileContent);

            // Make StorageSharedKeyCredential to pass to the serviceClient
            string storageAccountName = StorageAccountName;
            string storageAccountKey  = StorageAccountKey;
            Uri    serviceUri         = StorageAccountBlobUri;
            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(storageAccountName, storageAccountKey);

            // Create DataLakeServiceClient using StorageSharedKeyCredentials

            DataLakeServiceClient serviceClient = new DataLakeServiceClient(serviceUri, sharedKeyCredential);

            // Get a reference to a filesystem named "sample-filesystem-traverse" and then create it
            DataLakeFileSystemClient filesystem = serviceClient.GetFileSystemClient(Randomize("sample-filesystem-traverse"));

            await filesystem.CreateAsync();

            try
            {
                // Create a bunch of directories and files within the directories
                DataLakeDirectoryClient first = await filesystem.CreateDirectoryAsync("first");

                await first.CreateSubDirectoryAsync("a");

                await first.CreateSubDirectoryAsync("b");

                DataLakeDirectoryClient second = await filesystem.CreateDirectoryAsync("second");

                await second.CreateSubDirectoryAsync("c");

                await second.CreateSubDirectoryAsync("d");

                await filesystem.CreateDirectoryAsync("third");

                DataLakeDirectoryClient fourth = await filesystem.CreateDirectoryAsync("fourth");

                DataLakeDirectoryClient deepest = await fourth.CreateSubDirectoryAsync("e");

                // Upload a DataLake file named "file"
                DataLakeFileClient file = deepest.GetFileClient("file");
                await file.CreateAsync();

                using (FileStream stream = File.OpenRead(originalPath))
                {
                    await file.AppendAsync(stream, 0);
                }

                // Keep track of all the names we encounter
                List<string> names = new List<string>();
                await foreach (PathItem pathItem in filesystem.GetPathsAsync(recursive: true))
                {
                    names.Add(pathItem.Name);
                }

                // Verify we've seen everything
                Assert.AreEqual(10, names.Count);
                Assert.Contains("first", names);
                Assert.Contains("second", names);
                Assert.Contains("third", names);
                Assert.Contains("fourth", names);
                Assert.Contains("first/a", names);
                Assert.Contains("first/b", names);
                Assert.Contains("second/c", names);
                Assert.Contains("second/d", names);
                Assert.Contains("fourth/e", names);
                Assert.Contains("fourth/e/file", names);
            }
            finally
            {
                // Clean up after the test when we're finished
                await filesystem.DeleteAsync();
            }
        }
        public async Task UploadFile(string fullPath)
        {
            var dataLakeServiceClient    = GetDataLakeServiceClient();
            var dataLakeFileSystemClient = dataLakeServiceClient.GetFileSystemClient(ContainerName);

            var directoryDate = dataLakeFileSystemClient.GetDirectoryClient(DateTime.Now.ToString("yyyy-MM-dd"));

            #region Create and Check Folder
            DataLakeDirectoryClient directoryDistributorCode = null;

            if (!directoryDate.Exists())
            {
                dataLakeFileSystemClient.CreateDirectory(DateTime.Now.ToString("yyyy-MM-dd"));
                directoryDate = dataLakeFileSystemClient.GetDirectoryClient(DateTime.Now.ToString("yyyy-MM-dd"));
            }

            if (FormFileUpload.Category.ToUpper() == "STOCK")
            {
                var directoryStock = directoryDate.GetSubDirectoryClient("Stock");

                if (!directoryStock.Exists())
                {
                    directoryDate.GetSubDirectoryClient("Stock");
                    directoryStock = directoryDate.GetSubDirectoryClient("Stock");
                }

                directoryDistributorCode = directoryStock.GetSubDirectoryClient(FormFileUpload.DistributorCode);

                if (!directoryDistributorCode.Exists())
                {
                    // Create the distributor sub-directory when it does not exist yet
                    directoryDistributorCode = directoryStock.CreateSubDirectory(FormFileUpload.DistributorCode);
                }
            }

            if (FormFileUpload.Category.ToUpper() == "SALE")
            {
                var directorySales = directoryDate.GetSubDirectoryClient("Sales");

                if (!directorySales.Exists())
                {
                    directoryDate.GetSubDirectoryClient("Sales");
                    directorySales = directoryDate.GetSubDirectoryClient("Sales");
                }

                directoryDistributorCode = directorySales.GetSubDirectoryClient(FormFileUpload.DistributorCode);

                if (!directoryDistributorCode.Exists())
                {
                    // Create the distributor sub-directory when it does not exist yet
                    directoryDistributorCode = directorySales.CreateSubDirectory(FormFileUpload.DistributorCode);
                }
            }
            #endregion

            string fileName = Path.GetFileName(fullPath);

            DataLakeFileClient fileClient = await directoryDistributorCode.CreateFileAsync(fileName);

            using var fileStream = System.IO.File.OpenRead(fullPath);
            long fileSize = fileStream.Length;
            await fileClient.AppendAsync(fileStream, offset: 0);

            await fileClient.FlushAsync(position: fileSize);
        }
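        // A hedged simplification of the exists-check blocks above, assuming the
        // CreateIfNotExists helpers on DataLakeDirectoryClient are available in the
        // SDK version in use (illustrative; the folder names mirror the method above):
        private static DataLakeDirectoryClient GetOrCreateCategoryDirectory(
            DataLakeDirectoryClient directoryDate, string category, string distributorCode)
        {
            // Creates the directory only when it is missing and always returns a usable client
            DataLakeDirectoryClient categoryDirectory = directoryDate.GetSubDirectoryClient(category);
            categoryDirectory.CreateIfNotExists();

            DataLakeDirectoryClient distributorDirectory = categoryDirectory.GetSubDirectoryClient(distributorCode);
            distributorDirectory.CreateIfNotExists();

            return distributorDirectory;
        }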