/// <summary>
/// Walks the SharePoint folder tree for a solution and maps each claim
/// (Capability/Standard) to the blobs found in its evidence folder,
/// eg 'Standards Evidence/Patient Management' plus one level of sub-folders.
/// </summary>
/// <param name="claimsInfoProvider">Source of claims, qualities and folder names.</param>
/// <param name="solutionId">Identifier of the solution whose folder tree is enumerated.</param>
/// <returns>
/// One <see cref="ClaimBlobInfoMap"/> per claim that has a matching folder;
/// qualities without a folder, or folders without a matching claim, are skipped.
/// </returns>
public IEnumerable<ClaimBlobInfoMap> EnumerateClaimFolderTree(IClaimsInfoProvider claimsInfoProvider, string solutionId)
{
  return GetInternal(() =>
  {
    var soln = _solutionsDatastore.ById(solutionId);
    var claims = claimsInfoProvider.GetClaimBySolution(solutionId);
    var org = _organisationsDatastore.ById(soln.OrganisationId);
    var claimFolder = CleanupFileName(claimsInfoProvider.GetFolderName());
    var solutionUrl = Url.Combine(
      SharePoint_OrganisationsRelativeUrl,
      CleanupFileName(org.Name),
      GetSolutionVersionFolderName(soln));

    // Materialise once: this sequence is scanned repeatedly below
    // (SingleOrDefault per quality, Where, SelectMany). Re-running a deferred
    // recursive SharePoint enumeration on every pass would be slow and could
    // return inconsistent snapshots between passes.
    var allBlobInfos = EnumerateFolderRecursively(solutionUrl, claimFolder).ToList();

    var quals = claimsInfoProvider.GetAllQualities();
    var retval = new List<ClaimBlobInfoMap>();
    foreach (var qual in quals)
    {
      // specific claim folder eg 'Standards Evidence/Patient Management'
      var qualFolder = allBlobInfos.SingleOrDefault(bi => bi.Name == CleanupFileName(qual.Name) && bi.IsFolder);
      if (qualFolder == null)
      {
        continue;
      }

      // look for claim corresponding to this Capability/Standard
      var claim = claims.SingleOrDefault(c => c.QualityId == qual.Id);
      if (claim == null)
      {
        continue;
      }

      var map = new ClaimBlobInfoMap { ClaimId = claim.Id };
      var blobInfos = new List<BlobInfo>(new[] { qualFolder });

      // files and sub-folders in specific claim folder
      // (ToList: enumerated twice - once by AddRange, once by the folder scan below)
      var subBlobInfos = allBlobInfos.Where(abi => abi.ParentId == qualFolder.Id).ToList();
      blobInfos.AddRange(subBlobInfos);

      // files in a specific claim sub folder eg 'Standards Evidence/Patient Management/Video Evidence'
      var subBlobInfosFolders = subBlobInfos.Where(sbi => sbi.IsFolder);
      var subBlobInfosFiles = subBlobInfosFolders.SelectMany(sbif => allBlobInfos.Where(abi => abi.ParentId == sbif.Id));
      blobInfos.AddRange(subBlobInfosFiles);

      map.BlobInfos = blobInfos;
      retval.Add(map);
    }
    return retval;
  });
}
/// <summary>
/// Lists a claim's SharePoint evidence folder: the folder itself, its immediate
/// sub-folders and its files, optionally scoped to a named sub-folder.
/// </summary>
/// <param name="claimsInfoProvider">Source of claim metadata and folder names.</param>
/// <param name="claimId">Identifier of the claim whose folder is enumerated.</param>
/// <param name="subFolder">Optional sub-folder name appended to the claim folder URL.</param>
/// <returns>The folder's BlobInfo first, followed by sub-folder entries, then file entries.</returns>
public IEnumerable<BlobInfo> EnumerateFolder(IClaimsInfoProvider claimsInfoProvider, string claimId, string subFolder = null)
{
  return GetInternal(() =>
  {
    LogInformation($"EnumerateFolder: claimId: {claimId} | subFolder: {CleanupFileName(subFolder)}");

    var claim = claimsInfoProvider.GetClaimById(claimId);
    var solution = _solutionsDatastore.ById(claim.SolutionId);
    var organisation = _organisationsDatastore.ById(solution.OrganisationId);
    var context = GetClientContext();

    // Organisation/Solution-Version/ClaimType/ClaimName[/subFolder]
    var claimFolderUrl = Url.Combine(
      SharePoint_OrganisationsRelativeUrl,
      CleanupFileName(organisation.Name),
      GetSolutionVersionFolderName(solution),
      CleanupFileName(claimsInfoProvider.GetFolderName()),
      CleanupFileName(claimsInfoProvider.GetFolderClaimName(claim)));
    if (!string.IsNullOrEmpty(subFolder))
    {
      claimFolderUrl = Url.Combine(claimFolderUrl, subFolder);
    }

    // Queue up folder + children, then resolve everything in one round trip.
    var claimFolder = context.Web.GetFolderByServerRelativeUrl(claimFolderUrl);
    context.Load(claimFolder);
    context.Load(claimFolder.Files);
    context.Load(claimFolder.Folders);
    LogInformation($"EnumerateFolder: enumerating {Url.Combine(context.Url, claimFolderUrl)}...");
    context.ExecuteQuery();

    var siteUri = new Uri(context.Url);
    var rootInfo = new BlobInfo
    {
      Id = claimFolder.UniqueId.ToString(),
      Name = claimFolder.Name,
      IsFolder = true,
      Url = new Uri(siteUri, claimFolder.ServerRelativeUrl).AbsoluteUri,
      TimeLastModified = claimFolder.TimeLastModified
    };

    // Immediate sub-folders, parented to the root entry.
    var folderInfos = claimFolder.Folders.Select(x => new BlobInfo
    {
      Id = x.UniqueId.ToString(),
      ParentId = rootInfo.Id,
      Name = x.Name,
      IsFolder = true,
      Length = 0,
      Url = new Uri(siteUri, x.ServerRelativeUrl).AbsoluteUri,
      TimeLastModified = x.TimeLastModified
    });

    // Files directly inside the folder; BlobId doubles as the download handle.
    var fileInfos = claimFolder.Files.Select(x => new BlobInfo
    {
      Id = x.UniqueId.ToString(),
      ParentId = rootInfo.Id,
      Name = x.Name,
      IsFolder = false,
      Length = x.Length,
      Url = new Uri(siteUri, x.ServerRelativeUrl).AbsoluteUri,
      TimeLastModified = x.TimeLastModified,
      BlobId = x.UniqueId.ToString()
    });

    var results = new List<BlobInfo> { rootInfo };
    results.AddRange(folderInfos);
    results.AddRange(fileInfos);
    return results;
  });
}
/// <summary>
/// Uploads a file into a claim's SharePoint evidence folder using the CSOM
/// sliced-upload protocol (StartUpload / ContinueUpload / FinishUpload), so
/// content larger than a single request can be sent in fixed-size chunks.
/// </summary>
/// <param name="claimsInfoProvider">Source of claim metadata and folder names.</param>
/// <param name="claimId">Identifier of the claim whose folder receives the file.</param>
/// <param name="file">Content stream to upload. NOTE(review): the BinaryReader
/// below is constructed without leaveOpen, so this stream is disposed when the
/// method exits — confirm callers expect that.</param>
/// <param name="fileName">Target file name within the claim folder.</param>
/// <param name="subFolder">Optional sub-folder (created if missing) under the claim folder.</param>
/// <param name="fileChunkSizeInMB">Slice size in megabytes; each slice is one server round trip.</param>
/// <returns>The uploaded file's UniqueId as a string, or string.Empty if the stream yielded no data.</returns>
private string UploadFileSlicePerSlice(
  IClaimsInfoProvider claimsInfoProvider,
  string claimId,
  Stream file,
  string fileName,
  string subFolder,
  int fileChunkSizeInMB = 3)
{
  // Each sliced upload requires a unique id
  var uploadId = Guid.NewGuid();

  // Get to folder to upload into
  var claim = claimsInfoProvider.GetClaimById(claimId);
  var soln = _solutionsDatastore.ById(claim.SolutionId);
  var org = _organisationsDatastore.ById(soln.OrganisationId);
  var solnVer = GetSolutionVersionFolderName(soln);
  var claimFolderRelUrl = Url.Combine(
    SharePoint_OrganisationsRelativeUrl,
    CleanupFileName(org.Name),
    solnVer,
    CleanupFileName(claimsInfoProvider.GetFolderName()),
    CleanupFileName(claimsInfoProvider.GetFolderClaimName(claim)));

  // create subFolder if not exists
  // NOTE(review): subFolder is combined raw here (no CleanupFileName), unlike
  // the other path segments — confirm callers always pass a safe name.
  if (!string.IsNullOrEmpty(subFolder))
  {
    CreateSubFolder(claimFolderRelUrl, subFolder);
    claimFolderRelUrl = Url.Combine(claimFolderRelUrl, subFolder);
  }

  var context = GetClientContext();
  var docClaimFolder = context.Web.GetFolderByServerRelativeUrl(claimFolderRelUrl);
  context.ExecuteQuery();

  // Get the information about the folder that will hold the file
  LogInformation($"UploadFileSlicePerSlice: enumerating {Url.Combine(context.Url, claimFolderRelUrl)}...");
  context.Load(docClaimFolder.Files);
  context.Load(docClaimFolder, folder => folder.ServerRelativeUrl);
  context.ExecuteQuery();

  using (var br = new BinaryReader(file))
  {
    var fileSize = file.Length;
    ClientResult<long> bytesUploaded = null;
    Microsoft.SharePoint.Client.NetCore.File uploadFile = null;

    // Calculate block size in bytes
    var blockSize = fileChunkSizeInMB * 1024 * 1024;
    byte[] buffer = new byte[blockSize];
    byte[] lastBuffer = null;
    long fileoffset = 0;      // server-side offset for the next slice
    long totalBytesRead = 0;
    int bytesRead;
    bool first = true;        // first slice starts the upload session
    bool last = false;        // final slice finishes the upload session

    // Read data from stream in blocks
    while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
    {
      totalBytesRead += bytesRead;

      // We've reached the end of the file
      if (totalBytesRead == fileSize)
      {
        last = true;

        // Copy to a new buffer that has the correct size
        // (the final read usually fills only part of 'buffer')
        lastBuffer = new byte[bytesRead];
        Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
      }

      if (first)
      {
        using (var contentStream = new MemoryStream())
        {
          // Add an empty file.
          var fileInfo = new FileCreationInformation
          {
            ContentStream = contentStream,
            Url = fileName,
            Overwrite = true
          };
          uploadFile = docClaimFolder.Files.Add(fileInfo);

          // Start upload by uploading the first slice
          // NOTE: small files will be contained in the lastBuffer, so use this to upload in one call
          using (var strm = new MemoryStream(last ? lastBuffer : buffer))
          {
            // Call the start upload method on the first slice
            bytesUploaded = uploadFile.StartUpload(uploadId, strm);
            LogInformation($"UploadFileSlicePerSlice: uploading first slice...");
            context.ExecuteQuery();

            // fileoffset is the pointer where the next slice will be added
            fileoffset = bytesUploaded.Value;
          }

          // NOTE: small files have already been uploaded from lastBuffer, so reset it
          // (FinishUpload below then sends an empty final slice)
          lastBuffer = new byte[0];
        }
      }

      // Get a reference to our file
      LogInformation($"UploadFileSlicePerSlice: getting reference to file...");
      uploadFile = context.Web.GetFileByServerRelativeUrl(Url.Combine(docClaimFolder.ServerRelativeUrl, fileName));

      if (last)
      {
        // Is this the last slice of data?
        using (var strm = new MemoryStream(lastBuffer))
        {
          // End sliced upload by calling FinishUpload
          LogInformation($"UploadFileSlicePerSlice: uploading last slice...");
          uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, strm);
          context.Load(uploadFile);
          context.ExecuteQuery();

          // Upload complete: hand back the server-assigned id
          return (uploadFile.UniqueId.ToString());
        }
      }

      if (first)
      {
        // we can only start the upload once; the first buffer was
        // already sent via StartUpload, so skip the ContinueUpload below
        first = false;
        continue;
      }

      using (var strm = new MemoryStream(buffer))
      {
        // Continue sliced upload
        LogInformation($"UploadFileSlicePerSlice: uploading intermediate slice...");
        bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, strm);
        context.ExecuteQuery();

        // update fileoffset for the next slice
        fileoffset = bytesUploaded.Value;
      }
    }
  }

  // Stream produced no data: nothing was uploaded, so no file id to return
  return (string.Empty);
}