        public void PrepareForSolution(IClaimsInfoProvider claimsInfoProvider, string solutionId)
        {
            GetInternal(() =>
            {
                if (_env.IsDevelopment() && _isFakeSharePoint)
                {
                    LogInformation($"PrepareForSolution disabled in 'test' Development environment");
                    return(0);
                }

                LogInformation($"PrepareForSolution: solutionId: {solutionId}");
                var soln = _solutionsDatastore.ById(solutionId);
                if (soln == null)
                {
                    throw new KeyNotFoundException($"Could not find solution: {solutionId}");
                }
                var org             = _organisationsDatastore.ById(soln.OrganisationId);
                var claimedCapNames = _capabilitiesImplementedDatastore
                                      .BySolution(solutionId)
                                      .Select(x => CleanupFileName(_capabilitiesDatastore.ById(x.CapabilityId).Name));
                var claimedNameStds = _standardsApplicableDatastore
                                      .BySolution(solutionId)
                                      .Select(x => CleanupFileName(_standardsDatastore.ById(x.StandardId).Name));

                var capsTask = Task.Factory.StartNew(() =>
                    CreateClaimSubFolders(
                        CreateClientContext(),
                        SharePoint_OrganisationsRelativeUrl,
                        CleanupFileName(org.Name),
                        CleanupFileName(soln.Name),
                        CleanupFileName(soln.Version),
                        CleanupFileName(claimsInfoProvider.GetCapabilityFolderName()),
                        claimedCapNames));
                var stdsTask = Task.Factory.StartNew(() =>
                    CreateClaimSubFolders(
                        CreateClientContext(),
                        SharePoint_OrganisationsRelativeUrl,
                        CleanupFileName(org.Name),
                        CleanupFileName(soln.Name),
                        CleanupFileName(soln.Version),
                        CleanupFileName(claimsInfoProvider.GetStandardsFolderName()),
                        claimedNameStds));
                Task.WaitAll(capsTask, stdsTask);

                return(0);
            });
        }
        public FileStreamResult GetFileStream(IClaimsInfoProvider claimsInfoProvider, string claimId, string uniqueId)
        {
            return(GetInternal(() =>
            {
                LogInformation($"GetFileStream: claimId: {claimId} | uniqueId: {uniqueId}");

                var context = CreateClientContextByUserNamePassword();
                var file = context.Web.GetFileById(Guid.Parse(uniqueId));
                context.Load(file);
                context.ExecuteQuery();
                LogInformation($"GetFileStream: retrieved info for {file.Name}");

                // File.OpenBinaryDirect will only work with a username/password context, as CSOM uses basic authentication.
                // For an add-in context, CSOM does not set the authentication.
                // This is probably because SharePoint uses WebDAV as the underlying protocol, which allows SharePoint
                // to determine the calling user's permissions.
                //
                // An add-in should use:
                //    File.OpenBinaryStream
                // but this is broken in .NET Core CSOM (it works in .NET Framework CSOM).
                // It may be possible to call the SharePoint REST API directly (a hedged sketch follows this method):
                //    https://docs.microsoft.com/en-us/previous-versions/office/developer/sharepoint-rest-reference/dn450841%28v%3doffice.15%29
                // but I (TDE) couldn't get it working.
                return new FileStreamResult(Microsoft.SharePoint.Client.NetCore.File.OpenBinaryDirect(context, file.ServerRelativeUrl)?.Stream, GetContentType(file.Name))
                {
                    FileDownloadName = Path.GetFileName(file.Name)
                };
            }));
        }
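        // Hedged, untested sketch of the direct SharePoint REST download mentioned in the comments
        // above (the original author notes they could not get this working). It assumes an OAuth
        // bearer token is available from a hypothetical GetAccessToken() helper; the
        // /_api/web/GetFileByServerRelativeUrl('...')/$value route is SharePoint's documented REST
        // endpoint for retrieving a file's binary content.
        private async Task<byte[]> DownloadFileViaRestSketch(string webUrl, string serverRelativeUrl)
        {
            using (var client = new System.Net.Http.HttpClient())
            {
                client.DefaultRequestHeaders.Authorization =
                    new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", GetAccessToken());

                var requestUrl = $"{webUrl}/_api/web/GetFileByServerRelativeUrl('{serverRelativeUrl}')/$value";
                var response = await client.GetAsync(requestUrl);
                response.EnsureSuccessStatusCode();

                // raw binary content of the file
                return await response.Content.ReadAsByteArrayAsync();
            }
        }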
        // Alternative implementation of GetFileStream: downloads the file's binary content via a
        // separate REST file-download service instead of File.OpenBinaryDirect.
        public FileStreamResult GetFileStream(IClaimsInfoProvider claimsInfoProvider, string claimId, string uniqueId)
        {
            return(GetInternal(() =>
            {
                LogInformation($"GetFileStream: claimId: {claimId} | uniqueId: {uniqueId}");

                var context = GetClientContext();
                var file = context.Web.GetFileById(Guid.Parse(uniqueId));
                context.Load(file);
                context.ExecuteQuery();
                LogInformation($"GetFileStream: retrieved info for {file.Name}");

                var client = new RestClient(SharePoint_FileDownloadServerUrl);
                var request = new RestRequest(Method.POST);
                request.AddHeader("Content-Type", "application/json");
                request.AddJsonBody(new { path = file.ServerRelativeUrl });
                byte[] response = client.DownloadData(request);
                // construct the MemoryStream directly from the downloaded bytes so its Position is 0 when FileStreamResult reads it
                var ms = new MemoryStream(response);
                var retval = new FileStreamResult(ms, GetContentType(file.Name))
                {
                    FileDownloadName = Path.GetFileName(file.Name)
                };
                LogInformation($"GetFileStream: retrieved binary data for {file.Name}");

                return retval;
            }));
        }
        public string AddEvidenceForClaim(IClaimsInfoProvider claimsInfoProvider, string claimId, Stream file, string fileName, string subFolder = null)
        {
            return(GetInternal(() =>
            {
                LogInformation($"AddEvidenceForClaim: claimId: {claimId} | fileName: {CleanupFileName(fileName)} | subFolder: {CleanupFileName(subFolder)}");
                var blobId = UploadFileSlicePerSlice(claimsInfoProvider, claimId, file, CleanupFileName(fileName), CleanupFileName(subFolder));
                LogInformation($"AddEvidenceForClaim: claimId: {claimId} | fileName: {CleanupFileName(fileName)} | subFolder: {CleanupFileName(subFolder)} --> {blobId}");

                return blobId;
            }));
        }
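        // Hedged usage sketch: how an ASP.NET Core caller might feed an uploaded form file into
        // AddEvidenceForClaim. IFormFile comes from Microsoft.AspNetCore.Http; the method name and
        // its placement in this class are illustrative assumptions, not part of the original service.
        public string AddEvidenceForClaimFromFormFile(
            IClaimsInfoProvider claimsInfoProvider,
            string claimId,
            Microsoft.AspNetCore.Http.IFormFile formFile,
            string subFolder = null)
        {
            using (var stream = formFile.OpenReadStream())
            {
                // returns the SharePoint UniqueId of the newly uploaded file
                return AddEvidenceForClaim(claimsInfoProvider, claimId, stream, formFile.FileName, subFolder);
            }
        }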
        public IEnumerable<ClaimBlobInfoMap> EnumerateClaimFolderTree(IClaimsInfoProvider claimsInfoProvider, string solutionId)
        {
            return(GetInternal(() =>
            {
                var soln = _solutionsDatastore.ById(solutionId);
                var claims = claimsInfoProvider.GetClaimBySolution(solutionId);
                var org = _organisationsDatastore.ById(soln.OrganisationId);
                var claimFolder = CleanupFileName(claimsInfoProvider.GetFolderName());
                var solutionUrl = Url.Combine(
                    SharePoint_OrganisationsRelativeUrl,
                    CleanupFileName(org.Name),
                    GetSolutionVersionFolderName(soln));
                var allBlobInfos = EnumerateFolderRecursively(solutionUrl, claimFolder);
                var quals = claimsInfoProvider.GetAllQualities();

                var retval = new List<ClaimBlobInfoMap>();
                foreach (var qual in quals)
                {
                    // specific claim folder eg 'Standards Evidence/Patient Management'
                    var qualFolder = allBlobInfos.SingleOrDefault(bi => bi.Name == CleanupFileName(qual.Name) && bi.IsFolder);
                    if (qualFolder == null)
                    {
                        continue;
                    }

                    // look for claim corresponding to this Capability/Standard
                    var claim = claims.SingleOrDefault(c => c.QualityId == qual.Id);
                    if (claim == null)
                    {
                        continue;
                    }

                    var map = new ClaimBlobInfoMap {
                        ClaimId = claim.Id
                    };
                    var blobInfos = new List<BlobInfo>(new[] { qualFolder });

                    // files and sub-folders in specific claim folder
                    var subBlobInfos = allBlobInfos.Where(abi => abi.ParentId == qualFolder.Id);
                    blobInfos.AddRange(subBlobInfos);

                    // files in a specific claim sub folder eg 'Standards Evidence/Patient Management/Video Evidence'
                    var subBlobInfosFolders = subBlobInfos.Where(sbi => sbi.IsFolder);
                    var subBlobInfosFiles = subBlobInfosFolders.SelectMany(sbif => allBlobInfos.Where(abi => abi.ParentId == sbif.Id));
                    blobInfos.AddRange(subBlobInfosFiles);

                    map.BlobInfos = blobInfos;
                    retval.Add(map);
                }

                return retval;
            }));
        }
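        // Hedged sketch: one way a caller might flatten the maps returned by EnumerateClaimFolderTree
        // for logging. It relies only on the properties used above (ClaimId, BlobInfos, ParentId,
        // Name, IsFolder); the method itself is illustrative and not part of the original service.
        private void LogClaimFolderTree(IEnumerable<ClaimBlobInfoMap> maps)
        {
            foreach (var map in maps)
            {
                LogInformation($"Claim {map.ClaimId}:");
                foreach (var blob in map.BlobInfos)
                {
                    // sub-folders and files carry the ParentId of their containing folder
                    var kind = blob.IsFolder ? "folder" : "file";
                    LogInformation($"  [{kind}] {blob.Name} (parent: {blob.ParentId})");
                }
            }
        }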
        public IEnumerable<BlobInfo> EnumerateFolder(IClaimsInfoProvider claimsInfoProvider, string claimId, string subFolder = null)
        {
            return(GetInternal(() =>
            {
                LogInformation($"EnumerateFolder: claimId: {claimId} | subFolder: {CleanupFileName(subFolder)}");
                var claim = claimsInfoProvider.GetClaimById(claimId);
                var soln = _solutionsDatastore.ById(claim.SolutionId);
                var org = _organisationsDatastore.ById(soln.OrganisationId);

                var context = GetClientContext();
                var claimFolderUrl = Url.Combine(
                    SharePoint_OrganisationsRelativeUrl,
                    CleanupFileName(org.Name),
                    GetSolutionVersionFolderName(soln),
                    CleanupFileName(claimsInfoProvider.GetFolderName()),
                    CleanupFileName(claimsInfoProvider.GetFolderClaimName(claim)));
                if (!string.IsNullOrEmpty(subFolder))
                {
                    claimFolderUrl = Url.Combine(claimFolderUrl, subFolder);
                }
                var claimFolder = context.Web.GetFolderByServerRelativeUrl(claimFolderUrl);

                context.Load(claimFolder);
                context.Load(claimFolder.Files);
                context.Load(claimFolder.Folders);

                LogInformation($"EnumerateFolder: enumerating {Url.Combine(context.Url, claimFolderUrl)}...");
                context.ExecuteQuery();

                var claimFolderInfo = new BlobInfo
                {
                    Id = claimFolder.UniqueId.ToString(),
                    Name = claimFolder.Name,
                    IsFolder = true,
                    Url = new Uri(new Uri(context.Url), claimFolder.ServerRelativeUrl).AbsoluteUri,
                    TimeLastModified = claimFolder.TimeLastModified
                };
                var claimSubFolderInfos = claimFolder
                                          .Folders
                                          .Select(x => new BlobInfo
                                          {
                                              Id = x.UniqueId.ToString(),
                                              ParentId = claimFolderInfo.Id,
                                              Name = x.Name,
                                              IsFolder = true,
                                              Length = 0,
                                              Url = new Uri(new Uri(context.Url), x.ServerRelativeUrl).AbsoluteUri,
                                              TimeLastModified = x.TimeLastModified
                                          });
                var claimFileInfos = claimFolder
                                     .Files
                                     .Select(x => new BlobInfo
                                     {
                                         Id = x.UniqueId.ToString(),
                                         ParentId = claimFolderInfo.Id,
                                         Name = x.Name,
                                         IsFolder = false,
                                         Length = x.Length,
                                         Url = new Uri(new Uri(context.Url), x.ServerRelativeUrl).AbsoluteUri,
                                         TimeLastModified = x.TimeLastModified,
                                         BlobId = x.UniqueId.ToString()
                                     });
                var retVal = new List<BlobInfo>();

                retVal.Add(claimFolderInfo);
                retVal.AddRange(claimSubFolderInfos);
                retVal.AddRange(claimFileInfos);

                return retVal;
            }));
        }
        private string UploadFileSlicePerSlice(
            IClaimsInfoProvider claimsInfoProvider,
            string claimId,
            Stream file,
            string fileName,
            string subFolder,
            int fileChunkSizeInMB = 3)
        {
            // Each sliced upload requires a unique id
            var uploadId = Guid.NewGuid();

            // Get the folder to upload into
            var claim             = claimsInfoProvider.GetClaimById(claimId);
            var soln              = _solutionsDatastore.ById(claim.SolutionId);
            var org               = _organisationsDatastore.ById(soln.OrganisationId);
            var solnVer           = GetSolutionVersionFolderName(soln);
            var claimFolderRelUrl = Url.Combine(
                SharePoint_OrganisationsRelativeUrl,
                CleanupFileName(org.Name),
                solnVer,
                CleanupFileName(claimsInfoProvider.GetFolderName()),
                CleanupFileName(claimsInfoProvider.GetFolderClaimName(claim)));

            // create subFolder if it does not already exist
            if (!string.IsNullOrEmpty(subFolder))
            {
                CreateSubFolder(claimFolderRelUrl, subFolder);
                claimFolderRelUrl = Url.Combine(claimFolderRelUrl, subFolder);
            }

            var context        = GetClientContext();
            var docClaimFolder = context.Web.GetFolderByServerRelativeUrl(claimFolderRelUrl);

            context.ExecuteQuery();

            // Get the information about the folder that will hold the file
            LogInformation($"UploadFileSlicePerSlice: enumerating {Url.Combine(context.Url, claimFolderRelUrl)}...");
            context.Load(docClaimFolder.Files);
            context.Load(docClaimFolder, folder => folder.ServerRelativeUrl);
            context.ExecuteQuery();

            using (var br = new BinaryReader(file))
            {
                var fileSize = file.Length;
                ClientResult<long> bytesUploaded = null;
                Microsoft.SharePoint.Client.NetCore.File uploadFile = null;

                // Calculate block size in bytes
                var blockSize = fileChunkSizeInMB * 1024 * 1024;

                byte[] buffer         = new byte[blockSize];
                byte[] lastBuffer     = null;
                long   fileoffset     = 0;
                long   totalBytesRead = 0;
                int    bytesRead;
                bool   first = true;
                bool   last  = false;

                // Read data from stream in blocks
                while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
                {
                    totalBytesRead += bytesRead;

                    // We've reached the end of the file
                    if (totalBytesRead == fileSize)
                    {
                        last = true;

                        // Copy to a new buffer that has the correct size
                        lastBuffer = new byte[bytesRead];
                        Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
                    }

                    if (first)
                    {
                        using (var contentStream = new MemoryStream())
                        {
                            // Add an empty file.
                            var fileInfo = new FileCreationInformation
                            {
                                ContentStream = contentStream,
                                Url           = fileName,
                                Overwrite     = true
                            };
                            uploadFile = docClaimFolder.Files.Add(fileInfo);

                            // Start upload by uploading the first slice
                            // NOTE:  small files will be contained in the lastBuffer, so use this to upload in one call
                            using (var strm = new MemoryStream(last ? lastBuffer : buffer))
                            {
                                // Call the start upload method on the first slice
                                bytesUploaded = uploadFile.StartUpload(uploadId, strm);

                                LogInformation($"UploadFileSlicePerSlice: uploading first slice...");
                                context.ExecuteQuery();

                                // fileoffset is the pointer where the next slice will be added
                                fileoffset = bytesUploaded.Value;
                            }

                            // NOTE:  small files have already been uploaded from lastBuffer, so reset it
                            lastBuffer = new byte[0];
                        }
                    }

                    // Get a reference to our file
                    LogInformation($"UploadFileSlicePerSlice: getting reference to file...");
                    uploadFile = context.Web.GetFileByServerRelativeUrl(Url.Combine(docClaimFolder.ServerRelativeUrl, fileName));

                    if (last)
                    {
                        // Is this the last slice of data?
                        using (var strm = new MemoryStream(lastBuffer))
                        {
                            // End sliced upload by calling FinishUpload
                            LogInformation($"UploadFileSlicePerSlice: uploading last slice...");
                            uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, strm);
                            context.Load(uploadFile);
                            context.ExecuteQuery();

                            return(uploadFile.UniqueId.ToString());
                        }
                    }

                    if (first)
                    {
                        // we can only start the upload once
                        first = false;

                        continue;
                    }

                    using (var strm = new MemoryStream(buffer))
                    {
                        // Continue sliced upload
                        LogInformation($"UploadFileSlicePerSlice: uploading intermediate slice...");
                        bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, strm);
                        context.ExecuteQuery();

                        // update fileoffset for the next slice
                        fileoffset = bytesUploaded.Value;
                    }
                }
            }

            return(string.Empty);
        }