/// <summary>
/// Uploads a large file to a SharePoint library in slices
/// (StartUpload / ContinueUpload / FinishUpload), per the Microsoft sample:
/// https://docs.microsoft.com/en-us/sharepoint/dev/solution-guidance/upload-large-files-sample-app-for-sharepoint
/// NOTE(review): assumes the file is larger than one slice (3 MB). A file that
/// fits entirely in the first read would be started but never finished and the
/// method would return null — confirm callers only route large files here.
/// </summary>
/// <param name="ctx">Client context used to execute the upload requests.</param>
/// <param name="uploadLib">Target document library.</param>
/// <param name="filePath">Full local path of the file to upload.</param>
/// <param name="fileName">Name the file will have in the library.</param>
/// <param name="fileSizeMb">Caller-supplied size hint. No longer trusted for
/// end-of-file detection (see FIX below); kept for interface compatibility.</param>
/// <returns>The uploaded file, or null if the loop ended without finishing.</returns>
private async Task<Microsoft.SharePoint.Client.File> UploadLargeSizeFile(ClientContext ctx, List uploadLib, string filePath, string fileName, long fileSizeMb)
{
    // Each sliced upload requires a unique ID.
    Guid uploadId = Guid.NewGuid();

    // File object.
    Microsoft.SharePoint.Client.File uploadFile = null;

    // Slice size in bytes (3 MB).
    int blockSize = 3 * 1024 * 1024;

    // Use large file upload approach.
    ClientResult<long> bytesUploaded = null;

    // using blocks replace the original try/finally + manual Dispose; the
    // stream is released on every exit path exactly as before.
    using (FileStream fs = System.IO.File.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (BinaryReader br = new BinaryReader(fs))
    {
        // FIX: the original compared totalBytesRead (bytes) against the
        // fileSizeMb parameter. When a caller passes a size in megabytes — as
        // the parameter name suggests — the equality never holds, FinishUpload
        // is never issued, and the method returns null after uploading every
        // slice. Use the stream's own byte length as the authoritative size.
        long fileLength = fs.Length;

        byte[] buffer = new byte[blockSize];
        byte[] lastBuffer = null;
        long fileoffset = 0;
        long totalBytesRead = 0;
        int bytesRead;
        bool first = true;
        bool last = false;

        // Read data from the file system in blocks.
        while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
        {
            totalBytesRead += bytesRead;

            // FIX: ">=" instead of "==" so any bookkeeping mismatch can never
            // skip the final FinishUpload call.
            if (totalBytesRead >= fileLength)
            {
                last = true;
                // Copy the tail into a buffer of exactly the right size.
                lastBuffer = new byte[bytesRead];
                Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
            }

            if (first)
            {
                using (MemoryStream contentStream = new MemoryStream())
                {
                    // Add an empty placeholder file; slices are appended to it.
                    FileCreationInformation fileInfo = new FileCreationInformation();
                    fileInfo.ContentStream = contentStream;
                    fileInfo.Url = fileName;
                    fileInfo.Overwrite = false;
                    uploadFile = uploadLib.RootFolder.Files.Add(fileInfo);

                    // Start the upload with the first slice. FIX: bound the
                    // stream to bytesRead so bytes beyond what was read are
                    // never uploaded.
                    using (MemoryStream s = new MemoryStream(buffer, 0, bytesRead))
                    {
                        bytesUploaded = uploadFile.StartUpload(uploadId, s);
                        await ctx.ExecuteQueryAsync();
                        // fileoffset is where the next slice will be appended.
                        fileoffset = bytesUploaded.Value;
                    }
                }

                // The upload can only be started once.
                first = false;
            }
            else if (last)
            {
                // Final slice: FinishUpload commits the file server-side.
                using (MemoryStream s = new MemoryStream(lastBuffer))
                {
                    uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
                    ctx.Load(uploadFile);
                    await ctx.ExecuteQueryAsync();
                    // Return the file object for the uploaded file.
                    return uploadFile;
                }
            }
            else
            {
                // Middle slice: append and advance the offset.
                using (MemoryStream s = new MemoryStream(buffer, 0, bytesRead))
                {
                    bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
                    await ctx.ExecuteQueryAsync();
                    fileoffset = bytesUploaded.Value;
                }
            }
        }
    }

    // Reached only if the loop ends without a final slice (e.g. empty file).
    return null;
}
/// <summary>
/// Copies a file from an SMB share to a local temp file, then uploads it to a
/// SharePoint library with the sliced upload API (StartUpload / ContinueUpload /
/// FinishUpload). Returns the uploaded file, or null when the SMB read fails.
/// NOTE(review): SMBCredential, client, filename and fields are never used in
/// this body — presumably kept for interface symmetry; confirm with callers.
/// </summary>
public static Microsoft.SharePoint.Client.File GetBigSharePointFile(string fileurl, string filename, SMBCredential SMBCredential, SMB2Client client, NTStatus nts, ISMBFileStore fileStore, List list, ClientContext cc, DocumentModel doc, List<Metadata> fields)
{
    Microsoft.SharePoint.Client.File uploadFile = null;
    ClientResult<long> bytesUploaded = null;
    //SMBLibrary.NTStatus actionStatus;
    FileCreationInformation newFile = new FileCreationInformation();
    NTStatus status = nts;
    object handle;
    FileStatus fileStatus;
    // Stage the SMB content into a local temp file before uploading.
    string tmpfile = Path.GetTempFileName();
    // Open the remote file read-only via SMB.
    status = fileStore.CreateFile(out handle, out fileStatus, fileurl, AccessMask.GENERIC_READ, 0, ShareAccess.Read, CreateDisposition.FILE_OPEN, CreateOptions.FILE_NON_DIRECTORY_FILE, null);
    if (status != NTStatus.STATUS_SUCCESS)
    {
        Console.WriteLine(status);
        return(null);
    }
    else
    {
        string uniqueFileName = String.Empty;
        int blockSize = 8000000; // 8 MB upload slice size (decimal MB, not 8 MiB)
        long fileSize;
        Guid uploadId = Guid.NewGuid(); // each sliced upload needs a unique id
        byte[] buf;
        var fs = new FileStream(tmpfile, FileMode.OpenOrCreate);
        var bw = new BinaryWriter(fs);
        int bufsz = 64 * 1000; // SMB read chunk: 64 KB (decimal)
        int i = 0;
        // Pull the remote file down chunk by chunk.
        // NOTE(review): i++ happens only on STATUS_SUCCESS, so any other
        // non-EOF status (e.g. an access error mid-read) loops forever here.
        // NOTE(review): the i < 1000 guard caps the copy at ~64 MB; a larger
        // file would be silently truncated — confirm the expected max size.
        do
        {
            status = fileStore.ReadFile(out buf, handle, i * bufsz, bufsz);
            if (status == NTStatus.STATUS_SUCCESS)
            {
                int n = buf.GetLength(0);
                bw.Write(buf, 0, n);
                // A short read means we reached the end of the remote file.
                if (n < bufsz)
                {
                    break;
                }
                i++;
            }
        }
        while (status != NTStatus.STATUS_END_OF_FILE && i < 1000);
        if (status == NTStatus.STATUS_SUCCESS)
        {
            fileStore.CloseFile(handle);
            bw.Flush();
            fs.Close();
            //fs = System.IO.File.OpenRead(tmpfile);
            //byte[] fileBytes = new byte[fs.Length];
            //fs.Read(fileBytes, 0, fileBytes.Length);
            try
            {
                // Re-open the staged copy for the sliced SharePoint upload.
                fs = System.IO.File.Open(tmpfile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                fileSize = fs.Length;
                uniqueFileName = Path.GetFileName(fs.Name);
                using (BinaryReader br = new BinaryReader(fs))
                {
                    byte[] buffer = new byte[blockSize];
                    byte[] lastBuffer = null;
                    long fileoffset = 0;
                    long totalBytesRead = 0;
                    int bytesRead;
                    bool first = true;
                    bool last = false;
                    while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        totalBytesRead = totalBytesRead + bytesRead;
                        // Last slice: copy the tail into an exact-size buffer.
                        if (totalBytesRead >= fileSize)
                        {
                            last = true;
                            lastBuffer = new byte[bytesRead];
                            Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
                        }
                        if (first)
                        {
                            using (MemoryStream contentStream = new MemoryStream())
                            {
                                // Add an empty placeholder file, then start the upload.
                                newFile.ContentStream = contentStream;
                                newFile.Url = uniqueFileName;
                                newFile.Overwrite = true;
                                // NOTE(review): when doc.foldername != null,
                                // uploadFile stays null (the folder branch below is
                                // commented out) and StartUpload throws a
                                // NullReferenceException — confirm callers always
                                // pass doc.foldername == null.
                                if (doc.foldername == null)
                                {
                                    uploadFile = list.RootFolder.Files.Add(newFile);
                                }
                                /*else
                                 * {
                                 * string foldername = doc.foldername;
                                 * string sitecontent = doc.sitecontent;
                                 *
                                 * //Folder folder = list.RootFolder.Folders.GetByUrl(foldername);
                                 *
                                 * Folder folder = GetFolder(cc, list, foldername);
                                 * if (folder == null){
                                 * if(doc.taxFields != null){
                                 * folder = CreateDocumentSetWithTaxonomy(cc, list, sitecontent, foldername, doc.fields, fields, doc.taxFields);
                                 * }
                                 * else
                                 * {
                                 * folder = CreateFolder(cc, list, sitecontent, foldername, doc.fields, fields);
                                 * }
                                 *
                                 * }
                                 * }*/
                                using (MemoryStream s = new MemoryStream(buffer))
                                {
                                    bytesUploaded = uploadFile.StartUpload(uploadId, s);
                                    cc.ExecuteQuery();
                                    // fileoffset is where the next slice is appended.
                                    fileoffset = bytesUploaded.Value;
                                }
                                first = false;
                            }
                        }
                        else
                        {
                            // Re-acquire a reference to the in-progress file.
                            uploadFile = cc.Web.GetFileByServerRelativeUrl(list.RootFolder.ServerRelativeUrl + Path.AltDirectorySeparatorChar + uniqueFileName);
                            if (last)
                            {
                                // Final slice commits the upload server-side.
                                using (MemoryStream s = new MemoryStream(lastBuffer))
                                {
                                    uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
                                    cc.ExecuteQuery();
                                }
                            }
                            else
                            {
                                using (MemoryStream s = new MemoryStream(buffer))
                                {
                                    bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
                                    cc.ExecuteQuery();
                                    fileoffset = bytesUploaded.Value;
                                }
                            }
                        }
                    }
                }
            }
            finally
            {
                System.IO.File.Delete(tmpfile);
                if (fs != null)
                {
                    fs.Dispose();
                }
            }
        }
        else
        {
            // SMB read failed mid-copy: discard the partial temp file.
            // NOTE(review): on this path the SMB handle is never closed
            // (CloseFile is only called on success) and fs/bw are still open
            // when File.Delete runs — the delete can throw IOException while
            // the stream holds the file. Confirm and fix upstream.
            System.IO.File.Delete(tmpfile);
            return(null);
        }
        return(uploadFile);
    }
}
/// <summary>
/// Uploads a file to the "Amendment" library, creating the target folder if it
/// does not exist yet. Files up to 3 MB go in a single request; larger files
/// use the sliced upload API (StartUpload / ContinueUpload / FinishUpload), per
/// https://docs.microsoft.com/en-us/sharepoint/dev/solution-guidance/upload-large-files-sample-app-for-sharepoint
/// </summary>
/// <param name="file">Content stream; must report a correct Length.</param>
/// <param name="filename">Name the file will have in SharePoint.</param>
/// <param name="mimeType">Unused in this body; kept for interface compatibility.</param>
/// <param name="folderName">Library folder (trimmed) to upload into.</param>
/// <returns>Id, name, size and folder of the uploaded file.</returns>
public FileUploadResult UploadFile(Stream file, string filename, string mimeType, string folderName)
{
    // TODO: Consider updating this to use PnP Core when it supports .NET Standard (https://github.com/pnp/PnP-Sites-Core)
    Uri site = new Uri(_options.SiteUrl);
    const int fileChunkSizeInMB = 3;
    Microsoft.SharePoint.Client.File uploadFile = null;

    using (var authenticationManager = new AuthenticationManager(_options.TenantId))
    using (var ctx = authenticationManager.GetContext(site, _options.ClientId, _options.ClientSecret))
    {
        // Each sliced upload requires a unique id.
        Guid uploadId = Guid.NewGuid();

        // TODO: Library name would be dynamic, based on type of amendment (dependent on amendment data model)
        List uploadLibrary = ctx.Web.Lists.GetByTitle("Amendment");

        // Create a dedicated folder if one has not already been created for this amendment.
        // TODO: Re-work interface so that folder is created ahead of uploading
        // the first file, then this check can be removed.
        FolderCollection folders = uploadLibrary.RootFolder.Folders;
        ctx.Load(folders);
        ctx.ExecuteQuery();
        folderName = folderName.Trim();
        var targetFolder = folders.FirstOrDefault(x => x.Name == folderName);
        if (targetFolder == null)
        {
            targetFolder = folders.Add(folderName);
            ctx.Load(targetFolder);
            ctx.ExecuteQuery();
        }

        // Slice size in bytes.
        int blockSize = fileChunkSizeInMB * 1024 * 1024;
        long fileSize = file.Length;

        if (fileSize <= blockSize)
        {
            // Small file: regular single-request upload.
            FileCreationInformation fileInfo = new FileCreationInformation();
            fileInfo.ContentStream = file;
            fileInfo.Url = filename;
            fileInfo.Overwrite = true;
            uploadFile = targetFolder.Files.Add(fileInfo);
            ctx.Load(uploadFile);
            ctx.ExecuteQuery();
        }
        else
        {
            // Large file: sliced upload.
            ClientResult<long> bytesUploaded = null;
            using (BinaryReader br = new BinaryReader(file))
            {
                byte[] buffer = new byte[blockSize];
                long fileoffset = 0;
                long totalBytesRead = 0;
                int bytesRead;
                bool first = true;

                // Read data from the stream in blocks.
                while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
                {
                    totalBytesRead += bytesRead;

                    if (first)
                    {
                        using (MemoryStream contentStream = new MemoryStream())
                        {
                            // Add an empty placeholder file; slices are appended to it.
                            FileCreationInformation fileInfo = new FileCreationInformation();
                            fileInfo.ContentStream = contentStream;
                            fileInfo.Url = filename;
                            fileInfo.Overwrite = true;
                            uploadFile = targetFolder.Files.Add(fileInfo);

                            // FIX: bound the slice to bytesRead. "file" is an
                            // arbitrary Stream, and Stream.Read may return
                            // fewer bytes than requested mid-stream; uploading
                            // the whole buffer would append stale bytes and
                            // corrupt the file.
                            using (MemoryStream s = new MemoryStream(buffer, 0, bytesRead))
                            {
                                bytesUploaded = uploadFile.StartUpload(uploadId, s);
                                ctx.ExecuteQuery();
                                // fileoffset is where the next slice is appended.
                                fileoffset = bytesUploaded.Value;
                            }

                            // We can only start the upload once.
                            first = false;
                        }
                    }
                    else
                    {
                        // Re-acquire a reference to the in-progress file.
                        uploadFile = ctx.Web.GetFileByServerRelativeUrl(
                            targetFolder.ServerRelativeUrl + System.IO.Path.AltDirectorySeparatorChar + filename);

                        // FIX: ">=" instead of "==" so the final slice cannot be
                        // missed if Length and the bytes actually read disagree.
                        if (totalBytesRead >= fileSize)
                        {
                            // Final slice: FinishUpload commits the file.
                            using (MemoryStream s = new MemoryStream(buffer, 0, bytesRead))
                            {
                                uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
                                // FIX: the file object returned by FinishUpload
                                // has no properties populated; without this
                                // Load, reading UniqueId/Length for the result
                                // below throws
                                // PropertyOrFieldNotInitializedException.
                                ctx.Load(uploadFile);
                                ctx.ExecuteQuery();
                                break;
                            }
                        }
                        else
                        {
                            // Middle slice: append and advance the offset.
                            using (MemoryStream s = new MemoryStream(buffer, 0, bytesRead))
                            {
                                bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
                                ctx.ExecuteQuery();
                                fileoffset = bytesUploaded.Value;
                            }
                        }
                    }
                }
            }
        }
    }

    return new FileUploadResult
    {
        FileId = uploadFile.UniqueId,
        FileName = filename,
        FileSizeInBytes = uploadFile.Length,
        FolderName = folderName
    };
}