/// <summary>
/// Uploads a large file to the root SharePoint site's drive in chunks via an upload session.
/// Returns the uploaded item, or null when no session was created or no chunk reported completion.
/// NOTE(review): the stream is opened from the outer-scope <c>largeFilePath</c> rather than a
/// path derived from <paramref name="path"/>, and <paramref name="sp"/> is never used — confirm intended.
/// </summary>
private static async Task<DriveItem> UploadlargeFile(SharepointIds sp, string path)
{
    DriveItem uploadedFile = null;

    using (FileStream fileStream = new FileStream(largeFilePath, FileMode.Open))
    {
        UploadSession uploadSession = await graphClient.Sites["root"]
            .Drive.Root.ItemWithPath($"{path}/LargeFile.txt")
            .CreateUploadSession().Request()
            .PostAsync();

        if (uploadSession != null)
        {
            // Chunk size must be a multiple of 320 KiB; 4 * 320 KiB ≈ 1.25 MB.
            int chunkSize = 320 * 1024 * 4;
            var uploadProvider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, chunkSize);

            var buffer = new byte[chunkSize];
            var trackedExceptions = new List<Exception>();

            // The final successful chunk response carries the created drive item.
            foreach (var chunkRequest in uploadProvider.GetUploadChunkRequests())
            {
                var chunkResult = await uploadProvider.GetChunkRequestResponseAsync(chunkRequest, buffer, trackedExceptions);
                if (chunkResult.UploadSucceeded)
                {
                    uploadedFile = chunkResult.ItemResponse;
                }
            }
        }
    }

    return uploadedFile;
}
/// <summary>
/// Uploads a large file to the signed-in drive in 1280 KB chunks via an upload session.
/// </summary>
/// <param name="stream">Readable stream containing the file content.</param>
/// <param name="properties">Uploadable properties passed to CreateUploadSession.</param>
/// <param name="filePath">Drive-relative destination path.</param>
/// <returns>The created <see cref="DriveItem"/>.</returns>
/// <exception cref="InvalidOperationException">No chunk ever reported a completed upload.</exception>
private async Task<DriveItem> UploadLargeFileAsync(Stream stream, DriveItemUploadableProperties properties, string filePath)
{
    var uploadSession = await this.GraphClient.Drive.Root
        .ItemWithPath(filePath)
        .CreateUploadSession(properties)
        .Request()
        .PostAsync();

    // Chunk size must be a multiple of 320 KiB; 1280 KB here (the SDK default is 5 MB).
    var maxChunkSize = 1280 * 1024;
    var provider = new ChunkedUploadProvider(uploadSession, this.GraphClient, stream, maxChunkSize);

    var chunkRequests = provider.GetUploadChunkRequests();
    var readBuffer = new byte[maxChunkSize];
    var trackedExceptions = new List<Exception>();
    DriveItem driveItem = null;

    // Upload the chunks sequentially; the final chunk response carries the created item.
    foreach (var request in chunkRequests)
    {
        var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
        if (result.UploadSucceeded)
        {
            driveItem = result.ItemResponse;
        }
    }

    // FIX: the original threw a bare `new Exception()` with no message, discarding the
    // per-chunk errors collected in trackedExceptions. Surface them for diagnosis.
    // (InvalidOperationException is still caught by callers catching Exception.)
    if (driveItem == null)
    {
        throw new InvalidOperationException(
            $"Chunked upload of '{filePath}' did not complete.",
            trackedExceptions.Count > 0 ? new AggregateException(trackedExceptions) : null);
    }

    return driveItem;
}
/// <summary>
/// Uploads all chunks for the given provider, reporting progress after each chunk, and
/// returns the resulting item as soon as the provider reports a completed upload.
/// When every chunk was sent without completion, refreshes the session state and throws.
/// </summary>
private async Task<Item> ChunkedUploadAsync(ChunkedUploadProvider provider, IProgress<ProgressValue> progress)
{
    var buffer = new byte[MaxChunkSize];
    var chunkExceptions = new List<Exception>();
    var chunks = provider.GetUploadChunkRequests();

    var transferred = 0;
    var total = chunks.Sum(c => c.RangeLength);
    progress?.Report(new ProgressValue(transferred, total));

    foreach (var chunk in chunks)
    {
        // Each chunk request is wrapped in the shared retry policy.
        var response = await retryPolicy.ExecuteAsync(
            () => provider.GetChunkRequestResponseAsync(chunk, buffer, chunkExceptions));

        transferred += chunk.RangeLength;
        progress?.Report(new ProgressValue(transferred, total));

        if (response.UploadSucceeded)
        {
            return response.ItemResponse;
        }
    }

    // No chunk reported completion: refresh the session status, then give up.
    await retryPolicy.ExecuteAsync(() => provider.UpdateSessionStatusAsync());
    throw new TaskCanceledException(Properties.Resources.RetriesExhausted, new AggregateException(chunkExceptions));
}
/// <summary>
/// Writes <paramref name="contents"/> to <paramref name="path"/> using a chunked upload
/// session (SDK default chunk size, since no size is passed to the provider).
/// </summary>
/// <param name="path">Adapter-relative file path; the root path is prepended before upload.</param>
/// <param name="contents">Raw file bytes to upload.</param>
/// <param name="overwrite">When false, an existing file causes a <see cref="FileExistsException"/>.</param>
/// <param name="cancellationToken">Observed by the existence check and session creation.</param>
public override async Task WriteFileAsync(string path, byte[] contents, bool overwrite = false, CancellationToken cancellationToken = default)
{
    if (!overwrite && await FileExistsAsync(path, cancellationToken))
    {
        throw new FileExistsException(PrependRootPath(path), Prefix);
    }

    path = PrependRootPath(path);

    try
    {
        using var memoryStream = new MemoryStream(contents);
        var uploadSession = await client.Drives[driveId].Root.ItemWithPath(path).CreateUploadSession().Request()
            .PostAsync(cancellationToken);
        var provider = new ChunkedUploadProvider(uploadSession, client, memoryStream);
        var chunkRequests = provider.GetUploadChunkRequests();
        var exceptionTrackingList = new List<Exception>();

        foreach (var request in chunkRequests)
        {
            var result = await provider.GetChunkRequestResponseAsync(request, exceptionTrackingList);

            // NOTE(review): only a chunk that both fails AND records an exception aborts the
            // loop; a session that finishes without ever reporting UploadSucceeded is not
            // detected here — confirm whether that is intended.
            if (!result.UploadSucceeded && exceptionTrackingList.Any())
            {
                throw new AdapterRuntimeException(exceptionTrackingList.First());
            }
        }
    }
    catch (Exception exception)
    {
        // Presumably `Exception(...)` is a base-adapter helper that wraps the error into the
        // adapter's exception type — TODO(review): verify against the base class.
        throw Exception(exception);
    }
}
/// <summary>
/// Repeatedly attempts a chunked upload until a chunk reports completion or
/// <paramref name="retries"/> passes are exhausted, with capped exponential backoff
/// between passes.
/// </summary>
/// <param name="provider">Provider bound to the upload session and source stream.</param>
/// <param name="progress">Optional progress sink; reported after each chunk.</param>
/// <param name="retries">Number of additional full passes after the first.</param>
/// <exception cref="TaskCanceledException">
/// Retries exhausted; the inner <see cref="AggregateException"/> carries per-chunk errors.
/// </exception>
private async Task<Item> ChunkedUploadAsync(ChunkedUploadProvider provider, IProgress<ProgressValue> progress, int retries)
{
    var readBuffer = new byte[MAX_CHUNK_SIZE];
    var exceptions = new List<Exception>();

    do
    {
        var uploadChunkRequests = provider.GetUploadChunkRequests();
        var bytesTransferred = 0;
        var bytesTotal = uploadChunkRequests.Sum(u => u.RangeLength);
        progress?.Report(new ProgressValue(bytesTransferred, bytesTotal));

        foreach (var currentChunkRequest in uploadChunkRequests)
        {
            var uploadChunkResult = await provider.GetChunkRequestResponseAsync(currentChunkRequest, readBuffer, exceptions);
            progress?.Report(new ProgressValue(bytesTransferred += currentChunkRequest.RangeLength, bytesTotal));

            if (uploadChunkResult.UploadSucceeded)
            {
                return uploadChunkResult.ItemResponse;
            }
        }

        // Ask the service which ranges are still missing before the next pass.
        await provider.UpdateSessionStatusAsync();

        // FIX: the original computed `1 << (exceptions.Count - 1)`. When no exception had
        // been recorded (Count == 0), C# masks the shift count of -1 to 31, producing a
        // negative delay and an ArgumentOutOfRangeException from Task.Delay. Clamp the
        // exponent to [0, 6] instead (backoff capped at 64 seconds).
        var exponent = Math.Min(Math.Max(exceptions.Count - 1, 0), 6);
        await Task.Delay((1 << exponent) * 1000).ConfigureAwait(false);
    } while (--retries >= 0);

    throw new TaskCanceledException(Properties.Resources.RetriesExhausted, new AggregateException(exceptions));
}
/// <summary>
/// Uploads a file into the application's OneDrive AppRoot folder in chunks. When
/// <paramref name="isShared"/> is set, shares the uploaded file via SetPermissions and
/// returns that result; otherwise returns null.
/// </summary>
private async Task<string> UploadFile(string fileName, string fileExtension, Stream stream, string jsonFileId, bool isShared)
{
    DriveItem uploadedFile = null;

    var uploadSession = await _graphServiceClient.Me.Drive.Special.AppRoot
        .ItemWithPath(fileName + fileExtension).CreateUploadSession().Request().PostAsync();

    // Chunk size must be a multiple of 320 KiB; 4 * 320 KiB ≈ 1.25 MB.
    const int chunkSize = 320 * 1024 * 4;
    var provider = new ChunkedUploadProvider(uploadSession, _graphServiceClient, stream, chunkSize);

    var failures = new List<Exception>();
    foreach (var chunkRequest in provider.GetUploadChunkRequests())
    {
        var response = await provider.GetChunkRequestResponseAsync(chunkRequest, failures);
        if (response.UploadSucceeded)
        {
            uploadedFile = response.ItemResponse;
        }
    }

    if (!isShared)
    {
        return null;
    }

    return await SetPermissions(uploadedFile?.Name);
}
/// <summary>
/// Uploads the local Moviekus database file to OneDrive. Files above 4 MB are sent in
/// 1.25 MB chunks through an upload session; smaller files are uploaded in one PUT.
/// </summary>
/// <param name="retries">
/// FIX: new optional parameter bounding the retry of a failed chunked upload. The
/// original retried via unbounded recursion AND discarded the recursive result, logging
/// success and returning true even when every retry failed.
/// </param>
/// <returns>true on success, false when the upload (including retries) failed.</returns>
public static async Task<bool> UploadDbToOneDrive(int retries = 1)
{
    try
    {
        var remotePath = MoviekusFolderName + MoviekusDefines.DbFileName;
        var localPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), MoviekusDefines.DbFileName);

        LogManager.GetCurrentClassLogger().Info("Signing in to OneDrive...");
        await GraphClientManager.Ref.SignIn();

        LogManager.GetCurrentClassLogger().Info("Starting Db-Upload to OneDrive...");
        LogManager.GetCurrentClassLogger().Info($"Local path is: {localPath}");
        LogManager.GetCurrentClassLogger().Info($"Remote path is: {remotePath}");

        using (var stream = System.IO.File.OpenRead(localPath))
        {
            if (stream.Length > 4 * 1024 * 1024) // Above 4 MB, chunks must be used
            {
                var session = await GraphClientManager.Ref.GraphClient.Drive.Root.ItemWithPath(remotePath).CreateUploadSession().Request().PostAsync();
                var maxSizeChunk = 320 * 4 * 1024; // must be a multiple of 320 KiB
                var provider = new ChunkedUploadProvider(session, GraphClientManager.Ref.GraphClient, stream, maxSizeChunk);
                var exceptions = new List<Exception>();
                DriveItem itemResult = null;

                foreach (var request in provider.GetUploadChunkRequests())
                {
                    var result = await provider.GetChunkRequestResponseAsync(request, exceptions);
                    if (result.UploadSucceeded)
                    {
                        itemResult = result.ItemResponse;
                    }
                }

                // Check that the upload succeeded; bounded retry, result propagated.
                if (itemResult == null)
                {
                    if (retries <= 0)
                    {
                        LogManager.GetCurrentClassLogger().Error("Db-Upload to OneDrive failed; retries exhausted.");
                        return false;
                    }

                    return await UploadDbToOneDrive(retries - 1);
                }
            }
            else
            {
                await GraphClientManager.Ref.GraphClient.Drive.Root.ItemWithPath(remotePath).Content.Request().PutAsync<DriveItem>(stream);
            }
        }
    }
    catch (Exception ex)
    {
        LogManager.GetCurrentClassLogger().Error(ex);
        return false;
    }

    LogManager.GetCurrentClassLogger().Info("Finished Db-Upload to OneDrive.");
    return true;
}
/// <summary>
/// Exercises the chunked-upload flow: converts the embedded Hamilton image to bytes,
/// creates an upload session with a rename-on-conflict hint, and sends the stream in
/// 320 KB chunks. Fails the test on a Graph ServiceException.
/// </summary>
public async Task OneDriveUploadLargeFile()
{
    try
    {
        var converter = new System.Drawing.ImageConverter();
        var imageBytes = (byte[])converter.ConvertTo(Microsoft.Graph.Test.Properties.Resources.hamilton, typeof(byte[]));

        using (var imageStream = new System.IO.MemoryStream(imageBytes))
        {
            // Describe the file to upload. Pass into CreateUploadSession when the service
            // works as expected; the Name/Description/FileSystemInfo options stay disabled
            // because CreateUploadSession does not honor them (issue filed).
            var props = new DriveItemUploadableProperties
            {
                AdditionalData = new Dictionary<string, object>
                {
                    { "@microsoft.graph.conflictBehavior", "rename" }
                }
            };

            // POST /v1.0/drive/items/{item-id}:/_hamiltion.png:/microsoft.graph.createUploadSession
            var session = await graphClient.Drive.Items["01KGPRHTV6Y2GOVW7725BZO354PWSELRRZ"]
                .ItemWithPath("_hamilton.png").CreateUploadSession().Request().PostAsync();

            var chunkSize = 320 * 1024; // 320 KB; the SDK default is 5 MB.
            var provider = new ChunkedUploadProvider(session, graphClient, imageStream, chunkSize);

            var buffer = new byte[chunkSize];
            var errors = new List<Exception>();
            DriveItem uploaded = null;

            // Upload the chunks; the last successful response carries the created item.
            foreach (var chunkRequest in provider.GetUploadChunkRequests())
            {
                var response = await provider.GetChunkRequestResponseAsync(chunkRequest, buffer, errors);
                if (response.UploadSucceeded)
                {
                    uploaded = response.ItemResponse;
                }
            }

            if (uploaded == null)
            {
                // Retry the upload
                // ...
            }
        }
    }
    catch (Microsoft.Graph.ServiceException e)
    {
        Assert.Fail("Something happened, check out a trace. Error code: {0}", e.Error.Code);
    }
}
// Uploads a large file to the current user's root directory via a chunked upload
// session, collecting a ResultsItem with the created file's properties.
public async Task<List<ResultsItem>> UploadLargeFile()
{
    var items = new List<ResultsItem>();

    using (Stream fileStream = System.IO.File.OpenRead(HostingEnvironment.MapPath("/Content/LargeFileUploadResource.bmp")))
    {
        // Create the upload session. The access token is no longer required once the
        // session is established.
        // POST /v1.0/drive/root:/UploadLargeFile.bmp:/microsoft.graph.createUploadSession
        UploadSession uploadSession = await graphClient.Me.Drive.Root
            .ItemWithPath("LargeFileUploadResource.bmp")
            .CreateUploadSession().Request(requestOptions)
            .WithUserAccount(ClaimsPrincipal.Current.ToGraphUserAccount())
            .PostAsync();

        const int chunkSize = 320 * 1024; // 320 KB; the SDK default is 5 MB.
        var provider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, chunkSize);

        var buffer = new byte[chunkSize];
        var errors = new List<Exception>();
        DriveItem uploadedFile = null;

        foreach (UploadChunkRequest request in provider.GetUploadChunkRequests(requestOptions))
        {
            // Each chunk request must carry the user account for the auth provider.
            request.WithUserAccount(ClaimsPrincipal.Current.ToGraphUserAccount());

            UploadChunkResult result = await provider.GetChunkRequestResponseAsync(request, buffer, errors);
            if (result.UploadSucceeded)
            {
                uploadedFile = result.ItemResponse;

                // Surface the created file's properties to the caller.
                items.Add(new ResultsItem
                {
                    Display = uploadedFile.Name,
                    Id = uploadedFile.Id,
                    Properties = new Dictionary<string, object>
                    {
                        { Resource.Prop_Created, uploadedFile.CreatedDateTime.Value.ToLocalTime() },
                        { Resource.Prop_Url, uploadedFile.WebUrl },
                        { Resource.Prop_Id, uploadedFile.Id }
                    }
                });
            }
        }

        // Check that upload succeeded.
        if (uploadedFile == null)
        {
            // Retry the upload
            // ...
        }
    }

    return items;
}
//private void filesbtn_Click(object sender, RoutedEventArgs e) //{ // OpenFileDialog fileDialog = new OpenFileDialog(); // fileDialog.Filter = "*.epub | *.*"; // fileDialog.InitialDirectory = "C:\\Users\\Tom\\Desktop"; // fileDialog.Multiselect = true; // if (fileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK) // { // selectedFiles = fileDialog.FileNames; // } // if (selectedFiles != null && selectedFiles.Count() > 0) // { // List<CustomeName1> lstItems = new List<CustomeName1>(); // var fileInfo = new FileInfo(selectedFiles[0]); // dirName = $"Directory : {fileInfo.DirectoryName}"; // foreach (var file in selectedFiles) // { // lstItems.Add(new CustomeName1() { Name = (new FileInfo(file)).Name }); // } // //lstView1.ItemsSource = lstItems; // } //} //private async void btnUpload_Click(object sender, RoutedEventArgs e) //{ // if (selectedFiles == null || selectedFiles.Count() == 0) // { // System.Windows.MessageBox.Show("Please select atleast one file to upload!", "Stop!", MessageBoxButton.OK, MessageBoxImage.Warning); // return; // } // try // { // //spinner.Visibility = Visibility.Visible; // //spinner.Spin = true; // //btnUpload.IsEnabled = false; // //filesbtn.IsEnabled = false; // if (_graphClient == null) // { // _graphClient = GraphClientHelper.GetAuthenticatedClient(); // } // var count = 100 / selectedFiles.Count(); // if (System.IO.File.Exists("log.txt")) // { // System.IO.File.Delete("log.txt"); // } // foreach (var file in selectedFiles) // { // var fileName = Path.GetFileName(file); // try // { // if (file != null && file.Contains(".")) // { // await UploadFilesToOneDrive(fileName, file, _graphClient); // //progressBar.Value += count; // } // } // catch (Exception ex) // { // errorMessages.AppendLine($"File: {fileName} upload failed:"); // errorMessages.AppendLine($"Message :{ ex.Message }"); // errorMessages.AppendLine($"{ ex.StackTrace }"); // System.IO.File.AppendAllText("log.txt", errorMessages.ToString()); // 
/// <summary>
/// Uploads a file to OneDrive under /CodeUploads/{date}/. Files above 4 MB are sent in
/// 1.25 MB chunks via an upload session. (The original summary said "less than 4MB
/// only", contradicting the code; the small-file branch here is entirely commented out.)
/// </summary>
/// <param name="fileName">Name of the file as it should appear in OneDrive.</param>
/// <param name="filePath">Local path of the file to read.</param>
/// <param name="graphClient">Authenticated Graph client.</param>
/// <param name="retriesRemaining">
/// FIX: new optional parameter bounding the retry; the original recursed without limit
/// on failure and discarded the result. The pointless `catch { throw; }` was removed.
/// </param>
private static async Task UploadFilesToOneDrive(string fileName, string filePath, GraphServiceClient graphClient, int retriesRemaining = 1)
{
    var uploadPath = $"/CodeUploads/{DateTime.Now.ToString("ddMMyyyy")}/" + Uri.EscapeUriString(fileName);

    using (var stream = new FileStream(filePath, FileMode.Open))
    {
        var fileSize = ByteSize.FromBytes(stream.Length);

        if (fileSize.MegaBytes > 4)
        {
            var session = await graphClient.Drive.Root.ItemWithPath(uploadPath).CreateUploadSession().Request().PostAsync();
            var maxSizeChunk = 320 * 4 * 1024; // must be a multiple of 320 KiB
            var provider = new ChunkedUploadProvider(session, graphClient, stream, maxSizeChunk);
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];
            DriveItem itemResult = null;

            // Upload the chunks; the final successful response carries the created item.
            foreach (var request in provider.GetUploadChunkRequests())
            {
                var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                if (result.UploadSucceeded)
                {
                    itemResult = result.ItemResponse;
                }
            }

            // FIX: bounded retry instead of unbounded recursion with a discarded result.
            if (itemResult == null)
            {
                if (retriesRemaining <= 0)
                {
                    throw new AggregateException($"Chunked upload of '{fileName}' failed.", exceptions);
                }

                await UploadFilesToOneDrive(fileName, filePath, graphClient, retriesRemaining - 1);
            }
        }
        else
        {
            // NOTE(review): the small-file path is entirely disabled in this version —
            // files of 4 MB or less are currently NOT uploaded. Confirm whether intended.
            //await graphClient.Drive.Root.ItemWithPath(uploadPath).Content.Request().PutAsync<DriveItem>(stream);
            //await graphClient.Groups["36c450de-af05-4dd1-bed8-9c7b6d7ca3e3"].Drive.Root.ItemWithPath(uploadPath).Content.Request().PutAsync<DriveItem>(stream);
        }
    }
}
/// <summary>
/// Exercises the chunked-upload flow against a fixed drive item: opens the embedded
/// Hamilton image as a stream and sends it in 320 KB chunks through an upload session.
/// Fails the test on a Graph ServiceException.
/// </summary>
public async Task OneDriveUploadLargeFile()
{
    try
    {
        using (Stream stream = ResourceHelper.GetResourceAsStream(ResourceHelper.Hamilton))
        {
            // The DriveItemUploadableProperties descriptor stays disabled, as in the
            // original: CreateUploadSession does not honor those options.
            // POST /v1.0/drive/items/{item-id}:/_hamiltion.png:/microsoft.graph.createUploadSession
            var session = await graphClient.Drive.Items["01KGPRHTV6Y2GOVW7725BZO354PWSELRRZ"]
                .ItemWithPath("_hamilton.png").CreateUploadSession().Request().PostAsync();

            var chunkSize = 320 * 1024; // 320 KB; the SDK default is 5 MB.
            var provider = new ChunkedUploadProvider(session, graphClient, stream, chunkSize);

            var buffer = new byte[chunkSize];
            var errors = new List<Exception>();
            DriveItem uploaded = null;

            // Upload the chunks; the last successful response carries the created item.
            foreach (var chunkRequest in provider.GetUploadChunkRequests())
            {
                var response = await provider.GetChunkRequestResponseAsync(chunkRequest, buffer, errors);
                if (response.UploadSucceeded)
                {
                    uploaded = response.ItemResponse;
                }
            }

            if (uploaded == null)
            {
                // Retry the upload
                // ...
            }
        }
    }
    catch (Microsoft.Graph.ServiceException e)
    {
        Assert.True(false, "Something happened, check out a trace. Error code: " + e.Error.Code);
    }
}
/// <summary>
/// Uploads a file to a SharePoint site's default drive. Small files use a single PUT;
/// large files go through a 5 MB-chunk upload session.
/// </summary>
/// <param name="fileStream">Content to upload.</param>
/// <param name="fileName">Drive-relative destination name.</param>
/// <param name="isLargeFile">True to force the chunked upload-session path.</param>
/// <param name="siteUrl">Server-relative site URL; defaults to "/sites/web01" when blank.</param>
/// <returns>The uploaded item, or null when no session was created or no chunk completed.</returns>
public static async Task<DriveItem> UploadFileAsync(Stream fileStream, string fileName, bool isLargeFile, string siteUrl)
{
    var graphClient = GetAuthenticatedClient();
    DriveItem uploadedFile = null;
    siteUrl = string.IsNullOrEmpty(siteUrl.Trim()) ? "/sites/web01" : siteUrl;

    // FIX: the original wrapped this in `catch (Exception ex) { throw ex; }`, which
    // resets the stack trace and adds nothing; the no-op handler was removed entirely
    // (equivalent to the correct `throw;`), so callers now see the original trace.
    // `hostname` comes from enclosing scope (e.g. "m365x130314.sharepoint.com").
    var site = await graphClient.Sites.GetByPath(siteUrl, hostname).Request().GetAsync();

    if (!isLargeFile)
    {
        uploadedFile = await graphClient.Sites[site.Id].Drive.Root.ItemWithPath(fileName).Content.Request().PutAsync<DriveItem>(fileStream);
    }
    else
    {
        UploadSession uploadSession = await graphClient.Sites[site.Id].Drive.Root.ItemWithPath(fileName).CreateUploadSession().Request().PostAsync();

        if (uploadSession != null)
        {
            // Chunk size must be a multiple of 320 KiB; 16 * 320 KiB = 5 MB.
            int maxSizeChunk = (320 * 1024) * 16;
            ChunkedUploadProvider uploadProvider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, maxSizeChunk);
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];

            foreach (var request in uploadProvider.GetUploadChunkRequests())
            {
                var result = await uploadProvider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                if (result.UploadSucceeded)
                {
                    uploadedFile = result.ItemResponse;
                }
            }
        }
    }

    return uploadedFile;
}
/// <summary>
/// Uploads a local file into a newly created OneDrive folder via a chunked upload
/// session and returns the resulting drive item.
/// </summary>
/// <param name="filePath">Local path of the file to upload.</param>
/// <param name="fileName">Destination file name inside the created folder.</param>
/// <param name="accessToken">NOTE(review): unused — a fresh token is fetched via getToken(); confirm whether this parameter can be dropped.</param>
/// <exception cref="ApplicationException">No chunk ever reported a completed upload.</exception>
private async Task<Microsoft.Graph.DriveItem> doUpload(string filePath, string fileName, string accessToken)
{
    string token = await getToken();
    var graphServiceClient = getClient(token);

    using (var file = System.IO.File.OpenRead(filePath))
    {
        // Copy the file into memory, then hand it to autoOpen.
        // TODO(review): confirm autoOpen rewinds the stream (Position = 0); after CopyTo
        // the position is at the end, and the provider needs to read from the start.
        MemoryStream stream = new MemoryStream();
        file.CopyTo(stream);
        autoOpen(stream);

        // Create the destination folder, then open an upload session inside it.
        var documentFolder = await ODataHelper.PostFolder<OneDriveItem>(GraphApiHelper.GetOneDriveChildrenUrl(), token);
        var uploadSession = await graphServiceClient.Drives[documentFolder.ParentReference.DriveId].Items[documentFolder.Id].ItemWithPath(fileName).CreateUploadSession().Request().PostAsync();

        // Append a $select so responses include the fields needed later.
        // NOTE(review): `ul` itself is never read; the side effect is the += on UploadUrl.
        string ul = uploadSession.UploadUrl += "&$select=Id,ParentReference,WebUrl,WebDavUrl";

        // 3200 KB chunks; the chunk size must be a multiple of 320 KiB (SDK default is 5 MB).
        var maxChunkSize = (320 * 1024) * 10;
        var provider = new ChunkedUploadProvider(uploadSession, graphServiceClient, stream, maxChunkSize);

        // Setup the chunk request necessities
        var chunkRequests = provider.GetUploadChunkRequests();
        var readBuffer = new byte[maxChunkSize];
        var trackedExceptions = new List<Exception>();
        DriveItem itemResult = null;

        // Upload the chunks; the last successful response carries the created item.
        foreach (var request in chunkRequests)
        {
            var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
            if (result.UploadSucceeded)
            {
                itemResult = result.ItemResponse;
            }
        }

        // Check that upload succeeded
        if (itemResult != null)
        {
            return(itemResult);
        }
    }

    throw new ApplicationException("Upload failed.");
}
/// <summary>
/// Uploads a local file to the fixed yearbook drive under the task's folder, in 640 KB
/// chunks, logging each chunk and reporting fractional progress.
/// </summary>
/// <param name="token">NOTE(review): currently unobserved — cancellation does not abort the upload.</param>
/// <exception cref="ApplicationException">Client not initialized, or the upload never completed.</exception>
public static async Task<DriveItem> UploadToOneDrive(UploadTask task, string localFileName, CancellationToken token, IProgress<double> progress, TextBoxWriter logs)
{
    if (MainWindow.Client == null)
    {
        throw new ApplicationException("OneDrive client null!");
    }

    var name = Path.GetFileName(localFileName);
    var targetPath = "YEARBOOK 2019/_Uploads/" + task.TaskName + "/" + name;

    var sesh = await MainWindow.Client.Drives["b!ciVzyDuG1kGQJn8UlMdOl6fArqsytz1JhhwdkjwpvaP7ZUv4lIZKSL4QhaM-19bz"]
        .Root
        .ItemWithPath(targetPath)
        .CreateUploadSession(new DriveItemUploadableProperties() { })
        .Request().PostAsync();

    logs.WriteLine($"Started new sesh; timeout {sesh.ExpirationDateTime}");

    using (var stream = new FileStream(localFileName, FileMode.Open))
    {
        var maxChunkSize = 320 * 1024 * 2; // must be a multiple of 320 KiB
        var provider = new ChunkedUploadProvider(sesh, MainWindow.Client, stream, maxChunkSize);
        var readBuffer = new byte[maxChunkSize];
        var trackedExceptions = new List<Exception>();
        DriveItem item = null;
        int index = 0;
        var uploadChunkRequests = provider.GetUploadChunkRequests().ToList();

        foreach (var request in uploadChunkRequests)
        {
            logs.WriteLine($"Uploading chunk {index} of {uploadChunkRequests.Count} (timeout {provider.Session.ExpirationDateTime})");

            var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
            if (result.UploadSucceeded && result.ItemResponse != null)
            {
                item = result.ItemResponse;
            }

            index += 1;
            progress.Report((double)index / uploadChunkRequests.Count);
        }

        // FIX: the original dereferenced item.Id unconditionally, throwing a
        // NullReferenceException when no chunk ever reported a completed upload.
        if (item == null)
        {
            throw new ApplicationException(
                "OneDrive upload did not complete.",
                trackedExceptions.Count > 0 ? new AggregateException(trackedExceptions) : null);
        }

        logs.WriteLine("OneDrive upload completed; new item ID " + item.Id);
        return item;
    }
}
/// <summary>
/// Saves a mail attachment stream to the root of the user's OneDrive. Streams under
/// 4 MB are uploaded in a single PUT; larger streams use a chunked upload session.
/// Returns the item's web URL, or null when the chunked upload fails or never completes.
/// </summary>
private async Task<string> uploadMail2OD(string accessToken, Stream stream, string filename)
{
    var graphClient = new GraphServiceClient(new DelegateAuthenticationProvider(
        async (requestMessage) =>
        {
            requestMessage.Headers.Authorization = new AuthenticationHeaderValue("bearer", accessToken);
        }));

    if (stream.Length < (4 * 1024 * 1024))
    {
        // Small enough for a single direct upload.
        var uploadResult = await graphClient.Me
            .Drive.Root
            .ItemWithPath(filename)
            .Content.Request()
            .PutAsync<DriveItem>(stream);

        return uploadResult.WebUrl;
    }

    try
    {
        // This path supports files even greater than 4 MB.
        DriveItem item = null;
        var session = await graphClient.Me.Drive.Root
            .ItemWithPath(filename).CreateUploadSession().Request().PostAsync();

        var chunkSize = 320 * 4 * 1024; // must be a multiple of 320 KiB
        var provider = new ChunkedUploadProvider(session, graphClient, stream, chunkSize);
        var failures = new List<Exception>();

        // Upload the chunks; success on the last one yields the created item.
        foreach (UploadChunkRequest chunkRequest in provider.GetUploadChunkRequests())
        {
            var response = await provider.GetChunkRequestResponseAsync(chunkRequest, failures);
            if (response.UploadSucceeded)
            {
                item = response.ItemResponse;
            }
        }

        return item?.WebUrl;
    }
    catch (ServiceException)
    {
        return null;
    }
}
/// <summary>
/// Uploads the onboarding contract template to /Contratos/ under a generated name and
/// records the outcome in oAuth.Status (200/OK on success, 406 with the error message on failure).
/// </summary>
public async Task<OAuthUserModel> UploadFileOneDrive(OAuthUserModel oAuth, string emailOrObjectId, string fileNameOrObjectId)
{
    try
    {
        string _newFileName = $"Prueba Onboarding-{Guid.NewGuid()}.docx";
        // Destination path (with file name) inside the drive.
        var item = $"/Contratos/" + _newFileName;
        var uploadSession = await GetUploadSession(oAuth, item, emailOrObjectId);

        var maxChunkSize = 320 * 1024; // 320 KB; must be a multiple of 320 KiB (SDK default is 5 MB).
        var provider = new ChunkedUploadProvider(uploadSession, oAuth.GraphService, oAuth.TemplateStream, maxChunkSize);

        var chunkRequests = provider.GetUploadChunkRequests();
        var readBuffer = new byte[maxChunkSize];
        var trackedExceptions = new List<Exception>();
        DriveItem itemResult = null;

        // Upload the chunks; the final successful response carries the created item.
        foreach (var request in chunkRequests)
        {
            var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
            if (result.UploadSucceeded)
            {
                itemResult = result.ItemResponse;
            }
        }

        // FIX: the original reported 200/OK even when no chunk ever completed the upload
        // (itemResult was assigned but never checked). Throwing routes the silent failure
        // through the existing error path below.
        if (itemResult == null)
        {
            throw new InvalidOperationException(
                $"Chunked upload of '{_newFileName}' did not complete.",
                trackedExceptions.Count > 0 ? new AggregateException(trackedExceptions) : null);
        }

        oAuth.Status.Code = 200;
        oAuth.Status.Message = "OK";
        return oAuth;
    }
    catch (Exception e)
    {
        oAuth.Status.Code = 406;
        oAuth.Status.Message = e.Message;
        return oAuth;
    }
}
/// <summary>
/// Synchronously uploads a (possibly large) file to a user's OneDrive via a chunked
/// upload session, writing progress to the console.
/// NOTE(review): blocks on async Graph calls via GetAwaiter().GetResult() — acceptable
/// for a console tool, but do not call from a UI/ASP.NET synchronization context.
/// </summary>
private static void UploadLarge(GraphServiceClient _graphServiceClient, string _fileToUpload, string _userID)
{
    DriveItem uploadedFile = null;

    // FIX: the original never disposed the FileStream, leaking the file handle.
    using (FileStream fileStream = new FileStream(_fileToUpload, FileMode.Open))
    {
        UploadSession uploadSession = _graphServiceClient.Users[_userID]
            .Drive.Root
            .ItemWithPath(_fileToUpload)
            .CreateUploadSession()
            .Request().PostAsync().GetAwaiter().GetResult();

        if (uploadSession == null)
        {
            return; // as in the original: no session, no output
        }

        // Chunk size must be divisible by 320KiB
        int maxSizeChunk = (320 * 1024) * 4;
        ChunkedUploadProvider uploadProvider = new ChunkedUploadProvider(uploadSession, _graphServiceClient, fileStream, maxSizeChunk);
        var exceptions = new List<Exception>();

        foreach (var request in uploadProvider.GetUploadChunkRequests())
        {
            var result = uploadProvider.GetChunkRequestResponseAsync(request, exceptions).GetAwaiter().GetResult();
            Console.WriteLine($"Chunk!");

            if (result.UploadSucceeded)
            {
                uploadedFile = result.ItemResponse;
                Console.WriteLine($"Finished!");
            }
        }

        if (uploadedFile != null)
        {
            Console.WriteLine($"Uploaded file {_fileToUpload} to {uploadedFile.WebUrl}.");
        }
        else
        {
            Console.WriteLine($"Failure uploading {_fileToUpload}");
        }
    }
}
/// <summary>
/// Take a file greater than 4MB and upload it to the service
/// </summary>
/// <param name="fileToUpload">The file that we want to upload</param>
/// <param name="uploadToSharePoint">Should we upload to SharePoint or OneDrive?</param>
/// <returns>The uploaded item, or null when no session was created or no chunk completed.</returns>
public async Task<DriveItem> UploadLargeFile(string fileToUpload, bool uploadToSharePoint)
{
    DriveItem uploadedFile = null;

    // FIX: the original never disposed the FileStream, leaking the file handle.
    using (FileStream fileStream = new FileStream(fileToUpload, FileMode.Open))
    {
        // Do we want OneDrive for Business/Consumer or do we want a SharePoint Site?
        UploadSession uploadSession = uploadToSharePoint
            ? await _graphClient.Sites["root"].Drive.Root.ItemWithPath(fileToUpload).CreateUploadSession().Request().PostAsync()
            : await _graphClient.Me.Drive.Root.ItemWithPath(fileToUpload).CreateUploadSession().Request().PostAsync();

        if (uploadSession != null)
        {
            // Chunk size must be divisible by 320KiB, our chunk size will be slightly more than 1MB
            int maxSizeChunk = (320 * 1024) * 4;
            ChunkedUploadProvider uploadProvider = new ChunkedUploadProvider(uploadSession, _graphClient, fileStream, maxSizeChunk);
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];

            foreach (var request in uploadProvider.GetUploadChunkRequests())
            {
                var result = await uploadProvider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                if (result.UploadSucceeded)
                {
                    uploadedFile = result.ItemResponse;
                }
            }
        }
    }

    return uploadedFile;
}
/// <summary>
/// Upload files larger than 4MB to OneDrive via a chunked upload session.
/// </summary>
/// <param name="accessToken">OneDrive access token.</param>
/// <param name="file">File stream object.</param>
/// <param name="fileName">File name.</param>
/// <param name="endpointBase">Base address.</param>
/// <exception cref="Exception">The upload never reported completion.</exception>
public static async Task UploadBigFileToOneDrive(string accessToken, Stream file, string fileName, string endpointBase)
{
    GraphServiceClient graphServiceClient = new GraphServiceClient(endpointBase, new DelegateAuthenticationProvider(
        async (requestMessage) =>
        {
            requestMessage.Headers.Authorization = new AuthenticationHeaderValue("bearer", accessToken);
        }));

    var uploadSession = await graphServiceClient.Me.Drive.Root.ItemWithPath(fileName).CreateUploadSession().Request().PostAsync();

    // 320 KB - must be a multiple of 320 KiB; 5MB is the SDK default.
    var maxChunkSize = 320 * 1024;
    var provider = new ChunkedUploadProvider(uploadSession, graphServiceClient, file, maxChunkSize);

    // Setup the chunk request necessities.
    var chunkRequests = provider.GetUploadChunkRequests();
    var readBuffer = new byte[maxChunkSize];
    var trackedExceptions = new List<Exception>();
    DriveItem itemResult = null;

    // Upload the chunks; the final successful response carries the created item.
    foreach (var request in chunkRequests)
    {
        var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
        if (result.UploadSucceeded)
        {
            itemResult = result.ItemResponse;
        }
    }

    if (itemResult == null)
    {
        // FIX: corrected the garbled message ("Upload to OneDirve Fail") and attached the
        // per-chunk errors so the failure can actually be diagnosed.
        throw new Exception(
            "Upload to OneDrive failed.",
            trackedExceptions.Count > 0 ? new AggregateException(trackedExceptions) : null);
    }
}
/// <summary>
/// Uploads the given stream through an existing upload session in 1.25 MB chunks.
/// Retries the whole pass once (while <paramref name="first"/> is true) before giving up.
/// NOTE(review): the retry reuses the same session and stream — confirm the provider
/// seeks the stream per chunk request, otherwise the second pass may read nothing.
/// </summary>
private static async Task<DriveItem> Upload(GraphServiceClient graphClient, UploadSession session, MemoryStream stream, string uploadPath, bool first = true)
{
    var chunkSize = 320 * 4 * 1024; // must be a multiple of 320 KiB
    var provider = new ChunkedUploadProvider(session, graphClient, stream, chunkSize);
    var failures = new List<Exception>();
    DriveItem uploaded = null;

    // Upload the chunks; the last successful response carries the created item.
    foreach (var chunkRequest in provider.GetUploadChunkRequests())
    {
        var response = await provider.GetChunkRequestResponseAsync(chunkRequest, failures);
        if (response.UploadSucceeded)
        {
            uploaded = response.ItemResponse;
        }
    }

    if (uploaded != null)
    {
        return uploaded;
    }

    // One retry, then fail hard.
    if (first)
    {
        return await Upload(graphClient, session, stream, uploadPath, false);
    }

    throw new Exception("Can't upload file into onedrive");
}
/// <summary>
/// Uploads a file to OneDrive under /CodeUploads/&lt;date&gt;/. Files larger than
/// 4 MB use a chunked upload session; smaller files use a single PUT request.
/// </summary>
/// <param name="fileName">Name of the file as it should appear in OneDrive.</param>
/// <param name="filePath">Local path of the file to upload.</param>
/// <param name="graphClient">The authenticated Graph client.</param>
/// <param name="tryCount">Attempts already made; bounds the retry recursion.</param>
private static async Task UploadFilesToOneDrive(string fileName, string filePath, GraphServiceClient graphClient, int tryCount = 0)
{
    var uploadPath = $"/CodeUploads/{DateTime.Now.ToString("ddMMyyyy")}/" + Uri.EscapeUriString(fileName);
    bool succeeded = true;

    using (var stream = new FileStream(filePath, FileMode.Open))
    {
        var fileSize = ByteSize.FromBytes(stream.Length);
        if (fileSize.MegaBytes > 4)
        {
            // Large file: create an upload session and push the content in chunks.
            var session = await graphClient.Drive.Root.ItemWithPath(uploadPath).CreateUploadSession().Request().PostAsync();

            // Chunk size must be a multiple of 320 KiB per the Graph upload contract.
            var maxSizeChunk = 320 * 4 * 1024;
            var provider = new ChunkedUploadProvider(session, graphClient, stream, maxSizeChunk);
            var chunkRequests = provider.GetUploadChunkRequests();
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];
            DriveItem itemResult = null;

            // Upload the chunks; only the final chunk reports UploadSucceeded.
            foreach (var request in chunkRequests)
            {
                var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                if (result.UploadSucceeded)
                {
                    itemResult = result.ItemResponse;
                }
            }

            succeeded = itemResult != null;
        }
        else
        {
            // Small file: single-request upload.
            await graphClient.Drive.Root.ItemWithPath(uploadPath).Content.Request().PutAsync<DriveItem>(stream);
        }
    }

    // Retry only after the FileStream is closed (re-opening the same file while it
    // is still held open would fail), and bound the recursion instead of retrying forever.
    if (!succeeded)
    {
        if (tryCount >= 3)
        {
            throw new Exception($"Uploading \"{filePath}\" to OneDrive failed repeatedly.");
        }

        await UploadFilesToOneDrive(fileName, filePath, graphClient, tryCount + 1);
    }
}
/// <summary>
/// Demo flow: uploads a local file to OneDrive in chunks, creates a sharing
/// link for it, and optionally emails that link via Microsoft Graph.
/// </summary>
static async Task GetDataAsync()
{
    Console.BackgroundColor = ConsoleColor.Red;

    // Path of the file to upload.
    string path = @"File location";
    byte[] data = System.IO.File.ReadAllBytes(path);
    Stream fileStream = new MemoryStream(data);

    PublicClientApplication clientApp = new PublicClientApplication(ConfigurationManager.AppSettings["ClientID"].ToString());
    GraphServiceClient graphClient = new GraphServiceClient(
        "https://graph.microsoft.com/v1.0/",
        new DelegateAuthenticationProvider(async (requestMessage) =>
        {
            requestMessage.Headers.Authorization = new AuthenticationHeaderValue("bearer", await GetTokenAsync(clientApp));
        }));

    #region smallfile
    // small file upload
    // var current = await graphClient.Me.Drive.Root.ItemWithPath("Team2.JPG").Content.Request().PutAsync<DriveItem>(fileStream);
    //var x = await graphClient.Me.Request().GetAsync();
    #endregion

    var uploadSession = await graphClient.Drive.Root.ItemWithPath("file name ").CreateUploadSession().Request().PostAsync();
    var maxChunkSize = 320 * 1024; // 320 KB - must be a multiple of 320 KiB. 5MB is the default.
    var provider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, maxChunkSize);

    // Setup the chunk request necessities
    var chunkRequests = provider.GetUploadChunkRequests();
    var readBuffer = new byte[maxChunkSize];
    var trackedExceptions = new List<Exception>();
    DriveItem itemResult = null;

    // Upload the chunks; only the final chunk reports UploadSucceeded.
    foreach (var request in chunkRequests)
    {
        var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
        if (result.UploadSucceeded)
        {
            itemResult = result.ItemResponse;
        }
    }

    // Guard against a failed upload before dereferencing the result (was an NRE).
    if (itemResult == null)
    {
        throw new Exception("Upload failed: no item was returned by the upload session.");
    }

    Console.WriteLine("file uploaded");
    var Id = itemResult.Id;
    Console.WriteLine($"ID is\t{Id}");

    // Await instead of blocking on .Result (deadlock risk inside async code).
    var permission = await graphClient.Me.Drive.Items[Id].CreateLink(/*"view", "anonymous"*/ "Permission").Request().PostAsync();
    var ShareId = "https://1drv.ms/" + permission.ShareId;
    Console.WriteLine($"Sharing Link \t{ShareId}");

    //var delete = graphClient.Me.Drive.Items[Id].Request().DeleteAsync();
    //Console.WriteLine("deleted");
    // / me / drive / root / children

    Console.WriteLine("enter Email Id");
    Console.ForegroundColor = ConsoleColor.DarkRed;
    var emailId = Console.ReadLine();
    if (emailId != "")
    {
        Console.WriteLine("Enter Subject");
        var subject = Console.ReadLine();
        Console.WriteLine("Enter Body");
        var mainBody = Console.ReadLine();
        // The sharing link is appended to the message body.
        var mergedBody = mainBody + "\n" + ShareId;

        // Prepare the recipient list (semicolon-separated addresses).
        string[] splitter = { ";" };
        var splitRecipientsList = emailId.Split(splitter, StringSplitOptions.RemoveEmptyEntries);
        List<Recipient> recipientList = new List<Recipient>();
        foreach (string recipient in splitRecipientsList)
        {
            recipientList.Add(new Recipient { EmailAddress = new EmailAddress { Address = recipient.Trim() } });
        }

        var email = new Message
        {
            Body = new ItemBody
            {
                Content = mergedBody,
                ContentType = BodyType.Html,
            },
            Subject = subject,
            ToRecipients = recipientList,
        };
        await graphClient.Me.SendMail(email, true).Request().PostAsync();
        Console.WriteLine("sent");
    }
    else
    {
        Console.WriteLine("invalid or no email id");
        Console.ReadKey();
    }
}
/// <summary>
/// Uploads a local attachment into a Teams channel's SharePoint drive and
/// returns the uploaded item's web URL. Large files (&gt; ~3.6 MB) go through a
/// chunked upload session with up to 5 retries; small files use a single PUT.
/// </summary>
/// <param name="attachmentPath">Local path of the attachment file.</param>
/// <param name="team">The target team (owns the group drive).</param>
/// <param name="channel">The target channel (its display name is the folder).</param>
/// <param name="importContext">Holds the authenticated Graph client.</param>
/// <param name="tryCount">Number of attempts already made; bounds the retry recursion.</param>
/// <returns>The WebUrl of the uploaded drive item.</returns>
public static async Task<string> UploadFile(string attachmentPath, Team team, Channel channel, ImportContext importContext, int tryCount = 0)
{
    var fileSizeBytes = new IO.FileInfo(attachmentPath).Length;
    // File name is taken from the parent directory name plus the original extension
    // — presumably attachments are stored one-per-folder; TODO confirm with caller.
    var fileName = IO.Path.GetFileName(IO.Path.GetDirectoryName(attachmentPath)) + IO.Path.GetExtension(attachmentPath);
    var uploadPath = channel.DisplayName + "/" + fileName;
    var itemRequest = importContext.GraphServiceClient.Groups[team.Id].Drive.Root.ItemWithPath(uploadPath);
    try
    {
        using (var stream = IO.File.OpenRead(attachmentPath))
        {
            // 4MB is the maximum for a simple upload, staying safe with 3.6.
            if (fileSizeBytes / 1024 / 1024 > 3.6)
            {
                var session = await itemRequest.CreateUploadSession().Request().PostAsync();
                // Chunk size must be a multiple of 320 KiB.
                var maxChunkSize = 320 * 4 * 1024;
                var provider = new ChunkedUploadProvider(session, importContext.GraphServiceClient, stream, maxChunkSize);
                var chunckRequests = provider.GetUploadChunkRequests();
                var exceptions = new List<Exception>();
                var readBuffer = new byte[maxChunkSize];
                DriveItem itemResult = null;
                // Only the final chunk reports UploadSucceeded.
                foreach (var request in chunckRequests)
                {
                    var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                    if (result.UploadSucceeded)
                    {
                        itemResult = result.ItemResponse;
                    }
                }
                // Retry if upload failed.
                if (itemResult == null)
                {
                    if (tryCount < 5)
                    {
                        TimestampedConsole.WriteLine($"Uploading the large attachment \"{attachmentPath}\" failed. Retrying.");
                        return (await UploadFile(attachmentPath, team, channel, importContext, ++tryCount));
                    }
                    else
                    {
                        throw new Exception($"The attachment \"{attachmentPath}\" couldn't be uploaded into the \"{channel.DisplayName}\" channel (\"{team.DisplayName}\" team) because upload failed repeatedly. You can try again as this might be just a temporary error. If the issue isn't resolved then delete the file so it's not uploaded.");
                    }
                }
            }
            else
            {
                // Small file: single-request upload.
                await itemRequest.Content.Request().PutAsync<DriveItem>(stream);
            }
            // Re-fetch the item to get its final WebUrl regardless of upload path taken.
            return ((await itemRequest.Request().GetAsync()).WebUrl);
        }
    }
    catch (ServiceException ex) when (ex.Message.Contains("Unable to provision resource."))
    {
        // The backing SharePoint site is provisioned lazily after team creation;
        // wait and restart the attempt counter for a fresh upload.
        var waitMinutes = 5;
        TimestampedConsole.WriteLine($"The team's SharePoint site is not yet set up (you can check this under the channel's Files tab) so attachments can't be uploaded. Waiting {waitMinutes} minutes.");
        await Task.Delay(waitMinutes * 60000);
        TimestampedConsole.WriteLine($"Retrying upload.");
        return (await UploadFile(attachmentPath, team, channel, importContext, 0));
    }
}
/// <summary>
/// Test helper: drives ChunkedUploadProvider.GetChunkRequestResponseAsync against
/// a mocked chunk request and returns its result. The mock can be configured to
/// fail once (then succeed), fail always, or always succeed.
/// </summary>
/// <param name="serviceException">Exception the mocked PutAsync should throw, or null for success.</param>
/// <param name="failsOnce">When true (and an exception is given), the first call throws and the second succeeds.</param>
/// <param name="verifyTrackedExceptions">When true, asserts the thrown exception was recorded in the tracked list.</param>
/// <returns>The provider's <see cref="UploadChunkResult"/>.</returns>
private UploadChunkResult SetupGetChunkResponseTest(ServiceException serviceException = null, bool failsOnce = true, bool verifyTrackedExceptions = true)
{
    var chunkSize = 320 * 1024;
    var bytesToUpload = new byte[] { 4, 8, 15, 16 };
    var trackedExceptions = new List<Exception>();
    // The provider derives its chunk requests from the session's expected ranges.
    this.uploadSession.Object.NextExpectedRanges = new[] { "0-" };
    var stream = new MemoryStream(bytesToUpload.Length);
    stream.Write(bytesToUpload, 0, bytesToUpload.Length);
    stream.Seek(0, SeekOrigin.Begin);
    var provider = new ChunkedUploadProvider(
        this.uploadSession.Object,
        this.client.Object,
        stream,
        chunkSize);
    // Mock a single chunk request covering the whole payload (range 0..len-1).
    var mockRequest = new Mock<UploadChunkRequest>(
        this.uploadSession.Object.UploadUrl,
        this.client.Object,
        null,
        0,
        bytesToUpload.Length - 1,
        bytesToUpload.Length);
    if (serviceException != null && failsOnce)
    {
        // First PutAsync throws, second succeeds — exercises the retry path.
        mockRequest.SetupSequence(r => r.PutAsync(
            It.IsAny<Stream>(),
            It.IsAny<CancellationToken>()))
            .Throws(serviceException)
            .Returns(Task.FromResult(new UploadChunkResult() { ItemResponse = new Item() }));
    }
    else if (serviceException != null)
    {
        // Every PutAsync throws — exercises the permanent-failure path.
        mockRequest.Setup(r => r.PutAsync(
            It.IsAny<Stream>(),
            It.IsAny<CancellationToken>()))
            .Throws(serviceException);
    }
    else
    {
        // Happy path: PutAsync always succeeds.
        mockRequest.Setup(r => r.PutAsync(
            It.IsAny<Stream>(),
            It.IsAny<CancellationToken>()))
            .Returns(Task.FromResult(new UploadChunkResult { ItemResponse = new Item() }));
    }
    var task = provider.GetChunkRequestResponseAsync(mockRequest.Object, bytesToUpload, trackedExceptions);
    try
    {
        task.Wait();
    }
    catch (AggregateException exception)
    {
        // Unwrap so callers can assert on the original exception type.
        throw exception.InnerException;
    }
    if (verifyTrackedExceptions)
    {
        Assert.IsTrue(trackedExceptions.Contains(serviceException), "Expected ServiceException in TrackedException list");
    }
    return task.Result;
}
/// <summary>
/// Ensures every pending file record for the given server exists in the target
/// SharePoint site or OneDrive, uploading missing files with a chunked upload
/// session and persisting the per-file result after each iteration.
/// </summary>
/// <param name="recServer">Server record; spSiteType selects SharePoint vs. OneDrive.</param>
public async Task CreateFilesAsync(clsServer recServer)
{
    // Resolve the upload target: a SharePoint site id or a OneDrive user.
    try
    {
        if (recServer.spSiteType == "SharePoint")
        {
            var site = await clsContent.graphClient.Sites.GetByPath(recServer.spSite, "advnygaard.sharepoint.com").Request().GetAsync();
            m_targetSiteId = site.Id;
        }
        else
        {
            m_targetUser = recServer.spSite;
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        clsContent.logger.Error(e.Message);
    }

    // Files for this server that are not yet known to exist remotely.
    var qryfiles = clsContent.m_db.tblPath.Local.Where(f => f.serverPathType == "File").Where(f => f.spSite == recServer.spSite).Where(f => f.spPathExists != true).OrderBy(f => f.serverPath);
    m_totfiles = qryfiles.Count();
    m_ifile = 0;

    foreach (var rec in qryfiles)
    {
        m_ifile++;

        // Await the token renewal instead of blocking with .Wait(): blocking
        // inside an async method risks deadlocks and starves the thread pool.
        await clsContent.RenewAccessToken();
        var tt = (int)DateTime.Now.Subtract(clsContent.TokenAcquired).TotalSeconds;
        GetspPath(rec);
        try
        {
            // Probe for the item; a ServiceException (item not found) means it must be uploaded.
            if (recServer.spSiteType == "SharePoint")
            {
                var item = await clsContent.graphClient.Sites[m_targetSiteId].Drive.Root.ItemWithPath(rec.spPath).Request().GetAsync();
            }
            else // OneDrive
            {
                var item = await clsContent.graphClient.Users[m_targetUser].Drive.Root.ItemWithPath(rec.spPath).Request().GetAsync();
            }
            Console.WriteLine(string.Format("{0}-{1}/{2} Exists: {3} {4}", tt, m_ifile, m_totfiles, recServer.spSite, rec.spPath));
            rec.spPathExists = true;
            rec.build = false;
        }
        catch (ServiceException)
        {
            Console.WriteLine(string.Format("{0}-{1}/{2} {3} {4}", tt, m_ifile, m_totfiles, recServer.spSite, rec.spPath));
            UploadSession uploadSession = null;
            try
            {
                // Map the server path onto the locally mounted drive letter.
                var localPath = rec.serverPath.Replace(rec.serverDrev, rec.localDrevLetter + @":\");
                using (FileStream stream = System.IO.File.Open(localPath, FileMode.Open, FileAccess.Read))
                {
                    try
                    {
                        if (recServer.spSiteType == "SharePoint")
                        {
                            uploadSession = await clsContent.graphClient.Sites[m_targetSiteId].Drive.Root.ItemWithPath(rec.spPath).CreateUploadSession().Request().PostAsync();
                        }
                        else
                        {
                            uploadSession = await clsContent.graphClient.Users[m_targetUser].Drive.Root.ItemWithPath(rec.spPath).CreateUploadSession().Request().PostAsync();
                        }

                        var maxChunkSize = 32 * 320 * 1024; // 10 MB - must be a multiple of 320 KiB. 5MB is the default.
                        var provider = new ChunkedUploadProvider(uploadSession, clsContent.graphClient, stream, maxChunkSize);

                        // Setup the chunk request necessities.
                        var chunkRequests = provider.GetUploadChunkRequests();
                        var readBuffer = new byte[maxChunkSize];
                        var trackedExceptions = new List<Exception>();
                        DriveItem itemResult = null;

                        // Upload the chunks; only the final chunk reports UploadSucceeded.
                        foreach (var request in chunkRequests)
                        {
                            var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
                            if (result.UploadSucceeded)
                            {
                                itemResult = result.ItemResponse;
                                rec.spPathExists = true;
                                rec.build = false;
                                rec.error = null;
                            }
                        }

                        // Check that the upload actually completed.
                        if (itemResult == null)
                        {
                            Console.WriteLine("Upload failed: " + rec.spPath);
                            clsContent.logger.Error("Upload failed: " + rec.spPath);
                            rec.spPathExists = false;
                            rec.error = "Upload failed";
                        }
                    }
                    catch (ServiceException ex)
                    {
                        Console.WriteLine(ex.Message);
                        clsContent.logger.Error(rec.spPath);
                        clsContent.logger.Error(ex.Message);
                        rec.spPathExists = false;
                        rec.error = ex.Message;
                    }
                }
            }
            catch (Exception ez)
            {
                // Covers local-file errors (missing file, access denied) as well.
                Console.WriteLine(ez.Message);
                clsContent.logger.Error(rec.spPath);
                clsContent.logger.Error(ez.Message);
                rec.spPathExists = false;
                rec.error = ez.Message;
            }
        }
        // Persist the outcome for this record before moving on.
        clsContent.m_db.SaveChanges();
    }
}
/// <summary>
/// Copies a local file into the per-user upload folder, opens it, then uploads
/// it to the user's OneDrive document folder via a chunked upload session.
/// Deletes the upload session on error; throws if no item was returned.
/// </summary>
/// <param name="filePath">Local path of the source file.</param>
/// <param name="fileName">Name to give the file in OneDrive.</param>
/// <param name="token">Access token used to build the Graph client.</param>
/// <returns>The uploaded <see cref="Microsoft.Graph.DriveItem"/>.</returns>
private async Task<Microsoft.Graph.DriveItem> doUpload(string filePath, string fileName, string token)
{
    var graphServiceClient = getClient(token);
    string userFolder = Path.Combine("ForUser", _database.CurrentUser.Uri.ToString());
    string fullUserFolder = Path.Combine(_uploadBasePath, userFolder);
    //string fileName = $"{Guid.NewGuid()}.docx";
    if (!System.IO.Directory.Exists(fullUserFolder))
    {
        System.IO.Directory.CreateDirectory(fullUserFolder);
    }

    // Work on a writable local copy so the original file is never touched.
    string tempPath = Path.Combine(fullUserFolder, Path.GetFileName(filePath));
    System.IO.File.Copy(filePath, tempPath, true);
    FileInfo fileInfo = new FileInfo(tempPath);
    fileInfo.IsReadOnly = false;
    autoOpen(tempPath);

    using (var file = System.IO.File.OpenRead(tempPath))
    {
        var documentFolder = await ODataHelper.PostFolder<OneDriveItem>(GraphApiHelper.GetOneDriveChildrenUrl(), token);
        var uploadSession = await graphServiceClient.Drives[documentFolder.ParentReference.DriveId].Items[documentFolder.Id].ItemWithPath(fileName).CreateUploadSession().Request().PostAsync();

        // Ask the service to return only the fields we need on completion.
        // (Was assigned to an unused local 'ul'; only the mutation matters.)
        uploadSession.UploadUrl += "&$select=Id,ParentReference,WebUrl,WebDavUrl";

        var maxChunkSize = (320 * 1024) * 10; // ~3.1 MB - must be a multiple of 320 KiB. 5MB is the default.
        var provider = new ChunkedUploadProvider(uploadSession, graphServiceClient, file, maxChunkSize);
        try
        {
            // Setup the chunk request necessities.
            var chunkRequests = provider.GetUploadChunkRequests();
            var readBuffer = new byte[maxChunkSize];
            var trackedExceptions = new List<Exception>();
            DriveItem itemResult = null;

            // Upload the chunks; only the final chunk reports UploadSucceeded.
            foreach (var request in chunkRequests)
            {
                var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
                if (result.UploadSucceeded)
                {
                    itemResult = result.ItemResponse;
                }
            }

            // NOTE(review): on success the temp copy is intentionally left on disk
            // (autoOpen may still be using it) — confirm whether it should be cleaned up.
            if (itemResult != null)
            {
                return itemResult;
            }
        }
        catch
        {
            // Abandon the server-side session before propagating the failure.
            await provider.DeleteSession();
            throw;
        }
    }

    // Upload did not complete: remove the temp copy and signal failure.
    System.IO.File.Delete(tempPath);
    throw new ApplicationException("Upload failed.");
}
/// <summary>
/// Checks whether a single source/target file pair already exists in the target
/// SharePoint site and uploads it via a chunked session if not, recording the
/// outcome as a new tblPath row.
/// NOTE(review): the "OLD" suffix suggests this is a superseded variant of
/// CreateFilesAsync — confirm before relying on or removing it.
/// </summary>
/// <param name="f">Key = local source path, Value = target SharePoint path.</param>
public async Task CreateFileAsyncOLD(KeyValuePair<string, string> f)
{
    var sPath = f.Key;
    var tPath = f.Value;
    // Seconds since the current access token was acquired (diagnostic output only).
    var tt = (int)DateTime.Now.Subtract(clsContent.TokenAcquired).TotalSeconds;
    tblPath rec = new tblPath()
    {
        serverDrev = m_sourceStartDir,
        serverPath = sPath,
        spSite = "/klient",
        spPath = tPath,
    };
    try
    {
        // Probe for the item; a ServiceException (item not found) means it must be uploaded.
        var item = await clsContent.graphClient.Sites[m_targetSiteId].Drive.Root.ItemWithPath(tPath).Request().GetAsync();
        Console.WriteLine(string.Format("{0}-{1}/{2} Exists: {3}", tt, m_ifile, m_totfiles, tPath));
        rec.spPathExists = true;
    }
    catch (ServiceException e)
    {
        Console.WriteLine(string.Format("{0}-{1}/{2} {3}", tt, m_ifile, m_totfiles, tPath));
        UploadSession uploadSession = null;
        try
        {
            using (FileStream stream = System.IO.File.Open(sPath, FileMode.Open, FileAccess.Read))
            {
                try
                {
                    uploadSession = await clsContent.graphClient.Sites[m_targetSiteId].Drive.Root.ItemWithPath(tPath).CreateUploadSession().Request().PostAsync();
                    var maxChunkSize = 32 * 320 * 1024; // 10 MB - Change this to your chunk size. 5MB is the default.
                    var provider = new ChunkedUploadProvider(uploadSession, clsContent.graphClient, stream, maxChunkSize);
                    // Setup the chunk request necessities
                    var chunkRequests = provider.GetUploadChunkRequests();
                    var readBuffer = new byte[maxChunkSize];
                    var trackedExceptions = new List<Exception>();
                    DriveItem itemResult = null;
                    //upload the chunks
                    foreach (var request in chunkRequests)
                    {
                        // Do your updates here: update progress bar, etc.
                        // ...
                        // Send chunk request; only the final chunk reports UploadSucceeded.
                        var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
                        if (result.UploadSucceeded)
                        {
                            itemResult = result.ItemResponse;
                            rec.spPathExists = true;
                        }
                    }
                    // Check that upload succeeded
                    if (itemResult == null)
                    {
                        Console.WriteLine("Upload failed: " + tPath);
                        clsContent.logger.Error("Upload failed: " + tPath);
                        rec.spPathExists = false;
                        rec.error = "Upload failed";
                    }
                }
                catch (ServiceException ex)
                {
                    // Upload-session or chunk failure: record it on the row.
                    Console.WriteLine(ex.Message);
                    clsContent.logger.Error(tPath);
                    clsContent.logger.Error(ex.Message);
                    rec.spPathExists = false;
                    rec.error = ex.Message;
                }
            }
        }
        catch (Exception ez)
        {
            // Covers local-file errors (missing file, access denied) as well.
            Console.WriteLine(ez.Message);
            clsContent.logger.Error(tPath);
            clsContent.logger.Error(ez.Message);
            rec.spPathExists = false;
            rec.error = ez.Message;
        }
    }
    // Persist the outcome for this file pair.
    clsContent.m_db.tblPath.Local.Add(rec);
    clsContent.m_db.SaveChanges();
}
/// <summary>
/// Uploads a single file to OneDrive in chunks, updating the progress form as
/// chunks complete. Retries the whole upload a bounded number of times on failure.
/// </summary>
/// <param name="itemPath">Local path of the file to upload.</param>
/// <param name="destinationPath">Destination path in OneDrive.</param>
/// <param name="uploadForm">Progress form; may be null when running headless.</param>
/// <param name="retryCount">Attempts already made; bounds the retry recursion
/// (the original recursed without limit, risking a stack overflow).</param>
/// <returns>The uploaded <see cref="Item"/>.</returns>
public async Task<Item> UploadItem(string itemPath, string destinationPath, UploadProgressForm uploadForm, int retryCount = 0)
{
    if (authenticator == null)
    {
        InitAuthenticator();
    }
    Item itemResult = null;
    try
    {
        using (var fileStream = FileToStreamHelper.GetFileStream(itemPath))
        {
            var myMaxChunkSize = 16 * 320 * 1024; // 5MiB
            var session = await client.Drive.Root.ItemWithPath(destinationPath).CreateSession().Request().PostAsync();
            var provider = new ChunkedUploadProvider(session, client, fileStream, (int)myMaxChunkSize);
            if (uploadForm != null && !uploadForm.Visible)
            {
                uploadForm.Show();
            }

            // Setup the chunk request necessities.
            var chunkRequests = provider.GetUploadChunkRequests();
            var readBuffer = new byte[(int)myMaxChunkSize];
            var trackedExceptions = new List<Exception>();
            UploadChunkResult result;
            int chunkCount = 0, totalChunks = chunkRequests.Count();

            // Upload the chunks; only the final chunk reports UploadSucceeded.
            for (var chunk = 0; chunk < chunkRequests.Count(); chunk++)
            {
                try
                {
                    result = await provider.GetChunkRequestResponseAsync(chunkRequests.ElementAt(chunk), readBuffer, trackedExceptions);
                    chunkCount++;
                    if (result.UploadSucceeded)
                    {
                        uploadedCompressedFiles++;
                        // Guard the form: it can legitimately be null (was an NRE risk).
                        if (uploadForm != null)
                        {
                            uploadForm.Invoke((MethodInvoker)delegate
                            {
                                uploadForm.uploadProgressBar.Value = (int)(((float)uploadedCompressedFiles / compressedFilesCount) * 100);
                                uploadForm.uploadValueLabel.Text = (int)(((float)uploadedCompressedFiles / compressedFilesCount) * 100) + "%";
                            });
                        }
                        itemResult = result.ItemResponse;
                    }
                }
                catch (Exception ex)
                {
                    // Best-effort per chunk: record the failure (was silently swallowed)
                    // and continue; the whole upload is retried below if it never succeeds.
                    trackedExceptions.Add(ex);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): deliberately best-effort — a failed attempt falls through
        // to the bounded retry below instead of crashing the caller.
    }

    // Check that upload succeeded; retry with a hard upper bound.
    if (itemResult == null)
    {
        if (retryCount >= 5)
        {
            throw new Exception($"Uploading \"{itemPath}\" to OneDrive failed repeatedly.");
        }
        itemResult = await UploadItem(itemPath, destinationPath, uploadForm, retryCount + 1);
    }
    return itemResult;
}
/// <summary>
/// Mirrors a local directory tree into OneDrive: recreates subdirectories,
/// uploads new/changed files via chunked upload sessions, and deletes remote
/// entries that no longer exist locally. Returns false on any fatal error
/// (or any failed upload when <paramref name="SkipFailed"/> is false).
/// Entries whose name starts with a space are skipped on purpose.
/// </summary>
/// <param name="LocalPath">Local directory to mirror.</param>
/// <param name="RemotePath">Remote path ("/" for the drive root).</param>
/// <param name="SkipFailed">When true, failed files are skipped instead of aborting.</param>
static bool RecurseUploadData(string LocalPath, string RemotePath, bool SkipFailed)
{
    if (TestLogin() == false) { return (false); }
    List<FileSystemInfos> RemoteFiles = ListFiles(RemotePath);
    if (RemoteFiles == null) { return (false); }
    StatNumDirProcessed++;

    // --- Pass 1: recreate each local subdirectory remotely and recurse into it. ---
    foreach (string LPath in System.IO.Directory.EnumerateDirectories(LocalPath, "*.*", System.IO.SearchOption.TopDirectoryOnly))
    {
        string RP = RemotePath;
        RP += "/";
        string PathOnly = LPath.Substring(LPath.LastIndexOf("\\") + 1);
        RP += PathOnly;
        if (PathOnly.StartsWith(" ") == true) { continue; }
        // Mark so the deletion pass below leaves this remote directory alone.
        MarkAsProcessed(RemoteFiles, PathOnly, true);
        //Create dir in AZ
        string RemotePathDotDot = RP.Substring(0, RP.LastIndexOf("/"));
        Drive drv = QuerySync<Drive>(graphClient.Drive.Request().GetAsync());
        DriveItem newcreateddir;
        DriveItem newfolder = new DriveItem() { Name = RP.Substring(RP.LastIndexOf("/") + 1), Folder = new Folder() };
        if (TestLogin() == false) { return (false); }
        // Root-level folders are added to Root.Children; nested ones via their parent path.
        if (string.IsNullOrWhiteSpace(RemotePathDotDot) == true || RemotePath == "/")
        {
            newcreateddir = QuerySync<DriveItem>(graphClient.Drive.Root.Children.Request().AddAsync(newfolder));
        }
        else
        {
            newcreateddir = QuerySync<DriveItem>(graphClient.Drive.Root.ItemWithPath(RemotePathDotDot).Children.Request().AddAsync(newfolder));
        }
        if (newcreateddir == null)
        {
            Console.WriteLine("Cannot create directory");
            return (false);
        }
        if (RecurseUploadData(LPath, RP, SkipFailed) == false) { return (false); }
    }

    // --- Pass 2: upload each local file that is new or changed. ---
    foreach (string Filename in System.IO.Directory.EnumerateFiles(LocalPath, "*.*", System.IO.SearchOption.TopDirectoryOnly))
    {
        if (System.IO.Path.GetFileName(Filename).StartsWith(" ") == true) { continue; }
        // '#' must be pre-escaped; EscapeUriString leaves it alone.
        string RemoteFullName = RemotePath.Replace("#", "%23") + "/" + Uri.EscapeUriString(System.IO.Path.GetFileName(Filename)).Replace("#", "%23");
        System.IO.FileInfo fi = new System.IO.FileInfo(Filename);
        DateTime DTCreated = fi.CreationTimeUtc;
        DateTime DTModified = fi.LastWriteTimeUtc;
        Int64 FSZ = fi.Length;
        // Trailing " ... " is overwritten in-place via \b backspaces by the status below.
        Console.Write(Filename + " -> " + RemoteFullName + " (" + NiceSize(FSZ) + ") ... ");
        if (FSZ == 0)
        {
            StatCopyNumSkipped++;
            Console.WriteLine("\b\b\b\b\bBlank");
            continue;
        }
        MarkAsProcessed(RemoteFiles, System.IO.Path.GetFileName(Filename), false);
        FileSystemInfos fsitest = GetFSI(RemoteFiles, System.IO.Path.GetFileName(Filename), false);
        if (fsitest != null)
        {
            // Remote copy exists: skip when timestamps and size match, otherwise
            // delete it so the upload below replaces it.
            if (DTLightTest(fsitest.Modified, DTModified) && DTLightTest(fsitest.Created, DTCreated) && fsitest.SZ == FSZ)
            {
                StatCopyNumSkipped++;
                StatCopySZSkipped += FSZ;
                Console.WriteLine("\b\b\b\b\bSkipped");
                continue;
            }
            else
            {
                if (TestLogin() == false) { return (false); }
                QuerySync(graphClient.Drive.Items[fsitest.OneDriveID].Request().DeleteAsync());
            }
        }
        try
        {
            using (System.IO.FileStream fss = System.IO.File.Open(Filename, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.Read))
            {
                // Preserve the local timestamps on the uploaded item.
                FileSystemInfo fsi = new Microsoft.Graph.FileSystemInfo();
                fsi.CreatedDateTime = new DateTimeOffset(DTCreated);
                fsi.LastModifiedDateTime = new DateTimeOffset(DTModified);
                DriveItemUploadableProperties uplprop = new DriveItemUploadableProperties();
                uplprop.FileSystemInfo = fsi;
                if (TestLogin() == false) { return (false); }
                UploadSession UploadSess = QuerySync<UploadSession>(graphClient.Drive.Root.ItemWithPath(RemoteFullName).CreateUploadSession(uplprop).Request().PostAsync());
                // Chunk size must be a multiple of 320 KiB.
                const int MaxChunk = 320 * 10 * 1024;
                ChunkedUploadProvider provider = new ChunkedUploadProvider(UploadSess, graphClient, fss, MaxChunk);
                IEnumerable<UploadChunkRequest> chunckRequests = provider.GetUploadChunkRequests();
                byte[] readBuffer = new byte[MaxChunk];
                List<Exception> exceptions = new List<Exception>();
                bool Res = false;
                foreach (UploadChunkRequest request in chunckRequests)
                {
                    // In-place percentage display over the previous 5 characters.
                    Int64 Perc = (Int64)(((decimal)request.RangeBegin / (decimal)request.TotalSessionLength) * 100m);
                    Console.Write("\b\b\b\b\b" + Perc.ToString().PadLeft(4) + "%");
                    if (TestLogin() == false) { return (false); }
                    // Only the final chunk reports UploadSucceeded.
                    UploadChunkResult result = QuerySync<UploadChunkResult>(provider.GetChunkRequestResponseAsync(request, readBuffer, exceptions));
                    if (result.UploadSucceeded) { Res = true; }
                }
                if (Res == false)
                {
                    Console.WriteLine("\b\b\b\b\bFAILED");
                    if (SkipFailed == false) { return (false); } else { continue; }
                }
            }
        }
        catch (Exception ee)
        {
            Debug.WriteLine(ee.ToString());
            Console.WriteLine("\b\b\b\b\bFAILED");
            StatCopyNumFailed++;
            StatCopySZFailed += FSZ;
            if (FailedDetails == true)
            {
                Console.WriteLine("---> " + ee.ToString());
                WriteEventLog("Copy failed:\nLocal" + Filename + "\nRemote: " + RemoteFullName + "\n\n" + ee.ToString(), EventLogEntryType.Error);
            }
            if (SkipFailed == false) { return (false); } else { continue; }
        }
        StatCopyNumSuccess++;
        StatCopySZSuccess += FSZ;
        Console.WriteLine("\b\b\b\b\bOK   ");
    }

    // --- Pass 3: delete remote entries that were not seen locally. ---
    foreach (FileSystemInfos fsis in RemoteFiles)
    {
        if (fsis.Processed == true) { continue; }
        string RemoteFullName = RemotePath + "/" + fsis.Name + (fsis.IsDir == true ? " (DIR)" : ""); //deco only
        Console.Write("Deleting " + RemoteFullName + " ... ");
        if (TestLogin() == false) { return (false); }
        QuerySync(graphClient.Drive.Items[fsis.OneDriveID].Request().DeleteAsync());
        StatNumDeleted++;
        Console.WriteLine("OK");
    }
    return (true);
}
/// <summary>
/// Uploads a file by splitting it in chunks.
/// </summary>
/// <param name="item">The <see cref="PstFile"/> to upload.</param>
/// <param name="conflictBehaviour">Indicates what to do in case the file already exists</param>
/// <param name="totalFiles">The total number of files to upload</param>
/// <param name="file">The current file number</param>
/// <param name="token">The <see cref="CancellationToken"/> used to check if the task should be cancelled.</param>
/// <param name="multipleParts">Indicates if the pst/ost file was zipped and therefore split</param>
/// <param name="totalParts">The total numbers of parts, if the file was split</param>
/// <param name="part">The current part, if the file was split</param>
/// <returns>The uploaded file as <see cref="DriveItem"/>; null if no chunk reported success.</returns>
private async Task<DriveItem> ChunkUpload(PstFile item, string conflictBehaviour, bool multipleParts, int totalFiles, int file, CancellationToken token, int totalParts = 1, int part = 1)
{
    // Setup to check upload speed
    lastAmountOfBytesSent = 0;

    // Create the stream with the file
    using (var stream = new FileStream(item.Path, FileMode.Open))
    {
        // Set the upload path
        var uploadPath = item.Destination + "/" + item.Name;

        // Initialize the chunks provider
        ChunkedUploadProvider provider = null;

        // Will store the uploaded item
        DriveItem itemResult = null;

        // Create upload session; conflictBehaviour is passed through Graph's
        // @microsoft.graph.conflictBehavior instance attribute.
        var uploadSession = await graphClient.Me.Drive.Root.ItemWithPath(uploadPath)
            .CreateUploadSession(new DriveItemUploadableProperties()
            {
                AdditionalData = new Dictionary<string, object>
                {
                    { "@microsoft.graph.conflictBehavior", conflictBehaviour }
                }
            })
            .Request()
            .PostAsync(token);

        // Get the chunks provider
        provider = new ChunkedUploadProvider(uploadSession, graphClient, stream, UploadChunkSize);

        // Setup the chunk request necessities
        var chunkRequests = provider.GetUploadChunkRequests();
        //var readBuffer = new byte[UploadChunkSize];

        // Initialize counters for progress
        var maximum = chunkRequests.Count();
        var i = 1;

        // Upload the chunks
        await Task.Run(async () =>
        {
            foreach (var request in chunkRequests)
            {
                // Delete the server-side session and throw cancellation if requested,
                // so an abandoned upload doesn't linger on the service.
                if (token.IsCancellationRequested)
                {
                    if (provider != null)
                    {
                        await provider.DeleteSession();
                    }
                    token.ThrowIfCancellationRequested();
                }

                // Send chunk request; only the final chunk reports UploadSucceeded.
                //var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, new List<Exception>());
                var result = await provider.GetChunkRequestResponseAsync(request, new List<Exception>());

                // Update the itemProgressBar and UI on the dispatcher thread.
                var currentValue = Math.Round((i / (double)maximum) * 100);
                await Dispatcher.InvokeAsync(() =>
                {
                    itemProgressBar.Value = currentValue;
                    itemProgressTextBlock.Text = multipleParts ? "Uploading file " + file + " of " + totalFiles + ": " + item.Name + " | part " + part + "/" + totalParts + "... (" + itemProgressBar.Value.ToString("0") + "%)" : "Uploading file " + file + " of " + totalFiles + ": " + item.Name + "... (" + itemProgressBar.Value.ToString("0") + "%)";
                });

                // Increment counter
                i++;
                if (result.UploadSucceeded)
                {
                    itemResult = result.ItemResponse;
                }
            }
        }, token);

        return (itemResult);
    }
}