/// <summary>
/// Writes <paramref name="contents"/> to <paramref name="path"/> on the drive using a
/// chunked upload session.
/// </summary>
/// <param name="path">Adapter-relative destination path; the root prefix is prepended below.</param>
/// <param name="contents">File bytes to upload (buffered in memory for the session).</param>
/// <param name="overwrite">When false, an existing file at <paramref name="path"/> raises <see cref="FileExistsException"/>.</param>
/// <param name="cancellationToken">Propagated to the existence check and session-creation calls.</param>
public override async Task WriteFileAsync(string path, byte[] contents, bool overwrite = false, CancellationToken cancellationToken = default)
{
    if (!overwrite && await FileExistsAsync(path, cancellationToken))
    {
        throw new FileExistsException(PrependRootPath(path), Prefix);
    }

    path = PrependRootPath(path);

    try
    {
        using var memoryStream = new MemoryStream(contents);
        var uploadSession = await client.Drives[driveId].Root.ItemWithPath(path).CreateUploadSession().Request()
            .PostAsync(cancellationToken);
        var provider = new ChunkedUploadProvider(uploadSession, client, memoryStream);
        var chunkRequests = provider.GetUploadChunkRequests();
        var exceptionTrackingList = new List<Exception>();

        foreach (var request in chunkRequests)
        {
            // Failed chunks accumulate into the tracking list; abort on the first one.
            var result = await provider.GetChunkRequestResponseAsync(request, exceptionTrackingList);

            if (!result.UploadSucceeded && exceptionTrackingList.Any())
            {
                throw new AdapterRuntimeException(exceptionTrackingList.First());
            }
        }
    }
    catch (Exception exception)
    {
        // NOTE(review): 'Exception(...)' is a method call (no 'new') — presumably a
        // base-class helper that wraps failures into an adapter exception; confirm.
        // Note it will also re-wrap the AdapterRuntimeException thrown above.
        throw Exception(exception);
    }
}
/// <summary>
/// Uploads a local file to "{path}/LargeFile.txt" on the root SharePoint site's drive
/// using a chunked upload session.
/// </summary>
/// <param name="sp">NOTE(review): unused in this method — confirm whether it can be dropped.</param>
/// <param name="path">Drive-relative folder under which LargeFile.txt is created.</param>
/// <returns>The uploaded item, or null when no chunk reported a completed upload
/// (chunk failures accumulate into 'exceptions' but are never surfaced).</returns>
private static async Task<DriveItem> UploadlargeFile(SharepointIds sp, string path)
{
    DriveItem uploadedFile = null;

    // NOTE(review): 'largeFilePath' is not declared here — presumably a class-level
    // field holding the local source path; verify it is set before this is called.
    using (FileStream fileStream = new FileStream(largeFilePath, FileMode.Open))
    {
        UploadSession uploadSession = await graphClient.Sites["root"]
            .Drive.Root.ItemWithPath($"{path}/LargeFile.txt")
            .CreateUploadSession().Request()
            .PostAsync();

        if (uploadSession != null)
        {
            // Chunk size must be divisible by 320KiB, our chunk size will be slightly more than 1MB
            int maxSizeChunk = (320 * 1024) * 4;
            ChunkedUploadProvider uploadProvider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, maxSizeChunk);
            var chunkRequests = uploadProvider.GetUploadChunkRequests();
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];

            foreach (var request in chunkRequests)
            {
                // The final successful chunk response carries the created DriveItem.
                var result = await uploadProvider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);

                if (result.UploadSucceeded)
                {
                    uploadedFile = result.ItemResponse;
                }
            }
        }
    }

    return (uploadedFile);
}
/// <summary>
/// Uploads <paramref name="stream"/> to <paramref name="filePath"/> via a chunked upload session.
/// </summary>
/// <param name="stream">Content to upload.</param>
/// <param name="properties">Uploadable properties passed to CreateUploadSession.</param>
/// <param name="filePath">Drive-relative destination path.</param>
/// <returns>The created <c>DriveItem</c> reported by the final chunk.</returns>
/// <exception cref="InvalidOperationException">No chunk reported a completed upload.</exception>
private async Task<DriveItem> UploadLargeFileAsync(Stream stream, DriveItemUploadableProperties properties, string filePath)
{
    var uploadSession = await this.GraphClient.Drive.Root.ItemWithPath(filePath).CreateUploadSession(properties).Request().PostAsync();

    var maxChunkSize = 1280 * 1024; // 1280 KB — must stay a multiple of 320 KiB; 5MB is the SDK default.
    var provider = new ChunkedUploadProvider(uploadSession, this.GraphClient, stream, maxChunkSize);
    var chunkRequests = provider.GetUploadChunkRequests();
    var readBuffer = new byte[maxChunkSize];
    var trackedExceptions = new List<Exception>();
    DriveItem driveItem = null;

    // Upload the chunks; the final successful chunk carries the created item.
    foreach (var request in chunkRequests)
    {
        // Do your updates here: update progress bar, etc.
        var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
        if (result.UploadSucceeded)
        {
            driveItem = result.ItemResponse;
        }
    }

    if (driveItem == null)
    {
        // FIX: the original threw a bare 'new Exception()' and discarded the tracked
        // chunk failures; surface them so callers can diagnose the failure.
        throw new InvalidOperationException(
            $"Chunked upload of '{filePath}' did not complete.",
            trackedExceptions.Count > 0 ? new AggregateException(trackedExceptions) : null);
    }

    return driveItem;
}
/// <summary>
/// Replaces the content of an existing drive item: a direct PUT (with exponential-backoff
/// retries that rewind the stream) for small payloads, a chunked upload session otherwise.
/// </summary>
/// <returns>Always true; failures surface as exceptions.</returns>
public async Task<bool> SetContentAsync(RootName root, FileId target, Stream content, IProgress<ProgressValue> progress, Func<FileSystemInfoLocator> locatorResolver)
{
    var context = await RequireContextAsync(root);
    var itemRequest = context.Client.Drive.Items[target.Value];
    Item uploadedItem;

    if (content.Length > LargeFileThreshold)
    {
        // Large payload: stream it through an upload session chunk by chunk.
        var uploadSession = await itemRequest.CreateSession().Request().PostAsync();
        var chunkProvider = new ChunkedUploadProvider(uploadSession, context.Client, content);
        uploadedItem = await ChunkedUploadAsync(chunkProvider, progress);
    }
    else
    {
        // Small payload: single PUT; each retry rewinds the source so the next
        // attempt re-sends from the beginning.
        Stream payload = content;
        if (progress != null)
        {
            payload = new ProgressStream(content, progress);
        }

        var retryPolicy = Policy
            .Handle<ServiceException>()
            .WaitAndRetryAsync(
                5,
                attempt => TimeSpan.FromSeconds(Math.Pow(2, attempt)),
                (ex, ts) => content.Seek(0, SeekOrigin.Begin));
        uploadedItem = await retryPolicy.ExecuteAsync(() => itemRequest.Content.Request().PutAsync<Item>(payload));
    }

    return true;
}
/// <summary>
/// Creates a new file under <paramref name="parent"/> from <paramref name="content"/>:
/// an empty stream short-circuits to a proxy contract, small payloads use a retried
/// direct PUT, and payloads above the threshold use a chunked upload session.
/// </summary>
public async Task<FileInfoContract> NewFileItemAsync(RootName root, DirectoryId parent, string name, Stream content, IProgress<ProgressValue> progress)
{
    // Zero-length uploads are represented locally; nothing is sent to the service.
    if (content.Length == 0)
    {
        return new ProxyFileInfoContract(name);
    }

    var context = await RequireContextAsync(root);
    var requestBuilder = context.Client.Drive.Items[parent.Value].ItemWithPath(name);
    Item item;

    if (content.Length > LARGE_FILE_THRESHOLD)
    {
        var session = await requestBuilder.CreateSession().Request().PostAsync();
        item = await ChunkedUploadAsync(new ChunkedUploadProvider(session, context.Client, content), progress, RETRIES);
    }
    else
    {
        Stream stream = progress == null ? content : new ProgressStream(content, progress);
        item = await AsyncFunc.RetryAsync<Item, ServiceException>(async () => await requestBuilder.Content.Request().PutAsync<Item>(stream), RETRIES);
    }

    return new FileInfoContract(
        item.Id,
        item.Name,
        item.CreatedDateTime ?? DateTimeOffset.FromFileTime(0),
        item.LastModifiedDateTime ?? DateTimeOffset.FromFileTime(0),
        item.Size ?? -1,
        item.File.Hashes.Sha1Hash.ToLowerInvariant());
}
/// <summary>
/// Creates a new file under <paramref name="parent"/> from <paramref name="content"/>:
/// an empty stream short-circuits to a proxy contract, small payloads use a Polly-retried
/// direct PUT (rewinding before each retry), large payloads use a chunked upload session.
/// </summary>
public async Task<FileInfoContract> NewFileItemAsync(RootName root, DirectoryId parent, string name, Stream content, IProgress<ProgressValue> progress)
{
    // Zero-length uploads are represented locally; nothing is sent to the service.
    if (content.Length == 0)
    {
        return new ProxyFileInfoContract(name);
    }

    var context = await RequireContextAsync(root);
    var requestBuilder = context.Client.Drive.Items[parent.Value].ItemWithPath(name);
    Item item;

    if (content.Length > LargeFileThreshold)
    {
        var session = await requestBuilder.CreateSession().Request().PostAsync();
        var provider = new ChunkedUploadProvider(session, context.Client, content);
        item = await ChunkedUploadAsync(provider, progress);
    }
    else
    {
        Stream stream = progress == null ? content : new ProgressStream(content, progress);
        var retryPolicy = Policy
            .Handle<ServiceException>()
            .WaitAndRetryAsync(
                5,
                attempt => TimeSpan.FromSeconds(Math.Pow(2, attempt)),
                (ex, ts) => content.Seek(0, SeekOrigin.Begin));
        item = await retryPolicy.ExecuteAsync(() => requestBuilder.Content.Request().PutAsync<Item>(stream));
    }

    return new FileInfoContract(
        item.Id,
        item.Name,
        item.CreatedDateTime ?? DateTimeOffset.FromFileTime(0),
        item.LastModifiedDateTime ?? DateTimeOffset.FromFileTime(0),
        (FileSize)(item.Size ?? -1),
        item.File.Hashes.Sha1Hash.ToLowerInvariant());
}
/// <summary>
/// Drives a chunked upload to completion, retrying whole passes with exponential
/// backoff and reporting progress after every chunk.
/// </summary>
/// <param name="provider">Configured chunked-upload provider for the session.</param>
/// <param name="progress">Optional sink for transferred/total byte counts.</param>
/// <param name="retries">Number of additional passes after the first attempt.</param>
/// <returns>The item returned by the completing chunk.</returns>
/// <exception cref="TaskCanceledException">All passes exhausted; inner AggregateException holds the tracked chunk failures.</exception>
private async Task<Item> ChunkedUploadAsync(ChunkedUploadProvider provider, IProgress<ProgressValue> progress, int retries)
{
    var readBuffer = new byte[MAX_CHUNK_SIZE];
    var exceptions = new List<Exception>();
    do
    {
        // Re-query the outstanding ranges each pass; earlier successful chunks are skipped.
        var uploadChunkRequests = provider.GetUploadChunkRequests();
        var bytesTransferred = 0;
        var bytesTotal = uploadChunkRequests.Sum(u => u.RangeLength);
        progress?.Report(new ProgressValue(bytesTransferred, bytesTotal));
        foreach (var currentChunkRequest in uploadChunkRequests)
        {
            var uploadChunkResult = await provider.GetChunkRequestResponseAsync(currentChunkRequest, readBuffer, exceptions);
            progress?.Report(new ProgressValue(bytesTransferred += currentChunkRequest.RangeLength, bytesTotal));
            if (uploadChunkResult.UploadSucceeded)
            {
                return uploadChunkResult.ItemResponse;
            }
        }
        await provider.UpdateSessionStatusAsync();
        if (retries > 0)
        {
            // FIX: the original computed '1 << (exceptions.Count - 1)'. With zero tracked
            // exceptions the shift count is -1, which C# masks to 31, yielding
            // int.MinValue and a negative Task.Delay that throws ArgumentOutOfRangeException.
            // Clamp the exponent to [0, 6] (1s..64s) and skip the delay after the final pass.
            var backoffSeconds = 1 << Math.Min(Math.Max(exceptions.Count - 1, 0), 6);
            await Task.Delay(backoffSeconds * 1000).ConfigureAwait(false);
        }
    } while (--retries >= 0);
    throw new TaskCanceledException(Properties.Resources.RetriesExhausted, new AggregateException(exceptions));
}
/// <summary>
/// Runs a chunked upload under the instance retry policy, reporting progress per chunk,
/// and returns the uploaded item as soon as any chunk response signals completion.
/// </summary>
/// <exception cref="TaskCanceledException">No chunk completed the upload; inner AggregateException holds the tracked failures.</exception>
private async Task<Item> ChunkedUploadAsync(ChunkedUploadProvider provider, IProgress<ProgressValue> progress)
{
    var buffer = new byte[MaxChunkSize];
    var trackedExceptions = new List<Exception>();

    var chunkRequests = provider.GetUploadChunkRequests();
    var totalBytes = chunkRequests.Sum(r => r.RangeLength);
    var sentBytes = 0;
    progress?.Report(new ProgressValue(sentBytes, totalBytes));

    foreach (var chunkRequest in chunkRequests)
    {
        var response = await retryPolicy.ExecuteAsync(() => provider.GetChunkRequestResponseAsync(chunkRequest, buffer, trackedExceptions));
        sentBytes += chunkRequest.RangeLength;
        progress?.Report(new ProgressValue(sentBytes, totalBytes));
        if (response.UploadSucceeded)
        {
            return response.ItemResponse;
        }
    }

    // No chunk reported completion: refresh the session state, then give up.
    await retryPolicy.ExecuteAsync(() => provider.UpdateSessionStatusAsync());
    throw new TaskCanceledException(Properties.Resources.RetriesExhausted, new AggregateException(trackedExceptions));
}
/// <summary>
/// Uploads a stream to the app-root folder via a chunked session. When
/// <paramref name="isShared"/> is set, shares the uploaded file and returns the
/// sharing result; otherwise returns null.
/// </summary>
private async Task<string> UploadFile(string fileName, string fileExtension, Stream stream, string jsonFileId, bool isShared)
{
    var uploadSession = await _graphServiceClient.Me.Drive.Special.AppRoot
        .ItemWithPath(fileName + fileExtension).CreateUploadSession().Request().PostAsync();

    // Chunk size must be divisible by 320 KiB; this is slightly more than 1 MB.
    const int chunkSize = (320 * 1024) * 4;
    var uploadProvider = new ChunkedUploadProvider(uploadSession, _graphServiceClient, stream, chunkSize);

    DriveItem uploadedFile = null;
    var trackedExceptions = new List<Exception>();
    foreach (var chunkRequest in uploadProvider.GetUploadChunkRequests())
    {
        // The final successful chunk response carries the created item.
        var response = await uploadProvider.GetChunkRequestResponseAsync(chunkRequest, trackedExceptions);
        if (response.UploadSucceeded)
        {
            uploadedFile = response.ItemResponse;
        }
    }

    return isShared ? await SetPermissions(uploadedFile?.Name) : null;
}
/// <summary>
/// Integration test: uploads an embedded bitmap resource to a fixed drive item path
/// via a chunked upload session in 320 KB chunks. A ServiceException fails the test.
/// </summary>
public async Task OneDriveUploadLargeFile()
{
    try
    {
        System.Drawing.ImageConverter converter = new System.Drawing.ImageConverter();
        var buff = (byte[])converter.ConvertTo(Microsoft.Graph.Test.Properties.Resources.hamilton, typeof(byte[]));
        using (System.IO.MemoryStream ms = new System.IO.MemoryStream(buff))
        {
            // Describe the file to upload. Pass into CreateUploadSession, when the service works as expected.
            // NOTE(review): 'props' is built but never passed to CreateUploadSession —
            // only the AdditionalData conflict behavior would matter; confirm intent.
            var props = new DriveItemUploadableProperties();
            //props.Name = "_hamilton.png";
            //props.Description = "This is a pictureof Mr. Hamilton.";
            //props.FileSystemInfo = new FileSystemInfo();
            //props.FileSystemInfo.CreatedDateTime = System.DateTimeOffset.Now;
            //props.FileSystemInfo.LastModifiedDateTime = System.DateTimeOffset.Now;
            props.AdditionalData = new Dictionary<string, object>();
            props.AdditionalData.Add("@microsoft.graph.conflictBehavior", "rename");

            // Get the provider.
            // POST /v1.0/drive/items/01KGPRHTV6Y2GOVW7725BZO354PWSELRRZ:/_hamiltion.png:/microsoft.graph.createUploadSession
            // The CreateUploadSesssion action doesn't seem to support the options stated in the metadata. This issue has been filed.
            var uploadSession = await graphClient.Drive.Items["01KGPRHTV6Y2GOVW7725BZO354PWSELRRZ"].ItemWithPath("_hamilton.png").CreateUploadSession().Request().PostAsync();
            var maxChunkSize = 320 * 1024; // 320 KB - Change this to your chunk size. 5MB is the default.
            var provider = new ChunkedUploadProvider(uploadSession, graphClient, ms, maxChunkSize);

            // Setup the chunk request necessities
            var chunkRequests = provider.GetUploadChunkRequests();
            var readBuffer = new byte[maxChunkSize];
            var trackedExceptions = new List<Exception>();
            DriveItem itemResult = null;

            //upload the chunks
            foreach (var request in chunkRequests)
            {
                // Do your updates here: update progress bar, etc.
                // ...
                // Send chunk request
                var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
                if (result.UploadSucceeded)
                {
                    itemResult = result.ItemResponse;
                }
            }

            // Check that upload succeeded
            if (itemResult == null)
            {
                // Retry the upload
                // ...
            }
        }
    }
    catch (Microsoft.Graph.ServiceException e)
    {
        Assert.Fail("Something happened, check out a trace. Error code: {0}", e.Error.Code);
    }
}
/// <summary>
/// Uploads the local Moviekus database file to OneDrive: a chunked upload session for
/// files over 4 MB, a single PUT otherwise.
/// </summary>
/// <param name="retriesRemaining">
/// Additional chunked-upload attempts after a failed pass. Added (with a default) to
/// bound the previously unbounded recursive retry; existing callers are unaffected.
/// </param>
/// <returns>true when the upload completed; false on failure or exception.</returns>
public static async Task<bool> UploadDbToOneDrive(int retriesRemaining = 3)
{
    try
    {
        var remotePath = MoviekusFolderName + MoviekusDefines.DbFileName;
        var localPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), MoviekusDefines.DbFileName);

        LogManager.GetCurrentClassLogger().Info("Signing in to OneDrive...");
        await GraphClientManager.Ref.SignIn();

        LogManager.GetCurrentClassLogger().Info("Starting Db-Upload to OneDrive...");
        LogManager.GetCurrentClassLogger().Info($"Local path is: {localPath}");
        LogManager.GetCurrentClassLogger().Info($"Remote path is: {remotePath}");

        using (var stream = System.IO.File.OpenRead(localPath))
        {
            if (stream.Length > 4 * 1024 * 1024) // Ab 4MB müssen Chunks übertragen werden (chunked transfer required above 4 MB)
            {
                var session = await GraphClientManager.Ref.GraphClient.Drive.Root.ItemWithPath(remotePath).CreateUploadSession().Request().PostAsync();
                var maxSizeChunk = 320 * 4 * 1024; // must be a multiple of 320 KiB
                var provider = new ChunkedUploadProvider(session, GraphClientManager.Ref.GraphClient, stream, maxSizeChunk);
                var chunkRequests = provider.GetUploadChunkRequests();
                var exceptions = new List<Exception>();
                DriveItem itemResult = null;

                foreach (var request in chunkRequests)
                {
                    var result = await provider.GetChunkRequestResponseAsync(request, exceptions);
                    if (result.UploadSucceeded)
                    {
                        itemResult = result.ItemResponse;
                    }
                }

                if (itemResult == null)
                {
                    // FIX: the original recursed unconditionally (possible infinite
                    // recursion) and discarded the result, so a failed upload still
                    // fell through and reported success.
                    if (retriesRemaining <= 0)
                    {
                        LogManager.GetCurrentClassLogger().Error("Db-Upload to OneDrive failed after all retries.");
                        return false;
                    }
                    return await UploadDbToOneDrive(retriesRemaining - 1);
                }
            }
            else
            {
                await GraphClientManager.Ref.GraphClient.Drive.Root.ItemWithPath(remotePath).Content.Request().PutAsync<DriveItem>(stream);
            }
        }
    }
    catch (Exception ex)
    {
        LogManager.GetCurrentClassLogger().Error(ex);
        return (false);
    }
    LogManager.GetCurrentClassLogger().Info("Finished Db-Upload to OneDrive.");
    return (true);
}
// Uploads a large file to the current user's root directory.
/// <summary>
/// Uploads an embedded bitmap to the signed-in user's drive root via a chunked upload
/// session, authenticating each request with the current claims principal's account.
/// </summary>
/// <returns>One <c>ResultsItem</c> per successful final chunk (i.e. at most one entry
/// describing the uploaded file); empty when the upload never completed.</returns>
public async Task<List<ResultsItem>> UploadLargeFile()
{
    List<ResultsItem> items = new List<ResultsItem>();

    using (Stream fileStream = System.IO.File.OpenRead(HostingEnvironment.MapPath("/Content/LargeFileUploadResource.bmp")))
    {
        // Create the upload session. The access token is no longer required as you have session established for the upload.
        // POST /v1.0/drive/root:/UploadLargeFile.bmp:/microsoft.graph.createUploadSession
        UploadSession uploadSession = await graphClient.Me.Drive.Root.ItemWithPath("LargeFileUploadResource.bmp").CreateUploadSession().Request(requestOptions)
            .WithUserAccount(ClaimsPrincipal.Current.ToGraphUserAccount())
            .PostAsync();

        int maxChunkSize = 320 * 1024; // 320 KB - Change this to your chunk size. 5MB is the default.
        ChunkedUploadProvider provider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, maxChunkSize);

        // Set up the chunk request necessities.
        IEnumerable<UploadChunkRequest> chunkRequests = provider.GetUploadChunkRequests(requestOptions);
        byte[] readBuffer = new byte[maxChunkSize];
        List<Exception> trackedExceptions = new List<Exception>();
        DriveItem uploadedFile = null;

        // Upload the chunks.
        foreach (var request in chunkRequests)
        {
            // Each chunk request must carry the user account explicitly.
            request.WithUserAccount(ClaimsPrincipal.Current.ToGraphUserAccount());

            // Do your updates here: update progress bar, etc.
            // ...
            // Send chunk request
            UploadChunkResult result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);

            if (result.UploadSucceeded)
            {
                uploadedFile = result.ItemResponse;

                // Get file properties.
                items.Add(new ResultsItem
                {
                    Display = uploadedFile.Name,
                    Id = uploadedFile.Id,
                    Properties = new Dictionary<string, object>
                    {
                        { Resource.Prop_Created, uploadedFile.CreatedDateTime.Value.ToLocalTime() },
                        { Resource.Prop_Url, uploadedFile.WebUrl },
                        { Resource.Prop_Id, uploadedFile.Id }
                    }
                });
            }
        }

        // Check that upload succeeded.
        if (uploadedFile == null)
        {
            // Retry the upload
            // ...
        }
    }
    return (items);
}
/// <inheritdoc/>
/// <remarks>
/// Creates an upload session directly over HTTP (to pass the conflict behavior the SDK
/// action does not accept), then streams the content through a ChunkedUploadProvider.
/// </remarks>
public async Task<IOneDriveStorageFile> UploadFileAsync(string desiredName, IRandomAccessStream content, CreationCollisionOption options = CreationCollisionOption.FailIfExists, int maxChunkSize = -1)
{
    // A negative caller value means "use the default"; whatever we end up with must be
    // a multiple of 320 KiB, as the upload-session API requires.
    int currentChunkSize = maxChunkSize < 0 ? OneDriveUploadConstants.DefaultMaxChunkSizeForUploadSession : maxChunkSize;
    if (currentChunkSize % OneDriveUploadConstants.RequiredChunkSizeIncrementForUploadSession != 0)
    {
        throw new ArgumentException("Max chunk size must be a multiple of 320 KiB", nameof(maxChunkSize));
    }
    if (string.IsNullOrEmpty(desiredName))
    {
        throw new ArgumentNullException(nameof(desiredName));
    }
    if (content == null)
    {
        throw new ArgumentNullException(nameof(content));
    }

    var uploadSessionUri = $"{Provider.BaseUrl}/drive/items/{OneDriveItem.Id}:/{desiredName}:/createUploadSession";
    var conflictBehavior = new OneDriveItemConflictBehavior { Item = new OneDriveConflictItem { ConflictBehavior = OneDriveHelper.TransformCollisionOptionToConflictBehavior(options.ToString()) } };
    var jsonConflictBehavior = JsonConvert.SerializeObject(conflictBehavior);
    HttpRequestMessage request = new HttpRequestMessage(HttpMethod.Post, uploadSessionUri)
    {
        Content = new StringContent(jsonConflictBehavior, Encoding.UTF8, "application/json")
    };
    await Provider.AuthenticationProvider.AuthenticateRequestAsync(request).ConfigureAwait(false);

    var response = await Provider.HttpProvider.SendAsync(request).ConfigureAwait(false);
    if (!response.IsSuccessStatusCode)
    {
        throw new ServiceException(new Error { Message = "Could not create an UploadSession", Code = "NoUploadSession", ThrowSite = "UWP Community Toolkit" });
    }

    IsUploadCompleted = false;
    var jsonData = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
    _uploadSession = JsonConvert.DeserializeObject<UploadSession>(jsonData);

    var streamToUpload = content.AsStreamForRead();
    // FIX: the original passed the raw 'maxChunkSize' (-1 when the caller used the
    // default) to the provider instead of the validated 'currentChunkSize'.
    _uploadProvider = new ChunkedUploadProvider(_uploadSession, Provider, streamToUpload, currentChunkSize);

    var uploadedItem = await _uploadProvider.UploadAsync().ConfigureAwait(false);
    IsUploadCompleted = true;
    return InitializeOneDriveStorageFile(uploadedItem);
}
// ---------------------------------------------------------------------------
// NOTE(review): the two handlers below are commented-out UI wiring (file picker
// and upload button). Retained verbatim; consider deleting dead code.
// ---------------------------------------------------------------------------
//private void filesbtn_Click(object sender, RoutedEventArgs e)
//{
//    OpenFileDialog fileDialog = new OpenFileDialog();
//    fileDialog.Filter = "*.epub | *.*";
//    fileDialog.InitialDirectory = "C:\\Users\\Tom\\Desktop";
//    fileDialog.Multiselect = true;
//    if (fileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK)
//    {
//        selectedFiles = fileDialog.FileNames;
//    }
//    if (selectedFiles != null && selectedFiles.Count() > 0)
//    {
//        List<CustomeName1> lstItems = new List<CustomeName1>();
//        var fileInfo = new FileInfo(selectedFiles[0]);
//        dirName = $"Directory : {fileInfo.DirectoryName}";
//        foreach (var file in selectedFiles)
//        {
//            lstItems.Add(new CustomeName1() { Name = (new FileInfo(file)).Name });
//        }
//        //lstView1.ItemsSource = lstItems;
//    }
//}
//private async void btnUpload_Click(object sender, RoutedEventArgs e)
//{
//    if (selectedFiles == null || selectedFiles.Count() == 0)
//    {
//        System.Windows.MessageBox.Show("Please select atleast one file to upload!", "Stop!", MessageBoxButton.OK, MessageBoxImage.Warning);
//        return;
//    }
//    try
//    {
//        //spinner.Visibility = Visibility.Visible;
//        //spinner.Spin = true;
//        //btnUpload.IsEnabled = false;
//        //filesbtn.IsEnabled = false;
//        if (_graphClient == null)
//        {
//            _graphClient = GraphClientHelper.GetAuthenticatedClient();
//        }
//        var count = 100 / selectedFiles.Count();
//        if (System.IO.File.Exists("log.txt"))
//        {
//            System.IO.File.Delete("log.txt");
//        }
//        foreach (var file in selectedFiles)
//        {
//            var fileName = Path.GetFileName(file);
//            try
//            {
//                if (file != null && file.Contains("."))
//                {
//                    await UploadFilesToOneDrive(fileName, file, _graphClient);
//                    //progressBar.Value += count;
//                }
//            }
//            catch (Exception ex)
//            {
//                errorMessages.AppendLine($"File: {fileName} upload failed:");
//                errorMessages.AppendLine($"Message :{ ex.Message }");
//                errorMessages.AppendLine($"{ ex.StackTrace }");
//                System.IO.File.AppendAllText("log.txt", errorMessages.ToString());
//                System.Windows.MessageBox.Show(ex.Message, "Error!", MessageBoxButton.OK, MessageBoxImage.Error);
//                continue;
//            }
//        }
//        if (!System.IO.File.Exists("log.txt"))
//        {
//            System.Windows.MessageBox.Show("Successfully uploaded");
//        }
//    }
//    catch (Exception ex)
//    {
//        //spinner.Spin = false;
//        //spinner.Visibility = Visibility.Hidden;
//        errorMessages.AppendLine($"Message :{ ex.Message }");
//        errorMessages.AppendLine($"{ ex.StackTrace }");
//        System.IO.File.AppendAllText("log.txt", errorMessages.ToString());
//        System.Windows.MessageBox.Show(ex.Message, "Error!", MessageBoxButton.OK, MessageBoxImage.Error);
//    }
//    finally
//    {
//        //dirName.Content = "Directory: ";
//        //lstView1.ItemsSource = null;
//        selectedFiles = new string[0];
//        //btnUpload.IsEnabled = true;
//        //filesbtn.IsEnabled = true;
//        //spinner.Spin = false;
//        //spinner.Visibility = Visibility.Hidden;
//        //progressBar.Value = 0;
//        if (System.IO.File.Exists("log.txt"))
//        {
//            var result = Process.Start("log.txt");
//            Thread.Sleep(5000);
//            if (result.HasExited)
//            {
//                System.IO.File.Delete("log.txt");
//            }
//        }
//    }
//}
/// <summary>
/// UploadFiles to Onedrive Less than 4MB only
/// NOTE(review): the summary contradicts the code — the &gt;4MB branch performs a
/// chunked upload, while the &lt;=4MB branch is entirely commented out and does
/// nothing. Confirm which behavior is intended.
/// </summary>
/// <param name="fileName">Destination file name (date-stamped folder is prepended).</param>
/// <param name="filePath">Local path of the file to read.</param>
/// <param name="graphClient">Authenticated Graph client used for the upload.</param>
/// <returns></returns>
private static async Task UploadFilesToOneDrive(string fileName, string filePath, GraphServiceClient graphClient)
{
    try
    {
        var uploadPath = $"/CodeUploads/{DateTime.Now.ToString("ddMMyyyy")}/" + Uri.EscapeUriString(fileName);
        using (var stream = new FileStream(filePath, FileMode.Open))
        {
            // NOTE(review): FileStream construction never returns null — this check is dead.
            if (stream != null)
            {
                var fileSize = ByteSize.FromBytes(stream.Length);
                if (fileSize.MegaBytes > 4)
                {
                    var session = await graphClient.Drive.Root.ItemWithPath(uploadPath).CreateUploadSession().Request().PostAsync();
                    var maxSizeChunk = 320 * 4 * 1024; // must be a multiple of 320 KiB
                    var provider = new ChunkedUploadProvider(session, graphClient, stream, maxSizeChunk);
                    var chunckRequests = provider.GetUploadChunkRequests();
                    var exceptions = new List<Exception>();
                    var readBuffer = new byte[maxSizeChunk];
                    DriveItem itemResult = null;
                    //upload the chunks
                    foreach (var request in chunckRequests)
                    {
                        // Do your updates here: update progress bar, etc.
                        // ...
                        // Send chunk request
                        var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                        if (result.UploadSucceeded)
                        {
                            itemResult = result.ItemResponse;
                        }
                    }
                    // Check that upload succeeded
                    if (itemResult == null)
                    {
                        // NOTE(review): unbounded recursive retry — a persistently
                        // failing upload recurses forever; consider a bounded retry.
                        await UploadFilesToOneDrive(fileName, filePath, graphClient);
                    }
                }
                else
                {
                    // NOTE(review): small-file branch is entirely commented out, so
                    // files of 4 MB or less are silently not uploaded.
                    //await graphClient.Drive.Root.ItemWithPath(uploadPath).Content.Request().PutAsync<DriveItem>(stream);
                    //await graphClient.Groups["36c450de-af05-4dd1-bed8-9c7b6d7ca3e3"].Drive.Root.ItemWithPath(uploadPath).Content.Request().PutAsync<DriveItem>(stream);
                    //graphClient.Groups[].Drive.Items
                }
            }
        }
    }
    catch
    {
        // NOTE(review): catch-then-rethrow with no handling is a no-op wrapper.
        throw;
    }
}
/// <summary>
/// Constructing a provider with a valid chunk size (a 320 KiB multiple) must not throw.
/// </summary>
public void ConstructorTest_Valid()
{
    this.StreamSetup(true);

    const int validChunkSize = 320 * 1024;
    var provider = new ChunkedUploadProvider(
        this.uploadSession.Object,
        this.client.Object,
        this.uploadStream.Object,
        validChunkSize);
}
/// <summary>
/// Constructing a provider with a chunk size (12) that is not a multiple of 320 KiB.
/// NOTE(review): there is no Assert.Throws or try/catch here — presumably the expected
/// exception is declared via a test-framework attribute outside this view; confirm,
/// otherwise this test passes vacuously only if the constructor does NOT throw.
/// </summary>
public void ConstructorTest_InvalidChunkSize()
{
    this.StreamSetup(false);
    var uploadProvider = new ChunkedUploadProvider(
        this.uploadSession.Object,
        this.client.Object,
        this.uploadStream.Object,
        12);
}
/// <summary>
/// Integration test (xUnit): uploads an embedded resource stream to a fixed drive item
/// path via a chunked upload session in 320 KB chunks. A ServiceException fails the test.
/// </summary>
public async Task OneDriveUploadLargeFile()
{
    try
    {
        using (Stream stream = ResourceHelper.GetResourceAsStream(ResourceHelper.Hamilton))
        {
            // Describe the file to upload. Pass into CreateUploadSession, when the service works as expected.
            //var props = new DriveItemUploadableProperties();
            //props.Name = "_hamilton.png";
            //props.Description = "This is a pictureof Mr. Hamilton.";
            //props.FileSystemInfo = new FileSystemInfo();
            //props.FileSystemInfo.CreatedDateTime = System.DateTimeOffset.Now;
            //props.FileSystemInfo.LastModifiedDateTime = System.DateTimeOffset.Now;

            // Get the provider.
            // POST /v1.0/drive/items/01KGPRHTV6Y2GOVW7725BZO354PWSELRRZ:/_hamiltion.png:/microsoft.graph.createUploadSession
            // The CreateUploadSesssion action doesn't seem to support the options stated in the metadata.
            var uploadSession = await graphClient.Drive.Items["01KGPRHTV6Y2GOVW7725BZO354PWSELRRZ"].ItemWithPath("_hamilton.png").CreateUploadSession().Request().PostAsync();
            var maxChunkSize = 320 * 1024; // 320 KB - Change this to your chunk size. 5MB is the default.
            var provider = new ChunkedUploadProvider(uploadSession, graphClient, stream, maxChunkSize);

            // Setup the chunk request necessities
            var chunkRequests = provider.GetUploadChunkRequests();
            var readBuffer = new byte[maxChunkSize];
            var trackedExceptions = new List<Exception>();
            DriveItem itemResult = null;

            //upload the chunks
            foreach (var request in chunkRequests)
            {
                // Do your updates here: update progress bar, etc.
                // ...
                // Send chunk request
                var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
                if (result.UploadSucceeded)
                {
                    itemResult = result.ItemResponse;
                }
            }

            // Check that upload succeeded
            if (itemResult == null)
            {
                // Retry the upload
                // ...
            }
        }
    }
    catch (Microsoft.Graph.ServiceException e)
    {
        Assert.True(false, "Something happened, check out a trace. Error code: " + e.Error.Code);
    }
}
/// <summary>
/// Uploads a local file to <paramref name="dest"/> (relative to the adapter root),
/// replacing any existing file, with up to RETRIES best-effort attempts. Files over
/// 1 MB go through a chunked upload session; smaller files use a single PUT.
/// </summary>
/// <exception cref="PathException">The destination exists and is a directory.</exception>
/// <exception cref="StorageException">All upload attempts failed.</exception>
public void Upload(string src, string dest, bool finalizeLocal = true)
{
    dest = LexicalPath.Combine(_rootPath, dest);
    var fileName = LexicalPath.GetFileName(dest);

    // Replace an existing file; refuse to overwrite a directory.
    var item = GetItem(dest);
    if (item != null)
    {
        if (item.Folder != null)
        {
            throw new PathException($"Failed to create file '{dest}'. The path exists and is a directory.");
        }
        _graphServiceClient.Drive.Items[item.Id].Request().DeleteAsync().Wait();
    }

    var sourceLength = new FileInfo(src).Length;
    dest = EncodePath(dest);

    var uploaded = false;
    var attempt = 0;
    while (attempt < RETRIES && !uploaded)
    {
        ++attempt;
        try
        {
            using (var stream = System.IO.File.OpenRead(src))
            {
                if (sourceLength > 1024 * 1024)
                {
                    // TODO(ivannp) A progress indicator can be added according to:
                    // https://github.com/OneDrive/onedrive-sdk-csharp/blob/master/docs/chunked-uploads.md,
                    var session = _graphServiceClient.Drive.Root.ItemWithPath($"{dest}").CreateUploadSession().Request().PostAsync().Result;
                    var provider = new ChunkedUploadProvider(session, _graphServiceClient, stream);
                    item = provider.UploadAsync().Result;
                }
                else
                {
                    _graphServiceClient.Drive.Root.ItemWithPath($"{dest}").Content.Request().PutAsync<DriveItem>(stream).Wait();
                }
            }
            uploaded = true;
        }
        catch (Exception)
        {
            // Deliberate best-effort: swallow and retry until attempts are exhausted.
        }
    }

    if (!uploaded)
    {
        throw new StorageException($"Failed to upload to '{dest}' ({RETRIES} attempts).");
    }
}
/// <summary>
/// Uploads a file to the default document library of the given SharePoint site:
/// a single PUT for small files, a chunked upload session for large ones.
/// </summary>
/// <param name="fileStream">Content to upload.</param>
/// <param name="fileName">Destination name/path relative to the drive root.</param>
/// <param name="isLargeFile">true to force the chunked upload-session path.</param>
/// <param name="siteUrl">Server-relative site URL; falls back to "/sites/web01" when null or blank.</param>
/// <returns>The uploaded item, or null when no chunk reported a completed upload.</returns>
public static async Task<DriveItem> UploadFileAsync(Stream fileStream, string fileName, bool isLargeFile, string siteUrl)
{
    var graphClient = GetAuthenticatedClient();
    DriveItem uploadedFile = null;

    // FIX: IsNullOrWhiteSpace also covers a null siteUrl, which the original
    // 'siteUrl.Trim()' call turned into a NullReferenceException.
    siteUrl = string.IsNullOrWhiteSpace(siteUrl) ? "/sites/web01" : siteUrl;
    //var hostname = "m365x130314.sharepoint.com";

    // NOTE: 'hostname' is declared elsewhere in this class (the local above is commented out).
    // FIX: the original 'catch (Exception ex) { throw ex; }' wrapper reset the stack
    // trace; it has been removed — exceptions now propagate with their original trace.
    var site = await graphClient.Sites.GetByPath(siteUrl, hostname).Request().GetAsync();

    if (!isLargeFile)
    {
        uploadedFile = await graphClient.Sites[site.Id].Drive.Root.ItemWithPath(fileName).Content.Request().PutAsync<DriveItem>(fileStream);
    }
    else
    {
        UploadSession uploadSession = await graphClient.Sites[site.Id].Drive.Root.ItemWithPath(fileName).CreateUploadSession().Request().PostAsync();
        if (uploadSession != null)
        {
            // Chunk size must be divisible by 320 KiB; this is 5 MiB.
            int maxSizeChunk = (320 * 1024) * 16;
            ChunkedUploadProvider uploadProvider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, maxSizeChunk);
            var chunkRequests = uploadProvider.GetUploadChunkRequests();
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];

            foreach (var request in chunkRequests)
            {
                // The final successful chunk response carries the created item.
                var result = await uploadProvider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                if (result.UploadSucceeded)
                {
                    uploadedFile = result.ItemResponse;
                }
            }
        }
    }

    return (uploadedFile);
}
/// <summary>
/// Uploads a local file into a newly created OneDrive folder via a chunked upload
/// session and returns the resulting item.
/// </summary>
/// <param name="filePath">Local path of the file to read.</param>
/// <param name="fileName">Destination file name inside the created folder.</param>
/// <param name="accessToken">NOTE(review): unused — this method acquires its own token
/// via getToken(); confirm whether the parameter is still needed by callers.</param>
/// <returns>The uploaded <see cref="Microsoft.Graph.DriveItem"/>.</returns>
/// <exception cref="ApplicationException">No chunk reported a completed upload.</exception>
private async Task<Microsoft.Graph.DriveItem> doUpload(string filePath, string fileName, string accessToken)
{
    string token = await getToken();
    var graphServiceClient = getClient(token);
    using (var file = System.IO.File.OpenRead(filePath))
    {
        // NOTE(review): 'stream' is never disposed and its position is left at the end
        // after CopyTo; presumably the provider seeks per chunk request — confirm.
        MemoryStream stream = new MemoryStream();
        file.CopyTo(stream);
        autoOpen(stream);
        var documentFolder = await ODataHelper.PostFolder<OneDriveItem>(GraphApiHelper.GetOneDriveChildrenUrl(), token);
        var uploadSession = await graphServiceClient.Drives[documentFolder.ParentReference.DriveId].Items[documentFolder.Id].ItemWithPath(fileName).CreateUploadSession().Request().PostAsync();
        // Mutates the session's upload URL so chunk responses include these fields.
        string ul = uploadSession.UploadUrl += "&$select=Id,ParentReference,WebUrl,WebDavUrl";
        var maxChunkSize = (320 * 1024) * 10; // 3200 KB (a 320 KiB multiple); 5MB is the default.
        var provider = new ChunkedUploadProvider(uploadSession, graphServiceClient, stream, maxChunkSize);

        // Setup the chunk request necessities
        var chunkRequests = provider.GetUploadChunkRequests();
        var readBuffer = new byte[maxChunkSize];
        var trackedExceptions = new List<Exception>();
        DriveItem itemResult = null;

        //upload the chunks
        foreach (var request in chunkRequests)
        {
            // Do your updates here: update progress bar, etc.
            // ...
            // Send chunk request
            var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
            if (result.UploadSucceeded)
            {
                itemResult = result.ItemResponse;
            }
        }

        // Check that upload succeeded
        if (itemResult != null)
        {
            return (itemResult);
        }
    }
    throw new ApplicationException("Upload failed.");
}
// Uploads a large file to the current user's root directory using the SDK's
// built-in UploadAsync helper over a chunked upload session. Blocks until done.
private static void UploadLargeFile(GraphServiceClient graphClient, Stream fileStream, string fileName)
{
    // Create the upload session. The access token is no longer required as you have session established for the upload.
    // POST /v1.0/drive/root:/UploadLargeFile.bmp:/microsoft.graph.createUploadSession
    Microsoft.Graph.UploadSession uploadSession =
        graphClient.Me.Drive.Root.ItemWithPath(fileName).CreateUploadSession().Request().PostAsync().Result;

    const int maxChunkSize = 320 * 1024; // 320 KB - Change this to your chunk size. 5MB is the default.
    var provider = new ChunkedUploadProvider(uploadSession, graphClient, fileStream, maxChunkSize);

    //Replace own implementation in favour of UploadAsync-Helper (https://github.com/OneDrive/onedrive-sdk-csharp/blob/master/docs/chunked-uploads.md)
    _ = provider.UploadAsync().Result;
}
/// <summary>
/// Builds a ChunkedUploadProvider over a mocked stream/session configured with the
/// given chunk size, total size and expected ranges, and returns the chunk requests
/// it would issue.
/// </summary>
private IEnumerable<UploadChunkRequest> SetupGetUploadChunksTest(int chunkSize, long totalSize, IEnumerable<string> ranges)
{
    this.uploadSession.Object.NextExpectedRanges = ranges;

    this.uploadStream = new Mock<Stream>();
    this.uploadStream.Setup(s => s.Length).Returns(totalSize);
    this.StreamSetup(true);

    var chunkProvider = new ChunkedUploadProvider(
        this.uploadSession.Object,
        this.client.Object,
        this.uploadStream.Object,
        chunkSize);

    return chunkProvider.GetUploadChunkRequests();
}
/// <summary>
/// Uploads a local file into the task's yearbook upload folder via a chunked session,
/// logging per-chunk progress and reporting fractional completion after each chunk.
/// </summary>
/// <param name="task">Upload task whose name selects the destination subfolder.</param>
/// <param name="localFileName">Local path of the file to upload.</param>
/// <param name="token">Cancellation token, checked between chunks (the original accepted but ignored it).</param>
/// <param name="progress">Receives completion in [0, 1] after each chunk.</param>
/// <param name="logs">Sink for progress/log lines.</param>
/// <returns>The uploaded <c>DriveItem</c>.</returns>
/// <exception cref="ApplicationException">The shared client has not been initialized.</exception>
/// <exception cref="InvalidOperationException">No chunk returned the completed item.</exception>
public static async Task<DriveItem> UploadToOneDrive(UploadTask task, string localFileName, CancellationToken token, IProgress<double> progress, TextBoxWriter logs)
{
    if (MainWindow.Client == null)
    {
        throw new ApplicationException("OneDrive client null!");
    }

    var name = Path.GetFileName(localFileName);
    var targetPath = "YEARBOOK 2019/_Uploads/" + task.TaskName + "/" + name;
    var sesh = await MainWindow.Client.Drives["b!ciVzyDuG1kGQJn8UlMdOl6fArqsytz1JhhwdkjwpvaP7ZUv4lIZKSL4QhaM-19bz"]
        .Root
        .ItemWithPath(targetPath)
        .CreateUploadSession(new DriveItemUploadableProperties() { })
        .Request().PostAsync();
    logs.WriteLine($"Started new sesh; timeout {sesh.ExpirationDateTime}");

    using (var stream = new FileStream(localFileName, FileMode.Open))
    {
        var maxChunkSize = 320 * 1024 * 2; // must remain a multiple of 320 KiB
        var provider = new ChunkedUploadProvider(sesh, MainWindow.Client, stream, maxChunkSize);
        var readBuffer = new byte[maxChunkSize];
        var trackedExceptions = new List<Exception>();
        DriveItem item = null;
        int index = 0;
        var uploadChunkRequests = provider.GetUploadChunkRequests().ToList();
        foreach (var request in uploadChunkRequests)
        {
            // FIX: the token was previously accepted but never observed.
            token.ThrowIfCancellationRequested();
            logs.WriteLine($"Uploading chunk {index} of {uploadChunkRequests.Count} (timeout {provider.Session.ExpirationDateTime})");
            var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
            if (result.UploadSucceeded)
            {
                if (result.ItemResponse != null)
                {
                    item = result.ItemResponse;
                }
            }
            index += 1;
            progress.Report((double)index / uploadChunkRequests.Count);
        }

        if (item == null)
        {
            // FIX: the original dereferenced item.Id unconditionally, which threw a
            // bare NullReferenceException when no chunk returned the completed item.
            throw new InvalidOperationException(
                "OneDrive upload did not complete.",
                trackedExceptions.Count > 0 ? new AggregateException(trackedExceptions) : null);
        }
        logs.WriteLine("OneDrive upload completed; new item ID " + item.Id);
        return item;
    }
}
/// <summary>
/// Uploads a mail attachment stream to the signed-in user's OneDrive root.
/// Streams up to 4 MB are sent with a single PUT; larger streams use a
/// chunked upload session.
/// </summary>
/// <param name="accessToken">Bearer token used to authenticate Graph requests.</param>
/// <param name="stream">Content to upload; must support reading and <c>Length</c>.</param>
/// <param name="filename">Target item path relative to the drive root.</param>
/// <returns>The web URL of the uploaded item, or null if the large-file upload failed.</returns>
private async Task<string> uploadMail2OD(string accessToken, Stream stream, string filename)
{
    GraphServiceClient graphClient = new GraphServiceClient(new DelegateAuthenticationProvider(
        async (requestMessage) =>
        {
            requestMessage.Headers.Authorization = new AuthenticationHeaderValue("bearer", accessToken);
        }));

    if (stream.Length < (4 * 1024 * 1024))
    {
        // Small file: single-request upload.
        DriveItem uploadResult = await graphClient.Me
            .Drive.Root
            .ItemWithPath(filename)
            .Content.Request()
            .PutAsync<DriveItem>(stream);
        return uploadResult.WebUrl;
    }
    else
    {
        try
        {
            // This method supports files even greater 4MB
            DriveItem item = null;
            UploadSession session = await graphClient.Me.Drive.Root
                .ItemWithPath(filename).CreateUploadSession().Request().PostAsync();

            // Chunk size must be a multiple of 320 KiB.
            int maxSizeChunk = 320 * 4 * 1024;
            ChunkedUploadProvider provider = new ChunkedUploadProvider(session, graphClient, stream, maxSizeChunk);
            var chunkRequests = provider.GetUploadChunkRequests();
            List<Exception> exceptions = new List<Exception>();
            foreach (UploadChunkRequest chunkRequest in chunkRequests) // upload the chunks
            {
                var result = await provider.GetChunkRequestResponseAsync(chunkRequest, exceptions);
                if (result.UploadSucceeded)
                {
                    item = result.ItemResponse; // Check that upload succeeded
                }
                // FIX: chunk failures were silently collected and ignored; bail out
                // as soon as a chunk both failed and recorded an exception.
                else if (exceptions.Count > 0)
                {
                    return null;
                }
            }
            return item != null ? item.WebUrl : null;
        }
        catch (ServiceException)
        {
            // Preserve the original best-effort contract: null signals failure.
            return null;
        }
    }
}
/// <summary>
/// Uploads the template stream from <paramref name="oAuth"/> to a new
/// "/Contratos/Prueba Onboarding-{guid}.docx" item using a chunked upload
/// session, recording the outcome in <c>oAuth.Status</c>.
/// </summary>
/// <param name="oAuth">Carries the Graph client, template stream, and status to fill.</param>
/// <param name="emailOrObjectId">User identifier passed through to <c>GetUploadSession</c>.</param>
/// <param name="fileNameOrObjectId">Unused here; kept for interface compatibility.</param>
/// <returns>The same <paramref name="oAuth"/> with Status 200/OK on success, 406 on failure.</returns>
public async Task<OAuthUserModel> UploadFileOneDrive(OAuthUserModel oAuth, string emailOrObjectId, string fileNameOrObjectId)
{
    try
    {
        string _newFileName = $"Prueba Onboarding-{Guid.NewGuid()}.docx";
        // where you want to save the file, with name
        var item = $"/Contratos/" + _newFileName;
        var uploadSession = await GetUploadSession(oAuth, item, emailOrObjectId);
        var maxChunkSize = 320 * 1024; // 320 KB - Change this to your chunk size. 5MB is the default.
        var provider = new ChunkedUploadProvider(uploadSession, oAuth.GraphService, oAuth.TemplateStream, maxChunkSize);

        // Setup the chunk request necessities
        var chunkRequests = provider.GetUploadChunkRequests();
        var readBuffer = new byte[maxChunkSize];
        var trackedExceptions = new List<Exception>();
        DriveItem itemResult = null;

        // upload the chunks
        foreach (var request in chunkRequests)
        {
            // Do your updates here: update progress bar, etc.
            // ...
            // Send chunk request
            var result = await provider.GetChunkRequestResponseAsync(request, readBuffer, trackedExceptions);
            if (result.UploadSucceeded)
            {
                itemResult = result.ItemResponse;
            }
        }

        // FIX: the method previously reported 200/OK even when no chunk response
        // ever completed the upload. Fail loudly so the catch sets status 406.
        if (itemResult == null)
        {
            throw new AggregateException("Upload did not complete.", trackedExceptions);
        }

        oAuth.Status.Code = 200;
        oAuth.Status.Message = "OK";
        return oAuth;
    }
    catch (Exception e)
    {
        oAuth.Status.Code = 406;
        oAuth.Status.Message = e.Message;
        return oAuth;
    }
}
/// <summary>
/// Synchronously uploads a large file to the given user's OneDrive via a
/// chunked upload session, logging progress to the console.
/// NOTE(review): this blocks on async Graph calls (sync-over-async); the
/// signature is kept for existing callers.
/// </summary>
/// <param name="_graphServiceClient">Authenticated Graph client.</param>
/// <param name="_fileToUpload">Local path of the file; also used as the drive item path.</param>
/// <param name="_userID">Target user's id for the Users collection.</param>
private static void UploadLarge(GraphServiceClient _graphServiceClient, string _fileToUpload, string _userID)
{
    DriveItem uploadedFile = null;

    // FIX: the FileStream was never disposed, leaking the file handle.
    using (FileStream fileStream = new FileStream(_fileToUpload, FileMode.Open))
    {
        UploadSession uploadSession = _graphServiceClient.Users[_userID]
            .Drive.Root
            .ItemWithPath(_fileToUpload)
            .CreateUploadSession()
            .Request().PostAsync().GetAwaiter().GetResult();

        if (uploadSession != null)
        {
            // Chunk size must be divisible by 320KiB
            int maxSizeChunk = (320 * 1024) * 4;
            ChunkedUploadProvider uploadProvider = new ChunkedUploadProvider(uploadSession, _graphServiceClient, fileStream, maxSizeChunk);
            var chunkRequests = uploadProvider.GetUploadChunkRequests();
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];
            foreach (var request in chunkRequests)
            {
                var result = uploadProvider.GetChunkRequestResponseAsync(request, exceptions).GetAwaiter().GetResult();
                Console.WriteLine($"Chunk!");
                if (result.UploadSucceeded)
                {
                    uploadedFile = result.ItemResponse;
                    Console.WriteLine($"Finished!");
                }
            }

            if (uploadedFile != null)
            {
                Console.WriteLine($"Uploaded file {_fileToUpload} to {uploadedFile.WebUrl}.");
            }
            else
            {
                Console.WriteLine($"Failure uploading {_fileToUpload}");
            }
        }
    }
}
/// <inheritdoc />
public async Task<bool> WriteAsync(ClaimsPrincipal user, string fileName, Stream contents)
{
    try
    {
        // Open a chunked upload session for the target path and stream the
        // whole content through it.
        var session = await graphServiceClient.Me.Drive.Root
            .ItemWithPath(fileName)
            .CreateUploadSession()
            .Request()
            .PostAsync();

        // TODO Provide progress details here?
        var provider = new ChunkedUploadProvider(session, graphServiceClient, contents);
        await provider.UploadAsync();

        return true;
    }
    catch (Exception ex)
    {
        // Best-effort write: log, surface a toast, and report failure.
        logger.LogError(ex, $"Failed to write OneDrive file {fileName} for user {user?.Identity?.Name} : {ex.Message}");
        toastService.ShowError(ex.Message);
        return false;
    }
}
/// <summary>
/// Take a file greater than 4MB and upload it to the service
/// </summary>
/// <param name="fileToUpload">The file that we want to upload</param>
/// <param name="uploadToSharePoint">Should we upload to SharePoint or OneDrive?</param>
/// <returns>The uploaded <see cref="DriveItem"/>, or null if no chunk response completed the upload.</returns>
public async Task<DriveItem> UploadLargeFile(string fileToUpload, bool uploadToSharePoint)
{
    DriveItem uploadedFile = null;

    // FIX: the FileStream was never disposed, leaking the file handle on every call.
    using (FileStream fileStream = new FileStream(fileToUpload, FileMode.Open))
    {
        UploadSession uploadSession = null;

        // Do we want OneDrive for Business/Consumer or do we want a SharePoint Site?
        if (uploadToSharePoint)
        {
            uploadSession = await _graphClient.Sites["root"].Drive.Root.ItemWithPath(fileToUpload).CreateUploadSession().Request().PostAsync();
        }
        else
        {
            uploadSession = await _graphClient.Me.Drive.Root.ItemWithPath(fileToUpload).CreateUploadSession().Request().PostAsync();
        }

        if (uploadSession != null)
        {
            // Chunk size must be divisible by 320KiB, our chunk size will be slightly more than 1MB
            int maxSizeChunk = (320 * 1024) * 4;
            ChunkedUploadProvider uploadProvider = new ChunkedUploadProvider(uploadSession, _graphClient, fileStream, maxSizeChunk);
            var chunkRequests = uploadProvider.GetUploadChunkRequests();
            var exceptions = new List<Exception>();
            var readBuffer = new byte[maxSizeChunk];
            foreach (var request in chunkRequests)
            {
                var result = await uploadProvider.GetChunkRequestResponseAsync(request, readBuffer, exceptions);
                if (result.UploadSucceeded)
                {
                    uploadedFile = result.ItemResponse;
                }
            }
        }
    }

    return uploadedFile;
}
/// <summary>
/// Replaces the content of an existing drive item, choosing a single PUT for
/// small streams and a chunked upload session above LARGE_FILE_THRESHOLD.
/// </summary>
/// <param name="root">Root used to resolve the drive context.</param>
/// <param name="target">Identifier of the item whose content is replaced.</param>
/// <param name="content">New content; its Length selects the upload strategy.</param>
/// <param name="progress">Optional progress sink for the small-file path.</param>
/// <param name="locatorResolver">Unused here; kept for interface compatibility.</param>
/// <returns>Always true (exceptions propagate on failure).</returns>
public async Task<bool> SetContentAsync(RootName root, FileId target, Stream content, IProgress<ProgressValue> progress, Func<FileSystemInfoLocator> locatorResolver)
{
    var context = await RequireContextAsync(root);

    var itemRequest = context.Client.Drive.Items[target.Value];
    var uploaded = default(Item);

    if (content.Length > LARGE_FILE_THRESHOLD)
    {
        // Large payload: stream through a chunked upload session.
        var session = await itemRequest.CreateSession().Request().PostAsync();
        var uploadProvider = new ChunkedUploadProvider(session, context.Client, content);
        uploaded = await ChunkedUploadAsync(uploadProvider, progress, RETRIES);
    }
    else
    {
        // Small payload: single PUT, optionally wrapped for progress reporting.
        var payload = progress != null ? new ProgressStream(content, progress) : content;
        uploaded = await AsyncFunc.RetryAsync<Item, ServiceException>(async () => await itemRequest.Content.Request().PutAsync<Item>(payload), RETRIES);
    }

    return true;
}
/// <summary>
/// Saves a byte payload as a drive item in the generated-documents drive
/// using a chunked upload session.
/// </summary>
/// <param name="filename">Item name; URL-encoded before being used as the drive path.</param>
/// <param name="file">Raw file contents to upload.</param>
/// <returns>The uploaded <see cref="DriveItem"/>.</returns>
public async Task<DriveItem> SaveFile(string filename, byte[] file)
{
    // https://github.com/OneDrive/onedrive-sdk-csharp/blob/master/docs/chunked-uploads.md
    // https://stackoverflow.com/questions/44625083/object-reference-not-set-to-an-object-while-file-upload-in-onedrive
    var drive = await GetGeneratedDocumentDriveAsync();
    var graphClient = _graphClientProvider.GetGraphServiceClient();

    var path = System.Net.WebUtility.UrlEncode(filename);
    var uploadSession = await graphClient.Drives[drive.Id].Root.ItemWithPath(path).CreateUploadSession().Request().PostAsync();

    // FIX: the stream was never disposed; also wrap the byte[] directly instead
    // of the Write + Seek dance (the constructor yields a readable stream at
    // position 0 without copying).
    using (var memoryStream = new MemoryStream(file))
    {
        var provider = new ChunkedUploadProvider(uploadSession, graphClient, memoryStream);
        var uploadedItem = await provider.UploadAsync();
        return uploadedItem;
    }
}