/// <summary>
/// Completes the upload and returns the resulting storage file.
/// If no large-file session was started, the whole buffer is sent as a single
/// small-file upload; otherwise the large-file session is finished with the
/// accumulated part SHA1 hashes. Safe to call after Flush already finalized.
/// </summary>
public override StorageFile FinalizeUpload()
{
    // Already finalized (e.g. by Flush when the last part was < 5MB).
    if (_finish != null)
    {
        return B2StorageFile.Create(_finish);
    }

    if (_start == null)
    {
        // No large-file session: everything fit in the buffer, upload in one shot.
        var bytes = _buffer.ToArray();
        _finish = _client.Files.Upload(bytes, _fileName, _bucketId, _fileInfo).Result;
    }
    else
    {
        try
        {
            _finish = _client.LargeFiles.FinishLargeFile(_start.FileId, _shas.ToArray()).Result;
        }
        catch (Exception)
        {
            // Best-effort cancel so the incomplete large file does not linger in B2.
            _client.LargeFiles.CancelLargeFile(_start.FileId).Wait();
            throw;
        }
    }

    return B2StorageFile.Create(_finish);
}
/// <summary>
/// Starts a large file, uploads two parts, and verifies that
/// ListPartsForIncompleteFile returns both uploaded parts.
/// </summary>
public void LargeFileUploadIncompleteGetPartsTest()
{
    var fileName = "B2LargeFileTest.txt";
    var parts = new List<byte[]>();
    var listParts = new B2LargeFileParts();

    // Read the file in 5MB chunks (B2's minimum part size).
    using (FileStream fileStream = File.OpenRead(Path.Combine(FilePath, fileName)))
    using (var stream = new StreamReader(fileStream))
    {
        while (stream.Peek() >= 0)
        {
            var c = new char[1024 * (5 * 1024)];
            stream.Read(c, 0, c.Length);
            parts.Add(Encoding.UTF8.GetBytes(c));
        }
    }

    B2File start = null;
    try
    {
        start = Client.LargeFiles.StartLargeFile(fileName, "", TestBucket.BucketId).Result;
        for (int i = 0; i < 2; i++)
        {
            var uploadUrl = Client.LargeFiles.GetUploadPartUrl(start.FileId).Result;
            var part = Client.LargeFiles.UploadPart(parts[i], i + 1, uploadUrl).Result;
        }

        // Now we can list parts and get a result
        listParts = Client.LargeFiles.ListPartsForIncompleteFile(start.FileId, 1, 100).Result;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
    finally
    {
        // Clean up. Guard against StartLargeFile itself having failed.
        if (start != null)
        {
            FilesToDelete.Add(start);
        }
    }

    Assert.AreEqual(2, listParts.Parts.Count, "List of parts did not return expected amount of parts.");
}
/// <summary>
/// Starts a large file, uploads two parts without finishing, and verifies
/// that ListIncompleteFiles reports exactly one unfinished file.
/// </summary>
public void LargeFileIncompleteListTest()
{
    var fileName = "B2LargeFileTest.txt";
    var parts = new List<byte[]>();
    var fileList = new B2IncompleteLargeFiles();

    // Read the file in 5MB chunks (B2's minimum part size).
    using (FileStream fileStream = File.OpenRead(Path.Combine(FilePath, fileName)))
    using (var stream = new StreamReader(fileStream))
    {
        while (stream.Peek() >= 0)
        {
            var c = new char[1024 * (5 * 1024)];
            stream.Read(c, 0, c.Length);
            parts.Add(Encoding.UTF8.GetBytes(c));
        }
    }

    B2File start = null;
    try
    {
        start = Client.LargeFiles.StartLargeFile(fileName, "", TestBucket.BucketId).Result;
        for (int i = 0; i < 2; i++)
        {
            var uploadUrl = Client.LargeFiles.GetUploadPartUrl(start.FileId).Result;
            var part = Client.LargeFiles.UploadPart(parts[i], i + 1, uploadUrl).Result;
        }

        // Now we can list parts and get a result
        fileList = Client.LargeFiles.ListIncompleteFiles(TestBucket.BucketId).Result;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
    finally
    {
        // Cancel the incomplete file; guard against StartLargeFile itself having failed
        // (the original dereferenced start.FileId unconditionally → NRE on setup failure).
        if (start != null)
        {
            var cancelledFile = Client.LargeFiles.CancelLargeFile(start.FileId).Result;
        }
    }

    Assert.AreEqual(1, fileList.Files.Count, "Incomplete file list count does not match what we expected.");
}
/// <summary>
/// Uploads a file via the large-file API (start, upload all parts, finish)
/// and verifies the finished file id matches the started session's id.
/// </summary>
public void LargeFileUploadTest()
{
    var fileName = "B2LargeFileTest.txt";
    var parts = new List<byte[]>();
    var shas = new List<string>();

    // Read the file in 5MB chunks (B2's minimum part size).
    using (FileStream fileStream = File.OpenRead(Path.Combine(FilePath, fileName)))
    using (var stream = new StreamReader(fileStream))
    {
        while (stream.Peek() >= 0)
        {
            var c = new char[1024 * (5 * 1024)];
            stream.Read(c, 0, c.Length);
            parts.Add(Encoding.UTF8.GetBytes(c));
        }
    }

    // FinishLargeFile requires the SHA1 of every part, in order.
    foreach (var part in parts)
    {
        shas.Add(Utilities.GetSHA1Hash(part));
    }

    B2File start = null;
    B2File finish = null;
    try
    {
        start = Client.LargeFiles.StartLargeFile(fileName, "", TestBucket.BucketId).Result;
        for (int i = 0; i < parts.Count; i++)
        {
            var uploadUrl = Client.LargeFiles.GetUploadPartUrl(start.FileId).Result;
            var part = Client.LargeFiles.UploadPart(parts[i], i + 1, uploadUrl).Result;
        }

        finish = Client.LargeFiles.FinishLargeFile(start.FileId, shas.ToArray()).Result;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
    finally
    {
        // Clean up. Guard against StartLargeFile itself having failed.
        if (start != null)
        {
            FilesToDelete.Add(start);
        }
    }

    Assert.AreEqual(start.FileId, finish.FileId, "File Ids did not match.");
}
/// <summary>
/// Maps a <see cref="B2File"/> onto a <see cref="StorageFile"/>.
/// Returns null when <paramref name="file"/> is null.
/// </summary>
public static StorageFile Create(B2File file)
{
    return file == null
        ? null
        : new StorageFile
        {
            FileId = file.FileId,
            FileName = file.FileName,
            Metadata = file.FileInfo
        };
}
/// <summary>
/// Reconciles a local file with a file record already present in B2,
/// deciding whether to link the cloud record into the local db entry or
/// queue the file for (re)upload.
/// </summary>
/// <param name="b2Db">Local database of tracked file objects.</param>
/// <param name="filePath">Absolute path of the local file.</param>
/// <param name="existingB2File">The matching B2 file record found in the cloud.</param>
/// <param name="xhash">Fast local content hash of the file (presumably xxHash — TODO confirm).</param>
/// <param name="parentPath">Local root folder of this sync run.</param>
/// <param name="b2Path">Destination prefix inside the bucket.</param>
private async Task HandleIfExistsInCloud(B2Db b2Db, string filePath, B2File existingB2File, ulong xhash, string parentPath, string b2Path)
{
    // try find local object in db
    FileObject existingFileObject = b2Db.GetFileObject(filePath);

    // Case 1: db entry exists AND already knows this cloud file id.
    if (existingFileObject != null && existingFileObject.B2Files.ContainsKey(existingB2File.FileId))
    {
        // local object xhash diff from cloud, then mark this for upload
        if (existingFileObject.Xhash != xhash)
        {
            existingFileObject.Xhash = xhash;
            UploadList.Add(new UploadObject(existingFileObject, parentPath, b2Path));
        }
    }
    // if existing file object is available but b2file object is not available
    else if (existingFileObject != null && !existingFileObject.B2Files.ContainsKey(existingB2File.FileId))
    {
        // update b2file object and does not need to reupload.
        // "none" appears to denote files without a reported SHA (e.g. large files) — confirm;
        // otherwise compare local SHA1 to the cloud SHA1, stripping B2's "unverified:" prefix.
        if (existingB2File.ContentSHA1.Equals("none") || Utils.FilepathToSha1Hash(filePath).Equals(existingB2File.ContentSHA1.Replace("unverified:", ""), StringComparison.InvariantCultureIgnoreCase))
        {
            existingFileObject.B2Files.Add(existingB2File.FileId, existingB2File);
            existingFileObject.DateModified = DateTime.Now;
            await b2Db.UpdateFileObject(existingFileObject);
        }
        else // mark this object to upload if diff sha
        {
            existingFileObject.Xhash = xhash;
            UploadList.Add(new UploadObject(existingFileObject, parentPath, b2Path));
        }
    }
    else
    {
        // local not available, so check it's sha1hash when same add to db or mark to upload
        FileObject tempFileObject = new FileObject(filePath, xhash);
        if (existingB2File.ContentSHA1.Equals("none") || Utils.FilepathToSha1Hash(filePath).Equals(existingB2File.ContentSHA1.Replace("unverified:", ""), StringComparison.InvariantCultureIgnoreCase))
        {
            // Content matches the cloud copy: record the link, no upload needed.
            tempFileObject.B2Files.Add(existingB2File.FileId, existingB2File);
            await b2Db.Add(tempFileObject);
        }
        else
        {
            // Content differs: queue for upload (db entry is created after a successful upload).
            UploadList.Add(new UploadObject(tempFileObject, parentPath, b2Path));
        }
    }
}
/// <summary>
/// Uploads the locally-stored database <paramref name="localDb" /> to the bucket that <paramref name="client" /> has access to.
/// </summary>
/// <param name="client">The <see cref="B2Client" /> created by <see cref="GetClient" /> with access to a bucket to upload to.</param>
/// <param name="localDb">The local database to upload.</param>
/// <returns><see langword="true" /> if the upload was successful, or <see langword="false" /> otherwise.</returns>
public static async Task<bool> UploadDbAsync(B2Client client, PwDatabase localDb)
{
    if (client == null)
    {
        return false;
    }

    Interface.UpdateStatus("Uploading database...");

    // Read the whole database file into memory.
    // (File.OpenRead always yields a readable stream, so no CanRead check is needed.)
    string localPath = localDb.IOConnectionInfo.Path;
    byte[] fileData;
    using (FileStream fs = File.OpenRead(localPath))
    using (MemoryStream ms = new MemoryStream())
    {
        await fs.CopyToAsync(ms);
        fileData = ms.ToArray();
    }

    try
    {
        B2UploadUrl uploadUrl = await client.Files.GetUploadUrl(client.Capabilities.BucketId);
        await client.Files.Upload(fileData, Path.GetFileName(localPath), uploadUrl, true, client.Capabilities.BucketId);
    }
    // Exception filter replaces the original exact-type array lookup; `is` also
    // covers derived exception types, which the GetType() comparison missed.
    catch (Exception e) when (e is SocketException
                              || e is WebException
                              || e is HttpRequestException
                              || e is AggregateException
                              || e is InvalidOperationException)
    {
        Interface.UpdateStatus("Unable to upload the database to B2.");
        return false;
    }

    Interface.UpdateStatus("Database upload successful.");
    return true;
}
/// <summary>
/// Deletes the file version with the given id, if it still exists.
/// A failing GetInfo lookup is treated as "nothing to delete".
/// </summary>
public async Task DeleteFile(string fileId)
{
    B2File info;
    try
    {
        info = await _client.Files.GetInfo(fileId);
    }
    catch
    {
        // Lookup failed — assume the file is already gone and skip deletion.
        info = null;
    }

    if (info != null)
    {
        await _client.Files.Delete(fileId, info.FileName);
    }
}
/// <summary>
/// Uploads a file via B2's large-file API: starts a session, uploads up to
/// 4 parts concurrently (each part SHA1-hashed for the finish call), then
/// finishes the file with the SHA1 list ordered by part number.
/// </summary>
/// <param name="uploadObject">Wrapper holding the local file to upload.</param>
/// <param name="b2Path">Destination path inside the bucket.</param>
/// <param name="contentType">MIME type to store with the file.</param>
/// <returns>The finished <see cref="B2File"/> record.</returns>
private async Task<B2File> UploadLargeFile(UploadObject uploadObject, string b2Path, string contentType)
{
    ConcurrentDictionary<int, string> sha1ByPart = new ConcurrentDictionary<int, string>();

    // Compute part boundaries. The stream is now disposed (the original leaked it).
    // NOTE(review): assumes GetUploadPartList does not retain the stream — confirm.
    List<UploadParts> uploadParts;
    using (FileStream partListStream = new FileStream(uploadObject.FileObject.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        uploadParts = GetUploadPartList(partListStream);
    }

    B2File largeFile = await Client.LargeFiles.StartLargeFile(b2Path, contentType);

    // Upload up to 4 parts at a time; each task opens its own stream on its slice.
    await uploadParts.ForEachAsync(4, async uploadPart =>
    {
        try
        {
            byte[] currentBytes = new byte[uploadPart.CurrentBytes];
            using (FileStream currentStream = new FileStream(uploadObject.FileObject.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            {
                currentStream.Seek(uploadPart.LastBytes, SeekOrigin.Begin);
                currentStream.Read(currentBytes, 0, currentBytes.Length);
            }

            B2UploadPartUrl uploadPartUrl = await Client.LargeFiles.GetUploadPartUrl(largeFile.FileId);
            await Client.LargeFiles.UploadPart(currentBytes, uploadPart.PartNumber, uploadPartUrl);
            sha1ByPart.TryAdd(uploadPart.PartNumber, Utils.BytesToSha1Hash(currentBytes));
        }
        catch (Exception e)
        {
            // NOTE(review): a failed part is only logged; the FinishLargeFile call
            // below will then fail on the missing SHA — consider failing fast here.
            Log.Error(e.ToString());
        }
    });

    // B2 expects the SHA1s ordered by 1-based part number.
    string[] sortedSha1List = new string[sha1ByPart.Count];
    for (int i = 0; i < sha1ByPart.Count; i++)
    {
        sortedSha1List[i] = sha1ByPart[i + 1];
    }

    return await Client.LargeFiles.FinishLargeFile(largeFile.FileId, sortedSha1List);
}
/// <summary>
/// Builds a <see cref="B2File"/> from a B2 download response: standard
/// X-Bz-* headers, per-file "X-Bz-Info-*" metadata, content length, and the
/// downloaded body bytes.
/// </summary>
/// <param name="response">The HTTP response from a B2 download call.</param>
/// <returns>The populated file record, including its data.</returns>
private async Task<B2File> ParseDownloadResponse(HttpResponseMessage response)
{
    Utilities.CheckForErrors(response, _api);

    var file = new B2File();
    IEnumerable<string> values;
    if (response.Headers.TryGetValues("X-Bz-Content-Sha1", out values))
    {
        file.ContentSHA1 = values.First();
    }
    if (response.Headers.TryGetValues("X-Bz-File-Name", out values))
    {
        // Decode file name (B2 URL-encodes it in the header).
        file.FileName = values.First().b2UrlDecode();
    }
    if (response.Headers.TryGetValues("X-Bz-Id", out values))
    {
        file.FileId = values.First();
    }

    // File info headers: "X-Bz-Info-<key>" → metadata entry "<key>".
    // StartsWith (not Contains) matches the prefix-strip below; header names
    // are compared case-insensitively per HTTP convention.
    var infoData = new Dictionary<string, string>();
    foreach (var fileInfo in response.Headers.Where(h => h.Key.StartsWith("x-bz-info-", StringComparison.OrdinalIgnoreCase)))
    {
        // Substring strips the 10-character "x-bz-info-" prefix.
        infoData.Add(fileInfo.Key.Substring(10), fileInfo.Value.First());
    }
    file.FileInfo = infoData;

    if (response.Content.Headers.ContentLength.HasValue)
    {
        file.Size = response.Content.Headers.ContentLength.Value;
    }

    file.FileData = await response.Content.ReadAsByteArrayAsync();
    // Direct return; the original's `await Task.FromResult(file)` was redundant.
    return file;
}
/// <summary>
/// Uploads a temporary file.
/// </summary>
/// <param name="fileData">The file data.</param>
/// <param name="fileName">Name of the file.</param>
/// <returns>The uploaded file's locations, or null when the server-reported SHA1 does not match the local hash.</returns>
public static async Task<File> UploadTemporaryFileAsync(byte[] fileData, string fileName)
{
    await Authorize();

    B2UploadUrl uploadUrl = await _TempBucketClient.Files.GetUploadUrl();
    string sha1Hash = Utilities.GetSHA1Hash(fileData);
    B2File uploadedFile = await _TempBucketClient.Files.Upload(fileData, fileName, uploadUrl);

    // Hex digests may differ only in letter case depending on the producer,
    // so compare case-insensitively (the original's `==` was case-sensitive).
    if (string.Equals(sha1Hash, uploadedFile.ContentSHA1, StringComparison.OrdinalIgnoreCase))
    {
        string fullUrl = $"{Constants.BackblazeCDN}/file/{SettingsManager.Configuration.BackblazeTempBucket.BucketName}/{uploadedFile.FileName}";
        string shortUrl = await Http.ShortenUrl(fullUrl);
        return new File(SettingsManager.Configuration.BackblazeTempBucket.BucketName, uploadedFile.FileName, fullUrl, shortUrl);
    }

    // Integrity check failed — caller receives null.
    return null;
}
/// <summary>
/// Sends the currently buffered bytes to B2 as the next part of a large-file
/// upload. The first flush lazily starts the large-file session. A flush of
/// fewer than <c>MinPartSize</c> bytes is treated as the final part and
/// finalizes the upload; no further flushes are allowed after that.
/// </summary>
/// <exception cref="InvalidOperationException">The upload was already finalized.</exception>
public override void Flush()
{
    if (_finish != null)
    {
        throw new InvalidOperationException(
            "Cannot flush after last part has been written. Last part gets sent on the first flush of <5MB");
    }
    // Lazily start the large-file session on the first flush.
    if (_start == null)
    {
        _start = _client.LargeFiles.StartLargeFile(_fileName, "", _bucketId, _fileInfo).Result;
    }
    var partSize = _buffer.Length;
    // Nothing buffered — nothing to send.
    if (partSize == 0)
    {
        return;
    }
    var partBytes = _buffer.ToArray();
    // Record this part's SHA1 for the eventual FinishLargeFile call,
    // then swap in a fresh buffer before the (possibly failing) upload.
    _shas.Add(GetSHA1Hash(partBytes));
    _buffer.Dispose();
    _buffer = new MemoryStream();
    try
    {
        var uploadUrl = _client.LargeFiles.GetUploadPartUrl(_start.FileId).Result;
        _client.LargeFiles.UploadPart(partBytes, _partCount + 1, uploadUrl).Wait();
        // An undersized part can only be the final one, so finish the upload now.
        if (partSize < MinPartSize)
        {
            FinalizeUpload();
        }
    }
    catch (Exception)
    {
        // Abort the session so the incomplete large file does not linger in B2.
        _client.LargeFiles.CancelLargeFile(_start.FileId).Wait();
        throw;
    }
}
/// <summary>
/// Uploads a file via the large-file API using byte-exact part slicing
/// (last part may be smaller than the 5MB minimum) and verifies the finished
/// file id matches the started session's id.
/// </summary>
public void LargeFileUploadTest()
{
    var fileName = "B2LargeFileTest.txt";
    var parts = new List<byte[]>();
    var shas = new List<string>();
    long minPartSize = 1024 * (5 * 1024);

    // Slice the file into 5MB parts; the final part gets the remainder.
    using (FileStream fileStream = File.OpenRead(Path.Combine(FilePath, fileName)))
    {
        long fileSize = fileStream.Length;
        long totalBytesParted = 0;
        while (totalBytesParted < fileSize)
        {
            var partSize = minPartSize;
            // If last part is less than min part size, get that length
            if (fileSize - totalBytesParted < minPartSize)
            {
                partSize = fileSize - totalBytesParted;
            }

            var c = new byte[partSize];
            fileStream.Seek(totalBytesParted, SeekOrigin.Begin);
            fileStream.Read(c, 0, c.Length);
            parts.Add(c);
            totalBytesParted += partSize;
        }
    }

    // FinishLargeFile requires the SHA1 of every part, in order.
    foreach (var part in parts)
    {
        shas.Add(Utilities.GetSHA1Hash(part));
    }

    B2File start = null;
    B2File finish = null;
    try
    {
        start = Client.LargeFiles.StartLargeFile(fileName, "", TestBucket.BucketId).Result;
        for (int i = 0; i < parts.Count; i++)
        {
            var uploadUrl = Client.LargeFiles.GetUploadPartUrl(start.FileId).Result;
            var part = Client.LargeFiles.UploadPart(parts[i], i + 1, uploadUrl).Result;
        }

        finish = Client.LargeFiles.FinishLargeFile(start.FileId, shas.ToArray()).Result;
    }
    catch (Exception e)
    {
        // Cancel the session so the incomplete file doesn't linger. Guard against
        // StartLargeFile itself having failed, and actually wait for the cancel
        // (the original fire-and-forgot the task).
        if (start != null)
        {
            Client.LargeFiles.CancelLargeFile(start.FileId).Wait();
        }
        Console.WriteLine(e);
        throw;
    }

    // Clean up.
    FilesToDelete.Add(start);

    Assert.AreEqual(start.FileId, finish.FileId, "File Ids did not match.");
}
/// <summary>
/// Uploads every queued file to B2 with ProcessorCount-way concurrency.
/// Files under 100MB go through the simple upload; larger files use the
/// large-file API. Successful uploads are recorded back into the local db;
/// failures are queued on <c>RetryList</c>.
/// </summary>
public async Task UploadToB2()
{
    // NOTE(review): result unused; call kept in case GetDbFileObjects primes
    // the db cache — confirm whether it can be removed.
    IDictionary<string, FileObject> dbFileObjects = B2Db.GetDbFileObjects();

    if (UploadList.Count == 0)
    {
        Log.Information("No local changes | Nothing to upload.");
        return;
    }

    Log.Information("Upload started");
    Log.Information("Total files to upload = {0}", UploadList.Count);

    Stopwatch stopWatch = Stopwatch.StartNew();
    int currentIndex = 0;

    await UploadList.ForEachAsync(Environment.ProcessorCount, async uploadObject =>
    {
        try
        {
            B2File file = null;
            string b2Path = Utils.GetB2Filename(uploadObject.FileObject.FilePath, uploadObject.ParentPath, uploadObject.B2Path);
            string contentType = MimeTypeMap.GetMimeType(Path.GetExtension(uploadObject.FileObject.FilePath));
            Log.Verbose("File = {0} | B2 Path = {1}", uploadObject.FileObject.FilePath, "/" + b2Path);

            FileInfo fileInfo = new FileInfo(uploadObject.FileObject.FilePath);
            if (fileInfo.Length < (1024 * 1024 * 100)) // under 100MB → simple upload
            {
                file = await UploadSmallFile(uploadObject, b2Path, contentType);
            }
            else
            {
                file = await UploadLargeFile(uploadObject, b2Path, contentType);
            }

            if (file != null)
            {
                uploadObject.FileObject.B2Files.Add(file.FileId, file);
                uploadObject.FileObject.DateModified = DateTime.Now;
                await B2Db.UpdateFileObject(uploadObject.FileObject);
            }

            // This lambda runs concurrently, so the counter must be incremented
            // atomically (the original `++currentIndex` raced and could lose counts).
            int done = System.Threading.Interlocked.Increment(ref currentIndex);
            Console.Write("Progress = {2}% [{0}/{1}]\t\t\t\r",
                done,
                UploadList.Count,
                Math.Round((Convert.ToDouble(done) / Convert.ToDouble(UploadList.Count)) * 100));
        }
        catch (Exception e)
        {
            RetryList.Add(uploadObject);
            // Include the exception so the failure cause is not silently dropped.
            Log.Error(e, "Error file = {0}", uploadObject.FileObject.FilePath);
        }
    });

    Log.Information("Upload finished | Execution time = {0} seconds | Error = {1}", stopWatch.Elapsed.TotalSeconds, RetryList.Count);
}