private DbFile UpdateByChunksTest(string initialContent, string updatedText, int sizeLimit, int chunkSize)
{
    using (new SystemAccount())
    using (new SizeLimitSwindler(this, sizeLimit))
    {
        var testRoot = CreateTestRoot();
        var file = new File(testRoot) { Name = "File1.file" };
        file.Binary.SetStream(RepositoryTools.GetStreamFromString(initialContent));
        file.Save();
        var fileId = file.Id;

        var chunks = SplitFile(updatedText, chunkSize, out var fullSize);

        file = Node.Load<File>(fileId);
        file.Save(SavingMode.StartMultistepSave);
        var token = BinaryData.StartChunk(fileId, fullSize);

        var offset = 0;
        foreach (var chunk in chunks)
        {
            BinaryData.WriteChunk(fileId, token, fullSize, chunk, offset);
            offset += chunkSize;
        }

        BinaryData.CommitChunk(fileId, token, fullSize);

        file = Node.Load<File>(fileId);
        file.FinalizeContent();

        // assert
        var dbFiles = BlobStoragePlatform.LoadDbFiles(file.VersionId);
        Assert.AreEqual(1, dbFiles.Length);
        var dbFile = dbFiles[0];

        if (NeedExternal(BlobStoragePlatform.ExpectedBlobProviderDataType, updatedText, sizeLimit))
        {
            Assert.AreEqual(BlobStoragePlatform.ExpectedExternalBlobProviderType.FullName, dbFile.BlobProvider);
            Assert.IsNotNull(dbFile.BlobProviderData);
        }
        else
        {
            Assert.IsNull(dbFile.BlobProvider);
            Assert.IsNull(dbFile.BlobProviderData);
        }
        Assert.AreEqual(false, dbFile.IsDeleted);
        Assert.AreEqual(false, dbFile.Staging);
        Assert.AreEqual(0, dbFile.StagingVersionId);
        Assert.AreEqual(0, dbFile.StagingPropertyTypeId);
        Assert.AreEqual(fullSize, dbFile.Size);

        return dbFile;
    }
}
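// A minimal sketch of the SplitFile helper referenced in the test above; the real
// test fixture defines its own version elsewhere. It assumes the text is UTF-8
// encoded (matching what RepositoryTools.GetStreamFromString writes) and that the
// chunks are plain byte arrays, as expected by BinaryData.WriteChunk.
private static List<byte[]> SplitFile(string text, int chunkSize, out long fullSize)
{
    var bytes = System.Text.Encoding.UTF8.GetBytes(text);
    fullSize = bytes.Length;

    var chunks = new List<byte[]>();
    for (var offset = 0; offset < bytes.Length; offset += chunkSize)
    {
        // the last chunk may be shorter than chunkSize
        var length = Math.Min(chunkSize, bytes.Length - offset);
        var chunk = new byte[length];
        Array.Copy(bytes, offset, chunk, 0, length);
        chunks.Add(chunk);
    }
    return chunks;
}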
private static void SavePackageToContent(int contentId)
{
    var chunkToken = BinaryData.StartChunk(contentId);
    long packageStreamSize = 0;

    using (var packageStream = GeneratePackageStream())
    {
        if (packageStream == null)
        {
            return;
        }

        packageStreamSize = packageStream.Length;
        if (packageStreamSize == 0)
        {
            return;
        }

        long savedByteCount = 0;
        long chunkSize = RepositoryConfiguration.BinaryChunkSize;

        while (savedByteCount < packageStreamSize)
        {
            // the last part may be smaller
            if (savedByteCount + chunkSize > packageStreamSize)
            {
                chunkSize = packageStreamSize - savedByteCount;
            }

            // always allocate a buffer of the correct size, instead of reusing one
            var buffer = new byte[chunkSize];
            packageStream.Read(buffer, 0, (int)chunkSize);

            BinaryData.WriteChunk(contentId, chunkToken, packageStreamSize, buffer, savedByteCount);
            savedByteCount += chunkSize;
        }
    }

    BinaryData.CommitChunk(contentId, chunkToken, packageStreamSize,
        binaryMetadata: new BinaryData { FileName = AgentManager.UPDATER_PACKAGENAME });
}
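// Note on the single Read call above: Stream.Read may return fewer bytes than
// requested, which is fine for an in-memory package stream but not for arbitrary
// streams. A minimal sketch of a fill-the-buffer loop that could replace it, under
// that assumption; ReadExactly is a hypothetical helper, not part of the API above.
private static void ReadExactly(Stream stream, byte[] buffer, int count)
{
    var read = 0;
    while (read < count)
    {
        var n = stream.Read(buffer, read, count - read);
        if (n == 0)
            throw new EndOfStreamException("Unexpected end of package stream.");
        read += n;
    }
}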
public static string FinalizeBlobUpload(Content content, string token, long fullSize, string fieldName = null, string fileName = null)
{
    if (string.IsNullOrEmpty(token))
    {
        throw new ArgumentNullException(nameof(token));
    }

    // In most cases this will be the file name, but in case of custom
    // binary fields it is possible to provide a different name.
    if (string.IsNullOrEmpty(fileName))
    {
        fileName = content.Name;
    }

    BinaryData.CommitChunk(content.Id, token, fullSize, fieldName,
        new BinaryData { FileName = new BinaryFileName(fileName) });

    // reload the content to have a fresh object after commit chunk
    return FinalizeContent(Content.Load(content.Id));
}
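// A minimal usage sketch for FinalizeBlobUpload, assuming the caller has already
// put the content into a multistep saving state and split the upload into byte[]
// chunks (see the SplitFile sketch above). The chunk token comes from
// BinaryData.StartChunk, as in the other examples in this section; UploadInChunks
// itself is a hypothetical wrapper, not part of the API shown here.
public static string UploadInChunks(Content content, IEnumerable<byte[]> chunks, long fullSize, string fileName)
{
    var token = BinaryData.StartChunk(content.Id, fullSize);

    long offset = 0;
    foreach (var chunk in chunks)
    {
        BinaryData.WriteChunk(content.Id, token, fullSize, chunk, offset);
        offset += chunk.Length;
    }

    // CommitChunk happens inside FinalizeBlobUpload, which also reloads the content.
    return FinalizeBlobUpload(content, token, fullSize, fileName: fileName);
}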
protected async System.Threading.Tasks.Task SaveFileToRepositoryAsync(Content uploadedContent, Content parent, string token,
    bool mustFinalize, bool mustCheckIn, IFormFile file, CancellationToken cancellationToken)
{
    if (uploadedContent.ContentHandler.Locked && uploadedContent.ContentHandler.LockedBy.Id != User.Current.Id)
    {
        throw new Exception(SenseNetResourceManager.Current.GetString("Action", "UploadExceptionLocked"));
    }

    if (UseChunk)
    {
        // get bytes from the uploaded stream
        byte[] chunkData;
        using (var br = new BinaryReader(file.OpenReadStream()))
        {
            chunkData = br.ReadBytes(ChunkLength);
        }

        // save chunk
        BinaryData.WriteChunk(uploadedContent.Id, token, FileLength, chunkData, ChunkStart, PropertyName);

        // the last chunk should commit the process
        if (ChunkStart + ChunkLength == FileLength)
        {
            BinaryData.CommitChunk(uploadedContent.Id, token, FileLength, PropertyName, CreateBinaryData(file, false));

            // finalize only if the multistep save was started by this process
            if (mustFinalize || mustCheckIn)
            {
                uploadedContent = await Content.LoadAsync(uploadedContent.Id, cancellationToken).ConfigureAwait(false);

                SetPreviewGenerationPriority(uploadedContent);

                uploadedContent.FinalizeContent();
            }
        }
    }
    else
    {
        if (uploadedContent.IsNew || uploadedContent.ContentHandler.SavingState == ContentSavingState.Finalized)
        {
            var binData = CreateBinaryData(file);
            uploadedContent[PropertyName] = binData;
            uploadedContent.Save();
        }
        else
        {
            // Workaround for small existing content, in case the user started
            // a multistep saving process manually: save the whole binary in one chunk
            // (we cannot execute a real content Save here to avoid messing with the saving state).
            string chunkToken;
            byte[] chunkData;
            using (var inputStream = file.OpenReadStream())
            {
                var length = inputStream.Length;
                chunkToken = BinaryData.StartChunk(uploadedContent.Id, length, PropertyName);

                using (var br = new BinaryReader(inputStream))
                {
                    chunkData = br.ReadBytes(Convert.ToInt32(length));
                }
            }

            // save everything in one chunk and commit the process
            BinaryData.WriteChunk(uploadedContent.Id, chunkToken, chunkData.Length, chunkData, 0, PropertyName);
            BinaryData.CommitChunk(uploadedContent.Id, chunkToken, chunkData.Length, PropertyName, CreateBinaryData(file, false));

            if (mustFinalize && uploadedContent.ContentHandler.SavingState != ContentSavingState.Finalized)
            {
                uploadedContent.FinalizeContent();
            }
        }

        // check in only if the content was created or checked out by this process
        if (uploadedContent.ContentHandler.Locked && mustCheckIn)
        {
            uploadedContent.CheckIn();
        }
    }
}
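// A minimal sketch of the CreateBinaryData helper referenced above; the real
// handler likely fills in more metadata. It uses only members seen in this
// section (BinaryData.FileName, SetStream) plus IFormFile.FileName and
// IFormFile.ContentType; the ContentType property on BinaryData is an assumption.
protected BinaryData CreateBinaryData(IFormFile file, bool setStream = true)
{
    var binaryData = new BinaryData
    {
        FileName = new BinaryFileName(file.FileName),
        ContentType = file.ContentType
    };

    // When the upload is chunked, the bytes are written separately through
    // BinaryData.WriteChunk, so only the metadata is needed here (setStream: false).
    if (setStream)
        binaryData.SetStream(file.OpenReadStream());

    return binaryData;
}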