/// <summary>
/// Builds an HTTP response that streams the content of the given blob,
/// optionally marking it as a download attachment.
/// </summary>
/// <param name="formatBlobId">Identifier of the blob to stream.</param>
/// <param name="fileName">When not null, sent to the client as the attachment file name.</param>
/// <returns>404 when the blob does not exist, otherwise 200 with the blob content.</returns>
HttpResponseMessage StreamFile(BlobId formatBlobId, String fileName = null)
{
    var blobDescriptor = _blobStore.GetDescriptor(formatBlobId);
    if (blobDescriptor == null)
    {
        var notFound = Request.CreateErrorResponse(
            HttpStatusCode.NotFound,
            string.Format("File {0} not found", formatBlobId));
        return notFound;
    }

    var response = Request.CreateResponse(HttpStatusCode.OK);
    // StreamContent takes ownership of the stream and disposes it with the response.
    var content = new StreamContent(blobDescriptor.OpenRead());
    content.Headers.ContentType = new MediaTypeHeaderValue(blobDescriptor.ContentType);
    if (fileName != null)
    {
        var disposition = new ContentDispositionHeaderValue("attachment");
        disposition.FileName = fileName;
        content.Headers.ContentDisposition = disposition;
    }
    response.Content = content;
    return response;
}
/// <summary>
/// Projects a DocumentDescriptorCreated event into the stream read model:
/// one DocumentCreated entry for the originating handle, plus one
/// DocumentHasNewFormat entry per (handle, format) pair of the descriptor.
/// </summary>
/// <param name="e">The creation event; carries handle info, blob id and aggregate id.</param>
public void On(DocumentDescriptorCreated e)
{
    _streamReadModelCollection.Insert(e, new StreamReadModel()
    {
        Id = GetNewId(),
        Handle = e.HandleInfo.Handle,
        EventType = HandleStreamEventTypes.DocumentCreated,
    });

    //Now doc is not duplicated anymore, we should generate format added to document events.
    var doc = _documentDescriptorReadModel
        .AllUnsorted
        .SingleOrDefault(r => r.Id == e.AggregateId);

    // FIX: the original dereferenced doc without a null check; SingleOrDefault
    // returns null when the descriptor read model has not been projected yet.
    if (doc == null || doc.Documents == null || !doc.Documents.Any())
    {
        return; //no handle in this document descriptor
    }

    var descriptor = _blobStore.GetDescriptor(e.BlobId);
    foreach (var handle in doc.Documents)
    {
        var handleReadModel = _documentWriter.FindOneById(handle);
        foreach (var format in doc.Formats)
        {
            _streamReadModelCollection.Insert(e, new StreamReadModel()
            {
                Id = GetNewId(),
                Handle = handle,
                Filename = descriptor.FileNameWithExtension,
                DocumentDescriptorId = (DocumentDescriptorId)e.AggregateId,
                FormatInfo = new FormatInfo()
                {
                    BlobId = e.BlobId,
                    DocumentFormat = format.Key,
                    // Formats created outside a pipeline carry PipelineId.Null;
                    // normalize those to the conventional "original" pipeline.
                    PipelineId = format.Value.PipelineId != PipelineId.Null
                        ? format.Value.PipelineId
                        : new PipelineId("original"),
                },
                EventType = HandleStreamEventTypes.DocumentHasNewFormat,
                // FIX: FindOneById may return null (projection race); avoid NRE.
                DocumentCustomData = handleReadModel?.CustomData,
            });
        }
    }
}
/// <summary>
/// Maintains per-extension document statistics: when a descriptor is
/// initialized and its blob exists, counts one more file and adds its size.
/// </summary>
/// <param name="e">The initialization event; carries the blob id and file name info.</param>
public void On(DocumentDescriptorInitialized e)
{
    var blobDescriptor = _blobStore.GetDescriptor(e.BlobId);
    if (blobDescriptor == null)
    {
        return;
    }

    var extension = e.HandleInfo.FileName.Extension;
    _collection.Upsert(
        e,
        extension,
        () => new DocumentStats()
        {
            Files = 1,
            Bytes = blobDescriptor.Length
        },
        stats =>
        {
            stats.Files++;
            stats.Bytes += blobDescriptor.Length;
        });
}
/// <summary>
/// Logs the next conversion step when a new format is added to a document
/// descriptor. Skipped entirely during event replay.
/// </summary>
/// <param name="e">The event carrying the blob id of the new format.</param>
public void On(FormatAddedToDocumentDescriptor e)
{
    if (IsReplay)
    {
        return;
    }

    var descriptor = _blobStore.GetDescriptor(e.BlobId);
    // FIX: GetDescriptor can return null when the blob is missing (other
    // handlers in this file check for it); guard the dereference so a
    // missing blob cannot crash this handler just to emit a debug line.
    Logger.DebugFormat(
        "Next conversion step for document {0} {1}",
        e.BlobId,
        descriptor?.FileNameWithExtension);
}
public async Task verify_de_duplication_delete_original_blob()
{
    DateTime now = DateTime.UtcNow.AddDays(+30);
    var handleA = new DocumentHandle("handleA");
    var handleB = new DocumentHandle("handleB");

    // Upload the same file twice so the second blob gets de-duplicated.
    await _documentStoreClient.UploadAsync(TestConfig.PathToDocumentPdf, handleA);
    await _documentStoreClient.UploadAsync(TestConfig.PathToDocumentPdf, handleB);

    // wait background projection polling
    await UpdateAndWaitAsync().ConfigureAwait(false);

    // Pretend 30 days have passed so the cleanup job considers the
    // de-duplicated blob eligible for removal.
    using (DateTimeService.Override(() => now))
    {
        //now we need to wait cleanupJobs to start
        ExecuteCleanupJob();
    }

    // Verify the surviving blob is still resolvable and the duplicate is gone.
    Assert.That(_blobStore.GetDescriptor(new BlobId("original.1")), Is.Not.Null);
    Assert.Throws<Exception>(() => _blobStore.GetDescriptor(new BlobId("original.2")));
}
/// <summary>
/// Configures the substitutes so that any handle lookup returns a known
/// document read model and any blob lookup returns a stub descriptor
/// whose file name is "test.txt".
/// </summary>
private void SetHandleToReturn()
{
    var customData = new DocumentCustomData()
    {
        { "handle1", "test" },
        { "handle2", new { isComplex = true, theTruth = 42 } },
    };

    handle = new DocumentReadModel(
        new DocumentHandle("rev_1"),
        new DocumentDescriptorId(1),
        new FileNameWithExtension("test.txt"),
        customData);

    _handleWriter
        .FindOneById(Arg.Any<DocumentHandle>())
        .Returns(handle);

    var descriptorStub = Substitute.For<IBlobDescriptor>();
    descriptorStub.FileNameWithExtension.Returns(new FileNameWithExtension("test.txt"));
    _blobStore.GetDescriptor(Arg.Any<BlobId>()).Returns(descriptorStub);
}
/// <summary>
/// Core upload flow: reads the file from the current request content,
/// creates a new document descriptor for it and answers with the handle,
/// hash and public URI of the stored file.
/// </summary>
/// <param name="tenantId">Tenant that owns the document; used to build the returned URI.</param>
/// <param name="handle">Handle the new document will be registered under.</param>
/// <param name="fatherHandle">Different from null only when you want to upload
/// a document that is an attachment of another document</param>
/// <param name="fatherHandleDescriptorId">Descriptor id that contains reference
/// to <paramref name="fatherHandle"/></param>
/// <returns>400 with the error message when the upload fails, otherwise 200
/// with the uploaded-document response payload.</returns>
private async Task <HttpResponseMessage> InnerUploadDocument(
    TenantId tenantId,
    DocumentHandle handle,
    DocumentHandle fatherHandle,
    DocumentDescriptorId fatherHandleDescriptorId)
{
    var documentId = _identityGenerator.New <DocumentDescriptorId>();
    Logger.DebugFormat("Incoming file {0}, assigned {1}", handle, documentId);

    // NOTE(review): UploadFromHttpContent appears to populate _blobId,
    // _fileName and _customData as side effects (they are read below without
    // being assigned here) — confirm before reordering these statements.
    var errorMessage = await UploadFromHttpContent(Request.Content);
    Logger.DebugFormat("File {0} processed with message {1}", _blobId, errorMessage ?? "OK");

    if (errorMessage != null)
    {
        return(Request.CreateErrorResponse(
            HttpStatusCode.BadRequest,
            errorMessage
        ));
    }

    CreateDocument(documentId, _blobId, handle, fatherHandle, fatherHandleDescriptorId, _fileName, _customData);

    Logger.DebugFormat("File {0} uploaded as {1}", _blobId, documentId);

    var storedFile = _blobStore.GetDescriptor(_blobId);

    return(Request.CreateResponse(
        HttpStatusCode.OK,
        new UploadedDocumentResponse
    {
        Handle = handle,
        // presumably the blob store computes an MD5 hash; "md5" is reported as-is
        Hash = storedFile.Hash,
        HashType = "md5",
        Uri = Url.Content("/" + tenantId + "/documents/" + handle)
    }
    ));
}
public void Verify_basic_create_new_and_get_descriptor()
{
    const String content = "this is the content of the file";
    String tempFileName = GenerateTempTextFile(content, "thisisatest.txt");

    // Write the temp file into a brand-new blob and capture its id.
    BlobId id;
    using (var writer = _sut.CreateNew(DocumentFormats.Original, new Core.Model.FileNameWithExtension(tempFileName)))
    {
        using (var fileStream = new FileStream(tempFileName, FileMode.Open, FileAccess.Read))
        {
            fileStream.CopyTo(writer.WriteStream);
        }
        id = writer.BlobId;
    }
    Assert.That(id, Is.Not.Null);

    // The descriptor must reflect exactly what was written.
    var descriptor = _sut.GetDescriptor(id);
    Assert.That(descriptor.BlobId, Is.EqualTo(id));
    Assert.That(descriptor.ContentType, Is.EqualTo("text/plain"));
    Assert.That(descriptor.FileNameWithExtension.ToString(), Is.EqualTo("thisisatest.txt"));
    Assert.That(descriptor.Hash.ToString(), Is.EqualTo("c4afda0ebfa886d489fe06a436ca491a"));
    Assert.That(descriptor.Length, Is.EqualTo(31));
}
/// <summary>
/// Update dump of the file on disks: copies the blob into the backup store,
/// or removes it from the backup store when the artifact was deleted.
/// </summary>
/// <param name="blobId">Raw id of the blob to back up or remove.</param>
/// <param name="originalBlobStore">Store the blob descriptor is read from.</param>
/// <param name="deleted">If true this is an operation of deletion of the artifact.</param>
/// <returns>True on success, false when the operation failed (the failure is logged).</returns>
private Boolean UpdateDumpOfFile(
    string blobId,
    IBlobStore originalBlobStore,
    Boolean deleted)
{
    try
    {
        //Take descriptor even if the blob is deleted, because deleting a descriptor leav
        //blob in recycle bin, this imply that the blob should still be there
        var blobDescriptor = originalBlobStore.GetDescriptor(new BlobId(blobId));

        if (deleted)
        {
            _store.Delete(blobDescriptor.FileNameWithExtension, blobId);
            Logger.Debug($"Blob {blobId} was deleted, delete from the backup store. FileName: {blobDescriptor.FileNameWithExtension}");
        }
        else
        {
            using (var stream = blobDescriptor.OpenRead())
            {
                _store.Store(stream, blobDescriptor.FileNameWithExtension, blobId);
            }
            Logger.Debug($"Blob {blobId} copied to backup store. FileName: {blobDescriptor.FileNameWithExtension}");
        }

        return true;
    }
    catch (Exception ex)
    {
        if (deleted)
        {
            //Since the blob is deleted, issue a warning, because it could be that the backup started
            //late and the blob was already purged from recycle bin.
            Logger.Warn($"Unable to backup blob {blobId} from original store. {ex.Message}. Maybe the blob was already deleted by job.", ex);
        }
        else
        {
            Logger.Error($"Unable to backup blob {blobId} from original store. {ex.Message}", ex);
        }

        return false;
    }
}
/// <summary>
/// Searches for an existing document descriptor whose blob matches the
/// source blob by hash, content type, length and, finally, exact binary
/// content.
/// </summary>
/// <param name="sourceDocumentId">Descriptor being deduplicated; never returned as its own match.</param>
/// <param name="sourceHash">Hash used to pre-filter candidate documents.</param>
/// <param name="sourceBlobId">Blob whose content is compared against candidates.</param>
/// <returns>The id of the matching descriptor, or null when deduplication is
/// disabled or no candidate matches.</returns>
public DocumentDescriptorId FindDuplicateDocumentId(
    DocumentDescriptorId sourceDocumentId,
    FileHash sourceHash,
    BlobId sourceBlobId)
{
    if (!_config.IsDeduplicationActive)
    {
        return null;
    }

    var sourceDescriptor = _blobStore.GetDescriptor(sourceBlobId);
    var matches = _hashReader.FindDocumentByHash(sourceHash);
    Logger.DebugFormat("Deduplicating document {0}", sourceDocumentId);

    foreach (var match in matches)
    {
        // A document is never a duplicate of itself.
        if (match.DocumentDescriptorId == sourceDocumentId)
        {
            continue;
        }

        Logger.DebugFormat("Checking document {0}", match.DocumentDescriptorId);
        var candidate = _blobStore.GetDescriptor(match.BlobId);

        // only within same content type!
        if (candidate.ContentType != sourceDescriptor.ContentType)
        {
            Logger.DebugFormat("document {0} has different ContentType ({1}), skipping",
                match.DocumentDescriptorId,
                candidate.ContentType);
            continue;
        }

        // and same length
        if (candidate.Length != sourceDescriptor.Length)
        {
            Logger.DebugFormat("document {0} has different length ({1}), skipping",
                match.DocumentDescriptorId,
                candidate.Length);
            continue;
        }

        // binary check
        using (var candidateStream = candidate.OpenRead())
        using (var sourceStream = sourceDescriptor.OpenRead())
        {
            if (StreamHelper.StreamsContentsAreEqual(candidateStream, sourceStream))
            {
                Logger.DebugFormat("{0} has same content of {1}: match found!",
                    match.DocumentDescriptorId,
                    sourceDocumentId);
                return match.DocumentDescriptorId;
            }

            Logger.DebugFormat("{0} has different content of {1}, skipping",
                match.DocumentDescriptorId,
                sourceDocumentId);
        }
    }

    return null;
}