public bool IsFileAllowed(FileNameWithExtension filename)
{
    // A null whitelist means no restriction is configured: accept everything.
    var allowed = AllowedFileTypes;
    return allowed == null || allowed.Contains(filename.Extension);
}
/// <summary>
/// Creates a writer that streams blob content to <paramref name="destinationFileName"/>
/// and records the corresponding descriptor in the given Mongo collection.
/// The descriptor is saved up-front; the wrapped stream updates it when closed.
/// </summary>
public FileSystemBlobWriter(
    BlobId blobId,
    FileNameWithExtension fileName,
    String destinationFileName,
    IMongoCollection<FileSystemBlobDescriptor> blobDescriptorCollection,
    ILogger logger)
{
    BlobId = blobId;
    FileName = fileName;
    _blobDescriptorCollection = blobDescriptorCollection;
    _logger = logger;
    _descriptor = new FileSystemBlobDescriptor()
    {
        BlobId = BlobId,
        FileNameWithExtension = FileName,
        // NOTE(review): local time here, while the GridFS paths use DateTime.UtcNow — confirm intended.
        Timestamp = DateTime.Now,
        ContentType = MimeTypes.GetMimeType(FileName)
    };
    _destinationFileName = destinationFileName;
    _blobDescriptorCollection.Save(_descriptor, _descriptor.BlobId);

    // FileMode.Create opens-or-creates and truncates in a single step,
    // replacing the original FileMode.OpenOrCreate + SetLength(0) pair.
    var originalStream = new FileStream(
        destinationFileName,
        FileMode.Create,
        FileAccess.ReadWrite,
        FileShare.None);
    _writableStream = new FileSystemBlobStoreWritableStream(
        originalStream,
        _descriptor,
        _blobDescriptorCollection,
        this);
}
public DocumentReadModel(DocumentHandle handle, DocumentDescriptorId documentid, FileNameWithExtension fileName, DocumentCustomData customData)
{
    // Plain read-model projection: every value is copied verbatim.
    Handle = handle;
    FileName = fileName;
    CustomData = customData;
    DocumentDescriptorId = documentid;
}
/// <summary>
/// Upload a file sent in an http request
/// </summary>
/// <param name="httpContent">request's content</param>
/// <returns>Error message or null</returns>
private async Task<String> UploadFromHttpContent(HttpContent httpContent)
{
    var isMultipart = httpContent != null && httpContent.IsMimeMultipartContent();
    if (!isMultipart)
    {
        return "Attachment not found!";
    }

    var streamProvider = new FileStoreMultipartStreamProvider(_blobStore, _configService);
    var provider = await httpContent.ReadAsMultipartAsync(streamProvider);

    if (provider.Filename == null)
    {
        return "Attachment not found!";
    }
    if (provider.IsInvalidFile)
    {
        return string.Format("Unsupported file {0}", provider.Filename);
    }

    // Optional custom metadata travels as a JSON form field.
    var rawCustomData = provider.FormData["custom-data"];
    if (rawCustomData != null)
    {
        _customData = JsonConvert.DeserializeObject<DocumentCustomData>(rawCustomData);
    }

    _fileName = provider.Filename;
    _blobId = provider.BlobId;
    return null;
}
public DocumentLinked(DocumentHandle handle, DocumentDescriptorId documentId, DocumentDescriptorId previousDocumentId, FileNameWithExtension fileName)
{
    // Event payload: capture all identifying data as-is.
    Handle = handle;
    DocumentId = documentId;
    PreviousDocumentId = previousDocumentId;
    FileName = fileName;
}
/// <summary>
/// Creates a writer that streams blob content to <paramref name="destinationFileName"/>;
/// the descriptor is persisted through <paramref name="fileSystemBlobDescriptorStorage"/>
/// when the wrapped stream signals it was closed.
/// </summary>
public FileSystemBlobWriter(
    BlobId blobId,
    FileNameWithExtension fileName,
    String destinationFileName,
    IFileSystemBlobDescriptorStorage fileSystemBlobDescriptorStorage,
    ILogger logger)
{
    BlobId = blobId;
    FileName = fileName;
    _logger = logger;
    _descriptor = new FileSystemBlobDescriptor()
    {
        BlobId = BlobId,
        FileNameWithExtension = FileName,
        // NOTE(review): local time here, while the GridFS paths use DateTime.UtcNow — confirm intended.
        Timestamp = DateTime.Now,
        ContentType = MimeTypes.GetMimeType(FileName)
    };

    // FileMode.Create opens-or-creates and truncates in a single step,
    // replacing the original FileMode.OpenOrCreate + SetLength(0) pair.
    var originalStream = new FileStream(
        destinationFileName,
        FileMode.Create,
        FileAccess.ReadWrite,
        FileShare.None);
    _writableStream = new FileSystemBlobStoreWritableStream(originalStream, this);
    _writableStream.StreamClosed += WritableStreamClosed;
    _fileSystemBlobDescriptorStorage = fileSystemBlobDescriptorStorage;
}
/// <summary>
/// Upload a file sent in an http request
/// </summary>
/// <param name="httpContent">request's content</param>
/// <returns>Error message or null</returns>
private async Task<String> AddFormatFromHttpContent(HttpContent httpContent, DocumentFormat format)
{
    var isMultipart = httpContent != null && httpContent.IsMimeMultipartContent();
    if (!isMultipart)
    {
        return "Attachment not found!";
    }

    var streamProvider = new FormatStoreMultipartStreamProvider(_blobStore, format);
    var provider = await httpContent.ReadAsMultipartAsync(streamProvider);

    if (provider.Filename == null)
    {
        return "Attachment not found!";
    }

    // Optional custom metadata travels as a JSON form field.
    var rawCustomData = provider.FormData["custom-data"];
    if (rawCustomData != null)
    {
        _customData = JsonConvert.DeserializeObject<DocumentCustomData>(rawCustomData);
    }

    _fileName = provider.Filename;
    _blobId = provider.BlobId;
    return null;
}
public IBlobDescriptor Persist(BlobId blobId, FileNameWithExtension fileName, Stream inputStream)
{
    // Write the blob to disk first, then record its descriptor.
    var descriptor = InnerPersistOfBlob(blobId, fileName, inputStream);
    _mongodDbFileSystemBlobDescriptorStorage.SaveDescriptor(descriptor);
    return descriptor;
}
/// <summary>
/// Persist a stream given a blobId: copies <paramref name="sourceStream"/> to the
/// file computed from the blob id while computing its MD5 hash and length in a
/// single pass.
/// </summary>
/// <param name="blobId">Id assigned to the blob.</param>
/// <param name="fileName">Original file name, used for content-type detection.</param>
/// <param name="sourceStream">Stream with the blob content.</param>
/// <returns>The populated descriptor (not yet saved to descriptor storage).</returns>
private FileSystemBlobDescriptor InnerPersistOfBlob(BlobId blobId, FileNameWithExtension fileName, Stream sourceStream)
{
    FileSystemBlobDescriptor descriptor = new FileSystemBlobDescriptor()
    {
        BlobId = blobId,
        FileNameWithExtension = fileName,
        // NOTE(review): local time here, while the GridFS paths use DateTime.UtcNow — confirm intended.
        Timestamp = DateTime.Now,
        ContentType = MimeTypes.GetMimeType(fileName)
    };
    string destinationFileName = GetFileNameFromBlobIdAndRemoveDuplicates(blobId, descriptor.FileNameWithExtension);
    Logger.Debug($"File {fileName} was assigned blob {blobId} and will be saved in file {destinationFileName}");

    using (var md5 = MD5.Create())
    // FileMode.Create opens-or-creates and truncates, replacing the original
    // OpenOrCreate + Seek(0) + SetLength(0) sequence.
    using (var fileStream = new FileStream(destinationFileName, FileMode.Create, FileAccess.Write))
    {
        Byte[] buffer = new Byte[8192];
        Int32 read;
        Int64 length = 0;
        while ((read = sourceStream.Read(buffer, 0, buffer.Length)) > 0)
        {
            // Hash and write in one pass over the source stream.
            md5.TransformBlock(buffer, 0, read, buffer, 0);
            fileStream.Write(buffer, 0, read);
            length += read;
        }
        md5.TransformFinalBlock(buffer, 0, 0);
        descriptor.Length = length;
        descriptor.Md5 = BitConverter.ToString(md5.Hash).Replace("-", "");
    }
    Logger.Info($"Blob {blobId} saved in file {destinationFileName} with hash {descriptor.Md5} and length {descriptor.Length}");
    return descriptor;
}
public BlobId Upload(DocumentFormat format, FileNameWithExtension fileName, Stream sourceStream)
{
    // Persist the stream, then register the resulting descriptor by its id.
    var saved = SaveStream(format, fileName, sourceStream);
    _blobDescriptorCollection.Save(saved, saved.BlobId);
    return saved.BlobId;
}
public GridFsBlobDescriptor(BlobId blobId, MongoGridFSFileInfo mongoGridFsFileInfo)
{
    // The GridFS file info is the backing source for all descriptor data.
    if (mongoGridFsFileInfo == null)
    {
        throw new ArgumentNullException(nameof(mongoGridFsFileInfo));
    }
    _mongoGridFsFileInfo = mongoGridFsFileInfo;
    BlobId = blobId;
    FileNameWithExtension = new FileNameWithExtension(_mongoGridFsFileInfo.Name);
}
public void should_handle_extension_with_dots_in_file_name()
{
    // Only the final dot separates the name from the extension.
    var sut = new FileNameWithExtension("a.b.c");

    Assert.AreEqual("a.b", sut.FileName);
    Assert.AreEqual("c", sut.Extension);
    Assert.AreEqual("a.b.c", (string)sut);
}
public IBlobDescriptor Persist(BlobId blobId, FileNameWithExtension fileName, Stream inputStream)
{
    // Copy the payload into a writer bound to the given blob id,
    // then reload the descriptor for the caller.
    using (var writer = CreateBlobWriterFromBlobId(blobId.Format, fileName, blobId))
    {
        inputStream.CopyTo(writer.WriteStream);
    }
    return GetDescriptor(blobId);
}
public bool IsFileAllowed(FileNameWithExtension filename)
{
    // No configured whitelist (null) means every extension is allowed.
    return AllowedFileTypes?.Contains(filename.Extension) ?? true;
}
public DocumentHandleInfo(
    DocumentHandle handle,
    FileNameWithExtension fileName,
    DocumentCustomData customData = null)
{
    // Simple value holder; custom data is optional.
    Handle = handle;
    FileName = fileName;
    CustomData = customData;
}
public void SetFileName(FileNameWithExtension fileName)
{
    ThrowIfDeleted();
    // Raise the event only when the name actually changes.
    if (InternalState.FileName == fileName)
    {
        return;
    }
    var evt = new DocumentFileNameSet(InternalState.Handle, fileName);
    RaiseEvent(evt);
}
public override Stream GetStream(HttpContent parent, HttpContentHeaders headers)
{
    var contentFileName = headers.ContentDisposition.FileName;
    // Parts that carry no file are drained into a throwaway buffer.
    if (contentFileName == null)
    {
        return new MemoryStream();
    }
    Filename = new FileNameWithExtension(contentFileName);
    _writer = _store.CreateNew(_format, Filename);
    return _writer.WriteStream;
}
public GridFsBlobDescriptor(BlobId blobId, MongoGridFSFileInfo mongoGridFsFileInfo)
{
    // All descriptor data is derived from the GridFS file info, so it must exist.
    _mongoGridFsFileInfo = mongoGridFsFileInfo;
    if (_mongoGridFsFileInfo == null)
    {
        throw new ArgumentNullException(nameof(mongoGridFsFileInfo));
    }
    BlobId = blobId;
    FileNameWithExtension = new FileNameWithExtension(_mongoGridFsFileInfo.Name);
}
public BlobId Upload(DocumentFormat format, FileNameWithExtension fileName, Stream sourceStream)
{
    // The bucket is resolved here only to report its database name in the log.
    var gridFs = GetGridFsByFormat(format);
    using (var blobWriter = CreateNew(format, fileName))
    {
        var assignedId = blobWriter.BlobId;
        Logger.DebugFormat("Uploading file {0} named {1} on {2}", assignedId, fileName, gridFs.DatabaseName);
        sourceStream.CopyTo(blobWriter.WriteStream);
        return assignedId;
    }
}
public BlobId Upload(DocumentFormat format, FileNameWithExtension fileName, Stream sourceStream)
{
    // Persist the stream, log, then register the descriptor.
    var descriptor = SaveStream(format, fileName, sourceStream);
    var assignedId = descriptor.BlobId;
    if (Logger.IsDebugEnabled)
    {
        Logger.Debug($"Uploaded document format {format} from stream with filename {fileName} with blob Id {assignedId}");
    }
    _mongodDbFileSystemBlobDescriptorStorage.SaveDescriptor(descriptor);
    return assignedId;
}
public InitializeDocumentDescriptorAsAttach(
    DocumentDescriptorId aggregateId,
    BlobId blobId,
    DocumentHandleInfo handleInfo,
    DocumentHandle fatherHandle,
    DocumentDescriptorId fatherDocumentDescriptorId,
    FileHash hash,
    FileNameWithExtension fileName)
    : base(aggregateId, blobId, handleInfo, hash, fileName)
{
    // Only the attach-specific references are kept here; the rest goes to the base command.
    FatherDocumentDescriptorId = fatherDocumentDescriptorId;
    FatherHandle = fatherHandle;
}
public InitializeDocumentDescriptor(
    DocumentDescriptorId aggregateId,
    BlobId blobId,
    DocumentHandleInfo handleInfo,
    FileHash hash,
    FileNameWithExtension fileName)
    : base(aggregateId)
{
    // Command payload: capture the blob reference and its identifying data.
    BlobId = blobId;
    HandleInfo = handleInfo;
    Hash = hash;
    FileName = fileName;
}
public Stream CreateNew(BlobId blobId, FileNameWithExtension fname)
{
    var gridFs = GetGridFsByBlobId(blobId);
    Logger.DebugFormat("Creating file {0} on {1}", blobId, gridFs.DatabaseName);
    // Any previous content stored under this id is discarded first.
    Delete(blobId);
    var createOptions = new MongoGridFSCreateOptions()
    {
        ContentType = MimeTypes.GetMimeType(fname),
        UploadDate = DateTime.UtcNow,
        Id = (string)blobId
    };
    return gridFs.Create(fname, createOptions);
}
private IBlobWriter CreateBlobWriterFromBlobId(DocumentFormat format, FileNameWithExtension fname, BlobId blobId)
{
    var gridFs = GetGridFsByFormat(format);
    Logger.DebugFormat("Creating file {0} on {1}", blobId, gridFs.DatabaseName);
    var createOptions = new MongoGridFSCreateOptions()
    {
        ContentType = MimeTypes.GetMimeType(fname),
        UploadDate = DateTime.UtcNow,
        Id = (string)blobId
    };
    // Wrap the raw GridFS stream so callers dispose writer and stream together.
    var gridFsStream = gridFs.Create(fname, createOptions);
    return new BlobWriter(blobId, gridFsStream, fname);
}
public IBlobWriter CreateNew(DocumentFormat format, FileNameWithExtension fname)
{
    // Reserve the next id for this format up-front.
    var blobId = new BlobId(format, _counterService.GetNext(format));
    var gridFs = GetGridFsByFormat(format);
    Logger.DebugFormat("Creating file {0} on {1}", blobId, gridFs.DatabaseName);
    var createOptions = new MongoGridFSCreateOptions()
    {
        ContentType = MimeTypes.GetMimeType(fname),
        UploadDate = DateTime.UtcNow,
        Id = (string)blobId
    };
    var gridFsStream = gridFs.Create(fname, createOptions);
    return new BlobWriter(blobId, gridFsStream, fname);
}
public override Stream GetStream(HttpContent parent, HttpContentHeaders headers)
{
    var dispositionFileName = headers.ContentDisposition.FileName;
    if (dispositionFileName == null)
    {
        // No file name on this part: drain it into a throwaway buffer.
        return new MemoryStream();
    }
    Filename = new FileNameWithExtension(dispositionFileName);
    _writer = _store.CreateNew(_format, Filename);
    return _writer.WriteStream;
}
public IBlobWriter CreateNew(DocumentFormat format, FileNameWithExtension fname)
{
    // Reserve the next id for this format, then build the file-system writer around it.
    var blobId = new BlobId(format, _counterService.GetNext(format));
    if (Logger.IsDebugEnabled)
    {
        Logger.Debug($"CreateNew blob for format {format} with file {fname} - assigned blobId: {blobId}");
    }
    var destination = GetFileNameFromBlobIdAndRemoveDuplicates(blobId, fname);
    return new FileSystemBlobWriter(
        blobId,
        fname,
        destination,
        _mongodDbFileSystemBlobDescriptorStorage,
        Logger);
}
/// <summary>
/// Projects a file-name change onto the document read model; the Lte filter on
/// CreatetAt guards against applying the event to a newer incarnation of the handle.
/// </summary>
public void SetFileName(DocumentHandle handle, FileNameWithExtension fileName, long projectedAt)
{
    Logger.DebugFormat("SetFilename on handle {0} [{1}]", handle, projectedAt);
    // The updated document returned by FindOneAndUpdate was assigned to an
    // unused local in the original; the return value is simply discarded now.
    _collection.FindOneAndUpdate(
        Builders<DocumentReadModel>.Filter.And(
            Builders<DocumentReadModel>.Filter.Eq(x => x.Handle, handle),
            Builders<DocumentReadModel>.Filter.Lte(x => x.CreatetAt, projectedAt)
        ),
        Builders<DocumentReadModel>.Update
            .Set(x => x.FileName, fileName)
            .Set(x => x.ProjectedAt, projectedAt),
        new FindOneAndUpdateOptions<DocumentReadModel, DocumentReadModel>()
        {
            ReturnDocument = ReturnDocument.After
        }
    );
}
public override Stream GetStream(HttpContent parent, HttpContentHeaders headers)
{
    var dispositionFileName = headers.ContentDisposition.FileName;
    if (dispositionFileName == null)
    {
        // No file in this part: swallow the content.
        return new MemoryStream();
    }
    Filename = new FileNameWithExtension(dispositionFileName);
    if (!_config.IsFileAllowed(Filename))
    {
        // Remember the rejection; the caller inspects IsInvalidFile afterwards.
        IsInvalidFile = true;
        return new MemoryStream();
    }
    _writer = _store.CreateNew(DocumentFormats.Original, Filename);
    return _writer.WriteStream;
}
public override Stream GetStream(HttpContent parent, HttpContentHeaders headers)
{
    string requestFileName = headers.ContentDisposition.FileName;
    // A part without a file name carries nothing we want to keep.
    if (requestFileName == null)
    {
        return new MemoryStream();
    }

    Filename = new FileNameWithExtension(requestFileName);
    var allowed = _config.IsFileAllowed(Filename);
    if (!allowed)
    {
        // Flag the rejection for the caller and discard the payload.
        IsInvalidFile = true;
        return new MemoryStream();
    }

    _writer = _store.CreateNew(DocumentFormats.Original, Filename);
    return _writer.WriteStream;
}
/// <summary>
/// Builds and dispatches the command that creates a document descriptor: a plain
/// initialization when no father handle is given, otherwise an initialization
/// "as attach" linked to the father document.
/// </summary>
private void CreateDocument(
    DocumentDescriptorId documentDescriptorId,
    BlobId blobId,
    DocumentHandle handle,
    DocumentHandle fatherHandle,
    DocumentDescriptorId fatherDocumentDescriptorId,
    FileNameWithExtension fileName,
    DocumentCustomData customData
)
{
    // The descriptor provides the hash of the already-uploaded blob.
    var descriptor = _blobStore.GetDescriptor(blobId);
    ICommand createDocument;
    var handleInfo = new DocumentHandleInfo(handle, fileName, customData);
    if (fatherHandle == null)
    {
        if (Logger.IsDebugEnabled)
        {
            Logger.DebugFormat("Initialize DocumentDescriptor {0} ", documentDescriptorId);
        }
        createDocument = new InitializeDocumentDescriptor(documentDescriptorId, blobId, handleInfo, descriptor.Hash, fileName);
    }
    else
    {
        if (Logger.IsDebugEnabled)
        {
            Logger.DebugFormat("Initialize DocumentDescriptor as attach {0} ", documentDescriptorId);
        }
        createDocument = new InitializeDocumentDescriptorAsAttach(
            documentDescriptorId,
            blobId,
            handleInfo,
            fatherHandle,
            fatherDocumentDescriptorId,
            descriptor.Hash,
            fileName);
    }
    // "api" tags the command with its origin.
    CommandBus.Send(createDocument, "api");
}
/// <summary>
/// Imports a single file described by a queued import task: uploads the blob and
/// either initializes a new document descriptor (original format) or adds the blob
/// as an additional format to an existing document. Failures are recorded as
/// import-failure markers so the file is not retried with the same timestamp.
/// </summary>
/// <param name="jobFile">Path of the queue job file (used for logging only).</param>
/// <param name="task">The import task to execute.</param>
internal void UploadFile(String jobFile, DocumentImportTask task)
{
    String fname = "";
    try
    {
        TenantContext.Enter(task.Tenant);
        if (!task.Uri.IsFile)
        {
            LogAndThrow("Error importing task file {0}: Uri is not a file: {1}", jobFile, task.Uri);
        }
        fname = task.Uri.LocalPath;
        // Skip files that already failed with this exact timestamp.
        if (FileHasImportFailureMarker(fname, task.FileTimestamp))
        {
            return;
        }
        if (!File.Exists(fname))
        {
            LogAndThrow("Error importing task file {0}: File missing: {1}", jobFile, fname);
        }
        var blobStore = GetBlobStoreForTenant();
        var identityGenerator = GetIdentityGeneratorForTenant();
        if (blobStore == null || identityGenerator == null)
        {
            // FIX: original format string used {1} for both arguments; the tenant is argument 0.
            Logger.ErrorFormat("Tenant {0} not found or not configured for file: {1}", task.Tenant, fname);
            return;
        }
        BlobId blobId;
        if (!String.IsNullOrEmpty(task.FileName))
        {
            //use the real file name from the task not the name of the file
            using (FileStream fs = File.Open(fname, FileMode.Open, FileAccess.Read))
            {
                blobId = blobStore.Upload(task.Format, new FileNameWithExtension(task.FileName), fs);
            }
        }
        else
        {
            //No filename given in task, use name of the blob
            blobId = blobStore.Upload(task.Format, fname);
        }
        if (task.Format == OriginalFormat)
        {
            var descriptor = blobStore.GetDescriptor(blobId);
            // NOTE(review): task.FileName may be empty when the blob-name branch above was taken — confirm.
            var fileName = new FileNameWithExtension(task.FileName);
            var handleInfo = new DocumentHandleInfo(task.Handle, fileName, task.CustomData);
            var documentId = identityGenerator.New<DocumentDescriptorId>();
            var createDocument = new InitializeDocumentDescriptor(
                documentId, blobId, handleInfo, descriptor.Hash, fileName
            );
            _commandBus.Send(createDocument, "import-from-file");
        }
        else
        {
            // Non-original format: attach the blob to the existing document.
            var reader = _tenantAccessor.Current.Container.Resolve<IDocumentWriter>();
            var handle = reader.FindOneById(task.Handle);
            var documentId = handle.DocumentDescriptorId;
            var command = new AddFormatToDocumentDescriptor(
                documentId, task.Format, blobId, new PipelineId("user-content")
            );
            _commandBus.Send(command, "import-from-file");
        }
        TaskExecuted(task);
        DeleteImportFailure(fname);
    }
    catch (Exception ex)
    {
        Logger.ErrorFormat(ex, "Job Import Queue - Error importing {0} - {1}", jobFile, ex.Message);
        ImportFailure failure = new ImportFailure()
        {
            Error = ex.ToString(),
            FileName = fname,
            Timestamp = DateTime.Now,
            ImportFileTimestampTicks = task.FileTimestamp.Ticks,
        };
        MarkImportFailure(failure);
    }
    finally
    {
        TenantContext.Exit();
    }
}
public IBlobWriter CreateNew(DocumentFormat format, FileNameWithExtension fname)
{
    // Route to the store registered for this document format.
    var store = ForFormat(format);
    return store.CreateNew(format, fname);
}
public DocumentReadModel(DocumentHandle handle, DocumentDescriptorId documentid, FileNameWithExtension fileName)
    : this(handle, documentid, fileName, null)
{
    // Convenience overload: no custom data attached.
}
public BlobId Upload(DocumentFormat format, FileNameWithExtension fileName, Stream sourceStrem)
{
    // NOTE(review): parameter name "sourceStrem" is a typo kept for signature
    // compatibility (named-argument callers).
    var gridFs = GetGridFsByFormat(format);
    using (var blobWriter = CreateNew(format, fileName))
    {
        var assignedId = blobWriter.BlobId;
        Logger.DebugFormat("Uploading file {0} named {1} on {2}", assignedId, fileName, gridFs.DatabaseName);
        sourceStrem.CopyTo(blobWriter.WriteStream);
        return assignedId;
    }
}
/// <summary>
/// Imports a single file described by a queued import task: uploads the blob and
/// either initializes a new document descriptor (original format) or adds the blob
/// as an additional format to an existing document. Failures are recorded as
/// import-failure markers so the file is not retried with the same timestamp.
/// </summary>
/// <param name="jobFile">Path of the queue job file (used for logging only).</param>
/// <param name="task">The import task to execute.</param>
internal void UploadFile(String jobFile, DocumentImportTask task)
{
    String fname = "";
    try
    {
        TenantContext.Enter(task.Tenant);
        if (!task.Uri.IsFile)
        {
            LogAndThrow("Error importing task file {0}: Uri is not a file: {1}", jobFile, task.Uri);
        }
        fname = task.Uri.LocalPath;
        // Skip files that already failed with this exact timestamp.
        if (FileHasImportFailureMarker(fname, task.FileTimestamp))
        {
            return;
        }
        if (!File.Exists(fname))
        {
            LogAndThrow("Error importing task file {0}: File missing: {1}", jobFile, fname);
        }
        var blobStore = GetBlobStoreForTenant();
        var identityGenerator = GetIdentityGeneratorForTenant();
        if (blobStore == null || identityGenerator == null)
        {
            // FIX: original format string used {1} for both arguments; the tenant is argument 0.
            Logger.ErrorFormat("Tenant {0} not found or not configured for file: {1}", task.Tenant, fname);
            return;
        }
        BlobId blobId;
        if (!String.IsNullOrEmpty(task.FileName))
        {
            //use the real file name from the task not the name of the file
            using (FileStream fs = File.Open(fname, FileMode.Open, FileAccess.Read))
            {
                blobId = blobStore.Upload(task.Format, new FileNameWithExtension(task.FileName), fs);
            }
        }
        else
        {
            //No filename given in task, use name of the blob
            blobId = blobStore.Upload(task.Format, fname);
        }
        if (task.Format == OriginalFormat)
        {
            var descriptor = blobStore.GetDescriptor(blobId);
            // NOTE(review): task.FileName may be empty when the blob-name branch above was taken — confirm.
            var fileName = new FileNameWithExtension(task.FileName);
            var handleInfo = new DocumentHandleInfo(task.Handle, fileName, task.CustomData);
            var documentId = identityGenerator.New<DocumentDescriptorId>();
            var createDocument = new InitializeDocumentDescriptor(
                documentId, blobId, handleInfo, descriptor.Hash, fileName
            );
            _commandBus.Send(createDocument, "import-from-file");
        }
        else
        {
            // Non-original format: attach the blob to the existing document.
            var reader = _tenantAccessor.Current.Container.Resolve<IDocumentWriter>();
            var handle = reader.FindOneById(task.Handle);
            var documentId = handle.DocumentDescriptorId;
            var command = new AddFormatToDocumentDescriptor(
                documentId, task.Format, blobId, new PipelineId("user-content")
            );
            _commandBus.Send(command, "import-from-file");
        }
        TaskExecuted(task);
        DeleteImportFailure(fname);
    }
    catch (Exception ex)
    {
        Logger.ErrorFormat(ex, "Job Import Queue - Error importing {0} - {1}", jobFile, ex.Message);
        ImportFailure failure = new ImportFailure()
        {
            Error = ex.ToString(),
            FileName = fname,
            Timestamp = DateTime.Now,
            ImportFileTimestampTicks = task.FileTimestamp.Ticks,
        };
        MarkImportFailure(failure);
    }
    finally
    {
        TenantContext.Exit();
    }
}
public IBlobWriter CreateNew(DocumentFormat format, FileNameWithExtension fname)
{
    // Allocate a fresh id for this format, then build the writer around it.
    var nextId = new BlobId(format, _counterService.GetNext(format));
    return CreateBlobWriterFromBlobId(format, fname, nextId);
}
/// <summary>
/// Streams a blob back to the HTTP client, honoring HEAD requests and single
/// byte-range requests; falls back to the full content when no valid range is given.
/// </summary>
/// <param name="formatBlobId">Blob to stream.</param>
/// <param name="fileName">Optional name used for a Content-Disposition attachment header.</param>
/// <returns>The response message (200, 206, 404 or 416).</returns>
HttpResponseMessage StreamFile(BlobId formatBlobId, FileNameWithExtension fileName = null)
{
    var descriptor = _blobStore.GetDescriptor(formatBlobId);
    if (descriptor == null)
    {
        return Request.CreateErrorResponse(
            HttpStatusCode.NotFound,
            string.Format("File {0} not found", formatBlobId)
        );
    }

    RangeHeaderValue rangeHeader = Request.Headers.Range;
    var response = Request.CreateResponse(HttpStatusCode.OK);
    // Advertise byte-range support regardless of the request type.
    response.Headers.AcceptRanges.Add("bytes");

    // HEAD?  A HEAD request never gets a body, so any range header is ignored.
    bool isHead = false;
    if (Request.Method == HttpMethod.Head)
    {
        isHead = true;
        rangeHeader = null;
    }

    // full stream
    if (rangeHeader == null || !rangeHeader.Ranges.Any())
    {
        if (isHead)
        {
            // Empty body but the real length, so clients can size the download.
            response.Content = new ByteArrayContent(new byte[0]);
            response.Content.Headers.ContentLength = descriptor.Length;
        }
        else
        {
            response.Content = new StreamContent(descriptor.OpenRead());
        }
        response.Content.Headers.ContentType = new MediaTypeHeaderValue(descriptor.ContentType);
        if (fileName != null)
        {
            // Force a download with the supplied file name.
            response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
            {
                FileName = fileName
            };
        }
        return response;
    }

    // range stream
    long start = 0, end = 0;
    long totalLength = descriptor.Length;

    // 1. If the unit is not 'bytes'.
    // 2. If there are multiple ranges in header value.
    // 3. If start or end position is greater than file length.
    if (rangeHeader.Unit != "bytes" || rangeHeader.Ranges.Count > 1 ||
        !TryReadRangeItem(rangeHeader.Ranges.First(), totalLength, out start, out end))
    {
        response.StatusCode = HttpStatusCode.RequestedRangeNotSatisfiable;
        response.Content = new StreamContent(Stream.Null); // No content for this status.
        response.Content.Headers.ContentRange = new ContentRangeHeaderValue(totalLength);
        response.Content.Headers.ContentType = new MediaTypeHeaderValue(descriptor.ContentType);
        return response;
    }

    var contentRange = new ContentRangeHeaderValue(start, end, totalLength);

    // We are now ready to produce partial content.
    response.StatusCode = HttpStatusCode.PartialContent;
    // The range is copied lazily when the framework pulls the push-stream content.
    response.Content = new PushStreamContent((outputStream, httpContent, transpContext) =>
    {
        using (outputStream) // Copy the file to output stream in indicated range.
        using (Stream inputStream = descriptor.OpenRead())
            CreatePartialContent(inputStream, outputStream, start, end);
    }, descriptor.ContentType);
    response.Content.Headers.ContentType = new MediaTypeHeaderValue(descriptor.ContentType);
    response.Content.Headers.ContentLength = end - start + 1;
    response.Content.Headers.ContentRange = contentRange;
    return response;
}
void When(DocumentFileNameSet e)
{
    // Event handler: project the new file name onto the state.
    FileName = e.FileName;
}
public void SetFileName(FileNameWithExtension fileNameWithExtension)
{
    // Straight state mutation; no validation happens here.
    FileName = fileNameWithExtension;
}
public FsBlobDescriptor(BlobId id, string pathToFile)
{
    BlobId = id;
    _pathToFile = pathToFile;
    // Derive the logical file name from the physical path.
    FileNameWithExtension = new FileNameWithExtension(Path.GetFileName(pathToFile));
}
public DocumentFileNameSet(DocumentHandle handle, FileNameWithExtension fileName)
{
    // Event payload only; no behavior.
    FileName = fileName;
    Handle = handle;
}
public BlobWriter(BlobId blobId, Stream writeStream, FileNameWithExtension fileName)
{
    // Thin holder pairing a destination stream with its blob identity.
    BlobId = blobId;
    WriteStream = writeStream;
    FileName = fileName;
}
/// <summary>
/// Builds and dispatches the command that creates a document descriptor: a plain
/// initialization when no father handle is given, otherwise an initialization
/// "as attach" linked to the father document.
/// </summary>
private void CreateDocument(
    DocumentDescriptorId documentDescriptorId,
    BlobId blobId,
    DocumentHandle handle,
    DocumentHandle fatherHandle,
    DocumentDescriptorId fatherDocumentDescriptorId,
    FileNameWithExtension fileName,
    DocumentCustomData customData
)
{
    // The descriptor provides the hash of the already-uploaded blob.
    var descriptor = _blobStore.GetDescriptor(blobId);
    ICommand createDocument;
    var handleInfo = new DocumentHandleInfo(handle, fileName, customData);
    if (fatherHandle == null)
    {
        if (Logger.IsDebugEnabled)
            Logger.DebugFormat("Initialize DocumentDescriptor {0} ", documentDescriptorId);
        createDocument = new InitializeDocumentDescriptor(documentDescriptorId, blobId, handleInfo, descriptor.Hash, fileName);
    }
    else
    {
        if (Logger.IsDebugEnabled)
            Logger.DebugFormat("Initialize DocumentDescriptor as attach {0} ", documentDescriptorId);
        createDocument = new InitializeDocumentDescriptorAsAttach(
            documentDescriptorId,
            blobId,
            handleInfo,
            fatherHandle,
            fatherDocumentDescriptorId,
            descriptor.Hash,
            fileName);
    }
    // "api" tags the command with its origin.
    CommandBus.Send(createDocument, "api");
}
/// <summary>
/// Projects a file-name change onto the document read model; the Lte filter on
/// CreatetAt guards against applying the event to a newer incarnation of the handle.
/// </summary>
public void SetFileName(DocumentHandle handle, FileNameWithExtension fileName, long projectedAt)
{
    Logger.DebugFormat("SetFilename on handle {0} [{1}]", handle, projectedAt);
    // The updated document returned by FindOneAndUpdate was assigned to an
    // unused local in the original; the return value is simply discarded now.
    _collection.FindOneAndUpdate(
        Builders<DocumentReadModel>.Filter.And(
            Builders<DocumentReadModel>.Filter.Eq(x => x.Handle, handle),
            Builders<DocumentReadModel>.Filter.Lte(x => x.CreatetAt, projectedAt)
        ),
        Builders<DocumentReadModel>.Update
            .Set(x => x.FileName, fileName)
            .Set(x => x.ProjectedAt, projectedAt),
        new FindOneAndUpdateOptions<DocumentReadModel, DocumentReadModel>()
        {
            ReturnDocument = ReturnDocument.After
        }
    );
}
/// <summary>
/// Upload a file sent in an http request
/// </summary>
/// <param name="httpContent">request's content</param>
/// <returns>Error message or null</returns>
private async Task<String> UploadFromHttpContent(HttpContent httpContent)
{
    if (httpContent == null || !httpContent.IsMimeMultipartContent())
    {
        return "Attachment not found!";
    }

    var provider = await httpContent.ReadAsMultipartAsync(
        new FileStoreMultipartStreamProvider(_blobStore, _configService));

    if (provider.Filename == null)
    {
        return "Attachment not found!";
    }
    if (provider.IsInvalidFile)
    {
        return string.Format("Unsupported file {0}", provider.Filename);
    }

    // Optional custom metadata travels as a JSON form field.
    var customDataJson = provider.FormData["custom-data"];
    if (customDataJson != null)
    {
        _customData = JsonConvert.DeserializeObject<DocumentCustomData>(customDataJson);
    }

    _fileName = provider.Filename;
    _blobId = provider.BlobId;
    return null;
}
public BlobId Upload(DocumentFormat format, FileNameWithExtension fileName, Stream sourceStrem)
{
    // Route to the store that owns this document format.
    // NOTE(review): parameter name "sourceStrem" is a typo kept for signature compatibility.
    var store = ForFormat(format);
    return store.Upload(format, fileName, sourceStrem);
}
/// <summary>
/// Upload a file sent in an http request
/// </summary>
/// <param name="httpContent">request's content</param>
/// <returns>Error message or null</returns>
private async Task<String> AddFormatFromHttpContent(HttpContent httpContent, DocumentFormat format)
{
    if (httpContent == null || !httpContent.IsMimeMultipartContent())
    {
        return "Attachment not found!";
    }

    var provider = await httpContent.ReadAsMultipartAsync(
        new FormatStoreMultipartStreamProvider(_blobStore, format));

    if (provider.Filename == null)
    {
        return "Attachment not found!";
    }

    // Optional custom metadata travels as a JSON form field.
    var customDataJson = provider.FormData["custom-data"];
    if (customDataJson != null)
    {
        _customData = JsonConvert.DeserializeObject<DocumentCustomData>(customDataJson);
    }

    _fileName = provider.Filename;
    _blobId = provider.BlobId;
    return null;
}