/// <summary>
/// Upload a file based on stream data
/// </summary>
public LiteFileInfo Upload(string id, string filename, Stream stream)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }

    // checks if file exists
    var file = this.FindById(id);

    if (file == null)
    {
        file = new LiteFileInfo(_engine, id, filename ?? id);

        // insert if new
        _engine.Insert(FILES, file.AsDocument);
    }

    // copy stream content to litedb file stream
    using (var writer = file.OpenWrite())
    {
        stream.CopyTo(writer);
    }

    return file;
}
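For context, callers normally reach this method through the storage facade rather than the engine. A minimal usage sketch, assuming the db.FileStorage surface used by LiteDB's public API; the database path, source file, and id below are illustrative only:

using System;
using System.IO;
using LiteDB;

// illustrative database path, source file and file id
using (var db = new LiteDatabase(@"MyData.db"))
using (var source = File.OpenRead(@"C:\Temp\picture-01.jpg"))
{
    var info = db.FileStorage.Upload("$/photos/2020/picture-01.jpg", "picture-01.jpg", source);

    Console.WriteLine("{0}: {1} bytes", info.Filename, info.Length);
}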
/// <summary>
/// Open or create a file in the storage and return a linked Stream for write operations
/// </summary>
public LiteFileStream<TFileId> OpenWrite(TFileId id, string filename, BsonDocument metadata = null)
{
    // get _id as BsonValue
    var fileId = _db.Mapper.Serialize(typeof(TFileId), id);

    // checks if file exists
    var file = this.FindById(id);

    if (file == null)
    {
        file = new LiteFileInfo<TFileId>
        {
            Id = id,
            Filename = Path.GetFileName(filename),
            MimeType = MimeTypeConverter.GetMimeType(filename),
            Metadata = metadata ?? new BsonDocument()
        };

        // set files/chunks instances
        file.SetReference(fileId, _files, _chunks);
    }
    else
    {
        // if filename/metadata was changed
        file.Filename = Path.GetFileName(filename);
        file.MimeType = MimeTypeConverter.GetMimeType(filename);
        file.Metadata = metadata ?? file.Metadata;
    }

    return file.OpenWrite();
}
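For comparison, here is the same write path from user code with a custom id type. A rough sketch, assuming LiteDB 5's GetStorage<TFileId> with the default "_files"/"_chunks" collection names; the id and payload are made up:

using System;
using System.Text;
using LiteDB;

using (var db = new LiteDatabase(@"MyData.db"))
{
    var fs = db.GetStorage<Guid>("_files", "_chunks");
    var id = Guid.NewGuid(); // illustrative id

    using (var stream = fs.OpenWrite(id, "report.txt"))
    {
        var bytes = Encoding.UTF8.GetBytes("hello from LiteDB file storage");
        stream.Write(bytes, 0, bytes.Length);
    } // disposing the LiteFileStream flushes the buffered chunk(s)
}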
internal LiteFileStream(LiteEngine engine, LiteFileInfo file, FileAccess mode)
{
    _engine = engine;
    _file = file;
    _mode = mode;

    if (mode == FileAccess.Read)
    {
        // initialize first data block
        _currentChunkData = this.GetChunkData(_currentChunkIndex);
    }
    else if (mode == FileAccess.Write)
    {
        _buffer = new MemoryStream(MAX_CHUNK_SIZE);

        // delete chunks content if needed
        if (file.Length > 0)
        {
            _engine.Delete(LiteStorage.CHUNKS, Query.StartsWith("_id", _file.Id + "\\"));
        }

        // clear size counters
        file.Length = 0;
        file.Chunks = 0;
    }
}
/// <summary>
/// Delete a file inside the datafile and all related metadata
/// </summary>
public bool Delete(string id)
{
    if (string.IsNullOrEmpty(id))
    {
        throw new ArgumentNullException("id");
    }

    // remove file reference in _files
    var d = _engine.Delete(FILES, Query.EQ("_id", id));

    // if not found, just return false
    if (d == 0)
    {
        return false;
    }

    var index = 0;

    while (true)
    {
        var del = _engine.Delete(CHUNKS, Query.EQ("_id", LiteFileInfo.GetChunckId(id, index++)));

        if (del == 0)
        {
            break;
        }
    }

    return true;
}
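From the public side, removing a stored file is a single call; the chunk loop above is what runs underneath. A minimal sketch (db.FileStorage assumed; the id is illustrative):

using System;
using LiteDB;

using (var db = new LiteDatabase(@"MyData.db"))
{
    // Delete returns false when no file entry exists for the id
    var removed = db.FileStorage.Delete("$/photos/2020/picture-01.jpg");

    Console.WriteLine(removed ? "file and its chunks removed" : "file not found");
}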
/// <summary>
/// Insert new file content inside the datafile, in the _files collection
/// </summary>
public LiteFileInfo Upload(LiteFileInfo file, Stream stream)
{
    if (file == null)
    {
        throw new ArgumentNullException("file");
    }

    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }

    file.UploadDate = DateTime.Now;

    // insert file in _files collection with 0 file length
    _engine.Insert(FILES, new BsonDocument[] { file.AsDocument });

    // for each chunk, insert as a chunk document
    foreach (var chunk in file.CreateChunks(stream))
    {
        _engine.Insert(CHUNKS, new BsonDocument[] { chunk });
    }

    // update fileLength/chunks to confirm full file length stored on disk
    _engine.Update(FILES, new BsonDocument[] { file.AsDocument });

    return file;
}
internal LiteFileStream(LiteCollection<LiteFileInfo<TFileId>> files, LiteCollection<BsonDocument> chunks, LiteFileInfo<TFileId> file, BsonValue fileId, FileAccess mode)
{
    _files = files;
    _chunks = chunks;
    _file = file;
    _fileId = fileId;
    _mode = mode;

    if (mode == FileAccess.Read)
    {
        // initialize first data block
        _currentChunkData = this.GetChunkData(_currentChunkIndex);
    }
    else if (mode == FileAccess.Write)
    {
        _buffer = new MemoryStream(MAX_CHUNK_SIZE);

        if (_file.Length > 0)
        {
            // delete all chunks before re-write
            var count = _chunks.DeleteMany("_id BETWEEN { f: @0, n: 0 } AND { f: @0, n: 99999999 }", _fileId);

            ENSURE(count == _file.Chunks);

            // clear file content length + chunks
            _file.Length = 0;
            _file.Chunks = 0;
        }
    }
}
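The range delete above relies on chunk _id values being composite documents of the form { f: <file id>, n: <chunk index> }. As a rough illustration (LiteDB 5 API assumed; the id value is made up), the same range expression can be reused from user code, for example to count the chunks stored for one file:

using System;
using LiteDB;

using (var db = new LiteDatabase(@"MyData.db"))
{
    var chunks = db.GetCollection("_chunks");
    var fileId = new BsonValue("$/photos/2020/picture-01.jpg");

    // same composite-id range the write constructor deletes over
    var count = chunks.Count("_id BETWEEN { f: @0, n: 0 } AND { f: @0, n: 99999999 }", fileId);

    Console.WriteLine("{0} chunk(s) stored", count);
}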
/// <summary>
/// Insert new file content inside the datafile, in the _files collection
/// </summary>
public LiteFileInfo Upload(LiteFileInfo file, Stream stream)
{
    if (file == null) throw new ArgumentNullException("file");
    if (stream == null) throw new ArgumentNullException("stream");

    // no transaction allowed
    if (this.Database.Transaction.IsInTransaction) throw LiteException.InvalidTransaction();

    file.UploadDate = DateTime.Now;

    // insert file in _files collection with 0 file length
    this.Files.Insert(file.AsDocument);

    // for each chunk, insert as a chunk document
    foreach (var chunk in file.CreateChunks(stream))
    {
        this.Chunks.Insert(chunk);

        // clear extend pages in cache to avoid excessive memory use with big files
        this.Database.Cache.RemoveExtendPages();
    }

    // update fileLength to confirm full file length stored on disk
    this.Files.Update(file.AsDocument);

    return file;
}
internal LiteFileStream(LiteEngine engine, LiteFileInfo file, FileAccess mode)
{
    _engine = engine;
    _file = file;
    _mode = mode;

    if (mode == FileAccess.Read)
    {
        // initialize first data block
        _currentChunkData = this.GetChunkData(_currentChunkIndex);
    }
    else if (mode == FileAccess.Write)
    {
        _buffer = new MemoryStream(MAX_CHUNK_SIZE);

        // delete chunks content if needed
        if (file.Length > 0)
        {
            var index = 0;
            var deleted = true;

            // delete one-by-one to avoid making all file pages dirty in memory
            while (deleted)
            {
                deleted = _engine.Delete(LiteStorage.CHUNKS, LiteFileStream.GetChunckId(_file.Id, index++)); // index is zero based
            }
        }

        // clear size counters
        file.Length = 0;
        file.Chunks = 0;
    }
}
/// <summary>
/// Load data inside storage and return it as a Stream
/// </summary>
internal LiteFileStream OpenRead(LiteFileInfo entry)
{
    if (entry == null)
    {
        throw new ArgumentNullException("entry");
    }

    return new LiteFileStream(_engine, entry);
}
private byte[] GetChunkData(int index)
{
    // check if there are any more chunks in this file
    var chunk = _engine
        .Find(LiteFileStorage.CHUNKS, Query.EQ("_id", LiteFileInfo.GetChunckId(FileInfo.Id, index)))
        .FirstOrDefault();

    // if chunk is null there are no more chunks
    return chunk == null ? null : chunk["data"].AsBinary;
}
private byte[] GetChunkData(int index)
{
    // avoid keeping too many extend pages in memory
    _db.Cache.RemoveExtendPages();

    // check if there are any more chunks in this file
    var chunks = _db.GetCollection("_chunks");
    var chunk = chunks.FindById(LiteFileInfo.GetChunckId(_file.Id, index));

    // if chunk is null there are no more chunks
    return chunk == null ? null : chunk["data"].AsBinary;
}
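In this older layout the chunk _id is a string derived from the file id; the write-mode constructor earlier in this listing deletes by the file-id-plus-backslash prefix. A hedged sketch, assuming a v4-era LiteDatabase and an illustrative file id, that uses the same prefix to count a file's stored chunks:

using System;
using LiteDB;

using (var db = new LiteDatabase(@"MyData.db"))
{
    var chunks = db.GetCollection("_chunks");

    // count chunks whose _id starts with the file id + "\" prefix,
    // mirroring the Query.StartsWith delete used by the write constructor
    var count = chunks.Count(Query.StartsWith("_id", "$/photos/2020/picture-01.jpg" + "\\"));

    Console.WriteLine("{0} chunk(s) stored", count);
}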
internal LiteFileStream(LiteDatabase db, LiteFileInfo file)
{
    _db = db;
    _file = file;

    if (file.Length == 0)
    {
        throw LiteException.FileCorrupted(file);
    }

    _positionInChunk = 0;
    _currentChunkIndex = 0;
    _currentChunkData = this.GetChunkData(_currentChunkIndex);
}
internal LiteFileStream(DbEngine engine, LiteFileInfo file)
{
    _engine = engine;
    _file = file;

    if (file.Length == 0)
    {
        throw LiteException.FileCorrupted(file);
    }

    _positionInChunk = 0;
    _currentChunkIndex = 0;
    _currentChunkData = this.GetChunkData(_currentChunkIndex);
}
internal LiteFileStream(DbEngine engine, LiteFileInfo file)
{
    _engine = engine;
    FileInfo = file;

    if (file.Length == 0)
    {
        throw LiteException.FileCorrupted(file);
    }

    _positionInChunk = 0;
    _currentChunkIndex = 0;
    _currentChunkData = GetChunkData(_currentChunkIndex);
}
/// <summary>
/// Upload a file based on stream data
/// </summary>
public LiteFileInfo Upload(string id, string filename, Stream stream)
{
    // checks if file exists
    var file = this.FindById(id);

    if (file == null)
    {
        file = new LiteFileInfo(_engine, id, filename ?? id);

        // insert if new
        _engine.Insert(FILES, file.AsDocument);
    }

    // copy stream content to litedb file stream (dispose the writer to flush the last chunk)
    using (var writer = file.OpenWrite())
    {
        stream.CopyTo(writer);
    }

    return file;
}
/// <summary>
/// Open or create a file in the storage and return a linked Stream for write operations
/// </summary>
public LiteFileStream OpenWrite(string id, string filename, BsonDocument metadata = null)
{
    // checks if file exists
    var file = this.FindById(id);

    if (file == null)
    {
        file = new LiteFileInfo(_engine, id, filename ?? id);

        // insert if new
        _engine.Insert(FILES, file.AsDocument);
    }

    // update metadata if passed
    if (metadata != null)
    {
        file.Metadata = metadata;
    }

    return file.OpenWrite();
}
/// <summary>
/// Delete a file inside the datafile and all related metadata
/// </summary>
public bool Delete(string id)
{
    if (string.IsNullOrEmpty(id))
    {
        throw new ArgumentNullException("id");
    }

    if (this.Database.Transaction.IsInTransaction)
    {
        throw LiteException.InvalidTransaction();
    }

    // remove file reference in _files
    var d = this.Files.Delete(id);

    // if not found, just return false
    if (d == false)
    {
        return false;
    }

    var index = 0;

    while (true)
    {
        var del = Chunks.Delete(LiteFileInfo.GetChunckId(id, index++));

        this.Database.Cache.RemoveExtendPages();

        if (del == false)
        {
            break;
        }
    }

    return true;
}
/// <summary>
/// Load data inside storage and return it as a Stream
/// </summary>
internal LiteFileStream OpenRead(LiteFileInfo entry)
{
    if (entry == null) throw new ArgumentNullException("entry");

    return new LiteFileStream(this.Database, entry);
}
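On the read side, callers get the same LiteFileStream back through the public facade and copy it out like any other Stream. A minimal sketch (db.FileStorage assumed; the paths and id are illustrative):

using System.IO;
using LiteDB;

using (var db = new LiteDatabase(@"MyData.db"))
using (var destination = File.Create(@"C:\Temp\picture-copy.jpg"))
{
    using (var stream = db.FileStorage.OpenRead("$/photos/2020/picture-01.jpg"))
    {
        stream.CopyTo(destination);
    }
}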
public static LiteException FileCorrupted(LiteFileInfo file)
{
    return new LiteException(103, "File '{0}' has no content or is corrupted", file.Id);
}
internal static LiteException FileCorrupted(LiteFileInfo file)
{
    return new LiteException(FILE_CORRUPTED, "File '{0}' has no content or is corrupted", file.Id);
}
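Since the read constructors above throw this exception for zero-length entries, callers that cannot rule out empty uploads can filter on the error code. A hedged example, assuming LiteException exposes the numeric ErrorCode matching the 103 / FILE_CORRUPTED value above; the id is illustrative:

using System;
using LiteDB;

try
{
    using (var db = new LiteDatabase(@"MyData.db"))
    using (var stream = db.FileStorage.OpenRead("$/maybe-empty.bin"))
    {
        // ... consume the stream ...
    }
}
catch (LiteException ex) when (ex.ErrorCode == 103) // FILE_CORRUPTED
{
    Console.WriteLine("stored file has no content or is corrupted");
}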