/// <summary>
/// Verify a column stream can be extended by seeking backwards from the end,
/// overwriting the tail and appending more data past the previous end.
/// </summary>
public void ExtendingColumnStream()
{
    var bookmark = new byte[SystemParameters.BookmarkMost];
    int bookmarkSize;
    var payload = Any.BytesOfLength(4096);

    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        // Write all but the final 10 bytes, rewind 10 bytes, then overwrite
        // those bytes and append 10 more so the whole payload ends up stored.
        stream.Write(payload, 0, payload.Length - 10);
        stream.Seek(-10, SeekOrigin.End);
        stream.Write(payload, payload.Length - 20, 20);
        update.Save(bookmark, bookmark.Length, out bookmarkSize);
        transaction.Commit(CommitTransactionGrbit.LazyFlush);
    }

    Api.JetGotoBookmark(this.sesid, this.tableid, bookmark, bookmarkSize);
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        Assert.AreEqual(payload.Length, stream.Length);
        var actual = new byte[payload.Length];
        Assert.AreEqual(actual.Length, stream.Read(actual, 0, actual.Length));
        CollectionAssert.AreEqual(payload, actual);
    }
}
/// <summary>
/// Inserts or replaces the list item identified by (<paramref name="name"/>, <paramref name="key"/>),
/// storing <paramref name="data"/> as its body and stamping it with a fresh sequential etag.
/// </summary>
/// <param name="name">List name; first component of the lookup key.</param>
/// <param name="key">Item key within the list; second component of the lookup key.</param>
/// <param name="data">JSON body written into the "data" column.</param>
/// <param name="uuidType">Etag namespace used when generating the sequential uuid.</param>
public void Set(string name, string key, RavenJObject data, UuidType uuidType)
{
    Api.JetSetCurrentIndex(session, Lists, "by_name_and_key");
    Api.MakeKey(session, Lists, name, Encoding.Unicode, MakeKeyGrbit.NewKey);
    Api.MakeKey(session, Lists, key, Encoding.Unicode, MakeKeyGrbit.None);
    // Seek decides between Replace (row exists) and Insert (new row).
    var exists = Api.TrySeek(session, Lists, SeekGrbit.SeekEQ);

    using (var update = new Update(session, Lists, exists ? JET_prep.Replace : JET_prep.Insert))
    {
        Api.SetColumn(session, Lists, tableColumnsCache.ListsColumns["name"], name, Encoding.Unicode);
        Api.SetColumn(session, Lists, tableColumnsCache.ListsColumns["key"], key, Encoding.Unicode);
        Api.SetColumn(session, Lists, tableColumnsCache.ListsColumns["etag"], uuidGenerator.CreateSequentialUuid(uuidType).TransformToValueForEsentSorting());
        using (var columnStream = new ColumnStream(session, Lists, tableColumnsCache.ListsColumns["data"]))
        {
            // On replace, truncate any previous body before writing the new one.
            if (exists)
            {
                columnStream.SetLength(0);
            }
            using (Stream stream = new BufferedStream(columnStream))
            {
                data.WriteTo(stream);
                stream.Flush();
            }
        }
        update.Save();
    }
}
/// <summary>
/// Verify that bytes in the middle of a column stream can be overwritten
/// without changing the total stream length.
/// </summary>
public void OverwriteColumnStream()
{
    var bookmark = new byte[SystemParameters.BookmarkMost];
    int bookmarkSize;
    var original = Any.BytesOfLength(1024);
    var overwrite = Any.BytesOfLength(128);
    const int Offset = 10;

    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        stream.Write(original, 0, original.Length);
        // Rewind via the Position setter, then advance with a relative seek,
        // exercising both positioning code paths.
        stream.Position = 0;
        stream.Seek(Offset, SeekOrigin.Current);
        stream.Write(overwrite, 0, overwrite.Length);
        update.Save(bookmark, bookmark.Length, out bookmarkSize);
        transaction.Commit(CommitTransactionGrbit.LazyFlush);
    }

    Api.JetGotoBookmark(this.sesid, this.tableid, bookmark, bookmarkSize);
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        Assert.AreEqual(original.Length, stream.Length);

        // Expected image: original data with the overwrite patched in at Offset.
        var expected = new byte[original.Length];
        Array.Copy(original, 0, expected, 0, original.Length);
        Array.Copy(overwrite, 0, expected, Offset, overwrite.Length);

        var actual = new byte[original.Length];
        Assert.AreEqual(actual.Length, stream.Read(actual, 0, actual.Length));
        CollectionAssert.AreEqual(expected, actual);
    }
}
/// <summary>
/// Verify that SetLength can shrink a column stream and that the truncated
/// data round-trips after the update is saved.
/// </summary>
public void ShrinkColumnStream()
{
    var bookmark = new byte[SystemParameters.BookmarkMost];
    int bookmarkSize;
    const int Length = 1345;
    var data = Any.BytesOfLength(Length);

    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        // Write the data twice, then shrink back to a single copy.
        stream.Write(data, 0, data.Length);
        stream.Write(data, 0, data.Length);
        Assert.AreEqual(Length * 2, stream.Length);
        stream.SetLength(Length);
        Assert.AreEqual(Length, stream.Length);
        update.Save(bookmark, bookmark.Length, out bookmarkSize);
        transaction.Commit(CommitTransactionGrbit.LazyFlush);
    }

    Api.JetGotoBookmark(this.sesid, this.tableid, bookmark, bookmarkSize);
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        Assert.AreEqual(Length, stream.Length);
        var buffer = new byte[Length];

        // Fix: assert the number of bytes actually read instead of
        // silently discarding the return value of Read.
        Assert.AreEqual(Length, stream.Read(buffer, 0, buffer.Length));
        CollectionAssert.AreEqual(data, buffer);
    }
}
/// <summary>
/// Adds a new attachment or updates an existing one stored under <paramref name="key"/>.
/// </summary>
/// <param name="key">Attachment name used as the primary lookup key.</param>
/// <param name="etag">Expected etag for optimistic concurrency; null skips the check.</param>
/// <param name="data">Attachment body; may be null on update to keep the existing body.</param>
/// <param name="headers">Attachment metadata, serialized into the "metadata" column.</param>
/// <returns>The freshly generated etag for the stored attachment.</returns>
/// <exception cref="ConcurrencyException">A non-null etag did not match the stored one.</exception>
/// <exception cref="InvalidOperationException">A new attachment was added with null data.</exception>
public Etag AddAttachment(string key, Etag etag, Stream data, RavenJObject headers)
{
    Api.JetSetCurrentIndex(session, Files, "by_name");
    Api.MakeKey(session, Files, key, Encoding.Unicode, MakeKeyGrbit.NewKey);
    var isUpdate = Api.TrySeek(session, Files, SeekGrbit.SeekEQ);
    if (isUpdate)
    {
        // Optimistic concurrency: when the caller supplied an etag it must
        // match the one currently stored for this attachment.
        var existingEtag = Etag.Parse(Api.RetrieveColumn(session, Files, tableColumnsCache.FilesColumns["etag"]));
        if (existingEtag != etag && etag != null)
        {
            throw new ConcurrencyException("PUT attempted on attachment '" + key + "' using a non current etag")
            {
                ActualETag = existingEtag,
                ExpectedETag = etag
            };
        }
    }
    else
    {
        if (data == null)
            throw new InvalidOperationException("When adding new attachment, the attachment data must be specified");

        // New attachment: bump the global attachment counter via escrow update.
        if (Api.TryMoveFirst(session, Details))
            Api.EscrowUpdate(session, Details, tableColumnsCache.DetailsColumns["attachment_count"], 1);
    }

    Etag newETag = uuidGenerator.CreateSequentialUuid(UuidType.Attachments);
    using (var update = new Update(session, Files, isUpdate ? JET_prep.Replace : JET_prep.Insert))
    {
        Api.SetColumn(session, Files, tableColumnsCache.FilesColumns["name"], key, Encoding.Unicode);
        if (data != null)
        {
            long written;
            using (var columnStream = new ColumnStream(session, Files, tableColumnsCache.FilesColumns["data"]))
            {
                // On update, truncate the previous body before writing the new one.
                if (isUpdate)
                    columnStream.SetLength(0);
                using (var stream = new BufferedStream(columnStream))
                {
                    data.CopyTo(stream);
                    written = stream.Position;
                    stream.Flush();
                }
            }
            if (written == 0) // empty attachment
            {
                // Explicitly set an empty value so the column is non-null.
                Api.SetColumn(session, Files, tableColumnsCache.FilesColumns["data"], new byte[0]);
            }
        }
        Api.SetColumn(session, Files, tableColumnsCache.FilesColumns["etag"], newETag.TransformToValueForEsentSorting());
        Api.SetColumn(session, Files, tableColumnsCache.FilesColumns["metadata"], headers.ToString(Formatting.None), Encoding.Unicode);
        update.Save();
    }
    logger.Debug("Adding attachment {0}", key);
    return newETag;
}
/// <summary>
/// Verify that writing past the current end of a column stream grows it,
/// with the bytes before the write position reading back as zero.
/// </summary>
public void GrowColumnStreamByWritingPastEnd()
{
    var bookmark = new byte[SystemParameters.BookmarkMost];
    int bookmarkSize;
    const int Length = 1345;
    const int Position = 1500;
    var payload = Any.BytesOfLength(Length);

    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        // Position beyond the (empty) end before writing anything.
        stream.Position = Position;
        stream.Write(payload, 0, payload.Length);
        update.Save(bookmark, bookmark.Length, out bookmarkSize);
        transaction.Commit(CommitTransactionGrbit.LazyFlush);
    }

    Api.JetGotoBookmark(this.sesid, this.tableid, bookmark, bookmarkSize);
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        Assert.AreEqual(Length + Position, stream.Length);

        // Expected image: zero-filled gap followed by the payload.
        var expected = new byte[Length + Position];
        Array.Copy(payload, 0, expected, Position, Length);

        var actual = new byte[Length + Position];
        Assert.AreEqual(Length + Position, stream.Read(actual, 0, actual.Length));
        CollectionAssert.AreEqual(expected, actual);
    }
}
/// <summary>
/// Verify that each itag of a multi-valued column can be written and read
/// back independently through a ColumnStream.
/// </summary>
public void SetAndRetrieveMultiValueColumnStream()
{
    string[] data = { Any.String, Any.String, Any.String, Any.String, Any.String, Any.String };

    Api.JetBeginTransaction(this.sesid);
    Api.JetPrepareUpdate(this.sesid, this.tableid, JET_prep.Insert);
    for (int i = 0; i < data.Length; ++i)
    {
        // Itag numbering is 1-based: itag i+1 holds data[i].
        var columnStream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText)
        {
            Itag = i + 1,
        };

        // Disposing the writer also disposes the underlying column stream.
        using (var writer = new StreamWriter(columnStream))
        {
            writer.WriteLine(data[i]);
        }
    }

    this.UpdateAndGotoBookmark();
    Api.JetCommitTransaction(this.sesid, CommitTransactionGrbit.LazyFlush);

    for (int i = 0; i < data.Length; ++i)
    {
        var columnStream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText)
        {
            Itag = i + 1,
        };
        using (var reader = new StreamReader(columnStream))
        {
            Assert.AreEqual(data[i], reader.ReadLine());
        }
    }
}
/// <summary>
/// Verify that a basic type round-trips through a ColumnStream using
/// BinaryFormatter serialization.
/// </summary>
public void ColumnStreamCanSerializeBasicType()
{
    var expected = Any.Int64;

    using (var t = new Transaction(this.sesid))
    using (var u = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        var serializer = new BinaryFormatter
        {
            Context = new StreamingContext(StreamingContextStates.Persistence),
        };
        serializer.Serialize(stream, expected);
        u.Save();
        t.Commit(CommitTransactionGrbit.LazyFlush);
    }

    Api.JetMove(this.sesid, this.tableid, JET_Move.First, MoveGrbit.None);
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        // NOTE(review): BinaryFormatter is obsolete and unsafe for untrusted
        // input; acceptable here only because the test reads back a value it
        // just wrote itself.
        var deserializer = new BinaryFormatter();
        var actual = (long)deserializer.Deserialize(stream);
        Assert.AreEqual(expected, actual);
    }
}
/// <summary>
/// Verify that Read can deposit data at a non-zero offset in the
/// destination buffer.
/// </summary>
public void ReadAtNonZeroOffset()
{
    var bookmark = new byte[SystemParameters.BookmarkMost];
    int bookmarkSize;
    var data = Any.BytesOfLength(1024);

    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        stream.Write(data, 0, data.Length);
        update.Save(bookmark, bookmark.Length, out bookmarkSize);
        transaction.Commit(CommitTransactionGrbit.LazyFlush);
    }

    Api.JetGotoBookmark(this.sesid, this.tableid, bookmark, bookmarkSize);
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        var retrieved = new byte[data.Length * 2];

        // Fix: assert how many bytes were read (the return value was discarded).
        Assert.AreEqual(data.Length, stream.Read(retrieved, data.Length, data.Length));
        for (int i = data.Length; i < retrieved.Length; ++i)
        {
            // Fix: Assert.AreEqual takes (expected, actual); the original
            // call had the arguments reversed, which produces a misleading
            // failure message.
            Assert.AreEqual(data[i - data.Length], retrieved[i]);
        }
    }
}
/// <summary>
/// Verify that Read returns the number of bytes actually read when fewer
/// bytes remain than were requested.
/// </summary>
public void ReadReturnsNumberOfBytesRead()
{
    var bookmark = new byte[SystemParameters.BookmarkMost];
    int bookmarkSize;
    var payload = Any.BytesOfLength(1024);

    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        stream.Write(payload, 0, payload.Length);
        update.Save(bookmark, bookmark.Length, out bookmarkSize);
        transaction.Commit(CommitTransactionGrbit.LazyFlush);
    }

    Api.JetGotoBookmark(this.sesid, this.tableid, bookmark, bookmarkSize);
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        var buffer = new byte[payload.Length];

        // Position one byte before the end: only one byte is left to read,
        // even though the buffer asks for more.
        stream.Seek(-1, SeekOrigin.End);
        Assert.AreEqual(1, stream.Read(buffer, 0, buffer.Length));
        Assert.AreEqual(payload[payload.Length - 1], buffer[0]);
    }
}
/// <summary>
/// Inserts a document under <paramref name="key"/>, optionally replacing an
/// existing document with the same key when <paramref name="checkForUpdates"/> is set.
/// </summary>
/// <param name="key">Document key.</param>
/// <param name="data">Document body, written through the configured document codecs.</param>
/// <param name="metadata">Document metadata, written without codecs.</param>
/// <param name="checkForUpdates">When true, look up the key first and replace if found.</param>
/// <returns>The new etag, save timestamp, and whether an existing row was updated.</returns>
public AddDocumentResult InsertDocument(string key, RavenJObject data, RavenJObject metadata, bool checkForUpdates)
{
    var prep = JET_prep.Insert;
    bool isUpdate = false;
    if (checkForUpdates)
    {
        Api.JetSetCurrentIndex(session, Documents, "by_key");
        Api.MakeKey(session, Documents, key, Encoding.Unicode, MakeKeyGrbit.NewKey);
        isUpdate = Api.TrySeek(session, Documents, SeekGrbit.SeekEQ);
        if (isUpdate)
        {
            prep = JET_prep.Replace;
        }
    }

    using (var update = new Update(session, Documents, prep))
    {
        Api.SetColumn(session, Documents, tableColumnsCache.DocumentsColumns["key"], key, Encoding.Unicode);

        using (var columnStream = new ColumnStream(session, Documents, tableColumnsCache.DocumentsColumns["data"]))
        {
            // On replace, truncate the previous body before writing the new one.
            if (isUpdate)
            {
                columnStream.SetLength(0);
            }
            // Wrap the column stream in the configured codec chain
            // (e.g. compression/encryption) before writing the body.
            using (Stream stream = new BufferedStream(columnStream))
            using (var finalStream = documentCodecs.Aggregate(stream, (current, codec) => codec.Encode(key, data, metadata, current)))
            {
                data.WriteTo(finalStream);
                finalStream.Flush();
            }
        }

        Guid newEtag = uuidGenerator.CreateSequentialUuid(UuidType.Documents);
        Api.SetColumn(session, Documents, tableColumnsCache.DocumentsColumns["etag"], newEtag.TransformToValueForEsentSorting());
        DateTime savedAt = SystemTime.UtcNow;
        Api.SetColumn(session, Documents, tableColumnsCache.DocumentsColumns["last_modified"], savedAt.ToBinary());

        using (var columnStream = new ColumnStream(session, Documents, tableColumnsCache.DocumentsColumns["metadata"]))
        {
            if (isUpdate)
            {
                columnStream.SetLength(0);
            }
            // Metadata bypasses the document codecs and is written directly.
            using (Stream stream = new BufferedStream(columnStream))
            {
                metadata.WriteTo(stream);
                stream.Flush();
            }
        }

        update.Save();
        return (new AddDocumentResult
        {
            Etag = newEtag,
            SavedAt = savedAt,
            Updated = isUpdate
        });
    }
}
/// <summary>
/// Verify that Seek throws when the offset exceeds the maximum supported
/// column size.
/// </summary>
public void ColumnStreamThrowsExceptionWhenSeekOffsetIsTooLarge()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        // 0x800000000 is far beyond any valid column size.
        stream.Seek(0x800000000, SeekOrigin.Begin);
    }
}
/// <summary>
/// Verify that ToString reports the columnid and itag of the stream.
/// </summary>
public void ColumnStreamToString()
{
    var columnid = new JET_COLUMNID { Value = 0x1a };
    var stream = new ColumnStream(JET_SESID.Nil, JET_TABLEID.Nil, columnid)
    {
        Itag = 2,
    };
    Assert.AreEqual("ColumnStream(0x1a:2)", stream.ToString());
}
/// <summary>
/// Verify that setting Position past the maximum supported column size throws.
/// </summary>
public void SettingPositionThrowsExceptionWhenPositionIsTooLong()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        // 0x800000000 is far beyond any valid column size.
        stream.Position = 0x800000000;
    }
}
/// <summary>
/// Verify that Write throws when given a null buffer.
/// </summary>
public void WriteThrowsExceptionWhenBufferIsNull()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        stream.Write(null, 0, 0);
    }
}
/// <summary>
/// Verify that a ColumnStream reports itself as seekable.
/// </summary>
public void ColumnStreamSupportsSeek()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        Assert.IsTrue(stream.CanSeek);
    }
}
/// <summary>
/// Verify that Seek throws when the SeekOrigin is not a valid member of
/// the enumeration.
/// </summary>
public void ColumnStreamThrowsExceptionWhenSeekOriginIsInvalid()
{
    using (var t = new Transaction(this.sesid))
    using (var u = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        // Fix: use an in-range offset (0) so that only the invalid origin
        // can trigger the exception. The original passed 0x800000000, which
        // is also an invalid (too-large) offset, so the test could pass via
        // the offset check instead of the origin check it is named for —
        // the oversized offset already has its own dedicated test.
        stream.Seek(0, (SeekOrigin)0x1234);
    }
}
/// <summary>
/// Verify that SetLength throws when given a negative length.
/// </summary>
public void ColumnStreamSetLengthThrowsExceptionWhenLengthIsNegative()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        stream.SetLength(-1);
    }
}
/// <summary>
/// Initializes the ESENT instance/session, opens the "Containers" catalog
/// table and builds the name -> container-id lookup dictionary.
/// </summary>
/// <param name="filePath">Path of the database file.</param>
/// <param name="pageSize">Database page size; the version store uses twice this.</param>
/// <returns>True on success; false when the "Containers" table cannot be opened.</returns>
public bool Initialize(String filePath, int pageSize)
{
    this._databaseFile = filePath;
    DatabasePageSize = pageSize;
    VersionStorePageSize = pageSize * 2;
    _instance = CreateEsentInstance();
    _primarySessionId = new Session(_instance);
    InitializeDatabaseAndTables();

    JET_TABLEID tableid;
    if (!Api.TryOpenTable(_primarySessionId, _primaryDatabaseId, "Containers", OpenTableGrbit.None, out tableid))
    {
        return false;
    }

    this.tableNameDict = new Dictionary<string, string>();

    // Read-only scan; the transaction is rolled back on dispose, which is
    // harmless here since nothing is modified.
    using (var trx = new Transaction(_primarySessionId))
    {
        if (Api.TryMoveFirst(_primarySessionId, tableid))
        {
            do
            {
                // Fix: the original leaked three undisposed ColumnStreams per
                // row and duplicated the read logic; both are handled by the
                // private helpers below.
                string name = ReadContainerStringColumn(tableid, "Name");
                string partition = ReadContainerStringColumn(tableid, "PartitionId");
                string displayName = name + "(" + partition + ")";

                byte[] idBytes = ReadContainerColumnBytes(tableid, "ContainerId");
                tableNameDict[displayName] = BitConverter.ToUInt64(idBytes, 0).ToString();
            }
            while (Api.TryMoveNext(_primarySessionId, tableid));
        }
    }

    Api.JetCloseTable(_primarySessionId, tableid);
    return true;
}

/// <summary>Reads the raw bytes of one column of the current "Containers" row.</summary>
private byte[] ReadContainerColumnBytes(JET_TABLEID tableid, string columnName)
{
    JET_COLUMNBASE columnBase;
    Api.JetGetColumnInfo(_primarySessionId, _primaryDatabaseId, "Containers", columnName, out columnBase);
    using (var stream = new ColumnStream(_primarySessionId, tableid, columnBase.columnid))
    {
        var data = new byte[stream.Length];
        stream.Read(data, 0, (int)stream.Length);
        return data;
    }
}

/// <summary>Reads a Unicode string column of the current "Containers" row, stripping NUL padding.</summary>
private string ReadContainerStringColumn(JET_TABLEID tableid, string columnName)
{
    return Encoding.Unicode.GetString(ReadContainerColumnBytes(tableid, columnName)).Replace("\0", string.Empty);
}
/// <summary>
/// Returns the cached ColumnStream, creating it on first use and
/// re-initializing it for the requested length and seekability.
/// </summary>
public Stream GetStream(int len, bool canSeek)
{
    if (_columnStream is null)
    {
        _columnStream = new ColumnStream(this);
    }

    _columnStream.Init(len, canSeek);
    return _columnStream;
}
/// <summary>
/// Verify that Read throws when the byte count is negative.
/// </summary>
public void ReadThrowsExceptionWhenNumberOfBytesIsNegative()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        var destination = new byte[10];
        stream.Read(destination, 0, -1);
    }
}
/// <summary>
/// Verify that Write throws when the buffer offset is negative.
/// </summary>
public void WriteThrowsExceptionWhenBufferOffsetIsNegative()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        var source = new byte[10];
        stream.Write(source, -1, 1);
    }
}
/// <summary>
/// Verify that Write throws when offset + count runs past the end of the buffer.
/// </summary>
public void WriteThrowsExceptionWhenNumberOfBytesIsTooLarge()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        var source = new byte[10];
        // Offset 1 + length 10 exceeds the 10-byte buffer.
        stream.Write(source, 1, source.Length);
    }
}
/// <summary>
/// Verify that Read throws when the buffer offset is beyond the end of the buffer.
/// </summary>
public void ReadThrowsExceptionWhenBufferOffsetIsTooBig()
{
    using (var transaction = new Transaction(this.sesid))
    using (var update = new Update(this.sesid, this.tableid, JET_prep.Insert))
    using (var stream = new ColumnStream(this.sesid, this.tableid, this.columnidLongText))
    {
        var destination = new byte[10];
        stream.Read(destination, destination.Length, 1);
    }
}
/// <summary>
/// Creates a ColumnStream over an in-memory stream with the given codec,
/// pre-populated with <paramref name="data"/> and rewound for reading.
/// </summary>
private ColumnStream<T> CreateCsub<T>(ICodec<T> codec, byte[] data)
    where T : unmanaged
{
    var columnStream = new ColumnStream<T>(
        new MinotaurMemoryStream(),
        codec,
        1024);

    columnStream.WriteAndReset(data, sizeof(byte));
    return columnStream;
}
/// <summary>
/// Reads column bytes from the stream's connector buffer into
/// <paramref name="buffer"/>, updating the stream's running count of
/// consumed bytes.
/// </summary>
/// <returns>The number of bytes read into <paramref name="buffer"/>.</returns>
static async ValueTask<int> ReadLong(ColumnStream stream, Memory<byte> buffer, CancellationToken cancellationToken = default)
{
    // Register a nested cancellable operation only when the stream permits
    // it; attemptPgCancellation is explicitly disabled for this read.
    using var registration = stream._startCancellableOperations
        ? stream._connector.StartNestedCancellableOperation(cancellationToken, attemptPgCancellation: false)
        : default;

    var read = await stream._buf.ReadAsync(buffer, cancellationToken);
    stream._read += read; // running total of bytes consumed from the column
    return (read);
}
/// <summary>Retrieve the column value from the DB.</summary>
/// <returns>The deserialized object, or null when the column is empty.</returns>
public override object Deserialize(EseCursorBase cur, JET_COLUMNID idColumn)
{
    using (var columnStream = new ColumnStream(cur.idSession, cur.idTable, idColumn))
    {
        // An empty column means no value was ever stored.
        if (columnStream.Length < 1)
        {
            return null;
        }

        using (var reader = XmlDictionaryReader.CreateBinaryReader(columnStream, this.dict, XmlDictionaryReaderQuotas.Max))
        {
            return m_serializer.ReadObject(reader);
        }
    }
}
/// <summary>
/// Inserts a file page keyed by its content hash, de-duplicating against
/// pages already present in the Pages table.
/// </summary>
/// <param name="buffer">Page content.</param>
/// <param name="size">Number of valid bytes in <paramref name="buffer"/>.</param>
/// <returns>The id of the new or already-existing page row.</returns>
/// <exception cref="ConcurrencyException">Another thread inserted the same page concurrently.</exception>
public int InsertPage(byte[] buffer, int size)
{
    var key = new HashKey(buffer, size);

    Api.JetSetCurrentIndex(session, Pages, "by_keys");
    Api.MakeKey(session, Pages, key.Weak, MakeKeyGrbit.NewKey);
    Api.MakeKey(session, Pages, key.Strong, MakeKeyGrbit.None);

    if (Api.TrySeek(session, Pages, SeekGrbit.SeekEQ))
    {
        // The page already exists: bump its reference count and reuse it.
        Api.EscrowUpdate(session, Pages, tableColumnsCache.PagesColumns["usage_count"], 1);
        return (Api.RetrieveColumnAsInt32(session, Pages, tableColumnsCache.PagesColumns["id"]).Value);
    }

    var bookMarkBuffer = new byte[bookmarkMost];
    var actualSize = 0;
    using (var update = new Update(session, Pages, JET_prep.Insert))
    {
        Api.SetColumn(session, Pages, tableColumnsCache.PagesColumns["page_strong_hash"], key.Strong);
        Api.SetColumn(session, Pages, tableColumnsCache.PagesColumns["page_weak_hash"], key.Weak);

        using (var columnStream = new ColumnStream(session, Pages, tableColumnsCache.PagesColumns["data"]))
        {
            // Write the page body through the configured file codec chain.
            using (Stream stream = new BufferedStream(columnStream))
            using (var finalStream = fileCodecs.Aggregate(stream, (current, codec) => codec.EncodePage(current)))
            {
                finalStream.Write(buffer, 0, size);
                finalStream.Flush();
            }
        }

        try
        {
            update.Save(bookMarkBuffer, bookMarkBuffer.Length, out actualSize);
        }
        catch (EsentKeyDuplicateException)
        {
            // it means that page is being inserted by another thread
            throw new ConcurrencyException("The same file page is being created");
        }
    }

    // Navigate to the freshly inserted row to fetch its id.
    Api.JetGotoBookmark(session, Pages, bookMarkBuffer, actualSize);
    return (Api.RetrieveColumnAsInt32(session, Pages, tableColumnsCache.PagesColumns["id"]).Value);
}
/// <summary>Store the column value in the database.</summary>
/// <param name="cur">Cursor positioned on the row being written.</param>
/// <param name="idColumn">Column to store the value in.</param>
/// <param name="value">Object to serialize (binary XML via the data-contract serializer).</param>
/// <param name="bNewRecord">True when inserting a new record (unused here; the shrink below handles overwrites).</param>
public override void Serialize(EseCursorBase cur, JET_COLUMNID idColumn, object value, bool bNewRecord)
{
    using (var stm = new ColumnStream(cur.idSession, cur.idTable, idColumn))
    {
        using (XmlDictionaryWriter bw = XmlDictionaryWriter.CreateBinaryWriter(stm, this.dict))
        {
            this.m_serializer.WriteObject(bw, value);
            bw.Flush();
        }
        // If the new value is shorter than the previous one, trim the leftover tail.
        // TODO [low]: if the ( current size - new size < 4kb ), then append spaces/zeros instead of resizing the column. The comments inside the SetLength method suggest that shrinking the column is very inefficient for large values.
        if (stm.Position < stm.Length)
        {
            stm.SetLength(stm.Position);
        }
    }
}
/// <summary>
/// Benchmark setup: allocates the unmanaged read buffer, generates random
/// source data and constructs the column-stream variants under test.
/// </summary>
public void Setup()
{
    // Random payload of WROTE bytes shared by every stream variant.
    var data = Factory.CreateRandomBytes(WROTE);

    // Unmanaged destination buffer of READ bytes; tracked in _unmanagedPtr
    // so it can be freed later (presumably in a cleanup method — TODO confirm).
    var ptr = Marshal.AllocHGlobal(READ);
    _unmanagedPtr.Add(ptr);
    _rData = (byte *)ptr;
    _readData = new byte[READ];

    _csFullClassBase = CreateCsb(new VoidCodecFullStream(), data);
    //_csTemplateCodecBase = CreateCsb(new TemplateVoidCodec(), data);
    //_csFullTemplateBase = CreateCstb(new TemplateVoidCodec(), data);
    _csFullClass = CreateCs <byte, VoidCodec <byte> >(new VoidCodec <byte>(), data);
    _csUnsafeClass = CreateCsub <byte>(new VoidCodec <byte>(), data);
    //_csTemplateCodec1 = CreateCs(new TemplateVoidCodec(), data);
    //_csFullTemplateCodec = CreateCst(new TemplateVoidCodec(), data);
}