public void HasOffsetResetWhenReleased()
{
    // A leased writer that has been written to must come back empty
    // (Size == 0) once it is released to the pool.
    var leased = CharArrayTextWriter.Lease();
    leased.Write('a');

    leased.Release();

    Assert.AreEqual(0, leased.Size);
}
public void WritingSingleChar()
{
    // A single character must round-trip through ToCharSegment unchanged.
    var target = new CharArrayTextWriter();

    target.Write('z');

    var segment = target.ToCharSegment();
    CollectionAssert.AreEqual('z'.ToString(), segment);
}
public void WritingString()
{
    // A whole string must round-trip through ToCharSegment unchanged.
    var target = new CharArrayTextWriter();

    target.Write("test");

    var segment = target.ToCharSegment();
    CollectionAssert.AreEqual("test", segment);
}
public void writes_single_char()
{
    // A single character must round-trip through ToCharSegment unchanged.
    var target = new CharArrayTextWriter();

    target.Write('z');

    target.ToCharSegment().ShouldBe('z'.ToString());
}
public void writes_string()
{
    // A whole string must round-trip through ToCharSegment unchanged.
    var target = new CharArrayTextWriter();

    target.Write("test");

    target.ToCharSegment().ShouldBe("test");
}
public void writes_characters_much_beyond_limit()
{
    // Writing 8x the initial capacity forces several internal buffer
    // growths; the content must still round-trip intact.
    var payload = new string('a', CharArrayTextWriter.InitialSize * 8);
    var target = new CharArrayTextWriter();

    target.Write(payload);

    target.ToCharSegment().ShouldBe(payload);
}
public void writes_characters_beyond_limit()
{
    // One character past the buffer boundary must trigger a single grow
    // without losing data.
    var payload = new string('a', BigEnoughSize) + "b";
    var target = new CharArrayTextWriter();

    target.Write(payload);

    target.ToCharSegment().ShouldBe(payload);
}
public void WritingCharsMuchBeyondLimit()
{
    // Writing 8x the initial capacity forces several internal buffer
    // growths; the content must still round-trip intact.
    var payload = new string('a', CharArrayTextWriter.InitialSize * 8);
    var target = new CharArrayTextWriter();

    target.Write(payload);

    CollectionAssert.AreEqual(payload, target.ToCharSegment());
}
/// <summary>
/// Bulk-inserts <paramref name="documents"/> in batches of at most
/// <paramref name="batchSize"/>, sharing one pooled text writer across all
/// batches. For any mode other than plain inserts, rows are first staged in
/// a temp table and then reconciled into the document table.
/// </summary>
private void bulkInsertDocuments<T>(IReadOnlyCollection<T> documents, int batchSize, NpgsqlConnection conn, BulkInsertMode mode)
{
    var loader = _tenant.BulkLoaderFor<T>();

    // Duplicate-aware modes copy via a temp table created up front.
    if (mode != BulkInsertMode.InsertsOnly)
    {
        var sql = loader.CreateTempTableForCopying();
        conn.RunSql(sql);
    }

    using (var writer = new CharArrayTextWriter(_writerPool))
    {
        if (documents.Count <= batchSize)
        {
            loadDocuments(documents, loader, mode, conn, writer);
        }
        else
        {
            var batch = new List<T>(batchSize);
            foreach (var document in documents)
            {
                batch.Add(document);
                if (batch.Count < batchSize)
                {
                    continue;
                }

                loadDocuments(batch, loader, mode, conn, writer);
                batch.Clear();
            }

            // FIX: only flush the remainder when one exists. The original
            // called loadDocuments unconditionally here, issuing a useless
            // empty COPY whenever documents.Count was an exact multiple of
            // batchSize.
            if (batch.Count > 0)
            {
                loadDocuments(batch, loader, mode, conn, writer);
            }
        }
    }

    // Reconcile the staged rows according to the requested mode.
    if (mode == BulkInsertMode.IgnoreDuplicates)
    {
        var copy = loader.CopyNewDocumentsFromTempTable();
        conn.RunSql(copy);
    }
    else if (mode == BulkInsertMode.OverwriteExisting)
    {
        var overwrite = loader.OverwriteDuplicatesFromTempTable();
        var copy = loader.CopyNewDocumentsFromTempTable();
        conn.RunSql(overwrite, copy);
    }
}
public void WritingCharArray()
{
    // Only the requested slice (offset/count) of the array may be written.
    const int offset = 5;
    const int take = 1;
    var source = new[] { 'a', 'b', 'c', 'd', 'e', 'f' };
    var target = new CharArrayTextWriter();

    target.Write(source, offset, take);

    var expected = source.Skip(offset).Take(take);
    CollectionAssert.AreEqual(expected, target.ToCharSegment());
}
public void LoadIntoTempTable(ITenant tenant, ISerializer serializer, NpgsqlConnection conn, IEnumerable<T> documents, CharArrayTextWriter pool)
{
    // Streams every document as one row of a PostgreSQL binary COPY
    // aimed at the temp staging table.
    var copySql = TempLoaderSql();
    using (var writer = conn.BeginBinaryImport(copySql))
    {
        foreach (var document in documents)
        {
            writer.StartRow();
            LoadRow(writer, document, tenant, serializer, pool);
        }

        writer.Complete();
    }
}
public void writes_char_array()
{
    // Only the requested slice (offset/count) of the array may be written.
    const int offset = 5;
    const int take = 1;
    var source = new[] { 'a', 'b', 'c', 'd', 'e', 'f' };
    var target = new CharArrayTextWriter();

    target.Write(source, offset, take);

    target.ToCharSegment().ShouldBe(source.Skip(offset).Take(take));
}
public void returns_memory_to_pool()
{
    using (var pool = new Pool())
    {
        using (var writer = new CharArrayTextWriter(pool))
        {
            // Each write that outgrows the current buffer returns the old
            // one to the pool and leases a larger replacement.
            foreach (var chunk in new[] { "s", "s", "ssss", "ssssssssss" })
            {
                writer.Write(chunk);
            }

            pool.Disposed.Count.ShouldBe(3); // last one is still used
        }

        // Disposing the writer hands the final, live buffer back too.
        pool.Disposed.Count.ShouldBe(4);
    }
}
/// <summary>
/// Bulk-copies <paramref name="documents"/> using the loader's default
/// COPY statement against the primary document table.
/// </summary>
public void Load(ITenant tenant, ISerializer serializer, NpgsqlConnection conn, IEnumerable<T> documents, CharArrayTextWriter textWriter)
    => load(tenant, serializer, conn, documents, _sql, textWriter);
/// <summary>
/// Streams <paramref name="documents"/> into PostgreSQL through a binary
/// COPY defined by <paramref name="sql"/>, assigning identities on the way
/// and reusing <paramref name="textWriter"/> as a per-row scratch buffer.
/// </summary>
private void load(ITenant tenant, ISerializer serializer, NpgsqlConnection conn, IEnumerable<T> documents, string sql, CharArrayTextWriter textWriter)
{
    using (var writer = conn.BeginBinaryImport(sql))
    {
        foreach (var document in documents)
        {
            // Make sure the document has an identity before it is copied.
            // FIX: the original declared `var assigned = false` only to
            // receive the out flag and never read it; use a discard.
            _assignment.Assign(tenant, document, out _);

            writer.StartRow();
            _transferData(document, _mapping.AliasFor(document.GetType()), serializer, writer, textWriter, tenant.TenantId);

            // Reset the scratch buffer so rows do not bleed into each other.
            textWriter.Clear();
        }

        writer.Complete();
    }
}
/// <summary>
/// Delegates to the root-type loader, staging only the documents that are
/// actually of type <typeparamref name="TRoot"/>.
/// </summary>
public void LoadIntoTempTable(ITenant tenant, ISerializer serializer, NpgsqlConnection conn, IEnumerable<T> documents, CharArrayTextWriter pool)
    => _inner.LoadIntoTempTable(tenant, serializer, conn, documents.OfType<TRoot>(), pool);
private void loadDocuments<T>(IEnumerable<T> documents, IBulkLoader<T> loader, BulkInsertMode mode, NpgsqlConnection conn, CharArrayTextWriter writer)
{
    // Plain inserts go straight to the document table; every other mode
    // stages rows in a temp table for later reconciliation.
    if (mode == BulkInsertMode.InsertsOnly)
    {
        loader.Load(_tenant, Serializer, conn, documents, writer);
        return;
    }

    loader.LoadIntoTempTable(_tenant, Serializer, conn, documents, writer);
}
public void Load(ITenant tenant, DbObjectName table, ISerializer serializer, NpgsqlConnection conn, IEnumerable<T> documents, CharArrayTextWriter textWriter)
{
    // Retarget the COPY template at the caller-supplied table.
    var copySql = _baseSql.Replace("%TABLE%", table.QualifiedName);
    load(tenant, serializer, conn, documents, copySql, textWriter);
}
public void LoadIntoTempTable(ITenant tenant, ISerializer serializer, NpgsqlConnection conn, IEnumerable<T> documents, CharArrayTextWriter textWriter)
{
    // Retarget the COPY template at the temp staging table.
    var copySql = _baseSql.Replace("%TABLE%", _tempTableName);
    load(tenant, serializer, conn, documents, copySql, textWriter);
}
/// <summary>
/// Writes one <paramref name="document"/> as a single row of an in-progress
/// PostgreSQL binary COPY. Callers invoke <c>StartRow()</c> on
/// <paramref name="writer"/> before calling this, so implementations only
/// emit column values.
/// </summary>
/// <param name="writer">The active binary importer positioned at a fresh row.</param>
/// <param name="document">The document to serialize into the row.</param>
/// <param name="tenant">Tenant context for the row.</param>
/// <param name="serializer">Serializer used to produce the document body.</param>
/// <param name="pool">Reusable character buffer for serialization output.</param>
public abstract void LoadRow(NpgsqlBinaryImporter writer, T document, ITenant tenant, ISerializer serializer, CharArrayTextWriter pool);