public virtual void Write(V value)
{
    Preconditions.CheckNotNullArgument(value);

    // Map the strongly typed value onto an Avro GenericRecord, append it to the
    // underlying file writer, and sync after the write.
    GenericRecord record = ValueDef.GetRecord(value);
    mWriter.Append(record);
    mWriter.Sync();
}
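Write relies on members that are not part of this excerpt. The sketch below is only an assumed shape inferred from how they are called; IValueDef and the Preconditions body are hypothetical stand-ins, not declarations from the source.

// Hypothetical shapes implied by the calls in Write above.
public interface IValueDef<V>
{
    // Maps a strongly typed value onto the Avro record schema.
    GenericRecord GetRecord(V value);
}

internal static class Preconditions
{
    // Stand-in for the helper used above: reject null arguments.
    public static void CheckNotNullArgument<T>(T argument) where T : class
    {
        if (argument == null)
        {
            throw new ArgumentNullException(nameof(argument));
        }
    }
}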
// Disabled due to long runtime [TestCase(specificSchema, Codec.Type.Deflate, 1000, 588, 998)]
public void TestSyncAndSeekPositions(string schemaStr, Codec.Type codecType, int iterations,
                                     int firstSyncPosition, int secondSyncPosition)
{
    // create and write out
    IList<Foo> records = MakeRecords(GetTestFooObject());
    MemoryStream dataFileOutputStream = new MemoryStream();
    Schema schema = Schema.Parse(schemaStr);
    DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
    using (IFileWriter<Foo> dataFileWriter =
        DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
    {
        for (int i = 0; i < iterations; ++i)
        {
            foreach (Foo foo in records)
            {
                dataFileWriter.Append(foo);
            }

            // write out block
            if (i == firstSyncPosition || i == secondSyncPosition)
            {
                dataFileWriter.Sync();
            }
        }
    }

    MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());

    // read syncs
    IList<long> syncs = new List<long>();
    using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
    {
        long previousSync = -1;

        foreach (Foo foo in reader.NextEntries)
        {
            if (reader.PreviousSync() != previousSync
                && reader.Tell() != reader.PreviousSync()) // EOF
            {
                previousSync = reader.PreviousSync();
                syncs.Add(previousSync);
            }
        }

        // verify syncs with seeks
        reader.Sync(0); // first sync
        Assert.AreEqual(reader.PreviousSync(), syncs[0],
                        string.Format("Error syncing reader to position: {0}", syncs[0]));

        foreach (long sync in syncs) // the rest
        {
            reader.Seek(sync);
            Foo foo = reader.Next();
            Assert.IsNotNull(foo, string.Format("Error seeking to sync position: {0}", sync));
        }
    }
}
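This test and the two below lean on fixture pieces that are not shown in this excerpt: the specificSchema constant and the MakeRecords/GetTestFooObject helpers. The sketch below only illustrates the shape those helpers would need for the code to compile; the real schema string and the generated Foo class live elsewhere in the test project, so every body and value here is a placeholder.

// Placeholder shapes for the fixture pieces assumed by the TestCase rows above.
private const string specificSchema = "...";   // elided: the Avro record schema Foo was generated from

private static Foo GetTestFooObject()
{
    // hypothetical: the real helper presumably returns one populated Foo instance
    return new Foo();
}

private static IList<Foo> MakeRecords(Foo prototype)
{
    // hypothetical: the tests only require a non-empty batch built from the prototype
    IList<Foo> records = new List<Foo>();
    for (int i = 0; i < 10; ++i)
    {
        records.Add(prototype);
    }
    return records;
}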
[TestCase(specificSchema, Codec.Type.Null, 0, 330)] // 330
public void TestPartialRead(string schemaStr, Codec.Type codecType, int position, int expectedRecords)
{
    // create and write out
    IList<Foo> records = MakeRecords(GetTestFooObject());
    MemoryStream dataFileOutputStream = new MemoryStream();
    Schema schema = Schema.Parse(schemaStr);
    DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
    using (IFileWriter<Foo> dataFileWriter =
        DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
    {
        for (int i = 0; i < 10; ++i)
        {
            foreach (Foo foo in records)
            {
                dataFileWriter.Append(foo);
            }

            // write out block
            if (i == 1 || i == 4)
            {
                dataFileWriter.Sync();
            }
        }
    }

    MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());

    // read back
    IList<Foo> readRecords = new List<Foo>();
    using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
    {
        // move to next block from position
        reader.Sync(position);

        // read records from synced position
        foreach (Foo rec in reader.NextEntries)
        {
            readRecords.Add(rec);
        }
    }

    Assert.IsTrue(readRecords != null && readRecords.Count == expectedRecords,
                  string.Format("Error performing partial read after position: {0}", position));
}
public void TestPartialReadAll(string schemaStr, Codec.Type codecType)
{
    // create and write out
    IList<Foo> records = MakeRecords(GetTestFooObject());
    MemoryStream dataFileOutputStream = new MemoryStream();
    Schema schema = Schema.Parse(schemaStr);
    DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
    int numRecords = 0;

    List<SyncLog> syncLogs = new List<SyncLog>();
    using (IFileWriter<Foo> dataFileWriter =
        DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
    {
        dataFileWriter.Flush();
        syncLogs.Add(new SyncLog
        {
            Position = dataFileOutputStream.Position - DataFileConstants.SyncSize + 1,
            RemainingRecords = numRecords
        });
        long lastPosition = dataFileOutputStream.Position;

        for (int i = 0; i < 10; ++i)
        {
            foreach (Foo foo in records)
            {
                dataFileWriter.Append(foo);
                if (dataFileOutputStream.Position != lastPosition)
                {
                    syncLogs.Add(new SyncLog
                    {
                        Position = dataFileOutputStream.Position - DataFileConstants.SyncSize + 1,
                        RemainingRecords = numRecords
                    });
                    lastPosition = dataFileOutputStream.Position;
                }

                numRecords++;
            }

            // write out block
            if (i == 1 || i == 4)
            {
                dataFileWriter.Sync();
                syncLogs.Add(new SyncLog
                {
                    Position = dataFileOutputStream.Position - DataFileConstants.SyncSize + 1,
                    RemainingRecords = numRecords
                });
                lastPosition = dataFileOutputStream.Position;
            }
        }

        dataFileWriter.Flush();
        syncLogs.Add(new SyncLog { Position = dataFileOutputStream.Position, RemainingRecords = numRecords });
    }

    MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());

    // read back
    using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
    {
        long curPosition = 0;

        foreach (SyncLog syncLog in syncLogs)
        {
            int expectedRecords = numRecords - syncLog.RemainingRecords;
            long nextSyncPoint = syncLog.Position;
            AssertNumRecordsFromPosition(reader, curPosition, expectedRecords);
            AssertNumRecordsFromPosition(reader, nextSyncPoint - 1, expectedRecords);
            curPosition = nextSyncPoint;
        }
    }
}
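TestPartialReadAll uses two helpers that are not shown in this excerpt. Their real implementations are not visible here, so the sketch below only matches the members and behaviour the test relies on: SyncLog just records a stream position and the record count written so far, and AssertNumRecordsFromPosition syncs the reader past a position and counts what remains. The assertion message text is illustrative.

// Shape implied by the usage above: only Position and RemainingRecords are touched.
private class SyncLog
{
    public long Position { get; set; }
    public int RemainingRecords { get; set; }
}

// Hedged sketch: sync to the first block after 'position' and verify that
// exactly 'expectedRecords' entries remain.
private static void AssertNumRecordsFromPosition(IFileReader<Foo> reader, long position, int expectedRecords)
{
    reader.Sync(position);

    int count = 0;
    foreach (Foo foo in reader.NextEntries)
    {
        count++;
    }

    Assert.AreEqual(expectedRecords, count,
                    string.Format("Unexpected record count reading from position: {0}", position));
}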