public void CanReadWrite()
{
    // A header written with Write must be readable with Read and preserve
    // every observable property (round-trip fidelity).
    var expected = _fixture.Create<DbaseFileHeader>();

    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream, Encoding.ASCII, true))
        {
            expected.Write(writer);
            writer.Flush();
        }

        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            var actual = DbaseFileHeader.Read(reader);

            Assert.Equal(expected.LastUpdated, actual.LastUpdated);
            Assert.Equal(expected.CodePage, actual.CodePage);
            Assert.Equal(expected.RecordCount, actual.RecordCount);
            Assert.Equal(expected.Schema, actual.Schema);
        }
    }
}
public void ReadExpectsHeaderToNotExceedFieldCount()
{
    // Read must reject a header whose declared length implies 129 fields
    // (one more than the presumable 128-field maximum — see header length math below).
    var header = _fixture.Create<DbaseFileHeader>();

    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream, Encoding.ASCII, true))
        {
            writer.Write(Convert.ToByte(3));
            writer.Write(Convert.ToByte(header.LastUpdated.Year - 1900));
            writer.Write(Convert.ToByte(header.LastUpdated.Month));
            writer.Write(Convert.ToByte(header.LastUpdated.Day));
            writer.Write(header.RecordCount.ToInt32());

            // Declared header length sized for 129 field descriptors.
            var excessiveHeaderLength =
                DbaseFileHeader.HeaderMetaDataSize + DbaseFileHeader.FieldMetaDataSize * 129;
            writer.Write(Convert.ToInt16(excessiveHeaderLength));
            writer.Flush();
        }

        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            Assert.Throws<DbaseFileHeaderException>(() => DbaseFileHeader.Read(reader));
        }
    }
}
public void ReadExpectsHeaderToEndWithTerminator()
{
    // Read must reject a header whose final byte is not the terminator.
    var bogusTerminator = _fixture
        .Create<Generator<byte>>()
        .First(candidate => candidate != DbaseFileHeader.Terminator);
    var header = _fixture.Create<DbaseFileHeader>();

    using (var outputStream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(outputStream, Encoding.ASCII, true))
        {
            header.Write(writer);
            writer.Flush();
        }

        var bytes = outputStream.ToArray();

        using (var inputStream = new MemoryStream())
        {
            // Copy everything except the last byte, then append a byte that is
            // guaranteed not to be the terminator.
            inputStream.Write(bytes, 0, bytes.Length - 1);
            inputStream.Write(new[] { bogusTerminator }, 0, 1);
            inputStream.Position = 0;

            using (var reader = new BinaryReader(inputStream, Encoding.ASCII, true))
            {
                Assert.Throws<DbaseFileHeaderException>(() => DbaseFileHeader.Read(reader));
            }
        }
    }
}
public void ReadExpectsHeaderToHaveSupportedCodePage()
{
    // Read must reject a header whose code page byte is not among DbaseCodePage.All.
    var supportedBytes = Array.ConvertAll(DbaseCodePage.All, page => page.ToByte());
    var unsupportedCodePage = new Generator<byte>(_fixture)
        .First(candidate => !Array.Exists(supportedBytes, supported => supported == candidate));

    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream, Encoding.ASCII, true))
        {
            // Well-formed header prefix up to (and including) the code page byte.
            writer.Write(Convert.ToByte(3));  // format marker
            writer.Write(Convert.ToByte(0));  // last updated: year offset from 1900
            writer.Write(Convert.ToByte(1));  // last updated: month
            writer.Write(Convert.ToByte(1));  // last updated: day
            writer.Write(0);                  // record count

            var headerLength =
                DbaseFileHeader.HeaderMetaDataSize + DbaseFileHeader.FieldMetaDataSize * 1;
            writer.Write(Convert.ToInt16(headerLength));
            writer.Write(Convert.ToInt16(0)); // record length
            writer.Write(new byte[17]);       // filler bytes before the code page
            writer.Write(unsupportedCodePage);
            writer.Flush();
        }

        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            Assert.Throws<DbaseFileHeaderException>(() => DbaseFileHeader.Read(reader));
        }
    }
}
public void WriteOneHasExpectedResult()
{
    // Writing a single record through DbaseBinaryWriter must produce a stream
    // containing header + record + end-of-file marker.
    // Arrange
    var expectedHeader = _fixture.Create<DbaseFileHeader>();
    var expectedRecord = _fixture.GenerateDbaseRecord(expectedHeader.Schema.Fields);

    using (var stream = new MemoryStream())
    {
        // Act
        using (var sut = new DbaseBinaryWriter(
            expectedHeader,
            new BinaryWriter(stream, Encoding.ASCII, true)))
        {
            sut.Write(expectedRecord);
        }

        // Assert
        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            var actualHeader = DbaseFileHeader.Read(reader);
            var actualRecord = new AnonymousDbaseRecord(actualHeader.Schema.Fields);
            actualRecord.Read(reader);
            var endOfFileMarker = reader.ReadByte();

            Assert.Equal(expectedHeader, actualHeader);
            Assert.Equal(expectedRecord, actualRecord, new DbaseRecordEqualityComparer());
            Assert.Equal(DbaseRecord.EndOfFile, endOfFileMarker);
        }
    }
}
public void ReadExpectsCorrectFieldsOffsets(DbaseField[] fields)
{
    // Read must reject a header whose field descriptors carry inconsistent
    // offsets (the theory presumably supplies such fields — verify against
    // the member data).
    var lastUpdated = _fixture.Create<DateTime>();
    var codePage = _fixture.Create<DbaseCodePage>();
    var recordCount = _fixture.Create<DbaseRecordCount>();
    var recordLength = fields.Aggregate(
        DbaseRecordLength.Initial,
        (total, field) => total.Plus(field.Length));

    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream, Encoding.ASCII, true))
        {
            writer.Write(Convert.ToByte(3));
            writer.Write(Convert.ToByte(lastUpdated.Year - 1900));
            writer.Write(Convert.ToByte(lastUpdated.Month));
            writer.Write(Convert.ToByte(lastUpdated.Day));
            writer.Write(recordCount.ToInt32());

            var headerLength =
                DbaseFileHeader.HeaderMetaDataSize + DbaseFileHeader.FieldMetaDataSize * fields.Length;
            writer.Write(Convert.ToInt16(headerLength));
            writer.Write(recordLength.ToInt16());
            writer.Write(new byte[17]);
            writer.Write(codePage.ToByte());
            writer.Write(new byte[2]);

            foreach (var field in fields)
            {
                field.Write(writer);
            }

            writer.Write(DbaseFileHeader.Terminator);
            writer.Flush();
        }

        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            Assert.Throws<DbaseFileHeaderException>(() => DbaseFileHeader.Read(reader));
        }
    }
}
public void ReadExpectsHeaderRecordLengthToMatchSchemaRecordLength()
{
    // Read must reject a header whose declared record length disagrees with
    // the length implied by its schema's fields.
    var header = _fixture.Create<DbaseFileHeader>();
    var mismatchedLength = _fixture
        .Create<Generator<DbaseRecordLength>>()
        .First(candidate => candidate != header.Schema.Length);

    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream, Encoding.ASCII, true))
        {
            writer.Write(Convert.ToByte(3));
            writer.Write(Convert.ToByte(header.LastUpdated.Year - 1900));
            writer.Write(Convert.ToByte(header.LastUpdated.Month));
            writer.Write(Convert.ToByte(header.LastUpdated.Day));
            writer.Write(header.RecordCount.ToInt32());

            var headerLength =
                DbaseFileHeader.HeaderMetaDataSize + DbaseFileHeader.FieldMetaDataSize * header.Schema.Fields.Length;
            writer.Write(Convert.ToInt16(headerLength));

            // Deliberately wrong record length.
            writer.Write(mismatchedLength.ToInt16());
            writer.Write(new byte[17]);
            writer.Write(header.CodePage.ToByte());
            writer.Write(new byte[2]);

            foreach (var field in header.Schema.Fields)
            {
                field.Write(writer);
            }

            writer.Write(DbaseFileHeader.Terminator);
            writer.Flush();
        }

        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            Assert.Throws<DbaseFileHeaderException>(() => DbaseFileHeader.Read(reader));
        }
    }
}
public void WriteManyHasExpectedResult()
{
    // Writing RecordCount records through DbaseBinaryWriter must produce a
    // stream containing header + all records + end-of-file marker.
    // Arrange
    var expectedHeader = _fixture.Create<DbaseFileHeader>();
    var expectedRecords = _fixture.GenerateDbaseRecords(
        expectedHeader.Schema.Fields,
        expectedHeader.RecordCount);

    using (var stream = new MemoryStream())
    {
        // Act
        using (var sut = new DbaseBinaryWriter(
            expectedHeader,
            new BinaryWriter(stream, Encoding.ASCII, true)))
        {
            foreach (var expectedRecord in expectedRecords)
            {
                sut.Write(expectedRecord);
            }
        }

        // Assert
        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            var actualHeader = DbaseFileHeader.Read(reader);

            // Read back exactly as many records as the header declares.
            var actualRecords = new DbaseRecord[actualHeader.RecordCount.ToInt32()];
            for (var index = 0; index < actualRecords.Length; index++)
            {
                var record = new AnonymousDbaseRecord(actualHeader.Schema.Fields);
                record.Read(reader);
                actualRecords[index] = record;
            }

            var endOfFileMarker = reader.ReadByte();

            Assert.Equal(expectedHeader, actualHeader);
            Assert.Equal(expectedRecords, actualRecords, new DbaseRecordEqualityComparer());
            Assert.Equal(DbaseRecord.EndOfFile, endOfFileMarker);
        }
    }
}
public void ReadExpectsHeaderToStartWithDbase3Format()
{
    // Read must reject a stream whose first byte is not the expected dBase format marker.
    var wrongFormat = _fixture
        .Create<Generator<byte>>()
        .First(candidate => candidate != DbaseFileHeader.ExpectedDbaseFormat);

    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream, Encoding.ASCII, true))
        {
            writer.Write(wrongFormat);
            writer.Flush();
        }

        stream.Position = 0;

        using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
        {
            Assert.Throws<DbaseFileHeaderException>(() => DbaseFileHeader.Read(reader));
        }
    }
}
// Read must guard against a null reader instead of dereferencing it
// (AutoFixture's GuardClauseAssertion verifies the null-argument check).
public void ReaderCanNotBeNull() { new GuardClauseAssertion(_fixture) .Verify(Methods.Select(() => DbaseFileHeader.Read(null))); }