/// <summary>
/// Looks up <paramref name="eventType"/> in the index, reserving it if absent.
/// When the record factory requires explicit event-type creation and the name
/// was newly reserved, also builds the prepare record that registers it.
/// </summary>
/// <returns>True if the event type already existed in the index.</returns>
public static bool GetOrReserveEventType<TStreamId>(
	this INameIndex<TStreamId> eventTypeIndex,
	IRecordFactory<TStreamId> recordFactory,
	string eventType,
	long logPosition,
	out TStreamId eventTypeId,
	out IPrepareLogRecord<TStreamId> eventTypeRecord) {

	var existed = eventTypeIndex.GetOrReserve(eventType, out eventTypeId, out var reservedNumber, out var reservedName);

	// Only materialise a record for names that were just reserved, and only
	// when this log format creates event types explicitly.
	if (existed || !recordFactory.ExplicitEventTypeCreation) {
		eventTypeRecord = null;
		return existed;
	}

	eventTypeRecord = recordFactory.CreateEventTypeRecord(
		eventTypeId: Guid.NewGuid(),
		parentEventTypeId: Guid.Empty,
		eventType: reservedName,
		eventTypeNumber: reservedNumber,
		eventTypeVersion: 0,
		logPosition: logPosition,
		timeStamp: DateTime.UtcNow);
	return existed;
}
// todo: rename to GetOrReserveStream when we generalise to EventTypes too.
/// <summary>
/// Looks up <paramref name="streamName"/> in the index, reserving it if absent.
/// Generates a StreamRecord when the record factory requires explicit stream
/// creation and the name was newly reserved.
/// </summary>
/// <returns>True if the stream already existed in the index.</returns>
public static bool GetOrReserve<TStreamId>(
	this INameIndex<TStreamId> streamNameIndex,
	IRecordFactory<TStreamId> recordFactory,
	string streamName,
	long logPosition,
	out TStreamId streamId,
	out IPrepareLogRecord<TStreamId> streamRecord) {

	var existed = streamNameIndex.GetOrReserve(streamName, out streamId, out var reservedNumber, out var reservedName);

	// Only materialise a stream record for newly reserved names, and only when
	// this log format creates streams explicitly.
	if (existed || !recordFactory.ExplicitStreamCreation) {
		streamRecord = null;
		return existed;
	}

	streamRecord = recordFactory.CreateStreamRecord(
		streamId: Guid.NewGuid(),
		logPosition: logPosition,
		timeStamp: DateTime.UtcNow,
		streamNumber: reservedNumber,
		streamName: reservedName);
	return existed;
}
/// <summary>
/// Decodes a record from the binary data reader. The reader should be positioned
/// at the RecordType field, not at the Record Length Indicator.
/// </summary>
/// <param name="reader">The reader containing the data to be decoded.</param>
/// <param name="recordFactory">The factory in charge of instantiating the proper record class.</param>
/// <returns>A new Record object that has been initialized from the reader.</returns>
/// <exception cref="ArgumentOutOfRangeException">The record type could not be read or parsed.</exception>
public static Record DecodeRecord(BinaryReader reader, IRecordFactory recordFactory) {
	int recordType = 0;
	long startPosition = reader.BaseStream.Position;

	//
	// Determine the type of record to be decoded.
	//
	try {
		reader.ReadBytes(4); // Skip record length indicator
		recordType = Convert.ToInt32(reader.ReadEbcdicString(2));
	} catch (Exception) {
		// Was a bare `catch` throwing with a string-literal parameter name;
		// nameof is refactor-safe and produces the identical string. The
		// exception type and ParamName are preserved for existing callers
		// (ArgumentOutOfRangeException has no (paramName, message, inner)
		// overload, so the original cause cannot be attached without
		// changing ParamName semantics).
		throw new ArgumentOutOfRangeException(nameof(reader), "Unable to determine record type.");
	}

	//
	// Initialize the blank record based on the type.
	//
	var record = recordFactory.GetRecordForType(recordType);

	//
	// Rewind to where decoding started, then instruct the record to decode
	// itself from the reader.
	//
	reader.BaseStream.Position = startPosition;
	record.Decode(reader);

	return record;
}
/// <summary>
/// Initializes the mapper with its factory and optional collaborators.
/// Any collaborator left null falls back to the default implementation.
/// </summary>
public RecordMapper(IRecordFactory recordFactory,
	IFieldParser fieldParser = null,
	IFieldDisplayNameProvider fieldDisplayNameProvider = null,
	IFieldTransformer fieldTransformer = null) {
	RecordFactory = recordFactory;
	FieldParser = fieldParser ?? new FieldParser();
	FieldDisplayNameProvider = fieldDisplayNameProvider ?? new FieldDisplayNameProvider();
	FieldTransformer = fieldTransformer ?? new FieldTransformer();
}
/// <summary>
/// Wires up the epoch manager's dependencies and its pooled transaction-file readers.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">
/// initialReaderCount is greater than maxReaderCount.
/// </exception>
public EpochManager(IPublisher bus,
	int cachedEpochCount,
	ICheckpoint checkpoint,
	ITransactionFileWriter writer,
	int initialReaderCount,
	int maxReaderCount,
	Func<ITransactionFileReader> readerFactory,
	IRecordFactory recordFactory,
	Guid instanceId) {
	Ensure.NotNull(bus, "bus");
	Ensure.Nonnegative(cachedEpochCount, "cachedEpochCount");
	Ensure.NotNull(checkpoint, "checkpoint");
	// Fix: this guard previously reported the wrong parameter name ("chunkWriter").
	Ensure.NotNull(writer, "writer");
	Ensure.Nonnegative(initialReaderCount, "initialReaderCount");
	Ensure.Positive(maxReaderCount, "maxReaderCount");
	if (initialReaderCount > maxReaderCount) {
		throw new ArgumentOutOfRangeException(nameof(initialReaderCount), "initialReaderCount is greater than maxReaderCount.");
	}
	Ensure.NotNull(readerFactory, "readerFactory");
	// recordFactory was previously stored without validation, unlike every
	// other reference parameter; fail fast here instead of later at use.
	Ensure.NotNull(recordFactory, "recordFactory");

	_bus = bus;
	_cacheSize = cachedEpochCount;
	_checkpoint = checkpoint;
	_readers = new ObjectPool<ITransactionFileReader>("EpochManager readers pool", initialReaderCount, maxReaderCount, readerFactory);
	_writer = writer;
	_recordFactory = recordFactory;
	_instanceId = instanceId;
}
/// <summary>
/// Test double that forwards everything straight to the base mapper constructor.
/// </summary>
public MockRecordMapper(
	IRecordFactory recordFactory,
	IFieldParser fieldParser = null,
	IFieldDisplayNameProvider fieldDisplayNameProvider = null,
	IFieldTransformer fieldTransformer = null)
	: base(recordFactory, fieldParser, fieldDisplayNameProvider, fieldTransformer) {
	// No additional state; the base class handles all defaulting.
}
/// <summary>
/// Builds a loader around the given record factory, sizing its internal
/// string buffer from the supplied configuration.
/// </summary>
public RecordLoader(IRecordFactory recordFactory, RecordLoaderConfiguration loaderConfig, ILogger<RecordLoader> log) {
	_recordFactory = recordFactory;
	_bufferSize = loaderConfig.BufferSize;
	_buffer = new StringBuilder();
	_log = log;
}
/// <summary>
/// Streams every record that passes validation from the source into the destination.
/// </summary>
public void TransferRecords(IRecordsSource source, IRecordDestination destination, IRecordValidator validator, IRecordFactory recordFactory) {
	// Pipe directly: reading and writing need no intermediate collection here.
	destination.WriteRecords(source.ReadValidRecords(validator, recordFactory));
}
/// <summary>
/// Streams trade records read from the source into the destination.
/// </summary>
public void TransferInfo(ITradeRecordSource source, ITradeRecordDestination destination, ITradeRecordValidator validator, IRecordFactory recordFactory) {
	// Pipe directly: no intermediate collection is required.
	destination.WriteRecords(source.ReadRecords(validator, recordFactory));
}
/// <summary>
/// Mapper for object sources; delegates common wiring to the base class and
/// defaults the source-field-name provider when none is supplied.
/// </summary>
public ObjectRecordMapper(
	IRecordFactory recordFactory,
	IFieldParser fieldParser = null,
	IFieldDisplayNameProvider fieldDisplayNameProvider = null,
	IFieldTransformer fieldTransformer = null,
	ISourceFieldNameProvider sourceFieldNameProvider = null)
	: base(recordFactory, fieldParser, fieldDisplayNameProvider, fieldTransformer) {
	SourceFieldNameProvider = sourceFieldNameProvider ?? new SourceFieldNameProvider();
}
/// <summary>
/// Mapper for fixed-width binary sources. Unlike the sibling mappers, a null
/// fieldParser defaults to a BinaryFieldParser rather than the base default.
/// </summary>
public FixedWidthBinaryRecordMapper(
	IRecordFactory recordFactory,
	IFieldParser fieldParser = null,
	IFieldDisplayNameProvider fieldDisplayNameProvider = null,
	IFieldTransformer fieldTransformer = null,
	ISourceFieldLayoutProvider sourceFieldLayoutProvider = null)
	: base(recordFactory, fieldParser ?? new BinaryFieldParser(), fieldDisplayNameProvider, fieldTransformer) {
	SourceFieldLayoutProvider = sourceFieldLayoutProvider ?? new SourceFieldLayoutProvider();
}
/// <summary>
/// Create a new X937 file by decoding data in the Stream.
/// </summary>
/// <param name="dataStream">The Stream that contains the previously encoded X937 data.
/// Note: the BinaryReader created here disposes this stream when decoding completes.</param>
/// <param name="recordFactory">The factory responsible for instantiating new records.</param>
public X937File(Stream dataStream, IRecordFactory recordFactory)
	: this() {
	using (var reader = new BinaryReader(dataStream)) {
		// Decode records back-to-back until the underlying stream is exhausted.
		var stream = reader.BaseStream;
		while (stream.Position < stream.Length) {
			Records.Add(Record.DecodeRecord(reader, recordFactory));
		}
	}
}
/// <summary>
/// Reads the source file line by line, parsing each comma-separated line into a
/// record. Lines that fail parsing/validation are logged and skipped rather
/// than aborting the whole run.
/// </summary>
/// <param name="validator">Validator passed through to record creation; must not be null.</param>
/// <param name="recordFactory">Factory used to instantiate records; must not be null.</param>
/// <returns>A lazily-evaluated sequence of the records that parsed successfully.</returns>
/// <exception cref="ArgumentNullException">validator or recordFactory is null.</exception>
public IEnumerable<Record> ReadValidRecords(IRecordValidator validator, IRecordFactory recordFactory) {
	// Validate eagerly. The original ran these checks inside the iterator body
	// (via a local function), so a null argument only surfaced on the first
	// enumeration — far from the faulty call site.
	if (validator == null) {
		throw new ArgumentNullException(nameof(validator));
	}
	if (recordFactory == null) {
		throw new ArgumentNullException(nameof(recordFactory));
	}
	return ReadValidRecordsCore(validator, recordFactory);
}

// Iterator core; arguments are already validated by the public wrapper.
private IEnumerable<Record> ReadValidRecordsCore(IRecordValidator validator, IRecordFactory recordFactory) {
	int lineNumber = 0;
	using (var sr = new StreamReader(sourcePath)) {
		while (true) {
			var recordLine = sr.ReadLine();
			if (recordLine == null) {
				yield break;
			}
			Record record = null;
			try {
				record = ParseAndCreateRecord(recordLine.Split(",".ToCharArray()), recordFactory, validator);
			} catch (ArgumentException e) {
				// Malformed lines are logged and skipped; note lineNumber is zero-based.
				string logMessage = $"{e.Message}. Invalid record was on line #{lineNumber}";
				LoggerService.Warning(logMessage);
			}
			if (record != null) {
				yield return record;
			}
			lineNumber++;
		}
	}
}
/// <summary>
/// Mapper for delimited text sources. Stores the delimiter (comma by default)
/// and defaults the field-count and field-ordinal providers when omitted.
/// </summary>
public DelimitedTextRecordMapper(
	IRecordFactory recordFactory,
	string delimiter = ",",
	IFieldParser fieldParser = null,
	IFieldDisplayNameProvider fieldDisplayNameProvider = null,
	IFieldTransformer fieldTransformer = null,
	ISourceRecordFieldCountProvider sourceRecordFieldCountProvider = null,
	ISourceFieldOrdinalProvider sourceFieldOrdinalProvider = null)
	: base(recordFactory, fieldParser, fieldDisplayNameProvider, fieldTransformer) {
	Delimiter = delimiter;
	SourceRecordFieldCountProvider = sourceRecordFieldCountProvider ?? new SourceRecordFieldCountProvider();
	SourceFieldOrdinalProvider = sourceFieldOrdinalProvider ?? new SourceFieldOrdinalProvider();
}
/// <summary>
/// Validates the raw CSV fields and builds a trade record via the factory.
/// fields[0] must be a six-letter pair of two three-letter currency codes;
/// fields[1] and fields[2] are forwarded to the factory as-is.
/// </summary>
/// <exception cref="ArgumentException">Wrong field count or malformed currency pair.</exception>
private TradeRecord CreateValidRecord(string[] fields, IRecordFactory recordFactory, ITradeRecordValidator validator) {
	if (fields.Length != StandartFieldLength) {
		throw new ArgumentException("Not valid amount of fields");
	}

	var currencyPair = fields[0];
	if (currencyPair.Length != StandartCurrencyLength) {
		throw new ArgumentException("Not valid currency codes");
	}

	// Split the pair into its two three-letter codes.
	var sourceCurrency = currencyPair.Substring(0, 3);
	var destinationCurrency = currencyPair.Substring(3, 3);

	return recordFactory.CreateNewRecord(destinationCurrency, sourceCurrency, fields[2], fields[1], validator);
}
/// <summary>
/// Validates the raw CSV parts and builds a record via the factory.
/// parts[0] must be a six-letter pair of two three-letter currency codes;
/// parts[1] and parts[2] are forwarded to the factory in swapped order
/// (presumably price/amount — confirm against IRecordFactory.CreateNewRecord).
/// </summary>
/// <exception cref="ArgumentException">Wrong part count or malformed currency pair.</exception>
private Record ParseAndCreateRecord(string[] parts, IRecordFactory recordFactory, IRecordValidator validator) {
	// Named constants replace the magic numbers 3/6/3, matching the sibling
	// reader's StandartFieldLength/StandartCurrencyLength convention.
	const int ExpectedPartCount = 3;
	const int CurrencyPairLength = 6;
	const int CurrencyCodeLength = 3;

	if (parts.Length != ExpectedPartCount) {
		throw new ArgumentException("Not valid amount of parts");
	}
	if (parts[0].Length != CurrencyPairLength) {
		throw new ArgumentException("Not valid currency codes");
	}

	var sourceCurrency = parts[0].Substring(0, CurrencyCodeLength);
	var destinationCurrency = parts[0].Substring(CurrencyCodeLength, CurrencyCodeLength);
	return recordFactory.CreateNewRecord(destinationCurrency, sourceCurrency, parts[2], parts[1], validator);
}
/// <summary>
/// Reads the source file line by line, turning each comma-separated line into a
/// trade record. Lines that fail validation are logged and skipped rather than
/// aborting the whole run.
/// </summary>
/// <param name="validator">Validator passed through to record creation; must not be null.</param>
/// <param name="recordFactory">Factory used to instantiate records; must not be null.</param>
/// <returns>A lazily-evaluated sequence of the records that parsed successfully.</returns>
/// <exception cref="ArgumentNullException">validator or recordFactory is null.</exception>
public IEnumerable<TradeRecord> ReadRecords(ITradeRecordValidator validator, IRecordFactory recordFactory) {
	// Validate eagerly: the original checks lived inside the iterator body, so
	// a null argument only surfaced on first enumeration. Also fixed: the
	// original passed the message string where ArgumentNullException expects
	// the parameter name; use the (paramName, message) overload instead.
	if (validator == null) {
		throw new ArgumentNullException(nameof(validator), $"{nameof(validator)} can't be null");
	}
	if (recordFactory == null) {
		throw new ArgumentNullException(nameof(recordFactory), $"{nameof(recordFactory)} can't be null");
	}
	return ReadRecordsCore(validator, recordFactory);
}

// Iterator core; arguments are already validated by the public wrapper.
private IEnumerable<TradeRecord> ReadRecordsCore(ITradeRecordValidator validator, IRecordFactory recordFactory) {
	var lineNumber = 0;
	using (var sr = new StreamReader(_sourcePath)) {
		while (true) {
			var recordLine = sr.ReadLine();
			if (recordLine == null) {
				yield break;
			}
			TradeRecord record = null;
			try {
				record = CreateValidRecord(recordLine.Split(",".ToCharArray()), recordFactory, validator);
			} catch (ArgumentException e) {
				// Malformed lines are logged and skipped; lineNumber is zero-based.
				var logMessage = $"{e.Message}. Invalid record. Line:{lineNumber}";
				LoggerService.Warning(logMessage);
			}
			if (record != null) {
				yield return record;
			}
			lineNumber++;
		}
	}
}
/// <summary>
/// Builds the full test fixture: creates a chunk database, runs the scenario's
/// writes, syncs the checkpoints, then constructs the table index and read
/// index over the written data. Optionally scavenges afterwards when
/// _scavenge is set. Statement order matters throughout — TODO-style notes
/// below flag the ordering constraints visible in the code.
/// </summary>
public override async Task TestFixtureSetUp() {
	await base.TestFixtureSetUp();

	var indexDirectory = GetFilePathFor("index");

	// Log-format plumbing for this TLogFormat/TStreamId combination:
	// record factory plus the stream-name and event-type indexes.
	_logFormat = LogFormatHelper<TLogFormat, TStreamId>.LogFormatFactory.Create(new() {
		IndexDirectory = indexDirectory,
	});
	_recordFactory = _logFormat.RecordFactory;
	_streamNameIndex = _logFormat.StreamNameIndex;
	_eventTypeIndex = _logFormat.EventTypeIndex;

	// Fresh in-memory checkpoints; replication checkpoint starts at -1.
	WriterCheckpoint = new InMemoryCheckpoint(0);
	ChaserCheckpoint = new InMemoryCheckpoint(0);

	// create db
	Db = new TFChunkDb(TFChunkHelper.CreateDbConfig(PathName, WriterCheckpoint, ChaserCheckpoint,
		replicationCheckpoint: new InMemoryCheckpoint(-1), chunkSize: _chunkSize));
	Db.Open();

	// Run the scenario's writes, then close the writer before the index is
	// built; the writer reference is nulled so nothing writes past this point.
	Writer = new TFChunkWriter(Db);
	Writer.Open();
	WriteTestScenario();
	Writer.Close();
	Writer = null;

	// Bring the chaser checkpoint up to the writer position and flush both.
	WriterCheckpoint.Flush();
	ChaserCheckpoint.Write(WriterCheckpoint.Read());
	ChaserCheckpoint.Flush();

	// Pooled readers shared by the table index and read index below.
	var readers = new ObjectPool<ITransactionFileReader>("Readers", 2, 5,
		() => new TFChunkReader(Db, Db.Config.WriterCheckpoint));

	var lowHasher = _logFormat.LowHasher;
	var highHasher = _logFormat.HighHasher;
	var emptyStreamId = _logFormat.EmptyStreamId;
	TableIndex = new TableIndex<TStreamId>(indexDirectory, lowHasher, highHasher, emptyStreamId,
		() => new HashListMemTable(IndexBitnessVersion, MaxEntriesInMemTable * 2),
		() => new TFReaderLease(readers),
		IndexBitnessVersion,
		int.MaxValue,
		Constants.PTableMaxReaderCountDefault,
		MaxEntriesInMemTable);
	// The names provider needs the table index before the read index is built.
	_logFormat.StreamNamesProvider.SetTableIndex(TableIndex);

	var readIndex = new ReadIndex<TStreamId>(new NoopPublisher(),
		readers,
		TableIndex,
		_logFormat.StreamNameIndexConfirmer,
		_logFormat.StreamIds,
		_logFormat.StreamNamesProvider,
		_logFormat.EmptyStreamId,
		_logFormat.StreamIdValidator,
		_logFormat.StreamIdSizer,
		_logFormat.StreamExistenceFilter,
		_logFormat.StreamExistenceFilterReader,
		_logFormat.EventTypeIndexConfirmer,
		streamInfoCacheCapacity: StreamInfoCacheCapacity,
		additionalCommitChecks: PerformAdditionalCommitChecks,
		metastreamMaxCount: MetastreamMaxCount,
		hashCollisionReadLimit: Opts.HashCollisionReadLimitDefault,
		skipIndexScanOnReads: Opts.SkipIndexScanOnReadsDefault,
		replicationCheckpoint: Db.Config.ReplicationCheckpoint,
		indexCheckpoint: Db.Config.IndexCheckpoint);
	// Index everything the chaser has seen (i.e. all scenario writes).
	readIndex.IndexCommitter.Init(ChaserCheckpoint.Read());
	ReadIndex = readIndex;

	// scavenge must run after readIndex is built
	if (_scavenge) {
		if (_completeLastChunkOnScavenge) {
			// Completed chunks are eligible for scavenging; complete the last one on demand.
			Db.Manager.GetChunk(Db.Manager.ChunksCount - 1).Complete();
		}
		_scavenger = new TFChunkScavenger<TStreamId>(Db, new FakeTFScavengerLog(), TableIndex, ReadIndex,
			_logFormat.Metastreams);
		await _scavenger.Scavenge(alwaysKeepScavenged : true, mergeChunks : _mergeChunks);
	}
}
/// <summary>
/// Stores the record factory this service will use to create records.
/// </summary>
public InputReaderService(IRecordFactory recordFactory) => _recordFactory = recordFactory;
/// <summary>
/// Wraps a log-v2 (string-keyed) record factory for use by the v3 factory.
/// </summary>
public LogV3RecordFactory(IRecordFactory<string> logV2RecordFactory) => _logV2RecordFactory = logV2RecordFactory;
/// <summary>
/// Sets the record factory used by this parser.
/// </summary>
/// <param name="recordFactory">The factory to use; must not be null.</param>
/// <exception cref="ArgumentNullException">recordFactory is null.</exception>
public void SetRecordFactory(IRecordFactory recordFactory) {
	// nameof replaces the string literal (identical string, refactor-safe);
	// the null-coalescing throw keeps guard and assignment in one expression.
	_recordFactory = recordFactory ?? throw new ArgumentNullException(nameof(recordFactory));
}