public void FuzzTestIt()
{
    // Fuzz RecordKeyComparer: generate random field sets and random
    // same/left-first/right-first comparers, then check the comparer's
    // verdict against the model implemented by GetExpected.
    const string possibleFieldNames = "abcdefghijklm";
    var random = new Random();
    for (var i = 0; i < 50000; i++)
    {
        var numberOfFields = random.Next(10);
        var fieldNames = Enumerable.Range(0, numberOfFields).Select(x => possibleFieldNames[x]).ToArray();
        var comparerKeys = Enumerable.Range(0, numberOfFields).Select(x => "slr"[random.Next(3)]).ToArray();
        var comparers = new List<IFieldComparer>();
        for (var j = 0; j < numberOfFields; j++)
        {
            var fieldName = fieldNames[j].ToString(CultureInfo.InvariantCulture);
            switch (comparerKeys[j])
            {
                case 's':
                    comparers.Add(new SameResultComparer(fieldName));
                    break;
                case 'l':
                    comparers.Add(new LeftBeforeRightResultComparer(fieldName));
                    break;
                case 'r':
                    comparers.Add(new RightBeforeLeftResultComparer(fieldName));
                    break;
            }
        }
        var record1 = CreateRecord(random, possibleFieldNames);
        var record2 = CreateRecord(random, possibleFieldNames);
        var recordKeyComparer = new RecordKeyComparer(comparers.ToArray());
        var expected = GetExpected(record1, record2, comparers);
        try
        {
            var actual = recordKeyComparer.Compare(record1, record2);
            actual.ShouldBeEqualTo(expected.Value);
            if (expected.ShouldThrowDueToInvalidHeader)
            {
                Assert.Fail("Should have thrown invalid header exception.");
            }
        }
        catch (Exception exception)
        {
            // Expected failure mode: an invalid-header exception that the model
            // predicted. Anything else is a genuine test failure.
            if (expected.ShouldThrowDueToInvalidHeader &&
                exception.Message.Contains(Record.ErrorFieldNameIsNotAValidHeaderForThisRecordMessage))
            {
                continue;
            }
            // BUG FIX 1: the original printed fieldNames twice; print the
            // comparer keys so the failing case can be reproduced.
            Console.WriteLine("fields: " + new String(fieldNames));
            Console.WriteLine("comparers: " + new String(comparerKeys));
            Console.WriteLine(exception);
            // BUG FIX 2: rethrow so the fuzz run actually fails. The original
            // swallowed every exception (including Assert.Fail's), meaning the
            // test could never go red no matter what the comparer did.
            throw;
        }
    }
}
public void Given_an_unsorted_file__should_return_a_sorted_file()
{
    // Arrange: write a deliberately out-of-order set of records to disk.
    var headings = new[] { "Number", "Name" };
    var rows = new[]
    {
        new[] { "2", "Two" },
        new[] { "4", "Four" },
        new[] { "6", "Six" },
        new[] { "8", "Eight" },
        new[] { "9", "Nine" },
        new[] { "7", "Seven" },
        new[] { "5", "Five" },
        new[] { "3", "Three" },
        new[] { "1", "One" }
    };
    var records = rows.Select(fields => new Record(fields, headings)).ToList();
    var reader = new CsvReader(new DelimitedDataReader(new StreamTokenizer()));
    var writer = new CsvWriter();
    var comparer = new RecordKeyComparer(new StringFieldComparer("Number"));
    writer.WriteTo("UnsortedNumbers.csv", records, true);

    // Act: read the unsorted file back, sort by the "Number" key, persist.
    var sorted = reader
        .ReadFrom(File.OpenRead("UnsortedNumbers.csv"), "\r\n", true)
        .Sort(comparer);
    writer.WriteTo("SortedNumbers.csv", sorted, true);

    // Assert: every record's key is strictly greater than its predecessor's.
    var actual = reader.ReadFrom(File.OpenRead("SortedNumbers.csv"), "\r\n", true);
    var lastNumber = "";
    Console.WriteLine("Number, Name");
    foreach (var record in actual)
    {
        Console.WriteLine("{0}, {1}", record["Number"], record["Name"]);
        record["Number"].ShouldBeGreaterThan(lastNumber);
        lastNumber = record["Number"];
    }
}
/// <summary>
/// Bootstraps this transaction against the given server: assigns a unique
/// transaction Id, creates the add/file-growth/recycled-collection stores,
/// and lazily initializes the shared backup log collection and MRU cache.
/// </summary>
/// <param name="server">The owning object server; must not be null.</param>
/// <exception cref="ArgumentNullException">When <paramref name="server"/> is null.</exception>
/// <exception cref="NotImplementedException">When DiskBasedMetaLogging is enabled (unsupported in this release).</exception>
public virtual void Initialize(ObjectServer server)
{
    if (server == null)
    {
        throw new ArgumentNullException("server");
    }
    if (DiskBasedMetaLogging)
    {
        throw new NotImplementedException("Disk Based Meta Logging isn't supported in this SOP release.");
    }
    Server = server;
    // NOTE(review): locking on the server parameter (a publicly reachable
    // object) is fragile — any external code that also locks it can contend
    // or deadlock here; a private gate object would be safer. Confirm no
    // other code locks the server instance before changing.
    lock (server)
    {
        //** increment number of ongoing transactions
        Interlocked.Increment(ref Count);
        //** note: we only support up to int.MaxValue number of active transactions at any given time.
        // Take the next Id from the shared counter. The loop only re-draws
        // when Id == 1 AND that Id's append-log file already exists on disk.
        // NOTE(review): the Id == 1 guard means collisions are only checked
        // for Id 1 (presumably the post-wraparound case) — verify that Ids > 1
        // can never collide with a leftover log file.
        do
        {
            Id = _counter++;
        } while (Id == 1 && Sop.Utility.Utility.FileExists(server, (string.Format("{0}{1}{2}.txt", server.Path, AppendLogLiteral, Id))));
        // Wrap the shared counter back to 1 before it overflows.
        if (_counter >= int.MaxValue)
        {
            _counter = 1;
        }
        DataBackupFilename = string.Format("{0}{1}", DataBackupFilenameLiteral, Id);
        // Loggers are created lazily elsewhere; start with none.
        _appendLogger = _updateLogger = null;
        //** add store
        var rkc = new RecordKeyComparer<RecordKey>();
        _addStore = new Collections.Generic.ConcurrentSortedDictionary<RecordKey, long>(
            (byte)BTreeAlgorithm.DefaultSlotLength, rkc);
        // Make this transaction current for collections created from here on.
        CollectionOnDisk.transaction = this;
        //** file growth store
        var fsc = new FileSegmentComparer<RecordKey>();
        _fileGrowthStore = new Collections.Generic.ConcurrentSortedDictionary<RecordKey, long>(
            (byte)BTreeAlgorithm.DefaultSlotLength, fsc);
        //** recycled collection store (shares the file-segment comparer)
        _recycledCollectionStore = new Collections.Generic.ConcurrentSortedDictionary<RecordKey, long>(
            (byte)BTreeAlgorithm.DefaultSlotLength, fsc);
        //** log collection — shared across transactions, so guarded by the
        //** class-level Locker and created only once (double-checked on null).
        lock (Locker)
        {
            if (LogCollection == null)
            {
                var lkc = new BackupDataLogKeyComparer<BackupDataLogKey>();
                if (_backupCache == null)
                {
                    // Backup cache gets double the profile's MRU capacity.
                    var p = new Profile(server.Profile);
                    _backupCache = new Mru.Generic.ConcurrentMruManager<BackupDataLogKey, byte[]>(
                        p.MruMinCapacity * 2, p.MruMaxCapacity * 2, lkc);
                }
                // NOTE(review): a fresh comparer is constructed here even though
                // lkc was just created above — presumably harmless; confirm and
                // consider reusing lkc.
                LogCollection = new Collections.Generic.ConcurrentSortedDictionary<BackupDataLogKey, BackupDataLogValue>(
                    new BackupDataLogKeyComparer<BackupDataLogKey>());
            }
        }
        //** make this transaction the trans owner of all collections on this thread
        if (CollectionOnDisk.Session != null)
        {
            CollectionOnDisk.Session.Transaction = this;
        }
    }
}
/// <summary>
/// Reconciles two records keyed on account number, tax year, subjurisdiction
/// code, and delinquency date (the last compared as a date).
/// </summary>
private static ReconciliationStatus CompareResults(Record firstRecord, Record secondRecord)
{
    var dataLoadRecordComparer = new RecordKeyComparer(
        new StringFieldComparer("ACCOUNT_NUMBER"),
        new StringFieldComparer("TAX_YEAR"),
        new StringFieldComparer("SUBJURISDICTION_CODE"),
        new DateFieldComparer("DELINQUENCY_DATE"));
    var reconciler = new RecordReconciler();
    return reconciler.ReconcileRecords(firstRecord, secondRecord, dataLoadRecordComparer);
}
/// <summary>
/// Reconciles the old and new record lists using an ordered reconciler,
/// keyed on account number, tax year, subjurisdiction code, and
/// delinquency date (the last compared as a date).
/// </summary>
private IEnumerable<ReconciliationResult<Record>> CompareResults()
{
    var keyComparer = new RecordKeyComparer(
        new StringFieldComparer("ACCOUNT_NUMBER"),
        new StringFieldComparer("TAX_YEAR"),
        new StringFieldComparer("SUBJURISDICTION_CODE"),
        new DateFieldComparer("DELINQUENCY_DATE"));
    return _orderedRecordReconciler.Reconcile(_oldList, _newList, new RecordReconciler(), keyComparer);
}