/// <summary>
/// Verifies that the timer-driven batch flush keeps working across repeated
/// rounds of writes rather than going stale after the first flush.
/// </summary>
public async Task BatchWriter_should_succeed_with_timer()
{
    var writer = new BatchWriter(new BatchWriterSettings());
    var ch = new EmbeddedChannel(Flush, writer);
    await Flush.Activated;

    // Run several rounds so a batching mechanism that only works once is caught.
    for (var round = 0; round < 3; round++)
    {
        var payload = Enumerable.Range(0, 4).ToArray();
        foreach (var value in payload)
        {
            _ = ch.WriteAsync(Unpooled.Buffer(1).WriteInt(value));
        }

        // Force the queued write tasks to execute.
        ch.RunPendingTasks();

        // Writes are buffered (4 bytes per int) but nothing has flushed yet.
        ch.Unsafe.OutboundBuffer.TotalPendingWriteBytes().Should().Be(payload.Length * 4);
        ch.OutboundMessages.Count.Should().Be(0);

        await AwaitAssertAsync(() =>
        {
            ch.RunPendingTasks(); // force the scheduled flush task to run
            ch.OutboundMessages.Count.Should().Be(payload.Length);
        }, interval: 100.Milliseconds());

        // Drain the outbound queue before the next round.
        ch.OutboundMessages.Clear();
    }
}
/// <summary>
/// Entry point: parses command-line arguments, runs the XML documentation
/// generation pipeline, and writes the cleaned documents to the output
/// directory. Argument errors are reported to the console; any other failure
/// is logged and rethrown so the process exits with a failure code.
/// </summary>
/// <param name="args">Raw command-line arguments.</param>
private static void Main(string[] args)
{
    try
    {
        Arguments arguments = Arguments.ProcessArguments(args, new FileSystemService());
        Trace.TraceInformation("Beginning documentation generation run at {0}".Inject(DateTime.Now.ToShortTimeString()));

        var parser = new XmlDocParser(arguments.XmlDocs, arguments.TargetType)
        {
            TargetAssembly = arguments.TargetAssembly
        };
        IEnumerable<XDocument> cleanedDocs = parser.Parse();

        var writer = new BatchWriter(cleanedDocs);
        writer.Write(arguments.OutputDirectory);

        Trace.TraceInformation("Finished documentation generation run at {0}".Inject(DateTime.Now.ToShortTimeString()));
    }
    catch (ArgumentException e)
    {
        // Usage errors: show the message to the user without a stack trace.
        Console.WriteLine(e.Message);
    }
    catch (Exception e)
    {
        // Unexpected failures: log, then rethrow so the run fails visibly.
        Trace.TraceInformation("Documentation generation run failed at {0}".Inject(DateTime.Now.ToShortTimeString()));
        Trace.TraceError(e.Message);
        throw;
    }
}
/// <summary>
/// Creates a publisher that announces on the given UDP endpoint, wiring a
/// topic publisher over a batching writer on top of the send socket.
/// </summary>
/// <param name="announceIp">IP address announcements are sent to.</param>
/// <param name="port">UDP port announcements are sent to.</param>
public Publisher(string announceIp, int port)
{
    AnnounceIp = announceIp;
    AnnouncePort = port;

    const int batchBufferSize = 1024 * 128;
    sendSocket = new UdpSource(announceIp, port);
    writer = new BatchWriter(sendSocket, batchBufferSize);
    publisher = new TopicPublisher(writer);
}
/// <summary>
/// Test context: builds the subject-under-test BatchWriter over sample
/// documents, backed by a mocked file system and a local output directory.
/// </summary>
protected override void establish_context()
{
    FileSystem = Dependancy<IFileSystemService>();
    OutputDirectory = @".\docs\";

    var documents = GetListOfXDocuments();
    sut = new BatchWriter(documents)
    {
        FileSystemService = FileSystem
    };
}
/// <summary>
/// Flushes buffered rows: finalizes the tables, hands them to the batch
/// writer, and clears local state. No-op when no table holds any rows,
/// and nothing is written or cleared when no batch writer is attached.
/// </summary>
public void Flush()
{
    var hasBufferedRows = TableMap.Tables.Any(t => t.RowCount > 0);
    if (!hasBufferedRows)
    {
        return;
    }

    FinalizeTables();

    if (BatchWriter == null)
    {
        return;
    }

    BatchWriter.WriteBatch(TableMap.Tables);
    Clear();
}
/// <summary>
/// Ends the current batch: back-patches the total batch size and the command
/// count into their reserved header offsets, then restores the stream
/// position to the end of the batch data. No-op when not batching.
/// </summary>
public void EndBatch()
{
    if (!IsBatching)
    {
        return;
    }

    IsBatching = false;

    // Remember where the batch data ends before rewinding to patch the header.
    long endPosition = BatchStream.Position;

    BatchStream.Seek(BatchSizeOff, SeekOrigin.Begin);
    BatchWriter.Write((uint)endPosition);

    BatchStream.Seek(BatchCmdOff, SeekOrigin.Begin);
    BatchWriter.Write(BatchCmdCount);

    // Return to the end of the batch so subsequent writes append.
    BatchStream.Seek(endPosition, SeekOrigin.Begin);
}
/// <summary>
/// Broadcasts a host-configuration query by publishing a sample quote over
/// PGM multicast (group 224.0.0.23:7272) through a batching topic publisher.
/// </summary>
private void SendHostConfigQuery()
{
    PgmSource sendSocket = new PgmSource("224.0.0.23", 7272);
    sendSocket.Start();

    BatchWriter asyncWriter = new BatchWriter(sendSocket, 1024 * 128);
    TopicPublisher publisher = new TopicPublisher(asyncWriter);
    publisher.Start();

    int sendTimeout = 1000;
    // BUG FIX: the original called an undeclared 'publish' variable; the
    // locally created and started 'publisher' is clearly the intended target.
    publisher.PublishObject("Stock-Quotes-AAPL", 123.3, sendTimeout);
}
/// <summary>
/// Builds a PGM multicast channel: an inbound receiver feeding a message
/// reader, and an outbound socket wrapped by a batching topic publisher.
/// Both sockets are registered for disposal.
/// </summary>
/// <param name="address">Multicast group address.</param>
/// <param name="port">Multicast port.</param>
public PgmMulticastingChannel(string address, int port)
{
    MessageParserFactory = new MessageParserFactory();

    // Inbound side: socket -> reader -> parser factory.
    var reader = new PgmReader(MessageParserFactory);
    var receiveSocket = new PgmReceiver(address, port, reader);
    receiveSocket.Start();

    // Outbound side: publisher -> batching writer -> socket.
    var sendSocket = new PgmSource(address, port);
    sendSocket.Start();
    var batchWriter = new BatchWriter(sendSocket, 1500);
    TopicPublisher = new TopicPublisher(batchWriter);
    TopicPublisher.Start();

    _disposables.Add(receiveSocket);
    _disposables.Add(sendSocket);
}
/// <summary>
/// Writing a member element that lacks a name attribute should throw; the
/// mocked file system reports the output directory as missing (Rhino Mocks
/// record/playback style).
/// </summary>
public void Should_throw_when_name_attribute_is_missing()
{
    var documents = new List<XDocument>
    {
        new XDocument(new XElement("member"))
    };
    sut = new BatchWriter(documents)
    {
        FileSystemService = FileSystem
    };

    // Record the expected interaction with the mocked file system.
    using (Record)
    {
        Expect
            .Call(FileSystem.DoesDirectoryExist(OutputDirectory))
            .Return(false);
    }

    // Replay: exercise the subject under test against the recorded mock.
    using (PlayBack)
    {
        sut.Write(OutputDirectory);
    }
}
/// <summary>
/// Encodes a command and its arguments into the current batch, bumping the
/// batch command counter. Ignored when no batch is open; unknown commands
/// are silently skipped.
/// </summary>
/// <param name="cmd">Command to encode.</param>
/// <param name="args">Command-specific arguments (see the per-case notes).</param>
public void AddCommand(RunnerCommand cmd, params object[] args)
{
    if (!IsBatching)
    {
        return;
    }

    switch (cmd)
    {
        case RunnerCommand.Ping:
            BatchCmdCount++;
            BatchWriter.Write((int)RunnerCommand.Ping);
            BatchWriter.Write((uint)args[0]); // sendID
            BatchWriter.Write((int)args[1]);  // flags
            break;

        case RunnerCommand.GetUpdate:
            BatchCmdCount++;
            BatchWriter.Write((int)RunnerCommand.GetUpdate);
            BatchWriter.Write((int)args[0]); // flags
            break;

        case RunnerCommand.GetInstanceData:
            BatchCmdCount++;
            BatchWriter.Write((int)RunnerCommand.GetInstanceData);
            // Length-prefixed list of instance IDs.
            BatchWriter.Write(AllInstances.Count);
            foreach (var instance in AllInstances)
            {
                BatchWriter.Write(instance.ID);
            }
            break;
    }
}
/// <summary>
/// Generates the C# source for a partition class (query builder, query, batch,
/// read/create/replace/delete accessors and per-document helpers), registers it
/// with the generator output context as "partition_{ClassName}.cs", and then
/// emits the companion writer classes for the same partition.
/// NOTE(review): the interpolated verbatim template below is reproduced exactly
/// as found — its whitespace is emitted into the generated source, so do not
/// reflow or re-indent the string literal.
/// </summary>
public static void Write(OutputModel outputModel, DatabasePlan databasePlan, PartitionPlan partitionPlan) { var s = $@" namespace {databasePlan.Namespace}; public class {partitionPlan.ClassName} : Cosmogenesis.Core.DbPartitionBase {{ protected virtual {databasePlan.Namespace}.{databasePlan.DbClassName} {databasePlan.DbClassName} {{ get; }} = default!; {PkClass(partitionPlan)} /// <summary>Mocking constructor</summary> protected {partitionPlan.ClassName}() {{ }} internal protected {partitionPlan.ClassName}( {new[] { ConstructorClassParameter(databasePlan), ConstructorKeyParameter(partitionPlan) }.Where(x => !string.IsNullOrEmpty(x)).JoinNonEmpty()}) : base( db: {databasePlan.DbClassNameArgument}, partitionKey: {partitionPlan.GetPkPlan.FullMethodName}({partitionPlan.GetPkPlan.DocumentToParametersMapping("pkData")}), serializer: {databasePlan.Namespace}.{databasePlan.SerializerClassName}.Instance) {{ this.{databasePlan.DbClassName} = {databasePlan.DbClassNameArgument} ?? throw new System.ArgumentNullException(nameof({databasePlan.DbClassNameArgument})); {PkClassSetter(partitionPlan)} }} {databasePlan.Namespace}.{partitionPlan.QueryBuilderClassName}? queryBuilder; /// <summary> /// Methods to build queries for later execution. /// </summary> public virtual {databasePlan.Namespace}.{partitionPlan.QueryBuilderClassName} QueryBuilder => this.queryBuilder ??= new( {databasePlan.DbClassNameArgument}: this.{databasePlan.DbClassName}, partitionKey: this.PartitionKey); {databasePlan.Namespace}.{partitionPlan.QueryClassName}? query; /// <summary> /// Methods to execute queries. /// </summary> public virtual {databasePlan.Namespace}.{partitionPlan.QueryClassName} Query => this.query ??= new( {databasePlan.DbClassNameArgument}: this.{databasePlan.DbClassName}, {partitionPlan.QueryBuilderClassNameArgument}: this.QueryBuilder); /// <summary> /// A batch of operations to be executed atomically (or not at all) within a {partitionPlan.Name} in the {databasePlan.Name} database. 
/// </summary> public virtual {databasePlan.Namespace}.{partitionPlan.BatchClassName} CreateBatch() => new( transactionalBatch: this.CreateBatchForPartition(), partitionKey: this.PartitionKeyString, validateStateBeforeSave: this.{databasePlan.DbClassName}.ValidateStateBeforeSave, {partitionPlan.ClassNameArgument}: this); {databasePlan.Namespace}.{partitionPlan.ReadClassName}? read; /// <summary> /// Methods to read documents. /// </summary> public virtual {databasePlan.Namespace}.{partitionPlan.ReadClassName} Read => this.read ??= new( {databasePlan.DbClassNameArgument}: this.{databasePlan.DbClassName}, partitionKey: this.PartitionKey); {databasePlan.Namespace}.{partitionPlan.ReadOrThrowClassName}? readOrThrow; /// <summary> /// Methods to read documents, or throw DbConflictException is they are not found. /// </summary> public virtual {databasePlan.Namespace}.{partitionPlan.ReadOrThrowClassName} ReadOrThrow => this.readOrThrow ??= new( {databasePlan.DbClassNameArgument}: this.{databasePlan.DbClassName}, partitionKey: this.PartitionKey); {databasePlan.Namespace}.{partitionPlan.CreateClassName}? create; /// <summary> /// Methods to create documents. /// </summary> public virtual {databasePlan.Namespace}.{partitionPlan.CreateClassName} Create => this.create ??= new(this); {databasePlan.Namespace}.{partitionPlan.ReadOrCreateClassName}? readOrCreate; /// <summary> /// Methods to read documents, or create them if they did not yet exist. 
/// </summary> public virtual {databasePlan.Namespace}.{partitionPlan.ReadOrCreateClassName} ReadOrCreate => this.readOrCreate ??= new(this); {ReadMany(databasePlan, partitionPlan)} {CreateOrReplace(databasePlan, partitionPlan)} {string.Concat(partitionPlan.Documents.Select(x => Create(partitionPlan, x)))} {string.Concat(partitionPlan.Documents.Select(x => CreateOrReplace(partitionPlan, x)))} {string.Concat(partitionPlan.Documents.Select(x => ReadOrCreate(partitionPlan, x)))} {string.Concat(partitionPlan.Documents.Select(ReplaceIfMutable))} {string.Concat(partitionPlan.Documents.Select(DeleteIfTransient))} }} "; outputModel.Context.AddSource($"partition_{partitionPlan.ClassName}.cs", s); BatchWriter.Write(outputModel, databasePlan, partitionPlan); CreateWriter.Write(outputModel, databasePlan, partitionPlan); ReadOrCreateWriter.Write(outputModel, databasePlan, partitionPlan); CreateOrReplaceWriter.Write(outputModel, databasePlan, partitionPlan); ReadWriter.Write(outputModel, databasePlan, partitionPlan); ReadOrThrowWriter.Write(outputModel, databasePlan, partitionPlan); ReadManyWriter.Write(outputModel, databasePlan, partitionPlan); QueryBuilderWriter.Write(outputModel, databasePlan, partitionPlan); QueryWriter.Write(outputModel, databasePlan, partitionPlan); }
/// <summary>
/// End-to-end sync round-trip: reads a dataset from SQL Server via
/// ReadProcedure, serializes each manifest-described table (schema, row count,
/// rows) into an in-memory binary stream, then replays the stream into SQLite
/// through BatchWriter inside a begin/execute/end batch per table.
/// NOTE(review): requires the TESTCONNSTRING environment variable to point at
/// a reachable SQL Server instance — confirm before running in CI.
/// </summary>
public void FullTest()
{
    IDataComponentFactory connectionFactory;
    IDataComponentFactory sqliteConnectionFactory;
    var entFactory = new EntityFactory();
    var outputFileName = OUTPUT_FILENAME;
    var sqlLiteConnectionString = $"Data Source={outputFileName}";
    if (System.IO.File.Exists(outputFileName))
    {
        // System.IO.File.Delete(outputFileName);
    }
    //Add your SQLServer Connection string as environment variable "TESTCONNSTRING"
    connectionFactory = new SqlServerDataComponentFactory(Environment.GetEnvironmentVariable("TESTCONNSTRING"));
    sqliteConnectionFactory = new SqliteDataComponentFactory(sqlLiteConnectionString);
    var prm = new SyncProcParams()
    {
        ClientId = "CAFEFRESH",
        LastSync = DateTime.Now,
        UserId = 44720,
        PreviousSyncApiId = 0.0f
    };
    using (var outputStream = new MemoryStream())
    {
        using (var proc = new ReadProcedure(connectionFactory))
        {
            var readResult = proc.Read(prm);
            var ds = readResult.DataSet;
            var rows = ds.Tables[ds.Tables.Count - 1];
            // The last table of the result set is the manifest describing the others.
            var manifest = ds.Tables[ds.Tables.Count - 1].Rows.ToArray<Manifest>();
            var binaryWriter = new BinaryWriter(outputStream);
            var count = manifest.Length;
            // Stream layout: table count, then per table (schema, row count, rows).
            binaryWriter.Write(count);
            foreach (var manifestTbl in manifest)
            {
                var dataTable = ds.Tables[manifestTbl.table_index];
                var outputTable = entFactory.ToTable(dataTable, manifestTbl.table_name);
                var pkeyColumn = outputTable.Columns.Where(col => col.Name == manifestTbl.primary_key).FirstOrDefault();
                if (pkeyColumn != null)
                {
                    pkeyColumn.IsPrimaryKey = true;
                }
                outputStream.Write(outputTable);
                binaryWriter.Write(dataTable.Rows.Count);
                binaryWriter.WriteToStream(outputTable, dataTable.Rows);
            }
        }
        // Rewind and replay the serialized stream into SQLite.
        outputStream.Seek(0, SeekOrigin.Begin);
        var recordCount = 0;
        var sqliteConnection = sqliteConnectionFactory.Open();
        var binaryReader = new BinaryReader(outputStream);
        var tableCount = binaryReader.ReadInt32();
        for (var idx = 0; idx < tableCount; ++idx)
        {
            var batchWriter = new BatchWriter();
            var tbl = outputStream.ReadFromStream<Table>();
            batchWriter.Init(tbl, sqliteConnection, truncate: true);
            batchWriter.Begin();
            var rowCount = binaryReader.ReadInt32();
            for (var rowIdx = 0; rowIdx < rowCount; ++rowIdx)
            {
                var values = binaryReader.ReadRowFromStream(tbl);
                batchWriter.Execute(values);
                recordCount++;
                /* Code to write the object array to SQLite in a transaction */
            }
            batchWriter.End();
        }
    }
}
/// <summary>
/// Pins the CRC value BatchWriter.Crc produces for a known message string,
/// guarding against accidental changes to the checksum algorithm.
/// </summary>
public void BatchWriterCrcTest()
{
    const string message = "M,2508,0,2510,8568,2517,8016,2522,8360,2515,8368,2517,8388,2529,8364,2534,8356,2520,8368,2536,8388";
    const int expectedCrc = 19164;

    Assert.AreEqual(expectedCrc, BatchWriter.Crc(message));
}