public void TestLong(long expected)
{
    // Round-trip check: a long written through BinaryEncoder must read
    // back unchanged through BinaryDecoder over the same stream.
    using (var buffer = new MemoryStream())
    {
        using (var writer = new BinaryEncoder(buffer))
        using (var reader = new BinaryDecoder(buffer))
        {
            writer.WriteLong(expected);

            // Rewind so the decoder starts at the bytes just written.
            buffer.Seek(0, SeekOrigin.Begin);
            long roundTripped = reader.ReadLong();

            Assert.AreEqual(expected, roundTripped);
        }
    }
}
public void DecodeLong(long expectedValue, int expectedLength, byte[] value)
{
    // Verifies that ReadLong yields the expected value while consuming
    // exactly expectedLength bytes, and that SkipLong advances the stream
    // by the same amount.
    using (var input = new MemoryStream(value))
    using (var reader = new BinaryDecoder(input))
    {
        long decoded = reader.ReadLong();
        Assert.AreEqual(expectedLength, input.Position, "Decode offset error");
        Assert.AreEqual(expectedValue, decoded);

        // Rewind and confirm skipping moves the cursor the same distance.
        input.Seek(0, SeekOrigin.Begin);
        reader.SkipLong();
        Assert.AreEqual(expectedLength, input.Position, "Skip offset error");
    }
}
public static async Task Run(
    [EventHubTrigger("table-update", Connection = "eventHubConnection")] EventData[] events,
    [CosmosDB(
        databaseName: "Temenos",
        collectionName: "Events",
        ConnectionStringSetting = "CosmosDBConnection")] IAsyncCollector<JObject> eventsOut,
    ILogger log)
{
    // Decodes each Event Hub message as an Avro record (schema chosen by an
    // id embedded in the message), converts it to JSON, and emits it to the
    // Cosmos DB output collection. Failures are collected per message so the
    // rest of the batch is still processed.
    log.LogInformation($"ProcessAvroEvent triggered with {events.Length} events");

    var exceptions = new List<Exception>();

    foreach (EventData eventData in events)
    {
        try
        {
            // Wrap the raw event body in a stream for the Avro decoder.
            // (Fixed: the stream was previously never disposed.)
            using (var stream = new MemoryStream(eventData.Body.Array, eventData.Body.Offset, eventData.Body.Count))
            {
                // Skip the 3-byte message prefix before the Avro payload.
                // NOTE(review): the original comment said "skip the first 2
                // bytes" but the code positions to 3 — kept at 3; confirm
                // against the producer's framing.
                stream.Position = 3;

                var decoder = new BinaryDecoder(stream);

                // A single flag byte precedes the schema id. Its value was
                // never used, but it must be consumed to keep the decoder
                // aligned with the payload.
                decoder.ReadBoolean();

                var schemaNumber = decoder.ReadLong();

                // Map the embedded schema id to its Avro schema file.
                Schema schema;
                switch (schemaNumber)
                {
                    case 23:
                        schema = Schema.Parse(File.ReadAllText(@"SerializationID-46-CUSTOMER.avsc"));
                        break;
                    case -21:
                        schema = Schema.Parse(File.ReadAllText(@"SerializationID-41-DE_ADDRESS.avsc"));
                        break;
                    default:
                        throw new InvalidOperationException("Unknown schema number: " + schemaNumber);
                }

                // Read the Avro message using the identified schema.
                var reader = new DefaultReader(schema, schema);
                GenericRecord record = reader.Read(null, schema, schema, decoder) as GenericRecord;

                // Convert to JSON and emit to the Cosmos DB collector.
                // (Fixed: was AddAsync(...).Wait() — blocking on async code
                // inside an async method risks thread-pool starvation.)
                JObject outputData = ConvertToJson(record);
                await eventsOut.AddAsync(outputData);
            }
        }
        catch (Exception e)
        {
            // Keep processing the rest of the batch — record this failure.
            // TODO: capture details of the failed message so it can be
            // reprocessed later.
            exceptions.Add(e);
        }
    }

    // Once the whole batch has been attempted, surface any failures so the
    // host records them.
    if (exceptions.Count > 1)
    {
        throw new AggregateException(exceptions);
    }

    if (exceptions.Count == 1)
    {
        throw exceptions.Single();
    }
}