public void Component_AvroAttributeClass_ResultIsEqualToInput()
{
    //Arrange
    AttributeClass toSerialize = new AttributeClass
    {
        AndAnotherString = "anotherString",
        NullableIntProperty = 1,
        NullableIntPropertyWithDefaultValue = null,
        NullableStringProperty = "nullableString"
    };

    string schema = AvroConvert.GenerateSchema(typeof(AttributeClass));

    //Act
    var result = AvroConvert.SerializeHeadless(toSerialize, schema);
    var deserialized = AvroConvert.DeserializeHeadless<AttributeClass>(result, schema);

    //Assert
    Assert.NotNull(result);
    Assert.NotNull(deserialized);
    Assert.Equal(toSerialize.NullableIntProperty, deserialized.NullableIntProperty);
    Assert.Equal(toSerialize.AndAnotherString, deserialized.AndAnotherString);
    Assert.Equal(toSerialize.NullableStringProperty, deserialized.NullableStringProperty);
    Assert.Equal(2137, deserialized.NullableIntPropertyWithDefaultValue);
}
public virtual async Task<T> DeserializeAsync(ReadOnlyMemory<byte> data, bool isNull, SerializationContext context)
{
    using (var stream = new MemoryStream(data.ToArray(), false))
    {
        //Confluent Kafka wire format: 1 magic byte, 4-byte schema id, then the Avro payload
        //https://docs.confluent.io/current/schema-registry/docs/serializer-formatter.html#wire-format
        if (stream.ReadByte() != 0x00)
        {
            throw new InvalidDataException("Invalid Confluent Kafka data format");
        }

        var bytes = new byte[4];
        stream.Read(bytes, 0, bytes.Length);

        //The schema id is stored in big-endian (network) byte order
        var id = System.Buffers.Binary.BinaryPrimitives.ReadInt32BigEndian(bytes);

        string schema;
        if (cache.ContainsKey(id))
        {
            schema = cache[id];
        }
        else
        {
            schema = await RegistryClient.GetSchemaAsync(id);
            cache.AddOrUpdate(id, schema, (key, oldValue) => schema);
        }

        var confluentSchema = new ConfluentSchema(schema);

        //The bytes remaining after the 5-byte header are the headless Avro payload
        var payload = new byte[stream.Length - stream.Position];
        stream.Read(payload, 0, payload.Length);

        var result = AvroConvert.DeserializeHeadless<T>(payload, confluentSchema.SchemaString);
        return result;
    }
}
public T Deserialize(ReadOnlySpan<byte> data, bool isNull, SerializationContext context)
{
    var dataArray = data.ToArray();

    //Skip the 5-byte Confluent header (1 magic byte + 4-byte schema id) to get the headless Avro payload
    var dataWithoutMagicNumber = dataArray.Skip(5);

    var result = AvroConvert.DeserializeHeadless<T>(dataWithoutMagicNumber.ToArray(), _schema);
    return result;
}
public void Component_SerializeHeadlessBiggerObjectAndReadSmaller_NoError()
{
    //Arrange
    VeryComplexClass toSerialize = _fixture.Create<VeryComplexClass>();
    string schema = AvroConvert.GenerateSchema(typeof(VeryComplexClass));

    //Act
    var result = AvroConvert.SerializeHeadless(toSerialize, schema);
    var deserialized = AvroConvert.DeserializeHeadless<VeryComplexClass>(result, schema);

    //Assert
    Assert.NotNull(result);
    Assert.NotNull(deserialized);
    Assert.Equal(toSerialize, deserialized);
}
public void Component_DeserializeWithMissingFields_NoError()
{
    //Arrange
    BaseTestClass toSerialize = _fixture.Create<BaseTestClass>();
    string schema = AvroConvert.GenerateSchema(typeof(BaseTestClass));

    //Act
    var result = AvroConvert.SerializeHeadless(toSerialize, schema);
    var deserialized = AvroConvert.DeserializeHeadless<ReducedBaseTestClass>(result, schema);

    //Assert
    Assert.NotNull(result);
    Assert.NotNull(deserialized);
    Assert.Equal(toSerialize.justSomeProperty, deserialized.justSomeProperty);
}
public static async Task Run(
    [EventHubTrigger(
        Constants.EVENT_HUB_QA_TELEMETRY_AVRO,
        ConsumerGroup = Constants.EVENT_HUB_CONSUMER_GROUP,
        Connection = "PACKAGING_QA_TELEMETRY_AVRO_EVENTHUB"
    )] EventData[] events,
    [CosmosDB(
        databaseName: Constants.COSMOS_DB_DATABASE_NAME,
        collectionName: Constants.COSMOS_DB_CONTAINER_NAME,
        ConnectionStringSetting = "FACTORY_COSMOS_DB"
    )] IAsyncCollector<object> items,
    ILogger log)
{
    var exceptions = new List<Exception>();
    TelemetryAvro deserialized = null;

    foreach (EventData eventData in events)
    {
        try
        {
            deserialized = AvroConvert.DeserializeHeadless<TelemetryAvro>(eventData.Body.Array, avroTelemetrySchema);
            deserialized.DeviceId = eventData.SystemProperties["iothub-connection-device-id"].ToString();

            await items.AddAsync(deserialized);
        }
        catch (Exception e)
        {
            // We need to keep processing the rest of the batch - capture this exception and continue.
            // Also, consider capturing details of the message that failed processing so it can be processed again later.
            exceptions.Add(e);
        }
    }

    // Once processing of the batch is complete, if any messages in the batch failed processing,
    // throw an exception so that there is a record of the failure.
    if (exceptions.Count > 1)
    {
        throw new AggregateException(exceptions);
    }

    if (exceptions.Count == 1)
    {
        throw exceptions.Single();
    }
}
public void Component_SerializeHeadlessBiggerObjectAndReadSmaller_NoError()
{
    //Arrange
    ExtendedBaseTestClass toSerialize = _fixture.Create<ExtendedBaseTestClass>();
    string schema = AvroConvert.GenerateSchema(typeof(BaseTestClass));

    //Act
    var result = AvroConvert.SerializeHeadless(toSerialize, schema);
    var deserialized = AvroConvert.DeserializeHeadless<BaseTestClass>(result, schema);

    //Assert
    Assert.NotNull(result);
    Assert.NotNull(deserialized);
    Assert.Equal(toSerialize.andLongProperty, deserialized.andLongProperty);
    Assert.Equal(toSerialize.justSomeProperty, deserialized.justSomeProperty);
}
public static void Invoke()
{
    //Arrange
    var fixture = new Fixture();
    Dataset dataset = fixture.Create<Dataset>();
    var schema = AvroConvert.GenerateSchema(typeof(Dataset));
    Schema apacheSchema = Schema.Parse(schema);

    //AvroConvert to Apache
    var avroConvertSerialized = AvroConvert.SerializeHeadless(dataset, schema);

    Dataset apacheDeserialized;
    using (var ms = new MemoryStream(avroConvertSerialized))
    {
        var apacheReader = new GenericDatumReader<GenericRecord>(apacheSchema, apacheSchema);
        var decoder = new BinaryDecoder(ms);
        apacheDeserialized = ApacheAvroHelpers.Decreate<Dataset>(apacheReader.Read(null, decoder));
    }

    Contract.Assert(dataset == apacheDeserialized);

    //Apache to AvroConvert
    MemoryStream apacheAvroSerializeStream = new MemoryStream();
    var encoder = new BinaryEncoder(apacheAvroSerializeStream);
    var apacheWriter = new GenericDatumWriter<GenericRecord>(apacheSchema);
    apacheWriter.Write(ApacheAvroHelpers.Create(dataset, apacheSchema), encoder);
    var apacheSerialized = apacheAvroSerializeStream.ToArray();

    var avroConvertDeserialized = AvroConvert.DeserializeHeadless<Dataset>(apacheSerialized);

    Contract.Assert(dataset == avroConvertDeserialized);
}
private static BenchmarkResult RunBenchmark(Dataset[] datasets, string schema)
{
    var result = new BenchmarkResult();
    Stopwatch stopwatch = Stopwatch.StartNew();

    //Serialize Apache.Avro
    MemoryStream apacheAvroSerializeStream = new MemoryStream();
    var encoder = new BinaryEncoder(apacheAvroSerializeStream);
    var apacheSchema = Schema.Parse(AvroConvert.GenerateSchema(typeof(Dataset)));
    var apacheWriter = new GenericDatumWriter<GenericRecord>(apacheSchema);
    foreach (var dataset in datasets)
    {
        apacheWriter.Write(ApacheAvroHelpers.Create(dataset, apacheSchema), encoder);
    }
    var apacheAvro = apacheAvroSerializeStream.ToArray();

    result.ApacheAvroSerializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Deserialize Apache.Avro
    List<Dataset> apacheResult = new List<Dataset>();
    using (var ms = new MemoryStream(apacheAvro))
    {
        apacheSchema = Schema.Parse(AvroConvert.GenerateSchema(typeof(Dataset)));
        var apacheReader = new GenericDatumReader<GenericRecord>(apacheSchema, apacheSchema);
        var decoder = new BinaryDecoder(ms);
        foreach (var dataset in datasets)
        {
            apacheResult.Add(ApacheAvroHelpers.Decreate<Dataset>(apacheReader.Read(null, decoder)));
        }
    }

    result.ApacheAvroDeserializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Serialize AvroConvert Headless
    var avroHeadless = AvroConvert.SerializeHeadless(datasets, schema);

    result.AvroConvertHeadlessSerializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Deserialize AvroConvert Headless
    AvroConvert.DeserializeHeadless<List<Dataset>>(avroHeadless, schema);

    result.AvroConvertHeadlessDeserializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Serialize AvroConvert Gzip
    var avroGzip = AvroConvert.Serialize(datasets, CodecType.GZip);

    result.AvroConvertGzipSerializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Deserialize AvroConvert Gzip
    AvroConvert.Deserialize<Dataset[]>(avroGzip);

    result.AvroConvertGzipDeserializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Serialize AvroConvert vNext
    var newAvro = AvroConvertToUpdate.AvroConvert.SerializeHeadless(datasets, schema);

    result.AvroConvertVNextHeadlessSerializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Deserialize AvroConvert vNext
    AvroConvertToUpdate.AvroConvert.DeserializeHeadless<Dataset[]>(newAvro, schema);

    result.AvroConvertVNextHeadlessDeserializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Serialize AvroConvert vNext Gzip
    var newAvroGzip = AvroConvertToUpdate.AvroConvert.Serialize(datasets, AvroConvertToUpdate.Codec.CodecType.GZip);

    result.AvroConvertVNextGzipSerializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Restart();

    //Deserialize AvroConvert vNext Gzip
    AvroConvertToUpdate.AvroConvert.Deserialize<Dataset[]>(newAvroGzip);

    result.AvroConvertVNextGzipDeserializeTime = stopwatch.ElapsedMilliseconds;
    stopwatch.Stop();

    //Size
    result.ApacheAvroSize = apacheAvro.Length;
    result.AvroConvertHeadlessSize = avroHeadless.Length;
    result.AvroConvertGzipSize = avroGzip.Length;
    result.AvroConvertVNextSize = newAvro.Length;
    result.AvroConvertVNextGzipSize = newAvroGzip.Length;

    return result;
}