public InheritedEntity DeserializeAvroMsft(byte[] bytes)
        {
            var serializer = new AvroSerializer(typeof(InheritedEntity));

            using (var ms = new MemoryStream(bytes))
            {
                return serializer.Deserialize<InheritedEntity>(ms);
            }
        }
        public byte[] SerializeAvroMsft(InheritedEntity thisObj)
        {
            var serializer = new AvroSerializer(thisObj.GetType());

            using (var ms = new MemoryStream())
            {
                serializer.Serialize(thisObj, ms);
                return ms.ToArray();
            }
        }
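A minimal alternative sketch (not part of the original snippet), assuming InheritedEntity is a serializable data contract: the same round trip can be expressed with the Microsoft.Hadoop.Avro static factory instead of the non-generic wrapper used above.
        public byte[] RoundTripWithFactory(InheritedEntity entity)
        {
            // AvroSerializer.Create<T>() builds a strongly typed IAvroSerializer<T> via reflection
            var serializer = AvroSerializer.Create<InheritedEntity>();

            using (var ms = new MemoryStream())
            {
                serializer.Serialize(ms, entity);                     // write Avro binary into the stream
                ms.Seek(0, SeekOrigin.Begin);
                InheritedEntity copy = serializer.Deserialize(ms);    // read it back for verification
                return ms.ToArray();
            }
        }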
Example #3
        public void TestReadAvroAsODataFeed()
        {
            const string Schema     = @"
{""type"":""array"",
""items"":
{
""type"":""record"",
""name"":""TestNS.Person"",
""fields"":
    [
        { ""name"":""Id"", ""type"":""int"" },
        { ""name"":""Title"", ""type"":""string"" },
    ]
}
}";
            var          serializer = AvroSerializer.CreateGeneric(Schema);

            using (var stream = new MemoryStream())
            {
                var arraySchema  = (ArraySchema)serializer.WriterSchema;
                var recordSchema = arraySchema.ItemSchema;

                var rec1 = new AvroRecord(recordSchema);
                rec1["Id"]    = 1;
                rec1["Title"] = "s1";

                var rec2 = new AvroRecord(recordSchema);
                rec2["Id"]    = 2;
                rec2["Title"] = "s2";

                var array = new[] { rec1, rec2 };

                using (var writer = AvroContainer.CreateGenericWriter(Schema, stream, /*leave open*/ true, Codec.Null))
                    using (var seqWriter = new SequentialWriter <object>(writer, 24))
                    {
                        seqWriter.Write(array);
                    }

                stream.Seek(0, SeekOrigin.Begin);
                var avroReader = new ODataAvroReader(this.CreateODataInputContext(stream), true);
                Assert.AreEqual(ODataReaderState.Start, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceSetStart, avroReader.State);
                Assert.IsTrue(avroReader.Read());

                // Entry 1
                Assert.AreEqual(ODataReaderState.ResourceStart, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceEnd, avroReader.State);
                var entry = avroReader.Item as ODataResource;
                Assert.IsNotNull(entry);
                var properties = entry.Properties.ToList();
                Assert.AreEqual(2, properties.Count);
                Assert.AreEqual("Id", properties[0].Name);
                Assert.AreEqual(1, properties[0].Value);
                Assert.AreEqual("Title", properties[1].Name);
                Assert.AreEqual("s1", properties[1].Value);

                // Entry 2
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceStart, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceEnd, avroReader.State);
                entry = avroReader.Item as ODataResource;
                Assert.IsNotNull(entry);
                properties = entry.Properties.ToList();
                Assert.AreEqual(2, properties.Count);
                Assert.AreEqual("Id", properties[0].Name);
                Assert.AreEqual(2, properties[0].Value);
                Assert.AreEqual("Title", properties[1].Name);
                Assert.AreEqual("s2", properties[1].Value);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceSetEnd, avroReader.State);

                Assert.IsFalse(avroReader.Read());
                Assert.AreEqual(ODataReaderState.Completed, avroReader.State);
            }
        }
Example #4
 public AvroDeserializer()
 {
     avroSerializer = AvroSerializer.Create <T>();
 }
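Only the constructor is shown above; a hypothetical completion of such a wrapper (the field and method names below are assumptions, not from the original) might look like:
 public class AvroDeserializer <T>
 {
     private readonly IAvroSerializer <T> avroSerializer;

     public AvroDeserializer()
     {
         avroSerializer = AvroSerializer.Create <T>();
     }

     // Read one object of type T back from an Avro binary stream.
     public T Deserialize(Stream stream)
     {
         return avroSerializer.Deserialize(stream);
     }
 }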
Example #5
        public static void SerializeDynamicSampleFile(string path)
        {
            Console.WriteLine("SERIALIZATION USING GENERIC RECORD AND AVRO OBJECT CONTAINER FILES\n");

            Console.WriteLine("Defining the Schema and creating Sample Data Set...");

            //Define the schema in JSON
            const string Schema = @"{
                                ""type"":""record"",
                                ""name"":""Microsoft.Hadoop.Avro.Specifications.SensorData"",
                                ""fields"":
                                    [
                                        { 
                                            ""name"":""Location"", 
                                            ""type"":
                                                {
                                                    ""type"":""record"",
                                                    ""name"":""Microsoft.Hadoop.Avro.Specifications.Location"",
                                                    ""fields"":
                                                        [
                                                            { ""name"":""Floor"", ""type"":""int"" },
                                                            { ""name"":""Room"", ""type"":""int"" }
                                                        ]
                                                }
                                        },
                                        { ""name"":""Value"", ""type"":""bytes"" }
                                    ]
                            }";

            //Create a generic serializer based on the schema
            var serializer = AvroSerializer.CreateGeneric(Schema);
            var rootSchema = serializer.WriterSchema as RecordSchema;

            //Create a generic record to represent the data
            var testData = new List <AvroRecord>();

            dynamic expected1 = new AvroRecord(rootSchema);
            dynamic location1 = new AvroRecord(rootSchema.GetField("Location").TypeSchema);

            location1.Floor    = 1;
            location1.Room     = 243;
            expected1.Location = location1;
            expected1.Value    = new byte[] { 1, 2, 3, 4, 5 };
            testData.Add(expected1);

            dynamic expected2 = new AvroRecord(rootSchema);
            dynamic location2 = new AvroRecord(rootSchema.GetField("Location").TypeSchema);

            location2.Floor    = 1;
            location2.Room     = 244;
            expected2.Location = location2;
            expected2.Value    = new byte[] { 6, 7, 8, 9 };
            testData.Add(expected2);

            //Serializing and saving data to file
            //Create a MemoryStream buffer
            using (var buffer = new MemoryStream())
            {
                Console.WriteLine("Serializing Sample Data Set...");

                //Create a SequentialWriter instance for type SensorData which can serialize a sequence of SensorData objects to stream
                //Data will not be compressed (Null compression codec)
                using (var writer = AvroContainer.CreateGenericWriter(Schema, buffer, Codec.Null))
                {
                    using (var streamWriter = new SequentialWriter <object>(writer, 24))
                    {
                        // Serialize the data to stream using the sequential writer
                        testData.ForEach(streamWriter.Write);
                    }
                }

                Console.WriteLine("Saving serialized data to file...");

                //Save stream to file
                if (!WriteFile(buffer, path))
                {
                    Console.WriteLine("Error during file operation. Quitting method");
                    return;
                }
            }
        }
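WriteFile (and the ReadFile counterpart used in a later example) is referenced but not shown; minimal sketches, assuming the helpers simply copy the buffered Avro container to and from disk, might be:
        private static bool WriteFile(MemoryStream buffer, string path)
        {
            try
            {
                using (var fs = File.Create(path))
                {
                    buffer.Seek(0, SeekOrigin.Begin);
                    buffer.CopyTo(fs);                  // persist the Avro container bytes
                }
                return true;
            }
            catch (IOException)
            {
                return false;
            }
        }

        private static bool ReadFile(MemoryStream buffer, string path)
        {
            try
            {
                using (var fs = File.OpenRead(path))
                {
                    fs.CopyTo(buffer);                  // load the Avro container bytes
                }
                return true;
            }
            catch (IOException)
            {
                return false;
            }
        }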
Example #6
        public void Container_SchemaEvolution_RecordWithPermutatedAndMissingWriterFields()
        {
            const string WriterSchema =
                @"{
                 ""name"":""WriterClass"",
                 ""namespace"":""Microsoft.Hadoop.Avro.Tests"",
                 ""type"":""record"",
                 ""fields"":
                           [
                                {""name"":""BoolA"", ""type"":""boolean""},
                                {""name"":""BoolB"", ""type"":""boolean""},
                                {""name"":""FloatA"", ""type"":""float""},
                                {""name"":""FloatB"", ""type"":""float""},
                                {""name"":""DoubleA"", ""type"":""double""},
                                {""name"":""DoubleB"", ""type"":""double""},
                                {""name"":""IntA"", ""type"":""int""},
                                {""name"":""IntB"", ""type"":""int""},
                                {""name"":""MyGuid"", ""type"": {""type"":""fixed"", ""size"":16, ""name"": ""q"" }},
                                {""name"": ""classField"", ""type"" : [ ""null"", ""Microsoft.Hadoop.Avro.Tests.WriterClass""] },
                                {""name"":""Arr"", ""type"": {""type"":""array"", ""items"":""int""}},
                                {""name"":""LongField"", ""type"":""long""},
                                {""name"":""LongMap"", ""type"": {""type"":""map"", ""values"":""long""}},
                                {""name"":""BytesField"", ""type"":""bytes""},
                           ]
             }";

            const string ReaderSchema =
                @"{
                 ""name"":""WriterClass"",
                 ""namespace"":""Microsoft.Hadoop.Avro.Tests"",
                 ""type"":""record"",
                 ""fields"":
                           [
                                {""name"":""DoubleB"", ""type"":""double""},
                                {""name"":""FloatB"", ""type"":""float""},
                                {""name"":""FloatA"", ""type"":""float""},
                                {""name"":""BoolB"", ""type"":""boolean""},
                                {""name"":""BoolA"", ""type"":""boolean""},
                                {""name"":""DoubleA"", ""type"":""double""},
                           ]
             }";

            const int RecordsCount = 100;

            var serializer = AvroSerializer.CreateGeneric(WriterSchema);
            var schema     = serializer.WriterSchema;
            var expected   = new List <AvroRecord>();

            for (int counter = 0; counter < RecordsCount; counter++)
            {
                dynamic avroRecord = new AvroRecord(schema);
                avroRecord.BoolA      = Utilities.GetRandom <bool>(false);
                avroRecord.BoolB      = Utilities.GetRandom <bool>(false);
                avroRecord.FloatA     = Utilities.GetRandom <float>(false);
                avroRecord.FloatB     = Utilities.GetRandom <float>(false);
                avroRecord.DoubleA    = Utilities.GetRandom <double>(false);
                avroRecord.DoubleB    = Utilities.GetRandom <double>(false);
                avroRecord.IntA       = Utilities.GetRandom <int>(false);
                avroRecord.IntB       = Utilities.GetRandom <int>(false);
                avroRecord.MyGuid     = Utilities.GetRandom <Guid>(false).ToByteArray();
                avroRecord.Arr        = Utilities.GetRandom <int[]>(false);
                avroRecord.LongField  = Utilities.GetRandom <long>(false);
                avroRecord.LongMap    = Utilities.GetRandom <Dictionary <string, long> >(false);
                avroRecord.BytesField = Utilities.GetRandom <byte[]>(false);

                avroRecord.classField            = new AvroRecord(serializer.WriterSchema);
                avroRecord.classField.BoolA      = Utilities.GetRandom <bool>(false);
                avroRecord.classField.BoolB      = Utilities.GetRandom <bool>(false);
                avroRecord.classField.FloatA     = Utilities.GetRandom <float>(false);
                avroRecord.classField.FloatB     = Utilities.GetRandom <float>(false);
                avroRecord.classField.DoubleA    = Utilities.GetRandom <double>(false);
                avroRecord.classField.DoubleB    = Utilities.GetRandom <double>(false);
                avroRecord.classField.IntA       = Utilities.GetRandom <int>(false);
                avroRecord.classField.IntB       = Utilities.GetRandom <int>(false);
                avroRecord.classField.MyGuid     = Utilities.GetRandom <Guid>(false).ToByteArray();
                avroRecord.classField.Arr        = Utilities.GetRandom <int[]>(false);
                avroRecord.classField.classField = null;
                avroRecord.classField.LongField  = Utilities.GetRandom <long>(false);
                avroRecord.classField.LongMap    = Utilities.GetRandom <Dictionary <string, long> >(false);
                avroRecord.classField.BytesField = Utilities.GetRandom <byte[]>(false);
                expected.Add(avroRecord);
            }

            using (var memoryStream = new MemoryStream())
            {
                var writer = AvroContainer.CreateGenericWriter(WriterSchema, memoryStream, Codec.Null);

                var i = 0;
                while (i < expected.Count)
                {
                    var block = writer.CreateBlockAsync().Result;
                    for (var j = 0; j < 2; j++)
                    {
                        if (i >= expected.Count)
                        {
                            break;
                        }
                        block.Write(expected[i]);
                        i++;
                    }
                    writer.WriteBlockAsync(block).Wait();
                }
                writer.Dispose();

                memoryStream.Seek(0, SeekOrigin.Begin);

                var reader = AvroContainer.CreateGenericReader(ReaderSchema, memoryStream, true, new CodecFactory());
                var actual = new List <AvroRecord>();
                while (reader.MoveNext())
                {
                    actual.AddRange(reader.Current.Objects.Cast <AvroRecord>());
                }

                for (var k = 0; k < expected.Count; ++k)
                {
                    Assert.AreEqual(((dynamic)expected[k]).DoubleB, ((dynamic)actual[k]).DoubleB);
                    Assert.AreEqual(((dynamic)expected[k]).FloatB, ((dynamic)actual[k]).FloatB);
                    Assert.AreEqual(((dynamic)expected[k]).FloatA, ((dynamic)actual[k]).FloatA);
                    Assert.AreEqual(((dynamic)expected[k]).BoolB, ((dynamic)actual[k]).BoolB);
                    Assert.AreEqual(((dynamic)expected[k]).BoolA, ((dynamic)actual[k]).BoolA);
                    Assert.AreEqual(((dynamic)expected[k]).DoubleA, ((dynamic)actual[k]).DoubleA);
                }
            }
        }
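Utilities.GetRandom<T>(false) is a test helper that produces random values of the requested type; a simplified, hypothetical stand-in covering only the primitive cases used above might be:
        private static readonly Random Rnd = new Random();

        private static T GetRandomPrimitive <T>()
        {
            object value;
            if (typeof(T) == typeof(bool))        value = Rnd.Next(2) == 0;
            else if (typeof(T) == typeof(int))    value = Rnd.Next();
            else if (typeof(T) == typeof(long))   value = (long)Rnd.Next() << 16;
            else if (typeof(T) == typeof(float))  value = (float)Rnd.NextDouble();
            else if (typeof(T) == typeof(double)) value = Rnd.NextDouble();
            else throw new NotSupportedException("Type not covered by this sketch: " + typeof(T));
            return (T)value;
        }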
Example #7
        public void Container_SchemaEvolution_RecordContainingArrayWithWriterIntPromotedToReaderLong()
        {
            const string WriterSchema =
                @"{
                 ""name"":""RecordContainingArray"",
                 ""type"":""record"",
                 ""fields"":
                           [
                                {""name"":""ArrayField"", ""type"":{""type"":""array"", ""items"":""int""}},
                           ]
             }";

            const string ReaderSchema =
                @"{
                 ""name"":""RecordContainingArray"",
                 ""type"":""record"",
                 ""fields"":
                           [
                                {""name"":""ArrayField"", ""type"":{""type"":""array"", ""items"":""long""}},
                           ]
             }";

            var serializer   = AvroSerializer.CreateGeneric(WriterSchema);
            var schema       = serializer.WriterSchema;
            var randomArrays = Utilities.GetRandom <List <int[]> >(false);
            var expected     = new List <AvroRecord>();

            foreach (var array in randomArrays)
            {
                dynamic avroRecord = new AvroRecord(schema);
                avroRecord.ArrayField = array;
                expected.Add(avroRecord);
            }

            using (var memoryStream = new MemoryStream())
            {
                var writer = AvroContainer.CreateGenericWriter(WriterSchema, memoryStream, Codec.Null);

                var i = 0;
                while (i < expected.Count)
                {
                    var block = writer.CreateBlockAsync().Result;
                    for (var j = 0; j < 2; j++)
                    {
                        if (i >= expected.Count)
                        {
                            break;
                        }
                        block.Write(expected[i]);
                        i++;
                    }
                    writer.WriteBlockAsync(block).Wait();
                }
                writer.Dispose();

                memoryStream.Seek(0, SeekOrigin.Begin);

                var reader = AvroContainer.CreateGenericReader(ReaderSchema, memoryStream, true, new CodecFactory());
                var actual = new List <AvroRecord>();
                while (reader.MoveNext())
                {
                    actual.AddRange(reader.Current.Objects.Cast <AvroRecord>());
                }

                for (var k = 0; k < expected.Count; ++k)
                {
                    var randomArray = randomArrays[k];
                    Assert.AreEqual(randomArray.Length, ((dynamic)actual[k]).ArrayField.Length);

                    for (int t = 0; t < randomArray.Length; t++)
                    {
                        Assert.AreEqual(randomArray[t], ((dynamic)actual[k]).ArrayField.GetValue(t));
                    }
                }
            }
        }
Example #8
 public void Container_CreateBufferReaderBlockNullArguments()
 {
     Utilities.ShouldThrow <ArgumentNullException>(() => new AvroBufferReaderBlock <ClassOfInt>(null, Codec.Null, new byte[] { }, 1));
     Utilities.ShouldThrow <ArgumentNullException>(() => new AvroBufferReaderBlock <ClassOfInt>(AvroSerializer.Create <ClassOfInt>(), null, new byte[] { }, 1));
     Utilities.ShouldThrow <ArgumentNullException>(() => new AvroBufferReaderBlock <ClassOfInt>(AvroSerializer.Create <ClassOfInt>(), Codec.Null, null, 1));
     Utilities.ShouldThrow <ArgumentOutOfRangeException>(() => new AvroBufferReaderBlock <ClassOfInt>(AvroSerializer.Create <ClassOfInt>(), Codec.Null, new byte[] { }, -1));
 }
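Utilities.ShouldThrow<TException> is a helper from the sample's test utilities and is not shown on this page; a minimal sketch of such a helper (an assumption, not the original implementation) could look like:
 internal static void ShouldThrow <TException>(Action action) where TException : Exception
 {
     try
     {
         action();
     }
     catch (TException)
     {
         return;    // the expected exception type was thrown
     }
     Assert.Fail("Expected exception of type " + typeof(TException).Name + " was not thrown.");
 }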
        public string GetSchema()
        {
            var serializer = AvroSerializer.Create <AvroConfiguration>();

            return(serializer.WriterSchema.ToString());
        }
Example #10
        public static void SerializeDeserializeObjectUsingReflectionStream()
        {
            string          blobName = "aaa/avrotest/test008";
            CloudBlobClient client   = new Microsoft.WindowsAzure.Storage.Blob.CloudBlobClient(
                new Uri("http://hanzstorage.blob.core.windows.net"),
                new Microsoft.WindowsAzure.Storage.Auth.StorageCredentials(
                    "hanzstorage",
                    "w9TEpvGTusvFlGAdCoWdDrwqLzy6er0Zm5YKdDD0YTkQdOj3WufeVrgd2c8q8amLR0o6xD0tBChcIIA+DCgxXA=="
                    ));
            CloudBlobContainer container = client.GetContainerReference("hanzhdi");
            CloudBlockBlob     blockBlob = container.GetBlockBlobReference(blobName);

            foreach (var md in blockBlob.Metadata)
            {
                Console.WriteLine("{0}    {1}", md.Key, md.Value);
            }

            Console.WriteLine("Serializing Sample Data Set USING REFLECTION\n");


            AvroBlobAppender.AvroBlobAppenderWriter <MyDataType> writer =
                new AvroBlobAppender.AvroBlobAppenderWriter <MyDataType>(blockBlob, false, AvroSerializer.Create <MyDataType>(), Codec.Null);

            Microsoft.Hadoop.Avro.Container.SequentialWriter <MyDataType> sequentialWriter =
                new SequentialWriter <MyDataType>(writer, 10000);

            List <MyDataType> myDataList = MyDataType.GenerateData(555, 10);

            foreach (var myData in myDataList)
            {
                sequentialWriter.Write(myData);
            }

            sequentialWriter.Flush();
            sequentialWriter.Dispose();

            #region commented code
            //blockBlob.DownloadToFile(blobName, FileMode.Create);
            //using (Stream stream = File.OpenRead(blobName))
            //{
            //    Microsoft.Hadoop.Avro.Container.SequentialReader<MyDataType> reader =
            //        new Microsoft.Hadoop.Avro.Container.SequentialReader<MyDataType>(AvroContainer.CreateReader<MyDataType>(stream));
            //    List<MyDataType> actuals = reader.Objects.ToList();
            //    Console.WriteLine("Number of objects: {0}", actuals.Count);
            //    for (int i = 0; i < actuals.Count; i++)
            //    {
            //        var actual = actuals[i];
            //        MyDataType exp = null;
            //        switch (i)
            //        {
            //            case 0:
            //                exp = expected;
            //                break;
            //            case 1:
            //                exp = expected2;
            //                break;
            //            default:
            //                Console.WriteLine("No expected for object {0}", i);
            //                continue;
            //        }

            //        Console.WriteLine("Result of Data Set Identity Comparison is {0}", Program.Equal(exp, actual));
            //    }
            //}
            #endregion
        }
        public static void Run([BlobTrigger("telemetry-archive/willowtelemetry01/{name}", Connection = "StorageAccount")] Stream myBlob, string name, TraceWriter log)
        {
            DataSet ds = new DataSet(
                new SchemaElement <int>("ObjectID"),
                new SchemaElement <string>("Value"),
                new SchemaElement <string>("ClientID"),
                new SchemaElement <string>("TimeStamp")
                );

            if (myBlob.Length == 508)
            {
                // empty dataset
                return;
            }

            //log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");


            var serializer     = AvroSerializer.CreateGeneric(Schema);
            var jsonSerializer = new JsonSerializer();


            using (var reader = AvroContainer.CreateGenericReader(myBlob))
            {
                using (var streamReader = new SequentialReader <object>(reader))
                {
                    var results = streamReader.Objects;

                    foreach (var item in results)
                    {
                        var body = ((AvroRecord)item)[5];  // index 5 is the "Body" field

                        var json  = System.Text.Encoding.Default.GetString((byte[])body);
                        var array = (JArray)JsonConvert.DeserializeObject(json);

                        var telemetry = jsonSerializer.Deserialize <Telemetry>(array[0].CreateReader());

                        ds.Add(telemetry.ObjectID, telemetry.Value, telemetry.ClientID, telemetry.TimeStamp.ToString());
                    }
                }
            }

            //log.Info($"{ds.RowCount} Records processed");


            if (ds.RowCount == 0)
            {
                return;
            }

            storageAccount = CloudStorageAccount.Parse(storageAcct);

            CloudBlobClient    blobClient    = storageAccount.CreateCloudBlobClient();
            CloudBlobContainer blobContainer = blobClient.GetContainerReference(parquetContainerName);

            blobContainer.CreateIfNotExists();

            name = name + ".parquet";

            CloudBlob       blob       = blobContainer.GetBlobReference(blobName);
            CloudAppendBlob appendBlob = blobContainer.GetAppendBlobReference(name);

            if (appendBlob.Exists())
            {
                appendBlob.Delete(DeleteSnapshotsOption.IncludeSnapshots);
            }

            log.Info($"===> {ds.RowCount} Records, {name}");


            using (var ms = new MemoryStream())
            {
                using (var writer = new ParquetWriter(ms))
                {
                    writer.Write(ds);
                }

                ms.Seek(0, SeekOrigin.Begin);

                appendBlob.UploadFromStream(ms);
            }
        }
Example #12
 public SampleAvroSerializer()
 {
     avroSerializer = AvroSerializer.Create <T>();
 }
 private object CreateSerializer <T>()
 {
     return(AvroSerializer.Create <List <T> >());
 }
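Hypothetical usage of the helper above (not part of the original): since CreateSerializer<T>() returns object, the result has to be cast back to the concrete serializer interface before use.
 private void SerializeList()
 {
     var listSerializer = (IAvroSerializer <List <int> >)CreateSerializer <int>();

     using (var ms = new MemoryStream())
     {
         listSerializer.Serialize(ms, new List <int> { 1, 2, 3 });
     }
 }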
Example #14
        public void ReadAvroAsParameter()
        {
            EdmEntityType personType = new EdmEntityType("TestNS", "Person");

            personType.AddStructuralProperty("Id", EdmPrimitiveTypeKind.Int32);
            personType.AddStructuralProperty("Title", EdmPrimitiveTypeKind.String);

            var operation = new EdmAction("NS", "op1", null);

            operation.AddParameter("p1", EdmCoreModel.Instance.GetString(false));
            operation.AddParameter("p2", new EdmEntityTypeReference(personType, false));

            const string Schema = @"{
""type"":""record"",
""name"":""NS.op1Parameter"",
""fields"":
    [
        { ""name"":""p1"", ""type"":""string"" },
        { ""name"":""p2"", ""type"":
                {""type"":""record"",
                ""name"":""TestNS.Person"",
                ""fields"":
                    [
                        { ""name"":""Id"", ""type"":""int"" },
                        { ""name"":""Title"", ""type"":""string"" },
                    ]
                }
        }
    ]
}";
            var          stream = new MemoryStream();

            using (var writer = AvroContainer.CreateGenericWriter(Schema, stream, /*leave open*/ true, Codec.Null))
                using (var seqWriter = new SequentialWriter <object>(writer, 24))
                {
                    RecordSchema parameterSchema = (RecordSchema)AvroSerializer.CreateGeneric(Schema).WriterSchema;
                    AvroRecord   ar = new AvroRecord(parameterSchema);
                    ar["p1"] = "dat";
                    var        personSchema = parameterSchema.GetField("p2").TypeSchema;
                    AvroRecord person       = new AvroRecord(personSchema);
                    person["Id"]    = 5;
                    person["Title"] = "per1";
                    ar["p2"]        = person;

                    seqWriter.Write(ar);
                    seqWriter.Flush();
                }

            stream.Flush();
            stream.Seek(0, SeekOrigin.Begin);
            var reader = new ODataAvroParameterReader(this.CreateODataInputContext(stream), operation);

            Assert.AreEqual(ODataParameterReaderState.Start, reader.State);
            Assert.IsTrue(reader.Read());
            Assert.AreEqual(ODataParameterReaderState.Value, reader.State);
            Assert.AreEqual("p1", reader.Name);
            Assert.AreEqual("dat", reader.Value);
            Assert.IsTrue(reader.Read());
            Assert.AreEqual(ODataParameterReaderState.Resource, reader.State);
            Assert.AreEqual("p2", reader.Name);
            var ew = reader.CreateResourceReader();

            Assert.AreEqual(ODataReaderState.Start, ew.State);
            Assert.IsTrue(ew.Read());
            Assert.AreEqual(ODataReaderState.ResourceStart, ew.State);
            Assert.IsTrue(ew.Read());
            Assert.AreEqual(ODataReaderState.ResourceEnd, ew.State);
            var entry = ew.Item as ODataResource;

            Assert.IsFalse(ew.Read());
            Assert.AreEqual(ODataReaderState.Completed, ew.State);

            Assert.IsNotNull(entry);
            var properties = entry.Properties.ToList();

            Assert.AreEqual(2, properties.Count);
            Assert.AreEqual("Id", properties[0].Name);
            Assert.AreEqual(5, properties[0].Value);
            Assert.AreEqual("Title", properties[1].Name);
            Assert.AreEqual("per1", properties[1].Value);

            Assert.IsFalse(reader.Read());
            Assert.AreEqual(ODataParameterReaderState.Completed, reader.State);
        }
Example #15
        public void TestReadAvroAsODataEntry()
        {
            const string Schema     = @"
{
""type"":""record"",
""name"":""TestNS.Person"",
""fields"":
    [
        { ""name"":""Id"", ""type"":""int"" },
        { ""name"":""Title"", ""type"":""string"" },
        { ""name"":""Address"", ""type"":{
                ""name"":""TestNS.Address"",
                ""type"":""record"",
                ""fields"":[
                    { ""name"":""ZipCode"", ""type"":""long"" },
                ]
            } 
        },
    ]
}";
            var          serializer = AvroSerializer.CreateGeneric(Schema);

            using (var stream = new MemoryStream())
            {
                var expected = new AvroRecord(serializer.WriterSchema);
                expected["Id"]    = -5;
                expected["Title"] = "set";

                var cpxSchema = ((RecordSchema)serializer.WriterSchema).GetField("Address").TypeSchema;
                var cpx       = new AvroRecord(cpxSchema);
                cpx["ZipCode"]      = 5L;
                expected["Address"] = cpx;

                using (var writer = AvroContainer.CreateGenericWriter(Schema, stream, /*leave open*/ true, Codec.Null))
                {
                    using (var streamWriter = new SequentialWriter <object>(writer, 24))
                    {
                        // Serialize the data to stream using the sequential writer
                        streamWriter.Write(expected);
                    }
                }

                stream.Seek(0, SeekOrigin.Begin);
                var avroReader = new ODataAvroReader(this.CreateODataInputContext(stream), false);
                Assert.AreEqual(ODataReaderState.Start, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceStart, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.NestedResourceInfoStart, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceStart, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceEnd, avroReader.State);
                var entry = avroReader.Item as ODataResource;
                Assert.IsNotNull(entry);
                Assert.AreEqual("TestNS.Address", entry.TypeName);
                var zip = entry.Properties.Single();
                Assert.AreEqual(5L, zip.Value);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.NestedResourceInfoEnd, avroReader.State);
                Assert.IsTrue(avroReader.Read());
                Assert.AreEqual(ODataReaderState.ResourceEnd, avroReader.State);
                entry = avroReader.Item as ODataResource;
                Assert.IsNotNull(entry);
                Assert.AreEqual("TestNS.Person", entry.TypeName);
                var properties = entry.Properties.ToList();
                Assert.AreEqual(2, properties.Count);
                Assert.AreEqual("Id", properties[0].Name);
                Assert.AreEqual(-5, properties[0].Value);
                Assert.AreEqual("Title", properties[1].Name);
                Assert.AreEqual("set", properties[1].Value);
                Assert.IsFalse(avroReader.Read());
                Assert.AreEqual(ODataReaderState.Completed, avroReader.State);
            }
        }
Example #16
 public Schema GetSchema()
 {
     return(AvroSerializer.Create <LargeClass>(new AvroSerializerSettings {
         UsePosixTime = true
     }).WriterSchema);
 }
        //Serialize and deserialize sample data set using Generic Record.
        //Generic Record is a special class with the schema explicitly defined in JSON.
        //All serialized data should be mapped to the fields of the Generic Record,
        //which in turn will then be serialized.
        //
        //This approach is generally slower than using Reflection,
        //but comes in very handy in cases where, for example, the data is represented
        //as Comma Separated Values (CSV) files rather than as objects.
        //
        //In SerializeDeserializeObjectUsingGenericRecords() we will emulate this case
        //by explicitly filling in the fields of the Generic Record.
        public void SerializeDeserializeObjectUsingGenericRecords()
        {
            Console.WriteLine("SERIALIZATION USING GENERIC RECORD\n");
            Console.WriteLine("Defining the Schema and creating Sample Data Set...");

            //Define the schema in JSON
            const string Schema = @"{
                                ""type"":""record"",
                                ""name"":""Microsoft.Hadoop.Avro.Specifications.SensorData"",
                                ""fields"":
                                    [
                                        { 
                                            ""name"":""Location"", 
                                            ""type"":
                                                {
                                                    ""type"":""record"",
                                                    ""name"":""Microsoft.Hadoop.Avro.Specifications.Location"",
                                                    ""fields"":
                                                        [
                                                            { ""name"":""Floor"", ""type"":""int"" },
                                                            { ""name"":""Room"", ""type"":""int"" }
                                                        ]
                                                }
                                        },
                                        { ""name"":""Value"", ""type"":""bytes"" }
                                    ]
                            }";

            //Create a generic serializer based on the schema
            var serializer = AvroSerializer.CreateGeneric(Schema);
            var rootSchema = serializer.WriterSchema as RecordSchema;

            //Create a Memory Stream buffer
            using (var stream = new MemoryStream())
            {
                //Create a generic record to represent the data
                dynamic location = new AvroRecord(rootSchema.GetField("Location").TypeSchema);
                location.Floor = 1;
                location.Room  = 243;

                dynamic expected = new AvroRecord(serializer.WriterSchema);
                expected.Location = location;
                expected.Value    = new byte[] { 1, 2, 3, 4, 5 };

                Console.WriteLine("Serializing Sample Data Set...");

                //Serialize the data
                serializer.Serialize(stream, expected);

                stream.Seek(0, SeekOrigin.Begin);

                Console.WriteLine("Deserializing Sample Data Set...");

                //Deserialize the data into a generic record
                dynamic actual = serializer.Deserialize(stream);

                Console.WriteLine("Comparing Initial and Deserialized Data Sets...");

                //Finally, verify the results
                bool isEqual = expected.Location.Floor.Equals(actual.Location.Floor);
                isEqual = isEqual && expected.Location.Room.Equals(actual.Location.Room);
                isEqual = isEqual && ((byte[])expected.Value).SequenceEqual((byte[])actual.Value);
                Console.WriteLine("Result of Data Set Identity Comparison is {0}", isEqual);
            }
        }
Example #18
 public Serializer()
 {
     _serializer = AvroSerializer.Create <T>();
 }
        //Serializes and deserializes sample data set using Generic Record and Avro Object Container Files
        //Serialized data is not compressed
        //
        //This sample uses a MemoryStream for all operations related to serialization, deserialization and
        //Object Container manipulation, though a FileStream could easily be used instead.
        public void SerializeDeserializeUsingObjectContainersGenericRecord()
        {
            Console.WriteLine("SERIALIZATION USING GENERIC RECORD AND AVRO OBJECT CONTAINER FILES\n");

            //Path for Avro Object Container File
            string path = "AvroSampleGenericRecordNullCodec.avro";

            Console.WriteLine("Defining the Schema and creating Sample Data Set...");

            //Define the schema in JSON
            const string Schema = @"{
                                ""type"":""record"",
                                ""name"":""Microsoft.Hadoop.Avro.Specifications.SensorData"",
                                ""fields"":
                                    [
                                        { 
                                            ""name"":""Location"", 
                                            ""type"":
                                                {
                                                    ""type"":""record"",
                                                    ""name"":""Microsoft.Hadoop.Avro.Specifications.Location"",
                                                    ""fields"":
                                                        [
                                                            { ""name"":""Floor"", ""type"":""int"" },
                                                            { ""name"":""Room"", ""type"":""int"" }
                                                        ]
                                                }
                                        },
                                        { ""name"":""Value"", ""type"":""bytes"" }
                                    ]
                            }";

            //Create a generic serializer based on the schema
            var serializer = AvroSerializer.CreateGeneric(Schema);
            var rootSchema = serializer.WriterSchema as RecordSchema;

            //Create a generic record to represent the data
            var testData = new List <AvroRecord>();

            dynamic expected1 = new AvroRecord(rootSchema);
            dynamic location1 = new AvroRecord(rootSchema.GetField("Location").TypeSchema);

            location1.Floor    = 1;
            location1.Room     = 243;
            expected1.Location = location1;
            expected1.Value    = new byte[] { 1, 2, 3, 4, 5 };
            testData.Add(expected1);

            dynamic expected2 = new AvroRecord(rootSchema);
            dynamic location2 = new AvroRecord(rootSchema.GetField("Location").TypeSchema);

            location2.Floor    = 1;
            location2.Room     = 244;
            expected2.Location = location2;
            expected2.Value    = new byte[] { 6, 7, 8, 9 };
            testData.Add(expected2);

            //Serializing and saving data to file
            //Create a MemoryStream buffer
            using (var buffer = new MemoryStream())
            {
                Console.WriteLine("Serializing Sample Data Set...");

                //Create a SequentialWriter instance for type SensorData which can serialize a sequence of SensorData objects to stream
                //Data will not be compressed (Null compression codec)
                using (var writer = AvroContainer.CreateGenericWriter(Schema, buffer, Codec.Null))
                {
                    using (var streamWriter = new SequentialWriter <object>(writer, 24))
                    {
                        // Serialize the data to stream using the sequential writer
                        testData.ForEach(streamWriter.Write);
                    }
                }

                Console.WriteLine("Saving serialized data to file...");

                //Save stream to file
                if (!WriteFile(buffer, path))
                {
                    Console.WriteLine("Error during file operation. Quitting method");
                    return;
                }
            }

            //Reading and deserializing the data
            //Create a Memory Stream buffer
            using (var buffer = new MemoryStream())
            {
                Console.WriteLine("Reading data from file...");

                //Reading data from Object Container File
                if (!ReadFile(buffer, path))
                {
                    Console.WriteLine("Error during file operation. Quitting method");
                    return;
                }

                Console.WriteLine("Deserializing Sample Data Set...");

                //Prepare the stream for deserializing the data
                buffer.Seek(0, SeekOrigin.Begin);

                //Create a SequentialReader for type SensorData which will deserialize all serialized objects from the given stream
                //It allows iterating over the deserialized objects because it implements the IEnumerable<T> interface
                using (var reader = AvroContainer.CreateGenericReader(buffer))
                {
                    using (var streamReader = new SequentialReader <object>(reader))
                    {
                        var results = streamReader.Objects;

                        Console.WriteLine("Comparing Initial and Deserialized Data Sets...");

                        //Finally, verify the results
                        var pairs = testData.Zip(results, (serialized, deserialized) => new { expected = (dynamic)serialized, actual = (dynamic)deserialized });
                        int count = 1;
                        foreach (var pair in pairs)
                        {
                            bool isEqual = pair.expected.Location.Floor.Equals(pair.actual.Location.Floor);
                            isEqual = isEqual && pair.expected.Location.Room.Equals(pair.actual.Location.Room);
                            isEqual = isEqual && ((byte[])pair.expected.Value).SequenceEqual((byte[])pair.actual.Value);
                            Console.WriteLine("For Pair {0} result of Data Set Identity Comparison is {1}", count, isEqual.ToString());
                            count++;
                        }
                    }
                }
            }

            //Delete the file
            RemoveFile(path);
        }
Example #20
 public void Container_CreateBufferWriterBlockNullArguments()
 {
     Utilities.ShouldThrow <ArgumentNullException>(() => new AvroBufferWriterBlock <ClassOfInt>(null, Codec.Null));
     Utilities.ShouldThrow <ArgumentNullException>(() => new AvroBufferWriterBlock <ClassOfInt>(AvroSerializer.Create <ClassOfInt>(), null));
 }
Example #21
        public async Task process(IConfiguration config)
        {
            // creating Avro schemas from the classes
            AvroSerializerSettings settings = new AvroSerializerSettings();

            settings.Resolver = new AvroPublicMemberContractResolver();
            var endpontSchema = AvroSerializer.Create <Endpoint> (settings).WriterSchema.ToString();

            var messageDSchema = AvroSerializer.Create <MessageDestination> (settings).WriterSchema.ToString();

            Console.WriteLine("Endpoint Schema: " + endpontSchema);
            Console.WriteLine("Message Destination Schema: " + messageDSchema);

            Console.WriteLine("RouterProcess");
            var sConfig = new StreamConfig <StringSerDes, StringSerDes> ();

            sConfig.ApplicationId       = config["SPRING_CLOUD_APPLICATION_GROUP"];
            sConfig.BootstrapServers    = config["SPRING_CLOUD_STREAM_KAFKA_BINDER_BROKERS"];
            sConfig.AutoOffsetReset     = AutoOffsetReset.Earliest;
            sConfig.SchemaRegistryUrl   = config["SchemaRegistryUrl"];
            sConfig.AutoRegisterSchemas = true;
            sConfig.NumStreamThreads    = 1;
            sConfig.Acks = Acks.All;
            //sConfig.Debug = "consumer,cgrp,topic,fetch";
            sConfig.AddConsumerConfig("allow.auto.create.topics", "true");
            sConfig.MaxTaskIdleMs         = 50;
            sConfig.InnerExceptionHandler = (e) => ExceptionHandlerResponse.CONTINUE;

            var      timeout = TimeSpan.FromSeconds(10);
            DateTime dt      = DateTime.Now;

            MessageDestination op = new MessageDestination();

            var           serializer = new SchemaAvroSerDes <OrderProduct>();
            StreamBuilder builder    = new StreamBuilder();

            var table = builder.Table(config["endpoints"],
                                      new Int32SerDes(),
                                      new SchemaAvroSerDes <Endpoint> (),
                                      InMemory <int, Endpoint> .As(config["endpoints-table"]));

            builder.Stream <int, OrderProduct, Int32SerDes, SchemaAvroSerDes <OrderProduct> >(config["spring.cloud.stream.bindings.input.destination"])
            .Map <int, OrderProduct>((k, v) =>
            {
                return(KeyValuePair.Create(v.product_id, v));
            })
            .Peek((k, v) =>
            {
                Console.WriteLine($"Sending message {k}  to endpoint {v.product_id}");
                //compute metric
            })
            .Join(table, (orderProduct, endpoint) => {
                Console.WriteLine("OrderProduct: " + orderProduct?.order_id);
                Console.WriteLine("Endpoint: " + endpoint?.endpoint_id);

                op = new MessageDestination {
                    messageId = orderProduct.order_id,
                    endpoint  = endpoint,
                    payload   = orderProduct
                };
                return(op);
            })
            .Peek((k, v) =>
            {
                Console.WriteLine($"Sending message {k}  to endpoint {v.endpoint.endpoint_url}");
                // create metrics
                if (_messageCounterList != null)
                {
                    var counterMessage = Metrics
                                         .CreateCounter($"router_{v.endpoint.endpoint_id}_processed_total", $"Number of messages sent to {v.endpoint.endpoint_url}");

                    counterMessage.Inc();

                    _messageCounterList.Add(counterMessage);
                }
            })
            .Print(Printed <int, MessageDestination> .ToOut());

            Topology t = builder.Build();

            Console.WriteLine(t.Describe());

            KafkaStream stream = new KafkaStream(t, sConfig);

            bool isRunningState = false;

            stream.StateChanged += (old, @new) => {
                if (@new.Equals(KafkaStream.State.RUNNING))
                {
                    isRunningState = true;
                }
            };

            await stream.StartAsync();

            while (!isRunningState)
            {
                Thread.Sleep(250);
                if (DateTime.Now > dt + timeout)
                {
                    break;
                }
            }

            if (isRunningState)
            {
                Console.WriteLine("Stream running state is " + isRunningState.ToString());
            }
        }
Example #22
        public void Container_SchemaEvolution_RecordWithPromotionalLongAndFloatFields()
        {
            const string WriterSchema =
                @"{
                 ""name"":""ClassWithPromotionalFields"",
                 ""namespace"":""Microsoft.Hadoop.Avro.Tests"",
                 ""type"":""record"",
                 ""fields"":
                           [
                                {""name"":""LongToFloatField"", ""type"":""long""},
                                {""name"":""LongToDoubleField"", ""type"":""long""},
                                {""name"":""FloatToDoubleField"", ""type"":""float""}
                           ]
             }";

            const string ReaderSchema =
                @"{
                 ""name"":""ClassWithPromotionalFields"",
                 ""namespace"":""Microsoft.Hadoop.Avro.Tests"",
                 ""type"":""record"",
                 ""fields"":
                           [
                                {""name"":""LongToFloatField"", ""type"":""float""},
                                {""name"":""LongToDoubleField"", ""type"":""double""},
                                {""name"":""FloatToDoubleField"", ""type"":""double""}
                           ]
             }";

            const int RecordsCount = 100;

            var serializer = AvroSerializer.CreateGeneric(WriterSchema);
            var schema     = serializer.WriterSchema;
            var expected   = new List <AvroRecord>();

            for (int counter = 0; counter < RecordsCount; counter++)
            {
                dynamic avroRecord = new AvroRecord(schema);
                avroRecord.LongToFloatField   = Utilities.GetRandom <long>(false);
                avroRecord.LongToDoubleField  = Utilities.GetRandom <long>(false);
                avroRecord.FloatToDoubleField = Utilities.GetRandom <float>(false);
                expected.Add(avroRecord);
            }

            using (var memoryStream = new MemoryStream())
            {
                var writer = AvroContainer.CreateGenericWriter(WriterSchema, memoryStream, Codec.Null);

                var i = 0;
                while (i < expected.Count)
                {
                    var block = writer.CreateBlockAsync().Result;
                    for (var j = 0; j < 2; j++)
                    {
                        if (i >= expected.Count)
                        {
                            break;
                        }
                        block.Write(expected[i]);
                        i++;
                    }
                    writer.WriteBlockAsync(block).Wait();
                }
                writer.Dispose();

                memoryStream.Seek(0, SeekOrigin.Begin);

                var reader = AvroContainer.CreateGenericReader(ReaderSchema, memoryStream, true, new CodecFactory());
                var actual = new List <AvroRecord>();
                while (reader.MoveNext())
                {
                    actual.AddRange(reader.Current.Objects.Cast <AvroRecord>());
                }

                for (var k = 0; k < expected.Count; ++k)
                {
                    Assert.AreEqual(((dynamic)expected[k]).LongToFloatField, ((dynamic)actual[k]).LongToFloatField);
                    Assert.AreEqual(((dynamic)expected[k]).LongToDoubleField, ((dynamic)actual[k]).LongToDoubleField);
                    Assert.AreEqual(((dynamic)expected[k]).FloatToDoubleField, ((dynamic)actual[k]).FloatToDoubleField);
                }
            }
        }
Example #23
 public void GenericSerializer_CreateWithNullSchema()
 {
     AvroSerializer.CreateGeneric(null);
 }
        /// <summary>
        /// Avro schema
        /// </summary>
        /// <returns>The Avro writer schema as a JSON string.</returns>
        private string GetSchema()
        {
            var serializer = AvroSerializer.Create <AvroNode>();

            return(serializer.WriterSchema.ToString());
        }
Example #25
 internal MSHadoopAvroSerializer()
     : base(new Catalog())
 {
     serializer = AvroSerializer.Create <Catalog>();
 }
Example #26
        static void SerializeAndDeserializeDynamicTest()
        {
            string path = "AvroSampleReflection.avro";
            //SerializeDynamicSampleFile(path);

            var dict = new Dictionary <string, object>();

            dict.Add("1", 3);
            dict.Add("2", new Location {
                Room = 243, Floor = 1
            });

            ChoAvroRecordConfiguration config = null;
            AvroSerializerSettings     sett1  = null;

            using (var w = new ChoAvroWriter(path)
                           .WithAvroSerializer(AvroSerializer.Create <Dictionary <string, object> >(new AvroSerializerSettings()
            {
                Resolver = new ChoAvroPublicMemberContractResolver()
            }))
                           .Configure(c => c.KnownTypes = new List <Type> {
                typeof(Location), typeof(string), typeof(int)
            })
                           //.Configure(c => c.UseAvroSerializer = true)
                           //.Configure(c => c.AvroSerializerSettings.Resolver = new AvroDataContractResolverEx())
                   )
            {
                sett1  = w.Configuration.AvroSerializerSettings;
                config = w.Configuration;

                w.Write(dict);
                w.Write(dict);
                w.Write(dict);
            }
            //var sett = new AvroSerializerSettings();
            //sett.Resolver = new ChoAvroPublicMemberContractResolver(); // false) { Configuration = config };
            //sett.KnownTypes = new List<Type> { typeof(Location), typeof(string), typeof(int) };
            //var avroSerializer = AvroSerializer.Create<Dictionary<string, object>>(sett1);
            //using (var r = new StreamReader(path))
            //{
            //    var rec = avroSerializer.Deserialize(r.BaseStream);
            //    var rec2 = avroSerializer.Deserialize(r.BaseStream);
            //    var rec3 = avroSerializer.Deserialize(r.BaseStream);
            //    Console.WriteLine(rec.Dump());
            //    Console.WriteLine(rec2.Dump());
            //    Console.WriteLine(rec3.Dump());
            //    //var rec4 = avroSerializer.Deserialize(r);
            //}

            StringBuilder json = new StringBuilder();

            using (var r = new ChoAvroReader(path)
                           .Configure(c => c.KnownTypes = new List <Type> {
                typeof(Location), typeof(string), typeof(int)
            })
                           .Configure(c => c.UseAvroSerializer = true)
                           //.Configure(c => c.AvroSerializerSettings = sett1)
                           .Configure(c => c.NestedColumnSeparator = '_')
                   )
            {
                //var dt = r.AsDataTable();
                //Console.WriteLine(dt.Dump());
                //return;
                //foreach (var rec in r)
                //{
                //    Console.WriteLine(rec.Dump());
                //}
                //return;
                using (var w = new ChoJSONWriter(json)
                               .Configure(c => c.TurnOnAutoDiscoverJsonConverters = true)
                       )
                {
                    w.Write(r);
                }
            }
            Console.WriteLine(json.ToString());
        }
        private void CompareSchemas()
        {
            var eCodeGenEntity = new InheritedEntityAvro();

            // Extra test for schema compare, save schema to compare later
            string avroSchema = eCodeGenEntity.Schema.ToString();

            // Extra test for schema compare, save schema to compare later
            var serializer = new AvroSerializer(typeof(InheritedEntity));

            string avroMsftSchema = serializer.Schema;

            Console.Write("\nThe Avro MSFT schema and the Avro Apache schema ");
            if (String.IsNullOrEmpty(avroSchema) || (avroSchema != avroMsftSchema))
            {
                Console.WriteLine("do NOT MATCH !!!\n");
                Console.WriteLine("Avro Apache schema: {0}", avroSchema);
                Console.WriteLine("Avro MSFT schema  : {0}", avroMsftSchema);
            }
            else
                Console.WriteLine("match.\n");
        }
        public void SequentialGenericWritingReading_NestedRecord()
        {
            const string StringSchema = @"{
                                        ""type"":""record"",
                                        ""name"":""Microsoft.Hadoop.Avro.Tests.NestedClass"",
                                        ""fields"":[
                                            {
                                                ""name"":""ClassOfIntReference"",
                                                ""type"":{
                                                        ""type"":""record"",
                                                        ""name"":""Microsoft.Hadoop.Avro.Tests.ClassOfInt"",
                                                        ""fields"":[
                                                            {
                                                                 ""name"":""PrimitiveInt"",
                                                                 ""type"":""int""
                                                            }
                                                        ]
                                                    }
                                            },
                                            {
                                                ""name"":""PrimitiveInt"",""type"":""int""
                                            }
                                        ]
                                    }";

            using (var stream = new MemoryStream())
            {
                var serializer = AvroSerializer.CreateGeneric(StringSchema);
                using (var streamWriter = AvroContainer.CreateGenericWriter(StringSchema, stream, Codec.Null))
                {
                    using (var writer = new SequentialWriter <object>(streamWriter, 24))
                    {
                        var expected = new List <AvroRecord>();
                        var random   = new Random(83);
                        for (int i = 0; i < 10; i++)
                        {
                            dynamic record = new AvroRecord(serializer.WriterSchema);
                            record.PrimitiveInt        = random.Next();
                            record.ClassOfIntReference =
                                new AvroRecord((serializer.WriterSchema as RecordSchema).GetField("ClassOfIntReference").TypeSchema);
                            record.ClassOfIntReference.PrimitiveInt = random.Next();
                            expected.Add(record);
                        }

                        expected.ForEach(writer.Write);
                        writer.Flush();

                        stream.Seek(0, SeekOrigin.Begin);

                        var streamReader = AvroContainer.CreateReader <NestedClass>(stream, true, this.dataContractSettings, new CodecFactory());
                        using (var reader = new SequentialReader <NestedClass>(streamReader))
                        {
                            var j = 0;
                            foreach (var avroRecord in reader.Objects)
                            {
                                Assert.Equal(expected[j]["PrimitiveInt"], avroRecord.PrimitiveInt);
                                Assert.Equal(((dynamic)expected[j++]["ClassOfIntReference"])["PrimitiveInt"], avroRecord.ClassOfIntReference.PrimitiveInt);
                            }
                        }
                    }
                }
            }
        }
 public void GenericSerializer_CreateWithNullSchema()
 {
     Assert.ThrowsException <ArgumentNullException>(() => AvroSerializer.CreateGeneric(null));
 }