/// <summary>
/// Verifies that Append writes serialized users after the batch already
/// persisted by Write, preserving order in the output text file.
/// </summary>
public void AppendFile()
{
    // First batch, written with the default text serializer.
    ListSerializable<User> initialBatch = new UserList();
    initialBatch.Add(new User("Toto", "Titi"));
    initialBatch.Add(new User("Tata", "Roro"));

    IWriter<User> writer = new DefaultWriter<User>(new UserBasicSerializer()); // take default serializer
    writer.Write<UserList>(initialBatch, TxtFile);

    // Second batch, appended under the "users" section of the same file.
    ListSerializable<User> appendedBatch = new UserList();
    appendedBatch.Add(new User("lala", "lala"));
    appendedBatch.Add(new User("test", "test"));
    writer.Append<UserList>(appendedBatch, TxtFile, "users");

    // The file must contain the first batch followed by the appended one.
    StringList lines = FileReader.ReadLines(TxtFile);
    Assert.AreEqual("Toto Titi", lines[0]);
    Assert.AreEqual("Tata Roro", lines[1]);
    Assert.AreEqual("lala lala", lines[2]);
    Assert.AreEqual("test test", lines[3]);
}
/// <summary>
/// Encodes the given CloudEvent into an Avro structured-mode message and
/// reports the corresponding content type via <paramref name="contentType"/>.
/// </summary>
public override byte[] EncodeStructuredModeMessage(CloudEvent cloudEvent, out ContentType contentType)
{
    Validation.CheckCloudEventArgument(cloudEvent, nameof(cloudEvent));
    contentType = new ContentType(CloudEvent.MediaType + MediaTypeSuffix);

    // The Avro writer is expected to detect data types that can't be
    // represented in the schema.
    var avroRecord = new GenericRecord(avroSchema);
    avroRecord.Add(DataName, cloudEvent.Data);

    var attributeMap = new Dictionary<string, object>
    {
        [CloudEventsSpecVersion.SpecVersionAttribute.Name] = cloudEvent.SpecVersion.VersionId
    };
    // TODO: Create a mapping method in each direction, to have this logic more clearly separated.
    foreach (var pair in cloudEvent.GetPopulatedAttributes())
    {
        var attribute = pair.Key;
        var value = pair.Value;
        // Types the schema can hold pass through untouched; everything else
        // is converted via the attribute's canonical string formatting.
        bool passThrough = value is bool || value is int || value is byte[] || value is string;
        attributeMap[attribute.Name] = passThrough ? value : attribute.Format(value);
    }
    avroRecord.Add(AttributeName, attributeMap);

    var output = new MemoryStream();
    avroWriter.Write(avroRecord, new BinaryEncoder(output));
    return output.ToArray();
}
/// <summary>
/// Encodes the given CloudEvent into an Avro structured-mode payload and
/// reports the corresponding content type via <paramref name="contentType"/>.
/// </summary>
public byte[] EncodeStructuredEvent(CloudEvent cloudEvent, out ContentType contentType)
{
    contentType = new ContentType(CloudEvent.MediaType + AvroEventFormatter.MediaTypeSuffix);

    var avroRecord = new GenericRecord(avroSchema);
    avroRecord.Add(DataName, SerializeData(cloudEvent.Data));

    var attributeMap = new Dictionary<string, object>
    {
        [CloudEventsSpecVersion.SpecVersionAttribute.Name] = cloudEvent.SpecVersion.VersionId
    };
    // TODO: Create a mapping method in each direction, to have this logic more clearly separated.
    foreach (var pair in cloudEvent.GetPopulatedAttributes())
    {
        var attribute = pair.Key;
        var value = pair.Value;
        // Types the schema can hold pass through untouched; everything else
        // is converted via the attribute's canonical string formatting.
        bool passThrough = value is bool || value is int || value is byte[] || value is string;
        attributeMap[attribute.Name] = passThrough ? value : attribute.Format(value);
    }
    avroRecord.Add("attribute", attributeMap);

    var output = new MemoryStream();
    avroWriter.Write(avroRecord, new BinaryEncoder(output));
    // Copy out exactly the written bytes without an extra intermediate array.
    return new Span<byte>(output.GetBuffer(), 0, (int)output.Length).ToArray();
}
/// <summary>
/// Example: builds an Avro <c>GenericRecord</c> from a pre-defined schema file,
/// serializes it into an Avro buffer, and sends the resulting bytes over UDP.
/// </summary>
public static void sender()
{
    // Load the record schema from its .avsc file. example.avsc looks like:
    //
    //   {"namespace": "example.avro",
    //    "type": "record",
    //    "name": "User",
    //    "fields": [
    //      {"name": "name", "type": "string"},
    //      {"name": "favorite_number", "type": ["int", "null"]},
    //      {"name": "favorite_color", "type": ["string", "null"]}
    //    ]}
    var schema = RecordSchema.Parse(File.ReadAllText(@"C:\Users\user\src\example.avsc")) as RecordSchema;

    // Fill out a record matching the schema above.
    // NOTE(review): "read" looks like a typo for "red" — example data only.
    var record = new GenericRecord(schema);
    record.Add("name", "myExample");
    record.Add("favorite_number", 999);
    record.Add("favorite_color", "read");

    // Serialization pipeline: writer -> encoder -> buffer stream.
    ByteBufferOutputStream buffer = new ByteBufferOutputStream();
    Avro.IO.Encoder encoder = new BinaryEncoder(buffer);
    var writer = new DefaultWriter(schema);
    writer.Write<GenericRecord>(record, encoder);
    buffer.Flush();

    // Deliver each buffered chunk to the UDP endpoint. A single chunk is
    // expected for a record this small, but iterate defensively.
    UdpClient udpClient = new UdpClient(0);
    udpClient.Connect("my_udp_end_point.com", 9999);
    List<MemoryStream> chunks = buffer.GetBufferList();
    foreach (MemoryStream chunk in chunks)
    {
        byte[] payload = chunk.ToArray();
        udpClient.Send(payload, payload.Length);
    }
    udpClient.Close();
}
/// <summary>
/// Encodes the given CloudEvent into an Avro structured-mode payload and
/// reports the corresponding content type via <paramref name="contentType"/>.
/// Known type attributes are stringified into the attribute map; the data
/// attribute (v1.0 only) is stored in the record's "data" field.
/// </summary>
public byte[] EncodeStructuredEvent(CloudEvent cloudEvent, out ContentType contentType)
{
    contentType = new ContentType(CloudEvent.MediaType + AvroEventFormatter.MediaTypeSuffix);
    GenericRecord record = new GenericRecord(avroSchema);
    var recordAttributes = new Dictionary<string, object>();
    var attributes = cloudEvent.GetAttributes();
    // NOTE: branch order matters below — the type checks (ContentType/Uri/
    // DateTime) run BEFORE the data-attribute check, so a data value of one of
    // those types would land in the attribute map rather than "data".
    foreach (var keyValuePair in attributes)
    {
        // Null-valued attributes are dropped entirely.
        if (keyValuePair.Value == null)
        {
            continue;
        }
        if (keyValuePair.Value is ContentType && !string.IsNullOrEmpty(((ContentType)keyValuePair.Value).MediaType))
        {
            // Content type is serialized as its full header string.
            recordAttributes[keyValuePair.Key] = ((ContentType)keyValuePair.Value).ToString();
        }
        else if (keyValuePair.Value is Uri)
        {
            recordAttributes[keyValuePair.Key] = ((Uri)keyValuePair.Value).ToString();
        }
        else if (keyValuePair.Value is DateTime)
        {
            // "o" = ISO 8601 round-trip format.
            recordAttributes[keyValuePair.Key] = ((DateTime)keyValuePair.Value).ToString("o");
        }
        else if (cloudEvent.SpecVersion == CloudEventsSpecVersion.V1_0 && keyValuePair.Key.Equals(CloudEventAttributes.DataAttributeName(cloudEvent.SpecVersion)))
        {
            if (keyValuePair.Value is Stream)
            {
                // Reads the whole stream as bytes. NOTE(review): assumes the
                // stream is seekable and positioned at 0 — confirm with callers.
                // The BinaryReader also disposes the source stream on exit.
                using (var sr = new BinaryReader((Stream)keyValuePair.Value))
                {
                    record.Add("data", sr.ReadBytes((int)sr.BaseStream.Length));
                }
            }
            else
            {
                record.Add("data", keyValuePair.Value);
            }
        }
        else
        {
            // Everything else is passed through unchanged.
            recordAttributes[keyValuePair.Key] = keyValuePair.Value;
        }
    }
    record.Add("attribute", recordAttributes);
    MemoryStream memStream = new MemoryStream();
    BinaryEncoder encoder = new BinaryEncoder(memStream);
    avroWriter.Write(record, encoder);
    // Copy out exactly the written bytes from the stream's backing buffer.
    return (new Span<byte>(memStream.GetBuffer(), 0, (int)memStream.Length).ToArray());
}
/// <summary>
/// Serializes <paramref name="payload"/> to Avro binary using this instance's
/// <c>Schema</c>.
/// </summary>
/// <param name="payload">Record to serialize; must conform to <c>Schema</c>.</param>
/// <returns>The Avro-encoded bytes of the record.</returns>
public byte[] Serialize(GenericRecord payload)
{
    var buffer = new ByteBufferOutputStream();
    var encoder = new BinaryEncoder(buffer);
    var writer = new DefaultWriter(Schema);
    writer.Write<GenericRecord>(payload, encoder);
    buffer.Flush();

    // BUG FIX: ByteBufferOutputStream splits its output across multiple
    // fixed-size buffers; the previous code returned only streams[0], silently
    // truncating any payload larger than one buffer. Concatenate all buffers.
    var buffers = buffer.GetBufferList();
    if (buffers.Count == 1)
    {
        return buffers[0].ToArray();
    }
    using (var combined = new MemoryStream())
    {
        foreach (var segment in buffers)
        {
            // WriteTo copies the full contents regardless of Position.
            segment.WriteTo(combined);
        }
        return combined.ToArray();
    }
}
/// <summary>
/// Demo: builds a UserLoginEvent via GenericRecord, round-trips it through
/// Avro binary serialization, and prints both copies to the console.
/// </summary>
static void Main(string[] args)
{
    var loginTime = DateTime.UtcNow;

    // Create a UserLoginEvent using GenericRecord.
    var writtenEvent = new GenericRecord(UserLoginEventSchema);
    writtenEvent.Add("timestamp", DateTimeToAvroTimestampMillis(loginTime));
    writtenEvent.Add("userID", "blachniet");
    writtenEvent.Add("wasSuccessful", true);

    GenericRecord readEvent;
    using (var stream = new MemoryStream())
    {
        // Serialize the record into the stream.
        var encoder = new BinaryEncoder(stream);
        new DefaultWriter(UserLoginEventSchema).Write(writtenEvent, encoder);
        encoder.Flush();

        // Rewind, then deserialize with the same schema on both sides.
        stream.Position = 0;
        var decoder = new BinaryDecoder(stream);
        var reader = new DefaultReader(UserLoginEventSchema, UserLoginEventSchema);
        readEvent = reader.Read<GenericRecord>(null, decoder);
    }

    Console.WriteLine($@" Original Event: timestamp : {writtenEvent["timestamp"]} userID : {writtenEvent["userID"]} wasSuccessful : {writtenEvent["wasSuccessful"]} Deserialized Event: timestamp : {readEvent["timestamp"]} userID : {readEvent["userID"]} wasSuccessful : {readEvent["wasSuccessful"]} Press 'Enter' to exit. ".TrimStart());
    Console.ReadLine();
}
/// <summary>
/// Exercises Append against a target file name that does not match the
/// writer's format ("invalid.json").
/// </summary>
public void AppendFileWithInvalidFile()
{
    // Write an initial batch of users to the text file.
    ListSerializable<User> firstBatch = new UserList();
    firstBatch.Add(new User("Toto", "Titi"));
    firstBatch.Add(new User("Tata", "Roro"));

    IWriter<User> writer = new DefaultWriter<User>(new UserBasicSerializer()); // take default serializer
    writer.Write<UserList>(firstBatch, TxtFile);

    // Then attempt to append a second batch to the invalid target.
    // NOTE(review): no Assert here — presumably the expected failure is
    // declared via a test attribute not visible in this chunk; confirm.
    ListSerializable<User> secondBatch = new UserList();
    secondBatch.Add(new User("lala", "lala"));
    secondBatch.Add(new User("test", "test"));
    writer.Append<UserList>(secondBatch, "invalid.json", "users");
}
/// <summary>
/// Write a record using the v1 schema which doesn't have the 'fieldB' field.
/// Read that data using the v2 schema, which has 'fieldB' with 'bravo' as the
/// default value.
/// </summary>
static void WriteV1ReadV2()
{
    // Build the entry with only the v1 field populated.
    var writtenEntry = new GenericRecord(EntryV1Schema);
    writtenEntry.Add("fieldA", "Hello");

    GenericRecord readEntry;
    using (var stream = new MemoryStream())
    {
        // Serialize with the v1 schema...
        new DefaultWriter(EntryV1Schema).Write(writtenEntry, new BinaryEncoder(stream));
        stream.Position = 0;

        // ...then deserialize with v1 as writer schema and v2 as reader schema,
        // letting the reader supply the v2 default for 'fieldB'.
        var reader = new DefaultReader(EntryV1Schema, EntryV2Schema);
        readEntry = reader.Read<GenericRecord>(null, new BinaryDecoder(stream));
    }

    Console.WriteLine($@" V1 --> V2 --------------------------------------- Original Entry (v1): fieldA: {writtenEntry["fieldA"]} fieldB: <does not exist in v1> Deserialized Entry (v2): fieldA: {readEntry["fieldA"]} fieldB: {readEntry["fieldB"]} ".TrimStart());
}
/// <summary>
/// Write a record using the v2 schema. Read that data using the v1 schema,
/// which doesn't have the 'fieldB' field.
/// </summary>
static void WriteV2ReadV1()
{
    // Build the entry with both v2 fields populated.
    var writtenEntry = new GenericRecord(EntryV2Schema);
    writtenEntry.Add("fieldA", "Hello");
    writtenEntry.Add("fieldB", "World");

    GenericRecord readEntry;
    using (var stream = new MemoryStream())
    {
        // Serialize with the v2 schema...
        new DefaultWriter(EntryV2Schema).Write(writtenEntry, new BinaryEncoder(stream));
        stream.Position = 0;

        // ...then deserialize with v2 as writer schema and v1 as reader schema,
        // which drops 'fieldB'.
        var reader = new DefaultReader(EntryV2Schema, EntryV1Schema);
        readEntry = reader.Read<GenericRecord>(null, new BinaryDecoder(stream));
    }

    Console.WriteLine($@" V2 --> V1 --------------------------------------- Original Entry (v2): fieldA: {writtenEntry["fieldA"]} fieldB: {writtenEntry["fieldB"]} Deserialized Entry (v1): fieldA: {readEntry["fieldA"]} fieldB: <does not exist in v1> ".TrimStart());
}
/// <summary>
/// Round-trips a user list through the default writer and default generic
/// reader (CSV), then verifies every field survives the trip.
/// </summary>
public void GenericReaderDefault()
{
    ListSerializable<User> written = new UserList();
    written.Add(new User("Toto", "Titi"));
    written.Add(new User("Tata", "Roro"));

    IWriter<User> writer = new DefaultWriter<User>();
    writer.Write<UserList>(written, CsvFile);

    IGenericReader<User> reader = new DefaultReader<User>();
    ListSerializable<User> reloaded = reader.read<UserList>(CsvFile);

    Assert.IsNotNull(reloaded);
    // Field-by-field comparison: the reloaded list must mirror the input.
    for (int i = 0; i < 2; i++)
    {
        Assert.AreEqual(written[i].Firstname, reloaded[i].Firstname);
        Assert.AreEqual(written[i].Name, reloaded[i].Name);
    }
}