/// <summary>
/// Serializes a sample <see cref="ObjectEntity"/> to Avro binary and sends the
/// payload to an Event Hub.
/// </summary>
static async Task Main(string[] args)
{
    var objectEntity = new ObjectEntity
    {
        NullableIntField = 10,
        StringField = "teststring",
        ListField = new List<SubObjectEntity>
        {
            new SubObjectEntity { IntField = 1, DoubleField = 1.1 }
        }
    };

    Schema schema = Schema.Parse(SchemaClass.SCHEMA_OBJECT_ENTITY);
    var writer = new SpecificDatumWriter<ObjectEntity>(schema);

    byte[] finalbytes;
    using (var memStream = new MemoryStream())
    {
        writer.Write(objectEntity, new Avro.IO.BinaryEncoder(memStream));
        // MemoryStream.ToArray copies the whole buffer regardless of Position,
        // so the original Seek(0, Begin) was unnecessary.
        finalbytes = memStream.ToArray();
    }

    var eventToSend = new EventData(finalbytes);
    EventHubClient eventHubClient = EventHubClient.CreateFromConnectionString(
        "<eventhubconnection>", "<eventhubname>");

    // await instead of .Wait(): avoids blocking a thread-pool thread and avoids
    // wrapping failures in AggregateException.
    await eventHubClient.SendAsync(eventToSend);
}
/// <summary>
/// Serializes <paramref name="value"/> with a <c>SpecificDatumWriter</c> using schema
/// <paramref name="ws"/> and asserts the produced bytes match <paramref name="expected"/>
/// exactly (length and content).
/// </summary>
private static void checkAlternateSerializers <T>(byte[] expected, T value, Schema ws)
{
    // MemoryStream was previously leaked; dispose it deterministically.
    using (var ms = new MemoryStream())
    {
        var writer = new SpecificDatumWriter<T>(ws);
        var e = new BinaryEncoder(ms);
        writer.Write(value, e);
        var output = ms.ToArray();

        Assert.AreEqual(expected.Length, output.Length);
        Assert.True(expected.SequenceEqual(output));
    }
}
/// <summary>
/// Serializes <paramref name="message"/> to Avro binary and returns the bytes
/// reinterpreted as <c>sbyte[]</c>. The <c>byte[] -> object -> sbyte[]</c> double cast
/// is a CLR array-reinterpretation trick; no copy is made by the cast itself.
/// </summary>
private sbyte[] Write <T>(T message, SpecificDatumWriter <T> writer)
{
    // MemoryStream was previously leaked; also, ToArray copies the whole buffer,
    // so the Flush/Position-reset before it were dead code.
    using (var ms = new MemoryStream())
    {
        writer.Write(message, new BinaryEncoder(ms));
        return (sbyte[])(object)ms.ToArray();
    }
}
/// <summary>
/// Write datum to an Avro file container (an "Avro file").
/// This is a self-contained file which includes the schema.
/// </summary>
private static void SerializeToAvroFileContainer(string file, TransferRequest value)
{
    using (var outputStream = new FileStream(file, FileMode.Create))
    {
        // Avro.Specific handles single datums (no container); SpecificDatumWriter
        // serializes the generated classes.
        var writer = new SpecificDatumWriter<TransferRequest>(value.Schema);

        // Avro.File provides the container format; a codec (compression and/or
        // checksums) could optionally be supplied here as well.
        using (var fileWriter = Avro.File.DataFileWriter<TransferRequest>.OpenWriter(writer, outputStream))
        {
            fileWriter.Append(value);
        }
    }
}
/// <summary>
/// Serializes <paramref name="record"/> repeatedly (<c>_numberOfRecordsInAvro</c> times)
/// into one contiguous Avro binary payload and returns the resulting bytes.
/// </summary>
private byte[] SpecificRecordsToAvro <T>(T record) where T : ISpecificRecord
{
    using (var buffer = new MemoryStream())
    {
        var datumWriter = new SpecificDatumWriter<T>(record.Schema);
        var encoder = new BinaryEncoder(buffer);

        // Write the same record N times back-to-back into the stream.
        var remaining = _numberOfRecordsInAvro;
        while (remaining-- > 0)
        {
            datumWriter.Write(record, encoder);
        }

        encoder.Flush();
        return buffer.ToArray();
    }
}
/// <summary>
/// Round-trips <paramref name="datum"/> through an Avro file container (write to a temp
/// file, read everything back) and reports the result on the console: a green check on
/// success, a red X on failure. "Unable to find type" failures are treated as expected
/// and not reported further.
/// </summary>
private static void WriteAndRead <T>(T datum) where T : ISpecificRecord
{
    Console.Write($"{typeof(T).Name}");
    try
    {
        var tempFile = Path.GetTempFileName();

        var writer = new SpecificDatumWriter<T>(datum.Schema);
        using (var dfw = DataFileWriter<T>.OpenWriter(writer, tempFile))
        {
            dfw.Append(datum);
        }

        using (var dfr = DataFileReader<T>.OpenReader(tempFile, datum.Schema))
        {
            while (dfr.HasNext())
            {
                var readDatum = dfr.Next();
            }
        }

        PrintStatus<T>(ConsoleColor.Green, "✓");
    }
    catch (Exception ex)
    {
        PrintStatus<T>(ConsoleColor.Red, "X");

        // "Unable to find type " failures are expected for some generated types;
        // anything else is surfaced for investigation.
        if (!ex.Message.Contains("Unable to find type "))
        {
            Console.WriteLine($"Unexpected Exception: {ex.Message}");
        }
    }
}

// Overwrites the in-progress line with a colored status marker. The previous console
// color is restored in a finally block so a console failure cannot leave it changed
// (the original duplicated this sequence inline in both branches without that guarantee).
private static void PrintStatus <T>(ConsoleColor color, string marker)
{
    var prevColor = Console.ForegroundColor;
    try
    {
        Console.ForegroundColor = color;
        Console.SetCursorPosition(0, Console.CursorTop);
        Console.WriteLine($"{marker} {typeof(T).Name}");
    }
    finally
    {
        Console.ForegroundColor = prevColor;
    }
}
/// <summary>
/// Demo: writes three Blog records to an Avro container file ("./blog.avro"), reads
/// them back as POCOs, and prints each round-tripped record to the console.
/// </summary>
static void Main(string[] args)
{
    var schema = Schema.Parse(AvroSerializer.Create <Blog>().WriterSchema.ToString());

    var inputs = new List <Blog>
    {
        new Blog { BlogId = 101, Name = "Tanaka", Author = "One" },
        new Blog { BlogId = 201, Name = "Sato", Author = "Two" },
        new Blog { BlogId = 301, Name = "Suzuki", Author = "Three" },
    };

    // Write every record into a single Avro container file.
    var datumWriter = new SpecificDatumWriter <Blog>(schema);
    using (var fileWriter = DataFileWriter <Blog> .OpenWriter(datumWriter, "./blog.avro"))
    {
        inputs.ForEach(fileWriter.Append);
    }

    // Read them all back from the same file.
    var outputs = new List <Blog>();
    using (var fileReader = DataFileReader <Blog> .OpenReader("./blog.avro"))
    {
        while (fileReader.HasNext())
        {
            outputs.Add(fileReader.Next());
        }
    }

    foreach (var b in outputs)
    {
        Console.WriteLine("----- Avro → POCO 変換後 -----");
        Console.WriteLine($"{b.BlogId} {b.Name} {b.Author}");
    }
}
/// <summary>
/// Round-trips a <c>MessageTimestampKind</c> record (timestamp-micros / timestamp-millis
/// logical types) through Avro binary serialization and asserts it deserializes.
/// </summary>
public void LogicalTimeStamp()
{
    var simple = typeof(MessageTimestampKind).GetSchema();
    _output.WriteLine(simple);

    var schema = (RecordSchema)Schema.Parse(simple);
    var reader = new SpecificDatumReader<MessageTimestampKind>(schema, schema);
    var writer = new SpecificDatumWriter<MessageTimestampKind>(schema);

    // Fixed timestamps instead of DateTime.Now: the test is now deterministic and
    // repeatable across runs/machines.
    var baseTime = new DateTime(2023, 1, 15, 12, 30, 45, DateTimeKind.Utc);
    var msgBytes = Write(new MessageTimestampKind
    {
        Schema = schema,
        StampMicros = baseTime,
        StampMillis = baseTime.AddDays(20)
    }, writer);

    using var stream = new MemoryStream((byte[])(object)msgBytes);
    var msg = Read(stream, reader);
    Assert.NotNull(msg);
}
/// <summary>
/// Round-trips a <c>MessageTimeKind</c> record (time-micros / time-millis logical types)
/// through Avro binary serialization and asserts it deserializes.
/// </summary>
public void LogicalTime()
{
    var schemaJson = typeof(MessageTimeKind).GetSchema();
    _output.WriteLine(schemaJson);

    var recordSchema = (RecordSchema)Schema.Parse(schemaJson);
    var reader = new SpecificDatumReader <MessageTimeKind>(recordSchema, recordSchema);
    var writer = new SpecificDatumWriter <MessageTimeKind>(recordSchema);

    var message = new MessageTimeKind
    {
        Schema = recordSchema,
        TimeMicros = TimeSpan.FromSeconds(60),
        TimeMillis = TimeSpan.FromSeconds(300)
    };

    var payload = Write(message, writer);

    using var stream = new MemoryStream((byte[])(object)payload);
    var roundTripped = Read(stream, reader);
    Assert.NotNull(roundTripped);
}
/// <summary>
/// Encodes an <c>ISpecificRecord</c> in Confluent wire format: one magic byte, the
/// 4-byte big-endian schema id, then the Avro binary body (matches io.confluent.kafka's
/// Java serializer).
/// </summary>
/// <exception cref="InvalidDataException">
/// Thrown when <paramref name="obj"/> is not an ISpecificRecord, or when its schema is
/// rejected by the registry.
/// </exception>
protected override byte[] DoEncode(string topic, object obj)
{
    if (!(obj is ISpecificRecord record))
    {
        // Bug fix: typeof(object) always rendered "System.Object" — report the
        // actual runtime type instead (and fix the "to user" typo).
        throw new InvalidDataException(string.Format("{0} should be instance of ISpecificRecord to use Avro", obj?.GetType()));
    }

    Schema schema = record.Schema;
    int schemaId = schemaCache.RegisterSchema(schema.ToString(), topic + "-value");
    if (!IsValidSchemaId(schemaId))
    {
        // Same typeof(object) bug fixed here; also corrects "incompaible".
        throw new InvalidDataException(string.Format("Avro schema of {0} is incompatible with latest schema", obj.GetType()));
    }

    var w = new SpecificDatumWriter<object>(schema);
    using (MemoryStream stream = new MemoryStream())
    {
        // Prepend body with magic byte and the schema id in network (big-endian)
        // byte order, same as the Java serializer.
        stream.WriteByte(MAGIC_BYTE);
        byte[] schemaIdBytes = BitConverter.GetBytes(schemaId);
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(schemaIdBytes);
        }
        stream.Write(schemaIdBytes, 0, schemaIdBytes.Length);

        w.Write(obj, new BinaryEncoder(stream));

        // ToArray copies the full buffer and avoids the Stream.Read short-read
        // pitfall (Read is not guaranteed to fill the array in one call).
        return stream.ToArray();
    }
}
/// <summary>
/// Round-trips a <c>MessageDateKind</c> record (date logical type plus a decimal field)
/// through Avro binary serialization and checks the decimal survives intact.
/// </summary>
public void LogicalDate()
{
    var simple = typeof(MessageDateKind).GetSchema();
    _output.WriteLine(simple);

    var schema = (RecordSchema)Schema.Parse(simple);
    var reader = new SpecificDatumReader<MessageDateKind>(schema, schema);
    var writer = new SpecificDatumWriter<MessageDateKind>(schema);

    // Fixed date instead of DateTime.Now: the test is deterministic across runs.
    var msgBytes = Write(new MessageDateKind
    {
        Schema = schema,
        CreatedTime = new DateTime(2023, 6, 10),
        DayOfWeek = "Saturday",
        Size = new AvroDecimal(102.65M)
    }, writer);

    using var stream = new MemoryStream((byte[])(object)msgBytes);
    var msg = Read(stream, reader);
    Assert.NotNull(msg);
    Assert.True(msg.Size == 102.65M);
}
/// <summary>
/// Serializes an <c>ISpecificRecord</c> to a byte payload: a 4-byte function id taken
/// from field 0 of the record (platform byte order via BitConverter), followed by the
/// Avro binary body. On any failure the exception message is written to the console
/// and null is returned (best-effort contract).
/// </summary>
public byte[] Serialize(object obj)
{
    try
    {
        var record = (ISpecificRecord)obj;
        writer = new SpecificDatumWriter <ISpecificRecord>(record.Schema);

        using (var ms = new MemoryStream())
        {
            // Prefix: the record's first field, written as a 4-byte function id.
            var functionId = (int)record.Get(0);
            ms.Write(BitConverter.GetBytes(functionId), 0, 4);

            writer.Write(record, new BinaryEncoder(ms));
            return ms.ToArray();
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        return null;
    }
}
/// <summary>
/// Creates the shared <c>_dataFileWriter</c> for <paramref name="outputFilePath"/> using
/// Deflate compression, either overwriting or appending per <paramref name="mode"/>.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unrecognized mode.</exception>
private static void BuildDataFileWriter(string outputFilePath, Mode mode)
{
    var datumWriter = new SpecificDatumWriter <T>(_avroSchema);
    Codec codec = Codec.CreateCodec(Codec.Type.Deflate);

    // Map the mode to a FileMode once; the original duplicated the entire
    // OpenWriter call in both switch arms.
    FileMode fileMode = mode switch
    {
        Mode.Overwrite => FileMode.Create,
        Mode.Append => FileMode.Append,
        _ => throw new ArgumentOutOfRangeException(nameof(mode)),
    };

    _dataFileWriter = (DataFileWriter <T>)DataFileWriter <T> .OpenWriter(
        datumWriter, new FileStream(outputFilePath, fileMode), codec);
}
/// <summary>
/// Demo: performs an in-memory Avro round trip of a User record and prints the name
/// that was read back.
/// </summary>
public static void test ()
{
    var writer = new SpecificDatumWriter<User> (User._SCHEMA);

    var input = new User
    {
        name = "mm",
        favorite_color = "red",
        favorite_number = 11
    };

    // MemoryStream was previously leaked; dispose it deterministically.
    using (var stream = new MemoryStream ())
    {
        writer.Write (input, new BinaryEncoder (stream));
        // Rewind so the reader consumes the bytes just written.
        stream.Seek (0, SeekOrigin.Begin);

        var reader = new SpecificDatumReader<User> (User._SCHEMA, User._SCHEMA);
        User output = reader.Read (null, new BinaryDecoder (stream));
        Console.WriteLine (output.name);
    }
}