/// <summary>
/// Converts a JSON value into the CLR representation matching the given Avro schema:
/// primitives map to string/long/float/int/double/bool; JSON objects map to either a
/// Dictionary (map schema) or a GenericRecord (record schema); arrays map to object[].
/// The target schema may be a union containing the target type.
/// </summary>
/// <param name="value">The JSON value to convert.</param>
/// <param name="schema">The target Avro schema, possibly a union.</param>
/// <returns>The converted value, or null when the value/schema combination is unsupported.</returns>
private static object GetValue(IJsonValue value, Schema schema)
{
    switch (value)
    {
        case JsonString s when IsTypeOrUnionWith(schema, Schema.Type.String):
            return s.Value;
        // The schema tag, not the JSON token, decides the numeric width; the order of
        // these cases (Long, Float, Int, Double) is preserved from the original.
        case JsonNumber n when IsTypeOrUnionWith(schema, Schema.Type.Long):
            return (long)n.Value;
        case JsonNumber n when IsTypeOrUnionWith(schema, Schema.Type.Float):
            return (float)n.Value;
        case JsonNumber n when IsTypeOrUnionWith(schema, Schema.Type.Int):
            return (int)n.Value;
        case JsonNumber n when IsTypeOrUnionWith(schema, Schema.Type.Double):
            return n.Value;
        case JsonBoolean b when IsTypeOrUnionWith(schema, Schema.Type.Boolean):
            return b.Value;
        case JsonObject o when IsTypeOrUnionWith(schema, Schema.Type.Map):
        {
            var map = ResolveSchema<MapSchema>(schema, Schema.Type.Map);
            var mapResult = new Dictionary<string, object>();
            foreach (var (key, childValue) in o)
            {
                mapResult.Add(key, GetValue(childValue, map?.ValueSchema));
            }

            return mapResult;
        }

        case JsonObject o when IsTypeOrUnionWith(schema, Schema.Type.Record):
        {
            var record = ResolveSchema<RecordSchema>(schema, Schema.Type.Record);
            if (record == null)
            {
                // Guard: a union may match by tag yet yield no record schema; the
                // original code would have constructed GenericRecord(null) and crashed.
                return null;
            }

            var result = new GenericRecord(record);
            foreach (var (key, childValue) in o)
            {
                // JSON properties without a corresponding record field are skipped.
                if (record.TryGetField(key, out var field))
                {
                    result.Add(key, GetValue(childValue, field.Schema));
                }
            }

            return result;
        }

        case JsonArray a when IsTypeOrUnionWith(schema, Schema.Type.Array):
        {
            var arraySchema = ResolveSchema<ArraySchema>(schema, Schema.Type.Array);
            var result = new List<object>();
            foreach (var item in a)
            {
                result.Add(GetValue(item, arraySchema?.ItemSchema));
            }

            return result.ToArray();
        }
    }

    return null;
}

/// <summary>
/// Resolves the concrete schema of the requested tag, unwrapping a union schema when
/// necessary; returns null when no matching schema is present.
/// </summary>
private static TSchema ResolveSchema<TSchema>(Schema schema, Schema.Type tag)
    where TSchema : Schema
{
    if (schema is UnionSchema union)
    {
        return union.Schemas.FirstOrDefault(x => x.Tag == tag) as TSchema;
    }

    return schema as TSchema;
}
// Interop round-trip: records written with the Apache Avro container writer must be
// readable as typed objects through the Microsoft Avro SequentialReader.
public void SequentialReader_ApacheWriterMicrosoftReader()
{
    // Derive the writer schema from the Microsoft serializer and re-parse it with the
    // Apache parser; the data-contract resolver yields a union schema here.
    var serializer = AvroSerializer.Create<ClassOfInt>(new AvroSerializerSettings { Resolver = new AvroDataContractResolver(true) });
    var schema = ApacheAvro.Schema.Parse(serializer.WriterSchema.ToString()) as ApacheAvro.UnionSchema;
    Assert.IsNotNull(schema);
    // The record branch is taken from index 1 of the union — presumably branch 0 is
    // "null"; NOTE(review): confirm against the resolver's schema layout.
    var recordSchema = schema.Schemas[1] as ApacheAvro.RecordSchema;
    Assert.IsNotNull(recordSchema);
    // Build seven records, each carrying a known int payload in "PrimitiveInt".
    var expected = new List<GenericRecord>();
    for (var i = 0; i < 7; i++)
    {
        var record = new GenericRecord(recordSchema);
        record.Add("PrimitiveInt", ClassOfInt.Create(true).PrimitiveInt);
        expected.Add(record);
    }
    // Write the records into the shared result stream using the Apache writer.
    var datumWriter = new GenericWriter<GenericRecord>(schema);
    var writer = DataFileWriter<GenericRecord>.OpenWriter(datumWriter, this.resultStream);
    writer.WriteHeader();
    foreach (var obj in expected)
    {
        writer.Append(obj);
    }
    writer.Flush();
    // Rewind and read the same stream back with the Microsoft container reader.
    this.resultStream.Seek(0, SeekOrigin.Begin);
    var r = AvroContainer.CreateReader<ClassOfInt>(this.resultStream, true, new AvroSerializerSettings { Resolver = new AvroDataContractResolver(true) }, new CodecFactory());
    using (var reader = new SequentialReader<ClassOfInt>(r))
    {
        var actual = reader.Objects.ToList();
        // Every written record must come back with the same int payload.
        Assert.AreEqual(expected.Count, actual.Count);
        for (var i = 0; i < expected.Count; ++i)
        {
            Assert.AreEqual(expected[i]["PrimitiveInt"], actual[i].PrimitiveInt);
        }
    }
}
/// <summary>
/// Splitter variant that accepts the whole sentence event and delegates to the
/// string-based Avro splitter using the event's "sentence" property.
/// </summary>
public static GenericRecord[] SplitSentenceBeanMethodReturnAvro(GenericRecord sentenceEvent)
    => SplitSentenceMethodReturnAvro((string) sentenceEvent.Get("sentence"));
// Exercises dynamic indexed/mapped property access with exists-flags across all
// event representations: POCO bean, Map, Object-array, XML and Avro. Each test pair
// couples an input event with the six expected value/exists results.
public override void Run(EPServiceProvider epService)
{
    AddMapEventType(epService);
    AddOAEventType(epService);
    epService.EPAdministrator.Configuration.AddEventType(BEAN_TYPE);
    AddAvroEventType(epService);
    // Flags used when all six dynamic properties are expected to be absent.
    var notExists = MultipleNotExists(6);

    // Bean: one event without the nested item, one fully populated from the default bean.
    var inner = SupportBeanComplexProps.MakeDefaultBean();
    var beanTests = new[] {
        new Pair <SupportMarkerInterface, ValueWithExistsFlag[]>(new SupportBeanDynRoot("xxx"), notExists),
        new Pair <SupportMarkerInterface, ValueWithExistsFlag[]>(new SupportBeanDynRoot(inner), AllExist( inner.GetIndexed(0), inner.GetIndexed(1), inner.ArrayProperty[1], inner.GetMapped("keyOne"), inner.GetMapped("keyTwo"), inner.MapProperty.Get("xOne") )),
    };
    RunAssertion(epService, BEAN_TYPE.Name, FBEAN, null, beanTests, typeof(object));

    // Map: arrayProperty/mapProperty left null so those lookups report not-exists.
    var mapNestedOne = new Dictionary <string, object>();
    mapNestedOne.Put("indexed", new[] { 1, 2 });
    mapNestedOne.Put("arrayProperty", null);
    mapNestedOne.Put("mapped", TwoEntryMap("keyOne", 100, "keyTwo", 200));
    mapNestedOne.Put("mapProperty", null);
    var mapOne = Collections.SingletonDataMap("item", mapNestedOne);
    var mapTests = new[] {
        new Pair <Map, ValueWithExistsFlag[]>(Collections.EmptyDataMap, notExists),
        new Pair <Map, ValueWithExistsFlag[]>(mapOne, new[] { Exists(1), Exists(2), NotExists(), Exists(100), Exists(200), NotExists() }),
    };
    RunAssertion(epService, MAP_TYPENAME, FMAP, null, mapTests, typeof(object));

    // Object-array: positional values for the nested type's properties.
    var oaNestedOne = new object[] { new[] { 1, 2 }, TwoEntryMap("keyOne", 100, "keyTwo", 200), new[] { 1000, 2000 }, Collections.SingletonMap("xOne", "abc") };
    var oaOne = new object[] { null, oaNestedOne };
    var oaTests = new[] {
        new Pair <object[], ValueWithExistsFlag[]>(new object[] { null, null }, notExists),
        new Pair <object[], ValueWithExistsFlag[]>(oaOne, AllExist(1, 2, 2000, 100, 200, "abc")),
    };
    RunAssertion(epService, OA_TYPENAME, FOA, null, oaTests, typeof(object));

    // XML: values are expected back as strings.
    var xmlTests = new[] {
        new Pair <string, ValueWithExistsFlag[]>("", notExists),
        new Pair <string, ValueWithExistsFlag[]>("<item>" + "<indexed>1</indexed><indexed>2</indexed><mapped id=\"keyOne\">3</mapped><mapped id=\"keyTwo\">4</mapped>" + "</item>", new[] { Exists("1"), Exists("2"), NotExists(), Exists("3"), Exists("4"), NotExists() })
    };
    RunAssertion(epService, XML_TYPENAME, FXML, XML_TO_VALUE, xmlTests, typeof(XmlNode));

    // Avro: empty record, record with null item, and a populated item record.
    var schema = GetAvroSchema();
    var itemSchema = AvroSchemaUtil.FindUnionRecordSchemaSingle(schema.GetField("item").Schema);
    var datumOne = new GenericRecord(schema);
    datumOne.Put("item", null);
    var datumItemTwo = new GenericRecord(itemSchema.AsRecordSchema());
    datumItemTwo.Put("indexed", Collections.List(1, 2));
    datumItemTwo.Put("mapped", TwoEntryMap("keyOne", 3, "keyTwo", 4));
    var datumTwo = new GenericRecord(schema);
    datumTwo.Put("item", datumItemTwo);
    var avroTests = new[] {
        new Pair <GenericRecord, ValueWithExistsFlag[]>(new GenericRecord(schema), notExists),
        new Pair <GenericRecord, ValueWithExistsFlag[]>(datumOne, notExists),
        new Pair <GenericRecord, ValueWithExistsFlag[]>(datumTwo, new[] { Exists(1), Exists(2), NotExists(), Exists(3), Exists(4), NotExists() }),
    };
    RunAssertion(epService, AVRO_TYPENAME, FAVRO, null, avroTests, typeof(object));
}
// Console example: produces Avro GenericRecords typed from user input while a background
// task consumes and prints them, using Confluent Schema Registry for (de)serialization.
static async Task Main(string[] args)
{
    if (args.Length != 3)
    {
        Console.WriteLine("Usage: .. bootstrapServers schemaRegistryUrl topicName");
        return;
    }

    string bootstrapServers = args[0];
    string schemaRegistryUrl = args[1];
    string topicName = args[2];
    string groupName = "avro-generic-example-group";

    // The schema could equally be loaded from a file:
    // var s = (RecordSchema)RecordSchema.Parse(File.ReadAllText("my-schema.json"));
    var s = (RecordSchema)RecordSchema.Parse(
        @"{ ""namespace"": ""Confluent.Kafka.Examples.AvroSpecific"", ""type"": ""record"", ""name"": ""User"", ""fields"": [ {""name"": ""name"", ""type"": ""string""}, {""name"": ""favorite_number"", ""type"": [""int"", ""null""]}, {""name"": ""favorite_color"", ""type"": [""string"", ""null""]} ] }"
    );

    CancellationTokenSource cts = new CancellationTokenSource();
    // Background consumer: echoes every key/value it receives until the token is cancelled.
    var consumeTask = Task.Run(() =>
    {
        using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig { SchemaRegistryUrl = schemaRegistryUrl }))
        using (var consumer = new ConsumerBuilder <string, GenericRecord>(new ConsumerConfig { BootstrapServers = bootstrapServers, GroupId = groupName })
            .SetKeyDeserializer(new AvroDeserializer <string>(schemaRegistry))
            .SetValueDeserializer(new AvroDeserializer <GenericRecord>(schemaRegistry))
            .SetErrorHandler((_, e) => Console.WriteLine($"Error: {e.Reason}"))
            .Build())
        {
            consumer.Subscribe(topicName);
            while (!cts.Token.IsCancellationRequested)
            {
                try
                {
                    var consumeResult = consumer.Consume(cts.Token);
                    Console.WriteLine($"Key: {consumeResult.Message.Key}\nValue: {consumeResult.Value}");
                }
                catch (ConsumeException e)
                {
                    Console.WriteLine("Consume error: " + e.Error.Reason);
                }
            }
            // Leave the group cleanly so partition rebalance is immediate.
            consumer.Close();
        }
    }, cts.Token);

    // Foreground producer loop: each entered name becomes one User record.
    using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig { SchemaRegistryUrl = schemaRegistryUrl }))
    using (var producer = new ProducerBuilder <string, GenericRecord>(new ProducerConfig { BootstrapServers = bootstrapServers })
        .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
        .SetValueSerializer(new AvroSerializer <GenericRecord>(schemaRegistry))
        .Build())
    {
        Console.WriteLine($"{producer.Name} producing on {topicName}. Enter user names, q to exit.");
        int i = 0;
        string text;
        while ((text = Console.ReadLine()) != "q")
        {
            var record = new GenericRecord(s);
            record.Add("name", text);
            record.Add("favorite_number", i++);
            record.Add("favorite_color", "blue");
            // NOTE(review): the ContinueWith builds a status string that is then discarded;
            // presumably it was meant to be written to the console — confirm intent.
            await producer
                .ProduceAsync(topicName, new Message <string, GenericRecord> { Key = text, Value = record })
                .ContinueWith(task => task.IsFaulted ? $"error producing message: {task.Exception.Message}" : $"produced to: {task.Result.TopicPartitionOffset}");
        }
    }

    // Stop the consumer loop before exiting.
    cts.Cancel();
}
// Verifies contained-event selection via single-row splitter functions for one event
// representation (Map / Object-array / Avro): basic split, SODA round-trip, scripted
// splitter, chained splitters, wildcard parameter, untyped-collection properties, and
// a series of invalid-statement error cases. Cleans up all statements/types at the end.
private void TryAssertionSingleRowSplitAndType(
    EPServiceProvider epService,
    EventRepresentationChoice eventRepresentationEnum)
{
    // Pick the splitter methods matching the representation under test.
    string[] methods;
    if (eventRepresentationEnum.IsObjectArrayEvent()) {
        methods = new[] { "SplitSentenceMethodReturnObjectArray", "SplitSentenceBeanMethodReturnObjectArray", "SplitWordMethodReturnObjectArray" };
    }
    else if (eventRepresentationEnum.IsMapEvent()) {
        methods = new[] { "SplitSentenceMethodReturnMap", "SplitSentenceBeanMethodReturnMap", "SplitWordMethodReturnMap" };
    }
    else if (eventRepresentationEnum.IsAvroEvent()) {
        methods = new[] { "SplitSentenceMethodReturnAvro", "SplitSentenceBeanMethodReturnAvro", "SplitWordMethodReturnAvro" };
    }
    else {
        throw new IllegalStateException("Unrecognized enum " + eventRepresentationEnum);
    }

    // Register each splitter as a plug-in single-row function, suffixed with the representation name.
    var funcs = new[] { "SplitSentence", "SplitSentenceBean", "SplitWord" };
    for (var i = 0; i < funcs.Length; i++) {
        epService.EPAdministrator.Configuration.AddPlugInSingleRowFunction(funcs[i] + "_" + eventRepresentationEnum.GetName(), GetType(), methods[i]);
    }

    // Schemas for the three event levels: sentence -> word -> character.
    epService.EPAdministrator.CreateEPL(eventRepresentationEnum.GetAnnotationText() + " create schema SentenceEvent(sentence string)");
    epService.EPAdministrator.CreateEPL(eventRepresentationEnum.GetAnnotationText() + " create schema WordEvent(word string)");
    epService.EPAdministrator.CreateEPL(eventRepresentationEnum.GetAnnotationText() + " create schema CharacterEvent(char string)");
    var fields = "word".Split(',');

    // Single-row splitter: each sentence yields one WordEvent per word.
    var stmtText = "select * from SentenceEvent[SplitSentence" + "_" + eventRepresentationEnum.GetName() + "(sentence)@Type(WordEvent)]";
    var stmt = epService.EPAdministrator.CreateEPL(stmtText);
    var listener = new SupportUpdateListener();
    stmt.Events += listener.Update;
    Assert.AreEqual("WordEvent", stmt.EventType.Name);
    Assert.IsTrue(eventRepresentationEnum.MatchesClass(stmt.EventType.UnderlyingType));
    SendSentenceEvent(epService, eventRepresentationEnum, "I am testing this code");
    EPAssertionUtil.AssertPropsPerRow(listener.GetAndResetLastNewData(), fields, new[] { new object[] {"I"}, new object[] {"am"}, new object[] {"testing"}, new object[] {"this"}, new object[] {"code"} });
    SendSentenceEvent(epService, eventRepresentationEnum, "the second event");
    EPAssertionUtil.AssertPropsPerRow(listener.GetAndResetLastNewData(), fields, new[] { new object[] {"the"}, new object[] {"second"}, new object[] {"event"} });
    stmt.Dispose();

    // SODA round-trip: the compiled model must regenerate the exact EPL text.
    var model = epService.EPAdministrator.CompileEPL(stmtText);
    Assert.AreEqual(stmtText, model.ToEPL());
    stmt = epService.EPAdministrator.Create(model);
    Assert.AreEqual(stmtText, stmt.Text);
    stmt.Events += listener.Update;
    SendSentenceEvent(epService, eventRepresentationEnum, "the third event");
    EPAssertionUtil.AssertPropsPerRow(listener.GetAndResetLastNewData(), fields, new[] {new object[] {"the"}, new object[] {"third"}, new object[] {"event"}});
    stmt.Dispose();

    // Script-based splitter (Map representation only).
    if (eventRepresentationEnum.IsMapEvent()) {
        stmtText = "expression System.Collections.IList jscript:SplitSentenceJS(sentence) [" + " debug.Debug('test');" + " var listType = host.type('System.Collections.ArrayList');" + " var words = host.newObj(listType);" + " debug.Debug(words);" + " words.Add(Collections.SingletonDataMap('word', 'wordOne'));" + " words.Add(Collections.SingletonDataMap('word', 'wordTwo'));" + " return words;" + "]" + "select * from SentenceEvent[SplitSentenceJS(sentence)@Type(WordEvent)]";
        stmt = epService.EPAdministrator.CreateEPL(stmtText);
        stmt.Events += listener.Update;
        Assert.AreEqual("WordEvent", stmt.EventType.Name);
        epService.EPRuntime.SendEvent(Collections.EmptyDataMap, "SentenceEvent");
        EPAssertionUtil.AssertPropsPerRowAnyOrder(listener.GetAndResetLastNewData(), fields, new[] { new object[] {"wordOne"}, new object[] {"wordTwo"} });
        stmt.Dispose();
    }

    // Chained splitters: sentence -> words -> characters.
    stmtText = "select * from SentenceEvent[SplitSentence_" + eventRepresentationEnum.GetName() + "(sentence)@Type(WordEvent)][SplitWord_" + eventRepresentationEnum.GetName() + "(word)@Type(CharacterEvent)]";
    stmt = epService.EPAdministrator.CreateEPL(stmtText);
    stmt.Events += listener.Update;
    Assert.AreEqual("CharacterEvent", stmt.EventType.Name);
    SendSentenceEvent(epService, eventRepresentationEnum, "I am");
    EPAssertionUtil.AssertPropsPerRowAnyOrder(listener.GetAndResetLastNewData(), "char".Split(','), new[] {new object[] {"I"}, new object[] {"a"}, new object[] {"m"}});
    stmt.Dispose();

    // Wildcard parameter: the splitter receives the whole underlying event.
    stmtText = "select * from SentenceEvent[SplitSentenceBean_" + eventRepresentationEnum.GetName() + "(*)@Type(WordEvent)]";
    stmt = epService.EPAdministrator.CreateEPL(stmtText);
    stmt.Events += listener.Update;
    Assert.AreEqual("WordEvent", stmt.EventType.Name);
    SendSentenceEvent(epService, eventRepresentationEnum, "another test sentence");
    EPAssertionUtil.AssertPropsPerRowAnyOrder(listener.GetAndResetLastNewData(), fields, new[] { new object[] {"another"}, new object[] {"test"}, new object[] {"sentence"} });
    stmt.Dispose();

    // Property returning an untyped collection, per representation.
    if (eventRepresentationEnum.IsObjectArrayEvent()) {
        epService.EPAdministrator.Configuration.AddEventType(typeof(ObjectArrayEvent));
        stmtText = eventRepresentationEnum.GetAnnotationText() + " select * from ObjectArrayEvent[someObjectArray@Type(WordEvent)]";
        stmt = epService.EPAdministrator.CreateEPL(stmtText);
        stmt.Events += listener.Update;
        Assert.AreEqual("WordEvent", stmt.EventType.Name);
        var rows = new[] { new object[] {"this"}, new object[] {"is"}, new object[] {"collection"} };
        epService.EPRuntime.SendEvent(new ObjectArrayEvent(rows));
        EPAssertionUtil.AssertPropsPerRow(listener.GetAndResetLastNewData(), fields, new[] { new object[] {"this"}, new object[] {"is"}, new object[] {"collection"} });
        stmt.Dispose();
    }
    else if (eventRepresentationEnum.IsMapEvent()) {
        epService.EPAdministrator.Configuration.AddEventType(typeof(MyCollectionEvent));
        stmtText = eventRepresentationEnum.GetAnnotationText() + " select * from MyCollectionEvent[someCollection@Type(WordEvent)]";
        stmt = epService.EPAdministrator.CreateEPL(stmtText);
        stmt.Events += listener.Update;
        Assert.AreEqual("WordEvent", stmt.EventType.Name);
        var coll = new List<Map>();
        coll.Add(Collections.SingletonDataMap("word", "this"));
        coll.Add(Collections.SingletonDataMap("word", "is"));
        coll.Add(Collections.SingletonDataMap("word", "collection"));
        epService.EPRuntime.SendEvent(new MyCollectionEvent(coll));
        EPAssertionUtil.AssertPropsPerRowAnyOrder(listener.GetAndResetLastNewData(), fields, new[] { new object[] {"this"}, new object[] {"is"}, new object[] {"collection"} });
        stmt.Dispose();
    }
    else if (eventRepresentationEnum.IsAvroEvent()) {
        epService.EPAdministrator.Configuration.AddEventType(typeof(AvroArrayEvent));
        stmtText = eventRepresentationEnum.GetAnnotationText() + " select * from AvroArrayEvent[someAvroArray@Type(WordEvent)]";
        stmt = epService.EPAdministrator.CreateEPL(stmtText);
        stmt.Events += listener.Update;
        Assert.AreEqual("WordEvent", stmt.EventType.Name);
        // Build the records against the statement's own Avro schema.
        var rows = new GenericRecord[3];
        var words = "this,is,avro".Split(',');
        for (var i = 0; i < words.Length; i++) {
            rows[i] = new GenericRecord(((AvroEventType) stmt.EventType).SchemaAvro);
            rows[i].Put("word", words[i]);
        }
        epService.EPRuntime.SendEvent(new AvroArrayEvent(rows));
        EPAssertionUtil.AssertPropsPerRow(listener.GetAndResetLastNewData(), fields, new[] { new object[] {"this"}, new object[] {"is"}, new object[] {"avro"} });
        stmt.Dispose();
    }
    else {
        throw new ArgumentException("Unrecognized enum " + eventRepresentationEnum);
    }

    // Invalid: event type not found.
    TryInvalid(epService, "select * from SentenceEvent[SplitSentence_" + eventRepresentationEnum.GetName() + "(sentence)@type(XYZ)]", "Event type by name 'XYZ' could not be found");

    // Invalid lib-function annotation.
    TryInvalid(epService, "select * from SentenceEvent[splitSentence_" + eventRepresentationEnum.GetName() + "(sentence)@dummy(WordEvent)]", "Invalid annotation for property selection, expected 'type' but found 'dummy' in text '@dummy(WordEvent)'");

    // Invalid type assignment to event type; the expected message names the underlying type.
    if (eventRepresentationEnum.IsObjectArrayEvent()) {
        TryInvalid(epService, "select * from SentenceEvent[InvalidSentence(sentence)@type(WordEvent)]", "Event type 'WordEvent' underlying type System.Object[] cannot be assigned a value of type");
    }
    else if (eventRepresentationEnum.IsMapEvent()) {
        TryInvalid(epService, "select * from SentenceEvent[InvalidSentence(sentence)@type(WordEvent)]", "Event type 'WordEvent' underlying type " + Name.Clean<IDictionary<string, object>>() + " cannot be assigned a value of type");
    }
    else if (eventRepresentationEnum.IsAvroEvent()) {
        TryInvalid(epService, "select * from SentenceEvent[InvalidSentence(sentence)@Type(WordEvent)]", "Event type 'WordEvent' underlying type " + AvroConstantsNoDep.GENERIC_RECORD_CLASSNAME + " cannot be assigned a value of type");
    }
    else {
        Assert.Fail();
    }

    // Invalid subquery inside the contained-event expression.
    TryInvalid(epService, "select * from SentenceEvent[SplitSentence((select * from SupportBean#keepall))@type(WordEvent)]", "Invalid contained-event expression 'SplitSentence(subselect_0)': Aggregation, sub-select, previous or prior functions are not supported in this context [select * from SentenceEvent[SplitSentence((select * from SupportBean#keepall))@type(WordEvent)]]");

    // Cleanup: destroy statements and remove the schemas created above.
    epService.EPAdministrator.DestroyAllStatements();
    foreach (var name in "SentenceEvent,WordEvent,CharacterEvent".Split(',')) {
        epService.EPAdministrator.Configuration.RemoveEventType(name, true);
    }
}
/// <summary>
/// Convenience overload forwarding the record to the two-argument FireAndForget
/// using this instance's proxy.
/// </summary>
private void FireAndForget(GenericRecord genericRecord) => FireAndForget(proxy, genericRecord);
// Verifies that one event of each representation (bean, Map, Object-array, XML, Avro,
// JSON) can be sent through the event service of the staged runtime "ST".
public void Run(RegressionEnvironment env)
{
    EPStage stage = env.StageService.GetStage("ST");
    RegressionPath path = new RegressionPath();

    // Bean
    RunAssertion(env, path, stage, "SupportBean", new SupportBean(), svc => svc.SendEventBean(new SupportBean(), "SupportBean"));

    // Map
    RunAssertion(env, path, stage, MAP_TYPENAME, new Dictionary <string, object>(), svc => svc.SendEventMap(new Dictionary <string, object>(), MAP_TYPENAME));

    // Object-Array
    RunAssertion(env, path, stage, OA_TYPENAME, new object[0], svc => svc.SendEventObjectArray(new object[0], OA_TYPENAME));

    // XML
    var node = SupportXML.GetDocument("<myevent/>").DocumentElement;
    RunAssertion(env, path, stage, XML_TYPENAME, node, svc => svc.SendEventXMLDOM(node, XML_TYPENAME));

    // Avro: resolve the preconfigured type's schema to build an empty record.
    var schema = AvroSchemaUtil.ResolveAvroSchema(env.Runtime.EventTypeService.GetEventTypePreconfigured(AVRO_TYPENAME));
    var record = new GenericRecord(schema.AsRecordSchema());
    RunAssertion(env, path, stage, AVRO_TYPENAME, record, svc => svc.SendEventAvro(record, AVRO_TYPENAME));

    // Json
    RunAssertion(env, path, stage, JSON_TYPENAME, "{}", svc => svc.SendEventJson("{}", JSON_TYPENAME));

    env.UndeployAll();
}
/// <summary>
/// Round-trips a GenericRecord wrapping an F# int option holding None through the ping grain.
/// </summary>
public async Task FSharpGrains_Ping_GenericRecord_ofIntOption_None()
{
    await PingTest<GenericRecord<FSharpOption<int>>>(
        GenericRecord<FSharpOption<int>>.ofT(FSharpOption<int>.None));
}
// Generates the cross-language Avro interop data files: builds one record populating
// every field of the interop schema (primitives, map, union, enum, fixed, recursive
// record) and writes it once per supported codec.
static void GenerateInteropData(string schemaPath, string outputDir)
{
    // Load the interop record schema from disk.
    RecordSchema schema = null;
    using (var reader = new StreamReader(schemaPath))
    {
        schema = Schema.Parse(reader.ReadToEnd()) as RecordSchema;
    }

    // mapField: a map of string -> record carrying a "label" field.
    var mapFieldSchema = (schema.Fields.Find(x => x.Name == "mapField").Schema as MapSchema).ValueSchema as RecordSchema;
    var mapFieldRecord0 = new GenericRecord(mapFieldSchema);
    var mapFieldRecord1 = new GenericRecord(mapFieldSchema);
    mapFieldRecord0.Add("label", "a");
    mapFieldRecord1.Add("label", "cee");
    var mapFieldValue = new Dictionary <string, GenericRecord> { { "a", mapFieldRecord0 }, { "bee", mapFieldRecord1 } };

    // enumField / fixedField values built against their respective field schemas.
    var enumFieldValue = new GenericEnum(schema.Fields.Find(x => x.Name == "enumField").Schema as EnumSchema, "C");
    var fixedFieldValue = new GenericFixed( schema.Fields.Find(x => x.Name == "fixedField").Schema as FixedSchema, Encoding.ASCII.GetBytes("1019181716151413"));

    // recordField: node record with "label" and a "children" array of the same node type.
    var nodeSchema = schema.Fields.Find(x => x.Name == "recordField").Schema as RecordSchema;
    var recordFieldValue = new GenericRecord(nodeSchema);
    var innerRecordFieldValue = new GenericRecord(nodeSchema);
    innerRecordFieldValue.Add("label", "inner");
    innerRecordFieldValue.Add("children", new GenericRecord[] { });
    recordFieldValue.Add("label", "blah");
    recordFieldValue.Add("children", new GenericRecord[] { innerRecordFieldValue });

    // Populate every field of the top-level interop record.
    GenericRecord record = new GenericRecord(schema);
    record.Add("intField", 12);
    record.Add("longField", 15234324L);
    record.Add("stringField", "hey");
    record.Add("boolField", true);
    record.Add("floatField", 1234.0f);
    record.Add("doubleField", -1234.0);
    record.Add("bytesField", Encoding.UTF8.GetBytes("12312adf"));
    record.Add("nullField", null);
    record.Add("arrayField", new double[] { 5.0, 0.0, 12.0 });
    record.Add("mapField", mapFieldValue);
    record.Add("unionField", 12.0);
    record.Add("enumField", enumFieldValue);
    record.Add("fixedField", fixedFieldValue);
    record.Add("recordField", recordFieldValue);

    // Write one container file per codec; the null codec keeps the plain file name.
    var datumWriter = new GenericDatumWriter <GenericRecord>(schema);
    foreach (var codecName in InteropDataConstants.SupportedCodecNames)
    {
        var outputFile = "csharp.avro";
        if (codecName != DataFileConstants.NullCodec)
        {
            outputFile = string.Format("csharp_{0}.avro", codecName);
        }
        var outputPath = Path.Combine(outputDir, outputFile);
        var codec = Codec.CreateCodecFromString(codecName);
        using (var dataFileWriter = DataFileWriter <GenericRecord> .OpenWriter(datumWriter, outputPath, codec))
        {
            dataFileWriter.Append(record);
        }
    }
}
/// <summary>
/// Round-trips a GenericRecord wrapping the int value 0 through the ping grain.
/// </summary>
public async Task FSharpGrains_Ping_GenericRecord_ofInt()
{
    await PingTest<GenericRecord<int>>(GenericRecord<int>.ofT(0));
}
// Verifies stream-level conversion functions for each event representation, both with
// implicitly-derived wrapper types and with pre-configured target types. The expected
// outputs show the converters return property "two" pipe-decorated (e.g. "|2|").
public void TestStreamSelectConversionFunction()
{
    // Define bean types.
    _epService.EPAdministrator.Configuration.AddEventType(typeof(SupportBean));
    _epService.EPAdministrator.Configuration.AddEventType(typeof(SupportMarketDataBean));

    // Define two structurally-identical Map types.
    IDictionary <string, object> mapTypeInfo = new Dictionary <string, object>();
    mapTypeInfo.Put("one", typeof(string));
    mapTypeInfo.Put("two", typeof(string));
    _epService.EPAdministrator.Configuration.AddEventType("MapOne", mapTypeInfo);
    _epService.EPAdministrator.Configuration.AddEventType("MapTwo", mapTypeInfo);

    // Define two structurally-identical Object-array types.
    string[] props = { "one", "two" };
    object[] types = { typeof(string), typeof(string) };
    _epService.EPAdministrator.Configuration.AddEventType("OAOne", props, types);
    _epService.EPAdministrator.Configuration.AddEventType("OATwo", props, types);

    // Define two Avro types sharing one record schema.
    var schema = SchemaBuilder.Record( "name", TypeBuilder.RequiredString("one"), TypeBuilder.RequiredString("two"));
    _epService.EPAdministrator.Configuration.AddEventTypeAvro("AvroOne", new ConfigurationEventTypeAvro(schema));
    _epService.EPAdministrator.Configuration.AddEventTypeAvro("AvroTwo", new ConfigurationEventTypeAvro(schema));

    // Bean conversion.
    RunAssertionConversionImplicitType( "Bean", typeof(SupportBean).Name, "ConvertEvent", typeof(BeanEventType), typeof(SupportBean), typeof(SupportMarketDataBean).FullName, new SupportMarketDataBean("ACME", 0, 0L, null), SupportEventInfra.FBEANWTYPE, "theString".SplitCsv(), new object[] { "ACME" });

    // Map conversion: implicit wrapper type, then pre-configured target type.
    IDictionary <string, object> mapEventOne = new Dictionary <string, object>();
    mapEventOne.Put("one", "1");
    mapEventOne.Put("two", "2");
    RunAssertionConversionImplicitType("Map", "MapOne", "ConvertEventMap", typeof(WrapperEventType), typeof(IDictionary <string, object>), "MapTwo", mapEventOne, SupportEventInfra.FMAPWTYPE, "one,two".SplitCsv(), new object[] { "1", "|2|" });
    IDictionary <string, object> mapEventTwo = new Dictionary <string, object>();
    mapEventTwo.Put("one", "3");
    mapEventTwo.Put("two", "4");
    RunAssertionConversionConfiguredType("MapOne", "ConvertEventMap", "MapTwo", typeof(MappedEventBean), typeof(Dictionary <string, object>), mapEventTwo, SupportEventInfra.FMAPWTYPE, "one,two".SplitCsv(), new object[] { "3", "|4|" });

    // Object-array conversion: implicit, then pre-configured.
    RunAssertionConversionImplicitType("OA", "OAOne", "ConvertEventObjectArray", typeof(WrapperEventType), typeof(object[]), "OATwo", new object[] { "1", "2" }, SupportEventInfra.FOAWTYPE, "one,two".SplitCsv(), new object[] { "1", "|2|" });
    RunAssertionConversionConfiguredType("OAOne", "ConvertEventObjectArray", "OATwo", typeof(ObjectArrayBackedEventBean), typeof(object[]), new object[] { "3", "4" }, SupportEventInfra.FOAWTYPE, "one,two".SplitCsv(), new object[] { "3", "|4|" });

    // Avro conversion: implicit, then pre-configured.
    var rowOne = new GenericRecord(schema);
    rowOne.Put("one", "1");
    rowOne.Put("two", "2");
    RunAssertionConversionImplicitType("Avro", "AvroOne", "ConvertEventAvro", typeof(WrapperEventType), typeof(GenericRecord), "AvroTwo", rowOne, SupportEventInfra.FAVROWTYPE, "one,two".SplitCsv(), new object[] { "1", "|2|" });
    var rowTwo = new GenericRecord(schema);
    rowTwo.Put("one", "3");
    rowTwo.Put("two", "4");
    RunAssertionConversionConfiguredType("AvroOne", "ConvertEventAvro", "AvroTwo", typeof(AvroGenericDataBackedEventBean), typeof(GenericRecord), rowTwo, SupportEventInfra.FAVROWTYPE, "one,two".SplitCsv(), new object[] { "3", "|4|" });
}
// Routes one event of each representation (bean, Map, Object-array, XML, Avro, JSON)
// from inside a statement listener and asserts each arrives carrying the same Ident.
public void Run(RegressionEnvironment env)
{
    // One pass-through statement per representation plus a bean trigger statement.
    var epl = "@Name('bean') select * from " + BEAN_TYPENAME + ";\n" + "@Name('map') select * from " + MAP_TYPENAME + ";\n" + "@Name('oa') select * from " + OA_TYPENAME + ";\n" + "@Name('xml') select * from " + XML_TYPENAME + ";\n" + "@Name('avro') select * from " + AVRO_TYPENAME + ";\n" + "@public @buseventtype create json schema JsonEvent(Ident string);\n" + "@Name('json') select * from JsonEvent;\n" + "@Name('trigger') select * from SupportBean;";
    env.CompileDeploy(epl)
        .AddListener("map")
        .AddListener("oa")
        .AddListener("xml")
        .AddListener("avro")
        .AddListener("bean")
        .AddListener("json");

    // On each trigger event, route one event per representation carrying TheString as Ident.
    env.Statement("trigger").Events += ( sender, updateEventArgs) => {
        var newEvents = updateEventArgs.NewEvents;
        var processEvent = updateEventArgs.Runtime.EventService;
        var ident = (string) newEvents[0].Get("TheString");
        processEvent.RouteEventBean(new RoutedBeanEvent(ident), BEAN_TYPENAME);
        processEvent.RouteEventMap(Collections.SingletonDataMap("Ident", ident), MAP_TYPENAME);
        processEvent.RouteEventObjectArray(new object[] {ident}, OA_TYPENAME);
        var xml = "<Myevent Ident=\"XXXXXX\"></Myevent>\n".Replace("XXXXXX", ident);
        processEvent.RouteEventXMLDOM(SupportXML.GetDocument(xml).DocumentElement, XML_TYPENAME);
        var avroSchema = AvroSchemaUtil.ResolveAvroSchema(env.Runtime.EventTypeService.GetEventTypePreconfigured(AVRO_TYPENAME));
        var datum = new GenericRecord(avroSchema.AsRecordSchema());
        datum.Put("Ident", ident);
        processEvent.RouteEventAvro(datum, AVRO_TYPENAME);
        var jsonObject = new JObject(new JProperty("Ident", ident));
        processEvent.RouteEventJson(jsonObject.ToString(), "JsonEvent");
    };

    env.SendEventBean(new SupportBean("xy", -1));
    // Each representation's listener must have received the routed event with Ident "xy".
    foreach (var name in new[] {"map", "bean", "oa", "xml", "avro", "json"}) {
        var listener = env.Listener(name);
        Assert.IsTrue(listener.IsInvoked, "failed for " + name);
        Assert.AreEqual("xy", env.Listener(name).AssertOneGetNewAndReset().Get("Ident"));
    }

    env.UndeployAll();
}
// Integration test: produces one GenericRecord and one specific (generated User) record
// to the same topic, then consumes both back — first as generic records, then the
// generic-produced one again as a specific record.
public static void ProduceConsumeGeneric(string bootstrapServers, string schemaRegistryServers)
{
    var s = (RecordSchema)Schema.Parse(
        @"{ ""namespace"": ""Confluent.Kafka.Examples.AvroSpecific"", ""type"": ""record"", ""name"": ""User"", ""fields"": [ {""name"": ""name"", ""type"": ""string""}, {""name"": ""favorite_number"", ""type"": [""int"", ""null""]}, {""name"": ""favorite_color"", ""type"": [""string"", ""null""]} ] }"
    );

    var config = new Dictionary <string, object>() { { "bootstrap.servers", bootstrapServers }, { "schema.registry.url", schemaRegistryServers } };
    // Fresh topic per run so offsets start at the messages produced below.
    var topic = Guid.NewGuid().ToString();

    // Produce a generic record; keep the delivery report for offset/assertions.
    Message <Null, GenericRecord> dr;
    using (var p = new Producer <Null, GenericRecord>(config, null, new AvroSerializer <GenericRecord>()))
    {
        var record = new GenericRecord(s);
        record.Add("name", "my name 2");
        record.Add("favorite_number", 44);
        record.Add("favorite_color", null);
        dr = p.ProduceAsync(topic, null, record).Result;
    }

    // produce a specific record (to later consume back as a generic record).
    using (var p = new Producer <Null, User>(config, null, new AvroSerializer <User>()))
    {
        var user = new User { name = "my name 3", favorite_number = 47, favorite_color = "orange" };
        p.ProduceAsync(topic, null, user).Wait();
    }

    // Sanity-check the delivery report of the generic produce.
    Assert.Null(dr.Key);
    Assert.NotNull(dr.Value);
    dr.Value.TryGetValue("name", out object name);
    dr.Value.TryGetValue("favorite_number", out object number);
    dr.Value.TryGetValue("favorite_color", out object color);
    Assert.IsType <string>(name);
    Assert.IsType <int>(number);
    Assert.Equal("my name 2", name);
    Assert.Equal(44, number);
    Assert.Null(color);

    var cconfig = new Dictionary <string, object>() { { "group.id", Guid.NewGuid().ToString() }, { "bootstrap.servers", bootstrapServers }, { "schema.registry.url", schemaRegistryServers } };
    using (var c = new Consumer <Null, GenericRecord>(cconfig, null, new AvroDeserializer <GenericRecord>()))
    {
        // consume generic record produced as a generic record.
        c.Assign(new List <TopicPartitionOffset> { new TopicPartitionOffset(topic, 0, dr.Offset) });
        c.Consume(out Message <Null, GenericRecord> msg, 20000);
        msg.Value.TryGetValue("name", out object msgName);
        msg.Value.TryGetValue("favorite_number", out object msgNumber);
        msg.Value.TryGetValue("favorite_color", out object msgColor);
        Assert.IsType <string>(msgName);
        Assert.IsType <int>(msgNumber);
        Assert.Equal("my name 2", msgName);
        Assert.Equal(44, msgNumber);
        Assert.Null(msgColor);

        // consume generic record produced as a specific record.
        c.Consume(out msg, 20000);
        msg.Value.TryGetValue("name", out msgName);
        msg.Value.TryGetValue("favorite_number", out msgNumber);
        msg.Value.TryGetValue("favorite_color", out msgColor);
        Assert.IsType <string>(msgName);
        Assert.IsType <int>(msgNumber);
        Assert.IsType <string>(msgColor);
        Assert.Equal("my name 3", msgName);
        Assert.Equal(47, msgNumber);
        Assert.Equal("orange", msgColor);
    }

    // Consume the first (generic-produced) message back as a specific User record.
    using (var c = new Consumer <Null, User>(cconfig, null, new AvroDeserializer <User>()))
    {
        c.Assign(new List <TopicPartitionOffset> { new TopicPartitionOffset(topic, 0, dr.Offset) });
        c.Consume(out Message <Null, User> msg, 20000);
        Assert.Equal("my name 2", msg.Value.name);
        Assert.Equal(44, msg.Value.favorite_number);
        Assert.Null(msg.Value.favorite_color);
    }
}
// Verifies sending events carrying a string-array property named "Array" for every
// event representation: bean, Map, Object-array, JSON (schema-defined and
// class-provided), and Avro. Each sender lambda is paired with the matching schema EPL.
public void Run(RegressionEnvironment env)
{
    // Bean
    BiConsumer <EventType, string[]> bean = ( type, array) => {
        env.SendEventBean(new LocalEvent(array));
    };
    var beanepl = $"@public @buseventtype create schema LocalEvent as {typeof(LocalEvent).MaskTypeName()}";
    RunAssertion(env, beanepl, bean);

    // Map
    BiConsumer <EventType, string[]> map = ( type, array) => {
        env.SendEventMap(Collections.SingletonDataMap("Array", array), "LocalEvent");
    };
    var mapepl = "@public @buseventtype create schema LocalEvent(Array string[]);\n";
    RunAssertion(env, mapepl, map);

    // Object-array
    BiConsumer <EventType, string[]> oa = ( type, array) => {
        env.SendEventObjectArray(new object[] { array }, "LocalEvent");
    };
    var oaepl = "@public @buseventtype create objectarray schema LocalEvent(Array string[]);\n";
    RunAssertion(env, oaepl, oa);

    // Json: a null array is sent as an explicit null property, otherwise as a JSON array.
    BiConsumer <EventType, string[]> json = ( type, array) => {
        if (array == null) {
            env.SendEventJson(new JObject(new JProperty("Array", null)).ToString(), "LocalEvent");
        }
        else {
            var @event = new JObject();
            var jsonarray = new JArray();
            @event.Add("Array", jsonarray);
            foreach (var @string in array) {
                jsonarray.Add(@string);
            }
            env.SendEventJson(@event.ToString(), "LocalEvent");
        }
    };
    RunAssertion(env, "@public @buseventtype create json schema LocalEvent(Array string[]);\n", json);

    // Json-Class-Provided: same sender, schema backed by a provided class.
    RunAssertion( env, "@JsonSchema(ClassName='" + typeof(MyLocalJsonProvided).MaskTypeName() + "') @public @buseventtype create json schema LocalEvent();\n", json);

    // Avro: the string[] is delivered as a list (null stays null).
    BiConsumer <EventType, string[]> avro = ( type, array) => {
        var @event = new GenericRecord(SupportAvroUtil.GetAvroSchema(type).AsRecordSchema());
        @event.Put("Array", array == null ? null : Arrays.AsList(array));
        env.SendEventAvro(@event, "LocalEvent");
    };
    RunAssertion(env, "@public @buseventtype create avro schema LocalEvent(Array string[]);\n", avro);
}