public async Task TestSimpleConsumerWorksOk()
{
    // Round-trip a single message through a fresh topic and verify every
    // field of the received record.
    var keySerializer = new NullSerializer<object>();
    var valueSerializer = new StringSerializer();
    var partitioner = new LoadBalancedPartitioner<object>();

    using (var temporaryTopic = testCluster.CreateTemporaryTopic())
    using (var brokers = new KafkaBrokers(testCluster.CreateBrokerUris()))
    {
        var topic = temporaryTopic.Name;
        var producer = KafkaProducer.Create(brokers, keySerializer, valueSerializer, partitioner);
        var consumer = KafkaConsumer.Create(
            defaultConsumerGroup, brokers, keySerializer, valueSerializer,
            new TopicSelector { Partition = 0, Topic = topic });

        await producer.SendAsync(KeyedMessage.Create(topic, "Message"), CancellationToken.None);

        var messages = await consumer.ReceiveAsync(CancellationToken.None);

        Assert.That(messages, Is.Not.Null);
        Assert.That(messages, Has.Count.EqualTo(1));
        var head = messages.First();
        Assert.That(head.Key, Is.Null);
        Assert.That(head.Offset, Is.EqualTo(0));
        Assert.That(head.Partition, Is.EqualTo(0));
        Assert.That(head.Topic, Is.EqualTo(topic));
        Assert.That(head.Value, Is.EqualTo("Message"));
    }
}
public async Task ProduceToMultiplePartitions(int numberOfPartitions, int numberOfKeys, int numberOfMessages)
{
    // Produces numberOfMessages messages spread across keys and partitions,
    // then consumes every partition and checks each message arrives exactly
    // once with the expected key, partition, offset and topic.
    var keySerializer = new Int32Serializer();
    var valueSerializer = new StringSerializer();
    var messagePartitioner = new Int32Partitioner();

    // BUG FIX: the topic was previously created with a hard-coded 2 partitions
    // (partitions: 2) even though this test is parameterized by
    // numberOfPartitions; messages below target partition i % numberOfPartitions,
    // so any run with numberOfPartitions != 2 hit non-existent partitions.
    using (var temporaryTopic = testCluster.CreateTemporaryTopic(partitions: numberOfPartitions))
    using (var brokers = new KafkaBrokers(testCluster.CreateBrokerUris()))
    {
        var topic = temporaryTopic.Name;

        {
            var producer = KafkaProducer.Create(brokers, keySerializer, valueSerializer, messagePartitioner);
            var messages = Enumerable
                .Range(0, numberOfMessages)
                .Select(i => KeyedMessage.Create(topic, i % numberOfKeys, i % numberOfPartitions, "Message " + i));
            await producer.SendAsync(messages, CancellationToken.None);
        }

        {
            // One selector per partition so the consumer reads everything.
            var selectors = Enumerable
                .Range(0, numberOfPartitions)
                .Select(partition => new TopicSelector { Partition = partition, Topic = topic })
                .ToArray();
            var consumer = KafkaConsumer.Create(defaultConsumerGroup, brokers, keySerializer, valueSerializer, selectors);
            var responses = await consumer.ReceiveAsync(CancellationToken.None);
            Assert.That(responses, Has.Count.EqualTo(numberOfMessages));

            var received = new bool[numberOfMessages];
            var offsets = new long[numberOfPartitions];
            foreach (var response in responses)
            {
                // Value format is "Message <n>"; recover n to derive expectations.
                var split = response.Value.Split(' ');
                Assert.That(split, Has.Length.EqualTo(2));
                Assert.That(split[0], Is.EqualTo("Message"));
                int messageNumber;
                var parsed = Int32.TryParse(split[1], out messageNumber);
                Assert.That(parsed, Is.True);
                Assert.That(messageNumber, Is.InRange(0, numberOfMessages - 1));

                var key = messageNumber % numberOfKeys;
                Assert.That(response.Key, Is.EqualTo(key));

                var partition = messageNumber % numberOfPartitions;
                Assert.That(response.Partition, Is.EqualTo(partition));

                // Each message must be seen exactly once.
                Assert.That(received[messageNumber], Is.False);
                received[messageNumber] = true;

                // Offsets within a partition must be contiguous from zero.
                Assert.That(response.Offset, Is.EqualTo(offsets[response.Partition]));
                offsets[response.Partition] += 1;

                Assert.That(response.Topic, Is.EqualTo(topic));
            }
        }
    }
}
public void Serialize_To_String()
{
    // The serializer should write the value's ToString() representation.
    var serializer = new StringSerializer();
    var value = new object();

    var stream = new MemoryStream();
    serializer.Serialize(stream, value);

    stream.Seek(0, SeekOrigin.Begin);
    var reader = new StreamReader(stream);
    Assert.AreEqual(value.ToString(), reader.ReadToEnd());
}
internal EhcacheServerRequest(Uri endpoint, string defaultCache, IMetadataSerializationService serializerService)
{
    this.endpoint = endpoint;
    this.defaultCache = defaultCache;
    this.serializerService = serializerService;

    ContentTypeToSerializer = new Dictionary<string, ISerializer>();
    TypeToSerializer = new Dictionary<Type, ISerializer>();

    // Default request timeout: 10 seconds, expressed in milliseconds.
    Timeout = 10 * 1000;
    DefaultSerializer = new StringSerializer();
}
public void Deserialize_From_String_Always_Returns_String()
{
    // Write several differently-typed values; StreamWriter stringifies them
    // all, so the deserializer must read them back as one string.
    var serializer = new StringSerializer();
    var backing = new MemoryStream();
    var writer = new StreamWriter(backing);
    writer.Write(true);
    writer.Write(false);
    writer.Write(10);
    writer.Write("hello");
    writer.Flush();

    backing.Seek(0, SeekOrigin.Begin);
    var asString = serializer.Deserialize<string>(backing);
    Assert.AreEqual("TrueFalse10hello", asString);

    // The untyped overload must also yield a string.
    backing.Seek(0, SeekOrigin.Begin);
    var asObject = serializer.Deserialize(backing);
    Assert.IsInstanceOfType(asObject, typeof(string));
    Assert.AreEqual("TrueFalse10hello", asObject);
}
public async Task ProduceToTwoPartitions()
{
    // One message is routed to each of two partitions; a dedicated consumer
    // per partition must see exactly its own message.
    var keySerializer = new Int32Serializer();
    var valueSerializer = new StringSerializer();
    var messagePartitioner = new Int32Partitioner();

    using (var temporaryTopic = testCluster.CreateTemporaryTopic(partitions: 2))
    using (var brokers = new KafkaBrokers(testCluster.CreateBrokerUris()))
    {
        var topic = temporaryTopic.Name;
        var producer = KafkaProducer.Create(brokers, keySerializer, valueSerializer, messagePartitioner);
        var consumers = new[]
        {
            KafkaConsumer.Create(defaultConsumerGroup, brokers, keySerializer, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic }),
            KafkaConsumer.Create(defaultConsumerGroup, brokers, keySerializer, valueSerializer,
                new TopicSelector { Partition = 1, Topic = topic })
        };

        await producer.SendAsync(new[]
        {
            KeyedMessage.Create(topic, 0, "Message to partition 0"),
            KeyedMessage.Create(topic, 1, "Message to partition 1")
        }, CancellationToken.None);

        for (var partition = 0; partition < consumers.Length; partition++)
        {
            var messages = await consumers[partition].ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(1));
            var head = messages.First();
            Assert.That(head.Offset, Is.EqualTo(0));
            Assert.That(head.Partition, Is.EqualTo(partition));
            Assert.That(head.Key, Is.EqualTo(partition));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("Message to partition " + partition));
        }
    }
}
/// <summary>
/// Loads up to 200 published messages that are eligible for retry: retry
/// count below the configured limit, matching version, added more than four
/// minutes ago, and currently in Failed or Scheduled state.
/// </summary>
public async Task <IEnumerable <MediumMessage> > GetPublishedMessagesOfNeedRetry()
{
    // NOTE(review): DateTime.Now is local time — confirm the Added column is
    // also written in local time, otherwise the 4-minute window is skewed.
    var fourMinAgo = DateTime.Now.AddMinutes(-4).ToString("O");
    // Interpolated values come from options/constants rather than user input,
    // but parameterized SQL would still be the safer pattern. WITH (readpast)
    // skips rows locked by concurrent retry processors.
    var sql = $"SELECT TOP (200) * FROM {_pubName} WITH (readpast) WHERE Retries<{_capOptions.Value.FailedRetryCount} " +
              $"AND Version='{_capOptions.Value.Version}' AND Added<'{fourMinAgo}' AND (StatusName = '{StatusName.Failed}' OR StatusName = '{StatusName.Scheduled}')";
    var result = new List <MediumMessage>();
    // Reuses the shared connection instead of opening one per call.
    var connection = this.DbConnection;
    // NOTE(review): the reader is never disposed here — verify the return
    // type of ExecuteReaderAsync and consider a using/await using.
    var reader = await connection.ExecuteReaderAsync(sql);
    while (reader.Read())
    {
        // Ordinals assumed: 0=Id, 3=Content(origin), 4=Retries, 5=Added —
        // TODO confirm against the publish-table schema.
        result.Add(new MediumMessage
        {
            DbId = reader.GetInt64(0).ToString(),
            Origin = StringSerializer.DeSerialize(reader.GetString(3)),
            Retries = reader.GetInt32(4),
            Added = reader.GetDateTime(5)
        });
    }
    return(result);
}
/// <summary>
/// Loads up to 200 received messages that are eligible for retry: retry
/// count below the configured limit, matching version, added more than four
/// minutes ago, and currently in Failed or Scheduled state.
/// </summary>
public async Task <IEnumerable <MediumMessage> > GetReceivedMessagesOfNeedRetry()
{
    // NOTE(review): DateTime.Now is local time — confirm the Added column is
    // also stored in local time, otherwise the 4-minute window is skewed.
    var fourMinAgo = DateTime.Now.AddMinutes(-4).ToString("O");
    // Interpolated values come from options/constants rather than user input,
    // but parameterized SQL would still be the safer pattern.
    var sql = $"SELECT * FROM `{_initializer.GetReceivedTableName()}` WHERE `Retries`<{_capOptions.Value.FailedRetryCount} AND `Version`='{_capOptions.Value.Version}' AND `Added`<'{fourMinAgo}' AND (`StatusName` = '{StatusName.Failed}' OR `StatusName` = '{StatusName.Scheduled}') LIMIT 200;";
    var result = new List <MediumMessage>();
    await using var connection = new MySqlConnection(_options.Value.ConnectionString);
    // NOTE(review): reader.Read() blocks synchronously inside an async method;
    // consider ReadAsync. Also verify the reader gets disposed.
    var reader = await connection.ExecuteReaderAsync(sql);
    while (reader.Read())
    {
        // Ordinals assumed: 0=Id, 4=Content(origin), 5=Retries, 6=Added —
        // TODO confirm against the receive-table schema.
        result.Add(new MediumMessage
        {
            DbId = reader.GetInt64(0).ToString(),
            Origin = StringSerializer.DeSerialize(reader.GetString(4)),
            Retries = reader.GetInt32(5),
            Added = reader.GetDateTime(6)
        });
    }
    return(result);
}
public void ComplexDeserializeTest()
{
    // Round-trip a deeply nested object graph through the string serializer
    // and verify the result compares equal to the original.
    var serializer = new StringSerializer();
    var original = new SecondSerializableClass
    {
        FieldA = 5,
        FieldB = 6,
        SomeProperty = 9,
        FieldC = new SecondSerializableClass
        {
            FieldA = 10,
            FieldB = 11,
            FieldValue = "Andrey",
            FieldC = new SecondSerializableClass
            {
                FieldA = 15,
                FieldB = 16,
                SomeProperty = 21,
                FieldC = new SecondSerializableClass
                {
                    FieldA = 15,
                    FieldB = 16,
                    SomeProperty = 21,
                    FieldValue = "Anton"
                },
                FieldValue = "Yuri"
            },
            SomeProperty = 15
        },
        FieldValue = "Sergey"
    };

    var roundTripped = (SecondSerializableClass)serializer.Deserialize(serializer.Serialize(original));

    Assert.IsTrue(original.Equals(roundTripped));
}
public override void Deserialize(BinaryReader r)
{
    // Read order must mirror the matching Serialize implementation.
    base.Deserialize(r);
    requestId = r.ReadInt32();
    data = ComplexDataSerializer.Deserialize(r);
    name = StringSerializer.Deserialize(r);

    // A leading boolean flags whether the list was present (null vs populated).
    if (!r.ReadBoolean())
    {
        datas = null;
    }
    else
    {
        int count = r.ReadInt32();
        var items = new List<SubData>(count);
        for (int i = 0; i < count; i++)
        {
            items.Add(SubDataSerializer.Deserialize(r));
        }
        datas = items;
    }
}
/// <summary>
/// Builds a <see cref="ValueSerializer"/> for exception types. Five fields
/// (class name, message, remote stack trace, stack trace, inner exception)
/// are transferred via the reflection accessors (_className etc.) declared
/// on this class, then the serializer is cached in <paramref name="typeMapping"/>.
/// </summary>
/// <param name="serializer">Owning serializer supplying the field selector options.</param>
/// <param name="type">The concrete exception type being handled.</param>
/// <param name="typeMapping">Cache the built serializer is registered into.</param>
public override ValueSerializer BuildSerializer(Serializer serializer, Type type, ConcurrentDictionary <Type, ValueSerializer> typeMapping)
{
    var exceptionSerializer = new ObjectSerializer(serializer.Options.FieldSelector, type);
    exceptionSerializer.Initialize((stream, session) =>
    {
        // Reader: field order must exactly mirror the writer below.
        var exception = Activator.CreateInstance(type);
        var className = stream.ReadString(session);
        var message = stream.ReadString(session);
        var remoteStackTraceString = stream.ReadString(session);
        var stackTraceString = stream.ReadString(session);
        var innerException = stream.ReadObject(session);
        _className.SetValue(exception, className);
        _message.SetValue(exception, message);
        _remoteStackTraceString.SetValue(exception, remoteStackTraceString);
        _stackTraceString.SetValue(exception, stackTraceString);
        _innerException.SetValue(exception, innerException);
        return(exception);
    }, (stream, exception, session) =>
    {
        // Writer: extract the same five fields and emit them in reader order.
        var className = (string)_className.GetValue(exception);
        var message = (string)_message.GetValue(exception);
        var remoteStackTraceString = (string)_remoteStackTraceString.GetValue(exception);
        var stackTraceString = (string)_stackTraceString.GetValue(exception);
        var innerException = _innerException.GetValue(exception);
        StringSerializer.WriteValueImpl(stream, className, session);
        StringSerializer.WriteValueImpl(stream, message, session);
        StringSerializer.WriteValueImpl(stream, remoteStackTraceString, session);
        StringSerializer.WriteValueImpl(stream, stackTraceString, session);
        stream.WriteObjectWithManifest(innerException, session);
    });
    typeMapping.TryAdd(type, exceptionSerializer);
    return(exceptionSerializer);
}
static void Main(string[] args)
{
    // Journal-backed command REPL: replay persisted commands, then accept
    // interactive queries until "exit" (or end of input).
    Console.WriteLine("Hello World!");
    var parser = new CommandParser();
    var commandSerializer = new StringSerializer(parser);
    var serializer = new FileReparsingSerializer("journal.txt", commandSerializer);

    Console.WriteLine("Loading");
    var commands = serializer.Load();
    var journal = new Journal(commands);
    var engine = new Engine(journal);
    Console.WriteLine("Ready");

    while (true)
    {
        var line = Console.ReadLine();
        // BUG FIX: ReadLine() returns null at end of input (e.g. redirected
        // stdin); the original dereferenced it unconditionally via .Trim()
        // and crashed with a NullReferenceException.
        if (line == null)
        {
            break;
        }

        var query = line.Trim();
        if (string.Equals(query, "exit", StringComparison.InvariantCultureIgnoreCase))
        {
            break;
        }

        var command = parser.Parse(query, DateTime.Now);
        if (command == null)
        {
            Console.WriteLine("Error parsing query");
        }
        else
        {
            // Record first, then execute, then persist to the journal file.
            journal.Add(command);
            engine.Invoke(command);
            serializer.Save(command);
        }
    }

    Console.WriteLine("Goodbye");
}
public string Serialize(object obj, Type type, StringSerializer stringSerializer)
{
    // Null lists serialize to the sentinel "unknown" token.
    if (obj == null)
    {
        return StringSerializer.Unknown;
    }

    builder.Clear();
    var items = (IList)obj;

    // Resolve the generic element type so each element can be dispatched
    // back through the shared serializer.
    var elementType = ReflectionHelper.GetGenericListType(type);
    if (CanOperate(elementType))
    {
        // presumably CanOperate(elementType) detects a collection element,
        // i.e. a nested list/array — TODO confirm.
        throw new InvalidOperationException("This serializer does not support nested arrays!!!");
    }

    foreach (var element in items)
    {
        builder.AppendLine(stringSerializer.Serialize(element, elementType));
    }

    return builder.ToString();
}
/// <summary>
/// Exports a short string list of Id, Email, Name separated by |
/// </summary>
/// <returns>The serialized representation of this instance.</returns>
public override string ToString() => StringSerializer.SerializeObject(this);
public SchematicRepository(EngineRepositoryContext root, StringSerializer stringSerializer)
    : base(root)
{
    // Keep the serializer used for schematic persistence.
    _StringSerializer = stringSerializer;
}
public object Deserialize(string value, Type type, StringSerializer stringSerializer)
{
    // Strip the trailing 3-character marker (presumably the "boe" suffix the
    // matching Serialize appends — TODO confirm).
    return value.Remove(value.Length - 3);
}
public string Serialize(object obj, Type type, StringSerializer stringSerializer)
{
    // Append the "boe" marker to the raw string value.
    var text = (string)obj;
    return text + "boe";
}
/// <summary>
/// Serializes the specified object into a name/value dictionary.
/// </summary>
/// <param name="obj">The object to serialize.</param>
/// <param name="serializer">The owning serializer driving this call.</param>
/// <returns>A <see cref="Dictionary{TKey,TValue}"/> instance</returns>
public abstract IDictionary <string, object> Serialize(object obj, StringSerializer serializer);
public async Task TestProducing3MessagesAllowsTheConsumerToChooseTheCorrectMessage()
{
    // Produce "1","2","3" once, then verify each offset-selection strategy
    // (Earliest, Last, Next, Specified) starts reading at the right place.
    var valueSerializer = new StringSerializer();

    using (var temporaryTopic = testCluster.CreateTemporaryTopic())
    using (var brokers = new KafkaBrokers(testCluster.CreateBrokerUris()))
    {
        var topic = temporaryTopic.Name;

        {
            var producer = KafkaProducer.Create(brokers, valueSerializer);
            await producer.SendAsync(new[]
            {
                KeyedMessage.Create(topic, "1"),
                KeyedMessage.Create(topic, "2"),
                KeyedMessage.Create(topic, "3"),
            }, CancellationToken.None);
        }

        {
            // Earliest: sees all three messages, starting at offset 0.
            var earliest = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.Earliest });
            var messages = await earliest.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(3));
            var head = messages.First();
            Assert.That(head.Key, Is.Null);
            Assert.That(head.Offset, Is.EqualTo(0));
            Assert.That(head.Partition, Is.EqualTo(0));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("1"));
        }

        {
            // Last: sees only the final message at offset 2.
            var latest = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.Last });
            var messages = await latest.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(1));
            var head = messages.First();
            Assert.That(head.Key, Is.Null);
            Assert.That(head.Offset, Is.EqualTo(2));
            Assert.That(head.Partition, Is.EqualTo(0));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("3"));
        }

        {
            // Next: nothing new has been produced, so nothing is received.
            var next = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.Next });
            var messages = await next.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(0));
        }

        {
            // Specified: starts exactly at offset 1, so "2" and "3" remain.
            var specified = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.Specified, Offset = 1 });
            var messages = await specified.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(2));
            var head = messages.First();
            Assert.That(head.Key, Is.Null);
            Assert.That(head.Offset, Is.EqualTo(1));
            Assert.That(head.Partition, Is.EqualTo(0));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("2"));
        }
    }
}
private ClassInfo<T> RegisterInternal<T>()
{
    // Cached per closed generic type: build the serializer list only once.
    if (ClassInfo<T>.Instance != null)
    {
        return ClassInfo<T>.Instance;
    }

    var type = typeof(T);
    var properties = type.GetProperties(
        BindingFlags.Instance |
        BindingFlags.Public |
        BindingFlags.GetProperty |
        BindingFlags.SetProperty);

    var serializers = new List<FastCall<T>>();
    foreach (var property in properties)
    {
        var propertyType = property.PropertyType;

        // Determine the element type and call shape: scalar, array or List<>.
        var elementType = propertyType.IsArray ? propertyType.GetElementType() : propertyType;
        var callType = propertyType.IsArray ? CallType.Array : CallType.Basic;
        if (propertyType.IsGenericType && propertyType.GetGenericTypeDefinition() == typeof(List<>))
        {
            elementType = propertyType.GetGenericArguments()[0];
            callType = CallType.List;
        }

        // Only properties with both a public getter and setter participate.
        var getMethod = property.GetGetMethod();
        var setMethod = property.GetSetMethod();
        if (getMethod == null || setMethod == null)
        {
            continue;
        }

        FastCall<T> serializer = null;
        if (propertyType.IsEnum)
        {
            // Enums are serialized via their underlying integral type.
            var underlyingType = Enum.GetUnderlyingType(propertyType);
            if (underlyingType == typeof(byte))
            {
                serializer = new EnumByteSerializer<T>(property, propertyType);
            }
            else if (underlyingType == typeof(int))
            {
                serializer = new EnumIntSerializer<T>(property, propertyType);
            }
            else
            {
                throw new InvalidTypeException("Not supported enum underlying type: " + underlyingType.Name);
            }
        }
        else if (elementType == typeof(string))
        {
            serializer = new StringSerializer<T>(_maxStringLength);
        }
        else if (elementType == typeof(bool))
        {
            serializer = new BoolSerializer<T>();
        }
        else if (elementType == typeof(byte))
        {
            serializer = new ByteSerializer<T>();
        }
        else if (elementType == typeof(sbyte))
        {
            serializer = new SByteSerializer<T>();
        }
        else if (elementType == typeof(short))
        {
            serializer = new ShortSerializer<T>();
        }
        else if (elementType == typeof(ushort))
        {
            serializer = new UShortSerializer<T>();
        }
        else if (elementType == typeof(int))
        {
            serializer = new IntSerializer<T>();
        }
        else if (elementType == typeof(uint))
        {
            serializer = new UIntSerializer<T>();
        }
        else if (elementType == typeof(long))
        {
            serializer = new LongSerializer<T>();
        }
        else if (elementType == typeof(ulong))
        {
            serializer = new ULongSerializer<T>();
        }
        else if (elementType == typeof(float))
        {
            serializer = new FloatSerializer<T>();
        }
        else if (elementType == typeof(double))
        {
            serializer = new DoubleSerializer<T>();
        }
        else if (elementType == typeof(char))
        {
            serializer = new CharSerializer<T>();
        }
        else if (elementType == typeof(IPEndPoint))
        {
            serializer = new IPEndPointSerializer<T>();
        }
        else
        {
            // Fall back to user-registered custom types.
            CustomType customType;
            _registeredTypes.TryGetValue(elementType, out customType);
            if (customType != null)
            {
                serializer = customType.Get<T>();
            }
        }

        if (serializer != null)
        {
            serializer.Init(getMethod, setMethod, callType);
            serializers.Add(serializer);
        }
        else
        {
            throw new InvalidTypeException("Unknown property type: " + propertyType.FullName);
        }
    }

    ClassInfo<T>.Instance = new ClassInfo<T>(serializers);
    return ClassInfo<T>.Instance;
}
public override void Deserialize(BinaryReader r)
{
    // Read order must mirror the matching Serialize: room name, then message.
    base.Deserialize(r);
    roomName = StringSerializer.Deserialize(r);
    message = StringSerializer.Deserialize(r);
}
public void Get_Size_From_Value_Returns_String_Size_In_And_Size_Field_Bytes()
{
    // Serialized size = character payload plus the size-field prefix
    // (4 chars -> 10, 6 chars -> 14).
    Assert.Equal(10, StringSerializer.GetSizeFromValue("hell"));
    Assert.Equal(14, StringSerializer.GetSizeFromValue("      "));
}
public override void Deserialize(BinaryReader r)
{
    // Base payload first, then this type's single string field.
    base.Deserialize(r);
    name = StringSerializer.Deserialize(r);
}
public async Task TestSimpleProducerWorksOk()
{
    // Smoke test: sending one message to a fresh topic must complete cleanly.
    var valueSerializer = new StringSerializer();

    using (var temporaryTopic = testCluster.CreateTemporaryTopic())
    using (var brokers = new KafkaBrokers(testCluster.CreateBrokerUris()))
    {
        var producer = KafkaProducer.Create(brokers, valueSerializer);
        await producer.SendAsync(KeyedMessage.Create(temporaryTopic.Name, "Message"), CancellationToken.None);
    }
}
/// <summary>
/// Pure pass-through constructor; all state lives in the base session.
/// </summary>
public ChronoSession(StringSerializer stringSerializer, Uri apiKeyAuthAddress, Uri baseAddress, string apiKey, string token)
    : base(stringSerializer, apiKeyAuthAddress, baseAddress, apiKey, token)
{
}
/// <summary>
/// Returns a serializer that can be used to serialize an object
/// of type <paramref name="objectType"/>.
/// <note>
/// TODO: Add support for caching.
/// </note>
/// </summary>
/// <param name="objectType">The type of object to be serialized.</param>
/// <param name="ctx">The serialization context attached to the returned serializer.</param>
public virtual ISerializer Build(Type objectType, SerializationContext ctx)
{
    if (objectType != null)
    {
        ISerializer s;
        // First IsAssignableFrom match wins — keep more specific types before
        // less specific ones if new branches are added.
        if (typeof(Attachment).IsAssignableFrom(objectType))
        {
            s = new AttachmentSerializer();
        }
        else if (typeof(Attendee).IsAssignableFrom(objectType))
        {
            s = new AttendeeSerializer();
        }
        else if (typeof(IDateTime).IsAssignableFrom(objectType))
        {
            s = new DateTimeSerializer();
        }
        else if (typeof(FreeBusyEntry).IsAssignableFrom(objectType))
        {
            s = new FreeBusyEntrySerializer();
        }
        else if (typeof(GeographicLocation).IsAssignableFrom(objectType))
        {
            s = new GeographicLocationSerializer();
        }
        else if (typeof(Organizer).IsAssignableFrom(objectType))
        {
            s = new OrganizerSerializer();
        }
        else if (typeof(Period).IsAssignableFrom(objectType))
        {
            s = new PeriodSerializer();
        }
        else if (typeof(PeriodList).IsAssignableFrom(objectType))
        {
            s = new PeriodListSerializer();
        }
        else if (typeof(RecurrencePattern).IsAssignableFrom(objectType))
        {
            s = new RecurrencePatternSerializer();
        }
        else if (typeof(RequestStatus).IsAssignableFrom(objectType))
        {
            s = new RequestStatusSerializer();
        }
        else if (typeof(StatusCode).IsAssignableFrom(objectType))
        {
            s = new StatusCodeSerializer();
        }
        else if (typeof(Trigger).IsAssignableFrom(objectType))
        {
            s = new TriggerSerializer();
        }
        else if (typeof(UtcOffset).IsAssignableFrom(objectType))
        {
            s = new UtcOffsetSerializer();
        }
        else if (typeof(WeekDay).IsAssignableFrom(objectType))
        {
            s = new WeekDaySerializer();
        }
        // Default to a string serializer, which simply calls
        // ToString() on the value to serialize it.
        else
        {
            s = new StringSerializer();
        }
        // Set the serialization context
        s.SerializationContext = ctx;
        return(s);
    }
    return(null);
}
/// <summary>
/// Write a string value to the given file.
/// </summary>
/// <param name="value">Value to write</param>
/// <param name="path">DiskCache relative path at which to write</param>
public static void Write(string value, string path)
{
    new StringSerializer(value).Write(path);
}
public IInventoryProperties GetProperties(InventoryType type, string str)
{
    // Look up the props type registered for this inventory type; some
    // inventory types carry no properties at all.
    var propsType = ALData.InventoryTypes[(int)type].PropsType;
    if (propsType == null)
    {
        return null;
    }

    return (IInventoryProperties)StringSerializer.Deserialize(propsType, str);
}
/// <summary>
/// Deserializes the specified dictionary into an instance of <paramref name="type"/>.
/// </summary>
/// <param name="dictionary">The name/value dictionary to deserialize.</param>
/// <param name="type">The target type.</param>
/// <param name="serializer">The owning serializer driving this call.</param>
/// <returns>An <see cref="object"/> representing <paramref name="dictionary"/></returns>
public abstract object Deserialize(IDictionary <string, object> dictionary, Type type, StringSerializer serializer);
/// <summary>
/// Parses a comma-separated serialized license back into an instance.
/// </summary>
public static LicenseInformation FromString(string data) =>
    StringSerializer.Deserialize<LicenseInformation>(data, ",");
public async Task TestProducing3MessagesAllowsTheConsumerToCommitAndRestart()
{
    // Produce "1","2","3", then walk the commit protocol: each new consumer
    // with NextUncommitted must resume right after the last committed offset.
    var valueSerializer = new StringSerializer();

    using (var temporaryTopic = testCluster.CreateTemporaryTopic())
    using (var brokers = new KafkaBrokers(testCluster.CreateBrokerUris()))
    {
        var topic = temporaryTopic.Name;

        {
            var producer = KafkaProducer.Create(brokers, valueSerializer);
            await producer.SendAsync(new[]
            {
                KeyedMessage.Create(topic, "1"),
                KeyedMessage.Create(topic, "2"),
                KeyedMessage.Create(topic, "3"),
            }, CancellationToken.None);
        }

        {
            // No commit yet: falls back to Earliest and sees all three.
            var noPreviousCommits = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.NextUncommitted, FailureOffsetSelection = OffsetSelectionStrategy.Earliest });
            var messages = await noPreviousCommits.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(3));
            var head = messages.First();
            Assert.That(head.Key, Is.Null);
            Assert.That(head.Offset, Is.EqualTo(0));
            Assert.That(head.Partition, Is.EqualTo(0));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("1"));
            await noPreviousCommits.CommitAsync(new[]
            {
                new TopicPartitionOffset { Topic = topic, Partition = 0, Offset = 0 }
            }, CancellationToken.None);
        }

        {
            // Offset 0 committed: resume at offset 1, two messages remain.
            var previousCommit = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.NextUncommitted, FailureOffsetSelection = OffsetSelectionStrategy.Earliest });
            var messages = await previousCommit.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(2));
            var head = messages.First();
            Assert.That(head.Key, Is.Null);
            Assert.That(head.Offset, Is.EqualTo(1));
            Assert.That(head.Partition, Is.EqualTo(0));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("2"));
        }

        {
            // Receiving without committing must not advance the stored offset.
            var previousCommitAgain = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.NextUncommitted, FailureOffsetSelection = OffsetSelectionStrategy.Earliest });
            var messages = await previousCommitAgain.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(2));
            var head = messages.First();
            Assert.That(head.Key, Is.Null);
            Assert.That(head.Offset, Is.EqualTo(1));
            Assert.That(head.Partition, Is.EqualTo(0));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("2"));
            await previousCommitAgain.CommitAsync(new[]
            {
                new TopicPartitionOffset { Topic = topic, Partition = 0, Offset = 1 }
            }, CancellationToken.None);
        }

        {
            // Offset 1 committed: only the final message remains.
            var secondCommit = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.NextUncommitted, FailureOffsetSelection = OffsetSelectionStrategy.Earliest });
            var messages = await secondCommit.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(1));
            var head = messages.First();
            Assert.That(head.Key, Is.Null);
            Assert.That(head.Offset, Is.EqualTo(2));
            Assert.That(head.Partition, Is.EqualTo(0));
            Assert.That(head.Topic, Is.EqualTo(topic));
            Assert.That(head.Value, Is.EqualTo("3"));
            await secondCommit.CommitAsync(new[]
            {
                new TopicPartitionOffset { Topic = topic, Partition = 0, Offset = 2 }
            }, CancellationToken.None);
        }

        {
            // Everything committed: nothing left to receive.
            var thirdCommit = KafkaConsumer.Create(defaultConsumerGroup, brokers, valueSerializer,
                new TopicSelector { Partition = 0, Topic = topic, DefaultOffsetSelection = OffsetSelectionStrategy.NextUncommitted, FailureOffsetSelection = OffsetSelectionStrategy.Earliest });
            var messages = await thirdCommit.ReceiveAsync(CancellationToken.None);
            Assert.That(messages, Is.Not.Null);
            Assert.That(messages, Has.Count.EqualTo(0));
        }
    }
}
public override void Serialize(BinaryWriter w) { base.Serialize(w); StringSerializer.Serialize(name, w); }
/// <summary>
/// Returns a serializer that can be used to serialize an object
/// of type <paramref name="objectType"/>.
/// <note>
/// TODO: Add support for caching.
/// </note>
/// </summary>
/// <param name="objectType">The type of object to be serialized.</param>
/// <param name="ctx">The serialization context passed to each serializer.</param>
public virtual ISerializer Build(Type objectType, SerializationContext ctx)
{
    if (objectType != null)
    {
        ISerializer s;
        // First IsAssignableFrom match wins — ordering matters since several
        // branches overlap in the type hierarchy.
        if (typeof(Calendar).IsAssignableFrom(objectType))
        {
            s = new CalendarSerializer(ctx);
        }
        else if (typeof(ICalendarComponent).IsAssignableFrom(objectType))
        {
            // Events get a dedicated serializer; other components share one.
            s = typeof(CalendarEvent).IsAssignableFrom(objectType)
                ? new EventSerializer(ctx)
                : new ComponentSerializer(ctx);
        }
        else if (typeof(ICalendarProperty).IsAssignableFrom(objectType))
        {
            s = new PropertySerializer(ctx);
        }
        else if (typeof(CalendarParameter).IsAssignableFrom(objectType))
        {
            s = new ParameterSerializer(ctx);
        }
        else if (typeof(string).IsAssignableFrom(objectType))
        {
            s = new StringSerializer(ctx);
        }
#if NET_4
        else if (objectType.IsEnum)
        {
            s = new EnumSerializer(objectType, ctx);
        }
#else
        else if (objectType.GetTypeInfo().IsEnum)
        {
            s = new EnumSerializer(objectType, ctx);
        }
#endif
        else if (typeof(TimeSpan).IsAssignableFrom(objectType))
        {
            s = new TimeSpanSerializer(ctx);
        }
        else if (typeof(int).IsAssignableFrom(objectType))
        {
            s = new IntegerSerializer(ctx);
        }
        else if (typeof(Uri).IsAssignableFrom(objectType))
        {
            s = new UriSerializer(ctx);
        }
        else if (typeof(ICalendarDataType).IsAssignableFrom(objectType))
        {
            // Data types are delegated to the dedicated factory.
            s = _mDataTypeSerializerFactory.Build(objectType, ctx);
        }
        // Default to a string serializer, which simply calls
        // ToString() on the value to serialize it.
        else
        {
            s = new StringSerializer(ctx);
        }
        return(s);
    }
    return(null);
}
public override void Serialize(BinaryWriter w) { base.Serialize(w); StringSerializer.Serialize(roomName, w); StringSerializer.Serialize(message, w); }
/// <summary>
/// Deserializes the specified primitive value into an instance of <paramref name="type"/>.
/// </summary>
/// <param name="primitiveValue">The primitive value to convert.</param>
/// <param name="type">The target type.</param>
/// <param name="serializer">The owning serializer driving this call.</param>
/// <returns>The deserialized <paramref name="primitiveValue"/></returns>
public abstract object Deserialize(object primitiveValue, Type type, StringSerializer serializer);
/// <summary>
/// Command-line driver: parses options, configures string serialization for
/// every topic, then runs a produce benchmark, a consume session, or (with
/// --mix) both, until a key is pressed.
/// </summary>
private static void Main(string[] args)
{
    Mode mode = Mode.Profile;
    bool mix = false;
    var configuration = new Configuration { ConsumeBatchSize = 100 };

    // Ugly command line parsing
    string curOpt = "";
    try
    {
        bool seeds = false;
        bool topics = false;
        for (int i = 0; i < args.Length; ++i)
        {
            curOpt = args[i];
            switch (args[i])
            {
                // Flag options (no value argument).
                case "--global": configuration.BatchStrategy = BatchStrategy.Global; break;
                case "--mix": mix = true; break;
                case "--stress": mode = Mode.Stress; break;
                case "--hard": mode = Mode.StressHard; break;
                case "--discard": configuration.ErrorStrategy = ErrorStrategy.Discard; break;
                case "--retry": configuration.ErrorStrategy = ErrorStrategy.Retry; break;
                case "--gzip": configuration.CompressionCodec = CompressionCodec.Gzip; break;
                case "--snappy": configuration.CompressionCodec = CompressionCodec.Snappy; break;
                case "--no-ack": configuration.RequiredAcks = RequiredAcks.None; break;
                case "--all-sync-ack": configuration.RequiredAcks = RequiredAcks.AllInSyncReplicas; break;
                // Options below consume the following argument (note the ++i).
                case "--ttl": configuration.MessageTtl = TimeSpan.FromSeconds(int.Parse(args[++i])); break;
                case "--batch": configuration.ProduceBatchSize = int.Parse(args[++i]); break;
                case "--time": configuration.ProduceBufferingTime = TimeSpan.FromMilliseconds(int.Parse(args[++i])); break;
                case "--max-messages": configuration.MaxBufferedMessages = int.Parse(args[++i]); break;
                case "--topics": topics = true; _topics = args[++i].Split(','); break;
                case "--seeds": seeds = true; configuration.Seeds = args[++i]; break;
                case "--clientid": configuration.ClientId = args[++i]; break;
                case "--concurrency": configuration.MaximumConcurrency = int.Parse(args[++i]); break;
                case "--send-buffer": configuration.SendBufferSize = int.Parse(args[++i]); break;
                case "--receive-buffer": configuration.ReceiveBufferSize = int.Parse(args[++i]); break;
                case "--timeout": configuration.RequestTimeoutMs = int.Parse(args[++i]); break;
                case "--min-bytes": configuration.FetchMinBytes = int.Parse(args[++i]); break;
                case "--max-wait": configuration.FetchMaxWaitTime = int.Parse(args[++i]); break;
                case "--max-bytes": configuration.FetchMessageMaxBytes = int.Parse(args[++i]); break;
                case "--delay": configuration.NumberOfMessagesBeforeRoundRobin = int.Parse(args[++i]); break;
                case "--consume":
                {
                    // "--consume <fromOffset> <p,p;p,p;...>": comma-separated
                    // partition lists, one semicolon-separated group per topic.
                    _consumeFrom = long.Parse(args[++i]);
                    var p = args[++i].Split(';');
                    _partitions = new int[p.Length][];
                    for (int j = 0; j < _partitions.Length; ++j)
                    {
                        _partitions[j] = p[j].Split(',').Select(int.Parse).ToArray();
                    }
                }
                break;
            }
        }
        // Minimal error management
        if (args.Length < 1 || !seeds || !topics)
        {
            throw new ArgumentException();
        }
    }
    catch
    {
        // Minimal error management
        Console.WriteLine("Syntax error in option {0}", curOpt);
        Usage();
        Environment.Exit(-1);
    }

    // The same string serializer/deserializer is used for keys and values.
    var serializer = new StringSerializer();
    var deserializer = new StringDeserializer();
    var serializationConfig = new SerializationConfig() { SerializeOnProduce = true };
    foreach (var topic in _topics)
    {
        serializationConfig.SetSerializersForTopic(topic, serializer, serializer);
        serializationConfig.SetDeserializersForTopic(topic, deserializer, deserializer);
    }
    configuration.SerializationConfig = serializationConfig;
    var cluster = new ClusterClient(configuration, new ConsoleLogger());

    if (_partitions == null)
    {
        // Produce-only mode: run until a key is pressed, then stop and drain.
        var task = Start(mode, cluster);
        Console.ReadKey();
        _running = false;
        Console.ReadKey();
        task.Wait();
    }
    else
    {
        // Consume mode: attach a sampled console printer per topic and start
        // consuming the requested partitions.
        int i = 0;
        foreach (var topic in _topics)
        {
            var capturedTopic = topic;
            cluster.Messages.Where(kr => kr.Topic == capturedTopic).Sample(TimeSpan.FromMilliseconds(15))
                .Subscribe(kr => Console.WriteLine("{0}/{1} {2}: {3}", kr.Topic, kr.Partition, kr.Offset, kr.Value as string));
            foreach (var p in _partitions[i])
            {
                cluster.Consume(topic, p, _consumeFrom);
            }
            ++i;
        }
        Task task = null;
        if (mix)
        {
            // Optionally produce concurrently with consuming.
            task = Start(mode, cluster);
        }
        Console.ReadKey();
        i = 0;
        foreach (var topic in _topics)
        {
            foreach (var p in _partitions[i])
            {
                // A negative partition number stops the whole topic.
                if (p < 0)
                {
                    cluster.StopConsume(topic);
                }
                else
                {
                    cluster.StopConsume(topic, p);
                }
            }
            ++i;
        }
        if (task != null)
        {
            _running = false;
            task.Wait();
        }
    }

    Console.WriteLine(cluster.Statistics);
    Console.ReadKey();
    cluster.Dispose();
}
/// <summary>
/// Serializes the specified object into its primitive representation.
/// </summary>
/// <param name="obj">The object to serialize.</param>
/// <param name="serializer">The owning serializer driving this call.</param>
/// <returns>The serialized <see cref="object"/></returns>
public abstract object Serialize(object obj, StringSerializer serializer);
public void Get_Size_From_Buffer_Returns_String_Size_And_Size_Field_In_Bytes()
{
    // A buffer whose size field reads 8 reports a total of 10 bytes —
    // payload plus the size-field overhead.
    var buffer = new byte[] { 8, 0, 0, 0 };
    Assert.Equal(10, StringSerializer.GetSizeFromBuffer(buffer, 0));
}
/// <summary>
/// Write a DateTime value to the given file.
/// </summary>
/// <param name="value">Value to write</param>
/// <param name="path">DiskCache relative path at which to write</param>
public static void Write(DateTime value, string path)
{
    // Round-trippable ISO-8601 ("o") in UTC keeps the stored value culture-
    // and timezone-independent.
    var text = value.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture);
    new StringSerializer(text).Write(path);
}