/// <summary> Read state data function for this storage provider. </summary>
/// <see cref="IStorageProvider#ReadStateAsync"/>
public async Task ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
{
    var timer = Stopwatch.StartNew();
    var key = GetKey(grainReference);
    var db = await GetDatabase().ConfigureAwait(false);
    try
    {
        // Current layout: a Redis hash with exactly two fields, "etag"
        // (concurrency token) and "data" (serialized state).
        // Perform presumably wraps the call with retries (3 attempts) — TODO confirm.
        var hashEntries = await Perform("Reading", 3, () => db.HashGetAllAsync(key)).ConfigureAwait(false);
        if (hashEntries.Length == 2)
        {
            var etagEntry = hashEntries.Single(e => e.Name == "etag");
            var valueEntry = hashEntries.Single(e => e.Name == "data");
            // Two on-disk formats are supported: JSON text or binary Orleans serialization.
            if (_options.UseJson)
            {
                grainState.State = JsonConvert.DeserializeObject(valueEntry.Value, grainState.State.GetType(), _jsonSettings);
            }
            else
            {
                grainState.State = _serializationManager.DeserializeFromByteArray<object>(valueEntry.Value);
            }
            grainState.ETag = etagEntry.Value;
        }
        else
        {
            // No stored state (or unexpected field count): leave State untouched
            // and hand out a fresh ETag.
            grainState.ETag = Guid.NewGuid().ToString();
        }
        timer.Stop();
        _logger.LogInformation("Reading: GrainType={0} Pk={1} Grainid={2} ETag={3} from Database={4}, finished in {5} ms",
            grainType, key, grainReference, grainState.ETag, db.Database, timer.Elapsed.TotalMilliseconds.ToString("0.00"));
    }
    catch (RedisServerException)
    {
        // Legacy layout: the key holds a plain string value, so HGETALL fails
        // with a server-side error. Read the string form instead and mint a new
        // ETag; per the log message below, the grain is thereby migrated to the
        // ETag-aware layout.
        var stringValue = await Perform("Reading", 3, () => db.StringGetAsync(key)).ConfigureAwait(false);
        if (stringValue.HasValue)
        {
            if (_options.UseJson)
            {
                grainState.State = JsonConvert.DeserializeObject(stringValue, grainState.State.GetType(), _jsonSettings);
            }
            else
            {
                grainState.State = _serializationManager.DeserializeFromByteArray<object>(stringValue);
            }
        }
        grainState.ETag = Guid.NewGuid().ToString();
        timer.Stop();
        _logger.LogInformation("Reading: GrainType={0} Pk={1} Grainid={2} ETag={3} from Database={4}, finished in {5} ms (migrated old Redis data, grain now supports ETag)",
            grainType, key, grainReference, grainState.ETag, db.Database, timer.Elapsed.TotalMilliseconds.ToString("0.00"));
    }
}
/// <summary>
/// Rehydrates an Azure queue message into a batch container, keeping a
/// reference to the original cloud message and stamping the sequence id.
/// </summary>
internal static AzureQueueBatchContainer FromCloudQueueMessage(CloudQueueMessage cloudMsg, long sequenceId)
{
    var container = SerializationManager.DeserializeFromByteArray<AzureQueueBatchContainer>(cloudMsg.AsBytes);
    container.CloudQueueMessage = cloudMsg;
    container.sequenceToken = new EventSequenceToken(sequenceId);
    return container;
}
/// <summary>
/// Dequeues up to <paramref name="maxCount"/> raw payloads from the backing
/// queue, deserializes each into a batch container, and assigns monotonically
/// increasing sequence tokens.
/// </summary>
public async Task<IList<IBatchContainer>> GetQueueMessagesAsync(int maxCount)
{
    var rawPayloads = new List<byte[]>();
    var queueLength = await _queueProvider.Length(Id);
    var batchLimit = Math.Min(maxCount, queueLength);
    for (var index = 0; index < batchLimit; index++)
    {
        var payload = await _queueProvider.Dequeue(Id);
        if (payload == null)
        {
            _logger.AutoWarn("The queue returned a null message. This shouldn't happen. Ignored.");
        }
        else
        {
            rawPayloads.Add(payload);
        }
    }
    var containers = rawPayloads
        .Select(payload => SerializationManager.DeserializeFromByteArray<PlainBatchContainer>(payload))
        .ToList();
    foreach (var container in containers)
    {
        container.RealToken = new SimpleSequenceToken(_sequenceId++);
    }
    _logger.AutoVerbose($"Read {containers.Count} batch containers");
    // TODO: Is this an expensive call?
    return containers.ToList<IBatchContainer>();
}
/// <summary>
/// Converts a consumed Kafka record into a batch container. Records from
/// external (non-Orleans) topics are wrapped as raw UTF-8 text; all others
/// are deserialized as Orleans batch containers.
/// </summary>
public static KafkaBatchContainer ToBatchContainer(
    this ConsumeResult<byte[], byte[]> result,
    SerializationManager serializationManager,
    QueueProperties queueProperties)
{
    var token = new EventSequenceTokenV2(result.Offset.Value);
    if (queueProperties.IsExternal)
    {
        // External producers write plain text; derive the stream id from the key.
        var streamKey = Encoding.UTF8.GetString(result.Key);
        return new KafkaBatchContainer(
            StreamProviderUtils.GenerateStreamGuid(streamKey),
            queueProperties.Namespace,
            new List<object> { Encoding.UTF8.GetString(result.Value) },
            null,
            isExternalBatch: true,
            token,
            result.TopicPartitionOffset);
    }
    var container = serializationManager.DeserializeFromByteArray<KafkaBatchContainer>(result.Value);
    if (container.SequenceToken == null)
    {
        // Preserve a token already embedded in the payload; otherwise use the offset.
        container.SequenceToken = token;
    }
    container.TopicPartitionOffSet = result.TopicPartitionOffset;
    return container;
}
/// <summary>
/// Rehydrates a RabbitMQ message body into a batch container and stamps it
/// with the given sequence id.
/// </summary>
public IBatchContainer FromRabbitMQMessage(RabbitMQMessage message, long sequenceId)
{
    var container = _serializationManager.DeserializeFromByteArray<RabbitMQBatchContainer>(message.Body);
    container.RealSequenceToken = new EventSequenceTokenV2(sequenceId);
    return container;
}
/// <summary>
/// Creates a batch container from a cloud queue message
/// </summary>
public IBatchContainer FromCloudQueueMessage(CloudQueueMessage cloudMsg, long sequenceId)
{
    var container = SerializationManager.DeserializeFromByteArray<AzureQueueBatchContainerV2>(cloudMsg.AsBytes);
    container.RealSequenceToken = new EventSequenceTokenV2(sequenceId);
    return container;
}
/// <summary> Read state data function for this storage provider. </summary>
/// <see cref="IStorageProvider#ReadStateAsync"/>
public async Task ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
{
    var primaryKey = grainReference.ToKeyString();
    if (Log.IsVerbose3)
    {
        Log.Verbose3((int)ProviderErrorCode.RedisStorageProvider_ReadingData, "Reading: GrainType={0} Pk={1} Grainid={2} from Database={3}", grainType, primaryKey, grainReference, redisDatabase.Database);
    }
    // State is stored as a plain Redis string keyed by the grain reference.
    RedisValue value = await redisDatabase.StringGetAsync(primaryKey);
    if (value.HasValue)
    {
        // Two on-disk formats are supported: JSON text or binary Orleans serialization.
        if (useJsonFormat)
        {
            grainState.State = JsonConvert.DeserializeObject(value, grainState.State.GetType(), jsonSettings);
        }
        else
        {
            grainState.State = serializationManager.DeserializeFromByteArray <object>(value);
        }
    }
    // TODO : Fix this
    // NOTE(review): a fresh GUID is generated on every read, so the ETag never
    // reflects the stored version — optimistic concurrency checks on write can
    // never detect a conflict. Confirm whether the ETag should be persisted.
    grainState.ETag = Guid.NewGuid().ToString();
}
/// <summary>
/// Reads grain state from Redis. The value is stored as a plain string keyed
/// by the grain reference; the ETag is the storage key itself.
/// </summary>
public async Task ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
{
    // Lazily establish the Redis connection on first use.
    if (redisDatabase == null)
    {
        await ConnectToDatabase();
    }
    string key = grainReference.ToKeyString();
    logger.Debug((int)RedisProviderLogCode.ReadingRedisData, "Reading: GrainType={0} Pk={1} Grainid={2} from Database={3}", grainType, key, grainReference, redisDatabase.Database);
    RedisValue stored = await redisDatabase.StringGetAsync(key);
    if (stored.HasValue)
    {
        // Deserialize using whichever wire format the provider is configured for.
        grainState.State = options.UseJsonFormat
            ? JsonConvert.DeserializeObject(stored, grainState.State.GetType(), jsonSettings)
            : serializationManager.DeserializeFromByteArray<object>(stored);
    }
    grainState.ETag = key;
}
/// <summary>
/// Replay all events since the last snapshot.
/// </summary>
/// <returns>
/// A <see cref="Task"/> representing the work performed.
/// </returns>
public async Task ReplayUnappliedEvents()
{
    // Re-entrancy guard: if a replay is already running, do nothing.
    if (!this.IsBeingReplayed)
    {
        try
        {
            this.IsBeingReplayed = true;
            // Start reading from the last event id already applied.
            var currentEventId = this.LastJournaledEventId;
            await this.journal.ReadFrom(currentEventId).ForEachAsync(
                async @event =>
                {
                    // Update internal state.
                    // Track the highest event id seen so far, so a subsequent
                    // replay resumes from the right position.
                    if (@event.EventId > this.LastJournaledEventId)
                    {
                        this.LastJournaledEventId = @event.EventId;
                    }

                    // Deserialize the event.
                    var request = SerializationManager.DeserializeFromByteArray <MethodInvocation>(@event.Event);

                    // Apply the event.
                    await GrainExecutionHelper.ApplyRequest(this.grain, request);
                });
        }
        finally
        {
            // Always clear the flag, even if replay faulted part-way through.
            this.IsBeingReplayed = false;
        }
    }
}
/// <inheritdoc/>
public IBatchContainer FromPullResponseMessage(PubsubMessage msg, long sequenceId)
{
    var payload = msg.Data.ToByteArray();
    var container = _serializationManager.DeserializeFromByteArray<PubSubBatchContainer>(payload);
    container.RealSequenceToken = new EventSequenceTokenV2(sequenceId);
    return container;
}
/// <inheritdoc />
/// <summary>
/// Reads grain state from Redis. The current layout is a hash with two
/// fields, "etag" and "data"; a <see cref="RedisServerException"/> (e.g.
/// WRONGTYPE from HGETALL) indicates a legacy plain-string value, which is
/// read directly and given a fresh ETag.
/// </summary>
public async Task ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
{
    var key = GetKey(grainReference);
    try
    {
        var hashEntries = await _db.HashGetAllAsync(key);
        // FIX: HashGetAllAsync returns HashEntry[]; use Length instead of the
        // LINQ Count() extension (no enumerator allocation, consistent with the
        // sibling provider implementation).
        if (hashEntries.Length == 2)
        {
            var etagEntry = hashEntries.Single(e => e.Name == "etag");
            var valueEntry = hashEntries.Single(e => e.Name == "data");
            if (_options.UseJson)
            {
                grainState.State = JsonConvert.DeserializeObject(valueEntry.Value, grainState.State.GetType(), _jsonSettings);
            }
            else
            {
                grainState.State = _serializationManager.DeserializeFromByteArray<object>(valueEntry.Value);
            }
            grainState.ETag = etagEntry.Value;
        }
        else
        {
            // No stored state: leave State untouched and hand out a fresh ETag.
            grainState.ETag = Guid.NewGuid().ToString();
        }
    }
    catch (RedisServerException)
    {
        // Legacy plain-string layout; read the raw value and mint a new ETag.
        var stringValue = await _db.StringGetAsync(key);
        if (stringValue.HasValue)
        {
            if (_options.UseJson)
            {
                grainState.State = JsonConvert.DeserializeObject(stringValue, grainState.State.GetType(), _jsonSettings);
            }
            else
            {
                grainState.State = _serializationManager.DeserializeFromByteArray<object>(stringValue);
            }
        }
        grainState.ETag = Guid.NewGuid().ToString();
    }
}
/// <summary>
/// Rehydrates a queue payload into a batch container, recording both the
/// stream sequence id and the broker delivery tag (used for acknowledgement).
/// </summary>
public static RabbitMqBatchContainer FromQueueMessage(SerializationManager serializationManager, byte[] data, long sequenceId, ulong deliveryTag)
{
    var container = serializationManager.DeserializeFromByteArray<RabbitMqBatchContainer>(data);
    container.EventSequenceToken = new EventSequenceToken(sequenceId);
    container.DeliveryTag = deliveryTag;
    return container;
}
/// <summary>
/// Rehydrates a Kafka message value into a batch container stamped with the
/// given sequence id.
/// </summary>
internal static KafkaBatchContainer FromKafkaMessage(Message message, long sequenceId)
{
    var container = SerializationManager.DeserializeFromByteArray<KafkaBatchContainer>(message.Value);
    container._sequenceToken = new EventSequenceToken(sequenceId);
    return container;
}
/// <summary>
/// Rehydrates a Rabbit message body into a batch container, keeping a
/// reference to the original message and stamping the sequence id.
/// </summary>
internal static RabbitMessageQueueBatchContainer FromCloudQueueMessage(RabbitMessage cloudMsg, long sequenceId)
{
    var container = SerializationManager.DeserializeFromByteArray<RabbitMessageQueueBatchContainer>(cloudMsg.Body);
    container.QueueMessage = cloudMsg;
    container._sequenceToken = new EventSequenceToken(sequenceId);
    return container;
}
/// <summary>
/// Deserializes the stored payload as a single event of type
/// <typeparamref name="T"/>, paired with its sequence token.
/// </summary>
public IEnumerable<Tuple<T, StreamSequenceToken>> GetEvents<T>()
{
    var payload = SerializationManager.DeserializeFromByteArray<T>(Payload);
    var token = new EventSequenceTokenV2(SequenceNumber, 0);
    return new[] { Tuple.Create<T, StreamSequenceToken>(payload, token) };
}
/// <summary>
/// Benchmark: serializes the fixture object once, then repeatedly
/// deserializes the resulting bytes to measure deserialization throughput.
/// </summary>
public void DeserializerBenchmark()
{
    var payload = SerializationManager.SerializeToByteArray(poco);
    for (var iteration = 0; iteration < Repeats; iteration++)
    {
        SerializationManager.DeserializeFromByteArray<PocoState>(payload);
    }
}
/// <summary>
/// Rehydrates an SQS message into a batch container. The message body is a
/// JSON envelope whose "payload" property holds the serialized container bytes.
/// </summary>
internal static SQSBatchContainer FromSQSMessage(SerializationManager serializationManager, SQSMessage msg, long sequenceId)
{
    var envelope = JObject.Parse(msg.Body);
    var payloadBytes = envelope["payload"].ToObject<byte[]>();
    var container = serializationManager.DeserializeFromByteArray<SQSBatchContainer>(payloadBytes);
    container.Message = msg;
    container.sequenceToken = new EventSequenceTokenV2(sequenceId);
    return container;
}
/// <summary> Read state data function for this storage provider. </summary>
/// <see cref="IStorageProvider#ReadStateAsync"/>
public async Task ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
{
    var primaryKey = grainReference.ToKeyString();
    if (Log.IsVerbose3)
    {
        Log.Verbose3((int)SimpleSQLServerProviderErrorCodes.SimpleSQLServerProvider_ReadingData, $"Reading: GrainType={grainType} Pk={primaryKey} Grainid={grainReference} from DataSource={this.sqlconnBuilder.DataSource + "." + this.sqlconnBuilder.InitialCatalog}");
    }
    try
    {
        using (var db = new KeyValueDbContext(this.sqlconnBuilder.ConnectionString))
        {
            switch (this.useJsonOrBinaryFormat)
            {
                // Binary is read for both the Binary and Both configurations.
                case StorageFormatEnum.Binary:
                case StorageFormatEnum.Both:
                {
                    var value = await db.KeyValues.Where(s => s.GrainKeyId.Equals(primaryKey)).Select(s => s.BinaryContent).SingleOrDefaultAsync();
                    if (value != null)
                    {
                        //data = SerializationManager.DeserializeFromByteArray<Dictionary<string, object>>(value);
                        grainState.State = SerializationManager.DeserializeFromByteArray <object>(value);
                    }
                }
                break;
                case StorageFormatEnum.Json:
                {
                    // NOTE: the JSON column is named "JsonContext" (sic) in the entity.
                    var value = await db.KeyValues.Where(s => s.GrainKeyId.Equals(primaryKey)).Select(s => s.JsonContext).SingleOrDefaultAsync();
                    if (!string.IsNullOrEmpty(value))
                    {
                        //data = JsonConvert.DeserializeObject<Dictionary<string, object>>(value, jsonSettings);
                        grainState.State = JsonConvert.DeserializeObject(value, grainState.State.GetType(), jsonSettings);
                    }
                }
                break;
                default:
                    break;
            }
        }
        // NOTE(review): a fresh GUID per read means the ETag never reflects the
        // stored version, so optimistic concurrency cannot detect conflicts —
        // confirm this is intended.
        grainState.ETag = Guid.NewGuid().ToString();
    }
    catch (Exception ex)
    {
        // Log with full context and rethrow so the caller sees the original failure.
        Log.Error((int)SimpleSQLServerProviderErrorCodes.SimpleSQLServerProvider_ReadError, $"Error reading: GrainType={grainType} Grainid={grainReference} ETag={grainState.ETag} from DataSource={this.sqlconnBuilder.DataSource + "." + this.sqlconnBuilder.InitialCatalog}", ex);
        throw;
    }
}
/// <summary>
/// RabbitMQ delivery callback: deserializes the incoming body into a batch
/// container, stamps it with the delivery tag (reused as the sequence token),
/// and enqueues it for the adapter to pick up.
/// </summary>
private async Task AddMessagesToQueue(object channel, BasicDeliverEventArgs eventArgs)
{
    var payload = eventArgs.Body.ToArray();
    var container = _serializationManager.DeserializeFromByteArray<RabbitMQBatchContainer>(payload);
    container.DeliveryTag = eventArgs.DeliveryTag;
    container.SequenceToken = new EventSequenceTokenV2((long)eventArgs.DeliveryTag);
    _batchQueue.Enqueue(container);
    await Task.Yield();
}
/// <summary>
/// Verifies that JSON-attributed POCOs and enums survive a serialize/
/// deserialize round trip, and that the custom [JsonProperty] name appears
/// in the serialized bytes.
/// </summary>
private static void TestSerializationRoundTrip(SerializationManager serializationManager)
{
    var poco = new JsonPoco { Prop = "some data" };
    var serialized = serializationManager.SerializeToByteArray(poco);

    //The serialized data should have our custom [JsonProperty] name, 'crazy_name', in it.
    var expectedFragment = Encoding.UTF8.GetBytes("crazy_name");
    Assert.Contains(ToString(expectedFragment), ToString(serialized));

    var roundTripped = serializationManager.DeserializeFromByteArray<JsonPoco>(serialized);
    Assert.Equal(poco.Prop, roundTripped.Prop);

    var enumBytes = serializationManager.SerializeToByteArray(SampleEnum.One);
    var enumRoundTripped = serializationManager.DeserializeFromByteArray<SampleEnum>(enumBytes);
    Assert.Equal(SampleEnum.One, enumRoundTripped);
}
/// <summary>
/// Loads grain state from the relational store. The persisted payload is a
/// base64-encoded blob of Orleans binary serialization; a missing row yields
/// null State and null ETag.
/// </summary>
public async Task<IGrainState> ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
{
    try
    {
        // Validate all inputs up front.
        if (string.IsNullOrWhiteSpace(grainType))
        {
            throw new ArgumentNullException(nameof(grainType));
        }
        grainReference = grainReference ?? throw new ArgumentNullException(nameof(grainReference));
        grainState = grainState ?? throw new ArgumentNullException(nameof(grainState));

        var record = await _db
            .Storage
            .FirstOrDefaultAsync(a => a.Type == grainType && a.PrimaryKey == grainReference.ToKeyString());

        // No row: report empty state so the grain starts fresh.
        if (record == null)
        {
            grainState.ETag = null;
            grainState.State = null;
            return grainState;
        }

        grainState.ETag = record.ETag;
        var stateBytes = Convert.FromBase64String(record.Data);
        grainState.State = _serializationManager.DeserializeFromByteArray<object>(stateBytes);
        return grainState;
    }
    catch (Exception e)
    {
        _logger.LogError(0, e, nameof(ReadStateAsync));
        throw;
    }
}
/// <summary>
/// Verifies an HttpRequestException survives deep copy, round-trip
/// serialization, and explicit byte-array serialize/deserialize intact.
/// </summary>
public void OrleansSerialization_HttpRequestException_IsEquivalent()
{
    var expected = new HttpRequestException("HTTP request exception").ThrowAndCatch();

    var copied = (HttpRequestException)_serializationManager.DeepCopy(expected);
    AssertExceptionsAreEqual(expected, copied);

    var roundTripped = _serializationManager.RoundTripSerializationForTesting(expected);
    var serializedBytes = _serializationManager.SerializeToByteArray(expected);
    var deserialized = _serializationManager.DeserializeFromByteArray<HttpRequestException>(serializedBytes);
    AssertExceptionsAreEqual(expected, deserialized);
    AssertExceptionsAreEqual(expected, roundTripped);
}
/// <summary> Read state data function for this storage provider. </summary>
/// <see cref="IStorageProvider#ReadStateAsync"/>
public async Task ReadStateAsync(string grainType, GrainReference grainReference, GrainState grainState)
{
    var primaryKey = grainReference.ToKeyString();
    if (Log.IsVerbose3)
    {
        Log.Verbose3((int)SimpleSQLServerProviderErrorCodes.SimpleSQLServerProvide_ReadingData, "Reading: GrainType={0} Pk={1} Grainid={2} from DataSource={3}", grainType, primaryKey, grainReference, this.sqlconnBuilder.DataSource + "." + this.sqlconnBuilder.InitialCatalog);
    }
    // Legacy GrainState API: state is restored as a property dictionary via SetAll.
    var data = new Dictionary <string, object>();
    using (var db = new KeyValueDbContext(this.sqlconnBuilder.ConnectionString))
    {
        switch (this.useJsonOrBinaryFormat)
        {
            // Binary is read for both the Binary and Both configurations.
            case StorageFormatEnum.Binary:
            case StorageFormatEnum.Both:
            {
                var value = await db.KeyValues.Where(s => s.GrainKeyId.Equals(primaryKey)).Select(s => s.BinaryContent).SingleOrDefaultAsync();
                if (value != null)
                {
                    data = SerializationManager.DeserializeFromByteArray <Dictionary <string, object> >(value);
                }
            }
            break;
            case StorageFormatEnum.Json:
            {
                // NOTE: the JSON column is named "JsonContext" (sic) in the entity.
                var value = await db.KeyValues.Where(s => s.GrainKeyId.Equals(primaryKey)).Select(s => s.JsonContext).SingleOrDefaultAsync();
                if (!string.IsNullOrEmpty(value))
                {
                    data = JsonConvert.DeserializeObject <Dictionary <string, object> >(value, jsonSettings);
                }
            }
            break;
            default:
                break;
        }
    }
    grainState.SetAll(data);
    // NOTE(review): a fresh GUID per read means the ETag never reflects the
    // stored version — confirm this is intended.
    grainState.Etag = Guid.NewGuid().ToString();
}
/// <summary>
/// Deserialize from Azure storage format
/// </summary>
/// <param name="grainState">The grain state data to be deserialized in to</param>
/// <param name="entity">The Azure table entity the stored data</param>
internal void ConvertFromStorageFormat(GrainState grainState, GrainStateEntity entity)
{
    Dictionary <string, object> dataValues = null;
    try
    {
        // Prefer the binary payload; fall back to the JSON string payload.
        if (entity.Data != null)
        {
            // Rehydrate
            dataValues = SerializationManager.DeserializeFromByteArray <Dictionary <string, object> >(entity.Data);
        }
        else if (entity.StringData != null)
        {
            dataValues = Newtonsoft.Json.JsonConvert.DeserializeObject <Dictionary <string, object> >(entity.StringData, jsonSettings);
        }
        if (dataValues != null)
        {
            grainState.SetAll(dataValues);
        }
        // Else, no data found
    }
    catch (Exception exc)
    {
        // Build a diagnostic message naming the representation that failed and,
        // when partially deserialized, the recovered key/value pairs; then rethrow.
        var sb = new StringBuilder();
        if (entity.Data != null)
        {
            sb.AppendFormat("Unable to convert from storage format GrainStateEntity.Data={0}", entity.Data);
        }
        else if (entity.StringData != null)
        {
            sb.AppendFormat("Unable to convert from storage format GrainStateEntity.StringData={0}", entity.StringData);
        }
        if (dataValues != null)
        {
            int i = 1;
            foreach (var dvKey in dataValues.Keys)
            {
                object dvValue = dataValues[dvKey];
                sb.AppendLine();
                sb.AppendFormat("Data #{0} Key={1} Value={2} Type={3}", i, dvKey, dvValue, dvValue.GetType());
                i++;
            }
        }
        Log.Error(0, sb.ToString(), exc);
        throw new AggregateException(sb.ToString(), exc);
    }
}
/// <summary>
/// Deserializes a grain state record back into an object, preferring the
/// binary payload and falling back to the JSON string payload. Returns null
/// when the record holds no data.
/// </summary>
/// <param name="entity">The stored grain state record.</param>
/// <returns>The rehydrated state object, or null if no data was found.</returns>
/// <exception cref="AggregateException">Wraps any deserialization failure with diagnostic context.</exception>
internal object ConvertFromStorageFormat(GrainStateRecord entity)
{
    var binaryData = entity.BinaryState;
    var stringData = entity.StringState;
    object dataValue = null;
    try
    {
        if (binaryData?.Length > 0)
        {
            // Rehydrate
            dataValue = SerializationManager.DeserializeFromByteArray<object>(binaryData);
        }
        else if (!string.IsNullOrEmpty(stringData))
        {
            dataValue = JsonConvert.DeserializeObject<object>(stringData, jsonSettings);
        }
        // Else, no data found
    }
    catch (Exception exc)
    {
        var sb = new StringBuilder();
        // FIX: was `binaryData.Length > 0`, which throws NullReferenceException
        // inside the catch when BinaryState is null (the success path above
        // already guards with `?.`).
        if (binaryData?.Length > 0)
        {
            sb.AppendFormat("Unable to convert from storage format GrainStateEntity.Data={0}", binaryData);
        }
        else if (!string.IsNullOrEmpty(stringData))
        {
            sb.AppendFormat("Unable to convert from storage format GrainStateEntity.StringData={0}", stringData);
        }
        if (dataValue != null)
        {
            sb.AppendFormat("Data Value={0} Type={1}", dataValue, dataValue.GetType());
        }
        Log.Error(0, sb.ToString(), exc);
        throw new AggregateException(sb.ToString(), exc);
    }
    return dataValue;
}
/// <summary>
/// Converts a consumed Kafka record into a batch container. Records carrying
/// the configured external-message header (with a true value) are wrapped as
/// raw UTF-8 text; all others are deserialized as Orleans batch containers.
/// </summary>
public static KafkaBatchContainer ToBatchContainer(
    this ConsumeResult<byte[], byte[]> result,
    SerializationManager serializationManager,
    KafkaStreamOptions options,
    string streamNamespace)
{
    var token = new EventSequenceTokenV2(result.Offset.Value);
    var externalHeader = result.Headers.FirstOrDefault(header => header.Key == options.ExternalMessageIdentifier);
    if (externalHeader != null && BitConverter.ToBoolean(externalHeader.GetValueBytes(), 0))
    {
        // External producers write plain text; derive the stream id from the key.
        var streamKey = Encoding.UTF8.GetString(result.Key);
        return new KafkaBatchContainer(
            StreamProviderUtils.GenerateStreamGuid(streamKey),
            streamNamespace,
            new List<object> { Encoding.UTF8.GetString(result.Value) },
            null,
            isExternalBatch: true,
            token,
            result.TopicPartitionOffset);
    }
    var container = serializationManager.DeserializeFromByteArray<KafkaBatchContainer>(result.Value);
    if (container.SequenceToken == null)
    {
        // Preserve a token already embedded in the payload; otherwise use the offset.
        container.SequenceToken = token;
    }
    container.TopicPartitionOffSet = result.TopicPartitionOffset;
    return container;
}
/// <summary>
/// Deserialize from Azure storage format
/// </summary>
/// <param name="entity">The Azure table entity the stored data</param>
internal object ConvertFromStorageFormat(GrainStateEntity entity)
{
    object dataValue = null;
    try
    {
        // Prefer the binary payload; fall back to the JSON string payload.
        if (entity.Data != null)
        {
            // Rehydrate
            dataValue = SerializationManager.DeserializeFromByteArray <object>(entity.Data);
        }
        else if (entity.StringData != null)
        {
            dataValue = Newtonsoft.Json.JsonConvert.DeserializeObject <object>(entity.StringData, jsonSettings);
        }
        // Else, no data found
    }
    catch (Exception exc)
    {
        // Build a diagnostic message naming the representation that failed, then rethrow.
        var sb = new StringBuilder();
        if (entity.Data != null)
        {
            sb.AppendFormat("Unable to convert from storage format GrainStateEntity.Data={0}", entity.Data);
        }
        else if (entity.StringData != null)
        {
            sb.AppendFormat("Unable to convert from storage format GrainStateEntity.StringData={0}", entity.StringData);
        }
        if (dataValue != null)
        {
            sb.AppendFormat("Data Value={0} Type={1}", dataValue, dataValue.GetType());
        }
        Log.Error(0, sb.ToString(), exc);
        throw new AggregateException(sb.ToString(), exc);
    }
    return (dataValue);
}
/// <summary>
/// Builds a batch container from a Kafka message: deserializes the event list
/// from the value, derives the stream guid from the message key when it is a
/// valid Guid, and otherwise falls back to a partition-derived id.
/// </summary>
public static KafkaEventBusBatchContainer FromKafkaMessage(Message msg, SerializationManager serializationManager, long seqNumber)
{
    var container = new KafkaEventBusBatchContainer();
    var events = serializationManager.DeserializeFromByteArray<List<Event>>(msg.Value);
    var aggIdString = msg.Key == null ? null : Encoding.UTF8.GetString(msg.Key);
    // FIX: the condition was `string.IsNullOrEmpty(aggIdString) && Guid.TryParse(...)`,
    // which can never be true (a null/empty string never parses as a Guid), so
    // the message key was silently ignored for every message. The intent is to
    // use the key when it is present AND a valid Guid.
    if (!string.IsNullOrEmpty(aggIdString) && Guid.TryParse(aggIdString, out var guid))
    {
        container.StreamGuid = guid;
    }
    else
    {
        // No usable key: derive a stable stream id from the partition number.
        container.StreamGuid = new Guid(msg.Partition, 0, 0, zero8);
    }
    container.StreamNamespace = msg.Topic;
    container.EventSequenceToken = new EventSequenceToken(seqNumber);
    container.Events = events;
    container.TopicPartitionOffset = msg.TopicPartitionOffset;
    return container;
}
/// <summary>
/// Receives up to <paramref name="maxCount"/> Service Bus messages and
/// converts each into a batch container, using the broker sequence number as
/// the stream sequence token and retaining the original message for completion.
/// </summary>
public async Task<IList<IBatchContainer>> GetQueueMessagesAsync(int maxCount)
{
    if (!IsConnected)
    {
        await ConnectAsync();
    }
    var messages = await _receiver !.ReceiveMessagesAsync(maxCount);
    // FIX (idiom): Array.Empty<T>() returns a cached instance instead of
    // allocating a fresh zero-length array on every empty receive.
    messages ??= Array.Empty<ServiceBusReceivedMessage>();
    return messages
        .Select(message =>
        {
            var body = message.Body.ToArray();
            var batchContainer = _serializationManager.DeserializeFromByteArray<ServiceBusBatchContainer>(body);
            batchContainer.ServiceBusReceivedMessage = message;
            batchContainer.SequenceToken = new EventSequenceTokenV2(message.SequenceNumber);
            return (IBatchContainer)batchContainer;
        })
        .ToList();
}
/// <inheritdoc />
public MemoryMessageBody Deserialize(ArraySegment<byte> bodyBytes)
{
    // Materialize the segment into an array before handing it to the serializer.
    var buffer = bodyBytes.ToArray();
    return serializationManager.DeserializeFromByteArray<MemoryMessageBody>(buffer);
}