/// <summary>
/// Materializes a <see cref="SqlChunk"/> from the current row of <paramref name="reader"/>.
/// Column layout (established by the assignments below):
/// 0 = Position, 1 = PartitionId, 2 = Index, 3 = Payload (byte[]), 4 = OperationId, 5 = SerializerInfo.
/// </summary>
/// <param name="reader">Reader positioned on a chunk row.</param>
/// <returns>The deserialized chunk.</returns>
protected IChunk ReadChunk(DbDataReader reader)
{
    // Read scalar columns first, payload bytes last — same ordinal access order
    // as the original implementation (0, 1, 2, 4, 5, then 3).
    var position = reader.GetInt64(0);
    var partitionId = reader.GetString(1);
    var index = reader.GetInt64(2);
    var operationId = reader.GetString(4);
    var serializerInfo = reader.GetString(5);
    var payloadBytes = reader.GetFieldValue<byte[]>(3);

    return new SqlChunk
    {
        Position = position,
        PartitionId = partitionId,
        Index = index,
        OperationId = operationId,
        SerializerInfo = serializerInfo,
        // The serializer info stored alongside the row tells the serializer
        // how to decode the payload bytes.
        Payload = Options.Serializer.Deserialize(payloadBytes, serializerInfo)
    };
}
/// <summary>
/// Appends a chunk to <paramref name="partitionId"/> at <paramref name="index"/>
/// (a value of -1 requests an auto-generated index via <c>GenerateIndex()</c>).
/// </summary>
/// <param name="partitionId">Target partition.</param>
/// <param name="index">Desired index, or -1 to auto-generate.</param>
/// <param name="payload">Payload object; serialized before insert.</param>
/// <param name="operationId">Idempotency key; a new GUID is used when null.</param>
/// <param name="cancellationToken">Cancels the database work.</param>
/// <returns>
/// The persisted chunk with its database-assigned <c>Position</c>, or
/// <c>null</c> when a chunk with the same operation id already exists
/// (duplicate operations are skipped, not errors).
/// </returns>
/// <exception cref="DuplicateStreamIndexException">
/// Thrown when a chunk already exists at <paramref name="index"/> in the partition.
/// </exception>
public async Task<IChunk> AppendAsync(
    string partitionId,
    long index,
    object payload,
    string operationId,
    CancellationToken cancellationToken)
{
    try
    {
        if (index == -1)
        {
            index = GenerateIndex();
        }

        var chunk = new SqlChunk()
        {
            PartitionId = partitionId,
            Index = index,
            Payload = payload,
            OperationId = operationId ?? Guid.NewGuid().ToString()
        };

        var bytes = Options.Serializer.Serialize(payload, out string serializerInfo);
        chunk.SerializerInfo = serializerInfo;
        var sql = Options.GetInsertChunkSql();

        using (var context = await Options.GetContextAsync(cancellationToken).ConfigureAwait(false))
        {
            using (var command = context.CreateCommand(sql))
            {
                // Parameterized insert — values never concatenated into SQL.
                context.AddParam(command, "@PartitionId", partitionId);
                context.AddParam(command, "@Index", index);
                context.AddParam(command, "@OperationId", chunk.OperationId);
                context.AddParam(command, "@Payload", bytes);
                context.AddParam(command, "@SerializerInfo", serializerInfo);

                // The insert statement returns the assigned position as a scalar.
                chunk.Position = (long)await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
            }
        }

        return chunk;
    }
    catch (Exception ex)
    {
        if (IsDuplicatedStreamIndex(ex))
        {
            throw new DuplicateStreamIndexException(partitionId, index);
        }

        if (IsDuplicatedStreamOperation(ex))
        {
            // Duplicate operation ids are an expected idempotency case: skip silently.
            // FIX: structured message template instead of string interpolation (CA2254),
            // so PartitionId/OperationId survive as structured log properties.
            _logger.LogInformation(
                "Skipped duplicated chunk on '{PartitionId}' by operation id '{OperationId}'",
                partitionId,
                operationId);
            return null;
        }

        // FIX: pass the exception object to the logger — the original logged only
        // ex.Message, discarding the exception type and stack trace.
        _logger.LogError(
            ex,
            "Error appending chunk to partition '{PartitionId}' at index {Index}",
            partitionId,
            index);
        throw;
    }
}