// Asynchronously reads one length-prefixed JObject from the socket.
// Wire format: a 4-byte length header (parsed with BitConverter.ToInt32, i.e. native
// endianness) followed by `len` bytes of JSON payload, parsed via ToJObject().
// Replies whose declared length exceeds TenMB are rejected as invalid.
//
// Completion: the returned task is completed via a TaskCompletionSource —
// SetResult on success, SetException on any failure. A faulted/cancelled
// ReadBuffer task surfaces through `.Result` (throws AggregateException),
// which the try/catch converts into tcs.SetException, so the caller's task
// always completes. NOTE(review): continuations run on the default scheduler;
// presumably ReadBuffer never returns null — confirm against its contract.
public static Task<JObject> ReadJObject(this Socket socket) { var tcs = new TaskCompletionSource<JObject>(); socket.ReadBuffer(4) .ContinueWith(task => { try { var len = BitConverter.ToInt32(task.Result.Array, task.Result.Offset); if(len > TenMB) throw new InvalidOperationException("Got a reply for single JObject > 10 MB, rejecting as invalid"); socket.ReadBuffer(len) .ContinueWith(readLenTask => { try { var ms = new MemoryStream(readLenTask.Result.Array, readLenTask.Result.Offset, readLenTask.Result.Count); tcs.SetResult(ms.ToJObject()); } catch (Exception e) { tcs.SetException(e); } }); } catch (Exception e) { tcs.SetException(e); } }); return tcs.Task; }
/// <summary>
/// Smoke-tests that MemoryStream.ToJObject() behaves correctly under concurrency
/// by parsing the same small JSON document on 100 tasks at once and asserting
/// the parsed value each time.
/// </summary>
public void CanParseJson()
{
	var parseTasks = new List<Task>();
	for (var attempt = 0; attempt < 100; attempt++)
	{
		parseTasks.Add(Task.Factory.StartNew(() =>
		{
			var stream = new MemoryStream(Encoding.UTF8.GetBytes("{'Test': true}"));
			var parsed = stream.ToJObject();
			Assert.True(parsed.Value<bool>("Test"));
		}));
	}
	Task.WaitAll(parseTasks.ToArray());
}
/// <summary>
/// Loads the attachment stored under <paramref name="key"/>, or null when none exists.
/// The stored blob is metadata JSON followed immediately by the raw attachment bytes;
/// parsing the metadata leaves the stream positioned at the start of the payload.
/// </summary>
public Attachment GetAttachment(string key)
{
	var readResult = storage.Attachments.Read(new JObject { { "key", key } });
	if (readResult == null)
		return null;

	var storedBytes = readResult.Data();
	var stream = new MemoryStream(storedBytes);
	var metadata = stream.ToJObject();

	// Everything after the metadata is the attachment payload.
	var payload = new byte[readResult.Size - stream.Position];
	Buffer.BlockCopy(storedBytes, (int)stream.Position, payload, 0, payload.Length);

	return new Attachment
	{
		Etag = new Guid(readResult.Key.Value<byte[]>("etag")),
		Metadata = metadata,
		Data = payload
	};
}
/// <summary>
/// Reads a document's metadata and body, consulting the per-etag cache first.
/// On a cache miss the raw bytes from <paramref name="getData"/> are parsed as
/// metadata JSON followed by body JSON; any registered document codecs are run
/// over the body bytes before the body is parsed, and the result is cached.
/// </summary>
/// <param name="key">Document key (passed through to the codecs).</param>
/// <param name="etag">Etag used as the cache lookup key.</param>
/// <param name="getData">Supplies the raw stored bytes; only invoked on a cache miss.</param>
/// <param name="metadata">Parsed document metadata.</param>
/// <param name="dataAsJson">Parsed (decoded) document body.</param>
private void ReadMetadataAndData(string key, Guid etag, Func<byte[]> getData, out JObject metadata, out JObject dataAsJson)
{
	var cachedDocument = storage.GetCachedDocument(key, etag);
	if (cachedDocument != null)
	{
		metadata = cachedDocument.Item1;
		dataAsJson = cachedDocument.Item2;
		return;
	}
	var buffer = getData();
	var memoryStream = new MemoryStream(buffer, 0, buffer.Length);
	metadata = memoryStream.ToJObject();
	if (documentCodecs.Any())
	{
		// Codecs get a copy so they cannot mutate the metadata we return / cache.
		var metadataCopy = new JObject(metadata);
		var dataBuffer = new byte[memoryStream.Length - memoryStream.Position];
		Buffer.BlockCopy(buffer, (int)memoryStream.Position, dataBuffer, 0, dataBuffer.Length);
		// BUG FIX: the Aggregate result was previously discarded and the original
		// dataBuffer reference was copied back, so a codec whose Decode returns a
		// NEW array (rather than decoding in place) had its output silently lost.
		var decoded = documentCodecs.Aggregate(dataBuffer, (bytes, codec) => codec.Decode(key, metadataCopy, bytes));
		// Copy the decoded body back over the stream's backing buffer so the
		// ToJObject call below reads the decoded bytes.
		// NOTE(review): assumes decoding never grows the payload beyond the stored
		// size (the original code made the same assumption) — confirm for each codec.
		Buffer.BlockCopy(decoded, 0, buffer, (int)memoryStream.Position, decoded.Length);
	}
	dataAsJson = memoryStream.ToJObject();
	// Cache copies so later mutation by callers cannot corrupt the cache.
	storage.SetCachedDocument(key, etag, Tuple.Create(new JObject(metadata), new JObject(dataAsJson)));
}
// Commits transaction txId: removes the transaction record, then walks every
// document-modification row tagged with this txId, removes it, and hands the
// decoded change (metadata + data read back-to-back from one stream) to
// perDocumentModified.
//
// NOTE(review): documentsInTx is a lazy SkipTo/TakeWhile query over the same
// table that Remove mutates inside the loop — whether enumerate-while-removing
// is safe here depends on the Table/index implementation; confirm before
// restructuring.
// NOTE(review): readResult from Read(docInTx) is dereferenced without a null
// check — presumably the row is guaranteed to exist since docInTx came from
// the index; verify.
public void CompleteTransaction(Guid txId, Action<DocumentInTransactionData> perDocumentModified) { storage.Transactions.Remove(new JObject { { "txId", txId.ToByteArray() } }); var documentsInTx = storage.DocumentsModifiedByTransactions["ByTxId"] .SkipTo(new JObject { { "txId", txId.ToByteArray() } }) .TakeWhile(x => new Guid(x.Value<byte[]>("txId")) == txId); foreach (var docInTx in documentsInTx) { var readResult = storage.DocumentsModifiedByTransactions.Read(docInTx); storage.DocumentsModifiedByTransactions.Remove(docInTx); JObject metadata = null; JObject data = null; if (readResult.Position > 0) // position can never be 0, because of the skip record { var ms = new MemoryStream(readResult.Data()); metadata = ms.ToJObject(); data = ms.ToJObject(); } perDocumentModified(new DocumentInTransactionData { Key = readResult.Key.Value<string>("key"), Etag = new Guid(readResult.Key.Value<byte[]>("etag")), Delete = readResult.Key.Value<bool>("deleted"), Metadata = metadata, Data = data, }); } }
/// <summary>
/// Resolves a document by key. If the document was modified inside the caller's
/// own transaction, the in-transaction version is returned (or null if it was
/// deleted there). Otherwise the committed document is returned, flagged as
/// non-authoritative when some other transaction has touched it.
/// </summary>
/// <param name="key">Document key.</param>
/// <param name="transactionInformation">Caller's transaction, or null when not in one.</param>
/// <returns>The document, or null when it does not exist (or was deleted in-tx).</returns>
public JsonDocument DocumentByKey(string key, TransactionInformation transactionInformation)
{
	var resultInTx = storage.DocumentsModifiedByTransactions.Read(new JObject { { "key", key } });
	if (transactionInformation != null && resultInTx != null)
	{
		if (new Guid(resultInTx.Key.Value<byte[]>("txId")) == transactionInformation.Id)
		{
			if (resultInTx.Key.Value<bool>("deleted"))
				return null;
			JObject metadata = null;
			JObject dataAsJson = null;
			// Position == -1 marks a row with no payload (e.g. a delete marker).
			if (resultInTx.Position != -1)
			{
				using (var memoryStreamFromTx = new MemoryStream(resultInTx.Data()))
				{
					// Metadata is stored first, then the document body, on one stream.
					metadata = memoryStreamFromTx.ToJObject();
					dataAsJson = memoryStreamFromTx.ToJObject();
				}
			}
			return new JsonDocument
			{
				Key = resultInTx.Key.Value<string>("key"),
				Etag = new Guid(resultInTx.Key.Value<byte[]>("etag")),
				Metadata = metadata,
				DataAsJson = dataAsJson,
				LastModified = resultInTx.Key.Value<DateTime>("modified"),
			};
		}
	}
	var readResult = storage.Documents.Read(new JObject { { "key", key } });
	if (readResult == null)
		return null;
	// CONSISTENCY FIX: dispose this stream like the in-transaction branch does.
	using (var memoryStream = new MemoryStream(readResult.Data()))
	{
		return new JsonDocument
		{
			Key = readResult.Key.Value<string>("key"),
			Etag = new Guid(readResult.Key.Value<byte[]>("etag")),
			// Object-initializer members evaluate in order: metadata first,
			// then the body, matching the on-disk layout.
			Metadata = memoryStream.ToJObject(),
			DataAsJson = memoryStream.ToJObject(),
			LastModified = readResult.Key.Value<DateTime>("modified"),
			// Another transaction has a pending change for this key.
			NonAuthoritiveInformation = resultInTx != null
		};
	}
}
/// <summary>
/// Loads the attachment stored under <paramref name="key"/>, or null when none exists.
/// Unlike the eager variant, the payload is exposed lazily: Data returns the stream
/// already positioned just past the metadata, and Size is the remaining byte count.
/// Note that every Data() invocation yields the same stream instance.
/// </summary>
public Attachment GetAttachment(string key)
{
	var readResult = storage.Attachments.Read(new RavenJObject { { "key", key } });
	if (readResult == null)
		return null;

	var storedBytes = readResult.Data();
	var stream = new MemoryStream(storedBytes);

	// Metadata JSON comes first; parsing it advances the stream to the payload.
	var metadata = stream.ToJObject();

	return new Attachment
	{
		Key = key,
		Etag = new Guid(readResult.Key.Value<byte[]>("etag")),
		Metadata = metadata,
		Data = () => stream,
		Size = (int)(stream.Length - stream.Position)
	};
}
/// <summary>
/// Dequeues up to a page of messages from the requested queue, starting after
/// readRequest.LastMessageId. Consumable messages are hidden for the requested
/// timeout; included messages have their metadata split off their payload.
/// </summary>
public ReadResults Read(ReadRequest readRequest)
{
	var hasMoreItems = false;
	var messages = new List<OutgoingMessage>();
	var effectivePageSize = Math.Min(configuration.MaxPageSize, readRequest.PageSize);

	// Expiry reset runs in its own transaction so the read below can observe it.
	// Usually a no-op (no expired messages), and doing it here follows the rule
	// of only making changes when activity is actually happening: until someone
	// reads, nothing moves in the system.
	transactionalStorage.Batch(actions => actions.Messages.ResetExpiredMessages());

	transactionalStorage.Batch(actions =>
	{
		var current = actions.Messages.Dequeue(readRequest.Queue, readRequest.LastMessageId);
		while (current != null && messages.Count < effectivePageSize)
		{
			if (ShouldConsumeMessage(current.Expiry, current.Queue))
			{
				actions.Messages.HideMessageFor(current.Id, readRequest.HideTimeout);
			}
			if (ShouldIncludeMessage(current))
			{
				// The stored bytes are metadata JSON followed by the raw payload;
				// parsing the metadata leaves the stream at the payload start.
				var rawBytes = current.Data;
				var stream = new MemoryStream(rawBytes);
				current.Metadata = stream.ToJObject();
				current.Data = new byte[rawBytes.Length - stream.Position];
				Array.Copy(rawBytes, stream.Position, current.Data, 0, current.Data.Length);
				messages.Add(current);
			}
			current = actions.Messages.Dequeue(readRequest.Queue, current.Id);
		}
		// Non-null here means we stopped because the page filled up.
		hasMoreItems = current != null;
	});

	return new ReadResults
	{
		HasMoreResults = hasMoreItems,
		Results = messages,
		Queue = readRequest.Queue
	};
}
/// <summary>
/// Parses metadata and body from an in-transaction document stream. The stream
/// holds metadata JSON followed by the body bytes; registered document codecs
/// are run over the body bytes before the body is parsed.
/// </summary>
/// <param name="key">Document key (passed through to the codecs).</param>
/// <param name="memoryStreamFromTx">Stream over the stored bytes, positioned at the start.</param>
/// <param name="metadata">Parsed document metadata.</param>
/// <param name="dataAsJson">Parsed (decoded) document body.</param>
private void ReadMetadataAndData(string key, MemoryStream memoryStreamFromTx, out JObject metadata, out JObject dataAsJson)
{
	metadata = memoryStreamFromTx.ToJObject();
	// NOTE(review): unlike the Guid/etag overload this hands the codecs the
	// metadata itself rather than a copy, so a codec mutation would leak into
	// the out parameter — confirm whether that is intentional.
	var metadataCopy = metadata;
	var dataBuffer = new byte[memoryStreamFromTx.Length - memoryStreamFromTx.Position];
	Buffer.BlockCopy(memoryStreamFromTx.GetBuffer(), (int)memoryStreamFromTx.Position, dataBuffer, 0, dataBuffer.Length);
	// BUG FIX: the Aggregate result was previously discarded and dataAsJson was
	// parsed from the pre-decode dataBuffer, so a codec whose Decode returns a
	// NEW array (rather than decoding in place) had its output silently lost.
	var decoded = documentCodecs.Aggregate(dataBuffer, (bytes, codec) => codec.Decode(key, metadataCopy, bytes));
	dataAsJson = decoded.ToJObject();
}