/// <summary>
/// Reads a page of up to 128 attachments (starting at <paramref name="start"/>, after
/// <paramref name="etag"/>) and packages each one — data, metadata, key and etag — into
/// a <see cref="RavenJArray"/>. Returns null if any listed attachment can no longer be
/// fetched (it disappeared between listing and retrieval).
/// </summary>
private RavenJArray GetAttachments(int start, Guid? etag)
{
    var result = new RavenJArray();

    foreach (var info in _database.GetAttachments(start, 128, etag))
    {
        var attachment = _database.GetStatic(info.Key);
        if (attachment == null)
            return null;

        // The raw data stream is only readable inside a transactional storage batch,
        // so wrap it: copy the bytes into a MemoryStream within a batch, then rewind.
        var originalData = attachment.Data;
        attachment.Data = () =>
        {
            var buffered = new MemoryStream();
            _database.TransactionalStorage.Batch(accessor => originalData().CopyTo(buffered));
            buffered.Position = 0;
            return buffered;
        };

        var bytes = attachment.Data().ReadData();
        result.Add(new RavenJObject
        {
            { "Data", bytes },
            { "Metadata", info.Metadata },
            { "Key", info.Key },
            { "Etag", new RavenJValue(info.Etag.ToString()) }
        });
    }

    return result;
}
public void CanModifyAttachmentPut()
{
    // Store a small attachment, then verify the created_at value found in its metadata.
    db.PutStatic("ayende", null, new MemoryStream(new byte[] { 1, 2, 3 }), new RavenJObject());

    var stored = db.GetStatic("ayende");

    var expected = new DateTime(2000, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    Assert.Equal(expected, stored.Metadata.Value<DateTime>("created_at"));
}
/// <summary>
/// Gets the attachment stored under the specified key, or null when it does not exist.
/// The returned attachment's Data delegate is rewritten so its content can be read
/// outside of a transactional storage batch.
/// </summary>
/// <param name="key">The attachment key.</param>
/// <returns>The attachment, or null if no attachment exists for the key.</returns>
public Attachment GetAttachment(string key)
{
    CurrentOperationContext.Headers.Value = OperationsHeaders;

    var attachment = database.GetStatic(key);
    if (attachment == null)
        return null;

    // Buffer the data inside a batch: once the batch exits, the original
    // attachment data stream is closed, so copy it into memory first.
    Func<Stream> originalData = attachment.Data;
    attachment.Data = () =>
    {
        var buffered = new MemoryStream();
        database.TransactionalStorage.Batch(accessor => originalData().CopyTo(buffered));
        buffered.Position = 0;
        return buffered;
    };

    return attachment;
}
/// <summary>
/// Creates the Lucene fields for a single named value, dispatching on the value's
/// runtime type (null, attachment, string, enumerable, date/time kinds, bool,
/// numeric, dynamic JSON, or arbitrary object serialized to JSON). Yields zero or
/// more <see cref="AbstractField"/> instances; numeric values additionally get
/// numeric sort fields appended at the end.
/// </summary>
/// <param name="name">Field name as it will appear in the index.</param>
/// <param name="value">The raw value to index; its runtime type selects the branch.</param>
/// <param name="defaultStorage">Storage default, overridable by the index definition.</param>
/// <param name="nestedArray">True when called recursively for items of an enumerable; suppresses the *_IsArray marker.</param>
/// <param name="defaultTermVector">Term-vector default, overridable by the index definition.</param>
/// <param name="analyzed">Explicit indexing option; when null, resolved from the index definition.</param>
private IEnumerable<AbstractField> CreateRegularFields(string name, object value, Field.Store defaultStorage, bool nestedArray = false, Field.TermVector defaultTermVector = Field.TermVector.NO, Field.Index? analyzed = null)
{
    // Resolve effective indexing/storage/term-vector options from the index definition.
    var fieldIndexingOptions = analyzed ?? indexDefinition.GetIndex(name, null);
    var storage = indexDefinition.GetStorage(name, defaultStorage);
    var termVector = indexDefinition.GetTermVector(name, defaultTermVector);

    // Nothing to index, store, or vectorize — produce no fields at all.
    if (fieldIndexingOptions == Field.Index.NO && storage == Field.Store.NO && termVector == Field.TermVector.NO)
    {
        yield break;
    }
    if (fieldIndexingOptions == Field.Index.NO && storage == Field.Store.NO)
    {
        fieldIndexingOptions = Field.Index.ANALYZED; // we have some sort of term vector, forcing index to be analyzed, then.
    }

    // Null is indexed as the well-known NullValue sentinel.
    if (value == null)
    {
        yield return (CreateFieldWithCaching(name, Constants.NullValue, storage, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
        yield break;
    }

    // Attachment references: index the attachment's content, reading it from storage.
    var attachmentFoIndexing = value as AttachmentForIndexing;
    if (attachmentFoIndexing != null)
    {
        if (database == null)
        {
            throw new InvalidOperationException("Cannot use attachment for indexing if the database parameter is null. This is probably a RavenDB bug");
        }
        var attachment = database.GetStatic(attachmentFoIndexing.Key);
        if (attachment == null)
        {
            yield break;
        }
        var fieldWithCaching = CreateFieldWithCaching(name, string.Empty, Field.Store.NO, fieldIndexingOptions, termVector);
        if (database.TransactionalStorage.IsAlreadyInBatch)
        {
            // Already inside a batch — the stream is readable directly, so hand the
            // reader to Lucene without materializing the content.
            var streamReader = new StreamReader(attachment.Data());
            fieldWithCaching.SetValue(streamReader);
        }
        else
        {
            // we are not in batch operation so we have to create it be able to read attachment's data
            database.TransactionalStorage.Batch(accessor =>
            {
                var streamReader = new StreamReader(attachment.Data());
                // we have to read it into memory because we after exiting the batch an attachment's data stream will be closed
                fieldWithCaching.SetValue(streamReader.ReadToEnd());
            });
        }
        yield return (fieldWithCaching);
        yield break;
    }

    // Empty string is indexed as the EmptyString sentinel (distinguishes it from null).
    if (Equals(value, string.Empty))
    {
        yield return (CreateFieldWithCaching(name, Constants.EmptyString, storage, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
        yield break;
    }

    // Dynamic null wrapper: only an explicit null produces fields (plus numeric sort fields).
    var dynamicNullObject = value as DynamicNullObject;
    if (ReferenceEquals(dynamicNullObject, null) == false)
    {
        if (dynamicNullObject.IsExplicitNull)
        {
            var sortOptions = indexDefinition.GetSortOption(name);
            yield return (CreateFieldWithCaching(name, Constants.NullValue, storage, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
            foreach (var field in CreateNumericFieldWithCaching(name, GetNullValueForSorting(sortOptions), storage, termVector))
            {
                yield return (field);
            }
        }
        yield break;
    }

    // Boosted value: recurse on the wrapped value and apply the boost to every field.
    var boostedValue = value as BoostedValue;
    if (boostedValue != null)
    {
        foreach (var field in CreateFields(name, boostedValue.Value, storage, false, termVector))
        {
            field.Boost = boostedValue.Boost;
            field.OmitNorms = false;
            yield return (field);
        }
        yield break;
    }

    // Pre-built Lucene field: pass it through untouched.
    var abstractField = value as AbstractField;
    if (abstractField != null)
    {
        yield return (abstractField);
        yield break;
    }

    // Raw bytes go through the binary field path.
    var bytes = value as byte[];
    if (bytes != null)
    {
        yield return (CreateBinaryFieldWithCaching(name, bytes, storage, fieldIndexingOptions, termVector));
        yield break;
    }

    // Enumerables: recurse per item, tracking nesting depth via multipleItemsSameFieldCount
    // so nested-array field names stay consistent; top level also emits a *_IsArray marker.
    var itemsToIndex = value as IEnumerable;
    if (itemsToIndex != null && ShouldTreatAsEnumerable(itemsToIndex))
    {
        int count = 1;
        if (nestedArray == false)
        {
            yield return (new Field(name + "_IsArray", "true", storage, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
        }
        foreach (var itemToIndex in itemsToIndex)
        {
            if (!CanCreateFieldsForNestedArray(itemToIndex, fieldIndexingOptions))
            {
                continue;
            }
            multipleItemsSameFieldCount.Add(count++);
            foreach (var field in CreateFields(name, itemToIndex, storage, nestedArray: true, defaultTermVector: defaultTermVector, analyzed: analyzed))
            {
                yield return (field);
            }
            multipleItemsSameFieldCount.RemoveAt(multipleItemsSameFieldCount.Count - 1);
        }
        yield break;
    }

    if (Equals(fieldIndexingOptions, Field.Index.NOT_ANALYZED) || Equals(fieldIndexingOptions, Field.Index.NOT_ANALYZED_NO_NORMS)) // explicitly not analyzed
    {
        // date time, time span and date time offset have the same structure fo analyzed and not analyzed.
        if (!(value is DateTime) && !(value is DateTimeOffset) && !(value is TimeSpan))
        {
            yield return (CreateFieldWithCaching(name, value.ToString(), storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
            yield break;
        }
    }

    if (value is string)
    {
        var index = indexDefinition.GetIndex(name, Field.Index.ANALYZED);
        yield return (CreateFieldWithCaching(name, value.ToString(), storage, index, termVector));
        yield break;
    }

    // From here on: typed scalar values. Note these branches fall through to the
    // trailing numeric-field loop instead of yield-breaking.
    if (value is TimeSpan)
    {
        var val = (TimeSpan)value;
        // "c" is the invariant constant TimeSpan format.
        yield return (CreateFieldWithCaching(name, val.ToString("c", CultureInfo.InvariantCulture), storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else if (value is DateTime)
    {
        var val = (DateTime)value;
        var dateAsString = val.ToString(Default.DateTimeFormatsToWrite, CultureInfo.InvariantCulture);
        // UTC values get an explicit "Z" suffix so the kind round-trips.
        if (val.Kind == DateTimeKind.Utc)
        {
            dateAsString += "Z";
        }
        yield return (CreateFieldWithCaching(name, dateAsString, storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else if (value is DateTimeOffset)
    {
        var val = (DateTimeOffset)value;
        string dtoStr;
        // Not-analyzed fields keep the offset; analyzed fields normalize to UTC + "Z".
        if (Equals(fieldIndexingOptions, Field.Index.NOT_ANALYZED) || Equals(fieldIndexingOptions, Field.Index.NOT_ANALYZED_NO_NORMS))
        {
            dtoStr = val.ToString(Default.DateTimeOffsetFormatsToWrite, CultureInfo.InvariantCulture);
        }
        else
        {
            dtoStr = val.UtcDateTime.ToString(Default.DateTimeFormatsToWrite, CultureInfo.InvariantCulture) + "Z";
        }
        yield return (CreateFieldWithCaching(name, dtoStr, storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else if (value is bool)
    {
        yield return (new Field(name, ((bool)value) ? "true" : "false", storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else if (value is double)
    {
        var d = (double)value;
        // "r" round-trip format preserves the exact double value.
        yield return (CreateFieldWithCaching(name, d.ToString("r", CultureInfo.InvariantCulture), storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else if (value is decimal)
    {
        var d = (decimal)value;
        var s = d.ToString(CultureInfo.InvariantCulture);
        // Normalize decimals: strip trailing zeros and a dangling decimal point,
        // so e.g. 1.10m and 1.1m index identically.
        if (s.Contains('.'))
        {
            s = s.TrimEnd('0');
            if (s.EndsWith("."))
            {
                s = s.Substring(0, s.Length - 1);
            }
        }
        yield return (CreateFieldWithCaching(name, s, storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else if (value is IConvertible) // we need this to store numbers in invariant format, so JSON could read them
    {
        var convert = ((IConvertible)value);
        yield return (CreateFieldWithCaching(name, convert.ToString(CultureInfo.InvariantCulture), storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else if (value is IDynamicJsonObject)
    {
        var inner = ((IDynamicJsonObject)value).Inner;
        // Marker field tells the query side this value must be deserialized from JSON.
        yield return (CreateFieldWithCaching(name + "_ConvertToJson", "true", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
        yield return (CreateFieldWithCaching(name, inner.ToString(Formatting.None), storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }
    else
    {
        // Fallback: serialize arbitrary objects to JSON.
        var jsonVal = RavenJToken.FromObject(value).ToString(Formatting.None);
        if (jsonVal.StartsWith("{") || jsonVal.StartsWith("["))
        {
            // Objects/arrays get the ConvertToJson marker (same as IDynamicJsonObject above).
            yield return (CreateFieldWithCaching(name + "_ConvertToJson", "true", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
        }
        else if (jsonVal.StartsWith("\"") && jsonVal.EndsWith("\"") && jsonVal.Length > 1)
        {
            // Plain JSON strings are unquoted before indexing.
            jsonVal = jsonVal.Substring(1, jsonVal.Length - 2);
        }
        yield return (CreateFieldWithCaching(name, jsonVal, storage, indexDefinition.GetIndex(name, Field.Index.NOT_ANALYZED_NO_NORMS), termVector));
    }

    // Scalar branches above fall through here: also emit numeric sort/range fields.
    foreach (var numericField in CreateNumericFieldWithCaching(name, value, storage, termVector))
    {
        yield return (numericField);
    }
}
/// <summary>
/// Gets the attachment stored under the specified key.
/// </summary>
/// <param name="key">The attachment key.</param>
/// <returns>The attachment, as returned by the database's static store.</returns>
public Attachment GetAttachment(string key)
{
    // Propagate the per-operation headers before touching the database.
    CurrentOperationContext.Headers.Value = OperationsHeaders;

    var attachment = database.GetStatic(key);
    return attachment;
}
/// <summary>
/// Fetches the named attachment from the underlying database.
/// </summary>
public Attachment GetStatic(string name)
{
    var attachment = database.GetStatic(name);
    return attachment;
}
/// <summary>
/// Fetches the named attachment from the underlying database, propagating
/// the current operation headers first.
/// </summary>
public Attachment GetStatic(string name)
{
    CurrentRavenOperation.Headers.Value = OperationsHeaders;

    var attachment = database.GetStatic(name);
    return attachment;
}
public void CanModifyAttachmentPut()
{
    // Store a small attachment and verify the created_at value in its metadata.
    db.PutStatic("ayende", null, new byte[] { 1, 2, 3 }, new JObject());

    var attachment = db.GetStatic("ayende");
    var expected = new DateTime(2000, 1, 1, 0, 0, 0, DateTimeKind.Utc);

    Assert.Equal(expected, attachment.Metadata.Value<DateTime>("created_at"));
}