/// <summary>
/// Applies a single document received via replication from the <paramref name="src"/> server.
/// If the local version can be safely overwritten it is; otherwise the incoming version is
/// stored under a conflict key and the document at <paramref name="id"/> is turned into (or
/// updated as) a conflict marker listing all conflicting versions.
/// </summary>
/// <param name="actions">Storage accessor used to read and write documents.</param>
/// <param name="id">Key of the replicated document.</param>
/// <param name="metadata">Metadata of the incoming version. NOTE: mutated in the conflict path
/// (a RavenReplicationConflict marker is added before the conflict copy is stored).</param>
/// <param name="document">Body of the incoming version.</param>
/// <param name="src">Identifier of the source server; used for logging only.</param>
private void ReplicateDocument(DocumentStorageActions actions, string id, JObject metadata, JObject document, string src)
{
    var existingDoc = actions.DocumentByKey(id, null);
    if (existingDoc == null)
    {
        // no local version exists - store the incoming one as-is
        log.DebugFormat("New document {0} replicated successfully from {1}", id, src);
        actions.AddDocument(id, Guid.Empty, document, metadata);
        return;
    }

    var existingDocumentIsInConflict = existingDoc.Metadata[ReplicationConstants.RavenReplicationConflict] != null;

    if (existingDocumentIsInConflict == false &&                // if the current document is not in conflict, we can continue without having to keep conflict semantics
        (IsDirectChildOfCurrentDocument(existingDoc, metadata))) // this update is direct child of the existing doc, so we are fine with overwriting this
    {
        // null etag: overwrite unconditionally - replication has already decided this wins
        log.DebugFormat("Existing document {0} replicated successfully from {1}", id, src);
        actions.AddDocument(id, null, document, metadata);
        return;
    }

    // we have a conflict: persist the incoming version under its own conflict key,
    // marked with the replication-conflict flag (this mutates the caller's metadata)
    var newDocumentConflictId = id + "/conflicts/" + metadata.Value<string>("@etag");
    metadata.Add(ReplicationConstants.RavenReplicationConflict, JToken.FromObject(true));
    actions.AddDocument(newDocumentConflictId, null, document, metadata);

    if (existingDocumentIsInConflict) // the existing document is in conflict
    {
        log.DebugFormat("Conflicted document {0} has a new version from {1}, adding to conflicted documents", id, src);
        // just update the current doc with the new conflict document
        // (assumes the conflict marker doc always carries a "Conflicts" array - it is
        // created that way below; TODO confirm no other writer drops it)
        existingDoc.DataAsJson.Value<JArray>("Conflicts").Add(JToken.FromObject(newDocumentConflictId));
        actions.AddDocument(id, existingDoc.Etag, existingDoc.DataAsJson, existingDoc.Metadata);
        return;
    }

    log.DebugFormat("Existing document {0} is in conflict with replicated version from {1}, marking document as conflicted", id, src);
    // we have a new conflict
    // move the existing doc to a conflict and create a conflict document
    var existingDocumentConflictId = id + "/conflicts/" + existingDoc.Etag;
    existingDoc.Metadata.Add(ReplicationConstants.RavenReplicationConflict, JToken.FromObject(true));
    actions.AddDocument(existingDocumentConflictId, null, existingDoc.DataAsJson, existingDoc.Metadata);

    // replace the document at the original key with a marker that only lists the two
    // conflicting versions; its metadata carries @Http-Status-Code 409 so the conflict
    // surfaces to clients reading this key
    actions.AddDocument(id, null,
        new JObject(
            new JProperty("Conflicts", new JArray(existingDocumentConflictId, newDocumentConflictId))),
        new JObject(
            new JProperty(ReplicationConstants.RavenReplicationConflict, true),
            new JProperty("@Http-Status-Code", 409),
            new JProperty("@Http-Status-Description", "Conflict")
        ));
}
private bool? MoveNext(IEnumerator en, StatefulEnumerableWrapper<object> innerEnumerator, WorkContext context, DocumentStorageActions actions) { try { actions.IncrementIndexingAttempt(); var moveNext = en.MoveNext(); if (moveNext == false) actions.DecrementIndexingAttempt(); return moveNext; } catch (Exception e) { actions.IncrementIndexingFailure(); context.AddError(name, TryGetDocKey(innerEnumerator.Current), e.Message ); log.WarnFormat(e, "Failed to execute indexing function on {0} on {1}", name, GetDocId(innerEnumerator)); } return null; }
protected IEnumerable<object> RobustEnumeration(IEnumerable<object> input, IndexingFunc func, DocumentStorageActions actions, WorkContext context) { var wrapped = new StatefulEnumerableWrapper<dynamic>(input.GetEnumerator()); IEnumerator<object> en = func(wrapped).GetEnumerator(); do { var moveSuccessful = MoveNext(en, wrapped, context, actions); if (moveSuccessful == false) yield break; if (moveSuccessful == true) yield return en.Current; else en = func(wrapped).GetEnumerator(); } while (true); }
public abstract void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable<object> documents, WorkContext context, DocumentStorageActions actions);
public override void IndexDocuments( AbstractViewGenerator viewGenerator, IEnumerable<dynamic> documents, WorkContext context, DocumentStorageActions actions) { actions.SetCurrentIndexStatsTo(name); var count = 0; Func<object, object> documentIdFetcher = null; var reduceKeys = new HashSet<string>(); var documentsWrapped = documents.Select(doc => { var documentId = doc.__document_id; foreach (var reduceKey in actions.DeleteMappedResultsForDocumentId((string)documentId, name)) { reduceKeys.Add(reduceKey); } return doc; }); foreach (var doc in RobustEnumeration(documentsWrapped, viewGenerator.MapDefinition, actions, context)) { count++; documentIdFetcher = CreateDocumentIdFetcherIfNeeded(documentIdFetcher, doc); var docIdValue = documentIdFetcher(doc); if (docIdValue == null) throw new InvalidOperationException("Could not find document id for this document"); var reduceValue = viewGenerator.GroupByExtraction(doc); if (reduceValue == null) { log.DebugFormat("Field {0} is used as the reduce key and cannot be null, skipping document {1}", viewGenerator.GroupByExtraction, docIdValue); continue; } var reduceKey = ReduceKeyToString(reduceValue); var docId = docIdValue.ToString(); reduceKeys.Add(reduceKey); string data = GetMapedData(doc); log.DebugFormat("Mapped result for '{0}': '{1}'", name, data); var hash = ComputeHash(name, reduceKey); actions.PutMappedResult(name, docId, reduceKey, data, hash); actions.IncrementSuccessIndexing(); } foreach (var reduceKey in reduceKeys) { actions.AddTask(new ReduceTask { Index = name, ReduceKey = reduceKey }); } log.DebugFormat("Mapped {0} documents for {1}", count, name); }
public void ReduceDocuments(AbstractViewGenerator viewGenerator, IEnumerable<object> mappedResults, WorkContext context, DocumentStorageActions actions, string reduceKey) { actions.SetCurrentIndexStatsTo(name); var count = 0; Write(indexWriter => { indexWriter.DeleteDocuments(new Term("__reduce_key", reduceKey)); context.IndexUpdateTriggers.Apply(trigger => trigger.OnIndexEntryDeleted(name, reduceKey)); PropertyDescriptorCollection properties = null; foreach (var doc in RobustEnumeration(mappedResults, viewGenerator.ReduceDefinition, actions, context)) { count++; var fields = GetFields(doc, ref properties); var luceneDoc = new Document(); luceneDoc.Add(new Field("__reduce_key", reduceKey, Field.Store.NO, Field.Index.NOT_ANALYZED)); foreach (var field in fields) { luceneDoc.Add(field); } context.IndexUpdateTriggers.Apply(trigger => trigger.OnIndexEntryCreated(name, reduceKey, luceneDoc)); log.DebugFormat("Reduce key {0} result in index {1} gave document: {2}", reduceKey, name, luceneDoc); indexWriter.AddDocument(luceneDoc); actions.IncrementSuccessIndexing(); } return true; }); log.DebugFormat("Reduce resulted in {0} entries for {1} for reduce key {2}", count, name, reduceKey); }