/// <summary>Verifies that ForceInsert tolerates a null revision-history list.</summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestForceInsertEmptyHistory()
{
    RevisionInternal rev = new RevisionInternal("FakeDocId", "1-tango", false, database);

    var props = new Dictionary<string, object>
    {
        { "_id", rev.GetDocId() },
        { "_rev", rev.GetRevId() },
        { "message", "hi" }
    };
    rev.SetProperties(props);

    // Passing a null history must not throw.
    IList<string> revHistory = null;
    database.ForceInsert(rev, revHistory, null);
}
/// <summary>
/// Creates a document and checks its ID, current revision, properties,
/// and that it can be re-fetched after the document cache is cleared.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestCreateDocument()
{
    var properties = new Dictionary<string, object>
    {
        { "testName", "testCreateDocument" },
        { "tag", 1337 }
    };

    Database db = StartDatabase();
    Document doc = CreateDocumentWithProperties(db, properties);

    string docID = doc.GetId();
    NUnit.Framework.Assert.IsTrue("Invalid doc ID: " + docID, docID.Length > 10);

    string currentRevisionID = doc.GetCurrentRevisionId();
    NUnit.Framework.Assert.IsTrue("Invalid doc revision: " + docID, currentRevisionID.Length > 10);

    NUnit.Framework.Assert.AreEqual(doc.GetUserProperties(), properties);
    NUnit.Framework.Assert.AreEqual(db.GetDocument(docID), doc);

    // Drop cached Document instances so the next fetches load fresh copies.
    db.ClearDocumentCache();

    Document reloaded = db.GetExistingDocument(docID);
    NUnit.Framework.Assert.AreEqual(reloaded.GetId(), docID);
    NUnit.Framework.Assert.AreEqual(reloaded.GetCurrentRevisionId(), currentRevisionID);

    // A document that was never created must not "exist".
    NUnit.Framework.Assert.IsNull(db.GetExistingDocument("b0gus"));
}
// Converts a newest-first revision history into the CouchDB "_revisions"
// form. When every rev ID parses as "<gen>-<suffix>" and the generations
// descend by exactly 1, the result is {"start": firstGen, "ids": [suffixes]};
// otherwise it falls back to {"ids": [full rev IDs]}. Returns null for a
// null history.
public static IDictionary<string, object> MakeRevisionHistoryDict(IList<RevisionInternal> history)
{
    if (history == null)
    {
        return null;
    }

    // Attempt to compress the history into a start generation plus suffixes.
    IList<string> suffixes = new AList<string>();
    int start = -1;
    int previousGen = -1;
    foreach (RevisionInternal rev in history)
    {
        int gen = ParseRevIDNumber(rev.GetRevId());
        string suffix = ParseRevIDSuffix(rev.GetRevId());
        if (gen <= 0 || suffix.Length == 0)
        {
            // Unparseable rev ID: compression is impossible.
            start = -1;
            break;
        }
        if (start < 0)
        {
            start = gen;
        }
        else if (gen != previousGen - 1)
        {
            // Generations must descend by exactly one.
            start = -1;
            break;
        }
        previousGen = gen;
        suffixes.AddItem(suffix);
    }

    IDictionary<string, object> result = new Dictionary<string, object>();
    if (start == -1)
    {
        // Compression failed (or history was empty): list the full rev IDs.
        suffixes = new AList<string>();
        foreach (RevisionInternal rev in history)
        {
            suffixes.AddItem(rev.GetRevId());
        }
    }
    else
    {
        result.Put("start", start);
    }
    result.Put("ids", suffixes);
    return result;
}
// Regression test: a document revision that is purged after being queued must
// NOT be pushed by the replicator. The mock HTTP handler captures requests so
// the test can assert that no _bulk_docs request follows the purge.
public void TestPushPurgedDoc()
{
    if (!Boolean.Parse((string)Runtime.Properties["replicationTestsEnabled"]))
    {
        Assert.Inconclusive("Replication tests disabled.");
        return;
    }
    var numBulkDocRequests = 0;
    HttpRequestMessage lastBulkDocsRequest = null;
    var doc = CreateDocumentWithProperties(
        database,
        new Dictionary<string, object>
        {
            {"testName", "testPurgeDocument"}
        }
    );
    Assert.IsNotNull(doc);
    using (var remoteDb = _sg.CreateDatabase(TempDbName()))
    {
        var remote = remoteDb.RemoteUri;
        // Install a mock HTTP stack so no real network traffic occurs and
        // every request can be inspected afterwards.
        var factory = new MockHttpClientFactory();
        factory.HttpHandler.ClearResponders();
        factory.HttpHandler.AddResponderRevDiffsAllMissing();
        factory.HttpHandler.AddResponderFakeLocalDocumentUpdate404();
        factory.HttpHandler.AddResponderFakeBulkDocs();
        manager.DefaultHttpClientFactory = factory;
        var pusher = database.CreatePushReplication(remote);
        var replicationCaughtUpSignal = new CountdownEvent(1);
        pusher.Changed += (sender, e) =>
        {
            var changesCount = e.Source.ChangesCount;
            var completedChangesCount = e.Source.CompletedChangesCount;
            var msg = "changes: {0} completed changes: {1}".Fmt(changesCount, completedChangesCount);
            Log.D(Tag, msg);
            // Signal once the replicator has caught up (all queued changes
            // completed) — but only signal the countdown once.
            if (changesCount > 0 && changesCount == completedChangesCount && replicationCaughtUpSignal.CurrentCount > 0)
            {
                replicationCaughtUpSignal.Signal();
            }
        };
        pusher.Start();
        // wait until that doc is pushed
        var didNotTimeOut = replicationCaughtUpSignal.Wait(TimeSpan.FromSeconds(15));
        Assert.IsTrue(didNotTimeOut);
        // at this point, we should have captured exactly 1 bulk docs request
        numBulkDocRequests = 0;
        var handler = factory.HttpHandler;
        foreach (var capturedRequest in handler.CapturedRequests)
        {
            if (capturedRequest.Method == HttpMethod.Post && capturedRequest.RequestUri.AbsoluteUri.EndsWith("_bulk_docs", StringComparison.Ordinal))
            {
                lastBulkDocsRequest = capturedRequest;
                numBulkDocRequests++;
            }
        }
        Assert.AreEqual(1, numBulkDocRequests);
        // that bulk docs request should have the "start" key under its _revisions
        var jsonMap = MockHttpRequestHandler.GetJsonMapFromRequest(lastBulkDocsRequest);
        var docs = (jsonMap.Get("docs")).AsList<IDictionary<string,object>>();
        var onlyDoc = docs[0];
        var revisions = onlyDoc.Get("_revisions").AsDictionary<string,object>();
        Assert.IsTrue(revisions.ContainsKey("start"));
        // Reset for the next attempt.
        handler.ClearCapturedRequests();
        // now add a new revision, which will trigger the pusher to try to push it
        var properties = new Dictionary<string, object>();
        properties.Put("testName2", "update doc");
        var unsavedRevision = doc.CreateRevision();
        unsavedRevision.SetUserProperties(properties);
        unsavedRevision.Save();
        // but then immediately purge it
        doc.Purge();
        pusher.Start();
        // wait for a while to give the replicator a chance to push it
        // (it should not actually push anything)
        Sleep(5 * 1000);
        // we should not have gotten any more _bulk_docs requests, because
        // the replicator should not have pushed anything else.
        // (in the case of the bug, it was trying to push the purged revision)
        numBulkDocRequests = 0;
        foreach (var capturedRequest in handler.CapturedRequests)
        {
            if (capturedRequest.Method == HttpMethod.Post && capturedRequest.RequestUri.AbsoluteUri.EndsWith("_bulk_docs", StringComparison.Ordinal))
            {
                numBulkDocRequests++;
            }
        }
        Assert.AreEqual(0, numBulkDocRequests);
        pusher.Stop();
    }
}
// Normalizes a replication "source"/"target" entry. The entry may be a bare
// URL string (wrapped into {"url": value}) or a dictionary that is returned
// as-is. Any other shape yields an empty dictionary.
private IDictionary<string, object> ParseSourceOrTarget(IDictionary<string, object> properties, string key)
{
    object value = properties.Get(key);
    if (value is IDictionary)
    {
        return (IDictionary<string, object>)value;
    }

    IDictionary<string, object> result = new Dictionary<string, object>();
    if (value is string)
    {
        result.Put("url", (string)value);
    }
    return result;
}
// Prunes each document's revision tree to at most maxDepth generations by
// deleting old, non-current revisions. Pass 0 to use MaxRevTreeDepth.
// Returns the total number of revision rows deleted.
// Throws CouchbaseLiteException (InternalServerError) on any storage failure.
internal int PruneRevsToMaxDepth(int maxDepth)
{
    int outPruned = 0;
    bool shouldCommit = false;
    IDictionary<long, int> toPrune = new Dictionary<long, int>();
    if (maxDepth == 0)
    {
        maxDepth = MaxRevTreeDepth;
    }
    // First find which docs need pruning, and by how much:
    Cursor cursor = null;
    var sql = "SELECT doc_id, MIN(revid), MAX(revid) FROM revs GROUP BY doc_id";
    try
    {
        cursor = StorageEngine.RawQuery(sql);
        while (cursor.MoveToNext())
        {
            long docNumericID = cursor.GetLong(0);
            var minGenRevId = cursor.GetString(1);
            var maxGenRevId = cursor.GetString(2);
            var minGen = RevisionInternal.GenerationFromRevID(minGenRevId);
            var maxGen = RevisionInternal.GenerationFromRevID(maxGenRevId);
            if ((maxGen - minGen + 1) > maxDepth)
            {
                // Value is the generation span to cut; used below to build the cutoff rev ID.
                toPrune.Put(docNumericID, (maxGen - minGen));
            }
        }
        BeginTransaction();
        if (toPrune.Count == 0)
        {
            return 0;
        }
        foreach (long id in toPrune.Keys)
        {
            // NOTE(review): revid is compared as TEXT here, so e.g. "10-..."
            // sorts before "2-..."; this mirrors the upstream implementation.
            string minIDToKeep = String.Format("{0}-", (toPrune.Get(id) + 1));
            // Fix: delete rows for the doc being iterated ('id'), not for the
            // stale 'docNumericID' left over from the scan loop above — the
            // original only ever pruned the last document scanned.
            string[] deleteArgs = new string[] { System.Convert.ToString(id), minIDToKeep };
            int rowsDeleted = StorageEngine.Delete("revs", "doc_id=? AND revid < ? AND current=0", deleteArgs);
            outPruned += rowsDeleted;
        }
        shouldCommit = true;
    }
    catch (Exception e)
    {
        throw new CouchbaseLiteException(e, StatusCode.InternalServerError);
    }
    finally
    {
        // Commits only if the delete loop completed without error.
        EndTransaction(shouldCommit);
        if (cursor != null)
        {
            cursor.Close();
        }
    }
    return outPruned;
}
/// <summary>INSERTION:</summary>
// Serializes a revision's properties to JSON for insertion. "_"-prefixed
// keys are stripped unless whitelisted; an unknown "_" key aborts with null.
// Returns null as well when the revision has no properties or serialization fails.
internal IEnumerable<Byte> EncodeDocumentJSON(RevisionInternal rev)
{
    var origProps = rev.GetProperties();
    if (origProps == null)
    {
        return null;
    }

    // "_"-prefixed keys that are allowed to pass through into the JSON body.
    var specialKeysToLeave = new[] {
        "_removed",
        "_replication_id",
        "_replication_state",
        "_replication_state_time"
    };

    // Don't allow any "_"-prefixed keys. Known ones we'll ignore, unknown ones are an error.
    var properties = new Dictionary<String, Object>(origProps.Count);
    foreach (var key in origProps.Keys)
    {
        if (!key.StartsWith("_", StringComparison.InvariantCultureIgnoreCase))
        {
            properties.Put(key, origProps.Get(key));
            continue;
        }
        if (!KnownSpecialKeys.Contains(key))
        {
            Log.E(Tag, "Database: Invalid top-level key '" + key + "' in document to be inserted");
            return null;
        }
        if (specialKeysToLeave.Contains(key))
        {
            properties.Put(key, origProps.Get(key));
        }
    }

    IEnumerable<byte> json = null;
    try
    {
        json = Manager.GetObjectMapper().WriteValueAsBytes(properties);
    }
    catch (Exception e)
    {
        Log.E(Tag, "Error serializing " + rev + " to JSON", e);
    }
    return json;
}
// Replaces attachment data whose revpos is < minRevPos with stubs.
// If attachmentsFollow==YES, replaces data with "follows" key.
private static void StubOutAttachmentsInRevBeforeRevPos(RevisionInternal rev, int minRevPos, bool attachmentsFollow)
{
    // Nothing would be stripped and no "follows" markers are wanted: no-op.
    if (minRevPos <= 1 && !attachmentsFollow)
    {
        return;
    }

    rev.MutateAttachments((name, attachment) =>
    {
        var revPos = 0;
        if (attachment.Get("revpos") != null)
        {
            revPos = (int)attachment.Get("revpos");
        }

        // revpos 0 (absent) counts as "keep"; otherwise keep only new-enough revisions.
        var includeAttachment = (revPos == 0 || revPos >= minRevPos);
        var stubItOut = !includeAttachment
            && (attachment.Get("stub") == null || (bool)attachment.Get("stub") == false);
        var addFollows = includeAttachment
            && attachmentsFollow
            && (attachment.Get("follows") == null || !(bool)attachment.Get("follows"));

        // Entry is already in the desired state: return it unchanged.
        if (!stubItOut && !addFollows)
        {
            return attachment;
        }

        // Copy before editing so the caller's dictionary is not mutated,
        // and drop the inline body in either case.
        var edited = new Dictionary<string, object>(attachment);
        edited.Remove("data");
        if (stubItOut)
        {
            edited.Remove("follows");
            edited.Put("stub", true);
            Log.V(Tag, "Stubbed out attachment {0}: revpos {1} < {2}".Fmt(rev, revPos, minRevPos));
        }
        else
        {
            edited.Remove("stub");
            edited.Put("follows", true);
            Log.V(Tag, "Added 'follows' for attachment {0}: revpos {1} >= {2}".Fmt(rev, revPos, minRevPos));
        }
        return edited;
    });
}
/// <summary>Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev.
/// </summary>
/// <remarks>
/// Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev.
/// Rev must already have its revID and sequence properties set.
/// </remarks>
internal IDictionary<String, Object> ExtraPropertiesForRevision(RevisionInternal rev, DocumentContentOptions contentOptions)
{
    var docId = rev.GetDocId();
    var revId = rev.GetRevId();
    var sequenceNumber = rev.GetSequence();
    Debug.Assert((revId != null));
    Debug.Assert((sequenceNumber > 0));
    // Get attachment metadata, and optionally the contents:
    IDictionary<string, object> attachmentsDict = null;
    if (!contentOptions.HasFlag(DocumentContentOptions.NoAttachments))
    {
        attachmentsDict = GetAttachmentsDictForSequenceWithContent(sequenceNumber, contentOptions);
    }
    // Get more optional stuff to put in the properties:
    //OPT: This probably ends up making redundant SQL queries if multiple options are enabled.
    // -1 is a sentinel: "_local_seq" is only emitted when IncludeLocalSeq was requested.
    var localSeq = -1L;
    if (contentOptions.HasFlag(DocumentContentOptions.IncludeLocalSeq))
    {
        localSeq = sequenceNumber;
    }
    IDictionary<string, object> revHistory = null;
    if (contentOptions.HasFlag(DocumentContentOptions.IncludeRevs))
    {
        revHistory = GetRevisionHistoryDict(rev);
    }
    IList<object> revsInfo = null;
    if (contentOptions.HasFlag(DocumentContentOptions.IncludeRevsInfo))
    {
        // "_revs_info": one {rev, status} entry per revision in the history.
        // IsMissing wins over IsDeleted because it is checked last.
        revsInfo = new AList<object>();
        var revHistoryFull = GetRevisionHistory(rev);
        foreach (RevisionInternal historicalRev in revHistoryFull)
        {
            var revHistoryItem = new Dictionary<string, object>();
            var status = "available";
            if (historicalRev.IsDeleted())
            {
                status = "deleted";
            }
            if (historicalRev.IsMissing())
            {
                status = "missing";
            }
            revHistoryItem.Put("rev", historicalRev.GetRevId());
            revHistoryItem["status"] = status;
            revsInfo.AddItem(revHistoryItem);
        }
    }
    IList<string> conflicts = null;
    if (contentOptions.HasFlag(DocumentContentOptions.IncludeConflicts))
    {
        // "_conflicts": IDs of other live (non-deleted) revisions of this doc.
        var revs = GetAllRevisionsOfDocumentID(docId, true);
        if (revs.Count > 1)
        {
            conflicts = new AList<string>();
            foreach (RevisionInternal savedRev in revs)
            {
                if (!(savedRev.Equals(rev) || savedRev.IsDeleted()))
                {
                    conflicts.AddItem(savedRev.GetRevId());
                }
            }
        }
    }
    // Assemble the metadata dictionary; optional keys are only added when populated.
    var result = new Dictionary<string, object>();
    result["_id"] = docId;
    result["_rev"] = revId;
    if (rev.IsDeleted())
    {
        result["_deleted"] = true;
    }
    if (attachmentsDict != null)
    {
        result["_attachments"] = attachmentsDict;
    }
    if (localSeq > -1)
    {
        result["_local_seq"] = localSeq;
    }
    if (revHistory != null)
    {
        result["_revisions"] = revHistory;
    }
    if (revsInfo != null)
    {
        result["_revs_info"] = revsInfo;
    }
    if (conflicts != null)
    {
        result["_conflicts"] = conflicts;
    }
    return result;
}
/// <summary>
/// Test that the public API works as expected in change notifications after a rev tree
/// insertion.
/// </summary>
/// <remarks>
/// Test that the public API works as expected in change notifications after a rev tree
/// insertion. See https://github.com/couchbase/couchbase-lite-android-core/pull/27
/// </remarks>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestRevTreeChangeNotifications()
{
    string DocumentId = "MyDocId";
    // add a document with a single (first) revision
    RevisionInternal rev = new RevisionInternal(DocumentId, "1-one", false, database);
    IDictionary<string, object> revProperties = new Dictionary<string, object>();
    revProperties.Put("_id", rev.GetDocId());
    revProperties.Put("_rev", rev.GetRevId());
    revProperties.Put("message", "hi");
    rev.SetProperties(revProperties);
    IList<string> revHistory = Arrays.AsList(rev.GetRevId());
    // The listener class (defined elsewhere in this file) asserts on the
    // change event fired by this first insertion.
    Database.ChangeListener listener = new _ChangeListener_154(this, DocumentId, rev);
    database.AddChangeListener(listener);
    database.ForceInsert(rev, revHistory, null);
    database.RemoveChangeListener(listener);
    // add two more revisions to the document
    RevisionInternal rev3 = new RevisionInternal(DocumentId, "3-three", false, database);
    IDictionary<string, object> rev3Properties = new Dictionary<string, object>();
    rev3Properties.Put("_id", rev3.GetDocId());
    rev3Properties.Put("_rev", rev3.GetRevId());
    rev3Properties.Put("message", "hi again");
    rev3.SetProperties(rev3Properties);
    // History is newest-first; "2-two" is an intermediate revision with no body.
    IList<string> rev3History = Arrays.AsList(rev3.GetRevId(), "2-two", rev.GetRevId());
    listener = new _ChangeListener_182(this, DocumentId, rev3);
    database.AddChangeListener(listener);
    database.ForceInsert(rev3, rev3History, null);
    database.RemoveChangeListener(listener);
    // add a conflicting revision, with the same history length as the last revision we
    // inserted. Since this new revision's revID has a higher ASCII sort, it should become the
    // new winning revision.
    RevisionInternal conflictRev = new RevisionInternal(DocumentId, "3-winner", false, database);
    IDictionary<string, object> conflictProperties = new Dictionary<string, object>();
    conflictProperties.Put("_id", conflictRev.GetDocId());
    conflictProperties.Put("_rev", conflictRev.GetRevId());
    conflictProperties.Put("message", "winner");
    conflictRev.SetProperties(conflictProperties);
    // Same shared ancestors as rev3's branch, so this creates a true conflict.
    IList<string> conflictRevHistory = Arrays.AsList(conflictRev.GetRevId(), "2-two", rev.GetRevId());
    listener = new _ChangeListener_217(this, DocumentId, conflictRev);
    database.AddChangeListener(listener);
    database.ForceInsert(conflictRev, conflictRevHistory, null);
    database.RemoveChangeListener(listener);
}
// Builds AttachmentInternal objects from a revision's "_attachments" entry.
// Inline ("data") bodies are base64-decoded and stored in the blob store;
// "follows" attachments are installed from the pending-attachments registry;
// stub entries are validated and skipped (not returned).
// Throws CouchbaseLiteException for bad base64, failed blob storage,
// non-stub entries without data/follows, invalid revpos, or unknown encodings.
internal IDictionary<string, AttachmentInternal> GetAttachmentsFromRevision(RevisionInternal rev)
{
    IDictionary<string, object> revAttachments = (IDictionary<string, object>)rev.GetPropertyForKey("_attachments");
    // Deleted revisions and revisions without attachments yield an empty map.
    if (revAttachments == null || revAttachments.Count == 0 || rev.IsDeleted())
    {
        return new Dictionary<string, AttachmentInternal>();
    }
    IDictionary<string, AttachmentInternal> attachments = new Dictionary<string, AttachmentInternal>();
    foreach (string name in revAttachments.Keys)
    {
        IDictionary<string, object> attachInfo = (IDictionary<string, object>)revAttachments.Get(name);
        string contentType = (string)attachInfo.Get("content_type");
        AttachmentInternal attachment = new AttachmentInternal(name, contentType);
        string newContentBase64 = (string)attachInfo.Get("data");
        if (newContentBase64 != null)
        {
            // If there's inline attachment data, decode and store it:
            byte[] newContents;
            try
            {
                newContents = Base64.Decode(newContentBase64);
            }
            catch (IOException e)
            {
                throw new CouchbaseLiteException(e, Status.BadEncoding);
            }
            attachment.SetLength(newContents.Length);
            BlobKey outBlobKey = new BlobKey();
            bool storedBlob = GetAttachments().StoreBlob(newContents, outBlobKey);
            attachment.SetBlobKey(outBlobKey);
            if (!storedBlob)
            {
                throw new CouchbaseLiteException(Status.StatusAttachmentError);
            }
        }
        else
        {
            if (attachInfo.ContainsKey("follows") && ((bool)attachInfo.Get("follows")) == true)
            {
                // "follows" means the uploader provided the attachment in a separate MIME part.
                // This means it's already been registered in _pendingAttachmentsByDigest;
                // I just need to look it up by its "digest" property and install it into the store:
                InstallAttachment(attachment, attachInfo);
            }
            else
            {
                // This item is just a stub; validate and skip it
                // NOTE(review): the cast throws NullReferenceException if the
                // "stub" key is absent — confirm callers always include it.
                if (((bool)attachInfo.Get("stub")) == false)
                {
                    throw new CouchbaseLiteException("Expected this attachment to be a stub", Status.BadAttachment);
                }
                int revPos = ((int)attachInfo.Get("revpos"));
                if (revPos <= 0)
                {
                    throw new CouchbaseLiteException("Invalid revpos: " + revPos, Status.BadAttachment);
                }
                // Valid stub: nothing to store; it is intentionally not added
                // to the returned 'attachments' map.
                continue;
            }
        }
        // Handle encoded attachment:
        string encodingStr = (string)attachInfo.Get("encoding");
        if (encodingStr != null && encodingStr.Length > 0)
        {
            if (Sharpen.Runtime.EqualsIgnoreCase(encodingStr, "gzip"))
            {
                attachment.SetEncoding(AttachmentInternal.AttachmentEncoding.AttachmentEncodingGZIP);
            }
            else
            {
                throw new CouchbaseLiteException("Unnkown encoding: " + encodingStr, Status.BadEncoding);
            }
            // The length recorded above was the encoded size; "length" (if
            // present) supplies the decoded size.
            attachment.SetEncodedLength(attachment.GetLength());
            if (attachInfo.ContainsKey("length"))
            {
                Number attachmentLength = (Number)attachInfo.Get("length");
                attachment.SetLength(attachmentLength);
            }
        }
        if (attachInfo.ContainsKey("revpos"))
        {
            attachment.SetRevpos((int)attachInfo.Get("revpos"));
        }
        else
        {
            // Default revpos when the metadata omits it.
            attachment.SetRevpos(1);
        }
        attachments.Put(name, attachment);
    }
    return attachments;
}
// Serializes a revision's properties to JSON for storage. Known "_"-prefixed
// metadata keys are silently dropped; an unknown "_" key aborts with null.
// Returns null too when the revision has no properties or serialization fails.
public byte[] EncodeDocumentJSON(RevisionInternal rev)
{
    IDictionary<string, object> origProps = rev.GetProperties();
    if (origProps == null)
    {
        return null;
    }

    // Don't allow any "_"-prefixed keys. Known ones we'll ignore, unknown ones are an error.
    IDictionary<string, object> properties = new Dictionary<string, object>(origProps.Count);
    foreach (string key in origProps.Keys)
    {
        if (!key.StartsWith("_"))
        {
            properties.Put(key, origProps.Get(key));
        }
        else if (!KnownSpecialKeys.Contains(key))
        {
            Log.E(Tag, "Database: Invalid top-level key '" + key + "' in document to be inserted");
            return null;
        }
    }

    byte[] json = null;
    try
    {
        json = Manager.GetObjectMapper().WriteValueAsBytes(properties);
    }
    catch (Exception e)
    {
        Log.E(Database.Tag, "Error serializing " + rev + " to JSON", e);
    }
    return json;
}
// Replaces the bodies of attachments whose revpos is older than minRevPos
// with stub entries ("stub": true, no "data"/"follows" keys), updating the
// revision's properties in place only when something actually changed.
public void StubOutAttachmentsIn(RevisionInternal rev, int minRevPos)
{
    if (minRevPos <= 1)
    {
        return;
    }
    IDictionary<string, object> properties = (IDictionary<string, object>)rev.GetProperties();
    IDictionary<string, object> attachments = null;
    if (properties != null)
    {
        attachments = (IDictionary<string, object>)properties.Get("_attachments");
    }
    // Fix: the original dereferenced 'attachments' unconditionally, throwing
    // a NullReferenceException for revisions with no "_attachments" entry.
    if (attachments == null)
    {
        return;
    }
    IDictionary<string, object> editedProperties = null;
    IDictionary<string, object> editedAttachments = null;
    foreach (string name in attachments.Keys)
    {
        IDictionary<string, object> attachment = (IDictionary<string, object>)attachments.Get(name);
        int revPos = (int)attachment.Get("revpos");
        object stub = attachment.Get("stub");
        if (revPos > 0 && revPos < minRevPos && (stub == null))
        {
            // Strip this attachment's body. First make its dictionary mutable:
            if (editedProperties == null)
            {
                editedProperties = new Dictionary<string, object>(properties);
                editedAttachments = new Dictionary<string, object>(attachments);
                editedProperties.Put("_attachments", editedAttachments);
            }
            // ...then remove the 'data' and 'follows' key:
            IDictionary<string, object> editedAttachment = new Dictionary<string, object>(attachment);
            Sharpen.Collections.Remove(editedAttachment, "data");
            Sharpen.Collections.Remove(editedAttachment, "follows");
            editedAttachment.Put("stub", true);
            editedAttachments.Put(name, editedAttachment);
            Log.D(Database.Tag, "Stubbed out attachment" + rev + " " + name + ": revpos" + revPos + " " + minRevPos);
        }
    }
    // Only touch the revision if at least one attachment was stubbed out.
    if (editedProperties != null)
    {
        rev.SetProperties(editedProperties);
    }
}
// Returns the "_attachments" metadata dictionary (filename -> attachment
// info) for the revision at the given sequence, or null if it has no
// attachment rows or the query fails. Depending on contentOptions, each entry
// carries an inline base64 "data" value, a "follows" marker (for big
// attachments), or just a "stub" marker.
public IDictionary<string, object> GetAttachmentsDictForSequenceWithContent(long sequence, EnumSet<Database.TDContentOptions> contentOptions)
{
    System.Diagnostics.Debug.Assert((sequence > 0));
    Cursor cursor = null;
    string[] args = new string[] { System.Convert.ToString(sequence) };
    try
    {
        cursor = database.RawQuery("SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=?", args);
        if (!cursor.MoveToNext())
        {
            // No attachment rows at all for this sequence.
            return null;
        }
        IDictionary<string, object> result = new Dictionary<string, object>();
        while (!cursor.IsAfterLast())
        {
            bool dataSuppressed = false;
            int length = cursor.GetInt(3);
            byte[] keyData = cursor.GetBlob(1);
            BlobKey key = new BlobKey(keyData);
            // NOTE(review): the "sha1-" prefix implies the key bytes are the
            // attachment's SHA-1 digest — confirm against BlobKey's contract.
            string digestString = "sha1-" + Base64.EncodeBytes(keyData);
            string dataBase64 = null;
            if (contentOptions.Contains(Database.TDContentOptions.TDIncludeAttachments))
            {
                if (contentOptions.Contains(Database.TDContentOptions.TDBigAttachmentsFollow) && length >= Database.kBigAttachmentLength)
                {
                    // Too big to inline: mark it with "follows" instead.
                    dataSuppressed = true;
                }
                else
                {
                    byte[] data = attachments.BlobForKey(key);
                    if (data != null)
                    {
                        dataBase64 = Base64.EncodeBytes(data);
                    }
                    else
                    {
                        // <-- very expensive
                        Log.W(Database.Tag, "Error loading attachment");
                    }
                }
            }
            IDictionary<string, object> attachment = new Dictionary<string, object>();
            // "stub" is set whenever the body is absent (not requested,
            // failed to load, or suppressed as a big attachment).
            if (dataBase64 == null || dataSuppressed == true)
            {
                attachment.Put("stub", true);
            }
            if (dataBase64 != null)
            {
                attachment.Put("data", dataBase64);
            }
            if (dataSuppressed == true)
            {
                attachment.Put("follows", true);
            }
            attachment.Put("digest", digestString);
            string contentType = cursor.GetString(2);
            attachment.Put("content_type", contentType);
            attachment.Put("length", length);
            attachment.Put("revpos", cursor.GetInt(4));
            string filename = cursor.GetString(0);
            result.Put(filename, attachment);
            cursor.MoveToNext();
        }
        return result;
    }
    catch (SQLException e)
    {
        Log.E(Database.Tag, "Error getting attachments for sequence", e);
        return null;
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
}
// Implements the _all_docs query. Builds a SQL statement from the options,
// walks the matching rows (skipping conflict rows, optionally collecting
// their rev IDs), and returns {"rows": [...], "total_rows": N, "offset": skip}
// plus "update_seq" when requested. Throws CouchbaseLiteException on SQL errors.
public IDictionary<string, object> GetAllDocs(QueryOptions options)
{
    IDictionary<string, object> result = new Dictionary<string, object>();
    IList<QueryRow> rows = new AList<QueryRow>();
    if (options == null)
    {
        options = new QueryOptions();
    }
    bool includeDeletedDocs = (options.GetAllDocsMode() == Query.AllDocsMode.IncludeDeleted);
    long updateSeq = 0;
    if (options.IsUpdateSeq())
    {
        updateSeq = GetLastSequenceNumber();
    }
    // TODO: needs to be atomic with the following SELECT
    StringBuilder sql = new StringBuilder("SELECT revs.doc_id, docid, revid, sequence");
    if (options.IsIncludeDocs())
    {
        sql.Append(", json");
    }
    if (includeDeletedDocs)
    {
        sql.Append(", deleted");
    }
    sql.Append(" FROM revs, docs WHERE");
    if (options.GetKeys() != null)
    {
        if (options.GetKeys().Count == 0)
        {
            return result;
        }
        string commaSeperatedIds = JoinQuotedObjects(options.GetKeys());
        // Fix: the Java-style "%s" placeholder is not substituted by
        // string.Format in .NET, so the key filter never reached the SQL.
        sql.Append(string.Format(" revs.doc_id IN (SELECT doc_id FROM docs WHERE docid IN ({0})) AND", commaSeperatedIds));
    }
    sql.Append(" docs.doc_id = revs.doc_id AND current=1");
    if (!includeDeletedDocs)
    {
        sql.Append(" AND deleted=0");
    }
    IList<string> args = new AList<string>();
    object minKey = options.GetStartKey();
    object maxKey = options.GetEndKey();
    bool inclusiveMin = true;
    bool inclusiveMax = options.IsInclusiveEnd();
    if (options.IsDescending())
    {
        // Descending order swaps the roles of start/end keys.
        minKey = maxKey;
        maxKey = options.GetStartKey();
        inclusiveMin = inclusiveMax;
        inclusiveMax = true;
    }
    if (minKey != null)
    {
        System.Diagnostics.Debug.Assert((minKey is string));
        sql.Append((inclusiveMin ? " AND docid >= ?" : " AND docid > ?"));
        args.AddItem((string)minKey);
    }
    if (maxKey != null)
    {
        System.Diagnostics.Debug.Assert((maxKey is string));
        sql.Append((inclusiveMax ? " AND docid <= ?" : " AND docid < ?"));
        args.AddItem((string)maxKey);
    }
    // Fix: same "%s" -> "{0}"/"{1}" placeholder correction as above.
    sql.Append(string.Format(" ORDER BY docid {0}, {1} revid DESC LIMIT ? OFFSET ?",
        (options.IsDescending() ? "DESC" : "ASC"),
        (includeDeletedDocs ? "deleted ASC," : string.Empty)));
    args.AddItem(Sharpen.Extensions.ToString(options.GetLimit()));
    args.AddItem(Sharpen.Extensions.ToString(options.GetSkip()));
    Cursor cursor = null;
    IDictionary<string, QueryRow> docs = new Dictionary<string, QueryRow>();
    try
    {
        cursor = database.RawQuery(sql.ToString(), Sharpen.Collections.ToArray(args, new string[args.Count]));
        bool keepGoing = cursor.MoveToNext();
        while (keepGoing)
        {
            long docNumericID = cursor.GetLong(0);
            string docId = cursor.GetString(1);
            string revId = cursor.GetString(2);
            long sequenceNumber = cursor.GetLong(3);
            bool deleted = includeDeletedDocs && cursor.GetInt(GetDeletedColumnIndex(options)) > 0;
            IDictionary<string, object> docContents = null;
            if (options.IsIncludeDocs())
            {
                byte[] json = cursor.GetBlob(4);
                docContents = DocumentPropertiesFromJSON(json, docId, revId, deleted, sequenceNumber, options.GetContentOptions());
            }
            // Iterate over following rows with the same doc_id -- these are conflicts.
            // Skip them, but collect their revIDs if the 'conflicts' option is set:
            IList<string> conflicts = new AList<string>();
            while (((keepGoing = cursor.MoveToNext()) == true) && cursor.GetLong(0) == docNumericID)
            {
                if (options.GetAllDocsMode() == Query.AllDocsMode.ShowConflicts || options.GetAllDocsMode() == Query.AllDocsMode.OnlyConflicts)
                {
                    if (conflicts.IsEmpty())
                    {
                        conflicts.AddItem(revId);
                    }
                    conflicts.AddItem(cursor.GetString(2));
                }
            }
            if (options.GetAllDocsMode() == Query.AllDocsMode.OnlyConflicts && conflicts.IsEmpty())
            {
                continue;
            }
            IDictionary<string, object> value = new Dictionary<string, object>();
            value.Put("rev", revId);
            value.Put("_conflicts", conflicts);
            if (includeDeletedDocs)
            {
                // Fix: the original ternary (deleted ? true : null) has no
                // common type and does not compile in C#; box the bool.
                value.Put("deleted", (deleted ? (object)true : null));
            }
            QueryRow change = new QueryRow(docId, sequenceNumber, docId, value, docContents);
            change.SetDatabase(this);
            if (options.GetKeys() != null)
            {
                docs.Put(docId, change);
            }
            else
            {
                rows.AddItem(change);
            }
        }
        if (options.GetKeys() != null)
        {
            // Emit one row per requested key, preserving key order; keys with
            // no matching row get a stub row describing the winning revision.
            foreach (object docIdObject in options.GetKeys())
            {
                if (docIdObject is string)
                {
                    string docId = (string)docIdObject;
                    QueryRow change = docs.Get(docId);
                    if (change == null)
                    {
                        IDictionary<string, object> value = new Dictionary<string, object>();
                        long docNumericID = GetDocNumericID(docId);
                        if (docNumericID > 0)
                        {
                            IList<bool> outIsDeleted = new AList<bool>();
                            IList<bool> outIsConflict = new AList<bool>();
                            string revId = WinningRevIDOfDoc(docNumericID, outIsDeleted, outIsConflict);
                            if (revId != null)
                            {
                                value.Put("rev", revId);
                                // NOTE(review): always reports deleted=true for
                                // missing keys; mirrors the sibling implementation.
                                value.Put("deleted", true);
                            }
                        }
                        change = new QueryRow(docId, 0, docId, value, null);
                        change.SetDatabase(this);
                    }
                    rows.AddItem(change);
                }
            }
        }
    }
    catch (SQLException e)
    {
        Log.E(Database.Tag, "Error getting all docs", e);
        throw new CouchbaseLiteException("Error getting all docs", e, new Status(Status.InternalServerError));
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
    result.Put("rows", rows);
    result.Put("total_rows", rows.Count);
    result.Put("offset", options.GetSkip());
    if (updateSeq != 0)
    {
        result.Put("update_seq", updateSeq);
    }
    return result;
}
/// <summary>
/// Gets or sets the userProperties of the <see cref="Couchbase.Lite.Revision"/>.
/// </summary>
/// <remarks>
/// Gets or sets the userProperties of the <see cref="Couchbase.Lite.Revision"/>.
/// Get, returns the properties of the <see cref="Couchbase.Lite.Revision"/>
/// without any properties with keys prefixed with '_' (which contain Couchbase Lite data).
/// Set, replaces all properties except for those with keys prefixed with '_'.
/// </remarks>
/// <value>The userProperties of the <see cref="Couchbase.Lite.Revision"/>.</value>
public void SetUserProperties(IDictionary<String, Object> userProperties)
{
    var newProps = new Dictionary<String, Object>();
    newProps.PutAll(userProperties);

    // Carry the existing metadata ("_"-prefixed) entries over so callers
    // cannot clobber them through this setter.
    foreach (string key in Properties.Keys)
    {
        if (!key.StartsWith("_", StringComparison.InvariantCultureIgnoreCase))
        {
            continue;
        }
        newProps.Put(key, properties.Get(key));
    }

    properties = newProps;
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
// Implements the _all_docs query: builds a SQL statement from the query
// options, walks the result rows (skipping conflict rows, optionally
// collecting their rev IDs), and returns
// {"rows": [...], "total_rows": N, "offset": skip[, "update_seq": seq]}.
internal IDictionary<String, Object> GetAllDocs(QueryOptions options)
{
    var result = new Dictionary<String, Object>();
    var rows = new AList<QueryRow>();
    if (options == null)
        options = new QueryOptions();
    var includeDeletedDocs = (options.GetAllDocsMode() == AllDocsMode.IncludeDeleted);
    var updateSeq = 0L;
    if (options.IsUpdateSeq())
    {
        updateSeq = GetLastSequenceNumber();
    }
    // TODO: needs to be atomic with the following SELECT
    var sql = new StringBuilder("SELECT revs.doc_id, docid, revid, sequence");
    if (options.IsIncludeDocs())
    {
        sql.Append(", json");
    }
    if (includeDeletedDocs)
    {
        sql.Append(", deleted");
    }
    sql.Append(" FROM revs, docs WHERE");
    if (options.GetKeys() != null)
    {
        if (options.GetKeys().Count() == 0)
        {
            // Empty key filter: nothing can match.
            return result;
        }
        var commaSeperatedIds = JoinQuotedObjects(options.GetKeys());
        sql.Append(String.Format(" revs.doc_id IN (SELECT doc_id FROM docs WHERE docid IN ({0})) AND", commaSeperatedIds));
    }
    sql.Append(" docs.doc_id = revs.doc_id AND current=1");
    if (!includeDeletedDocs)
    {
        sql.Append(" AND deleted=0");
    }
    var args = new AList<String>();
    var minKey = options.GetStartKey();
    var maxKey = options.GetEndKey();
    var inclusiveMin = true;
    var inclusiveMax = options.IsInclusiveEnd();
    if (options.IsDescending())
    {
        // Descending order swaps the roles of start/end keys.
        minKey = maxKey;
        maxKey = options.GetStartKey();
        inclusiveMin = inclusiveMax;
        inclusiveMax = true;
    }
    if (minKey != null)
    {
        Debug.Assert((minKey is String));
        sql.Append((inclusiveMin ? " AND docid >= ?" : " AND docid > ?"));
        args.AddItem((string)minKey);
    }
    if (maxKey != null)
    {
        Debug.Assert((maxKey is string));
        sql.Append((inclusiveMax ? " AND docid <= ?" : " AND docid < ?"));
        args.AddItem((string)maxKey);
    }
    sql.Append(
        String.Format(" ORDER BY docid {0}, {1} revid DESC LIMIT ? OFFSET ?",
            options.IsDescending() ? "DESC" : "ASC",
            includeDeletedDocs ? "deleted ASC," : String.Empty
        )
    );
    args.AddItem(options.GetLimit().ToString());
    args.AddItem(options.GetSkip().ToString());
    Cursor cursor = null;
    var docs = new Dictionary<String, QueryRow>();
    try
    {
        cursor = StorageEngine.RawQuery(
            sql.ToString(),
            CommandBehavior.SequentialAccess,
            args.ToArray()
        );
        // cursor.MoveToNext();
        var keepGoing = cursor.MoveToNext();
        while (keepGoing)
        {
            var docNumericID = cursor.GetLong(0);
            var includeDocs = options.IsIncludeDocs();
            var docId = cursor.GetString(1);
            var revId = cursor.GetString(2);
            var sequenceNumber = cursor.GetLong(3);
            byte[] json = null;
            if (includeDocs)
            {
                json = cursor.GetBlob(4);
            }
            var deleted = includeDeletedDocs && cursor.GetInt(GetDeletedColumnIndex(options)) > 0;
            IDictionary<String, Object> docContents = null;
            if (includeDocs)
            {
                docContents = DocumentPropertiesFromJSON(json, docId, revId, deleted, sequenceNumber, options.GetContentOptions());
            }
            // Iterate over following rows with the same doc_id -- these are conflicts.
            // Skip them, but collect their revIDs if the 'conflicts' option is set:
            var conflicts = new List<string>();
            while (((keepGoing = cursor.MoveToNext())) && cursor.GetLong(0) == docNumericID)
            {
                if (options.GetAllDocsMode() == AllDocsMode.ShowConflicts || options.GetAllDocsMode() == AllDocsMode.OnlyConflicts)
                {
                    if (conflicts.IsEmpty())
                    {
                        // First conflict found: include the winning rev too.
                        conflicts.AddItem(revId);
                    }
                    conflicts.AddItem(cursor.GetString(2));
                }
            }
            if (options.GetAllDocsMode() == AllDocsMode.OnlyConflicts && conflicts.IsEmpty())
            {
                continue;
            }
            var value = new Dictionary<string, object>();
            value["rev"] = revId;
            value["_conflicts"] = conflicts;
            if (includeDeletedDocs)
            {
                value["deleted"] = deleted;
            }
            var change = new QueryRow(docId, sequenceNumber, docId, value, docContents);
            change.Database = this;
            if (options.GetKeys() != null)
            {
                docs[docId] = change;
            }
            else
            {
                rows.AddItem(change);
            }
        }
        if (options.GetKeys() != null)
        {
            // Emit one row per requested key, preserving key order; keys with
            // no stored row get a stub row describing the winning revision.
            foreach (var docIdObject in options.GetKeys())
            {
                if (docIdObject is string)
                {
                    var docId = (string)docIdObject;
                    var change = docs.Get(docId);
                    if (change == null)
                    {
                        var value = new Dictionary<string, object>();
                        var docNumericID = GetDocNumericID(docId);
                        if (docNumericID > 0)
                        {
                            bool deleted;
                            var outIsDeleted = new AList<bool>();
                            var outIsConflict = new AList<bool>();
                            var revId = WinningRevIDOfDoc(docNumericID, outIsDeleted, outIsConflict);
                            if (outIsDeleted.Count > 0)
                            {
                                deleted = true;
                            }
                            if (revId != null)
                            {
                                value["rev"] = revId;
                                value["deleted"] = true; // FIXME: SHould this be set the value of `deleted`?
                            }
                        }
                        change = new QueryRow((value != null ? docId : null), 0, docId, value, null);
                        change.Database = this;
                    }
                    rows.AddItem(change);
                }
            }
        }
    }
    catch (SQLException e)
    {
        Log.E(Tag, "Error getting all docs", e);
        throw new CouchbaseLiteException("Error getting all docs", e, new Status(StatusCode.InternalServerError));
    }
    finally
    {
        if (cursor != null)
            cursor.Close();
    }
    result["rows"] = rows;
    result["total_rows"] = rows.Count;
    result.Put("offset", options.GetSkip());
    if (updateSeq != 0)
    {
        result["update_seq"] = updateSeq;
    }
    return result;
}
/// <summary>
/// Exercises revision-tree storage: force-inserts a revision claiming a deep
/// history, then a conflicting branch that shares ancestors with it, and
/// verifies conflict notification, winner selection, and the _changes feed.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestRevTree()
{
    // Insert a revision with a 4-deep history; "3-thrice".."1-won" are phantom
    // ancestors known only by their IDs (no stored bodies).
    RevisionInternal rev = new RevisionInternal("MyDocId", "4-foxy", false, database);
    IDictionary<string, object> revProperties = new Dictionary<string, object>();
    revProperties.Put("_id", rev.GetDocId());
    revProperties.Put("_rev", rev.GetRevId());
    revProperties.Put("message", "hi");
    rev.SetProperties(revProperties);
    IList<string> revHistory = new AList<string>();
    revHistory.AddItem(rev.GetRevId());
    revHistory.AddItem("3-thrice");
    revHistory.AddItem("2-too");
    revHistory.AddItem("1-won");
    database.ForceInsert(rev, revHistory, null);
    NUnit.Framework.Assert.AreEqual(1, database.GetDocumentCount());
    VerifyHistory(database, rev, revHistory);

    // Insert a conflicting branch of the same document; it shares the
    // "2-too" and "1-won" ancestors with the first branch.
    RevisionInternal conflict = new RevisionInternal("MyDocId", "5-epsilon", false, database);
    IDictionary<string, object> conflictProperties = new Dictionary<string, object>();
    conflictProperties.Put("_id", conflict.GetDocId());
    conflictProperties.Put("_rev", conflict.GetRevId());
    conflictProperties.Put("message", "yo");
    conflict.SetProperties(conflictProperties);
    IList<string> conflictHistory = new AList<string>();
    conflictHistory.AddItem(conflict.GetRevId());
    conflictHistory.AddItem("4-delta");
    conflictHistory.AddItem("3-gamma");
    conflictHistory.AddItem("2-too");
    conflictHistory.AddItem("1-won");
    // The listener records whether the insert was reported as a conflict.
    IList wasInConflict = new ArrayList();
    Database.ChangeListener listener = new _ChangeListener_84(wasInConflict);
    database.AddChangeListener(listener);
    database.ForceInsert(conflict, conflictHistory, null);
    NUnit.Framework.Assert.IsTrue(wasInConflict.Count > 0);
    database.RemoveChangeListener(listener);
    // Still one document: the conflict is a second branch, not a new doc.
    NUnit.Framework.Assert.AreEqual(1, database.GetDocumentCount());
    VerifyHistory(database, conflict, conflictHistory);

    // Add an unrelated document:
    RevisionInternal other = new RevisionInternal("AnotherDocID", "1-ichi", false, database);
    IDictionary<string, object> otherProperties = new Dictionary<string, object>();
    otherProperties.Put("language", "jp");
    other.SetProperties(otherProperties);
    IList<string> otherHistory = new AList<string>();
    otherHistory.AddItem(other.GetRevId());
    database.ForceInsert(other, otherHistory, null);

    // Fetch one of those phantom revisions with no body:
    RevisionInternal rev2 = database.GetDocumentWithIDAndRev(rev.GetDocId(), "2-too",
        EnumSet.NoneOf<Database.TDContentOptions>());
    NUnit.Framework.Assert.AreEqual(rev.GetDocId(), rev2.GetDocId());
    NUnit.Framework.Assert.AreEqual("2-too", rev2.GetRevId());
    //Assert.assertNull(rev2.getContent());

    // Make sure no duplicate rows were inserted for the common revisions:
    NUnit.Framework.Assert.AreEqual(8, database.GetLastSequenceNumber());
    // Make sure the revision with the higher revID wins the conflict:
    RevisionInternal current = database.GetDocumentWithIDAndRev(rev.GetDocId(), null,
        EnumSet.NoneOf<Database.TDContentOptions>());
    NUnit.Framework.Assert.AreEqual(conflict, current);

    // Get the _changes feed and verify only the winner is in it:
    ChangesOptions options = new ChangesOptions();
    RevisionList changes = database.ChangesSince(0, options, null);
    RevisionList expectedChanges = new RevisionList();
    expectedChanges.AddItem(conflict);
    expectedChanges.AddItem(other);
    NUnit.Framework.Assert.AreEqual(changes, expectedChanges);
    // With conflicts included, the losing branch tip shows up too.
    options.SetIncludeConflicts(true);
    changes = database.ChangesSince(0, options, null);
    expectedChanges = new RevisionList();
    expectedChanges.AddItem(rev);
    expectedChanges.AddItem(conflict);
    expectedChanges.AddItem(other);
    NUnit.Framework.Assert.AreEqual(changes, expectedChanges);
}
/// <summary>Constructs an "_attachments" dictionary for a revision, to be inserted in its JSON body.</summary>
/// <param name="sequence">The revision's sequence number; must be positive.</param>
/// <param name="contentOptions">Flags controlling whether attachment bodies are inlined
/// (IncludeAttachments) or large ones are deferred (BigAttachmentsFollow).</param>
/// <returns>A filename-keyed dictionary of attachment metadata dictionaries, or null if the
/// revision has no attachments or a SQL error occurred.</returns>
internal IDictionary<String, Object> GetAttachmentsDictForSequenceWithContent(long sequence, DocumentContentOptions contentOptions)
{
    Debug.Assert((sequence > 0));
    Cursor cursor = null;
    var args = new Object[] { sequence };
    try
    {
        cursor = StorageEngine.RawQuery("SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=?", CommandBehavior.SequentialAccess, args);
        if (!cursor.MoveToNext())
        {
            // No attachment rows for this sequence.
            return null;
        }
        var result = new Dictionary<String, Object>();
        while (!cursor.IsAfterLast())
        {
            var dataSuppressed = false;
            var filename = cursor.GetString(0);
            var keyData = cursor.GetBlob(1);
            var contentType = cursor.GetString(2);
            var length = cursor.GetInt(3);
            var revpos = cursor.GetInt(4);
            var key = new BlobKey(keyData);
            // The stored key bytes double as the SHA-1 digest for the JSON form.
            var digestString = "sha1-" + Convert.ToBase64String(keyData);
            var dataBase64 = (string) null;
            if (contentOptions.HasFlag(DocumentContentOptions.IncludeAttachments))
            {
                if (contentOptions.HasFlag(DocumentContentOptions.BigAttachmentsFollow) && length >= Database.BigAttachmentLength)
                {
                    // Too large to inline: mark it to be sent separately ("follows").
                    dataSuppressed = true;
                }
                else
                {
                    // Inline the body as base64. NOTE(review): loading the blob from
                    // the store here is very expensive for large attachments.
                    byte[] data = Attachments.BlobForKey(key);
                    if (data != null)
                    {
                        dataBase64 = Convert.ToBase64String(data);
                    }
                    else
                    {
                        Log.W(Tag, "Error loading attachment");
                    }
                }
            }
            var attachment = new Dictionary<string, object>();
            if (!(dataBase64 != null || dataSuppressed))
            {
                // No inline data and not following separately: emit a stub entry.
                attachment["stub"] = true;
            }
            if (dataBase64 != null)
            {
                attachment["data"] = dataBase64;
            }
            if (dataSuppressed)
            {
                attachment.Put ("follows", true);
            }
            attachment["digest"] = digestString;
            attachment["content_type"] = contentType;
            attachment["length"] = length;
            attachment["revpos"] = revpos;
            result[filename] = attachment;
            cursor.MoveToNext();
        }
        return result;
    }
    catch (SQLException e)
    {
        Log.E(Tag, "Error getting attachments for sequence", e);
        return null;
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
}
/// <summary>
/// Builds a name-to-PackFile map of the packs from the previous scan that can
/// be reused. Corrupted packs are closed and excluded; on a (theoretically
/// impossible) duplicate name, the first instance is kept and the newcomer closed.
/// </summary>
private static IDictionary<string, PackFile> ReuseMap(ObjectDirectory.PackList old)
{
    IDictionary<string, PackFile> reusable = new Dictionary<string, PackFile>();
    foreach (PackFile pack in old.packs)
    {
        if (pack.Invalid())
        {
            // The pack instance is corrupted and cannot safely be used again,
            // so close it and leave it out of the reuse map.
            pack.Close();
            continue;
        }
        PackFile previous = reusable.Put(pack.GetPackFile().GetName(), pack);
        if (previous != null)
        {
            // Two packs with the same name should be impossible since they all
            // came from one directory. Honor the promise to close unreused
            // packs: restore the first instance (likely in active use by
            // readers) and close the second.
            reusable.Put(previous.GetPackFile().GetName(), previous);
            pack.Close();
        }
    }
    return reusable;
}
// Replaces the "follows" key with the real attachment data in all attachments to 'doc'.
public bool InlineFollowingAttachmentsIn(RevisionInternal rev)
{
    return rev.MutateAttachments((name, meta) =>
    {
        // Attachments without a "follows" marker are already inline; pass through.
        if (!meta.ContainsKey("follows"))
        {
            return meta;
        }
        var attachmentUrl = FileForAttachmentDict(meta);
        byte[] contents;
        try
        {
            // Read the attachment file fully into memory.
            var input = attachmentUrl.OpenConnection().GetInputStream();
            var buffer = new ByteArrayOutputStream();
            input.CopyTo(buffer);
            contents = buffer.ToByteArray();
        }
        catch (IOException e)
        {
            Log.E(Tag, "could not retrieve attachment data: {0}".Fmt(attachmentUrl.ToString()), e);
            return null;
        }
        // Swap the "follows" stub for the base64-encoded body.
        var inlined = new Dictionary<string, object>(meta);
        inlined.Remove("follows");
        inlined.Put("data", Convert.ToBase64String(contents));
        return inlined;
    });
}
/// <summary>
/// Extracts the application-defined properties from a revision's property
/// dictionary: every entry whose key does not start with an underscore.
/// Reserved keys such as "_id", "_rev", and "_attachments" are omitted.
/// </summary>
/// <param name="properties">The full property dictionary; must not be null.</param>
/// <returns>A new dictionary containing only the non-underscore-prefixed entries.</returns>
protected IDictionary<string, object> UserProperties(IDictionary<string, object> properties)
{
    var result = new Dictionary<string, object>();
    // Enumerate key/value pairs directly instead of iterating Keys and
    // re-indexing, which performed a second lookup per entry.
    foreach (var pair in properties)
    {
        if (!pair.Key.StartsWith("_", StringComparison.Ordinal))
        {
            result[pair.Key] = pair.Value;
        }
    }
    return result;
}
/// <summary>Updates or deletes an attachment, creating a new document revision in the process.
/// </summary>
/// <remarks>
/// Updates or deletes an attachment, creating a new document revision in the process.
/// Used by the PUT / DELETE methods called on attachment URLs.
/// </remarks>
/// <param name="filename">Name of the attachment to add, replace, or delete.</param>
/// <param name="body">New attachment contents, or null to delete the attachment.</param>
/// <param name="contentType">MIME type of the new contents; required when body is non-null.</param>
/// <param name="encoding">Attachment encoding (e.g. gzip) to record in the metadata.</param>
/// <param name="docID">ID of the document being modified.</param>
/// <param name="oldRevID">Revision being replaced, or null to create a new document.</param>
/// <returns>The newly created revision.</returns>
/// <exclude></exclude>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
internal RevisionInternal UpdateAttachment(string filename, BlobStoreWriter body, string contentType, AttachmentEncoding encoding, string docID, string oldRevID)
{
    var isSuccessful = false;

    // Validate argument combinations: a body needs a content type and a target
    // doc; replacing a specific revision also requires a document ID.
    if (String.IsNullOrEmpty (filename) || (body != null && contentType == null) || (oldRevID != null && docID == null) || (body != null && docID == null))
    {
        throw new CouchbaseLiteException(StatusCode.BadRequest);
    }

    BeginTransaction();
    try
    {
        var oldRev = new RevisionInternal(docID, oldRevID, false, this);
        if (oldRevID != null)
        {
            // Load existing revision if this is a replacement:
            try
            {
                LoadRevisionBody(oldRev, DocumentContentOptions.None);
            }
            catch (CouchbaseLiteException e)
            {
                // Revision not found while the document exists: the caller is
                // updating against a non-current revision, so report a conflict.
                // Other load failures are deliberately ignored here.
                if (e.GetCBLStatus().GetCode() == StatusCode.NotFound && ExistsDocumentWithIDAndRev(docID, null))
                {
                    throw new CouchbaseLiteException(StatusCode.Conflict);
                }
            }
        }
        else
        {
            // If this creates a new doc, it needs a body:
            oldRev.SetBody(new Body(new Dictionary<string, object>()));
        }
        // Update the _attachments dictionary:
        var oldRevProps = oldRev.GetProperties();
        IDictionary<string, object> attachments = null;
        if (oldRevProps != null)
        {
            attachments = (IDictionary<string, object>)oldRevProps.Get("_attachments");
        }
        if (attachments == null)
        {
            attachments = new Dictionary<string, object>();
        }
        if (body != null)
        {
            // Register the writer under its digest so the blob can be installed
            // when the revision is saved, and record a "follows" stub for it.
            var key = body.GetBlobKey();
            var digest = key.Base64Digest();
            var blobsByDigest = new Dictionary<string, BlobStoreWriter>();
            blobsByDigest.Put(digest, body);
            RememberAttachmentWritersForDigests(blobsByDigest);
            var encodingName = (encoding == AttachmentEncoding.AttachmentEncodingGZIP) ? "gzip" : null;
            var dict = new Dictionary<string, object>();
            dict.Put("digest", digest);
            dict.Put("length", body.GetLength());
            dict.Put("follows", true);
            dict.Put("content_type", contentType);
            dict.Put("encoding", encodingName);
            attachments.Put(filename, dict);
        }
        else
        {
            // No body means DELETE. Deleting a nonexistent attachment from an
            // existing revision is an error.
            if (oldRevID != null && !attachments.ContainsKey(filename))
            {
                throw new CouchbaseLiteException(StatusCode.NotFound);
            }
            attachments.Remove(filename);
        }
        var properties = oldRev.GetProperties();
        properties.Put("_attachments", attachments);
        oldRev.SetProperties(properties);
        // Create a new revision:
        var putStatus = new Status();
        var newRev = PutRevision(oldRev, oldRevID, false, putStatus);
        isSuccessful = true;
        return newRev;
    }
    catch (SQLException e)
    {
        Log.E(Tag, "Error updating attachment", e);
        throw new CouchbaseLiteException(StatusCode.InternalServerError);
    }
    finally
    {
        // Commit only on success; otherwise roll the transaction back.
        EndTransaction(isSuccessful);
    }
}
/// <summary>
/// Builds the parsed-JSON body describing a pull replication: the remote
/// database (with its auth properties) as the source and the default test
/// database as the target.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
public virtual IDictionary<string, object> GetPullReplicationParsedJson()
{
    IDictionary<string, object> authProperties = GetReplicationAuthParsedJson();
    IDictionary<string, object> source = new Dictionary<string, object>
    {
        { "url", GetReplicationURL().ToString() },
        { "auth", authProperties }
    };
    IDictionary<string, object> replication = new Dictionary<string, object>
    {
        { "source", source },
        { "target", DefaultTestDb }
    };
    return replication;
}
/// <summary>
/// Returns the query row formatted as a JSON object
/// </summary>
/// <returns>The query row formatted as a JSON object; rows with neither a value
/// nor a source document are reported as {"key": ..., "error": "not_found"}.</returns>
public IDictionary<string, object> AsJSONDictionary()
{
    var json = new Dictionary<string, object>();
    json["key"] = Key;
    if (Value == null && SourceDocumentId == null)
    {
        // Neither a value nor a source document: the row refers to a missing doc.
        json["error"] = "not_found";
        return json;
    }
    if (Value != null)
    {
        json["value"] = Value;
    }
    json["id"] = SourceDocumentId;
    if (DocumentProperties != null)
    {
        json["doc"] = DocumentProperties;
    }
    return json;
}
/// <summary>
/// Test helper: creates a document with a single property ("foo": "bar"),
/// asserts it starts with no attachments, then saves a new revision carrying a
/// text/plain attachment with the given name and content and asserts the
/// attachment round-trips (name, content type, bytes, length).
/// </summary>
internal static Document CreateDocWithAttachment(Database database, string attachmentName, string content)
{
    var props = new Dictionary<string, object> { { "foo", "bar" } };
    var document = CreateDocumentWithProperties(database, props);

    // The freshly created revision must not have any attachments yet.
    var baseRev = document.CurrentRevision;
    var missing = baseRev.GetAttachment(attachmentName);
    Assert.AreEqual(baseRev.Attachments.Count(), 0);
    Assert.AreEqual(baseRev.AttachmentNames.Count(), 0);
    Assert.IsNull(missing);

    // Save a new revision that carries the attachment.
    var contentStream = new MemoryStream(Encoding.UTF8.GetBytes(content));
    var unsaved = document.CreateRevision();
    unsaved.SetAttachment(attachmentName, "text/plain; charset=utf-8", contentStream);
    var saved = unsaved.Save();
    unsaved.Dispose();
    Assert.IsNotNull(saved);
    Assert.AreEqual(saved.Attachments.Count(), 1);
    Assert.AreEqual(saved.AttachmentNames.Count(), 1);

    // Verify the stored attachment round-trips correctly.
    var stored = saved.GetAttachment(attachmentName);
    Assert.IsNotNull(stored);
    Assert.AreEqual(document, stored.Document);
    Assert.AreEqual(attachmentName, stored.Name);
    var expectedNames = new List<string> { attachmentName };
    Assert.AreEqual(saved.AttachmentNames, expectedNames);
    Assert.AreEqual("text/plain; charset=utf-8", stored.ContentType);
    Assert.AreEqual(Encoding.UTF8.GetString(stored.Content.ToArray()), content);
    Assert.AreEqual(Encoding.UTF8.GetBytes(content).Length, stored.Length);
    stored.Dispose();
    return document;
}
/// <summary>
/// Verifies the interaction between a pull replication's Channels property and
/// its Filter/FilterParams: assigning channels switches the filter to
/// "sync_gateway/bychannel", and clearing channels clears that filter again.
/// </summary>
public virtual void TestChannelsMore()
{
    if (!Boolean.Parse((string)Runtime.Properties["replicationTestsEnabled"]))
    {
        Assert.Inconclusive("Replication tests disabled.");
        return;
    }

    var fakeRemoteURL = new Uri("http://couchbase.com/no_such_db");
    var puller = database.CreatePullReplication(fakeRemoteURL);

    // A custom filter (with or without params) does not imply any channels.
    Assert.IsTrue(!puller.Channels.Any());
    puller.Filter = "foo/bar";
    Assert.IsTrue(!puller.Channels.Any());
    var customParams = new Dictionary<string, object> { { "a", "b" } };
    puller.FilterParams = customParams;
    Assert.IsTrue(!puller.Channels.Any());

    // Clearing channels while a custom filter is set leaves the filter alone.
    puller.Channels = null;
    Assert.AreEqual("foo/bar", puller.Filter);
    Assert.AreEqual(customParams, puller.FilterParams);

    // Assigning channels replaces the filter with the by-channel filter.
    var channelList = new List<string> { "NBC", "MTV" };
    puller.Channels = channelList;
    Assert.AreEqual(channelList, puller.Channels);
    Assert.AreEqual("sync_gateway/bychannel", puller.Filter);
    customParams = new Dictionary<string, object> { { "channels", "NBC,MTV" } };
    Assert.AreEqual(customParams, puller.FilterParams);

    // Clearing channels also clears the implied filter and its params.
    puller.Channels = null;
    Assert.AreEqual(puller.Filter, null);
    Assert.AreEqual(null, puller.FilterParams);
}
/// <summary>
/// Saves a new child revision of the given revision whose user properties
/// consist of a single entry keyed by a freshly generated GUID (value "val"),
/// guaranteeing the revision body differs from any other.
/// </summary>
/// <exception cref="System.Exception"></exception>
public static SavedRevision CreateRevisionWithRandomProps(SavedRevision createRevFrom, bool allowConflict)
{
    var randomProps = new Dictionary<string, object> { { Misc.CreateGUID(), "val" } };
    var pendingRev = createRevFrom.CreateRevision();
    pendingRev.SetUserProperties(randomProps);
    return pendingRev.Save(allowConflict);
}
/// <summary>
/// End-to-end push replication test: creates local documents (including one
/// with an attachment), pushes them to a remote database, then adds another
/// document and verifies an incremental push plus its local and remote
/// checkpoints.
/// </summary>
public void TestPusher()
{
    if (!Boolean.Parse((string)Runtime.Properties["replicationTestsEnabled"]))
    {
        Assert.Inconclusive("Replication tests disabled.");
        return;
    }

    using (var remoteDb = _sg.CreateDatabase(TempDbName()))
    {
        var remote = remoteDb.RemoteUri;
        // Timestamp suffix keeps doc IDs unique across test runs.
        var docIdTimestamp = Convert.ToString(Runtime.CurrentTimeMillis());

        // Create some documents:
        var documentProperties = new Dictionary<string, object>();
        var doc1Id = string.Format("doc1-{0}", docIdTimestamp);
        documentProperties["_id"] = doc1Id;
        documentProperties["foo"] = 1;
        documentProperties["bar"] = false;
        var body = new Body(documentProperties);
        var rev1 = new RevisionInternal(body);
        rev1 = database.PutRevision(rev1, null, false);
        // Update doc1 to a second revision.
        documentProperties.Put("_rev", rev1.GetRevId());
        documentProperties["UPDATED"] = true;
        database.PutRevision(new RevisionInternal(documentProperties), rev1.GetRevId(), false);

        documentProperties = new Dictionary<string, object>();
        var doc2Id = string.Format("doc2-{0}", docIdTimestamp);
        documentProperties["_id"] = doc2Id;
        documentProperties["baz"] = 666;
        documentProperties["fnord"] = true;
        database.PutRevision(new RevisionInternal(documentProperties), null, false);

        // Give doc2 an image attachment so the push includes a blob.
        var doc2 = database.GetDocument(doc2Id);
        var doc2UnsavedRev = doc2.CreateRevision();
        var attachmentStream = GetAsset("attachment.png");
        doc2UnsavedRev.SetAttachment("attachment_testPusher.png", "image/png", attachmentStream);
        var doc2Rev = doc2UnsavedRev.Save();
        doc2UnsavedRev.Dispose();
        attachmentStream.Dispose();
        Assert.IsNotNull(doc2Rev);

        const bool continuous = false;
        var repl = database.CreatePushReplication(remote);
        repl.Continuous = continuous;
        // Sync Gateway already has the target db; other servers need it created.
        if (!IsSyncGateway(remote))
        {
            repl.CreateTarget = true;
        }
        // Check the replication's properties:
        Assert.AreEqual(database, repl.LocalDatabase);
        Assert.AreEqual(remote, repl.RemoteUrl);
        Assert.IsFalse(repl.IsPull);
        Assert.IsFalse(repl.Continuous);
        Assert.IsNull(repl.Filter);
        Assert.IsNull(repl.FilterParams);
        Assert.IsNull(repl.DocIds);
        // TODO: CAssertNil(r1.headers); still not null!
        // Check that the replication hasn't started running:
        Assert.IsFalse(repl.IsRunning);
        Assert.AreEqual(ReplicationStatus.Stopped, repl.Status);
        Assert.AreEqual(0, repl.CompletedChangesCount);
        Assert.AreEqual(0, repl.ChangesCount);
        Assert.IsNull(repl.LastError);

        RunReplication(repl);

        // TODO: Verify the following 2 asserts. ChangesCount and CompletedChangesCount
        // should already be reset when the replicator stopped.
        Assert.IsNull(repl.LastError);
        Assert.IsTrue(repl.ChangesCount >= 2);
        Assert.IsTrue(repl.CompletedChangesCount >= 2);
        remoteDb.VerifyDocumentExists(doc1Id);

        // Add doc3
        documentProperties = new Dictionary<string, object>();
        var doc3Id = string.Format("doc3-{0}", docIdTimestamp);
        var doc3 = database.GetDocument(doc3Id);
        documentProperties["bat"] = 677;
        doc3.PutProperties(documentProperties);

        // re-run push replication
        var repl2 = database.CreatePushReplication(remote);
        repl2.Continuous = continuous;
        if (!IsSyncGateway(remote))
        {
            repl2.CreateTarget = true;
        }
        var repl2CheckedpointId = repl2.RemoteCheckpointDocID();
        RunReplication(repl2);
        Assert.IsNull(repl2.LastError);

        Sleep(1000);

        // make sure the doc has been added
        remoteDb.VerifyDocumentExists(doc3Id);
        // The local checkpoint should match the replicator's last sequence.
        Assert.AreEqual(repl2.LastSequence, database.LastSequenceWithCheckpointId(repl2CheckedpointId));

        Sleep(2000);
        // The remote checkpoint document should record the same last sequence.
        var json = GetRemoteDoc(remote, repl2CheckedpointId);
        var remoteLastSequence = (string)json["lastSequence"];
        Assert.AreEqual(repl2.LastSequence, remoteLastSequence);
    }
}
/// <summary>
/// Serializes this query row to a JSON-compatible dictionary: key/value/id/doc
/// when the row has a value or source document, otherwise the row is reported
/// as {"key": ..., "error": "not_found"}.
/// </summary>
public virtual IDictionary<string, object> AsJSONDictionary()
{
    IDictionary<string, object> json = new Dictionary<string, object>();
    json["key"] = key;
    if (value == null && sourceDocumentId == null)
    {
        // Neither a value nor a source document: the row refers to a missing doc.
        json["error"] = "not_found";
        return json;
    }
    if (value != null)
    {
        json["value"] = value;
    }
    json["id"] = sourceDocumentId;
    if (documentProperties != null)
    {
        json["doc"] = documentProperties;
    }
    return json;
}