public bool Run() { StringBuilder sb = new StringBuilder(); for (int i = 0; i < this._enclosing.GetSizeOfAttachment(); i++) { sb.Append('1'); } byte[] attach1 = Sharpen.Runtime.GetBytesForString(sb.ToString()); try { Status status = new Status(); for (int i_1 = 0; i_1 < this._enclosing.GetNumberOfDocuments(); i_1++) { IDictionary<string, object> rev1Properties = new Dictionary<string, object>(); rev1Properties.Put("foo", 1); rev1Properties.Put("bar", false); RevisionInternal rev1 = this._enclosing.database.PutRevision(new RevisionInternal (rev1Properties, this._enclosing.database), null, false, status); NUnit.Framework.Assert.AreEqual(Status.Created, status.GetCode()); this._enclosing.database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream (attach1), rev1.GetSequence(), Test3_CreateDocsWithAttachments._testAttachmentName , "text/plain", rev1.GetGeneration()); NUnit.Framework.Assert.AreEqual(Status.Created, status.GetCode()); } } catch (Exception t) { Log.E(Test3_CreateDocsWithAttachments.Tag, "Document create with attachment failed" , t); return false; } return true; }
public void TestChangeNotification() { var changeNotifications = 0; EventHandler<DatabaseChangeEventArgs> handler = (sender, e) => changeNotifications++; database.Changed += handler; // create a document var documentProperties = new Dictionary<string, object>(); documentProperties["foo"] = 1; documentProperties["bar"] = false; documentProperties["baz"] = "touch"; var body = new Body(documentProperties); var rev1 = new RevisionInternal(body, database); var status = new Status(); database.PutRevision(rev1, null, false, status); Assert.AreEqual(1, changeNotifications); // Analysis disable once DelegateSubtraction database.Changed -= handler; }
public bool Run() { string[] bigObj = new string[this._enclosing.GetSizeOfDocument()]; for (int i = 0; i < this._enclosing.GetSizeOfDocument(); i++) { bigObj[i] = Test10_DeleteDB._propertyValue; } for (int i_1 = 0; i_1 < this._enclosing.GetNumberOfDocuments(); i_1++) { //create a document IDictionary<string, object> props = new Dictionary<string, object>(); props.Put("bigArray", bigObj); Body body = new Body(props); RevisionInternal rev1 = new RevisionInternal(body, this._enclosing.database); Status status = new Status(); try { rev1 = this._enclosing.database.PutRevision(rev1, null, false, status); } catch (Exception t) { Log.E(Test10_DeleteDB.Tag, "Document create failed", t); return false; } } return true; }
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> private RevisionInternal PutDoc(Database db, IDictionary<string, object> props) { RevisionInternal rev = new RevisionInternal(props, db); Status status = new Status(); rev = db.PutRevision(rev, null, false, status); NUnit.Framework.Assert.IsTrue(status.IsSuccessful()); return rev; }
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public virtual void TestChangeNotification() { // add listener to database database.Changed += (sender, e) => changeNotifications++; // create a document IDictionary<string, object> documentProperties = new Dictionary<string, object>(); documentProperties["foo"] = 1; documentProperties["bar"] = false; documentProperties["baz"] = "touch"; Body body = new Body(documentProperties); RevisionInternal rev1 = new RevisionInternal(body, database); Status status = new Status(); rev1 = database.PutRevision(rev1, null, false, status); NUnit.Framework.Assert.AreEqual(1, changeNotifications); }
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public virtual void TestChangeNotification() { Database.ChangeListener changeListener = new _ChangeListener_16(this); // add listener to database database.AddChangeListener(changeListener); // create a document IDictionary<string, object> documentProperties = new Dictionary<string, object>(); documentProperties.Put("foo", 1); documentProperties.Put("bar", false); documentProperties.Put("baz", "touch"); Body body = new Body(documentProperties); RevisionInternal rev1 = new RevisionInternal(body, database); Status status = new Status(); rev1 = database.PutRevision(rev1, null, false, status); NUnit.Framework.Assert.AreEqual(1, changeNotifications); }
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public virtual void TestLoadDBPerformance() { long startMillis = Runtime.CurrentTimeMillis(); string[] bigObj = new string[GetSizeOfDocument()]; for (int i = 0; i < GetSizeOfDocument(); i++) { bigObj[i] = _propertyValue; } for (int j = 0; j < GetNumberOfShutAndReloadCycles(); j++) { //Force close and reopen of manager and database to ensure cold //start before doc creation try { TearDown(); manager = new Manager(new LiteTestContext(), Manager.DefaultOptions); database = manager.GetExistingDatabase(DefaultTestDb); } catch (Exception ex) { Log.E(Tag, "DB teardown", ex); Fail(); } for (int k = 0; k < GetNumberOfDocuments(); k++) { //create a document IDictionary<string, object> props = new Dictionary<string, object>(); props.Put("bigArray", bigObj); Body body = new Body(props); RevisionInternal rev1 = new RevisionInternal(body, database); Status status = new Status(); try { rev1 = database.PutRevision(rev1, null, false, status); } catch (Exception t) { Log.E(Tag, "Document creation failed", t); Fail(); } } } Log.V("PerformanceStats", Tag + "," + Sharpen.Extensions.ValueOf(Runtime.CurrentTimeMillis () - startMillis).ToString() + "," + GetNumberOfDocuments() + "," + GetSizeOfDocument () + ",," + GetNumberOfShutAndReloadCycles()); }
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public virtual void TestCreateDocsUnoptimizedWayPerformance() { long startMillis = Runtime.CurrentTimeMillis(); string[] bigObj = new string[GetSizeOfDocument()]; for (int i = 0; i < GetSizeOfDocument(); i++) { bigObj[i] = _propertyValue; } for (int i_1 = 0; i_1 < GetNumberOfDocuments(); i_1++) { //create a document IDictionary<string, object> props = new Dictionary<string, object>(); props.Put("bigArray", bigObj); Body body = new Body(props); RevisionInternal rev1 = new RevisionInternal(body, database); Status status = new Status(); rev1 = database.PutRevision(rev1, null, false, status); } Log.V("PerformanceStats", Tag + "," + Sharpen.Extensions.ValueOf(Runtime.CurrentTimeMillis () - startMillis).ToString() + "," + GetNumberOfDocuments() + "," + GetSizeOfDocument ()); }
internal IEnumerable<QueryRow> QueryViewNamed(String viewName, QueryOptions options, long ifChangedSince, ValueTypePtr<long> outLastSequence, Status outStatus = null) { if (outStatus == null) { outStatus = new Status(); } IEnumerable<QueryRow> iterator = null; Status status = null; long lastIndexedSequence = 0, lastChangedSequence = 0; do { if(viewName != null) { var view = GetView(viewName); if(view == null) { outStatus.Code = StatusCode.NotFound; break; } lastIndexedSequence = view.LastSequenceIndexed; if(options.Stale == IndexUpdateMode.Before || lastIndexedSequence <= 0) { status = view.UpdateIndex(); if(status.IsError) { Log.W(TAG, "Failed to update index: {0}", status.Code); break; } lastIndexedSequence = view.LastSequenceIndexed; } else if(options.Stale == IndexUpdateMode.After && lastIndexedSequence <= LastSequenceNumber) { RunAsync(d => view.UpdateIndex()); } lastChangedSequence = view.LastSequenceChangedAt; iterator = view.QueryWithOptions(options); } else { // null view means query _all_docs iterator = GetAllDocs(options); lastIndexedSequence = lastChangedSequence = LastSequenceNumber; } if(lastChangedSequence <= ifChangedSince) { status = new Status(StatusCode.NotModified); } } while(false); // just to allow 'break' within the block outLastSequence.Value = lastIndexedSequence; if (status != null) { outStatus.Code = status.Code; } return iterator; }
/// <summary> /// Returns the <see cref="ValidateDelegate" /> for the given name, or null if it does not exist. /// </summary> /// <returns>The <see cref="ValidateDelegate" /> for the given name, or null if it does not exist.</returns> /// <param name="name">The name of the validation delegate to get.</param> /// <param name="status">The result of the operation</param> public FilterDelegate GetFilter(String name, Status status = null) { FilterDelegate result = null; if (!Shared.TryGetValue("filter", name, Name, out result)) { result = null; } if (result == null) { var filterCompiler = FilterCompiler; if (filterCompiler == null) { return null; } string language = null; var sourceCode = GetDesignDocFunction(name, "filters", out language) as string; if (sourceCode == null) { if (status != null) { status.Code = StatusCode.NotFound; } return null; } var filter = filterCompiler.CompileFilter(sourceCode, language); if (filter == null) { if (status != null) { status.Code = StatusCode.CallbackError; } Log.W(TAG, string.Format("Filter {0} failed to compile", name)); return null; } SetFilter(name, filter); return filter; } return result; }
/// <summary>Updates or deletes an attachment, creating a new document revision in the process. /// </summary> /// <remarks> /// Updates or deletes an attachment, creating a new document revision in the process. /// Used by the PUT / DELETE methods called on attachment URLs. /// </remarks> /// <exclude></exclude> /// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal RevisionInternal UpdateAttachment(string filename, BlobStoreWriter body, string contentType, AttachmentEncoding encoding, string docID, string oldRevID) { if(StringEx.IsNullOrWhiteSpace(filename) || (body != null && contentType == null) || (oldRevID != null && docID == null) || (body != null && docID == null)) { throw new CouchbaseLiteException(StatusCode.BadAttachment); } var oldRev = new RevisionInternal(docID, oldRevID, false); if (oldRevID != null) { // Load existing revision if this is a replacement: try { oldRev = LoadRevisionBody(oldRev); } catch (CouchbaseLiteException e) { if (e.Code == StatusCode.NotFound && GetDocument(docID, null, false) != null) { throw new CouchbaseLiteException(StatusCode.Conflict); } throw; } } else { // If this creates a new doc, it needs a body: oldRev.SetBody(new Body(new Dictionary<string, object>())); } // Update the _attachments dictionary: var attachments = oldRev.GetProperties().Get("_attachments").AsDictionary<string, object>(); if (attachments == null) { attachments = new Dictionary<string, object>(); } if (body != null) { var key = body.GetBlobKey(); string digest = key.Base64Digest(); RememberAttachmentWriter(body); string encodingName = (encoding == AttachmentEncoding.GZIP) ? "gzip" : null; attachments[filename] = new NonNullDictionary<string, object> { { "digest", digest }, { "length", body.GetLength() }, { "follows", true }, { "content_type", contentType }, { "encoding", encodingName } }; } else { if (oldRevID != null && attachments.Get(filename) == null) { throw new CouchbaseLiteException(StatusCode.AttachmentNotFound); } attachments.Remove(filename); } var properties = oldRev.GetProperties(); properties["_attachments"] = attachments; oldRev.SetProperties(properties); Status status = new Status(); var newRev = PutRevision(oldRev, oldRevID, false, status); if (status.IsError) { throw new CouchbaseLiteException(status.Code); } return newRev; }
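// Hedged usage sketch (not library code): deleting an attachment through UpdateAttachment by
// passing a null body, which removes the _attachments entry and creates a new revision.
// "photo.png", "doc-123" and currentRevId are placeholders, and AttachmentEncoding.None is
// assumed to be the "no encoding" member of the enum.
var revAfterDelete = database.UpdateAttachment(
    "photo.png",               // attachment to remove
    null,                      // null body means delete rather than update
    null,                      // no content type needed for a deletion
    AttachmentEncoding.None,
    "doc-123",                 // document being modified
    currentRevId);             // revision being replaced; required so the change is not a conflict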
internal AttachmentInternal GetAttachmentForRevision(RevisionInternal rev, string name, Status status = null) { Debug.Assert(name != null); var attachments = rev.GetAttachments(); if (attachments == null) { try { rev = LoadRevisionBody(rev); } catch(CouchbaseLiteException e) { if (status != null) { status.Code = e.CBLStatus.Code; } return null; } attachments = rev.GetAttachments(); if (attachments == null) { if (status != null) { status.Code = StatusCode.NotFound; } return null; } } return AttachmentForDict(attachments.Get(name).AsDictionary<string, object>(), name, status); }
internal bool ProcessAttachmentsForRevision(RevisionInternal rev, string prevRevId, Status status) { if (status == null) { status = new Status(); } status.Code = StatusCode.Ok; var revAttachments = rev.GetAttachments(); if (revAttachments == null) { return true; // no-op: no attachments } // Deletions can't have attachments: if (rev.IsDeleted() || revAttachments.Count == 0) { var body = rev.GetProperties(); body.Remove("_attachments"); rev.SetProperties(body); return true; } int generation = RevisionInternal.GenerationFromRevID(prevRevId) + 1; IDictionary<string, object> parentAttachments = null; return rev.MutateAttachments((name, attachInfo) => { AttachmentInternal attachment = null; try { attachment = new AttachmentInternal(name, attachInfo); } catch(CouchbaseLiteException e) { return null; } if(attachment.EncodedContent != null) { // If there's inline attachment data, decode and store it: BlobKey blobKey = new BlobKey(); if(!Attachments.StoreBlob(attachment.EncodedContent.ToArray(), blobKey)) { status.Code = StatusCode.AttachmentError; return null; } attachment.BlobKey = blobKey; } else if(attachInfo.GetCast<bool>("follows")) { // "follows" means the uploader provided the attachment in a separate MIME part. // This means it's already been registered in _pendingAttachmentsByDigest; // I just need to look it up by its "digest" property and install it into the store: InstallAttachment(attachment, attachInfo); } else if(attachInfo.GetCast<bool>("stub")) { // "stub" on an incoming revision means the attachment is the same as in the parent. if(parentAttachments == null && prevRevId != null) { parentAttachments = GetAttachmentsFromDoc(rev.GetDocId(), prevRevId, status); if(parentAttachments == null) { if(status.Code == StatusCode.Ok || status.Code == StatusCode.NotFound) { status.Code = StatusCode.BadAttachment; } return null; } } var parentAttachment = parentAttachments == null ? null : parentAttachments.Get(name).AsDictionary<string, object>(); if(parentAttachment == null) { status.Code = StatusCode.BadAttachment; return null; } return parentAttachment; } // Set or validate the revpos: if(attachment.RevPos == 0) { attachment.RevPos = generation; } else if(attachment.RevPos >= generation) { status.Code = StatusCode.BadAttachment; return null; } Debug.Assert(attachment.IsValid); return attachment.AsStubDictionary(); }); }
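// Hedged sketch (not library code) of how an internal caller such as PutDocument feeds a new
// revision with inline attachment data through ProcessAttachmentsForRevision. The document ID,
// file name and base64 payload are placeholders.
var propsWithAttachment = new Dictionary<string, object> {
    { "_id", "doc-with-attachment" },
    { "_attachments", new Dictionary<string, object> {
        { "note.txt", new Dictionary<string, object> {
            { "content_type", "text/plain" },
            { "data", "aGVsbG8gd29ybGQ=" }   // base64 for "hello world"; decoded and stored in the blob store
        } }
    } }
};
var newRev = new RevisionInternal("doc-with-attachment", null, false);
newRev.SetProperties(propsWithAttachment);
var attachStatus = new Status();
if (!ProcessAttachmentsForRevision(newRev, null, attachStatus)) {
    Log.W(TAG, "Attachment processing failed: {0}", attachStatus.Code);
}
// On success the inline "data" entry has been replaced by a stub dictionary (digest, length, revpos).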
internal bool ExpandAttachments(RevisionInternal rev, int minRevPos, bool allowFollows, bool decodeAttachments, Status outStatus) { outStatus.Code = StatusCode.Ok; rev.MutateAttachments((name, attachment) => { var revPos = attachment.GetCast<long>("revpos"); if(revPos < minRevPos && revPos != 0) { //Stub: return new Dictionary<string, object> { { "stub", true }, { "revpos", revPos } }; } var expanded = new Dictionary<string, object>(attachment); expanded.Remove("stub"); if(decodeAttachments) { expanded.Remove("encoding"); expanded.Remove("encoded_length"); } if(allowFollows && SmallestLength(expanded) >= Database.BIG_ATTACHMENT_LENGTH) { //Data will follow (multipart): expanded["follows"] = true; expanded.Remove("data"); } else { //Put data inline: expanded.Remove("follows"); Status status = new Status(); var attachObj = AttachmentForDict(attachment, name, status); if(attachObj == null) { Log.W(TAG, "Can't get attachment '{0}' of {1} (status {2})", name, rev, status); outStatus.Code = status.Code; return attachment; } var data = decodeAttachments ? attachObj.Content : attachObj.EncodedContent; if(data == null) { Log.W(TAG, "Can't get binary data of attachment '{0}' of {1}", name, rev); outStatus.Code = StatusCode.NotFound; return attachment; } expanded["data"] = Convert.ToBase64String(data.ToArray()); } return expanded; }); return outStatus.Code == StatusCode.Ok; }
/// <summary>Updates or deletes an attachment, creating a new document revision in the process. /// </summary> /// <remarks> /// Updates or deletes an attachment, creating a new document revision in the process. /// Used by the PUT / DELETE methods called on attachment URLs. /// </remarks> /// <exclude></exclude> /// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal RevisionInternal UpdateAttachment(string filename, BlobStoreWriter body, string contentType, AttachmentEncoding encoding, string docID, string oldRevID) { var isSuccessful = false; if (String.IsNullOrEmpty (filename) || (body != null && contentType == null) || (oldRevID != null && docID == null) || (body != null && docID == null)) { throw new CouchbaseLiteException(StatusCode.BadRequest); } BeginTransaction(); try { var oldRev = new RevisionInternal(docID, oldRevID, false, this); if (oldRevID != null) { // Load existing revision if this is a replacement: try { LoadRevisionBody(oldRev, DocumentContentOptions.None); } catch (CouchbaseLiteException e) { if (e.GetCBLStatus().GetCode() == StatusCode.NotFound && ExistsDocumentWithIDAndRev(docID, null)) { throw new CouchbaseLiteException(StatusCode.Conflict); } } } else { // If this creates a new doc, it needs a body: oldRev.SetBody(new Body(new Dictionary<string, object>())); } // Update the _attachments dictionary: var oldRevProps = oldRev.GetProperties(); IDictionary<string, object> attachments = null; if (oldRevProps != null) { attachments = (IDictionary<string, object>)oldRevProps.Get("_attachments"); } if (attachments == null) { attachments = new Dictionary<string, object>(); } if (body != null) { var key = body.GetBlobKey(); var digest = key.Base64Digest(); var blobsByDigest = new Dictionary<string, BlobStoreWriter>(); blobsByDigest.Put(digest, body); RememberAttachmentWritersForDigests(blobsByDigest); var encodingName = (encoding == AttachmentEncoding.AttachmentEncodingGZIP) ? "gzip" : null; var dict = new Dictionary<string, object>(); dict.Put("digest", digest); dict.Put("length", body.GetLength()); dict.Put("follows", true); dict.Put("content_type", contentType); dict.Put("encoding", encodingName); attachments.Put(filename, dict); } else { if (oldRevID != null && !attachments.ContainsKey(filename)) { throw new CouchbaseLiteException(StatusCode.NotFound); } attachments.Remove(filename); } var properties = oldRev.GetProperties(); properties.Put("_attachments", attachments); oldRev.SetProperties(properties); // Create a new revision: var putStatus = new Status(); var newRev = PutRevision(oldRev, oldRevID, false, putStatus); isSuccessful = true; return newRev; } catch (SQLException e) { Log.E(Tag, "Error updating attachment", e); throw new CouchbaseLiteException(StatusCode.InternalServerError); } finally { EndTransaction(isSuccessful); } }
internal RevisionInternal PutDocument(string docId, IDictionary<string, object> properties, string prevRevId, bool allowConflict, Status resultStatus) { bool deleting = properties == null || properties.GetCast<bool>("_deleted"); Log.D(TAG, "PUT _id={0}, _rev={1}, _deleted={2}, allowConflict={3}", docId, prevRevId, deleting, allowConflict); if ((prevRevId != null && docId == null) || (deleting && docId == null)) { if (resultStatus != null) { resultStatus.Code = StatusCode.BadId; } return null; } if (properties != null && properties.Get("_attachments").AsDictionary<string, object>() != null) { var tmpRev = new RevisionInternal(docId, prevRevId, deleting); tmpRev.SetProperties(properties); if (!ProcessAttachmentsForRevision(tmpRev, prevRevId, resultStatus)) { return null; } properties = tmpRev.GetProperties(); } StoreValidation validationBlock = null; if (Shared.HasValues("validation", Name)) { validationBlock = ValidateRevision; } var putRev = Storage.PutRevision(docId, prevRevId, properties, deleting, allowConflict, validationBlock, resultStatus); if (putRev != null) { Log.D(TAG, "--> created {0}", putRev); if (!string.IsNullOrEmpty(docId)) { UnsavedRevisionDocumentCache.Remove(docId); } } return putRev; }
/// <summary>Stores a new (or initial) revision of a document.</summary> /// <remarks> /// Stores a new (or initial) revision of a document. /// This is what's invoked by a PUT or POST. As with those, the previous revision ID must be supplied when necessary and the call will fail if it doesn't match. /// </remarks> /// <param name="oldRev">The revision to add. If the docID is null, a new UUID will be assigned. Its revID must be null. It must have a JSON body. /// </param> /// <param name="prevRevId">The ID of the revision to replace (same as the "?rev=" parameter to a PUT), or null if this is a new document. /// </param> /// <param name="allowConflict">If false, an error status 409 will be returned if the insertion would create a conflict, i.e. if the previous revision already has a child. /// </param> /// <param name="resultStatus">On return, an HTTP status code indicating success or failure. /// </param> /// <returns>A new RevisionInternal with the docID, revID and sequence filled in (but no body). /// </returns> /// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal RevisionInternal PutRevision(RevisionInternal oldRev, string prevRevId, bool allowConflict, Status resultStatus = null) { return PutDocument(oldRev.GetDocId(), oldRev.GetProperties(), prevRevId, allowConflict, resultStatus); }
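// Hedged usage sketch of PutRevision, mirroring the test code elsewhere in this set: create a
// document, then update it by passing the previous revision ID and checking the Status out-param.
// The document ID and property values are placeholders.
var docProps = new Dictionary<string, object> { { "_id", "example-doc" }, { "foo", 1 } };
var putStatus = new Status();
var firstRev = database.PutRevision(new RevisionInternal(docProps, database), null, false, putStatus);
// putStatus is expected to report Created here (compare the assertions in the tests above).
docProps["_rev"] = firstRev.GetRevId();
docProps["foo"] = 2;
database.PutRevision(new RevisionInternal(docProps, database), firstRev.GetRevId(), false, putStatus);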
public void TestPusher() { if (!Boolean.Parse((string)Runtime.Properties["replicationTestsEnabled"])) { Assert.Inconclusive("Replication tests disabled."); return; } var remote = GetReplicationURL(); var docIdTimestamp = Convert.ToString(Runtime.CurrentTimeMillis()); // Create some documents: var documentProperties = new Dictionary<string, object>(); var doc1Id = string.Format("doc1-{0}", docIdTimestamp); documentProperties["_id"] = doc1Id; documentProperties["foo"] = 1; documentProperties["bar"] = false; var body = new Body(documentProperties); var rev1 = new RevisionInternal(body); var status = new Status(); rev1 = database.PutRevision(rev1, null, false, status); Assert.AreEqual(StatusCode.Created, status.Code); documentProperties.Put("_rev", rev1.GetRevId()); documentProperties["UPDATED"] = true; database.PutRevision(new RevisionInternal(documentProperties), rev1.GetRevId(), false, status); Assert.AreEqual(StatusCode.Created, status.Code); documentProperties = new Dictionary<string, object>(); var doc2Id = string.Format("doc2-{0}", docIdTimestamp); documentProperties["_id"] = doc2Id; documentProperties["baz"] = 666; documentProperties["fnord"] = true; database.PutRevision(new RevisionInternal(documentProperties), null, false, status); Assert.AreEqual(StatusCode.Created, status.Code); var doc2 = database.GetDocument(doc2Id); var doc2UnsavedRev = doc2.CreateRevision(); var attachmentStream = GetAsset("attachment.png"); doc2UnsavedRev.SetAttachment("attachment_testPusher.png", "image/png", attachmentStream); var doc2Rev = doc2UnsavedRev.Save(); doc2UnsavedRev.Dispose(); attachmentStream.Dispose(); Assert.IsNotNull(doc2Rev); const bool continuous = false; var repl = database.CreatePushReplication(remote); repl.Continuous = continuous; if (!IsSyncGateway(remote)) { repl.CreateTarget = true; } // Check the replication's properties: Assert.AreEqual(database, repl.LocalDatabase); Assert.AreEqual(remote, repl.RemoteUrl); Assert.IsFalse(repl.IsPull); Assert.IsFalse(repl.Continuous); Assert.IsNull(repl.Filter); Assert.IsNull(repl.FilterParams); Assert.IsNull(repl.DocIds); // TODO: CAssertNil(r1.headers); still not null! // Check that the replication hasn't started running: Assert.IsFalse(repl.IsRunning); Assert.AreEqual((int)repl.Status, (int)ReplicationStatus.Stopped); Assert.AreEqual(0, repl.CompletedChangesCount); Assert.AreEqual(0, repl.ChangesCount); Assert.IsNull(repl.LastError); RunReplication(repl); // TODO: Verify the following 2 asserts. ChangesCount and CompletedChangesCount // should already be reset when the replicator stopped. 
Assert.IsTrue(repl.ChangesCount >= 2); Assert.IsTrue(repl.CompletedChangesCount >= 2); Assert.IsNull(repl.LastError); VerifyRemoteDocExists(remote, doc1Id); // Add doc3 documentProperties = new Dictionary<string, object>(); var doc3Id = string.Format("doc3-{0}", docIdTimestamp); var doc3 = database.GetDocument(doc3Id); documentProperties["bat"] = 677; doc3.PutProperties(documentProperties); // re-run push replication var repl2 = database.CreatePushReplication(remote); repl2.Continuous = continuous; if (!IsSyncGateway(remote)) { repl2.CreateTarget = true; } var repl2CheckedpointId = repl2.RemoteCheckpointDocID(); RunReplication(repl2); Assert.IsNull(repl2.LastError); // make sure the doc has been added VerifyRemoteDocExists(remote, doc3Id); Assert.AreEqual(repl2.LastSequence, database.LastSequenceWithCheckpointId(repl2CheckedpointId)); System.Threading.Thread.Sleep(2000); var json = GetRemoteDoc(remote, repl2CheckedpointId); var remoteLastSequence = (string)json["lastSequence"]; Assert.AreEqual(repl2.LastSequence, remoteLastSequence); Log.D(Tag, "testPusher() finished"); }
internal AttachmentInternal AttachmentForDict(IDictionary<string, object> info, string filename, Status status) { if (info == null) { if (status != null) { status.Code = StatusCode.NotFound; } return null; } AttachmentInternal attachment; try { attachment = new AttachmentInternal(filename, info); } catch(CouchbaseLiteException e) { if (status != null) { status.Code = e.CBLStatus.Code; } return null; } attachment.Database = this; return attachment; }
public void TestPusherDeletedDoc() { if (!Boolean.Parse((string)Runtime.Properties["replicationTestsEnabled"])) { Assert.Inconclusive("Replication tests disabled."); return; } var remote = GetReplicationURL(); var docIdTimestamp = Convert.ToString(Runtime.CurrentTimeMillis()); // Create some documents var documentProperties = new Dictionary<string, object>(); var doc1Id = string.Format("doc1-{0}", docIdTimestamp); documentProperties["_id"] = doc1Id; documentProperties["foo"] = 1; documentProperties["bar"] = false; var body = new Body(documentProperties); var rev1 = new RevisionInternal(body); var status = new Status(); rev1 = database.PutRevision(rev1, null, false, status); Assert.AreEqual(StatusCode.Created, status.Code); documentProperties["_rev"] = rev1.GetRevId(); documentProperties["UPDATED"] = true; documentProperties["_deleted"] = true; database.PutRevision(new RevisionInternal(documentProperties), rev1.GetRevId(), false, status); Assert.IsTrue((int)status.Code >= 200 && (int)status.Code < 300); var repl = database.CreatePushReplication(remote); if (!IsSyncGateway(remote)) { ((Pusher)repl).CreateTarget = true; } RunReplication(repl); Assert.IsNull(repl.LastError); // make sure doc1 is deleted var replicationUrlTrailing = new Uri(string.Format ("{0}/", remote)); var pathToDoc = new Uri(replicationUrlTrailing, doc1Id); Log.D(Tag, "Send http request to " + pathToDoc); var httpRequestDoneSignal = new CountDownLatch(1); var httpclient = new HttpClient(); try { var getDocResponse = httpclient.GetAsync(pathToDoc.ToString()).Result; var statusLine = getDocResponse.StatusCode; Log.D(ReplicationTest.Tag, "statusLine " + statusLine); Assert.AreEqual(Couchbase.Lite.StatusCode.NotFound, statusLine.GetStatusCode()); } catch (ProtocolViolationException e) { Assert.IsNull(e, "Got ClientProtocolException: " + e.Message); } catch (IOException e) { Assert.IsNull(e, "Got IOException: " + e.Message); } finally { httpRequestDoneSignal.CountDown(); } Log.D(Tag, "Waiting for http request to finish"); try { httpRequestDoneSignal.Await(TimeSpan.FromSeconds(10)); Log.D(Tag, "http request finished"); } catch (Exception e) { Runtime.PrintStackTrace(e); } Log.D(Tag, "testPusherDeletedDoc() finished"); }
internal IDictionary<string, object> GetAttachmentsFromDoc(string docId, string revId, Status status) { var rev = new RevisionInternal(docId, revId, false); try { LoadRevisionBody(rev); } catch(CouchbaseLiteException e) { status.Code = e.CBLStatus.Code; return null; } return rev.GetAttachments(); }
internal void UpdateIndex() { Log.V(Database.Tag, "Re-indexing view " + Name + " ..."); System.Diagnostics.Debug.Assert((Map != null)); if (Id < 0) { var msg = string.Format("View.Id < 0"); throw new CouchbaseLiteException(msg, new Status(StatusCode.NotFound)); } Database.BeginTransaction(); var result = new Status(StatusCode.InternalServerError); Cursor cursor = null; try { var lastSequence = LastSequenceIndexed; var dbMaxSequence = Database.LastSequenceNumber; if (lastSequence == dbMaxSequence) { // nothing to do (eg, kCBLStatusNotModified) var msg = String.Format("lastSequence ({0}) == dbMaxSequence ({1}), nothing to do", lastSequence, dbMaxSequence); Log.D(Database.Tag, msg); result.SetCode(StatusCode.Ok); return; } // First remove obsolete emitted results from the 'maps' table: var sequence = lastSequence; if (lastSequence < 0) { var msg = string.Format("lastSequence < 0 ({0})", lastSequence); throw new CouchbaseLiteException(msg, new Status(StatusCode.InternalServerError)); } if (lastSequence == 0) { // If the lastSequence has been reset to 0, make sure to remove // any leftover rows: var whereArgs = new string[] { Sharpen.Extensions.ToString(Id) }; Database.StorageEngine.Delete("maps", "view_id=@", whereArgs); } else { // Delete all obsolete map results (ones from since-replaced // revisions): var args = new [] { Id.ToString(), lastSequence.ToString(), lastSequence.ToString() }; Database.StorageEngine.ExecSQL( "DELETE FROM maps WHERE view_id=@ AND sequence IN (" + "SELECT parent FROM revs WHERE sequence>@ " + "AND parent>0 AND parent<=@)", args); } var deleted = 0; cursor = Database.StorageEngine.RawQuery("SELECT changes()", null); // TODO: Convert to ADO params. cursor.MoveToNext(); deleted = cursor.GetInt(0); cursor.Close(); // find a better way to propagate this back // Now scan every revision added since the last time the view was // indexed: var selectArgs = new[] { Convert.ToString(lastSequence) }; cursor = Database.StorageEngine.RawQuery("SELECT revs.doc_id, sequence, docid, revid, json FROM revs, docs " + "WHERE sequence>@ AND current!=0 AND deleted=0 " + "AND revs.doc_id = docs.doc_id " + "ORDER BY revs.doc_id, revid DESC", CommandBehavior.SequentialAccess, selectArgs); cursor.MoveToNext(); var lastDocID = 0L; while (!cursor.IsAfterLast()) { long docID = cursor.GetLong(0); if (docID != lastDocID) { // Only look at the first-iterated revision of any document, // because this is the // one with the highest revid, hence the "winning" revision // of a conflict. lastDocID = docID; // Reconstitute the document as a dictionary: sequence = cursor.GetLong(1); string docId = cursor.GetString(2); if (docId.StartsWith("_design/", StringComparison.OrdinalIgnoreCase)) { // design docs don't get indexed! cursor.MoveToNext(); continue; } var revId = cursor.GetString(3); var json = cursor.GetBlob(4); var properties = Database.DocumentPropertiesFromJSON( json, docId, revId, false, sequence, EnumSet.NoneOf<TDContentOptions>() ); if (properties != null) { // Call the user-defined map() to emit new key/value // pairs from this revision: Log.V(Database.Tag, " call map for sequence=" + System.Convert.ToString(sequence )); // This is the emit() block, which gets called from within the // user-defined map() block // that's called down below. var enclosingView = this; var thisSequence = sequence; var map = Map; if (map == null) throw new CouchbaseLiteException("Map function is missing."); EmitDelegate emitBlock = (key, value) => { // TODO: Do we need to do any null checks on key or value? 
try { var keyJson = Manager.GetObjectMapper().WriteValueAsString(key); var valueJson = value == null ? null : Manager.GetObjectMapper().WriteValueAsString(value) ; Log.V(Database.Tag, String.Format(" emit({0}, {1})", keyJson, valueJson)); var insertValues = new ContentValues(); insertValues.Put("view_id", enclosingView.Id); insertValues["sequence"] = thisSequence; insertValues["key"] = keyJson; insertValues["value"] = valueJson; enclosingView.Database.StorageEngine.Insert("maps", null, insertValues); } catch (Exception e) { Log.E(Database.Tag, "Error emitting", e); } }; map(properties, emitBlock); } } cursor.MoveToNext(); } // Finally, record the last revision sequence number that was // indexed: ContentValues updateValues = new ContentValues(); updateValues["lastSequence"] = dbMaxSequence; var whereArgs_1 = new string[] { Sharpen.Extensions.ToString(Id) }; Database.StorageEngine.Update("views", updateValues, "view_id=@", whereArgs_1); // FIXME actually count number added :) Log.V(Database.Tag, "...Finished re-indexing view " + Name + " up to sequence " + System.Convert.ToString(dbMaxSequence) + " (deleted " + deleted + " added " + "?" + ")"); result.SetCode(StatusCode.Ok); } catch (SQLException e) { throw new CouchbaseLiteException(e, new Status(StatusCode.DbError)); } finally { if (cursor != null) { cursor.Close(); } if (!result.IsSuccessful()) { Log.W(Database.Tag, "Failed to rebuild view " + Name + ": " + result.GetCode()); } if (Database != null) { Database.EndTransaction(result.IsSuccessful()); } } }
internal RevisionInternal RevisionByLoadingBody(RevisionInternal rev, Status outStatus) { // First check for no-op -- if we just need the default properties and already have them: if (rev.GetSequence() != 0) { var props = rev.GetProperties(); if (props != null && props.ContainsKey("_rev") && props.ContainsKey("_id")) { if (outStatus != null) { outStatus.Code = StatusCode.Ok; } return rev; } } RevisionInternal nuRev = rev.CopyWithDocID(rev.GetDocId(), rev.GetRevId()); try { LoadRevisionBody(nuRev); } catch(CouchbaseLiteException e) { if (outStatus != null) { outStatus.Code = e.CBLStatus.Code; } nuRev = null; } return nuRev; }
public CouchbaseLiteException (Exception innerException, Status status) : this(innerException, status.GetCode()) { Code = status.GetCode(); }
/// <summary>VALIDATION</summary> /// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal Status ValidateRevision(RevisionInternal newRev, RevisionInternal oldRev, String parentRevId) { var validations = Shared.GetValues("validation", Name); if (validations == null || validations.Count == 0) { return new Status(StatusCode.Ok); } var publicRev = new SavedRevision(this, newRev, parentRevId); var context = new ValidationContext(this, oldRev, newRev); Status status = new Status(StatusCode.Ok); foreach (var validationName in validations.Keys) { var validation = GetValidation(validationName); try { validation(publicRev, context); } catch(Exception e) { Log.E(TAG, String.Format("Validation block '{0}'", validationName), e); status.Code = StatusCode.Exception; break; } if (context.RejectMessage != null) { Log.D(TAG, "Failed update of {0}: {1}:{2} Old doc = {3}{2} New doc = {4}", oldRev, context.RejectMessage, Environment.NewLine, oldRev == null ? null : oldRev.GetProperties(), newRev.GetProperties()); status.Code = StatusCode.Forbidden; break; } } return status; }
internal RevisionInternal GetDocument(string docId, string revId, bool withBody, Status status = null) { return Storage.GetDocument(docId, revId, withBody, status); }
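// Hedged usage sketch (not library code): fetching the current revision of a document with its
// body and checking the optional Status when nothing comes back. "doc-123" is a placeholder.
var getStatus = new Status();
var current = database.GetDocument("doc-123", null, true, getStatus);
if (current == null && getStatus.Code == StatusCode.NotFound) {
    Log.D(TAG, "doc-123 does not exist");
}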
/// <exception cref="Couchbase.Lite.CouchbaseLiteException">When attempting to add an invalid revision</exception> internal void ForceInsert(RevisionInternal inRev, IList<string> revHistory, Uri source) { if (revHistory == null) { revHistory = new List<string>(0); } var rev = inRev.CopyWithDocID(inRev.GetDocId(), inRev.GetRevId()); rev.SetSequence(0); string revID = rev.GetRevId(); if (!IsValidDocumentId(rev.GetDocId()) || revID == null) { throw new CouchbaseLiteException(StatusCode.BadId); } if (revHistory.Count == 0) { revHistory.Add(revID); } else if (revID != revHistory[0]) { throw new CouchbaseLiteException(StatusCode.BadId); } if (inRev.GetAttachments() != null) { var updatedRev = inRev.CopyWithDocID(inRev.GetDocId(), inRev.GetRevId()); string prevRevID = revHistory.Count >= 2 ? revHistory[1] : null; Status status = new Status(); if (!ProcessAttachmentsForRevision(updatedRev, prevRevID, status)) { throw new CouchbaseLiteException(status.Code); } inRev = updatedRev; } StoreValidation validationBlock = null; if (Shared != null && Shared.HasValues("validation", Name)) { validationBlock = ValidateRevision; } var insertStatus = Storage.ForceInsert(inRev, revHistory, validationBlock, source); if(insertStatus.IsError) { throw new CouchbaseLiteException(insertStatus.Code); } }
internal MultipartWriter MultipartWriterForRev(RevisionInternal rev, string contentType) { var writer = new MultipartWriter(contentType, null); writer.SetNextPartHeaders(new Dictionary<string, string> { { "Content-Type", "application/json" } }); writer.AddData(rev.GetBody().AsJson()); var attachments = rev.GetAttachments(); if (attachments == null) { return writer; } foreach (var entry in attachments) { var attachment = entry.Value.AsDictionary<string, object>(); if (attachment != null && attachment.GetCast<bool>("follows", false)) { var disposition = String.Format("attachment; filename={0}", Quote(entry.Key)); writer.SetNextPartHeaders(new Dictionary<string, string> { { "Content-Disposition", disposition } }); Status status = new Status(); var attachObj = AttachmentForDict(attachment, entry.Key, status); if (attachObj == null) { return null; } var fileURL = attachObj.ContentUrl; if (fileURL != null) { writer.AddFileUrl(fileURL); } else { writer.AddStream(attachObj.ContentStream); } } } return writer; }
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public virtual void TestValidations() { Database.ValidateDelegate validator = (Revision newRevision, ValidationContext context)=> { NUnit.Framework.Assert.IsNotNull(newRevision); NUnit.Framework.Assert.IsNotNull(context); NUnit.Framework.Assert.IsTrue(newRevision.Properties != null || newRevision. IsDeletion); this._enclosing.validationCalled = true; bool hoopy = newRevision.IsDeletion || (newRevision.Properties.Get("towel" ) != null); Log.V(ValidationsTest.Tag, string.Format("--- Validating %s --> %b", newRevision. Properties, hoopy)); if (!hoopy) { context.Reject("Where's your towel?"); } return hoopy; }; database.SetValidation("hoopy", validator); // POST a valid new document: IDictionary<string, object> props = new Dictionary<string, object>(); props["name"] = "Zaphod Beeblebrox"; props["towel"] = "velvet"; RevisionInternal rev = new RevisionInternal(props, database); Status status = new Status(); validationCalled = false; rev = database.PutRevision(rev, null, false, status); NUnit.Framework.Assert.IsTrue(validationCalled); NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode()); // PUT a valid update: props["head_count"] = 3; rev.SetProperties(props); validationCalled = false; rev = database.PutRevision(rev, rev.GetRevId(), false, status); NUnit.Framework.Assert.IsTrue(validationCalled); NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode()); // PUT an invalid update: Sharpen.Collections.Remove(props, "towel"); rev.SetProperties(props); validationCalled = false; bool gotExpectedError = false; try { rev = database.PutRevision(rev, rev.GetRevId(), false, status); } catch (CouchbaseLiteException e) { gotExpectedError = (e.GetCBLStatus().GetCode() == StatusCode.Forbidden); } NUnit.Framework.Assert.IsTrue(validationCalled); NUnit.Framework.Assert.IsTrue(gotExpectedError); // POST an invalid new document: props = new Dictionary<string, object>(); props["name"] = "Vogon"; props["poetry"] = true; rev = new RevisionInternal(props, database); validationCalled = false; gotExpectedError = false; try { rev = database.PutRevision(rev, null, false, status); } catch (CouchbaseLiteException e) { gotExpectedError = (e.GetCBLStatus().GetCode() == StatusCode.Forbidden); } NUnit.Framework.Assert.IsTrue(validationCalled); NUnit.Framework.Assert.IsTrue(gotExpectedError); // PUT a valid new document with an ID: props = new Dictionary<string, object>(); props["_id"] = "ford"; props["name"] = "Ford Prefect"; props["towel"] = "terrycloth"; rev = new RevisionInternal(props, database); validationCalled = false; rev = database.PutRevision(rev, null, false, status); NUnit.Framework.Assert.IsTrue(validationCalled); NUnit.Framework.Assert.AreEqual("ford", rev.GetDocId()); // DELETE a document: rev = new RevisionInternal(rev.GetDocId(), rev.GetRevId(), true, database); NUnit.Framework.Assert.IsTrue(rev.IsDeleted()); validationCalled = false; rev = database.PutRevision(rev, rev.GetRevId(), false, status); NUnit.Framework.Assert.IsTrue(validationCalled); // PUT an invalid new document: props = new Dictionary<string, object>(); props["_id"] = "petunias"; props["name"] = "Pot of Petunias"; rev = new RevisionInternal(props, database); validationCalled = false; gotExpectedError = false; try { rev = database.PutRevision(rev, null, false, status); } catch (CouchbaseLiteException e) { gotExpectedError = (e.GetCBLStatus().GetCode() == StatusCode.Forbidden); } NUnit.Framework.Assert.IsTrue(validationCalled); 
NUnit.Framework.Assert.IsTrue(gotExpectedError); }
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal RevisionInternal PutRevision(RevisionInternal rev, String prevRevId, Status resultStatus) { return PutRevision(rev, prevRevId, false, resultStatus); }