public void TestConcurrentInBatch()
{
    const int nDocs = 1000;
    const uint nConcurrent = 10;

    // Each worker runs its own batch transaction, tagging its documents so
    // the results can be verified per worker afterwards.
    ConcurrentRuns(nConcurrent, (index) =>
    {
        if (Db == null) {
            return;
        }

        Db.InBatch(() =>
        {
            var workerTag = $"Create{index}";
            // HaveCount forces evaluation of the lazy sequence; the count itself is incidental
            CreateDocs(nDocs, workerTag).Should().HaveCount(nDocs);
        });
    });

    // Verify that every worker's batch committed all of its documents
    for (uint worker = 0; worker < nConcurrent; worker++) {
        VerifyByTagName($"Create{worker}", nDocs);
    }
}
public void TestSaveInBatch()
{
    // Create ten documents inside a single transaction
    Db.InBatch(() => CreateDocs(10));

    // The batch should have committed all ten documents
    Db.Count.Should().Be(10UL, "because 10 documents were added");
    ValidateDocs(10);
}
public void TestDatabaseChange()
{
    var wa = new WaitAssert();

    // A batch of ten saves should produce one change notification carrying all ten IDs
    Db.AddChangeListener(null, (sender, args) =>
    {
        var changedIDs = args.DocumentIDs;
        wa.RunAssert(() =>
        {
            args.Database.Should().Be(Db);
            changedIDs.Should().HaveCount(10, "because that is the number of expected rows");
        });
    });

    Db.InBatch(() =>
    {
        for (uint i = 0; i < 10; i++) {
            var doc = new MutableDocument($"doc-{i}");
            doc.SetString("type", "demo");
            Db.Save(doc);
        }
    });

    wa.WaitForResult(TimeSpan.FromSeconds(5));
}
// Saves `props` as a new revision of the existing document `docID` by dropping down
// to the LiteCore C API inside a single batch transaction.
// NOTE(review): c4doc_get is called with mustExist = true, so the document must already
// exist in the database — confirm callers create it first.
private unsafe void SaveProperties(IDictionary <string, object> props, string docID)
{
    Db.InBatch(() =>
    {
        // Fetch the current C4Document; its docID/revID slices seed the put request
        var tricky = (C4Document *)LiteCoreBridge.Check(err => Native.c4doc_get(Db.c4db, docID, true, err));
        var put = new C4DocPutRequest
        {
            docID = tricky->docID,
            history = &tricky->revID, // single-entry history: current revision is the parent
            historyCount = 1,
            save = true
        };

        // Encode the property dictionary to Fleece with the database's shared-keys encoder
        var enc = Native.c4db_getSharedFleeceEncoder(Db.c4db);
        props.FLEncode(enc);
        var body = NativeRaw.FLEncoder_Finish(enc, null);
        put.body = (C4Slice)body;

        LiteCoreBridge.Check(err =>
        {
            // The request struct must be addressable, so copy it to a local before taking its address
            var localPut = put;
            var retVal = Native.c4doc_put(Db.c4db, &localPut, null, err);
            // Free the encoded body once c4doc_put has consumed it
            Native.FLSliceResult_Free(body);
            return(retVal);
        });
    });
}
// Strips a leading "The " from every document's Artist property, saving the changes
// inside one batch transaction. Returns the number of documents updated.
private int UpdateArtistNames()
{
    _updateArtistsBench.Start();
    var count = 0;
    Db.InBatch(() =>
    {
        // Query every document's ID along with its Artist property
        using (var q = Query.Select(SelectResult.Expression(Expression.Meta().ID), SelectResult.Expression(Expression.Property("Artist")))
               .From(DataSource.Database(Db))) {
            using (var results = q.Execute()) {
                foreach (var result in results) {
                    var artist = result.GetString(1);
                    // FIX: guard against documents with no Artist value (GetString returns
                    // null), which previously threw NullReferenceException on StartsWith;
                    // also use an explicit ordinal comparison for this non-linguistic check.
                    if (artist != null && artist.StartsWith("The ", StringComparison.Ordinal)) {
                        using (var doc = Db.GetDocument(result.GetString(0))) {
                            // Remove the 4-character "The " prefix
                            doc.Set("Artist", artist.Substring(4));
                            Db.Save(doc);
                            count++;
                        }
                    }
                }
            }
        }
    });
    _updateArtistsBench.Stop();
    return(count);
}
// Imports the iTunes track list into the database inside one batch transaction,
// returning the number of documents created.
private int ImportLibrary()
{
    _importBench.Start();
    _documentCount = 0;
    Db.InBatch(() =>
    {
        foreach (var track in _tracks) {
            // Only local files and remote tracks are importable
            var kind = track.GetCast <string>("Track Type");
            if (kind != "File" && kind != "Remote") {
                continue;
            }

            // The iTunes persistent ID doubles as the document ID; skip tracks without one
            var persistentID = track.GetCast <string>("Persistent ID");
            if (persistentID == null) {
                continue;
            }

            ++_documentCount;
            using (var doc = new Document(persistentID, track)) {
                Db.Save(doc);
            }
        }
    });
    _importBench.Stop();
    return _documentCount;
}
public void TestChannelPull()
{
    _otherDB.Count.Should().Be(0);

    // Seed doc-0..doc-4 as plain documents and doc-5..doc-14 tagged with "my_channel"
    Db.InBatch(() =>
    {
        for (int i = 0; i < 15; i++) {
            using (var doc = new MutableDocument($"doc-{i}")) {
                if (i < 5) {
                    doc["foo"].Value = "bar";
                } else {
                    doc["channels"].Value = "my_channel";
                }

                Db.Save(doc);
            }
        }
    });

    // Push everything up to the remote endpoint
    var config = CreateConfig(true, false, false, new URLEndpoint(new Uri("ws://localhost/db")));
    RunReplication(config, 0, 0);

    // Pull into the other database, filtered down to the channel
    config = new ReplicatorConfiguration(_otherDB, new URLEndpoint(new Uri("ws://localhost/db")));
    ModifyConfig(config, false, true, false);
    config.Channels = new[] { "my_channel" };
    RunReplication(config, 0, 0);

    _otherDB.Count.Should().Be(10, "because 10 documents should be in the given channel");
}
// Stresses database compaction: builds 25 revisions per document, attaches a blob to
// each, then deletes everything and verifies Compact() removes the orphaned blob files.
public void TestCompact()
{
    var docs = CreateDocs(20);

    // Create 25 successive revisions of every document inside one transaction
    var nextDocs = new List <Document>();
    Db.InBatch(() =>
    {
        foreach (var doc in docs) {
            var docToUse = doc;
            for (int i = 0; i < 25; i++) {
                var mDoc = docToUse.ToMutable();
                mDoc.SetInt("number", i);
                docToUse = Db.Save(mDoc);
            }

            nextDocs.Add(docToUse);
        }
    });
    docs = nextDocs;

    // Attach a blob (the document's own ID as text) to every document
    nextDocs = new List <Document>();
    foreach (var doc in docs) {
        var content = Encoding.UTF8.GetBytes(doc.Id);
        var blob = new Blob("text/plain", content);
        var mDoc = doc.ToMutable();
        mDoc.SetBlob("blob", blob);
        nextDocs.Add(Db.Save(mDoc));
    }

    Db.Count.Should().Be(20, "because that is the number of documents that were added");

    // One attachment file per document should now exist on disk
    var attsDir = new DirectoryInfo(Path.Combine(Db.Path, "Attachments"));
    var atts = attsDir.EnumerateFiles();
    atts.Should().HaveCount(20, "because there should be one blob per document");
    Db.Compact();

    // Delete every document; each should be unreadable immediately afterwards
    docs = nextDocs;
    nextDocs = new List <Document>();
    foreach (var doc in docs) {
        var savedDoc = Db.GetDocument(doc.Id);
        Db.Delete(savedDoc);
        Db.GetDocument(savedDoc.Id).Should().BeNull("because the document was just deleted");
    }

    Db.Count.Should().Be(0, "because all documents were deleted");

    // A second compaction should garbage-collect the now-unreferenced blob files
    Db.Compact();
    atts = attsDir.EnumerateFiles();
    atts.Should().BeEmpty("because the blobs should be collected by the compaction");
}
public void TestGetExistingDocWithIDInBatch()
{
    var docs = CreateDocs(10);

    // Reads inside a batch should see documents that were created outside of it
    Db.InBatch(() => ValidateDocs(10));

    // Release the native handles held by the created documents
    foreach (var created in docs) {
        created.Dispose();
    }
}
// Saves `count` sequential revisions of the existing document "doc", all inside
// one batch transaction.
private void AddRevisions(uint count)
{
    // FIX: both the retrieved Document and its mutable copy are IDisposable and were
    // previously leaked; dispose them so native document handles are released.
    using (var saved = Db.GetDocument("doc"))
    using (var doc = saved.ToMutable()) {
        Db.InBatch(() =>
        {
            for (int i = 0; i < count; i++) {
                doc.SetInt("count", i);
                Db.Save(doc);
            }
        });
    }
}
// Verifies that a Fleece-encoded dictionary, read back through MRoot/MDict, behaves as
// a read-only DictionaryObject: typed getters, defaults for missing keys, ToDictionary().
public unsafe void TestReadOnlyDictionary()
{
    var now = DateTimeOffset.UtcNow;
    var nestedArray = new[] { 1L, 2L, 3L };
    var nestedDict = new Dictionary <string, object> { ["foo"] = "bar" };
    var masterData = new Dictionary <string, object> { ["date"] = now, ["array"] = nestedArray, ["dict"] = nestedDict };

    // Encoding goes through the database's shared encoder, hence the transaction
    var flData = new FLSliceResult();
    Db.InBatch(() => { flData = masterData.FLEncode(); });
    try {
        var context = new DocContext(Db, null);
        using (var mRoot = new MRoot(context)) {
            mRoot.Context.Should().BeSameAs(context);

            // Parse the encoded data back into a Fleece document, trusting it as well-formed
            FLDoc *fleeceDoc = Native.FLDoc_FromResultData(flData, FLTrust.Trusted, Native.c4db_getFLSharedKeys(Db.c4db), FLSlice.Null);
            var flValue = Native.FLDoc_GetRoot(fleeceDoc);
            var mDict = new MDict(new MValue(flValue), mRoot);
            var deserializedDict = new DictionaryObject(mDict, false);

            // Typed getters return the stored value, or a type-specific default when absent
            deserializedDict["bogus"].Blob.Should().BeNull();
            deserializedDict["date"].Date.Should().Be(now);
            deserializedDict.GetDate("bogus").Should().Be(DateTimeOffset.MinValue);
            deserializedDict.GetArray("array").Should().Equal(1L, 2L, 3L);
            deserializedDict.GetArray("bogus").Should().BeNull();
            deserializedDict.GetDictionary("dict").Should().BeEquivalentTo(nestedDict);
            deserializedDict.GetDictionary("bogus").Should().BeNull();

            // ToDictionary should materialize nested values as plain .NET collections
            var dict = deserializedDict.ToDictionary();
            dict["array"].As <IList>().Should().Equal(1L, 2L, 3L);
            dict["dict"].As <IDictionary <string, object> >().Should().BeEquivalentTo(nestedDict);

            var isContain = mDict.Contains("");
            isContain.Should().BeFalse();

            Native.FLDoc_Release(fleeceDoc);
        }
    } finally {
        // Release the encoded slice regardless of assertion outcome
        Native.FLSliceResult_Release(flData);
    }
}
public void TestConcurrentReadInBatch()
{
    const uint nDocs = 10;
    const uint nRounds = 100;
    const uint nConcurrent = 10;

    // Create the documents up front and capture their IDs for the readers
    var docs = CreateDocs(nDocs, "Create");
    var docIDs = docs.Select(x => x.Id).ToList();

    // Each worker repeatedly reads every document inside its own transaction
    ConcurrentRuns(nConcurrent, (index) =>
    {
        Db.InBatch(() => ReadDocs(docIDs, nRounds));
    });
}
// Verifies that a Fleece-encoded array, read back through MRoot, behaves as a read-only
// ArrayObject: typed getters by index, defaults on type mismatch, and ToList().
public unsafe void TestReadOnlyArray()
{
    var now = DateTimeOffset.UtcNow;
    var nestedArray = new[] { 1L, 2L, 3L };
    var nestedDict = new Dictionary <string, object> { ["foo"] = "bar" };
    var masterData = new object[] { 1, "str", nestedArray, now, nestedDict };

    // Encoding goes through the database's shared encoder, hence the transaction
    var flData = new FLSliceResult();
    Db.InBatch(() => { flData = masterData.FLEncode(); });
    try {
        var context = new DocContext(Db, null);
        using (var mRoot = new MRoot(context)) {
            mRoot.Context.Should().BeSameAs(context);
            FLDoc *fleeceDoc = Native.FLDoc_FromResultData(flData, FLTrust.Trusted, Native.c4db_getFLSharedKeys(Db.c4db), FLSlice.Null);
            var flValue = Native.FLDoc_GetRoot(fleeceDoc);
            var mArr = new FleeceMutableArray(new MValue(flValue), mRoot);
            var deserializedArray = new ArrayObject(mArr, false);

            // Index-based getters return the stored value, or a default on type mismatch
            deserializedArray.GetArray(2).Should().Equal(1L, 2L, 3L);
            deserializedArray.GetArray(3).Should().BeNull();
            deserializedArray.GetBlob(1).Should().BeNull();
            deserializedArray.GetDate(3).Should().Be(now);
            deserializedArray.GetDate(4).Should().Be(DateTimeOffset.MinValue);
            deserializedArray[1].ToString().Should().Be("str");
            deserializedArray.GetString(2).Should().BeNull();
            deserializedArray.GetDictionary(4).Should().BeEquivalentTo(nestedDict);
            deserializedArray[0].Dictionary.Should().BeNull();

            // ToList should materialize nested values as plain .NET collections
            var list = deserializedArray.ToList();
            list[2].Should().BeAssignableTo <IList <object> >();
            list[4].Should().BeAssignableTo <IDictionary <string, object> >();

            // Disposing a default-constructed MValue should be a harmless no-op
            var mVal = new MValue();
            mVal.Dispose();

            Native.FLDoc_Release(fleeceDoc);
        }
    } finally {
        Native.FLSliceResult_Release(flData);
    }

    // NOTE(review): constructed but unused and never disposed — presumably exercises the
    // parameterless MRoot constructor for coverage; confirm this is intentional.
    var mroot = new MRoot();
}
public void TestConcurrentCreateInBatch()
{
    const int nDocs = 1000;
    const uint nConcurrent = 10;

    // Each worker creates its own tagged set of documents inside a batch transaction
    ConcurrentRuns(nConcurrent, (index) =>
    {
        var workerTag = $"Create{index}";
        Db.InBatch(() =>
        {
            // HaveCount forces evaluation of the lazily-created documents
            CreateDocs(nDocs, workerTag).Should().HaveCount(nDocs);
        });
    });

    // Every worker's documents should all be present once the batches commit
    for (uint worker = 0; worker < nConcurrent; worker++) {
        VerifyByTagName($"Create{worker}", nDocs);
    }
}
// Purges ten documents one-by-one inside a batch, checking that the database count
// decrements after every purge and reaches zero at the end.
public void TestPurgeDocInBatch()
{
    CreateDocs(10);
    Db.InBatch(() =>
    {
        for (int i = 0; i < 10; i++) {
            var docID = $"doc_{i:D3}";
            // FIX: dispose the retrieved Document; it was previously leaked
            using (var doc = Db.GetDocument(docID)) {
                PurgeDocAndVerify(doc);
            }

            Db.Count.Should().Be(9UL - (ulong)i, "because the document count should be accurate after deletion");
        }
    });

    Db.Count.Should().Be(0, "because all documents were purged");
}
// Variant of the read-only dictionary test that parses the encoded data directly with
// FLValue_FromTrustedData (no FLDoc wrapper) and uses the older assertion API.
public unsafe void TestReadOnlyDictionary()
{
    var now = DateTimeOffset.UtcNow;
    var nestedArray = new[] { 1L, 2L, 3L };
    var nestedDict = new Dictionary <string, object> { ["foo"] = "bar" };
    var masterData = new Dictionary <string, object> { ["date"] = now, ["array"] = nestedArray, ["dict"] = nestedDict };

    // Encoding goes through the database's shared encoder, hence the transaction
    var flData = new FLSliceResult();
    Db.InBatch(() => { flData = masterData.FLEncode(); });
    try {
        var context = new DocContext(Db, null);
        using (var mRoot = new MRoot(context)) {
            mRoot.Context.Should().BeSameAs(context);
            var flValue = NativeRaw.FLValue_FromTrustedData((FLSlice)flData);
            var mDict = new MDict(new MValue(flValue), mRoot);
            var deserializedDict = new DictionaryObject(mDict, false);

            // Typed getters return the stored value, or a type-specific default when absent
            deserializedDict["bogus"].Blob.Should().BeNull();
            deserializedDict["date"].Date.Should().Be(now);
            deserializedDict.GetDate("bogus").Should().Be(DateTimeOffset.MinValue);
            deserializedDict.GetArray("array").Should().Equal(1L, 2L, 3L);
            deserializedDict.GetArray("bogus").Should().BeNull();
            deserializedDict.GetDictionary("dict").Should().BeEquivalentTo(nestedDict);
            deserializedDict.GetDictionary("bogus").Should().BeNull();

            // ToDictionary should materialize nested values as plain .NET collections
            var dict = deserializedDict.ToDictionary();
            dict["array"].As <IList>().Should().Equal(1L, 2L, 3L);
            dict["dict"].As <IDictionary <string, object> >().ShouldBeEquivalentTo(nestedDict);
        }
    } finally {
        Native.FLSliceResult_Free(flData);
    }
}
// Round-trips the iTunes library dataset through Fleece: deserialize the JSON lines,
// FLEncode the list, decode with FLValueConverter, and verify every key/value survives.
public unsafe void TestSerializationRoundTrip()
{
    var masterList = new List <Dictionary <string, object> >();
    // Keep dates as raw strings so the round-trip comparison stays exact
    var settings = new JsonSerializerSettings { DateParseHandling = DateParseHandling.None };
    var s = JsonSerializer.CreateDefault(settings);
    ReadFileByLines("C/tests/data/iTunesMusicLibrary.json", line =>
    {
        using (var reader = new JsonTextReader(new StringReader(line))) {
            masterList.Add(s.Deserialize <Dictionary <string, object> >(reader));
        }

        return(true);
    });

    // Encode the whole list to Fleece and decode it back inside one transaction
    var retrieved = default(List <Dictionary <string, object> >);
    Db.InBatch(() =>
    {
        using (var flData = masterList.FLEncode()) {
            retrieved = FLValueConverter.ToCouchbaseObject(NativeRaw.FLValue_FromData((FLSlice)flData, FLTrust.Trusted), Db, true, typeof(Dictionary <,>).MakeGenericType(typeof(string), typeof(object))) as List <Dictionary <string, object> >;
        }
    });

    // Compare entry-by-entry against the originally deserialized data
    var i = 0;
    foreach (var entry in retrieved) {
        var entry2 = masterList[i];
        foreach (var key in entry.Keys) {
            entry[key].Should().Be(entry2[key]);
        }

        i++;
    }
}
// Exercises the shared-string machinery: encodes mixed data, wraps it in an MArray,
// and probes SharedStringCache plus MArray's unsupported IList operations.
public unsafe void TestSharedstrings()
{
    var now = DateTimeOffset.UtcNow;
    var nestedArray = new[] { 1L, 2L, 3L };
    var nestedDict = new Dictionary <string, object> { ["foo"] = "bar" };
    var masterData = new object[] { 1, "str", nestedArray, now, nestedDict };

    // Encoding goes through the database's shared encoder, hence the transaction
    var flData = new FLSliceResult();
    Db.InBatch(() => { flData = masterData.FLEncode(); });
    try {
        var context = new DocContext(Db, null);
        using (var mRoot = new MRoot(context)) {
            var flValue = NativeRaw.FLValue_FromTrustedData((FLSlice)flData);
            var mArr = new MArray(new MValue(flValue), mRoot);
            var sharedstrings = context.SharedStrings;

            // Re-encode the root through the database's shared encoder
            FLEncoder *fLEncoder = Db.SharedEncoder;
            mRoot.FLEncode(fLEncoder);
            mRoot.Encode();

            var isReadonly = mArr.IsReadOnly;
            isReadonly.Should().BeFalse();

            // These IList members are intentionally unimplemented on MArray
#if !WINDOWS_UWP
            Assert.Throws <NotImplementedException>(() => mArr.IndexOf(now));
            Assert.Throws <NotImplementedException>(() => mArr.Contains(now));
            Assert.Throws <NotImplementedException>(() => mArr.Remove(now));
            Assert.Throws <NotImplementedException>(() => mArr.CopyTo(new object[] { }, 12));
#endif

            // Exercise the SharedStringCache constructors and iteration helpers
            var flDict = Native.FLValue_AsDict(flValue);
            var sharedStringCache = new SharedStringCache();
            var sharedStringCache1 = new SharedStringCache(sharedStringCache);
            sharedStringCache1 = new SharedStringCache(sharedStringCache, flDict);
            var i = default(FLDictIterator);
            var iterKey = sharedStringCache1.GetDictIterKey(&i);
            sharedStringCache1.UseDocumentRoot(flDict);
        }
    } finally {
        Native.FLSliceResult_Free(flData);
    }
}
// Populates doc1..doc<num> with complementary number pairs (n, num - n) via the
// NumbersModel binding, all inside one batch transaction.
private void LoadModelNumbers(int num)
{
    Db.InBatch(() =>
    {
        for (int n = 1; n <= num; n++) {
            var document = new MutableDocument($"doc{n}");
            var model = new NumbersModel
            {
                Number1 = n,
                Number2 = num - n,
                Document = document
            };
            Db.Save(model);
        }
    });
}
// Variant of the read-only array test that parses the encoded data directly with
// FLValue_FromTrustedData (no FLDoc wrapper) and wraps it in an MArray.
public unsafe void TestReadOnlyArray()
{
    var now = DateTimeOffset.UtcNow;
    var nestedArray = new[] { 1L, 2L, 3L };
    var nestedDict = new Dictionary <string, object> { ["foo"] = "bar" };
    var masterData = new object[] { 1, "str", nestedArray, now, nestedDict };

    // Encoding goes through the database's shared encoder, hence the transaction
    var flData = new FLSliceResult();
    Db.InBatch(() => { flData = masterData.FLEncode(); });
    try {
        var context = new DocContext(Db, null);
        using (var mRoot = new MRoot(context)) {
            mRoot.Context.Should().BeSameAs(context);
            var flValue = NativeRaw.FLValue_FromTrustedData((FLSlice)flData);
            var mArr = new MArray(new MValue(flValue), mRoot);
            var deserializedArray = new ArrayObject(mArr, false);

            // Index-based getters return the stored value, or a default on type mismatch
            deserializedArray.GetArray(2).Should().Equal(1L, 2L, 3L);
            deserializedArray.GetArray(3).Should().BeNull();
            deserializedArray.GetBlob(1).Should().BeNull();
            deserializedArray.GetDate(3).Should().Be(now);
            deserializedArray.GetDate(4).Should().Be(DateTimeOffset.MinValue);
            deserializedArray[1].ToString().Should().Be("str");
            deserializedArray.GetString(2).Should().BeNull();
            deserializedArray.GetDictionary(4).Should().BeEquivalentTo(nestedDict);
            deserializedArray[0].Dictionary.Should().BeNull();

            // ToList should materialize nested values as plain .NET collections
            var list = deserializedArray.ToList();
            list[2].Should().BeAssignableTo <IList <object> >();
            list[4].Should().BeAssignableTo <IDictionary <string, object> >();
        }
    } finally {
        Native.FLSliceResult_Free(flData);
    }
}
public void TestExternalChanges()
{
    // A second handle onto the same database should observe changes made through Db
    using (var db2 = new Database(Db)) {
        var dbLatch = new CountdownEvent(1);
        db2.AddChangeListener((sender, args) =>
        {
            args.Should().NotBeNull();
            args.DocumentIDs.Count.Should().Be(10);
            dbLatch.CurrentCount.Should().Be(1);
            dbLatch.Signal();
        });

        var docLatch = new CountdownEvent(1);
        db2.AddDocumentChangeListener("doc-6", (sender, args) =>
        {
            args.Should().NotBeNull();
            args.DocumentID.Should().Be("doc-6");
            using (var doc = Db.GetDocument(args.DocumentID)) {
                doc.GetString("type").Should().Be("demo");
                docLatch.CurrentCount.Should().Be(1);
                docLatch.Signal();
            }
        });

        Db.InBatch(() =>
        {
            for (var i = 0; i < 10; i++) {
                using (var doc = new MutableDocument($"doc-{i}")) {
                    doc.SetString("type", "demo");
                    Db.Save(doc);
                }
            }
        });

        dbLatch.Wait(TimeSpan.FromSeconds(5)).Should().BeTrue();
        docLatch.Wait(TimeSpan.FromSeconds(5)).Should().BeTrue();
    }
}