/// <summary>
/// Re-saves the index document of the version identified by <paramref name="versionId"/>.
/// Versions modified after <paramref name="timeLimit"/> are skipped (already reindexed
/// by normal operation). The save is retried up to 3 times with a 2000 ms delay;
/// returns false only when every attempt failed.
/// </summary>
private static bool ReindexBinaryProperties(int versionId, DateTime timeLimit)
{
    using (new SystemAccount())
    {
        var node = Node.LoadNodeByVersionId(versionId);
        if (node == null)
            return true;

        // A version touched after the cutoff needs no reindexing here.
        if (node.VersionModificationDate > timeLimit)
        {
            Tracer.Write($"SKIP V#{node.VersionId} {node.Version} N#{node.Id} {node.Path}");
            return true;
        }

        try
        {
            Retrier.Retry(3, 2000, typeof(Exception), () =>
            {
                var indexDocument = SearchManager.LoadIndexDocumentByVersionId(versionId);
                DataBackingStore.SaveIndexDocument(node, indexDocument);
            });
            Tracer.Write($"Save V#{node.VersionId} {node.Version} N#{node.Id} {node.Path}");
            return true;
        }
        catch (Exception e)
        {
            Tracer.WriteError("Error after 3 attempts: {0}", e);
            return false;
        }
    }
}
/// <summary>
/// Extends a Content version with text extract: appends the given text to the
/// stored AllText index field, persists the modified index document, invalidates
/// the distributed cache entry of the version and rebuilds its index entry.
/// </summary>
/// <param name="versionId">Id of the content version to extend.</param>
/// <param name="textExtract">Text to append to the existing AllText value.</param>
public static void AddTextExtract(int versionId, string textExtract)
{
    // 1: load indexDocument.
    var docData = SearchManager.LoadIndexDocumentByVersionId(versionId);
    var indexDoc = docData.IndexDocument;

    // 2: original and new text extract concatenation.
    textExtract = (indexDoc.GetStringValue(IndexFieldName.AllText) ?? "") + textExtract;
    indexDoc.Add(new IndexField(IndexFieldName.AllText, textExtract, IndexingMode.Analyzed, IndexStoringMode.No, IndexTermVector.No));

    // 3: save indexDocument.
    docData.IndexDocumentChanged();
    DataProvider.SaveIndexDocument(versionId, docData.SerializedIndexDocument);

    // 4: distributed cache invalidation because of version timestamp.
    DataBackingStore.RemoveNodeDataFromCacheByVersionId(versionId);

    // 5: index update.
    var node = Node.LoadNodeByVersionId(versionId);
    if (node != null)
    {
        SearchManager.GetIndexPopulator().RebuildIndex(node);
    }
}
/// <summary>
/// Rebuilds the index of a single node (no subtree). When databaseAndIndex is true,
/// the index documents of every version are regenerated in the database first; then
/// a Rebuild indexing activity is created and executed for the node.
/// </summary>
private void RebuildIndex_NoRecursive(Node node, bool databaseAndIndex)
{
    TreeLock.AssertFree(node.Path);

    var head = NodeHead.Get(node.Id);
    if (databaseAndIndex)
    {
        bool hasBinary;
        foreach (var versionInfo in head.Versions)
        {
            var versionNode = Node.LoadNodeByVersionId(versionInfo.VersionId);
            DataBackingStore.SaveIndexDocument(versionNode, false, false, out hasBinary);
        }
    }

    var versioningInfo = new VersioningInfo
    {
        LastDraftVersionId = head.LastMinorVersionId,
        LastPublicVersionId = head.LastMajorVersionId,
        Delete = new int[0],
        Reindex = new int[0]
    };

    CreateActivityAndExecute(IndexingActivityType.Rebuild, node.Path, node.Id, 0, 0, null, versioningInfo, null);
}
/// <summary>
/// Regenerates and saves the index document of a single node.
/// </summary>
private void RefreshIndexDocumentInfoOneNode(Node node)
{
    // FIX: removed the unused 'versionId' local (it was assigned and never read).
    // The hasBinary output is intentionally ignored here.
    bool hasBinary;
    DataBackingStore.SaveIndexDocument(node, false, out hasBinary);
}
// Creates a DataBackingStore over the local SQLEXPRESS "Caching" database for the
// given partition.
// NOTE(review): the instanceName parameter is unused and the connection string is
// hard-coded — kept as-is to preserve the existing call signature and behavior.
DataBackingStore CreateBackingStore(string instanceName, string partitionName)
{
    var database = new SqlDatabase(@"server=(local)\SQLEXPRESS;database=Caching;Integrated Security=true");
    return new DataBackingStore(database, partitionName, null);
}
/// <summary>
/// NULL-populator rebuild: regenerates stored index documents in the database only.
/// IndexOnly level is a no-op because there is no index behind this populator.
/// </summary>
public void RebuildIndex(Node node, bool recursive = false, IndexRebuildLevel rebuildLevel = IndexRebuildLevel.IndexOnly)
{
    // Nothing to do for IndexOnly: this populator maintains no index.
    if (rebuildLevel == IndexRebuildLevel.IndexOnly)
        return;

    using (var op = SnTrace.Index.StartOperation(
        "NullPopulator.RefreshIndex. Version: {0}, VersionId: {1}, recursive: {2}, level: {3}",
        node.Version, node.VersionId, recursive, rebuildLevel))
    {
        using (new Storage.Security.SystemAccount())
        {
            if (recursive)
            {
                // Lock the subtree while every contained node's document is regenerated.
                using (TreeLock.Acquire(node.Path))
                {
                    foreach (var descendant in NodeEnumerator.GetNodes(node.Path))
                        DataBackingStore.SaveIndexDocument(descendant, false, false, out _);
                }
            }
            else
            {
                TreeLock.AssertFree(node.Path);
                DataBackingStore.SaveIndexDocument(node, false, false, out _);
            }
        }
        op.Successful = true;
    }
}
// Simulated thread interleaving: thread #2 caches a NodeHead, thread #1 force
// deletes the content, then thread #2 loads through the stale head and must get null.
public void Versioning_LostVersion_NodeIsDeleted()
{
    Test(() =>
    {
        // Arrange: a file with neither approval nor versioning.
        var file = CreateTestFile(save: false);
        file.ApprovingMode = ApprovingType.False;
        file.VersioningMode = VersioningType.None;
        file.Save();
        var contentId = file.Id;

        // Thread #1
        file.CheckOut();

        // Thread #2
        var staleHead = DataBackingStore.GetNodeHead(contentId);

        // Thread #1
        file.ForceDelete();

        // Thread #2: the stale head must not resurrect the deleted node.
        var loaded = LoadNode(staleHead, VersionNumber.LastAccessible);
        Assert.IsNull(loaded);
    });
}
// Simulated thread interleaving on a "Car" content: thread #2 caches a NodeHead,
// thread #1 checks the content back in (destroying the working version), then
// thread #2 must get null NodeData for the version id held by the stale head.
public void Versioning_LostVersion_NodeDataIsNull()
{
    // Arrange: content with neither approval nor versioning.
    var content = Content.CreateNew("Car", TestRoot, "car");
    var gcontent = (GenericContent)content.ContentHandler;
    gcontent.ApprovingMode = ApprovingType.False;
    gcontent.VersioningMode = VersioningType.None;
    gcontent.Save();
    var contentId = gcontent.Id;

    // Thread #1
    gcontent.CheckOut();

    // Thread #2
    var staleHead = DataBackingStore.GetNodeHead(contentId);

    // Thread #1
    gcontent.CheckIn();

    // Thread #2
    var data = DataBackingStore.GetNodeData(staleHead, staleHead.LastMinorVersionId).NodeData;
    Assert.IsNull(data);
}
// Simulated thread interleaving: thread #2 caches a NodeHead, thread #1 checks the
// file back in (destroying the working version), then thread #2 reads node data
// through the stale head and must get a null NodeData.
public void Versioning_LostVersion_NodeDataIsNull()
{
    Test(() =>
    {
        // Arrange: a file with neither approval nor versioning.
        var file = CreateTestFile(save: false);
        file.ApprovingMode = ApprovingType.False;
        file.VersioningMode = VersioningType.None;
        file.Save();
        var contentId = file.Id;

        // Thread #1
        file.CheckOut();

        // Thread #2
        var staleHead = DataBackingStore.GetNodeHead(contentId);

        // Thread #1
        file.CheckIn();

        // Thread #2
        var data = DataBackingStore.GetNodeData(staleHead, staleHead.LastMinorVersionId).NodeData;
        Assert.IsNull(data);
    });
}
// caller: IndexPopulator.Populator
/// <summary>
/// Rebuilds the index of the subtree rooted at <paramref name="path"/> directly,
/// without creating indexing activities. At DatabaseAndIndex level the stored index
/// documents are regenerated first (root, then the whole subtree in parallel); then
/// everything under the path is removed from the index via an InTree term delete and
/// re-written from the stored index documents.
/// </summary>
public void RebuildIndexDirectly(string path, IndexRebuildLevel level = IndexRebuildLevel.IndexOnly)
{
    if (level == IndexRebuildLevel.DatabaseAndIndex)
    {
        using (var op2 = SnTrace.Index.StartOperation("IndexPopulator: Rebuild index documents."))
        {
            using (new SystemAccount())
            {
                // Save the root's index document, then every descendant in parallel.
                var node = Node.LoadNode(path);
                DataBackingStore.SaveIndexDocument(node, false, false, out _);
                Parallel.ForEach(NodeQuery.QueryNodesByPath(node.Path, true).Nodes,
                    n => { DataBackingStore.SaveIndexDocument(n, false, false, out _); });
            }
            op2.Successful = true;
        }
    }

    using (var op = SnTrace.Index.StartOperation("IndexPopulator: Rebuild index."))
    {
        // Single WriteIndex call: delete by InTree term, then stream the completed
        // index documents back in; OnNodeIndexed is raised for each written path.
        IndexManager.IndexingEngine.WriteIndex(
            new[] { new SnTerm(IndexFieldName.InTree, path) },
            null,
            SearchManager.LoadIndexDocumentsByPath(path, IndexManager.GetNotIndexedNodeTypes())
                .Select(d =>
                {
                    var indexDoc = IndexManager.CompleteIndexDocument(d);
                    OnNodeIndexed(d.Path);
                    return (indexDoc);
                }));
        op.Successful = true;
    }
}
// Simulated thread interleaving on a "Car" content: thread #2 caches the NodeHead,
// thread #1 force deletes the content, then thread #2 must fail to load a node
// from the stale head.
public void Versioning_LostVersion_NodeIsDeleted()
{
    // Arrange: content with neither approval nor versioning.
    var content = Content.CreateNew("Car", TestRoot, "car");
    var gcontent = (GenericContent)content.ContentHandler;
    gcontent.ApprovingMode = ApprovingType.False;
    gcontent.VersioningMode = VersioningType.None;
    gcontent.Save();
    var contentId = gcontent.Id;

    // Thread #1
    gcontent.CheckOut();

    // Thread #2
    var staleHead = DataBackingStore.GetNodeHead(contentId);

    // Thread #1
    gcontent.ForceDelete();

    // Thread #2
    var node = LoadNode(staleHead, VersionNumber.LastAccessible);
    Assert.IsNull(node);
}
/// <summary>
/// NULL-populator rebuild (fully-qualified-name variant): regenerates stored index
/// documents only; IndexOnly level is a no-op because there is no index.
/// </summary>
public void RebuildIndex(Node node, bool recursive = false, IndexRebuildLevel rebuildLevel = IndexRebuildLevel.IndexOnly)
{
    if (rebuildLevel == IndexRebuildLevel.IndexOnly)
        return;

    using (var op = SnTrace.Index.StartOperation(
        "NullPopulator.RefreshIndex. Version: {0}, VersionId: {1}, recursive: {2}, level: {3}",
        node.Version, node.VersionId, recursive, rebuildLevel))
    {
        bool hasBinary;
        using (new SenseNet.ContentRepository.Storage.Security.SystemAccount())
        {
            if (recursive)
            {
                // Hold the tree lock while the whole subtree is regenerated.
                using (SenseNet.ContentRepository.Storage.TreeLock.Acquire(node.Path))
                {
                    foreach (var descendant in NodeEnumerator.GetNodes(node.Path))
                        DataBackingStore.SaveIndexDocument(descendant, false, false, out hasBinary);
                }
            }
            else
            {
                SenseNet.ContentRepository.Storage.TreeLock.AssertFree(node.Path);
                DataBackingStore.SaveIndexDocument(node, false, false, out hasBinary);
            }
        }
        op.Successful = true;
    }
}
// Verifies that an item added with a 3-second absolute expiration is removed from
// both the in-memory cache and the database backing store after the expiration
// callback has fired.
public void ItemsGetRemovedFromCacheAfterExpiration()
{
    Assert.IsNull(cache.GetData("key"));

    DataBackingStore backingStore = CreateDataBackingStore();
    Assert.AreEqual(0, backingStore.Load().Count);

    AbsoluteTime threeSecondExpiration = new AbsoluteTime(DateTime.Now + TimeSpan.FromSeconds(3.0));

    // Hold callbackLock across Add and Wait so the callback's pulse cannot be
    // missed between the two calls.
    lock (callbackLock)
    {
        cache.Add("key", "value", CacheItemPriority.NotRemovable, new RefreshAction(), threeSecondExpiration);
        Assert.IsNotNull(cache.GetData("key"));

        Hashtable oneEntryHashTable = backingStore.Load();
        Assert.AreEqual(1, oneEntryHashTable.Count);

        // Wait up to 15 s for the expiration callback to signal; wasCalledBack is
        // set by that callback.
        Monitor.Wait(callbackLock, 15000);
        Assert.IsTrue(wasCalledBack);
    }

    object removedItem = cache.GetData("key");
    Assert.IsNull(removedItem);

    Hashtable emptyHashTable = backingStore.Load();
    Assert.AreEqual(0, emptyHashTable.Count);
}
public void TestInitialize()
{
    const string connectionString = @"server=(local)\SQLEXPRESS;database=Caching;Integrated Security=true";

    // Two stores over the same database, isolated only by partition name.
    firstCache = new DataBackingStore(new SqlDatabase(connectionString), "Partition1", null);
    secondCache = new DataBackingStore(new SqlDatabase(connectionString), "Partition2", null);

    // Start every test from empty partitions.
    firstCache.Flush();
    secondCache.Flush();
}
public void SetUp()
{
    // Build the default database from configuration and wrap it in an unencrypted
    // backing store, flushed so each test starts with an empty partition.
    var factory = new DatabaseProviderFactory(Context);
    db = factory.CreateDefaultDatabase();
    unencryptedBackingStore = new DataBackingStore(db, "encryptionTests", null);
    unencryptedBackingStore.Flush();
}
public void TestInitialize()
{
    backingStore = CreateBackingStore(instanceName, "Partition1");

    // Wipe the backing table directly so leftovers from other fixtures cannot leak in.
    Data.Database db = new SqlDatabase(@"server=(local)\SQLEXPRESS;database=Caching;Integrated Security=true");
    DbCommand deleteCommand = db.GetSqlStringCommand("delete from CacheData");
    db.ExecuteNonQuery(deleteCommand);
}
// Stores an item through an encrypting backing store, then reads the raw rows back
// through the unencrypted store; Load is expected to throw on the encrypted payload.
public void AttemptingToReadEncryptedDataWithoutDecryptingThrowsException()
{
    var factory = new StorageEncryptionFactory(Context);
    IStorageEncryptionProvider encryptionProvider = factory.CreateSymmetricProvider(CacheManagerName);

    var encryptingBackingStore = new DataBackingStore(db, "encryptionTests", encryptionProvider);
    var item = new CacheItem("key", "value", CacheItemPriority.Normal, new MockRefreshAction(), new AlwaysExpired());
    encryptingBackingStore.Add(item);

    // No decryption provider on this store — this call should fail.
    Hashtable dataInCache = unencryptedBackingStore.Load();
}
/// <summary>
/// Regenerates the node's index document and, when content querying is allowed,
/// immediately executes an UpdateDocument indexing activity for it.
/// </summary>
private void RefreshIndexDocumentInfoOneNode(Node node)
{
    // FIX: removed the unused 'versionId' local; node.VersionId is read directly below.
    DataBackingStore.SaveIndexDocument(node);
    if (RepositoryInstance.ContentQueryIsAllowed)
    {
        ExecuteActivity(CreateActivity(IndexingActivityType.UpdateDocument, node.Id, node.VersionId, node.VersionTimestamp, null, null));//UNDONE: SingleVersion
    }
}
public void SetUp()
{
    backingStore = CreateBackingStore(instanceName, "Partition1");

    // Clear the cache table up front so the fixture is independent of earlier runs.
    Data.Database db = DatabaseFactory.CreateDatabase("CachingDatabase");
    DBCommandWrapper deleteWrapper = db.GetSqlStringCommandWrapper("delete from CacheData");
    db.ExecuteNonQuery(deleteWrapper);
}
/// <summary>
/// Saves the metadata index document of a node and, when it has a binary to extract,
/// queues a binary-reindex task ranked by which version the document represents.
/// </summary>
private void ReindexNode(Node node)
{
    var indexDocument = DataBackingStore.SaveIndexDocument(node, true, false, out var hasBinary);
    if (hasBinary)
    {
        // Rank: last public = 1, last draft = 2, anything else = 3.
        int rank;
        if (indexDocument.IsLastPublic)
            rank = 1;
        else if (indexDocument.IsLastDraft)
            rank = 2;
        else
            rank = 3;
        CreateBinaryReindexTask(node, rank);
    }
    _reindexMetadataProgress++;
}
// Builds a configuration-driven store over the "CachingDatabase" instance,
// partition "Partition1".
private DataBackingStore CreateDataBackingStore()
{
    var data = new DataCacheStorageData
    {
        DatabaseInstanceName = "CachingDatabase",
        PartitionName = "Partition1"
    };
    var backingStore = new DataBackingStore();
    backingStore.Initialize(new TestCachingConfigurationView(data, Context));
    return backingStore;
}
/// <summary>
/// Regenerates the node's index document (metadata only) and, when content querying
/// is allowed, immediately executes an UpdateDocument indexing activity for it.
/// </summary>
private void RefreshIndexDocumentInfoOneNode(Node node)
{
    // FIX: removed the unused 'versionId' local; node.VersionId is read directly below.
    bool hasBinary;
    DataBackingStore.SaveIndexDocument(node, false, out hasBinary);
    if (RepositoryInstance.ContentQueryIsAllowed)
    {
        ExecuteActivity(CreateActivity(IndexingActivityType.UpdateDocument, node.Id, node.VersionId, node.VersionTimestamp, null, null));
    }
}
// Writes through a store configured with the "Fred" encryption provider, then reads
// the raw rows back through the unencrypted store; Load is expected to throw.
public void AttemptingToReadEncryptedDataWithoutDecryptingThrowsException()
{
    IStorageEncryptionProvider encryptionProvider =
        EnterpriseLibraryFactory.BuildUp<IStorageEncryptionProvider>("Fred");

    var encryptingBackingStore = new DataBackingStore(db, "encryptionTests", encryptionProvider);
    encryptingBackingStore.Add(new CacheItem("key", "value", CacheItemPriority.Normal, new MockRefreshAction(), new AlwaysExpired()));

    // No decryption provider on this store — this call should fail.
    Hashtable dataInCache = unencryptedBackingStore.Load();
}
// Builds a configuration-driven DataBackingStore for the given database instance
// and partition.
private DataBackingStore CreateBackingStore(string instanceName, string partitionName)
{
    var data = new DataCacheStorageData
    {
        DatabaseInstanceName = instanceName,
        PartitionName = partitionName
    };
    var store = new DataBackingStore();
    store.Initialize(new TestCachingConfigurationView(data, Context));
    return store;
}
// Adding with an existing key must overwrite the value both in memory and in the
// persisted backing store.
public void AddSameKey()
{
    cache.Add("Add1", "12345");
    cache.Add("Add1", "23456");

    Assert.AreEqual("23456", cache.GetData("Add1"));

    // The database row must hold the second value as well.
    Hashtable inDatabaseItems = CreateDataBackingStore().Load();
    Assert.AreEqual("23456", ((CacheItem)inDatabaseItems["Add1"]).Value);
}
// Removing an item must clear it from memory and from the backing database.
public void CacheRemove()
{
    cache.Add("Remove1", "98761");
    cache.Remove("Remove1");

    Assert.IsNull(cache.GetData("Remove1"));

    Hashtable inDatabaseItems = CreateDataBackingStore().Load();
    Assert.AreEqual(0, inDatabaseItems.Count);
}
/// <summary>
/// Creates the first index document for every node that does not have one yet.
/// </summary>
private static void SaveInitialIndexDocuments()
{
    LogWriteLine("Create initial index documents.");

    var missingIds = DataProvider.LoadIdsOfNodesThatDoNotHaveIndexDocument();
    foreach (var node in Node.LoadNodes(missingIds))
    {
        DataBackingStore.SaveIndexDocument(node);
        LogWriteLine("  ", node.Path);
    }

    LogWriteLine("Ok.");
}
/// <summary>
/// Creates the first index document (without binary extraction) for every node
/// that does not have one yet.
/// </summary>
private static void SaveInitialIndexDocuments()
{
    LogWriteLine("Create initial index documents.");

    var idSet = SenseNet.ContentRepository.Storage.Data.DataProvider.LoadIdsOfNodesThatDoNotHaveIndexDocument();
    foreach (var node in Node.LoadNodes(idSet))
    {
        bool hasBinary;
        DataBackingStore.SaveIndexDocument(node, false, out hasBinary);
        LogWriteLine("  ", node.Path);
    }

    LogWriteLine("Ok.");
}
/// <summary>
/// Extends a Content version with text extract (legacy Lucene path): appends the
/// new extract to the stored AllText field of the serialized indexDocumentInfo,
/// persists it, invalidates the distributed cache entry and refreshes the index.
/// </summary>
/// <param name="versionId">Id of the content version to extend.</param>
/// <param name="textExtract">Text to append to the existing AllText value.</param>
public static void AddTextExtract(int versionId, string textExtract)
{
    // 1: load and deserialize the stored indexDocumentInfo.
    // SECURITY NOTE(review): BinaryFormatter is unsafe on untrusted input; tolerated
    // here only because the payload comes from our own database.
    var docData = StorageContext.Search.LoadIndexDocumentByVersionId(versionId);
    var buffer = docData.IndexDocumentInfoBytes;
    var docStream = new System.IO.MemoryStream(buffer);
    var formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
    var info = (IndexDocumentInfo)formatter.Deserialize(docStream);

    // 2: original and new text extract concatenation (replace the AllText field).
    var allTextField = info.Fields.FirstOrDefault(f => f.Name == LucObject.FieldName.AllText);
    if (allTextField != null)
    {
        textExtract = allTextField.Value + textExtract;
        info.Fields.Remove(allTextField);
    }
    info.Fields.Add(new IndexFieldInfo(LucObject.FieldName.AllText, textExtract, FieldInfoType.StringField,
        Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED, Lucene.Net.Documents.Field.TermVector.NO));

    // 3: serialize and save the indexDocumentInfo.
    using (var docStream2 = new MemoryStream())
    {
        var formatter2 = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
        formatter2.Serialize(docStream2, info);
        docStream2.Flush();
        // FIX: use ToArray instead of GetBuffer. GetBuffer returns the stream's raw
        // internal buffer, which can be longer than the written data, so trailing
        // slack bytes would have been persisted with the document.
        var bytes = docStream2.ToArray();
        DataProvider.SaveIndexDocument(versionId, bytes);
    }

    // 4: distributed cache invalidation because of version timestamp.
    DataBackingStore.RemoveNodeDataFromCacheByVersionId(versionId);

    // 5: distributed lucene index update.
    var node = Node.LoadNodeByVersionId(versionId);
    if (node != null)
    {
        StorageContext.Search.SearchEngine.GetPopulator().RefreshIndex(node, false);
    }
}
public void SetUp()
{
    // Plain (unencrypted) store over the default database, flushed to start empty.
    var dbFactory = new DatabaseProviderFactory(ConfigurationSourceFactory.Create());
    db = dbFactory.CreateDefault();
    unencryptedBackingStore = new DataBackingStore(db, "encryptionTests", null);
    unencryptedBackingStore.Flush();

    // Persist a fresh symmetric key file for the encryption scenarios.
    ProtectedKey key = KeyManager.GenerateSymmetricKey(typeof(RijndaelManaged), DataProtectionScope.CurrentUser);
    using (FileStream keyStream = new FileStream("ProtectedKey.file", FileMode.Create))
    {
        KeyManager.Write(keyStream, key);
    }
}
public override void FixtureSetup()
{
    base.FixtureSetup();

    // Two stores sharing the "CachingDatabase" instance, isolated by partition name.
    firstCache = new DataBackingStore();
    firstCache.Initialize(new TestCachingConfigurationView(
        new DataCacheStorageData { DatabaseInstanceName = "CachingDatabase", PartitionName = "Partition1" },
        Context));

    secondCache = new DataBackingStore();
    secondCache.Initialize(new TestCachingConfigurationView(
        new DataCacheStorageData { DatabaseInstanceName = "CachingDatabase", PartitionName = "Partition2" },
        Context));
}
// Configuration-driven factory: a DataBackingStore bound to the given database
// instance and partition.
private DataBackingStore CreateBackingStore(string instanceName, string partitionName)
{
    var storageData = new DataCacheStorageData
    {
        DatabaseInstanceName = instanceName,
        PartitionName = partitionName
    };
    var result = new DataBackingStore();
    result.Initialize(new TestCachingConfigurationView(storageData, Context));
    return result;
}
// Creates a fresh, uninitialized DataBackingStore before each test.
public void Setup()
{
    _backer = new DataBackingStore();
}
// Store over the local SQLEXPRESS "Caching" database, partition "Partition1".
private DataBackingStore CreateDataBackingStore()
{
    var database = new SqlDatabase(@"server=(local)\SQLEXPRESS;database=Caching;Integrated Security=true");
    return new DataBackingStore(database, "Partition1", null);
}
// Configuration-driven store over "CachingDatabase", partition "Partition1".
private DataBackingStore CreateDataBackingStore()
{
    var storageData = new DataCacheStorageData
    {
        DatabaseInstanceName = "CachingDatabase",
        PartitionName = "Partition1"
    };
    var result = new DataBackingStore();
    result.Initialize(new TestCachingConfigurationView(storageData, Context));
    return result;
}
// Store over the local SQLEXPRESS "Caching" database for the given partition.
// NOTE(review): instanceName is unused; kept for signature compatibility.
private DataBackingStore CreateBackingStore(string instanceName, string partitionName)
{
    var database = new SqlDatabase(@"server=(local)\SQLEXPRESS;database=Caching;Integrated Security=true");
    return new DataBackingStore(database, partitionName, null);
}