internal bool TryGetValue(Guid objId, out ObjectIdRecord value)
{
    IndexKey key = new IndexKey();
    key.Id = objId;
    return _index.TryGetValue(key, out value);
}
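A lookup with a freshly constructed key, as above, only works when IndexKey compares by value rather than by reference. A minimal sketch of such a key type, assuming a Guid Id is its only field (the Equals/GetHashCode bodies here are illustrative, not taken from the original source):

internal sealed class IndexKey : IEquatable<IndexKey>
{
    public Guid Id { get; set; }

    public bool Equals(IndexKey other) => other != null && other.Id == Id;

    public override bool Equals(object obj) => Equals(obj as IndexKey);

    // The hash must agree with Equals so dictionary buckets line up.
    public override int GetHashCode() => Id.GetHashCode();
}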
public IObservable<MessageItem> LoadAll(string index = AllTweets, long begin = 0, long end = -1)
{
    var key = new IndexKey(this, index, false);
    var observable = redis.Client.GetRecords<TweetData>(key, begin, end);
    return observable
        .SelectMany(
            item => Observable.Start(
                async () =>
                {
                    var message = await ConstructMessage(item).ConfigureAwait(false);
                    if (message.Data.IsRetweet && ResolveRetweets)
                    {
                        var id = $"Message{message.Data.RetweetedId}";
                        var retweet = await cache.GetOrCreateAsync(
                            id,
                            async cacheItem =>
                            {
                                cacheItem.SlidingExpiration = TimeSpan.FromMinutes(1);
                                var retweetData = await LoadMessage(message.Data.RetweetedId).ConfigureAwait(false);
                                return await ConstructMessage(retweetData).ConfigureAwait(false);
                            }).ConfigureAwait(false);
                        message.Retweet = retweet;
                    }

                    return message;
                }))
        .Merge();
}
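A minimal consumption sketch for the stream above (the source variable and what is printed are illustrative):

var subscription = source.LoadAll()
    .Subscribe(
        message => Console.WriteLine(message.Data.RetweetedId),
        error => Console.WriteLine("Stream failed: " + error));

Because each item is materialized through Observable.Start and flattened with Merge, results can complete out of index order; a caller that needs the original ordering would need a sequential flattening (e.g. Concat) instead.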
private DataFile GetDataFile(IndexKey key)
{
    return DataFolders.Instances.GetFolder(key.Instance)
        .GetFolder("Indexes")
        .GetFolder("RebuildInfos", DataFileFormats.Json)
        .GetFile(key.ModelType.Name
                 + (String.IsNullOrEmpty(key.Culture.Name) ? String.Empty : "-" + key.Culture.Name)
                 + ".json");
}
public void Remove(IndexKey indexKey)
{
    using (IndexReader indexReader = IndexReader.Open(LuceneDirectory, false))
    {
        indexReader.DeleteDocuments(new Term("key", indexKey.Key));
    }
}
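Deleting through an IndexReader opened in write mode is the older Lucene idiom; the same removal can be expressed on a writer, which also batches well with other changes. A sketch, assuming Lucene.NET 3.x and the NOT_ANALYZED "key" field written by the Add method later in this section:

using (IndexWriter writer = new IndexWriter(LuceneDirectory, Analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
{
    // Matches exactly because "key" is indexed NOT_ANALYZED.
    writer.DeleteDocuments(new Term("key", indexKey.Key));
    writer.Commit();
}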
/// <inheritdoc />
public IEnumerable<Edge> AllWhere(Edge edge)
{
    var index = this.IndexCoverage.SelectIndex(edge);
    IEnumerable<Edge> result;
    if (index.Item1 == IndexCoverage.None)
    {
        result = _edges;
    }
    else
    {
        IReadOnlyCollection<Edge> r;
        if (_index.TryGetValue(IndexKey.Create(edge, index.Item1.ToMask()), out r))
        {
            result = r;
        }
        else
        {
            result = Array.Empty<Edge>();
        }
    }

    if (index.Item2 != TripleMask.None)
    {
        result = result.Where(t => edge.EqualsWithMask(t, index.Item2));
    }

    return result;
}
internal void Remove(Guid objId)
{
    IndexKey key = new IndexKey();
    key.Id = objId;
    _index.Remove(key);
    _file.UpdateRecordInMft();
}
protected void UpdateTaskInfo(Action<RebuildInfo> action)
{
    var indexKey = new IndexKey(Context.Instance, Context.ModelType, Context.Culture);
    var info = _taskInfoManager.Load(indexKey) ?? new RebuildInfo();
    action(info);
    _taskInfoManager.Save(indexKey, info);
}
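A typical call site mutates the loaded record in place and relies on the method to persist it; a sketch, assuming RebuildInfo exposes a writable timestamp (the LastRebuild property is hypothetical):

UpdateTaskInfo(info => info.LastRebuild = DateTime.UtcNow); // hypothetical property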
/// <summary>
/// Removes the given edge from every covering index.
/// </summary>
/// <param name="triple">The edge to remove.</param>
private void RemoveFromIndex(Edge triple)
{
    Contract.Requires(triple != null);
    foreach (var index in _indexes)
    {
        _index.Remove(IndexKey.Create(triple, index.ToMask()), triple);
    }
}
public override bool Equals(object obj)
{
    IndexKey test = obj as IndexKey;
    if (test != null)
    {
        return test.object_id == object_id && test.index_id == index_id;
    }

    return false;
}
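An Equals override like this needs a matching GetHashCode, or hash-based containers (including the ToLookup groupings over IndexKey later in this section) will place equal keys in different buckets. A minimal sketch, assuming both fields are ints:

public override int GetHashCode()
{
    unchecked
    {
        // Combine both fields so keys that differ only in index_id hash apart.
        return (object_id * 397) ^ index_id;
    }
}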
private void SaveIndices(IList<Token> tokens, IList<string> linkTexts, UrlFile urlFile, ulong oldUrlFileId)
{
    var invertedIndices = new List<Index>();
    var postingLists = new List<PostingList>();
    foreach (var token in tokens)
    {
        var key = new IndexKey
        {
            Word = token.Word,
            UrlFileId = urlFile.UrlFileId,
        };

        var weight = ScoringHelper.CalculateIndexWeight(
            urlFile.Title,
            urlFile.TextContent,
            urlFile.Url,
            DateTime.FromBinary((long)urlFile.PublishDate),
            token.OccurencesInTitle,
            token.OccurencesInLinks,
            linkTexts,
            token.Word,
            token.WordFrequency,
            token.Positions);

        var index = new Index
        {
            Key = key,
            WordFrequency = token.WordFrequency,
            OccurencesInTitle = token.OccurencesInTitle,
            OccurencesInLinks = token.OccurencesInLinks,
            OccurencesInHeaders = token.OccurencesInHeaders,
            Weight = weight,
        };
        index.Positions.AddRange(token.Positions);
        invertedIndices.Add(index);

        var postingList = new PostingList
        {
            Word = token.Word,
            WordFrequency = token.WordFrequency,
            DocumentFrequency = 1,
            IsAdd = true,
        };
        var posting = new Posting
        {
            UrlFileId = urlFile.UrlFileId,
            Weight = weight,
        };
        postingList.Postings.Add(posting);
        postingLists.Add(postingList);
    }

    mConfig.PostingListStore.SavePostingLists(urlFile.UrlFileId, postingLists);
    mConfig.InvertedIndexStore.SaveIndices(urlFile.UrlFileId, invertedIndices);
}
public BPlusIndex(IndexConfiguration configurations, string collectionName, string path, CollectionIndexManager parent)
{
    _configuration = configurations; // configurations.Clone() as IndexConfiguration;
    _indexKey = new UniIndexKey(_configuration.Attributes);
    order = _configuration.Attributes.SortOrder;
    _path = DirectoryUtil.GetIndexPath(path, collectionName, _configuration.IndexName);
    _parent = parent;
}
public IObservable<UserItem> LoadAllUsers(int begin = 0, int end = -1)
{
    var key = new IndexKey(this, AllUserTag, false);
    var observable = redis.Client.GetRecords<TweetUser>(key, begin, end);
    return observable.Select(
        item =>
        {
            var userItem = new UserItem(item);
            return cache.GetOrCreate(item.Id.ToString(), cacheEntry => userItem);
        });
}
private int GetParentIndex(MetadataIdentity identity, IReferenceKey key)
{
    IndexKey parentKey = new IndexKey(identity, key);
    if (this.parentMap.TryGetValue(parentKey, out int parentIndex))
    {
        return parentIndex;
    }

    lock (this.state)
    {
        return this.parentMap.GetOrAdd(parentKey, this.parentMap.Count);
    }
}
internal void Add(Guid objId, FileRecordReference mftRef, Guid birthId, Guid birthVolumeId, Guid birthDomainId)
{
    IndexKey newKey = new IndexKey();
    newKey.Id = objId;

    ObjectIdRecord newData = new ObjectIdRecord();
    newData.MftReference = mftRef;
    newData.BirthObjectId = birthId;
    newData.BirthVolumeId = birthVolumeId;
    newData.BirthDomainId = birthDomainId;

    _index[newKey] = newData;
    _file.UpdateRecordInMft();
}
private async Task<LightDocument> GetById(string id, IDocument original)
{
    var index = new IndexKey(this, id, true);
    var result = await manager.Client.GetRecords<LightDocument>(index).LastOrDefaultAsync();
    if (result?.Text == original.Text)
    {
        await local.Save(result).ConfigureAwait(false);
        return result;
    }

    log.LogWarning("Mismatch in document text: {0}", id);
    return null;
}
public void Add(IndexKey indexKey, string data)
{
    Document doc = new Document();
    doc.Add(new Field("key", indexKey.Key, Field.Store.YES, Field.Index.NOT_ANALYZED, Field.TermVector.NO));
    doc.Add(new Field("scope", indexKey.Scope, Field.Store.YES, Field.Index.NOT_ANALYZED, Field.TermVector.NO));
    doc.Add(new Field("id", indexKey.EntityId, Field.Store.YES, Field.Index.NOT_ANALYZED, Field.TermVector.NO));
    doc.Add(new Field("data", data, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
    if (IndexWriter != null)
    {
        IndexWriter.AddDocument(doc, Analyzer);
    }
    else
    {
        using (IndexWriter indexWriter = GetIndexWriter())
        {
            indexWriter.AddDocument(doc, Analyzer);
        }
    }
}
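Note the field choices: "key", "scope", and "id" are stored and NOT_ANALYZED (exact-match identifiers), while "data" is ANALYZED but not stored (searchable, retrieved from elsewhere). A query sketch under those assumptions, targeting Lucene.NET 3.x:

using (var searcher = new IndexSearcher(LuceneDirectory, true)) // read-only
{
    var parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, "data", Analyzer);
    TopDocs hits = searcher.Search(parser.Parse("some text"), 10);
    foreach (ScoreDoc hit in hits.ScoreDocs)
    {
        Document doc = searcher.Doc(hit.Doc);
        Console.WriteLine(doc.Get("key")); // stored fields come back; "data" does not
    }
}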
protected override void RenderAttributes(XElement element, FoRenderOptions options)
{
    base.RenderAttributes(element, options);
    if (Id.HasValue())
    {
        element.Add(new XAttribute("id", Id));
    }

    if (IndexClass.HasValue())
    {
        element.Add(new XAttribute("index-class", IndexClass));
    }

    if (IndexKey.HasValue())
    {
        element.Add(new XAttribute("index-key", IndexKey));
    }

    if (FlowName.HasValue())
    {
        element.Add(new XAttribute("flow-name", FlowName));
    }
}
private int GetChildIndex(int parentIndex, MetadataIdentity identity, IReferenceKey key)
{
    IndexKey childKey = new IndexKey(identity, key);
    if (this.childMap.TryGetValue(childKey, out int childIndex))
    {
        return childIndex;
    }

    lock (this.state)
    {
        if (!this.childMap.TryGetValue(childKey, out childIndex))
        {
            childIndex = this.childSizes.TryGetValue(parentIndex);
            this.childSizes[parentIndex] = childIndex + 1;
            this.childMap.TryAdd(childKey, childIndex);
        }

        return childIndex;
    }
}
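GetParentIndex and GetChildIndex share the same check-lock-recheck shape: a lock-free read for the common hit path, then a second read under the lock so two racing threads cannot assign the same slot twice. The shape in isolation, as a sketch using System.Collections.Concurrent (the map and slot names are illustrative):

private readonly object gate = new object();
private readonly ConcurrentDictionary<string, int> slots = new ConcurrentDictionary<string, int>();

private int GetSlot(string key)
{
    // Fast path: no lock when the slot is already assigned.
    if (slots.TryGetValue(key, out int slot))
    {
        return slot;
    }

    lock (gate)
    {
        // Re-check under the lock; another thread may have won the race.
        return slots.GetOrAdd(key, slots.Count);
    }
}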
public virtual async Task Setup()
{
    redisInstance = new RedisInside.Redis(i => i.Port(6666).LogTo(item => Global.Logger.LogDebug(item)));
    var config = XDocument.Load(Path.Combine(TestContext.CurrentContext.TestDirectory, @"Config\redis.config"))
        .XmlDeserialize<RedisConfiguration>();
    var provider = new ModuleHelper(config).Provider;
    Redis = await provider.GetService<IAsyncServiceFactory<IRedisLink>>().GetService(true);
    Redis.Multiplexer.Flush();
    Resilience = provider.GetService<IResilience>();
    Key = new ObjectKey("Key1");
    Routing = new Identity();
    Routing.ApplicationId = "Test";
    Routing.Environment = "DEV";
    Repository = new Mock<ILimitedSizeRepository>();
    Repository.Setup(item => item.Name).Returns("Test");
    Repository.Setup(item => item.Size).Returns(2);
    RepositoryKey = new RepositoryKey(Repository.Object, Key);
    ListAll = new IndexKey(Repository.Object, "All", true);
    ListAll2 = new IndexKey(Repository.Object, "All2", true);
}
void bindData()
{
    List<DataScript> tempsource = dataSource.ToList();
    if (FileType.SelectedIndex != 0)
    {
        tempsource = tempsource.Where(it => it.FileType.IndexOf(FileType.SelectedValue.ToString()) > -1).ToList();
    }

    if (MidsScriptCode != null && !string.IsNullOrEmpty(MidsScriptCode.getText().Trim()))
    {
        tempsource = tempsource.Where(it => it.MidsScriptCode.IndexOf(MidsScriptCode.getText().Trim()) > -1).ToList();
    }

    if (MidsScriptName != null && !string.IsNullOrEmpty(MidsScriptName.getText().Trim()))
    {
        tempsource = tempsource.Where(it => it.MidsScriptName.IndexOf(MidsScriptName.getText().Trim()) > -1).ToList();
    }

    if (IndexKey != null && !string.IsNullOrEmpty(IndexKey.getText().Trim()))
    {
        tempsource = tempsource.Where(it => it.IndexKey.IndexOf(IndexKey.getText().Trim()) > -1).ToList();
    }

    dataGrid.DataContext = tempsource;
}
public static byte[] TilesToBinary(Tiles tiles)
{
    var tilesIndexList = new List<ushort>();
    var keyStorage = new Dictionary<string, IndexKey>();
    var indexBytes = new List<byte[]>();
    int maxIndex = 0;

    // Extract each tile's sprite name and assign it a header index.
    for (int n = 0; n < tiles.GetCount(); ++n)
    {
        string name = "defaultTile";
        if (tiles[n] != null && tiles[n].GetSprite() != null)
        {
            name = tiles[n].GetSprite().name;
        }

        IndexKey key;
        if (!keyStorage.TryGetValue(name, out key))
        {
            key = new IndexKey() { FileName = name, Index = maxIndex };
            keyStorage.Add(name, key);
            maxIndex++;
        }

        tilesIndexList.Add((ushort)key.Index);
    }

    // Serialize the header entries.
    foreach (var indexKey in keyStorage.Values)
    {
        indexBytes.Add(Serializer.SerializeToBytes(indexKey));
    }

    using (var stream = new MemoryStream())
    {
        using (BinaryWriter binaryWriter = new BinaryWriter(stream))
        {
            // Write the resource index table.
            // Number of resource indexes (2 bytes).
            binaryWriter.Write((ushort)indexBytes.Count);
            for (int n = 0; n < indexBytes.Count; ++n)
            {
                var indexByte = indexBytes[n];
                // Byte length (2 bytes).
                binaryWriter.Write((ushort)indexByte.Length);
                // Index binary (variable length).
                binaryWriter.Write(indexByte);
            }

            // Row, Col, BlockSize (2 + 2 + 2 bytes).
            binaryWriter.Write((ushort)tiles.MaxRow);
            binaryWriter.Write((ushort)tiles.MaxCol);
            binaryWriter.Write((ushort)tiles.BlockSize);

            for (int n = 0; n < tilesIndexList.Count; ++n)
            {
                // Tile index (2 bytes).
                binaryWriter.Write(tilesIndexList[n]);
            }
        }

        return stream.ToArray();
    }
}
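The writer above implies a simple layout: a ushort header count, length-prefixed header blobs, three ushort dimensions, then one ushort tile index per tile. A sketch of the matching reader, assuming Serializer has a DeserializeFromBytes counterpart and that the tile count equals MaxRow * MaxCol (both are assumptions, not confirmed by the snippet):

public static void ReadTilesBinary(byte[] data)
{
    using (var stream = new MemoryStream(data))
    using (var reader = new BinaryReader(stream))
    {
        // Resource index table: count, then length-prefixed entries.
        ushort headerCount = reader.ReadUInt16();
        var keys = new List<IndexKey>(headerCount);
        for (int n = 0; n < headerCount; ++n)
        {
            ushort length = reader.ReadUInt16();
            keys.Add(Serializer.DeserializeFromBytes<IndexKey>(reader.ReadBytes(length))); // assumed counterpart API
        }

        // Dimensions, then one index per tile.
        ushort maxRow = reader.ReadUInt16();
        ushort maxCol = reader.ReadUInt16();
        ushort blockSize = reader.ReadUInt16();
        var tileIndices = new ushort[maxRow * maxCol];
        for (int n = 0; n < tileIndices.Length; ++n)
        {
            tileIndices[n] = reader.ReadUInt16();
        }
    }
}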
public RebuildInfo Load(IndexKey indexKey)
{
    return GetDataFile(indexKey).Read<RebuildInfo>();
}
public void Document_Create()
{
    var now = DateTime.Now;
    var cid = Guid.NewGuid();

    // Create a typed object.
    var orderObject = new Order
    {
        OrderKey = 123,
        CustomerId = cid,
        Date = now,
        Items = new List<OrderItem>()
        {
            new OrderItem { Qtd = 3, Description = "Package", Unit = 99m }
        }
    };

    // Create the same object, but using a BsonDocument.
    var orderDoc = new BsonDocument();
    orderDoc.Id = 123;
    orderDoc["CustomerId"] = cid;
    orderDoc["Date"] = now;
    orderDoc["Items"] = new BsonArray();

    var i = new BsonObject();
    i["Qtd"] = 3;
    i["Description"] = "Package";
    i["Unit"] = 99m;
    orderDoc["Items"].AsArray.Add(i);

    // Serialize both and get the indexKey for each one.
    var bytesObject = BsonSerializer.Serialize(orderObject);
    var keyObject = new IndexKey(BsonSerializer.GetIdValue(orderObject));
    var bytesDoc = BsonSerializer.Serialize(orderDoc);
    var keyDoc = new IndexKey(BsonSerializer.GetIdValue(orderDoc));

    // Revert both: create an object from the document and a document from the object.
    var revertObject = BsonSerializer.Deserialize<Order>(keyDoc, bytesDoc);
    var revertDoc = BsonSerializer.Deserialize<BsonDocument>(keyObject, bytesObject);

    // Compare properties.
    Assert.AreEqual(revertObject.OrderKey, revertDoc.Id);
    Assert.AreEqual(revertObject.CustomerId, revertDoc["CustomerId"].AsGuid);
    Assert.AreEqual(revertObject.Date, revertDoc["Date"].AsDateTime);
    Assert.AreEqual(revertObject.Items[0].Unit, revertDoc["Items"][0]["Unit"].AsDecimal);

    // Read individual fields.
    Assert.AreEqual(now, BsonSerializer.GetFieldValue(revertObject, "Date"));
    Assert.AreEqual(now, BsonSerializer.GetFieldValue(revertDoc, "Date"));
    Assert.AreEqual(cid, BsonSerializer.GetFieldValue(revertObject, "CustomerId"));
    Assert.AreEqual(cid, BsonSerializer.GetFieldValue(revertDoc, "CustomerId"));
    Assert.AreEqual(null, BsonSerializer.GetFieldValue(revertObject, "Date2"));
    Assert.AreEqual(null, BsonSerializer.GetFieldValue(revertDoc, "Date2"));
}
public void Setup()
{
    Randomizer = Randomizer.CreateRandomizer();
    ExpectedBasicEntity1 = new BasicEntity { Id = Randomizer.Next() };
    ExpectedBasicEntity2 = new BasicEntity { Id = Randomizer.Next() };
    ExpectedIndexedEntity = new IndexedEntity { Id = Randomizer.Next(), IndexedValue = Randomizer.GetString() };
    ExpectedAuditedEntity = new AuditedEntity { Id = Randomizer.Next() };
    ExpectedAuditedEntityAuditLog = new AuditLog();
    ExpectedIndexKey = new IndexKey<string>() { Value = Randomizer.GetString() };

    MockDataStream = new Mock<IDataWorker>();
    MockDataStream.Setup(x => x.Exists<BasicEntity>(ExpectedBasicEntity1.Id)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Read<BasicEntity>(ExpectedBasicEntity1.Id)).ReturnsAsync(ExpectedBasicEntity1);
    MockDataStream.Setup(x => x.Exists<BasicEntity>(ExpectedIndexedEntity.Id)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Exists<IndexedEntity>(ExpectedIndexedEntity.Id)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Read<BasicEntity>(ExpectedIndexedEntity.Id)).ReturnsAsync(ExpectedIndexedEntity);
    MockDataStream.Setup(x => x.Read<IndexedEntity>(ExpectedIndexedEntity.Id)).ReturnsAsync(ExpectedIndexedEntity);
    MockDataStream.Setup(x => x.Exists<BasicEntity>(ExpectedAuditedEntity.Id)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Exists<AuditedEntity>(ExpectedAuditedEntity.Id)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Read<BasicEntity>(ExpectedAuditedEntity.Id)).ReturnsAsync(ExpectedAuditedEntity);
    MockDataStream.Setup(x => x.Read<AuditedEntity>(ExpectedAuditedEntity.Id)).ReturnsAsync(ExpectedAuditedEntity);
    MockDataStream.Setup(x => x.WriteAndCommit(ExpectedBasicEntity1.Id, ExpectedBasicEntity1)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.WriteAndCommit(ExpectedIndexedEntity.Id, ExpectedIndexedEntity)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Write(ExpectedAuditedEntity.Id, ExpectedAuditedEntity)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Commit<BasicEntity>(ExpectedAuditedEntity.Id)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Delete<BasicEntity>(ExpectedBasicEntity1.Id)).ReturnsAsync(true);
    MockDataStream.Setup(x => x.Delete<AuditedEntity>(ExpectedAuditedEntity.Id)).ReturnsAsync(true);

    ExpectedNextBasicEntityId = Randomizer.Next();
    MockDataStream.Setup(x => x.NextId<BasicEntity>()).Returns(ExpectedNextBasicEntityId);
    MockDataStream.Setup(x => x.ReadAll<BasicEntity>()).Returns(ExpectedBasicEntityList);

    MockIndexer = new Mock<IIndexWorker>();
    MockIndexer.Setup(x => x.IndexExist<IndexedEntity>("IndexedValue")).ReturnsAsync(true);
    MockIndexer.Setup(x => x.GetIdsFromIndex<IndexedEntity, string>("IndexedValue", ExpectedIndexedEntity.IndexedValue)).Returns(ExpectedIndexEntityList);
    MockIndexer.Setup(x => x.GetKeysFromIndex<IndexedEntity, string>("IndexedValue")).Returns(ExpectedIndexKeysEntityList);
    MockIndexer.Setup(x => x.Index(ExpectedBasicEntity1)).ReturnsAsync(true);
    MockIndexer.Setup(x => x.Index(ExpectedIndexedEntity)).ReturnsAsync(true);
    MockIndexer.Setup(x => x.Index(ExpectedAuditedEntity)).ReturnsAsync(true);
    MockIndexer.Setup(x => x.Unindex(ExpectedBasicEntity1)).ReturnsAsync(true);
    MockIndexer.Setup(x => x.Unindex(ExpectedAuditedEntity)).ReturnsAsync(true);

    MockAudit = new Mock<IAuditWorker>();
    MockAudit.Setup(x => x.CreationEvent(ExpectedAuditedEntity)).ReturnsAsync(true);
    MockAudit.Setup(x => x.UpdateEvent(ExpectedAuditedEntity, ExpectedAuditedEntity)).ReturnsAsync(true);
    MockAudit.Setup(x => x.DeleteEvent(ExpectedAuditedEntity)).ReturnsAsync(true);
    MockAudit.Setup(x => x.CommitEvents(ExpectedAuditedEntity)).ReturnsAsync(true);
    MockAudit.Setup(x => x.ReadAllEvents<AuditedEntity>(ExpectedAuditedEntity.Id)).ReturnsAsync(ExpectedAuditedEntityAuditLog);

    MockLogger = new Mock<ILogger>();
    DataManager = new DataManager(MockDataStream.Object, MockIndexer.Object, MockAudit.Object, MockLogger.Object);
}
public void Save(IndexKey indexKey, RebuildInfo info)
{
    GetDataFile(indexKey).Write(info);
}
private async Task AddTablesAsync(IDbConnection connection, List<DbObject> results)
{
    var tables = await connection.QueryAsync<Table>(
        @"WITH [clusteredIndexes] AS (
            SELECT [name], [object_id] FROM [sys].[indexes] WHERE [type_desc]='CLUSTERED'
        ), [identityColumns] AS (
            SELECT [object_id], [name] FROM [sys].[columns] WHERE [is_identity]=1
        )
        SELECT
            [t].[name] AS [Name],
            SCHEMA_NAME([t].[schema_id]) AS [Schema],
            [t].[object_id] AS [Id],
            [c].[name] AS [ClusteredIndex],
            [i].[name] AS [IdentityColumn],
            (SELECT SUM(row_count) FROM [sys].[dm_db_partition_stats]
             WHERE [object_id]=[t].[object_id] AND [index_id] IN (0, 1)) AS [RowCount]
        FROM
            [sys].[tables] [t]
            LEFT JOIN [clusteredIndexes] [c] ON [t].[object_id]=[c].[object_id]
            LEFT JOIN [identityColumns] [i] ON [t].[object_id]=[i].[object_id]");

    var columns = await connection.QueryAsync<Column>(
        @"WITH [pkColumns] AS (
            SELECT [xcol].[object_id], [col].[name], [col].[column_id]
            FROM [sys].[index_columns] [xcol]
            INNER JOIN [sys].[indexes] [x] ON [xcol].[object_id]=[x].[object_id] AND [xcol].[index_id]=[x].[index_id]
            INNER JOIN [sys].[columns] [col] ON [xcol].[object_id]=[col].[object_id] AND [xcol].[column_id]=[col].[column_id]
            INNER JOIN [sys].[tables] [t] ON [x].[object_id]=[t].[object_id]
            WHERE [t].[type_desc]='USER_TABLE' AND [x].[is_primary_key]=1
        )
        SELECT
            [col].[object_id] AS [ObjectId],
            [col].[name] AS [Name],
            TYPE_NAME([col].[system_type_id]) AS [DataType],
            [col].[is_nullable] AS [IsNullable],
            [def].[definition] AS [DefaultValue],
            [col].[collation_name] AS [Collation],
            CASE
                WHEN TYPE_NAME([col].[system_type_id]) LIKE 'nvar%' AND [col].[max_length]>0 THEN ([col].[max_length]/2)
                WHEN TYPE_NAME([col].[system_type_id]) LIKE 'nvar%' AND [col].[max_length]=0 THEN -1
                ELSE [col].[max_length]
            END AS [MaxLength],
            [col].[precision] AS [Precision],
            [col].[scale] AS [Scale],
            [col].[column_id] AS [Position],
            [calc].[definition] AS [Expression],
            CASE WHEN [pk].[name] IS NOT NULL THEN 1 ELSE 0 END AS [InPrimaryKey]
        FROM
            [sys].[columns] [col]
            INNER JOIN [sys].[tables] [t] ON [col].[object_id]=[t].[object_id]
            LEFT JOIN [sys].[default_constraints] [def] ON [col].[default_object_id]=[def].[object_id]
            LEFT JOIN [sys].[computed_columns] [calc] ON [col].[object_id]=[calc].[object_id] AND [col].[column_id]=[calc].[column_id]
            LEFT JOIN [pkColumns] [pk] ON [col].[object_id]=[pk].[object_id] AND [col].[column_id]=[pk].[column_id]
        WHERE
            [t].[type_desc]='USER_TABLE'");

    var indexes = await connection.QueryAsync<Index>(
        @"SELECT
            [x].[object_id] AS [ObjectId],
            [x].[name] AS [Name],
            CONVERT(bit, CASE WHEN [x].[type_desc]='CLUSTERED' THEN 1 ELSE 0 END) AS [IsClustered],
            CASE
                WHEN [x].[is_primary_key]=1 THEN 1
                WHEN [x].[is_unique]=1 AND [x].[is_unique_constraint]=0 THEN 2
                WHEN [x].[is_unique_constraint]=1 THEN 3
                WHEN [x].[is_unique]=0 THEN 4
            END AS [Type],
            [x].[index_id] AS [InternalId]
        FROM
            [sys].[indexes] [x]
            INNER JOIN [sys].[tables] [t] ON [x].[object_id]=[t].[object_id]
        WHERE
            [t].[type_desc]='USER_TABLE' AND [x].[type]<>0");

    var indexCols = await connection.QueryAsync<IndexColumnResult>(
        @"SELECT
            [xcol].[object_id], [xcol].[index_id], [col].[name], [xcol].[key_ordinal], [xcol].[is_descending_key]
        FROM
            [sys].[index_columns] [xcol]
            INNER JOIN [sys].[indexes] [x] ON [xcol].[object_id]=[x].[object_id] AND [xcol].[index_id]=[x].[index_id]
            INNER JOIN [sys].[columns] [col] ON [xcol].[object_id]=[col].[object_id] AND [xcol].[column_id]=[col].[column_id]
            INNER JOIN [sys].[tables] [t] ON [x].[object_id]=[t].[object_id]
        WHERE
            [t].[type_desc]='USER_TABLE'");

    var columnLookup = columns.ToLookup(row => row.ObjectId);
    var indexLookup = indexes.ToLookup(row => row.ObjectId);
    var indexColLookup = indexCols.ToLookup(row => new IndexKey() { object_id = row.object_id, index_id = row.index_id });

    foreach (var x in indexes)
    {
        var indexKey = new IndexKey() { object_id = x.ObjectId, index_id = x.InternalId };
        x.Columns = indexColLookup[indexKey].Select(row => new Index.Column()
        {
            Name = row.name,
            Order = row.key_ordinal,
            SortDirection = (row.is_descending_key) ? SortDirection.Descending : SortDirection.Ascending
        });
    }

    foreach (var tbl in tables)
    {
        tbl.Columns = columnLookup[tbl.Id].ToArray();
        tbl.Indexes = indexLookup[tbl.Id].ToArray();
    }

    results.AddRange(tables);
}
private IndexKey FindIndexOn(IList<Expression> expressions, Expression expression)
{
    // The expressions must all be simple fetch-variable operations.
    TableName tableName = null;
    List<Variable> vars = new List<Variable>(expressions.Count);
    for (int i = 0; i < expressions.Count; ++i)
    {
        Expression var_op = expressions[i];
        if (var_op is FetchVariableExpression)
        {
            Variable v = ((FetchVariableExpression)var_op).Variable;
            v = Dereference(expression, v);
            // If the variable can't be dereferenced, give up.
            if (v == null)
                return null;
            if (tableName == null)
            {
                tableName = v.TableName;
            }
            else if (!tableName.Equals(v.TableName))
            {
                // Not a common table name, so no single index can apply.
                return null;
            }
            else if (vars.Contains(v))
            {
                // Repeated variables make an index impossible.
                return null;
            }
            vars.Add(v);
        }
        else
        {
            return null;
        }
    }

    // Single-variable case (easy).
    if (vars.Count == 1)
    {
        IndexKey indexVal = new IndexKey();
        indexVal.IndexTable = expressions[0].IndexTableName;
        indexVal.IndexName = expressions[0].IndexCandidate;
        return indexVal;
    }

    // Otherwise there is a common table name and all the expressions are variables.
    IIndexSetDataSource[] indexInfo = transaction.GetTableIndexes(tableName);
    for (int i = 0; i < indexInfo.Length; ++i)
    {
        // Get the collation.
        IndexCollation collation = indexInfo[i].Collation;
        // Does the index cover exactly this many columns?
        if (collation.Columns.Length == vars.Count)
        {
            bool match = true;
            foreach (Variable v in vars)
            {
                if (!collation.ContainsColumn(v.Name))
                {
                    match = false;
                    break;
                }
            }
            // Found a match, so return the index key.
            if (match)
            {
                IndexKey indexVal = new IndexKey();
                indexVal.IndexTable = tableName;
                indexVal.IndexName = indexInfo[i].Name;
                return indexVal;
            }
        }
    }

    // No index discovered.
    return null;
}
protected static async Task<IEnumerable<Table>> GetTablesAsync(IDbConnection connection)
{
    var tables = await connection.QueryAsync<Table>(
        @"WITH [clusteredIndexes] AS (
            SELECT [name], [object_id] FROM [sys].[indexes] WHERE [type_desc]='CLUSTERED'
        ), [identityColumns] AS (
            SELECT [object_id], [name] FROM [sys].[columns] WHERE [is_identity]=1
        )
        SELECT
            SCHEMA_NAME([t].[schema_id]) + '.' + [t].[name] AS [Name],
            [t].[object_id] AS [ObjectId],
            [c].[name] AS [ClusteredIndex],
            [i].[name] AS [IdentityColumn],
            (SELECT SUM(row_count) FROM [sys].[dm_db_partition_stats]
             WHERE [object_id]=[t].[object_id] AND [index_id] IN (0, 1)) AS [RowCount]
        FROM
            [sys].[tables] [t]
            LEFT JOIN [clusteredIndexes] [c] ON [t].[object_id]=[c].[object_id]
            LEFT JOIN [identityColumns] [i] ON [t].[object_id]=[i].[object_id]
        WHERE
            [t].[name] NOT IN ('__MigrationHistory', '__EFMigrationsHistory')");

    var columns = await connection.QueryAsync<Column>(
        @"WITH [identityColumns] AS (
            SELECT [object_id], [name] FROM [sys].[columns] WHERE [is_identity]=1
        ), [source] AS (
            SELECT
                [col].[object_id] AS [ObjectId],
                [col].[name] AS [Name],
                TYPE_NAME([col].[system_type_id]) AS [DataType],
                [col].[is_nullable] AS [IsNullable],
                [def].[definition] AS [DefaultValue],
                [col].[collation_name] AS [Collation],
                CASE
                    WHEN TYPE_NAME([col].[system_type_id]) LIKE 'nvar%' AND [col].[max_length]>0 THEN ([col].[max_length]/2)
                    WHEN TYPE_NAME([col].[system_type_id]) LIKE 'nvar%' AND [col].[max_length]=-1 THEN -1
                    ELSE NULL
                END AS [MaxLength],
                [col].[precision] AS [Precision],
                [col].[scale] AS [Scale],
                [col].[column_id] AS [InternalId],
                [calc].[definition] AS [Expression],
                CASE WHEN [ic].[name] IS NOT NULL THEN 1 ELSE 0 END AS [IsIdentity],
                [col].[system_type_id]
            FROM
                [sys].[columns] [col]
                INNER JOIN [sys].[tables] [t] ON [col].[object_id]=[t].[object_id]
                LEFT JOIN [sys].[default_constraints] [def] ON [col].[default_object_id]=[def].[object_id]
                LEFT JOIN [sys].[computed_columns] [calc] ON [col].[object_id]=[calc].[object_id] AND [col].[column_id]=[calc].[column_id]
                LEFT JOIN [identityColumns] [ic] ON [ic].[object_id]=[col].[object_id] AND [ic].[name]=[col].[name]
            WHERE
                [t].[type_desc]='USER_TABLE'
        )
        SELECT
            [ObjectId], [Name],
            CASE
                WHEN [system_type_id]=106 THEN [DataType] + '(' + CONVERT(varchar, [Precision]) + ',' + CONVERT(varchar, [Scale]) + ')'
                WHEN [MaxLength]=-1 THEN [DataType] + '(max)'
                WHEN [MaxLength] IS NULL THEN [DataType]
                ELSE [DataType] + '(' + CONVERT(varchar, [MaxLength]) + ')'
            END AS [DataType],
            [IsNullable], [DefaultValue], [Collation], [Precision], [InternalId], [Expression],
            CASE WHEN [Expression] IS NOT NULL THEN 1 ELSE 0 END AS [IsCalculated],
            CASE WHEN [IsIdentity]=1 THEN ' identity(1,1)' ELSE NULL END AS [TypeModifier]
        FROM [source]");

    var indexes = await connection.QueryAsync<Index>(
        @"SELECT
            [x].[object_id] AS [ObjectId],
            [x].[name] AS [Name],
            CONVERT(bit, CASE WHEN [x].[type_desc]='CLUSTERED' THEN 1 ELSE 0 END) AS [IsClustered],
            CASE
                WHEN [x].[is_primary_key]=1 THEN 1
                WHEN [x].[is_unique]=1 AND [x].[is_unique_constraint]=0 THEN 2
                WHEN [x].[is_unique_constraint]=1 THEN 3
                WHEN [x].[is_unique]=0 THEN 4
            END AS [Type],
            [x].[index_id] AS [InternalId]
        FROM
            [sys].[indexes] [x]
            INNER JOIN [sys].[tables] [t] ON [x].[object_id]=[t].[object_id]
        WHERE
            [t].[type_desc]='USER_TABLE' AND [x].[type]<>0");

    var indexCols = await connection.QueryAsync<IndexColumnResult>(
        @"SELECT
            [xcol].[object_id], [xcol].[index_id], [col].[name], [xcol].[key_ordinal], [xcol].[is_descending_key]
        FROM
            [sys].[index_columns] [xcol]
            INNER JOIN [sys].[indexes] [x] ON [xcol].[object_id]=[x].[object_id] AND [xcol].[index_id]=[x].[index_id]
            INNER JOIN [sys].[columns] [col] ON [xcol].[object_id]=[col].[object_id] AND [xcol].[column_id]=[col].[column_id]
            INNER JOIN [sys].[tables] [t] ON [x].[object_id]=[t].[object_id]
        WHERE
            [t].[type_desc]='USER_TABLE'");

    var columnLookup = columns.ToLookup(row => row.ObjectId);
    var indexLookup = indexes.ToLookup(row => row.ObjectId);
    var indexColLookup = indexCols.ToLookup(row => new IndexKey() { object_id = row.object_id, index_id = row.index_id });

    foreach (var x in indexes)
    {
        var indexKey = new IndexKey() { object_id = x.ObjectId, index_id = x.InternalId };
        x.Columns = indexColLookup[indexKey].Select(row => new Index.Column()
        {
            Name = row.name,
            Order = row.key_ordinal,
            SortDirection = (row.is_descending_key) ? SortDirection.Descending : SortDirection.Ascending
        });
    }

    foreach (var t in tables)
    {
        t.Columns = columnLookup[t.ObjectId].ToArray();
        foreach (var col in t.Columns)
        {
            col.Parent = t;
        }

        t.Indexes = indexLookup[t.ObjectId].ToArray();
        foreach (var x in t.Indexes)
        {
            x.Parent = t;
        }
    }

    return tables;
}
public void Update(IndexKey indexKey, string data)
{
    Remove(indexKey);
    Add(indexKey, data);
}
public static string Dump(this IndexKey value)
{
    return string.Format("{0}{1}{0}", value.Type == IndexDataType.String ? "'" : "", value.ToString());
}
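A quick illustration of the quoting behavior (the keys themselves are hypothetical; only Type and ToString() come from the snippet above):

Console.WriteLine(stringKey.Dump());  // 'abc' (string keys are wrapped in single quotes)
Console.WriteLine(numericKey.Dump()); // 42 (other types print bare)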
public Task<long> Count(string keyName)
{
    var key = new IndexKey(this, keyName, false);
    return new IndexManagerFactory(redis, redis.Database).Create(key).Count();
}
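A minimal call-site sketch, assuming an instance named store that exposes Count as above (the index name is illustrative):

long total = await store.Count("AllTweets");
Console.WriteLine($"Records in index: {total}");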