/// <summary>
/// Returns the cached docs-with-field bitset for <paramref name="field"/> on
/// <paramref name="reader"/>, creating and caching the entry on first use.
/// </summary>
internal static IBits GetDocsWithField(this FieldCache fc, IndexReader reader, String field)
{
    var cacheKey = new Key<string, IndexReader>(field, reader);
    return _docsWithFieldCache.GetOrAdd(
        cacheKey,
        k => DocsWithFieldCacheEntry_CreateValue(k.Item2, new Entry(k.Item1, null), false));
}
// Verifies that SQL produced by IStatementBuilder.CreateQueryAll (with an explicit
// field list taken from FieldCache) round-trips via ExecuteQuery and returns every
// inserted row with equal property values.
public void TestSqlConnectionExecuteNonQueryFromQueryBuilderCreateQueryAll()
{
    // Setup
    var tables = Helper.CreateIdentityTables(10);
    var fields = FieldCache.Get<IdentityTable>();

    using (var connection = new SqlConnection(Database.ConnectionStringForRepoDb))
    {
        // Act
        connection.InsertAll(tables);

        // Setup: build a SELECT-all statement over the cached fields.
        var builder = connection.GetStatementBuilder();
        var sql = builder.CreateQueryAll(null,
            ClassMappedNameCache.Get<IdentityTable>(),
            fields: fields);

        // Act
        var result = connection.ExecuteQuery<IdentityTable>(sql);

        // Assert: one result per inserted row, matched by Id, all properties equal.
        Assert.AreEqual(tables.Count(), result.Count());
        result.AsList().ForEach(item =>
        {
            Helper.AssertPropertiesEquality(tables.First(v => v.Id == item.Id), item);
        });
    }
}
// Populates <paramref name="entry"/> from this row's raw record data, driven by the
// pre-built FieldCache setters. Handles the inline id column (read once, then the
// column index is offset past it), an out-of-range field that receives the parent
// reference id (m_refID), and both array and scalar column readers.
public void GetFields<T>(FieldCache<T>[] fields, T entry)
{
    int indexFieldOffSet = 0;

    // Rewind the shared record buffer to this row's start.
    Data.Position = m_dataPosition;
    Data.Offset = m_dataOffset;

    for (int i = 0; i < fields.Length; i++)
    {
        FieldCache<T> info = fields[i];

        if (i == m_reader.IdFieldIndex)
        {
            if (Id != -1)
            {
                // Id already known (e.g. from an external index) — the inline
                // column is skipped, so subsequent field indices shift down.
                indexFieldOffSet++;
            }
            else
            {
                // NOTE(review): the id read uses meta index i (not fieldIndex);
                // presumably the meta arrays include the id column — confirm.
                Id = GetFieldValue<int>(0, Data, m_fieldMeta[i], m_columnMeta[i], m_palletData[i], m_commonData[i]);
            }

            info.Setter(entry, Convert.ChangeType(Id, info.Field.FieldType));
            continue;
        }

        object value = null;
        int fieldIndex = i - indexFieldOffSet;

        // Fields beyond the file's metadata are relation columns: they receive
        // the reference id instead of record data.
        if (fieldIndex >= m_reader.Meta.Length)
        {
            info.Setter(entry, Convert.ChangeType(m_refID, info.Field.FieldType));
            continue;
        }

        if (info.IsArray)
        {
            if (arrayReaders.TryGetValue(info.Field.FieldType, out var reader))
            {
                value = reader(Data, m_recordOffset, m_fieldMeta[fieldIndex], m_columnMeta[fieldIndex], m_palletData[fieldIndex], m_commonData[fieldIndex], m_reader.StringTable);
            }
            else
            {
                throw new Exception("Unhandled array type: " + typeof(T).Name);
            }
        }
        else
        {
            if (simpleReaders.TryGetValue(info.Field.FieldType, out var reader))
            {
                value = reader(Id, Data, m_recordOffset, m_fieldMeta[fieldIndex], m_columnMeta[fieldIndex], m_palletData[fieldIndex], m_commonData[fieldIndex], m_reader.StringTable, m_reader);
            }
            else
            {
                throw new Exception("Unhandled field type: " + typeof(T).Name);
            }
        }

        info.Setter(entry, value);
    }
}
// Verifies that CreateQuery with a NotBetween operation fed THREE values (rather
// than the required two) fails when executed. The expected exception is presumably
// declared via an attribute on this method outside this view — confirm.
// NOTE(review): "MoreVaues" in the method name looks like a typo for "MoreValues".
public void ThrowExceptionOnSqlConnectionExecuteQueryFromQueryBuilderCreateQueryForNotBetweenOperationViaListWithMoreVaues()
{
    // Setup: NotBetween expects exactly two bounds; supply three to trigger the failure.
    var tables = Helper.CreateIdentityTables(10);
    var values = new List<long> { 1, 3, 7 };
    var where = new QueryGroup(new QueryField("Id", Operation.NotBetween, values));
    var fields = FieldCache.Get<IdentityTable>();

    using (var connection = new SqlConnection(Database.ConnectionStringForRepoDb))
    {
        // Act
        connection.InsertAll(tables);

        // Setup
        var builder = connection.GetStatementBuilder();
        var sql = builder.CreateQuery(null,
            ClassMappedNameCache.Get<IdentityTable>(),
            fields: fields,
            where: where);

        // Act: expected to throw.
        connection.ExecuteQuery<IdentityTable>(sql, where);
    }
}
// Verifies that CreateQuery with a NotIn operation over an array excludes exactly
// the listed ids: 10 rows inserted, 4 excluded, 6 expected back.
public void TestSqlConnectionExecuteQueryFromQueryBuilderCreateQueryForNotInOperationViaArray()
{
    // Setup
    var tables = Helper.CreateIdentityTables(10);
    var values = new long[] { 1, 3, 4, 8 };
    var where = new QueryGroup(new QueryField("Id", Operation.NotIn, values));
    var fields = FieldCache.Get<IdentityTable>();

    using (var connection = new SqlConnection(Database.ConnectionStringForRepoDb))
    {
        // Act
        connection.InsertAll(tables);

        // Setup
        var builder = connection.GetStatementBuilder();
        var sql = builder.CreateQuery(null,
            ClassMappedNameCache.Get<IdentityTable>(),
            fields: fields,
            where: where);

        // Act
        var result = connection.ExecuteQuery<IdentityTable>(sql, where);

        // Assert: 10 inserted minus the 4 excluded ids.
        Assert.AreEqual(6, result.Count());
        result.AsList().ForEach(item =>
        {
            Assert.IsFalse(values.Contains(item.Id));
            Helper.AssertPropertiesEquality(tables.First(v => v.Id == item.Id), item);
        });
    }
}
// Verifies that CreateBatchQuery paging (page 2, 2 rows per batch, ordered by Id)
// returns exactly one page of matching rows.
public void TestSqlConnectionExecuteNonQueryFromQueryBuilderCreateBatchQuery()
{
    // Setup
    var tables = Helper.CreateIdentityTables(10);
    var where = new QueryGroup(new QueryField("Id", Operation.GreaterThanOrEqual, 0));
    var fields = FieldCache.Get<IdentityTable>();

    using (var connection = new SqlConnection(Database.ConnectionStringForRepoDb))
    {
        // Act
        connection.InsertAll(tables);

        // Setup: page is zero-based here — page 2 with 2 rows/batch selects the 3rd pair.
        var builder = connection.GetStatementBuilder();
        var sql = builder.CreateBatchQuery(null,
            ClassMappedNameCache.Get<IdentityTable>(),
            fields: fields,
            page: 2,
            rowsPerBatch: 2,
            orderBy: OrderField.Ascending<IdentityTable>(e => e.Id).AsEnumerable(),
            where: where);

        // Act
        var result = connection.ExecuteQuery<IdentityTable>(sql, where);

        // Assert: exactly one batch-sized page comes back.
        Assert.AreEqual(2, result.Count());
        result.AsList().ForEach(item =>
        {
            Helper.AssertPropertiesEquality(tables.First(v => v.Id == item.Id), item);
        });
    }
}
// Verifies that SQL produced by CreateInsert (with primary/identity fields resolved
// from DbFieldCache) inserts a row, returns a scalar id, and the persisted row's
// properties equal the source entity.
public void TestSqlConnectionExecuteNonQueryFromQueryBuilderCreateInsert()
{
    // Setup
    var table = Helper.CreateIdentityTables(1).First();
    var fields = FieldCache.Get<IdentityTable>();

    using (var connection = new SqlConnection(Database.ConnectionStringForRepoDb))
    {
        // Setup: resolve the primary and identity DB fields for the statement builder.
        var dbFields = DbFieldCache.Get(connection, ClassMappedNameCache.Get<IdentityTable>(), null);
        var builder = connection.GetStatementBuilder();
        var sql = builder.CreateInsert(null,
            ClassMappedNameCache.Get<IdentityTable>(),
            fields: fields,
            primaryField: dbFields.FirstOrDefault(e => e.IsPrimary),
            identityField: dbFields.FirstOrDefault(e => e.IsIdentity));

        // Act: ExecuteScalar returns the generated identity.
        var id = connection.ExecuteScalar(sql, table);

        // Assert
        Assert.IsNotNull(id);

        // Setup
        var result = connection.QueryAll<IdentityTable>().First();

        // Assert
        Helper.AssertPropertiesEquality(table, result);
    }
}
// Three distinct strings sharing one hash bucket must each round-trip through the
// cache as new string instances, while identical content is deduplicated to the
// SAME cached instance.
public void Test1()
{
    // "542008", "27721116", "98000820" have hash code 3769566006
    var first = "542008";
    var second = "27721116";
    var third = "98000820";
    var firstAgain = "542008";

    var cache = new FieldCache(1);
    var resolvedFirst = cache.GetField(first.ToCharArray(), 0, first.Length);
    var resolvedSecond = cache.GetField(second.ToCharArray(), 0, second.Length);
    var resolvedThird = cache.GetField(third.ToCharArray(), 0, third.Length);
    var resolvedFirstAgain = cache.GetField(firstAgain.ToCharArray(), 0, firstAgain.Length);

    // Content matches the inputs...
    Assert.Equal(first, resolvedFirst);
    Assert.Equal(second, resolvedSecond);
    Assert.Equal(third, resolvedThird);
    Assert.Equal(firstAgain, resolvedFirstAgain);

    // ...but the cache materialized fresh instances (built from char[] copies)...
    Assert.NotSame(first, resolvedFirst);
    Assert.NotSame(second, resolvedSecond);
    Assert.NotSame(third, resolvedThird);
    Assert.NotSame(firstAgain, resolvedFirstAgain);

    // ...and equal content is interned to a single cached instance.
    Assert.Same(resolvedFirst, resolvedFirstAgain);
}
// FieldCache.Get on a derived class must surface inherited plus declared
// properties — four in total for DerivedClass.
public void TestFieldCacheGetForDerivedClass()
{
    // Act
    var derivedFields = FieldCache.Get<DerivedClass>().AsList();

    // Assert
    Assert.AreEqual(4, derivedFields.Count());
}
/// <summary>
/// Captures the cached reflection members (typed/raw property and field caches)
/// that back a single trigger property, keyed by its JSON name.
/// </summary>
internal TriggerPropertyEntry(string jsonName, TriggerProperty property, PropertyCache typedProperty, FieldCache typedRawField, PropertyCache rawProperty, PropertyCache rawInput)
{
    JsonName = jsonName;
    Property = property;
    TypedProperty = typedProperty;
    TypedRawField = typedRawField;
    RawProperty = rawProperty;
    RawInput = rawInput;
}
// Serializes one row into a BitWriter and stores it under <paramref name="id"/>.
// When the file carries an external index (DB2Flags.Index), index-mapped fields
// are skipped and later field indices are shifted down to stay aligned with the
// per-field metadata.
public void Serialize(int id, T row)
{
    BitWriter bitWriter = new BitWriter(m_writer.RecordSize);

    int indexFieldOffSet = 0;
    for (int i = 0; i < m_writer.FieldCache.Length; i++)
    {
        FieldCache<T> info = m_writer.FieldCache[i];

        // The id lives in a separate index block — don't write it inline.
        if (info.IndexMapField && m_writer.Flags.HasFlagExt(DB2Flags.Index))
        {
            indexFieldOffSet++;
            continue;
        }

        int fieldIndex = i - indexFieldOffSet;

        if (info.IsArray)
        {
            if (arrayWriters.TryGetValue(info.Field.FieldType, out var writer))
            {
                writer(bitWriter, m_writer, m_fieldMeta[fieldIndex], (Array)info.Getter(row));
            }
            else
            {
                throw new Exception("Unhandled array type: " + typeof(T).Name);
            }
        }
        else
        {
            if (simpleWriters.TryGetValue(info.Field.FieldType, out var writer))
            {
                writer(bitWriter, m_writer, m_fieldMeta[fieldIndex], info.Getter(row));
            }
            else
            {
                throw new Exception("Unhandled field type: " + typeof(T).Name);
            }
        }
    }

    // pad to record size: dense files pad to the fixed record size; sparse
    // records are only aligned to a 4-byte multiple.
    if (!m_writer.Flags.HasFlagExt(DB2Flags.Sparse))
    {
        bitWriter.Resize(m_writer.RecordSize);
    }
    else
    {
        bitWriter.ResizeToMultiple(4);
    }

    Records[id] = bitWriter;
}
/// <summary>
/// Builds the execution context used to run an INSERT for <typeparamref name="TEntity"/>:
/// resolves the identity field, filters the insertable input fields, and pre-compiles
/// the parameter-setter and identity-setter delegates.
/// </summary>
/// <typeparam name="TEntity">Type of the data entity.</typeparam>
/// <param name="connection">Open connection used to resolve the DB setting.</param>
/// <param name="dbFields">Database fields of the target table (may be null).</param>
/// <param name="tableName">Name of the target table.</param>
/// <param name="fields">Entity fields requested for the insert.</param>
/// <param name="commandText">Pre-built INSERT command text.</param>
/// <returns>A populated <c>InsertExecutionContext&lt;TEntity&gt;</c>.</returns>
private static InsertExecutionContext<TEntity> CreateInternal<TEntity>(IDbConnection connection,
    IEnumerable<DbField> dbFields,
    string tableName,
    IEnumerable<Field> fields,
    string commandText)
    where TEntity : class
{
    var typeOfEntity = typeof(TEntity);
    var dbSetting = connection.GetDbSetting();
    var identity = (Field)null;
    var inputFields = (IEnumerable<DbField>)null;
    var identityDbField = dbFields?.FirstOrDefault(f => f.IsIdentity);

    // Set the identity field: explicit IdentityCache mapping wins; otherwise a
    // cached entity field whose unquoted name matches the DB identity column;
    // otherwise the DB identity column itself.
    if (typeOfEntity.IsClassType())
    {
        identity = IdentityCache.Get<TEntity>()?.AsField() ??
            FieldCache
            .Get<TEntity>()?
            .FirstOrDefault(field =>
                string.Equals(field.Name.AsUnquoted(true, dbSetting), identityDbField?.Name.AsUnquoted(true, dbSetting), StringComparison.OrdinalIgnoreCase)) ??
            identityDbField?.AsField();
    }

    // Filter the actual properties for input fields: exclude the identity column
    // (its value is server-generated) and keep only requested fields.
    inputFields = dbFields?
        .Where(dbField => dbField.IsIdentity == false)
        .Where(dbField =>
            fields.FirstOrDefault(field =>
                string.Equals(field.Name.AsUnquoted(true, dbSetting), dbField.Name.AsUnquoted(true, dbSetting), StringComparison.OrdinalIgnoreCase)) != null)
        .AsList();

    // Variables for the entity action
    var identityPropertySetter = (Action<TEntity, object>)null;

    // Get the identity setter (only meaningful when an identity was resolved).
    if (typeOfEntity.IsClassType() == true && identity != null)
    {
        identityPropertySetter = FunctionCache.GetDataEntityPropertySetterCompiledFunction<TEntity>(identity);
    }

    // Return the value
    return (new InsertExecutionContext<TEntity>
    {
        CommandText = commandText,
        InputFields = inputFields,
        ParametersSetterFunc = FunctionCache.GetDataEntityDbParameterSetterCompiledFunction<TEntity>(
            string.Concat(typeof(TEntity).FullName, StringConstant.Period, tableName, ".Insert"),
            inputFields?.AsList(),
            null,
            dbSetting),
        IdentityPropertySetterFunc = identityPropertySetter
    });
}
/// <summary>
/// Evicts every cached field value of the given row, one entry per column.
/// No-op when field caching is disabled.
/// </summary>
private void RemoveRowFromCache(long rowIndex)
{
    if (!CacheFields)
    {
        return;
    }

    var columnCount = TableInfo.Columns.Count;
    for (var column = 0; column < columnCount; column++)
    {
        FieldCache.Remove(TableInfo.TableName, rowIndex, column);
    }
}
/// <summary>
/// Imports a mod collection from the database: flushes RepoDb's metadata caches
/// first, then dispatches to the v3 import path when <c>IsV4Async</c> reports
/// true, otherwise the v2 path.
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns><c>true</c> if XXXX, <c>false</c> otherwise.</returns>
public async Task<ICollectionImportResult> DatabaseImportAsync(ModCollectionExporterParams parameters)
{
    // Caching sucks in this ORM
    DbFieldCache.Flush();
    FieldCache.Flush();
    IdentityCache.Flush();
    PrimaryCache.Flush();

    var useNewerFormat = await IsV4Async(parameters);
    return useNewerFormat
        ? await DatabaseImportv3Async(parameters)
        : await DatabaseImportv2Async(parameters);
}
// Serializes one row into a BitWriter and stores it under <paramref name="id"/>,
// tracking the total null-terminated string payload length per record in
// StringLengths (0 when the row has no non-empty strings).
public void Serialize(int id, T row)
{
    BitWriter bitWriter = new BitWriter(m_writer.RecordSize);

    StringLengths[id] = 0;

    for (int i = 0; i < m_writer.FieldCache.Length; i++)
    {
        FieldCache<T> info = m_writer.FieldCache[i];

        if (info.IsArray)
        {
            if (arrayWriters.TryGetValue(info.Field.FieldType, out var writer))
            {
                Array array = (Array)info.Getter(row);
                writer(bitWriter, m_writer, array);

                // Sum of each non-empty string plus its null terminator.
                // NOTE(review): a later string field overwrites (not accumulates)
                // this total — confirm only one string-bearing field per record.
                if (array is string[] strings)
                {
                    StringLengths[id] = (ushort)strings.Sum(x => x.Length == 0 ? 0 : x.Length + 1);
                }
            }
            else
            {
                throw new Exception("Unhandled array type: " + typeof(T).Name);
            }
        }
        else
        {
            if (simpleWriters.TryGetValue(info.Field.FieldType, out var writer))
            {
                object value = info.Getter(row);
                writer(bitWriter, m_writer, value);

                if (value is string strings)
                {
                    StringLengths[id] = (ushort)(strings.Length == 0 ? 0 : strings.Length + 1);
                }
            }
            else
            {
                throw new Exception("Unhandled field type: " + typeof(T).Name);
            }
        }
    }

    // pad to record size
    bitWriter.Resize(m_writer.RecordSize);

    Records[id] = bitWriter;
}
// Initializes the shared FieldCache singletons: the default cache implementation,
// the plain-text numeric parsers, and the NumericUtils-encoded parsers used for
// numeric-field-indexed terms.
static FieldCache_Fields()
{
    DEFAULT = new FieldCacheImpl();

    // Plain-text parsers (terms stored as human-readable strings).
    DEFAULT_BYTE_PARSER = new AnonymousClassByteParser();
    DEFAULT_SHORT_PARSER = new AnonymousClassShortParser();
    DEFAULT_INT_PARSER = new AnonymousClassIntParser();
    DEFAULT_FLOAT_PARSER = new AnonymousClassFloatParser();
    DEFAULT_LONG_PARSER = new AnonymousClassLongParser();
    DEFAULT_DOUBLE_PARSER = new AnonymousClassDoubleParser();

    // NumericUtils parsers (terms stored in the prefix-encoded numeric format).
    NUMERIC_UTILS_INT_PARSER = new AnonymousClassIntParser1();
    NUMERIC_UTILS_FLOAT_PARSER = new AnonymousClassFloatParser1();
    NUMERIC_UTILS_LONG_PARSER = new AnonymousClassLongParser1();
    NUMERIC_UTILS_DOUBLE_PARSER = new AnonymousClassDoubleParser1();
}
/// <summary>
/// Builds the execution context used to run a MERGE for <paramref name="entityType"/>:
/// resolves the identity field, filters the input fields (identity included — MERGE
/// may match on it), and pre-compiles the parameter-setter and identity-setter delegates.
/// </summary>
/// <param name="entityType">Type of the data entity.</param>
/// <param name="connection">Open connection used to resolve the DB setting.</param>
/// <param name="dbFields">Database fields of the target table (may be null).</param>
/// <param name="tableName">Name of the target table.</param>
/// <param name="fields">Entity fields requested for the merge.</param>
/// <param name="commandText">Pre-built MERGE command text.</param>
/// <returns>A populated <c>MergeExecutionContext</c>.</returns>
private static MergeExecutionContext CreateInternal(Type entityType,
    IDbConnection connection,
    IEnumerable<DbField> dbFields,
    string tableName,
    IEnumerable<Field> fields,
    string commandText)
{
    var dbSetting = connection.GetDbSetting();
    var identity = (Field)null;
    var inputFields = new List<DbField>();
    var identityDbField = dbFields?.FirstOrDefault(f => f.IsIdentity);

    // Set the identity field: explicit IdentityCache mapping wins; otherwise a
    // cached entity field whose unquoted name matches the DB identity column;
    // otherwise the DB identity column itself.
    identity = IdentityCache.Get(entityType)?.AsField() ??
        FieldCache
        .Get(entityType)?
        .FirstOrDefault(field =>
            string.Equals(field.Name.AsUnquoted(true, dbSetting), identityDbField?.Name.AsUnquoted(true, dbSetting), StringComparison.OrdinalIgnoreCase)) ??
        identityDbField?.AsField();

    // Filter the actual properties for input fields (unlike Insert, identity is kept).
    inputFields = dbFields?
        .Where(dbField =>
            fields.FirstOrDefault(field =>
                string.Equals(field.Name.AsUnquoted(true, dbSetting), dbField.Name.AsUnquoted(true, dbSetting), StringComparison.OrdinalIgnoreCase)) != null)
        .AsList();

    // Variables for the entity action
    var identityPropertySetter = (Action<object, object>)null;

    // Get the identity setter (only when an identity was resolved).
    if (identity != null)
    {
        identityPropertySetter = FunctionCache.GetDataEntityPropertySetterCompiledFunction(entityType, identity);
    }

    // Return the value
    return (new MergeExecutionContext
    {
        CommandText = commandText,
        InputFields = inputFields,
        ParametersSetterFunc = FunctionCache.GetDataEntityDbParameterSetterCompiledFunction(entityType,
            string.Concat(entityType.FullName, StringConstant.Period, tableName, ".Merge"),
            inputFields?.AsList(),
            null,
            dbSetting),
        IdentityPropertySetterFunc = identityPropertySetter
    });
}
// Every reflected property of DerivedClass must have a FieldCache entry with the
// same name and type, and both collections must contain exactly four members.
public void TestFieldCacheGet()
{
    // Act
    var properties = typeof(DerivedClass).GetProperties().AsList();
    var cachedFields = FieldCache.Get<DerivedClass>().AsList();

    // Assert: counts agree...
    Assert.AreEqual(4, properties.Count());
    Assert.AreEqual(4, cachedFields.Count());

    // ...and each property maps to a field of matching name and type.
    properties.ForEach(property =>
    {
        var match = cachedFields.FirstOrDefault(f => f.Name == property.Name);
        Assert.IsNotNull(match);
        Assert.AreEqual(property.PropertyType, match.Type);
    });
}
/// <summary>
/// Tracks changes on <paramref name="target"/> by reflecting its public instance
/// fields (cached per type) and snapshotting their initial values via GetChanges().
/// </summary>
public ChangeTracker(object target)
{
    Target = target;
    var type = Target.GetType();

    List<FieldInfo> typeFields;
    lock (FieldCache)
    {
        // Read and (if missing) populate the shared cache inside the lock.
        // The original read FieldCache[type] AFTER releasing the lock, racing
        // against concurrent Add calls from other constructors; TryGetValue also
        // avoids the ContainsKey + indexer double lookup.
        if (!FieldCache.TryGetValue(type, out typeFields))
        {
            typeFields = type.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy).ToList();
            FieldCache.Add(type, typeFields);
        }
    }

    Fields = typeFields.Select(each => new ChangeTrackerField(each)).ToList();
    GetChanges();
}
/// <summary>
/// Reads a DB2 stream: selects the concrete reader from the 4-byte magic
/// (WDC1/WDC2/WDC3), builds a FieldCache entry for each public field of
/// <typeparamref name="T"/>, then materializes every row in parallel into this storage.
/// </summary>
internal Storage(Stream stream)
{
    DB2Reader reader;

    using (var bin = new BinaryReader(stream))
    {
        // Peek the magic, then rewind so the chosen reader parses the full header.
        var identifier = new string(bin.ReadChars(4));
        stream.Position = 0;
        switch (identifier)
        {
            case "WDC3":
                reader = new WDC3Reader(stream);
                break;
            case "WDC2":
                reader = new WDC2Reader(stream);
                break;
            case "WDC1":
                reader = new WDC1Reader(stream);
                break;
            default:
                throw new Exception("DBC type " + identifier + " is not supported!");
        }
    }

    FieldInfo[] fields = typeof(T).GetFields();
    FieldCache<T>[] fieldCache = new FieldCache<T> [fields.Length];
    for (int i = 0; i < fields.Length; ++i)
    {
        // The [Index] attribute only matters when the file carries an index column.
        bool indexMapAttribute = reader.Flags.HasFlagExt(DB2Flags.Index) ? Attribute.IsDefined(fields[i], typeof(IndexAttribute)) : false;
        fieldCache[i] = new FieldCache<T>(fields[i], fields[i].FieldType.IsArray, fields[i].GetSetter<T>(), indexMapAttribute);
    }

    // Materialize rows concurrently; each iteration works on its own entry.
    // NOTE(review): assumes TryAdd is safe under concurrent insertion — confirm
    // the base collection is concurrent.
    Parallel.ForEach(reader.AsEnumerable(), row =>
    {
        T entry = new T();
        row.Value.GetFields(fieldCache, entry);
        TryAdd(row.Key, entry);
    });
}
// Populates <paramref name="entry"/> from this row's data: the index-mapped field
// yields the record id, localized-string fields skip to the configured locale's
// slot (32 bytes per locale) before/after reading, and remaining fields dispatch
// to the array or scalar reader tables.
public void GetFields<T>(FieldCache<T>[] fields, T entry)
{
    for (int i = 0; i < fields.Length; i++)
    {
        FieldCache<T> info = fields[i];

        if (info.IndexMapField)
        {
            Id = GetFieldValue<int>(Data);
            info.Setter(entry, Convert.ChangeType(Id, info.Field.FieldType));
            continue;
        }

        object value = null;

        if (info.IsArray)
        {
            if (arrayReaders.TryGetValue(info.Field.FieldType, out var reader))
            {
                value = reader(Data, m_reader.StringTable, info.Cardinality);
            }
            else
            {
                throw new Exception("Unhandled array type: " + typeof(T).Name);
            }
        }
        else if (info.IsLocalisedString)
        {
            // Localized strings are stored as a fixed-size block per locale:
            // seek forward to this locale's slot, read it, then skip the rest.
            Data.Position += 32 * info.LocaleInfo.Locale;
            value = simpleReaders[typeof(string)](Data, m_reader.StringTable, m_reader);
            Data.Position += 32 * (info.LocaleInfo.LocaleCount - info.LocaleInfo.Locale);
        }
        else
        {
            if (simpleReaders.TryGetValue(info.Field.FieldType, out var reader))
            {
                value = reader(Data, m_reader.StringTable, m_reader);
            }
            else
            {
                throw new Exception("Unhandled field type: " + typeof(T).Name);
            }
        }

        info.Setter(entry, value);
    }
}
/// <summary>
/// Derives a field's array cardinality from the file layout itself:
/// the span between this field's offset and the next field's offset (or the
/// record size, for the last field) divided by the per-element byte size.
/// </summary>
/// <typeparam name="T">Record type the field belongs to.</typeparam>
/// <param name="info">Field cache entry whose Cardinality is set.</param>
/// <param name="fieldIndex">Index into the per-field metadata table.</param>
private void SetCardinality<T>(FieldCache<T> info, int fieldIndex)
{
    var fieldOffset = m_fieldMeta[fieldIndex].Offset;
    // Bits holds (32 - bit width), so this recovers the element size in bytes.
    var fieldValueSize = (32 - m_fieldMeta[fieldIndex].Bits) >> 3;

    var isLastField = fieldIndex + 1 >= m_fieldMeta.Length;
    var nextOffset = isLastField
        ? m_reader.RecordSize                  // last field: bounded by total record size
        : m_fieldMeta[fieldIndex + 1].Offset;  // middle field: bounded by the next field

    info.Cardinality = (nextOffset - fieldOffset) / fieldValueSize;
}
/// <summary>
/// Generic get: looks up a field by name and type, returning its raw value —
/// decompressed first when the field's type is marked compressible.
/// </summary>
/// <param name="name">Field name to locate.</param>
/// <param name="type">Type tag passed to the lookup and to decompression.</param>
/// <returns>The field's bytes, or <c>null</c> when the field is not found.</returns>
private byte[] get_field(string name, byte type)
{
    int idx = find_field(name, type);
    if (idx < 0)
    {
        return null;
    }

    // Use the local copy consistently (the original re-indexed FCache[idx]
    // twice after already taking this local).
    FieldCache f = FCache[idx];
    return FIELD_COMPRESS[f.TYPE] ? decompress(f.VALUE, type) : f.VALUE;
}
// Populates <paramref name="entry"/> from this row's buffered data. The record
// buffer is rewound first; the index-mapped field is filled from the externally
// supplied Id (not read from the buffer), and the rest dispatch to the array or
// scalar reader tables by field type.
public void GetFields<T>(FieldCache<T>[] fields, T entry)
{
    Data.Position = 0;

    for (int i = 0; i < fields.Length; i++)
    {
        FieldCache<T> info = fields[i];

        if (info.IndexMapField)
        {
            // Id comes from the index block, converted to the declared field type.
            info.Setter(entry, Convert.ChangeType(Id, info.Field.FieldType));
            continue;
        }

        object value = null;

        if (info.IsArray)
        {
            if (arrayReaders.TryGetValue(info.Field.FieldType, out var reader))
            {
                value = reader(Data, info.Cardinality);
            }
            else
            {
                throw new Exception("Unhandled array type: " + typeof(T).Name);
            }
        }
        else
        {
            if (simpleReaders.TryGetValue(info.Field.FieldType, out var reader))
            {
                value = reader(Data);
            }
            else
            {
                throw new Exception("Unhandled field type: " + typeof(T).Name);
            }
        }

        info.Setter(entry, value);
    }
}
// Verifies that SQL produced by CreateUpdate (keyed on Id, with primary/identity
// resolved from DbFieldCache) updates exactly one row and persists the new values.
public void TestSqlConnectionExecuteNonQueryFromQueryBuilderCreateUpdate()
{
    // Setup
    var table = Helper.CreateIdentityTables(1).First();
    var fields = FieldCache.Get<IdentityTable>();

    using (var connection = new SqlConnection(Database.ConnectionStringForRepoDb))
    {
        // Act
        var id = connection.Insert(table);

        // Set the properties to a new value so the update is observable.
        table.ColumnNVarChar = $"{table.ColumnNVarChar}-Updated";

        // Setup: target the inserted row by its generated id.
        var where = new QueryGroup(new QueryField("Id", id));

        // Setup
        var dbFields = DbFieldCache.Get(connection, ClassMappedNameCache.Get<IdentityTable>(), null);
        var builder = connection.GetStatementBuilder();
        var sql = builder.CreateUpdate(null,
            ClassMappedNameCache.Get<IdentityTable>(),
            fields: fields,
            where: where,
            primaryField: dbFields.FirstOrDefault(e => e.IsPrimary),
            identityField: dbFields.FirstOrDefault(e => e.IsIdentity));

        // Act
        var affectedRow = connection.ExecuteNonQuery(sql, table);

        // Assert: exactly one row touched.
        Assert.AreEqual(1, affectedRow);

        // Setup
        var result = connection.QueryAll<IdentityTable>().First();

        // Assert
        Helper.AssertPropertiesEquality(table, result);
    }
}
/// <summary>
/// Releases the table's runtime resources: clears the field cache, disposes the
/// header area, record list, registries and index-set store, then closes the
/// backing store (disposing it only when the store system reports it closed).
/// NOTE(review): an exception from any Dispose call aborts the remaining cleanup,
/// and FieldCache/Store references are not nulled afterwards — confirm intended.
/// </summary>
public void Dispose()
{
    if (FieldCache != null)
    {
        FieldCache.Clear();
    }
    if (headerArea != null)
    {
        headerArea.Dispose();
    }
    if (recordList != null)
    {
        recordList.Dispose();
    }
    if (Registries != null)
    {
        Registries.Dispose();
    }
    if (indexSetStore != null)
    {
        indexSetStore.Dispose();
    }
    if (Store != null)
    {
        // Only dispose the store once the store system confirms it is closed.
        if (StoreSystem.CloseStore(Store))
        {
            Store.Dispose();
        }
    }

    headerArea = null;
    recordList = null;
    Registries = null;
    indexSetStore = null;
}
// Verifies SQLite identity reuse: after deleting a row, an InsertAsync that omits
// the Id field receives the deleted row's identity value back.
// NOTE(review): blocking on .Result inside a sync test is acceptable for MSTest
// but would be cleaner as `async Task` — consider migrating.
public void TestSQLiteConnectionInsertAsyncForIdentityReusability()
{
    using (var connection = new SQLiteConnection(Database.ConnectionStringSDS))
    {
        // Create the tables
        Database.CreateSdsTables(connection);

        // Setup
        var tables = Helper.CreateSdsCompleteTables(10).AsList();

        // Act
        var insertAllResult = connection.InsertAll<SdsCompleteTable>(tables);

        // Assert
        Assert.AreEqual(tables.Count, insertAllResult);
        Assert.AreEqual(tables.Count, connection.CountAll<SdsCompleteTable>());

        // Setup (3): pick the third row so its identity becomes reusable.
        var deleteEntity = tables[2];

        // Act (3)
        var deleteResult = connection.Delete<SdsCompleteTable>(deleteEntity);

        // Assert
        Assert.AreEqual(1, deleteResult);
        Assert.AreEqual(tables.Count - 1, connection.CountAll<SdsCompleteTable>());

        // Setup
        var table = Helper.CreateSdsCompleteTables(1).First();

        // Act (3): exclude Id so SQLite assigns the identity itself.
        var fields = FieldCache.Get<SdsCompleteTable>().Where(e => e.Name != "Id");
        var insertResult = connection.InsertAsync<SdsCompleteTable>(table, fields: fields).Result;

        // Assert (3): SQLite reuses the freed identity value.
        Assert.AreEqual(deleteEntity.Id, insertResult);
    }
}
//////////////////// OTHER METHODS ///////////////////////////////
/// <summary>
/// Deletes a field by name: tombstones its cache slot, removes it from the name
/// tree, and persists the change.
/// </summary>
/// <param name="name">Name of the field to delete.</param>
/// <returns><c>true</c> when the field existed and was deleted; otherwise <c>false</c>.</returns>
public bool Delete(string name)
{
    int idx = find_field(name, 0);
    if (idx < 0)
    {
        return false;
    }

    // Tombstone the cache slot in place. (The original performed an equivalent
    // RemoveAt + Insert pair at the same index — an index assignment avoids the
    // needless element shifting.)
    FieldCache f = new FieldCache();
    f.DELETED = true;
    FCache[idx] = f;

    //Delete from the tree
    FCacheTree.Delete(name);
    this.serialize();
    return true;
}
/// <summary>
/// Map the Selection name values from the specified Selection Names provided to the
/// RepoDb specific values that have the underlying DB field name (as potentially mapped on the Model).
/// All Fields are returned as a default if the value is undefined and/or invalid and cannot be mapped.
/// NOTE: Property names and db fields names are not guaranteed to be the same.
/// </summary>
/// <param name="selectionNamesFilter"></param>
/// <returns>
/// List of Database fields mapped from all of the available GraphQL Selections mapped to the generics
/// model type TEntity specified. As a fallback default, all DB Fields are returned if no Selections are available from the
/// GraphQL ParamsContext.
/// </returns>
public IEnumerable<Field> GetSelectFields(IEnumerable<string> selectionNamesFilter)
{
    // Ensure we are null safe and Get all the fields in that case...
    if (selectionNamesFilter == null)
    {
        //NOTE: Since there's no need to filter we can just get ALL fields from the FieldCache!
        return FieldCache.Get<TModel>();
    }

    //NOTE: For GraphQL we need to lookup the actual Db field by the Model's Property Name
    //      and then convert to the actual DB field name; which might also be mapped name via RepoDb attribute.
    //      For more info see: https://repodb.net/cacher/propertymappednamecache
    //TODO: Add Caching Layer here if needed to Cached a Reverse Dictionary of mappings by Model Name!

    // Use an ordinal case-insensitive comparer instead of ToLower() on both sides:
    // avoids per-name string allocations and culture-sensitive casing bugs (e.g.
    // the Turkish 'I'). Also drop the `?.` on the lookup indexer — ILookup's
    // indexer never returns null; it yields an empty sequence for a missing key.
    var mappingLookup = PropertyCache.Get<TModel>()
        .ToLookup(p => p.PropertyInfo.Name, StringComparer.OrdinalIgnoreCase);

    var selectFields = selectionNamesFilter
        .Select(name => mappingLookup[name].FirstOrDefault()?.AsField())
        .Where(prop => prop != null);

    return selectFields;
}
// Serializes one row into a fixed-size BitWriter and stores it under
// <paramref name="id"/>. Each field dispatches to the array or scalar writer
// table by declared field type; unknown types are a hard error.
public void Serialize(int id, T row)
{
    BitWriter bitWriter = new BitWriter(m_writer.RecordSize);

    for (int i = 0; i < m_writer.FieldCache.Length; i++)
    {
        FieldCache<T> info = m_writer.FieldCache[i];

        if (info.IsArray)
        {
            if (arrayWriters.TryGetValue(info.Field.FieldType, out var writer))
            {
                writer(bitWriter, m_writer, (Array)info.Getter(row));
            }
            else
            {
                throw new Exception("Unhandled array type: " + typeof(T).Name);
            }
        }
        else
        {
            if (simpleWriters.TryGetValue(info.Field.FieldType, out var writer))
            {
                writer(bitWriter, m_writer, info.Getter(row));
            }
            else
            {
                throw new Exception("Unhandled field type: " + typeof(T).Name);
            }
        }
    }

    // pad to record size
    bitWriter.Resize(m_writer.RecordSize);

    Records[id] = bitWriter;
}
/// <summary>Base cache bound to the owning <c>FieldCache</c> wrapper that delegates storage to it.</summary>
internal Cache(FieldCache wrapper)
{
    this.wrapper = wrapper;
}
/// <summary> Quick and dirty convenience method: runs the full sanity check
/// against every entry currently held by the given cache.</summary>
/// <param name="cache">Cache whose entries are inspected.</param>
/// <returns>Any detected insanities (empty array when the cache is sane).</returns>
/// <seealso cref="Check">
/// </seealso>
public static Insanity[] CheckSanity(FieldCache cache)
{
    return CheckSanity(cache.GetCacheEntries());
}
// Initializes the shared default FieldCache implementation singleton.
static FieldCache_Fields()
{
    DEFAULT = new FieldCacheImpl();
}
/// <summary> Return cached DocValues for input field and reader.</summary>
/// <param name="cache">FieldCache so that values of a field are loaded once per reader (RAM allowing)
/// </param>
/// <param name="field">Field for which values are required.
/// </param>
/// <param name="reader">Reader whose documents supply the values.
/// </param>
/// <returns>The per-document values for <paramref name="field"/>, loaded via the cache.</returns>
/// <seealso cref="ValueSource">
/// </seealso>
public abstract DocValues GetCachedFieldValues(FieldCache cache, System.String field, IndexReader reader);
/*(non-Javadoc) <see cref="Lucene.Net.Search.Function.FieldCacheSource.getCachedValues(Lucene.Net.Search.FieldCache, java.lang.String, Lucene.Net.Index.IndexReader) */
// Loads the field's int values through the cache (one load per reader) and wraps
// them in the anonymous DocValues adapter.
public override DocValues GetCachedFieldValues(FieldCache cache, System.String field, IndexReader reader)
{
    int[] cachedInts = cache.GetInts(reader, field, parser);
    return new AnonymousClassDocValues(cachedInts, this);
}
/// <summary>Cache of string-index entries, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal StringIndexCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Cache of long values, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal LongCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Cache of double values, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal DoubleCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Cache of int values, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal IntCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Cache of float values, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal FloatCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Cache of short values, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal ShortCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Cache of byte values, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal ByteCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Auto-detecting cache variant, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal AutoCache(FieldCache wrapper):base(wrapper)
{
}
/// <summary>Custom-comparator cache variant, delegating storage to the owning <c>FieldCache</c> wrapper.</summary>
internal CustomCache(FieldCache wrapper):base(wrapper)
{
}