public override FieldComparer NewComparer(string fieldname, int numHits, int sortPos, bool reversed)
{
    // We keep parallel slots: one array for the parent ids and one for the child ids.
    var parentSlots = new int[numHits];
    var childSlots = new int[numHits];

    // Builds the per-field reverse multipliers and single-slot comparers for a sort.
    void Build(SortField[] sortFields, out int[] reverseMul, out FieldComparer[] fieldComparers)
    {
        reverseMul = new int[sortFields.Length];
        fieldComparers = new FieldComparer[sortFields.Length];
        for (int i = 0; i < sortFields.Length; i++)
        {
            reverseMul[i] = sortFields[i].IsReverse ? -1 : 1;
            fieldComparers[i] = sortFields[i].GetComparer(1, i);
        }
    }

    Build(parentSort.GetSort(), out int[] parentReverseMul, out FieldComparer[] parentComparers);
    Build(childSort.GetSort(), out int[] childReverseMul, out FieldComparer[] childComparers);

    // NOTE: we could return parent ID as value but really our sort "value" is more complex...
    // So we throw UOE for now. At the moment you really should only use this at indexing time.
    return new FieldComparerAnonymousInnerClassHelper(this, parentSlots, childSlots, parentReverseMul, parentComparers, childReverseMul, childComparers);
}
// Stores the wrapped child-level comparer together with the filters that identify
// parent and child documents, plus the spare slot used for comparisons inside a
// single parent's scope.
private ToParentBlockJoinFieldComparer(FieldComparer wrappedComparer, Filter parentFilter, Filter childFilter, int spareSlot)
{
    _wrappedComparer = wrappedComparer;
    _parentFilter = parentFilter;
    _childFilter = childFilter;
    _spareSlot = spareSlot;
}
/// <summary>
/// Returns <c>true</c> when <paramref name="other"/> is non-null and
/// <see cref="FieldComparer.Same"/> considers the two statistics equal with
/// respect to the <see cref="Statistics.Id"/> field name; otherwise <c>false</c>.
/// </summary>
public static bool SameAmount(this Statistics res, Statistics other)
{
    return other != null && FieldComparer.Same(res, other, nameof(Statistics.Id));
}
// Lazily creates the VikingFS comparer on first call; subsequent calls are no-ops.
private void initComparer()
{
    if (ComparerInit)
    {
        return;
    }

    string sourcePath = this.filePath + @"\" + this.fileName;
    aComparer = new VikingFS.FieldComparer(sourcePath, @"C:\Middleware.txt", this.filePath, "Backup");
    ComparerInit = true;
}
/// <summary>
/// Saves fields from list to database.
/// </summary>
/// <param name="definedFields">Fields parsed from the feature's definition; must contain at least one field.</param>
/// <param name="siteGuid">Site the new fields belong to.</param>
/// <param name="featureGuid">Feature the new fields belong to.</param>
/// <param name="deleteOrphans">When <c>true</c>, fields no longer present in the definition are
/// deleted together with their stored values; otherwise they are only marked as deleted.</param>
public static void SaveFieldsToDB(List<Field> definedFields, Guid siteGuid, Guid featureGuid, bool deleteOrphans = false)
{
    // FIX: the original indexed definedFields[0] unconditionally and crashed with an
    // unhelpful exception on a null or empty list; fail fast with a clear message.
    if (definedFields == null || definedFields.Count == 0)
    {
        throw new ArgumentException("At least one defined field is required.", nameof(definedFields));
    }

    Guid definitionGuid = definedFields[0].DefinitionGuid;
    List<Field> savedFields = Field.GetAllForDefinition(definitionGuid, true);
    FieldComparer fieldComp = new FieldComparer();
    SimpleFieldComparer simpleFieldComp = new SimpleFieldComparer();

    // Saved fields that also appear in the definition (per the simple comparer).
    List<Field> matchedFields = savedFields.Where(i => definedFields.Contains(i, simpleFieldComp)).ToList();

    foreach (Field match in matchedFields)
    {
        // FIX: the original used Single() followed by a null check that could never
        // fire (Single throws instead of returning null); SingleOrDefault makes the
        // existing null check meaningful.
        Field updatedField = definedFields.SingleOrDefault(i => i.Name == match.Name);
        if (updatedField != null && !savedFields.Contains(updatedField, fieldComp))
        {
            updatedField.IsDeleted = false; // in case field was deleted, we're going to undelete it
            updatedField.FieldGuid = match.FieldGuid;
            updatedField.SiteGuid = match.SiteGuid;
            updatedField.FeatureGuid = match.FeatureGuid;
            updatedField.Save();
        }
    }

    // Fields in the definition that have no saved counterpart yet.
    List<Field> newFields = definedFields.Except(matchedFields, simpleFieldComp).ToList();
    foreach (Field newField in newFields)
    {
        newField.SiteGuid = siteGuid;
        newField.FeatureGuid = featureGuid;
        newField.Save();
        matchedFields.Add(newField);
    }

    savedFields = Field.GetAllForDefinition(definitionGuid, true);

    // Orphans are those fields which exist in the db but no longer exist in the definition.
    // If we don't delete the fields, they will continue to show up on the edit page.
    // If we delete the fields we have to delete any values associated with them.
    // (NOTE: the original used '///' doc-comment syntax for these inline remarks.)
    List<Field> orphans = savedFields.Except(matchedFields, simpleFieldComp).ToList();
    if (deleteOrphans)
    {
        foreach (Field orphan in orphans)
        {
            ItemFieldValue.DeleteByField(orphan.FieldGuid);
            Field.Delete(orphan.FieldGuid);
        }
    }
    else
    {
        foreach (Field orphan in orphans)
        {
            Field.MarkAsDeleted(orphan.FieldGuid);
        }
    }
}
/// <summary>
/// Makes sure the saved fields for the configured definition match the defined
/// fields; persists the definition when they differ. Returns <c>true</c> when
/// a non-null saved field list could be produced.
/// </summary>
public static bool EnsureFields(Guid siteGuid, ModuleConfiguration config, out List<Field> savedFields, bool deleteOrphanedFieldValues = false)
{
    savedFields = null;
    List<Field> definedFields = FieldUtils.ParseFieldDefinitionXml(config, siteGuid);
    FieldComparer fieldComp = new FieldComparer();

    if (config.FieldDefinitionGuid == Guid.Empty)
    {
        return false;
    }

    // Local copy: out parameters cannot be captured by the lambda below.
    List<Field> existing = Field.GetAllForDefinition(config.FieldDefinitionGuid);
    savedFields = existing;

    bool fieldsChanged = false;
    if (existing != null)
    {
        fieldsChanged = existing.Count != definedFields.Count
                        || definedFields.Any(defined => !existing.Contains(defined, fieldComp));
    }

    if (existing == null || fieldsChanged)
    {
        FieldUtils.SaveFieldsToDB(definedFields, siteGuid, config.FeatureGuid, deleteOrphanedFieldValues);
        savedFields = Field.GetAllForDefinition(config.FieldDefinitionGuid);
    }

    return savedFields != null;
}
/// <summary>
/// Builds a <c>DBModel</c> from the raw table, field and relation data.
/// </summary>
/// <param name="dbName">Display name of the database.</param>
/// <param name="withHistory">When <c>false</c>, tables whose description starts with
/// "Änderungshistorie zu:" (change history) are skipped.</param>
/// <param name="dataTables">Raw table rows (id, plus four string columns).</param>
/// <param name="dataFields">Raw field rows (id, two strings, table id, numeric column).</param>
/// <param name="dataRelations">Raw relation rows (id, from-field id, to-field id, type).</param>
protected DBModel CreateModel(string dbName, bool withHistory, DataTable dataTables, DataTable dataFields, DataTable dataRelations)
{
    List<Table> myTables = new List<Table>();
    List<Field> myFields = new List<Field>();
    FieldComparer fieldComparer = new FieldComparer();
    List<Relation> myRelations = new List<Relation>();
    HashSet<Field> relFields = new HashSet<Field>();

    foreach (DataRow row in dataTables.Rows)
    {
        if (withHistory || !row[2].ToString().StartsWith(@"Änderungshistorie zu:"))
        {
            myTables.Add(new Table(Convert.ToInt32(row[0]), row[1].ToString(), row[2].ToString(), row[3].ToString(), row[4].ToString()));
        }
    }

    foreach (DataRow row in dataFields.Rows)
    {
        var myTable = myTables.Find(table => table.Id == Convert.ToInt32(row[3]));
        if (myTable != null)
        {
            var myField = new Field(Convert.ToInt32(row[0]), row[1].ToString(), row[2].ToString(), myTable, Convert.ToInt32(row[4].ToString()));
            myTable.Fields.Add(myField);
            myFields.Add(myField);
        }
    }

    // FIX: the original sorted the current table's field list on every row, which is
    // accidentally quadratic; sorting each table once afterwards produces the same
    // final ordering.
    foreach (var table in myTables)
    {
        table.Fields.Sort(fieldComparer);
    }

    foreach (DataRow row in dataRelations.Rows)
    {
        var myFieldFrom = myFields.Find(field => field.Id == Convert.ToInt32(row[1]));
        var myFieldTo = myFields.Find(field => field.Id == Convert.ToInt32(row[2]));
        if (myFieldFrom != null && myFieldTo != null)
        {
            // FIX: only record the endpoints of valid relations; the original also
            // added null entries to relFields when either field lookup failed.
            relFields.Add(myFieldFrom);
            relFields.Add(myFieldTo);

            // A relation may already exist in the opposite direction; in that case
            // only its reverse type is updated (single Find instead of Exists + Find).
            var reverse = myRelations.Find(rel => rel.FromField.Id == myFieldTo.Id && rel.ToField.Id == myFieldFrom.Id);
            if (reverse == null)
            {
                myRelations.Add(new Relation(Convert.ToInt32(row[0]), myFieldFrom, myFieldTo, row[3].ToString().Trim()));
            }
            else
            {
                reverse.TypeTo = row[3].ToString().Trim();
            }
        }
    }

    return new DBModel(dbName, myTables, myRelations, relFields);
}
/// <summary>
/// Sort the Record Array based in the field name provided. (for advanced sorting use SortRecords)
/// </summary>
/// <param name="records">The records Array.</param>
/// <param name="fieldName">The field name.</param>
/// <param name="asc">The direction of the sort. True means Ascending.</param>
public static void SortRecordsByField(object[] records, string fieldName, bool asc)
{
    // Nothing to sort for an empty array or when the element type cannot be probed.
    if (records.Length == 0 || records[0] == null)
    {
        return;
    }

    FileHelperEngine engine = new FileHelperEngine(records[0].GetType());
    FieldInfo fi = engine.mRecordInfo.GetFieldInfo(fieldName);
    if (fi == null)
    {
        throw new BadUsageException("The record class not contains the field " + fieldName);
    }

    Array.Sort(records, new FieldComparer(fi, asc));
}
// Compares two collected groups field by field; when every comparer ties,
// the lower top doc id wins.
public int Compare(ICollectedSearchGroup o1, ICollectedSearchGroup o2)
{
    int compIDX = 0;
    while (true)
    {
        FieldComparer fc = outerInstance.comparers[compIDX];
        int c = outerInstance.reversed[compIDX] * fc.Compare(o1.ComparerSlot, o2.ComparerSlot);
        if (c != 0)
        {
            return c;
        }
        if (compIDX == outerInstance.compIDXEnd)
        {
            // Full tie on all sort fields: fall back to doc order.
            return o1.TopDoc - o2.TopDoc;
        }
        compIDX++;
    }
}
/// <summary>
/// Returns a mapping from the old document ID to its new location in the
/// sorted index. Implementations can use the auxiliary
/// <see cref="Sort(int, DocComparer)"/> to compute the old-to-new permutation
/// given a list of documents and their corresponding values.
/// <para>
/// A return value of <c>null</c> is allowed and means that
/// <c>reader</c> is already sorted.
/// </para>
/// <para>
/// <b>NOTE:</b> deleted documents are expected to appear in the mapping as
/// well, they will however be marked as deleted in the sorted view.
/// </para>
/// </summary>
internal DocMap Sort(AtomicReader reader)
{
    SortField[] fields = sort.GetSort();
    int fieldCount = fields.Length;
    var reverseMul = new int[fieldCount];
    var comparers = new FieldComparer[fieldCount];

    for (int i = 0; i < fieldCount; i++)
    {
        SortField field = fields[i];
        reverseMul[i] = field.IsReverse ? -1 : 1;
        FieldComparer fieldComparer = field.GetComparer(1, i);
        fieldComparer.SetNextReader(reader.AtomicContext);
        fieldComparer.SetScorer(FAKESCORER);
        comparers[i] = fieldComparer;
    }

    return Sort(reader.MaxDoc, new DocComparerAnonymousClass(reverseMul, comparers));
}
/// <summary>
/// Sort the content of a File using the field name provided
/// </summary>
/// <param name="recordClass">The class for each record of the file.</param>
/// <param name="fieldName">The name of the field used to sort the file.</param>
/// <param name="asc">The sort direction.</param>
/// <param name="sourceFile">The source file.</param>
/// <param name="sortedFile">The destination File.</param>
public static void SortFileByField(Type recordClass, string fieldName, bool asc, string sourceFile, string sortedFile)
{
    var engine = new FileHelperEngine(recordClass);
    FieldInfo fi = engine.mRecordInfo.GetFieldInfo(fieldName);
    if (fi == null)
    {
        throw new BadUsageException("The record class not contains the field " + fieldName);
    }

    object[] records = engine.ReadFile(sourceFile);
    Array.Sort(records, new FieldComparer(fi, asc));
    engine.WriteFile(sortedFile, records);
}
// Returns true if first is < second, i.e. first should be popped earlier.
protected internal override bool LessThan(ShardRef first, ShardRef second)
{
    Debug.Assert(first != second);
    FieldDoc firstFD = (FieldDoc)shardHits[first.ShardIndex][first.HitIndex];
    FieldDoc secondFD = (FieldDoc)shardHits[second.ShardIndex][second.HitIndex];

    // Walk the sort fields until one comparer breaks the tie.
    for (int compIDX = 0; compIDX < comparers.Length; compIDX++)
    {
        int cmp = reverseMul[compIDX] * comparers[compIDX].CompareValues(firstFD.Fields[compIDX], secondFD.Fields[compIDX]);
        if (cmp != 0)
        {
            return cmp < 0;
        }
    }

    // All sort fields tied: the earlier shard wins ...
    if (first.ShardIndex != second.ShardIndex)
    {
        return first.ShardIndex < second.ShardIndex;
    }

    // ... and inside the same shard we keep the order the shard itself produced.
    Debug.Assert(first.HitIndex != second.HitIndex);
    return first.HitIndex < second.HitIndex;
}
// Maps each public field of the given type to the comparer to use for it:
// an instance of the type named by its ComparisonAttribute when present,
// otherwise a shared FieldComparer for fields whose type is IComparable.
// Fields matching neither case are omitted.
public Dictionary<FieldInfo, IComparer> searchFields(Type klass)
{
    var comparersByField = new Dictionary<FieldInfo, IComparer>();
    var defaultComparer = new FieldComparer();

    foreach (FieldInfo field in klass.GetFields())
    {
        var attribute = (ComparisonAttribute)field.GetCustomAttribute(typeof(ComparisonAttribute));
        if (attribute != null)
        {
            comparersByField[field] = (IComparer)Activator.CreateInstance(attribute.klass);
        }
        else if (typeof(IComparable).IsAssignableFrom(field.FieldType))
        {
            comparersByField[field] = defaultComparer;
        }
    }

    return comparersByField;
}
public override FieldComparer SetNextReader(AtomicReaderContext context)
{
    // Converts a DocIdSet into a FixedBitSet for this segment, or null when the
    // set is empty or yields no iterator.
    FixedBitSet AsFixedBitSet(DocIdSet docs)
    {
        if (IsEmpty(docs))
        {
            return null;
        }
        if (docs is FixedBitSet bits)
        {
            return bits;
        }
        DocIdSetIterator iterator = docs.GetIterator();
        return iterator == null ? null : ToFixedBitSet(iterator, context.AtomicReader.MaxDoc);
    }

    _childDocuments = AsFixedBitSet(_childFilter.GetDocIdSet(context, null));
    _parentDocuments = AsFixedBitSet(_parentFilter.GetDocIdSet(context, null));
    _wrappedComparer = _wrappedComparer.SetNextReader(context);
    return this;
}
// Emits the full query-side C# source for a GraphQL schema as a CodeDefinition
// tree: the QL entry points (Compile for the query and mutation root types),
// input structs with their variable collection and writer methods, per-interface
// fragment classes, "On<Type>" inline-fragment classes, and one node class per
// field (deduplicated across owners via FieldComparer; colliding field names are
// prefixed with the owning type's name). Left byte-identical below: the body is
// dominated by interpolated string literals that ARE the generated output, so any
// restructuring risks changing the emitted code. NOTE(review): the exact runtime
// semantics of CodeDefinition/GraphQLWriter are not visible here — verify against
// their definitions before refactoring.
static CodeDefinition WriteToQueryStream(Settings settings, SchemaAndErrors json) { CodeDefinition root = new CodeDefinition { Indent = "", NewLineAfterPre = true, NewLineAtEnd = false, PreText = "// Generated Query Text" }; CodeDefinition usings = new CodeDefinition { Indent = "", NewLineAfterPre = false, NewLineAtEnd = true, }; CodeDefinition ns = CodeDefinition.Block("namespace " + settings.Query.Namespace); var ql = CodeDefinition.Block("public static class QL"); ns.Children.Add(ql); if (json.Root.Schema.QueryType != null) { string name = json.Root.Schema.QueryType.Name; ql.Children.Add(CodeDefinition.Block($"public static GraphQL2Net.Support.Command<{settings.Result.Namespace}.{name}> Compile({settings.Query.Namespace}.{name} query, string queryName = null)", $"return new GraphQL2Net.Support.Command<{settings.Result.Namespace}.{name}>(query, \"query\", new {settings.Result.Namespace}.Helper(), queryName);")); } if (json.Root.Schema.MutationType != null) { string name = json.Root.Schema.MutationType.Name; ql.Children.Add(CodeDefinition.Block($"public static GraphQL2Net.Support.Command<{settings.Result.Namespace}.{name}> Compile({settings.Query.Namespace}.{name} query, string queryName = null)", $"return new GraphQL2Net.Support.Command<{settings.Result.Namespace}.{name}>(query, \"mutation\", new {settings.Result.Namespace}.Helper(), queryName ?? \"mutate\");")); } root.Children.Add(usings); root.Children.Add(ns); usings.Children.Add("using GraphQL2Net.Support;"); usings.Children.Add("using ObsoleteAttribute = System.ObsoleteAttribute;"); usings.Children.Add("using NotImplementedException = System.NotImplementedException;"); usings.Children.Add("using IEnumerator = System.Collections.IEnumerator;"); usings.Children.Add("using IEnumerable = System.Collections.IEnumerable;"); usings.Children.Add("using System.Collections.Generic;"); var filtered = settings.GetTypes(json.Root.Schema.Types).ToArray(); foreach (var type in filtered.Where(t => t.Kind == EnumType.InputObject)) { var s = CodeDefinition.Block($"public struct {CodeHelper.Escape(type.Name)} : IInputStruct"); var variables = CodeDefinition.Block("public void GetVariables(Dictionary<string, IVariableReference> variables)"); var write = CodeDefinition.Block("public void Write(GraphQLWriter writer, bool json)"); ns.Children.Add(s); write.Children.Add("writer.Write(\"{ \");"); bool first = true; bool couldBeFirst = true; foreach (var field in type.InputFields) { var c = new CodeDefinition { Indent = "" }; AddComment(field, c); bool canBeNull = field.Type.Kind != EnumType.NonNull; c.Children.Add($"public VariableReference<{GetTypeTextIgnoreNull(field.Type)}>{(canBeNull ? "?" : "")} {CodeHelper.Escape(field.Name)} {{ get; set; }}"); s.Children.Add(c); if (canBeNull) { variables.Children.Add($"if ({CodeHelper.Escape(field.Name)}.HasValue && {CodeHelper.Escape(field.Name)}.Value.Key != null) variables[{CodeHelper.Escape(field.Name)}.Value.Key] = {CodeHelper.Escape(field.Name)}.Value;"); } else { variables.Children.Add($"if ({CodeHelper.Escape(field.Name)}.Key != null) variables[{CodeHelper.Escape(field.Name)}.Key] = {CodeHelper.Escape(field.Name)};"); } CodeDefinition block; if (canBeNull) { block = CodeDefinition.Block($"if ({CodeHelper.Escape(field.Name)}.HasValue)"); if (first) { write.Children.Add("bool first = true;"); } write.Children.Add(block); } else { block = write; } if (first) { first = false; if (canBeNull) { block.Children.Add("first = false;"); } } else { if (couldBeFirst) { block.Children.Add("if (first) first = false;"); block.Children.Add("else if (json) writer.Write(\", \");"); block.Children.Add("else writer.Write(\" \");"); } else { block.Children.Add("if (json) writer.Write(\", \");"); } } block.Children.Add($"if (json) writer.Write(\"\\\"{field.Name}\\\": \");"); block.Children.Add($"else writer.Write(\"{field.Name}: \");"); block.Children.Add($"writer.WriteValue({CodeHelper.Escape(field.Name)}{(canBeNull ? ".Value" : "")});"); if (!canBeNull) { couldBeFirst = false; } } write.Children.Add("writer.Write(\" }\");"); s.Children.Add(variables); s.Children.Add(write); } var comparer = new FieldComparer(); var types = filtered.ToDictionary(p => p.Name); var todo = new Queue <TypeDefinition>(); var rootTypes = new List <string>(); if (json.Root.Schema.QueryType != null) { rootTypes.Add(json.Root.Schema.QueryType.Name); } if (json.Root.Schema.MutationType != null) { rootTypes.Add(json.Root.Schema.MutationType.Name); } var fieldLookup = new Dictionary <string, Field>(); var interfaceLookup = new Dictionary <string, HashSet <string> >(); var originalLookup = new Dictionary <string, string>(); var interfaceOwner = new HashSet <string>(); foreach (var rootName in rootTypes) { todo.Enqueue(types[rootName]); fieldLookup[rootName] = new Field { Name = rootName, Type = new FieldType { Kind = EnumType.Object, Name = rootName } }; originalLookup[rootName] = rootName; } foreach (var current in settings.GetTypes(json.Root.Schema.Types)) { if (current.Kind != EnumType.Object && current.Kind != EnumType.Interface) { continue; } foreach (var field in current.Fields) { Field currentField; string key; if (fieldLookup.TryGetValue(field.Name, out currentField)) { if (comparer.Equals(field, currentField)) { key = field.Name; } else { key = current.Name + "_" + field.Name; } } else { key = field.Name; } key = CodeHelper.Escape(key); fieldLookup[key] = field; HashSet <string> interfaceList; if (!interfaceLookup.TryGetValue(key, out interfaceList)) { interfaceList = new HashSet <string>(); interfaceLookup[key] = interfaceList; } interfaceList.Add(current.Name); interfaceOwner.Add(current.Name); originalLookup[key] = ObjectName(field.Type); } } foreach (var i in interfaceOwner) { var block = CodeDefinition.Block($"public interface {i}_IChild : INode"); ns.Children.Add(block); var fragment = CodeDefinition.Block($"public class {i}Fragment : {i}_IChild, IEnumerable<{i}_IChild>"); fragment.Children.Add("GraphQL2Net.Support.Meta _meta;"); fragment.Children.Add($"public System.Type ResultType {{ get {{ return typeof({settings.Result.Namespace}.{i}); }} }}"); fragment.Children.Add("public IEnumerable<INode> Children { get { return _children; } }"); fragment.Children.Add("public Meta Meta { get { return _meta; } }"); fragment.Children.Add($"private readonly List<{i}_IChild> _children = new List<{i}_IChild>();"); fragment.Children.Add($"public void Add({i}_IChild child) {{ _children.Add(child); }}"); fragment.Children.Add($"public IEnumerator<{i}_IChild> GetEnumerator() {{ return _children.GetEnumerator(); }}"); fragment.Children.Add("IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); }"); var c = CodeDefinition.Block($"public {i}Fragment(Meta _Meta = null)"); c.Children.Add("_meta = _Meta;"); var write = CodeDefinition.Block("public void Write(GraphQLWriter writer)"); write.Children.Add($"writer.WriteFragment(this, \"{i}\");"); var getVariables = CodeDefinition.Block("public void GetVariables(Dictionary<string, IVariableReference> variables)"); getVariables.Children.Add("if (_meta != null) { _meta.GetVariables(variables); }"); getVariables.Children.Add("foreach (var child in _children) child.GetVariables(variables);"); fragment.Children.Add(c); fragment.Children.Add(write); fragment.Children.Add(getVariables); ns.Children.Add(fragment); } foreach (var type in settings.GetTypes(json.Root.Schema.Types)) { if (type.Kind == EnumType.Object && type.Interfaces != null && type.Interfaces.Length > 0) { var sb = new StringBuilder(); sb.AppendFormat("public class On{0} : IEnumerable<{0}_IChild>", type.Name); foreach (var i in type.Interfaces) { sb.AppendFormat(", {0}_IChild", i.Name); } var def = CodeDefinition.Block(sb.ToString()); def.Children.Add("GraphQL2Net.Support.Meta _meta;"); def.Children.Add($"public System.Type ResultType {{ get {{ return typeof({settings.Result.Namespace}.{type.Name}); }} }}"); def.Children.Add("public IEnumerable<INode> Children { get { return _children; } }"); def.Children.Add("public Meta Meta { get { return _meta; } }"); def.Children.Add($"private readonly List<{type.Name}_IChild> _children = new List<{type.Name}_IChild>();"); def.Children.Add($"public void Add({type.Name}_IChild child) {{ _children.Add(child); }}"); def.Children.Add($"public IEnumerator<{type.Name}_IChild> GetEnumerator() {{ return _children.GetEnumerator(); }}"); def.Children.Add("IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); }"); var c = CodeDefinition.Block($"public On{type.Name}(Meta _Meta = null)"); c.Children.Add("_meta = _Meta;"); var write = CodeDefinition.Block("public void Write(GraphQLWriter writer)"); write.Children.Add($"writer.Write(\"... on {type.Name}\");"); write.Children.Add("writer.Write(_meta);"); write.Children.Add("writer.WriteLine(\" {\");"); write.Children.Add("writer.PushIndent();"); write.Children.Add("foreach (var v in _children) v.Write(writer);"); write.Children.Add("writer.PopIndent();"); write.Children.Add("writer.WriteLine(\"}\");"); var getVariables = CodeDefinition.Block("public void GetVariables(Dictionary<string, IVariableReference> variables)"); getVariables.Children.Add("if (_meta != null) { _meta.GetVariables(variables); }"); getVariables.Children.Add("foreach (var child in _children) child.GetVariables(variables);"); def.Children.Add(c); def.Children.Add(write); def.Children.Add(getVariables); ns.Children.Add(def); } } foreach (var pair in fieldLookup) { var originalType = originalLookup[pair.Key]; var sb = new StringBuilder(); sb.AppendFormat("public class {0}", pair.Key); bool firstInterface = true; bool isContainer = false; if (originalType != null && interfaceOwner.Contains(originalType)) { sb.Append(firstInterface ? " : " : ", "); firstInterface = false; sb.AppendFormat("IEnumerable<{0}_IChild>", originalType); isContainer = true; } HashSet <string> interfaceList; if (interfaceLookup.TryGetValue(pair.Key, out interfaceList)) { foreach (var i in interfaceList) { sb.Append(firstInterface ? " : " : ", "); firstInterface = false; sb.AppendFormat("{0}_IChild", i); } } else { sb.Append(firstInterface ? " : " : ", "); firstInterface = false; sb.Append("INode"); } CodeDefinition typeDefinition = new CodeDefinition(); CodeDefinition cd = CodeDefinition.Block(sb.ToString()); DeprecatableObject obj = pair.Value; typeDefinition.Children.Add(cd); CodeDefinition enumerable = new CodeDefinition(); CodeDefinition constructor = new CodeDefinition(); CodeDefinition fields = new CodeDefinition(); fields.Children.Add("GraphQL2Net.Support.Meta _meta;"); fields.Children.Add($"public System.Type ResultType {{ get {{ return {GetResultType(settings, pair.Value.Type)}; }} }}"); if (isContainer) { fields.Children.Add("public IEnumerable<INode> Children { get { return _children; } }"); } else { fields.Children.Add("public IEnumerable<INode> Children { get { return new INode[0]; } }"); } fields.Children.Add("public Meta Meta { get { return _meta; } }"); cd.Children.Add(fields); cd.Children.Add(enumerable); cd.Children.Add(constructor); CodeDefinition toString = CodeDefinition.Block("public void Write(GraphQLWriter writer)"); cd.Children.Add(toString); if (!rootTypes.Contains(pair.Value.Name)) { toString.Children.Add($"writer.Write(\"{pair.Value.Name}\", _meta);"); } CodeDefinition getVariables = CodeDefinition.Block("public void GetVariables(Dictionary<string, IVariableReference> variables)"); getVariables.Children.Add("if (_meta != null) { _meta.GetVariables(variables); }"); if (pair.Value.Args != null) { foreach (var arg in pair.Value.Args) { getVariables.Children.Add($"if (_{arg.Name}.Key != null) variables[_{arg.Name}.Key] = _{arg.Name};"); } } if (isContainer) { CodeDefinition childVariables = CodeDefinition.Block("foreach (var child in _children)"); childVariables.Children.Add("child.GetVariables(variables);"); getVariables.Children.Add(childVariables); } cd.Children.Add(getVariables); if (isContainer) { enumerable.Children.Add($"private readonly List<{originalType}_IChild> _children = new List<{originalType}_IChild>();"); enumerable.Children.Add($"public void Add({originalType}_IChild child) {{ _children.Add(child); }}"); enumerable.Children.Add($"public IEnumerator<{originalType}_IChild> GetEnumerator() {{ return _children.GetEnumerator(); }}"); enumerable.Children.Add($"IEnumerator IEnumerable.GetEnumerator() {{ return GetEnumerator(); }}"); } bool hasArgs = pair.Value.Args != null && pair.Value.Args.Length > 0; if (hasArgs) { toString.Children.Add("bool hasParameters = false;"); } var c = new CodeDefinition(); var comment = AddComment(pair.Value, c); var func = CodeDefinition.Block(""); c.Children.Add(func); StringBuilder txt = new StringBuilder(); txt.AppendFormat("public {0}(", pair.Key); bool first = true; if (hasArgs) { foreach (var arg in pair.Value.Args) { ScalarDefinition def; var type = arg.Type; while (type.Kind == EnumType.NonNull) { type = type.OfType; } string typeName; if (type.Kind == EnumType.Enum) { typeName = settings.Result.Namespace + "." + type.Name; } else if (ScalarDefinition.ScalarLookup.TryGetValue(type.Name, out def)) { typeName = def.Value; } else if (type.Kind == EnumType.InputObject) { typeName = CodeHelper.Escape(type.Name); } else { typeName = "string"; } if (first) { first = false; } else { txt.Append(", "); } string fieldType = $"VariableReference<{typeName}>"; txt.AppendFormat("{0} {1} = default({0})", fieldType, arg.Name); fields.Children.Add($"private readonly {fieldType} _{arg.Name};"); func.Children.Add($"_{arg.Name} = {arg.Name};"); func.Children.Add($"_{arg.Name}.Type = \"{arg.Type}\";"); if (arg.Description != null) { var sw = new StringWriter(); var writer = new System.Xml.XmlTextWriter(sw); writer.WriteString(arg.Description); writer.Close(); var split = sw.ToString().Split('\n'); if (split.Length == 1) { comment.Children.Add($"<param name=\"{arg.Name}\">{split[0]}</param>"); } else { comment.Children.Add($"<param name=\"{arg.Name}\">"); foreach (var s in split) { comment.Children.Add(s); } comment.Children.Add($"</param>"); } } var block = CodeDefinition.Block($"if (_{arg.Name}.HasValue)"); toString.Children.Add(block); block.Children.Add("if (hasParameters) writer.Write(\", \");"); block.Children.Add("else { hasParameters = true; writer.Write(\"(\"); }"); block.Children.Add($"writer.Write(\"{arg.Name}: \");"); var argType = arg.Type.Kind == EnumType.NonNull ? arg.Type.OfType : arg.Type; block.Children.Add($"writer.Write{(argType.Kind == EnumType.Enum ? "Enum": "")}Value(_{arg.Name});"); } } if (hasArgs) { txt.Append(", "); } txt.Append("Meta _Meta = null"); txt.Append(") {"); func.PreText = txt.ToString(); func.Children.Add("_meta = _Meta;"); constructor.Children.Add(c); if (hasArgs) { toString.Children.Add("if (hasParameters) writer.Write(\")\");"); } toString.Children.Add("writer.Write(_meta);"); if (isContainer) { toString.Children.Add("writer.WriteLine(\" {\");"); toString.Children.Add("writer.PushIndent();"); var realType = pair.Value.Type.Kind == EnumType.NonNull ? pair.Value.Type.OfType : pair.Value.Type; if (realType.Kind == EnumType.Interface) { toString.Children.Add("writer.WriteLine(\"__typename\");"); } toString.Children.Add("foreach (var v in _children) v.Write(writer);"); toString.Children.Add("writer.PopIndent();"); toString.Children.Add("writer.WriteLine(\"}\");"); } else { toString.Children.Add("writer.WriteLine(\"\");"); } ns.Children.Add(typeDefinition); } return(root); }
/// <summary>
/// Create <see cref="ToParentBlockJoinFieldComparer.Highest"/>
/// </summary>
/// <param name="wrappedComparer">The <see cref="FieldComparer"/> on the child / nested level. </param>
/// <param name="parentFilter"><see cref="Filter"/> (must produce <see cref="FixedBitSet"/> per-segment) that identifies the parent documents. </param>
/// <param name="childFilter"><see cref="Filter"/> that defines which child / nested documents participates in sorting. </param>
/// <param name="spareSlot">The extra slot inside the wrapped comparer that is used to compare which nested document
/// inside the parent document scope is most competitive. </param>
public Highest(FieldComparer wrappedComparer, Filter parentFilter, Filter childFilter, int spareSlot)
    : base(wrappedComparer, parentFilter, childFilter, spareSlot)
{
    // No additional state: everything is handled by the base class.
}
// Wires up the property and field comparers used for ExpandoObject comparison,
// sharing the given root comparer with both.
public ExpandoObjectComparer(RootComparer rootComparer) : base(rootComparer)
{
    propertyComparer = new CustomPropertyComparer(rootComparer);
    fieldComparer = new FieldComparer(rootComparer);
}
// Serializes a FieldList into the MessagePack buffer as a flat array of
// (typeId, memberId, body) triples. When the serializer state requests
// change-only writes, entries whose body matches the original bytes are rolled
// back and skipped, and any keys deleted since the original snapshot are
// appended as a SystemField.DeletedKeys entry. The array header is first
// written with the maximum possible count and rewritten downwards with the real
// count at the end. Returns the number of bytes written.
public int Serialize(ref byte[] bytes, int offset, FieldList list, IFormatterResolver formatterResolver)
{
    if (list == null)
    {
        return (MessagePackBinary.WriteNil(ref bytes, offset));
    }
    var startOffset = offset;
    offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, (list.Count + 1) * 3); // plus one in case of DeletedKeys
    var state = Serializer.Instance.State;
    var previousOffset = state.LocalOffset;
    var comparer = new FieldComparer(list.OriginalBytes);
    var itemCount = 0;
    foreach (var pair in list)
    {
        bool isValueProducer = pair.Value != null && pair.Value is ValueProducer;
        state.StartLocalGroup();
        var groupStartOffset = offset; // remembered so an unchanged entry can be rewound
        var typeId = (int)pair.Key.TypeId;
        // A negated type id marks the entry's body as a ValueProducer.
        offset += MessagePackBinary.WriteInt32(ref bytes, offset, isValueProducer ? -typeId : typeId);
        offset += MessagePackBinary.WriteInt32(ref bytes, offset, pair.Key.MemberId);
        var bodyLength = 0;
        if (isValueProducer)
        {
            bodyLength = formatterResolver.GetFormatter <ValueProducer>().Serialize(ref bytes, offset, (ValueProducer)pair.Value, formatterResolver);
        }
        else
        {
            bodyLength = FieldMeta.Get(pair.Key).Serialize(pair.Value, ref bytes, offset, formatterResolver);
        }
        if (state.WriteOnlyChanged && !comparer.IsChanged(pair.Key, bytes, offset, bodyLength))
        {
            // Entry is unchanged: undo the local group and rewind the buffer.
            state.UnrollLocalGroup();
            offset = groupStartOffset;
        }
        else
        {
            offset += bodyLength;
            itemCount++;
        }
    }
    if (state.WriteOnlyChanged)
    {
        comparer.RollToTheEnd();
        if (comparer.DeletedKeys != null)
        {
            offset += MessagePackBinary.WriteInt32(ref bytes, offset, (int)TypeIndex.SystemField);
            offset += MessagePackBinary.WriteInt32(ref bytes, offset, (int)SystemField.DeletedKeys);
            offset += formatterResolver.GetFormatter <List <FieldKey> >().Serialize(ref bytes, offset, comparer.DeletedKeys, formatterResolver);
            itemCount++;
        }
    }
    LastFieldWritten = itemCount;
    MessagePackBinary.ReWriteArrayHeaderDownwards(ref bytes, startOffset, itemCount * 3); // write real count
    state.RestoreLocalGroup(previousOffset);
    return (offset - startOffset);
}
/// <summary>
/// Collects one document for first-pass grouping: either creates/replaces a group in the
/// top-N set, or updates the existing group this document belongs to when the document
/// sorts ahead of the group's current top document.
/// </summary>
/// <param name="doc">Segment-relative document id (combined with <c>docBase</c> for the global id).</param>
public virtual void Collect(int doc)
{
    //System.out.println("FP.collect doc=" + doc);

    // If orderedGroups != null we already have collected N groups and
    // can short circuit by comparing this document to the bottom group,
    // without having to find what group this document belongs to.
    // Even if this document belongs to a group in the top N, we'll know that
    // we don't have to update that group.

    // Downside: if the number of unique groups is very low, this is
    // wasted effort as we will most likely be updating an existing group.
    if (m_orderedGroups != null)
    {
        for (int compIDX = 0; ; compIDX++)
        {
            int c = reversed[compIDX] * comparers[compIDX].CompareBottom(doc);
            if (c < 0)
            {
                // Definitely not competitive. So don't even bother to continue
                return;
            }
            else if (c > 0)
            {
                // Definitely competitive.
                break;
            }
            else if (compIDX == compIDXEnd)
            {
                // Here c=0. If we're at the last comparer, this doc is not
                // competitive, since docs are visited in doc Id order, which means
                // this doc cannot compete with any other document in the queue.
                return;
            }
        }
    }

    // TODO: should we add option to mean "ignore docs that
    // don't have the group field" (instead of stuffing them
    // under null group)?
    TGroupValue groupValue = GetDocGroupValue(doc);

    if (!groupMap.TryGetValue(groupValue, out CollectedSearchGroup<TGroupValue> group))
    {
        // First time we are seeing this group, or, we've seen
        // it before but it fell out of the top N and is now
        // coming back

        if (groupMap.Count < topNGroups)
        {
            // Still in startup transient: we have not
            // seen enough unique groups to start pruning them;
            // just keep collecting them

            // Add a new CollectedSearchGroup:
            CollectedSearchGroup<TGroupValue> sg = new CollectedSearchGroup<TGroupValue>();
            sg.GroupValue = CopyDocGroupValue(groupValue, default);
            sg.ComparerSlot = groupMap.Count;
            sg.TopDoc = docBase + doc;
            foreach (FieldComparer fc in comparers)
            {
                fc.Copy(sg.ComparerSlot, doc);
            }
            groupMap[sg.GroupValue] = sg;

            if (groupMap.Count == topNGroups)
            {
                // End of startup transient: we now have max
                // number of groups; from here on we will drop
                // bottom group when we insert new one:
                BuildSortedSet();
            }
            return;
        }

        // We already tested that the document is competitive, so replace
        // the bottom group with this new group.
        //CollectedSearchGroup<TGroupValue> bottomGroup = orderedGroups.PollLast();
        CollectedSearchGroup<TGroupValue> bottomGroup;
        // NOTE(review): the monitor guards only this remove; other accesses to
        // m_orderedGroups in this method are unsynchronized — presumably the
        // collector is single-threaded per segment. Confirm before relying on it.
        UninterruptableMonitor.Enter(m_orderedGroups);
        try
        {
            bottomGroup = m_orderedGroups.Last();
            m_orderedGroups.Remove(bottomGroup);
        }
        finally
        {
            UninterruptableMonitor.Exit(m_orderedGroups);
        }
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(m_orderedGroups.Count == topNGroups - 1);
        }

        groupMap.Remove(bottomGroup.GroupValue);

        // reuse the removed CollectedSearchGroup
        bottomGroup.GroupValue = CopyDocGroupValue(groupValue, bottomGroup.GroupValue);
        bottomGroup.TopDoc = docBase + doc;

        foreach (FieldComparer fc in comparers)
        {
            fc.Copy(bottomGroup.ComparerSlot, doc);
        }

        groupMap[bottomGroup.GroupValue] = bottomGroup;
        m_orderedGroups.Add(bottomGroup);
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(m_orderedGroups.Count == topNGroups);
        }

        // The bottom group changed, so tell each comparer which slot is now bottom.
        int lastComparerSlot = m_orderedGroups.Last().ComparerSlot;
        foreach (FieldComparer fc in comparers)
        {
            fc.SetBottom(lastComparerSlot);
        }
        return;
    }

    // Update existing group:
    for (int compIDX = 0; ; compIDX++)
    {
        FieldComparer fc = comparers[compIDX];
        fc.Copy(spareSlot, doc);

        int c = reversed[compIDX] * fc.Compare(group.ComparerSlot, spareSlot);
        if (c < 0)
        {
            // Definitely not competitive.
            return;
        }
        else if (c > 0)
        {
            // Definitely competitive; set remaining comparers:
            for (int compIDX2 = compIDX + 1; compIDX2 < comparers.Length; compIDX2++)
            {
                comparers[compIDX2].Copy(spareSlot, doc);
            }
            break;
        }
        else if (compIDX == compIDXEnd)
        {
            // Here c=0. If we're at the last comparer, this doc is not
            // competitive, since docs are visited in doc Id order, which means
            // this doc cannot compete with any other document in the queue.
            return;
        }
    }

    // Remove before updating the group since lookup is done via comparers
    // TODO: optimize this

    CollectedSearchGroup<TGroupValue> prevLast;
    if (m_orderedGroups != null)
    {
        UninterruptableMonitor.Enter(m_orderedGroups);
        try
        {
            prevLast = m_orderedGroups.Last();
            m_orderedGroups.Remove(group);
        }
        finally
        {
            UninterruptableMonitor.Exit(m_orderedGroups);
        }
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(m_orderedGroups.Count == topNGroups - 1);
        }
    }
    else
    {
        prevLast = null;
    }

    group.TopDoc = docBase + doc;

    // Swap slots: the spare slot now holds this doc's (better) sort values.
    int tmp = spareSlot;
    spareSlot = group.ComparerSlot;
    group.ComparerSlot = tmp;

    // Re-add the changed group
    if (m_orderedGroups != null)
    {
        m_orderedGroups.Add(group);
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(m_orderedGroups.Count == topNGroups);
        }
        var newLast = m_orderedGroups.Last();
        // If we changed the value of the last group, or changed which group was last, then update bottom:
        if (group == newLast || prevLast != newLast)
        {
            foreach (FieldComparer fc in comparers)
            {
                fc.SetBottom(newLast.ComparerSlot);
            }
        }
    }
}
/// <summary>
/// Wraps a Lucene <see cref="FieldComparer"/> so it can be used as a custom document comparer.
/// </summary>
/// <param name="luceneComparer">The underlying Lucene comparer all comparisons delegate to.</param>
public LuceneCustomDocComparer(FieldComparer luceneComparer)
{
    m_luceneComparer = luceneComparer;
}
/// <summary>
/// Reads table, field and relation metadata from the given database and assembles a
/// <see cref="DBModel"/> of tables, sorted fields and (de-duplicated) relations.
/// </summary>
/// <param name="dbName">Database to connect to on this instance's configured server.</param>
/// <param name="withHostory">When false, tables whose description starts with
/// "Änderungshistorie zu:" (change-history tables) are excluded. (sic: "Hostory")</param>
/// <returns>The assembled model; tables/relations are empty if the queries failed.</returns>
public DBModel BuildModel(string dbName, bool withHostory)
{
    string command;
    SqlCommand sqlCommand;
    SqlDataAdapter sqlAdapter;
    dataTables = new DataTable();
    dataFields = new DataTable();
    dataRelations = new DataTable();
    List<Table> myTables = new List<Table>();
    List<Field> myFields = new List<Field>();
    FieldComparer fieldComparer = new FieldComparer();
    List<Relation> myRelations = new List<Relation>();
    HashSet<Field> relFields = new HashSet<Field>();
    // NOTE(review): dead code — myFields is empty here, so this loop never runs
    // and k is never read afterwards. Safe to delete.
    int k = 0;
    foreach (Field f in myFields)
    {
        k++;
    }
    // NOTE(review): the password portion of this literal appears redacted/corrupted
    // ("******" is not valid C# here) — restore the real concatenation before building.
    string connString = "Server=" + serverName + ";Database=" + dbName + ";Trusted_Connection=" + trusted + (trusted ? ";" : ";User Id=" + user + ";Password="******";");
    SqlConnection dbConn = new SqlConnection(connString);
    try
    {
        dbConn.Open();
        command = "SELECT TABLEOBJECTID_SL, TABLEOBJECT_S, LANGT49_S, KINDOFOBJECT_S, DBNAME_S FROM " + DWTABLEOBJECTS + " WHERE DBNAME_S != 'MetaDB'";
        sqlCommand = new SqlCommand(command, dbConn);
        sqlAdapter = new SqlDataAdapter(sqlCommand);
        sqlAdapter.Fill(dataTables);
        command = "SELECT FIELDID_SL, FIELDNAME_S, LANGF49_S, TABLEOBJECTID_I, ORDERNR_SI FROM " + DWFIELDS;
        sqlCommand = new SqlCommand(command, dbConn);
        sqlAdapter = new SqlDataAdapter(sqlCommand);
        sqlAdapter.Fill(dataFields);
        command = "SELECT TA_ID_SL, FROMFIELDID_I, TOFIELDID_I, RELATIONTYPE_S FROM " + DWRELATIONS;
        sqlCommand = new SqlCommand(command, dbConn);
        sqlAdapter = new SqlDataAdapter(sqlCommand);
        sqlAdapter.Fill(dataRelations);
    }
    catch (Exception e)
    {
        // NOTE(review): exception is silently swallowed — the method then returns a
        // model built from whatever tables were filled before the failure. At minimum
        // this should be logged so connection/query errors are diagnosable.
    }
    finally
    {
        dbConn.Close();
        dbConn.Dispose();
    }
    foreach (DataRow row in dataTables.Rows)
    {
        if (withHostory || !row[2].ToString().StartsWith(@"Änderungshistorie zu:"))
        {
            myTables.Add(new Table(Convert.ToInt32(row[0]), row[1].ToString(), row[2].ToString(), row[3].ToString(), row[4].ToString()));
        }
    }
    foreach (DataRow row in dataFields.Rows)
    {
        // Attach each field to its owning table (matched by TABLEOBJECTID_I).
        var myTable = myTables.Find(table => table.Id == Convert.ToInt32(row[3]));
        if (myTable != null)
        {
            var myField = new Field(Convert.ToInt32(row[0]), row[1].ToString(),
                                    row[2].ToString(), myTable, Convert.ToInt32(row[3].ToString()));
            myTable.Fields.Add(myField);
            myFields.Add(myField);
        }
        // NOTE(review): re-sorting the table's field list after every row is
        // O(rows * n log n); sorting each table once after the loop would suffice.
        myTable?.Fields.Sort(fieldComparer);
    }
    foreach (DataRow row in dataRelations.Rows)
    {
        var myFieldFrom = myFields.Find(field => field.Id == Convert.ToInt32(row[1]));
        var myFieldTo = myFields.Find(field => field.Id == Convert.ToInt32(row[2]));
        // NOTE(review): HashSet<Field>.Add(null) is legal, so an unresolved id can
        // put null into relFields — confirm downstream consumers tolerate that.
        relFields.Add(myFieldFrom);
        relFields.Add(myFieldTo);
        if (myFieldFrom != null && myFieldTo != null)
        {
            //if(!myRelations.Exists(rel => rel.FromField.Id == myFieldTo.Id && rel.ToField.Id == myFieldFrom.Id))
            //	myRelations.Add(new Model.Relation(Convert.ToInt32(row[0]), myFieldFrom, myFieldTo, row[3].ToString()));
            // If the reverse relation already exists, record this row's type on it
            // instead of adding a duplicate edge.
            if (!myRelations.Exists(rel => rel.FromField.Id == myFieldTo.Id && rel.ToField.Id == myFieldFrom.Id))
            {
                myRelations.Add(new Relation(Convert.ToInt32(row[0]), myFieldFrom, myFieldTo, row[3].ToString().Trim()));
            }
            else
            {
                myRelations.Find(rel => rel.FromField.Id == myFieldTo.Id && rel.ToField.Id == myFieldFrom.Id).TypeTo = row[3].ToString().Trim();
            }
        }
    }
    return(new DBModel(dbName, myTables, myRelations, relFields));
}
/// <summary>
/// Wires up the complete change-detection pipeline — attribute, field, property,
/// method and type comparers — and returns the calculator that drives it.
/// </summary>
/// <param name="logger">Optional logger handed to every match processor.</param>
/// <returns>A fully composed <see cref="IChangeCalculator"/>.</returns>
public static IChangeCalculator BuildCalculator(ILogger? logger)
{
    // Attribute matching is shared by every member- and type-level comparer below.
    var attributeProcessor = new AttributeMatchProcessor(
        new AttributeEvaluator(),
        new AttributeComparer(),
        logger);

    // Comparers reused across several pipelines.
    var accessModifiersComparer = new AccessModifiersComparer(new AccessModifiersChangeTable());
    var genericTypeElementComparer = new GenericTypeElementComparer();

    // Field pipeline.
    var fieldComparer = new FieldComparer(
        accessModifiersComparer,
        new FieldModifiersComparer(new FieldModifiersChangeTable()),
        attributeProcessor);
    var fieldProcessor = new FieldMatchProcessor(new FieldEvaluator(), fieldComparer, logger);

    // Property pipeline, including accessor handling.
    var propertyAccessorComparer = new PropertyAccessorComparer(
        new PropertyAccessorAccessModifiersComparer(new PropertyAccessorAccessModifiersChangeTable()),
        attributeProcessor);
    var propertyAccessorProcessor = new PropertyAccessorMatchProcessor(
        new PropertyAccessorEvaluator(),
        propertyAccessorComparer,
        logger);
    var propertyComparer = new PropertyComparer(
        accessModifiersComparer,
        new PropertyModifiersComparer(new PropertyModifiersChangeTable()),
        propertyAccessorProcessor,
        attributeProcessor);
    var propertyProcessor = new PropertyMatchProcessor(new PropertyEvaluator(), propertyComparer, logger);

    // Method pipeline, including parameter handling.
    var parameterComparer = new ParameterComparer(
        new ParameterModifiersComparer(new ParameterModifiersChangeTable()),
        attributeProcessor);
    var methodComparer = new MethodComparer(
        accessModifiersComparer,
        new MethodModifiersComparer(new MethodModifiersChangeTable()),
        genericTypeElementComparer,
        parameterComparer,
        attributeProcessor);
    var methodProcessor = new MethodMatchProcessor(new MethodEvaluator(), methodComparer, logger);

    // Type-level comparers built on the member pipelines.
    var classComparer = new ClassComparer(
        accessModifiersComparer,
        new ClassModifiersComparer(new ClassModifiersChangeTable()),
        genericTypeElementComparer,
        fieldProcessor,
        propertyProcessor,
        methodProcessor,
        attributeProcessor);
    var interfaceComparer = new InterfaceComparer(
        accessModifiersComparer,
        genericTypeElementComparer,
        propertyProcessor,
        methodProcessor,
        attributeProcessor);
    var structComparer = new StructComparer(
        accessModifiersComparer,
        new StructModifiersComparer(new StructModifiersChangeTable()),
        genericTypeElementComparer,
        fieldProcessor,
        propertyProcessor,
        methodProcessor,
        attributeProcessor);

    var typeProcessor = new TypeMatchProcessor(
        new TypeEvaluator(),
        new AggregateTypeComparer(classComparer, interfaceComparer, structComparer),
        logger);

    return new ChangeCalculator(typeProcessor, logger);
}
/// <summary>
/// Creates a comparer source bound to a named field, delegating actual
/// comparisons to the supplied Lucene <see cref="FieldComparer"/>.
/// </summary>
/// <param name="fieldname">Name of the field this source compares on.</param>
/// <param name="luceneComparer">Underlying Lucene comparer.</param>
public LuceneCustomDocComparerSource(string fieldname, FieldComparer luceneComparer)
{
    this.m_luceneComparer = luceneComparer;
    this.m_fieldname = fieldname;
}