Example #1
        internal static void ExpandWildcardColumns(IDataExecutionPlanNode source, List<SelectColumn> columnSet, IDictionary<string, DataSource> dataSources, IDictionary<string, Type> parameterTypes)
        {
            // Expand any AllColumns
            if (columnSet.Any(col => col.AllColumns))
            {
                var schema = source.GetSchema(dataSources, parameterTypes);
                var expanded = new List<SelectColumn>();

                foreach (var col in columnSet)
                {
                    if (!col.AllColumns)
                    {
                        expanded.Add(col);
                        continue;
                    }

                    // Expand the wildcard to every matching schema column, e.g. "table.*" matches all columns prefixed "table."
                    foreach (var src in schema.Schema.Keys
                        .Where(k => col.SourceColumn == null || k.StartsWith(col.SourceColumn.Replace("*", ""), StringComparison.OrdinalIgnoreCase))
                        .OrderBy(k => k, StringComparer.OrdinalIgnoreCase))
                    {
                        expanded.Add(new SelectColumn
                        {
                            SourceColumn = src,
                            OutputColumn = src.Split('.').Last()
                        });
                    }
                }

                columnSet.Clear();
                columnSet.AddRange(expanded);
            }
        }
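
For reference, here is a minimal standalone sketch of the same prefix-matching expansion applied to concrete data. SimpleColumn and the flat schemaColumns array are hypothetical stand-ins for illustration, not the real Sql4Cds SelectColumn and schema types:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Hypothetical stand-in for SelectColumn, just enough for the demo
    class SimpleColumn
    {
        public string SourceColumn { get; set; }
        public string OutputColumn { get; set; }
        public bool AllColumns { get; set; }
    }

    static class WildcardDemo
    {
        static void Main()
        {
            // Stand-in schema: fully qualified column names as the method above sees them
            var schemaColumns = new[] { "account.name", "account.revenue", "contact.fullname" };

            // "account.*" should expand to the two account columns only
            var columnSet = new List<SimpleColumn>
            {
                new SimpleColumn { SourceColumn = "account.*", AllColumns = true }
            };

            var expanded = new List<SimpleColumn>();

            foreach (var col in columnSet)
            {
                if (!col.AllColumns)
                {
                    expanded.Add(col);
                    continue;
                }

                // Same prefix rule as above: strip the "*" and match case-insensitively
                var prefix = col.SourceColumn?.Replace("*", "");

                foreach (var src in schemaColumns
                    .Where(k => prefix == null || k.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
                    .OrderBy(k => k, StringComparer.OrdinalIgnoreCase))
                {
                    expanded.Add(new SimpleColumn { SourceColumn = src, OutputColumn = src.Split('.').Last() });
                }
            }

            foreach (var col in expanded)
                Console.WriteLine($"{col.SourceColumn} -> {col.OutputColumn}");
            // Prints:
            // account.name -> name
            // account.revenue -> revenue
        }
    }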
Example #2
        public override IDataExecutionPlanNode FoldQuery(IDictionary<string, DataSource> dataSources, IQueryExecutionOptions options, IDictionary<string, Type> parameterTypes, IList<OptimizerHint> hints)
        {
            // Fold the source subtree first so the rest of this method sees its final shape
            Source = Source.FoldQuery(dataSources, options, parameterTypes, hints);
            Source.Parent = this;

            // Remove any duplicated column names
            for (var i = Columns.Count - 1; i >= 0; i--)
            {
                if (Columns.IndexOf(Columns[i]) < i)
                {
                    Columns.RemoveAt(i);
                }
            }

            // If one of the fields to include in the DISTINCT calculation is the primary key, there is no possibility of
            // duplicate rows, so we can discard the distinct node
            var schema = Source.GetSchema(dataSources, parameterTypes);

            if (!String.IsNullOrEmpty(schema.PrimaryKey) && Columns.Contains(schema.PrimaryKey, StringComparer.OrdinalIgnoreCase))
            {
                return Source;
            }

            if (Source is FetchXmlScan fetch)
            {
                // FetchXML supports DISTINCT natively, so push the option down into the FetchXML query
                fetch.FetchXml.distinct = true;
                fetch.FetchXml.distinctSpecified = true;

                // Ensure there is a sort order applied to avoid paging issues
                if (fetch.Entity.Items == null || !fetch.Entity.Items.OfType<FetchOrderType>().Any())
                {
                    // Sort by each attribute. Make sure we only add one sort per attribute, taking virtual attributes
                    // into account (i.e. don't attempt to sort on statecode and statecodename)
                    var sortedAttributes = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

                    foreach (var column in Columns)
                    {
                        if (!schema.ContainsColumn(column, out var normalized))
                        {
                            continue;
                        }

                        if (!sortedAttributes.Add(normalized))
                        {
                            continue;
                        }

                        var parts = normalized.Split('.');
                        if (parts.Length != 2)
                        {
                            continue;
                        }

                        if (parts[0].Equals(fetch.Alias, StringComparison.OrdinalIgnoreCase))
                        {
                            // Column comes from the root entity - add the sort there
                            var attr = dataSources[fetch.DataSource].Metadata[fetch.Entity.name].Attributes
                                .SingleOrDefault(a => a.LogicalName.Equals(parts[1], StringComparison.OrdinalIgnoreCase));

                            if (attr == null)
                            {
                                continue;
                            }

                            if (attr.AttributeOf != null && !sortedAttributes.Add(parts[0] + "." + attr.AttributeOf))
                            {
                                continue;
                            }

                            fetch.Entity.AddItem(new FetchOrderType { attribute = parts[1] });
                        }
                        else
                        {
                            // Column comes from a link-entity - add the sort within that link-entity
                            var linkEntity = fetch.Entity.FindLinkEntity(parts[0]);
                            var attr = dataSources[fetch.DataSource].Metadata[linkEntity.name].Attributes
                                .SingleOrDefault(a => a.LogicalName.Equals(parts[1], StringComparison.OrdinalIgnoreCase));

                            if (attr == null)
                            {
                                continue;
                            }

                            if (attr.AttributeOf != null && !sortedAttributes.Add(parts[0] + "." + attr.AttributeOf))
                            {
                                continue;
                            }

                            linkEntity.AddItem(new FetchOrderType { attribute = parts[1] });
                        }
                    }
                }

                return fetch;
            }

            // If the data is already sorted by all the distinct columns we can use a stream aggregate instead.
            // We don't mind what order the columns are sorted in though, so long as the distinct columns form a
            // prefix of the sort order.
            var requiredSorts = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

            foreach (var col in Columns)
            {
                if (!schema.ContainsColumn(col, out var column))
                {
                    return this;
                }

                requiredSorts.Add(column);
            }

            if (!schema.IsSortedBy(requiredSorts))
            {
                return this;
            }

            var aggregate = new StreamAggregateNode { Source = Source };

            Source.Parent = aggregate;

            // The distinct columns form a prefix of the sort order, so group by those leading sort columns
            for (var i = 0; i < requiredSorts.Count; i++)
            {
                aggregate.GroupBy.Add(schema.SortOrder[i].ToColumnReference());
            }

            return aggregate;
        }
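
The stream-aggregate rewrite at the end hinges on the "distinct columns form a prefix of the sort order" test. Below is a minimal standalone sketch of that check using plain string collections; IsSortedBy here is a simplified stand-in modelled on the comment above, not the real schema helper:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class SortPrefixDemo
    {
        // Returns true if the distinct columns occupy the first distinctColumns.Count positions
        // of the sort order (in any order among themselves), so duplicate rows arrive consecutively.
        static bool IsSortedBy(IReadOnlyList<string> sortOrder, ISet<string> distinctColumns)
        {
            if (distinctColumns.Count > sortOrder.Count)
                return false;

            return sortOrder.Take(distinctColumns.Count).All(distinctColumns.Contains);
        }

        static void Main()
        {
            var sortOrder = new[] { "account.name", "account.accountid" };
            var distinct = new HashSet<string>(StringComparer.OrdinalIgnoreCase) { "account.name" };

            // True: the only distinct column is the leading sort column, so duplicates are
            // adjacent and a stream aggregate can eliminate them as it reads the input.
            Console.WriteLine(IsSortedBy(sortOrder, distinct));

            distinct.Add("account.createdon");

            // False: createdon is not within the sorted prefix, so duplicates may be non-adjacent
            // and the distinct node has to stay.
            Console.WriteLine(IsSortedBy(sortOrder, distinct));
        }
    }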