// <summary>
// Creates a Table instance over the given table metadata, materializing a
// ColumnVar per column and recording the key and non-nullable column sets.
// </summary>
// <param name="command"> current command; acts as the Var factory </param>
// <param name="tableMetadata"> metadata describing the table's columns and keys </param>
// <param name="tableId"> unique id for this table instance </param>
internal Table(Command command, TableMD tableMetadata, int tableId)
{
    m_tableMetadata = tableMetadata;
    m_columns = Command.CreateVarList();
    m_keys = command.CreateVarVec();
    m_nonnullableColumns = command.CreateVarVec();
    m_tableId = tableId;

    // Map column name -> ColumnVar so the key loop below can resolve
    // key columns by name.
    var columnVarMap = new Dictionary<string, ColumnVar>();
    foreach (var c in tableMetadata.Columns)
    {
        // NOTE(review): presumably CreateColumnVar also registers the var on
        // this table (this is passed in) so m_columns gets populated — confirm
        // against Command.CreateColumnVar.
        var v = command.CreateColumnVar(this, c);
        columnVarMap[c.Name] = v;
        if (!c.IsNullable)
        {
            m_nonnullableColumns.Set(v);
        }
    }
    foreach (var c in tableMetadata.Keys)
    {
        var v = columnVarMap[c.Name];
        m_keys.Set(v);
    }

    // Initially, every column counts as referenced.
    m_referencedColumns = command.CreateVarVec(m_columns);
}
// <summary>
// Simple initializing constructor: captures the collection var, its column
// map, the flattened element vars, keys, sort keys and discriminator value.
// </summary>
internal CollectionInfo(
    Var collectionVar, ColumnMap columnMap, VarList flattenedElementVars, VarVec keys, List<SortKey> sortKeys,
    object discriminatorValue)
{
    m_columnMap = columnMap;
    m_collectionVar = collectionVar;
    m_keys = keys;
    m_flattenedElementVars = flattenedElementVars;
    m_sortKeys = sortKeys;
    m_discriminatorValue = discriminatorValue;
}
// <summary>
// Clone the given subtree, and also produce the ordered list of copied Vars
// that correspond (positionally) to the Vars in "varList".
// </summary>
// <param name="cmd"> current command </param>
// <param name="node"> the node to clone </param>
// <param name="varList"> Vars to translate into their copies </param>
// <param name="newVarList"> receives the copied Vars, in the same order </param>
// <returns> the cloned node </returns>
internal static Node Copy(Command cmd, Node node, VarList varList, out VarList newVarList)
{
    VarMap varMap;
    var clonedNode = Copy(cmd, node, out varMap);

    // Translate each requested Var through the old->new map, preserving order.
    newVarList = Command.CreateVarList();
    foreach (var oldVar in varList)
    {
        newVarList.Add(varMap[oldVar]);
    }
    return clonedNode;
}
// <summary>
// Convert a SingleStreamNestOp into a massive UnionAllOp: the driving node is
// applied (outer-apply for the first collection, cross-apply for the rest) to
// each nested collection, and each branch projects the discriminator column,
// the driving columns, the current collection's columns, and nulls for every
// other collection's columns.
// </summary>
// <param name="nestOp"> the nest op being converted </param>
// <param name="nestNode"> subtree rooted at the nest op; Child0 is the driver </param>
// <param name="drivingNodeVars"> ordered vars produced by the driving node </param>
// <param name="discriminatorVarList"> per-collection discriminator vars (index 0 is a placeholder) </param>
// <param name="discriminatorVar"> receives the discriminator var of the union-all output </param>
// <param name="varMapList"> receives one old-to-new var map per input (driver first) </param>
private Node BuildUnionAllSubqueryForNestOp(
    NestBaseOp nestOp, Node nestNode, VarList drivingNodeVars, VarList discriminatorVarList, out Var discriminatorVar,
    out List<Dictionary<Var, Var>> varMapList)
{
    var drivingNode = nestNode.Child0;

    // For each of the NESTED collections...
    Node unionAllNode = null;
    VarList unionAllOutputs = null;
    for (var i = 1; i < nestNode.Children.Count; i++)
    {
        // Ensure we only use the driving collection tree once, so other
        // transformations do not unintentionally change more than one path.
        // To prevent nodes in the tree from being used in multiple paths,
        // we copy the driving input on successive nodes.
        VarList newDrivingNodeVars;
        Node newDrivingNode;
        VarList newFlattenedElementVars;
        Op op;

        if (i > 1)
        {
            newDrivingNode = OpCopier.Copy(Command, drivingNode, drivingNodeVars, out newDrivingNodeVars);
            //
            // Bug 450245: If we copied the driver node, then references to driver node vars
            // from the collection subquery must be patched up
            //
            var varRemapper = new VarRemapper(Command);
            for (var j = 0; j < drivingNodeVars.Count; j++)
            {
                varRemapper.AddMapping(drivingNodeVars[j], newDrivingNodeVars[j]);
            }
            // Remap all references in the current subquery
            varRemapper.RemapSubtree(nestNode.Children[i]);

            // Bug 479183: Remap the flattened element vars
            newFlattenedElementVars = varRemapper.RemapVarList(nestOp.CollectionInfo[i - 1].FlattenedElementVars);

            // Create a cross apply for all but the first collection
            op = Command.CreateCrossApplyOp();
        }
        else
        {
            newDrivingNode = drivingNode;
            newDrivingNodeVars = drivingNodeVars;
            newFlattenedElementVars = nestOp.CollectionInfo[i - 1].FlattenedElementVars;

            // Create an outer apply for the first collection,
            // that way we ensure at least one row for each row in the driver node.
            op = Command.CreateOuterApplyOp();
        }

        // Create an apply (outer or cross, chosen above) with the driver node
        // and the nested collection.
        var applyNode = Command.CreateNode(op, newDrivingNode, nestNode.Children[i]);

        // Now create a ProjectOp that augments the output from the apply
        // with nulls for each column from other collections.

        // Build the VarDefList (the list of vars) for the Project, starting
        // with the collection discriminator var
        var varDefListChildren = new List<Node>();
        var projectOutputs = Command.CreateVarList();

        // Add the collection discriminator var to the output.
        projectOutputs.Add(discriminatorVarList[i]);

        // Add all columns from the driving node
        projectOutputs.AddRange(newDrivingNodeVars);

        // Add all the vars from all the nested collections;
        for (var j = 1; j < nestNode.Children.Count; j++)
        {
            var otherCollectionInfo = nestOp.CollectionInfo[j - 1];
            // For the current nested collection, we just pick the var that's
            // coming from there and don't need have a new var defined, but for
            // the rest we construct null values.
            if (i == j)
            {
                projectOutputs.AddRange(newFlattenedElementVars);
            }
            else
            {
                foreach (var v in otherCollectionInfo.FlattenedElementVars)
                {
                    var nullOp = Command.CreateNullOp(v.Type);
                    var nullOpNode = Command.CreateNode(nullOp);
                    Var nullOpVar;
                    var nullOpVarDefNode = Command.CreateVarDefNode(nullOpNode, out nullOpVar);
                    varDefListChildren.Add(nullOpVarDefNode);
                    projectOutputs.Add(nullOpVar);
                }
            }
        }

        var varDefListNode = Command.CreateNode(Command.CreateVarDefListOp(), varDefListChildren);

        // Now, build up the projectOp
        var projectOutputsVarSet = Command.CreateVarVec(projectOutputs);
        var projectOp = Command.CreateProjectOp(projectOutputsVarSet);
        var projectNode = Command.CreateNode(projectOp, applyNode, varDefListNode);

        // finally, build the union all
        if (unionAllNode == null)
        {
            // First branch: nothing to union with yet.
            unionAllNode = projectNode;
            unionAllOutputs = projectOutputs;
        }
        else
        {
            var unionAllMap = new VarMap();
            var projectMap = new VarMap();
            for (var idx = 0; idx < unionAllOutputs.Count; idx++)
            {
                Var outputVar = Command.CreateSetOpVar(unionAllOutputs[idx].Type);
                unionAllMap.Add(outputVar, unionAllOutputs[idx]);
                projectMap.Add(outputVar, projectOutputs[idx]);
            }
            var unionAllOp = Command.CreateUnionAllOp(unionAllMap, projectMap);
            unionAllNode = Command.CreateNode(unionAllOp, unionAllNode, projectNode);

            // Get the output vars from the union-op. This must be in the same order
            // as the original list of Vars
            unionAllOutputs = GetUnionOutputs(unionAllOp, unionAllOutputs);
        }
    }

    // We're done building the node, but now we have to build a mapping from
    // the before-Vars to the after-Vars
    varMapList = new List<Dictionary<Var, Var>>();
    IEnumerator<Var> outputVarsEnumerator = unionAllOutputs.GetEnumerator();
    if (!outputVarsEnumerator.MoveNext())
    {
        throw EntityUtil.InternalError(EntityUtil.InternalErrorCode.ColumnCountMismatch, 4, null);
        // more columns from children than are on the unionAll?
    }

    // The discriminator var is always first
    discriminatorVar = outputVarsEnumerator.Current;

    // Build a map for each input: the union-all outputs are consumed in the
    // same order they were produced (driver vars first, then each collection).
    for (var i = 0; i < nestNode.Children.Count; i++)
    {
        var varMap = new Dictionary<Var, Var>();
        var varList = (i == 0) ? drivingNodeVars : nestOp.CollectionInfo[i - 1].FlattenedElementVars;
        foreach (var v in varList)
        {
            if (!outputVarsEnumerator.MoveNext())
            {
                throw EntityUtil.InternalError(EntityUtil.InternalErrorCode.ColumnCountMismatch, 5, null);
                // more columns from children than are on the unionAll?
            }
            varMap[v] = outputVarsEnumerator.Current;
        }
        varMapList.Add(varMap);
    }
    if (outputVarsEnumerator.MoveNext())
    {
        throw EntityUtil.InternalError(EntityUtil.InternalErrorCode.ColumnCountMismatch, 6, null);
        // at this point, we better be done with both lists...
    }
    return unionAllNode;
}
// <summary>
// "Normalize" each input to the NestOp.
// We're now in the context of a MultiStreamNestOp, and we're trying to convert this
// into a SingleStreamNestOp.
// Normalization specifically refers to
// - augmenting each input with a discriminator value (that describes the collection)
// - removing the sort node at the root (and capturing this information as part of the sortkeys)
// Note: this mutates nestNode.Children in place (each collection input is
// replaced by its augmented form).
// </summary>
// <param name="nestOp"> the nestOp </param>
// <param name="nestNode"> the nestOp subtree </param>
// <param name="discriminatorVarList"> Discriminator Vars for each Collection input </param>
// <param name="sortKeys"> SortKeys (postfix) for each Collection input </param>
private void NormalizeNestOpInputs(
    NestBaseOp nestOp, Node nestNode, out VarList discriminatorVarList, out List<List<SortKey>> sortKeys)
{
    discriminatorVarList = Command.CreateVarList();

    // We insert a dummy var and value at position 0 for the driving node, which
    // we should never reference;
    discriminatorVarList.Add(null);

    sortKeys = new List<List<SortKey>>();
    sortKeys.Add(nestOp.PrefixSortKeys);

    for (var i = 1; i < nestNode.Children.Count; i++)
    {
        var inputNode = nestNode.Children[i];

        // Since we're called from ConvertToSingleStreamNest, it is possible that we have a
        // SingleStreamNest here, because the input to the MultiStreamNest we're converting
        // may have been a MultiStreamNest that was converted to a SingleStreamNest.
        var ssnOp = inputNode.Op as SingleStreamNestOp;

        // If this collection is a SingleStreamNest, we pull up the key information
        // in it, and pullup the input;
        if (null != ssnOp)
        {
            // Note that the sortKeys argument is 1:1 with the nestOp inputs, that is
            // each input may have exactly one entry in the list, so we have to combine
            // all of the sort key components (Prefix+Keys+Discriminator+PostFix) into
            // one list.
            var mySortKeys = BuildSortKeyList(ssnOp);
            sortKeys.Add(mySortKeys);
            inputNode = inputNode.Child0;
        }
        else
        {
            // If the current collection has a SortNode specified, then pull that
            // out, and add the information to the list of postfix SortColumns
            var sortOp = inputNode.Op as SortOp;
            if (null != sortOp)
            {
                inputNode = inputNode.Child0; // bypass the sort node
                // Add the sort keys to the list of postfix sort keys
                sortKeys.Add(sortOp.Keys);
            }
            else
            {
                // No postfix sort keys for this case
                sortKeys.Add(new List<SortKey>());
            }
        }

        // #447304: Ensure that any SortKey Vars will be projected from the input in addition to showing up in the postfix sort keys
        // by adding them to the FlattenedElementVars for this NestOp input's CollectionInfo.
        var flattenedElementVars = nestOp.CollectionInfo[i - 1].FlattenedElementVars;
        foreach (var sortKey in sortKeys[i])
        {
            if (!flattenedElementVars.Contains(sortKey.Var))
            {
                flattenedElementVars.Add(sortKey.Var);
            }
        }

        // Add a discriminator column to the collection-side - this must
        // happen before the outer-apply is added on; we need to use the value of
        // the discriminator to distinguish between null and empty collections
        Var discriminatorVar;
        var augmentedInput = AugmentNodeWithInternalIntegerConstant(inputNode, i, out discriminatorVar);
        nestNode.Children[i] = augmentedInput;
        discriminatorVarList.Add(discriminatorVar);
    }
}
/// <summary>
/// Build up a new varlist, where each structured var has been replaced by its
/// corresponding flattened vars
/// </summary>
/// <param name="varList"> the varlist to flatten </param>
/// <returns> the new flattened varlist </returns>
private VarList FlattenVarList(VarList varList)
{
    return Command.CreateVarList(FlattenVars(varList));
}
/// <summary>
/// Another overload - with an additional discriminatorValue.
/// Should this be a subtype instead?
/// </summary>
/// <param name="collectionVar">the collectionVar</param>
/// <param name="columnMap">column map for the collection element</param>
/// <param name="flattenedElementVars">elementVars with any nested collections pulled up</param>
/// <param name="keys">keys specific to this collection</param>
/// <param name="sortKeys">sort keys specific to this collection</param>
/// <param name="discriminatorValue">discriminator value for this collection (under the current nestOp)</param>
/// <returns>a new CollectionInfo instance</returns>
internal static CollectionInfo CreateCollectionInfo(
    Var collectionVar, ColumnMap columnMap, VarList flattenedElementVars, VarVec keys,
    List<InternalTrees.SortKey> sortKeys, object discriminatorValue)
{
    var info = new CollectionInfo(collectionVar, columnMap, flattenedElementVars, keys, sortKeys, discriminatorValue);
    return info;
}
// <summary>
// Create a mapped copy of the input VarList - each var from the input varlist
// is represented by its mapped var (and in exactly the same order) in the output
// varlist
// </summary>
// <param name="varList"> varList to map </param>
// <returns> mapped varlist </returns>
private VarList Copy(VarList varList)
{
    return Command.CreateVarList(MapVars(varList));
}
// Script function: HTTP GET.
// First argument: the URL to fetch. Optional second argument: an encoding
// name to decode the response with.
static string _get(string s, VarList vars)
{
    string url = Evaluate(ref s, vars);
    if (string.IsNullOrEmpty(s))
    {
        // No encoding argument supplied.
        return Web.HttpGet(url);
    }
    return Web.HttpGet(url, Encoding.GetEncoding(Evaluate(ref s, vars)));
}
// Script function: composite formatting.
// First argument is the format string; all remaining arguments are the values
// substituted into it.
static string _format(string s, VarList vars)
{
    string fmt = Evaluate(ref s, vars);
    var values = new ArrayList();
    while (!string.IsNullOrEmpty(s))
    {
        values.Add(Evaluate(ref s, vars));
    }
    return string.Format(fmt, values.ToArray());
}
// Script function: URL-encode the evaluated argument.
static string _encode(string s, VarList vars)
{
    string value = Evaluate(ref s, vars);
    return HttpUtility.UrlEncode(value);
}
// Script function: decode a string.
// With one argument: project default decode (Util.Decode). With a second
// argument: URL-decode using that encoding name.
static string _decode(string s, VarList vars)
{
    string value = Evaluate(ref s, vars);
    if (string.IsNullOrEmpty(s))
    {
        return Util.Decode(value);
    }
    return HttpUtility.UrlDecode(value, Encoding.GetEncoding(Evaluate(ref s, vars)));
}
// Evaluates the next token at the head of "s" (consuming it via ref) and
// returns its string value:
//   Const    -> the literal with escape sequences (\" \\ \r \n \t) expanded
//   Variable -> the variable's current value from "vars"
//   Function -> the result of dispatching to the named script function
// Returns string.Empty when no token could be read or the token type is
// unrecognized (the original throwing behavior is left commented out below).
static string Evaluate(ref string s, VarList vars)
{
    TokenType type;
    //string ss = s;
    string token = GetToken(ref s, out type);
    if (!string.IsNullOrEmpty(token))
    {
        switch (type)
        {
            case TokenType.Const:
                // Expand the escape sequences supported by the script syntax.
                return token.Replace("\\\"", "\"").Replace("\\\\", "\\").Replace("\\r", "\r").Replace("\\n", "\n").Replace("\\t", "\t");
            case TokenType.Variable:
                return vars[token];
            case TokenType.Function:
                return CallFunction(token, vars);
        }
    }
    // Deliberately tolerant: a bad token yields an empty string rather than
    // an exception (see disabled code below).
    //throw new Exception(string.Format("[Script] Get token fail: \"{0}\"", ss));
    return string.Empty;
}
// Parses "name(args..." at the head of "s", strips the function name and the
// opening parenthesis, and dispatches to the registered handler in "funcs"
// with the remaining argument text.
// Throws when the function name is not registered.
// Fix: corrected the misspelled error message ("Unkown" -> "Unknown").
static string CallFunction(string s, VarList vars)
{
    int i = s.IndexOf('(');
    string fname = s.Remove(i).Trim();
    s = s.Substring(i + 1);
    // NOTE(review): this looks up "funcs" twice; if funcs is a generic
    // Dictionary, TryGetValue would avoid the double lookup.
    if (funcs.ContainsKey(fname))
    {
        return funcs[fname](s, vars);
    }
    throw new Exception("[Script] Unknown function: " + fname);
}
/// <summary>
/// Remap the given varList using the given varMap
/// </summary>
/// <param name="command">current command</param>
/// <param name="varMap">mapping from old Vars to new Vars</param>
/// <param name="varList">the varList to remap</param>
/// <returns>the remapped varList</returns>
internal static VarList RemapVarList(Command command, Dictionary<Var, Var> varMap, VarList varList)
{
    return new VarRemapper(command, varMap).RemapVarList(varList);
}
// Remaps the given varList in place: builds the mapped list first, then swaps
// the contents so the caller's VarList instance is the one updated.
private void Map(VarList varList)
{
    var remapped = Command.CreateVarList(MapVars(varList));
    varList.Clear();
    varList.AddRange(remapped);
}
// Script function: substring whose start marker is located from the right
// (LastIndexOf) when it is not a numeric index. See __substr.
static string _lsubstr(string s, VarList vars)
{
    const bool searchFromLeft = false;
    return __substr(s, vars, searchFromLeft);
}
// Script function: scan a page for repeated "start...end" delimited numeric
// values, remember which occurrence held the largest number, then re-scan with
// a second pair of delimiters and return the item at that same occurrence
// index. The winning value is also published as vars["prefer"].
// NOTE(review): the index variables i/j/n are deliberately reused and
// overwritten (j is even clobbered by Int32.TryParse) — do not "clean up"
// without tracing both loops carefully.
static string _prefer(string s, VarList vars)
{
    string ss = Evaluate(ref s, vars);
    string start, end;
    int i, j, n, max, max_n;
    bool not_http;

    // find the best index: parse the number between each start/end pair and
    // remember the ordinal (max_n) of the largest one.
    start = Evaluate(ref s, vars);
    end = Evaluate(ref s, vars);
    max = 0;
    max_n = 0;
    i = 0;
    j = 0;
    n = 0;
    while (ss.IndexOf(start, i, StringComparison.Ordinal) >= 0)
    {
        i = ss.IndexOf(start, i, StringComparison.Ordinal) + start.Length;
        j = ss.IndexOf(end, i, StringComparison.Ordinal);
        // j is intentionally overwritten with the parsed value (0 on failure).
        Int32.TryParse(ss.Substring(i, j - i), out j);
        if (j > max)
        {
            max = j;
            max_n = n;
        }
        n++;
    }
    vars["prefer"] = max.ToString();

    // pick the prefer item: re-scan with a second delimiter pair and stop at
    // occurrence max_n.
    start = Evaluate(ref s, vars);
    end = Evaluate(ref s, vars);
    // For URL-like markers, keep the marker (the scheme) in the result.
    not_http = !start.StartsWith("http", StringComparison.Ordinal);
    i = 0;
    j = 0;
    n = 0;
    while (ss.IndexOf(start, i, StringComparison.Ordinal) >= 0)
    {
        // NOTE(review): this search restarts from j (the previous end), not i.
        i = ss.IndexOf(start, j, StringComparison.Ordinal);
        if (not_http)
        {
            i += start.Length;
        }
        j = ss.IndexOf(end, i, StringComparison.Ordinal);
        if (n == max_n)
        {
            break;
        }
        n++;
    }
    return ss.Substring(i, j - i);
}
// Script function: string replacement.
// C# evaluates arguments left-to-right, so these three reads consume the
// script arguments in order: subject, search text, replacement text.
static string _replace(string s, VarList vars)
{
    string subject = Evaluate(ref s, vars);
    string oldValue = Evaluate(ref s, vars);
    string newValue = Evaluate(ref s, vars);
    return subject.Replace(oldValue, newValue);
}
/// <summary>
/// Create a PhysicalProjectOp - with a columnMap describing the output
/// </summary>
/// <param name="outputVars">list of output vars</param>
/// <param name="columnMap">columnmap describing the output element</param>
/// <returns>a new PhysicalProjectOp over the given outputs</returns>
internal PhysicalProjectOp CreatePhysicalProjectOp(VarList outputVars, SimpleCollectionColumnMap columnMap)
{
    return new PhysicalProjectOp(outputVars, columnMap);
}
// Script function: substring whose start marker is located from the left
// (IndexOf) when it is not a numeric index. See __substr.
static string _substr(string s, VarList vars)
{
    const bool searchFromLeft = true;
    return __substr(s, vars, searchFromLeft);
}
/// <summary>
/// Does the given VarList overlap with the given VarVec
/// </summary>
/// <param name="listToCheck">the list whose membership is probed</param>
/// <param name="vars">the vec providing the candidate vars</param>
/// <returns>true if at least one var appears in both</returns>
private static bool HasVarReferences(VarList listToCheck, VarVec vars)
{
    foreach (var var in vars)
    {
        if (listToCheck.Contains(var))
        {
            return true;
        }
    }
    return false;
}
// Shared implementation for _substr/_lsubstr.
// Arguments (consumed from "s" via Evaluate):
//   1. source string
//   2. start marker: either a numeric index, or a substring to locate
//   3. optional end marker: either a numeric length, or a substring to locate
// When the start marker is a substring, "substr" selects IndexOf (true) or
// LastIndexOf (false). Returns null when the start marker is not found.
static string __substr(string s, VarList vars, bool substr)
{
    string ss = Evaluate(ref s, vars);
    string start = Evaluate(ref s, vars);
    int i;
    if (!Int32.TryParse(start, out i))
    {
        // Start marker is a search string, not an index.
        i = substr ? ss.IndexOf(start, StringComparison.Ordinal) : ss.LastIndexOf(start, StringComparison.Ordinal);
        if (i < 0)
        {
            return null;
        }
        // For non-URL markers, skip past the marker itself; for "http..."
        // markers the scheme is kept in the result — presumably so URLs come
        // back intact. TODO(review): confirm with callers.
        if (!start.StartsWith("http", StringComparison.Ordinal))
        {
            i += start.Length;
        }
    }
    if (!string.IsNullOrEmpty(s))
    {
        int j;
        string end = Evaluate(ref s, vars);
        if (Int32.TryParse(end, out j))
        {
            // Numeric third argument is treated as a length.
            return ss.Substring(i, j);
        }
        j = ss.IndexOf(end, i, StringComparison.Ordinal);
        if (j >= 0)
        {
            return ss.Substring(i, j - i);
        }
    }
    // No end marker (or it was not found): take everything from the start.
    return ss.Substring(i);
}
// <summary>
// convert MultiStreamNestOp to SingleStreamNestOp
// </summary>
// <remarks>
// A MultiStreamNestOp is typically of the form M(D, N1, N2, ..., Nk)
// where D is the driver stream, and N1, N2 etc. represent the collections.
// In general, this can be converted into a SingleStreamNestOp over:
// (D+ outerApply N1) AugmentedUnionAll (D+ outerApply N2) ...
// Where:
// D+ is D with an extra discriminator column that helps to identify
// the specific collection.
// AugmentedUnionAll is simply a unionAll where each branch of the
// unionAll is augmented with nulls for the corresponding columns
// of other tables in the branch
// The simple case where there is only a single nested collection is easier
// to address, and can be represented by:
// MultiStreamNest(D, N1) => SingleStreamNest(OuterApply(D, N1))
// The more complex case, where there is more than one nested column, requires
// quite a bit more work:
// MultiStreamNest(D, X, Y,...) => SingleStreamNest(UnionAll(Project{"1", D1...Dn, X1...Xn, nY1...nYn}(OuterApply(D, X)), Project{"2", D1...Dn, nX1...nXn, Y1...Yn}(OuterApply(D, Y)), ...))
// Where:
// D is the driving collection
// D1...Dn are the columns from the driving collection
// X is the first nested collection
// X1...Xn are the columns from the first nested collection
// nX1...nXn are null values for all columns from the first nested collection
// Y is the second nested collection
// Y1...Yn are the columns from the second nested collection
// nY1...nYn are null values for all columns from the second nested collection
// </remarks>
private Node ConvertToSingleStreamNest(
    Node nestNode, Dictionary<Var, ColumnMap> varRefReplacementMap, VarList flattenedOutputVarList,
    out SimpleColumnMap[] parentKeyColumnMaps)
{
#if DEBUG
    var input = Dump.ToXml(nestNode);
#endif //DEBUG
    var nestOp = (MultiStreamNestOp)nestNode.Op;

    // We can't convert this node to a SingleStreamNest until all it's MultiStreamNest
    // inputs are converted, so do that first (recursively).
    for (var i = 1; i < nestNode.Children.Count; i++)
    {
        var chi = nestNode.Children[i];
        if (chi.Op.OpType == OpType.MultiStreamNest)
        {
            var chiCi = nestOp.CollectionInfo[i - 1];
            var childFlattenedOutputVars = Command.CreateVarList();
            SimpleColumnMap[] childKeyColumnMaps;

            nestNode.Children[i] = ConvertToSingleStreamNest(
                chi, varRefReplacementMap, childFlattenedOutputVars, out childKeyColumnMaps);

            // Now this may seem odd here, and it may look like we should have done this
            // inside the recursive ConvertToSingleStreamNest call above, but that call
            // doesn't have access to the CollectionInfo for it's parent, which is what
            // we need to manipulate before we enter the loop below where we try and fold
            // THIS nestOp nodes into a singleStreamNestOp.
            var childColumnMap = ColumnMapTranslator.Translate(chiCi.ColumnMap, varRefReplacementMap);

            var childKeys = Command.CreateVarVec(((SingleStreamNestOp)nestNode.Children[i].Op).Keys);

            nestOp.CollectionInfo[i - 1] = Command.CreateCollectionInfo(
                chiCi.CollectionVar,
                childColumnMap,
                childFlattenedOutputVars,
                childKeys,
                chiCi.SortKeys,
                null /*discriminatorValue*/
                );
        }
    }

    // Make sure that the driving node has keys defined. Otherwise we're in
    // trouble; we must be able to infer keys from the driving node.
    var drivingNode = nestNode.Child0;
    var drivingNodeKeys = Command.PullupKeys(drivingNode);
    if (drivingNodeKeys.NoKeys)
    {
        // ALMINEEV: In this case we used to wrap drivingNode into a projection that would also project Edm.NewGuid() thus giving us a synthetic key.
        // This solution did not work however due to a bug in SQL Server that allowed pulling non-deterministic functions above joins and applies, thus
        // producing incorrect results. SQL Server bug was filed in "sqlbuvsts01\Sql Server" database as #725272.
        // The only known path how we can get a keyless drivingNode is if
        // - drivingNode is over a TVF call
        // - TVF is declared as Collection(Row) is SSDL (the only form of TVF definitions at the moment)
        // - TVF is not mapped to entities
        // Note that if TVF is mapped to entities via function import mapping, and the user query is actually the call of the
        // function import, we infer keys for the TVF from the c-space entity keys and their mappings.
        throw new NotSupportedException(Strings.ADP_KeysRequiredForNesting);
    }

    // Get a deterministic ordering of Vars from this node.
    // NOTE: we're using the drivingNode's definitions, which is a VarVec so it
    // won't match the order of the input's columns, but the key thing is
    // that we use the same order for all nested children, so it's OK.
    var drivingNodeInfo = Command.GetExtendedNodeInfo(drivingNode);
    var drivingNodeVarVec = drivingNodeInfo.Definitions;
    var drivingNodeVars = Command.CreateVarList(drivingNodeVarVec);

    // Normalize all collection inputs to the nestOp. Specifically, remove any
    // SortOps (adding the sort keys to the postfix sortkey list). Additionally,
    // add a discriminatorVar to each collection child
    VarList discriminatorVarList;
    List<List<SortKey>> postfixSortKeyList;
    NormalizeNestOpInputs(nestOp, nestNode, out discriminatorVarList, out postfixSortKeyList);

    // Now build up the union-all subquery
    List<Dictionary<Var, Var>> varMapList;
    Var outputDiscriminatorVar;
    var unionAllNode = BuildUnionAllSubqueryForNestOp(
        nestOp, nestNode, drivingNodeVars, discriminatorVarList, out outputDiscriminatorVar, out varMapList);
    var drivingNodeVarMap = varMapList[0];

    // OK. We've finally created the UnionAll over each of the project/outerApply
    // combinations. We know that the output columns will be:
    //
    //      Discriminator, DrivingColumns, Collection1Columns, Collection2Columns, ...
    //
    // Now, rebuild the columnMaps, since all of the columns in the original column
    // maps are now referencing newer variables. To do that, we'll walk the list of
    // outputs from the unionAll, and construct new VarRefColumnMaps for each one,
    // and adding it to a ColumnMapPatcher, which we'll use to actually fix everything
    // up.
    //
    // While we're at it, we'll build a new list of top-level output columns, which
    // should include only the Discriminator, the columns from the driving collection,
    // and one column for each of the nested collections.

    // Start building the flattenedOutputVarList that the top level PhysicalProjectOp
    // is to output.
    flattenedOutputVarList.AddRange(RemapVars(drivingNodeVars, drivingNodeVarMap));

    var flattenedOutputVarVec = Command.CreateVarVec(flattenedOutputVarList);
    var nestOpOutputs = Command.CreateVarVec(flattenedOutputVarVec);

    // Add any adjustments to the driving nodes vars to the column map patcher
    foreach (var kv in drivingNodeVarMap)
    {
        if (kv.Key != kv.Value)
        {
            varRefReplacementMap[kv.Key] = new VarRefColumnMap(kv.Value);
        }
    }

    RemapSortKeys(nestOp.PrefixSortKeys, drivingNodeVarMap);

    var newPostfixSortKeys = new List<SortKey>();
    var newCollectionInfoList = new List<CollectionInfo>();

    // Build the discriminator column map, and ensure it's in the outputs
    var discriminatorColumnMap = new VarRefColumnMap(outputDiscriminatorVar);
    nestOpOutputs.Set(outputDiscriminatorVar);

    if (!flattenedOutputVarVec.IsSet(outputDiscriminatorVar))
    {
        flattenedOutputVarList.Add(outputDiscriminatorVar);
        flattenedOutputVarVec.Set(outputDiscriminatorVar);
    }

    // Build the key column maps, and ensure they're in the outputs as well.
    var parentKeys = RemapVarVec(drivingNodeKeys.KeyVars, drivingNodeVarMap);
    parentKeyColumnMaps = new SimpleColumnMap[parentKeys.Count];

    var index = 0;
    foreach (var keyVar in parentKeys)
    {
        parentKeyColumnMaps[index] = new VarRefColumnMap(keyVar);
        index++;

        if (!flattenedOutputVarVec.IsSet(keyVar))
        {
            flattenedOutputVarList.Add(keyVar);
            flattenedOutputVarVec.Set(keyVar);
        }
    }

    // Now that we've handled the driving node, deal with each of the
    // nested inputs, in sequence.
    for (var i = 1; i < nestNode.Children.Count; i++)
    {
        var ci = nestOp.CollectionInfo[i - 1];
        var postfixSortKeys = postfixSortKeyList[i];

        RemapSortKeys(postfixSortKeys, varMapList[i]);
        newPostfixSortKeys.AddRange(postfixSortKeys);

        var newColumnMap = ColumnMapTranslator.Translate(ci.ColumnMap, varMapList[i]);
        var newFlattenedElementVars = RemapVarList(ci.FlattenedElementVars, varMapList[i]);
        var newCollectionKeys = RemapVarVec(ci.Keys, varMapList[i]);

        RemapSortKeys(ci.SortKeys, varMapList[i]);

        // Note: the discriminator value recorded here is the 1-based child index i.
        var newCollectionInfo = Command.CreateCollectionInfo(
            ci.CollectionVar,
            newColumnMap,
            newFlattenedElementVars,
            newCollectionKeys,
            ci.SortKeys,
            i);
        newCollectionInfoList.Add(newCollectionInfo);

        // For a collection Var, we add the flattened elementVars for the
        // collection in place of the collection Var itself, and we create
        // a new column map to represent all the stuff we've done.
        foreach (var v in newFlattenedElementVars)
        {
            if (!flattenedOutputVarVec.IsSet(v))
            {
                flattenedOutputVarList.Add(v);
                flattenedOutputVarVec.Set(v);
            }
        }

        nestOpOutputs.Set(ci.CollectionVar);

        var keyColumnMapIndex = 0;
        var keyColumnMaps = new SimpleColumnMap[newCollectionInfo.Keys.Count];
        foreach (var keyVar in newCollectionInfo.Keys)
        {
            keyColumnMaps[keyColumnMapIndex] = new VarRefColumnMap(keyVar);
            keyColumnMapIndex++;
        }

        var collectionColumnMap = new DiscriminatedCollectionColumnMap(
            TypeUtils.CreateCollectionType(newCollectionInfo.ColumnMap.Type),
            newCollectionInfo.ColumnMap.Name,
            newCollectionInfo.ColumnMap,
            keyColumnMaps,
            parentKeyColumnMaps,
            discriminatorColumnMap,
            newCollectionInfo.DiscriminatorValue
            );
        varRefReplacementMap[ci.CollectionVar] = collectionColumnMap;
    }

    // Finally, build up the SingleStreamNest Node
    var newSsnOp = Command.CreateSingleStreamNestOp(
        parentKeys, nestOp.PrefixSortKeys, newPostfixSortKeys, nestOpOutputs, newCollectionInfoList,
        outputDiscriminatorVar);
    var newNestNode = Command.CreateNode(newSsnOp, unionAllNode);

#if DEBUG
    var size = input.Length; // GC.KeepAlive makes FxCop Grumpy.
    var output = Dump.ToXml(newNestNode);
#endif //DEBUG

    return newNestNode;
}
// Runs the download-rewrite script against the given url.
// The script file is scanned line by line: a line starting with "^" or "http"
// is a regex that arms execution (bExec) when it matches the url; subsequent
// "name = expression" lines are evaluated into "vars" until a blank line or a
// terminal variable ("result", a successful "test", "regexp", or a failed
// "rs") ends the block. Assigning "url" restarts the whole scan (goto reset);
// non-I/O exceptions retry up to 3 times (goto retry).
// Side effects: may update url (ref), downloader.Referer/FileName/SavePath,
// and reports progress/status through the downloader.
// NOTE(review): always returns true; failures surface as exceptions.
public static bool Parse(Downloader downloader, ref string url)
{
    var vars = new VarList();
    reset:
    int retrys = 3;
    retry:
    bool bExec = false;
    string name = null;
    string script = null;
    string s;
    int line = 0;
    int i;
    vars["url"] = url;
    try
    {
        foreach (string ss in File.ReadAllLines(ScriptFile))
        {
            s = script = ss.TrimStart();
            line++; // line number
            if (bExec)
            {
                if (string.IsNullOrEmpty(s))
                {
                    break; // Blank line, End
                }
                if (s.StartsWith("^", StringComparison.Ordinal))
                {
                    continue; // fall through
                }
                // Parse "name = expression" and evaluate the right-hand side.
                i = s.IndexOf('=');
                name = s.Substring(0, i).TrimEnd();
                s = s.Substring(i + 1);
                vars[name] = Evaluate(ref s, vars);
                if (Trace)
                {
                    downloader.ReportStatus(string.Format("[{0}] {1} = {2}\r\n", line, name, vars[name]));
                }
                if (name == "result")
                {
                    break;
                }
                if (name == "test")
                {
                    // check result: a HEAD request that answers makes it the result.
                    WebHeaderCollection headers = Web.HttpHead(vars["test"]);
                    if (headers != null)
                    {
                        vars["result"] = vars["test"];
                        break; // Got a valid result, End
                    }
                }
                else if (name == "url")
                {
                    // url changed: restart the scan against the new url.
                    url = vars["url"];
                    downloader.ReportProgress(Downloader.CHANGEURL, url);
                    vars.Clear();
                    goto reset;
                }
                else if (name == "regexp")
                {
                    // find match links: every quoted match is resolved against
                    // the current url and reported as a new download url.
                    MatchCollection m = Regex.Matches(Web.HttpGet(url), vars[name]);
                    if (m.Count > 0)
                    {
                        i = 0;
                        while (i < m.Count)
                        {
                            s = Util.GetSubStr(m[i++].Value, "\"", "\"");
                            if (s.StartsWith("//", StringComparison.Ordinal))
                            {
#if false
                                s = Regex.Match(url, "https?:").Value + s;
#else
                                continue; // skip
#endif
                            }
                            if (s.StartsWith("/", StringComparison.Ordinal))
                            {
                                // Root-relative link: prepend scheme + host.
                                s = Regex.Match(url, "https?://[^/]+/").Value + s.Remove(0, 1);
                            }
                            else
                            {
                                // Relative link: resolve against the url's directory.
                                s = url.Remove(url.LastIndexOf("/", StringComparison.Ordinal) + 1) + s;
                            }
                            downloader.ReportProgress(Downloader.NEWURL, s);
                        }
                    }
                    break;
                }
                else if (name == "rs" && !vars["rs"].Contains("http"))
                {
                    // no result
                    break; // End
                }
            }
            else if (s.StartsWith("^", StringComparison.Ordinal) || s.StartsWith("http", StringComparison.Ordinal))
            {
                // Rule header: arm execution when the pattern matches the url.
                bExec = Regex.IsMatch(url, s, RegexOptions.IgnoreCase);
            }
        }
        if (bExec)
        {
            // A rule ran: publish its outputs onto the downloader.
            downloader.Referer = string.IsNullOrEmpty(vars["referer"]) ? url : vars["referer"];
            url = vars["result"];
            downloader.FileName = HttpUtility.HtmlDecode(vars["filename"]);
            if (!string.IsNullOrEmpty(vars["dir"]))
            {
                downloader.SavePath += "\\" + vars["dir"];
            }
        }
    }
    catch (Exception ex)
    {
        // Network/file errors propagate untouched; anything else is retried a
        // few times before being wrapped with the failing script line.
        if (ex is WebException
            || ex is IOException)
        {
            throw; //ex;
        }
        if (retrys-- > 0)
        {
            goto retry;
        }
        throw new Exception(string.Format("Script got error: [{0}] {1}", line, script));
    }
    finally
    {
        vars.Clear();
    }
    return true;
}
// <summary>
// Produce a "mapped" varList: each var replaced by its image under varMap,
// order preserved.
// </summary>
private VarList RemapVarList(VarList varList, Dictionary<Var, Var> varMap)
{
    return Command.CreateVarList(RemapVars(varList, varMap));
}
/// <summary>
/// Produce a new remapped varList
/// </summary>
/// <param name="varList">the varList to remap</param>
/// <returns>remapped varList</returns>
internal VarList RemapVarList(VarList varList)
{
    var mappedVars = MapVars(varList);
    return Command.CreateVarList(mappedVars);
}
// <summary>
// Get back an ordered list of outputs from a union-all op. The ordering is
// identical to the ordered list "leftVars" which describes the left input of
// the unionAllOp
// </summary>
// <param name="unionOp"> the unionall Op </param>
// <param name="leftVars"> vars of the left input </param>
// <returns> output vars ordered in the same way as the left input </returns>
private static VarList GetUnionOutputs(UnionAllOp unionOp, VarList leftVars)
{
    // VarMap[0] maps output var -> left-branch var; invert it so each left
    // var can be translated to its output var.
    Dictionary<Var, Var> reverseVarMap = unionOp.VarMap[0].GetReverseMap();
    var orderedOutputs = Command.CreateVarList();
    foreach (var leftVar in leftVars)
    {
        orderedOutputs.Add(reverseVarMap[leftVar]);
    }
    return orderedOutputs;
}
// Reads a pack-rule file and builds varList / allPacks from its
// "#defineBegin" and "#packBegin" sections.
// Fixes: the FileStream was never disposed (handle leak on every call,
// including on exceptions) — now wrapped in "using". Comments translated
// from Chinese to English.
public void packByRuleFile(String filePath)
{
    // Open the pack-list (rule) file for validation and reading.
    using (FileStream fsRuleFile = new FileStream(filePath, FileMode.Open))
    {
        ArrayList arrayBuff = null;
        try
        {
            // Read all lines as GBK-encoded text.
            arrayBuff = IOUtil.readTextLinesGBK(fsRuleFile);
            int rowID = 0;
            varList = new VarList(this);
            allPacks = new ArrayList();
            while (true)
            {
                String s = (String)arrayBuff[rowID];
                if (s.Trim().Equals("#defineBegin"))
                {
                    // Variable-definition section: delegate to varList.
                    rowID++;
                    rowID = varList.readElement(arrayBuff, rowID);
                }
                else if (s.Trim().Equals("#packBegin"))
                {
                    // Pack section: parse one FilePackElement and collect it.
                    rowID++;
                    FilePackElement filePack = new FilePackElement(this);
                    rowID = filePack.readElement(arrayBuff, rowID);
                    allPacks.Add(filePack);
                }
                rowID++;
                if (rowID < 0 || rowID >= arrayBuff.Count)
                {
                    break;
                }
            }
        }
        catch (Exception)
        {
            // Best-effort parse: malformed rule files are silently ignored
            // (original behavior preserved).
            // TODO(review): consider logging instead of swallowing.
        }
    }
    // Generate the pack sequence (translated from: 生成包裹序列) — not yet implemented here.
}