Exemplo n.º 1
0
 /// <summary>
 /// Appends every node in <paramref name="childNodeColl"/> to this node's
 /// <c>ChildNodes</c> array, replacing the array with an expanded copy.
 /// </summary>
 /// <param name="childNodeColl">child nodes to append</param>
 public virtual void AddChildNodes(ICollection<ExprNode> childNodeColl)
 {
     // ArrayExpandAddElements returns a grown array containing the old
     // elements plus the new ones; cast back to the concrete element type.
     var expanded = CollectionUtil.ArrayExpandAddElements(ChildNodes, childNodeColl);
     ChildNodes = (ExprNode[]) expanded;
 }
Exemplo n.º 2
0
        /// <summary>
        /// Flushes the in-memory postings of the supplied per-field writers into
        /// the segment described by <paramref name="state"/>, then resets the
        /// per-field and shared hashes so memory can be reclaimed.
        /// NOTE(review): assumes every value in <paramref name="fieldsToFlush"/>
        /// is a <c>FreqProxTermsWriterPerField</c> (hard cast below) — confirm at call sites.
        /// </summary>
        public override void Flush(IDictionary <string, TermsHashConsumerPerField> fieldsToFlush, SegmentWriteState state)
        {
            // Gather all FieldData's that have postings, across all
            // ThreadStates
            IList <FreqProxTermsWriterPerField> allFields = new List <FreqProxTermsWriterPerField>();

            foreach (TermsHashConsumerPerField f in fieldsToFlush.Values)
            {
                FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField)f;
                // Skip fields that collected no terms; they contribute nothing to the segment.
                if (perField.termsHashPerField.bytesHash.Count > 0)
                {
                    allFields.Add(perField);
                }
            }

            int numAllFields = allFields.Count;

            // Sort by field name
            CollectionUtil.IntroSort(allFields);

            FieldsConsumer consumer = state.SegmentInfo.Codec.PostingsFormat.FieldsConsumer(state);

            // Tracks whether the flush completed, so the finally block knows whether
            // to dispose normally or to dispose without masking the in-flight exception.
            bool success = false;

            try
            {
                TermsHash termsHash = null;

                /*
                 * Current writer chain:
                 * FieldsConsumer
                 * -> IMPL: FormatPostingsTermsDictWriter
                 *  -> TermsConsumer
                 *    -> IMPL: FormatPostingsTermsDictWriter.TermsWriter
                 *      -> DocsConsumer
                 *        -> IMPL: FormatPostingsDocsWriter
                 *          -> PositionsConsumer
                 *            -> IMPL: FormatPostingsPositionsWriter
                 */

                for (int fieldNumber = 0; fieldNumber < numAllFields; fieldNumber++)
                {
                    FieldInfo fieldInfo = allFields[fieldNumber].fieldInfo;

                    FreqProxTermsWriterPerField fieldWriter = allFields[fieldNumber];

                    // If this field has postings then add them to the
                    // segment
                    fieldWriter.Flush(fieldInfo.Name, consumer, state);

                    TermsHashPerField perField = fieldWriter.termsHashPerField;
                    // Debug invariant: every per-field writer is expected to share one TermsHash.
                    if (Debugging.AssertsEnabled)
                    {
                        Debugging.Assert(termsHash == null || termsHash == perField.termsHash);
                    }
                    termsHash = perField.termsHash;
                    int numPostings = perField.bytesHash.Count;
                    // Reclaim per-field memory now that this field's postings are written out.
                    perField.Reset();
                    perField.ShrinkHash(numPostings);
                    fieldWriter.Reset();
                }

                // Reset the shared hash once, after all fields have been flushed.
                if (termsHash != null)
                {
                    termsHash.Reset();
                }
                success = true;
            }
            finally
            {
                if (success)
                {
                    IOUtils.Dispose(consumer);
                }
                else
                {
                    // An exception is already propagating — suppress any secondary
                    // failure from Dispose so the original error is not masked.
                    IOUtils.DisposeWhileHandlingException(consumer);
                }
            }
        }
Exemplo n.º 3
0
        /// <summary>
        /// Executes <paramref name="q"/> against the index and verifies the result
        /// ids according to <paramref name="concern"/>: exact order, superset, or
        /// order-insensitive equality.
        /// Fixes: corrected failure-message typos ("mising" -> "missing",
        /// "then" -> "than") and a missing space after "Expect".
        /// </summary>
        /// <param name="concern">how strictly results must match the expected ids</param>
        /// <param name="q">the query plus its expected id list</param>
        public virtual void runTestQuery(SpatialMatchConcern concern, SpatialTestQuery q)
        {
            String        msg = q.toString(); //"Query: " + q.args.toString(ctx);
            // Request at least one more hit than expected so surplus results are detectable.
            SearchResults got = executeQuery(makeQuery(q), Math.Max(100, q.ids.size() + 1));

            if (storeShape && got.numFound > 0)
            {
                //check stored value is there
                assertNotNull(got.results[0].document.Get(strategy.FieldName));
            }
            if (concern.orderIsImportant)
            {
                IEnumerator <String> ids = q.ids.GetEnumerator();
                foreach (SearchResult r in got.results)
                {
                    String id = r.document.Get("id");
                    if (!ids.MoveNext())
                    {
                        fail(msg + " :: Did not get enough results.  Expect " + q.ids + ", got: " + got.toDebugString());
                    }
                    assertEquals("out of order: " + msg, ids.Current, id);
                }

                // Expected-id enumerator not exhausted -> the query returned too few results.
                if (ids.MoveNext())
                {
                    fail(msg + " :: expect more results than we got: " + ids.Current);
                }
            }
            else
            {
                // We are looking at how the results overlap
                if (concern.resultsAreSuperset)
                {
                    ISet <string> found = new HashSet <string>();
                    foreach (SearchResult r in got.results)
                    {
                        found.add(r.document.Get("id"));
                    }
                    foreach (String s in q.ids)
                    {
                        if (!found.contains(s))
                        {
                            fail("Results are missing id: " + s + " :: " + found);
                        }
                    }
                }
                else
                {
                    List <string> found = new List <string>();
                    foreach (SearchResult r in got.results)
                    {
                        found.Add(r.document.Get("id"));
                    }

                    // sort both so that the order is not important
                    CollectionUtil.TimSort(q.ids);
                    CollectionUtil.TimSort(found);
                    assertEquals(msg, q.ids.toString(), found.toString());
                }
            }
        }
 /// <summary>
 /// Creates the override with the given attribute name and merges the supplied
 /// entries into this instance's <c>_values</c> dictionary.
 /// </summary>
 /// <param name="name">attribute name, forwarded to the base constructor</param>
 /// <param name="values">entries to merge; checked by ArgumentAssert.IsNotEmpty
 /// (presumably rejects null/empty — confirm in ArgumentAssert)</param>
 public AttributeOverrideDateTime(string name, IDictionary <long, DateTime> values) : base(name)
 {
     // Validate the argument before mutating instance state.
     ArgumentAssert.IsNotEmpty(values, "values");
     // Merge the incoming entries into the pre-existing _values dictionary.
     CollectionUtil.UnionDictionaries(_values, values);
 }
Exemplo n.º 5
0
 /// <summary>
 /// Reports whether the nested array held at this property's position in the
 /// event's underlying object-array contains an element at <c>index</c>.
 /// </summary>
 /// <param name="eventBean">event whose underlying object-array is inspected</param>
 /// <returns>true when the index is valid within the nested array</returns>
 public override bool IsExistsProperty(EventBean eventBean)
 {
     var underlying = BaseNestableEventUtil.CheckedCastUnderlyingObjectArray(eventBean);
     var nested = (Array) underlying[propertyIndex];
     return CollectionUtil.ArrayExistsAtIndex(nested, index);
 }
Exemplo n.º 6
0
 /// <summary>
 /// Produces a key-value pair collection representing the current <see cref="T:PropertyBag"/>.
 /// </summary>
 /// <returns>An <see cref="IDictionary{T,U}"/> built from the bag's entries.</returns>
 internal IDictionary <string, object> ToDictionary() => CollectionUtil.NewDictionary(_bag);
Exemplo n.º 7
0
        /// <summary>
        /// Analyzes the group-by clause and, when rollup/cube or grouping-sets are
        /// present, computes the distinct group-by expressions, the per-level rollup
        /// index arrays, and per-level copies of the select, having and order-by
        /// expressions for later rewriting.
        /// </summary>
        /// <param name="groupByElements">raw group-by elements; null or empty returns null</param>
        /// <param name="selectClauseSpec">select clause whose expressions are copied per rollup level</param>
        /// <param name="optionalHavingNode">optional having expression, copied per level when present</param>
        /// <param name="orderByList">optional order-by items, copied per level when present</param>
        /// <param name="visitor">visitor applied while copying each expression</param>
        /// <returns>group-by expression info, or null when there is no group-by clause</returns>
        /// <exception cref="ExprValidationException">on duplicate groupings, wildcard select with rollup, or a lone overall '()' grouping</exception>
        public static GroupByClauseExpressions GetGroupByRollupExpressions(
            IList <GroupByClauseElement> groupByElements,
            SelectClauseSpecRaw selectClauseSpec,
            ExprNode optionalHavingNode,
            IList <OrderByItem> orderByList,
            ExprNodeSubselectDeclaredDotVisitor visitor)
        {
            if (groupByElements == null || groupByElements.Count == 0)
            {
                return(null);
            }

            // walk group-by-elements, determine group-by expressions and rollup nodes
            var groupByExpressionInfo = GroupByToRollupNodes(groupByElements);

            // obtain expression nodes, collect unique nodes and assign index
            var distinctGroupByExpressions = new List <ExprNode>();
            var expressionToIndex          = new Dictionary <ExprNode, int>();

            foreach (ExprNode exprNode in groupByExpressionInfo.Expressions)
            {
                var found = false;
                for (var i = 0; i < distinctGroupByExpressions.Count; i++)
                {
                    ExprNode other = distinctGroupByExpressions[i];
                    // find same expression
                    // DeepEquals (structural comparison) is used rather than reference
                    // equality so textually-identical expressions share one index.
                    if (ExprNodeUtility.DeepEquals(exprNode, other))
                    {
                        expressionToIndex.Put(exprNode, i);
                        found = true;
                        break;
                    }
                }

                // not seen before
                if (!found)
                {
                    expressionToIndex.Put(exprNode, distinctGroupByExpressions.Count);
                    distinctGroupByExpressions.Add(exprNode);
                }
            }

            // determine rollup, validate it is either (not both)
            var hasGroupingSet = false;
            var hasRollup      = false;

            foreach (var element in groupByElements)
            {
                if (element is GroupByClauseElementGroupingSet)
                {
                    hasGroupingSet = true;
                }
                if (element is GroupByClauseElementRollupOrCube)
                {
                    hasRollup = true;
                }
            }

            // no-rollup or grouping-sets means simply validate
            var groupByExpressions = distinctGroupByExpressions.ToArray();

            if (!hasRollup && !hasGroupingSet)
            {
                return(new GroupByClauseExpressions(groupByExpressions));
            }

            // evaluate rollup node roots
            IList <GroupByRollupNodeBase> nodes = groupByExpressionInfo.Nodes;
            var perNodeCombinations             = new Object[nodes.Count][];
            var context = new GroupByRollupEvalContext(expressionToIndex);

            try {
                for (var i = 0; i < nodes.Count; i++)
                {
                    var node         = nodes[i];
                    var combinations = node.Evaluate(context);
                    perNodeCombinations[i] = new Object[combinations.Count];
                    for (var j = 0; j < combinations.Count; j++)
                    {
                        perNodeCombinations[i][j] = combinations[j];
                    }
                }
            }
            catch (GroupByRollupDuplicateException ex) {
                // Indexes.Length == 0 identifies the overall '()' grouping; otherwise
                // render the duplicated expressions into the error message.
                if (ex.Indexes.Length == 0)
                {
                    throw new ExprValidationException("Failed to validate the group-by clause, found duplicate specification of the overall grouping '()'");
                }
                else
                {
                    var writer    = new StringWriter();
                    var delimiter = "";
                    for (var i = 0; i < ex.Indexes.Length; i++)
                    {
                        writer.Write(delimiter);
                        writer.Write(groupByExpressions[ex.Indexes[i]].ToExpressionStringMinPrecedenceSafe());
                        delimiter = ", ";
                    }
                    throw new ExprValidationException("Failed to validate the group-by clause, found duplicate specification of expressions (" + writer.ToString() + ")");
                }
            }

            // enumerate combinations building an index list
            // The SortedSet keeps each combination's indexes ordered; the
            // LinkedHashSet (by name, insertion-ordered) de-duplicates levels
            // while preserving first-seen order — confirm against its implementation.
            var combinationEnumeration            = new CombinationEnumeration(perNodeCombinations);
            ICollection <int>         combination = new SortedSet <int>();
            ICollection <MultiKeyInt> indexList   = new LinkedHashSet <MultiKeyInt>();

            while (combinationEnumeration.MoveNext())
            {
                combination.Clear();
                Object[] combinationOA = combinationEnumeration.Current;
                foreach (var indexes in combinationOA)
                {
                    var indexarr = (int[])indexes;
                    foreach (var anIndex in indexarr)
                    {
                        combination.Add(anIndex);
                    }
                }
                var indexArr = CollectionUtil.IntArray(combination);
                indexList.Add(new MultiKeyInt(indexArr));
            }

            // obtain rollup levels
            var rollupLevels = new int[indexList.Count][];
            var count        = 0;

            foreach (var mk in indexList)
            {
                rollupLevels[count++] = mk.Keys;
            }
            var numberOfLevels = rollupLevels.Length;

            if (numberOfLevels == 1 && rollupLevels[0].Length == 0)
            {
                throw new ExprValidationException("Failed to validate the group-by clause, the overall grouping '()' cannot be the only grouping");
            }

            // obtain select-expression copies for rewrite
            var expressions = selectClauseSpec.SelectExprList;
            var selects     = new ExprNode[numberOfLevels][];

            for (var i = 0; i < numberOfLevels; i++)
            {
                selects[i] = new ExprNode[expressions.Count];
                for (var j = 0; j < expressions.Count; j++)
                {
                    SelectClauseElementRaw selectRaw = expressions[j];
                    if (!(selectRaw is SelectClauseExprRawSpec))
                    {
                        throw new ExprValidationException("Group-by with rollup requires that the select-clause does not use wildcard");
                    }
                    var compiled = (SelectClauseExprRawSpec)selectRaw;
                    selects[i][j] = CopyVisitExpression(compiled.SelectExpression, visitor);
                }
            }

            // obtain having-expression copies for rewrite
            ExprNode[] optHavingNodeCopy = null;
            if (optionalHavingNode != null)
            {
                optHavingNodeCopy = new ExprNode[numberOfLevels];
                for (var i = 0; i < numberOfLevels; i++)
                {
                    optHavingNodeCopy[i] = CopyVisitExpression(optionalHavingNode, visitor);
                }
            }

            // obtain orderby-expression copies for rewrite
            ExprNode[][] optOrderByCopy = null;
            if (orderByList != null && orderByList.Count > 0)
            {
                optOrderByCopy = new ExprNode[numberOfLevels][];
                for (var i = 0; i < numberOfLevels; i++)
                {
                    optOrderByCopy[i] = new ExprNode[orderByList.Count];
                    for (var j = 0; j < orderByList.Count; j++)
                    {
                        OrderByItem element = orderByList[j];
                        optOrderByCopy[i][j] = CopyVisitExpression(element.ExprNode, visitor);
                    }
                }
            }

            return(new GroupByClauseExpressions(groupByExpressions, rollupLevels, selects, optHavingNodeCopy, optOrderByCopy));
        }
Exemplo n.º 8
0
            /// <summary>
            /// Regression test: array-index property access ("Intarr[IndexNumber]")
            /// on nested and aliased event properties, plus a series of
            /// invalid-expression compile checks with their exact expected messages.
            /// NOTE(review): `soda` is a field of the enclosing class — not visible here.
            /// </summary>
            public void Run(RegressionEnvironment env)
            {
                var path      = new RegressionPath();
                var eplSchema =
                    "create schema Lvl1(Intarr int[]);\n" +
                    "@public @buseventtype create schema Lvl0(Lvl1 Lvl1, IndexNumber int, Id string);\n";

                env.CompileDeploy(eplSchema, path);

                var epl = "@Name('s0') select " +
                          "Lvl1.Intarr[IndexNumber] as c0, " +
                          "Lvl1.Intarr.size() as c1, " +
                          "me.Lvl1.Intarr[IndexNumber] as c2, " +
                          "me.Lvl1.Intarr.size() as c3 " +
                          "from Lvl0 as me";

                env.CompileDeploy(soda, epl, path).AddListener("s0");
                var fields = "c0,c1,c2,c3".SplitCsv();
                var @out   = env.Statement("s0").EventType;

                // All four select columns should be typed as nullable int.
                foreach (var field in fields)
                {
                    Assert.AreEqual(typeof(int?), @out.GetPropertyType(field));
                }

                // Send one event: Intarr = {1,2,3}, IndexNumber = 2 -> element value 3, size 3.
                var Lvl1 = CollectionUtil.BuildMap("Intarr", new int?[] { 1, 2, 3 });
                var Lvl0 = CollectionUtil.BuildMap("Lvl1", Lvl1, "IndexNumber", 2);

                env.SendEventMap(Lvl0, "Lvl0");
                AssertProps(env.Listener("s0").AssertOneGetNewAndReset(), fields, 3, 3, 3, 3);

                // Invalid tests
                // not an index expression
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1[IndexNumber] from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1[IndexNumber]': Invalid array operation for property 'Lvl1'");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1[IndexNumber] from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1[IndexNumber]': Invalid array operation for property 'Lvl1'");

                // two index expressions
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1.Intarr[IndexNumber, IndexNumber] from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1.Intarr[IndexNumber,IndexNumber]': Incorrect number of index expressions for array operation, expected a single expression returning an integer value but received 2 expressions for operation on type array of System.Nullable<System.Int32>");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1.Intarr[IndexNumber, IndexNumber] from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1.Intarr[IndexNumber,IndexNumber]': Incorrect number of index expressions for array operation, expected a single expression returning an integer value but received 2 expressions for operation on type array of System.Nullable<System.Int32>");

                // double-array
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1.Intarr[IndexNumber][IndexNumber] from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1.Intarr[IndexNumber][IndexNumber]': Could not perform array operation on type class System.Nullable<System.Int32>");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1.Intarr[IndexNumber][IndexNumber] from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1.Intarr[IndexNumber][IndexNumber]': Could not perform array operation on type class System.Nullable<System.Int32>");

                // wrong index expression type
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1.Intarr[Id] from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1.Intarr[Id]': Incorrect index expression for array operation, expected an expression returning an integer value but the expression 'Id' returns 'System.String' for operation on type array of System.Nullable<System.Int32>");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1.Intarr[Id] from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1.Intarr[Id]': Incorrect index expression for array operation, expected an expression returning an integer value but the expression 'Id' returns 'System.String' for operation on type array of System.Nullable<System.Int32>");

                env.UndeployAll();
            }
Exemplo n.º 9
0
 /// <summary>
 /// Regression test: exercises the EPL enumeration methods (aggregate, allOf,
 /// anyOf, average, countOf, except/intersect/union, firstOf/lastOf,
 /// least/mostFrequent, min/max(+by), orderBy, distinctOf, reverse,
 /// sequenceEqual, sumOf, take*, where, groupby, selectFrom, arrayOf, toMap)
 /// against literal int/string arrays, checking each expected result.
 /// </summary>
 public void Run(RegressionEnvironment env)
 {
     // aggregate / allOf / anyOf — lambdas with optional (value, index, size) parameters
     Validate(env, "{1, 2, 3}.aggregate(0, (result, value) => result + value)", 6);
     Validate(env, "{1, 2, 3}.aggregate(0, (result, value, index) => result + value + index*10)", 36);
     Validate(env, "{1, 2, 3}.aggregate(0, (result, value, index, size) => result + value + index*10 + size*100)", 936);
     Validate(env, "{1, 2, 3}.allOf(v => v > 0)", true);
     Validate(env, "{1, 2, 3}.allOf(v => v > 1)", false);
     Validate(env, "{1, 2, 3}.allOf((v, index) => case when index < 2 then true else v > 1 end)", true);
     Validate(env, "{1, 2, 3}.allOf((v, index, size) => v > 1 or size >= 3)", true);
     Validate(env, "{1, 2, 3}.anyOf(v => v > 1)", true);
     Validate(env, "{1, 2, 3}.anyOf(v => v > 3)", false);
     Validate(env, "{1, 2, 3}.anyOf( (v, index) => case when index < 2 then false else v = 3 end)", true);
     Validate(env, "{1, 2, 3}.anyOf( (v, index, size) => v > 100 or size >= 3)", true);
     // average / countOf
     Validate(env, "{1, 2, 3}.average()", 2.0);
     Validate(env, "{1, 2, 3}.average(v => v+1)", 3d);
     Validate(env, "{1, 2, 3}.average((v, index) => v+10*index)", 12d);
     Validate(env, "{1, 2, 3}.average((v, index, size) => v+10*index + 100*size)", 312d);
     Validate(env, "{1, 2, 3}.countOf()", 3);
     Validate(env, "{1, 2, 3}.countOf(v => v < 2)", 1);
     Validate(env, "{1, 2, 3}.countOf( (v, index) => v > index)", 3);
     Validate(env, "{1, 2, 3}.countOf( (v, index, size) => v >= size)", 1);
     // set operations and first/last selection
     Validate(env, "{1, 2, 3}.except({1})", new object[] { 2, 3 });
     Validate(env, "{1, 2, 3}.intersect({2,3})", new object[] { 2, 3 });
     Validate(env, "{1, 2, 3}.firstOf()", 1);
     Validate(env, "{1, 2, 3}.firstOf(v => v / 2 = 1)", 2);
     Validate(env, "{1, 2, 3}.firstOf((v, index) => index = 1)", 2);
     Validate(env, "{1, 2, 3}.firstOf((v, index, size) => v = size-1)", 2);
     Validate(env, "{1, 2, 3}.intersect({2, 3})", new object[] { 2, 3 });
     Validate(env, "{1, 2, 3}.lastOf()", 3);
     Validate(env, "{1, 2, 3}.lastOf(v => v < 3)", 2);
     Validate(env, "{1, 2, 3}.lastOf((v, index) => index < 2 )", 2);
     Validate(env, "{1, 2, 3}.lastOf((v, index, size) => index < size - 2 )", 1);
     // frequency and min/max
     Validate(env, "{1, 2, 3, 2, 1}.leastFrequent()", 3);
     Validate(env, "{1, 2, 3, 2, 1}.leastFrequent(v => case when v = 3 then 4 else v end)", 4);
     Validate(env, "{1, 2, 3, 2, 1}.leastFrequent((v, index) => case when index = 2 then 4 else v end)", 4);
     Validate(env, "{1, 2, 3, 2, 1}.leastFrequent((v, index, size) => case when index = size - 2 then 4 else v end)", 2);
     Validate(env, "{1, 2, 3, 2, 1}.max()", 3);
     Validate(env, "{1, 2, 3, 2, 1}.max(v => case when v >= 3 then 0 else v end)", 2);
     Validate(env, "{1, 2, 3, 2, 1}.max((v, index) => case when index = 2 then 0 else v end)", 2);
     Validate(env, "{1, 2, 3, 2, 1}.max((v, index, size) => case when index > size - 4 then 0 else v end)", 2);
     Validate(env, "{1, 2, 3, 2, 1}.min()", 1);
     Validate(env, "{1, 2, 3, 2, 1}.min(v => v + 1)", 2);
     Validate(env, "{1, 2, 3, 2, 1}.min((v, index) => v - index)", -3);
     Validate(env, "{1, 2, 3, 2, 1}.min((v, index, size) => v - size)", -4);
     Validate(env, "{1, 2, 3, 2, 1, 2}.mostFrequent()", 2);
     Validate(env, "{1, 2, 3, 2, 1, 2}.mostFrequent(v => case when v = 2 then 10 else v end)", 10);
     Validate(env, "{1, 2, 3, 2, 1, 2}.mostFrequent((v, index) => case when index > 2 then 4 else v end)", 4);
     Validate(env, "{1, 2, 3, 2, 1, 2}.mostFrequent((v, index, size) => case when size > 3 then 0 else v end)", 0);
     // ordering / distinct / reverse
     Validate(env, "{2, 3, 2, 1}.orderBy()", new object[] { 1, 2, 2, 3 });
     Validate(env, "{2, 3, 2, 1}.orderBy(v => -v)", new object[] { 3, 2, 2, 1 });
     Validate(env, "{2, 3, 2, 1}.orderBy((v, index) => index)", new object[] { 2, 3, 2, 1 });
     Validate(env, "{2, 3, 2, 1}.orderBy((v, index, size) => case when index < size - 2 then v else -v end)", new object[] { 2, 1, 2, 3 });
     Validate(env, "{2, 3, 2, 1}.distinctOf()", new object[] { 2, 3, 1 });
     Validate(env, "{2, 3, 2, 1}.distinctOf(v => case when v > 1 then 0 else -1 end)", new object[] { 2, 1 });
     Validate(env, "{2, 3, 2, 1}.distinctOf((v, index) => case when index = 0 then 1 else 2 end)", new object[] { 2, 3 });
     Validate(env, "{2, 3, 2, 1}.distinctOf((v, index, size) => case when index+1=size then 1 else 2 end)", new object[] { 2, 1 });
     Validate(env, "{2, 3, 2, 1}.reverse()", new object[] { 1, 2, 3, 2 });
     Validate(env, "{1, 2, 3}.sequenceEqual({1})", false);
     Validate(env, "{1, 2, 3}.sequenceEqual({1, 2, 3})", true);
     Validate(env, "{1, 2, 3}.sumOf()", 6);
     Validate(env, "{1, 2, 3}.sumOf(v => v+1)", 9);
     Validate(env, "{1, 2, 3}.sumOf((v, index) => v + index)", 1 + 3 + 5);
     Validate(env, "{1, 2, 3}.sumOf((v, index, size) => v+index+size)", 18);
     // take / takeLast / takeWhile(Last)
     Validate(env, "{1, 2, 3}.take(2)", new object[] { 1, 2 });
     Validate(env, "{1, 2, 3}.takeLast(2)", new object[] { 2, 3 });
     Validate(env, "{1, 2, 3}.takeWhile(v => v < 3)", new object[] { 1, 2 });
     Validate(env, "{1, 2, 3}.takeWhile((v,ind) => ind < 2)", new object[] { 1, 2 });
     Validate(env, "{1, 2, -1, 4, 5, 6}.takeWhile((v,ind) => ind < 5 and v > 0)", new object[] { 1, 2 });
     Validate(env, "{1, 2, -1, 4, 5, 6}.takeWhile((v,ind,sz) => ind < sz - 5 and v > 0)", new object[] { 1 });
     Validate(env, "{1, 2, 3}.takeWhileLast(v => v > 1)", new object[] { 2, 3 });
     Validate(env, "{1, 2, 3}.takeWhileLast((v,ind) => ind < 2)", new object[] { 2, 3 });
     Validate(env, "{1, 2, -1, 4, 5, 6}.takeWhileLast((v,ind) => ind < 5 and v > 0)", new object[] { 4, 5, 6 });
     Validate(env, "{1, 2, -1, 4, 5, 6}.takeWhileLast((v,ind,sz) => ind < sz-4 and v > 0)", new object[] { 5, 6 });
     Validate(env, "{1, 2, 3}.union({4, 5})", new object[] { 1, 2, 3, 4, 5 });
     Validate(env, "{1, 2, 3}.where(v => v != 2)", new object[] { 1, 3 });
     Validate(env, "{1, 2, 3}.where((v, index) => v != 2 and index < 2)", new object[] { 1 });
     Validate(env, "{1, 2, 3}.where((v, index, size) => v != 2 and index < size - 2)", new object[] { 1 });
     // groupby — key-only, key+value, and with index/size lambda parameters
     Validate(
         env,
         "{1, 2, 3}.groupby(k => 'K' || Convert.ToString(k))",
         CollectionUtil.BuildMap("K1", SingletonList(1), "K2", SingletonList(2), "K3", SingletonList(3)));
     Validate(
         env,
         "{1, 2, 3}.groupby(k => 'K' || Convert.ToString(k), v => 'V' || Convert.ToString(v))",
         CollectionUtil.BuildMap("K1", SingletonList("V1"), "K2", SingletonList("V2"), "K3", SingletonList("V3")));
     Validate(
         env,
         "{1, 2, 3}.groupby((k, i) => 'K' || Convert.ToString(k) || \"_\" || Convert.ToString(i), (v, i) => 'V' || Convert.ToString(v) || \"_\" || Convert.ToString(i))",
         CollectionUtil.BuildMap("K1_0", SingletonList("V1_0"), "K2_1", SingletonList("V2_1"), "K3_2", SingletonList("V3_2")));
     Validate(
         env,
         "{1, 2, 3}.groupby((k, i, s) => 'K' || Convert.ToString(k) || \"_\" || Convert.ToString(s), (v, i, s) => 'V' || Convert.ToString(v) || \"_\" || Convert.ToString(s))",
         CollectionUtil.BuildMap("K1_3", SingletonList("V1_3"), "K2_3", SingletonList("V2_3"), "K3_3", SingletonList("V3_3")));
     Validate(env, "{1, 2, 3, 2, 1}.maxby(v => v)", 3);
     Validate(env, "{1, 2, 3, 2, 1}.maxby((v, index) => case when index < 3 then -1 else 0 end)", 2);
     Validate(env, "{1, 2, 3, 2, 1}.maxby((v, index, size) => case when index < size - 2 then -1 else 0 end)", 2);
     Validate(env, "{1, 2, 3, 2, 1}.minby(v => v)", 1);
     Validate(env, "{1, 2, 3, 2, 1}.minby((v, index) => case when index < 3 then -1 else 0 end)", 1);
     Validate(env, "{1, 2, 3, 2, 1}.minby((v, index, size) => case when index < size - 2 then -1 else 0 end)", 1);
     Validate(env, "{'A','B','C'}.selectFrom(v => '<' || v || '>')", Arrays.AsList("<A>", "<B>", "<C>"));
     Validate(env, "{'A','B','C'}.selectFrom((v, index) => v || '_' || Convert.ToString(index))", Arrays.AsList("A_0", "B_1", "C_2"));
     Validate(env, "{'A','B','C'}.selectFrom((v, index, size) => v || '_' || Convert.ToString(size))", Arrays.AsList("A_3", "B_3", "C_3"));
     // arrayOf — verified via a custom verifier since results unwrap to int[]
     ValidateWithVerifier(
         env,
         "{1, 2, 3}.arrayOf()",
         result => EPAssertionUtil.AssertEqualsExactOrder(result.Unwrap <int>(), new int[] { 1, 2, 3 }));
     ValidateWithVerifier(
         env,
         "{1, 2, 3}.arrayOf(v => v+1)",
         result => EPAssertionUtil.AssertEqualsExactOrder(result.Unwrap <int>(), new int[] { 2, 3, 4 }));
     ValidateWithVerifier(
         env,
         "{1, 2, 3}.arrayOf((v, index) => v+index)",
         result => EPAssertionUtil.AssertEqualsExactOrder(result.Unwrap <int>(), new int[] { 1, 3, 5 }));
     ValidateWithVerifier(
         env,
         "{1, 2, 3}.arrayOf((v, index, size) => v+index+size)",
         result => EPAssertionUtil.AssertEqualsExactOrder(result.Unwrap <int>(), new int[] { 4, 6, 8 }));
     // toMap — key/value lambdas with optional index/size parameters
     Validate(
         env,
         "{1, 2, 3}.toMap(k => 'K' || Convert.ToString(k), v => 'V' || Convert.ToString(v))",
         CollectionUtil.BuildMap("K1", "V1", "K2", "V2", "K3", "V3"));
     Validate(
         env,
         "{1, 2, 3}.toMap((k, i) => 'K' || Convert.ToString(k) || \"_\" || Convert.ToString(i), (v, i) => 'V' || Convert.ToString(v) || \"_\" || Convert.ToString(i))",
         CollectionUtil.BuildMap("K1_0", "V1_0", "K2_1", "V2_1", "K3_2", "V3_2"));
     Validate(
         env,
         "{1, 2, 3}.toMap((k, i, s) => 'K' || Convert.ToString(k) || \"_\" || Convert.ToString(s), (v, i, s) => 'V' || Convert.ToString(v) || \"_\" || Convert.ToString(s))",
         CollectionUtil.BuildMap("K1_3", "V1_3", "K2_3", "V2_3", "K3_3", "V3_3"));
 }
Exemplo n.º 10
0
        private void ValidateContextDetail(
            EPServicesContext servicesContext,
            StatementContext statementContext,
            ISet <string> eventTypesReferenced,
            ContextDetail contextDetail,
            AgentInstanceContext agentInstanceContext)
        {
            if (contextDetail is ContextDetailPartitioned)
            {
                var segmented = (ContextDetailPartitioned)contextDetail;
                foreach (var partition in segmented.Items)
                {
                    ValidateNotTable(servicesContext, partition.FilterSpecRaw.EventTypeName);
                    var raw      = new FilterStreamSpecRaw(partition.FilterSpecRaw, ViewSpec.EMPTY_VIEWSPEC_ARRAY, null, new StreamSpecOptions());
                    var compiled = raw.Compile(statementContext, eventTypesReferenced, false, Collections.GetEmptyList <int>(), false, true, false, null);
                    if (!(compiled is FilterStreamSpecCompiled))
                    {
                        throw new ExprValidationException("Partition criteria may not include named windows");
                    }
                    var result = (FilterStreamSpecCompiled)compiled;
                    partition.FilterSpecCompiled = result.FilterSpec;
                }
            }
            else if (contextDetail is ContextDetailCategory)
            {
                // compile filter
                var category = (ContextDetailCategory)contextDetail;
                ValidateNotTable(servicesContext, category.FilterSpecRaw.EventTypeName);
                var raw    = new FilterStreamSpecRaw(category.FilterSpecRaw, ViewSpec.EMPTY_VIEWSPEC_ARRAY, null, new StreamSpecOptions());
                var result = (FilterStreamSpecCompiled)raw.Compile(statementContext, eventTypesReferenced, false, Collections.GetEmptyList <int>(), false, true, false, null);
                category.FilterSpecCompiled = result.FilterSpec;
                servicesContext.StatementEventTypeRefService.AddReferences(statementContext.StatementName, CollectionUtil.ToArray(eventTypesReferenced));

                // compile expressions
                foreach (var item in category.Items)
                {
                    ValidateNotTable(servicesContext, category.FilterSpecRaw.EventTypeName);
                    var filterSpecRaw = new FilterSpecRaw(category.FilterSpecRaw.EventTypeName, Collections.SingletonList(item.Expression), null);
                    var rawExpr       = new FilterStreamSpecRaw(filterSpecRaw, ViewSpec.EMPTY_VIEWSPEC_ARRAY, null, new StreamSpecOptions());
                    var compiled      = (FilterStreamSpecCompiled)rawExpr.Compile(statementContext, eventTypesReferenced, false, Collections.GetEmptyList <int>(), false, true, false, null);
                    item.SetCompiledFilter(compiled.FilterSpec, agentInstanceContext);
                }
            }
            else if (contextDetail is ContextDetailHash)
            {
                var hashed = (ContextDetailHash)contextDetail;
                foreach (var hashItem in hashed.Items)
                {
                    var raw = new FilterStreamSpecRaw(hashItem.FilterSpecRaw, ViewSpec.EMPTY_VIEWSPEC_ARRAY, null, new StreamSpecOptions());
                    ValidateNotTable(servicesContext, hashItem.FilterSpecRaw.EventTypeName);
                    var result = (FilterStreamSpecCompiled)raw.Compile(statementContext, eventTypesReferenced, false, Collections.GetEmptyList <int>(), false, true, false, null);
                    hashItem.FilterSpecCompiled = result.FilterSpec;

                    // validate parameters
                    var streamTypes       = new StreamTypeServiceImpl(result.FilterSpec.FilterForEventType, null, true, statementContext.EngineURI);
                    var validationContext = new ExprValidationContext(
                        streamTypes,
                        statementContext.EngineImportService,
                        statementContext.StatementExtensionServicesContext, null,
                        statementContext.SchedulingService,
                        statementContext.VariableService, statementContext.TableService,
                        GetDefaultAgentInstanceContext(statementContext), statementContext.EventAdapterService,
                        statementContext.StatementName, statementContext.StatementId, statementContext.Annotations,
                        statementContext.ContextDescriptor, statementContext.ScriptingService, false, false, false, false,
                        null, false);
                    ExprNodeUtility.Validate(ExprNodeOrigin.CONTEXT, Collections.SingletonList(hashItem.Function), validationContext);
                }
            }
            else if (contextDetail is ContextDetailInitiatedTerminated)
            {
                var def            = (ContextDetailInitiatedTerminated)contextDetail;
                var startCondition = ValidateRewriteContextCondition(servicesContext, statementContext, def.Start, eventTypesReferenced, new MatchEventSpec(), new LinkedHashSet <string>());
                var endCondition   = ValidateRewriteContextCondition(servicesContext, statementContext, def.End, eventTypesReferenced, startCondition.Matches, startCondition.AllTags);
                def.Start = startCondition.Condition;
                def.End   = endCondition.Condition;

                if (def.DistinctExpressions != null)
                {
                    if (!(startCondition.Condition is ContextDetailConditionFilter))
                    {
                        throw new ExprValidationException("Distinct-expressions require a stream as the initiated-by condition");
                    }
                    var distinctExpressions = def.DistinctExpressions;
                    if (distinctExpressions.Length == 0)
                    {
                        throw new ExprValidationException("Distinct-expressions have not been provided");
                    }
                    var filter = (ContextDetailConditionFilter)startCondition.Condition;
                    if (filter.OptionalFilterAsName == null)
                    {
                        throw new ExprValidationException("Distinct-expressions require that a stream name is assigned to the stream using 'as'");
                    }
                    var types             = new StreamTypeServiceImpl(filter.FilterSpecCompiled.FilterForEventType, filter.OptionalFilterAsName, true, servicesContext.EngineURI);
                    var validationContext = new ExprValidationContext(
                        types,
                        statementContext.EngineImportService,
                        statementContext.StatementExtensionServicesContext, null,
                        statementContext.SchedulingService,
                        statementContext.VariableService, statementContext.TableService,
                        GetDefaultAgentInstanceContext(statementContext), statementContext.EventAdapterService,
                        statementContext.StatementName, statementContext.StatementId, statementContext.Annotations,
                        statementContext.ContextDescriptor, statementContext.ScriptingService, false, false, true, false,
                        null, false);
                    for (var i = 0; i < distinctExpressions.Length; i++)
                    {
                        ExprNodeUtility.ValidatePlainExpression(ExprNodeOrigin.CONTEXTDISTINCT, ExprNodeUtility.ToExpressionStringMinPrecedenceSafe(distinctExpressions[i]), distinctExpressions[i]);
                        distinctExpressions[i] = ExprNodeUtility.GetValidatedSubtree(ExprNodeOrigin.CONTEXTDISTINCT, distinctExpressions[i], validationContext);
                    }
                }
            }
            else if (contextDetail is ContextDetailNested)
            {
                var nested = (ContextDetailNested)contextDetail;
                foreach (var nestedContext in nested.Contexts)
                {
                    ValidateContextDetail(servicesContext, statementContext, eventTypesReferenced, nestedContext.ContextDetail, agentInstanceContext);
                }
            }
            else
            {
                throw new IllegalStateException("Unrecognized context detail " + contextDetail);
            }
        }
Exemplo n.º 11
0
            /// <summary>
            /// Regression test: accesses an indexed event property through a runtime index
            /// expression ("Lvl1[IndexNumber].Id", both unprefixed and stream-prefixed) and
            /// asserts the exact compiler error messages for malformed variants of the syntax.
            /// </summary>
            /// <param name="env">Regression environment used to compile, deploy and send events.</param>
            public void Run(RegressionEnvironment env)
            {
                var path = new RegressionPath();
                // Lvl0 carries an array of Lvl1 events plus an index (IndexNumber) into that array.
                var epl  =
                    "create schema Lvl1(Id string);\n" +
                    "@public @buseventtype create schema Lvl0(Lvl1 Lvl1[], IndexNumber int, Lvl0Id string);\n" +
                    "@Name('s0') select Lvl1[IndexNumber].Id as c0, me.Lvl1[IndexNumber].Id as c1 from Lvl0 as me";

                env.CompileDeploy(epl, path).AddListener("s0");
                var fields = "c0,c1".SplitCsv();
                var @out   = env.Statement("s0").EventType;

                // Both select-clause expressions should resolve to the Id property's type (string).
                foreach (var field in fields)
                {
                    Assert.AreEqual(typeof(string), @out.GetPropertyType(field));
                }

                // IndexNumber = 1 selects the second array element, whose Id is "b".
                var Lvl1One = CollectionUtil.BuildMap("Id", "a");
                var Lvl1Two = CollectionUtil.BuildMap("Id", "b");
                var Lvl0    = CollectionUtil.BuildMap("Lvl1", new[] { Lvl1One, Lvl1Two }, "IndexNumber", 1);

                env.SendEventMap(Lvl0, "Lvl0");
                AssertProps(env.Listener("s0").AssertOneGetNewAndReset(), fields, "b", "b");

                // Invalid tests
                // array value but no array provided
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1.Id from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1.Id': Failed to resolve property 'Lvl1.Id' (property 'Lvl1' is an indexed property and requires an index or enumeration method to access values)");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1.Id from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1.Id': Property named 'Lvl1.Id' is not valid in stream 'me' (did you mean 'Lvl0Id'?)");

                // not an index expression
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1.Id[IndexNumber] from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1.Id[IndexNumber]': Could not find event property or method named 'Id' in collection of events of type 'Lvl1'");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1.Id[IndexNumber] from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1.Id[IndexNumber]': Could not find event property or method named 'Id' in collection of events of type 'Lvl1'");

                // two index expressions
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1[IndexNumber, IndexNumber].Id from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1[IndexNumber,IndexNumber].Id': Incorrect number of index expressions for array operation, expected a single expression returning an integer value but received 2 expressions for property 'Lvl1'");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1[IndexNumber, IndexNumber].Id from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1[IndexNumber,IndexNumber].Id': Incorrect number of index expressions for array operation, expected a single expression returning an integer value but received 2 expressions for property 'Lvl1'");

                // double-array
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1[IndexNumber][IndexNumber].Id from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1[IndexNumber][IndexNumber].Id': Could not perform array operation on type event type 'Lvl1'");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1[IndexNumber][IndexNumber].Id from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1[IndexNumber][IndexNumber].Id': Could not perform array operation on type event type 'Lvl1'");

                // wrong index expression type
                TryInvalidCompile(
                    env,
                    path,
                    "select Lvl1[Lvl0Id].Id from Lvl0",
                    "Failed to validate select-clause expression 'Lvl1[Lvl0Id].Id': Incorrect index expression for array operation, expected an expression returning an integer value but the expression 'Lvl0Id' returns 'System.String' for property 'Lvl1'");
                TryInvalidCompile(
                    env,
                    path,
                    "select me.Lvl1[Lvl0Id].Id from Lvl0 as me",
                    "Failed to validate select-clause expression 'me.Lvl1[Lvl0Id].Id': Incorrect index expression for array operation, expected an expression returning an integer value but the expression 'Lvl0Id' returns 'System.String' for property 'Lvl1'");

                env.UndeployAll();
            }
Exemplo n.º 12
0
        /// <summary>
        /// Starts a create-context statement: validates the context definition, registers the
        /// event types it references, defines an (empty) output event type for the statement,
        /// and registers the context specification with the context management service.
        /// Starting does not activate the context.
        /// </summary>
        /// <param name="services">Engine-level services.</param>
        /// <param name="statementContext">Statement-level services and metadata.</param>
        /// <param name="isNewStatement">Indicator whether the statement is new.</param>
        /// <param name="isRecoveringStatement">Indicator whether the statement is recovering.</param>
        /// <param name="isRecoveringResilient">Indicator whether recovery is resilient.</param>
        /// <returns>Start result holding the zero-depth view, stop method and destroy method.</returns>
        public override EPStatementStartResult StartInternal(
            EPServicesContext services,
            StatementContext statementContext,
            bool isNewStatement,
            bool isRecoveringStatement,
            bool isRecoveringResilient)
        {
            if (_statementSpec.OptionalContextName != null)
            {
                throw new ExprValidationException("A create-context statement cannot itself be associated to a context, please declare a nested context instead");
            }

            var contextDesc = _statementSpec.ContextDesc;
            var defaultAgentInstanceContext = GetDefaultAgentInstanceContext(statementContext);

            // Compile filter specs contained in the context detail, collecting the
            // event type names they reference along the way.
            ISet<string> referencedEventTypes = new HashSet<string>();
            ValidateContextDetail(services, statementContext, referencedEventTypes, contextDesc.ContextDetail, defaultAgentInstanceContext);
            services.StatementEventTypeRefService.AddReferences(statementContext.StatementName, CollectionUtil.ToArray(referencedEventTypes));

            // The statement produces no events of its own; the output type is an anonymous empty map type.
            var anonymousTypeName = "EventType_Context_" + contextDesc.ContextName;
            var resultEventType = services.EventAdapterService.CreateAnonymousMapType(anonymousTypeName, Collections.GetEmptyMap<string, object>(), true);

            // Register the context spec; activation happens elsewhere.
            services.ContextManagementService.AddContextSpec(services, defaultAgentInstanceContext, contextDesc, isRecoveringResilient, resultEventType);

            // Stop requires no action; destroy tears the context down.
            EPStatementStopMethod stop = new ProxyEPStatementStopMethod(() => { });
            EPStatementDestroyMethod destroy = new ProxyEPStatementDestroyMethod(
                () => services.ContextManagementService.DestroyedContext(contextDesc.ContextName));

            return new EPStatementStartResult(new ZeroDepthStreamNoIterate(resultEventType), stop, destroy);
        }
Exemplo n.º 13
0
        /// <summary>
        /// Attempts to cast the agent's phase ability (ability id 2346) on a nearby cluster
        /// of ground enemy units. Returns true when a cast order was issued this frame.
        /// </summary>
        /// <param name="agent">The casting unit.</param>
        /// <returns>True if a cast order was issued; false otherwise.</returns>
        private bool Phase(Agent agent)
        {
            // Cooldown: skip if this agent cast within the last 224 frames.
            // (TryGetValue avoids the ContainsKey + indexer double lookup.)
            if (PhaseFrame.TryGetValue(agent.Unit.Tag, out var lastCastFrame) && Tyr.Bot.Frame - lastCastFrame <= 224)
            {
                return false;
            }

            foreach (Unit unit in Tyr.Bot.Enemies())
            {
                // Structures are never cast targets.
                if (UnitTypes.BuildingTypes.Contains(unit.UnitType))
                {
                    continue;
                }

                // NOTE(review): this filters on the caster's own type, which is loop-invariant
                // inside a loop over enemies; it may have been intended to filter the enemy
                // 'unit' instead (compare the sibling Fungal method) — confirm before changing.
                if (agent.Unit.UnitType == UnitTypes.ZERGLING ||
                    agent.Unit.UnitType == UnitTypes.BROODLING ||
                    agent.Unit.UnitType == UnitTypes.EGG ||
                    agent.Unit.UnitType == UnitTypes.LARVA)
                {
                    continue;
                }

                // Ground targets only.
                if (unit.IsFlying)
                {
                    continue;
                }

                // Candidate must be within range 10 of the agent (squared-distance compare).
                if (SC2Util.DistanceSq(unit.Pos, agent.Unit.Pos) > 10 * 10)
                {
                    continue;
                }

                // Do not cast when one of our own ground units is within 2 of the target.
                bool closeAlly = false;
                foreach (Agent ally in Tyr.Bot.UnitManager.Agents.Values)
                {
                    if (ally.Unit.IsFlying)
                    {
                        continue;
                    }
                    if (ally.DistanceSq(unit) <= 2 * 2)
                    {
                        closeAlly = true;
                        break;
                    }
                }
                // NOTE(review): 'break' abandons all remaining candidate targets as soon as one
                // has a close ally; 'continue' (skip only this candidate) may be intended — confirm.
                if (closeAlly)
                {
                    break;
                }

                // Count worthwhile ground enemies clustered within 3 of the candidate target.
                int count = 0;
                foreach (Unit unit2 in Tyr.Bot.Enemies())
                {
                    // BUGFIX: these three filters previously tested 'unit' (and the caster)
                    // instead of 'unit2', so buildings, larva/eggs and flyers were counted
                    // into the cluster. Filter the counted unit itself.
                    if (UnitTypes.BuildingTypes.Contains(unit2.UnitType))
                    {
                        continue;
                    }

                    if (unit2.UnitType == UnitTypes.ZERGLING ||
                        unit2.UnitType == UnitTypes.BROODLING ||
                        unit2.UnitType == UnitTypes.EGG ||
                        unit2.UnitType == UnitTypes.LARVA)
                    {
                        continue;
                    }

                    if (unit2.IsFlying)
                    {
                        continue;
                    }

                    if (SC2Util.DistanceSq(unit.Pos, unit2.Pos) <= 3 * 3)
                    {
                        count++;
                    }
                }

                // Cast only on clusters of at least 6 units, then record the cast frame.
                if (count >= 6)
                {
                    agent.Order(2346, SC2Util.To2D(unit.Pos));
                    CollectionUtil.Add(PhaseFrame, agent.Unit.Tag, Tyr.Bot.Frame);
                    return true;
                }
            }
            return false;
        }
Exemplo n.º 14
0
 /// <summary>
 /// Formats this collection using the shared <c>CollectionUtil</c> string helper.
 /// </summary>
 /// <param name="format">Format string, or null for the default format.</param>
 /// <param name="formatProvider">Culture-specific formatting provider, or null.</param>
 /// <returns>The formatted string representation.</returns>
 public virtual string ToString(string?format, IFormatProvider?formatProvider)
 {
     return CollectionUtil.ToString(formatProvider, format, this);
 }
Exemplo n.º 15
0
 // Needed to verify that the expected error message is produced.
 // Returns the Data of the element at the given position in the internal linked list.
 public string GetDataAt(int index)
     => CollectionUtil.GetItemFromLinkedList(_items, index).Data;
Exemplo n.º 16
0
        /// <summary>
        /// Exercises the connector facade's attribute-merge update operations:
        /// AddAttributeValues (create a new attribute, then append a value to an existing
        /// one) and RemoveAttributeValues (remove a value, then remove attributes that do
        /// not exist), asserting the object state after each round trip.
        /// </summary>
        public void UpdateMergeTests()
        {
            ConnectorAttribute     expected, actual;
            Configuration          config  = new MockConfiguration(false);
            ConnectorFacadeFactory factory = ConnectorFacadeFactory.GetInstance();
            SafeType <Connector>   clazz   = SafeType <Connector> .Get <MockUpdateConnector>();

            // **test only**
            APIConfiguration impl = TestHelpers.CreateTestConfiguration(clazz, config);

            // Disable operation timeouts so slow test runs cannot cause spurious failures.
            impl.SetTimeout(SafeType <APIOperation> .Get <GetApiOp>(), APIConstants.NO_TIMEOUT);
            impl.SetTimeout(SafeType <APIOperation> .Get <UpdateApiOp>(), APIConstants.NO_TIMEOUT);
            impl.SetTimeout(SafeType <APIOperation> .Get <SearchApiOp>(), APIConstants.NO_TIMEOUT);
            ConnectorFacade facade = factory.NewInstance(impl);
            // sniff test to make sure we can get an object..
            ConnectorObject obj = facade.GetObject(ObjectClass.ACCOUNT, NewUid(1), null);

            Assert.AreEqual(NewUid(1), obj.Uid);
            // ok lets add an attribute that doesn't exist..
            String ADDED     = "somthing to add to the object";
            String ATTR_NAME = "added";
            ICollection <ConnectorAttribute> addAttrSet;

            addAttrSet = CollectionUtil.NewSet((IEnumerable <ConnectorAttribute>)obj.GetAttributes());
            addAttrSet.Add(ConnectorAttributeBuilder.Build(ATTR_NAME, ADDED));
            Name name = obj.Name;

            // The Name attribute must not be part of an update payload; remove it first.
            addAttrSet.Remove(name);
            Uid uid = facade.AddAttributeValues(ObjectClass.ACCOUNT, obj.Uid, ConnectorAttributeUtil.FilterUid(addAttrSet), null);

            // get back the object and see if there are the same..
            addAttrSet.Add(name);
            ConnectorObject addO = new ConnectorObject(ObjectClass.ACCOUNT, addAttrSet);

            obj = facade.GetObject(ObjectClass.ACCOUNT, NewUid(1), null);
            Assert.AreEqual(addO, obj);
            // attempt to add on to an existing attribute..
            addAttrSet.Remove(name);
            uid = facade.AddAttributeValues(ObjectClass.ACCOUNT, obj.Uid, ConnectorAttributeUtil.FilterUid(addAttrSet), null);
            // get the object back out and check on it..
            // Adding the same value again should yield the attribute with the value twice.
            obj      = facade.GetObject(ObjectClass.ACCOUNT, uid, null);
            expected = ConnectorAttributeBuilder.Build(ATTR_NAME, ADDED, ADDED);
            actual   = obj.GetAttributeByName(ATTR_NAME);
            Assert.AreEqual(expected, actual);
            // attempt to delete a value from an attribute..
            ICollection <ConnectorAttribute> deleteAttrs = CollectionUtil.NewSet((IEnumerable <ConnectorAttribute>)addO.GetAttributes());

            deleteAttrs.Remove(name);
            // Removing one occurrence should leave the attribute with a single value.
            uid      = facade.RemoveAttributeValues(ObjectClass.ACCOUNT, addO.Uid, ConnectorAttributeUtil.FilterUid(deleteAttrs), null);
            obj      = facade.GetObject(ObjectClass.ACCOUNT, uid, null);
            expected = ConnectorAttributeBuilder.Build(ATTR_NAME, ADDED);
            actual   = obj.GetAttributeByName(ATTR_NAME);
            Assert.AreEqual(expected, actual);
            // attempt to delete an attribute that doesn't exist..
            // Removing unknown attributes must be a no-op rather than an error.
            ICollection <ConnectorAttribute> nonExist = new HashSet <ConnectorAttribute>();

            nonExist.Add(NewUid(1));
            nonExist.Add(ConnectorAttributeBuilder.Build("does not exist", "asdfe"));
            uid = facade.RemoveAttributeValues(ObjectClass.ACCOUNT, addO.Uid, ConnectorAttributeUtil.FilterUid(nonExist), null);
            obj = facade.GetObject(ObjectClass.ACCOUNT, NewUid(1), null);
            Assert.IsTrue(obj.GetAttributeByName("does not exist") == null);
        }
 /// <summary>
 /// Sorts the given list of <see cref="WeightedFragInfo"/> in place by score
 /// and returns the same list instance.
 /// </summary>
 public override IList <WeightedFragInfo> GetWeightedFragInfoList(IList <WeightedFragInfo> src)
 {
     var byScore = new ScoreComparer();
     CollectionUtil.TimSort(src, byScore);
     return src;
 }
Exemplo n.º 18
0
 /// <summary>
 /// Merges all entries from <paramref name="indexes"/> into this dictionary
 /// via the shared <c>CollectionUtil</c> union helper.
 /// </summary>
 public void UnionWith(IDictionary <string, int> indexes)
     => CollectionUtil.UnionDictionaries(this, indexes);
Exemplo n.º 19
0
        /// <summary>
        /// Attempts to cast Fungal Growth (ability id 74) with the infestor <paramref name="agent"/>.
        /// Target priority: enemy widow mines in range, then high-value single units
        /// (banshee, tempest, damaged battlecruiser), then clusters of at least 6 worthwhile units.
        /// </summary>
        /// <param name="agent">The casting infestor.</param>
        /// <returns>True when a cast order was issued (or one is still pending); false otherwise.</returns>
        public bool Fungal(Agent agent)
        {
            // Fungal Growth costs 75 energy.
            if (agent.Unit.Energy < 75)
            {
                return false;
            }

            // A fungal issued within the last 22 frames is treated as still in progress.
            // (TryGetValue avoids the ContainsKey + indexer double lookup.)
            if (FungalTargets.TryGetValue(agent.Unit.Tag, out var pending) && Tyr.Bot.Frame - pending.Frame <= 22)
            {
                return true;
            }

            // Priority 1: widow mines in cast range not already covered by a pending fungal.
            foreach (UnitLocation mine in Tyr.Bot.EnemyMineManager.Mines)
            {
                bool closeFungal = false;
                foreach (FungalTarget fungal in FungalTargets.Values)
                {
                    if (SC2Util.DistanceSq(mine.Pos, fungal.Pos) <= 3 * 3)
                    {
                        closeFungal = true;
                        break;
                    }
                }
                if (closeFungal)
                {
                    continue;
                }
                if (SC2Util.DistanceSq(mine.Pos, agent.Unit.Pos) <= 10 * 10)
                {
                    CollectionUtil.Add(FungalTargets, agent.Unit.Tag, new FungalTarget()
                    {
                        Pos = SC2Util.To2D(mine.Pos), Frame = Tyr.Bot.Frame, InfestorTag = agent.Unit.Tag
                    });
                    agent.Order(74, SC2Util.To2D(mine.Pos));
                    return true;
                }
            }

            // Priority 2: regular enemy units.
            foreach (Unit unit in Tyr.Bot.Enemies())
            {
                // Structures are never fungal targets.
                if (UnitTypes.BuildingTypes.Contains(unit.UnitType))
                {
                    continue;
                }

                // Low-value units are not worth the energy.
                if (unit.UnitType == UnitTypes.ZERGLING ||
                    unit.UnitType == UnitTypes.BROODLING ||
                    unit.UnitType == UnitTypes.OVERLORD)
                {
                    continue;
                }

                // Skip targets already covered by a pending fungal.
                bool closeFungal = false;
                foreach (FungalTarget fungal in FungalTargets.Values)
                {
                    if (SC2Util.DistanceSq(unit.Pos, fungal.Pos) <= 4 * 4)
                    {
                        closeFungal = true;
                        break;
                    }
                }
                if (closeFungal)
                {
                    continue;
                }

                // High-value single targets are fungaled immediately.
                if (unit.UnitType == UnitTypes.BANSHEE || unit.UnitType == UnitTypes.TEMPEST || (unit.UnitType == UnitTypes.BATTLECRUISER && unit.Health < 200))
                {
                    CollectionUtil.Add(FungalTargets, agent.Unit.Tag, new FungalTarget()
                    {
                        Pos = SC2Util.To2D(unit.Pos), Frame = Tyr.Bot.Frame, InfestorTag = agent.Unit.Tag
                    });
                    agent.Order(74, SC2Util.To2D(unit.Pos));
                    return true;
                }

                if (SC2Util.DistanceSq(unit.Pos, agent.Unit.Pos) <= 10 * 10)
                {
                    // Count worthwhile enemies clustered within 3 of the candidate target.
                    int count = 0;
                    foreach (Unit unit2 in Tyr.Bot.Enemies())
                    {
                        // BUGFIX: these filters previously tested 'unit' instead of 'unit2',
                        // so buildings, zerglings, broodlings and overlords inflated the
                        // cluster count. Filter the counted unit itself.
                        if (UnitTypes.BuildingTypes.Contains(unit2.UnitType))
                        {
                            continue;
                        }

                        if (unit2.UnitType == UnitTypes.ZERGLING ||
                            unit2.UnitType == UnitTypes.BROODLING ||
                            unit2.UnitType == UnitTypes.OVERLORD)
                        {
                            continue;
                        }

                        if (SC2Util.DistanceSq(unit.Pos, unit2.Pos) <= 3 * 3)
                        {
                            count++;
                        }
                    }
                    // Cast only on clusters of at least 6 units.
                    if (count >= 6)
                    {
                        CollectionUtil.Add(FungalTargets, agent.Unit.Tag, new FungalTarget()
                        {
                            Pos = SC2Util.To2D(unit.Pos), Frame = Tyr.Bot.Frame, InfestorTag = agent.Unit.Tag
                        });
                        agent.Order(74, SC2Util.To2D(unit.Pos));
                        return true;
                    }
                }
            }
            return false;
        }
Exemplo n.º 20
0
        /// <summary>
        /// Per-frame unit bookkeeping: rebuilds unit-type counts (total and completed),
        /// per-base building counts, the set of active ability orders and the expected
        /// supply from in-progress structures; syncs the Agents map with the observation
        /// (updating, reviving from DisappearedUnits, or creating agents); reserves
        /// resources for pending build requests; and removes agents for dead units.
        /// </summary>
        /// <param name="tyr">The bot instance providing the current game observation.</param>
        public void OnFrame(Tyr tyr)
        {
            // Rebuild all per-frame aggregates from scratch.
            Counts          = new Dictionary <uint, int>();
            CompletedCounts = new Dictionary <uint, int>();
            HashSet <ulong> existingUnits = new HashSet <ulong>();

            foreach (Base b in tyr.BaseManager.Bases)
            {
                b.BuildingCounts     = new Dictionary <uint, int>();
                b.BuildingsCompleted = new Dictionary <uint, int>();
            }
            ActiveOrders = new HashSet <uint>();

            FoodExpected = 0;
            // Update our unit set.
            foreach (Unit unit in tyr.Observation.Observation.RawData.Units)
            {
                if (unit.Owner == tyr.PlayerId)
                {
                    // Count how many of each unitType we have.
                    CollectionUtil.Increment(Counts, unit.UnitType);
                    // Types with equivalents (e.g. upgraded/lowered forms) count as each equivalent too.
                    if (UnitTypes.EquivalentTypes.ContainsKey(unit.UnitType))
                    {
                        foreach (uint t in UnitTypes.EquivalentTypes[unit.UnitType])
                        {
                            CollectionUtil.Increment(Counts, t);
                        }
                    }
                    // Treat near-finished buildings (>= 0.9999 progress) as completed.
                    if (unit.BuildProgress >= 0.9999f)
                    {
                        CollectionUtil.Increment(CompletedCounts, unit.UnitType);
                        if (UnitTypes.EquivalentTypes.ContainsKey(unit.UnitType))
                        {
                            foreach (uint t in UnitTypes.EquivalentTypes[unit.UnitType])
                            {
                                CollectionUtil.Increment(CompletedCounts, t);
                            }
                        }
                    }

                    // Units being produced by a current order also count toward Counts.
                    if (unit.Orders != null && unit.Orders.Count > 0 && Abilities.Creates.ContainsKey(unit.Orders[0].AbilityId))
                    {
                        CollectionUtil.Increment(Counts, Abilities.Creates[unit.Orders[0].AbilityId]);
                    }

                    // Expected supply: pylons under construction and ability 1344
                    // (presumably overlord production — TODO confirm) each add 8 food.
                    if (unit.BuildProgress < 1 && unit.UnitType == UnitTypes.PYLON)
                    {
                        FoodExpected += 8;
                    }
                    if (unit.Orders != null && unit.Orders.Count > 0 && unit.Orders[0].AbilityId == 1344)
                    {
                        FoodExpected += 8;
                    }
                    // Ability 1216 (presumably hatchery->lair morph — TODO confirm) counts as a lair.
                    if (unit.Orders != null && unit.Orders.Count > 0 && unit.Orders[0].AbilityId == 1216)
                    {
                        CollectionUtil.Increment(Counts, UnitTypes.LAIR);
                    }

                    // NOTE(review): this indexes Orders[0] without the null/empty guard used
                    // above, and an egg with a create-order was already counted by the earlier
                    // Abilities.Creates branch — possible crash and/or double count; confirm.
                    if (unit.UnitType == UnitTypes.EGG)
                    {
                        CollectionUtil.Increment(Counts, Abilities.Creates[unit.Orders[0].AbilityId]);
                    }


                    existingUnits.Add(unit.Tag);

                    if (Agents.ContainsKey(unit.Tag))
                    {
                        // Known unit: refresh its snapshot and reset its pending command.
                        Agent agent = Agents[unit.Tag];
                        agent.Unit = unit;

                        // A larva whose last ability was a create counts the created type.
                        if (unit.UnitType == UnitTypes.LARVA &&
                            agent.LastAbility >= 0 &&
                            Abilities.Creates.ContainsKey((uint)agent.LastAbility))
                        {
                            CollectionUtil.Increment(Counts, Abilities.Creates[(uint)agent.LastAbility]);
                        }
                        agent.Command = null;
                        // Attribute the unit to its base's building tallies, if assigned.
                        if (agent.Base != null)
                        {
                            CollectionUtil.Increment(agent.Base.BuildingCounts, unit.UnitType);
                            if (unit.BuildProgress >= 0.9999f)
                            {
                                CollectionUtil.Increment(agent.Base.BuildingsCompleted, unit.UnitType);
                            }
                        }
                    }
                    else
                    {
                        // Unknown tag: either revive a previously-disappeared agent or create a new one.
                        if (DisappearedUnits.ContainsKey(unit.Tag))
                        {
                            Agents.Add(unit.Tag, DisappearedUnits[unit.Tag]);
                            DisappearedUnits[unit.Tag].Unit = unit;
                        }
                        else
                        {
                            Agent agent = new Agent(unit);
                            Agents.Add(unit.Tag, agent);
                            tyr.TaskManager.NewAgent(agent);
                        }
                    }

                    // Record which abilities are currently being executed by any unit.
                    if (unit.Orders != null && unit.Orders.Count > 0)
                    {
                        ActiveOrders.Add(unit.Orders[0].AbilityId);
                    }
                }
            }

            // Build requests that have a worker assigned.
            foreach (BuildRequest request in ConstructionTask.Task.BuildRequests)
            {
                // Count how many of each unitType we intend to build.
                CollectionUtil.Increment(Counts, request.Type);
                if (request.Type == UnitTypes.PYLON)
                {
                    FoodExpected += 8;
                }
                if (request.Base != null)
                {
                    CollectionUtil.Increment(request.Base.BuildingCounts, request.Type);
                }

                // Reserve resources unless the worker is already executing the build ability.
                if (request.worker.Unit.Orders == null ||
                    request.worker.Unit.Orders.Count == 0 ||
                    request.worker.Unit.Orders[0].AbilityId != BuildingType.LookUp[request.Type].Ability)
                {
                    tyr.ReservedMinerals += BuildingType.LookUp[request.Type].Minerals;
                    tyr.ReservedGas      += BuildingType.LookUp[request.Type].Gas;
                    string workerAbility = "";
                    if (request.worker.Unit.Orders != null &&
                        request.worker.Unit.Orders.Count > 0)
                    {
                        workerAbility = " Ability: " + request.worker.Unit.Orders[0].AbilityId;
                    }
                    tyr.DrawText("Reserving: " + BuildingType.LookUp[request.Type].Name + workerAbility);
                }
            }

            // Build requests not yet assigned to a worker: always reserve their cost.
            foreach (BuildRequest request in ConstructionTask.Task.UnassignedRequests)
            {
                // Count how many of each unitType we intend to build.
                CollectionUtil.Increment(Counts, request.Type);
                // NOTE(review): food is added unconditionally here, whereas the assigned-request
                // loop above only adds food for pylons — possibly intentional, confirm.
                FoodExpected += 8;
                if (request.Base != null)
                {
                    CollectionUtil.Increment(request.Base.BuildingCounts, request.Type);
                }

                tyr.ReservedMinerals += BuildingType.LookUp[request.Type].Minerals;
                tyr.ReservedGas      += BuildingType.LookUp[request.Type].Gas;
                tyr.DrawText("Reserving: " + BuildingType.LookUp[request.Type].Name);
            }

            // Remove dead units.
            if (tyr.Observation != null &&
                tyr.Observation.Observation != null &&
                tyr.Observation.Observation.RawData != null &&
                tyr.Observation.Observation.RawData.Event != null &&
                tyr.Observation.Observation.RawData.Event.DeadUnits != null)
            {
                foreach (ulong deadUnit in tyr.Observation.Observation.RawData.Event.DeadUnits)
                {
                    Agents.Remove(deadUnit);
                }
            }
        }
Exemplo n.º 21
0
        //public static void main( string[] args ) throws Exception {
        //  Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
        //  QueryParser parser = new QueryParser(Version.LUCENE_CURRENT,  "f", analyzer );
        //  Query query = parser.parse( "a x:b" );
        //  FieldQuery fieldQuery = new FieldQuery( query, true, false );

        //  Directory dir = new RAMDirectory();
        //  IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer));
        //  Document doc = new Document();
        //  IndexableFieldType ft = new IndexableFieldType(TextField.TYPE_STORED);
        //  ft.setStoreTermVectors(true);
        //  ft.setStoreTermVectorOffsets(true);
        //  ft.setStoreTermVectorPositions(true);
        //  doc.add( new Field( "f", ft, "a a a b b c a b b c d e f" ) );
        //  doc.add( new Field( "f", ft, "b a b a f" ) );
        //  writer.addDocument( doc );
        //  writer.close();

        //  IndexReader reader = IndexReader.open(dir1);
        //  new FieldTermStack( reader, 0, "f", fieldQuery );
        //  reader.close();
        //}

        /// <summary>
        /// Builds the per-document stack of query-matching terms for one field,
        /// read from the document's term vector: each matching term occurrence is
        /// recorded with its character offsets, position and an IDF-style weight,
        /// then the list is sorted by position and same-position duplicates are
        /// linked together. Returns early (leaving an empty term list, i.e. a
        /// "null snippet") whenever the required data is unavailable.
        /// </summary>
        /// <param name="reader"><see cref="IndexReader"/> of the index</param>
        /// <param name="docId">document id to be highlighted</param>
        /// <param name="fieldName">field of the document to be highlighted</param>
        /// <param name="fieldQuery"><see cref="FieldQuery"/> object</param>
        /// <exception cref="IOException">If there is a low-level I/O error</exception>
        public FieldTermStack(IndexReader reader, int docId, string fieldName, FieldQuery fieldQuery)
        {
            this.fieldName = fieldName;

            ISet <string> termSet = fieldQuery.GetTermSet(fieldName);

            // just return to make null snippet if un-matched fieldName specified when fieldMatch == true
            if (termSet == null)
            {
                return;
            }

            // Term vectors must have been stored for this document, and for this
            // field specifically; otherwise there is nothing to highlight.
            Fields vectors = reader.GetTermVectors(docId);

            if (vectors == null)
            {
                // null snippet
                return;
            }

            Terms vector = vectors.GetTerms(fieldName);

            if (vector == null)
            {
                // null snippet
                return;
            }

            CharsRef             spare     = new CharsRef();
            TermsEnum            termsEnum = vector.GetEnumerator();
            DocsAndPositionsEnum dpEnum    = null;
            BytesRef             text;

            // MaxDoc serves as the document count for the IDF-style weight below.
            int numDocs = reader.MaxDoc;

            // Walk every term in this field's vector, keeping only the ones the
            // query matched (i.e. members of termSet).
            while (termsEnum.MoveNext())
            {
                text = termsEnum.Term;
                UnicodeUtil.UTF8toUTF16(text, spare);
                string term = spare.ToString();
                if (!termSet.Contains(term))
                {
                    continue;
                }
                // Positions are required; without them we cannot build the stack.
                dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
                if (dpEnum == null)
                {
                    // null snippet
                    return;
                }

                // The term-vector enum contains exactly this one document; advance onto it.
                dpEnum.NextDoc();

                // For weight look here: http://lucene.apache.org/core/3_6_0/api/core/org/apache/lucene/search/DefaultSimilarity.html
                float weight = (float)(Math.Log(numDocs / (double)(reader.DocFreq(new Term(fieldName, text)) + 1)) + 1.0);

                int freq = dpEnum.Freq;

                // Record every occurrence of this term; offsets are also required.
                for (int i = 0; i < freq; i++)
                {
                    int pos = dpEnum.NextPosition();
                    if (dpEnum.StartOffset < 0)
                    {
                        return; // no offsets, null snippet
                    }
                    termList.Add(new TermInfo(term, dpEnum.StartOffset, dpEnum.EndOffset, pos, weight));
                }
            }

            // sort by position
            CollectionUtil.TimSort(termList);

            // now look for dups at the same position, linking them together
            int      currentPos = -1;
            TermInfo previous   = null;
            TermInfo first      = null;

            // Note: i is only advanced in the else-branch; RemoveAt shifts the
            // next element into slot i, so removal implicitly "advances".
            for (int i = 0; i < termList.Count;)
            {
                TermInfo current = termList[i];
                if (current.Position == currentPos)
                {
                    if (Debugging.AssertsEnabled)
                    {
                        Debugging.Assert(previous != null);
                    }
                    previous.SetNext(current);
                    previous = current;
                    //iterator.Remove();

                    // LUCENENET NOTE: Remove, but don't advance the i position (since removing will advance to the next item)
                    termList.RemoveAt(i);
                }
                else
                {
                    // New position: close the previous duplicate ring, then start a new one.
                    if (previous != null)
                    {
                        previous.SetNext(first);
                    }
                    previous   = first = current;
                    currentPos = current.Position;

                    // LUCENENET NOTE: Only increment the position if we don't do a delete.
                    i++;
                }
            }

            // Close the final ring of same-position terms.
            if (previous != null)
            {
                previous.SetNext(first);
            }
        }
 /// <summary>
 /// Registers the given instruction in both lookup maps, keyed by its
 /// source activity and by its target activity respectively.
 /// </summary>
 protected internal virtual void IndexInstruction(IValidatingMigrationInstruction instruction)
 {
     var sourceScope = instruction.SourceActivity;
     var targetScope = instruction.TargetActivity;

     CollectionUtil.AddToMapOfLists(InstructionsBySourceScope, sourceScope, instruction);
     CollectionUtil.AddToMapOfLists(InstructionsByTargetScope, targetScope, instruction);
 }
        /// <summary>
        /// Called whenever the running merges have changed, to pause &amp; unpause
        /// threads. This method sorts the merge threads by their merge size in
        /// descending order and then pauses/unpauses threads from first to last --
        /// that way, smaller merges are guaranteed to run before larger ones.
        /// </summary>
        protected virtual void UpdateMergeThreads()
        {
            lock (this)
            {
                // Collect threads that are alive and currently own a merge,
                // pruning any dead threads from the tracked list as we go.
                IList <MergeThread> running = new List <MergeThread>();

                int idx = 0;
                while (idx < m_mergeThreads.Count)
                {
                    MergeThread candidate = m_mergeThreads[idx];
                    if (!candidate.IsAlive)
                    {
                        // Prune any dead threads
                        m_mergeThreads.RemoveAt(idx);
                        continue;
                    }
                    if (candidate.CurrentMerge != null)
                    {
                        running.Add(candidate);
                    }
                    idx++;
                }

                // Sort the merge threads in descending order.
                CollectionUtil.TimSort(running, compareByMergeDocCount);

                int priority = mergeThreadPriority;
                int runningCount = running.Count;
                for (int i = 0; i < runningCount; i++)
                {
                    MergeThread          thread = running[i];
                    MergePolicy.OneMerge merge  = thread.CurrentMerge;
                    if (merge == null)
                    {
                        continue;
                    }

                    // pause the thread if maxThreadCount is smaller than the number of merge threads.
                    bool shouldPause = i < runningCount - maxThreadCount;

                    if (IsVerbose && shouldPause != merge.IsPaused)
                    {
                        Message((shouldPause ? "pause thread " : "unpause thread ") + thread.Name);
                    }
                    if (shouldPause != merge.IsPaused)
                    {
                        merge.SetPause(shouldPause);
                    }

                    if (!shouldPause)
                    {
                        if (IsVerbose)
                        {
                            Message("set priority of merge thread " + thread.Name + " to " + priority);
                        }
                        // Unpaused threads get escalating priority, capped at Highest.
                        thread.SetThreadPriority((ThreadPriority)priority);
                        priority = Math.Min((int)ThreadPriority.Highest, 1 + priority);
                    }
                }
            }
        }
Exemplo n.º 24
0
        /// <summary>
        /// Handles a process-instance-level execution (one with no parent): if it
        /// is the sub process of a call activity, notifies the call activity's
        /// behavior so variables are copied out before destruction; deletes the
        /// process instance when no active child executions remain; runs the
        /// process's end execution listeners; and finally triggers the super
        /// execution so the call activity can continue.
        /// </summary>
        /// <param name="processInstanceExecution">The root execution of the process instance being ended.</param>
        protected internal virtual void HandleProcessInstanceExecution(IExecutionEntity processInstanceExecution)
        {
            IExecutionEntityManager executionEntityManager = commandContext.ExecutionEntityManager;

            string processInstanceId = processInstanceExecution.Id;

            // No parent execution == process instance id
            logger.LogDebug($"No parent execution found. Verifying if process instance {processInstanceId} can be stopped.");

            IExecutionEntity            superExecution             = processInstanceExecution.SuperExecution;
            ISubProcessActivityBehavior subProcessActivityBehavior = null;

            // copy variables before destroying the ended sub process instance (call activity)
            if (superExecution != null)
            {
                FlowNode superExecutionElement = (FlowNode)superExecution.CurrentFlowElement;
                subProcessActivityBehavior = (ISubProcessActivityBehavior)superExecutionElement.Behavior;
                try
                {
                    subProcessActivityBehavior.Completing(superExecution, processInstanceExecution);
                }
                catch (Exception e)
                {
                    // Wrap in an ActivitiException so callers see a domain-specific failure.
                    logger.LogError($"Error while completing sub process of execution {processInstanceExecution}. Exception Message: {e.Message}");
                    throw new ActivitiException("Error while completing sub process of execution " + processInstanceExecution, e);
                }
            }

            int activeExecutions = GetNumberOfActiveChildExecutionsForProcessInstance(executionEntityManager, processInstanceId, superExecution);

            if (activeExecutions == 0)
            {
                logger.LogDebug($"No active executions found. Ending process instance {processInstanceId} ");

                // note the use of execution here vs processinstance execution for getting the flowelement
                executionEntityManager.DeleteProcessInstanceExecutionEntity(processInstanceId, execution.CurrentFlowElement?.Id, null, false, false);
            }
            else
            {
                logger.LogDebug($"Active executions found. Process instance {processInstanceId} will not be ended.");
            }

            Process process = ProcessDefinitionUtil.GetProcess(processInstanceExecution.ProcessDefinitionId);

            // Execute execution listeners for process end.
            if (CollectionUtil.IsNotEmpty(process.ExecutionListeners))
            {
                ExecuteExecutionListeners(process, processInstanceExecution, BaseExecutionListenerFields.EVENTNAME_END);
            }

            // and trigger execution afterwards if doing a call activity
            if (superExecution != null)
            {
                superExecution.SubProcessInstance = null;
                try
                {
                    // subProcessActivityBehavior was assigned above in the same
                    // superExecution != null branch, so it is non-null here.
                    subProcessActivityBehavior.Completed(superExecution);
                }
                catch (Exception e)
                {
                    logger.LogError($"Error while completing sub process of execution {processInstanceExecution}. Exception Message: {e.Message}.");
                    throw new ActivitiException("Error while completing sub process of execution " + processInstanceExecution, e);
                }
            }
        }
Exemplo n.º 25
0
 /// <summary>
 /// Enumerates every attribute allowed on this tag at the given offset:
 /// the tag's own attributes, its inherited attributes, and any additional
 /// attributes resolved through the declared-elements cache.
 /// </summary>
 public IEnumerable <AttributeInfo> GetAllowedAttributes(IPsiSourceFile sourceFile, int offset, bool strict = false)
 {
     var additionalAttributes = declaredElementsCache.GetAdditionalAttributesForTag(sourceFile, this, offset, strict);
     return CollectionUtil.EnumerateAll(OwnAttributes, InheritedAttributes, additionalAttributes);
 }
Exemplo n.º 26
0
        /// <summary>
        /// Initialize the deleter: find all previous commits in
        /// the <see cref="Directory"/>, incref the files they reference, call
        /// the policy to let it delete commits.  this will remove
        /// any files not referenced by any of the commits. </summary>
        /// <exception cref="IOException"> if there is a low-level IO error </exception>
        public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos, InfoStream infoStream, IndexWriter writer, bool initialIndexExists)
        {
            this.infoStream = infoStream;
            this.writer     = writer;

            string currentSegmentsFile = segmentInfos.GetSegmentsFileName();

            if (infoStream.IsEnabled("IFD"))
            {
                infoStream.Message("IFD", "init: current segments file is \"" + currentSegmentsFile + "\"; deletionPolicy=" + policy);
            }

            this.policy    = policy;
            this.directory = directory;

            // First pass: walk the files and initialize our ref
            // counts:
            long currentGen = segmentInfos.Generation;

            CommitPoint currentCommitPoint = null;

            string[] files = null;
            try
            {
                files = directory.ListAll();
            }
            // LUCENENET: exception variable omitted (was suppressed with #pragma warning disable 168)
            catch (DirectoryNotFoundException)
            {
                // it means the directory is empty, so ignore it.
                files = new string[0];
            }

            if (currentSegmentsFile != null)
            {
                Regex r = IndexFileNames.CODEC_FILE_PATTERN;
                foreach (string fileName in files)
                {
                    if (!fileName.EndsWith("write.lock", StringComparison.Ordinal) && !fileName.Equals(IndexFileNames.SEGMENTS_GEN, StringComparison.Ordinal) &&
                        (r.IsMatch(fileName) || fileName.StartsWith(IndexFileNames.SEGMENTS, StringComparison.Ordinal)))
                    {
                        // Add this file to refCounts with initial count 0:
                        GetRefCount(fileName);

                        if (fileName.StartsWith(IndexFileNames.SEGMENTS, StringComparison.Ordinal))
                        {
                            // this is a commit (segments or segments_N), and
                            // it's valid (<= the max gen).  Load it, then
                            // incref all files it refers to:
                            if (infoStream.IsEnabled("IFD"))
                            {
                                infoStream.Message("IFD", "init: load commit \"" + fileName + "\"");
                            }
                            SegmentInfos sis = new SegmentInfos();
                            try
                            {
                                sis.Read(directory, fileName);
                            }
                            // LUCENENET: exception variable omitted (was suppressed with #pragma warning disable 168)
                            catch (FileNotFoundException)
                            {
                                // LUCENE-948: on NFS (and maybe others), if
                                // you have writers switching back and forth
                                // between machines, it's very likely that the
                                // dir listing will be stale and will claim a
                                // file segments_X exists when in fact it
                                // doesn't.  So, we catch this and handle it
                                // as if the file does not exist
                                if (infoStream.IsEnabled("IFD"))
                                {
                                    infoStream.Message("IFD", "init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
                                }
                                sis = null;
                            }
                            // LUCENENET specific - .NET (thankfully) only has one FileNotFoundException, so we don't need this
                            //catch (NoSuchFileException)
                            //{
                            //    // LUCENE-948: on NFS (and maybe others), if
                            //    // you have writers switching back and forth
                            //    // between machines, it's very likely that the
                            //    // dir listing will be stale and will claim a
                            //    // file segments_X exists when in fact it
                            //    // doesn't.  So, we catch this and handle it
                            //    // as if the file does not exist
                            //    if (infoStream.IsEnabled("IFD"))
                            //    {
                            //        infoStream.Message("IFD", "init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
                            //    }
                            //    sis = null;
                            //}
                            // LUCENENET specific - since NoSuchDirectoryException subclasses FileNotFoundException
                            // in Lucene, we need to catch it here to be on the safe side.
                            catch (System.IO.DirectoryNotFoundException)
                            {
                                // LUCENE-948: on NFS (and maybe others), if
                                // you have writers switching back and forth
                                // between machines, it's very likely that the
                                // dir listing will be stale and will claim a
                                // file segments_X exists when in fact it
                                // doesn't.  So, we catch this and handle it
                                // as if the file does not exist
                                if (infoStream.IsEnabled("IFD"))
                                {
                                    infoStream.Message("IFD", "init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
                                }
                                sis = null;
                            }
                            catch (IOException /*e*/)
                            {
                                if (SegmentInfos.GenerationFromSegmentsFileName(fileName) <= currentGen && directory.FileLength(fileName) > 0)
                                {
                                    throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
                                }
                                else
                                {
                                    // Most likely we are opening an index that
                                    // has an aborted "future" commit, so suppress
                                    // exc in this case
                                    sis = null;
                                }
                            }
                            if (sis != null)
                            {
                                CommitPoint commitPoint = new CommitPoint(commitsToDelete, directory, sis);
                                if (sis.Generation == segmentInfos.Generation)
                                {
                                    currentCommitPoint = commitPoint;
                                }
                                commits.Add(commitPoint);
                                IncRef(sis, true);

                                if (lastSegmentInfos == null || sis.Generation > lastSegmentInfos.Generation)
                                {
                                    lastSegmentInfos = sis;
                                }
                            }
                        }
                    }
                }
            }

            if (currentCommitPoint == null && currentSegmentsFile != null && initialIndexExists)
            {
                // We did not in fact see the segments_N file
                // corresponding to the segmentInfos that was passed
                // in.  Yet, it must exist, because our caller holds
                // the write lock.  this can happen when the directory
                // listing was stale (eg when index accessed via NFS
                // client with stale directory listing cache).  So we
                // try now to explicitly open this commit point:
                SegmentInfos sis = new SegmentInfos();
                try
                {
                    sis.Read(directory, currentSegmentsFile);
                }
                catch (IOException e)
                {
                    throw new CorruptIndexException("failed to locate current segments_N file \"" + currentSegmentsFile + "\"" + e.ToString(), e);
                }
                if (infoStream.IsEnabled("IFD"))
                {
                    infoStream.Message("IFD", "forced open of current segments file " + segmentInfos.GetSegmentsFileName());
                }
                currentCommitPoint = new CommitPoint(commitsToDelete, directory, sis);
                commits.Add(currentCommitPoint);
                IncRef(sis, true);
            }

            // We keep commits list in sorted order (oldest to newest):
            CollectionUtil.TimSort(commits);

            // Now delete anything with ref count at 0.  These are
            // presumably abandoned files eg due to crash of
            // IndexWriter.
            foreach (KeyValuePair <string, RefCount> entry in refCounts)
            {
                RefCount rc       = entry.Value;
                string   fileName = entry.Key;
                if (0 == rc.count)
                {
                    if (infoStream.IsEnabled("IFD"))
                    {
                        infoStream.Message("IFD", "init: removing unreferenced file \"" + fileName + "\"");
                    }
                    DeleteFile(fileName);
                }
            }

            // Finally, give policy a chance to remove things on
            // startup:
            this.policy.OnInit(commits);

            // Always protect the incoming segmentInfos since
            // sometime it may not be the most recent commit
            Checkpoint(segmentInfos, false);

            startingCommitDeleted = currentCommitPoint == null ? false : currentCommitPoint.IsDeleted;

            DeleteCommits();
        }
Exemplo n.º 27
0
        /// <summary>
        /// Produces the evaluation descriptor for one enumeration-method argument.
        /// A plain (non-lambda) expression yields an <c>ExprDotEvalParamExpr</c>;
        /// a lambda ("goes-to") expression has its body validated against a stream
        /// type service extended with the lambda's streams and yields an
        /// <c>ExprDotEvalParamLambda</c>.
        /// </summary>
        private ExprDotEvalParam GetBodyAndParameter(
            String enumMethodUsedName,
            int parameterNum,
            ExprNode parameterNode,
            EventType inputEventType,
            Type collectionComponentType,
            ExprValidationContext validationContext,
            IList <ExprDotEvalParam> priorParameters,
            DotMethodFP footprint)
        {
            // Non-lambda parameters (constants and other expressions, not "=>")
            // need no subtree validation here: chain parameter validation already
            // happened in ExprDotNode.validate and footprint matching covered types.
            if (!(parameterNode is ExprLambdaGoesNode))
            {
                return new ExprDotEvalParamExpr(parameterNum, parameterNode, parameterNode.ExprEvaluator);
            }

            var lambdaNode = (ExprLambdaGoesNode)parameterNode;

            // Resolve the secondary stream types introduced by the lambda's names.
            var lambdaStreamTypes = GetAddStreamTypes(
                enumMethodUsedName, lambdaNode.GoesToNames, inputEventType, collectionComponentType, priorParameters,
                validationContext.EventAdapterService);
            var lambdaStreamNames = lambdaNode.GoesToNames.ToArray();

            ValidateDuplicateStreamNames(validationContext.StreamTypeService.StreamNames, lambdaStreamNames);

            // Extend the known event types and stream names with the lambda's streams.
            var combinedTypes = (EventType[])CollectionUtil.ArrayExpandAddElements(
                validationContext.StreamTypeService.EventTypes, lambdaStreamTypes);
            var combinedNames = (String[])CollectionUtil.ArrayExpandAddElements(
                validationContext.StreamTypeService.StreamNames, lambdaStreamNames);

            var extendedStreamTypes = new StreamTypeServiceImpl(
                combinedTypes, combinedNames, new bool[combinedTypes.Length], null, false);

            // Validate the lambda body against the extended stream type service.
            var body = lambdaNode.ChildNodes[0];
            try
            {
                var bodyValidationContext = new ExprValidationContext(extendedStreamTypes, validationContext);
                body = ExprNodeUtility.GetValidatedSubtree(ExprNodeOrigin.DECLAREDEXPRBODY, body, bodyValidationContext);
            }
            catch (ExprValidationException ex)
            {
                throw new ExprValidationException(
                          "Error validating enumeration method '" + enumMethodUsedName + "' parameter " + parameterNum + ": " +
                          ex.Message, ex);
            }

            var bodyEvaluator = body.ExprEvaluator;
            var expectedType  = footprint.Parameters[parameterNum].ParamType;

            // Lambda-methods don't use a specific expected return-type, so passing null for type is fine.
            DotMethodUtil.ValidateSpecificType(
                enumMethodUsedName, DotMethodTypeEnum.ENUM, expectedType, null, bodyEvaluator.ReturnType, parameterNum,
                body);

            var numStreamsIncoming = validationContext.StreamTypeService.EventTypes.Length;

            return new ExprDotEvalParamLambda(
                       parameterNum, body, bodyEvaluator,
                       numStreamsIncoming, lambdaNode.GoesToNames, lambdaStreamTypes);
        }
Exemplo n.º 28
0
        public static ExprNodeUtilMethodDesc ResolveMethodAllowWildcardAndStream(
            string className,
            Type optionalClass,
            string methodName,
            IList<ExprNode> parameters,
            bool allowWildcard,
            EventType wildcardType,
            ExprNodeUtilResolveExceptionHandler exceptionHandler,
            string functionName,
            StatementRawInfo statementRawInfo,
            StatementCompileTimeServices services)
        {
            var paramTypes = new Type[parameters.Count];
            var childForges = new ExprForge[parameters.Count];
            var count = 0;
            var allowEventBeanType = new bool[parameters.Count];
            var allowEventBeanCollType = new bool[parameters.Count];
            var childEvalsEventBeanReturnTypesForges = new ExprForge[parameters.Count];
            var allConstants = true;
            
            foreach (var childNode in parameters) {
                if (!EnumMethodResolver.IsEnumerationMethod(methodName, services.ImportServiceCompileTime) && childNode is ExprLambdaGoesNode) {
                    throw new ExprValidationException(
                        "Unrecognized lambda-expression encountered as parameter to UDF or static method '" +
                        methodName +
                        "'");
                }

                if (childNode is ExprWildcard) {
                    if (wildcardType == null || !allowWildcard) {
                        throw new ExprValidationException("Failed to resolve wildcard parameter to a given event type");
                    }

                    childForges[count] = new ExprEvalStreamNumUnd(0, wildcardType.UnderlyingType);
                    childEvalsEventBeanReturnTypesForges[count] = new ExprEvalStreamNumEvent(0);
                    paramTypes[count] = wildcardType.UnderlyingType;
                    allowEventBeanType[count] = true;
                    allConstants = false;
                    count++;
                    continue;
                }

                if (childNode is ExprStreamUnderlyingNode) {
                    var und = (ExprStreamUnderlyingNode) childNode;
                    var tableMetadata = services.TableCompileTimeResolver.ResolveTableFromEventType(und.EventType);
                    if (tableMetadata == null) {
                        childForges[count] = childNode.Forge;
                        childEvalsEventBeanReturnTypesForges[count] = new ExprEvalStreamNumEvent(und.StreamId);
                    }
                    else {
                        childForges[count] = new ExprEvalStreamTable(
                            und.StreamId,
                            und.EventType.UnderlyingType,
                            tableMetadata);
                        childEvalsEventBeanReturnTypesForges[count] =
                            new ExprEvalStreamNumEventTable(und.StreamId, tableMetadata);
                    }

                    paramTypes[count] = childForges[count].EvaluationType;
                    allowEventBeanType[count] = true;
                    allConstants = false;
                    count++;
                    continue;
                }

                if (childNode.Forge is ExprEnumerationForge) {
                    var enumeration = (ExprEnumerationForge) childNode.Forge;
                    var eventType = enumeration.GetEventTypeSingle(statementRawInfo, services);
                    childForges[count] = childNode.Forge;
                    paramTypes[count] = childForges[count].EvaluationType;
                    allConstants = false;
                    if (eventType != null) {
                        childEvalsEventBeanReturnTypesForges[count] = new ExprEvalStreamNumEnumSingleForge(enumeration);
                        allowEventBeanType[count] = true;
                        count++;
                        continue;
                    }

                    var eventTypeColl = enumeration.GetEventTypeCollection(statementRawInfo, services);
                    if (eventTypeColl != null) {
                        childEvalsEventBeanReturnTypesForges[count] = new ExprEvalStreamNumEnumCollForge(enumeration);
                        allowEventBeanCollType[count] = true;
                        count++;
                        continue;
                    }
                }

                paramTypes[count] = childNode.Forge.EvaluationType;
                childForges[count] = childNode.Forge;
                count++;
                if (!childNode.Forge.ForgeConstantType.IsCompileTimeConstant) {
                    allConstants = false;
                }
            }

            // Try to resolve the method
            MethodInfo method;
            try {
                if (optionalClass != null) {
                    method = services.ImportServiceCompileTime.ResolveMethod(
                        optionalClass,
                        methodName,
                        paramTypes,
                        allowEventBeanType);
                }
                else {
                    method = services.ImportServiceCompileTime.ResolveMethodOverloadChecked(
                        className,
                        methodName,
                        paramTypes,
                        allowEventBeanType,
                        allowEventBeanCollType,
                        services.ClassProvidedExtension);
                }
            }
            catch (Exception e) {
                throw exceptionHandler.Handle(e);
            }

            var parameterTypes = method.GetParameterTypes();
            
            // rewrite those evaluator that should return the event itself
            if (CollectionUtil.IsAnySet(allowEventBeanType)) {
                for (var i = 0; i < parameters.Count; i++) {
                    if (allowEventBeanType[i] && parameterTypes[i] == typeof(EventBean)) {
                        childForges[i] = childEvalsEventBeanReturnTypesForges[i];
                    }
                }
            }

            // rewrite those evaluators that should return the event collection
            if (CollectionUtil.IsAnySet(allowEventBeanCollType)) {
                for (var i = 0; i < parameters.Count; i++) {
                    if (allowEventBeanCollType[i] && (parameterTypes[i] == typeof(ICollection<EventBean>))) {
                        childForges[i] = childEvalsEventBeanReturnTypesForges[i];
                    }
                }
            }

            // add an evaluator if the method expects a context object
            if (!method.IsVarArgs() &&
                parameterTypes.Length > 0 &&
                parameterTypes[parameterTypes.Length - 1] == typeof(EPLMethodInvocationContext)) {
                var node = new ExprEvalMethodContext(functionName);
                childForges = (ExprForge[]) CollectionUtil.ArrayExpandAddSingle(childForges, node);
            }

            // handle varargs
            if (method.IsVarArgs()) {
                // handle context parameter
                var numMethodParams = parameterTypes.Length;
                if (numMethodParams > 1 && parameterTypes[numMethodParams - 2] == typeof(EPLMethodInvocationContext)) {
                    var rewrittenForges = new ExprForge[childForges.Length + 1];
                    Array.Copy(childForges, 0, rewrittenForges, 0, numMethodParams - 2);
                    rewrittenForges[numMethodParams - 2] = new ExprEvalMethodContext(functionName);
                    Array.Copy(
                        childForges,
                        numMethodParams - 2,
                        rewrittenForges,
                        numMethodParams - 1,
                        childForges.Length - (numMethodParams - 2));
                    childForges = rewrittenForges;
                }

                childForges = ExprNodeUtilityMake.MakeVarargArrayForges(method, childForges);
            }

            var localInlinedClass = services.ClassProvidedExtension.IsLocalInlinedClass(method.DeclaringType);
            return new ExprNodeUtilMethodDesc(allConstants, childForges, method, localInlinedClass);
        }
        /// <summary>
        /// Gets Recipient Type/Database from the Exchange database and adds them to the
        /// connector object; this method could be more general, but it is ok for our needs
        /// </summary>
        /// <param name="oc">object class, currently the method works for <see cref="ObjectClass.ACCOUNT"/> only</param>
        /// <param name="cobject">connector object to get the recipient type/database for</param>
        /// <param name="attToGet">attributes to get</param>
        /// <returns>Connector Object with recipient type added</returns>
        /// <exception cref="ConnectorException">In case of some troubles in powershell (if the
        /// user is not found we get this exception too)</exception>
        private ConnectorObject AddExchangeAttributes(ObjectClass oc, ConnectorObject cobject, IEnumerable<string> attToGet)
        {
            // FIX: the original validated "oc" twice (with the wrong parameter names) and
            // never null-checked "cobject"; validate each argument under its own name.
            ExchangeUtility.NullCheck(oc, "oc", this.configuration);
            ExchangeUtility.NullCheck(cobject, "cobject", this.configuration);

            // we support ACCOUNT only, otherwise there is nothing to add
            if (!oc.Is(ObjectClass.ACCOUNT_NAME) || attToGet == null)
            {
                return cobject;
            }

            // check it is not a deleted object (nullable-bool compare: false when null)
            bool? deleted = ExchangeUtility.GetAttValue(AttIsDeleted, cobject.GetAttributes()) as bool?;
            if (deleted == true)
            {
                // do nothing, it is a deleted object
                return cobject;
            }

            // attributes still missing from the object (case-insensitive names);
            // the database attribute is always (re)fetched even when already present
            ICollection<string> lattToGet = CollectionUtil.NewCaseInsensitiveSet();
            CollectionUtil.AddAll(lattToGet, attToGet);
            foreach (string att in attToGet)
            {
                if (cobject.GetAttributeByName(att) != null && att != AttDatabase)
                {
                    lattToGet.Remove(att);
                }
            }

            if (lattToGet.Count == 0)
            {
                // everything requested is already on the object
                return cobject;
            }

            ConnectorObjectBuilder cobjBuilder = new ConnectorObjectBuilder();
            cobjBuilder.AddAttributes(cobject.GetAttributes());

            PSExchangeConnector.CommandInfo cmdInfo = PSExchangeConnector.CommandInfo.GetUser;

            // prepare the connector attribute list to get the command
            ICollection<ConnectorAttribute> attributes = new Collection<ConnectorAttribute> {
                cobject.Name
            };

            // run Get-User to obtain the generic user information
            Command cmd = ExchangeUtility.GetCommand(cmdInfo, attributes, this.configuration);
            ICollection<PSObject> foundObjects = this.InvokePipeline(cmd);
            PSObject user = null;

            if (foundObjects != null && foundObjects.Count == 1)
            {
                user = GetFirstElement(foundObjects);
                foreach (var info in user.Properties)
                {
                    ConnectorAttribute att = GetAsAttribute(info);
                    if (att != null && lattToGet.Contains(att.Name))
                    {
                        cobjBuilder.AddAttribute(att);
                        lattToGet.Remove(att.Name);
                    }
                }

                if (lattToGet.Count == 0)
                {
                    // Get-User supplied everything we still needed
                    return cobjBuilder.Build();
                }
            }

            if (user == null)
            {
                // user not found (or ambiguous) - nothing to do
                return cobject;
            }

            // NOTE(review): assumes the RecipientType member is always present on the PSObject;
            // a missing member would throw a NullReferenceException here - TODO confirm
            string rcptType = user.Members[AttRecipientType].Value.ToString();

            foundObjects = null;

            // get detailed information according to the recipient type
            if (rcptType == RcptTypeMailBox)
            {
                foundObjects = this.InvokePipeline(ExchangeUtility.GetCommand(PSExchangeConnector.CommandInfo.GetMailbox, attributes, this.configuration));
            }
            else if (rcptType == RcptTypeMailUser)
            {
                foundObjects = this.InvokePipeline(ExchangeUtility.GetCommand(PSExchangeConnector.CommandInfo.GetMailUser, attributes, this.configuration));
            }

            if (foundObjects != null && foundObjects.Count == 1)
            {
                PSObject userDetails = GetFirstElement(foundObjects);
                foreach (var info in userDetails.Properties)
                {
                    ConnectorAttribute att = GetAsAttribute(info);
                    if (att != null && lattToGet.Contains(att.Name))
                    {
                        cobjBuilder.AddAttribute(att);
                        lattToGet.Remove(att.Name);
                    }
                }
            }

            return cobjBuilder.Build();
        }
Exemplo n.º 30
0
 /// <summary>
 /// Appends the given node to the end of this node's child-node array.
 /// </summary>
 /// <param name="childNode">child node to append</param>
 public virtual void AddChildNode(ExprNode childNode)
 {
     var expanded = CollectionUtil.ArrayExpandAddSingle(ChildNodes, childNode);
     ChildNodes = (ExprNode[]) expanded;
 }