Example #1
        /// Estimate must be lower bound of final cost!
        private static int calcEstimate(Grid grid, ImmutableList <Transition> schedule)
        {
            // find set of scheduled rooms
            Set <RoomIdentifier> scheduledRooms = new HashSet <RoomIdentifier>();

            for (Transition transition : schedule)
            {
                scheduledRooms.add(transition.getSource());
                scheduledRooms.add(transition.getDestination());
            }

            // calculate number of missing rooms
            int missing = 0;

            for (RoomIdentifier room : scheduledRooms)
            {
                if (!grid.containsRoom(room))
                {
                    missing++;
                }
            }

            // calculate result
            int result = 0;

            result += missing * roomCost;           // at least this cost will be incurred to create the missing rooms
            result += schedule.size() * scriptCost; // at least these scripts will be created
            return(result);
        }
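
calcEstimate above relies on HashSet deduplication: adding the same room twice leaves a single entry, so the later count of missing rooms is not inflated. A minimal standalone sketch of the same dedup-and-count pattern with the plain System.Collections.Generic types (the transition tuples and room names here are illustrative, not taken from the project above):

using System;
using System.Collections.Generic;

class DedupCountSketch
{
    static int CountMissing(IEnumerable<(string Source, string Destination)> schedule,
                            ISet<string> existingRooms)
    {
        // Collect every endpoint once; a duplicate Add is simply ignored.
        var scheduledRooms = new HashSet<string>();
        foreach (var transition in schedule)
        {
            scheduledRooms.Add(transition.Source);
            scheduledRooms.Add(transition.Destination);
        }

        // Count the scheduled rooms that do not exist yet.
        int missing = 0;
        foreach (var room in scheduledRooms)
        {
            if (!existingRooms.Contains(room))
            {
                missing++;
            }
        }
        return missing;
    }

    static void Main()
    {
        var existing = new HashSet<string> { "lobby" };
        var schedule = new[] { ("lobby", "hall"), ("hall", "vault"), ("lobby", "hall") };
        Console.WriteLine(CountMissing(schedule, existing)); // 2 ("hall" and "vault" are missing)
    }
}
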
Example #2
            public Set getAvailableEnvironmentVariables()
            {
                Set strings = new HashSet();

                strings.add("QTTEST");
                strings.add("QTTEST2");
                strings.add("QTTESTEMPTY");
                return(strings);
            }
Example #3
        /**
         * Adds the interfaces to the set
         */
        public void addInterfaces(HashSet <String> interfaceSet)
        {
            interfaceSet.add(getName().toLowerCase(Locale.ENGLISH));

            for (String name : getInterfaces())
            {
                interfaceSet.add(name.toLowerCase(Locale.ENGLISH));
            }
        }
Example #4
        public void Hashset_ToCsharp()
        {
            var two = new HashSet <string>()
            {
                "cat", "dog"
            };

            two.add("horse").ShouldBe(true);
            two.add("cat").ShouldBe(false);
        }
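
Example #4 leans on Add reporting whether the element was actually inserted. The standard System.Collections.Generic.HashSet<T>.Add does exactly that, returning true for a new element and false for a duplicate; a minimal sketch without the Java-style add/ShouldBe helpers used above:

using System;
using System.Collections.Generic;

class AddReturnValueSketch
{
    static void Main()
    {
        var two = new HashSet<string> { "cat", "dog" };

        Console.WriteLine(two.Add("horse")); // True:  "horse" was not in the set yet
        Console.WriteLine(two.Add("cat"));   // False: "cat" is already present, set unchanged
        Console.WriteLine(two.Count);        // 3
    }
}
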
Example #5
        static BytecodeHelper()
        {
            IterableOrIteratorElementTypes = new HashMap <String, TypeKind>();
            IterableOrIteratorElementTypes["stab/lang/BooleanIterable"] = TypeKind.Boolean;
            IterableOrIteratorElementTypes["stab/lang/BooleanIterator"] = TypeKind.Boolean;
            IterableOrIteratorElementTypes["stab/lang/ByteIterable"]    = TypeKind.Byte;
            IterableOrIteratorElementTypes["stab/lang/ByteIterator"]    = TypeKind.Byte;
            IterableOrIteratorElementTypes["stab/lang/ShortIterable"]   = TypeKind.Short;
            IterableOrIteratorElementTypes["stab/lang/ShortIterator"]   = TypeKind.Short;
            IterableOrIteratorElementTypes["stab/lang/CharIterable"]    = TypeKind.Char;
            IterableOrIteratorElementTypes["stab/lang/CharIterator"]    = TypeKind.Char;
            IterableOrIteratorElementTypes["stab/lang/IntIterable"]     = TypeKind.Int;
            IterableOrIteratorElementTypes["stab/lang/IntIterator"]     = TypeKind.Int;
            IterableOrIteratorElementTypes["stab/lang/LongIterable"]    = TypeKind.Long;
            IterableOrIteratorElementTypes["stab/lang/LongIterator"]    = TypeKind.Long;
            IterableOrIteratorElementTypes["stab/lang/FloatIterable"]   = TypeKind.Float;
            IterableOrIteratorElementTypes["stab/lang/FloatIterator"]   = TypeKind.Float;
            IterableOrIteratorElementTypes["stab/lang/DoubleIterable"]  = TypeKind.Double;
            IterableOrIteratorElementTypes["stab/lang/DoubleIterator"]  = TypeKind.Double;

            IterableTypes = new HashSet <String>();
            IterableTypes.add("stab/lang/BooleanIterable");
            IterableTypes.add("stab/lang/ByteIterable");
            IterableTypes.add("stab/lang/ShortIterable");
            IterableTypes.add("stab/lang/CharIterable");
            IterableTypes.add("stab/lang/IntIterable");
            IterableTypes.add("stab/lang/LongIterable");
            IterableTypes.add("stab/lang/FloatIterable");
            IterableTypes.add("stab/lang/DoubleIterable");
            IterableTypes.add("java/lang/Iterable");
        }
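
The BytecodeHelper static constructor above populates a Java-style HashMap through its indexer and a HashSet through repeated add calls. With the BCL types the same lookup tables are usually written with collection initializers; an abridged sketch (only a few of the entries above are repeated, and the TypeKind enum is stubbed out here):

using System.Collections.Generic;

static class BytecodeTablesSketch
{
    internal enum TypeKind { Boolean, Int, Long /* ... */ }

    // Dictionary index initializer (C# 6+) instead of repeated indexer assignments
    internal static readonly Dictionary<string, TypeKind> IterableOrIteratorElementTypes =
        new Dictionary<string, TypeKind>
        {
            ["stab/lang/BooleanIterable"] = TypeKind.Boolean,
            ["stab/lang/IntIterator"]     = TypeKind.Int,
            ["stab/lang/LongIterable"]    = TypeKind.Long,
            // ...
        };

    // Collection initializer instead of repeated Add calls
    internal static readonly HashSet<string> IterableTypes = new HashSet<string>
    {
        "stab/lang/BooleanIterable",
        "stab/lang/IntIterable",
        "java/lang/Iterable",
    };
}
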
Example #6
        private Set <MemberInfo> getAllTypeInfos()
        {
            if (allTypeInfos == null)
            {
                var l = new HashSet <MemberInfo>();
                var currentPackage = packageNames.aggregate("", (p, q) => p + q + "/");

                // Top level types
                foreach (var type in typeSystem.AllClassNames.where (p => p.indexOf('/') == -1))
                {
                    var info = MemberInfo.getInfo(typeSystem.getType(type));
                    if (info.Type.PackageName.equals(currentPackage) || info.IsPublic)
                    {
                        l.add(info);
                    }
                }

                // Types from enclosing packages
                var pkg        = "";
                var classNames = typeSystem.AllClassNames;
                foreach (var name in packageNames)
                {
                    pkg       += name + "/";
                    classNames = classNames.where (p => p.startsWith(pkg));
                    foreach (var type in classNames.where (p => p.indexOf('/', pkg.length()) == -1))
                    {
                        var info = MemberInfo.getInfo(typeSystem.getType(type));
                        if (info.Type.PackageName.equals(currentPackage) || info.IsPublic)
                        {
                            l.add(info);
                        }
                    }
                }

                // Types from using directives
                foreach (var pinfo in packageInfos)
                {
                    foreach (var name in pinfo.UsedPackages)
                    {
                        pkg = name + "/";
                        foreach (var type in typeSystem.AllClassNames.where (p => p.startsWith(pkg) && p.indexOf('/', pkg.length()) == -1))
                        {
                            var info = MemberInfo.getInfo(typeSystem.getType(type));
                            if (info.Type.PackageName.equals(currentPackage) || info.IsPublic)
                            {
                                l.add(info);
                            }
                        }
                    }
                }

                allTypeInfos = l;
            }
            return(allTypeInfos);
        }
Example #7
        public StringUtil()
        {
            viet_set = new HashSet <char>();
            var vv = "ẮẰẲẴẶĂẤẦẨẪẬÂÁÀÃẢẠĐẾỀỂỄỆÊÉÈẺẼẸÍÌỈĨỊỐỒỔỖỘÔỚỜỞỠỢƠÓÒÕỎỌỨỪỬỮỰƯÚÙỦŨỤÝỲỶỸỴAĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXYaăâbcdđeêghiklmnoôơpqrstuưvxy";

            vv += "A a Ă ă Â â E e Ê ê I i O o Ơ ơ Ô ô U u Ư ư Y y";
            vv += "À à Ằ ằ Ầ ầ È è Ề ề Ì ì Ò ò Ờ ờ Ồ ồ Ù ù Ừ ừ Ỳ ỳ";
            vv += "Ả ả Ẳ ẳ Ẩ ẩ Ẻ ẻ Ể ể Ỉ ỉ Ỏ ỏ Ở ở Ổ ổ Ủ ủ Ử ử Ỷ ỷ";
            vv += "Ã ã Ẵ ẵ Ẫ ẫ Ẽ ẽ Ễ ễ Ĩ ĩ Õ õ Ỡ ỡ Ỗ ỗ Ũ ũ Ữ ữ Ỹ ỹ";
            vv += "Á á Ắ ắ Ấ ấ É é Ế ế Í í Ó ó Ớ ớ Ỗ ỗ Ú ú Ứ ứ Ý ý";
            vv += "Ạ ạ Ặ ặ Ậ ậ Ẹ ẹ Ệ ệ Ị ị Ọ ọ Ợ ợ Ộ ộ Ụ ụ Ự ự Ỵ ỵ";
            vv += (char)0x1DC4;

            /*
             * The Vietnamese alphabets are listed in several noncontiguous Unicode ranges:
             * Basic Latin {U+0000..U+007F}, Latin-1 Supplement {U+0080..U+00FF}, Latin Extended-A, -B {U+0100..U+024F},
             * Latin Extended Additional {U+1E00..U+1EFF}, and Combining Diacritical Marks {U+0300.. U+036F}.
             * The Vietnamese đồng currency symbol is ₫ (U+20AB).
             */
            foreach (var c in vv)
            {
                viet_set.add(c);
            }
            viet_set.Remove(' ');
            for (char c = (char)0x0300; c <= (char)0x036F; c++)
            {
                viet_set.add(c);
            }
            for (char c = (char)0x1E00; c <= (char)0x1EFF; c++)
            {
                viet_set.add(c);
            }

            String s = "!\"@$%&'()*+,./:;<=>?[\\]^_`{|}~\r\n"; //@-

            s += ",  ,《。》、?;:‘’“”【{】}——=+、|·~!¥%……&*()";       //@-#
            s += "`~!@¥%……—×()——+-=【】{}:;’'”",./<>?’‘”“";      //#
            s += "� ★☆,。?, !";
            s += "©»¥「」";
            s += "[¡, !, \", ', (, ), -, °, :, ;, ?]-\"#";

            set = new HashSet <Char>();
            foreach (char c in s.toCharArray())
            {
                if (isWord(c))
                {
                    continue;
                }
                set.add(c);
            }
            set.add((char)0);
            set.add((char)8203);
            set.add((char)0x0E00);//Thai
        }
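
Example #7 seeds a HashSet<char> from literal strings plus two Unicode ranges. A condensed sketch of just the range part with plain BCL calls, using the U+0300..U+036F and U+1E00..U+1EFF ranges named in the comment above:

using System;
using System.Collections.Generic;
using System.Linq;

class VietCharSetSketch
{
    static void Main()
    {
        var vietSet = new HashSet<char>();

        // Combining Diacritical Marks U+0300..U+036F and Latin Extended Additional U+1E00..U+1EFF
        foreach (int code in Enumerable.Range(0x0300, 0x036F - 0x0300 + 1)
                                       .Concat(Enumerable.Range(0x1E00, 0x1EFF - 0x1E00 + 1)))
        {
            vietSet.Add((char)code);              // duplicates would simply be ignored
        }

        Console.WriteLine(vietSet.Contains('ặ')); // True: U+1EB7 lies in the 0x1E00..0x1EFF block
        Console.WriteLine(vietSet.Count);         // 368 code points
    }
}
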
Example #8
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public synchronized void checkIn(T t) throws org.maltparser.core.exception.MaltChainedException
        public override void checkIn(T t)
        {
            lock (this)
            {
                resetObject(t);
                inuse.remove(t);
                if (available.size() < keepThreshold)
                {
                    available.add(t);
                }
            }
        }
Example #9
        static void HashSetTest()
        {
            HashSet <int> evenNumbers = new HashSet <int>();
            HashSet <int> oddNumbers  = new HashSet <int>();

            for (int i = 0; i < 5; i++)
            {
                evenNumbers.add(i * 2);

                oddNumbers.add((i * 2) + 1);
            }

            Console.WriteLine("Even numbers: " + evenNumbers.size());
            Console.WriteLine("Odd numbers: " + oddNumbers.size());

            Console.WriteLine("\n\nEvens:");
            evenNumbers.print();
            Console.WriteLine("\n\nOdds:");
            oddNumbers.print();

            oddNumbers.remove(7);
            oddNumbers.remove(3);

            Console.WriteLine("\n\nOdds:");
            oddNumbers.print();
        }
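
The HashSetTest walk-through above uses Java-style add/size/remove/print helpers. The same exercise with the plain BCL HashSet<T> looks like this (Print is a local helper defined for the sketch; note that a HashSet does not guarantee enumeration order):

using System;
using System.Collections.Generic;

class HashSetTestSketch
{
    static void Print(HashSet<int> set) => Console.WriteLine(string.Join(" ", set));

    static void Main()
    {
        var evenNumbers = new HashSet<int>();
        var oddNumbers  = new HashSet<int>();

        for (int i = 0; i < 5; i++)
        {
            evenNumbers.Add(i * 2);
            oddNumbers.Add((i * 2) + 1);
        }

        Console.WriteLine("Even numbers: " + evenNumbers.Count); // 5
        Console.WriteLine("Odd numbers: " + oddNumbers.Count);   // 5

        oddNumbers.Remove(7);  // Remove returns false if the value is absent
        oddNumbers.Remove(3);
        Print(oddNumbers);     // e.g. 1 5 9 (enumeration order is not guaranteed)
    }
}
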
Example #10
 public void TestReplaceParent() {
     Persister persister = new Persister();
     Set<String> children = new HashSet<String>();
     RealParent parent = new RealParent(children);
     children.add("Tom");
     children.add("Dick");
     children.add("Harry");
     StringWriter writer = new StringWriter();
     persister.write(parent, writer);
     String text = writer.toString();
     System.out.println(text);
     AssertEquals(text.indexOf("Tom"), -1);
     AssertEquals(text.indexOf("Dick"), -1);
     AssertEquals(text.indexOf("Harry"), -1);
     validate(persister, parent);
 }
Example #11
        public static string gae_users_create_login_url(Env env,
                                                        string destinationUrl,
                                                        @Optional string authDomain,
                                                        @Optional string federatedIdentity,
                                                        @Optional Value attributesRequest)
        {
            Set <String> attributeSet = null;

            if (!attributesRequest.isDefault())
            {
                attributeSet = new HashSet <String>();

                ArrayValue array = attributesRequest.toArrayValue(env);

                for (Map.Entry <Value, Value> entrySet : array.entrySet())
                {
                    attributeSet.add(entrySet.getValue().ToString());
                }
            }

            return(GaeUserService.createLoginURL(destinationUrl,
                                                 authDomain,
                                                 federatedIdentity,
                                                 attributeSet));
        }
Example #12
        private void dumpSearchGraph(PrintStream @out, SearchState searchState)
        {
            LinkedList linkedList = new LinkedList();
            HashSet    hashSet    = new HashSet();

            this.startDump(@out);
            linkedList.add(new StateLevel(searchState, 0));
            while (!linkedList.isEmpty())
            {
                StateLevel  stateLevel = (StateLevel)linkedList.remove(0);
                int         level      = stateLevel.getLevel();
                SearchState state      = stateLevel.getState();
                if (!hashSet.contains(state.getSignature()))
                {
                    hashSet.add(state.getSignature());
                    this.startDumpNode(@out, state, level);
                    SearchStateArc[] successors = state.getSuccessors();
                    for (int i = successors.Length - 1; i >= 0; i--)
                    {
                        SearchState state2 = successors[i].getState();
                        this.dumpArc(@out, state, successors[i], level);
                        if (this.depthFirst)
                        {
                            linkedList.add(0, new StateLevel(state2, level + 1));
                        }
                        else
                        {
                            linkedList.add(new StateLevel(state2, level + 1));
                        }
                    }
                    this.endDumpNode(@out, state, level);
                }
            }
            this.endDump(@out);
        }
Example #13
        private static void calcCoAccessible(Fst fst, State state, ArrayList arrayList, HashSet hashSet)
        {
            ArrayList arrayList2 = new ArrayList();
            Iterator  iterator   = arrayList.iterator();

            while (iterator.hasNext())
            {
                ArrayList arrayList3 = (ArrayList)iterator.next();
                int       num        = arrayList3.lastIndexOf(state);
                if (num != -1 && (state.getFinalWeight() != fst.getSemiring().zero() || hashSet.contains(state)))
                {
                    for (int i = num; i > -1; i--)
                    {
                        if (!hashSet.contains(arrayList3.get(i)))
                        {
                            arrayList2.add(arrayList3.get(i));
                            hashSet.add(arrayList3.get(i));
                        }
                    }
                }
            }
            iterator = arrayList2.iterator();
            while (iterator.hasNext())
            {
                State state2 = (State)iterator.next();
                Connect.calcCoAccessible(fst, state2, arrayList, hashSet);
            }
        }
Example #14
 private void CreateRandomIndexes(int maxSegments)
 {
     dir = NewDirectory();
     numDocs = AtLeast(150);
     int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
     ISet<string> randomTerms = new HashSet<string>();
     while (randomTerms.size() < numTerms)
     {
         randomTerms.add(TestUtil.RandomSimpleString(Random()));
     }
     terms = new List<string>(randomTerms);
     int seed = Random().Next();
     IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed)));
     iwc.SetMergePolicy(TestSortingMergePolicy.NewSortingMergePolicy(sort));
     iw = new RandomIndexWriter(new Random(seed), dir, iwc);
     for (int i = 0; i < numDocs; ++i)
     {
         Document doc = RandomDocument();
         iw.AddDocument(doc);
         if (i == numDocs / 2 || (i != numDocs - 1 && Random().nextInt(8) == 0))
         {
             iw.Commit();
         }
         if (Random().nextInt(15) == 0)
         {
             string term = RandomInts.RandomFrom(Random(), terms);
             iw.DeleteDocuments(new Term("s", term));
         }
     }
     reader = iw.Reader;
 }
Example #15
        public override Set getVocabulary()
        {
            HashSet hashSet = new HashSet();

            try
            {
                BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(this.location.openStream()));
                for (;;)
                {
                    string text = bufferedReader.readLine();
                    if (text == null)
                    {
                        break;
                    }
                    if (String.instancehelper_length(text) != 0)
                    {
                        hashSet.add(String.instancehelper_trim(text));
                    }
                }
                bufferedReader.close();
            }
            catch (IOException ex)
            {
                Throwable.instancehelper_printStackTrace(ex);
            }
            return(hashSet);
        }
Example #16
        private void sameSizeDifferentElements(List <DSString> the_one, List <DSString> the_two)
        {
            //make sure the number of items returned is the same
            Assert.AreEqual(the_one.size(), the_two.size());

            //make sure the two result lists are not identical
            bool found = false;

            for (int i = 0; i < the_one.size(); i++)
            {
                if (!the_one.get(i).Equals(the_two.get(i)))
                {
                    found = true;
                }
            }
            Assert.IsTrue(found);

            //make sure all elements found cover all vertices possible
            Set <DSString> one = new HashSet <DSString>();
            Set <DSString> two = new HashSet <DSString>();

            for (int i = 0; i < the_one.size(); i++)
            {
                one.add(the_one.get(i));
                two.add(the_two.get(i));
            }

            Assert.AreEqual(one.size(), two.size());
        }
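
sameSizeDifferentElements builds two sets only to compare their sizes at the end. If the collections were BCL sets, the containment and equality checks could be expressed directly with the ISet<T> operations; a small sketch:

using System;
using System.Collections.Generic;

class SetCompareSketch
{
    static void Main()
    {
        var one = new HashSet<string> { "a", "b", "c" };
        var two = new HashSet<string> { "c", "a", "b" };

        Console.WriteLine(one.SetEquals(two));  // True: same elements, order ignored
        Console.WriteLine(one.IsSubsetOf(two)); // True

        one.Add("d");
        Console.WriteLine(one.SetEquals(two));  // False: "d" is only in one
    }
}
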
Example #17
        public HashSet <string> GetOverriddenRulesWithDifferentFIRST()
        {
            // walk every rule in this grammar and compare FIRST set with
            // those in imported grammars.
            HashSet <string> rules = new HashSet <string>();

            for (Iterator it = getRules().iterator(); it.hasNext();)
            {
                Rule r = (Rule)it.next();
                //System.@out.println(r.name+" FIRST="+r.FIRST);
                for (int i = 0; i < delegates.size(); i++)
                {
                    Grammar g            = delegates.get(i);
                    Rule    importedRule = g.getRule(r.name);
                    if (importedRule != null)     // exists in imported grammar
                    // System.@out.println(r.name+" exists in imported grammar: FIRST="+importedRule.FIRST);
                    {
                        if (!r.FIRST.equals(importedRule.FIRST))
                        {
                            rules.add(r.name);
                        }
                    }
                }
            }
            return(rules);
        }
Example #18
 protected internal virtual void showTokenCount()
 {
     if (this.logger.isLoggable(Level.INFO))
     {
         HashSet  hashSet  = new HashSet();
         Iterator iterator = this.activeList.iterator();
         while (iterator.hasNext())
         {
             for (Token token = (Token)iterator.next(); token != null; token = token.getPredecessor())
             {
                 hashSet.add(token);
             }
         }
         this.logger.info(new StringBuilder().append("Token Lattice size: ").append(hashSet.size()).toString());
         hashSet  = new HashSet();
         iterator = this.resultList.iterator();
         while (iterator.hasNext())
         {
             for (Token token = (Token)iterator.next(); token != null; token = token.getPredecessor())
             {
                 hashSet.add(token);
             }
         }
         this.logger.info(new StringBuilder().append("Result Lattice size: ").append(hashSet.size()).toString());
     }
 }
Example #19
        public virtual Collection getNbest(int n)
        {
            HashSet hashSet = new HashSet();
            BoundedPriorityQueue boundedPriorityQueue = new BoundedPriorityQueue(n);

            boundedPriorityQueue.add(new Nbest.NBestPath(this, "<s>", this.lattice.getInitialNode(), (double)0f, (double)0f));
            while (hashSet.size() < n && boundedPriorityQueue.size() > 0)
            {
                Nbest.NBestPath nbestPath = (Nbest.NBestPath)boundedPriorityQueue.poll();
                if (nbestPath.node.equals(this.lattice.terminalNode))
                {
                    hashSet.add(nbestPath.path);
                }
                else
                {
                    Iterator iterator = nbestPath.node.getLeavingEdges().iterator();
                    while (iterator.hasNext())
                    {
                        Edge            edge          = (Edge)iterator.next();
                        Node            toNode        = edge.getToNode();
                        double          num           = nbestPath.forwardScore + edge.getAcousticScore() + edge.getLMScore();
                        double          num2          = num + toNode.getBackwardScore();
                        string          newPathString = this.getNewPathString(nbestPath, toNode);
                        Nbest.NBestPath item          = new Nbest.NBestPath(this, newPathString, toNode, num2, num);
                        boundedPriorityQueue.add(item);
                    }
                }
            }
            return(hashSet);
        }
Example #20
        private void CreateRandomIndexes(int maxSegments)
        {
            dir     = NewDirectory();
            numDocs = AtLeast(150);
            int           numTerms    = TestUtil.NextInt(Random(), 1, numDocs / 5);
            ISet <string> randomTerms = new HashSet <string>();

            while (randomTerms.size() < numTerms)
            {
                randomTerms.add(TestUtil.RandomSimpleString(Random()));
            }
            terms = new List <string>(randomTerms);
            int seed = Random().Next();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed)));

            iwc.SetMergePolicy(TestSortingMergePolicy.NewSortingMergePolicy(sort));
            iw = new RandomIndexWriter(new Random(seed), dir, iwc);
            for (int i = 0; i < numDocs; ++i)
            {
                Document doc = RandomDocument();
                iw.AddDocument(doc);
                if (i == numDocs / 2 || (i != numDocs - 1 && Random().nextInt(8) == 0))
                {
                    iw.Commit();
                }
                if (Random().nextInt(15) == 0)
                {
                    string term = RandomInts.RandomFrom(Random(), terms);
                    iw.DeleteDocuments(new Term("s", term));
                }
            }
            reader = iw.Reader;
        }
Example #21
        public void createLexicon(string rawlexicontext)
        {
            // initialise objects
            words          = new HashSet <WordElement>();
            indexByID      = new Dictionary <string, WordElement>();
            indexByBase    = new Dictionary <string, List <WordElement> >();
            indexByVariant = new Dictionary <string, List <WordElement> >();

            var doc = new XmlDocument();

            doc.Load(new XmlTextReader(new StringReader(rawlexicontext)));

            if (doc != null)
            {
                var lexRoot   = doc.DocumentElement;
                var wordNodes = lexRoot?.SelectNodes("//word");
                for (var i = 0; i < wordNodes?.Count; i++)
                {
                    var wordNode = wordNodes[i];
                    // ignore things that aren't elements
                    if (wordNode.NodeType == XmlNodeType.Element)
                    {
                        var word = convertNodeToWord(wordNode);
                        if (word != null)
                        {
                            words.add(word);
                            IndexWord(word);
                        }
                    }
                }
            }

            addSpecialCases();
        }
Example #22
        public static void apply(Fst fst)
        {
            if (fst.getSemiring() == null)
            {
                [email protected]("Fst has no semiring.");
                return;
            }
            HashSet hashSet  = new HashSet();
            HashSet hashSet2 = new HashSet();

            ArrayList[] array     = new ArrayList[fst.getNumStates()];
            ArrayList   arrayList = new ArrayList();

            arrayList.add(new ArrayList());
            Connect.depthFirstSearch(fst, hashSet, arrayList, array, hashSet2);
            HashSet hashSet3 = new HashSet();

            for (int i = 0; i < fst.getNumStates(); i++)
            {
                State state = fst.getState(i);
                if (!hashSet.contains(state) && !hashSet2.contains(state))
                {
                    hashSet3.add(state);
                }
            }
            fst.deleteStates(hashSet3);
        }
Example #23
        internal virtual void setConfigurableClass(Class @class)
        {
            this.ownerClass = @class;
            if (this.isInstanciated())
            {
                string text = "class is already instantiated";

                throw new RuntimeException(text);
            }
            HashSet  hashSet  = new HashSet();
            Map      map      = PropertySheet.parseClass(this.ownerClass);
            Iterator iterator = map.entrySet().iterator();

            while (iterator.hasNext())
            {
                Map.Entry entry = (Map.Entry)iterator.next();
                try
                {
                    string text2 = (string)((Field)entry.getKey()).get(null, PropertySheet.__GetCallerID());
                    if (!PropertySheet.assertionsDisabled && hashSet.contains(text2))
                    {
                        object obj = new StringBuilder().append("duplicate property-name for different properties: ").append(text2).append(" for the class ").append(@class).toString();

                        throw new AssertionError(obj);
                    }
                    this.registerProperty(text2, new S4PropWrapper((java.lang.annotation.Annotation)entry.getValue()));
                    hashSet.add(text2);
                }
                catch (IllegalAccessException ex)
                {
                    Throwable.instancehelper_printStackTrace(ex);
                }
                continue;
            }
        }
Example #24
 /**
  * Adds the traits to the set
  */
 public void addTraits(HashSet <String> traitSet)
 {
     for (String name : getTraits())
     {
         traitSet.add(name.toLowerCase(Locale.ENGLISH));
     }
 }
Example #25
        // Traverses across the BST, in order, adding elements to the set
        bool traverse(Node <T> node, int k, HashSet <Node <T> > set)
        {
            // If the node has a left child, traverse it first
            if (node.left != null)
            {
                return(traverse(node.left, k, set));
            }
            // Check to see if the set contains the element that would sum
            // with the node we're checking's element to equal k
            if (set.contains(k - node.element))
            {
                return(true);
            }
            // Add node's element to the set
            set.add(node.element);

            // If the node has a right child, traverse it after
            if (node.right != null)
            {
                return(traverse(node.right, k, set));
            }
            else
            {
                // No two nodes with elements summing to k exist in the BST,
                // since you reached the end and found nothing
                return(false);
            }
        }
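
As quoted, the traversal above returns as soon as the left subtree has been visited, and it declares a set of nodes while adding and looking up elements, so it would neither compile nor visit the whole tree. A compiling sketch of the intended check ("do two elements in the BST sum to k?"), assuming integer elements and a hypothetical Node class:

using System.Collections.Generic;

class Node
{
    public int Element;
    public Node Left, Right;
}

static class TwoSumBst
{
    // In-order walk: recurse left, test/record the current element, recurse right.
    // Returns true as soon as two elements summing to k have been seen.
    public static bool Traverse(Node node, int k, HashSet<int> seen)
    {
        if (node == null)
        {
            return false;
        }
        if (Traverse(node.Left, k, seen))
        {
            return true;
        }
        // Was an element equal to k - node.Element seen earlier in the walk?
        if (seen.Contains(k - node.Element))
        {
            return true;
        }
        seen.Add(node.Element);
        return Traverse(node.Right, k, seen);
    }
}
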
Example #26
        public static Set collectStates(SentenceHMMState start)
        {
            HashSet    hashSet    = new HashSet();
            LinkedList linkedList = new LinkedList();

            linkedList.add(start);
            while (!linkedList.isEmpty())
            {
                SentenceHMMState sentenceHMMState = (SentenceHMMState)linkedList.remove(0);
                hashSet.add(sentenceHMMState);
                SearchStateArc[] successors = sentenceHMMState.getSuccessors();
                SearchStateArc[] array      = successors;
                int num = array.Length;
                for (int i = 0; i < num; i++)
                {
                    SearchStateArc   searchStateArc    = array[i];
                    SentenceHMMState sentenceHMMState2 = (SentenceHMMState)searchStateArc.getState();
                    if (!hashSet.contains(sentenceHMMState2) && !linkedList.contains(sentenceHMMState2))
                    {
                        linkedList.add(sentenceHMMState2);
                    }
                }
            }
            return(hashSet);
        }
Example #27
        public override void run()
        {
            Linguist   linguist   = this.getLinguist();
            LinkedList linkedList = new LinkedList();
            HashSet    hashSet    = new HashSet();
            int        num        = 0;

            linkedList.add(linguist.getSearchGraph().getInitialState());
            while (!linkedList.isEmpty())
            {
                SearchState searchState = (SearchState)linkedList.remove(0);
                if (!hashSet.contains(searchState))
                {
                    num++;
                    this.incrementStateTypeCount(searchState);
                    hashSet.add(searchState);
                    SearchStateArc[] successors = searchState.getSuccessors();
                    for (int i = successors.Length - 1; i >= 0; i--)
                    {
                        SearchState state = successors[i].getState();
                        linkedList.add(state);
                    }
                }
            }
            [email protected]("# ----------- linguist stats ------------ ");
            [email protected](new StringBuilder().append("# Total states: ").append(num).toString());
            this.dumpStateTypeCounts();
        }
Example #28
        /**
         * quick-and-dirty routine for getting morph variants should be replaced by
         * something better!
         *
         * @param word
         * @return
         */

        public HashSet <string> getVariants(WordElement word)
        {
            var variants = new HashSet <string>();

            variants.Add(word.getBaseForm());
            var category = word.getCategory();

            if (category is ILexicalCategory)
            {
                switch (category.enumType)
                {
                case (int)LexicalCategoryEnum.NOUN:
                    variants.add(getVariant(word, LexicalFeature.PLURAL, "s"));
                    break;

                case (int)LexicalCategoryEnum.ADJECTIVE:
                    variants
                    .add(getVariant(word, LexicalFeature.COMPARATIVE, "er"));
                    variants
                    .add(getVariant(word, LexicalFeature.SUPERLATIVE, "est"));
                    break;

                case (int)LexicalCategoryEnum.VERB:
                    variants.add(getVariant(word, LexicalFeature.PRESENT3S, "s"));
                    variants.add(getVariant(word, LexicalFeature.PAST, "ed"));
                    variants.add(getVariant(word, LexicalFeature.PAST_PARTICIPLE,
                                            "ed"));
                    variants.add(getVariant(word,
                                            LexicalFeature.PRESENT_PARTICIPLE, "ing"));
                    break;
                }
            }
            return(variants);
        }
Example #29
        public Set keySet()
        {
            var s = new HashSet();

            foreach (var k in c.AllKeys)
            {
                s.add(k);
            }
            return(s);
        }
Example #30
 public override void StartAutoIndexingProperty(string propName)
 {
     _propertyKeysToInclude.getAndUpdate(current =>
     {
         ISet <string> updated = new HashSet <string>();
         updated.addAll(current);
         updated.add(propName);
         return(updated);
     });
 }
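
Example #30 copies the current set, adds the new key, and swaps the result in atomically (an AtomicReference-style getAndUpdate). A rough C# analogue of that copy-on-write update, sketched with the System.Collections.Immutable package and a compare-and-swap loop; the field and method names mirror the example but are otherwise illustrative:

using System.Collections.Immutable;
using System.Threading;

class AutoIndexingSketch
{
    private ImmutableHashSet<string> _propertyKeysToInclude = ImmutableHashSet<string>.Empty;

    public void StartAutoIndexingProperty(string propName)
    {
        while (true)
        {
            var current = _propertyKeysToInclude;
            var updated = current.Add(propName); // returns a new set; "current" is left untouched
            if (ReferenceEquals(
                    Interlocked.CompareExchange(ref _propertyKeysToInclude, updated, current),
                    current))
            {
                return;  // swap succeeded: no concurrent writer got in between
            }
            // another thread replaced the set first; retry against the fresh value
        }
    }
}
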
Example #31
        public Set entrySet()
        {
            var s = new HashSet();

            foreach (string k in c)
            {
                var entry = new MapEntry(k, c.GetValues(k));
                s.add(entry);
            }
            return(s);
        }
Example #32
        private IEnumerable <BytesRef> AsSet(params string[] values)
        {
            HashSet <BytesRef> result = new HashSet <BytesRef>();

            foreach (string value in values)
            {
                result.add(new BytesRef(value));
            }

            return(result);
        }
Example #33
        /**
         * Return a Dynamic Bayesian Network of the Umbrella World Network.
         * 
         * @return a Dynamic Bayesian Network of the Umbrella World Network.
         */

        public static DynamicBayesianNetwork getUmbrellaWorldNetwork()
        {
            FiniteNode prior_rain_tm1 = new FullCPTNode(ExampleRV.RAIN_tm1_RV,
                                                        new double[] {0.5, 0.5});

            BayesNet priorNetwork = new BayesNet(prior_rain_tm1);

            // Prior belief state
            FiniteNode rain_tm1 = new FullCPTNode(ExampleRV.RAIN_tm1_RV,
                                                  new double[] {0.5, 0.5});
            // Transition Model
            FiniteNode rain_t = new FullCPTNode(ExampleRV.RAIN_t_RV, new double[]
                                                                         {
                                                                             // R_t-1 = true, R_t = true
                                                                             0.7,
                                                                             // R_t-1 = true, R_t = false
                                                                             0.3,
                                                                             // R_t-1 = false, R_t = true
                                                                             0.3,
                                                                             // R_t-1 = false, R_t = false
                                                                             0.7
                                                                         }, rain_tm1);
            // Sensor Model
            FiniteNode umbrealla_t = new FullCPTNode(ExampleRV.UMBREALLA_t_RV,
                                                     new double[]
                                                         {
                                                             // R_t = true, U_t = true
                                                             0.9,
                                                             // R_t = true, U_t = false
                                                             0.1,
                                                             // R_t = false, U_t = true
                                                             0.2,
                                                             // R_t = false, U_t = false
                                                             0.8
                                                         }, rain_t);

            Map<RandomVariable, RandomVariable> X_0_to_X_1 = new HashMap<RandomVariable, RandomVariable>();
            X_0_to_X_1.put(ExampleRV.RAIN_tm1_RV, ExampleRV.RAIN_t_RV);
            Set<RandomVariable> E_1 = new HashSet<RandomVariable>();
            E_1.add(ExampleRV.UMBREALLA_t_RV);

            return new DynamicBayesNet(priorNetwork, X_0_to_X_1, E_1, rain_tm1);
        }
Example #34
 /**
  * Makes a bunch of single-char tokens (the max # unique terms will at most be 26).
  * puts the # unique terms into expected, to be checked against the norm.
  */
 private string AddValue()
 {
     StringBuilder sb = new StringBuilder();
     HashSet<string> terms = new HashSet<string>();
     int num = TestUtil.NextInt(Random(), 0, 255);
     for (int i = 0; i < num; i++)
     {
         sb.append(' ');
         char term = (char)TestUtil.NextInt(Random(), 'a', 'z');
         sb.append(term);
         terms.add("" + term);
     }
     expected.Add(terms.size());
     return sb.toString();
 }
Example #35
        public virtual void _minimizeRuleSet( HashSet<string> ruleDefs, HashSet<string> ruleRefs, CompositeGrammarTree p )
        {
            var localRuleDefs = new HashSet<string>();
            foreach ( Rule r in p.grammar.Rules )
            {
                if ( !ruleDefs.contains( r.name ) )
                {
                    localRuleDefs.add( r.name );
                    ruleDefs.add( r.name );
                }
            }
            System.Console.Out.WriteLine( "rule defs for " + p.grammar.name + ": " + localRuleDefs );

            // remove locally-defined rules not in ref set
            // find intersection of local rules and references from delegator
            // that is set of rules needed by delegator
            HashSet<string> localRuleDefsSatisfyingRefsFromBelow = new HashSet<string>();
            foreach ( string r in ruleRefs )
            {
                if ( localRuleDefs.contains( r ) )
                {
                    localRuleDefsSatisfyingRefsFromBelow.add( r );
                }
            }

            // now get list of refs from localRuleDefsSatisfyingRefsFromBelow.
            // Those rules are also allowed in this delegate
            foreach ( GrammarAST refAST in p.grammar.ruleRefs )
            {
                if ( localRuleDefsSatisfyingRefsFromBelow.contains( refAST.enclosingRuleName ) )
                {
                    // found rule ref within needed rule
                }
            }

            // remove rule refs not in the new rule def set

            // walk all children, adding rules not already defined
            if ( p.children != null )
            {
                foreach ( CompositeGrammarTree @delegate in p.children )
                {
                    _minimizeRuleSet( ruleDefs, ruleRefs, @delegate );
                }
            }
        }
Example #36
 public virtual void oldminimizeRuleSet()
 {
     // first walk to remove all overridden rules
     var ruleDefs = new HashSet<string>();
     var ruleRefs = new HashSet<string>();
     foreach ( GrammarAST refAST in delegateGrammarTreeRoot.grammar.ruleRefs )
     {
         string rname = refAST.Text;
         ruleRefs.add( rname );
     }
     _minimizeRuleSet( ruleDefs,
                      ruleRefs,
                      delegateGrammarTreeRoot );
     System.Console.Out.WriteLine( "overall rule defs: " + ruleDefs );
 }
Example #37
    static Encoding()
    {
        byte[] testBuf = new byte[0x7F];
        for (int i = 0; i < 0x7F; i++) {
            if (isAsciiSupersetnessSensitive(i)) {
                testBuf[i] = (byte) i;
            } else {
                testBuf[i] = (byte) 0x20;
            }
        }

        Set<Encoding> encodings = new HashSet<Encoding>();

        SortedMap<String, Charset> charsets = Charset.availableCharsets();
        foreach (Map.Entry<String, Charset> entry in charsets.entrySet()) {
            Charset cs = entry.getValue();
            String name = toNameKey(cs.name());
            String canonName = toAsciiLowerCase(cs.name());
            if (!isBanned(name)) {
                name = name.intern();
                bool asciiSuperset = asciiMapsToBasicLatin(testBuf, cs);
                Encoding enc = new Encoding(canonName.intern(), cs,
                        asciiSuperset, isObscure(name), isShouldNot(name),
                        isLikelyEbcdic(name, asciiSuperset));
                encodings.add(enc);
                Set<String> aliases = cs.aliases();
                foreach (String alias in aliases) {
                    encodingByCookedName.put(toNameKey(alias).intern(), enc);
                }
            }
        }
        // Overwrite possible overlapping aliases with the real things--just in
        // case
        foreach (Encoding encoding in encodings) {
            encodingByCookedName.put(toNameKey(encoding.getCanonName()),
                    encoding);
        }
        UTF8 = forName("utf-8");
        UTF16 = forName("utf-16");
        UTF16BE = forName("utf-16be");
        UTF16LE = forName("utf-16le");
        WINDOWS1252 = forName("windows-1252");
        try {
            forName("iso-8859-1").actualHtmlEncoding = forName("windows-1252");
        } catch (UnsupportedCharsetException e) {
        }
        try {
            forName("iso-8859-9").actualHtmlEncoding = forName("windows-1254");
        } catch (UnsupportedCharsetException e) {
        }
        try {
            forName("iso-8859-11").actualHtmlEncoding = forName("windows-874");
        } catch (UnsupportedCharsetException e) {
        }
        try {
            forName("x-iso-8859-11").actualHtmlEncoding = forName("windows-874");
        } catch (UnsupportedCharsetException e) {
        }
        try {
            forName("tis-620").actualHtmlEncoding = forName("windows-874");
        } catch (UnsupportedCharsetException e) {
        }
        try {
            forName("gb_2312-80").actualHtmlEncoding = forName("gbk");
        } catch (UnsupportedCharsetException e) {
        }
        try {
            forName("gb2312").actualHtmlEncoding = forName("gbk");
        } catch (UnsupportedCharsetException e) {
        }
        try {
            encodingByCookedName.put("x-x-big5", forName("big5"));
        } catch (UnsupportedCharsetException e) {
        }
        try {
            encodingByCookedName.put("euc-kr", forName("windows-949"));
        } catch (UnsupportedCharsetException e) {
        }
        try {
            encodingByCookedName.put("ks_c_5601-1987", forName("windows-949"));
        } catch (UnsupportedCharsetException e) {
        }
    }
Example #38
 private void checkHits(SpatialArgs args, int assertNumFound, int[] assertIds)
 {
     SearchResults got = executeQuery(strategy.MakeQuery(args), 100);
     assertEquals("" + args, assertNumFound, got.numFound);
     if (assertIds != null)
     {
         ISet<int?> gotIds = new HashSet<int?>();
         foreach (SearchResult result in got.results)
         {
             gotIds.add(int.Parse(result.document.Get("id"), CultureInfo.InvariantCulture));
         }
         foreach (int assertId in assertIds)
         {
             assertTrue("has " + assertId, gotIds.contains(assertId));
         }
     }
 }
Example #39
        private void CreateRandomIndexes()
        {
            dir1 = NewDirectory();
            dir2 = NewDirectory();
            int numDocs = AtLeast(150);
            int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
            ISet<string> randomTerms = new HashSet<string>();
            while (randomTerms.size() < numTerms)
            {
                randomTerms.add(TestUtil.RandomSimpleString(Random()));
            }
            terms = new List<string>(randomTerms);
            long seed = Random().NextLong();
            IndexWriterConfig iwc1 = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random((int)seed)));
            IndexWriterConfig iwc2 = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random((int)seed)));
            iwc2.SetMergePolicy(NewSortingMergePolicy(sort));
            RandomIndexWriter iw1 = new RandomIndexWriter(new Random((int)seed), dir1, iwc1);
            RandomIndexWriter iw2 = new RandomIndexWriter(new Random((int)seed), dir2, iwc2);
            for (int i = 0; i < numDocs; ++i)
            {
                if (Random().nextInt(5) == 0 && i != numDocs - 1)
                {
                    string term = RandomInts.RandomFrom(Random(), terms);
                    iw1.DeleteDocuments(new Term("s", term));
                    iw2.DeleteDocuments(new Term("s", term));
                }
                Document doc = randomDocument();
                iw1.AddDocument(doc);
                iw2.AddDocument(doc);
                if (Random().nextInt(8) == 0)
                {
                    iw1.Commit();
                    iw2.Commit();
                }
            }
            // Make sure we have something to merge
            iw1.Commit();
            iw2.Commit();
            Document doc2 = randomDocument();
            // NOTE: don't use RIW.addDocument directly, since it sometimes commits
            // which may trigger a merge, at which case forceMerge may not do anything.
            // With field updates this is a problem, since the updates can go into the
            // single segment in the index, and therefore the index won't be sorted.
            // This hurts the assumption of the test later on, that the index is sorted
            // by SortingMP.
            iw1.w.AddDocument(doc2);
            iw2.w.AddDocument(doc2);

            if (DefaultCodecSupportsFieldUpdates())
            {
                // update NDV of docs belonging to one term (covers many documents)
                long value = Random().NextLong();
                string term = RandomInts.RandomFrom(Random(), terms);
                iw1.w.UpdateNumericDocValue(new Term("s", term), "ndv", value);
                iw2.w.UpdateNumericDocValue(new Term("s", term), "ndv", value);
            }

            iw1.ForceMerge(1);
            iw2.ForceMerge(1);
            iw1.Dispose();
            iw2.Dispose();
            reader = DirectoryReader.Open(dir1);
            sortedReader = DirectoryReader.Open(dir2);
        }
Example #40
 public HashSet<string> GetOverriddenRulesWithDifferentFIRST()
 {
     // walk every rule in this grammar and compare FIRST set with
     // those in imported grammars.
     HashSet<string> rules = new HashSet<string>();
     for (Iterator it = getRules().iterator(); it.hasNext();) {
         Rule r = (Rule)it.next();
         //System.@out.println(r.name+" FIRST="+r.FIRST);
         for (int i = 0; i < delegates.size(); i++) {
             Grammar g = delegates.get(i);
             Rule importedRule = g.getRule(r.name);
             if ( importedRule != null ) { // exists in imported grammar
                 // System.@out.println(r.name+" exists in imported grammar: FIRST="+importedRule.FIRST);
                 if ( !r.FIRST.equals(importedRule.FIRST) ) {
                     rules.add(r.name);
                 }
             }
         }
     }
     return rules;
 }
Example #41
        public void TestWithContexts()
        {
            Directory dir = NewDirectory();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
            KeyValuePair<List<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), true, true);
            IDictionary<string, Document> docs = res.Value;
            List<string> invalidDocTerms = res.Key;
            foreach (Document doc in docs.Values)
            {
                writer.AddDocument(doc);
            }
            writer.Commit();
            writer.Dispose();
            IndexReader ir = DirectoryReader.Open(dir);
            IDictionary dictionary = new DocumentDictionary(ir, FIELD_NAME, WEIGHT_FIELD_NAME, PAYLOAD_FIELD_NAME, CONTEXT_FIELD_NAME);
            IInputIterator inputIterator = dictionary.EntryIterator;
            BytesRef f;
            while ((f = inputIterator.Next()) != null)
            {
                string field = f.Utf8ToString();
                Document doc = docs.ContainsKey(field) ? docs[field] : null;
                docs.Remove(field);
                //Document doc = docs.remove(f.utf8ToString());
                assertTrue(f.equals(new BytesRef(doc.Get(FIELD_NAME))));
                IndexableField weightField = doc.GetField(WEIGHT_FIELD_NAME);
                assertEquals(inputIterator.Weight, (weightField != null) ? Convert.ToInt64(weightField.NumericValue) : 0);
                assertTrue(inputIterator.Payload.equals(doc.GetField(PAYLOAD_FIELD_NAME).BinaryValue));
                ISet<BytesRef> oriCtxs = new HashSet<BytesRef>();
                IEnumerable<BytesRef> contextSet = inputIterator.Contexts;
                foreach (IndexableField ctxf in doc.GetFields(CONTEXT_FIELD_NAME))
                {
                    oriCtxs.add(ctxf.BinaryValue);
                }
                assertEquals(oriCtxs.size(), contextSet.Count());
            }

            foreach (string invalidTerm in invalidDocTerms)
            {
                var invalid = docs[invalidTerm];
                docs.Remove(invalidTerm);
                assertNotNull(invalid);
            }
            assertTrue(!docs.Any());

            ir.Dispose();
            dir.Dispose();
        }
Example #42
		/// <summary>
		/// Determine which genotype fields are in use in the genotypes in VC </summary>
		/// <param name="vc"> </param>
		/// <returns> an ordered list of genotype fields in use in VC.  If vc has genotypes this will always include GT first </returns>
//JAVA TO C# CONVERTER WARNING: 'final' parameters are not allowed in .NET:
//ORIGINAL LINE: public static List<String> calcVCFGenotypeKeys(final VariantContext vc, final VCFHeader header)
		public static IList<string> calcVCFGenotypeKeys(VariantContext vc, VCFHeader header)
		{
			Set<string> keys = new HashSet<string>();

			bool sawGoodGT = false;
			bool sawGoodQual = false;
			bool sawGenotypeFilter = false;
			bool sawDP = false;
			bool sawAD = false;
			bool sawPL = false;
			foreach (Genotype g in vc.Genotypes)
			{
				keys.addAll(g.ExtendedAttributes.Keys);
				if (g.Available)
				{
					sawGoodGT = true;
				}
				if (g.hasGQ())
				{
					sawGoodQual = true;
				}
				if (g.hasDP())
				{
					sawDP = true;
				}
				if (g.hasAD())
				{
					sawAD = true;
				}
				if (g.hasPL())
				{
					sawPL = true;
				}
				if (g.Filtered)
				{
					sawGenotypeFilter = true;
				}
			}

			if (sawGoodQual)
			{
				keys.add(VCFConstants.GENOTYPE_QUALITY_KEY);
			}
			if (sawDP)
			{
				keys.add(VCFConstants.DEPTH_KEY);
			}
			if (sawAD)
			{
				keys.add(VCFConstants.GENOTYPE_ALLELE_DEPTHS);
			}
			if (sawPL)
			{
				keys.add(VCFConstants.GENOTYPE_PL_KEY);
			}
			if (sawGenotypeFilter)
			{
				keys.add(VCFConstants.GENOTYPE_FILTER_KEY);
			}

			IList<string> sortedList = ParsingUtils.sortList(new List<string>(keys));

			// make sure the GT is first
			if (sawGoodGT)
			{
				IList<string> newList = new List<string>(sortedList.Count + 1);
				newList.Add(VCFConstants.GENOTYPE_KEY);
				newList.AddRange(sortedList);
				sortedList = newList;
			}

			if (sortedList.Count == 0 && header.hasGenotypingData())
			{
				// this needs to be done in case all samples are no-calls
				return Collections.singletonList(VCFConstants.GENOTYPE_KEY);
			}
			else
			{
				return sortedList;
			}
		}
Example #43
        public virtual void runTestQuery(SpatialMatchConcern concern, SpatialTestQuery q)
        {
            String msg = q.toString(); //"Query: " + q.args.toString(ctx);
            SearchResults got = executeQuery(makeQuery(q), Math.Max(100, q.ids.size() + 1));
            if (storeShape && got.numFound > 0)
            {
                //check stored value is there
                assertNotNull(got.results[0].document.Get(strategy.FieldName));
            }
            if (concern.orderIsImportant)
            {
                IEnumerator<String> ids = q.ids.GetEnumerator();
                foreach (SearchResult r in got.results)
                {
                    String id = r.document.Get("id");
                    if (!ids.MoveNext())
                    {
                        fail(msg + " :: Did not get enough results.  Expect" + q.ids + ", got: " + got.toDebugString());
                    }
                    assertEquals("out of order: " + msg, ids.Current, id);
                }

                if (ids.MoveNext())
                {
                    fail(msg + " :: expect more results then we got: " + ids.Current);
                }
            }
            else
            {
                // We are looking at how the results overlap
                if (concern.resultsAreSuperset)
                {
                    ISet<string> found = new HashSet<string>();
                    foreach (SearchResult r in got.results)
                    {
                        found.add(r.document.Get("id"));
                    }
                    foreach (String s in q.ids)
                    {
                        if (!found.contains(s))
                        {
                            fail("Results are mising id: " + s + " :: " + found);
                        }
                    }
                }
                else
                {
                    List<string> found = new List<string>();
                    foreach (SearchResult r in got.results)
                    {
                        found.Add(r.document.Get("id"));
                    }

                    // sort both so that the order is not important
                    CollectionUtil.TimSort(q.ids);
                    CollectionUtil.TimSort(found);
                    assertEquals(msg, q.ids.toString(), found.toString());
                }
            }
        }
Example #44
        public void TestWithContext()
        {
            Directory dir = NewDirectory();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
            IDictionary<string, Document> docs = GenerateIndexDocuments(AtLeast(100));
            foreach (Document doc in docs.Values)
            {
                writer.AddDocument(doc);
            }
            writer.Commit();
            writer.Dispose();

            IndexReader ir = DirectoryReader.Open(dir);
            ValueSource[] toAdd = new ValueSource[] { new LongFieldSource(WEIGHT_FIELD_NAME_1), new LongFieldSource(WEIGHT_FIELD_NAME_2), new LongFieldSource(WEIGHT_FIELD_NAME_3) };
            IDictionary dictionary = new DocumentValueSourceDictionary(ir, FIELD_NAME, new SumFloatFunction(toAdd), PAYLOAD_FIELD_NAME, CONTEXTS_FIELD_NAME);
            IInputIterator inputIterator = dictionary.EntryIterator;
            BytesRef f;
            while ((f = inputIterator.Next()) != null)
            {
                string field = f.Utf8ToString();
                Document doc = docs.ContainsKey(field) ? docs[field] : null;
                docs.Remove(field);
                long w1 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_1).NumericValue);
                long w2 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_2).NumericValue);
                long w3 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_3).NumericValue);
                assertTrue(f.equals(new BytesRef(doc.Get(FIELD_NAME))));
                assertEquals(inputIterator.Weight, (w1 + w2 + w3));
                assertTrue(inputIterator.Payload.equals(doc.GetField(PAYLOAD_FIELD_NAME).BinaryValue));
                ISet<BytesRef> originalCtxs = new HashSet<BytesRef>();
                foreach (IndexableField ctxf in doc.GetFields(CONTEXTS_FIELD_NAME))
                {
                    originalCtxs.add(ctxf.BinaryValue);
                }
                assertEquals(originalCtxs, inputIterator.Contexts);
            }
            assertTrue(!docs.Any());
            ir.Dispose();
            dir.Dispose();
        }
Example #45
        public void TestTerms()
        {
            Random random = Random();
            int num = AtLeast(10000);
#pragma warning disable 612, 618
            IComparer<BytesRef> comparator = random.nextBoolean() ? BytesRef.UTF8SortedAsUnicodeComparer : BytesRef.UTF8SortedAsUTF16Comparer;
#pragma warning restore 612, 618
            IDictionary<BytesRef, KeyValuePair<long, BytesRef>> sorted = new SortedDictionary<BytesRef, KeyValuePair<long, BytesRef>>(comparator); //new TreeMap<>(comparator);
            IDictionary<BytesRef, long> sortedWithoutPayload = new SortedDictionary<BytesRef, long>(comparator); //new TreeMap<>(comparator);
            IDictionary<BytesRef, KeyValuePair<long, ISet<BytesRef>>> sortedWithContext = new SortedDictionary<BytesRef, KeyValuePair<long, ISet<BytesRef>>>(comparator); //new TreeMap<>(comparator);
            IDictionary<BytesRef, KeyValuePair<long, KeyValuePair<BytesRef, ISet<BytesRef>>>> sortedWithPayloadAndContext = new SortedDictionary<BytesRef, KeyValuePair<long, KeyValuePair<BytesRef, ISet<BytesRef>>>>(comparator); //new TreeMap<>(comparator);
            Input[] unsorted = new Input[num];
            Input[] unsortedWithoutPayload = new Input[num];
            Input[] unsortedWithContexts = new Input[num];
            Input[] unsortedWithPayloadAndContext = new Input[num];
            ISet<BytesRef> ctxs;
            for (int i = 0; i < num; i++)
            {
                BytesRef key2;
                BytesRef payload;
                ctxs = new HashSet<BytesRef>();
                do
                {
                    key2 = new BytesRef(TestUtil.RandomUnicodeString(random));
                    payload = new BytesRef(TestUtil.RandomUnicodeString(random));
                    for (int j = 0; j < AtLeast(2); j++)
                    {
                        ctxs.add(new BytesRef(TestUtil.RandomUnicodeString(random)));
                    }
                } while (sorted.ContainsKey(key2));
                long value = random.Next();
                sortedWithoutPayload.Put(key2, value);
                sorted.Put(key2, new KeyValuePair<long, BytesRef>(value, payload));
                sortedWithContext.Put(key2, new KeyValuePair<long, ISet<BytesRef>>(value, ctxs));
                sortedWithPayloadAndContext.Put(key2, new KeyValuePair<long, KeyValuePair<BytesRef, ISet<BytesRef>>>(value, new KeyValuePair<BytesRef, ISet<BytesRef>>(payload, ctxs)));
                unsorted[i] = new Input(key2, value, payload);
                unsortedWithoutPayload[i] = new Input(key2, value);
                unsortedWithContexts[i] = new Input(key2, value, ctxs);
                unsortedWithPayloadAndContext[i] = new Input(key2, value, payload, ctxs);
            }

            // test the sorted iterator wrapper with payloads
            IInputIterator wrapper = new SortedInputIterator(new InputArrayIterator(unsorted), comparator);
            IEnumerator<KeyValuePair<BytesRef, KeyValuePair<long, BytesRef>>> expected = sorted.GetEnumerator();
            while (expected.MoveNext())
            {
                KeyValuePair<BytesRef, KeyValuePair<long, BytesRef>> entry = expected.Current;


                assertEquals(entry.Key, wrapper.Next());
                assertEquals(Convert.ToInt64(entry.Value.Key), wrapper.Weight);
                assertEquals(entry.Value.Value, wrapper.Payload);
            }
            assertNull(wrapper.Next());

            // test the sorted iterator wrapper with contexts
            wrapper = new SortedInputIterator(new InputArrayIterator(unsortedWithContexts), comparator);
            IEnumerator<KeyValuePair<BytesRef, KeyValuePair<long, ISet<BytesRef>>>> actualEntries = sortedWithContext.GetEnumerator();
            while (actualEntries.MoveNext())
            {
                KeyValuePair<BytesRef, KeyValuePair<long, ISet<BytesRef>>> entry = actualEntries.Current;
                assertEquals(entry.Key, wrapper.Next());
                assertEquals(Convert.ToInt64(entry.Value.Key), wrapper.Weight);
                ISet<BytesRef> actualCtxs = entry.Value.Value;
                assertEquals(actualCtxs, wrapper.Contexts);
            }
            assertNull(wrapper.Next());

            // test the sorted iterator wrapper with contexts and payload
            wrapper = new SortedInputIterator(new InputArrayIterator(unsortedWithPayloadAndContext), comparator);
            IEnumerator<KeyValuePair<BytesRef, KeyValuePair<long, KeyValuePair<BytesRef, ISet<BytesRef>>>>> expectedPayloadContextEntries = sortedWithPayloadAndContext.GetEnumerator();
            while (expectedPayloadContextEntries.MoveNext())
            {
                KeyValuePair<BytesRef, KeyValuePair<long, KeyValuePair<BytesRef, ISet<BytesRef>>>> entry = expectedPayloadContextEntries.Current;
                assertEquals(entry.Key, wrapper.Next());
                assertEquals(Convert.ToInt64(entry.Value.Key), wrapper.Weight);
                ISet<BytesRef> actualCtxs = entry.Value.Value.Value;
                assertEquals(actualCtxs, wrapper.Contexts);
                BytesRef actualPayload = entry.Value.Value.Key;
                assertEquals(actualPayload, wrapper.Payload);
            }
            assertNull(wrapper.Next());

            // test the unsorted iterator wrapper with payloads
            wrapper = new UnsortedInputIterator(new InputArrayIterator(unsorted));
            IDictionary<BytesRef, KeyValuePair<long, BytesRef>> actual = new SortedDictionary<BytesRef, KeyValuePair<long, BytesRef>>(); //new TreeMap<>();
            BytesRef key;
            while ((key = wrapper.Next()) != null)
            {
                long value = wrapper.Weight;
                BytesRef payload = wrapper.Payload;
                actual.Put(BytesRef.DeepCopyOf(key), new KeyValuePair<long, BytesRef>(value, BytesRef.DeepCopyOf(payload)));
            }
            assertEquals(sorted, actual);

            // test the sorted iterator wrapper without payloads
            IInputIterator wrapperWithoutPayload = new SortedInputIterator(new InputArrayIterator(unsortedWithoutPayload), comparator);
            IEnumerator<KeyValuePair<BytesRef, long>> expectedWithoutPayload = sortedWithoutPayload.GetEnumerator();
            while (expectedWithoutPayload.MoveNext())
            {
                KeyValuePair<BytesRef, long> entry = expectedWithoutPayload.Current;


                assertEquals(entry.Key, wrapperWithoutPayload.Next());
                assertEquals(Convert.ToInt64(entry.Value), wrapperWithoutPayload.Weight);
                assertNull(wrapperWithoutPayload.Payload);
            }
            assertNull(wrapperWithoutPayload.Next());

            // test the unsorted iterator wrapper without payloads
            wrapperWithoutPayload = new UnsortedInputIterator(new InputArrayIterator(unsortedWithoutPayload));
            IDictionary<BytesRef, long> actualWithoutPayload = new SortedDictionary<BytesRef, long>(); //new TreeMap<>();
            while ((key = wrapperWithoutPayload.Next()) != null)
            {
                long value = wrapperWithoutPayload.Weight;
                assertNull(wrapperWithoutPayload.Payload);
                actualWithoutPayload.Put(BytesRef.DeepCopyOf(key), value);
            }
            assertEquals(sortedWithoutPayload, actualWithoutPayload);
        }
Example #46
        protected virtual void AssertOperation(IDictionary<String, IShape> indexedDocs,
                                       SpatialOperation operation, IShape queryShape)
        {
            //Generate truth via brute force
            ISet<string> expectedIds = new HashSet<string>();
            foreach (var stringShapeEntry in indexedDocs)
            {
                if (operation.Evaluate(stringShapeEntry.Value, queryShape))
                    expectedIds.add(stringShapeEntry.Key);
            }

            SpatialTestQuery testQuery = new SpatialTestQuery();
            testQuery.args = new SpatialArgs(operation, queryShape);
            testQuery.ids = new List<string>(expectedIds);
            runTestQuery(SpatialMatchConcern.FILTER, testQuery);
        }