/// <summary>
/// Registers <paramref name="policy"/> for <paramref name="policyInterface"/>,
/// keyed by this entry's <c>Type</c> and <c>Name</c>, in the underlying policy list.
/// </summary>
public void Set(Type policyInterface, object policy) => List.Set(Type, Name, policyInterface, policy);
/// <summary>
/// Serialize state descriptors, edge descriptors, and decision→state map into a list of ints.
/// </summary>
/// <remarks>
/// Layout:
/// grammar-type, (ANTLRParser.LEXER, ...)
/// max token type,
/// num states,
/// state-0-type ruleIndex, state-1-type ruleIndex, ... state-i-type ruleIndex optional-arg ...
/// num rules,
/// rule-1-start-state rule-1-args, rule-2-start-state rule-2-args, ...
/// (args are token type,actionIndex in lexer else 0,0)
/// num modes,
/// mode-0-start-state, mode-1-start-state, ... (parser has 0 modes)
/// num sets
/// set-0-interval-count intervals, set-1-interval-count intervals, ...
/// num total edges,
/// src, trg, edge-type, edge arg1, optional edge arg2 (present always), ...
/// num decisions,
/// decision-0-start-state, decision-1-start-state, ...
/// Convenient to pack into unsigned shorts to make as Java string.
/// </remarks>
public virtual List<int> Serialize()
{
    List<int> data = new List<int>();
    data.Add(ATNDeserializer.SerializedVersion);
    SerializeUUID(data, ATNDeserializer.SerializedUuid);
    // convert grammar type to ATN const to avoid dependence on ANTLRParser
    data.Add((int)(atn.grammarType));
    data.Add(atn.maxTokenType);
    int nedges = 0;
    IDictionary<IntervalSet, int> setIndices = new Dictionary<IntervalSet, int>();
    IList<IntervalSet> sets = new List<IntervalSet>();
    // dump states, count edges and collect sets while doing so
    List<int> nonGreedyStates = new List<int>();
    List<int> sllStates = new List<int>();
    List<int> precedenceStates = new List<int>();
    data.Add(atn.states.Count);
    foreach (ATNState s in atn.states)
    {
        if (s == null)
        {
            // might be optimized away
            data.Add((int)(StateType.InvalidType));
            continue;
        }
        StateType stateType = s.StateType;
        if (s is DecisionState)
        {
            DecisionState decisionState = (DecisionState)s;
            if (decisionState.nonGreedy)
            {
                nonGreedyStates.Add(s.stateNumber);
            }
            if (decisionState.sll)
            {
                sllStates.Add(s.stateNumber);
            }
        }
        if (s is RuleStartState && ((RuleStartState)s).isPrecedenceRule)
        {
            precedenceStates.Add(s.stateNumber);
        }
        data.Add((int)(stateType));
        // char.MaxValue (0xFFFF) is the sentinel for "no rule"
        if (s.ruleIndex == -1)
        {
            data.Add(char.MaxValue);
        }
        else
        {
            data.Add(s.ruleIndex);
        }
        if (s.StateType == StateType.LoopEnd)
        {
            data.Add(((LoopEndState)s).loopBackState.stateNumber);
        }
        else if (s is BlockStartState)
        {
            data.Add(((BlockStartState)s).endState.stateNumber);
        }
        if (s.StateType != StateType.RuleStop)
        {
            // the deserializer can trivially derive these edges, so there's no need to serialize them
            nedges += s.NumberOfTransitions;
        }
        for (int i = 0; i < s.NumberOfTransitions; i++)
        {
            Transition t = s.Transition(i);
            TransitionType edgeType = Transition.serializationTypes.Get(t.GetType());
            if (edgeType == TransitionType.Set || edgeType == TransitionType.NotSet)
            {
                SetTransition st = (SetTransition)t;
                if (!setIndices.ContainsKey(st.set))
                {
                    sets.Add(st.set);
                    setIndices.Put(st.set, sets.Count - 1);
                }
            }
        }
    }
    // non-greedy states
    data.Add(nonGreedyStates.Count);
    for (int i = 0; i < nonGreedyStates.Count; i++)
    {
        data.Add(nonGreedyStates[i]);
    }
    // SLL decisions
    data.Add(sllStates.Count);
    for (int i = 0; i < sllStates.Count; i++)
    {
        data.Add(sllStates[i]);
    }
    // precedence states
    data.Add(precedenceStates.Count);
    for (int i = 0; i < precedenceStates.Count; i++)
    {
        data.Add(precedenceStates[i]);
    }
    int nrules = atn.ruleToStartState.Length;
    data.Add(nrules);
    for (int r = 0; r < nrules; r++)
    {
        ATNState ruleStartState = atn.ruleToStartState[r];
        data.Add(ruleStartState.stateNumber);
        bool leftFactored = ruleNames[ruleStartState.ruleIndex].IndexOf(ATNSimulator.RuleVariantDelimiter) >= 0;
        data.Add(leftFactored ? 1 : 0);
        if (atn.grammarType == ATNType.Lexer)
        {
            if (atn.ruleToTokenType[r] == TokenConstants.Eof)
            {
                data.Add(char.MaxValue);
            }
            else
            {
                data.Add(atn.ruleToTokenType[r]);
            }
        }
    }
    int nmodes = atn.modeToStartState.Count;
    data.Add(nmodes);
    if (nmodes > 0)
    {
        foreach (ATNState modeStartState in atn.modeToStartState)
        {
            data.Add(modeStartState.stateNumber);
        }
    }
    int nsets = sets.Count;
    data.Add(nsets);
    foreach (IntervalSet set in sets)
    {
        // EOF lives in the first interval (if present); it is serialized as a
        // separate flag, so the interval containing only EOF is not counted.
        bool containsEof = set.Contains(TokenConstants.Eof);
        if (containsEof && set.GetIntervals()[0].b == TokenConstants.Eof)
        {
            data.Add(set.GetIntervals().Count - 1);
        }
        else
        {
            data.Add(set.GetIntervals().Count);
        }
        data.Add(containsEof ? 1 : 0);
        foreach (Interval I in set.GetIntervals())
        {
            if (I.a == TokenConstants.Eof)
            {
                if (I.b == TokenConstants.Eof)
                {
                    continue;
                }
                else
                {
                    data.Add(0);
                }
            }
            else
            {
                data.Add(I.a);
            }
            data.Add(I.b);
        }
    }
    data.Add(nedges);
    foreach (ATNState s in atn.states)
    {
        if (s == null)
        {
            // might be optimized away
            continue;
        }
        if (s.StateType == StateType.RuleStop)
        {
            continue;
        }
        // FIX: original loop read "for (int i = 0; i_3 < ...; i_3++)" using the
        // out-of-scope precedence-loop counter; it must iterate with its own index.
        for (int i = 0; i < s.NumberOfTransitions; i++)
        {
            Transition t = s.Transition(i);
            if (atn.states[t.target.stateNumber] == null)
            {
                throw new InvalidOperationException("Cannot serialize a transition to a removed state.");
            }
            int src = s.stateNumber;
            int trg = t.target.stateNumber;
            TransitionType edgeType = Transition.serializationTypes.Get(t.GetType());
            int arg1 = 0;
            int arg2 = 0;
            int arg3 = 0;
            switch (edgeType)
            {
                case TransitionType.Rule:
                {
                    trg = ((RuleTransition)t).followState.stateNumber;
                    arg1 = ((RuleTransition)t).target.stateNumber;
                    arg2 = ((RuleTransition)t).ruleIndex;
                    arg3 = ((RuleTransition)t).precedence;
                    break;
                }
                case TransitionType.Precedence:
                {
                    PrecedencePredicateTransition ppt = (PrecedencePredicateTransition)t;
                    arg1 = ppt.precedence;
                    break;
                }
                case TransitionType.Predicate:
                {
                    PredicateTransition pt = (PredicateTransition)t;
                    arg1 = pt.ruleIndex;
                    arg2 = pt.predIndex;
                    arg3 = pt.isCtxDependent ? 1 : 0;
                    break;
                }
                case TransitionType.Range:
                {
                    arg1 = ((RangeTransition)t).from;
                    arg2 = ((RangeTransition)t).to;
                    if (arg1 == TokenConstants.Eof)
                    {
                        arg1 = 0;
                        arg3 = 1;
                    }
                    break;
                }
                case TransitionType.Atom:
                {
                    arg1 = ((AtomTransition)t).label;
                    if (arg1 == TokenConstants.Eof)
                    {
                        arg1 = 0;
                        arg3 = 1;
                    }
                    break;
                }
                case TransitionType.Action:
                {
                    ActionTransition at = (ActionTransition)t;
                    arg1 = at.ruleIndex;
                    arg2 = at.actionIndex;
                    if (arg2 == -1)
                    {
                        arg2 = unchecked((int)(0xFFFF));
                    }
                    arg3 = at.isCtxDependent ? 1 : 0;
                    break;
                }
                case TransitionType.Set:
                {
                    arg1 = setIndices.Get(((SetTransition)t).set);
                    break;
                }
                case TransitionType.NotSet:
                {
                    arg1 = setIndices.Get(((SetTransition)t).set);
                    break;
                }
                case TransitionType.Wildcard:
                {
                    break;
                }
            }
            data.Add(src);
            data.Add(trg);
            data.Add((int)(edgeType));
            data.Add(arg1);
            data.Add(arg2);
            data.Add(arg3);
        }
    }
    int ndecisions = atn.decisionToState.Count;
    data.Add(ndecisions);
    foreach (DecisionState decStartState in atn.decisionToState)
    {
        data.Add(decStartState.stateNumber);
    }
    //
    // LEXER ACTIONS
    //
    if (atn.grammarType == ATNType.Lexer)
    {
        data.Add(atn.lexerActions.Length);
        foreach (ILexerAction action in atn.lexerActions)
        {
            data.Add((int)(action.ActionType));
            switch (action.ActionType)
            {
                case LexerActionType.Channel:
                {
                    int channel = ((LexerChannelAction)action).Channel;
                    data.Add(channel != -1 ? channel : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                case LexerActionType.Custom:
                {
                    int ruleIndex = ((LexerCustomAction)action).RuleIndex;
                    int actionIndex = ((LexerCustomAction)action).ActionIndex;
                    data.Add(ruleIndex != -1 ? ruleIndex : unchecked((int)(0xFFFF)));
                    data.Add(actionIndex != -1 ? actionIndex : unchecked((int)(0xFFFF)));
                    break;
                }
                case LexerActionType.Mode:
                {
                    int mode = ((LexerModeAction)action).Mode;
                    data.Add(mode != -1 ? mode : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                case LexerActionType.More:
                {
                    data.Add(0);
                    data.Add(0);
                    break;
                }
                case LexerActionType.PopMode:
                {
                    data.Add(0);
                    data.Add(0);
                    break;
                }
                case LexerActionType.PushMode:
                {
                    // FIX: 'mode' was assigned without a declaration in this case's
                    // scope (the Mode case's local is not visible here).
                    int mode = ((LexerPushModeAction)action).Mode;
                    data.Add(mode != -1 ? mode : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                case LexerActionType.Skip:
                {
                    data.Add(0);
                    data.Add(0);
                    break;
                }
                case LexerActionType.Type:
                {
                    int type = ((LexerTypeAction)action).Type;
                    data.Add(type != -1 ? type : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                default:
                {
                    string message = string.Format(CultureInfo.CurrentCulture, "The specified lexer action type {0} is not valid.", action.ActionType);
                    throw new ArgumentException(message);
                }
            }
        }
    }
    // don't adjust the first value since that's the version number
    for (int i = 1; i < data.Count; i++)
    {
        if (data[i] < char.MinValue || data[i] > char.MaxValue)
        {
            throw new NotSupportedException("Serialized ATN data element out of range.");
        }
        // shift by 2 and mask to 16 bits so the data packs into a UTF-16 string
        int value = (data[i] + 2) & unchecked((int)(0xFFFF));
        data[i] = value;
    }
    return data;
}
/// <summary>
/// Split <c>&lt;ID&gt; = &lt;e:expr&gt; ;</c> into 4 chunks for tokenizing by
/// <see cref="Tokenize(string)"/>.
/// </summary>
/// <param name="pattern">Pattern text containing literal text and tags delimited by the
/// <c>start</c>/<c>stop</c> strings; delimiters preceded by <c>escape</c> are literal.</param>
/// <returns>Alternating <c>TextChunk</c>/<c>TagChunk</c> pieces, in order of appearance.</returns>
/// <exception cref="ArgumentException">If tags are unterminated, unopened, or out of order.</exception>
internal virtual IList<Chunk> Split(string pattern)
{
    int p = 0;
    int n = pattern.Length;
    IList<Chunk> chunks = new List<Chunk>();
    // find all start and stop indexes first, then collect
    IList<int> starts = new List<int>();
    IList<int> stops = new List<int>();
    while (p < n)
    {
        if (p == pattern.IndexOf(escape + start, p))
        {
            // escaped start delimiter: treat as literal text
            p += escape.Length + start.Length;
        }
        else if (p == pattern.IndexOf(escape + stop, p))
        {
            // escaped stop delimiter: treat as literal text
            p += escape.Length + stop.Length;
        }
        else if (p == pattern.IndexOf(start, p))
        {
            starts.Add(p);
            p += start.Length;
        }
        else if (p == pattern.IndexOf(stop, p))
        {
            stops.Add(p);
            p += stop.Length;
        }
        else
        {
            p++;
        }
    }
    if (starts.Count > stops.Count)
    {
        throw new ArgumentException("unterminated tag in pattern: " + pattern);
    }
    if (starts.Count < stops.Count)
    {
        throw new ArgumentException("missing start tag in pattern: " + pattern);
    }
    int ntags = starts.Count;
    for (int i = 0; i < ntags; i++)
    {
        if (starts[i] >= stops[i])
        {
            throw new ArgumentException("tag delimiters out of order in pattern: " + pattern);
        }
    }
    // collect into chunks now
    if (ntags == 0)
    {
        string text = Sharpen.Runtime.Substring(pattern, 0, n);
        chunks.Add(new TextChunk(text));
    }
    if (ntags > 0 && starts[0] > 0)
    {
        // copy text up to first tag into chunks
        string text = Sharpen.Runtime.Substring(pattern, 0, starts[0]);
        chunks.Add(new TextChunk(text));
    }
    for (int i = 0; i < ntags; i++)
    {
        // copy inside of <tag>
        string tag = Sharpen.Runtime.Substring(pattern, starts[i] + start.Length, stops[i]);
        string ruleOrToken = tag;
        string label = null;
        int colon = tag.IndexOf(':');
        if (colon >= 0)
        {
            label = Sharpen.Runtime.Substring(tag, 0, colon);
            ruleOrToken = Sharpen.Runtime.Substring(tag, colon + 1, tag.Length);
        }
        chunks.Add(new TagChunk(label, ruleOrToken));
        if (i + 1 < ntags)
        {
            // copy from end of <tag> to start of next
            string text = Sharpen.Runtime.Substring(pattern, stops[i] + stop.Length, starts[i + 1]);
            chunks.Add(new TextChunk(text));
        }
    }
    if (ntags > 0)
    {
        int afterLastTag = stops[ntags - 1] + stop.Length;
        if (afterLastTag < n)
        {
            // copy text from end of last tag to end
            string text = Sharpen.Runtime.Substring(pattern, afterLastTag, n);
            chunks.Add(new TextChunk(text));
        }
    }
    // strip out the escape sequences from text chunks but not tags
    for (int i = 0; i < chunks.Count; i++)
    {
        Chunk c = chunks[i];
        if (c is TextChunk)
        {
            TextChunk tc = (TextChunk)c;
            string unescaped = tc.Text.Replace(escape, string.Empty);
            if (unescaped.Length < tc.Text.Length)
            {
                // standard indexer instead of the Sharpen Set() extension
                chunks[i] = new TextChunk(unescaped);
            }
        }
    }
    return chunks;
}
/// <summary>
/// Replace all elements of list, where predicate matches, with newElement.
/// </summary>
/// <param name="list">Source list (modified in place)</param>
/// <param name="predicate">predicate that has to match</param>
/// <param name="newElement">Element that should be set</param>
/// <typeparam name="T">Type of list entries</typeparam>
public static void UpdateWhere<T>(this List<T> list, Func<T, bool> predicate, T newElement)
{
    // Replace in place instead of building a full LINQ copy and relying on the
    // non-standard List.Set(...) extension; the final list contents are identical.
    for (int i = 0; i < list.Count; i++)
    {
        if (predicate(list[i]))
        {
            list[i] = newElement;
        }
    }
}
/// <summary>
/// Gets or sets the element at the given index through the non-generic
/// <see cref="System.Collections.IList"/> interface; the setter casts the
/// supplied value to <typeparamref name="T"/> before storing it.
/// </summary>
object System.Collections.IList.this[int index]
{
    get { return List.Get(index); }
    set { List.Set(index, (T)value); }
}
// Wraps an IChartCommandsExecutor and drains/optimizes the queued chart commands
// on a periodic background loop (polling interval = executionLoopDelay ms).
public ChartCommandsOptimizingExecutorProxy(int executionLoopDelay, IChartCommandsExecutor @base) : base(@base)
{
    // NOTE(review): fire-and-forget async void loop with no cancellation token —
    // it runs for the lifetime of the process; confirm that is intended.
    updateLoop();
    return;

    /////////////////////////////
    async void updateLoop()
    {
        // Simple logic at the moment
        while (true)
        {
            await Task.Delay(executionLoopDelay);
            if (_commandsQueue.Count > 0)
            {
                optimizeCommandsSequence();
                // Because ui thread can change the list while awaiting
                var commands = _commandsQueue.ToArray();
                try
                {
                    await base.ExecuteCommandsAsync(commands);
                }
                catch (Exception ex)
                {
                    // Runtime log message is localized (Russian: "chart update error");
                    // left byte-for-byte as-is. On failure the whole queue is dropped.
                    Logger.LogErrorEverywhere("Ошибка обновления графика", ex);
                    _commandsQueue.Clear();
                }
                finally
                {
                    // Remove only the snapshot we executed; commands enqueued during
                    // the await survive for the next iteration.
                    foreach (var command in commands)
                    {
                        _commandsQueue.Remove(command);
                    }
                }
            }

            // Collapses redundant queued commands before execution.
            void optimizeCommandsSequence()
            {
                for (int i = 0; i < _commandsQueue.Count; i++)
                {
                    var command = _commandsQueue[i];
                    switch (command.Command)
                    {
                        case ChartCommand.INITIALIZE:
                            // INITIALIZE supersedes everything queued before it.
                            // Set(null, 0, i) presumably nulls entries [0, i) for the
                            // RemoveAll below — TODO confirm the extension's semantics.
                            _commandsQueue.Set(null, 0, i);
                            break;
                        case ChartCommand.CLEAR:
                        case ChartCommand.RENDER_AREA:
                            // A later CLEAR/RENDER_AREA makes earlier ones redundant.
                            for (int k = 0; k < i; k++)
                            {
                                var c = _commandsQueue[k];
                                if (c != null && (c.Command == ChartCommand.RENDER_AREA || c.Command == ChartCommand.CLEAR))
                                {
                                    _commandsQueue[k] = null;
                                }
                            }
                            break;
                    }
                }
                _commandsQueue.RemoveAll(c => c == null);
            }
        }
    }
}
/// <summary>
/// Split <c>&lt;ID&gt; = &lt;e:expr&gt; ;</c> into 4 chunks for tokenizing by
/// <see cref="Tokenize(string)"/>.
/// </summary>
/// <param name="pattern">Pattern text containing literal text and tags delimited by the
/// <c>start</c>/<c>stop</c> strings; delimiters preceded by <c>escape</c> are literal.</param>
/// <returns>Alternating <c>TextChunk</c>/<c>TagChunk</c> pieces, in order of appearance.</returns>
/// <exception cref="ArgumentException">If tags are unterminated, unopened, or out of order.</exception>
internal virtual IList<Chunk> Split(string pattern)
{
    int p = 0;
    int n = pattern.Length;
    IList<Chunk> chunks = new List<Chunk>();
    // find all start and stop indexes first, then collect
    IList<int> starts = new List<int>();
    IList<int> stops = new List<int>();
    while (p < n)
    {
        if (p == pattern.IndexOf(escape + start, p))
        {
            // escaped start delimiter: treat as literal text
            p += escape.Length + start.Length;
        }
        else if (p == pattern.IndexOf(escape + stop, p))
        {
            // escaped stop delimiter: treat as literal text
            p += escape.Length + stop.Length;
        }
        else if (p == pattern.IndexOf(start, p))
        {
            starts.Add(p);
            p += start.Length;
        }
        else if (p == pattern.IndexOf(stop, p))
        {
            stops.Add(p);
            p += stop.Length;
        }
        else
        {
            p++;
        }
    }
    if (starts.Count > stops.Count)
    {
        throw new ArgumentException("unterminated tag in pattern: " + pattern);
    }
    if (starts.Count < stops.Count)
    {
        throw new ArgumentException("missing start tag in pattern: " + pattern);
    }
    int ntags = starts.Count;
    for (int i = 0; i < ntags; i++)
    {
        if (starts[i] >= stops[i])
        {
            throw new ArgumentException("tag delimiters out of order in pattern: " + pattern);
        }
    }
    // collect into chunks now
    if (ntags == 0)
    {
        string text = Sharpen.Runtime.Substring(pattern, 0, n);
        chunks.Add(new TextChunk(text));
    }
    if (ntags > 0 && starts[0] > 0)
    {
        // copy text up to first tag into chunks
        string text = Sharpen.Runtime.Substring(pattern, 0, starts[0]);
        chunks.Add(new TextChunk(text));
    }
    for (int i = 0; i < ntags; i++)
    {
        // copy inside of <tag>
        string tag = Sharpen.Runtime.Substring(pattern, starts[i] + start.Length, stops[i]);
        string ruleOrToken = tag;
        string label = null;
        int colon = tag.IndexOf(':');
        if (colon >= 0)
        {
            label = Sharpen.Runtime.Substring(tag, 0, colon);
            ruleOrToken = Sharpen.Runtime.Substring(tag, colon + 1, tag.Length);
        }
        chunks.Add(new TagChunk(label, ruleOrToken));
        if (i + 1 < ntags)
        {
            // copy from end of <tag> to start of next
            string text = Sharpen.Runtime.Substring(pattern, stops[i] + stop.Length, starts[i + 1]);
            chunks.Add(new TextChunk(text));
        }
    }
    if (ntags > 0)
    {
        int afterLastTag = stops[ntags - 1] + stop.Length;
        if (afterLastTag < n)
        {
            // copy text from end of last tag to end
            string text = Sharpen.Runtime.Substring(pattern, afterLastTag, n);
            chunks.Add(new TextChunk(text));
        }
    }
    // strip out the escape sequences from text chunks but not tags
    for (int i = 0; i < chunks.Count; i++)
    {
        Chunk c = chunks[i];
        if (c is TextChunk)
        {
            TextChunk tc = (TextChunk)c;
            string unescaped = tc.Text.Replace(escape, string.Empty);
            if (unescaped.Length < tc.Text.Length)
            {
                // standard indexer instead of the Sharpen Set() extension
                chunks[i] = new TextChunk(unescaped);
            }
        }
    }
    return chunks;
}
/// <summary>
/// Runs one expansion step at edit distance <paramref name="d"/>, walking every
/// diagonal k in the current [beginK, endK] band, extending the furthest-reaching
/// paths and recording them in <c>x</c>/<c>_snake</c>.
/// (NOTE(review): this looks like one iteration of a Myers-style O(ND) diff
/// frontier walk — confirm against the enclosing class.)
/// </summary>
/// <param name="d">Current edit distance (band half-width around middleK).</param>
/// <returns><c>true</c> as soon as <c>Meets(...)</c> reports the opposing frontier
/// was reached; <c>false</c> if the whole band was processed without meeting.</returns>
public bool Calculate(int d)
{
    // Remember the previous band so we know which neighbors (k±1) exist.
    prevBeginK = beginK;
    prevEndK = endK;
    beginK = ForceKIntoRange(middleK - d);
    endK = ForceKIntoRange(middleK + d);
    // TODO: handle i more efficiently
    // TODO: walk snake(k, getX(d, k)) only once per (d, k)
    // TODO: move end points out of the loop to avoid conditionals inside the loop
    // go backwards so that we can avoid temp vars
    for (int k = endK; k >= beginK; k -= 2)
    {
        int left = -1, right = -1;
        long leftSnake = -1L, rightSnake = -1L;
        // TODO: refactor into its own function
        int i;
        if (k > prevBeginK)
        {
            // candidate coming from the k-1 diagonal of the previous step
            i = GetIndex(d - 1, k - 1);
            left = x[i];
            int end = Snake(k - 1, left);
            // only allocate a new snake record if the diagonal run advanced
            leftSnake = left != end ? NewSnake(k - 1, end) : _snake[i];
            if (Meets(d, k - 1, end, leftSnake))
            {
                return (true);
            }
            left = GetLeft(end);
        }
        if (k < prevEndK)
        {
            // candidate coming from the k+1 diagonal of the previous step
            i = GetIndex(d - 1, k + 1);
            right = x[i];
            int end = Snake(k + 1, right);
            rightSnake = right != end ? NewSnake(k + 1, end) : _snake[i];
            if (Meets(d, k + 1, end, rightSnake))
            {
                return (true);
            }
            right = GetRight(end);
        }
        int newX;
        long newSnakeTmp;
        // pick the better of the two neighbor candidates; at the band edges only
        // one neighbor exists, so take it unconditionally
        if (k >= prevEndK || (k > prevBeginK && IsBetter(left, right)))
        {
            newX = left;
            newSnakeTmp = leftSnake;
        }
        else
        {
            newX = right;
            newSnakeTmp = rightSnake;
        }
        if (Meets(d, k, newX, newSnakeTmp))
        {
            return (true);
        }
        AdjustMinMaxK(k, newX);
        // store the furthest-reaching x and its snake for diagonal k at distance d
        i = GetIndex(d, k);
        x.Set(i, newX);
        _snake.Set(i, newSnakeTmp);
    }
    return (false);
}
/// <summary>
/// After the parent list is structurally modified, every operation on a
/// previously obtained sublist view must throw ConcurrentModificationException.
/// </summary>
public void TestSubListExceptions()
{
    List<String> holder = new ArrayList<String>(16);
    for (int i = 0; i < 10; i++)
    {
        holder.Add(i.ToString());
    }
    // parent change should cause sublist concurrentmodification fail
    List<String> sub = holder.SubList(0, holder.Size());
    holder.Add(11.ToString());
    ExpectConcurrentModification(() => sub.Size());
    ExpectConcurrentModification(() => sub.Add(12.ToString()));
    ExpectConcurrentModification(() => sub.Add(0, 11.ToString()));
    ExpectConcurrentModification(() => sub.Clear());
    ExpectConcurrentModification(() => sub.Contains(11.ToString()));
    ExpectConcurrentModification(() => sub.Get(9));
    ExpectConcurrentModification(() => sub.IndexOf(10.ToString()));
    ExpectConcurrentModification(() => sub.IsEmpty());
    ExpectConcurrentModification(() => sub.Iterator());
    ExpectConcurrentModification(() => sub.LastIndexOf(10.ToString()));
    ExpectConcurrentModification(() => sub.ListIterator());
    ExpectConcurrentModification(() => sub.ListIterator(0));
    ExpectConcurrentModification(() => sub.Remove(0));
    ExpectConcurrentModification(() => sub.Remove(9));
    ExpectConcurrentModification(() => sub.Set(0, 0.ToString()));
    ExpectConcurrentModification(() => sub.Size());
    ExpectConcurrentModification(() => sub.ToArray());
    ExpectConcurrentModification(() => sub.ToString());
    holder.Clear();

    // Runs the operation and asserts it throws ConcurrentModificationException.
    void ExpectConcurrentModification(System.Action action)
    {
        try
        {
            action();
            Assert.Fail("Should throw ConcurrentModificationException.");
        }
        catch (ConcurrentModificationException)
        {
        }
    }
}
/// <summary>
/// Wires this state machine node up with its behaviour and outgoing transitions.
/// </summary>
/// <param name="b">Behaviour executed while this node is active.</param>
/// <param name="t">Transitions leading out of this node.</param>
public void Initialize(AIMachineBehaviour b, IEnumerable<AIMachineTransition> t)
{
    behaviour = b;
    // Set(...) presumably replaces the current contents of `transitions` with `t`
    // — TODO confirm the extension method's semantics (replace vs. append).
    transitions.Set(t);
}
/// <summary>
/// Reads the next &lt;DOC&gt; element from the MUC-format input, tokenizes its
/// sentences, extracts gold coref mentions from the inline SGML markup, runs the
/// Stanford pipeline over the text, and returns the assembled Document.
/// </summary>
/// <returns>The next document, or <c>null</c> when no further &lt;DOC&gt; is found.</returns>
/// <exception cref="System.Exception"/>
public override Document NextDoc()
{
    IList<IList<CoreLabel>> allWords = new List<IList<CoreLabel>>();
    IList<Tree> allTrees = new List<Tree>();
    IList<IList<Mention>> allGoldMentions = new List<IList<Mention>>();
    IList<IList<Mention>> allPredictedMentions;
    IList<ICoreMap> allSentences = new List<ICoreMap>();
    Annotation docAnno = new Annotation(string.Empty);
    Pattern docPattern = Pattern.Compile("<DOC>(.*?)</DOC>", Pattern.Dotall + Pattern.CaseInsensitive);
    Pattern sentencePattern = Pattern.Compile("(<s>|<hl>|<dd>|<DATELINE>)(.*?)(</s>|</hl>|</dd>|</DATELINE>)", Pattern.Dotall + Pattern.CaseInsensitive);
    Matcher docMatcher = docPattern.Matcher(fileContents);
    if (!docMatcher.Find(currentOffset))
    {
        return null;
    }
    currentOffset = docMatcher.End();
    string doc = docMatcher.Group(1);
    Matcher sentenceMatcher = sentencePattern.Matcher(doc);
    string ner = null;
    // Maintain current document ID.
    Pattern docIDPattern = Pattern.Compile("<DOCNO>(.*?)</DOCNO>", Pattern.Dotall + Pattern.CaseInsensitive);
    Matcher docIDMatcher = docIDPattern.Matcher(doc);
    if (docIDMatcher.Find())
    {
        currentDocumentID = docIDMatcher.Group(1);
    }
    else
    {
        currentDocumentID = "documentAfter " + currentDocumentID;
    }
    while (sentenceMatcher.Find())
    {
        string sentenceString = sentenceMatcher.Group(2);
        IList<CoreLabel> words = tokenizerFactory.GetTokenizer(new StringReader(sentenceString)).Tokenize();
        // FIXING TOKENIZATION PROBLEMS
        for (int i = 0; i < words.Count; i++)
        {
            CoreLabel w = words[i];
            if (i > 0 && w.Word().Equals("$"))
            {
                // merge "$" back into a preceding PRP/WP token (e.g. "PRP$")
                if (!words[i - 1].Word().EndsWith("PRP") && !words[i - 1].Word().EndsWith("WP"))
                {
                    continue;
                }
                words[i - 1].Set(typeof(CoreAnnotations.TextAnnotation), words[i - 1].Word() + "$");
                words.Remove(i);
                i--;
            }
            else
            {
                if (w.Word().Equals("\\/"))
                {
                    // rejoin tokens split around an escaped slash
                    if (words[i - 1].Word().Equals("</COREF>"))
                    {
                        continue;
                    }
                    w.Set(typeof(CoreAnnotations.TextAnnotation), words[i - 1].Word() + "\\/" + words[i + 1].Word());
                    words.Remove(i + 1);
                    words.Remove(i - 1);
                }
            }
        }
        // END FIXING TOKENIZATION PROBLEMS
        IList<CoreLabel> sentence = new List<CoreLabel>();
        // MUC accepts embedded coref mentions, so we need to keep a stack for the mentions currently open
        Stack<Mention> stack = new Stack<Mention>();
        IList<Mention> mentions = new List<Mention>();
        allWords.Add(sentence);
        allGoldMentions.Add(mentions);
        foreach (CoreLabel word in words)
        {
            string w = word.Get(typeof(CoreAnnotations.TextAnnotation));
            // found regular token: WORD/POS
            if (!w.StartsWith("<") && w.Contains("\\/") && w.LastIndexOf("\\/") != w.Length - 2)
            {
                int i = w.LastIndexOf("\\/");
                string w1 = Sharpen.Runtime.Substring(w, 0, i);
                // we do NOT set POS info here. We take the POS tags from the parser!
                word.Set(typeof(CoreAnnotations.TextAnnotation), w1);
                word.Remove(typeof(CoreAnnotations.OriginalTextAnnotation));
                sentence.Add(word);
            }
            else if (w.StartsWith("<") && !w.StartsWith("<COREF") && !w.StartsWith("</"))
            {
                // found the start SGML tag for a NE, e.g., "<ORGANIZATION>"
                Pattern nerPattern = Pattern.Compile("<(.*?)>");
                Matcher m = nerPattern.Matcher(w);
                m.Find();
                ner = m.Group(1);
            }
            else if (w.StartsWith("</") && !w.StartsWith("</COREF"))
            {
                // found the end SGML tag for a NE, e.g., "</ORGANIZATION>"
                Pattern nerPattern = Pattern.Compile("</(.*?)>");
                Matcher m = nerPattern.Matcher(w);
                m.Find();
                string ner1 = m.Group(1);
                if (ner != null && !ner.Equals(ner1))
                {
                    throw new Exception("Unmatched NE labels in MUC file: " + ner + " v. " + ner1);
                }
                ner = null;
            }
            else if (w.StartsWith("<COREF"))
            {
                // found the start SGML tag for a coref mention
                Mention mention = new Mention();
                // position of this mention in the sentence
                mention.startIndex = sentence.Count;
                // extract GOLD info about this coref chain. needed for eval
                Pattern idPattern = Pattern.Compile("ID=\"(.*?)\"");
                Pattern refPattern = Pattern.Compile("REF=\"(.*?)\"");
                Matcher m = idPattern.Matcher(w);
                m.Find();
                mention.mentionID = System.Convert.ToInt32(m.Group(1));
                m = refPattern.Matcher(w);
                if (m.Find())
                {
                    mention.originalRef = System.Convert.ToInt32(m.Group(1));
                }
                // open mention. keep track of all open mentions using the stack
                stack.Push(mention);
            }
            else if (w.Equals("</COREF>"))
            {
                // found the end SGML tag for a coref mention
                Mention mention = stack.Pop();
                mention.endIndex = sentence.Count;
                // this is a closed mention. add it to the final list of mentions
                // System.err.printf("Found MENTION: ID=%d, REF=%d\n", mention.mentionID, mention.originalRef);
                mentions.Add(mention);
            }
            else
            {
                word.Remove(typeof(CoreAnnotations.OriginalTextAnnotation));
                sentence.Add(word);
            }
        }
        StringBuilder textContent = new StringBuilder();
        for (int i = 0; i < sentence.Count; i++)
        {
            CoreLabel w = sentence[i];
            w.Set(typeof(CoreAnnotations.IndexAnnotation), i + 1);
            w.Set(typeof(CoreAnnotations.UtteranceAnnotation), 0);
            if (i > 0)
            {
                textContent.Append(" ");
            }
            textContent.Append(w.GetString<CoreAnnotations.TextAnnotation>());
        }
        ICoreMap sentCoreMap = new Annotation(textContent.ToString());
        allSentences.Add(sentCoreMap);
        sentCoreMap.Set(typeof(CoreAnnotations.TokensAnnotation), sentence);
    }
    // assign goldCorefClusterID
    IDictionary<int, Mention> idMention = Generics.NewHashMap();
    // temporary use
    foreach (IList<Mention> goldMentions in allGoldMentions)
    {
        foreach (Mention m in goldMentions)
        {
            idMention[m.mentionID] = m;
        }
    }
    foreach (IList<Mention> goldMentions in allGoldMentions)
    {
        foreach (Mention m in goldMentions)
        {
            if (m.goldCorefClusterID == -1)
            {
                if (m.originalRef == -1)
                {
                    m.goldCorefClusterID = m.mentionID;
                }
                else
                {
                    // follow the REF chain until a mention with a resolved cluster id
                    int @ref = m.originalRef;
                    while (true)
                    {
                        Mention m2 = idMention[@ref];
                        if (m2.goldCorefClusterID != -1)
                        {
                            m.goldCorefClusterID = m2.goldCorefClusterID;
                            break;
                        }
                        else if (m2.originalRef == -1)
                        {
                            m2.goldCorefClusterID = m2.mentionID;
                            m.goldCorefClusterID = m2.goldCorefClusterID;
                            break;
                        }
                        else
                        {
                            @ref = m2.originalRef;
                        }
                    }
                }
            }
        }
    }
    docAnno.Set(typeof(CoreAnnotations.SentencesAnnotation), allSentences);
    stanfordProcessor.Annotate(docAnno);
    if (allSentences.Count != allWords.Count)
    {
        throw new InvalidOperationException("allSentences != allWords");
    }
    for (int i = 0; i < allSentences.Count; i++)
    {
        IList<CoreLabel> annotatedSent = allSentences[i].Get(typeof(CoreAnnotations.TokensAnnotation));
        IList<CoreLabel> unannotatedSent = allWords[i];
        IList<Mention> mentionInSent = allGoldMentions[i];
        foreach (Mention m in mentionInSent)
        {
            m.dependency = allSentences[i].Get(typeof(SemanticGraphCoreAnnotations.EnhancedDependenciesAnnotation));
        }
        if (annotatedSent.Count != unannotatedSent.Count)
        {
            throw new InvalidOperationException("annotatedSent != unannotatedSent");
        }
        // FIX: 'sz' was referenced without a declaration (dropped during
        // conversion); it is the common sentence length checked just above.
        int sz = annotatedSent.Count;
        for (int j = 0; j < sz; j++)
        {
            CoreLabel annotatedWord = annotatedSent[j];
            CoreLabel unannotatedWord = unannotatedSent[j];
            if (!annotatedWord.Get(typeof(CoreAnnotations.TextAnnotation)).Equals(unannotatedWord.Get(typeof(CoreAnnotations.TextAnnotation))))
            {
                throw new InvalidOperationException("annotatedWord != unannotatedWord");
            }
        }
        // standard indexer instead of the Sharpen Set() extension
        allWords[i] = annotatedSent;
        allTrees.Add(allSentences[i].Get(typeof(TreeCoreAnnotations.TreeAnnotation)));
    }
    // extract predicted mentions
    allPredictedMentions = mentionFinder.ExtractPredictedMentions(docAnno, maxID, dictionaries);
    // add the relevant fields to mentions and order them for coref
    return Arrange(docAnno, allWords, allTrees, allPredictedMentions, allGoldMentions, true);
}
/// <summary>
/// Removes all elements from the list and adds the elements of the specified
/// collection to it. (Translated from the original Japanese comment.)
/// </summary>
/// <param name="collection">Elements that replace the current list contents.</param>
public void Set(params T[] collection)
{
    // Delegates to the list wrapper's Set extension, then raises the
    // change-notification event so observers see the new contents.
    m_list.Set(collection);
    mChanged.Call();
}
//TODO remove debug code in Main
/// <summary>
/// Entry point: parses the MSBuild-event command line, validates the required
/// properties, loads the recursive configuration, and runs the configured
/// commands for the given build target. Returns 0 on success, otherwise the
/// first failing command's exit code (or 1 on invalid arguments).
/// </summary>
// -bt Event -pp $(ProjectPath) -cn $(ConfigurationName) -pn $(PlatformName) -tp $(TargetPath)
public static int Main(string[] args)
{
    // Debug shortcut: "debug1".."debug3" replace args with canned command lines
    // pointing into the source tree (developer-machine paths; see TODO above).
    if (args.Length > 0 && args[0].StartsWith("debug"))
    {
        var projectRoot = Path.GetFullPath(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + @"\..\..\..");
        switch (args[0])
        {
            case "debug1":
                args = new[] { "-bt", "AfterBuild", "-pp", $@"{projectRoot}\KsWare.MSBuildTargets.DemoApp\KsWare.MSBuildTargets.DemoApp.csproj", "-cn", "Release", "-pn", "Any CPU", "-tp", $@"{projectRoot}\KsWare.MSBuildTargets.DemoApp\bin\Debug\KsWare.MSBuildTargets.DemoApp.exe" };
                break;
            case "debug2":
                args = new[] { "-bt", "AfterBuild", "-pp", $@"{projectRoot}\KsWare.MSBuildTargets\KsWare.MSBuildTargets.csproj", "-cn", "Debug", "-pn", "Any CPU", "-tp", $@"{projectRoot}\KsWare.MSBuildTargets\bin\debug\KsWare.MSBuildTargets.exe" };
                break;
            case "debug3":
                args = new[] { "-bt", "BeforeBuild", "-pp", $@"{projectRoot}\KsWare.MSBuildTargets.DemoApp\KsWare.MSBuildTargets.DemoApp.csproj", "-cn", "Debug", "-pn", "Any CPU", "-tp", $@"{projectRoot}\KsWare.MSBuildTargets.DemoApp\bin\Debug\KsWare.MSBuildTargets.DemoApp.exe" };
                break;
        }
    }
    var properties = new List<ConfigurationFile.Property>();
    for (int i = 0; i < args.Length; i++)
    {
        // NOTE(review): 'param' is only used by the commented-out default case below.
        var param = args[i];
        var paramL = args[i].ToLowerInvariant();
        switch (paramL)
        {
            case "-test": TestMode = true; break;
            case "-bt": properties.Set(N.Target, args[++i]); break;
            case "-pp": properties.Set(N.IDE.ProjectPath, args[++i]); break;
            case "-cn": properties.Set(N.IDE.ConfigurationName, args[++i]); break;
            case "-pn": properties.Set(N.IDE.PlatformName, args[++i]); break;
            case "-tp": properties.Set(N.IDE.TargetPath, args[++i]); break;
            // case "-version": Configuration.Version = args[++i]; break;
            // case "-suffix": Configuration.Suffix = args[++i]; break;
            // case "-outputdirectory": Configuration.OutputDirectory = args[++i]; break;
            // default: Configuration.Options.Add(param);break;
        }
    }
    try
    {
        //TODO message .. Expected properties.GetValue(N.IDE.ProjectPath) not to be <null> or empty because Project path not specified!, but found <null>.
        // NOTE(review): FluentAssertions .Should() is used here for *runtime*
        // argument validation (exceptions are caught below and turned into exit
        // code 1) — unconventional; consider plain argument checks.
        properties.GetValue(N.Target)?.ToLowerInvariant().Should().BeOneOf("Target is invalid!", "beforebuild", "afterbuild");
        properties.GetValue(N.IDE.ProjectPath).Should().NotBeNullOrEmpty("Project path not specified!");
        File.Exists(properties.GetValue(N.IDE.ProjectPath)).Should().BeTrue("Project path not found!");
        properties.GetValue(N.IDE.TargetPath).Should().NotBeNullOrEmpty("Target path not specified!");
        properties.GetValue(N.IDE.ConfigurationName).Should().NotBeNullOrEmpty("Configuration name not specified!");
        properties.GetValue(N.IDE.PlatformName).Should().NotBeNullOrEmpty("Platform name not specified!");
    }
    catch (Exception ex)
    {
        Console.WriteLine("Usage: KsWare.MSBuildTargets.exe -bt BeforeBuild -pp $(ProjectPath) -cn $(ConfigurationName) -pn $(PlatformName) -tp $(TargetPath)");
        Console.Error.WriteLine(ex.Message);
        // Exit the process only when running standalone; return when hosted.
        if (Assembly.GetEntryAssembly() == Assembly.GetExecutingAssembly())
        {
            Environment.Exit(1);
        }
        return (1);
    }
    var directory = Path.GetDirectoryName(properties.GetValue(N.IDE.ProjectPath));
    ConfigurationFile.LoadRecursive(directory, properties);
    var commands = Configuration.GetCommands(properties.GetValue(N.Target), properties.GetValue(N.IDE.ConfigurationName), true);
    foreach (var command in commands)
    {
        if (command.Flags.Contains("ignore", StringComparer.OrdinalIgnoreCase))
        {
            continue;
        }
        if (string.IsNullOrWhiteSpace(command.CommandLine))
        {
            continue;
        }
        // Stop at the first failing command and propagate its exit code.
        var result = CallCommand(command.CommandLine);
        if (result != 0)
        {
            return (result);
        }
    }
    return (0);
}
/// <summary>
/// Sets a value in the list under the given key.
/// (Translated from the original Chinese comment.)
/// </summary>
/// <typeparam name="T">Element type.</typeparam>
/// <param name="list">Target list.</param>
/// <param name="key">Key the value is stored under.</param>
/// <param name="m">Value to store.</param>
/// <returns>The boolean result of the underlying Set extension — presumably
/// whether the value was stored/overwritten; TODO confirm its semantics.</returns>
public bool Set<T>(List<T> list, string key, T m)
{
    // Thin wrapper over the project's keyed List.Set extension method.
    return (list.Set(key, m));
}
/// <summary>
/// Registers <paramref name="policy"/> for <paramref name="policyInterface"/> under the
/// given <paramref name="type"/>/<paramref name="name"/> pair in the underlying policy list.
/// </summary>
public void Set(Type type, string? name, Type policyInterface, object policy) => List.Set(type, name, policyInterface, policy);
/// <summary>
/// Scans the level grid up to XValidationDistance columns ahead of the AI
/// character, collecting jump opportunities (gaps ahead, traps, incoming
/// projectiles, climbable blocks) into jumpPossibilities, then decides
/// whether to jump, keep walking, or turn around (flip).
/// NOTE(review): grid rows appear to be stored top-down; Mathf.Abs(y - 10)
/// converts a row index to a world Y, which assumes an 11-row map — confirm.
/// </summary>
void VerifyNearPositions()
{
    jumpPossibilities = new List<Jump>();
    //isStucked = false;
    bool nextX = false;
    // _x/_y: the character's grid cell; when facing left, look one cell over.
    var _x = (int)position.x + (direction ? 0 : 1);
    var _y = (int)position.y;
    // Walk outward from the character, column by column, in the facing direction.
    for (int x = _x; direction ? (x <= _x + XValidationDistance) : (x >= _x - XValidationDistance); x = direction ? (x + 1) : (x - 1))
    {
        for (int y = data[x].Count - 1; y >= 0; y--)
        {
            var realY = Mathf.Abs(y - 10);
            var trap = data[x][y].Trap;
            if (trap.HasValue && !forceToJump)
            {
                if (trap == 6)
                {
                    // _x + 1 — the 1 is how many cells before the obstacle the
                    // player jumps it (note: could be anywhere from 1 to 3).
                    if (y == Mathf.Abs(_y - 11) && x == _x + 1)
                    {
                        jumpPossibilities.Set(new Vector3(x, realY), new Vector3(0, 250, 0), 2.5f, false);
                        forceToJump = true;
                    }
                }
                // Dodge projectiles (fireballs / arrows) flying toward the
                // character at roughly its height.
                var shootable = GameObject.FindGameObjectsWithTag("FireBall").ToList();
                shootable.AddRange(GameObject.FindGameObjectsWithTag("flecha"));
                shootable = shootable.Where(c => c.transform.position.y >= _y - .5f && c.transform.position.y <= _y + .5f && ((c.transform.position.x >= transform.position.x - 1.5 && c.transform.localScale.x > 0) || (c.transform.position.x <= transform.position.x + 1.5 && c.transform.localScale.x < 0))).ToList();
                if (shootable.Count() > 0)
                {
                    jumpPossibilities.Set(new Vector3(x, realY), new Vector3(0, 250, 0), 1.5f, false);
                    forceToJump = true;
                }
            }
            // While backtracking, turn around once past the last recorded possibility.
            if (lastPossibility.PlayerPosition.HasValue && backing && lastPossibility.PlayerPosition.Value.x > _x)
            {
                flip = true;
                flipX = x;
                backing = false;
            }
            // Forward jump (over a gap ahead at foot level).
            if (GetValue(x, y) == 0 && x == NextXElement(_x, 1) && y == Mathf.Abs(_y - 11) && !GroundInOrBelow(y, x, y - 2, false) && GetValue(PrevXElement(x, 1), y) != 0)
            {
                // Measure the width of the gap, up to MaxXToJump + 1 cells.
                var XdistanceToJump = 0;
                for (var __x = 1; __x <= MaxXToJump + 1; __x++)
                {
                    if (!GroundInOrBelow(Mathf.Abs(_y - 11), NextXElement(_x, __x), Mathf.Abs(_y - 11 - (_y > 2 ? 2 : 0))) && (GetValue(NextXElement(_x, __x), Mathf.Abs(_y - 10) - 1) == 0))
                    {
                        XdistanceToJump++;
                    }
                    else
                    {
                        break;
                    }
                }
                if (XdistanceToJump != 0)
                {
                    print(XdistanceToJump);
                    // Gap wider than the character can jump.
                    if (XdistanceToJump > MaxXToJump)
                    {
                        if (!CanGoStraightInBelow(_x, Mathf.Abs(_y - 10)) && !GroundInOrBelow(Mathf.Abs(_y - 10), x))
                        {
                            flip = true;
                            flipX = x;
                        }
                    }
                    // If the jump is possible, check there is no block overhead.
                    else if (GetValue(NextXElement(_x, XdistanceToJump + 1), Mathf.Abs(_y - 10) - 1) == 0)
                    {
                        jumpPossibilities.Set(new Vector3(x, realY), new Vector3(0, 250, 0), XdistanceToJump * 1.5f);
                    }
                    // Cannot jump and there is not enough room to pass — turn around.
                    else if (!CanGoStraightInBelow(_x, Mathf.Abs(_y - 10)) && !GroundInOrBelow(Mathf.Abs(_y - 10), x))
                    {
                        flip = true;
                    }
                }
            }
            // Solid block ahead with free space just before it.
            if (GetValue(x, y) != 0 && GetValue(x, y) != 2 && (GetValue(PrevXElement(x, 1), y) == 0 || GetValue(PrevXElement(x, 1), y) == 2))
            {
                // Blockage directly ahead at the character's height.
                if (realY == _y)
                {
                    var canPassBelow = CanPassBelow(_x, y, x);
                    if (BlockedGrounds(null, x, y) > MaxYToJump && !canPassBelow)
                    {
                        flip = true;
                        flipX = x;
                    }
                    else if (lastPossibility.PlayerPosition.HasValue && lastPossibility.PlayerPosition.Value.x == _x)
                    {
                        int height = (int)lastPossibility.GroundPosition.Value.y - _y;
                        jumpPossibilities.Set(new Vector3(x, Mathf.Abs(y - 10)), new Vector3(0, 350 + (50 * (height - 1)), 0), XDistanceToJump, true);
                    }
                    else if (!canPassBelow)
                    {
                        forceToJump = true;
                    }
                }
                // Jump onto blocks above:
                // there is a block above and ahead, within the configured distance.
                if (x == _x + XDistanceToJump && realY > _y && (realY - _y <= MaxYToJump))
                {
                    // Skip if there is a block (or trap) directly above the landing block.
                    if (GetValue(x, y - 1) != 0 || data[x][y - 1].Trap.HasValue)
                    {
                        continue;
                    }
                    // A block overhead along the way would block the jump.
                    for (var i = 0; i < XDistanceToJump; i++)
                    {
                        if (GroundInOrBelow(Mathf.Abs(_y - 8), _x + i, (y - 1 < 0 ? 0 : y - 1), false))
                        {
                            nextX = true;
                            break;
                        }
                    }
                    if (nextX)
                    {
                        nextX = false;
                        break;
                    }
                    // Jump force scales with how much higher the landing block is.
                    int height = Mathf.Abs(y - 10) - _y;
                    jumpPossibilities.Set(new Vector3(x, Mathf.Abs(y - 10)), new Vector3(0, 350 + (50 * (height - 1)), 0), XDistanceToJump, true);
                }
            }
        }
    }
    var canGoStraight = CanGoStraight(_x, Mathf.Abs(_y - 10));
    // When turning around, keep only upward jumps.
    if (flip)
    {
        jumpPossibilities = jumpPossibilities.Where(x => x.Up).ToList();
    }
    // When the path ahead is clear, drop possibilities already visited.
    if (canGoStraight == PossibleWaysToGoStraight.Straight && !flip)
    {
        jumpPossibilities = jumpPossibilities.Where(x => !walked.Contains(new Vector3(x.Position.x, x.Position.y))).ToList();
        backing = false;
    }
    if (jumpPossibilities.Count > 0)
    {
        // Remember the first candidate so we can come back if the jump is skipped.
        if (canGoStraight != PossibleWaysToGoStraight.Cant && !flip && !forceToJump)
        {
            lastPossibility.PlayerPosition = new Vector3(_x, _y, 0);
            lastPossibility.GroundPosition = new Vector3(jumpPossibilities.First().Position.x, jumpPossibilities.First().Position.y, 0);
            lastPossibility.Jumped = false;
            lastPossibility.Direction = direction;
        }
        // Jump when forced/flipped/blocked, or randomly (JumpProblability %),
        // and only while not already moving upward.
        if ((Random.Range(0, 100) <= JumpProblability || flip || forceToJump || canGoStraight == PossibleWaysToGoStraight.Cant) && Rigidbody.velocity.y < .2f)
        {
            lastPossibility.Jumped = true;
            var i = Random.Range(0, jumpPossibilities.Count);
            Rigidbody.AddForce(jumpPossibilities[i].Force);
            jumpSpeed = jumpPossibilities[i].JumpSpeed;
            animator.SetBool("moving", false);
            animator.SetBool("jump", true);
        }
        else if (canGoStraight == PossibleWaysToGoStraight.StraightJump && Rigidbody.velocity.y < .2f)
        {
            // Forward-only jumps when walking straight is also an option.
            jumpPossibilities = jumpPossibilities.Where(x => !x.Up).ToList();
            var i = Random.Range(0, jumpPossibilities.Count);
            if (jumpPossibilities.Count > 0)
            {
                Rigidbody.AddForce(jumpPossibilities[i].Force);
                jumpSpeed = jumpPossibilities[i].JumpSpeed;
                animator.SetBool("moving", false);
                animator.SetBool("jump", true);
            }
        }
        forceToJump = false;
        backing = false;
    }
    else if (flip)
    {
        // Nothing to jump to: turn around (and flag backtracking if needed).
        if (backing)
        {
            _backing = true;
        }
        Flip();
        forceToJump = lastPossibility.Jumped ? false : true;
    }
    //isStucked = false;
    flip = false;
}
/// <summary>
/// Serialize state descriptors, edge descriptors, and decision→state map
/// into list of ints:
/// grammar-type, (ANTLRParser.LEXER, ...)
/// max token type,
/// num states,
/// state-0-type ruleIndex, state-1-type ruleIndex, ...
/// </summary>
/// <remarks>
/// Serialize state descriptors, edge descriptors, and decision→state map
/// into list of ints:
/// grammar-type, (ANTLRParser.LEXER, ...)
/// max token type,
/// num states,
/// state-0-type ruleIndex, state-1-type ruleIndex, ... state-i-type ruleIndex optional-arg ...
/// num rules,
/// rule-1-start-state rule-1-args, rule-2-start-state rule-2-args, ...
/// (args are token type,actionIndex in lexer else 0,0)
/// num modes,
/// mode-0-start-state, mode-1-start-state, ... (parser has 0 modes)
/// num sets
/// set-0-interval-count intervals, set-1-interval-count intervals, ...
/// num total edges,
/// src, trg, edge-type, edge arg1, optional edge arg2 (present always), ...
/// num decisions,
/// decision-0-start-state, decision-1-start-state, ...
/// Convenient to pack into unsigned shorts to make as Java string.
/// </remarks>
/// <returns>The serialized ATN as a list of ints, offset for unsigned-short packing.</returns>
public virtual List<int> Serialize()
{
    List<int> data = new List<int>();
    // Header: serialization format version + UUID identifying the layout.
    data.Add(ATNDeserializer.SerializedVersion);
    SerializeUUID(data, ATNDeserializer.SerializedUuid);
    // convert grammar type to ATN const to avoid dependence on ANTLRParser
    data.Add((int)(atn.grammarType));
    data.Add(atn.maxTokenType);
    int nedges = 0;
    // Sets are interned: each distinct IntervalSet is serialized once and
    // referenced by index from set/not-set transitions.
    IDictionary<IntervalSet, int> setIndices = new Dictionary<IntervalSet, int>();
    IList<IntervalSet> sets = new List<IntervalSet>();
    // dump states, count edges and collect sets while doing so
    List<int> nonGreedyStates = new List<int>();
    List<int> sllStates = new List<int>();
    List<int> precedenceStates = new List<int>();
    data.Add(atn.states.Count);
    foreach (ATNState s in atn.states)
    {
        if (s == null)
        {
            // might be optimized away
            data.Add((int)(StateType.InvalidType));
            continue;
        }
        StateType stateType = s.StateType;
        if (s is DecisionState)
        {
            DecisionState decisionState = (DecisionState)s;
            if (decisionState.nonGreedy)
            {
                nonGreedyStates.Add(s.stateNumber);
            }
            if (decisionState.sll)
            {
                sllStates.Add(s.stateNumber);
            }
        }
        if (s is RuleStartState && ((RuleStartState)s).isPrecedenceRule)
        {
            precedenceStates.Add(s.stateNumber);
        }
        data.Add((int)(stateType));
        if (s.ruleIndex == -1)
        {
            // -1 does not survive the unsigned-short packing; 0xFFFF is the sentinel.
            data.Add(char.MaxValue);
        }
        else
        {
            data.Add(s.ruleIndex);
        }
        if (s.StateType == StateType.LoopEnd)
        {
            data.Add(((LoopEndState)s).loopBackState.stateNumber);
        }
        else
        {
            if (s is BlockStartState)
            {
                data.Add(((BlockStartState)s).endState.stateNumber);
            }
        }
        if (s.StateType != StateType.RuleStop)
        {
            // the deserializer can trivially derive these edges, so there's no need to serialize them
            nedges += s.NumberOfTransitions;
        }
        for (int i = 0; i < s.NumberOfTransitions; i++)
        {
            Transition t = s.Transition(i);
            TransitionType edgeType = Transition.serializationTypes.Get(t.GetType());
            if (edgeType == TransitionType.Set || edgeType == TransitionType.NotSet)
            {
                SetTransition st = (SetTransition)t;
                if (!setIndices.ContainsKey(st.set))
                {
                    sets.Add(st.set);
                    setIndices[st.set] = sets.Count - 1;
                }
            }
        }
    }
    // non-greedy states
    data.Add(nonGreedyStates.Count);
    for (int i_1 = 0; i_1 < nonGreedyStates.Count; i_1++)
    {
        data.Add(nonGreedyStates[i_1]);
    }
    // SLL decisions
    data.Add(sllStates.Count);
    for (int i_2 = 0; i_2 < sllStates.Count; i_2++)
    {
        data.Add(sllStates[i_2]);
    }
    // precedence states
    data.Add(precedenceStates.Count);
    for (int i_3 = 0; i_3 < precedenceStates.Count; i_3++)
    {
        data.Add(precedenceStates[i_3]);
    }
    int nrules = atn.ruleToStartState.Length;
    data.Add(nrules);
    for (int r = 0; r < nrules; r++)
    {
        ATNState ruleStartState = atn.ruleToStartState[r];
        data.Add(ruleStartState.stateNumber);
        bool leftFactored = ruleNames[ruleStartState.ruleIndex].IndexOf(ATNSimulator.RuleVariantDelimiter) >= 0;
        data.Add(leftFactored ? 1 : 0);
        if (atn.grammarType == ATNType.Lexer)
        {
            if (atn.ruleToTokenType[r] == TokenConstants.Eof)
            {
                data.Add(char.MaxValue);
            }
            else
            {
                data.Add(atn.ruleToTokenType[r]);
            }
        }
    }
    int nmodes = atn.modeToStartState.Count;
    data.Add(nmodes);
    if (nmodes > 0)
    {
        foreach (ATNState modeStartState in atn.modeToStartState)
        {
            data.Add(modeStartState.stateNumber);
        }
    }
    int nsets = sets.Count;
    data.Add(nsets);
    foreach (IntervalSet set in sets)
    {
        // EOF (-1) cannot be stored directly; it is flagged separately and,
        // when it is the leading interval, dropped from the interval count.
        bool containsEof = set.Contains(TokenConstants.Eof);
        if (containsEof && set.GetIntervals()[0].b == TokenConstants.Eof)
        {
            data.Add(set.GetIntervals().Count - 1);
        }
        else
        {
            data.Add(set.GetIntervals().Count);
        }
        data.Add(containsEof ? 1 : 0);
        foreach (Interval I in set.GetIntervals())
        {
            if (I.a == TokenConstants.Eof)
            {
                if (I.b == TokenConstants.Eof)
                {
                    continue;
                }
                else
                {
                    data.Add(0);
                }
            }
            else
            {
                data.Add(I.a);
            }
            data.Add(I.b);
        }
    }
    data.Add(nedges);
    foreach (ATNState s_1 in atn.states)
    {
        if (s_1 == null)
        {
            // might be optimized away
            continue;
        }
        if (s_1.StateType == StateType.RuleStop)
        {
            continue;
        }
        // FIX: this loop previously read "for (int i = 0; i_3 < ...; i_3++)",
        // referencing a counter that is out of scope here; the intended index
        // is a fresh counter over this state's transitions.
        for (int ti = 0; ti < s_1.NumberOfTransitions; ti++)
        {
            Transition t = s_1.Transition(ti);
            if (atn.states[t.target.stateNumber] == null)
            {
                throw new InvalidOperationException("Cannot serialize a transition to a removed state.");
            }
            int src = s_1.stateNumber;
            int trg = t.target.stateNumber;
            TransitionType edgeType = Transition.serializationTypes.Get(t.GetType());
            int arg1 = 0;
            int arg2 = 0;
            int arg3 = 0;
            switch (edgeType)
            {
                case TransitionType.Rule:
                {
                    // Rule edges store follow-state as target; real target and
                    // rule index/precedence travel in the args.
                    trg = ((RuleTransition)t).followState.stateNumber;
                    arg1 = ((RuleTransition)t).target.stateNumber;
                    arg2 = ((RuleTransition)t).ruleIndex;
                    arg3 = ((RuleTransition)t).precedence;
                    break;
                }
                case TransitionType.Precedence:
                {
                    PrecedencePredicateTransition ppt = (PrecedencePredicateTransition)t;
                    arg1 = ppt.precedence;
                    break;
                }
                case TransitionType.Predicate:
                {
                    PredicateTransition pt = (PredicateTransition)t;
                    arg1 = pt.ruleIndex;
                    arg2 = pt.predIndex;
                    arg3 = pt.isCtxDependent ? 1 : 0;
                    break;
                }
                case TransitionType.Range:
                {
                    arg1 = ((RangeTransition)t).from;
                    arg2 = ((RangeTransition)t).to;
                    if (arg1 == TokenConstants.Eof)
                    {
                        // EOF (-1) encoded as 0 with arg3 flagging it.
                        arg1 = 0;
                        arg3 = 1;
                    }
                    break;
                }
                case TransitionType.Atom:
                {
                    arg1 = ((AtomTransition)t).label;
                    if (arg1 == TokenConstants.Eof)
                    {
                        arg1 = 0;
                        arg3 = 1;
                    }
                    break;
                }
                case TransitionType.Action:
                {
                    ActionTransition at = (ActionTransition)t;
                    arg1 = at.ruleIndex;
                    arg2 = at.actionIndex;
                    if (arg2 == -1)
                    {
                        arg2 = unchecked((int)(0xFFFF));
                    }
                    arg3 = at.isCtxDependent ? 1 : 0;
                    break;
                }
                case TransitionType.Set:
                {
                    arg1 = setIndices[((SetTransition)t).set];
                    break;
                }
                case TransitionType.NotSet:
                {
                    arg1 = setIndices[((SetTransition)t).set];
                    break;
                }
                case TransitionType.Wildcard:
                {
                    break;
                }
            }
            data.Add(src);
            data.Add(trg);
            data.Add((int)(edgeType));
            data.Add(arg1);
            data.Add(arg2);
            data.Add(arg3);
        }
    }
    int ndecisions = atn.decisionToState.Count;
    data.Add(ndecisions);
    foreach (DecisionState decStartState in atn.decisionToState)
    {
        data.Add(decStartState.stateNumber);
    }
    //
    // LEXER ACTIONS
    //
    if (atn.grammarType == ATNType.Lexer)
    {
        data.Add(atn.lexerActions.Length);
        foreach (ILexerAction action in atn.lexerActions)
        {
            data.Add((int)(action.GetActionType()));
            switch (action.GetActionType())
            {
                case LexerActionType.Channel:
                {
                    int channel = ((LexerChannelAction)action).GetChannel();
                    data.Add(channel != -1 ? channel : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                case LexerActionType.Custom:
                {
                    int ruleIndex = ((LexerCustomAction)action).GetRuleIndex();
                    int actionIndex = ((LexerCustomAction)action).GetActionIndex();
                    data.Add(ruleIndex != -1 ? ruleIndex : unchecked((int)(0xFFFF)));
                    data.Add(actionIndex != -1 ? actionIndex : unchecked((int)(0xFFFF)));
                    break;
                }
                case LexerActionType.Mode:
                {
                    int mode = ((LexerModeAction)action).GetMode();
                    data.Add(mode != -1 ? mode : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                case LexerActionType.More:
                {
                    data.Add(0);
                    data.Add(0);
                    break;
                }
                case LexerActionType.PopMode:
                {
                    data.Add(0);
                    data.Add(0);
                    break;
                }
                case LexerActionType.PushMode:
                {
                    // FIX: 'mode' was assigned here without a declaration (the
                    // earlier declaration is scoped to a sibling case block).
                    int mode = ((LexerPushModeAction)action).GetMode();
                    data.Add(mode != -1 ? mode : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                case LexerActionType.Skip:
                {
                    data.Add(0);
                    data.Add(0);
                    break;
                }
                case LexerActionType.Type:
                {
                    int type = ((LexerTypeAction)action).GetType();
                    data.Add(type != -1 ? type : unchecked((int)(0xFFFF)));
                    data.Add(0);
                    break;
                }
                default:
                {
                    string message = string.Format(CultureInfo.CurrentCulture, "The specified lexer action type {0} is not valid.", action.GetActionType());
                    throw new ArgumentException(message);
                }
            }
        }
    }
    // don't adjust the first value since that's the version number
    for (int i_4 = 1; i_4 < data.Count; i_4++)
    {
        if (data[i_4] < char.MinValue || data[i_4] > char.MaxValue)
        {
            throw new NotSupportedException("Serialized ATN data element out of range.");
        }
        // Shift by +2 and wrap to 16 bits so the stream packs into a string of
        // unsigned shorts (the deserializer undoes this offset).
        int value = (data[i_4] + 2) & unchecked((int)(0xFFFF));
        data[i_4] = value;
    }
    return data;
}
/// <summary>Returns the lexicon-based segmentation following heuristic h.</summary>
/// <remarks>
/// Returns the lexicon-based segmentation following heuristic h.
/// Note that buildSegmentationLattice must be run first.
/// Two heuristics are currently available -- MINWORDS and MAXWORDS --
/// to respectively minimize and maximize the number of segment
/// (where each segment is a lexicon word, if possible).
/// </remarks>
/// <param name="h">Heuristic to use for segmentation.</param>
/// <returns>Segmented sentence.</returns>
/// <exception cref="System.NotSupportedException"/>
/// <seealso cref="BuildSegmentationLattice(string)"/>
public virtual List<Word> SegmentWords(MaxMatchSegmenter.MatchHeuristic h)
{
    if (lattice == null || len < 0)
    {
        // FIX: the message previously said "segmentWords must be run first",
        // but it is the lattice-building step that is missing (see remarks).
        throw new NotSupportedException("buildSegmentationLattice must be run first");
    }
    IList<Word> segmentedWords = new List<Word>();
    // Init dynamic programming: costs[i] = best cost to reach position i,
    // bptrs[i] = transition taken to get there.
    double[] costs = new double[len + 1];
    IList<DFSATransition<Word, int>> bptrs = new List<DFSATransition<Word, int>>();
    for (int i = 0; i < len + 1; ++i)
    {
        bptrs.Add(null);
    }
    costs[0] = 0.0;
    for (int i_1 = 1; i_1 <= len; ++i_1)
    {
        costs[i_1] = double.MaxValue;
    }
    // DP: relax every lattice transition, left to right.
    for (int start = 0; start < len; ++start)
    {
        DFSAState<Word, int> fromState = states[start];
        ICollection<DFSATransition<Word, int>> trs = fromState.Transitions();
        foreach (DFSATransition<Word, int> tr in trs)
        {
            DFSAState<Word, int> toState = tr.GetTarget();
            double lcost = tr.Score();
            int end = toState.StateID();
            if (h == MaxMatchSegmenter.MatchHeuristic.Minwords)
            {
                // Minimize number of words.
                // NOTE(review): the relaxation test uses "+ 1" while the stored
                // cost uses lcost; this mirrors the upstream implementation.
                if (costs[start] + 1 < costs[end])
                {
                    costs[end] = costs[start] + lcost;
                    bptrs[end] = tr;
                }
            }
            else
            {
                if (h == MaxMatchSegmenter.MatchHeuristic.Maxwords)
                {
                    // Maximize number of words (negated cost).
                    if (costs[start] + 1 < costs[end])
                    {
                        costs[end] = costs[start] - lcost;
                        bptrs[end] = tr;
                    }
                }
                else
                {
                    throw new NotSupportedException("unimplemented heuristic");
                }
            }
        }
    }
    // Extract min-cost path by walking back-pointers from the end,
    // prepending each word (spaces are dropped):
    int i_2 = len;
    while (i_2 > 0)
    {
        DFSATransition<Word, int> tr = bptrs[i_2];
        DFSAState<Word, int> fromState = tr.GetSource();
        Word word = tr.GetInput();
        if (!word.Word().Equals(" "))
        {
            segmentedWords.Insert(0, word);
        }
        i_2 = fromState.StateID();
    }
    // Print lattice density ([1,+inf[) : if equal to 1, it means
    // there is only one segmentation using words of the lexicon.
    return new List<Word>(segmentedWords);
}
/// <summary>
/// Fetch–decode–execute loop for this stack-based VM: runs instructions from
/// 'program' until PC walks past the last one, then reports success.
/// Registers: PC = program counter, AX = accumulator, SP = data-stack pointer.
/// datStack/datHeap hold integers; strStack holds strings addressed by index
/// (AX carries a string index for the STR* instructions).
/// </summary>
public bool Run()
{
    while (PC < program.Count)
    {
        if (debug)
        {
            // Trace VM state before each instruction while debugging.
            this.ShowProgram();
            this.ShowHeap();
            this.ShowStack();
        }
        switch (program[PC])
        {
            // No operation.
            case (int)IS.NOP: ++PC; break;
            // AX = immediate operand.
            case (int)IS.IMOV: ++PC; AX = program[PC]; ++PC; break;
            // Push AX onto the data stack.
            case (int)IS.PUSH: datStack.Set(SP++, AX); ++PC; break;
            // Push immediate operand.
            case (int)IS.IPUSH: ++PC; datStack.Set(SP++, program[PC]); ++PC; break;
            // AX = top of stack (no pop).
            case (int)IS.TOP: AX = datStack[SP - 1]; ++PC; break;
            // Discard top of stack.
            case (int)IS.POP: --SP; ++PC; break;
            // Load from the stack at an offset taken from the top of the stack.
            case (int)IS.LFS: AX = datStack[SP - 1 + datStack[SP - 1]]; ++PC; break;
            // Store AX into the stack at an offset taken from the top of the stack.
            case (int)IS.STS: datStack.Set(SP - 1 + datStack[SP - 1], AX); ++PC; break;
            // Load from the heap at the address on top of the stack.
            case (int)IS.LFH: AX = datHeap.Get(datStack[SP - 1]); ++PC; break;
            // Store AX into the heap at the address on top of the stack.
            case (int)IS.STH: datHeap.Set(datStack[SP - 1], AX); ++PC; break;
            // Unconditional jump to operand address.
            case (int)IS.JMP: ++PC; PC = program[PC]; break;
            // Jump to operand if AX == 0, else fall through past the operand.
            case (int)IS.JZ: ++PC; PC = AX == 0 ? program[PC] : PC + 1; break;
            // Jump to operand if AX != 0, else fall through past the operand.
            case (int)IS.JNZ: ++PC; PC = AX != 0 ? program[PC] : PC + 1; break;
            // Call: push return address (instruction after the operand), jump to operand.
            case (int)IS.CALL: ++PC; datStack.Set(SP++, PC + 1); PC = program[PC]; break;
            // Indirect call: target address popped from the stack, return address pushed.
            case (int)IS.CALLS: ++PC; int addr = datStack[--SP]; datStack.Set(SP++, PC); PC = addr; break;
            // Return: PC = popped return address.
            case (int)IS.RET: PC = datStack[--SP]; break;
            // Arithmetic: top-of-stack op= AX.
            case (int)IS.ADD: datStack[SP - 1] = datStack[SP - 1] + AX; ++PC; break;
            case (int)IS.SUB: datStack[SP - 1] = datStack[SP - 1] - AX; ++PC; break;
            // Push -AX.
            case (int)IS.NEG: datStack.Set(SP++, -AX); ++PC; break;
            case (int)IS.MUL: datStack[SP - 1] = datStack[SP - 1] * AX; ++PC; break;
            case (int)IS.DIV: datStack[SP - 1] = datStack[SP - 1] / AX; ++PC; break;
            case (int)IS.MOD: datStack[SP - 1] = datStack[SP - 1] % AX; ++PC; break;
            case (int)IS.POW: datStack[SP - 1] = LuaVMHelper.Pow(datStack[SP - 1], AX); ++PC; break;
            // Logical ops treat nonzero as true and write 0/1 to top-of-stack.
            case (int)IS.AND: datStack[SP - 1] = (datStack[SP - 1] != 0 && AX != 0) ? 1 : 0; ++PC; break;
            case (int)IS.OR: datStack[SP - 1] = (datStack[SP - 1] != 0 || AX != 0) ? 1 : 0; ++PC; break;
            // NOT pushes the logical negation of AX.
            case (int)IS.NOT: datStack.Set(SP, (AX != 0) ? 0 : 1); ++SP; ++PC; break;
            // Comparisons replace top-of-stack with 0/1 (compared against AX).
            case (int)IS.EQ: datStack[SP - 1] = (datStack[SP - 1] == AX) ? 1 : 0;; ++PC; break;
            case (int)IS.NEQ: datStack[SP - 1] = (datStack[SP - 1] != AX) ? 1 : 0;; ++PC; break;
            case (int)IS.LT: datStack[SP - 1] = (datStack[SP - 1] < AX) ? 1 : 0;; ++PC; break;
            case (int)IS.LE: datStack[SP - 1] = (datStack[SP - 1] <= AX) ? 1 : 0;; ++PC; break;
            case (int)IS.GT: datStack[SP - 1] = (datStack[SP - 1] > AX) ? 1 : 0;; ++PC; break;
            case (int)IS.GE: datStack[SP - 1] = (datStack[SP - 1] >= AX) ? 1 : 0;; ++PC; break;
            // Substring of string AX; start/length popped (two stack slots);
            // result appended to strStack, AX = its index.
            case (int)IS.STRSUB: strStack.Add(strStack[AX].Substring(datStack[SP - 2], datStack[SP - 1])); SP -= 2; AX = strStack.Count - 1; ++PC; break;
            // Concatenate: string AX += string whose index is popped.
            case (int)IS.STRCON: strStack[AX] = strStack[AX] + strStack[datStack[--SP]]; ++PC; break;
            // AX = length of string AX.
            case (int)IS.STRLEN: AX = strStack[AX].Length; ++PC; break;
            // Duplicate string AX; AX = index of the copy.
            case (int)IS.STRCPY: strStack.Add(strStack[AX]); AX = strStack.Count - 1; ++PC; break;
            // Append a character (popped from the stack) to string AX.
            case (int)IS.STRADD: strStack[AX] += (char)datStack[--SP]; ++PC; break;
            // Compare the popped-index string with string AX; AX = result.
            case (int)IS.STRCMP: AX = LuaVMHelper.StrCmp(strStack[datStack[--SP]], strStack[AX]); ++PC; break;
            // Format AX as a decimal string; AX = index of the new string.
            case (int)IS.STRFMT: strStack.Add(AX.ToString()); AX = strStack.Count - 1; ++PC; break;
            // Print string AX to the console.
            case (int)IS.PRINT: Console.WriteLine(strStack[AX]); ++PC; break;
        }
        if (debug)
        {
            // Single-step while debugging.
            Console.ReadKey();
        }
    }
    return(true);
}
/// <summary>
/// Explicit <see cref="System.Collections.Generic.IList{T}"/> indexer:
/// reads and writes the element at <paramref name="index"/> by delegating
/// to the backing list.
/// </summary>
T System.Collections.Generic.IList<T>.this[int index]
{
    get => List.Get(index);
    set => List.Set(index, value);
}
/// <summary>Parses the _revisions dict from a document into an array of revision ID strings.</summary>
/// <param name="docProperties">Document properties containing a CouchDB-style "_revisions" entry.</param>
/// <returns>
/// Revision IDs in "generation-id" form, newest first; empty when the
/// "_revisions" dict or its "ids" list is missing or empty.
/// </returns>
internal static IList<string> ParseCouchDBRevisionHistory(IDictionary<String, Object> docProperties)
{
    // "_revisions" holds { "start": <generation of ids[0]>, "ids": [ ... ] }.
    var revisionsDict = docProperties.Get("_revisions").AsDictionary<string, object>();
    if (revisionsDict == null)
    {
        return new List<string>();
    }

    var idList = revisionsDict["ids"].AsList<string>();
    if (idList == null || idList.Count == 0)
    {
        return new List<string>();
    }

    // Prefix each bare ID with its generation, counting down from "start".
    var history = new List<string>(idList);
    var generation = Convert.ToInt64(revisionsDict.Get("start"));
    for (var index = 0; index < history.Count; index++)
    {
        var bareId = history[index];
        history.Set(index, Sharpen.Extensions.ToString(generation--) + "-" + bareId);
    }

    return history;
}
/// <summary>
/// Delete all elements out of the list, where predicate matches
/// </summary>
/// <param name="list">Source list (modified in place)</param>
/// <param name="predicate">predicate that has to match for an element to be removed</param>
/// <typeparam name="T">Type of list entries</typeparam>
public static void DeleteWhere <T>(this List <T> list, Func <T, bool> predicate)
{
    // List<T>.RemoveAll filters in place, avoiding the extra copy
    // (Where(...).ToList()) and the custom Set(...) extension the original
    // implementation relied on; the surviving elements and their order are
    // identical.
    list.RemoveAll(element => predicate(element));
}