// Decompiled-from-Java JUnit constructor.
// Chains to the base Exception constructor (`base.\u002Ector` is decompiler
// output for the base-constructor call), passing `value` as the inner
// exception when it already is an Exception, otherwise null; then stores the
// offending value and the matcher that rejected it in fValue / fMatcher.
// NOTE(review): the `violatedException` local is an unused decompiler artifact.
public AssumptionViolatedException(object value, Matcher matcher) { base.\u002Ector(!(value is Exception) ? (Exception) null : (Exception) value); AssumptionViolatedException violatedException = this; this.fValue = value; this.fMatcher = matcher; }
/// <summary>
/// Verifies that excluding "obj", "bin" and dot-prefixed entries still leaves
/// every remaining file under src/project matched by the recursive include.
/// </summary>
public void FolderExclude()
{
    var globber = new Matcher();
    globber.AddInclude(@"**/*.*");
    foreach (var excludePattern in new[] { @"obj", @"bin", @".*" })
    {
        globber.AddExclude(excludePattern);
    }

    ExecuteAndVerify(
        globber,
        @"src/project",
        "src/project/source1.cs",
        "src/project/sub/source2.cs",
        "src/project/sub/source3.cs",
        "src/project/sub2/source4.cs",
        "src/project/sub2/source5.cs",
        "src/project/compiler/preprocess/preprocess-source1.cs",
        "src/project/compiler/preprocess/sub/preprocess-source2.cs",
        "src/project/compiler/preprocess/sub/sub/preprocess-source3.cs",
        "src/project/compiler/preprocess/sub/sub/preprocess-source3.txt",
        "src/project/compiler/shared/shared1.cs",
        "src/project/compiler/shared/shared1.txt",
        "src/project/compiler/shared/sub/shared2.cs",
        "src/project/compiler/shared/sub/shared2.txt",
        "src/project/compiler/shared/sub/sub/sharedsub.cs",
        "src/project/compiler/resources/resource.res",
        "src/project/compiler/resources/sub/resource2.res",
        "src/project/compiler/resources/sub/sub/resource3.res",
        "src/project/content1.txt");
}
/// <summary>
/// Verifies a matcher built with OrdinalIgnoreCase matches the include pattern
/// regardless of casing; expected files are supplied relative to root.
/// </summary>
public void IncludeCaseInsensitive(string root, string includePattern, string[] expectedFiles)
{
    var caseInsensitiveMatcher = new Matcher(StringComparison.OrdinalIgnoreCase);
    caseInsensitiveMatcher.AddInclude(includePattern);

    var expectedFullPaths = expectedFiles.Select(f => root + "/" + f).ToArray();
    ExecuteAndVerify(caseInsensitiveMatcher, root, expectedFullPaths);
}
/// <summary>
/// Create a new <seealso cref="PatternKeywordMarkerFilter"/>, that marks the current
/// token as a keyword if the tokens term buffer matches the provided
/// <seealso cref="Pattern"/> via the <seealso cref="KeywordAttribute"/>.
/// </summary>
/// <param name="in"> TokenStream to filter </param>
/// <param name="pattern"> the pattern to apply to the incoming term buffer </param>
public PatternKeywordMarkerFilter(TokenStream @in, Pattern pattern)
    : base(@in)
{
    termAtt = AddAttribute<ICharTermAttribute>();
    // Create one reusable matcher seeded with an empty string; presumably the
    // term buffer is swapped in per token (Java Pattern/Matcher port) — confirm
    // against the filter's token loop.
    this.matcher = pattern.matcher("");
}
/// <summary>
/// Asserts that <paramref name="actual"/> satisfies <paramref name="matcher"/>;
/// throws an <see cref="ExpectationException"/> carrying the matcher's failure
/// message when it does not.
/// </summary>
public static void That(object actual, Matcher matcher)
{
    if (matcher.Match(actual))
    {
        return;
    }

    throw new ExpectationException(matcher.FailureMessage);
}
/// <summary>
/// A regex case with a capture group returns the captured text ("123").
/// </summary>
public void RegexWithGroupMatchTest()
{
    var digitsAfterThis = Case.Rx(new Regex("this\\s(\\d{3})"));
    var matchFunc = new Matcher<string, string> { { digitsAfterThis, s => s } }.ToFunc();

    Assert.Equal("123", matchFunc("this 123"));
}
/// <summary>
/// With no files on disk, an include pattern yields an empty result set.
/// </summary>
public void EmptyCollectionWhenNoFilesPresent()
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include("alpha.txt")
        .Execute();

    result.AssertExact();
}
/// <summary>
/// Adding two equivalent include patterns must not duplicate matches.
/// </summary>
public void DuplicatePatterns(string pattern1, string pattern2)
{
    var globber = new Matcher();
    globber.AddInclude(pattern1);
    globber.AddInclude(pattern2);

    ExecuteAndVerify(globber, @"src/project", "src/project/sub/source2.cs");
}
// Decompiled JUnit helper: when the matcher rejects the actual value, suppress
// stack-trace fill-in and throw an AssumptionViolatedException; otherwise no-op.
public static void assumeThat(object actual, Matcher matcher)
{
    if (matcher.matches(actual))
    {
        return;
    }

    Throwable.__\u003CsuppressFillInStackTrace\u003E();
    throw new AssumptionViolatedException(actual, matcher);
}
/// <summary>
/// A regex case of three digits maps any matching input to "Success".
/// </summary>
public void RegexCaseMatch()
{
    var matchFunc = new Matcher<string, string>
    {
        { Case.Rx(new Regex("\\d{3}")), s => "Success" }
    }.ToFunc();

    Assert.Equal("Success", matchFunc("123"));
}
/// <summary>
/// A file that does not match the include pattern is simply ignored.
/// </summary>
public void MismatchedFileIsIgnored()
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include("alpha.txt")
        .Files("omega.txt")
        .Execute();

    result.AssertExact();
}
/// <summary>
/// Forward and backward slashes in patterns and paths are interchangeable:
/// the same beta/alpha.txt match is expected either way.
/// </summary>
public void SlashPolarityIsIgnored(string includePattern, string filePath)
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include(includePattern)
        .Files("one/two.txt", filePath, "three/four.txt")
        .Execute();

    result.AssertExact("beta/alpha.txt");
}
/// <summary>
/// Data-driven check: each include pattern selects exactly the expected
/// subset of alpha.txt / beta.txt / gamma.dat.
/// </summary>
public void PatternMatchingWorks(string includePattern, string[] matchesExpected)
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include(includePattern)
        .Files("alpha.txt", "beta.txt", "gamma.dat")
        .Execute();

    result.AssertExact(matchesExpected);
}
/// <summary>
/// A pattern containing a folder segment traverses into that folder.
/// </summary>
public void FolderNamesAreTraversed()
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include("beta/alpha.txt")
        .Files("beta/alpha.txt")
        .Execute();

    result.AssertExact("beta/alpha.txt");
}
/// <summary>
/// Initializes a new instance of the <see cref="BuildableExpectation"/> class.
/// Starts invalid, with a fresh <see cref="ArgumentsMatcher"/>.
/// </summary>
/// <param name="expectationDescription">The expectation description.</param>
/// <param name="requiredCountMatcher">The required count matcher.</param>
/// <param name="matchingCountMatcher">The matching count matcher.</param>
public BuildableExpectation(string expectationDescription, Matcher requiredCountMatcher, Matcher matchingCountMatcher)
{
    IsValid = false;
    ArgumentsMatcher = new ArgumentsMatcher();

    _expectationDescription = expectationDescription;
    _requiredCountMatcher = requiredCountMatcher;
    _matchingCountMatcher = matchingCountMatcher;
}
/// <summary>
/// "..\.." climbs two levels out of src/project before recursing into lib.
/// </summary>
public void DoubleParentsWithRecursiveSearch()
{
    var globber = new Matcher();
    globber.AddInclude(@"..\..\lib\**\*.cs");

    ExecuteAndVerify(
        globber,
        @"src/project",
        "lib/source6.cs",
        "lib/sub3/source7.cs",
        "lib/sub4/source8.cs");
}
/// <summary>
/// Builds a matcher that succeeds when either <paramref name="matcher"/> or
/// this instance's wrapped matcher matches (decompiled JUnit CombinableMatcher).
/// </summary>
public virtual CombinableMatcher or(Matcher matcher)
{
    var alternatives = new Matcher[] { matcher, this.fMatcher };
    return new CombinableMatcher(CoreMatchers.anyOf(alternatives));
}
// Demo entry point: echoes input matching "test NNN" via the regex case,
// otherwise falls through to the catch-all "nope" case.
static void Main(string[] args)
{
    var matchFunc = new Matcher<string, string>
    {
        { Case.Rx(new Regex("test\\s(\\d{3})")), a => a },
        { _ => true, (string _) => "nope" }
    }.ToFunc();

    Console.WriteLine(matchFunc("test 333"));
    Console.ReadKey();
}
/// <summary>
/// Excluding a bare directory name ("bin", "one/two") prunes everything
/// beneath that directory.
/// </summary>
public void ExcludeMayEndInDirectoryName()
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include("*.cs", "*/*.cs", "*/*/*.cs")
        .Exclude("bin", "one/two")
        .Files("one/x.cs", "two/x.cs", "one/two/x.cs", "x.cs", "bin/x.cs", "bin/two/x.cs")
        .Execute();

    result.AssertExact("one/x.cs", "two/x.cs", "x.cs");
}
/// <summary>
/// Null/empty strings map to 0; any other string maps to its length.
/// </summary>
public void StringMatching()
{
    var matchFunc = new Matcher<string, int>
    {
        { s => string.IsNullOrEmpty(s), (string _) => 0 },
        { s => true, s => s.Length }
    }.ToFunc();

    Assert.Equal(0, matchFunc(null));
    Assert.Equal(4, matchFunc("test"));
}
/// <summary>
/// "**/x/**" only excludes paths where x appears as an intermediate directory;
/// files named x, or under a terminal x, survive.
/// </summary>
public void ExcludeCanHaveSurroundingRecursiveWildcards()
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include("**")
        .Exclude("**/x/**")
        .Files("x/1", "1/x/2", "1/x", "x", "1", "1/2")
        .Execute();

    result.AssertExact("1/x", "x", "1", "1/2");
}
/// <summary>
/// None maps to null; Some unwraps to its value.
/// </summary>
public void OptionTypeMatching()
{
    var matchFunc = new Matcher<Option<int>, int?>
    {
        // Predicate matching requires explicit type annotation
        { i => i is None, (Option<int> _) => null },
        { i => i as Some<int>, i => i.Value },
    }.ToFunc();

    Assert.Equal(null, matchFunc(new None<int>()));
    Assert.Equal(10, matchFunc(new Some<int>(10)));
}
/*[12]binary<operand,operator>: operand S (operator S @operand S)* ;*/
// PEG combinator for a left-to-right binary expression: one operand, then zero
// or more (operator operand) pairs, with whitespace (S) consumed after each
// token. An operator that is not followed by an operand raises a fatal
// "<<operand>> expected" parse error instead of backtracking.
public bool binary(Matcher operand, Matcher @operator) { return And(()=> operand() && S() && OptRepeat(()=> And(()=> @operator() && S() && ( operand() || Fatal("<<operand>> expected")) && S() ) ) ); }
/// <summary>
/// Cases are tried in declaration order: string, then StringBuilder, then a
/// catch-all that labels anything else "Unknown object".
/// </summary>
public void MultipleTypesMatching()
{
    var matchFunc = new Matcher<object, string>
    {
        { s => s as string, s => s },
        { sb => sb as StringBuilder, sb => sb.ToString() },
        { o => true, (bool _) => "Unknown object" }
    }.ToFunc();

    Assert.Equal("string", matchFunc("string"));
    Assert.Equal(new StringBuilder("string builder").ToString(), matchFunc(new StringBuilder("string builder")));
    Assert.Equal("Unknown object", matchFunc(100500));
}
/// <summary>
/// Sets up a recorded mock file system rooted at <paramref name="basePath"/>
/// for exercising <paramref name="matcher"/>; the root directory starts empty.
/// </summary>
public FileSystemGlobbingTestContext(string basePath, Matcher matcher)
{
    _patternMatching = matcher;
    _basePath = basePath;
    _recorder = new FileSystemOperationRecorder();
    _directoryInfo = new MockDirectoryInfo(
        recorder: _recorder,
        parentDirectory: null,
        fullName: _basePath,
        name: ".",
        paths: new string[0]);
}
/// <summary>
/// Patterns may climb out of the working directory ("../sibling"); excludes
/// apply both recursively (exc/**) and to individual files (inc/2.txt).
/// </summary>
public void ExcludeFolderRecursively()
{
    var globber = new Matcher();

    var result = new FileSystemGlobbingTestContext(@"c:\files\", globber)
        .Include("*.*")
        .Include("../sibling/**/*.*")
        .Exclude("../sibling/exc/**/*.*")
        .Exclude("../sibling/inc/2.txt")
        .Files("main/1.txt", "main/2.txt", "sibling/1.txt", "sibling/inc/1.txt", "sibling/inc/2.txt", "sibling/exc/1.txt", "sibling/exc/2.txt")
        .SubDirectory("main")
        .Execute();

    result.AssertExact("1.txt", "2.txt", "../sibling/1.txt", "../sibling/inc/1.txt");
}
/// <summary>
/// Exercises Matcher.Match on integer arrays with a clip limit and tolerance:
/// one accepted case, one rejected by tolerance, one rejected by length.
/// </summary>
public void TestMatch()
{
    var arrayMatcher = new Matcher();
    int[] expected = { 10, 50, 30, 98 };
    const int clipLimit = 100;
    const int delta = 5;

    // Accepted — values are close to expected (110 vs 98 presumably allowed
    // because of the clip limit of 100; confirm against Matcher.Match).
    Assert.IsTrue(arrayMatcher.Match(expected, new int[] { 12, 55, 25, 110 }, clipLimit, delta));

    // Rejected — 60 differs from 50 by more than delta.
    Assert.IsTrue(!arrayMatcher.Match(expected, new int[] { 10, 60, 30, 98 }, clipLimit, delta));

    // Rejected — array length mismatch.
    Assert.IsTrue(!arrayMatcher.Match(expected, new int[] { 10, 50, 30 }, clipLimit, delta));
}
/// <summary>
/// e{x} — succeeds only when <paramref name="rule"/> matches exactly
/// <paramref name="count"/> consecutive times.
/// </summary>
/// <param name="count">Required number of repetitions.</param>
/// <param name="rule">Matcher delegate applied repeatedly.</param>
/// <returns>
/// true when every repetition matched; false after restoring the input
/// position and the partially built tree to their pre-call state.
/// </returns>
public bool For(int count, Matcher rule)
{
    // Snapshot state so a partial match can be fully undone.
    PegNode prevCur = Tree.Cur;
    PegTree.AddPolicy prevPolicy = Tree.Policy;
    int pos = _pos;

    // Loop index scoped to the loop (it was previously declared at method
    // scope and leaked, unused, past the loop).
    for (int i = 0; i < count; ++i)
    {
        if (!rule())
        {
            // Backtrack: restore input position and tree build state.
            _pos = pos;
            RestoreTree(prevCur, prevPolicy);
            return false;
        }
    }
    return true;
}
/// <summary>
/// Builds the optional left/right environment matchers for a rewrite subrule.
/// A null or empty environment pattern yields no matcher for that side.
/// </summary>
/// <param name="spanFactory">Factory for shape-node spans.</param>
/// <param name="matcherSettings">Base settings; deep-cloned per environment.</param>
/// <param name="leftEnv">Left environment pattern; may be null or empty.</param>
/// <param name="rightEnv">Right environment pattern; may be null or empty.</param>
protected RewriteSubruleSpec(SpanFactory<ShapeNode> spanFactory, MatcherSettings<ShapeNode> matcherSettings, Pattern<Word, ShapeNode> leftEnv, Pattern<Word, ShapeNode> rightEnv)
{
    if (leftEnv != null && !leftEnv.IsEmpty)
    {
        // Left environment scans right-to-left, anchored to start.
        MatcherSettings<ShapeNode> leftEnvMatcherSettings = matcherSettings.DeepClone();
        leftEnvMatcherSettings.Direction = Direction.RightToLeft;
        leftEnvMatcherSettings.AnchoredToStart = true;
        _leftEnvMatcher = new Matcher<Word, ShapeNode>(spanFactory, leftEnv, leftEnvMatcherSettings);
    }
    if (rightEnv != null && !rightEnv.IsEmpty)
    {
        // Right environment scans left-to-right, also AnchoredToStart.
        // NOTE(review): with opposite directions the "start" is presumably the
        // rewrite site in both cases — confirm against Matcher anchoring
        // semantics (AnchoredToEnd might otherwise be expected here).
        MatcherSettings<ShapeNode> rightEnvMatcherSettings = matcherSettings.DeepClone();
        rightEnvMatcherSettings.Direction = Direction.LeftToRight;
        rightEnvMatcherSettings.AnchoredToStart = true;
        _rightEnvMatcher = new Matcher<Word, ShapeNode>(spanFactory, rightEnv, rightEnvMatcherSettings);
    }
}
/// <summary>
/// Records token-producer metadata for <paramref name="matcher"/> in
/// actionToTokenProducer, keyed by the matcher's action index, based on the
/// kind of its outcome symbol (none, ambiguous, or deterministic).
/// </summary>
public void RegisterAction(Matcher matcher)
{
    int actionIndex = matcher.Index;
    SymbolBase outcome = matcher.Outcome;
    var ambiguous = outcome as AmbiguousSymbol;
    var deterministic = outcome as Symbol;

    TokenProducerInfo producer;
    if (outcome == null)
    {
        // No outcome symbol: no main token and no possible tokens.
        producer = new TokenProducerInfo
        {
            MainTokenId = -1,
            Disambiguation = matcher.Disambiguation,
            RealActions = SparseIntSetType.Instance.Of(actionIndex),
            PossibleTokens = tokenSetType.Empty
        };
    }
    else if (ambiguous != null)
    {
        // Ambiguous outcome: expose its main token plus all candidate tokens.
        producer = new TokenProducerInfo
        {
            MainTokenId = ambiguous.MainToken,
            Disambiguation = matcher.Disambiguation,
            RealActions = SparseIntSetType.Instance.Of(actionIndex),
            PossibleTokens = tokenSetType.Of(ambiguous.Tokens)
        };
    }
    else if (deterministic != null)
    {
        // Deterministic outcome: the single symbol is both main and only token.
        producer = new TokenProducerInfo
        {
            MainTokenId = deterministic.Index,
            Disambiguation = matcher.Disambiguation,
            RealActions = SparseIntSetType.Instance.Of(actionIndex),
            PossibleTokens = tokenSetType.Of(deterministic.Index)
        };
    }
    else
    {
        // Unrecognized symbol kind: leave the table untouched (as before).
        return;
    }

    actionToTokenProducer[actionIndex] = producer;
}
// Teleport command handler: teleports the invoking player to explicit
// coordinates (3+ args), to another player (1 arg), or to a bot ("bot <name>").
// A recognized precise-teleport prefix switches coordinate handling to precise
// mode. Saves pre-teleport state, changes map when needed, and waits for
// loading before sending the final position.
public override void Use(Player p, string message, CommandData data)
{
    if (message.Length == 0) { Help(p); return; }

    // Strip the precise prefix, if present, before parsing arguments.
    bool preciseTP = message.CaselessStarts(precisePrefix);
    if (preciseTP) { message = message.Substring(precisePrefix.Length); }

    string[] args = message.SplitSpaces();
    // Three or more args => raw coordinate teleport.
    if (args.Length >= 3) { TeleportCoords(p, args, preciseTP); return; }

    Player target = null; PlayerBot bot = null;
    if (args.Length == 1)
    {
        // Single arg: resolve a player by (partial) name; permission-check it.
        target = PlayerInfo.FindMatches(p, args[0]);
        if (target == null) { return; }
        if (!CheckPlayer(p, target, data)) { return; }
    }
    else if (args[0].CaselessEq("bot"))
    {
        // "bot <name>": resolve a bot instead of a player.
        bot = Matcher.FindBots(p, args[1]);
        if (bot == null) { return; }
    }
    else { Help(p); return; }

    SavePreTeleportState(p);
    Level lvl = bot != null ? bot.level : target.level;
    if (p.level != lvl) { PlayerActions.ChangeMap(p, lvl.name); }

    // If the target is still loading its map, wait for it to spawn first.
    if (target != null && target.Loading)
    {
        p.Message("Waiting for " + target.ColoredName + " %Sto spawn..");
        target.BlockUntilLoad(10);
    }

    // Player wasn't able to join target map, so don't move
    if (p.level != lvl) { return; }

    Position pos = bot != null ? bot.Pos : target.Pos;
    Orientation rot = bot != null ? bot.Rot : target.Rot;
    p.BlockUntilLoad(10); //Wait for player to spawn in new map
    p.SendPos(Entities.SelfID, pos, rot);
}
// Constructs the system over the group of entities carrying both component 0
// and component 1, processed with the given number of threads (both values
// are forwarded straight to the base constructor).
public EntitasSystem(EntitiasWorld world, int threadCount) : base(world.GetGroup(Matcher <EntitasEntity> .AllOf(0, 1)), threadCount) { }
// NSpec-style suite for GroupObserver. Covers, for a group observed on
// ComponentA: collection on OnEntityAdded (including filtering of
// non-matching entities, de-duplication, Deactivate/Activate behavior, and
// ClearCollectedEntities); then OnEntityRemoved and OnEntityAddedOrRemoved;
// then multi-group observation (group/eventType count mismatch throwing
// GroupObserverException, and added/removed/mixed event types across two groups).
void when_created() { Pool pool = null; Group groupA = null; GroupObserver observer = null; before = () => { pool = new Pool(CID.NumComponents); groupA = pool.GetGroup(Matcher.AllOf(new [] { CID.ComponentA })); }; context["when observing with eventType OnEntityAdded"] = () => { before = () => { observer = new GroupObserver(groupA, GroupEventType.OnEntityAdded); }; it["returns collected entities"] = () => { var e = pool.CreateEntity(); e.AddComponentA(); var entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(e); }; it["only returns matching collected entities"] = () => { var e = pool.CreateEntity(); e.AddComponentA(); var e2 = pool.CreateEntity(); e2.AddComponentB(); var entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(e); }; it["collects entites only once"] = () => { var e = pool.CreateEntity(); e.AddComponentA(); e.RemoveComponentA(); e.AddComponentA(); var entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(e); }; it["returns empty list when no entities were collected"] = () => { observer.collectedEntities.should_be_empty(); }; it["clears collected entities on deactivation"] = () => { var e = pool.CreateEntity(); e.AddComponentA(); observer.Deactivate(); observer.collectedEntities.should_be_empty(); }; it["doesn't collect entities when deactivated"] = () => { observer.Deactivate(); var e = pool.CreateEntity(); e.AddComponentA(); observer.collectedEntities.should_be_empty(); }; it["continues collecting when activated"] = () => { observer.Deactivate(); var e1 = pool.CreateEntity(); e1.AddComponentA(); observer.Activate(); var e2 = pool.CreateEntity(); e2.AddComponentA(); var entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(e2); }; it["clears collected entites"] = () => { var e = pool.CreateEntity(); e.AddComponentA(); observer.ClearCollectedEntities(); observer.collectedEntities.should_be_empty(); }; 
// --- continued: OnEntityRemoved, OnEntityAddedOrRemoved, and multi-group observation ---
}; context["when observing with eventType OnEntityRemoved"] = () => { before = () => { observer = new GroupObserver(groupA, GroupEventType.OnEntityRemoved); }; it["returns collected entities"] = () => { var e = pool.CreateEntity(); e.AddComponentA(); observer.collectedEntities.should_be_empty(); e.RemoveComponentA(); var entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(e); }; }; context["when observing with eventType OnEntityAddedOrRemoved"] = () => { before = () => { observer = new GroupObserver(groupA, GroupEventType.OnEntityAddedOrRemoved); }; it["returns collected entities"] = () => { var e = pool.CreateEntity(); e.AddComponentA(); var entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(e); observer.ClearCollectedEntities(); e.RemoveComponentA(); entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(e); }; }; context["when observing multiple groups"] = () => { Group groupB = null; before = () => { groupB = pool.GetGroup(Matcher.AllOf(new[] { CID.ComponentB })); }; it["throws when goup count != eventType count"] = expect <GroupObserverException>(() => { observer = new GroupObserver( new [] { groupA }, new [] { GroupEventType.OnEntityAdded, GroupEventType.OnEntityAdded } ); }); context["when observing with eventType OnEntityAdded"] = () => { before = () => { observer = new GroupObserver( new [] { groupA, groupB }, new [] { GroupEventType.OnEntityAdded, GroupEventType.OnEntityAdded } ); }; it["returns collected entities"] = () => { var eA = pool.CreateEntity(); eA.AddComponentA(); var eB = pool.CreateEntity(); eB.AddComponentB(); var entities = observer.collectedEntities; entities.Count.should_be(2); entities.should_contain(eA); entities.should_contain(eB); }; }; context["when observing with eventType OnEntityRemoved"] = () => { before = () => { observer = new GroupObserver( new [] { groupA, groupB }, new [] { GroupEventType.OnEntityRemoved, 
GroupEventType.OnEntityRemoved } ); }; it["returns collected entities"] = () => { var eA = pool.CreateEntity(); eA.AddComponentA(); var eB = pool.CreateEntity(); eB.AddComponentB(); observer.collectedEntities.should_be_empty(); eA.RemoveComponentA(); eB.RemoveComponentB(); var entities = observer.collectedEntities; entities.Count.should_be(2); entities.should_contain(eA); entities.should_contain(eB); }; }; context["when observing with eventType OnEntityAddedOrRemoved"] = () => { before = () => { observer = new GroupObserver( new [] { groupA, groupB }, new [] { GroupEventType.OnEntityAddedOrRemoved, GroupEventType.OnEntityAddedOrRemoved } ); }; it["returns collected entities"] = () => { var eA = pool.CreateEntity(); eA.AddComponentA(); var eB = pool.CreateEntity(); eB.AddComponentB(); var entities = observer.collectedEntities; entities.Count.should_be(2); entities.should_contain(eA); entities.should_contain(eB); observer.ClearCollectedEntities(); eA.RemoveComponentA(); eB.RemoveComponentB(); entities = observer.collectedEntities; entities.Count.should_be(2); entities.should_contain(eA); entities.should_contain(eB); }; }; context["when observing with mixed eventTypes"] = () => { before = () => { observer = new GroupObserver( new [] { groupA, groupB }, new [] { GroupEventType.OnEntityAdded, GroupEventType.OnEntityRemoved } ); }; it["returns collected entities"] = () => { var eA = pool.CreateEntity(); eA.AddComponentA(); var eB = pool.CreateEntity(); eB.AddComponentB(); var entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(eA); observer.ClearCollectedEntities(); eA.RemoveComponentA(); eB.RemoveComponentB(); entities = observer.collectedEntities; entities.Count.should_be(1); entities.should_contain(eB); }; }; }; }
// Command core: builds a case-insensitive glob matcher from the --include /
// --exclude options (defaulting to "**" when no include is given), collects
// matching items under the path argument, and removes them deepest-path-first.
// Removal runs with a progress bar, quietly, or as a dry listing depending on
// the list/quiet options; --try-run only sleeps instead of deleting; each
// delete is retried up to 25 times at 250 ms intervals; the (now empty) root
// path itself is removed unless --skip-path is set. Prints a timing summary
// unless listing or quiet.
// NOTE(review): the cancellationToken parameter is currently unused.
private static Task <int> ExecuteAsync(CancellationToken cancellationToken) { var includePattern = _includesOption.Values; var excludePattern = _excludesOption.Values; // When no include pattern is specified, we decide to include all recursive ('**') if (!includePattern.Any()) { includePattern.Add("**"); } var matcher = new Matcher(StringComparison.OrdinalIgnoreCase); matcher.AddIncludePatterns(includePattern); matcher.AddExcludePatterns(excludePattern); var stopwatch = Stopwatch.StartNew(); var items = matcher.Execute(_pathArgument.Value); int totalItems = items.Count; TimeSpan getItemsElapsed = stopwatch.Elapsed; void ExecuteWithProgressBar(Action <string> itemAction, Action <DirectoryInfo, Func <bool> > rootPathAction) { var options = new ProgressBarOptions { ProgressCharacter = '─', CollapseWhenFinished = false }; using var progressBar = new ProgressBar(totalItems, "Start remove items...", options); var i = 0; foreach (string path in items.OrderByDescending(x => x.Length)) { string shrinkedPath = PathFormatter.ShrinkPath(path, Console.BufferWidth - 44); progressBar.Message = $"Remove item {i + 1} of {totalItems}: {shrinkedPath}"; itemAction(path); progressBar.Tick($"Removed item {i + 1} of {totalItems}: {shrinkedPath}"); ++i; } var rootPathDirectoryInfo = new DirectoryInfo(matcher.RootPath); var rootPathCheck = new Func <bool>(() => rootPathDirectoryInfo.Exists && rootPathDirectoryInfo.GetFileSystemInfos("*", SearchOption.AllDirectories).Length == 0); if ((_skipPathOption.HasValue() || !rootPathCheck()) && (_skipPathOption.HasValue() || !_tryRunOption.HasValue())) { return; } using ChildProgressBar childProgressBar = progressBar.Spawn(1, "child actions", options); { string shrinkedPath = PathFormatter.ShrinkPath(matcher.RootPath, Console.BufferWidth - 44); childProgressBar.Message = $"Remove empty root path: {shrinkedPath}"; rootPathAction(rootPathDirectoryInfo, rootPathCheck); childProgressBar.Tick($"Removed empty root path: {shrinkedPath}"); } } void 
// Quiet variant of the removal loop: same item/root-path actions, no progress UI.
ExecuteQuiet(Action <string> itemAction, Action <DirectoryInfo, Func <bool> > rootPathAction) { foreach (string path in items.OrderByDescending(x => x.Length)) { itemAction(path); } var rootPathDirectoryInfo = new DirectoryInfo(matcher.RootPath); var rootPathCheck = new Func <bool>(() => rootPathDirectoryInfo.Exists && rootPathDirectoryInfo.GetFileSystemInfos("*", SearchOption.AllDirectories).Length == 0); if (!_skipPathOption.HasValue() && rootPathCheck() || !_skipPathOption.HasValue() && _tryRunOption.HasValue()) { rootPathAction(rootPathDirectoryInfo, rootPathCheck); } } if (totalItems > 0) { var retryPolicy = Policy.Handle <Exception>().OrResult <bool>(r => r).WaitAndRetry(25, c => TimeSpan.FromMilliseconds(250)); var itemAction = new Action <string>(path => { if (_tryRunOption.HasValue()) { Thread.Sleep(1); } else { if (PathExtensions.IsDirectory(path)) { var di = new DirectoryInfo(path); retryPolicy.Execute(() => { di.Refresh(); if (di.Exists) { di.Attributes = FileAttributes.Normal; di.Delete(true); } di.Refresh(); return(di.Exists); }); } else { var fi = new FileInfo(path); retryPolicy.Execute(() => { fi.Refresh(); if (fi.Exists) { fi.Attributes = FileAttributes.Normal; fi.Delete(); } fi.Refresh(); return(fi.Exists); }); } } }); var rootPathAction = new Action <DirectoryInfo, Func <bool> >((di, check) => { if (_tryRunOption.HasValue()) { Thread.Sleep(1); } else { retryPolicy.Execute(() => { di.Refresh(); if (check()) { di.Attributes = FileAttributes.Normal; di.Delete(); } di.Refresh(); return(check()); }); } }); if (!_listItemsOption.HasValue() && !_quietOption.HasValue()) { ExecuteWithProgressBar(itemAction, rootPathAction); } else if (_listItemsOption.HasValue() && !_quietOption.HasValue()) { foreach (string path in items.OrderByDescending(x => x.Length)) { Console.WriteLine(path); } if (!_skipPathOption.HasValue()) { Console.WriteLine(matcher.RootPath); } } else if (!_listItemsOption.HasValue() && _quietOption.HasValue()) { ExecuteQuiet(itemAction, 
rootPathAction); } } stopwatch.Stop(); TimeSpan completeElapsed = stopwatch.Elapsed; if (_listItemsOption.HasValue() || _quietOption.HasValue()) { return(Task.FromResult(Ok)); } PrintSummary(totalItems, completeElapsed, getItemsElapsed); return(Task.FromResult(Ok)); }
// NSpec-style suite for compound matchers built from AllOf/AnyOf sub-matchers.
// Verifies: merged index lists are ordered and de-duplicated (including mixed
// AllOf/AnyOf inputs); AllOf/AnyOf/NoneOf compound match and no-match cases;
// equality requires both the same compound type and the same sub-matcher
// types (same indices alone are not enough); arbitrarily nested compounds
// behave consistently; and ToString renders the nested structure, e.g.
// "AllOf(AllOf(1, 2), AnyOf(3, 4), NoneOf(5, 6))".
void when_compounding_matchers() { context["allOf"] = () => { AllOfMatcher allAB = null; AllOfMatcher allBC = null; AnyOfMatcher anyAB = null; AnyOfMatcher anyBC = null; AllOfCompoundMatcher compound = null; before = () => { allAB = Matcher.AllOf(new[] { CID.ComponentB, CID.ComponentA }); allBC = Matcher.AllOf(new[] { CID.ComponentC, CID.ComponentB }); anyAB = Matcher.AnyOf(new[] { CID.ComponentB, CID.ComponentA }); anyBC = Matcher.AnyOf(new[] { CID.ComponentC, CID.ComponentB }); }; it["has all indices in order"] = () => { compound = Matcher.AllOf(allAB, allBC); compound.indices.should_be(new [] { CID.ComponentA, CID.ComponentB, CID.ComponentC }); }; it["has all indices in order (mixed)"] = () => { compound = Matcher.AllOf(allAB, anyBC); compound.indices.should_be(new [] { CID.ComponentA, CID.ComponentB, CID.ComponentC }); }; it["matches"] = () => { compound = Matcher.AllOf(allAB, allBC); var e = this.CreateEntity(); e.AddComponentA(); e.AddComponentB(); e.AddComponentC(); compound.Matches(e).should_be_true(); }; it["matches (mixed)"] = () => { compound = Matcher.AllOf(allAB, anyBC); var e = this.CreateEntity(); e.AddComponentA(); e.AddComponentB(); compound.Matches(e).should_be_true(); }; it["doesn't match"] = () => { compound = Matcher.AllOf(allAB, allBC); var e = this.CreateEntity(); e.AddComponentB(); e.AddComponentC(); compound.Matches(e).should_be_false(); }; it["doesn't match (mixed)"] = () => { compound = Matcher.AllOf(anyAB, anyBC); var e = this.CreateEntity(); e.AddComponentC(); compound.Matches(e).should_be_false(); }; }; context["anyOf"] = () => { AllOfMatcher allAB = null; AllOfMatcher allBC = null; AnyOfMatcher anyBC = null; AnyOfCompoundMatcher compound = null; before = () => { allAB = Matcher.AllOf(new[] { CID.ComponentB, CID.ComponentA }); allBC = Matcher.AllOf(new[] { CID.ComponentC, CID.ComponentB }); anyBC = Matcher.AnyOf(new[] { CID.ComponentC, CID.ComponentB }); }; it["has all indices in order"] = () => { compound = Matcher.AnyOf(allAB, 
allBC); compound.indices.should_be(new [] { CID.ComponentA, CID.ComponentB, CID.ComponentC }); }; it["has all indices in order (mixed)"] = () => { compound = Matcher.AnyOf(allAB, anyBC); compound.indices.should_be(new [] { CID.ComponentA, CID.ComponentB, CID.ComponentC }); }; it["matches"] = () => { compound = Matcher.AnyOf(allBC, allAB); var e = this.CreateEntity(); e.AddComponentB(); e.AddComponentC(); compound.Matches(e).should_be_true(); }; it["matches (mixed)"] = () => { compound = Matcher.AnyOf(allAB, anyBC); var e = this.CreateEntity(); e.AddComponentC(); compound.Matches(e).should_be_true(); }; it["doesn't match"] = () => { compound = Matcher.AnyOf(allAB, allBC); var e = this.CreateEntity(); e.AddComponentA(); e.AddComponentC(); compound.Matches(e).should_be_false(); }; }; context["noneOf"] = () => { AllOfMatcher allAB = null; AllOfMatcher allBC = null; AllOfMatcher allAC = null; AnyOfMatcher anyBC = null; NoneOfCompoundMatcher compound = null; before = () => { allAB = Matcher.AllOf(new[] { CID.ComponentB, CID.ComponentA }); allBC = Matcher.AllOf(new[] { CID.ComponentC, CID.ComponentB }); allAC = Matcher.AllOf(new[] { CID.ComponentC, CID.ComponentA }); anyBC = Matcher.AnyOf(new[] { CID.ComponentC, CID.ComponentB }); }; it["has all indices in order"] = () => { compound = Matcher.NoneOf(allAB, allBC); compound.indices.should_be(new [] { CID.ComponentA, CID.ComponentB, CID.ComponentC }); }; it["matches"] = () => { compound = Matcher.NoneOf(allAB, allAC); var e = this.CreateEntity(); e.AddComponentB(); e.AddComponentC(); compound.Matches(e).should_be_true(); }; it["matches (mixed)"] = () => { compound = Matcher.NoneOf(allAB, anyBC); var e = this.CreateEntity(); e.AddComponentA(); compound.Matches(e).should_be_true(); }; it["doesn't match"] = () => { compound = Matcher.NoneOf(allAB, anyBC); var e = this.CreateEntity(); e.AddComponentC(); compound.Matches(e).should_be_false(); }; }; context["equals"] = () => { it["doesn't equal when only indices are same"] = () 
=> { var all1 = Matcher.AllOf(0, 1); var all2 = Matcher.AllOf(2, 3); var c1 = Matcher.AllOf(all1, all2); var any1 = Matcher.AnyOf(0, 1); var any2 = Matcher.AnyOf(2, 3); var c2 = Matcher.AllOf(any1, any2); c1.Equals(c2).should_be_false(); }; it["doesn't equal when not same type"] = () => { var all1 = Matcher.AllOf(0, 1); var all2 = Matcher.AllOf(2, 3); var c1 = Matcher.AllOf(all1, all2); var c2 = Matcher.AnyOf(all1, all2); c1.Equals(c2).should_be_false(); }; it["equals when equal"] = () => { var all1 = Matcher.AllOf(0, 1); var all2 = Matcher.AllOf(2, 3); var c1 = Matcher.AllOf(all1, all2); var all3 = Matcher.AllOf(0, 1); var all4 = Matcher.AllOf(2, 3); var c2 = Matcher.AllOf(all3, all4); c1.Equals(c2).should_be_true(); }; }; context["nested"] = () => { it["works like a charme"] = () => { var allAB = Matcher.AllOf(CID.ComponentA, CID.ComponentB); var allCD = Matcher.AllOf(CID.ComponentC, CID.ComponentD); var allEF = Matcher.AllOf(CID.ComponentE, CID.ComponentF); var anyEF = Matcher.AnyOf(CID.ComponentE, CID.ComponentF); var c1 = Matcher.AllOf(allAB, allCD, anyEF); var c2 = Matcher.AllOf(allAB, allCD, allEF); var c3 = Matcher.AnyOf(allAB, allCD, allEF); var e = this.CreateEntity(); e.AddComponentA(); e.AddComponentB(); e.AddComponentC(); e.AddComponentD(); e.AddComponentE(); c1.Matches(e).should_be_true(); c2.Matches(e).should_be_false(); c3.Matches(e).should_be_true(); var nested1 = Matcher.AllOf(c1, c2); var nested2 = Matcher.AnyOf(c1, c2); nested1.Matches(e).should_be_false(); nested2.Matches(e).should_be_true(); nested1.indices.should_be(new [] { CID.ComponentA, CID.ComponentB, CID.ComponentC, CID.ComponentD, CID.ComponentE, CID.ComponentF }); nested2.indices.should_be(new [] { CID.ComponentA, CID.ComponentB, CID.ComponentC, CID.ComponentD, CID.ComponentE, CID.ComponentF }); var nestedAll = Matcher.AllOf(nested1, nested2); var nestedAny = Matcher.AnyOf(nested1, nested2); nestedAll.Matches(e).should_be_false(); nestedAny.Matches(e).should_be_true(); 
Matcher.NoneOf(nestedAll, nestedAny).Matches(e).should_be_false(); }; }; context["can ToString"] = () => { AllOfMatcher allOf = null; AnyOfMatcher anyOf = null; NoneOfMatcher noneOf = null; before = () => { allOf = Matcher.AllOf(CID.ComponentA, CID.ComponentB); anyOf = Matcher.AnyOf(CID.ComponentC, CID.ComponentD); noneOf = Matcher.NoneOf(CID.ComponentE, CID.ComponentF); }; it["AllOfCompoundMatcher"] = () => { var m = Matcher.AllOf(allOf, anyOf, noneOf); m.ToString().should_be("AllOf(AllOf(1, 2), AnyOf(3, 4), NoneOf(5, 6))"); }; it["AnyOfCompoundMatcher"] = () => { var m = Matcher.AnyOf(allOf, anyOf, noneOf); m.ToString().should_be("AnyOf(AllOf(1, 2), AnyOf(3, 4), NoneOf(5, 6))"); }; it["NoneOfCompoundMatcher"] = () => { var m = Matcher.NoneOf(allOf, anyOf, noneOf); m.ToString().should_be("NoneOf(AllOf(1, 2), AnyOf(3, 4), NoneOf(5, 6))"); }; }; }
/// <summary>
/// The lone "*" wildcard matches any candidate, including empty and null.
/// </summary>
public void Star_Filter_Matches_Everything()
{
    foreach (string candidate in new[] { "xxx", "", null })
    {
        Assert.IsTrue(Matcher.MatchWithWildcards("*", candidate, StringComparison.OrdinalIgnoreCase));
    }
}
// Reactive trigger: collect entities from the context that match component 0.
protected override ICollector <TestEntity> GetTrigger(IContext <TestEntity> context) { return(context.CreateCollector(Matcher <TestEntity> .AllOf(0))); }
/// <summary>
/// Classifies an adjacent character pair by its Latin-letter case pattern,
/// using the precompiled patE ([a-z]) and patEC ([A-Z]) patterns.
/// Returns "BND" for [a-z][A-Z], "ENG" for [a-z][a-z], "BCC" for [A-Z][A-Z],
/// "e1".."e4" for letter/non-letter boundaries, or "" when neither character
/// is a Latin letter.
/// </summary>
private static string IsEnglish(string Ep, string Ec)
{
    bool prevLower = patE.Matcher(Ep).Matches();   // previous char is [a-z]
    bool currLower = patE.Matcher(Ec).Matches();   // current char is [a-z]
    bool prevUpper = patEC.Matcher(Ep).Matches();  // previous char is [A-Z]
    bool currUpper = patEC.Matcher(Ec).Matches();  // current char is [A-Z]

    if (prevLower && currUpper) { return("BND"); }                 // [a-z][A-Z]
    if (prevLower && currLower) { return("ENG"); }                 // [a-z][a-z]
    if (prevUpper && currUpper) { return("BCC"); }                 // [A-Z][A-Z]
    if (prevLower && !currLower && !currUpper) { return("e1"); }   // [a-z][^A-Za-z]
    if (currLower && !prevLower && !prevUpper) { return("e2"); }   // [^A-Za-z][a-z]
    if (prevUpper && !currLower && !currUpper) { return("e3"); }   // [A-Z][^A-Za-z]
    if (currUpper && !prevLower && !prevUpper) { return("e4"); }   // [^A-Za-z][A-Z]
    return(string.Empty);                                          // [^A-Za-z][^A-Za-z]
}
// Character-pair features for positions (loc-1, loc) used by the CRF-based
// Chinese word segmenter.  Emits string-valued features from several
// families — character n-grams, radicals, dictionaries, POS affixes,
// hand-written rules, and Unicode character type — each gated by the
// corresponding flags.* switch.
// NOTE(review): assumes cInfo is padded so indices loc-3 .. loc+3 are
// valid (PaddedList presumably supplies boundary labels) — confirm.
public virtual ICollection<string> FeaturesCpC(PaddedList<IN> cInfo, int loc)
{
    ICollection<string> features = new List<string>();
    // Context window: c = current char, c1..c3 following, p..p3 preceding.
    CoreLabel c = cInfo[loc];
    CoreLabel c1 = cInfo[loc + 1];
    CoreLabel c2 = cInfo[loc + 2];
    CoreLabel c3 = cInfo[loc + 3];
    CoreLabel p = cInfo[loc - 1];
    CoreLabel p2 = cInfo[loc - 2];
    CoreLabel p3 = cInfo[loc - 3];
    // Character text at each position; null (padding) is normalized to "".
    string charc = c.Get(typeof(CoreAnnotations.CharAnnotation));
    if (charc == null)
    {
        charc = string.Empty;
    }
    string charc1 = c1.Get(typeof(CoreAnnotations.CharAnnotation));
    if (charc1 == null)
    {
        charc1 = string.Empty;
    }
    string charc2 = c2.Get(typeof(CoreAnnotations.CharAnnotation));
    if (charc2 == null)
    {
        charc2 = string.Empty;
    }
    string charc3 = c3.Get(typeof(CoreAnnotations.CharAnnotation));
    if (charc3 == null)
    {
        charc3 = string.Empty;
    }
    string charp = p.Get(typeof(CoreAnnotations.CharAnnotation));
    if (charp == null)
    {
        charp = string.Empty;
    }
    string charp2 = p2.Get(typeof(CoreAnnotations.CharAnnotation));
    if (charp2 == null)
    {
        charp2 = string.Empty;
    }
    string charp3 = p3.Get(typeof(CoreAnnotations.CharAnnotation));
    if (charp3 == null)
    {
        charp3 = string.Empty;
    }
    /*
     * N-gram features. N is upto 2.
     */
    if (flags.useWord2)
    {
        // features.add(charc +"c");
        // features.add(charc1+"c1");
        // features.add(charp +"p");
        // features.add(charp +charc +"pc");
        // if( flags.useMsr ){
        //   features.add(charc +charc1 +"cc1");
        //   features.add(charp + charc1 +"pc1");
        // }
        features.Add(charc + "::c");
        features.Add(charc1 + "::c1");
        features.Add(charp + "::p");
        features.Add(charp2 + "::p2");
        // trying to restore the features that Huishin described in SIGHAN 2005 paper
        features.Add(charc + charc1 + "::cn");
        features.Add(charp + charc + "::pc");
        features.Add(charp + charc1 + "::pn");
        features.Add(charp2 + charp + "::p2p");
        features.Add(charp2 + charc + "::p2c");
        features.Add(charc2 + charc + "::n2c");
        features.Add("|word2");
    }
    /*
     * Radical N-gram features. N is upto 4.
     * Smoothing method of N-gram, because there are too many characters in Chinese.
     * (It works better than N-gram when they are used individually. less sparse)
     */
    char rcharc;
    char rcharc1;
    char rcharc2;
    char rcharc3;
    char rcharp;
    char rcharp1;  // NOTE(review): declared but never assigned or read in this method.
    char rcharp2;
    char rcharp3;
    // 'n' stands in as the radical of an empty (padding) character.
    if (charc.Length == 0)
    {
        rcharc = 'n';
    }
    else
    {
        rcharc = RadicalMap.GetRadical(charc[0]);
    }
    if (charc1.Length == 0)
    {
        rcharc1 = 'n';
    }
    else
    {
        rcharc1 = RadicalMap.GetRadical(charc1[0]);
    }
    if (charc2.Length == 0)
    {
        rcharc2 = 'n';
    }
    else
    {
        rcharc2 = RadicalMap.GetRadical(charc2[0]);
    }
    if (charc3.Length == 0)
    {
        rcharc3 = 'n';
    }
    else
    {
        rcharc3 = RadicalMap.GetRadical(charc3[0]);
    }
    if (charp.Length == 0)
    {
        rcharp = 'n';
    }
    else
    {
        rcharp = RadicalMap.GetRadical(charp[0]);
    }
    if (charp2.Length == 0)
    {
        rcharp2 = 'n';
    }
    else
    {
        rcharp2 = RadicalMap.GetRadical(charp2[0]);
    }
    if (charp3.Length == 0)
    {
        rcharp3 = 'n';
    }
    else
    {
        rcharp3 = RadicalMap.GetRadical(charp3[0]);
    }
    if (flags.useRad2)
    {
        features.Add(rcharc + "rc");
        features.Add(rcharc1 + "rc1");
        features.Add(rcharp + "rp");
        // NOTE(review): char + char sums to an int before the string
        // concatenation below — presumably mirrors the Java original; verify.
        features.Add(rcharp + rcharc + "rpc");
        features.Add(rcharc + rcharc1 + "rcc1");
        features.Add(rcharp + rcharc + rcharc1 + "rpcc1");
        features.Add("|rad2");
    }
    /* non-word dictionary:SEEM bi-gram marked as non-word */
    if (flags.useDict2)
    {
        NonDict2 nd = new NonDict2(flags);
        features.Add(nd.CheckDic(charp + charc, flags) + "nondict");
        features.Add("|useDict2");
    }
    if (flags.useOutDict2)
    {
        // Lazily load the external lexicon on first use.
        if (outDict == null)
        {
            logger.Info("reading " + flags.outDict2 + " as a seen lexicon");
            outDict = new CorpusDictionary(flags.outDict2, true);
        }
        features.Add(outDict.GetW(charp + charc) + "outdict");                   // -1 0
        features.Add(outDict.GetW(charc + charc1) + "outdict");                  // 0 1
        features.Add(outDict.GetW(charp2 + charp) + "outdict");                  // -2 -1
        features.Add(outDict.GetW(charp2 + charp + charc) + "outdict");          // -2 -1 0
        features.Add(outDict.GetW(charp3 + charp2 + charp) + "outdict");         // -3 -2 -1
        features.Add(outDict.GetW(charp + charc + charc1) + "outdict");          // -1 0 1
        features.Add(outDict.GetW(charc + charc1 + charc2) + "outdict");         // 0 1 2
        features.Add(outDict.GetW(charp + charc + charc1 + charc2) + "outdict"); // -1 0 1 2
    }
    /*
     * (CTB/ASBC/HK/PK/MSR) POS information of each characters.
     * If a character falls into some function categories,
     * it is very likely there is a boundary.
     * A lot of Chinese function words belong to single characters.
     * This feature is also good for numbers and punctuations.
     * DE* are grouped into DE.
     */
    if (flags.useCTBChar2 || flags.useASBCChar2 || flags.useHKChar2 || flags.usePKChar2 || flags.useMSRChar2)
    {
        string[] tagsets;
        // the "useChPos" now only works for CTB and PK
        if (flags.useChPos)
        {
            if (flags.useCTBChar2)
            {
                tagsets = new string[] { "AD", "AS", "BA", "CC", "CD", "CS", "DE", "DT", "ETC", "IJ", "JJ", "LB", "LC", "M", "NN", "NR", "NT", "OD", "P", "PN", "PU", "SB", "SP", "VA", "VC", "VE", "VV" };
            }
            else
            {
                if (flags.usePKChar2)
                {
                    //tagsets = new String[]{"r", "j", "t", "a", "nz", "l", "vn", "i", "m", "ns", "nr", "v", "n", "q", "Ng", "b", "d", "nt"};
                    tagsets = new string[] { "2", "3", "4" };
                }
                else
                {
                    throw new Exception("only support settings for CTB and PK now.");
                }
            }
        }
        else
        {
            //logger.info("Using Derived features");
            tagsets = new string[] { "2", "3", "4" };
        }
        if (taDetector == null)
        {
            taDetector = new TagAffixDetector(flags);
        }
        foreach (string tagset in tagsets)
        {
            // Prefix/infix/suffix dictionary checks for the tag set, combined
            // into one conjoined feature string.
            features.Add(taDetector.CheckDic(tagset + "p", charp) + taDetector.CheckDic(tagset + "i", charp) + taDetector.CheckDic(tagset + "s", charc) + taDetector.CheckInDic(charp) + taDetector.CheckInDic(charc) + tagset + "prep-sufc");
        }
    }
    // features.add("|ctbchar2"); // Added a constant feature several times!!
    /*
     * In error analysis, we found English words and numbers are often separated.
     * Rule 1: isNumber feature: check if the current and previous char is a number.
     * Rule 2: Disambiguation of time point and time duration.
     * Rule 3: isEnglish feature: check if the current and previous character is an english letter.
     * Rule 4: English name feature: check if the current char is a conjunct pu for English first and last name, since there is no space between two names.
     * Most of PUs are a good indicator for word boundary, but - and . is a strong indicator that there is no boundry within a previous , a follow char and it.
     */
    if (flags.useRule2)
    {
        /* Reduplication features */
        // previous character == current character
        if (charp.Equals(charc))
        {
            features.Add("11");
        }
        // previous character == next character
        if (charp.Equals(charc1))
        {
            features.Add("22");
        }
        // current character == next next character
        // fire only when usePk and useHk are both false.
        // Notice: this should be (almost) the same as the "22" feature, but we keep it for now.
        if (!flags.usePk && !flags.useHk)
        {
            if (charc.Equals(charc2))
            {
                features.Add("33");
            }
        }
        char cur1 = ' ';
        char cur2 = ' ';
        char cur = ' ';
        char pre = ' ';
        // actually their length must be either 0 or 1
        if (charc1.Length > 0)
        {
            cur1 = charc1[0];
        }
        if (charc2.Length > 0)
        {
            cur2 = charc2[0];
        }
        if (charc.Length > 0)
        {
            cur = charc[0];
        }
        if (charp.Length > 0)
        {
            pre = charp[0];
        }
        string prer = rcharp.ToString();
        // the radical of previous character
        Pattern E = Pattern.Compile("[a-zA-Z]");
        Pattern N = Pattern.Compile("[0-9]");
        Matcher m = E.Matcher(charp);
        Matcher ce = E.Matcher(charc);
        Matcher pe = E.Matcher(charp2);
        Matcher cn = N.Matcher(charc);
        Matcher pn = N.Matcher(charp2);
        // if current and previous characters are numbers...
        if (cur >= '0' && cur <= '9' && pre >= '0' && pre <= '9')
        {
            if (cur == '9' && pre == '1' && cur1 == '9' && cur2 >= '0' && cur2 <= '9')
            {
                //199x
                features.Add("YR");
            }
            else
            {
                features.Add("2N");
            }
        }
        else
        {
            // if current and previous characters are not both numbers
            // but previous char is a number
            // i.e. patterns like "1N" , "2A", etc
            if (pre >= '0' && pre <= '9')
            {
                features.Add("1N");
            }
            else
            {
                // if previous character is an English character
                if (m.Matches())
                {
                    features.Add("E");
                }
                else
                {
                    // if the previous character contains no radical (and it exist)
                    if (prer.Equals(".") && charp.Length == 1)
                    {
                        // fire only when usePk and useHk are both false. Not sure why. -pichuan
                        if (!flags.useHk && !flags.usePk)
                        {
                            if (ce.Matches())
                            {
                                features.Add("PU+E");
                            }
                            if (pe.Matches())
                            {
                                features.Add("E+PU");
                            }
                            if (cn.Matches())
                            {
                                features.Add("PU+N");
                            }
                            if (pn.Matches())
                            {
                                features.Add("N+PU");
                            }
                        }
                        features.Add("PU");
                    }
                }
            }
        }
        string engType = IsEnglish(charp, charc);
        string engPU = IsEngPU(charp);
        if (!engType.Equals(string.Empty))
        {
            features.Add(engType);
        }
        if (!engPU.Equals(string.Empty) && !engType.Equals(string.Empty))
        {
            features.Add(engPU + engType);
        }
    }
    //end of use rule
    // features using "Character.getType" information!
    // NOTE(review): char.GetType / char.UppercaseLetter look like a
    // Sharpen-style shim for Java's Character.getType — confirm the shim
    // is in scope for this project.
    string origS = c.Get(typeof(CoreAnnotations.OriginalCharAnnotation));
    char origC = ' ';
    if (origS.Length > 0)
    {
        origC = origS[0];
    }
    int type = char.GetType(origC);
    switch (type)
    {
    case char.UppercaseLetter:
    case char.LowercaseLetter:
    {
        // A-Z and full-width A-Z
        // a-z and full-width a-z
        features.Add("CHARTYPE-LETTER");
        break;
    }

    case char.DecimalDigitNumber:
    {
        features.Add("CHARTYPE-DECIMAL_DIGIT_NUMBER");
        break;
    }

    case char.OtherLetter:
    {
        // mostly chinese chars
        features.Add("CHARTYPE-OTHER_LETTER");
        break;
    }

    default:
    {
        // other types
        features.Add("CHARTYPE-MISC");
        break;
    }
    }
    return(features);
}
public void SetPool(Pool pool)
{
    _pool = pool;
    // Track every entity carrying all three health-related components.
    var healthMatcher = Matcher.AllOf(CoreMatcher.Wound, CoreMatcher.CurrentHitPoint, CoreMatcher.HitPointRegen);
    _group = _pool.GetGroup(healthMatcher);
}
// todo: give options for document splitting. A line or the whole file or sentence splitting as now
// Tokenizes the reader's content, folds SGML tags into the surrounding
// tokens' Before/After annotations, splits the tokens into sentences, and
// returns an enumerator over the sentences with per-token positions set.
public virtual IEnumerator<IList<In>> GetIterator(Reader r)
{
    ITokenizer<In> tokenizer = tokenizerFactory.GetTokenizer(r);
    // PTBTokenizer.newPTBTokenizer(r, false, true);
    IList<In> words = new List<In>();
    IN previous = null;
    StringBuilder prepend = new StringBuilder();
    /*
     * This changes SGML tags into whitespace -- it should maybe be moved elsewhere
     */
    while (tokenizer.MoveNext())
    {
        IN w = tokenizer.Current;
        string word = w.Get(typeof(CoreAnnotations.TextAnnotation));
        Matcher m = sgml.Matcher(word);
        if (m.Matches())
        {
            // SGML tag: drop the token, gluing its text onto the previous
            // token's After annotation and buffering it for the next token's
            // Before annotation.
            string before = StringUtils.GetNotNullString(w.Get(typeof(CoreAnnotations.BeforeAnnotation)));
            string after = StringUtils.GetNotNullString(w.Get(typeof(CoreAnnotations.AfterAnnotation)));
            prepend.Append(before).Append(word);
            if (previous != null)
            {
                string previousTokenAfter = StringUtils.GetNotNullString(previous.Get(typeof(CoreAnnotations.AfterAnnotation)));
                previous.Set(typeof(CoreAnnotations.AfterAnnotation), previousTokenAfter + word + after);
            }
        }
        else
        {
            // previous.appendAfter(w.word() + w.after());
            string before = StringUtils.GetNotNullString(w.Get(typeof(CoreAnnotations.BeforeAnnotation)));
            if (prepend.Length > 0)
            {
                prepend.Append(before);
                w.Set(typeof(CoreAnnotations.BeforeAnnotation), prepend.ToString());
                prepend = new StringBuilder();
            }
            words.Add(w);
            previous = w;
        }
    }
    IList<IList<In>> sentences = wts.Process(words);
    string after_1 = string.Empty;
    IN last = null;
    foreach (IList<In> sentence in sentences)
    {
        int pos = 0;
        foreach (IN w in sentence)
        {
            // Record the token's index within its sentence.
            w.Set(typeof(CoreAnnotations.PositionAnnotation), int.ToString(pos));
            // FIX: the position counter was never incremented, so every token
            // in a sentence was annotated with position "0".
            pos++;
            after_1 = StringUtils.GetNotNullString(w.Get(typeof(CoreAnnotations.AfterAnnotation)));
            w.Remove(typeof(CoreAnnotations.AfterAnnotation));
            last = w;
        }
    }
    // Only the very last token of the document keeps its After annotation.
    if (last != null)
    {
        last.Set(typeof(CoreAnnotations.AfterAnnotation), after_1);
    }
    return(sentences.GetEnumerator());
}
// Builds a ProjectInfo for all matching documents under folderPath, or null
// when the matcher finds no documents.
// NOTE(review): this method appears truncated in this chunk — the
// ProjectInfo.Create(...) argument list is cut off mid-expression. Verify
// against the original source before editing further.
public virtual async Task <ProjectInfo> LoadProjectInfoAsync(string folderPath, Matcher fileMatcher, CancellationToken cancellationToken) { var projectId = ProjectId.CreateNewId(debugName: folderPath); var documents = await LoadDocumentInfosAsync(projectId, folderPath, FileExtension, fileMatcher); if (documents.IsDefaultOrEmpty) { return(null); } return(ProjectInfo.Create( projectId, version: default,
/// <summary>
/// Parse and construct an
/// <see cref="URIish">URIish</see>
/// from a string
/// </summary>
/// <param name="s">the git URI to parse</param>
/// <exception cref="Sharpen.URISyntaxException">
/// when s is empty/null or matches none of the supported URI shapes
/// </exception>
public URIish(string s)
{
    // The candidate patterns are tried from most to least specific:
    // single-slash file URI, full URI (scheme://user:pass@host:port/path),
    // relative scp-style (user@host:path), absolute scp-style, local file.
    if (StringUtils.IsEmptyOrNull(s))
    {
        throw new URISyntaxException("The uri was empty or null", JGitText.Get().cannotParseGitURIish
                                     );
    }
    Matcher matcher = SINGLE_SLASH_FILE_URI.Matcher(s);
    if (matcher.Matches())
    {
        scheme = matcher.Group(1);
        rawPath = CleanLeadingSlashes(matcher.Group(2), scheme);
        path = Unescape(rawPath);
        return;
    }
    matcher = FULL_URI.Matcher(s);
    if (matcher.Matches())
    {
        scheme = matcher.Group(1);
        user = Unescape(matcher.Group(2));
        pass = Unescape(matcher.Group(3));
        host = Unescape(matcher.Group(4));
        // Group 5 is the optional port.
        if (matcher.Group(5) != null)
        {
            port = System.Convert.ToInt32(matcher.Group(5));
        }
        // Groups 6 and 7 — presumably the user-home prefix and the path
        // proper (per the pattern definition); either may be null.
        rawPath = CleanLeadingSlashes(N2e(matcher.Group(6)) + N2e(matcher.Group(7)), scheme
                                      );
        path = Unescape(rawPath);
        return;
    }
    matcher = RELATIVE_SCP_URI.Matcher(s);
    if (matcher.Matches())
    {
        // scp-style URIs are not percent-escaped, so no Unescape here.
        user = matcher.Group(1);
        pass = matcher.Group(2);
        host = matcher.Group(3);
        rawPath = matcher.Group(4);
        path = rawPath;
        return;
    }
    matcher = ABSOLUTE_SCP_URI.Matcher(s);
    if (matcher.Matches())
    {
        user = matcher.Group(1);
        pass = matcher.Group(2);
        host = matcher.Group(3);
        rawPath = matcher.Group(4);
        path = rawPath;
        return;
    }
    matcher = LOCAL_FILE.Matcher(s);
    if (matcher.Matches())
    {
        rawPath = matcher.Group(1);
        path = rawPath;
        return;
    }
    throw new URISyntaxException(s, JGitText.Get().cannotParseGitURIish);
}
/// <summary>
/// Returns all SDF elements, that match given SDF Path.
/// </summary>
/// <param name="path">SDF Path for elements to match to.</param>
/// <returns>List of all SDF elements matching given SDF Path as <c>Match</c> instances.</returns>
public List<Match> Find(string path)
{
    // Delegate the traversal/matching to the shared Matcher helper.
    var matches = Matcher.Match(this, path);
    return matches;
}
// Walks every active extension, enumerates candidate template files under
// each harvester's sub-paths, and registers a shape binding with the builder
// for every shape the harvesters extract from those files.
public void Discover(ShapeTableBuilder builder)
{
    if (_logger.IsEnabled(LogLevel.Information))
    {
        _logger.LogInformation("Start discovering shapes");
    }
    // Pair each harvester with the sub-paths it wants scanned.
    var harvesterInfos = _harvesters.Select(harvester => new { harvester, subPaths = harvester.SubPaths() });
    var activeFeatures = _featureManager.GetEnabledFeaturesAsync().Result;
    var activeExtensions = Once(activeFeatures);
    var hits = activeExtensions.Select(extensionDescriptor =>
    {
        if (_logger.IsEnabled(LogLevel.Information))
        {
            _logger.LogInformation("Start discovering candidate views filenames");
        }
        // One glob per template extension supported by the view engines.
        var matcher = new Matcher();
        foreach (var extension in _shapeTemplateViewEngines.SelectMany(x => x.TemplateFileExtensions))
        {
            matcher.AddInclude(string.Format("*.{0}", extension));
        }
        // Expand every (harvester, sub-path) pair to the files found there.
        var pathContexts = harvesterInfos.SelectMany(harvesterInfo => harvesterInfo.subPaths.Select(subPath =>
        {
            var basePath = _fileSystem.Combine(extensionDescriptor.Location, extensionDescriptor.Id);
            var virtualPath = _fileSystem.Combine(basePath, subPath);
            var files = _fileSystem.ListFiles(virtualPath, matcher).ToReadOnlyCollection();
            return(new { harvesterInfo.harvester, basePath, subPath, virtualPath, files });
        })).ToList();
        if (_logger.IsEnabled(LogLevel.Information))
        {
            _logger.LogInformation("Done discovering candidate views filenames");
        }
        var fileContexts = pathContexts.SelectMany(pathContext => _shapeTemplateViewEngines.SelectMany(ve =>
        {
            return(pathContext.files.Select(
                       file => new
            {
                fileName = Path.GetFileNameWithoutExtension(file.Name),
                fileVirtualPath = "~/" + _fileSystem.Combine(pathContext.virtualPath, file.Name),
                pathContext
            }));
        }));
        // Ask the owning harvester what shape(s) each candidate file yields.
        var shapeContexts = fileContexts.SelectMany(fileContext =>
        {
            var harvestShapeInfo = new HarvestShapeInfo
            {
                SubPath = fileContext.pathContext.subPath,
                FileName = fileContext.fileName,
                TemplateVirtualPath = fileContext.fileVirtualPath
            };
            var harvestShapeHits = fileContext.pathContext.harvester.HarvestShape(harvestShapeInfo);
            return(harvestShapeHits.Select(harvestShapeHit => new { harvestShapeInfo, harvestShapeHit, fileContext }));
        });
        return(shapeContexts.Select(shapeContext => new { extensionDescriptor, shapeContext }).ToList());
    }).SelectMany(hits2 => hits2);
    foreach (var iter in hits)
    {
        // templates are always associated with the namesake feature of module or theme
        var hit = iter;  // local copy so the lambdas below capture a stable value
        var featureDescriptors = iter.extensionDescriptor.Features.Where(fd => fd.Id == hit.extensionDescriptor.Id);
        foreach (var featureDescriptor in featureDescriptors)
        {
            if (_logger.IsEnabled(LogLevel.Debug))
            {
                _logger.LogDebug("Binding {0} as shape [{1}] for feature {2}", hit.shapeContext.harvestShapeInfo.TemplateVirtualPath, iter.shapeContext.harvestShapeHit.ShapeType, featureDescriptor.Id);
            }
            builder.Describe(iter.shapeContext.harvestShapeHit.ShapeType)
            .From(new Feature { Descriptor = featureDescriptor })
            .BoundAs(
                hit.shapeContext.harvestShapeInfo.TemplateVirtualPath,
                shapeDescriptor => displayContext => RenderAsync(shapeDescriptor, displayContext, hit.shapeContext.harvestShapeInfo, hit.shapeContext.harvestShapeHit));
        }
    }
    if (_logger.IsEnabled(LogLevel.Information))
    {
        _logger.LogInformation("Done discovering shapes");
    }
}
/// <summary>
/// Creates a processing system bound to the given matcher and remembers the
/// scene it was constructed with.
/// </summary>
protected MultiEntityProcessingSystem(Scene scene, Matcher matcher) : base(matcher)
{
    _init_scene = scene;
}
// Builds a single-trivia matcher that accepts trivia of the given kind,
// labelled with the supplied description.
private static Matcher<SyntaxTrivia> Match(SyntaxKind kind, string description)
{
    var singleKindMatcher = Matcher.Single<SyntaxTrivia>(t => t.CSharpKind() == kind, description);
    return singleKindMatcher;
}
public void Initialize()
{
    // Cache the group of entities carrying both the Tank and Id components.
    var tankMatcher = Matcher.AllOf(Matcher.Tank, Matcher.Id);
    tanks = pool.GetGroup(tankMatcher);
}
// Convenience factory: a matcher requiring only ComponentA.
protected Matcher createMatcherA()
{
    var allOfA = Matcher.AllOf(CID.ComponentA);
    return (Matcher)allOfA;
}
public MatchType(string type, Lifetime lifetime)
{
    Lifetime = lifetime;
    // Identity selector: the candidate value itself is compared to `type`.
    _matcher = new Matcher<string>(value => value, type);
}
public TransferSystem(Matcher matcher, Entity player) : base(matcher)
{
    // The system starts enabled and tracks the player entity it transfers to.
    _enabled = true;
    _player = player;
}
// Marshals a native (element_type*, int count) parameter pair into a single
// managed element_type[] parameter. Only int elements are supported and the
// count parameter must be named "num_<array>"; returns false (leaving the
// function untouched) when the shape does not match, true after consuming
// both native arguments.
public bool Apply(IFunctionSpecificationAnalyser aNativeFunction, IFunctionAssembler aAssembler)
{
    // Match the current+next native parameter types against (T*, int).
    var matcher = Matcher.CType(new TupleCType(
                                    aNativeFunction.CurrentParameterType,
                                    aNativeFunction.NextParameterType));
    if (!matcher.Match(new TupleCType(
                           new PointerCType(new VariableCType("element-type")),
                           new NamedCType("int"))))
    {
        return(false);
    }
    var elementType = matcher.BoundVariables["element-type"] as NamedCType;
    if (elementType == null)
    {
        return(false);
    }
    // The count argument must follow the "num_<array>" naming convention.
    string arg1name = aNativeFunction.CurrentParameter.Name;
    string arg2name = aNativeFunction.NextParameter.Name;
    if (arg2name != "num_" + arg1name)
    {
        return(false);
    }
    string managedType;
    switch (elementType.Name)
    {
    case "int":
        managedType = "int";
        break;

    default:
        return(false);
    }
    // Finally, we are sure that this is an array of handles.
    // Now we need to marshal the cursed thing!
    string paramName = aNativeFunction.CurrentParameter.Name;
    aAssembler.AddPInvokeParameter(
        new CSharpType("IntPtr"),
        paramName,
        "array_" + paramName + ".IntPtr");
    aAssembler.AddPInvokeParameter(
        new CSharpType("int"),
        "num_" + paramName,
        "array_" + paramName + ".Length");
    aAssembler.AddManagedParameter(
        paramName,
        new CSharpType(managedType + "[]"));
    // Wrap the native call in a using block that converts the managed array,
    // then copy the (possibly mutated) native contents back afterwards.
    aAssembler.InsertBeforeCall("using (var array_" + paramName + " = SpotifyMarshalling.ArrayToNativeArray(" + paramName + "))");
    aAssembler.InsertBeforeCall("{");
    aAssembler.IncreaseIndent();
    aAssembler.InsertAfterCall("Array.Copy(array_" + paramName + ".Value(), " + paramName + ", " + paramName + ".Length);");
    aAssembler.DecreaseIndent();
    aAssembler.InsertAfterCall("}");
    // Both the pointer and the count native parameters are now handled.
    aNativeFunction.ConsumeArgument();
    aNativeFunction.ConsumeArgument();
    return(true);
}
// NSpec suite for Matcher creation: verifies allOf/anyOf/noneOf matching
// semantics, duplicate-index collapsing, equality, and hash-code behaviour.
void when_creating_matcher()
{
    Entity eA = null;
    Entity eC = null;
    Entity eAB = null;
    Entity eABC = null;
    // Shared fixtures: entities with component sets {A}, {C}, {A,B}, {A,B,C}.
    before = () =>
    {
        eA = this.CreateEntity();
        eC = this.CreateEntity();
        eAB = this.CreateEntity();
        eABC = this.CreateEntity();
        eA.AddComponentA();
        eC.AddComponentC();
        eAB.AddComponentA();
        eAB.AddComponentB();
        eABC.AddComponentA();
        eABC.AddComponentB();
        eABC.AddComponentC();
    };
    context["allOf"] = () =>
    {
        IMatcher m = null;
        // ComponentA appears twice on purpose — duplicates must be collapsed.
        before = () => m = Matcher.AllOf(new [] { CID.ComponentA, CID.ComponentA, CID.ComponentB });
        it["doesn't match"] = () =>
        {
            m.Matches(eA).should_be_false();
        };
        it["matches"] = () =>
        {
            m.Matches(eAB).should_be_true();
            m.Matches(eABC).should_be_true();
        };
        it["gets triggering types without duplicates"] = () =>
        {
            m.indices.Length.should_be(2);
            m.indices.should_contain(CID.ComponentA);
            m.indices.should_contain(CID.ComponentB);
        };
    };
    context["anyOf"] = () =>
    {
        IMatcher m = null;
        before = () => m = Matcher.AnyOf(new [] { CID.ComponentA, CID.ComponentA, CID.ComponentB });
        it["doesn't match"] = () =>
        {
            m.Matches(eC).should_be_false();
        };
        it["matches"] = () =>
        {
            m.Matches(eA).should_be_true();
            m.Matches(eAB).should_be_true();
            m.Matches(eABC).should_be_true();
        };
    };
    context["noneOf"] = () =>
    {
        IMatcher m = null;
        before = () => m = Matcher.NoneOf(new [] { CID.ComponentA, CID.ComponentB });
        it["doesn't match"] = () =>
        {
            m.Matches(eA).should_be_false();
            m.Matches(eAB).should_be_false();
        };
        it["matches"] = () =>
        {
            m.Matches(eC).should_be_true();
            m.Matches(this.CreateEntity()).should_be_true();
        };
    };
    context["equals"] = () =>
    {
        it["equals equal AllOfMatcher"] = () =>
        {
            var m1 = allOfAB();
            var m2 = allOfAB();
            m1.should_not_be_same(m2);
            m1.Equals(m2).should_be_true();
        };
        it["equals equal AllOfMatcher independent from the order of indices"] = () =>
        {
            var m1 = allOfAB();
            var m2 = Matcher.AllOf(new [] { CID.ComponentB, CID.ComponentA });
            m1.should_not_be_same(m2);
            m1.Equals(m2).should_be_true();
        };
        it["doesn't equal different AllOfMatcher"] = () =>
        {
            var m1 = Matcher.AllOf(new [] { CID.ComponentA });
            var m2 = allOfAB();
            m1.Equals(m2).should_be_false();
        };
        it["generates same hash for equal AllOfMatcher"] = () =>
        {
            var m1 = allOfAB();
            var m2 = allOfAB();
            m1.GetHashCode().should_be(m2.GetHashCode());
        };
        it["generates same hash independent from the order of indices"] = () =>
        {
            var m1 = Matcher.AllOf(new [] { CID.ComponentA, CID.ComponentB });
            var m2 = Matcher.AllOf(new [] { CID.ComponentB, CID.ComponentA });
            m1.GetHashCode().should_be(m2.GetHashCode());
        };
        it["AllOfMatcher doesn't equal AnyOfMatcher with same indices"] = () =>
        {
            var m1 = Matcher.AllOf(new [] { CID.ComponentA, CID.ComponentB });
            var m2 = Matcher.AnyOf(new [] { CID.ComponentA, CID.ComponentB });
            m1.Equals(m2).should_be_false();
        };
    };
}
// Looks up the persisted Configuration for the given matcher whose parameter
// set exactly matches patternConfiguration, creating it (with its parameters)
// when absent and refreshing the description when it changed. Saves only if
// something was added or modified.
internal static Configuration GetConfiguration(ParameterizableConfigurationElement patternConfiguration, Matcher matcher)
{
    if (patternConfiguration == null)
    {
        patternConfiguration = new ParameterizableConfigurationElement();
    }
    // Store-side pre-filter: same matcher, same parameter count, and every
    // stored parameter name present in the pattern. Value equality is
    // checked in memory below.
    IQueryable<Configuration> configurations = DataContext.Instance.ConfigurationSet
                                               .Where(c => c.Matcher.MatcherId == matcher.MatcherId &&
                                                      c.Parameters.Count == patternConfiguration.Parameters.Count &&
                                                      c.Parameters.All(p => patternConfiguration.Parameters.AllKeys.Contains(p.Name)));
    Configuration configuration = null;
    foreach (Configuration dbConfiguration in configurations)
    {
        if (dbConfiguration.Parameters.All(p => patternConfiguration.Parameters[p.Name].Value == p.Value))
        {
            configuration = dbConfiguration;
            break;
        }
    }
    bool pendingChanges = false;
    if (configuration == null)
    {
        // No exact match: persist a new configuration plus its parameters.
        configuration = DataContext.Instance.ConfigurationSet.Add(new Configuration { Matcher = matcher, Description = patternConfiguration.Description });
        foreach (NameValueConfigurationElement parameter in patternConfiguration.Parameters)
        {
            configuration.Parameters.Add(new Parameter { Name = parameter.Name, Value = parameter.Value });
        }
        pendingChanges = true;
    }
    else if (configuration.Description != patternConfiguration.Description)
    {
        configuration.Description = patternConfiguration.Description;
        pendingChanges = true;
    }
    if (pendingChanges)
    {
        DataContext.Instance.SaveChanges();
    }
    return(configuration);
}
/// <summary>
/// Forwards the matcher to the base system; no additional state is kept here.
/// </summary>
public MatchedSystem(Matcher matcher) : base(matcher)
{
}
// Test fixture setup: each test starts with a fresh, empty Matcher.
public void SetUp()
{
    _matcher = new Matcher();
}
/// <summary>
/// Create a new <seealso cref="PatternKeywordMarkerFilter"/>, that marks the current
/// token as a keyword if the tokens term buffer matches the provided
/// <seealso cref="Pattern"/> via the <seealso cref="KeywordAttribute"/>.
/// </summary>
/// <param name="in">
///          TokenStream to filter </param>
/// <param name="pattern">
///          the pattern to apply to the incoming term buffer
/// </param>
public PatternKeywordMarkerFilter(TokenStream @in, Pattern pattern)
    : base(@in)
{
    // FIX: this overload never acquired the term attribute (the identical
    // constructor earlier in this file does), leaving termAtt null and
    // failing as soon as the filter inspects the term buffer.
    termAtt = AddAttribute<ICharTermAttribute>();
    // Matcher over an initially empty input; it is re-targeted to each
    // incoming term buffer during filtering.
    this.matcher = pattern.matcher("");
}
// Method to simulate the fetching of data from the wtx device: on first use
// it seeds the connection's data buffer with all well-known paths, then it
// raises the fetch callback with a simulated JToken event.
public JObject Fetch(out FetchId id, Matcher matcher, Action<JToken> fetchCallback, Action<bool, JToken> responseCallback, double responseTimeoutMs)
{
    path = "";
    Event = "";
    data = 0;
    // Seed the buffer exactly once; "6144/00" (gross value) is the sentinel.
    if (!_connection._dataBuffer.ContainsKey("6144/00"))
    {
        // Paths and their seed values, in the original insertion order.
        // Notable entries: "6144/00" gross value, "601A/01" net value,
        // "6153/00" weight moving detection, "6012/01" weight status,
        // "6013/01" weight decimal point, "6002/02" status,
        // "2020/25" limit value status, "2020/18".."2020/21" digital I/O,
        // "6014/01"/"6015/01" unit & prefix parameters, "6152/00" capacity.
        string[] bufferPaths =
        {
            "6144/00", "601A/01", "6153/00", "6012/01", "SDO",     "FRS1",    "NDS",     "6013/01",
            "IM1",     "IM2",     "IM3",     "IM4",     "OM1",     "OM2",     "OM3",     "OM4",
            "OS1",     "OS2",     "OS3",     "OS4",     "CFT",     "FFT",     "TMD",     "UTL",
            "LTL",     "MSW",     "EWT",     "TAD",     "CBT",     "CBK",     "FBK",     "FBT",
            "SYD",     "VCT",     "EMD",     "CFD",     "FFD",     "SDM",     "SDS",     "RFT",
            "MDT",     "FFM",     "OSN",     "FFL",     "DL1",
            "6002/02", "2020/25", "2010/07", "2110/06", "2110/07",
            "2020/18", "2020/19", "2020/1A", "2020/1B",
            "2020/1E", "2020/1F", "2020/20", "2020/21",
            "6014/01", "6002/01", "6152/00", "6015/01"
        };
        int[] bufferValues =
        {
            0,          0,          1,          1,          1,          1,          1,          4,
            1,          1,          1,          1,          1,          1,          1,          1,
            1,          1,          1,          1,          1,          1,          1,          1,
            1,          1,          1,          1,          1,          1,          1,          1,
            1,          1,          1,          1,          1,          1,          1,          1,
            1,          1,          1,          1,          1,
            1801543519, 0xA,        1,          1,          1,
            0,          0,          0,          0,
            0,          0,          0,          0,
            0x4C0000,   0,          15000,      0x004C0000
        };
        for (int i = 0; i < bufferPaths.Length; i++)
        {
            _connection._dataBuffer.Add(bufferPaths[i], simulateJTokenInstance(bufferPaths[i], "add", bufferValues[i])["value"]);
        }
    }
    // For the different unit cases : lb, g, kg, t
    JToken JTokenobj = simulateJTokenInstance(path, Event, data);
    id = null;
    fetchCallback = (JToken x) => _connection.OnFetchData(JTokenobj);
    fetchCallback.Invoke(JTokenobj);
    return JTokenobj.ToObject<JObject>();
}
// Builds a TimeValue from an already-matched time expression: the local time
// part plus an offset (falling back to the supplied default zone).
private static TimeValue Parse(Matcher matcher, System.Func<ZoneId> defaultZone)
{
    var localTime = parseTime(matcher);
    var zoneOffset = ParseOffset(matcher, defaultZone);
    return new TimeValue(OffsetTime.of(localTime, zoneOffset));
}
// Scans the environment path for DNX projects: a project.json directly in
// the folder wins; otherwise a recursive search runs (optionally narrowed by
// the configured project globs on DNX451). Returns true when at least one
// project was registered with the context.
private bool ScanForProjects()
{
    _logger.LogInformation(string.Format("Scanning '{0}' for DNX projects", _env.Path));
    var anyProjects = false;
    // Single project in this folder
    var projectInThisFolder = Path.Combine(_env.Path, "project.json");
    if (File.Exists(projectInThisFolder))
    {
        if (_context.TryAddProject(projectInThisFolder))
        {
            _logger.LogInformation(string.Format("Found project '{0}'.", projectInThisFolder));
            anyProjects = true;
        }
    }
    else
    {
        IEnumerable<string> paths;
#if DNX451
        if (_options.Projects != "**/project.json")
        {
            // Custom glob(s): expand them relative to the environment path.
            var matcher = new Matcher();
            matcher.AddIncludePatterns(_options.Projects.Split(';'));
            paths = matcher.GetResultsInFullPath(_env.Path);
        }
        else
        {
            paths = _directoryEnumerator.SafeEnumerateFiles(_env.Path, "project.json");
        }
#else
        // The matcher works on CoreCLR but Omnisharp still targets aspnetcore50 instead of
        // dnxcore50
        paths = _directoryEnumerator.SafeEnumerateFiles(_env.Path, "project.json");
#endif
        foreach (var path in paths)
        {
            // A result may be the project.json file itself or its directory.
            string projectFile = null;
            if (Path.GetFileName(path) == "project.json")
            {
                projectFile = path;
            }
            else
            {
                projectFile = Path.Combine(path, "project.json");
                if (!File.Exists(projectFile))
                {
                    projectFile = null;
                }
            }
            if (string.IsNullOrEmpty(projectFile))
            {
                continue;
            }
            if (!_context.TryAddProject(projectFile))
            {
                continue;
            }
            _logger.LogInformation(string.Format("Found project '{0}'.", projectFile));
            anyProjects = true;
        }
    }
    return(anyProjects);
}