public FileSystem()
{
    // Default to the local file provider with the conventional "theme" and "input" roots.
    FileProvider = new LocalFileProvider();
    InputPaths = new PathCollection(
        new NormalizedPath("theme"),
        new NormalizedPath("input"));
}
public async Task Work_ScriptCreationStateModel_VerificationFailed_PublishProfilePathIsNotFilled_Async()
{
    // Arrange: the publish profile path in the source paths is empty.
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    var baseVersion = new Version(1, 0);
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var stateModel = new ScriptCreationStateModel(testProject, defaultConfig, baseVersion, true, OnWorkInProgressChanged)
    {
        Paths = new PathCollection(dirPaths, srcPaths, tgtPaths)
    };
    var fileSystemMock = new Mock<IFileSystemAccess>();
    var logMock = new Mock<ILogger>();
    logMock.SetupGet(m => m.DocumentationBaseUrl).Returns("foobasebar");
    IWorkUnit<ScriptCreationStateModel> workUnit = new VerifyPathsUnit(fileSystemMock.Object, logMock.Object);

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert: verification fails, the keyword hint is logged, and the file system is never touched.
    Assert.AreEqual(StateModelState.PathsVerified, stateModel.CurrentState);
    Assert.IsFalse(stateModel.Result);
    logMock.Verify(m => m.LogErrorAsync(It.Is<string>(s => s.Contains(ConfigurationModel.UseSinglePublishProfileSpecialKeyword) && s.Contains("foobasebarpublish-profile-path"))), Times.Once);
    fileSystemMock.Verify(m => m.CheckIfFileExists(It.IsAny<string>()), Times.Never);
}
public async Task Work_ScriptCreationStateModel_NoRepositoryConfigured_Async()
{
    // Arrange: default configuration, i.e. no shared DACPAC repository set up.
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    var baseVersion = new Version(1, 2, 3);
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var stateModel = new ScriptCreationStateModel(testProject, defaultConfig, baseVersion, true, OnWorkInProgressChanged)
    {
        Paths = new PathCollection(dirPaths, srcPaths, tgtPaths)
    };
    var fileSystemMock = new Mock<IFileSystemAccess>();
    var logMock = new Mock<ILogger>();
    IWorkUnit<ScriptCreationStateModel> workUnit = new CopyDacpacToSharedDacpacRepositoryUnit(fileSystemMock.Object, logMock.Object);

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert: the unit is a silent no-op when no repository is configured.
    Assert.AreEqual(StateModelState.TriedToCopyDacpacToSharedDacpacRepository, stateModel.CurrentState);
    Assert.IsNull(stateModel.Result);
    fileSystemMock.Verify(m => m.EnsureDirectoryExists(It.IsAny<string>()), Times.Never);
    fileSystemMock.Verify(m => m.CopyFile(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
    logMock.Verify(m => m.LogErrorAsync(It.IsAny<string>()), Times.Never);
    logMock.Verify(m => m.LogErrorAsync(It.IsAny<Exception>(), It.IsAny<string>()), Times.Never);
}
public void ParseExact_should_generate_correct_values(string input, string format)
{
    // Act: parse the raw input with the supplied format string.
    var parsed = PathCollection.ParseExact(input, format);

    // Assert: both well-known bin directories must be present in the result.
    Assert.Contains("/usr/local/bin", parsed);
    Assert.Contains("/usr/bin", parsed);
}
public async Task Work_ScriptCreationStateModel_CompleteRun_Async()
{
    // Arrange
    var fileSystemMock = new Mock<IFileSystemAccess>();
    var logMock = new Mock<ILogger>();
    IWorkUnit<ScriptCreationStateModel> workUnit = new CleanNewArtifactsDirectoryUnit(fileSystemMock.Object, logMock.Object);
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    var baseVersion = new Version(1, 0);
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var stateModel = new ScriptCreationStateModel(testProject, defaultConfig, baseVersion, true, OnWorkInProgressChanged)
    {
        Paths = new PathCollection(dirPaths, srcPaths, tgtPaths)
    };

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert: the new artifacts directory is cleaned and the step is logged exactly once.
    Assert.AreEqual(StateModelState.TriedToCleanArtifactsDirectory, stateModel.CurrentState);
    Assert.IsNull(stateModel.Result);
    fileSystemMock.Verify(m => m.TryToCleanDirectory("newArtifactsDirectory"), Times.Once);
    logMock.Verify(m => m.LogInfoAsync(It.IsNotNull<string>()), Times.Once);
}
public async Task Modify_ReplaceSpecialKeyword_PreviousVersion_Async()
{
    // Arrange: the custom header contains the {PREVIOUS_VERSION} placeholder.
    IScriptModifier headerModifier = new AddCustomHeaderModifier();
    const string inputScript = "foobar";
    var testProject = new SqlProject("a", "b", "c");
    testProject.ProjectProperties.DacVersion = new Version(1, 3, 0);
    var config = new ConfigurationModel
    {
        CustomHeader = "Script base version: {PREVIOUS_VERSION}"
    };
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var allPaths = new PathCollection(dirPaths, srcPaths, tgtPaths);
    var modificationModel = new ScriptModificationModel(inputScript, testProject, config, allPaths, new Version(1, 2, 0), false);

    // Act
    await headerModifier.ModifyAsync(modificationModel);

    // Assert: the placeholder is replaced by the previous version and prepended to the script.
    Assert.IsNotNull(modificationModel.CurrentScript);
    Assert.AreEqual("Script base version: 1.2.0\r\nfoobar", modificationModel.CurrentScript);
}
public ScssPlugin(SiteObject site) : base(site)
{
    // Expose the include paths both as a property and as the site-level "includes" value.
    Includes = new PathCollection();
    SetValue("includes", Includes, true);
    site.SetValue("scss", this, true);

    // Register the SCSS content processor with the site's content pipeline.
    site.Content.ContentProcessors.Add(new ScssProcessor(this));
}
private async Task TryToDeleteRefactorLogInternal(IStateModel stateModel, SqlProject project, ConfigurationModel configuration, PathCollection paths)
{
    // Skip the whole step when deletion is disabled in the configuration.
    if (!configuration.DeleteRefactorlogAfterVersionedScriptGeneration)
    {
        stateModel.CurrentState = StateModelState.DeletedRefactorLog;
        return;
    }

    await _logger.LogInfoAsync("Deleting refactorlog files ...");
    var removedFiles = _fileSystemAccess.TryToCleanDirectory(paths.Directories.ProjectDirectory, "*.refactorlog");
    if (removedFiles.Length == 0)
    {
        await _logger.LogTraceAsync("No files were deleted.");
    }
    else
    {
        // Keep the Visual Studio project tree in sync with the deleted files.
        foreach (var removedFile in removedFiles)
        {
            _visualStudioAccess.RemoveItemFromProjectRoot(project, removedFile);
            await _logger.LogTraceAsync($"Deleted file {removedFile} ...");
        }
    }

    stateModel.CurrentState = StateModelState.DeletedRefactorLog;
}
public async Task Work_ScaffoldingStateModel_CopyFailed_Async()
{
    // Arrange: the build service reports that copying the build result failed.
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    var scaffoldVersion = new Version(1, 2, 3);
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var allPaths = new PathCollection(dirPaths, srcPaths, tgtPaths);
    var stateModel = new ScaffoldingStateModel(testProject, defaultConfig, scaffoldVersion, OnWorkInProgressChanged)
    {
        Paths = allPaths
    };
    var buildServiceMock = new Mock<IBuildService>();
    buildServiceMock.Setup(m => m.CopyBuildResultAsync(testProject, allPaths.Directories.NewArtifactsDirectory)).ReturnsAsync(false);
    IWorkUnit<ScaffoldingStateModel> workUnit = new CopyBuildResultUnit(buildServiceMock.Object);

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert: the state advances but the result reflects the failed copy.
    Assert.AreEqual(StateModelState.TriedToCopyBuildResult, stateModel.CurrentState);
    Assert.IsFalse(stateModel.Result);
}
private void RecalculateAllPaths()
{
    PathCollection.Clear();
    foreach (var origin in StructureManager)
    {
        // Make sure a destination map exists for this origin node.
        if (!PathCollection.ContainsKey(origin.Position))
        {
            PathCollection.Add(origin.Position, new Dictionary<HexagonNode, Path>());
        }

        foreach (var target in StructureManager)
        {
            try
            {
                if (PathCollection[origin.Position].ContainsKey(target.Position))
                {
                    continue;
                }

                var computedPath = new Path(PathFinding.AStar(origin.Position, target.Position).ToArray());
                AddPath(computedPath);

                // Longer paths implicitly contain shorter sub-paths; register those too.
                if (computedPath.AllHops.Count > 2)
                {
                    foreach (var subPath in computedPath.GetContainedPaths())
                    {
                        AddPath(subPath);
                    }
                }
            }
            catch (NoPathFoundException<HexagonNode>)
            {
                // Unreachable destinations are simply skipped.
            }
        }
    }
}
public async Task Work_ScaffoldingStateModel_LoadedSuccessful_Async()
{
    // Arrange: the project service successfully resolves all scaffolding paths.
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    var scaffoldVersion = new Version(1, 2, 3);
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var stateModel = new ScaffoldingStateModel(testProject, defaultConfig, scaffoldVersion, OnWorkInProgressChanged);
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var allPaths = new PathCollection(dirPaths, srcPaths, tgtPaths);
    var projectServiceMock = new Mock<ISqlProjectService>();
    projectServiceMock.Setup(m => m.TryLoadPathsForScaffoldingAsync(testProject, defaultConfig)).ReturnsAsync(allPaths);
    IWorkUnit<ScaffoldingStateModel> workUnit = new LoadPathsUnit(projectServiceMock.Object);

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert
    Assert.AreEqual(StateModelState.PathsLoaded, stateModel.CurrentState);
    Assert.IsNull(stateModel.Result);
}
private async Task VerifyPathsInternal(IStateModel stateModel, PathCollection paths)
{
    await _logger.LogInfoAsync("Verifying paths ...");

    var profilePath = paths.DeploySources.PublishProfilePath;
    if (string.IsNullOrWhiteSpace(profilePath))
    {
        // An empty profile path means the single-profile keyword could not be resolved.
        stateModel.Result = false;
        stateModel.CurrentState = StateModelState.PathsVerified;
        await _logger.LogErrorAsync("Failed to find publish profile. "
                                  + $"The {nameof(ConfigurationModel.PublishProfilePath)} is set to \"{ConfigurationModel.UseSinglePublishProfileSpecialKeyword}\", but there's more than one publish profile in the directory. "
                                  + $"Please read the documentation at {_logger.DocumentationBaseUrl}publish-profile-path for more details.");
        return;
    }

    if (_fileSystemAccess.CheckIfFileExists(profilePath))
    {
        stateModel.CurrentState = StateModelState.PathsVerified;
        return;
    }

    // The path is filled but no file exists on disk at that location.
    stateModel.Result = false;
    stateModel.CurrentState = StateModelState.PathsVerified;
    await _logger.LogErrorAsync($"Failed to find publish profile at \"{profilePath}\". "
                              + $"Please read the documentation at {_logger.DocumentationBaseUrl}publish-profile-path for more details.");
}
public void IsRequestAuthorized_rawUrlIsExcludedButIncludedBeforeThat_shouldDisallowRequest()
{
    // Arrange: the catch-all include rule precedes the more specific exclude rule.
    var configuredPaths = new PathCollection
    {
        new PathConfig { Name = "included", Path = ".*", Type = PathConfig.PathType.Include },
        new PathConfig { Name = "excluded", Path = "/public/.*", Type = PathConfig.PathType.Exclude }
    };
    var hmacHttpService = new HmacHttpService(ServiceId.ToString(), configuredPaths, "signature")
    {
        UserRepository = new Mock<IUserRepository>().Object,
        AppRepository = new Mock<IAppRepository>().Object,
        HmacService = new HmacSha256Service()
    };

    // Act
    var statusCode = hmacHttpService.IsRequestAuthorized(GetValidRawUrl(false), CreateInvalidQueryString());

    // Assert
    Assert.AreEqual(StatusCode.ParameterMissing, statusCode);
}
public SearchPlugin(SiteObject site, BundlePlugin bundlePlugin, ResourcePlugin resourcePlugin) : base(site)
{
    BundlePlugin = bundlePlugin;
    ResourcePlugin = resourcePlugin;

    // Search is opt-in; engine and URL start at their defaults until overridden.
    Enable = false;
    Engine = DefaultKind;
    Url = (string)DefaultUrl;
    SearchEngines = new List<SearchEngine>()
    {
        new LunrSearchEngine(this),
        new SqliteSearchEngine(this)
    };
    Excludes = new PathCollection();
    SetValue("excludes", Excludes, true);
    site.SetValue("search", this, true);

    var processor = new SearchProcessorDispatch(this);
    site.Content.BeforeLoadingProcessors.Add(processor);
    // It is important to insert the processor at the beginning
    // because we output values used by the BundlePlugin
    site.Content.BeforeProcessingProcessors.Insert(0, processor);
    site.Content.AfterRunningProcessors.Add(processor);
}
public async Task Work_ScaffoldingStateModel_InvalidCharsInNewDacpacPath_Async()
{
    // Arrange: the new DACPAC path contains characters that are invalid in file paths.
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    defaultConfig.SharedDacpacRepositoryPath = "C:\\Temp\\Test\\";
    var scaffoldVersion = new Version(1, 2, 3);
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath" + new string(Path.GetInvalidPathChars()), "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var stateModel = new ScaffoldingStateModel(testProject, defaultConfig, scaffoldVersion, OnWorkInProgressChanged)
    {
        Paths = new PathCollection(dirPaths, srcPaths, tgtPaths)
    };
    var fileSystemMock = new Mock<IFileSystemAccess>();
    var logMock = new Mock<ILogger>();
    IWorkUnit<ScaffoldingStateModel> workUnit = new CopyDacpacToSharedDacpacRepositoryUnit(fileSystemMock.Object, logMock.Object);

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert: the copy fails before any file system access takes place.
    Assert.AreEqual(StateModelState.TriedToCopyDacpacToSharedDacpacRepository, stateModel.CurrentState);
    Assert.IsFalse(stateModel.Result);
    fileSystemMock.Verify(m => m.EnsureDirectoryExists(It.IsAny<string>()), Times.Never);
    fileSystemMock.Verify(m => m.CopyFile(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
    logMock.Verify(m => m.LogInfoAsync("Copying DACPAC to shared DACPAC repository ..."), Times.Once);
    logMock.Verify(m => m.LogErrorAsync(It.Is<string>(s => s.StartsWith("Failed to copy DACPAC to shared DACPAC repository: "))), Times.Once);
}
public void IsRequestAuthorized_includeParametersThatHaveNotBeenUsedForSignatureGeneration_ignoreTheseAdditionalParameters_shouldAllowRequest()
{
    // Arrange: the second include rule ignores p1/p2 during signature validation.
    var configuredPaths = new PathCollection
    {
        new PathConfig { Name = "included-1", Path = "/public/.*", Type = PathConfig.PathType.Include },
        new PathConfig { Name = "included-2", Path = ".*", Type = PathConfig.PathType.Include, ExcludeParameters = "p1, p2" }
    };
    var hmacHttpService = CreateService(DefaultSignatureParameterKey, false, configuredPaths);

    // Act: add two parameters that were never part of the signature computation.
    var queryString = CreateValidQueryString();
    queryString.Add("p1", "v1");
    queryString.Add("p2", "v2");
    var resultWithExclusion = hmacHttpService.IsRequestAuthorized(GetValidRawUrl(), queryString);
    var resultWithoutExclusion = hmacHttpService.IsRequestAuthorized(GetValidRawUrl(false), queryString);

    // Assert: only the rule that excludes the extra parameters authorizes the request.
    Assert.AreEqual(StatusCode.Authorized, resultWithExclusion);
    Assert.AreEqual(StatusCode.InvalidSignature, resultWithoutExclusion);
}
public async Task Work_ScriptCreationStateModel_CompleteRun_NoFilesDeleted_ConfigurationDisabled_Async()
{
    // Arrange: refactorlog deletion is switched off in the configuration.
    var fileSystemMock = new Mock<IFileSystemAccess>();
    var studioMock = new Mock<IVisualStudioAccess>();
    var logMock = new Mock<ILogger>();
    IWorkUnit<ScriptCreationStateModel> workUnit = new DeleteRefactorLogUnit(fileSystemMock.Object, studioMock.Object, logMock.Object);
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    defaultConfig.DeleteRefactorlogAfterVersionedScriptGeneration = false;
    var baseVersion = new Version(1, 0);
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var stateModel = new ScriptCreationStateModel(testProject, defaultConfig, baseVersion, true, OnWorkInProgressChanged)
    {
        Paths = new PathCollection(dirPaths, srcPaths, tgtPaths)
    };

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert: nothing is deleted, removed from the project, or logged.
    Assert.AreEqual(StateModelState.DeletedRefactorLog, stateModel.CurrentState);
    Assert.IsNull(stateModel.Result);
    fileSystemMock.Verify(m => m.TryToCleanDirectory(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
    studioMock.Verify(m => m.RemoveItemFromProjectRoot(testProject, It.IsAny<string>()), Times.Never);
    logMock.Verify(m => m.LogInfoAsync(It.IsAny<string>()), Times.Never);
    logMock.Verify(m => m.LogTraceAsync(It.IsAny<string>()), Times.Never);
}
public async Task Work_ScriptCreationStateModel_FailedToWriteModifiedScript_Async()
{
    // Arrange: reading the deployment script succeeds, writing the modified script throws.
    var writeException = new IOException("foo");
    const string baseScript = "foo bar";
    const string expectedResultScript = "foo bar ab";
    var headerModifierMock = new Mock<IScriptModifier>();
    var footerModifierMock = new Mock<IScriptModifier>();
    var providerMock = new Mock<IScriptModifierProviderService>();
    var fileSystemMock = new Mock<IFileSystemAccess>();
    fileSystemMock.Setup(m => m.ReadFileAsync("deployScriptPath"))
                  .ReturnsAsync(baseScript);
    fileSystemMock.Setup(m => m.WriteFileAsync("deployScriptPath", It.IsAny<string>()))
                  .Throws(writeException);
    var logMock = new Mock<ILogger>();
    IWorkUnit<ScriptCreationStateModel> workUnit = new ModifyDeploymentScriptUnit(providerMock.Object, fileSystemMock.Object, logMock.Object);
    var testProject = new SqlProject("a", "b", "c");
    var defaultConfig = ConfigurationModel.GetDefault();
    var baseVersion = new Version(1, 0);
    const bool createLatest = false;
    Task OnWorkInProgressChanged(bool inProgress) => Task.CompletedTask;
    var dirPaths = new DirectoryPaths("projectDirectory", "latestArtifactsDirectory", "newArtifactsDirectory");
    var srcPaths = new DeploySourcePaths("newDacpacPath", "publishProfilePath", "previousDacpacPath");
    var tgtPaths = new DeployTargetPaths("deployScriptPath", "deployReportPath");
    var allPaths = new PathCollection(dirPaths, srcPaths, tgtPaths);
    var stateModel = new ScriptCreationStateModel(testProject, defaultConfig, baseVersion, createLatest, OnWorkInProgressChanged)
    {
        Paths = allPaths
    };
    providerMock.Setup(m => m.GetScriptModifiers(defaultConfig)).Returns(new Dictionary<ScriptModifier, IScriptModifier>
    {
        { ScriptModifier.AddCustomFooter, footerModifierMock.Object },
        { ScriptModifier.AddCustomHeader, headerModifierMock.Object }
    });
    // The header modifier appends " a" and the footer modifier appends "b",
    // so the script handed to WriteFileAsync must be "foo bar ab".
    headerModifierMock.Setup(m => m.ModifyAsync(It.IsNotNull<ScriptModificationModel>()))
                      .Callback((ScriptModificationModel modificationModel) => modificationModel.CurrentScript += " a")
                      .Returns(Task.CompletedTask);
    footerModifierMock.Setup(m => m.ModifyAsync(It.IsNotNull<ScriptModificationModel>()))
                      .Callback((ScriptModificationModel modificationModel) => modificationModel.CurrentScript += "b")
                      .Returns(Task.CompletedTask);

    // Act
    await workUnit.Work(stateModel, CancellationToken.None);

    // Assert: both modifiers ran, the write failure is logged, and the result is false.
    Assert.AreEqual(StateModelState.ModifiedDeploymentScript, stateModel.CurrentState);
    Assert.IsFalse(stateModel.Result);
    providerMock.Verify(m => m.GetScriptModifiers(It.IsAny<ConfigurationModel>()), Times.Once);
    fileSystemMock.Verify(m => m.ReadFileAsync(allPaths.DeployTargets.DeployScriptPath), Times.Once);
    fileSystemMock.Verify(m => m.WriteFileAsync(allPaths.DeployTargets.DeployScriptPath, expectedResultScript), Times.Once);
    logMock.Verify(m => m.LogInfoAsync(It.IsAny<string>()), Times.Exactly(2));
    headerModifierMock.Verify(m => m.ModifyAsync(It.IsNotNull<ScriptModificationModel>()), Times.Once);
    footerModifierMock.Verify(m => m.ModifyAsync(It.IsNotNull<ScriptModificationModel>()), Times.Once);
    logMock.Verify(m => m.LogErrorAsync(writeException, "Failed to write the modified script"), Times.Once);
}
/// <inheritdoc/>
public RectangleF GetPath(ICollection<IPath> output)
{
    // Copy every contained path into the caller-provided collection.
    foreach (var containedPath in _paths)
    {
        output.Add(containedPath);
    }

    // The returned rectangle is the combined bounds of all contained paths.
    return PathCollection.GetBounds(_paths);
}
public void Should_Use_PathComparer_Default_If_Comparer_Is_Null()
{
    // Given: a collection created without an explicit comparer.
    var pathCollection = new PathCollection();

    // Then: it falls back to the default path comparer instance.
    pathCollection.Comparer.ShouldBeSameAs(PathComparer.Default);
}
public void Should_Use_PathComparer_Default_If_Comparer_Is_Null()
{
    // Given: a collection created without an explicit comparer.
    var pathCollection = new PathCollection();

    // Then: the default comparer is in effect.
    Assert.Equal(PathComparer.Default, pathCollection.Comparer);
}
public void ShouldReturnTheNumberOfPathsInTheCollection()
{
    // Given: a collection seeded with two paths.
    var pathCollection = new PathCollection(new[] { _upperCaseA, _upperCaseB });

    // When, Then
    Assert.AreEqual(2, pathCollection.Count);
}
/// <summary>
/// Replaces the template placeholders $(FileName), $(Package) and $(FileNameWithPackage)
/// in <paramref name="args"/> based on the last file generated from a template
/// (read from the QuickGenerator settings). Clears the remembered template file afterwards.
/// </summary>
/// <param name="project">Current project used to resolve classpaths; may be null.</param>
/// <param name="args">Raw argument string that may contain template placeholders.</param>
/// <returns>The argument string with all placeholders substituted.</returns>
public static string ProcessArgs(Project project, string args)
{
    lastFileFromTemplate = QuickGenerator.QuickSettings.GenerateClass.LastFileFromTemplate;
    lastFileOptions = QuickGenerator.QuickSettings.GenerateClass.LastFileOptions;
    if (lastFileFromTemplate != null)
    {
        string fileName = Path.GetFileNameWithoutExtension(lastFileFromTemplate);
        args = args.Replace("$(FileName)", fileName);
        // Package resolution is only needed when a package-related placeholder is present.
        if (args.Contains("$(FileNameWithPackage)") || args.Contains("$(Package)"))
        {
            string package = "";
            string path = lastFileFromTemplate;
            // Find closest parent
            string classpath="";
            if(project!=null) classpath = project.AbsoluteClasspaths.GetClosestParent(path);
            // Can't find parent, look in global classpaths
            // NOTE(review): when project is null, classpath stays "" (not null), so this
            // global-classpath fallback never runs in that case — confirm this is intentional.
            if (classpath == null)
            {
                PathCollection globalPaths = new PathCollection();
                foreach (string cp in ProjectManager.PluginMain.Settings.GlobalClasspaths)
                    globalPaths.Add(cp);
                classpath = globalPaths.GetClosestParent(path);
            }
            if (classpath != null)
            {
                if (project != null)
                {
                    // Parse package name from path
                    package = Path.GetDirectoryName(ProjectPaths.GetRelativePath(classpath, path));
                    package = package.Replace(Path.DirectorySeparatorChar, '.');
                }
            }
            args = args.Replace("$(Package)", package);
            // $(FileNameWithPackage) is "package.fileName" only when a package was resolved.
            if (package.Length!=0)
                args = args.Replace("$(FileNameWithPackage)", package + "." + fileName);
            else
                args = args.Replace("$(FileNameWithPackage)", fileName);
            if (lastFileOptions != null)
            {
                // Apply interactive file-template options; keep them only while a
                // processOnSwitch operation is still pending.
                args = ProcessFileTemplate(args);
                if (processOnSwitch == null) lastFileOptions = null;
            }
        }
        // Forget the template file so subsequent calls do not re-process it.
        lastFileFromTemplate = null;
    }
    return args;
}
public FileSystem()
{
    // Start with the local provider and the conventional "theme" + "input" roots.
    FileProviders = new FileProviderCollection(new LocalFileProvider());
    InputPaths = new PathCollection<DirectoryPath>(new[]
    {
        new DirectoryPath("theme"),
        new DirectoryPath("input")
    });
}
public FileSystem()
{
    // Wire up the default local provider and the two conventional input roots.
    FileProviders = new FileProviderCollection(new LocalFileProvider());
    var defaultRoots = new[] { new DirectoryPath("theme"), new DirectoryPath("input") };
    InputPaths = new PathCollection<DirectoryPath>(defaultRoots);
}
/// <summary>
/// Stitches the given tile range into a single texture cropped to <paramref name="bbox"/>,
/// draws the GPX track on top (optionally marking each vertex), and saves the result
/// to <paramref name="fileName"/>.
/// </summary>
/// <param name="color">Track color; red is used when left at the default value.</param>
/// <param name="lineWidth">Stroke width of the track polyline, in pixels.</param>
/// <returns>Metadata describing the generated texture file.</returns>
public TextureInfo ConstructTextureWithGpxTrack(TileRange tiles, BoundingBox bbox, string fileName, TextureImageFormat mimeType, IEnumerable <GeoPoint> gpxPoints, bool drawGpxVertices = false, Rgba32 color = default(Rgba32), float lineWidth = 5f)
{
    // where is the bbox in the final image ?

    // get pixel in full map
    int zoomLevel = tiles.Tiles.First().TileInfo.Zoom;
    var projectedBbox = ConvertWorldToMap(bbox, zoomLevel, tiles.TileSize);
    var tilesBbox = GetTilesBoundingBox(tiles);
    // Offset between the tile grid origin and the requested bounding box origin.
    int xOffset = (int)(tilesBbox.xMin - projectedBbox.xMin);
    int yOffset = (int)(tilesBbox.yMin - projectedBbox.yMin);

    //DrawDebugBmpBbox(tiles, localBbox, tilesBbox, fileName, mimeType);
    int tileSize = tiles.TileSize;

    // Project each GPX point into global pixel space, then translate into
    // image-local coordinates. NOTE: this is a deferred LINQ query; it is
    // re-evaluated each time it is enumerated below.
    var pointsOnTexture = gpxPoints
        .Select(pt => TileUtils.PositionToGlobalPixel(new LatLong(pt.Latitude, pt.Longitude), zoomLevel, tiles.TileSize))
        .Select(pt => new PointF((float)(pt.X - (int)projectedBbox.xMin), (float)(pt.Y - (int)projectedBbox.yMin)));

    using (Image <Rgba32> outputImage = new Image <Rgba32>((int)projectedBbox.Width, (int)projectedBbox.Height))
    {
        // Blit every tile at its offset inside the output image.
        foreach (var tile in tiles.Tiles)
        {
            using (Image <Rgba32> tileImg = Image.Load(tile.Image))
            {
                int x = (tile.TileInfo.X - tiles.Start.X) * tileSize + xOffset;
                int y = (tile.TileInfo.Y - tiles.Start.Y) * tileSize + yOffset;
                outputImage.Mutate(o => o
                    .DrawImage(tileImg, new Point(x, y), 1f)
                );
            }
        }

        // Draw the track polyline; fall back to red when no color was supplied.
        outputImage.Mutate(o => o
            .DrawLines(color == default(Rgba32) ? new Rgba32(1, 0, 0, 1f) : color, lineWidth, pointsOnTexture.ToArray())
        );
        if (drawGpxVertices)
        {
            // Mark each track point with a small ellipse outline.
            PathCollection pc = new PathCollection(pointsOnTexture.Select(p => new EllipsePolygon(p, new SizeF(10f, 10f))));
            outputImage.Mutate(o => o.Draw(GraphicsOptions.Default, Pens.Solid(Rgba32.Violet, 3), pc));
        }

        // with encoder
        //IImageEncoder encoder = ConvertFormat(mimeType);
        //outputImage.Save(fileName, encoder);
        outputImage.Save(fileName);
    }
    return(new TextureInfo(fileName, mimeType, (int)projectedBbox.Width, (int)projectedBbox.Height, zoomLevel, projectedBbox));
}
public void Should_Return_The_Number_Of_Paths_In_The_Collection()
{
    // Given: four distinct paths under a case-insensitive comparer.
    var pathCollection = new PathCollection(
        new Path[] { new DirectoryPath("A"), new FilePath("A.txt"), new DirectoryPath("B"), new FilePath("B.txt") },
        new PathComparer(false));

    // When, Then
    Assert.Equal(4, pathCollection.Count);
}
// Cleans the "new artifacts" directory as a best-effort step.
private async Task CleanArtifactsDirectoryInternal(IStateModel stateModel, PathCollection paths)
{
    await _logger.LogInfoAsync("Cleaning artifacts directory ...");

    // Even if this operation fails, there's no reason to make the whole process fail.
    // Therefore this will not set the stateModel.Result property.
    _fileSystemAccess.TryToCleanDirectory(paths.Directories.NewArtifactsDirectory);

    stateModel.CurrentState = StateModelState.TriedToCleanArtifactsDirectory;
}
public void Should_Respect_File_System_Case_Sensitivity_When_Adding_FilePath(bool caseSensitive, int expectedCount)
{
    // Given
    var pathCollection = new PathCollection(new FilePath[] { "A.TXT" }, new PathComparer(caseSensitive));

    // When: the same file name is added in a different casing.
    pathCollection.Add(new FilePath("a.txt"));

    // Then: the count depends on whether the comparer is case sensitive.
    Assert.Equal(expectedCount, pathCollection.Count);
}
public void Should_Add_FilePath_If_Not_Already_Present()
{
    // Given
    var pathCollection = new PathCollection(new FilePath[] { "A.txt" }, new PathComparer(false));

    // When: a path not yet in the collection is added.
    pathCollection.Add(new FilePath("B.txt"));

    // Then
    Assert.Equal(2, pathCollection.Count);
}
public void Should_Return_New_Collection_When_Removing_FilePaths()
{
    // Given
    var pathCollection = new PathCollection(new FilePath[] { "A.txt", "B.txt", "C.txt" }, new PathComparer(false));

    // When: the subtraction operator removes two paths.
    var subtracted = pathCollection - new FilePath[] { "B.txt", "C.txt" };

    // Then: the operator yields a fresh instance instead of mutating the source.
    Assert.False(ReferenceEquals(subtracted, pathCollection));
}
public void Should_Respect_File_System_Case_Sensitivity_When_Removing_FilePaths(bool caseSensitive, int expectedCount)
{
    // Given
    var pathCollection = new PathCollection(new FilePath[] { "A.TXT", "B.TXT", "C.TXT" }, new PathComparer(caseSensitive));

    // When: removal uses lower-cased names.
    var subtracted = pathCollection - new FilePath[] { "b.txt", "c.txt" };

    // Then: the remaining count depends on the comparer's case sensitivity.
    Assert.Equal(expectedCount, subtracted.Count);
}
public void ReadApplications()
{
    ReadStartElement("applications");
    applications = new PathCollection();
    ReadPaths("application", applications);

    // The first application listed determines the default SWF output path.
    if (applications.Count > 0)
    {
        project.OutputPath = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(applications[0]) + ".swf");
    }
}
public event ChangedHandler ClasspathChanged; // inner operation changed the classpath

public Project(string path, CompilerOptions compilerOptions)
{
    this.path = path;
    this.compilerOptions = compilerOptions;
    TestMovieBehavior = TestMovieBehavior.Default;

    // Start with empty path/asset collections and blank build settings.
    classpaths = new PathCollection();
    compileTargets = new PathCollection();
    hiddenPaths = new HiddenPathCollection();
    libraryAssets = new AssetCollection(this);
    InputPath = "";
    OutputPath = "";
    PreBuildEvent = "";
    PostBuildEvent = "";
}
public GameMakerFile()
{
    // Initialize every resource collection the Game Maker file format exposes.
    Sprites = new SpriteCollection();
    Sounds = new SoundCollection();
    Backgrounds = new BackgroundCollection();
    Paths = new PathCollection();
    Scripts = new ScriptCollection();
    Fonts = new FontCollection();
    TimeLines = new TimeLineCollection();
    Objects = new ObjectCollection();
    Rooms = new RoomCollection();
    Triggers = new TriggerCollection();
    Includes = new IncludedFileCollection();
    Constants = new ConstantCollection();

    // Singleton-style sections of the file.
    Information = new GameInformation();
    Settings = new GameSettings();
    ResourceTree = new ResourceTree();
}
public void IsRequestAuthorized_rawUrlIsExcludedButIncludedBeforeThat_shouldDisallowRequest()
{
    // Arrange: a broad include rule precedes the /public exclude rule.
    var configuredPaths = new PathCollection
    {
        new PathConfig { Name = "included", Path = ".*", Type = PathConfig.PathType.Include },
        new PathConfig { Name = "excluded", Path = "/public/.*", Type = PathConfig.PathType.Exclude }
    };
    var service = new HmacHttpService(ServiceId.ToString(), configuredPaths, "signature")
    {
        UserRepository = new Mock<IUserRepository>().Object,
        AppRepository = new Mock<IAppRepository>().Object,
        HmacService = new HmacSha256Service()
    };

    // Act
    var result = service.IsRequestAuthorized(GetValidRawUrl(false), CreateInvalidQueryString());

    // Assert
    Assert.AreEqual(StatusCode.ParameterMissing, result);
}
/// <summary>
/// Consolidates a symbol's shape paths for serialization: collects the distinct fills
/// and strokes, groups path segments that share the same fill0/fill1/stroke usage,
/// splits non-contiguous groups, and writes colors, strokes, paths, and use records
/// through the given <see cref="DVexWriter"/>.
/// </summary>
private static void ConsolidatePaths(Symbol symbol, DVexWriter writer)
{
    // Index 0 in both tables is the reserved "transparent / no fill / no stroke" entry.
    List<FillStyle> fills = new List<FillStyle>();
    List<StrokeStyle> strokes = new List<StrokeStyle>();
    fills.Add( new SolidFill(Color.Transparent) );
    strokes.Add( new SolidStroke(0.0F, Color.Transparent) );
    ArrayList allPaths = new ArrayList();
    ArrayList allSrs = new ArrayList();
    // Find all used colors/strokes, and the F0,F1,S info for each seg
    // NOTE(review): the loop variable is `sh` but the fill/stroke lookups below use
    // `shape` — presumably an enclosing field; confirm this is not meant to be `sh`.
    foreach(Shape sh in symbol.Shapes)
    {
        foreach(IShapeData s in sh.ShapeData)
        {
            int fill = 0;
            int stroke = 0;
            // Deduplicate fills/strokes, keeping the index into the style tables.
            if (!fills.Contains(shape.Fills[s.FillIndex]))
            {
                fill = fills.Add(shape.Fills[s.FillIndex]);
            }
            else
            {
                fill = fills.IndexOf(shape.Fills[s.FillIndex]);
            }
            if( !strokes.Contains(shape.Strokes[s.StrokeIndex]) )
            {
                stroke = strokes.Add(shape.Strokes[s.StrokeIndex]);
            }
            else
            {
                stroke = strokes.IndexOf(shape.Strokes[s.StrokeIndex]);
            }
            // break path into shape records
            foreach(IPathPrimitive ipp in s.Path)
            {
                if(ipp is IShapeData)
                {
                    IShapeData ip = (IShapeData)ipp;
                    if(allPaths.Contains(ip))
                    {
                        // this must be a fill1 if it is a dup
                        int index = allPaths.IndexOf(ip);
                        Shrec sr = (Shrec)allSrs[index];
                        Shrec newShrec = new Shrec(0, 0);
                        // Only fill the slots that are still unset (0 = transparent entry).
                        newShrec.F0 = (sr.F0 == 0) ? fill : sr.F0 ;
                        newShrec.F1 = (sr.F1 == 0) ? fill : sr.F1 ;
                        newShrec.S = (sr.S == 0) ? stroke : sr.S ;
                        allSrs[index] = newShrec;
                    }
                    else
                    {
                        allSrs.Add(new Shrec(fill, stroke));
                        allPaths.Add(ip);
                    }
                }
            }
        } // end groups
    } // end shapes
    // ok, now write out colors
    // sort fills by rgb, argb, and gradients
    ArrayList orderedFills = new ArrayList();
    ArrayList rgbas = new ArrayList();
    ArrayList gfs = new ArrayList();
    foreach(Fill sf in fills)
    {
        if(sf is SolidFill)
        {
            if( ((SolidFill)sf).Color.A == 255 || (SolidFill)sf == fills[0]) // 'no fill'
            {
                orderedFills.Add(sf);
            }
            else
            {
                rgbas.Add(sf);
            }
        }
        else if(sf is GradientFill)
        {
            gfs.Add(sf);
        }
        else
        {
            // bitmap fills
            orderedFills.Add(new SolidFill(Color.Gray));
        };
    }
    // Opaque solids go first; slot 0 is forced to opaque black.
    SolidFill[] wrgbs = new SolidFill[orderedFills.Count];
    wrgbs[0] = new SolidFill(Color.FromArgb(255,0,0,0));
    int fRgb = 1;
    foreach(Fill f in orderedFills)
    {
        if(f != fills[0])
        {
            wrgbs[fRgb++] = (SolidFill)f;
        }
    }
    // Then translucent solids, then gradients; each group is appended to orderedFills
    // so orderedFills ends up holding the final serialization order.
    int fRgba = 0;
    SolidFill[] wrgbas = new SolidFill[rgbas.Count];
    foreach(Fill f in rgbas)
    {
        orderedFills.Add(f);
        wrgbas[fRgba++] = (SolidFill)f;
    }
    int fGr = 0;
    GradientFill[] wgfs = new GradientFill[gfs.Count];
    foreach(Fill f in gfs)
    {
        orderedFills.Add(f);
        wgfs[fGr++] = (GradientFill)(f);
    }
    writer.WriteNbitColorDefs(wrgbs);
    writer.WriteNbitColorDefs(wrgbas);
    writer.WriteNbitGradientDefs(wgfs);
    //writer.WriteRgbColorDefs(wrgbs);
    //writer.WriteRgbaColorDefs(wrgbas);
    //writer.WriteGradientColorDefs(wgfs);
    // ok, colors written, now strokes
    // write out all the stroke defs second
    // get counts
    int wrgbCount = 0;
    int wrgbaCount = 0;
    foreach(Stroke st in strokes)
    {
        if(st.Color.A == 255 || st == strokes[0]) {wrgbCount++;} else{wrgbaCount++;}
    }
    // create stroke arrays
    Stroke[] wsrgbs = new Stroke[wrgbCount];
    Stroke[] wsrgbas = new Stroke[wrgbaCount];
    int sRgb = 0;
    int sRgba = 0;
    foreach(Stroke st in strokes)
    {
        if( st.Color.A == 255 || st == strokes[0])
        {
            wsrgbs[sRgb++] = st;
        }
        else
        {
            wsrgbas[sRgba++] = st;
        }
    }
    // now write the stroke data
    writer.WriteNbitStrokeDefs(wsrgbs);
    writer.WriteNbitStrokeDefs(wsrgbas);
    //writer.WriteRgbStrokeDefs(wsrgbs);
    //writer.WriteRgbaStrokeDefs(wsrgbas);
    // and now paths
    // valid pathsegs must have the same F0, F1, and S
    ArrayList tempPaths = new ArrayList();
    ArrayList tempSrsAl = new ArrayList();
    PathCollection pc = new PathCollection();
    Shrec curShrec = Shrec.Empty;
    // Group consecutive segments that share the same Shrec into one PathCollection.
    for(int i = 0; i < allSrs.Count; i++) //Shrec sr in srsAl)
    {
        Shrec sr = (Shrec)allSrs[i];
        if(sr.Equals(curShrec) || curShrec.Equals(Shrec.Empty))
        {
            //add to path
            pc.Add((IShapeData)allPaths[i]);
        }
        else
        {
            // write to hash
            tempPaths.Add(pc);
            tempSrsAl.Add(curShrec);
            pc = new PathCollection();
            pc.Add((IShapeData)allPaths[i]);
        }
        curShrec = sr;
    }
    // Flush the trailing group if its Shrec has not been recorded yet.
    if(!tempSrsAl.Contains(curShrec))
    {
        tempPaths.Add(pc);
        tempSrsAl.Add(curShrec);
    }
    // split non contig paths
    ArrayList paths = new ArrayList();
    ArrayList srsAl = new ArrayList();
    foreach(PathCollection pcoll in tempPaths)
    {
        //pcoll.ReorderPath();
        PathCollection[] pcolls = pcoll.SplitPath();
        foreach(PathCollection splitP in pcolls)
        {
            paths.Add(splitP);
            // Each split fragment keeps the Shrec of its source collection.
            srsAl.Add(tempSrsAl[tempPaths.IndexOf(pcoll)] );
            //writer.WritePath(splitP.PointSegments);
        }
    }
    IShapeData[][] ips = new IShapeData[paths.Count][];
    for(int i = 0; i < paths.Count; i++)
    {
        ips[i] = ((PathCollection)paths[i]).PointSegments;
    }
    writer.WritePaths(ips);
    // convert to array
    Shrec[] srs = new Shrec[srsAl.Count];
    for(int i = 0; i < srsAl.Count; i++)
    {
        srs[i] = (Shrec)srsAl[i];
    }
    // and finally, uses - must be sorted by fill color
    // use order Fill1 (no strokes), fill0[stroke], stroke only's
    // for each fill index{..}, then dangling strokes
    ArrayList shapeRecords = new ArrayList();
    // start at 1 to avoid empty fills
    foreach(Fill f in orderedFills)
    {
        int curFill = fills.IndexOf(f);
        if(curFill != 0)
        {
            // all F1's of this color first
            ArrayList Fs = new ArrayList();
            for(int i = 0; i < srs.Length; i++)
            {
                if(srs[i].F0 == curFill)
                {
                    // add use for F0
                    ShapeRecord curSr = new ShapeRecord();
                    curSr.Fill = orderedFills.IndexOf(f);
                    curSr.Stroke = srs[i].S;
                    curSr.Path = i;
                    Fs.Add(curSr);
                }
                if(srs[i].F1 == curFill )
                {
                    // add use for F1
                    ShapeRecord curSr = new ShapeRecord();
                    curSr.Fill = orderedFills.IndexOf(f);
                    curSr.Stroke = 0;
                    curSr.Path = i;
                    Fs.Add(curSr);
                }
            }
            //now sort the F1s from tip to tail
            if(Fs.Count > 0)
            {
                ArrayList finalFs = new ArrayList();
                finalFs.Add(Fs[0]);
                PointF end = ((PathCollection)paths[((ShapeRecord)Fs[0]).Path]).LastPoint;
                Fs.RemoveAt(0);
                // Greedy chaining: repeatedly find the record whose path starts where
                // the current chain ends; otherwise start a new chain with the head.
                while(Fs.Count > 0)
                {
                    bool found = false;
                    foreach(ShapeRecord sr in Fs)
                    {
                        PathCollection srp = (PathCollection)paths[sr.Path];
                        if(srp.FirstPoint == end)
                        {
                            end = srp.LastPoint;
                            finalFs.Add(sr);
                            Fs.Remove(sr);
                            found = true;
                            break;
                        }
                    }
                    if(found == false)
                    {
                        finalFs.Add(Fs[0]);
                        end = ( (PathCollection)paths[ ((ShapeRecord)Fs[0]).Path] ).LastPoint;
                        Fs.RemoveAt(0);
                    }
                }
                // and write them
                foreach(ShapeRecord sr in finalFs)
                {
                    shapeRecords.Add(sr);
                }
            }
        }
    }
    // Records with no fill on either side are pure stroke uses.
    for(int i = 0; i < srs.Length; i++)
    {
        if(srs[i].F0 == 0 && srs[i].F1 == 0)
        {
            // must be stroke
            ShapeRecord curSr = new ShapeRecord();
            curSr.Fill = 0;
            curSr.Stroke = srs[i].S;
            curSr.Path = i;
            shapeRecords.Add(curSr);
        }
    }
    // convert to array
    ShapeRecord[] srecs = new ShapeRecord[shapeRecords.Count];
    for(int i = 0; i < shapeRecords.Count; i++)
    {
        srecs[i] = (ShapeRecord)shapeRecords[i];
    }
    writer.WriteUses(srecs);
}
/// <summary>
/// Consumes the &lt;modules&gt; element of the project file: iterates its
/// &lt;module&gt; children and, for the module whose "application" attribute
/// matches the main application, sets the project's output path from the
/// module's "destPath" attribute (combined with the configured output path).
/// </summary>
private void ReadModules()
{
    ReadStartElement("modules");
    // Fixed: removed an unused 'PathCollection targets' local that was
    // allocated but never read or written.
    while (Name == "module")
    {
        string app = GetAttribute("application") ?? "";
        if (app == mainApp)
        {
            // The main application's module determines where build output goes.
            project.OutputPath = Path.Combine(outputPath, GetAttribute("destPath") ?? "");
        }
        Read();
    }
}
/// <summary>
/// Processes one category of resources. Implementations receive the
/// collection of paths to handle; see the concrete exporter for details.
/// </summary>
/// <param name="aPaths">Paths belonging to the category being processed.</param>
protected abstract void ProcessResource( PathCollection aPaths );
/// <summary>
/// Exports every path resource as an XML document in the paths directory.
/// Raises category/resource progress events and restores the previous
/// working directory when finished.
/// </summary>
/// <param name="aPaths">Path resources to export; an empty collection is a no-op.</param>
protected override void ProcessResource( PathCollection aPaths )
{
    // Nothing to export — skip the whole category.
    if ( !aPaths.Any() )
        return;

    OnCategoryProcessing( ResourceTypes.Paths );

    Directory.CreateDirectory( Directories.Paths );
    var savedDirectory = SetCurrentDirectory( Directories.Paths );

    foreach ( var path in aPaths )
    {
        // One <Point> element per path point (lazy sequence, realized by XElement).
        var pointNodes = path.Points.Select( point =>
            new XElement( "Point",
                new XElement( "X", point.X ),
                new XElement( "Y", point.Y ),
                new XElement( "Speed", point.Speed ) ) );

        var pathElement = new XElement( "Path",
            CreateIndexedResourceNodes( path ),
            // The room id alone is opaque; the comment records the room's name.
            new XComment( "Node below refers to: " + FindResourceName( m_gmk.Rooms, path.BackgroundRoom ) ),
            new XElement( "BackgroundRoom", path.BackgroundRoom ),
            new XElement( "ConnectionKind", path.ConnectionKind ),
            new XElement( "Closed", path.Closed ),
            new XElement( "Precision", path.Precision ),
            new XElement( "SnapX", path.SnapX ),
            new XElement( "SnapY", path.SnapY ),
            new XElement( "Points", pointNodes ) );

        SaveDocument( pathElement, SafeResourceFilename( path ) + ".xml" );
        OnResourceProcessed( path.Name );
    }

    OnCategoryProcessed( ResourceTypes.Paths );
    SetCurrentDirectory( savedDirectory );
}
/// <summary>
/// Builds an <c>HmacHttpService</c> wired to mocked repositories for tests.
/// The app repository always returns an enabled app with the shared test secret.
/// </summary>
/// <param name="signatureParameterKey">Signature query-parameter name; the default key selects a service without an explicit key.</param>
/// <param name="useHexEncoding">Whether the HMAC service emits hex instead of base64.</param>
/// <param name="pathCollection">Path include/exclude configuration for the service.</param>
private static HmacHttpService CreateService(string signatureParameterKey, bool useHexEncoding, PathCollection pathCollection)
{
    var appRepository = new Mock<IAppRepository>();
    appRepository
        .Setup(x => x.Get(It.IsAny<Guid>(), It.IsAny<Guid>()))
        .Returns(new AppModel {Enabled = true, Secret = Secret});

    // Only set the signature parameter key explicitly when it differs from the default.
    HmacSha256Service hmacService;
    if (signatureParameterKey == DefaultSignatureParameterKey)
    {
        hmacService = new HmacSha256Service {UseHexEncoding = useHexEncoding};
    }
    else
    {
        hmacService = new HmacSha256Service {SignatureParameterKey = signatureParameterKey, UseHexEncoding = useHexEncoding};
    }

    return new HmacHttpService(ServiceId.ToString(), pathCollection, signatureParameterKey)
    {
        UserRepository = new Mock<IUserRepository>().Object,
        AppRepository = appRepository.Object,
        HmacService = hmacService
    };
}
/// <summary>
/// Invalidates the cached absolute classpaths and raises the
/// <c>ClasspathChanged</c> event so listeners can react.
/// </summary>
public void OnClasspathChanged()
{
    absClasspaths = null;  // rebuilt lazily on next access
    // Copy to a local before invoking so a concurrent unsubscribe
    // between the null check and the call cannot throw.
    var handler = ClasspathChanged;
    if (handler != null)
        handler(this);
}
public void IsRequestAuthorized_includeParameterThatHasNotBeenUsedForSignatureGeneration_ignoreOtherAdditionalParameters_shouldDisallowRequest()
{
    // Arrange: two include rules; the catch-all one excludes p1/p2 from signing.
    var paths = new PathCollection
    {
        new PathConfig {Name = "included-1", Path = "/public/.*", Type = PathConfig.PathType.Include},
        new PathConfig {Name = "included-2", Path = ".*", Type = PathConfig.PathType.Include, ExcludeParameters = "p1, p2"}
    };
    var sut = CreateService(DefaultSignatureParameterKey, false, paths);

    // p1 is excluded from signing, but p3 is not — its presence must
    // invalidate the signature.
    var parameters = CreateValidQueryString();
    parameters.Add("p1", "v1");
    parameters.Add("p3", "v2");

    // Act
    var result = sut.IsRequestAuthorized(GetValidRawUrl(), parameters);

    // Assert
    Assert.AreEqual(StatusCode.InvalidSignature, result);
}
/// <summary>
/// Expands template placeholders ($(FileName), $(Package),
/// $(FileNameWithPackage), $(ProName), $(ProRecName), $(ProSendName)) in
/// <paramref name="args"/> based on the last file created from a template.
/// </summary>
/// <param name="project">Project whose classpaths are searched for the file's package root.</param>
/// <param name="args">Argument string containing placeholders to substitute.</param>
/// <returns>The argument string with all applicable placeholders replaced.</returns>
public string ProcessArgs(Project project, string args)
{
    // No file has been created from a template yet — nothing to substitute.
    if (lastFileFromTemplate == null)
        return args;

    string fileName = Path.GetFileNameWithoutExtension(lastFileFromTemplate);
    args = args.Replace("$(FileName)", fileName);

    if (args.Contains("$(FileNameWithPackage)") || args.Contains("$(Package)"))
    {
        string path = lastFileFromTemplate;

        // Resolve the classpath containing the file: the project's own
        // classpaths first, then the globally configured ones.
        string classpath = project.AbsoluteClasspaths.GetClosestParent(path);
        if (classpath == null)
        {
            PathCollection globalPaths = new PathCollection();
            foreach (string cp in ProjectManager.PluginMain.Settings.GlobalClasspaths)
                globalPaths.Add(cp);
            classpath = globalPaths.GetClosestParent(path);
        }

        string package = "";
        if (classpath != null)
        {
            // Package name = directory of the file relative to its classpath,
            // with separators turned into dots.
            package = Path.GetDirectoryName(ProjectPaths.GetRelativePath(classpath, path));
            package = package.Replace(Path.DirectorySeparatorChar, '.');
        }

        args = args.Replace("$(Package)", package);
        args = args.Replace("$(ProName)", proName);
        args = args.Replace("$(ProRecName)", receiveProName);
        args = args.Replace("$(ProSendName)", sendProName);
        args = args.Replace("$(FileNameWithPackage)",
            package != "" ? package + "." + fileName : fileName);
    }

    return args;
}