public void InspectShouldExitIfNoProjectWithVersionIsFound()
{
    // A csproj without a <Version> element is not versionable.
    TempProject.CreateFromProjectContents(
        _testSetup.WorkingDirectory,
        "csproj",
        @"<Project Sdk=""Microsoft.NET.Sdk""> <PropertyGroup> </PropertyGroup> </Project>");

    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);

    // Inspect must bail out through the command-line exit path.
    Should.Throw<CommandLineExitException>(() => copy.Inspect());

    // Exactly one explanatory message is printed.
    _testPlatformAbstractions.Messages.ShouldHaveSingleItem();
    _testPlatformAbstractions.Messages[0].ShouldEndWith(" that have a <Version> defined in their csproj file.");
}
public void ShouldReleaseAsSpecifiedVersion()
{
    // Project starts at 1.1.0; an explicit release-as version must win.
    var projectDir = Path.Join(_testSetup.WorkingDirectory, "project1");
    TempProject.CreateCsharpProject(projectDir, "1.1.0");
    CommitAll(_testSetup.Repository);

    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    copy.Versionize(new VersionizeOptions { ReleaseAs = "2.0.0" });

    // Only the requested version tag should exist afterwards.
    _testSetup.Repository.Tags.Select(t => t.FriendlyName).ShouldBe(new[] { "v2.0.0" });
}
public void ShouldSupportFsharpProjects()
{
    TempProject.CreateFsharpProject(_testSetup.WorkingDirectory);
    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    var committer = new FileCommitter(_testSetup);

    // Cut an initial release from a single conventional commit.
    committer.CommitChange("chore: initial commit");
    copy.Versionize(new VersionizeOptions());

    // An fsproj project is versioned exactly like a csproj one.
    VersionTagNames.ToList().ShouldBe(new[] { "v1.0.0" });
}
public ClassificationStepsContext()
{
    // An in-memory file system keeps the classification steps hermetic.
    WorkingCopy = new WorkingCopy(new FileSystemAdapter(new MockFileSystem()), @"c:\WorkingCopy");

    CsvAccountOperationManager = new CsvAccountOperationManager();
    Transformer = new UnifiedAccountOperationPatternTransformer(
        new PlaceInfoResolver(PlaceProvider.Load(new PlacesRepository())));

    AccountCommandRepository = new AccountCommandRepository(WorkingCopy);
    var operationsRepository = new OperationsRepository(WorkingCopy, CsvAccountOperationManager, Transformer);

    // Caching is disabled for these tests.
    var cache = new CacheManager(new NoCache());
    OperationsManager = new OperationsManager(cache, operationsRepository);
    ImportManager = new ImportManager(cache, AccountCommandRepository, OperationsManager);

    AccountId = Guid.NewGuid();
}
// NOTE(review): the method name has a typo ("Preform"); it is kept as-is because
// renaming would change the publicly visible test name.
public void ShouldPreformADryRun()
{
    TempCsProject.Create(_testSetup.WorkingDirectory);
    var helloFile = Path.Join(_testSetup.WorkingDirectory, "hello.txt");

    // Two commits so there is history to report on.
    File.WriteAllText(helloFile, "First commit");
    CommitAll(_testSetup.Repository);
    File.WriteAllText(helloFile, "Second commit");
    CommitAll(_testSetup.Repository);

    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    copy.Versionize(dryrun: true, skipDirtyCheck: true);

    // A dry run reports what it would do without writing anything.
    _testPlatformAbstractions.Messages.Count.ShouldBe(4);
    _testPlatformAbstractions.Messages[0].ShouldBe("Discovered 1 versionable projects");
}
public void ShouldExitForInvalidReleaseAsReleases()
{
    TempProject.CreateCsharpProject(_testSetup.WorkingDirectory);
    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    var committer = new FileCommitter(_testSetup);

    // Cut an initial release.
    committer.CommitChange("chore: initial commit");
    copy.Versionize(new VersionizeOptions());

    // Requesting a version lower than the current one must be rejected.
    committer.CommitChange("feat: some feature");
    Should.Throw<CommandLineExitException>(
        () => copy.Versionize(new VersionizeOptions { ReleaseAs = "0.9.0" }));
}
public void ShouldExitIfWorkingCopyIsDirty()
{
    var workingDirectory = TempDir.Create();
    using var tempRepository = TempRepository.Create(workingDirectory);

    // The project file is created but never committed, leaving the copy dirty.
    TempCsProject.Create(workingDirectory);

    var copy = WorkingCopy.Discover(workingDirectory);
    Should.Throw<CommandLineExitException>(() => copy.Versionize());

    _testPlatformAbstractions.Messages.ShouldHaveSingleItem();
    _testPlatformAbstractions.Messages[0].ShouldBe($"Repository {workingDirectory} is dirty. Please commit your changes.");

    Cleanup.DeleteDirectory(workingDirectory);
}
// Watches the App_Data directory on every request and restarts the site when it changes.
protected void Application_BeginRequest(object sender, EventArgs e)
{
    // Check whether the watched data directory has changed.
    var dt = Directory.GetLastWriteTime(ConfigurationManager.AppSettings["App_Data_Dir"]);
    if (!_lastWriteTime.HasValue)
    {
        // First request after startup: drop cached container instances,
        // copy the source data over, and remember the baseline timestamp.
        AssemblyContainer.SetInstanceNull();
        CommentsDocContainer.SetInstanceNull();
        WorkingCopy.CopySourceToTarget();
        _lastWriteTime = dt;
    }
    else if (dt > _lastWriteTime.Value)
    {
        // Subsequent change: touching Global.asax forces ASP.NET to recycle the website.
        File.SetLastWriteTimeUtc(Server.MapPath("~/Global.asax"), DateTime.UtcNow);
    }
}
public void ShouldExitIfProjectsUseInconsistentNaming()
{
    var workingDirectory = TempDir.Create();
    using var tempRepository = TempRepository.Create(workingDirectory);

    // Two projects with diverging <Version> values.
    TempCsProject.Create(Path.Join(workingDirectory, "project1"), "1.1.0");
    TempCsProject.Create(Path.Join(workingDirectory, "project2"), "2.0.0");
    CommitAll(tempRepository);

    var copy = WorkingCopy.Discover(workingDirectory);
    Should.Throw<CommandLineExitException>(() => copy.Versionize());

    _testPlatformAbstractions.Messages[0].ShouldBe($"Some projects in {workingDirectory} have an inconsistent <Version> defined in their csproj file. Please update all versions to be consistent or remove the <Version> elements from projects that should not be versioned");

    Cleanup.DeleteDirectory(workingDirectory);
}
public void ShouldEmitAUsefulErrorMessageForDuplicateTags()
{
    TempProject.CreateCsharpProject(Path.Join(_testSetup.WorkingDirectory, "project1"), "1.1.0");
    CommitAll(_testSetup.Repository);

    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    copy.Versionize(new VersionizeOptions { ReleaseAs = "2.0.0" });

    // Releasing the same version twice must fail with a clear message.
    Should.Throw<CommandLineExitException>(
        () => copy.Versionize(new VersionizeOptions { ReleaseAs = "2.0.0" }));
    _testPlatformAbstractions.Messages.Last().ShouldBe("Version 2.0.0 already exists. Please use a different version.");
}
// Loads the working-copy externals tree for the path given on the command line
// and binds it to the tree view; closes the window on any error.
public void LoadContent()
{
    string[] args = Environment.GetCommandLineArgs();
    string sPath = ".", sStdErr;
    if (args.Length > 1) { sPath = args[1].TrimEnd(m_acParamRemoveChars); }
    // check for URL parameter — only local paths are supported
    if (sPath.IndexOf('/') > 0)
    {
        MessageBox.Show("Only local paths allowed:\n\n" + sPath, "Error");
        this.Close();
    }
    else
    {
        // Disable the UI while the (potentially slow) externals scan runs.
        EnableControls(false);
        m_ctrlTreeView.DataContext = null;
        // sStdErr receives stderr output from the underlying scan for error reporting.
        m_wcRoot = WorkingCopy.GetExternals(this, out sStdErr, m_ViewRoot, System.IO.Path.GetFullPath(sPath), null);
        // (a previously disabled code path rebuilt m_ViewRoot/m_ViewContext tree items
        // here and reassigned the tree view DataContext; it has been removed)
        EnableControls(true);
        m_ctrlTreeView.Focus();
        if (null == m_wcRoot)
        {
            MessageBox.Show("No externals or no working copy found:\n\n" + sStdErr, "Error");
            this.Close();
        }
    }
}
public void ShouldIgnoreInsignificantCommits()
{
    var workingDirectory = TempDir.Create();
    using var tempRepository = TempRepository.Create(workingDirectory);
    TempCsProject.Create(workingDirectory);
    var workingFilePath = Path.Join(workingDirectory, "hello.txt");

    // Create and commit a test file
    File.WriteAllText(workingFilePath, "First line of text");
    CommitAll(tempRepository);

    // Run versionize to cut an initial release
    var workingCopy = WorkingCopy.Discover(workingDirectory);
    workingCopy.Versionize();

    // Add insignificant change
    File.AppendAllText(workingFilePath, "This is another line of text");
    CommitAll(tempRepository, "chore: Added line of text");

    // Get last commit
    var lastCommit = tempRepository.Head.Tip;

    // Ignoring insignificant commits exits cleanly (code 0) instead of releasing.
    // Shouldly's Should.Throw replaces the former hand-rolled try/catch plus
    // sentinel InvalidOperationException, matching the style of the other tests
    // and giving a clear assertion failure when nothing throws.
    var exception = Should.Throw<CommandLineExitException>(
        () => workingCopy.Versionize(ignoreInsignificant: true));
    exception.ExitCode.ShouldBe(0);

    // No new release commit should have been created.
    lastCommit.ShouldBe(tempRepository.Head.Tip);

    // Cleanup
    Cleanup.DeleteDirectory(workingDirectory);
}
public void ShouldExitWithNonZeroExitCodeForInsignificantCommits()
{
    TempProject.CreateCsharpProject(_testSetup.WorkingDirectory);
    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    var committer = new FileCommitter(_testSetup);

    // Cut an initial release first.
    committer.CommitChange("chore: initial commit");
    copy.Versionize(new VersionizeOptions());

    // A chore-only release with ExitInsignificantCommits must exit.
    committer.CommitChange("chore: insignificant change");
    Should.Throw<CommandLineExitException>(
        () => copy.Versionize(new VersionizeOptions { ExitInsignificantCommits = true }));

    _testPlatformAbstractions.Messages.Last().ShouldStartWith("Version was not affected by commits since last release");
}
// Builds a WorkingCopy from a name and/or path, deriving whichever part is
// missing from the other. Returns null when both inputs are blank.
private WorkingCopy DefineWorkingCopy(string workingCopyName, string workingCopyPath)
{
    bool hasName = !string.IsNullOrWhiteSpace(workingCopyName);
    bool hasPath = !string.IsNullOrWhiteSpace(workingCopyPath);

    if (!hasName && !hasPath)
    {
        return null;
    }

    if (!hasName)
    {
        // Fall back to the final path segment as the name.
        workingCopyName = Path.GetFileName(workingCopyPath);
    }

    if (!hasPath)
    {
        // Fall back to a directory under the resolved source root.
        workingCopyPath = PathBuilder.Combine(ResolveSourceDirectory(), workingCopyName);
    }

    return new WorkingCopy(workingCopyName, workingCopyPath);
}
public void InspectShouldExitForProjectsInconsistentVersion()
{
    // Two projects with conflicting <Version> values.
    TempProject.CreateFromProjectContents(_testSetup.WorkingDirectory + "/project1", "csproj", @"<Project Sdk=""Microsoft.NET.Sdk""> <PropertyGroup> <Version>1.0.0</Version> </PropertyGroup> </Project>");
    TempProject.CreateFromProjectContents(_testSetup.WorkingDirectory + "/project2", "csproj", @"<Project Sdk=""Microsoft.NET.Sdk""> <PropertyGroup> <Version>2.0.0</Version> </PropertyGroup> </Project>");

    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    Should.Throw<CommandLineExitException>(() => copy.Inspect());

    // A single message explaining the inconsistency is expected.
    _testPlatformAbstractions.Messages.ShouldHaveSingleItem();
    _testPlatformAbstractions.Messages[0].ShouldContain("have an inconsistent <Version> defined in their csproj file");
}
public void ShouldAddSuffixToReleaseCommitMessage()
{
    const string suffix = "[skip ci]";

    TempCsProject.Create(_testSetup.WorkingDirectory);
    var workingFilePath = Path.Join(_testSetup.WorkingDirectory, "hello.txt");

    // Create and commit a test file.
    File.WriteAllText(workingFilePath, "First line of text");
    CommitAll(_testSetup.Repository);

    // Release with an explicit commit-message suffix.
    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    copy.Versionize(releaseCommitMessageSuffix: suffix);

    // The release commit must carry the suffix.
    _testSetup.Repository.Head.Tip.Message.ShouldContain(suffix);
}
public void ShouldPerformADryRun()
{
    TempProject.CreateCsharpProject(_testSetup.WorkingDirectory);
    File.WriteAllText(Path.Join(_testSetup.WorkingDirectory, "hello.txt"), "First commit");
    CommitAll(_testSetup.Repository, "feat: first commit");

    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    copy.Versionize(new VersionizeOptions { DryRun = true, SkipDirty = true });

    // The dry run prints the discovery summary plus a delimited changelog preview.
    var messages = _testPlatformAbstractions.Messages;
    messages.Count.ShouldBe(7);
    messages[0].ShouldBe("Discovered 1 versionable projects");
    messages[3].ShouldBe("\n---");
    messages[4].ShouldContain("* first commit");
    messages[5].ShouldBe("---\n");

    // Nothing is written to disk during a dry run.
    Assert.False(File.Exists(Path.Join(_testSetup.WorkingDirectory, "CHANGELOG.md")));
}
// Solves the nonogram in place, writing the discovered cell states back into n.Cells.
// Throws InvalidOperationException when no assignment satisfies the descriptors.
public void Solve(Nonogram n)
{
    var rowsPossibleStates = Utils.PossibleStatesForRows(n.Width, n.RowDescriptors);
    // NOTE(review): the column states are computed but never consumed below —
    // confirm whether FindSolution should receive them as well.
    var colsPossibleStates = Utils.PossibleStatesForRows(n.Height, n.ColumnDescriptors);

    var initWorkingCopy = new WorkingCopy()
    {
        // One unassigned (null) slot per row; rows are filled in during the search.
        AssignedRows = Enumerable.Repeat<List<CellState>>(null, n.Height).ToList()
    };

    var solution = FindSolution(initWorkingCopy, n, rowsPossibleStates);
    if (solution == null)
    {
        // Specific exception type and actionable message replace the former
        // bare Exception with a placeholder string; catch (Exception) callers
        // still see it.
        throw new InvalidOperationException("No solution exists for the given nonogram.");
    }

    // Copy the solved states back into the puzzle grid.
    for (int i = 0; i < n.Height; i++)
    {
        for (int j = 0; j < n.Width; j++)
        {
            n.Cells[i][j].State = solution[i][j];
        }
    }
}
public void ShouldIgnoreInsignificantCommits()
{
    TempProject.CreateCsharpProject(_testSetup.WorkingDirectory);
    var workingFilePath = Path.Join(_testSetup.WorkingDirectory, "hello.txt");

    // Create and commit a test file
    File.WriteAllText(workingFilePath, "First line of text");
    CommitAll(_testSetup.Repository);

    // Run versionize to cut an initial release
    var workingCopy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    workingCopy.Versionize(new VersionizeOptions());

    // Add insignificant change
    File.AppendAllText(workingFilePath, "This is another line of text");
    CommitAll(_testSetup.Repository, "chore: Added line of text");

    // Get last commit
    var lastCommit = _testSetup.Repository.Head.Tip;

    // Ignoring insignificant commits exits cleanly (code 0) instead of releasing.
    // Shouldly's Should.Throw replaces the former hand-rolled try/catch plus
    // sentinel InvalidOperationException, matching the sibling tests in this file.
    var exception = Should.Throw<CommandLineExitException>(
        () => workingCopy.Versionize(new VersionizeOptions { IgnoreInsignificantCommits = true }));
    exception.ExitCode.ShouldBe(0);

    // No new release commit was created.
    lastCommit.ShouldBe(_testSetup.Repository.Head.Tip);
}
public void ShouldExitForInvalidPrereleaseSequences()
{
    TempProject.CreateCsharpProject(_testSetup.WorkingDirectory);
    var copy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    var committer = new FileCommitter(_testSetup);

    // Cut an initial release.
    committer.CommitChange("chore: initial commit");
    copy.Versionize(new VersionizeOptions());

    // Prerelease a minor beta.
    committer.CommitChange("feat: feature pre-release");
    copy.Versionize(new VersionizeOptions { Prerelease = "beta" });

    // "alpha" would step backwards from "beta", so this sequence must be rejected.
    committer.CommitChange("feat: feature pre-release");
    Should.Throw<CommandLineExitException>(
        () => copy.Versionize(new VersionizeOptions { Prerelease = "alpha" }));
}
public void ShouldDiscoverGitWorkingCopies()
{
    // The test runner executes inside a git checkout, so discovery must succeed.
    var discovered = WorkingCopy.Discover(Directory.GetCurrentDirectory());

    Assert.NotNull(discovered);
}
// Extracts (destination != null) or verifies (destination == null) the contents of a
// hash-based archive. Reuses chunks already present in the local working copy to avoid
// re-reading the archive, prefetches/decompresses asynchronously, verifies each output
// file's SHA-1, and finally swaps the new files into place after asking the user about
// locally modified files. Returns true on success, false on cancel/failure.
public static bool Extract(string archiveFilename, string destination, Stats stats)
{
    stats.Title = Path.GetFileName(archiveFilename);
    ArchiveReader archive = new ArchiveReader(archiveFilename, stats);
    bool writeEnabled = (destination != null);
    Dictionary<string, ZipFile> openZips = new Dictionary<string, ZipFile>();
    openZips[archive.ArchiveName.ToLowerInvariant()] = archive.zipFile;
    // State for the single sequential read handle into working-copy files.
    FileStream openFile = null;
    IAsyncResult openFileRead = null;
    string openFilePathLC = null;
    long totalSize = 0;
    long totalSizeDone = 0;
    // Setup cache: map each source path to its ExtractedData cache line, recording the
    // order (monotonic "time") in which each reference will be consumed, so eviction
    // can pick the entry needed furthest in the future.
    Dictionary<string, ExtractedData> dataCache = new Dictionary<string, ExtractedData>();
    int time = 0;
    foreach (File file in archive.files)
    {
        totalSize += file.Size;
        foreach (int hashIndex in file.HashIndices)
        {
            if (stats.Canceled) return false;
            string path = archive.GetString(archive.hashes[hashIndex].Path).ToLowerInvariant();
            if (!dataCache.ContainsKey(path)) dataCache.Add(path, new ExtractedData());
            dataCache[path].Refs.Add(time++);
        }
    }
    string stateFile = writeEnabled ? Path.Combine(destination, Settings.StateFile) : null;
    stats.Status = "Loading working copy state";
    WorkingCopy workingCopy = writeEnabled ? WorkingCopy.Load(stateFile) : new WorkingCopy();
    WorkingCopy newWorkingFiles = new WorkingCopy();
    List<WorkingHash> oldWorkingHashes = new List<WorkingHash>();
    if (writeEnabled)
    {
        int oldCount = workingCopy.Count;
        workingCopy = WorkingCopy.HashLocalFiles(destination, stats, workingCopy);
        // Persist immediately if hashing discovered new local files.
        if (workingCopy.Count > oldCount)
        {
            stats.Status = "Saving working copy state";
            workingCopy.Save(stateFile);
        }
    }
    // Flatten all known chunk hashes, back-linking each to its owning file, then sort
    // so chunks can be located by binary search below.
    foreach (WorkingFile wf in workingCopy.GetAll())
    {
        foreach (WorkingHash wh in wf.Hashes) { wh.File = wf; }
        oldWorkingHashes.AddRange(wf.Hashes);
    }
    oldWorkingHashes.Sort();
    if (stats.Canceled) return false;
    string tmpPath = null;
    if (writeEnabled)
    {
        // New files are staged in a temp directory and moved into place at the end.
        tmpPath = Path.Combine(destination, Settings.TmpDirectory);
        Directory.CreateDirectory(tmpPath);
    }
    Dictionary<ExtractedData, bool> loaded = new Dictionary<ExtractedData, bool>();
    // NOTE(review): these two counters are accumulated but never read — presumably
    // debugging diagnostics; confirm before removing.
    float waitingForDecompression = 0.0f;
    float mbUnloadedDueToMemoryPressure = 0.0f;
    stats.Status = writeEnabled ? "Extracting" : "Verifying";
    stats.WriteStartTime = DateTime.Now;
    foreach (File file in archive.files)
    {
        string tmpFileName = null;
        FileStream outFile = null;
        if (writeEnabled)
        {
            // Quickpath - see if the file exists and has correct content
            WorkingFile workingFile = workingCopy.Find(Path.Combine(destination, file.Name));
            if (workingFile != null && workingFile.ExistsOnDisk() && !workingFile.IsModifiedOnDisk())
            {
                if (new Hash(workingFile.Hash).CompareTo(new Hash(file.Hash)) == 0)
                {
                    // The file is already there - no need to extract it
                    stats.Status = "Skipped " + file.Name;
                    workingFile.UserModified = false;
                    newWorkingFiles.Add(workingFile);
                    stats.Unmodified += file.Size;
                    totalSizeDone += file.Size;
                    continue;
                }
            }
            // Pick a staging name that does not collide with an existing temp file.
            int tmpFileNamePostfix = 0;
            do
            {
                tmpFileName = Path.Combine(tmpPath, file.Name + (tmpFileNamePostfix == 0 ? string.Empty : ("-" + tmpFileNamePostfix.ToString())));
                tmpFileNamePostfix++;
            } while (System.IO.File.Exists(tmpFileName));
            Directory.CreateDirectory(Path.GetDirectoryName(tmpFileName));
            outFile = new FileStream(tmpFileName, FileMode.CreateNew, FileAccess.Write);
            // Avoid fragmentation
            outFile.SetLength(file.Size);
            outFile.Position = 0;
        }
        List<WorkingHash> workingHashes = new List<WorkingHash>();
        try
        {
            stats.Progress = 0;
            stats.Status = (writeEnabled ? "Extracting " : "Verifying ") + file.Name;
            SHA1CryptoServiceProvider sha1Provider = new SHA1CryptoServiceProvider();
            // Queue of chunk reads/decompressions in flight; consumed in order below.
            Queue<MemoryStreamRef> writeQueue = new Queue<MemoryStreamRef>();
            int p = 0;
            for (int i = 0; i < file.HashIndices.Count; i++)
            {
                if (stats.Canceled)
                {
                    stats.Status = "Canceled. No files were modified.";
                    return false;
                }
                // Prefetch: issue async reads/decompressions ahead of the writer.
                for (; p < file.HashIndices.Count; p++)
                {
                    if (writeQueue.Count > 0 && writeQueue.Peek().Ready.WaitOne(TimeSpan.Zero)) break; // Some data is ready - go process it
                    // Measure how much distinct stream data is already queued.
                    int prefetchSize = 0;
                    Dictionary<MemoryStream, bool> prefetchedStreams = new Dictionary<MemoryStream, bool>();
                    foreach (MemoryStreamRef memStreamRef in writeQueue) { prefetchedStreams[memStreamRef.MemStream] = true; }
                    foreach (MemoryStream prefetchedStream in prefetchedStreams.Keys) { prefetchSize += (int)prefetchedStream.Length; }
                    if (writeQueue.Count > 0 && prefetchSize > Settings.WritePrefetchSize) break; // We have prefetched enough data
                    HashSource hashSrc = archive.hashes[file.HashIndices[p]];
                    string path = archive.GetString(hashSrc.Path).ToLowerInvariant();
                    ExtractedData data = dataCache[path];
                    // See if we have the hash on disk. Try our best not to seek too much
                    WorkingHash onDiskHash = null;
                    long bestSeekDistance = long.MaxValue;
                    int idx = oldWorkingHashes.BinarySearch(new WorkingHash() { Hash = hashSrc.Hash });
                    if (idx >= 0)
                    {
                        // Rewind to the first duplicate, then pick the candidate whose
                        // offset is closest to the current read position.
                        while (idx - 1 >= 0 && oldWorkingHashes[idx - 1].Hash.Equals(hashSrc.Hash)) idx--;
                        for (; idx < oldWorkingHashes.Count && oldWorkingHashes[idx].Hash.Equals(hashSrc.Hash); idx++)
                        {
                            WorkingHash wh = oldWorkingHashes[idx];
                            long seekDistance;
                            if (openFile != null && openFilePathLC == wh.File.NameLowercase)
                            {
                                seekDistance = Math.Abs(openFile.Position - wh.Offset);
                            }
                            else
                            {
                                seekDistance = long.MaxValue;
                            }
                            if (onDiskHash == null || seekDistance < bestSeekDistance)
                            {
                                onDiskHash = wh;
                                bestSeekDistance = seekDistance;
                            }
                        }
                    }
                    if (onDiskHash != null && ((openFilePathLC == onDiskHash.File.NameLowercase) || (onDiskHash.File.ExistsOnDisk() && !onDiskHash.File.IsModifiedOnDisk())))
                    {
                        // Chunk can be read from an existing local file instead of the archive.
                        MemoryStream memStream = new MemoryStream(onDiskHash.Length);
                        memStream.SetLength(onDiskHash.Length);
                        // Finish the last read
                        if (openFileRead != null)
                        {
                            openFile.EndRead(openFileRead);
                            openFileRead = null;
                        }
                        // Open other file
                        if (openFilePathLC != onDiskHash.File.NameLowercase)
                        {
                            if (openFile != null) openFile.Close();
                            openFile = new FileStream(onDiskHash.File.NameLowercase, FileMode.Open, FileAccess.Read, FileShare.Read, Settings.FileStreamBufferSize, FileOptions.None);
                            openFilePathLC = onDiskHash.File.NameLowercase;
                            System.Diagnostics.Debug.Write(Path.GetFileName(onDiskHash.File.NameMixedcase));
                        }
                        // "." = sequential read, "S" = seek was needed (debug trace only).
                        System.Diagnostics.Debug.Write(onDiskHash.Offset == openFile.Position ? "." : "S");
                        if (openFile.Position != onDiskHash.Offset) openFile.Position = onDiskHash.Offset;
                        openFileRead = openFile.BeginRead(memStream.GetBuffer(), 0, (int)memStream.Length, null, null);
                        writeQueue.Enqueue(new MemoryStreamRef() { Ready = openFileRead.AsyncWaitHandle, MemStream = memStream, Offset = 0, Length = (int)memStream.Length, CacheLine = null, Hash = hashSrc.Hash });
                        stats.ReadFromWorkingCopy += hashSrc.Length;
                    }
                    else
                    {
                        // Locate and load the zipentry
                        ZipEntry pZipEntry;
                        path = path.Replace("\\", "/");
                        if (path.StartsWith("/"))
                        {
                            // Leading slash: entry lives in the main archive.
                            pZipEntry = archive.zipFile[path.Substring(1)];
                        }
                        else
                        {
                            // Otherwise the first path segment names a sibling zip file.
                            int slashIndex = path.IndexOf("/");
                            string zipPath = path.Substring(0, slashIndex);
                            string entryPath = path.Substring(slashIndex + 1);
                            if (!openZips.ContainsKey(zipPath)) openZips[zipPath] = new ZipFile(Path.Combine(Path.GetDirectoryName(archiveFilename), zipPath));
                            pZipEntry = openZips[zipPath][entryPath];
                        }
                        if (data.Data == null)
                        {
                            stats.ReadFromArchiveDecompressed += pZipEntry.UncompressedSize;
                            stats.ReadFromArchiveCompressed += pZipEntry.CompressedSize;
                            data.AsycDecompress(pZipEntry);
                        }
                        loaded[data] = true;
                        writeQueue.Enqueue(new MemoryStreamRef() { Ready = data.LoadDone, MemStream = data.Data, Offset = hashSrc.Offset, Length = hashSrc.Length, CacheLine = data, Hash = hashSrc.Hash });
                    }
                }
                // Consume the oldest queued chunk, waiting for its async load to finish.
                MemoryStreamRef writeItem = writeQueue.Dequeue();
                while (writeItem.Ready.WaitOne(TimeSpan.FromSeconds(0.01)) == false) { waitingForDecompression += 0.01f; }
                // Write output
                if (writeEnabled)
                {
                    workingHashes.Add(new WorkingHash() { Hash = writeItem.Hash, Offset = outFile.Position, Length = writeItem.Length });
                    outFile.Write(writeItem.MemStream.GetBuffer(), (int)writeItem.Offset, writeItem.Length);
                }
                // Verify SHA1
                sha1Provider.TransformBlock(writeItem.MemStream.GetBuffer(), (int)writeItem.Offset, writeItem.Length, writeItem.MemStream.GetBuffer(), (int)writeItem.Offset);
                stats.TotalWritten += writeItem.Length;
                totalSizeDone += writeItem.Length;
                stats.Title = string.Format("{0:F0}% {1}", 100 * (float)totalSizeDone / (float)totalSize, Path.GetFileName(archiveFilename));
                stats.Progress = (float)i / (float)file.HashIndices.Count;
                // Unload if it is not needed anymore
                if (writeItem.CacheLine != null)
                {
                    writeItem.CacheLine.Refs.RemoveAt(0);
                    if (writeItem.CacheLine.Refs.Count == 0)
                    {
                        StreamPool.Release(ref writeItem.CacheLine.Data);
                        writeItem.CacheLine.LoadDone = null;
                        loaded.Remove(writeItem.CacheLine);
                    }
                }
                // Unload some data if we are running out of memory: evict the cache
                // line whose next use is furthest in the future (largest Refs[0]).
                while (loaded.Count * Settings.MaxZipEntrySize > Settings.WriteCacheSize)
                {
                    ExtractedData maxRef = null;
                    foreach (ExtractedData ed in loaded.Keys)
                    {
                        if (maxRef == null || ed.Refs[0] > maxRef.Refs[0]) maxRef = ed;
                    }
                    maxRef.LoadDone.WaitOne();
                    // Check that we are not evicting something from the write queue
                    bool inQueue = false;
                    foreach (MemoryStreamRef memRef in writeQueue) { if (memRef.CacheLine == maxRef) inQueue = true; }
                    if (inQueue) break;
                    mbUnloadedDueToMemoryPressure += (float)maxRef.Data.Length / 1024 / 1024;
                    StreamPool.Release(ref maxRef.Data);
                    maxRef.LoadDone = null;
                    loaded.Remove(maxRef);
                }
            }
            stats.Progress = 0;
            // Finalize and check the whole-file SHA-1.
            sha1Provider.TransformFinalBlock(new byte[0], 0, 0);
            byte[] sha1 = sha1Provider.Hash;
            if (new Hash(sha1).CompareTo(new Hash(file.Hash)) != 0)
            {
                MessageBox.Show("The checksum of " + file.Name + " does not match original value. The file is corrupted.", "Critical error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                if (writeEnabled)
                {
                    stats.Status = "Extraction failed. Checksum mismatch.";
                }
                else
                {
                    stats.Status = "Verification failed. Checksum mismatch.";
                }
                return false;
            }
        }
        finally
        {
            if (outFile != null) outFile.Close();
        }
        if (writeEnabled)
        {
            // Record the freshly staged file so it can be moved into place later.
            FileInfo fileInfo = new FileInfo(tmpFileName);
            WorkingFile workingFile = new WorkingFile() { NameMixedcase = Path.Combine(destination, file.Name), Size = fileInfo.Length, Created = fileInfo.CreationTime, Modified = fileInfo.LastWriteTime, Hash = file.Hash, TempFileName = tmpFileName, Hashes = workingHashes };
            newWorkingFiles.Add(workingFile);
        }
    }
    stats.Progress = 0;
    stats.Title = string.Format("100% {0}", Path.GetFileName(archiveFilename));
    // Close sources
    foreach (ZipFile zip in openZips.Values) { zip.Dispose(); }
    if (openFileRead != null) openFile.EndRead(openFileRead);
    if (openFile != null) openFile.Close();
    // Replace the old working copy with new one
    if (writeEnabled)
    {
        List<string> deleteFilesLC = new List<string>();
        List<string> deleteFilesAskLC = new List<string>();
        List<string> keepFilesLC = new List<string>();
        stats.Status = "Preparing to move files";
        // Delete all non-user-modified files
        foreach (WorkingFile workingFile in workingCopy.GetAll())
        {
            if (!workingFile.UserModified && workingFile.ExistsOnDisk() && !workingFile.IsModifiedOnDisk())
            {
                WorkingFile newWF = newWorkingFiles.Find(workingFile.NameLowercase);
                // Do not delete if it is was skipped 'fast-path' file
                if (newWF != null && newWF.TempFileName == null) continue;
                deleteFilesLC.Add(workingFile.NameLowercase);
            }
        }
        // Find obstructions for new files
        foreach (WorkingFile newWorkingFile in newWorkingFiles.GetAll())
        {
            if (newWorkingFile.TempFileName != null && newWorkingFile.ExistsOnDisk() && !deleteFilesLC.Contains(newWorkingFile.NameLowercase))
            {
                deleteFilesAskLC.Add(newWorkingFile.NameLowercase);
            }
        }
        // Ask the user for permission to delete (truncate the listing after 30 lines)
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("Do you want to override local changes in the following files?");
        int numLines = 0;
        foreach (string deleteFileAskLC in deleteFilesAskLC)
        {
            sb.AppendLine(deleteFileAskLC);
            numLines++;
            if (numLines > 30)
            {
                sb.AppendLine("...");
                sb.AppendLine("(" + deleteFilesAskLC.Count + " files in total)");
                break;
            }
        }
        if (deleteFilesAskLC.Count > 0)
        {
            DialogResult overrideAnswer = Settings.AlwaysOverwrite ? DialogResult.Yes : MessageBox.Show(sb.ToString(), "Override files", MessageBoxButtons.YesNoCancel);
            if (overrideAnswer == DialogResult.Cancel)
            {
                stats.Status = "Canceled. No files were modified.";
                return false;
            }
            if (overrideAnswer == DialogResult.Yes)
            {
                deleteFilesLC.AddRange(deleteFilesAskLC);
            }
            else
            {
                // NOTE(review): keepFilesLC aliases deleteFilesAskLC here, so the
                // Clear() below also empties keepFilesLC — the "No" answer therefore
                // likely does not keep the files as intended. Confirm and fix by
                // copying the list instead of assigning the reference.
                keepFilesLC = deleteFilesAskLC;
            }
            deleteFilesAskLC.Clear();
        }
        // Delete files (retry/ignore/abort loop driven by the user on failure)
        foreach (string deleteFileLC in deleteFilesLC)
        {
            stats.Status = "Deleting " + Path.GetFileName(deleteFileLC);
            while (true)
            {
                try
                {
                    FileInfo fileInfo = new FileInfo(deleteFileLC);
                    if (fileInfo.IsReadOnly) { fileInfo.IsReadOnly = false; }
                    System.IO.File.Delete(deleteFileLC);
                    workingCopy.Remove(deleteFileLC);
                    break;
                }
                catch (Exception e)
                {
                    DialogResult deleteAnswer = MessageBox.Show("Can not delete file " + deleteFileLC + Environment.NewLine + e.Message, "Error", MessageBoxButtons.AbortRetryIgnore);
                    if (deleteAnswer == DialogResult.Retry) continue;
                    if (deleteAnswer == DialogResult.Ignore) break;
                    if (deleteAnswer == DialogResult.Abort)
                    {
                        stats.Status = "Canceled. Some files were deleted.";
                        return false;
                    }
                }
            }
        }
        // Move the new files from the staging directory into place
        foreach (WorkingFile newWorkingFile in newWorkingFiles.GetAll())
        {
            if (!keepFilesLC.Contains(newWorkingFile.NameLowercase) && newWorkingFile.TempFileName != null)
            {
                stats.Status = "Moving " + Path.GetFileName(newWorkingFile.NameMixedcase);
                while (true)
                {
                    try
                    {
                        Directory.CreateDirectory(Path.GetDirectoryName(newWorkingFile.NameMixedcase));
                        System.IO.File.Move(newWorkingFile.TempFileName, newWorkingFile.NameMixedcase);
                        workingCopy.Add(newWorkingFile);
                        break;
                    }
                    catch (Exception e)
                    {
                        DialogResult moveAnswer = MessageBox.Show("Error when moving " + newWorkingFile.TempFileName + Environment.NewLine + e.Message, "Error", MessageBoxButtons.AbortRetryIgnore);
                        if (moveAnswer == DialogResult.Retry) continue;
                        if (moveAnswer == DialogResult.Ignore) break;
                        if (moveAnswer == DialogResult.Abort)
                        {
                            stats.Status = "Canceled. Some files were deleted or overridden.";
                            return false;
                        }
                    }
                }
            }
        }
        stats.Status = "Saving working copy state";
        workingCopy.Save(stateFile);
        stats.Status = "Deleting temporary directory";
        // Best-effort cleanup; a locked temp directory should not fail the extraction.
        try { if (Directory.Exists(tmpPath)) Directory.Delete(tmpPath, true); } catch { }
    }
    stats.EndTime = DateTime.Now;
    if (writeEnabled) { stats.Status = "Extraction finished"; } else { stats.Status = "Verification finished"; }
    return true;
}
/// <summary>
/// Brings the working copy in line with the supplied master entry.
/// </summary>
/// <param name="masterCopy">The entry whose state should be copied over.</param>
protected override void SynchronizeWorkingCopy(IKeePassEntry masterCopy) =>
    WorkingCopy.SyncTo(masterCopy, false);
// Verifies that BuildGroup collects the steps between a group's start step and the
// flow's current step into exactly one group with the correct aggregate flags.
public void TestBuildGroup()
{
    // Arrange: a finished "start" step fanning out to two "step1" instances —
    // one finished successfully (a1) and one whose action has not finished (a2).
    var startStep = new WorkingCopyStep
    {
        Code = "start",
        StatusId = "all",
        ActionFinished = true,
        Success = true,
        PostedNext = true,
        Finished = true,
    };
    var a1Step1Step = new WorkingCopyStep { Code = "step1", StatusId = "a1", ActionFinished = true, Success = true };
    var a2Step1Step = new WorkingCopyStep { Code = "step1", StatusId = "a2", ActionFinished = false, Success = true };
    var work = new WorkingCopy();
    work.Steps.Add(startStep);
    work.Steps.Add(a1Step1Step);
    work.Steps.Add(a2Step1Step);
    work.Flows.Add(new WorkingCopyFlow { FromStep = new WorkingCopyFlowSeed(startStep), ToStep = new WorkingCopyFlowSeed(a1Step1Step) });
    work.Flows.Add(new WorkingCopyFlow { FromStep = new WorkingCopyFlowSeed(startStep), ToStep = new WorkingCopyFlowSeed(a2Step1Step) });
    // Workflow config: step1 -> step2 on any group success, step1 -> step3 on all
    // group success; both groups start at "start".
    var workflow = new WorkFlowConfig
    {
        Flows = new List<WorkFlowConfigFlow>
        {
            new WorkFlowConfigFlow { CurrentStepCode = "start", NextStepCode = "step1", NextOn = FlowNextType.OnSuccess },
            new WorkFlowConfigFlow { CurrentStepCode = "step1", NextStepCode = "step2", NextOn = FlowNextType.OnGroupAnySuccess, GroupStartStepCode = "start" },
            new WorkFlowConfigFlow { CurrentStepCode = "step1", NextStepCode = "step3", NextOn = FlowNextType.OnGroupAllSuccess, GroupStartStepCode = "start" },
        }
    };
    var flow = workflow.Flows.FirstOrDefault(x => x.CurrentStepCode == "step1" && x.NextStepCode == "step2");

    // Act
    var groups = WorkingCopyGroup.BuildGroup(work, workflow, flow, string.Empty, "ut");

    // Assert: exactly one group spanning start -> step1.
    Assert.IsNotNull(groups);
    Assert.AreEqual(1, groups.Count());
    var group = groups.First();
    Assert.IsNotNull(group);
    Assert.AreEqual(flow.Id, group.FLowId);
    Assert.AreEqual(flow.GroupStartStepCode, group.StartStepCode);
    Assert.AreEqual(flow.CurrentStepCode, group.EndStepCode);
    // a2's action has not finished, so the group is neither fulfilled nor
    // all-successful, but a1's success makes AnySuccess true.
    Assert.IsFalse(group.Fulfilled);
    Assert.IsTrue(group.AnySuccess);
    Assert.IsFalse(group.AnyFail);
    Assert.IsFalse(group.AllSuccess);
    Assert.IsFalse(group.AllFail);
    Assert.IsFalse(group.PostedNext);
    Assert.IsNotNull(group.Steps);
    Assert.AreEqual(3, group.Steps.Count);
}
public void ShouldDiscoverGitWorkingCopies()
{
    // The test runner executes inside a git checkout, so discovery must succeed.
    var discovered = WorkingCopy.Discover(Directory.GetCurrentDirectory());

    discovered.ShouldNotBeNull();
}
// End-to-end check for the aggregate-pre-releases option: pre-release tags are created
// alongside full releases, and each full release's changelog section repeats the
// entries from the pre-releases that preceded it.
public void ShouldAggregatePrereleases()
{
    TempProject.CreateCsharpProject(_testSetup.WorkingDirectory);

    var workingCopy = WorkingCopy.Discover(_testSetup.WorkingDirectory);
    var fileCommitter = new FileCommitter(_testSetup);

    // Release an initial version
    fileCommitter.CommitChange("feat: initial commit");
    workingCopy.Versionize(new VersionizeOptions { AggregatePrereleases = true });

    // Prerelease as patch alpha
    fileCommitter.CommitChange("fix: a fix");
    workingCopy.Versionize(new VersionizeOptions { Prerelease = "alpha", AggregatePrereleases = true });

    // Prerelease as minor alpha
    fileCommitter.CommitChange("feat: a feature");
    workingCopy.Versionize(new VersionizeOptions { Prerelease = "alpha", AggregatePrereleases = true });

    // Full release
    workingCopy.Versionize(new VersionizeOptions { AggregatePrereleases = true });

    // Full release
    fileCommitter.CommitChange("feat: another feature");
    workingCopy.Versionize(new VersionizeOptions { AggregatePrereleases = true });

    var versionTagNames = VersionTagNames.ToList();
    versionTagNames.ShouldBe(new[] { "v1.0.0", "v1.0.1-alpha.0", "v1.1.0", "v1.1.0-alpha.0", "v1.2.0" });

    // NOTE(review): "yyyy-M-d" assumes the changelog was written "today"; a run that
    // straddles midnight would make this assertion flaky — confirm this is acceptable.
    var commitDate = DateTime.Now.ToString("yyyy-M-d");
    var changelogContents = File.ReadAllText(Path.Join(_testSetup.WorkingDirectory, "CHANGELOG.md"));

    // Expected changelog: newest section first, pre-release sections interleaved, and
    // the aggregated 1.1.0 section repeating both the feature and the fix entries.
    var sb = new ChangelogStringBuilder();
    sb.Append(ChangelogOptions.Preamble);
    sb.Append("<a name=\"1.2.0\"></a>");
    sb.Append($"## 1.2.0 ({commitDate})", 2);
    sb.Append("### Features", 2);
    sb.Append("* another feature", 2);
    sb.Append("<a name=\"1.1.0\"></a>");
    sb.Append($"## 1.1.0 ({commitDate})", 2);
    sb.Append("### Features", 2);
    sb.Append("* a feature", 2);
    sb.Append("### Bug Fixes", 2);
    sb.Append("* a fix", 2);
    sb.Append("<a name=\"1.1.0-alpha.0\"></a>");
    sb.Append($"## 1.1.0-alpha.0 ({commitDate})", 2);
    sb.Append("### Features", 2);
    sb.Append("* a feature", 2);
    sb.Append("### Bug Fixes", 2);
    sb.Append("* a fix", 2);
    sb.Append("<a name=\"1.0.1-alpha.0\"></a>");
    sb.Append($"## 1.0.1-alpha.0 ({commitDate})", 2);
    sb.Append("### Bug Fixes", 2);
    sb.Append("* a fix", 2);
    sb.Append("<a name=\"1.0.0\"></a>");
    sb.Append($"## 1.0.0 ({commitDate})", 2);
    sb.Append("### Features", 2);
    sb.Append("* initial commit", 2);

    Assert.Equal(sb.Build(), changelogContents);
}
public void ShouldExitIfWorkingCopyDoesNotExist()
{
    // A temp-rooted path that was never created cannot contain a git working copy,
    // so discovery is expected to bail out via CommandLineExitException.
    var missingDirectory = Path.Combine(Path.GetTempPath(), "ShouldExitIfWorkingCopyDoesNotExist");

    Assert.Throws<CommandLineExitException>(() => WorkingCopy.Discover(missingDirectory));
}
public void ShouldDiscoverGitWorkingCopies()
{
    // The fixture's working directory is a prepared git repository,
    // so discovery must succeed.
    var discovered = WorkingCopy.Discover(_testSetup.WorkingDirectory);

    discovered.ShouldNotBeNull();
}
// Recursively creates server-side SVN copies for every working copy below (and
// including) iDataElement, then re-maps External elements onto the new version path.
// Depth-first: children are processed before the element itself. Returns false as
// soon as an svn copy fails or the user declines to continue; true otherwise.
// NOTE(review): relies on m_sURLVersionPath, m_sRootProject, m_iRootElement and
// m_bIgnoreExistingTargets being initialized by the caller — confirm at the call site.
private bool CreateSubCopies(IDataElement iDataElement, string sRelativePath)
{
    // traverse through childs first
    foreach (IDataElement itm in iDataElement.DataChilds)
    {
        string sPath = sRelativePath;
        if (itm.Name.Length > 0)
        {
            sPath = sPath + "/" + itm.Name;
        }
        if (!CreateSubCopies(itm, sPath))
        {
            return(false); // propagate failure/abort up the recursion
        }
    }
    // if working copy, create a copy for the links above
    if ((iDataElement is WorkingCopy) && (sRelativePath.Length > 0)) ///< only applies if working copy of sub/external (not same as top project)
    {
        WorkingCopy wc = (WorkingCopy)iDataElement;
        // lazily fetch "svn info" for this working copy the first time it is needed
        if (null == wc.m_xInfo)
        {
            wc.m_xInfo = SvnXmlInfo.GetInfo(wc.m_sPath);
        }
        // derive the copy target URL: repository version base + requested version path
        string sURLVersionPath2,
               sURLVersionBase = General.GetVersionBase(wc.m_xInfo.m_Entries[0].m_sURL, out sURLVersionPath2),
               sURL = sURLVersionBase + m_sURLVersionPath;
        if (sURLVersionBase.Length > 0)
        {
            // check, if target path exists
            SvnXmlInfo xTarget = SvnXmlInfo.GetInfo(sURL);
            if ((xTarget == null) || (xTarget.m_Entries.Count == 0))
            {
                // target absent -> perform the server-side copy
                // (commit message spelling "hierarchal" is a runtime string; left as-is)
                int iRet = svn.Copy(wc.m_sPath, sURL, "hierarchal copy " + m_sRootProject + sRelativePath + " as " + m_sURLVersionPath, true);
                if (iRet != 0)
                {
                    return(false);
                }
            }
            else
            {
                // target already exists: ask the user unless they opted out earlier
                if (!m_bIgnoreExistingTargets)
                {
                    // NOTE(review): wnd is assigned but never used in this branch
                    MainWindow wnd = (MainWindow)iDataElement.MainWnd;
                    switch (MessageBox.Show("Target URL\n" + sURL + "\nalready exists.\n\nContinue?\n\n(cancel = do not ask again)", "Target exists", MessageBoxButton.YesNoCancel))
                    {
                    case MessageBoxResult.No:
                        return(false); // user aborted

                    case MessageBoxResult.Cancel:
                        // "cancel = do not ask again": suppress further prompts
                        m_bIgnoreExistingTargets = true;
                        break;
                    }
                }
            }
        }
    }
    // re-map version elements to project-specific copy
    if (iDataElement is External)
    {
        External ext = (External)iDataElement;
        if (ext.m_sURLVersionBase.Length > 0)
        {
            // stash current values in *Temp and point the external at the new copy;
            // peg/op revisions are cleared so the external follows the copy's HEAD
            ext.m_sURLVersionPathTemp = ext.m_sURLVersionPath;
            ext.m_sURLVersionPath = m_sURLVersionPath;
            ext.m_sPegRevisionTemp = ext.m_sPegRevision;
            ext.m_sPegRevision = "";
            ext.m_sOpRevisionTemp = ext.m_sOpRevision;
            ext.m_sOpRevision = "";
            // update view
            if (iDataElement.TreeItem != null)
            {
                MainWindow wnd = (MainWindow)m_iRootElement.MainWnd;
                TreeItem ti = (TreeItem)iDataElement.TreeItem;
                // self-assignments presumably re-fire the property setters to refresh
                // the displayed values — TODO confirm against TreeItem's setters
                ti.VersionPath = ti.VersionPath;
                ti.Revision = ti.Revision;
                wnd.RefreshTreeView();
            }
        }
    }
    // apply changes of externals
    if (iDataElement is FolderWithExternals)
    {
        FolderWithExternals fld = (FolderWithExternals)iDataElement;
        fld.ApplyChanges(false, true, true);
    }
    return(true);
}
/// <summary>
/// Entry point for the versionize CLI: parses command line options, merges them with
/// the optional ".versionize" JSON config and runs either the "inspect" sub command
/// or a full versionize run against the discovered git working copy.
/// </summary>
/// <param name="args">Raw command line arguments.</param>
/// <returns>Process exit code: 0 on success, non-zero on failure.</returns>
public static int Main(string[] args)
{
    var app = new CommandLineApplication { Name = "versionize", UsePagerForHelpText = false };

    app.HelpOption();
    app.VersionOption("-v|--version", GetVersion());

    // CLI options; most can also be supplied via the ".versionize" JSON config
    // and are merged in MergeWithOptions below.
    var optionWorkingDirectory = app.Option("-w|--workingDir <WORKING_DIRECTORY>", "Directory containing projects to version", CommandOptionType.SingleValue);
    var optionDryRun = app.Option("-d|--dry-run", "Skip changing versions in projects, changelog generation and git commit", CommandOptionType.NoValue);
    var optionSkipDirty = app.Option("--skip-dirty", "Skip git dirty check", CommandOptionType.NoValue);
    var optionReleaseAs = app.Option("-r|--release-as <VERSION>", "Specify the release version manually", CommandOptionType.SingleValue);
    var optionSilent = app.Option("--silent", "Suppress output to console", CommandOptionType.NoValue);
    var optionSkipCommit = app.Option("--skip-commit", "Skip commit and git tag after updating changelog and incrementing the version", CommandOptionType.NoValue);
    var optionIgnoreInsignificant = app.Option("-i|--ignore-insignificant-commits", "Do not bump the version if no significant commits (fix, feat or BREAKING) are found", CommandOptionType.NoValue);
    var optionExitInsignificant = app.Option("--exit-insignificant-commits", "Exits with a non zero exit code if no significant commits (fix, feat or BREAKING) are found", CommandOptionType.NoValue);
    var optionIncludeAllCommitsInChangelog = app.Option("--changelog-all", "Include all commits in the changelog not just fix, feat and breaking changes", CommandOptionType.NoValue);
    var optionCommitSuffix = app.Option("--commit-suffix", "Suffix to be added to the end of the release commit message (e.g. [skip ci])", CommandOptionType.SingleValue);
    var optionPrerelease = app.Option("-p|--pre-release", "Release as pre-release version with given pre release label.", CommandOptionType.SingleValue);
    var optionAggregatePrereleases = app.Option("-a|--aggregate-pre-releases", "Include all pre-release commits in the changelog since the last full version.", CommandOptionType.NoValue);

    // "inspect" sub command: only prints the detected version, changes nothing.
    // Lambda parameter renamed to "cmd" so it no longer shadows the local it initializes.
    var inspectCmd = app.Command("inspect", cmd => cmd.OnExecute(() =>
    {
        var cwd = optionWorkingDirectory.Value() ?? Directory.GetCurrentDirectory();

        WorkingCopy
            .Discover(cwd)
            .Inspect();

        return 0;
    }));
    inspectCmd.Description = "Prints the current version to stdout";

    app.OnExecute(() =>
    {
        var cwd = optionWorkingDirectory.Value() ?? Directory.GetCurrentDirectory();

        // CLI options win over the JSON file config where both are present.
        var jsonFileConfig = FromJsonFile(Path.Join(cwd, ".versionize"));
        var options = MergeWithOptions(
            jsonFileConfig,
            new VersionizeOptions
            {
                DryRun = optionDryRun.HasValue(),
                SkipDirty = optionSkipDirty.HasValue(),
                SkipCommit = optionSkipCommit.HasValue(),
                ReleaseAs = optionReleaseAs.Value(),
                IgnoreInsignificantCommits = optionIgnoreInsignificant.HasValue(),
                ExitInsignificantCommits = optionExitInsignificant.HasValue(),
                CommitSuffix = optionCommitSuffix.Value(),
                Prerelease = optionPrerelease.Value(),
                Changelog = ChangelogOptions.Default,
                AggregatePrereleases = optionAggregatePrereleases.HasValue(),
            },
            optionIncludeAllCommitsInChangelog.HasValue());

        CommandLineUI.Verbosity = MergeBool(optionSilent.HasValue(), jsonFileConfig?.Silent) ? LogLevel.Silent : LogLevel.All;

        WorkingCopy
            .Discover(cwd)
            .Versionize(options);

        return 0;
    });

    try
    {
        return app.Execute(args);
    }
    catch (Exception ex) when (ex is UnrecognizedCommandParsingException || ex is InvalidPrereleaseIdentifierException)
    {
        return CommandLineUI.Exit(ex.Message, 1);
    }
    catch (LibGit2Sharp.NotFoundException e)
    {
        // Fixed message: the previous text said "cloned with depth --1" and advised
        // fetch-depth: '1' — a depth-1 (shallow) clone is the CAUSE of this exception;
        // the remedy is a full-history checkout, i.e. fetch-depth: '0'.
        return CommandLineUI.Exit($@"Error: LibGit2Sharp.NotFoundException

This is most likely caused by running versionize against a git repository cloned with --depth 1 (a shallow clone).

In case you're using the actions/checkout@v2 in github actions you could specify fetch-depth: '0'. For more detail see

    https://github.com/actions/checkout

Exception detail:

{e}", 1);
    }
}