// Verifies SubmoduleSyncCommand: a submodule whose URL is recorded only in
// .gitmodules gets that URL synced into the repository config and into the
// submodule's own default-remote config.
public virtual void RepositoryWithSubmodule()
{
    // Seed the parent repository with one commit so it can be cloned below.
    WriteTrashFile("file.txt", "content");
    Git git = Git.Wrap(db);
    git.Add().AddFilepattern("file.txt").Call();
    git.Commit().SetMessage("create file").Call();
    // Register a gitlink entry for the submodule directly in the index.
    ObjectId id = ObjectId.FromString("abcd1234abcd1234abcd1234abcd1234abcd1234");
    string path = "sub";
    DirCache cache = db.LockDirCache();
    DirCacheEditor editor = cache.Editor();
    editor.Add(new _PathEdit_96(id, path));
    editor.Commit();
    // Write .gitmodules with the submodule's path and URL.
    FileBasedConfig modulesConfig = new FileBasedConfig(new FilePath(db.WorkTree, Constants.DOT_GIT_MODULES), db.FileSystem);
    modulesConfig.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.CONFIG_KEY_PATH, path);
    string url = "git://server/repo.git";
    modulesConfig.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.CONFIG_KEY_URL, url);
    modulesConfig.Save();
    // Clone the parent into the submodule's working directory so a real
    // repository exists at the submodule path.
    Repository subRepo = Git.CloneRepository().SetURI(db.Directory.ToURI().ToString()).SetDirectory(new FilePath(db.WorkTree, path)).Call().GetRepository();
    AddRepoToClose(subRepo);
    NUnit.Framework.Assert.IsNotNull(subRepo);
    // Before sync: URL is visible via .gitmodules but not via the config.
    SubmoduleWalk generator = SubmoduleWalk.ForIndex(db);
    NUnit.Framework.Assert.IsTrue(generator.Next());
    NUnit.Framework.Assert.IsNull(generator.GetConfigUrl());
    NUnit.Framework.Assert.AreEqual(url, generator.GetModulesUrl());
    // Run the sync and check it reports exactly the one submodule.
    SubmoduleSyncCommand command = new SubmoduleSyncCommand(db);
    IDictionary<string, string> synced = command.Call();
    NUnit.Framework.Assert.IsNotNull(synced);
    NUnit.Framework.Assert.AreEqual(1, synced.Count);
    KeyValuePair<string, string> module = synced.EntrySet().Iterator().Next();
    NUnit.Framework.Assert.AreEqual(path, module.Key);
    NUnit.Framework.Assert.AreEqual(url, module.Value);
    // After sync: the URL is now visible through the repository config...
    generator = SubmoduleWalk.ForIndex(db);
    NUnit.Framework.Assert.IsTrue(generator.Next());
    NUnit.Framework.Assert.AreEqual(url, generator.GetConfigUrl());
    Repository subModRepository = generator.GetRepository();
    AddRepoToClose(subModRepository);
    // ...and stored as the default remote's URL inside the submodule repo.
    StoredConfig submoduleConfig = subModRepository.GetConfig();
    NUnit.Framework.Assert.AreEqual(url, submoduleConfig.GetString(ConfigConstants.CONFIG_REMOTE_SECTION, Constants.DEFAULT_REMOTE_NAME, ConfigConstants.CONFIG_KEY_URL));
}
// Verifies DirCache.getEntriesWithin: entries under a subtree prefix are
// returned in order, and non-directory paths ("a.", "a0b") or absent
// prefixes ("zoo") yield an empty (never null) array.
//
// Fix: the final three checks previously null-checked one path but asserted
// Length on a different path with a trailing dot ("a0b" vs "a0b.",
// "zoo" vs "zoo."); both halves now test the same prefix.
public void testEntriesWithin()
{
    DirCache dc = DirCache.read(db);
    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }
    // Entries 1..3 ("a/b", "a/c", "a/d") live inside subtree "a".
    int aFirst = 1;
    int aLast = 3;
    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();
    Assert.AreEqual(paths.Length, dc.getEntryCount());
    for (int i = 0; i < ents.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
    }
    // Both "a" and "a/" must select the same three child entries.
    DirCacheEntry[] aContents = dc.getEntriesWithin("a");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }
    aContents = dc.getEntriesWithin("a/");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }
    // File paths and unknown prefixes return an empty, non-null array.
    Assert.IsNotNull(dc.getEntriesWithin("a."));
    Assert.AreEqual(0, dc.getEntriesWithin("a.").Length);
    Assert.IsNotNull(dc.getEntriesWithin("a0b"));
    Assert.AreEqual(0, dc.getEntriesWithin("a0b").Length);
    Assert.IsNotNull(dc.getEntriesWithin("zoo"));
    Assert.AreEqual(0, dc.getEntriesWithin("zoo").Length);
}
// Walks an index containing "a.", "a/b", "a/c", "a/d", "a0b" with recursion
// disabled and expects the subtree "a" to surface as a single Tree entry
// between its neighbours (index sort order: "a." < "a/*" < "a0b").
public void testSingleSubtree_NoRecursion()
{
    DirCache dc = DirCache.read(db);
    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }
    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();
    // Expected walk output: the two root files plus one synthetic tree for
    // "a". expPos is the DirCacheIterator pointer for file entries, -1 for
    // the tree entry (which has no backing cache entry).
    string[] expPaths = { "a.", "a", "a0b" };
    FileMode[] expModes = { FileMode.RegularFile, FileMode.Tree, FileMode.RegularFile };
    var expPos = new[] { 0, -1, 4 };
    var iter = new DirCacheIterator(dc);
    var tw = new GitSharp.Core.TreeWalk.TreeWalk(db);
    tw.reset();
    tw.addTree(iter);
    tw.Recursive = false;
    int pathIdx = 0;
    while (tw.next())
    {
        Assert.AreSame(iter, tw.getTree<DirCacheIterator>(0, typeof(DirCacheIterator)));
        Assert.AreEqual(expModes[pathIdx].Bits, tw.getRawMode(0));
        Assert.AreSame(expModes[pathIdx], tw.getFileMode(0));
        Assert.AreEqual(expPaths[pathIdx], tw.getPathString());
        if (expPos[pathIdx] >= 0)
        {
            // File entry: iterator must point at the matching cache entry.
            Assert.AreEqual(expPos[pathIdx], iter.Pointer);
            Assert.AreSame(ents[expPos[pathIdx]], iter.getDirCacheEntry());
        }
        else
        {
            // Subtree entry: reported with tree mode.
            Assert.AreSame(FileMode.Tree, tw.getFileMode(0));
        }
        pathIdx++;
    }
    Assert.AreEqual(expPaths.Length, pathIdx);
}
/// <summary>
/// Construct a tree from a specific listing of file entries.
/// </summary>
/// <param name="entries">
/// the files to include in the tree. The collection does not need to be
/// sorted properly and may be empty.
/// </param>
/// <returns>reference to the tree specified by the entry list.</returns>
/// <exception cref="System.Exception"></exception>
public RevTree tree(params DirCacheEntry[] entries)
{
    DirCache index = DirCache.newInCore();
    DirCacheBuilder builder = index.builder();
    foreach (DirCacheEntry entry in entries)
    {
        builder.add(entry);
    }
    builder.finish();
    ObjectId treeId = index.writeTree(writer);
    return pool.lookupTree(treeId);
}
// An empty index, iterated through a TreeWalk, must yield no entries.
public void testEmptyTree_WithTreeWalk()
{
    DirCache index = DirCache.read(db);
    Assert.AreEqual(0, index.getEntryCount());

    var walk = new TreeWalk(db);
    walk.reset();
    walk.addTree(new DirCacheIterator(index));

    Assert.IsFalse(walk.next());
}
// Assert that every specified index entry has the same last-modification
// timestamp as the associated file in the working tree.
//
// Fix: the assertion message read "has lastmodified with is different";
// corrected to "which is different".
/// <exception cref="System.IO.IOException"></exception>
private void CheckConsistentLastModified(params string[] pathes)
{
    DirCache dc = db.ReadDirCache();
    FilePath workTree = db.WorkTree;
    foreach (string path in pathes)
    {
        NUnit.Framework.Assert.AreEqual(new FilePath(workTree, path).LastModified(), dc.GetEntry(path).LastModified,
            "IndexEntry with path " + path + " has lastmodified which is different from the worktree file");
    }
}
/// <summary>
/// Loads the serialized DirCache from the cache file, if one exists.
/// </summary>
/// <returns>true when the cache file was found and deserialized; false otherwise.</returns>
public static bool Load()
{
    // Guard clause: nothing to load when the cache file is absent.
    if (!File.Exists(cacheFile))
    {
        return false;
    }

    dc = LZ4MessagePackSerializer.Deserialize<DirCache>(File.ReadAllBytes(cacheFile));
    return true;
}
// Builds a tree with ".sth"/".txt" files at the root and under "sub/",
// then walks it recursively with a PathSuffixFilter and expects only the
// two ".txt" paths to be reported.
public void testRecursiveFiltering()
{
    var ow = new ObjectWriter(db);
    ObjectId aSth = ow.WriteBlob("a.sth".getBytes());
    ObjectId aTxt = ow.WriteBlob("a.txt".getBytes());
    ObjectId bSth = ow.WriteBlob("b.sth".getBytes());
    ObjectId bTxt = ow.WriteBlob("b.txt".getBytes());
    DirCache dc = DirCache.read(db);
    DirCacheBuilder builder = dc.builder();
    var aSthEntry = new DirCacheEntry("a.sth");
    aSthEntry.setFileMode(FileMode.RegularFile);
    aSthEntry.setObjectId(aSth);
    var aTxtEntry = new DirCacheEntry("a.txt");
    aTxtEntry.setFileMode(FileMode.RegularFile);
    aTxtEntry.setObjectId(aTxt);
    builder.add(aSthEntry);
    builder.add(aTxtEntry);
    var bSthEntry = new DirCacheEntry("sub/b.sth");
    bSthEntry.setFileMode(FileMode.RegularFile);
    bSthEntry.setObjectId(bSth);
    var bTxtEntry = new DirCacheEntry("sub/b.txt");
    bTxtEntry.setFileMode(FileMode.RegularFile);
    bTxtEntry.setObjectId(bTxt);
    builder.add(bSthEntry);
    builder.add(bTxtEntry);
    builder.finish();
    // Persist the index as a tree object and walk that tree (not the index).
    ObjectId treeId = dc.writeTree(ow);
    var tw = new GitSharp.Core.TreeWalk.TreeWalk(db);
    tw.Recursive = true;
    tw.setFilter(PathSuffixFilter.create(".txt"));
    tw.addTree(treeId);
    var paths = new LinkedList<string>();
    while (tw.next())
    {
        paths.AddLast(tw.getPathString());
    }
    var expected = new LinkedList<string>();
    expected.AddLast("a.txt");
    expected.AddLast("sub/b.txt");
    Assert.AreEqual(expected, paths);
}
// Builds (or restores from the per-directory cache) a lower-cased
// full-text search index (content bytes + suffix array) for the file at
// "path".
public FullTextIndex(string path)
{
    this.fullpath = path;
    var dir = Path.GetDirectoryName(path);
    var fname = Path.GetFileName(path);
    this.lastModified = File.GetLastWriteTimeUtc(path);
    // One DirCache per directory, lazily loaded from "<dir>.search_index".
    DirCache dcache;
    if (!dirCaches.TryGetValue(dir, out dcache))
    {
        dcache = new DirCache();
        dcache.read(dir + ".search_index");
        dirCaches[dir] = dcache;
    }
    // Reuse the cached entry when the file has not changed since it was
    // indexed.
    if (dcache.cache.ContainsKey(fname))
    {
        var fti = dcache.cache[fname];
        if (fti.lastModified == this.lastModified)
        {
            this.data = fti.data;
            this.suffix_array = fti.suffix_array;
            return;
        }
    }
    // Read and lower-case the file content; on access failure leave the
    // index empty (data and suffix_array remain null).
    try
    {
        data = System.Text.Encoding.UTF8.GetBytes(File.ReadAllText(path).ToLowerInvariant());
    }
    catch (IOException)
    {
        return;
    }
    catch (UnauthorizedAccessException)
    {
        return;
    }
    if (suffix_array == null)
    {
        // suffixArray() appears to expect the input padded with three
        // trailing zero sentinels -- NOTE(review): confirm against the
        // suffixArray implementation.
        var temp = new int[data.Length + 3];
        for (int i = 0; i < data.Length; i++)
        {
            temp[i] = data[i];
        }
        temp[data.Length] = temp[data.Length + 1] = temp[data.Length + 2] = 0;
        suffix_array = new int[data.Length];
        for (int i = 0; i < data.Length; i++)
        {
            suffix_array[i] = i;
        }
        suffixArray(temp, suffix_array, data.Length, 255);
        // Store the freshly built index in the directory cache.
        dcache.Add(this);
    }
}
// Writes the given index as a tree and wraps it in a commit whose parents
// are parentIds; returns the id of the written commit.
private ObjectId Commit(ObjectWriter ow, DirCache treeB, ObjectId[] parentIds)
{
    var commit = new Core.Commit(db);
    commit.TreeId = treeB.writeTree(ow);
    commit.Author = new PersonIdent("A U Thor", "a.u.thor", 1L, 0);
    commit.Committer = commit.Author;
    commit.ParentIds = parentIds;
    commit.Message = "Tree " + commit.TreeId.Name;
    return ow.WriteCommit(commit);
}
// Builds an in-memory index from the given entries (any order, possibly
// empty), writes it as a tree, and returns the RevTree handle.
protected RevTree tree(params DirCacheEntry[] entries)
{
    DirCache index = DirCache.newInCore();
    DirCacheBuilder builder = index.builder();
    foreach (DirCacheEntry entry in entries)
    {
        builder.add(entry);
    }
    builder.finish();
    return rw.lookupTree(index.writeTree(_ow));
}
// Walks the index through a DirCacheBuildIterator with a PathFilterGroup
// matching a single middle entry ("a/c"), then verifies that finishing the
// builder keeps every entry the filtered walk skipped over.
public void testPathFilterGroup_DoesNotSkipTail()
{
    DirCache dc = DirCache.read(db);
    var mode = FileMode.RegularFile;
    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(mode);
    }
    DirCacheBuilder builder = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        builder.add(ents[i]);
    }
    builder.finish();
    // "a/c" -- the only path the filter should match.
    const int expIdx = 2;
    DirCacheBuilder b = dc.builder();
    var tw = new GitSharp.Core.TreeWalk.TreeWalk(db);
    tw.reset();
    tw.addTree(new DirCacheBuildIterator(b));
    tw.Recursive = true;
    tw.setFilter(PathFilterGroup.createFromStrings(new[] { paths[expIdx] }));
    Assert.IsTrue(tw.next(), "found " + paths[expIdx]);
    var c = tw.getTree<DirCacheIterator>(0, typeof(DirCacheIterator));
    Assert.IsNotNull(c);
    Assert.AreEqual(expIdx, c.Pointer);
    Assert.AreSame(ents[expIdx], c.getDirCacheEntry());
    Assert.AreEqual(paths[expIdx], tw.getPathString());
    Assert.AreEqual(mode.Bits, tw.getRawMode(0));
    Assert.AreSame(mode, tw.getFileMode(0));
    // Re-add the matched entry; entries skipped by the filter must still be
    // retained by the builder.
    b.add(c.getDirCacheEntry());
    Assert.IsFalse(tw.next(), "no more entries");
    b.finish();
    Assert.AreEqual(ents.Length, dc.getEntryCount());
    for (int i = 0; i < ents.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
    }
}
/// <summary>Creates a resolve merger for the given repository.</summary>
/// <param name="local">the repository to merge within.</param>
/// <param name="inCore">true to keep merge results purely in memory.</param>
protected internal ResolveMerger(Repository local, bool inCore) : base(local)
{
    // Pick the text diff algorithm from diff.algorithm, defaulting to
    // histogram diff.
    DiffAlgorithm.SupportedAlgorithm configuredAlgorithm = local.GetConfig().GetEnum(
        ConfigConstants.CONFIG_DIFF_SECTION, null,
        ConfigConstants.CONFIG_KEY_ALGORITHM,
        DiffAlgorithm.SupportedAlgorithm.HISTOGRAM);
    mergeAlgorithm = new MergeAlgorithm(DiffAlgorithm.GetAlgorithm(configuredAlgorithm));
    commitNames = new string[] { "BASE", "OURS", "THEIRS" };
    this.inCore = inCore;
    if (inCore)
    {
        dircache = DirCache.NewInCore();
    }
}
/// <summary>
/// Writes the given index as a tree and inserts a commit pointing at it
/// with the supplied parents; flushes the inserter before returning.
/// </summary>
/// <exception cref="System.Exception"></exception>
private ObjectId Commit(ObjectInserter odi, DirCache treeB, ObjectId[] parentIds)
{
    var commit = new NGit.CommitBuilder();
    commit.TreeId = treeB.WriteTree(odi);
    commit.Author = new PersonIdent("A U Thor", "a.u.thor", 1L, 0);
    commit.Committer = commit.Author;
    commit.SetParentIds(parentIds);
    commit.Message = "Tree " + commit.TreeId.Name;
    ObjectId commitId = odi.Insert(commit);
    odi.Flush();
    return commitId;
}
// A builder finished with no entries must commit an index that re-reads
// as empty.
public void testBuildEmpty()
{
    DirCache index = DirCache.Lock(db);
    DirCacheBuilder builder = index.builder();
    Assert.IsNotNull(builder);
    builder.finish();
    index.write();
    Assert.IsTrue(index.commit());

    index = DirCache.read(db);
    Assert.AreEqual(0, index.getEntryCount());
}
public virtual void TestRevert()
{
    // B---P---T
    //
    // Revert P, this should result in a tree with a
    // from B and t from T as the change to a in P
    // and addition of t in P is reverted.
    //
    // We use the standard merge, but change the order
    // of the sources.
    //
    // Build three indexes: base B, "patch" P, and tip T.
    DirCache treeB = db.ReadDirCache();
    DirCache treeP = db.ReadDirCache();
    DirCache treeT = db.ReadDirCache();
    {
        DirCacheBuilder b = treeB.Builder();
        DirCacheBuilder p = treeP.Builder();
        DirCacheBuilder t = treeT.Builder();
        b.Add(MakeEntry("a", FileMode.REGULAR_FILE));
        p.Add(MakeEntry("a", FileMode.REGULAR_FILE, "q"));
        p.Add(MakeEntry("p-fail", FileMode.REGULAR_FILE));
        t.Add(MakeEntry("a", FileMode.REGULAR_FILE, "q"));
        t.Add(MakeEntry("p-fail", FileMode.REGULAR_FILE));
        t.Add(MakeEntry("t", FileMode.REGULAR_FILE));
        b.Finish();
        p.Finish();
        t.Finish();
    }
    // Commit the chain B -> P -> T.
    ObjectInserter ow = db.NewObjectInserter();
    ObjectId B = Commit(ow, treeB, new ObjectId[] { });
    ObjectId P = Commit(ow, treeP, new ObjectId[] { B });
    ObjectId T = Commit(ow, treeT, new ObjectId[] { P });
    // Merge B and T with P as the base: this reverts P's changes.
    ThreeWayMerger twm = ((ThreeWayMerger)MergeStrategy.SIMPLE_TWO_WAY_IN_CORE.NewMerger(db));
    twm.SetBase(P);
    bool merge = twm.Merge(new ObjectId[] { B, T });
    NUnit.Framework.Assert.IsTrue(merge);
    // Resulting tree: "a" from B, "t" from T, "p-fail" gone.
    TreeWalk tw = new TreeWalk(db);
    tw.Recursive = true;
    tw.Reset(twm.GetResultTreeId());
    NUnit.Framework.Assert.IsTrue(tw.Next());
    NUnit.Framework.Assert.AreEqual("a", tw.PathString);
    AssertCorrectId(treeB, tw);
    NUnit.Framework.Assert.IsTrue(tw.Next());
    NUnit.Framework.Assert.AreEqual("t", tw.PathString);
    AssertCorrectId(treeT, tw);
    NUnit.Framework.Assert.IsFalse(tw.Next());
}
// Two-way in-core merge where each side changes a different file inside
// the same subtree "d"; both changes must survive in the merged tree.
public void testTrivialTwoWay_concurrentSubtreeChange()
{
    DirCache treeB = DirCache.read(db);
    DirCache treeO = DirCache.read(db);
    DirCache treeT = DirCache.read(db);
    {
        // Base has unmodified d/o and d/t; O changes d/o, T changes d/t.
        DirCacheBuilder b = treeB.builder();
        DirCacheBuilder o = treeO.builder();
        DirCacheBuilder t = treeT.builder();
        b.add(MakeEntry("d/o", FileMode.RegularFile));
        b.add(MakeEntry("d/t", FileMode.RegularFile));
        o.add(MakeEntry("d/o", FileMode.RegularFile, "o !"));
        o.add(MakeEntry("d/t", FileMode.RegularFile));
        t.add(MakeEntry("d/o", FileMode.RegularFile));
        t.add(MakeEntry("d/t", FileMode.RegularFile, "t !"));
        b.finish();
        o.finish();
        t.finish();
    }
    // O and T are siblings forked from B.
    var ow = new ObjectWriter(db);
    ObjectId B = Commit(ow, treeB, new ObjectId[] { });
    ObjectId O = Commit(ow, treeO, new[] { B });
    ObjectId T = Commit(ow, treeT, new[] { B });
    Merger ourMerger = MergeStrategy.SimpleTwoWayInCore.NewMerger(db);
    bool merge = ourMerger.Merge(new[] { O, T });
    Assert.IsTrue(merge);
    // Merged tree contains O's d/o and T's d/t -- and nothing else.
    var tw = new GitSharp.Core.TreeWalk.TreeWalk(db) { Recursive = true };
    tw.reset(ourMerger.GetResultTreeId());
    Assert.IsTrue(tw.next());
    Assert.AreEqual("d/o", tw.getPathString());
    AssertCorrectId(treeO, tw);
    Assert.IsTrue(tw.next());
    Assert.AreEqual("d/t", tw.getPathString());
    AssertCorrectId(treeT, tw);
    Assert.IsFalse(tw.next());
}
// Verifies SubmoduleStatusCommand reports REV_CHECKED_OUT when the
// submodule's checked-out HEAD differs from the id recorded in the
// parent's index.
public virtual void RepositoryWithDifferentRevCheckedOutSubmodule()
{
    // Record a gitlink for "sub" in the parent index.
    ObjectId id = ObjectId.FromString("abcd1234abcd1234abcd1234abcd1234abcd1234");
    string path = "sub";
    DirCache cache = db.LockDirCache();
    DirCacheEditor editor = cache.Editor();
    editor.Add(new _PathEdit_317(id, path));
    editor.Commit();
    // Register the submodule URL in both the repository config and
    // .gitmodules.
    string url = "git://server/repo.git";
    StoredConfig config = ((FileBasedConfig)db.GetConfig());
    config.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.CONFIG_KEY_URL, url);
    config.Save();
    FileBasedConfig modulesConfig = new FileBasedConfig(new FilePath(db.WorkTree, Constants.DOT_GIT_MODULES), db.FileSystem);
    modulesConfig.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.CONFIG_KEY_PATH, path);
    modulesConfig.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.CONFIG_KEY_URL, url);
    modulesConfig.Save();
    // Create the submodule repository and force its HEAD to an id different
    // from the one recorded in the index.
    Repository subRepo = Git.Init().SetBare(false).SetDirectory(new FilePath(db.WorkTree, path)).Call().GetRepository();
    NUnit.Framework.Assert.IsNotNull(subRepo);
    RefUpdate update = subRepo.UpdateRef(Constants.HEAD, true);
    update.SetNewObjectId(ObjectId.FromString("aaaa0000aaaa0000aaaa0000aaaa0000aaaa0000"));
    update.ForceUpdate();
    // Status must list exactly one module, flagged REV_CHECKED_OUT, carrying
    // the index id and the forced HEAD id.
    SubmoduleStatusCommand command = new SubmoduleStatusCommand(db);
    IDictionary<string, SubmoduleStatus> statuses = command.Call();
    NUnit.Framework.Assert.IsNotNull(statuses);
    NUnit.Framework.Assert.AreEqual(1, statuses.Count);
    KeyValuePair<string, SubmoduleStatus> module = statuses.EntrySet().Iterator().Next();
    NUnit.Framework.Assert.IsNotNull(module);
    NUnit.Framework.Assert.AreEqual(path, module.Key);
    SubmoduleStatus status = module.Value;
    NUnit.Framework.Assert.IsNotNull(status);
    NUnit.Framework.Assert.AreEqual(path, status.GetPath());
    NUnit.Framework.Assert.AreEqual(id, status.GetIndexId());
    NUnit.Framework.Assert.AreEqual(update.GetNewObjectId(), status.GetHeadId());
    NUnit.Framework.Assert.AreEqual(SubmoduleStatusType.REV_CHECKED_OUT, status.GetType());
}
/// <summary>
/// Locks the index and pre-scans the head and merge trees, always
/// releasing the index lock afterwards.
/// </summary>
/// <exception cref="System.InvalidOperationException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private void PrescanTwoTrees(ObjectId head, ObjectId merge)
{
    DirCache index = db.LockDirCache();
    try
    {
        dco = new DirCacheCheckout(db, head, index, merge);
        dco.PreScanTwoTrees();
    }
    finally
    {
        index.Unlock();
    }
}
/// <summary>
/// Locks the index and performs the checkout between theHead and theMerge,
/// always releasing the index lock afterwards.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void Checkout()
{
    DirCache index = db.LockDirCache();
    try
    {
        dco = new DirCacheCheckout(db, theHead, index, theMerge);
        dco.Checkout();
    }
    finally
    {
        index.Unlock();
    }
}
// Reading an index file whose trailing checksum is corrupt must fail with
// a CorruptObjectException carrying the checksum-mismatch message.
public void testCorruptChecksumAtFooter()
{
    var cache = new DirCache(pathOf("gitgit.index.badchecksum"));
    try
    {
        cache.read();
        Assert.Fail("Cache loaded despite corrupt checksum");
    }
    catch (CorruptObjectException corrupt)
    {
        Assert.AreEqual("DIRC checksum mismatch", corrupt.Message);
    }
}
// Resets the index entries for the configured filepaths to their state in
// the given commit: paths absent from the commit are removed from the
// index, all others are reverted to the commit's blob id and file mode.
private void ResetIndexForPaths(RevCommit commit)
{
    DirCache dc = null;
    DirCacheEditor edit;
    try
    {
        dc = repo.LockDirCache();
        edit = dc.Editor();
        // Walk index and commit tree side by side, restricted to filepaths.
        TreeWalk tw = new TreeWalk(repo);
        tw.AddTree(new DirCacheIterator(dc));
        tw.AddTree(commit.Tree);
        tw.Filter = PathFilterGroup.CreateFromStrings(filepaths);
        tw.Recursive = true;
        while (tw.Next())
        {
            string path = tw.PathString;
            // DirCacheIterator dci = tw.getTree(0, DirCacheIterator.class);
            CanonicalTreeParser tree = tw.GetTree<CanonicalTreeParser>(1);
            if (tree == null)
            {
                // file is not in the commit, remove from index
                edit.Add(new DirCacheEditor.DeletePath(path));
            }
            else
            {
                // revert index to commit
                // it seems that there is concurrent access to the tree
                // variable, therefore we need to keep references to
                // entryFileMode and entryObjectId in local variables
                FileMode entryFileMode = tree.EntryFileMode;
                ObjectId entryObjectId = tree.EntryObjectId;
                edit.Add(new _PathEdit_305(entryFileMode, entryObjectId, path));
            }
        }
        edit.Commit();
    }
    catch (IOException e)
    {
        throw new RuntimeException(e);
    }
    finally
    {
        // Always release the index lock, even when the edit fails.
        if (dc != null)
        {
            dc.Unlock();
        }
    }
}
/// <summary>
/// Locks the index and pre-scans the two trees identified by the given
/// Tree objects, always releasing the index lock afterwards.
/// </summary>
/// <exception cref="System.InvalidOperationException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public override void PrescanTwoTrees(Tree head, Tree merge)
{
    DirCache index = db.LockDirCache();
    try
    {
        dco = new DirCacheCheckout(db, head.GetId(), index, merge.GetId());
        dco.PreScanTwoTrees();
    }
    finally
    {
        index.Unlock();
    }
}
/// <summary>
/// Locks the index and checks out between theHead and theMerge trees,
/// always releasing the index lock afterwards.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
public override void Checkout()
{
    DirCache index = db.LockDirCache();
    try
    {
        dco = new DirCacheCheckout(db, theHead.GetId(), index, theMerge.GetId());
        dco.Checkout();
    }
    finally
    {
        index.Unlock();
    }
}
// Requesting the cache tree of an empty index with build=true creates an
// empty, invalid root tree; subsequent calls return that same instance.
public void testEmptyCache_CreateEmptyCacheTree()
{
    DirCache index = DirCache.read(db);

    DirCacheTree root = index.getCacheTree(true);
    Assert.IsNotNull(root);
    Assert.AreSame(root, index.getCacheTree(false));
    Assert.AreSame(root, index.getCacheTree(true));

    Assert.AreEqual(string.Empty, root.getNameString());
    Assert.AreEqual(string.Empty, root.getPathString());
    Assert.AreEqual(0, root.getChildCount());
    Assert.AreEqual(0, root.getEntrySpan());
    Assert.IsFalse(root.isValid());
}
/// <summary>Checkout paths into index and working directory</summary>
/// <returns>this instance</returns>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
/// <exception cref="NGit.Api.Errors.RefNotFoundException">NGit.Api.Errors.RefNotFoundException
/// </exception>
protected internal virtual NGit.Api.CheckoutCommand CheckoutPaths()
{
    RevWalk revWalk = new RevWalk(repo);
    DirCache dc = repo.LockDirCache();
    try
    {
        DirCacheEditor editor = dc.Editor();
        TreeWalk startWalk = new TreeWalk(revWalk.GetObjectReader());
        startWalk.Recursive = true;
        // Restrict to the requested paths unless all paths were requested.
        if (!checkoutAllPaths)
        {
            startWalk.Filter = PathFilterGroup.CreateFromStrings(paths);
        }
        // With no start commit/point configured, check out from the index
        // itself; otherwise walk the tree of the resolved start point.
        bool checkoutIndex = startCommit == null && startPoint == null;
        if (!checkoutIndex)
        {
            startWalk.AddTree(revWalk.ParseCommit(GetStartPoint()).Tree);
        }
        else
        {
            startWalk.AddTree(new DirCacheIterator(dc));
        }
        FilePath workTree = repo.WorkTree;
        ObjectReader r = repo.ObjectDatabase.NewReader();
        try
        {
            // Queue one index edit per walked entry; the edit also writes
            // the blob content into the working tree.
            while (startWalk.Next())
            {
                ObjectId blobId = startWalk.GetObjectId(0);
                FileMode mode = startWalk.GetFileMode(0);
                editor.Add(new _PathEdit_349(this, checkoutIndex, blobId, mode, workTree, r, startWalk.PathString));
            }
            editor.Commit();
        }
        finally
        {
            startWalk.Release();
            r.Release();
        }
    }
    finally
    {
        // Release the index lock and the rev walk in all cases.
        dc.Unlock();
        revWalk.Release();
    }
    return(this);
}
// Builds a one-entry index, commits it to disk, re-reads it, and checks
// that the re-read entry carries the same metadata as the original.
//
// Fix: the final assertion batch checked entOrig instead of entRead, so
// the round-tripped entry's fields were never actually validated.
public void testBuildOneFile_FinishWriteCommit()
{
    string path = "a-File-path";
    var mode = FileMode.RegularFile;
    long lastModified = 1218123387057L;
    int Length = 1342;
    DirCacheEntry entOrig;
    DirCache dc = DirCache.Lock(db);
    DirCacheBuilder b = dc.builder();
    Assert.IsNotNull(b);
    entOrig = new DirCacheEntry(path);
    entOrig.setFileMode(mode);
    entOrig.setLastModified(lastModified);
    entOrig.setLength(Length);
    // Path string is materialized (not the same reference) but equal.
    Assert.AreNotSame(path, entOrig.getPathString());
    Assert.AreEqual(path, entOrig.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entOrig.getObjectId());
    Assert.AreEqual(mode.Bits, entOrig.getRawMode());
    Assert.AreEqual(0, entOrig.getStage());
    Assert.AreEqual(lastModified, entOrig.getLastModified());
    Assert.AreEqual(Length, entOrig.getLength());
    Assert.IsFalse(entOrig.isAssumeValid());
    b.add(entOrig);
    b.finish();
    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreSame(entOrig, dc.getEntry(0));
    dc.write();
    Assert.IsTrue(dc.commit());
    // Re-read from disk: a distinct entry object with identical metadata.
    dc = DirCache.read(db);
    Assert.AreEqual(1, dc.getEntryCount());
    DirCacheEntry entRead = dc.getEntry(0);
    Assert.AreNotSame(entOrig, entRead);
    Assert.AreEqual(path, entRead.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entRead.getObjectId());
    Assert.AreEqual(mode.Bits, entRead.getRawMode());
    Assert.AreEqual(0, entRead.getStage());
    Assert.AreEqual(lastModified, entRead.getLastModified());
    Assert.AreEqual(Length, entRead.getLength());
    Assert.IsFalse(entRead.isAssumeValid());
}
// Reading an index carrying an unknown *required* extension must fail
// with a CorruptObjectException naming the extension.
public void testUnsupportedRequiredExtension()
{
    var cache = new DirCache(pathOf("gitgit.index.aaaa"));
    try
    {
        cache.read();
        Assert.Fail("Cache loaded an unsupported extension");
    }
    catch (CorruptObjectException corrupt)
    {
        Assert.AreEqual("DIRC extension 'aaaa'" + " not supported by this version.", corrupt.Message);
    }
}
/// <summary>
/// Marks the index entry at the given path (when present) as assume-valid
/// and persists the index; throws when the commit fails.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void AssumeUnchanged(string path)
{
    DirCache dirc = db.LockDirCache();
    DirCacheEntry entry = dirc.GetEntry(path);
    if (entry != null)
    {
        entry.IsAssumeValid = true;
    }
    dirc.Write();
    if (!dirc.Commit())
    {
        throw new IOException("could not commit");
    }
}
// A hard reset to a tag name must move HEAD to the tagged commit.
public virtual void TestHardResetOnTag()
{
    SetupRepository();
    string tagName = "initialtag";
    git.Tag().SetName(tagName).SetObjectId(secondCommit).SetMessage("message").Call();

    // Sanity: the tracked file is present in the index before resetting.
    DirCacheEntry preReset = DirCache.Read(db.GetIndexFile(), db.FileSystem).GetEntry(indexFile.GetName());
    NUnit.Framework.Assert.IsNotNull(preReset);

    git.Add().AddFilepattern(untrackedFile.GetName()).Call();
    git.Reset().SetRef(tagName).SetMode(ResetCommand.ResetType.HARD).Call();

    ObjectId head = db.Resolve(Constants.HEAD);
    NUnit.Framework.Assert.AreEqual(secondCommit, head);
}
// Reads the test index and compares every entry, in order, against the
// records produced by C git's ls-files.
public virtual void TestReadIndex_LsFiles()
{
    IDictionary<string, DirCacheCGitCompatabilityTest.CGitIndexRecord> ls = ReadLsFiles();
    DirCache dc = new DirCache(index, FS.DETECTED);

    // Entry count is zero until Read() is invoked.
    NUnit.Framework.Assert.AreEqual(0, dc.GetEntryCount());
    dc.Read();
    NUnit.Framework.Assert.AreEqual(ls.Count, dc.GetEntryCount());

    Iterator<DirCacheCGitCompatabilityTest.CGitIndexRecord> rItr = ls.Values.Iterator();
    for (int i = 0; rItr.HasNext(); i++)
    {
        AssertEqual(rItr.Next(), dc.GetEntry(i));
    }
}
// Builds (or restores from the per-directory cache) a lower-cased
// full-text search index (content bytes + suffix array) for the file at
// "path".
public FullTextIndex(string path)
{
    this.fullpath = path;
    var dir = Path.GetDirectoryName(path);
    var fname = Path.GetFileName(path);
    this.lastModified = File.GetLastWriteTimeUtc(path);
    // One DirCache per directory, lazily loaded from "<dir>.search_index".
    DirCache dcache;
    if (!dirCaches.TryGetValue(dir, out dcache))
    {
        dcache = new DirCache();
        dcache.read(dir + ".search_index");
        dirCaches[dir] = dcache;
    }
    // Reuse the cached entry when the file has not changed since indexing.
    if (dcache.cache.ContainsKey(fname))
    {
        var fti = dcache.cache[fname];
        if (fti.lastModified == this.lastModified)
        {
            this.data = fti.data;
            this.suffix_array = fti.suffix_array;
            return;
        }
    }
    // Read and lower-case the file content; on access failure leave the
    // index empty (data and suffix_array remain null).
    try
    {
        data = System.Text.Encoding.UTF8.GetBytes(File.ReadAllText(path).ToLowerInvariant());
    }
    catch (IOException)
    {
        return;
    }
    catch (UnauthorizedAccessException)
    {
        return;
    }
    if (suffix_array == null)
    {
        // suffixArray() appears to expect the input padded with three
        // trailing zero sentinels -- NOTE(review): confirm against the
        // suffixArray implementation.
        var temp = new int[data.Length + 3];
        for (int i = 0; i < data.Length; i++) temp[i] = data[i];
        temp[data.Length] = temp[data.Length + 1] = temp[data.Length + 2] = 0;
        suffix_array = new int[data.Length];
        for (int i = 0; i < data.Length; i++) suffix_array[i] = i;
        suffixArray(temp, suffix_array, data.Length, 255);
        // Store the freshly built index in the directory cache.
        dcache.Add(this);
    }
}
// consider each of the files, see if it is suitable for series "ser" and episode "epi"
// if so, add a rcitem for copy to "fi"
// Returns true when a matching file was found (or the action was
// cancelled); false when no candidate in dirCache matched.
public bool FindMissingEp(DirCache dirCache, ItemMissing me, ItemList addTo, ActionCopyMoveRename.Op whichOp)
{
    string showname = me.Episode.SI.ShowName;
    int season = me.Episode.SeasonNumber;
    //String ^toName = FilenameFriendly(Settings->NamingStyle->NameFor(me->PE));
    int epnum = me.Episode.EpNum;
    // TODO: find a 'best match', or use first ?
    showname = Helpers.SimplifyName(showname);
    foreach (DirCacheEntry dce in dirCache)
    {
        // Bail out early when the user cancelled the scan.
        if (this.ActionCancel)
            return true;
        bool matched = false;
        try
        {
            if (!dce.HasUsefulExtension_NotOthersToo) // not a usefile file extension
                continue;
            // Skip small "sample" files when configured to ignore them.
            if (TVSettings.Instance.IgnoreSamples && dce.LowerName.Contains("sample") && ((dce.Length / (1024 * 1024)) < TVSettings.Instance.SampleFileMaxSizeMB))
                continue;
            // Match the show name as a whole word within the simplified path.
            matched = Regex.Match(dce.SimplifiedFullName, "\\b" + showname + "\\b", RegexOptions.IgnoreCase).Success;
            // if we don't match the main name, then test the aliases
            if (!matched)
            {
                foreach (string alias in me.Episode.SI.AliasNames)
                {
                    string aliasName = Helpers.SimplifyName(alias);
                    matched = Regex.Match(dce.SimplifiedFullName, "\\b" + aliasName + "\\b", RegexOptions.IgnoreCase).Success;
                    if (matched)
                        break;
                }
            }
            if (matched)
            {
                // Confirm season/episode numbers via filename parsing or,
                // when enabled for this show, sequential-number matching.
                int seasF;
                int epF;
                if ((TVDoc.FindSeasEp(dce.TheFile, out seasF, out epF, me.Episode.SI) && (seasF == season) && (epF == epnum)) || (me.Episode.SI.UseSequentialMatch && TVDoc.MatchesSequentialNumber(dce.TheFile.Name, ref seasF, ref epF, me.Episode) && (seasF == season) && (epF == epnum)))
                {
                    FileInfo fi = new FileInfo(me.TheFileNoExt + dce.TheFile.Extension);
                    // don't remove the base search folders
                    bool doTidyup = true;
                    foreach (String folder in this.mDoc.SearchFolders)
                    {
                        // http://stackoverflow.com/questions/1794025/how-to-check-whether-2-directoryinfo-objects-are-pointing-to-the-same-directory
                        if (String.Compare(folder.ToLower().TrimEnd('\\'), fi.Directory.FullName.ToLower().TrimEnd('\\'), StringComparison.InvariantCultureIgnoreCase) == 0)
                        {
                            doTidyup = false;
                            break;
                        }
                    }
                    addTo.Add(new ActionCopyMoveRename(whichOp, dce.TheFile, fi, me.Episode, doTidyup ? TVSettings.Instance.Tidyup : null));
                    DownloadIdentifiersController di = new DownloadIdentifiersController();
                    // if we're copying/moving a file across, we might also want to make a thumbnail or NFO for it
                    addTo.Add(di.ProcessEpisode(me.Episode, fi));
                    return true;
                }
            }
        }
        catch (System.IO.PathTooLongException e)
        {
            // Report the offending path; optionally show match details.
            string t = "Path too long. " + dce.TheFile.FullName + ", " + e.Message;
            t += ". Try to display more info?";
            DialogResult dr = MessageBox.Show(t, "Path too long", MessageBoxButtons.YesNo, MessageBoxIcon.Exclamation);
            if (dr == DialogResult.Yes)
            {
                t = "DirectoryName " + dce.TheFile.DirectoryName + ", File name: " + dce.TheFile.Name;
                t += matched ? ", matched. " : ", no match. ";
                if (matched)
                {
                    t += "Show: " + me.Episode.TheSeries.Name + ", Season " + season + ", Ep " + epnum + ". ";
                    t += "To: " + me.TheFileNoExt;
                }
                MessageBox.Show(t, "Path too long", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
        }
    }
    return false;
}
/// <summary>Run the diff operation.</summary>
/// <remarks>
/// Run the diff operation. Until this is called, all lists will be empty.
/// <p>
/// The operation may be aborted by the progress monitor. In that event it
/// will report what was found before the cancel operation was detected.
/// Callers should ignore the result if monitor.isCancelled() is true. If a
/// progress monitor is not needed, callers should use
/// <see cref="Diff()">Diff()</see>
/// instead. Progress reporting is crude and approximate and only intended
/// for informing the user.
/// </remarks>
/// <param name="monitor">for reporting progress, may be null</param>
/// <param name="estWorkTreeSize">number or estimated files in the working tree</param>
/// <param name="estIndexSize">number of estimated entries in the cache</param>
/// <param name="title"></param>
/// <returns>if anything is different between index, tree, and workdir</returns>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual bool Diff(ProgressMonitor monitor, int estWorkTreeSize, int estIndexSize, string title)
{
    dirCache = repository.ReadDirCache();
    TreeWalk treeWalk = new TreeWalk(repository);
    treeWalk.Recursive = true;
    // add the trees (tree, dirchache, workdir)
    if (tree != null)
    {
        treeWalk.AddTree(tree);
    }
    else
    {
        treeWalk.AddTree(new EmptyTreeIterator());
    }
    treeWalk.AddTree(new DirCacheIterator(dirCache));
    treeWalk.AddTree(initialWorkingTreeIterator);
    ICollection<TreeFilter> filters = new AList<TreeFilter>(4);
    if (monitor != null)
    {
        // Get the maximum size of the work tree and index
        // and add some (quite arbitrary)
        if (estIndexSize == 0)
        {
            estIndexSize = dirCache.GetEntryCount();
        }
        int total = Math.Max(estIndexSize * 10 / 9, estWorkTreeSize * 10 / 9);
        monitor.BeginTask(title, total);
        filters.AddItem(new IndexDiff.ProgressReportingFilter(monitor, total));
    }
    if (filter != null)
    {
        filters.AddItem(filter);
    }
    filters.AddItem(new SkipWorkTreeFilter(INDEX));
    filters.AddItem(new IndexDiffFilter(INDEX, WORKDIR));
    treeWalk.Filter = AndTreeFilter.Create(filters);
    // Classify each path by which of the three iterators report it.
    while (treeWalk.Next())
    {
        AbstractTreeIterator treeIterator = treeWalk.GetTree<AbstractTreeIterator>(TREE);
        DirCacheIterator dirCacheIterator = treeWalk.GetTree<DirCacheIterator>(INDEX);
        WorkingTreeIterator workingTreeIterator = treeWalk.GetTree<WorkingTreeIterator>(WORKDIR);
        if (treeIterator != null)
        {
            if (dirCacheIterator != null)
            {
                if (!treeIterator.IdEqual(dirCacheIterator) || treeIterator.EntryRawMode != dirCacheIterator.EntryRawMode)
                {
                    // in repo, in index, content diff => changed
                    changed.AddItem(treeWalk.PathString);
                }
            }
            else
            {
                // in repo, not in index => removed
                removed.AddItem(treeWalk.PathString);
                if (workingTreeIterator != null)
                {
                    untracked.AddItem(treeWalk.PathString);
                }
            }
        }
        else
        {
            if (dirCacheIterator != null)
            {
                // not in repo, in index => added
                added.AddItem(treeWalk.PathString);
            }
            else
            {
                // not in repo, not in index => untracked
                if (workingTreeIterator != null && !workingTreeIterator.IsEntryIgnored())
                {
                    untracked.AddItem(treeWalk.PathString);
                }
            }
        }
        if (dirCacheIterator != null)
        {
            if (workingTreeIterator == null)
            {
                // in index, not in workdir => missing
                missing.AddItem(treeWalk.PathString);
            }
            else
            {
                if (workingTreeIterator.IsModified(dirCacheIterator.GetDirCacheEntry(), true))
                {
                    // in index, in workdir, content differs => modified
                    modified.AddItem(treeWalk.PathString);
                }
            }
        }
    }
    // consume the remaining work
    if (monitor != null)
    {
        monitor.EndTask();
    }
    if (added.IsEmpty() && changed.IsEmpty() && removed.IsEmpty() && missing.IsEmpty() && modified.IsEmpty() && untracked.IsEmpty())
    {
        return false;
    }
    else
    {
        return true;
    }
}
// Re-opens the repository rooted at initFolder and rebuilds all cached
// state (index, HEAD commit tree, ignore rules); clears everything first
// so a failed open leaves the object in an empty, consistent state.
public void Refresh()
{
    // Drop all cached state up front.
    this.index = null;
    this.commitTree = null;
    this.cache.Clear();
    this.changedFiles = null;
    this.repositoryGraph = null;
    this.dirCache = null;
    this.head = null;
    this.ignoreRules = null;
    this.remotes = null;
    this.configs = null;
    if (!string.IsNullOrEmpty(initFolder))
    {
        try
        {
            this.repository = Open(new DirectoryInfo(initFolder));
            if (this.repository != null)
            {
                dirCache = repository.ReadDirCache();
                head = repository.Resolve(Constants.HEAD);
                if (head == null)
                {
                    // Unborn HEAD: start from an empty tree.
                    this.commitTree = new Tree(repository);
                }
                else
                {
                    // Reads the tree id from the raw commit at offset 5 --
                    // NOTE(review): presumably skipping the "tree " header
                    // prefix; confirm this holds for all commit encodings.
                    var treeId = ObjectId.FromString(repository.Open(head).GetBytes(), 5);
                    this.commitTree = new Tree(repository, treeId, repository.Open(treeId).GetBytes());
                }
                if (repository.IsBare)
                    throw new NoWorkTreeException();
                this.index = new GitIndex(repository);
                this.index.Read();
                this.index.RereadIfNecessary();
                try
                {
                    //load local .gitignore file
                    var ignoreFile = Path.Combine(this.initFolder, Constants.GITIGNORE_FILENAME);
                    if (File.Exists(ignoreFile))
                    {
                        // Keep only non-empty, non-comment lines as rules.
                        ignoreRules = File.ReadAllLines(ignoreFile)
                            .Where(line => !line.StartsWith("#") && line.Trim().Length > 0)
                            .Select(line => new IgnoreRule(line)).ToList();
                    }
                }
                catch (Exception ex)
                {
                    Log.WriteLine("ReadIgnoreFile: {0}\r\n{1}", this.initFolder, ex.ToString());
                }
            }
        }
        catch (Exception ex)
        {
            // Any failure leaves repository null and the caches cleared.
            this.repository = null;
            Log.WriteLine("Refresh: {0}\r\n{1}", this.initFolder, ex.ToString());
        }
    }
}
/// <summary>Reading an index that carries an unknown mandatory extension must fail with a CorruptObjectException.</summary>
public virtual void TestUnsupportedRequiredExtension()
{
	DirCache cache = new DirCache(PathOf("gitgit.index.aaaa"), FS.DETECTED);
	try
	{
		cache.Read();
		NUnit.Framework.Assert.Fail("Cache loaded an unsupported extension");
	}
	catch (CorruptObjectException ex)
	{
		NUnit.Framework.Assert.AreEqual("DIRC extension 'aaaa' not supported by this version.", ex.Message);
	}
}
/// <summary>An unknown optional extension is ignored and the index entries still load.</summary>
public virtual void TestUnsupportedOptionalExtension()
{
	DirCache cache = new DirCache(PathOf("gitgit.index.ZZZZ"), FS.DETECTED);
	cache.Read();
	NUnit.Framework.Assert.AreEqual(1, cache.GetEntryCount());
	NUnit.Framework.Assert.AreEqual("A", cache.GetEntry(0).PathString);
}
// consider each of the files, see if it is suitable for series "ser" and episode "epi"
// if so, add a rcitem for copy to "fi"
// Returns true when a suitable file was found (and queued into addTo) or the
// user cancelled; false when nothing in the cache matched the missing episode.
public bool FindMissingEp(DirCache dirCache, ItemMissing me, ItemList addTo, ActionCopyMoveRename.Op whichOp)
{
	string showname = me.Episode.SI.ShowName;
	int season = me.Episode.SeasonNumber;
	//String ^toName = FilenameFriendly(Settings->NamingStyle->NameFor(me->PE));
	int epnum = me.Episode.EpNum;
	// TODO: find a 'best match', or use first ?
	showname = Helpers.SimplifyName(showname);
	foreach (DirCacheEntry dce in dirCache)
	{
		if (this.ActionCancel)
			return true;
		// Declared outside the try so the PathTooLong handler can report it.
		bool matched = false;
		try
		{
			if (!dce.HasUsefulExtension_NotOthersToo) // not a usefile file extension
				continue;
			// Skip small files whose name contains "sample" when configured to.
			if (this.Settings.IgnoreSamples && dce.LowerName.Contains("sample") && ((dce.Length / (1024 * 1024)) < this.Settings.SampleFileMaxSizeMB))
				continue;
			// Whole-word match of the simplified show name against the file path.
			matched = Regex.Match(dce.SimplifiedFullName, "\\b" + showname + "\\b", RegexOptions.IgnoreCase).Success;
			// if we don't match the main name, then test the aliases
			if (!matched)
			{
				foreach (string alias in me.Episode.SI.AliasNames)
				{
					string aliasName = Helpers.SimplifyName(alias);
					matched = Regex.Match(dce.SimplifiedFullName, "\\b" + aliasName + "\\b", RegexOptions.IgnoreCase).Success;
					if (matched)
						break;
				}
			}
			if (matched)
			{
				int seasF;
				int epF;
				// String ^fn = file->Name;
				// Accept the file when either the normal season/episode parse or the
				// sequential-number match yields exactly the wanted season+episode.
				if ((this.FindSeasEp(dce.TheFile, out seasF, out epF, me.Episode.SI) && (seasF == season) && (epF == epnum)) || (me.Episode.SI.UseSequentialMatch && this.MatchesSequentialNumber(dce.TheFile.Name, ref seasF, ref epF, me.Episode) && (seasF == season) && (epF == epnum)))
				{
					FileInfo fi = new FileInfo(me.TheFileNoExt + dce.TheFile.Extension);
					addTo.Add(new ActionCopyMoveRename(whichOp, dce.TheFile, fi, me.Episode));
					// if we're copying/moving a file across, we might also want to make a thumbnail or NFO for it
					this.ThumbnailAndNFOCheck(me.Episode, fi, addTo);
					return true;
				}
			}
		}
		catch (System.IO.PathTooLongException e)
		{
			// Offer the user diagnostic detail for the offending path.
			string t = "Path too long. " + dce.TheFile.FullName + ", " + e.Message;
			t += ". Try to display more info?";
			DialogResult dr = MessageBox.Show(t, "Path too long", MessageBoxButtons.YesNo, MessageBoxIcon.Exclamation);
			if (dr == DialogResult.Yes)
			{
				t = "DirectoryName " + dce.TheFile.DirectoryName + ", File name: " + dce.TheFile.Name;
				t += matched ? ", matched. " : ", no match. ";
				if (matched)
				{
					t += "Show: " + me.Episode.TheSeries.Name + ", Season " + season + ", Ep " + epnum + ". ";
					t += "To: " + me.TheFileNoExt;
				}
				MessageBox.Show(t, "Path too long", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
			}
		}
	}
	return false;
}
/// <summary>An index whose trailing checksum is wrong must be rejected on read.</summary>
public virtual void TestCorruptChecksumAtFooter()
{
	DirCache cache = new DirCache(PathOf("gitgit.index.badchecksum"), FS.DETECTED);
	try
	{
		cache.Read();
		NUnit.Framework.Assert.Fail("Cache loaded despite corrupt checksum");
	}
	catch (CorruptObjectException ex)
	{
		NUnit.Framework.Assert.AreEqual("DIRC checksum mismatch", ex.Message);
	}
}
/// <summary>Round-trips a version-3 index: verifies the known entries, then writes the cache back and expects byte-identical output.</summary>
public virtual void TestReadWriteV3()
{
	FilePath file = PathOf("gitgit.index.v3");
	DirCache dc = new DirCache(file, FS.DETECTED);
	dc.Read();
	// Expected entries, in index order: path, skip-worktree flag, intent-to-add flag.
	string[] paths = { "dir1/file1.txt", "dir2/file2.txt", "dir3/file3.txt", "dir3/file3a.txt", "dir4/file4.txt", "dir4/file4a.txt", "file.txt", "newdir1/newfile1.txt", "newdir1/newfile2.txt", "newfile.txt" };
	bool[] skipWorkTree = { false, true, false, true, true, false, true, false, false, false };
	bool[] intentToAdd = { false, false, false, false, false, false, false, true, true, true };
	NUnit.Framework.Assert.AreEqual(paths.Length, dc.GetEntryCount());
	for (int i = 0; i < paths.Length; i++)
	{
		AssertV3TreeEntry(i, paths[i], skipWorkTree[i], intentToAdd[i], dc);
	}
	// Writing the cache back must reproduce the original file content.
	ByteArrayOutputStream written = new ByteArrayOutputStream();
	dc.WriteTo(written);
	byte[] indexBytes = written.ToByteArray();
	byte[] expectedBytes = IOUtil.ReadFully(file);
	CollectionAssert.AreEquivalent(expectedBytes, indexBytes);
}
// Creates a child iterator positioned on the given subtree of the parent
// iterator. The child shares the parent's cache and subtreeId buffer and
// starts at the parent's current entry pointer; the subtree's entry span
// bounds its iteration range.
internal DirCacheIterator(NGit.Dircache.DirCacheIterator p, DirCacheTree dct) : base(p, p.path, p.pathLen + 1)
{
	cache = p.cache;
	tree = dct;
	// The subtree covers GetEntrySpan() consecutive entries beginning at the
	// parent's current position.
	treeStart = p.ptr;
	treeEnd = treeStart + tree.GetEntrySpan();
	subtreeId = p.subtreeId;
	ptr = p.ptr;
	ParseEntry();
}
/// <summary>A recursive TreeWalk over a DirCacheIterator must yield exactly the records C Git `ls-files` reports, in order.</summary>
public virtual void TestTreeWalk_LsFiles()
{
	Repository db = CreateBareRepository();
	IDictionary<string, DirCacheCGitCompatabilityTest.CGitIndexRecord> ls = ReadLsFiles();
	DirCache dc = new DirCache(index, db.FileSystem);
	NUnit.Framework.Assert.AreEqual(0, dc.GetEntryCount());
	dc.Read();
	NUnit.Framework.Assert.AreEqual(ls.Count, dc.GetEntryCount());
	Iterator<DirCacheCGitCompatabilityTest.CGitIndexRecord> expected = ls.Values.Iterator();
	TreeWalk walk = new TreeWalk(db);
	walk.Recursive = true;
	walk.AddTree(new DirCacheIterator(dc));
	while (expected.HasNext())
	{
		// Each C Git record must have a corresponding walk position.
		NUnit.Framework.Assert.IsTrue(walk.Next());
		DirCacheIterator found = walk.GetTree<DirCacheIterator>(0);
		NUnit.Framework.Assert.IsNotNull(found);
		AssertEqual(expected.Next(), found.GetDirCacheEntry());
	}
}
/// <summary>Asserts a single entry of a version-3 index.</summary>
/// <param name="indexPosition">zero-based position of the entry within the cache.</param>
/// <param name="path">expected path of the entry.</param>
/// <param name="skipWorkTree">expected state of the skip-worktree flag.</param>
/// <param name="intentToAdd">expected state of the intent-to-add flag.</param>
/// <param name="dc">the cache to inspect.</param>
private static void AssertV3TreeEntry(int indexPosition, string path, bool skipWorkTree, bool intentToAdd, DirCache dc)
{
	DirCacheEntry entry = dc.GetEntry(indexPosition);
	// Use the fully qualified assert consistently with the rest of this fixture
	// (the original mixed bare Assert and NUnit.Framework.Assert).
	NUnit.Framework.Assert.AreEqual(path, entry.PathString);
	NUnit.Framework.Assert.AreEqual(skipWorkTree, entry.IsSkipWorkTree);
	NUnit.Framework.Assert.AreEqual(intentToAdd, entry.IsIntentToAdd);
}
/// <summary>Initialises the torrent-core helper with an empty hash cache and no file cache.</summary>
/// <param name="setprog">delegate used to report progress.</param>
protected BTCore(SetProgressDelegate setprog)
{
	this.SetProg = setprog;
	this.HashCache = new System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<HashCacheItem>>();
	// Reset all cache statistics.
	this.CacheChecks = 0;
	this.CacheItems = 0;
	this.CacheHits = 0;
	// No file cache exists until BuildFileCache is asked for one.
	this.FileCache = null;
	this.FileCacheIsFor = null;
	this.FileCacheWithSubFolders = false;
}
/// <summary>Writes the cache's tree and creates a commit pointing at it with the given parents.</summary>
/// <param name="odi">inserter used to write the tree and commit objects; flushed before returning.</param>
/// <param name="treeB">cache whose content becomes the commit's tree.</param>
/// <param name="parentIds">parent commit ids for the new commit.</param>
/// <returns>the id of the newly inserted commit.</returns>
/// <exception cref="System.Exception"></exception>
private ObjectId Commit(ObjectInserter odi, DirCache treeB, ObjectId[] parentIds)
{
	NGit.CommitBuilder builder = new NGit.CommitBuilder();
	builder.TreeId = treeB.WriteTree(odi);
	PersonIdent ident = new PersonIdent("A U Thor", "a.u.thor", 1L, 0);
	builder.Author = ident;
	builder.Committer = ident;
	builder.SetParentIds(parentIds);
	builder.Message = "Tree " + builder.TreeId.Name;
	ObjectId commitId = odi.Insert(builder);
	odi.Flush();
	return commitId;
}
// Asserts that the object id recorded in the cache for the walk's current
// path equals the id the walk itself reports at tree position 0.
private void AssertCorrectId(DirCache treeT, TreeWalk tw)
{
	DirCacheEntry cached = treeT.GetEntry(tw.PathString);
	NUnit.Framework.Assert.AreEqual(cached.GetObjectId(), tw.GetObjectId(0));
}
/// <summary>Builds the test index from the given path-to-content map.</summary>
/// <remarks>
/// Each entry's content is written to the work tree, inserted as a blob, and
/// staged into the (locked) dircache via an editor. A null map leaves the
/// cache empty and the index unlocked.
/// </remarks>
/// <param name="indexEntries">map of path to file content; may be null.</param>
/// <exception cref="System.IO.IOException"></exception>
private void BuildIndex(Dictionary<string, string> indexEntries)
{
	dirCache = new DirCache(db.GetIndexFile(), db.FileSystem);
	if (indexEntries != null)
	{
		NUnit.Framework.Assert.IsTrue(dirCache.Lock());
		DirCacheEditor editor = dirCache.Editor();
		foreach (KeyValuePair<string, string> e in indexEntries.EntrySet())
		{
			WriteTrashFile(e.Key, e.Value);
			// NOTE(review): a fresh inserter per entry is never Flush()ed or
			// released — presumably the blob still lands on disk for this test
			// setup; confirm, or hoist one inserter out of the loop and flush it.
			ObjectInserter inserter = db.NewObjectInserter();
			ObjectId id = inserter.Insert(Constants.OBJ_BLOB, Constants.Encode(e.Value));
			// Delete first so re-adding replaces any stale stage of the path.
			editor.Add(new DirCacheEditor.DeletePath(e.Key));
			editor.Add(new _PathEdit_284(id, e.Key));
		}
		NUnit.Framework.Assert.IsTrue(editor.Commit());
	}
}
/// <summary>The cache tree built from the index must agree with the `ls-files` and `ls-tree` output recorded from C Git.</summary>
public virtual void TestReadIndex_DirCacheTree()
{
	IDictionary<string, DirCacheCGitCompatabilityTest.CGitIndexRecord> cList = ReadLsFiles();
	IDictionary<string, DirCacheCGitCompatabilityTest.CGitLsTreeRecord> cTree = ReadLsTree();
	DirCache dc = new DirCache(index, FS.DETECTED);
	NUnit.Framework.Assert.AreEqual(0, dc.GetEntryCount());
	dc.Read();
	NUnit.Framework.Assert.AreEqual(cList.Count, dc.GetEntryCount());
	// The root tree must exist, be valid, and span every index entry.
	DirCacheTree root = dc.GetCacheTree(false);
	NUnit.Framework.Assert.IsNotNull(root);
	NUnit.Framework.Assert.AreEqual(string.Empty, root.GetNameString());
	NUnit.Framework.Assert.AreEqual(string.Empty, root.GetPathString());
	NUnit.Framework.Assert.IsTrue(root.IsValid());
	NUnit.Framework.Assert.AreEqual(ObjectId.FromString("698dd0b8d0c299f080559a1cffc7fe029479a408"), root.GetObjectId());
	NUnit.Framework.Assert.AreEqual(cList.Count, root.GetEntrySpan());
	// Collect, in order, the entries C Git reported as subtrees.
	AList<DirCacheCGitCompatabilityTest.CGitLsTreeRecord> subtrees = new AList<DirCacheCGitCompatabilityTest.CGitLsTreeRecord>();
	foreach (DirCacheCGitCompatabilityTest.CGitLsTreeRecord r in cTree.Values)
	{
		if (FileMode.TREE.Equals(r.mode))
		{
			subtrees.AddItem(r);
		}
	}
	NUnit.Framework.Assert.AreEqual(subtrees.Count, root.GetChildCount());
	// Each child subtree must match its C Git counterpart by name, path,
	// validity and object id.
	for (int i = 0; i < root.GetChildCount(); i++)
	{
		DirCacheTree child = root.GetChild(i);
		DirCacheCGitCompatabilityTest.CGitLsTreeRecord want = subtrees[i];
		NUnit.Framework.Assert.AreEqual(want.path, child.GetNameString());
		NUnit.Framework.Assert.AreEqual(want.path + "/", child.GetPathString());
		NUnit.Framework.Assert.IsTrue(child.IsValid());
		NUnit.Framework.Assert.AreEqual(want.id, child.GetObjectId());
	}
}
/// <summary>Construct a new editor.</summary>
/// <remarks>Construct a new editor over the given cache.</remarks>
/// <param name="dc">the cache this editor will eventually update.</param>
/// <param name="ecnt">
/// estimated number of entries the editor will hold when complete;
/// used to size the initial entry table.
/// </param>
protected internal BaseDirCacheEditor(DirCache dc, int ecnt)
{
	entries = new DirCacheEntry[ecnt];
	cache = dc;
}
/// <summary>
/// Constructs a DirCacheCheckout for checking out one tree, merging with the
/// index.
/// </summary>
/// <remarks>
/// Constructs a DirCacheCheckout for checking out one tree, merging with the
/// index. As iterator over the working tree this constructor creates a
/// standard
/// <see cref="NGit.Treewalk.FileTreeIterator">NGit.Treewalk.FileTreeIterator</see>
/// </remarks>
/// <param name="repo">the repository in which we do the checkout</param>
/// <param name="dc">the (already locked) Dircache for this repo</param>
/// <param name="mergeCommitTree">the id of the tree we want to fast-forward to</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public DirCacheCheckout(Repository repo, DirCache dc, ObjectId mergeCommitTree) : this(repo, null, dc, mergeCommitTree, new FileTreeIterator(repo))
{
}
/// <summary>Ensures the file cache covers the given folder and sub-folder setting, rebuilding it only when stale.</summary>
protected void BuildFileCache(string folder, bool subFolders)
{
	// Reuse the existing cache when it was built for exactly this request.
	bool cacheIsCurrent = (this.FileCache != null)
		&& (this.FileCacheIsFor != null)
		&& (this.FileCacheIsFor == folder)
		&& (this.FileCacheWithSubFolders == subFolders);
	if (cacheIsCurrent)
		return;
	this.FileCache = new DirCache(null, folder, subFolders, null);
	this.FileCacheIsFor = folder;
	this.FileCacheWithSubFolders = subFolders;
}
/// <param name="local"></param>
/// <param name="inCore"></param>
protected internal ResolveMerger(Repository local, bool inCore) : base(local)
{
	// Resolve the diff algorithm from repository config; histogram is the default.
	DiffAlgorithm.SupportedAlgorithm configuredAlg = local.GetConfig().GetEnum(
		ConfigConstants.CONFIG_DIFF_SECTION, null, ConfigConstants.CONFIG_KEY_ALGORITHM,
		DiffAlgorithm.SupportedAlgorithm.HISTOGRAM);
	mergeAlgorithm = new MergeAlgorithm(DiffAlgorithm.GetAlgorithm(configuredAlg));
	commitNames = new string[] { "BASE", "OURS", "THEIRS" };
	oi = GetObjectInserter();
	this.inCore = inCore;
	if (inCore)
	{
		// In-core merges never touch the on-disk index file.
		dircache = DirCache.NewInCore();
	}
}
// Scans the configured search folders for files matching episodes marked as
// missing, queues copy actions for matches into the action list, and — unless
// LeaveOriginals is set — converts the last action per source file to a Move.
public void LookForMissingEps(SetProgressDelegate prog)
{
	// for each ep we have noticed as being missing
	// look through the monitored folders for it
	this.Stats().FindAndOrganisesDone++;
	prog.Invoke(0);
	ItemList newList = new ItemList();
	ItemList toRemove = new ItemList();
	// Pre-count files so AddFolder can report accurate progress.
	int fileCount = 0;
	foreach (string s in this.SearchFolders)
		fileCount += DirCache.CountFiles(s, true);
	int c = 0;
	DirCache dirCache = new DirCache();
	foreach (String s in this.SearchFolders)
	{
		if (this.ActionCancel)
			return;
		c = dirCache.AddFolder(prog, c, fileCount, s, true, this.Settings);
	}
	c = 0;
	int totalN = this.TheActionList.Count;
	foreach (Item action1 in this.TheActionList)
	{
		if (this.ActionCancel)
			return;
		prog.Invoke(50 + 50 * (++c) / (totalN + 1)); // second 50% of progress bar
		if (action1 is ItemMissing)
		{
			if (this.FindMissingEp(dirCache, (ItemMissing) (action1), newList, ActionCopyMoveRename.Op.Copy))
				toRemove.Add(action1);
		}
	}
	if (this.Settings.KeepTogether)
		this.KeepTogether(newList);
	prog.Invoke(100);
	if (!this.Settings.LeaveOriginals)
	{
		// go through and change last of each operation on a given source file to a 'Move'
		// ideally do that move within same filesystem
		// sort based on source file, and destination drive, putting last if destdrive == sourcedrive
		newList.Sort(new ActionItemSorter());
		// sort puts all the CopyMoveRenames together
		// then set the last of each source file to be a move
		for (int i = 0; i < newList.Count; i++)
		{
			ActionCopyMoveRename cmr1 = newList[i] as ActionCopyMoveRename;
			bool ok1 = cmr1 != null;
			if (!ok1)
				continue;
			bool last = i == (newList.Count - 1);
			// Peek at the next item to see whether it shares this source file.
			ActionCopyMoveRename cmr2 = !last ? newList[i + 1] as ActionCopyMoveRename : null;
			bool ok2 = cmr2 != null;
			if (ok2)
			{
				ActionCopyMoveRename a1 = cmr1;
				ActionCopyMoveRename a2 = cmr2;
				if (!Helpers.Same(a1.From, a2.From))
					a1.Operation = ActionCopyMoveRename.Op.Move;
			}
			else
			{
				// last item, or last copymoverename item in the list
				ActionCopyMoveRename a1 = cmr1;
				a1.Operation = ActionCopyMoveRename.Op.Move;
			}
		}
	}
	// Replace satisfied missing-items with the newly created actions.
	foreach (Item i in toRemove)
		this.TheActionList.Remove(i);
	foreach (Item i in newList)
		this.TheActionList.Add(i);
	// if (Settings->ExportFOXML)
	// ExportFOXML(Settings->ExportFOXMLTo);
}
/// <summary>Sets the DirCache which shall be used by this merger.</summary>
/// <remarks>
/// Sets the DirCache which shall be used by this merger. Without an explicit
/// cache the merger implicitly obtains and locks a default DirCache. When a
/// cache is supplied here the caller must lock it in advance, because the
/// merger will invoke
/// <see cref="NGit.Dircache.DirCache.Commit()">NGit.Dircache.DirCache.Commit()</see>
/// , which requires a held lock. If
/// <see cref="MergeImpl()">MergeImpl()</see>
/// returns without throwing, the lock is released; in case of exceptions,
/// releasing the lock remains the caller's responsibility.
/// </remarks>
/// <param name="dc">the DirCache to set</param>
public virtual void SetDirCache(DirCache dc)
{
	this.dircache = dc;
}
// Assembles the bug-report email body from the form fields and the optional
// settings/scan sections selected by the checkboxes, then shows it in the
// preview textbox.
// NOTE(review): repeated `txt +=` in the scan loops is O(n^2) string
// concatenation — consider a StringBuilder if reports get large.
private void bnCreate_Click(object sender, System.EventArgs e)
{
	this.txtEmailText.Text = "Working... This may take a while.";
	this.txtEmailText.Update();
	string txt = "";
	// Header and free-text sections typed by the user.
	txt += "From: " + this.txtName.Text + " <" + this.txtEmail.Text + ">" + "\r\n";
	txt += "Subject: TVRename bug report" + "\r\n";
	txt += "\r\n";
	txt += "TVRename version: " + Version.DisplayVersionString() + "\r\n";
	txt += "UserAppDataPath is " + System.Windows.Forms.Application.UserAppDataPath + "\r\n";
	txt += "EpGuidePath is " + UI.EpGuidePath() + "\r\n";
	txt += "\r\n";
	txt += "==== Brief Description ====" + "\r\n";
	txt += this.txtDesc1.Text + "\r\n";
	txt += "\r\n";
	txt += "==== Description ====" + "\r\n";
	txt += this.txtDesc2.Text + "\r\n";
	txt += "\r\n";
	txt += "==== Frequency ====" + "\r\n";
	txt += this.txtFreq.Text + "\r\n";
	txt += "\r\n";
	txt += "==== Notes and Comments ====" + "\r\n";
	txt += this.txtComments.Text + "\r\n";
	txt += "\r\n";
	if (this.cbSettings.Checked)
	{
		// Inline the settings file; failures are reported in the text itself.
		txt += "==== Settings Files ====" + "\r\n";
		txt += "\r\n";
		txt += "---- TVRenameSettings.xml" + "\r\n";
		txt += "\r\n";
		try
		{
			StreamReader sr = new StreamReader(PathManager.TVDocSettingsFile.FullName);
			txt += sr.ReadToEnd();
			sr.Close();
			txt += "\r\n";
		}
		catch
		{
			txt += "Error reading TVRenameSettings.xml\r\n";
		}
		txt += "\r\n";
	}
	if (this.cbFOScan.Checked || this.cbFolderScan.Checked)
	{
		// List the filename-processor regexes that either scan would use.
		txt += "==== Filename processors ====\r\n";
		foreach (FilenameProcessorRE s in this.mDoc.Settings.FNPRegexs)
			txt += (s.Enabled ? "Enabled" : "Disabled") + " \"" + s.RE + "\" " + (s.UseFullPath ? "(FullPath)" : "") + "\r\n";
		txt += "\r\n";
	}
	if (this.cbFOScan.Checked)
	{
		// Scan the search folders and report season/episode parsing per file.
		txt += "==== Finding & Organising Directory Scan ====" + "\r\n";
		txt += "\r\n";
		DirCache dirC = new DirCache();
		foreach (string efi in this.mDoc.SearchFolders)
			dirC.AddFolder(null, 0, 0, efi, true, this.mDoc.Settings);
		foreach (DirCacheEntry fi in dirC)
		{
			int seas;
			int ep;
			bool r = this.mDoc.FindSeasEp(fi.TheFile, out seas, out ep, null);
			bool useful = fi.HasUsefulExtension_NotOthersToo;
			txt += fi.TheFile.FullName + " (" + (r ? "OK" : "No") + " " + seas + "," + ep + " " + (useful ? fi.TheFile.Extension : "-") + ")" + "\r\n";
		}
		txt += "\r\n";
	}
	if (this.cbFolderScan.Checked)
	{
		// Same parsing report, but over each show's configured media folders.
		txt += "==== Media Folders Directory Scan ====" + "\r\n";
		foreach (ShowItem si in this.mDoc.GetShowItems(true))
		{
			foreach (System.Collections.Generic.KeyValuePair<int, List<ProcessedEpisode>> kvp in si.SeasonEpisodes)
			{
				int snum = kvp.Key;
				if (((snum == 0) && (si.CountSpecials)) || !si.AllFolderLocations(this.mDoc.Settings).ContainsKey(snum))
					continue; // skip specials
				foreach (string folder in si.AllFolderLocations(this.mDoc.Settings)[snum])
				{
					txt += si.TVDBCode + " : " + si.ShowName + " : S" + snum + "\r\n";
					txt += "Folder: " + folder;
					txt += "\r\n";
					DirCache files = new DirCache();
					if (Directory.Exists(folder))
						files.AddFolder(null, 0, 0, folder, true, this.mDoc.Settings);
					foreach (DirCacheEntry fi in files)
					{
						int seas;
						int ep;
						bool r = this.mDoc.FindSeasEp(fi.TheFile, out seas, out ep, si);
						bool useful = fi.HasUsefulExtension_NotOthersToo;
						txt += fi.TheFile.FullName + " (" + (r ? "OK" : "No") + " " + seas + "," + ep + " " + (useful ? fi.TheFile.Extension : "-") + ")" + "\r\n";
					}
					txt += "\r\n";
				}
			}
			txt += "\r\n";
		}
		// GetShowItems(true) above took the lock; release it when done.
		this.mDoc.UnlockShowItems();
		txt += "\r\n";
	}
	this.txtEmailText.Text = txt;
}
/// <summary>Create a new iterator for an already loaded DirCache instance.</summary>
/// <remarks>
/// Create a new iterator for an already loaded DirCache instance.
/// <p>
/// The iterator implementation may copy part of the cache's data during
/// construction, so the cache must be read in prior to creating the
/// iterator.
/// </remarks>
/// <param name="dc">the cache to walk. It must be already loaded into memory.</param>
public DirCacheIterator(DirCache dc)
{
	cache = dc;
	// Obtain (building if necessary) the root cache tree; its entry span
	// defines the range this iterator covers.
	tree = dc.GetCacheTree(true);
	treeStart = 0;
	treeEnd = tree.GetEntrySpan();
	subtreeId = new byte[Constants.OBJECT_ID_LENGTH];
	// Only parse the first entry when the cache is non-empty.
	if (!Eof)
	{
		ParseEntry();
	}
}
/// <summary>
/// Runs the three-way merge over base, ours and theirs, building the new
/// index; returns true when no unmerged paths remain.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
protected internal override bool MergeImpl()
{
	bool implicitDirCache = false;
	if (dircache == null)
	{
		// No cache supplied via SetDirCache(): take the repository's index
		// lock ourselves and release it in the finally block.
		dircache = GetRepository().LockDirCache();
		implicitDirCache = true;
	}
	try
	{
		builder = dircache.Builder();
		DirCacheBuildIterator buildIt = new DirCacheBuildIterator(builder);
		// Walk base, ours, theirs and the index builder in parallel; append the
		// working tree iterator when one was provided.
		tw = new NameConflictTreeWalk(db);
		tw.AddTree(MergeBase());
		tw.AddTree(sourceTrees[0]);
		tw.AddTree(sourceTrees[1]);
		tw.AddTree(buildIt);
		if (workingTreeIterator != null)
		{
			tw.AddTree(workingTreeIterator);
		}
		while (tw.Next())
		{
			if (!ProcessEntry(tw.GetTree<CanonicalTreeParser>(T_BASE), tw.GetTree<CanonicalTreeParser>(T_OURS), tw.GetTree<CanonicalTreeParser>(T_THEIRS), tw.GetTree<DirCacheBuildIterator>(T_INDEX), (workingTreeIterator == null) ? null : tw.GetTree<WorkingTreeIterator>(T_FILE)))
			{
				// ProcessEntry signalled a fatal problem: undo and abort.
				CleanUp();
				return false;
			}
			if (tw.IsSubtree && enterSubtree)
			{
				tw.EnterSubtree();
			}
		}
		if (!inCore)
		{
			// All content-merges are successfully done. If we can now write the
			// new index we are on quite safe ground. Even if the checkout of
			// files coming from "theirs" fails the user can work around such
			// failures by checking out the index again.
			if (!builder.Commit())
			{
				CleanUp();
				throw new IndexWriteException();
			}
			builder = null;
			// No problem found. The only thing left to be done is to checkout
			// all files from "theirs" which have been selected to go into the
			// new index.
			Checkout();
		}
		else
		{
			builder.Finish();
			builder = null;
		}
		if (GetUnmergedPaths().IsEmpty())
		{
			// Clean merge: materialise the result tree.
			resultTree = dircache.WriteTree(oi);
			return true;
		}
		else
		{
			resultTree = null;
			return false;
		}
	}
	finally
	{
		if (implicitDirCache)
		{
			dircache.Unlock();
		}
	}
}
/// <summary>
/// Constructs a DirCacheCheckout for merging and checking out two trees (HEAD
/// and mergeCommitTree) and the index.
/// </summary>
/// <param name="repo">the repository in which we do the checkout</param>
/// <param name="headCommitTree">the id of the tree of the head commit</param>
/// <param name="dc">the (already locked) Dircache for this repo</param>
/// <param name="mergeCommitTree">the id of the tree we want to fast-forward to</param>
/// <param name="workingTree">an iterator over the repositories Working Tree</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public DirCacheCheckout(Repository repo, ObjectId headCommitTree, DirCache dc, ObjectId mergeCommitTree, WorkingTreeIterator workingTree)
{
	// Simply record the collaborators; no work happens until checkout runs.
	this.workingTree = workingTree;
	this.mergeCommitTree = mergeCommitTree;
	this.headCommitTree = headCommitTree;
	this.dc = dc;
	this.repo = repo;
}
/// <summary>Construct a new builder.</summary>
/// <remarks>Construct a new builder; all state lives in the base editor.</remarks>
/// <param name="dc">the cache this builder will eventually update.</param>
/// <param name="ecnt">
/// estimated number of entries the builder will have upon
/// completion. This sizes the initial entry table.
/// </param>
protected internal DirCacheBuilder(DirCache dc, int ecnt) : base(dc, ecnt)
{
}
/// <summary>
/// Constructs a DirCacheCheckout for checking out one tree, merging with the
/// index.
/// </summary>
/// <remarks>
/// Constructs a DirCacheCheckout for checking out one tree, merging with the
/// index.
/// </remarks>
/// <param name="repo">the repository in which we do the checkout</param>
/// <param name="dc">the (already locked) Dircache for this repo</param>
/// <param name="mergeCommitTree">the id of the tree we want to fast-forward to</param>
/// <param name="workingTree">an iterator over the repositories Working Tree</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public DirCacheCheckout(Repository repo, DirCache dc, ObjectId mergeCommitTree, WorkingTreeIterator workingTree) : this(repo, null, dc, mergeCommitTree, workingTree)
{
}