/// <summary>
/// Discover the installation prefix of the native git executable, i.e. the
/// grandparent directory of the "git" binary found on the PATH
/// (e.g. /usr/bin/git yields /usr). Returns null when git cannot be found.
/// </summary>
protected internal override FilePath DiscoverGitPrefix()
{
    string path = SystemReader.GetInstance().Getenv("PATH");
    FilePath gitExe = SearchPath(path, "git");
    if (gitExe != null)
    {
        // Strip the binary name and its "bin" directory to get the prefix.
        return gitExe.GetParentFile().GetParentFile();
    }
    if (SystemReader.GetInstance().IsMacOS())
    {
        // On MacOSX, PATH is shorter when Eclipse is launched from the
        // Finder than from a terminal. Therefore try to launch bash as a
        // login shell and search using that.
        string w = ReadPipe(UserHome(), new string[] { "bash", "--login", "-c", "which git" }, Encoding.Default.Name());
        if (w == null || w.Length == 0)
        {
            return null;
        }
        FilePath parentFile = new FilePath(w).GetParentFile();
        if (parentFile == null)
        {
            // "which git" output had no parent directory component.
            return null;
        }
        return parentFile.GetParentFile();
    }
    return null;
}
/// <summary>Create a new lock for a pack file.</summary>
/// <remarks>
/// Create a new lock for a pack file. The lock is represented by a
/// <code>pack-*.keep</code> file placed next to the pack.
/// </remarks>
/// <param name="packFile">location of the <code>pack-*.pack</code> file.</param>
/// <param name="fs">the filesystem abstraction used by the repository.</param>
public PackLock(FilePath packFile, FS fs)
{
    FilePath dir = packFile.GetParentFile();
    string packName = packFile.GetName();
    // Swap the trailing ".pack" (5 characters) for ".keep".
    string keepName = packName.Substring(0, packName.Length - 5) + ".keep";
    keepFile = new FilePath(dir, keepName);
    this.fs = fs;
}
/// <summary>
/// Prepare an isolated OpenSshConfig fixture: a fake home directory and an
/// empty ~/.ssh/config inside the test trash area, so the real user
/// configuration is never touched.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    home = new FilePath(trash, "home");
    FileUtils.Mkdir(home);
    configFile = new FilePath(new FilePath(home, ".ssh"), Constants.CONFIG);
    FileUtils.Mkdir(configFile.GetParentFile());
    // Pin the user name so user-name substitution in config entries is deterministic.
    Runtime.SetProperty("user.name", "jex_junit");
    osc = new OpenSshConfig(home, configFile);
}
/// <summary>
/// Open an alternate object directory. When the directory belongs to a
/// full git repository the repository itself is opened through the cache,
/// so its own alternates are honoured too; otherwise the bare object
/// directory is wrapped directly.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private FileObjectDatabase.AlternateHandle OpenAlternate(FilePath objdir)
{
    // objdir is "<repo>/.git/objects" (or similar); its parent is the
    // candidate repository directory.
    FilePath repoDir = objdir.GetParentFile();
    if (RepositoryCache.FileKey.IsGitRepository(repoDir, fs))
    {
        RepositoryCache.FileKey key = RepositoryCache.FileKey.Exact(repoDir, fs);
        FileRepository repo = (FileRepository)RepositoryCache.Open(key);
        return new FileObjectDatabase.AlternateRepository(repo);
    }
    NGit.Storage.File.ObjectDirectory odb = new NGit.Storage.File.ObjectDirectory(config
        , objdir, null, fs);
    return new FileObjectDatabase.AlternateHandle(odb);
}
/// <summary>
/// Append a host key to the in-memory pool and, when a known_hosts
/// repository is configured, try to persist the pool, prompting the user
/// (via <code>userinfo</code>) before creating a missing file or its
/// parent directory.
/// </summary>
public override void Add(HostKey hostkey, UserInfo userinfo)
{
    int type = hostkey.type;
    string host = hostkey.GetHost();
    byte[] key = hostkey.key;  // NOTE(review): assigned but never used below
    HostKey hk = null;
    lock (pool)
    {
        for (int i = 0; i < pool.Count; i++)
        {
            hk = (HostKey)(pool[i]);
            // Intentionally empty: duplicates are not removed or merged
            // here; the new key is always appended below.
            if (hk.IsMatched(host) && hk.type == type)
            {
            }
        }
    }
    hk = hostkey;
    pool.Add(hk);
    string bar = GetKnownHostsRepositoryID();
    if (bar != null)
    {
        // foo: whether it is OK to sync; goo: the known_hosts file, later
        // reused for its parent directory.
        bool foo = true;
        FilePath goo = new FilePath(bar);
        if (!goo.Exists())
        {
            foo = false;
            if (userinfo != null)
            {
                // Ask before creating the known_hosts file itself ...
                foo = userinfo.PromptYesNo(bar + " does not exist.\n" + "Are you sure you want to create it?"
                    );
                goo = goo.GetParentFile();
                // ... and, when needed, before creating its parent directory.
                if (foo && goo != null && !goo.Exists())
                {
                    foo = userinfo.PromptYesNo("The parent directory " + goo + " does not exist.\n" +
                        "Are you sure you want to create it?");
                    if (foo)
                    {
                        if (!goo.Mkdirs())
                        {
                            userinfo.ShowMessage(goo + " has not been created.");
                            foo = false;
                        }
                        else
                        {
                            userinfo.ShowMessage(goo + " has been succesfully created.\nPlease check its access permission."
                                );
                        }
                    }
                }
                // No parent directory at all: cannot create the file.
                if (goo == null)
                {
                    foo = false;
                }
            }
        }
        if (foo)
        {
            try
            {
                // Persist the pool; failures are reported but not fatal.
                Sync(bar);
            }
            catch (Exception e)
            {
                System.Console.Error.WriteLine("sync known_hosts: " + e);
            }
        }
    }
}
/// <summary>
/// Create a reflog file with the given name under the repository's git
/// directory and fill it with the supplied raw bytes, creating any missing
/// parent directories first.
/// </summary>
/// <param name="logName">path of the reflog file relative to the git directory.</param>
/// <param name="data">raw bytes to write into the reflog file.</param>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="System.IO.IOException">the parent directory could not be created.</exception>
private void SetupReflog(string logName, byte[] data)
{
    FilePath logfile = new FilePath(db.Directory, logName);
    // Hoisted: the original evaluated GetParentFile() twice in the condition.
    FilePath parent = logfile.GetParentFile();
    // Mkdirs() returns false when the directory already exists, so only
    // fail when it is also not a directory afterwards.
    if (!parent.Mkdirs() && !parent.IsDirectory())
    {
        throw new IOException("oops, cannot create the directory for the test reflog file"
            + logfile);
    }
    FileOutputStream fileOutputStream = new FileOutputStream(logfile);
    try
    {
        fileOutputStream.Write(data);
    }
    finally
    {
        fileOutputStream.Close();
    }
}
/// <summary>
/// Open a repository with an explicitly overridden index file and object
/// directory plus an alternate object directory borrowed from the default
/// test repository; verify all derived paths resolve as configured and
/// that an object is reachable through the alternate.
/// </summary>
public virtual void Test000_openrepo_alternate_index_file_and_objdirs()
{
    FilePath repo1Parent = new FilePath(trash.GetParentFile(), "r1");
    FilePath indexFile = new FilePath(trash, "idx");
    FilePath objDir = new FilePath(trash, "../obj");
    FilePath altObjDir = ((ObjectDirectory)db.ObjectDatabase).GetDirectory();
    // Create the repository on disk first, then re-open it via the builder.
    Repository repo1initial = new FileRepository(new FilePath(repo1Parent, Constants.
        DOT_GIT));
    repo1initial.Create();
    repo1initial.Close();
    FilePath theDir = new FilePath(repo1Parent, Constants.DOT_GIT);
    FileRepository r = new FileRepositoryBuilder().SetGitDir(theDir).SetObjectDirectory
        (objDir).AddAlternateObjectDirectory(altObjDir).SetIndexFile(indexFile).Build();
    AssertEqualsPath(theDir, r.Directory);
    AssertEqualsPath(theDir.GetParentFile(), r.WorkTree);
    AssertEqualsPath(indexFile, r.GetIndexFile());
    AssertEqualsPath(objDir, ((ObjectDirectory)r.ObjectDatabase).GetDirectory());
    // This object lives only in the alternate object directory.
    NUnit.Framework.Assert.IsNotNull(r.Open(ObjectId.FromString("6db9c2ebf75590eef973081736730a9ea169a0c4"
        )));
    // Must close or the default repo pack files created by this test gets
    // locked via the alternate object directories on Windows.
    r.Close();
}
/// <summary>
/// A submodule whose ".git" is a plain file containing a relative
/// "gitdir:" pointer into the parent repository's modules directory must
/// be discovered by SubmoduleWalk with the correct directory, git
/// directory, and work tree.
/// </summary>
public virtual void RepositoryWithRootLevelSubmoduleRelativeRef()
{
    ObjectId id = ObjectId.FromString("abcd1234abcd1234abcd1234abcd1234abcd1234");
    string path = "sub";
    FilePath dotGit = new FilePath(db.WorkTree, path + FilePath.separatorChar + Constants
        .DOT_GIT);
    if (!dotGit.GetParentFile().Exists())
    {
        dotGit.GetParentFile().Mkdirs();
    }
    FilePath modulesGitDir = new FilePath(db.Directory, "modules" + FilePath.separatorChar
        + path);
    // Write the ".git" indirection file pointing at the modules dir.
    new FileWriter(dotGit).Append("gitdir: " + "../" + Constants.DOT_GIT + "/modules/"
        + path).Close();
    FileRepositoryBuilder builder = new FileRepositoryBuilder();
    builder.SetWorkTree(new FilePath(db.WorkTree, path));
    builder.Build().Create();
    // Register the submodule as an index entry at "sub".
    DirCache cache = db.LockDirCache();
    DirCacheEditor editor = cache.Editor();
    editor.Add(new _PathEdit_203(id, path));
    editor.Commit();
    SubmoduleWalk gen = SubmoduleWalk.ForIndex(db);
    NUnit.Framework.Assert.IsTrue(gen.Next());
    NUnit.Framework.Assert.AreEqual(path, gen.GetPath());
    NUnit.Framework.Assert.AreEqual(id, gen.GetObjectId());
    NUnit.Framework.Assert.AreEqual(new FilePath(db.WorkTree, path), gen.GetDirectory
        ());
    // No .gitmodules metadata was written, so all lookups must be null.
    NUnit.Framework.Assert.IsNull(gen.GetConfigUpdate());
    NUnit.Framework.Assert.IsNull(gen.GetConfigUrl());
    NUnit.Framework.Assert.IsNull(gen.GetModulesPath());
    NUnit.Framework.Assert.IsNull(gen.GetModulesUpdate());
    NUnit.Framework.Assert.IsNull(gen.GetModulesUrl());
    Repository subRepo = gen.GetRepository();
    AddRepoToClose(subRepo);
    NUnit.Framework.Assert.IsNotNull(subRepo);
    // The resolved git dir must be the modules dir the pointer referenced.
    NUnit.Framework.Assert.AreEqual(modulesGitDir, subRepo.Directory);
    NUnit.Framework.Assert.AreEqual(new FilePath(db.WorkTree, path), subRepo.WorkTree
        );
    NUnit.Framework.Assert.IsFalse(gen.Next());
}
/// <summary>Write a string as a UTF-8 file.</summary>
/// <remarks>
/// Write a string as a UTF-8 encoded file, creating any missing parent
/// directories first.
/// </remarks>
/// <param name="f">
/// file to write the string to. Caller is responsible for making
/// sure it is in the trash directory or will otherwise be cleaned
/// up at the end of the test. If the parent directory does not
/// exist, the missing parent directories are automatically
/// created.
/// </param>
/// <param name="body">content to write to the file.</param>
/// <exception cref="System.IO.IOException">the file could not be written.</exception>
public static void Write(FilePath f, string body)
{
    // Make sure the destination directory hierarchy exists.
    FileUtils.Mkdirs(f.GetParentFile(), true);
    TextWriter writer = new OutputStreamWriter(new FileOutputStream(f), "UTF-8");
    try
    {
        writer.Write(body);
    }
    finally
    {
        // Always release the underlying stream, even on write failure.
        writer.Close();
    }
}
/// <summary>
/// Perform the actual checkout: scan the trees, abort or clean up on
/// conflicts, delete removed files (deepest first), write updated files,
/// and commit the new index.
/// </summary>
/// <returns>
/// true when every file scheduled for deletion was removed; false when
/// some deletions failed (those paths are collected in toBeDeleted).
/// </returns>
/// <exception cref="NGit.Errors.CorruptObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException"></exception>
/// <exception cref="NGit.Errors.CheckoutConflictException"></exception>
/// <exception cref="NGit.Errors.IndexWriteException"></exception>
private bool DoCheckout()
{
    toBeDeleted.Clear();
    ObjectReader objectReader = repo.ObjectDatabase.NewReader();
    try
    {
        // Build the update/remove/conflict sets from either two trees
        // (HEAD + target) or one.
        if (headCommitTree != null)
        {
            PreScanTwoTrees();
        }
        else
        {
            PrescanOneTree();
        }
        if (!conflicts.IsEmpty())
        {
            if (failOnConflict)
            {
                throw new NGit.Errors.CheckoutConflictException(Sharpen.Collections.ToArray(conflicts
                    , new string[conflicts.Count]));
            }
            else
            {
                CleanUpConflicts();
            }
        }
        // update our index
        builder.Finish();
        FilePath file = null;
        string last = string.Empty;
        // when deleting files process them in the opposite order as they have
        // been reported. This ensures the files are deleted before we delete
        // their parent folders
        for (int i = removed.Count - 1; i >= 0; i--)
        {
            string r = removed[i];
            file = new FilePath(repo.WorkTree, r);
            if (!file.Delete() && file.Exists())
            {
                // The list of stuff to delete comes from the index
                // which will only contain a directory if it is
                // a submodule, in which case we shall not attempt
                // to delete it. A submodule is not empty, so it
                // is safe to check this after a failed delete.
                if (!file.IsDirectory())
                {
                    toBeDeleted.AddItem(r);
                }
            }
            else
            {
                // Once we leave a directory prefix, its now-empty parents
                // can be pruned.
                if (!IsSamePrefix(r, last))
                {
                    RemoveEmptyParents(new FilePath(repo.WorkTree, last));
                }
                last = r;
            }
        }
        if (file != null)
        {
            RemoveEmptyParents(file);
        }
        foreach (string path in updated.Keys)
        {
            // ... create/overwrite this file ...
            file = new FilePath(repo.WorkTree, path);
            // Mkdirs() returning false usually means the directory already
            // exists; deliberately ignored.
            if (!file.GetParentFile().Mkdirs())
            {
            }
            DirCacheEntry entry = dc.GetEntry(path);
            // submodules are handled with separate operations
            if (FileMode.GITLINK.Equals(entry.RawMode))
            {
                continue;
            }
            CheckoutEntry(repo, file, entry, objectReader);
        }
        // commit the index builder - a new index is persisted
        if (!builder.Commit())
        {
            throw new IndexWriteException();
        }
    }
    finally
    {
        objectReader.Release();
    }
    return toBeDeleted.Count == 0;
}
/// <summary>Create a new lock for any file.</summary>
/// <remarks>
/// Create a new lock for any file. The lock file is placed beside the
/// target, named after it with the lock SUFFIX appended.
/// </remarks>
/// <param name="f">the file that will be locked.</param>
/// <param name="fs">
/// the file system abstraction which will be necessary to perform
/// certain file system operations.
/// </param>
public LockFile(FilePath f, FS fs)
{
    @ref = f;
    lck = new FilePath(f.GetParentFile(), f.GetName() + SUFFIX);
    this.fs = fs;
}
/// <summary>Execute this checkout</summary>
/// <returns>
/// <code>false</code> if this method could not delete all the files
/// which should be deleted (e.g. because one of the files was
/// locked). In this case
/// <see cref="GetToBeDeleted()">GetToBeDeleted()</see>
/// lists the files
/// which should be tried to be deleted outside of this method.
/// Although <code>false</code> is returned the checkout was
/// successful and the working tree was updated for all other files.
/// <code>true</code> is returned when no such problem occurred
/// </returns>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual bool Checkout()
{
    toBeDeleted.Clear();
    // Build the update/remove/conflict sets from either two trees or one.
    if (headCommitTree != null)
    {
        PreScanTwoTrees();
    }
    else
    {
        PrescanOneTree();
    }
    if (!conflicts.IsEmpty())
    {
        if (failOnConflict)
        {
            // Release the index lock before aborting with conflicts.
            dc.Unlock();
            throw new CheckoutConflictException(Sharpen.Collections.ToArray(conflicts, new string
                [conflicts.Count]));
        }
        else
        {
            CleanUpConflicts();
        }
    }
    // update our index
    builder.Finish();
    FilePath file = null;
    string last = string.Empty;
    // when deleting files process them in the opposite order as they have
    // been reported. This ensures the files are deleted before we delete
    // their parent folders
    for (int i = removed.Count - 1; i >= 0; i--)
    {
        string r = removed[i];
        file = new FilePath(repo.WorkTree, r);
        if (!file.Delete() && file.Exists())
        {
            // Could not delete: report the path via GetToBeDeleted().
            toBeDeleted.AddItem(r);
        }
        else
        {
            // Once we leave a directory prefix, prune now-empty parents.
            if (!IsSamePrefix(r, last))
            {
                RemoveEmptyParents(file);
            }
            last = r;
        }
    }
    if (file != null)
    {
        RemoveEmptyParents(file);
    }
    foreach (string path in updated.Keys)
    {
        // ... create/overwrite this file ...
        file = new FilePath(repo.WorkTree, path);
        file.GetParentFile().Mkdirs();
        file.CreateNewFile();
        DirCacheEntry entry = dc.GetEntry(path);
        CheckoutEntry(repo, file, entry);
    }
    // commit the index builder - a new index is persisted
    if (!builder.Commit())
    {
        dc.Unlock();
        throw new IndexWriteException();
    }
    return toBeDeleted.Count == 0;
}
/// <summary>
/// Rename <code>src</code> to <code>dst</code>, creating the destination's
/// parent directory and retrying once if the first attempt fails.
/// </summary>
/// <returns>true when the rename ultimately succeeded.</returns>
private static bool Rename(FilePath src, FilePath dst)
{
    // Fast path: the rename may succeed immediately.
    if (src.RenameTo(dst))
    {
        return true;
    }
    // The destination's parent may be missing; it is usable when it is
    // already a directory, or when it did not exist and we created it.
    FilePath parent = dst.GetParentFile();
    bool parentReady = parent.IsDirectory() || (!parent.Exists() && parent.Mkdirs());
    if (!parentReady)
    {
        return false;
    }
    return src.RenameTo(dst);
}
/// <summary>Create a new pack indexer utility.</summary>
/// <remarks>Create a new pack indexer utility.</remarks>
/// <param name="db">repository the pack will be indexed into.</param>
/// <param name="src">
/// stream to read the pack data from. If the stream is buffered
/// use
/// <see cref="BUFFER_SIZE">BUFFER_SIZE</see>
/// as the buffer size for the stream.
/// </param>
/// <param name="dstBase">
/// base path for the output; "&lt;dstBase&gt;.pack" and
/// "&lt;dstBase&gt;.idx" are created beside it, or null to consume
/// the pack without writing files.
/// </param>
/// <exception cref="System.IO.IOException">the output packfile could not be created.
/// </exception>
public IndexPack(Repository db, InputStream src, FilePath dstBase)
{
    repo = db;
    objectDatabase = db.ObjectDatabase.NewCachedDatabase();
    @in = src;
    inflater = new IndexPack.InflaterStream(this);
    readCurs = objectDatabase.NewReader();
    buf = new byte[BUFFER_SIZE];
    readBuffer = new byte[BUFFER_SIZE];
    objectDigest = Constants.NewMessageDigest();
    tempObjectId = new MutableObjectId();
    packDigest = Constants.NewMessageDigest();
    if (dstBase != null)
    {
        // Derive the .pack/.idx file names from the destination base path.
        FilePath dir = dstBase.GetParentFile();
        string nam = dstBase.GetName();
        dstPack = new FilePath(dir, nam + ".pack");
        dstIdx = new FilePath(dir, nam + ".idx");
        packOut = new RandomAccessFile(dstPack, "rw");
        // Truncate any leftover content from a previous attempt.
        packOut.SetLength(0);
    }
    else
    {
        dstPack = null;
        dstIdx = null;
    }
}
/// <summary>
/// Delete empty parent directories of <code>f</code>, walking upward and
/// stopping at the root, at the first non-empty directory, or when the
/// directory contents cannot be listed.
/// </summary>
/// <param name="f">file whose now-unneeded parent directories are pruned.</param>
private void RemoveEmptyParents(FilePath f)
{
    FilePath parentFile = f.GetParentFile();
    while (!parentFile.Equals(root))
    {
        // List() returns null when the path is not a listable directory;
        // the original dereferenced it unconditionally and could throw
        // a NullReferenceException here.
        string[] children = parentFile.List();
        if (children == null || children.Length != 0)
        {
            break;
        }
        parentFile.Delete();
        parentFile = parentFile.GetParentFile();
    }
}
/// <summary>
/// Record checkout conflicts caused by <code>file</code>: if it is a
/// directory, every file beneath it conflicts; otherwise an ancestor that
/// exists as a plain file blocks creation of the path and conflicts.
/// </summary>
private void CheckConflictsWithFile(FilePath file)
{
    if (file.IsDirectory())
    {
        // A directory occupies the place of the file: all of its
        // contents are in conflict.
        Sharpen.Collections.AddAll(conflicts, ListFiles(file));
    }
    else
    {
        // Walk up toward the work-tree root. A directory ancestor means
        // the path is fine; a plain-file ancestor is the conflict.
        for (FilePath p = file.GetParentFile(); !p.Equals(root); p = p.GetParentFile())
        {
            if (p.IsDirectory())
            {
                break;
            }
            if (p.IsFile())
            {
                conflicts.AddItem(Repository.StripWorkDir(root, p));
                break;
            }
        }
    }
}
/// <summary>
/// Delete parent directories of <code>f</code> upward until the work tree
/// root is reached or a delete fails (e.g. the directory is not empty).
/// </summary>
private void RemoveEmptyParents(FilePath f)
{
    for (FilePath dir = f.GetParentFile(); !dir.Equals(repo.WorkTree); dir = dir.GetParentFile())
    {
        // Delete() fails on non-empty directories, which stops the walk.
        if (!dir.Delete())
        {
            break;
        }
    }
}
/// <summary>Guess the proper path for a Git repository.</summary>
/// <remarks>
/// Guess the proper path for a Git repository.
/// <p>
/// The method performs some basic guessing to locate the repository.
/// Searched paths are:
/// <ol>
/// <li>
/// <code>directory</code>
/// // assume exact match</li>
/// <li>
/// <code>directory</code>
/// + "/.git" // assume working directory</li>
/// <li>
/// <code>directory</code>
/// + ".git" // assume bare</li>
/// </ol>
/// </remarks>
/// <param name="directory">location to guess from. Several permutations are tried.</param>
/// <param name="fs">
/// the file system abstraction which will be necessary to
/// perform certain file system operations.
/// </param>
/// <returns>
/// the actual directory location if a better match is found;
/// null if there is no suitable match.
/// </returns>
public static FilePath Resolve(FilePath directory, FS fs)
{
    // 1) The directory itself is a git directory.
    if (IsGitRepository(directory, fs))
    {
        return directory;
    }
    // 2) A working directory containing ".git".
    FilePath dotGit = new FilePath(directory, Constants.DOT_GIT);
    if (IsGitRepository(dotGit, fs))
    {
        return dotGit;
    }
    // 3) A bare sibling named "<name>.git".
    FilePath bare = new FilePath(directory.GetParentFile(), directory.GetName() + Constants.DOT_GIT_EXT);
    if (IsGitRepository(bare, fs))
    {
        return bare;
    }
    return null;
}
/// <summary>
/// Updates the file in the working tree with content and mode from an entry
/// in the index.
/// </summary>
/// <remarks>
/// Updates the file in the working tree with content and mode from an entry
/// in the index. The new content is first written to a new temporary file in
/// the same directory as the real file. Then that new file is renamed to the
/// final filename.
/// <p>
/// TODO: this method works directly on File IO, we may need another
/// abstraction (like WorkingTreeIterator). This way we could tell e.g.
/// Eclipse that Files in the workspace got changed
/// </p>
/// </remarks>
/// <param name="repo">repository the entry belongs to.</param>
/// <param name="f">
/// the file to be modified. The parent directory for this file
/// has to exist already
/// </param>
/// <param name="entry">the entry containing new mode and content</param>
/// <param name="or">object reader to use for checkout</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public static void CheckoutEntry(Repository repo, FilePath f, DirCacheEntry entry
    , ObjectReader or)
{
    ObjectLoader ol = or.Open(entry.GetObjectId());
    FilePath parentDir = f.GetParentFile();
    // Write into a temporary file beside the target so the final rename
    // stays on the same filesystem.
    FilePath tmpFile = FilePath.CreateTempFile("._" + f.GetName(), null, parentDir);
    WorkingTreeOptions opt = repo.GetConfig().Get(WorkingTreeOptions.KEY);
    FileOutputStream rawChannel = new FileOutputStream(tmpFile);
    OutputStream channel;
    if (opt.GetAutoCRLF() == CoreConfig.AutoCRLF.TRUE)
    {
        // core.autocrlf=true: convert line endings while writing.
        channel = new AutoCRLFOutputStream(rawChannel);
    }
    else
    {
        channel = rawChannel;
    }
    try
    {
        ol.CopyTo(channel);
    }
    finally
    {
        channel.Close();
    }
    FS fs = repo.FileSystem;
    // Apply the executable bit when core.filemode is on and the
    // filesystem supports it.
    if (opt.IsFileMode() && fs.SupportsExecute())
    {
        if (FileMode.EXECUTABLE_FILE.Equals(entry.RawMode))
        {
            if (!fs.CanExecute(tmpFile))
            {
                fs.SetExecute(tmpFile, true);
            }
        }
        else
        {
            if (fs.CanExecute(tmpFile))
            {
                fs.SetExecute(tmpFile, false);
            }
        }
    }
    if (!tmpFile.RenameTo(f))
    {
        // The rename failed. Let's delete the target file and try again.
        FileUtils.Delete(f);
        if (!tmpFile.RenameTo(f))
        {
            throw new IOException(MessageFormat.Format(JGitText.Get().couldNotWriteFile, tmpFile
                .GetPath(), f.GetPath()));
        }
    }
    entry.LastModified = f.LastModified();
    if (opt.GetAutoCRLF() != CoreConfig.AutoCRLF.FALSE)
    {
        // AutoCRLF wants on-disk-size: with conversion active the
        // working-tree length can differ from the blob length.
        entry.SetLength(f.Length());
    }
    else
    {
        entry.SetLength((int)ol.GetSize());
    }
}
/// <summary>
/// Setting both gitDir and an explicit work tree must be honoured, while
/// the index file and object directory still default to locations inside
/// the git directory.
/// </summary>
public virtual void Test000_openrepo_default_gitDirAndWorkTreeSet()
{
    FilePath repo1Parent = new FilePath(trash.GetParentFile(), "r1");
    // Create the repository on disk, then re-open it via the builder.
    Repository repo1initial = new FileRepository(new FilePath(repo1Parent, Constants.
        DOT_GIT));
    repo1initial.Create();
    repo1initial.Close();
    FilePath theDir = new FilePath(repo1Parent, Constants.DOT_GIT);
    // Point the work tree one level above the default location.
    FileRepository r = new FileRepositoryBuilder().SetGitDir(theDir).SetWorkTree(repo1Parent
        .GetParentFile()).Build();
    AssertEqualsPath(theDir, r.Directory);
    AssertEqualsPath(repo1Parent.GetParentFile(), r.WorkTree);
    AssertEqualsPath(new FilePath(theDir, "index"), r.GetIndexFile());
    AssertEqualsPath(new FilePath(theDir, "objects"), ((ObjectDirectory)r.ObjectDatabase
        ).GetDirectory());
}
/// <summary>
/// Recursive delete must remove a populated directory tree, must fail on a
/// missing directory, and must not fail on a missing directory when
/// SKIP_MISSING is set.
/// </summary>
public virtual void TestDeleteRecursive()
{
    // Build trash/test/test/{a,b} so the delete has nested content.
    FilePath f1 = new FilePath(trash, "test/test/a");
    FileUtils.Mkdirs(f1.GetParentFile());
    FileUtils.CreateNewFile(f1);
    FilePath f2 = new FilePath(trash, "test/test/b");
    FileUtils.CreateNewFile(f2);
    FilePath d = new FilePath(trash, "test");
    FileUtils.Delete(d, FileUtils.RECURSIVE);
    NUnit.Framework.Assert.IsFalse(d.Exists());
    // Deleting again without SKIP_MISSING must throw.
    try
    {
        FileUtils.Delete(d, FileUtils.RECURSIVE);
        NUnit.Framework.Assert.Fail("recursive deletion of non-existing directory must fail"
            );
    }
    catch (IOException)
    {
        // expected
    }
    // With SKIP_MISSING the missing directory is tolerated.
    try
    {
        FileUtils.Delete(d, FileUtils.RECURSIVE | FileUtils.SKIP_MISSING);
    }
    catch (IOException)
    {
        NUnit.Framework.Assert.Fail("recursive deletion of non-existing directory must not fail with option SKIP_MISSING"
            );
    }
}
/// <summary>Get the lock file corresponding to the given file.</summary>
/// <remarks>
/// Get the lock file corresponding to the given file: a sibling named
/// after the file with the lock SUFFIX appended.
/// </remarks>
/// <param name="file">the file to compute the lock path for.</param>
/// <returns>lock file</returns>
internal static FilePath GetLockFile(FilePath file)
{
    string lockName = file.GetName() + SUFFIX;
    return new FilePath(file.GetParentFile(), lockName);
}
/// <summary>
/// Apply the computed merge result to the working tree: check out all
/// scheduled entries, then delete scheduled paths (deepest first),
/// recording any path that could not be deleted as a failing path.
/// </summary>
/// <exception cref="NGit.Errors.NoWorkTreeException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private void Checkout()
{
    ObjectReader r = db.ObjectDatabase.NewReader();
    try
    {
        foreach (KeyValuePair<string, DirCacheEntry> entry in toBeCheckedOut.EntrySet())
        {
            FilePath f = new FilePath(db.WorkTree, entry.Key);
            // Make sure the parent directory exists before writing.
            CreateDir(f.GetParentFile());
            DirCacheCheckout.CheckoutEntry(db, f, entry.Value, r);
            modifiedFiles.AddItem(entry.Key);
        }
        // Iterate in reverse so that "folder/file" is deleted before
        // "folder". Otherwise this could result in a failing path because
        // of a non-empty directory, for which delete() would fail.
        for (int i = toBeDeleted.Count - 1; i >= 0; i--)
        {
            string fileName = toBeDeleted[i];
            FilePath f = new FilePath(db.WorkTree, fileName);
            if (!f.Delete())
            {
                failingPaths.Put(fileName, ResolveMerger.MergeFailureReason.COULD_NOT_DELETE);
            }
            modifiedFiles.AddItem(fileName);
        }
    }
    finally
    {
        // Always release the reader, even if checkout failed part-way.
        r.Release();
    }
}
/// <summary>
/// Apply scheduled work-tree changes: a non-null entry is checked out
/// (creating its parent directory first); a null entry means the path is
/// deleted, recording a failure reason when the delete does not succeed.
/// Every touched path is added to the modified-files list.
/// </summary>
/// <exception cref="NGit.Errors.NoWorkTreeException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private void Checkout()
{
    foreach (KeyValuePair<string, DirCacheEntry> entry in toBeCheckedOut.EntrySet())
    {
        string path = entry.Key;
        DirCacheEntry dce = entry.Value;
        FilePath f = new FilePath(db.WorkTree, path);
        if (dce == null)
        {
            // Null value: the path must be removed from the work tree.
            if (!f.Delete())
            {
                failingPaths.Put(path, ResolveMerger.MergeFailureReason.COULD_NOT_DELETE);
            }
        }
        else
        {
            CreateDir(f.GetParentFile());
            DirCacheCheckout.CheckoutEntry(db, f, dce);
        }
        modifiedFiles.AddItem(path);
    }
}
/// <summary>Writes merged file content to the working tree.</summary>
/// <remarks>
/// Writes merged file content to the working tree. In case
/// <see cref="inCore">inCore</see>
/// is set and we don't have a working tree the content is written to a
/// temporary file
/// </remarks>
/// <param name="result">the result of the content merge</param>
/// <returns>
/// the file to which the merged content was written, or null when
/// running in-core with conflicts (nothing is written in that case).
/// </returns>
/// <exception cref="System.IO.FileNotFoundException">System.IO.FileNotFoundException
/// </exception>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
private FilePath WriteMergedFile(MergeResult<RawText> result)
{
    MergeFormatter fmt = new MergeFormatter();
    FilePath of = null;
    FileOutputStream fos;
    if (!inCore)
    {
        FilePath workTree = db.WorkTree;
        if (workTree == null)
        {
            // TODO: This should be handled by WorkingTreeIterators which
            // support write operations
            throw new NGit.Errors.NotSupportedException();
        }
        of = new FilePath(workTree, tw.PathString);
        // Make sure the destination directory exists before writing.
        FilePath parentFolder = of.GetParentFile();
        if (!parentFolder.Exists())
        {
            parentFolder.Mkdirs();
        }
        fos = new FileOutputStream(of);
        try
        {
            fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING
                );
        }
        finally
        {
            fos.Close();
        }
    }
    else
    {
        if (!result.ContainsConflicts())
        {
            // When working inCore, only trivial merges can be handled,
            // so we generate objects only in conflict free cases
            of = FilePath.CreateTempFile("merge_", "_temp", null);
            fos = new FileOutputStream(of);
            try
            {
                fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING
                    );
            }
            finally
            {
                fos.Close();
            }
        }
    }
    return of;
}