IsDirectory() public method

public IsDirectory ( ) : bool
return bool
Example #1
0
		/// <summary>
		/// Verifies that repository initialization creates the expected
		/// .git directory layout and a HEAD file of the expected size.
		/// </summary>
		public virtual void Test001_Initalize()
		{
			FilePath gitDir = new FilePath(trash, Constants.DOT_GIT);
			FilePath objectsDir = new FilePath(gitDir, "objects");
			FilePath refsDir = new FilePath(gitDir, "refs");
			// Every directory the init routine must have created.
			FilePath[] expectedDirs = new FilePath[]
			{
				trash,
				new FilePath(gitDir, "hooks"),
				objectsDir,
				new FilePath(objectsDir, "pack"),
				new FilePath(objectsDir, "info"),
				refsDir,
				new FilePath(refsDir, "heads"),
				new FilePath(refsDir, "tags")
			};
			foreach (FilePath dir in expectedDirs)
			{
				NUnit.Framework.Assert.IsTrue(dir.IsDirectory(), "Exists " + dir);
			}
			// objects/ must contain exactly its two subdirectories.
			NUnit.Framework.Assert.AreEqual(2L, objectsDir.ListFiles().Length);
			// HEAD must be a regular file of exactly 23 bytes.
			FilePath head = new FilePath(gitDir, "HEAD");
			NUnit.Framework.Assert.IsTrue(head.IsFile(), "Exists " + head);
			NUnit.Framework.Assert.AreEqual(23, head.Length());
		}
 /// <summary>Opens a blob store rooted at <paramref name="path"/>, creating the directory if needed.</summary>
 /// <param name="path">Filesystem path of the blob-store directory.</param>
 /// <exception cref="InvalidOperationException">Thrown when the directory cannot be created.</exception>
 public BlobStore(string path)
 {
     this.path = path;
     FilePath storeDir = new FilePath(path);
     storeDir.Mkdirs();
     if (storeDir.IsDirectory())
     {
         return;
     }
     throw new InvalidOperationException(string.Format("Unable to create directory for: {0}", storeDir));
 }
Example #3
0
		/// <summary>
		/// Recursively collects every file under <paramref name="dir"/> that is
		/// accepted by <paramref name="filter"/>.
		/// </summary>
		/// <exception cref="ArgumentException">when <paramref name="dir"/> is not a directory</exception>
		public static FilePath[] RecursiveListFiles(FilePath dir, FileFilter filter)
		{
			if (!dir.IsDirectory())
				throw new ArgumentException(dir + " is not a directory");
			IList<FilePath> collected = new List<FilePath>();
			RecursiveListFilesHelper(dir, filter, collected);
			return Sharpen.Collections.ToArray(collected, new FilePath[collected.Count]);
		}
		/// <summary>Deletes a file, or a directory together with all of its contents.</summary>
		/// <returns>true when the path no longer exists afterwards.</returns>
		public static bool DeleteRecursive(FilePath fileOrDirectory)
		{
			if (fileOrDirectory.IsDirectory())
			{
				// Depth-first: children must go before their parent.
				foreach (FilePath child in fileOrDirectory.ListFiles())
				{
					DeleteRecursive(child);
				}
			}
			// Succeed if the delete worked, or the entry is already gone.
			return fileOrDirectory.Delete() || !fileOrDirectory.Exists();
		}
Example #5
0
			/// <summary>
			/// Accepts directories (so traversal can descend into them) and
			/// files whose extension is ".js".
			/// </summary>
			public override bool Accept(FilePath f)
			{
				if (f.IsDirectory())
				{
					return true;
				}
				string name = f.GetName();
				int i = name.LastIndexOf('.');
				// The dot must be neither the first nor the last character, so
				// dot-files (".js") and names ending in '.' are still rejected.
				if (i > 0 && i < name.Length - 1)
				{
					// BUG FIX: the previous ToLower() comparison was
					// culture-sensitive and would misclassify "JS" under e.g. the
					// Turkish locale ('I' lowercases to dotless 'ı'). Ordinal
					// case-insensitive comparison is locale-independent.
					string ext = name.Substring(i + 1);
					if (ext.Equals("js", StringComparison.OrdinalIgnoreCase))
					{
						return true;
					}
				}
				return false;
			}
        /// <summary>Opens (or creates) a blob store at <paramref name="path"/>, optionally encrypted.</summary>
        /// <param name="path">Filesystem path of the blob-store directory; must not be null.</param>
        /// <param name="encryptionKey">Key used to encrypt blobs, or null for a plain store.</param>
        /// <exception cref="ArgumentNullException">when <paramref name="path"/> is null</exception>
        /// <exception cref="InvalidOperationException">when the directory cannot be created</exception>
        public BlobStore(string path, SymmetricKey encryptionKey)
        {
            if (path == null) {
                throw new ArgumentNullException("path");
            }

            _path = path;
            EncryptionKey = encryptionKey;
            FilePath storeDir = new FilePath(path);
            if (storeDir.Exists() && storeDir.IsDirectory()) {
                // Existing blob-store: validate what is already on disk.
                VerifyExistingStore();
                return;
            }
            // New blob store; create the directory tree.
            storeDir.Mkdirs();
            if (!storeDir.IsDirectory()) {
                throw new InvalidOperationException(string.Format("Unable to create directory for: {0}", storeDir));
            }
            if (encryptionKey != null) {
                MarkEncrypted(true);
            }
        }
		/// <summary>
		/// Walks the index, HEAD and merge trees to classify paths before
		/// checkout; populates the removed/updated/conflicts collections.
		/// </summary>
		/// <exception cref="System.IO.IOException"></exception>
		internal virtual void PrescanTwoTrees()
		{
			// The visitor (declared elsewhere in this file) fills in the
			// removed/updated/conflicts collections as the walk proceeds.
			new IndexTreeWalker(index, head, merge, root, new _AbstractIndexTreeVisitor_267(this
				)).Walk();
			// if there's a conflict, don't list it under
			// to-be-removed, since that messed up our next
			// section
			removed.RemoveAll(conflicts);
			foreach (string path in updated.Keys)
			{
				// Updated paths absent from the index may collide with existing
				// untracked work-tree entries.
				if (index.GetEntry(path) == null)
				{
					FilePath file = new FilePath(root, path);
					if (file.IsFile())
					{
						// An untracked file occupies the path: record a conflict.
						conflicts.AddItem(path);
					}
					else
					{
						if (file.IsDirectory())
						{
							// A directory is in the way; scan it for conflicts.
							CheckConflictsWithFile(file);
						}
					}
				}
			}
			// Paths already scheduled for removal are not conflicts.
			conflicts.RemoveAll(removed);
		}
 /// <summary>Returns the "temp_attachments" directory under the store path, creating it on demand.</summary>
 /// <exception cref="InvalidOperationException">Thrown when the directory cannot be created.</exception>
 public FileInfo TempDir()
 {
     FilePath tempDir = new FilePath(new FilePath(path), "temp_attachments");
     tempDir.Mkdirs();
     if (!tempDir.IsDirectory())
     {
         throw new InvalidOperationException(string.Format("Unable to create directory for: {0}", tempDir));
     }
     return tempDir;
 }
Example #9
0
		/// <summary>Get submodule repository at path</summary>
		/// <param name="parent">parent directory containing the submodule work tree</param>
		/// <param name="path">path of the submodule, relative to <paramref name="parent"/></param>
		/// <returns>repository or null if repository doesn't exist</returns>
		/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
		public static Repository GetSubmoduleRepository(FilePath parent, string path)
		{
			// The work tree is the submodule directory itself; the original code
			// constructed an identical FilePath twice for no reason.
			FilePath workTree = new FilePath(parent, path);
			if (!workTree.IsDirectory())
			{
				return null;
			}
			try
			{
				return new RepositoryBuilder().SetMustExist(true).SetFS(FS.DETECTED).SetWorkTree(
					workTree).Build();
			}
			catch (RepositoryNotFoundException)
			{
				// The directory exists but holds no git repository.
				return null;
			}
		}
Example #10
0
 /// <summary>Creates a Manager rooted at the context's files directory.</summary>
 /// <param name="context">Platform context supplying the storage directory.</param>
 /// <param name="options">Manager options, or null to use <c>DefaultOptions</c>.</param>
 /// <exception cref="IOException">Thrown when the storage directory cannot be created.</exception>
 public Manager(Context context, ManagerOptions options)
 {
     Log.I(Database.Tag, "Starting Manager version: %s", Couchbase.Lite.Manager.Version
         );
     this.context = context;
     this.directoryFile = context.GetFilesDir();
     this.options = (options != null) ? options : DefaultOptions;
     this.databases = new Dictionary<string, Database>();
     this.replications = new AList<Replication>();
     directoryFile.Mkdirs();
     if (!directoryFile.IsDirectory())
     {
         // BUG FIX: string.Format uses {0} placeholders, not Java's %s; the
         // previous message rendered a literal "%s" and never showed the path.
         throw new IOException(string.Format("Unable to create directory for: {0}", directoryFile));
     }
     UpgradeOldDatabaseFiles(directoryFile);
     workExecutor = Executors.NewSingleThreadScheduledExecutor();
 }
 /// <summary>
 /// Merging must leave non-versioned paths alone: the untracked file "d"
 /// and directory "e" survive the merge, while tracked paths merge or
 /// conflict as usual.
 /// </summary>
 public virtual void TestMergeNonVersionedPaths()
 {
     Git git = new Git(db);
     WriteTrashFile("a", "1\na\n3\n");
     WriteTrashFile("b", "1\nb\n3\n");
     WriteTrashFile("c/c/c", "1\nc\n3\n");
     git.Add().AddFilepattern("a").AddFilepattern("b").AddFilepattern("c/c/c").Call();
     RevCommit initialCommit = git.Commit().SetMessage("initial").Call();
     // Side branch edits "a" and "b".
     CreateBranch(initialCommit, "refs/heads/side");
     CheckoutBranch("refs/heads/side");
     WriteTrashFile("a", "1\na(side)\n3\n");
     WriteTrashFile("b", "1\nb(side)\n3\n");
     git.Add().AddFilepattern("a").AddFilepattern("b").Call();
     RevCommit secondCommit = git.Commit().SetMessage("side").Call();
     NUnit.Framework.Assert.AreEqual("1\nb(side)\n3\n", Read(new FilePath(db.WorkTree,
         "b")));
     // Master edits "a" and "c/c/c", so "a" will conflict on merge.
     CheckoutBranch("refs/heads/master");
     NUnit.Framework.Assert.AreEqual("1\nb\n3\n", Read(new FilePath(db.WorkTree, "b"))
         );
     WriteTrashFile("a", "1\na(main)\n3\n");
     WriteTrashFile("c/c/c", "1\nc(main)\n3\n");
     git.Add().AddFilepattern("a").AddFilepattern("c/c/c").Call();
     git.Commit().SetMessage("main").Call();
     // Non-versioned content created just before the merge.
     WriteTrashFile("d", "1\nd\n3\n");
     NUnit.Framework.Assert.IsTrue(new FilePath(db.WorkTree, "e").Mkdir());
     MergeCommandResult result = git.Merge().Include(secondCommit.Id).SetStrategy(MergeStrategy
         .RESOLVE).Call();
     NUnit.Framework.Assert.AreEqual(MergeStatus.CONFLICTING, result.GetMergeStatus());
     NUnit.Framework.Assert.AreEqual("1\n<<<<<<< HEAD\na(main)\n=======\na(side)\n>>>>>>> 86503e7e397465588cc267b65d778538bffccb83\n3\n"
         , Read(new FilePath(db.WorkTree, "a")));
     NUnit.Framework.Assert.AreEqual("1\nb(side)\n3\n", Read(new FilePath(db.WorkTree,
         "b")));
     NUnit.Framework.Assert.AreEqual("1\nc(main)\n3\n", Read(new FilePath(db.WorkTree,
         "c/c/c")));
     NUnit.Framework.Assert.AreEqual("1\nd\n3\n", Read(new FilePath(db.WorkTree, "d"))
         );
     // The untracked directory must still exist after the merge.
     FilePath dir = new FilePath(db.WorkTree, "e");
     NUnit.Framework.Assert.IsTrue(dir.IsDirectory());
     NUnit.Framework.Assert.AreEqual(1, result.GetConflicts().Count);
     NUnit.Framework.Assert.AreEqual(3, result.GetConflicts().Get("a")[0].Length);
     NUnit.Framework.Assert.AreEqual(RepositoryState.MERGING, db.GetRepositoryState());
 }
Example #12
0
 /// <summary>
 /// Performs the checkout computed by the pre-scan: resolves or reports
 /// conflicts, deletes removed paths (children before their parents),
 /// writes updated entries, and commits the rebuilt index.
 /// </summary>
 /// <returns>true when every scheduled deletion succeeded immediately.</returns>
 /// <exception cref="NGit.Errors.CorruptObjectException"></exception>
 /// <exception cref="System.IO.IOException"></exception>
 /// <exception cref="NGit.Errors.MissingObjectException"></exception>
 /// <exception cref="NGit.Errors.IncorrectObjectTypeException"></exception>
 /// <exception cref="NGit.Errors.CheckoutConflictException"></exception>
 /// <exception cref="NGit.Errors.IndexWriteException"></exception>
 private bool DoCheckout()
 {
     toBeDeleted.Clear();
     ObjectReader objectReader = repo.ObjectDatabase.NewReader();
     try
     {
         // Pick the scan variant depending on whether a HEAD tree exists.
         if (headCommitTree != null)
         {
             PreScanTwoTrees();
         }
         else
         {
             PrescanOneTree();
         }
         if (!conflicts.IsEmpty())
         {
             if (failOnConflict)
             {
                 throw new NGit.Errors.CheckoutConflictException(Sharpen.Collections.ToArray(conflicts
                     , new string[conflicts.Count]));
             }
             else
             {
                 CleanUpConflicts();
             }
         }
         // update our index
         builder.Finish();
         FilePath file = null;
         string last = string.Empty;
         // when deleting files process them in the opposite order as they have
         // been reported. This ensures the files are deleted before we delete
         // their parent folders
         for (int i = removed.Count - 1; i >= 0; i--)
         {
             string r = removed[i];
             file = new FilePath(repo.WorkTree, r);
             if (!file.Delete() && file.Exists())
             {
                 // The list of stuff to delete comes from the index
                 // which will only contain a directory if it is
                 // a submodule, in which case we shall not attempt
                 // to delete it. A submodule is not empty, so it
                 // is safe to check this after a failed delete.
                 if (!file.IsDirectory())
                 {
                     toBeDeleted.AddItem(r);
                 }
             }
             else
             {
                 // Deletion succeeded (or the file was already gone); prune
                 // now-empty parent directories when leaving a path prefix.
                 if (!IsSamePrefix(r, last))
                 {
                     RemoveEmptyParents(new FilePath(repo.WorkTree, last));
                 }
                 last = r;
             }
         }
         if (file != null)
         {
             RemoveEmptyParents(file);
         }
         foreach (string path in updated.Keys)
         {
             // ... create/overwrite this file ...
             file = new FilePath(repo.WorkTree, path);
             if (!file.GetParentFile().Mkdirs())
             {
             }
             // ignore
             DirCacheEntry entry = dc.GetEntry(path);
             // submodules are handled with separate operations
             if (FileMode.GITLINK.Equals(entry.RawMode))
             {
                 continue;
             }
             CheckoutEntry(repo, file, entry, objectReader);
         }
         // commit the index builder - a new index is persisted
         if (!builder.Commit())
         {
             throw new IndexWriteException();
         }
     }
     finally
     {
         objectReader.Release();
     }
     return toBeDeleted.Count == 0;
 }
 // Oh well. They don't have a known hosts in home.
 /// <summary>
 /// Loads the user's default SSH private keys (~/.ssh/identity, id_rsa,
 /// id_dsa) into the given JSch instance, when a home directory exists.
 /// </summary>
 private static void Identities(JSch sch, FS fs)
 {
     FilePath home = fs.UserHome();
     if (home == null)
     {
         // No home directory - nothing to load.
         return;
     }
     FilePath sshDir = new FilePath(home, ".ssh");
     if (!sshDir.IsDirectory())
     {
         return;
     }
     foreach (string keyName in new string[] { "identity", "id_rsa", "id_dsa" })
     {
         LoadIdentity(sch, new FilePath(sshDir, keyName));
     }
 }
Example #14
0
			/// <summary>Accepts every directory except those named "CVS".</summary>
			public bool Accept(FilePath pathname)
			{
				if (!pathname.IsDirectory())
				{
					return false;
				}
				return !pathname.GetName().Equals("CVS");
			}
Example #15
0
 /// <summary>
 /// Creates the directory named by <paramref name="d"/>, including any
 /// necessary but nonexistent parent directories.
 /// </summary>
 /// <remarks>
 /// Even when this operation fails it may have succeeded in creating some
 /// of the necessary parent directories.
 /// </remarks>
 /// <param name="d">directory to be created</param>
 /// <param name="skipExisting">
 /// when <code>true</code>, silently succeed if <code>d</code> already
 /// exists in the file system
 /// </param>
 /// <exception cref="System.IO.IOException">
 /// if creation of <code>d</code> fails, e.g. because it already existed
 /// when the method was called. This can therefore cause IOExceptions
 /// during race conditions when multiple concurrent threads all try to
 /// create the same directory.
 /// </exception>
 public static void Mkdirs(FilePath d, bool skipExisting)
 {
     if (d.Mkdirs())
     {
         return;
     }
     // Creation failed; tolerate a pre-existing directory when asked to.
     if (skipExisting && d.IsDirectory())
     {
         return;
     }
     throw new IOException(MessageFormat.Format(JGitText.Get().mkDirsFailed, d.GetAbsolutePath()));
 }
		/// <summary>
		/// Records conflicts caused by an untracked work-tree file or directory
		/// that occupies a path needed by the checkout.
		/// </summary>
		private void CheckConflictsWithFile(FilePath file)
		{
			if (file.IsDirectory())
			{
				// A directory is in the way: every file beneath it conflicts.
				AList<string> childFiles = ListFiles(file);
				Sharpen.Collections.AddAll(conflicts, childFiles);
			}
			else
			{
				// Walk up towards the work-tree root looking for the nearest
				// existing ancestor; a plain file there blocks creating the path.
				FilePath parent = file.GetParentFile();
				while (!parent.Equals(root))
				{
					if (parent.IsDirectory())
					{
						// Ancestors up to here exist as directories - no conflict.
						break;
					}
					if (parent.IsFile())
					{
						// A file blocks the ancestor path: record it as a conflict.
						conflicts.AddItem(Repository.StripWorkDir(root, parent));
						break;
					}
					parent = parent.GetParentFile();
				}
			}
		}
Example #17
0
			/// <summary>
			/// Captures the git file mode of <paramref name="f"/> at construction
			/// time: GITLINK for submodule directories, TREE for plain directories,
			/// EXECUTABLE_FILE or REGULAR_FILE otherwise.
			/// </summary>
			internal FileEntry(FilePath f, FS fs)
			{
				file = f;
				if (!f.IsDirectory())
				{
					// Regular entry: the executable bit decides the mode.
					mode = fs.CanExecute(file) ? FileMode.EXECUTABLE_FILE : FileMode.REGULAR_FILE;
				}
				else if (new FilePath(f, Constants.DOT_GIT).Exists())
				{
					// A directory containing .git is a submodule link.
					mode = FileMode.GITLINK;
				}
				else
				{
					mode = FileMode.TREE;
				}
			}
Example #18
0
		/// <summary>
		/// FileUtils.Mkdir must create a directory, fail on an already-existing
		/// one unless skipExisting is set, and fail when a file occupies the path.
		/// </summary>
		public virtual void TestMkdir()
		{
			FilePath d = new FilePath(trash, "test");
			FileUtils.Mkdir(d);
			NUnit.Framework.Assert.IsTrue(d.Exists() && d.IsDirectory());
			try
			{
				// A second creation of the same directory must throw.
				FileUtils.Mkdir(d);
				NUnit.Framework.Assert.Fail("creation of existing directory must fail");
			}
			catch (IOException)
			{
			}
			// expected
			// With skipExisting=true the existing directory is tolerated.
			FileUtils.Mkdir(d, true);
			NUnit.Framework.Assert.IsTrue(d.Exists() && d.IsDirectory());
			NUnit.Framework.Assert.IsTrue(d.Delete());
			FilePath f = new FilePath(trash, "test");
			FileUtils.CreateNewFile(f);
			try
			{
				// A plain file now occupies the path; Mkdir must refuse.
				FileUtils.Mkdir(d);
				NUnit.Framework.Assert.Fail("creation of directory having same path as existing file must"
					 + " fail");
			}
			catch (IOException)
			{
			}
			// expected
			NUnit.Framework.Assert.IsTrue(f.Delete());
		}
		/// <summary>Recursively copies a file or directory tree from src to dest.</summary>
		/// <exception cref="System.IO.IOException"></exception>
		public static void CopyFolder(FilePath src, FilePath dest)
		{
			if (!src.IsDirectory())
			{
				// Plain file: delegate to the single-file copy.
				CopyFile(src, dest);
				return;
			}
			// Create the target directory when it does not exist yet.
			if (!dest.Exists())
			{
				dest.Mkdir();
			}
			// Copy every child entry, recursing into subdirectories.
			string[] childNames = src.List();
			foreach (string childName in childNames)
			{
				CopyFolder(new FilePath(src, childName), new FilePath(dest, childName));
			}
		}
Example #20
0
		/// <summary>
		/// FileUtils.Mkdirs must create nested directories, fail on an existing
		/// hierarchy unless skipExisting is set, and fail when a file occupies
		/// part of the path.
		/// </summary>
		public virtual void TestMkdirs()
		{
			FilePath root = new FilePath(trash, "test");
			NUnit.Framework.Assert.IsTrue(root.Mkdir());
			FilePath d = new FilePath(root, "test/test");
			FileUtils.Mkdirs(d);
			NUnit.Framework.Assert.IsTrue(d.Exists() && d.IsDirectory());
			try
			{
				// Re-creating an existing hierarchy must throw.
				FileUtils.Mkdirs(d);
				NUnit.Framework.Assert.Fail("creation of existing directory hierarchy must fail");
			}
			catch (IOException)
			{
			}
			// expected
			// With skipExisting=true the existing hierarchy is tolerated.
			FileUtils.Mkdirs(d, true);
			NUnit.Framework.Assert.IsTrue(d.Exists() && d.IsDirectory());
			FileUtils.Delete(root, FileUtils.RECURSIVE);
			FilePath f = new FilePath(trash, "test");
			FileUtils.CreateNewFile(f);
			try
			{
				// A plain file blocks the leading path segment; Mkdirs must refuse.
				FileUtils.Mkdirs(d);
				NUnit.Framework.Assert.Fail("creation of directory having path conflicting with existing"
					 + " file must fail");
			}
			catch (IOException)
			{
			}
			// expected
			NUnit.Framework.Assert.IsTrue(f.Delete());
		}
Example #21
0
		/// <summary>Delete file or folder</summary>
		/// <param name="f">
		/// <code>File</code>
		/// to be deleted
		/// </param>
		/// <param name="options">
		/// deletion options,
		/// <code>RECURSIVE</code>
		/// for recursive deletion of
		/// a subtree,
		/// <code>RETRY</code>
		/// to retry when deletion failed.
		/// Retrying may help if the underlying file system doesn't allow
		/// deletion of files being read by another thread.
		/// </param>
		/// <exception cref="System.IO.IOException">
		/// if deletion of
		/// <code>f</code>
		/// fails. This may occur if
		/// <code>f</code>
		/// didn't exist when the method was called. This can therefore
		/// cause IOExceptions during race conditions when multiple
		/// concurrent threads all try to delete the same file.
		/// </exception>
		public static void Delete(FilePath f, int options)
		{
			// Silently skip nonexistent paths when SKIP_MISSING is requested.
			if ((options & SKIP_MISSING) != 0 && !f.Exists())
			{
				return;
			}
			// Depth-first removal: delete children before the directory itself.
			if ((options & RECURSIVE) != 0 && f.IsDirectory())
			{
				FilePath[] items = f.ListFiles();
				if (items != null)
				{
					foreach (FilePath c in items)
					{
						Delete(c, options);
					}
				}
			}
			if (!f.Delete())
			{
				// With RETRY, poll up to nine more times (100 ms apart) in case
				// another thread is briefly holding the file open.
				if ((options & RETRY) != 0 && f.Exists())
				{
					for (int i = 1; i < 10; i++)
					{
						try
						{
							Sharpen.Thread.Sleep(100);
						}
						catch (Exception)
						{
						}
						// ignore
						if (f.Delete())
						{
							return;
						}
					}
				}
				throw new IOException(MessageFormat.Format(JGitText.Get().deleteFileFailed, f.GetAbsolutePath
					()));
			}
		}
Example #22
0
		/// <summary>
		/// Downloads one or more remote files matched by <paramref name="src"/>
		/// (which may contain glob characters) into the local path
		/// <paramref name="dst"/>, a file or a directory.
		/// </summary>
		/// <param name="src">remote source path, expanded via remote globbing</param>
		/// <param name="dst">local destination file or directory</param>
		/// <param name="monitor">optional progress monitor, may be null</param>
		/// <param name="mode">transfer mode, e.g. OVERWRITE or RESUME</param>
		/// <exception cref="NSch.SftpException"></exception>
		public virtual void Get(string src, string dst, SftpProgressMonitor monitor, int 
			mode)
		{
			// System.out.println("get: "+src+" "+dst);
			src = RemoteAbsolutePath(src);
			dst = LocalAbsolutePath(dst);
			try
			{
				// Expand the remote glob; at least one match is required.
				ArrayList v = Glob_remote(src);
				int vsize = v.Count;
				if (vsize == 0)
				{
					throw new SftpException(SSH_FX_NO_SUCH_FILE, "No such file");
				}
				FilePath dstFile = new FilePath(dst);
				bool isDstDir = dstFile.IsDirectory();
				StringBuilder dstsb = null;
				if (isDstDir)
				{
					// Destination directory: per-file target paths are built below.
					if (!dst.EndsWith(file_separator))
					{
						dst += file_separator;
					}
					dstsb = new StringBuilder(dst);
				}
				else
				{
					// A single non-directory destination cannot receive multiple files.
					if (vsize > 1)
					{
						throw new SftpException(SSH_FX_FAILURE, "Copying multiple files, but destination is missing or a file."
							);
					}
				}
				for (int j = 0; j < vsize; j++)
				{
					string _src = (string)(v[j]);
					SftpATTRS attr = _stat(_src);
					if (attr.IsDir())
					{
						throw new SftpException(SSH_FX_FAILURE, "not supported to get directory " + _src);
					}
					string _dst = null;
					if (isDstDir)
					{
						// Append the remote basename to the directory path, then trim
						// the builder back so it can be reused for the next file.
						int i = _src.LastIndexOf('/');
						if (i == -1)
						{
							dstsb.Append(_src);
						}
						else
						{
							dstsb.Append(Sharpen.Runtime.Substring(_src, i + 1));
						}
						_dst = dstsb.ToString();
						dstsb.Delete(dst.Length, _dst.Length);
					}
					else
					{
						_dst = dst;
					}
					if (mode == RESUME)
					{
						// Resuming only makes sense while the local copy is smaller.
						long size_of_src = attr.GetSize();
						long size_of_dst = new FilePath(_dst).Length();
						if (size_of_dst > size_of_src)
						{
							throw new SftpException(SSH_FX_FAILURE, "failed to resume for " + _dst);
						}
						if (size_of_dst == size_of_src)
						{
							return;
						}
					}
					if (monitor != null)
					{
						monitor.Init(SftpProgressMonitor.GET, _src, _dst, attr.GetSize());
						if (mode == RESUME)
						{
							// Account for the bytes that already exist locally.
							monitor.Count(new FilePath(_dst).Length());
						}
					}
					FileOutputStream fos = null;
					try
					{
						if (mode == OVERWRITE)
						{
							fos = new FileOutputStream(_dst);
						}
						else
						{
							fos = new FileOutputStream(_dst, true);
						}
						// append
						// System.err.println("_get: "+_src+", "+_dst);
						_get(_src, fos, monitor, mode, new FilePath(_dst).Length());
					}
					finally
					{
						if (fos != null)
						{
							fos.Close();
						}
					}
				}
			}
			catch (Exception e)
			{
				if (e is SftpException)
				{
					throw (SftpException)e;
				}
				// NOTE(review): 'e' is declared as Exception, so this check is
				// always true and the final throw below is unreachable - a leftover
				// from Java's Throwable/Exception split. Confirm before removing.
				if (e is Exception)
				{
					throw new SftpException(SSH_FX_FAILURE, string.Empty, (Exception)e);
				}
				throw new SftpException(SSH_FX_FAILURE, string.Empty);
			}
		}
		/// <summary>
		/// Ensures directory <paramref name="f"/> exists, deleting a blocking
		/// ancestor file and retrying once if necessary.
		/// </summary>
		/// <exception cref="System.IO.IOException"></exception>
		private void CreateDir(FilePath f)
		{
			if (!f.IsDirectory() && !f.Mkdirs())
			{
				// Mkdirs failed: walk up to the nearest existing ancestor.
				FilePath p = f;
				while (p != null && !p.Exists())
				{
					p = p.GetParentFile();
				}
				if (p == null || p.IsDirectory())
				{
					// Nothing obviously in the way, yet creation failed - give up.
					throw new IOException(JGitText.Get().cannotCreateDirectory);
				}
				// A plain file blocks the path: remove it and retry once.
				FileUtils.Delete(p);
				if (!f.Mkdirs())
				{
					throw new IOException(JGitText.Get().cannotCreateDirectory);
				}
			}
		}
Example #24
0
 /// <summary>Creates a Manager rooted at the given directory.</summary>
 /// <param name="directoryFile">Directory to hold the databases; created when missing.</param>
 /// <param name="options">Manager options, or null to use <c>DefaultOptions</c>.</param>
 /// <exception cref="IOException">Thrown when the storage directory cannot be created.</exception>
 public Manager(FilePath directoryFile, ManagerOptions options)
 {
     this.directoryFile = directoryFile;
     this.options = (options != null) ? options : DefaultOptions;
     this.databases = new Dictionary<string, Database>();
     this.replications = new AList<Replication>();
     directoryFile.Mkdirs();
     if (!directoryFile.IsDirectory())
     {
         // BUG FIX: string.Format uses {0} placeholders, not Java's %s; the
         // previous message rendered a literal "%s" and never showed the path.
         throw new IOException(string.Format("Unable to create directory for: {0}", directoryFile));
     }
     UpgradeOldDatabaseFiles(directoryFile);
     workExecutor = Executors.NewSingleThreadScheduledExecutor();
 }