/// <summary>Locking the dir cache when no index exists creates only the lock file and releasing removes it.</summary>
public virtual void TestLockMissing_RealIndex()
{
    var indexFile = new FilePath(db.Directory, "index");
    var lockFile = new FilePath(db.Directory, "index.lock");
    NUnit.Framework.Assert.IsFalse(indexFile.Exists());
    NUnit.Framework.Assert.IsFalse(lockFile.Exists());
    DirCache cache = db.LockDirCache();
    NUnit.Framework.Assert.IsNotNull(cache);
    // Taking the lock must create the lock file but not the index itself.
    NUnit.Framework.Assert.IsFalse(indexFile.Exists());
    NUnit.Framework.Assert.IsTrue(lockFile.Exists());
    NUnit.Framework.Assert.AreEqual(0, cache.GetEntryCount());
    cache.Unlock();
    // Releasing the lock must remove the lock file again.
    NUnit.Framework.Assert.IsFalse(indexFile.Exists());
    NUnit.Framework.Assert.IsFalse(lockFile.Exists());
}
/// <summary>Copies the contents of <paramref name="sourceFile"/> into <paramref name="destFile"/>.</summary>
/// <param name="sourceFile">existing file to read from</param>
/// <param name="destFile">target file; created if missing, overwritten otherwise</param>
/// <exception cref="System.IO.IOException">if the copy fails</exception>
public static void CopyFile(FilePath sourceFile, FilePath destFile)
{
    if (!destFile.Exists())
    {
        destFile.CreateNewFile();
    }
    FileChannel source = null;
    FileChannel destination = null;
    try
    {
        source = new FileInputStream(sourceFile).GetChannel();
        destination = new FileOutputStream(destFile).GetChannel();
        destination.TransferFrom(source, 0, source.Size());
    }
    finally
    {
        // Close both channels even when closing the first one throws; the
        // original leaked 'destination' whenever 'source.Close()' failed.
        try
        {
            if (source != null)
            {
                source.Close();
            }
        }
        finally
        {
            if (destination != null)
            {
                destination.Close();
            }
        }
    }
}
/// <summary>Deleting a file works once; deleting it again fails unless SKIP_MISSING is given.</summary>
public virtual void TestDeleteFile()
{
    FilePath target = new FilePath(trash, "test");
    FileUtils.CreateNewFile(target);
    FileUtils.Delete(target);
    NUnit.Framework.Assert.IsFalse(target.Exists());
    try
    {
        FileUtils.Delete(target);
        NUnit.Framework.Assert.Fail("deletion of non-existing file must fail");
    }
    catch (IOException)
    {
        // expected
    }
    try
    {
        // SKIP_MISSING must turn the second deletion into a no-op.
        FileUtils.Delete(target, FileUtils.SKIP_MISSING);
    }
    catch (IOException)
    {
        NUnit.Framework.Assert.Fail("deletion of non-existing file must not fail with option SKIP_MISSING");
    }
}
/// <summary>Reading the dir cache when no index file exists yields an empty, non-null cache.</summary>
public virtual void TestReadMissing_RealIndex()
{
    FilePath indexFile = new FilePath(db.Directory, "index");
    NUnit.Framework.Assert.IsFalse(indexFile.Exists());
    DirCache cache = db.ReadDirCache();
    NUnit.Framework.Assert.IsNotNull(cache);
    NUnit.Framework.Assert.AreEqual(0, cache.GetEntryCount());
}
/// <summary>Create a FileDocument backed by an existing file.</summary>
/// <param name="file">the file to wrap; must exist on disk</param>
public FileDocument(FilePath file)
{
    if (!file.Exists())
    {
        // Include the offending path so the failure is diagnosable;
        // the original message gave no hint which file was missing.
        throw new RuntimeException("File Not Found: " + file);
    }
    this.file = file;
}
/// <summary>Reading a non-existing temporary index file yields an empty, non-null cache.</summary>
public virtual void TestReadMissing_TempIndex()
{
    FilePath tempIndex = new FilePath(db.Directory, "tmp_index");
    NUnit.Framework.Assert.IsFalse(tempIndex.Exists());
    DirCache cache = DirCache.Read(tempIndex, db.FileSystem);
    NUnit.Framework.Assert.IsNotNull(cache);
    NUnit.Framework.Assert.AreEqual(0, cache.GetEntryCount());
}
/// <summary>Recursively deletes a file or directory tree.</summary>
/// <param name="fileOrDirectory">root of the tree to remove</param>
/// <returns>true if the item is gone afterwards (deleted here or already missing)</returns>
public static bool DeleteRecursive(FilePath fileOrDirectory)
{
    if (fileOrDirectory.IsDirectory())
    {
        // ListFiles() can return null (e.g. when the directory vanishes
        // concurrently or an I/O error occurs); guard to avoid a
        // NullReferenceException, as the Delete() helper in this file does.
        FilePath[] children = fileOrDirectory.ListFiles();
        if (children != null)
        {
            foreach (FilePath child in children)
            {
                DeleteRecursive(child);
            }
        }
    }
    bool result = fileOrDirectory.Delete() || !fileOrDirectory.Exists();
    return result;
}
/// <summary>Init must succeed in a directory that already contains files.</summary>
public virtual void TestInitNonEmptyRepository()
{
    FilePath workDir = CreateTempDirectory("testInitRepository2");
    FilePath existingFile = new FilePath(workDir, "someFile");
    existingFile.CreateNewFile();
    NUnit.Framework.Assert.IsTrue(existingFile.Exists());
    NUnit.Framework.Assert.IsTrue(workDir.ListFiles().Length > 0);
    InitCommand init = new InitCommand();
    init.SetDirectory(workDir);
    Repository repository = init.Call().GetRepository();
    AddRepoToClose(repository);
    NUnit.Framework.Assert.IsNotNull(repository);
}
/// <summary>
/// Verifies that starting a Manager migrates old-suffix database files to the new
/// suffix, except when a file with the migrated name already exists.
/// </summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestUpgradeOldDatabaseFiles()
{
    string directoryName = "test-directory-" + Runtime.CurrentTimeMillis();
    string normalFilesDir = GetRootDirectory().GetAbsolutePath();
    // System.String.Format uses composite "{0}" placeholders; the original
    // "%s" patterns were emitted literally, producing bogus paths/names.
    string fakeFilesDir = string.Format("{0}/{1}", normalFilesDir, directoryName);
    FilePath directory = new FilePath(fakeFilesDir);
    if (!directory.Exists())
    {
        bool result = directory.Mkdir();
        if (!result)
        {
            throw new IOException("Unable to create directory " + directory);
        }
    }
    FilePath oldTouchDbFile = new FilePath(directory, string.Format("old{0}", Manager.DatabaseSuffixOld));
    oldTouchDbFile.CreateNewFile();
    FilePath newCbLiteFile = new FilePath(directory, string.Format("new{0}", Manager.DatabaseSuffix));
    newCbLiteFile.CreateNewFile();
    FilePath migratedOldFile = new FilePath(directory, string.Format("old{0}", Manager.DatabaseSuffix));
    migratedOldFile.CreateNewFile();
    base.StopCBLite();
    manager = new Manager(new FilePath(GetRootDirectory(), directoryName), Manager.DefaultOptions);
    // Cannot rename the old-suffix file: the migrated name already exists.
    NUnit.Framework.Assert.IsTrue(migratedOldFile.Exists());
    NUnit.Framework.Assert.IsTrue(oldTouchDbFile.Exists());
    NUnit.Framework.Assert.IsTrue(newCbLiteFile.Exists());
    FilePath dir = new FilePath(GetRootDirectory(), directoryName);
    NUnit.Framework.Assert.AreEqual(3, dir.ListFiles().Length);
    base.StopCBLite();
    migratedOldFile.Delete();
    manager = new Manager(new FilePath(GetRootDirectory(), directoryName), Manager.DefaultOptions);
    // Now the rename succeeds because the migrated name no longer exists.
    NUnit.Framework.Assert.IsTrue(migratedOldFile.Exists());
    NUnit.Framework.Assert.IsTrue(oldTouchDbFile.Exists() == false);
    NUnit.Framework.Assert.IsTrue(newCbLiteFile.Exists());
    dir = new FilePath(GetRootDirectory(), directoryName);
    NUnit.Framework.Assert.AreEqual(2, dir.ListFiles().Length);
}
/// <summary>Creates a blob store rooted at <paramref name="path"/>, opening an existing store or creating a new one.</summary>
/// <param name="path">directory that holds (or will hold) the blobs; must not be null</param>
/// <param name="encryptionKey">optional key; when creating a brand-new store the directory is marked encrypted</param>
public BlobStore(string path, SymmetricKey encryptionKey)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    _path = path;
    EncryptionKey = encryptionKey;
    FilePath directory = new FilePath(path);
    bool isExistingStore = directory.Exists() && directory.IsDirectory();
    if (isExistingStore)
    {
        // Existing blob-store on disk: validate it.
        VerifyExistingStore();
        return;
    }
    // New blob store; create the backing directory.
    directory.Mkdirs();
    if (!directory.IsDirectory())
    {
        throw new InvalidOperationException(string.Format("Unable to create directory for: {0}", directory));
    }
    if (encryptionKey != null)
    {
        MarkEncrypted(true);
    }
}
/// <summary>Loads the user's global git configuration (~/.gitconfig).</summary>
/// <returns>the configured user name/e-mail, or null when the file, the name, or the e-mail is missing</returns>
public GrobalConfigEntity LoadGrobalConfig()
{
    FilePath gitconfig = new FilePath(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".gitconfig");
    if (!gitconfig.Exists())
    {
        return null;
    }
    FileBasedConfig config = new FileBasedConfig(gitconfig, FS.Detect());
    config.Load();
    // (A large block of commented-out section-dumping debug code was removed here.)
    GrobalConfigEntity result = new GrobalConfigEntity();
    result.EMail = config.GetString("user", null, "email");
    result.Name = config.GetString("user", null, "name");
    // Both values are required; treat a partial configuration as absent.
    if (result.EMail == null || result.Name == null)
    {
        return null;
    }
    return result;
}
/// <summary>Delete file or folder</summary>
/// <param name="f">
/// <code>File</code>
/// to be deleted
/// </param>
/// <param name="options">
/// deletion options,
/// <code>RECURSIVE</code>
/// for recursive deletion of
/// a subtree,
/// <code>RETRY</code>
/// to retry when deletion failed.
/// Retrying may help if the underlying file system doesn't allow
/// deletion of files being read by another thread.
/// </param>
/// <exception cref="System.IO.IOException">
/// if deletion of
/// <code>f</code>
/// fails. This may occur if
/// <code>f</code>
/// didn't exist when the method was called. This can therefore
/// cause IOExceptions during race conditions when multiple
/// concurrent threads all try to delete the same file.
/// </exception>
public static void Delete(FilePath f, int options)
{
    // Tolerate a missing target only when the caller asked for it.
    if ((options & SKIP_MISSING) != 0 && !f.Exists())
    {
        return;
    }
    if ((options & RECURSIVE) != 0 && f.IsDirectory())
    {
        // ListFiles() may return null (e.g. concurrent removal); skip then.
        FilePath[] items = f.ListFiles();
        if (items != null)
        {
            foreach (FilePath c in items)
            {
                Delete(c, options);
            }
        }
    }
    if (!f.Delete())
    {
        if ((options & RETRY) != 0 && f.Exists())
        {
            // Retry up to nine times, pausing 100 ms between attempts, to
            // cope with file systems that refuse to delete open files.
            for (int i = 1; i < 10; i++)
            {
                try
                {
                    Sharpen.Thread.Sleep(100);
                }
                catch (Exception)
                {
                }
                // ignore
                if (f.Delete())
                {
                    return;
                }
            }
        }
        throw new IOException(MessageFormat.Format(JGitText.Get().deleteFileFailed, f.GetAbsolutePath()));
    }
}
/// <summary>Persists the given configuration to the application config file as JSON.</summary>
/// <param name="config">configuration to save</param>
public void SaveConfig(ConfigEntity config)
{
    FilePath path = new FilePath(MainWindowModel.ApplicationDataPath);
    if (!path.Exists())
    {
        path.Mkdir();
    }
    string json = JsonConvert.SerializeObject(config, new Newtonsoft.Json.Converters.StringEnumConverter());
    // 'using' guarantees the writer is flushed and closed even when Write
    // throws; the original leaked the stream on failure.
    using (StreamWriter sw = new StreamWriter(MainWindowModel.ConfigFile, false, Encoding.UTF8))
    {
        sw.Write(json);
    }
}
/// <summary>Merging a branch that removed two folders must produce a MERGED (non-fast-forward) result with the folders gone.</summary>
public virtual void TestMergeRemovingFoldersWithoutFastForward()
{
    FilePath folder1 = new FilePath(db.WorkTree, "folder1");
    FilePath folder2 = new FilePath(db.WorkTree, "folder2");
    FileUtils.Mkdir(folder1);
    FileUtils.Mkdir(folder2);
    // Populate both folders with two files each and commit as the base.
    Write(new FilePath(folder1, "file1.txt"), "folder1--file1.txt");
    Write(new FilePath(folder1, "file2.txt"), "folder1--file2.txt");
    Write(new FilePath(folder2, "file1.txt"), "folder--file1.txt");
    Write(new FilePath(folder2, "file2.txt"), "folder2--file2.txt");
    Git git = new Git(db);
    git.Add().AddFilepattern(folder1.GetName()).AddFilepattern(folder2.GetName()).Call();
    RevCommit baseCommit = git.Commit().SetMessage("adding folders").Call();
    // On a side line of history, delete both folders entirely.
    RecursiveDelete(folder1);
    RecursiveDelete(folder2);
    git.Rm().AddFilepattern("folder1/file1.txt").AddFilepattern("folder1/file2.txt").AddFilepattern("folder2/file1.txt").AddFilepattern("folder2/file2.txt").Call();
    RevCommit removalCommit = git.Commit().SetMessage("removing folders on 'branch'").Call();
    // Diverge from the base commit so the merge cannot fast-forward.
    git.Checkout().SetName(baseCommit.Name).Call();
    Write(new FilePath(folder2, "file3.txt"), "folder2--file3.txt");
    git.Add().AddFilepattern(folder2.GetName()).Call();
    git.Commit().SetMessage("adding another file").Call();
    MergeCommandResult result = git.Merge().Include(removalCommit.Id).SetStrategy(MergeStrategy.RESOLVE).Call();
    NUnit.Framework.Assert.AreEqual(MergeStatus.MERGED, result.GetMergeStatus());
    NUnit.Framework.Assert.IsFalse(folder1.Exists());
}
/// <summary>
/// Deletes the <see cref="Couchbase.Lite.Database" />.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException">
/// Thrown if an issue occurs while deleting the <see cref="Couchbase.Lite.Database" /></exception>
public void Delete()
{
    if (_isOpen && !Close())
    {
        throw new CouchbaseLiteException("The database was open, and could not be closed", StatusCode.InternalServerError);
    }
    Manager.ForgetDatabase(this);
    if (!Exists())
    {
        return;
    }
    var file = new FilePath(Path);
    // The SQLite journal lives next to the database file (Path + "-journal");
    // the original built it from AttachmentStorePath, which is the attachment
    // directory, not the database file (cf. the sibling Delete() implementation).
    var fileJournal = new FilePath(Path + "-journal");
    var deleteStatus = file.Delete();
    if (fileJournal.Exists())
    {
        deleteStatus &= fileJournal.Delete();
    }
    // Recursively delete the attachments directory.
    var attachmentsFile = new FilePath(AttachmentStorePath);
    var deleteAttachmentStatus = FileDirUtils.DeleteRecursive(attachmentsFile);
    // Also remove the folder the attachments live under, derived from the
    // database path with its extension stripped.
    var lastDotPosition = Path.LastIndexOf('.');
    if (lastDotPosition > 0)
    {
        var attachmentsFileUpFolder = new FilePath(Path.Substring(0, lastDotPosition));
        FileDirUtils.DeleteRecursive(attachmentsFileUpFolder);
    }
    if (!deleteStatus)
    {
        // Merged the two identical '!deleteStatus' checks from the original:
        // log the failing path, then surface the failure to the caller.
        Log.V(Tag, String.Format("Error deleting the SQLite database file at {0}", file.GetAbsolutePath()));
        throw new CouchbaseLiteException("Was not able to delete the database file", StatusCode.InternalServerError);
    }
    if (!deleteAttachmentStatus)
    {
        throw new CouchbaseLiteException("Was not able to delete the attachments files", StatusCode.InternalServerError);
    }
}
/// <summary>Extracted EXIF thumbnail data can be written out to disk as a JPEG file.</summary>
public virtual void TestWriteThumbnail()
{
    ExifThumbnailDirectory directory = ExifReaderTest.ProcessBytes<ExifThumbnailDirectory>("Tests/Data/manuallyAddedThumbnail.jpg.app1");
    Sharpen.Tests.IsTrue(directory.HasThumbnailData());
    FilePath tempFile = FilePath.CreateTempFile("thumbnail", ".jpg");
    try
    {
        directory.WriteThumbnail(tempFile.GetAbsolutePath());
        FilePath written = new FilePath(tempFile.GetAbsolutePath());
        // The reference thumbnail in the fixture is exactly 2970 bytes long.
        Sharpen.Tests.AreEqual(2970, written.Length());
        Sharpen.Tests.IsTrue(written.Exists());
    }
    finally
    {
        if (!tempFile.Delete())
        {
            NUnit.Framework.Assert.Fail("Unable to delete temp thumbnail file.");
        }
    }
}
/// <summary>Recursive deletion removes a tree; repeating it fails unless SKIP_MISSING is set.</summary>
public virtual void TestDeleteRecursive()
{
    FilePath fileA = new FilePath(trash, "test/test/a");
    FileUtils.Mkdirs(fileA.GetParentFile());
    FileUtils.CreateNewFile(fileA);
    FilePath fileB = new FilePath(trash, "test/test/b");
    FileUtils.CreateNewFile(fileB);
    FilePath root = new FilePath(trash, "test");
    FileUtils.Delete(root, FileUtils.RECURSIVE);
    NUnit.Framework.Assert.IsFalse(root.Exists());
    try
    {
        FileUtils.Delete(root, FileUtils.RECURSIVE);
        NUnit.Framework.Assert.Fail("recursive deletion of non-existing directory must fail");
    }
    catch (IOException)
    {
        // expected
    }
    try
    {
        // Combining SKIP_MISSING must make the repeated deletion a no-op.
        FileUtils.Delete(root, FileUtils.RECURSIVE | FileUtils.SKIP_MISSING);
    }
    catch (IOException)
    {
        NUnit.Framework.Assert.Fail("recursive deletion of non-existing directory must not fail with option SKIP_MISSING");
    }
}
/// <summary>Recursively copies a file or directory tree from <paramref name="src"/> to <paramref name="dest"/>.</summary>
/// <param name="src">source file or directory</param>
/// <param name="dest">destination path; missing directories are created</param>
/// <exception cref="System.IO.IOException"></exception>
public static void CopyFolder(FilePath src, FilePath dest)
{
    if (!src.IsDirectory())
    {
        // Plain file: delegate to the single-file copy.
        CopyFile(src, dest);
        return;
    }
    // If the destination directory does not exist, create it.
    if (!dest.Exists())
    {
        dest.Mkdir();
    }
    // List() can return null on I/O errors; guard to avoid a
    // NullReferenceException on the foreach below.
    string[] files = src.List();
    if (files == null)
    {
        return;
    }
    foreach (string file in files)
    {
        // Construct the matching source/destination entries and recurse.
        FilePath srcFile = new FilePath(src, file);
        FilePath destFile = new FilePath(dest, file);
        CopyFolder(srcFile, destFile);
    }
}
/// <exception cref="NGit.Errors.CorruptObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException"></exception>
/// <exception cref="NGit.Errors.CheckoutConflictException"></exception>
/// <exception cref="NGit.Errors.IndexWriteException"></exception>
private bool DoCheckout()
{
    // Performs the checkout: scans the trees, handles conflicts, deletes
    // removed files (children before their parent folders), writes updated
    // files, and commits the new index. Returns true when every scheduled
    // deletion succeeded (toBeDeleted stayed empty).
    toBeDeleted.Clear();
    ObjectReader objectReader = repo.ObjectDatabase.NewReader();
    try
    {
        // Two-tree scan when we have a HEAD commit tree, one-tree otherwise.
        if (headCommitTree != null)
        {
            PreScanTwoTrees();
        }
        else
        {
            PrescanOneTree();
        }
        if (!conflicts.IsEmpty())
        {
            if (failOnConflict)
            {
                throw new NGit.Errors.CheckoutConflictException(Sharpen.Collections.ToArray(conflicts, new string[conflicts.Count]));
            }
            else
            {
                CleanUpConflicts();
            }
        }
        // update our index
        builder.Finish();
        FilePath file = null;
        string last = string.Empty;
        // when deleting files process them in the opposite order as they have
        // been reported. This ensures the files are deleted before we delete
        // their parent folders
        for (int i = removed.Count - 1; i >= 0; i--)
        {
            string r = removed[i];
            file = new FilePath(repo.WorkTree, r);
            if (!file.Delete() && file.Exists())
            {
                // The list of stuff to delete comes from the index
                // which will only contain a directory if it is
                // a submodule, in which case we shall not attempt
                // to delete it. A submodule is not empty, so it
                // is safe to check this after a failed delete.
                if (!file.IsDirectory())
                {
                    toBeDeleted.AddItem(r);
                }
            }
            else
            {
                // Only prune empty parents once per prefix change.
                if (!IsSamePrefix(r, last))
                {
                    RemoveEmptyParents(new FilePath(repo.WorkTree, last));
                }
                last = r;
            }
        }
        if (file != null)
        {
            RemoveEmptyParents(file);
        }
        foreach (string path in updated.Keys)
        {
            // ... create/overwrite this file ...
            file = new FilePath(repo.WorkTree, path);
            if (!file.GetParentFile().Mkdirs())
            {
            }
            // ignore
            DirCacheEntry entry = dc.GetEntry(path);
            // submodules are handled with separate operations
            if (FileMode.GITLINK.Equals(entry.RawMode))
            {
                continue;
            }
            CheckoutEntry(repo, file, entry, objectReader);
        }
        // commit the index builder - a new index is persisted
        if (!builder.Commit())
        {
            throw new IndexWriteException();
        }
    }
    finally
    {
        objectReader.Release();
    }
    return toBeDeleted.Count == 0;
}
/// <summary>Mkdirs creates hierarchies, tolerates existing ones only via the flag, and fails on file conflicts.</summary>
public virtual void TestMkdirs()
{
    FilePath root = new FilePath(trash, "test");
    NUnit.Framework.Assert.IsTrue(root.Mkdir());
    FilePath deep = new FilePath(root, "test/test");
    FileUtils.Mkdirs(deep);
    NUnit.Framework.Assert.IsTrue(deep.Exists() && deep.IsDirectory());
    try
    {
        FileUtils.Mkdirs(deep);
        NUnit.Framework.Assert.Fail("creation of existing directory hierarchy must fail");
    }
    catch (IOException)
    {
        // expected
    }
    // The overload with the boolean flag must tolerate the existing hierarchy.
    FileUtils.Mkdirs(deep, true);
    NUnit.Framework.Assert.IsTrue(deep.Exists() && deep.IsDirectory());
    FileUtils.Delete(root, FileUtils.RECURSIVE);
    // A plain file now blocks the path that Mkdirs would need as a directory.
    FilePath blocker = new FilePath(trash, "test");
    FileUtils.CreateNewFile(blocker);
    try
    {
        FileUtils.Mkdirs(deep);
        NUnit.Framework.Assert.Fail("creation of directory having path conflicting with existing" + " file must fail");
    }
    catch (IOException)
    {
        // expected
    }
    NUnit.Framework.Assert.IsTrue(blocker.Delete());
}
/// <summary>Mkdir creates a directory, tolerates an existing one only via the flag, and fails on file conflicts.</summary>
public virtual void TestMkdir()
{
    FilePath dir = new FilePath(trash, "test");
    FileUtils.Mkdir(dir);
    NUnit.Framework.Assert.IsTrue(dir.Exists() && dir.IsDirectory());
    try
    {
        FileUtils.Mkdir(dir);
        NUnit.Framework.Assert.Fail("creation of existing directory must fail");
    }
    catch (IOException)
    {
        // expected
    }
    // The overload with the boolean flag must tolerate the existing directory.
    FileUtils.Mkdir(dir, true);
    NUnit.Framework.Assert.IsTrue(dir.Exists() && dir.IsDirectory());
    NUnit.Framework.Assert.IsTrue(dir.Delete());
    // A plain file now occupies the path Mkdir would need.
    FilePath blocker = new FilePath(trash, "test");
    FileUtils.CreateNewFile(blocker);
    try
    {
        FileUtils.Mkdir(dir);
        NUnit.Framework.Assert.Fail("creation of directory having same path as existing file must" + " fail");
    }
    catch (IOException)
    {
        // expected
    }
    NUnit.Framework.Assert.IsTrue(blocker.Delete());
}
/// <summary>Deletes the item at <paramref name="path"/> if present.</summary>
/// <param name="path">file-system path of the item to remove</param>
/// <returns>true when the item no longer exists (deleted now, or was never there)</returns>
public static bool RemoveItemIfExists(string path)
{
    var target = new FilePath(path);
    if (target.Delete())
    {
        return true;
    }
    // Delete failed: report success only if the item is actually absent.
    return !target.Exists();
}
/// <summary>Visits one index/tree entry pair during checkout pre-scan.</summary>
/// <exception cref="System.IO.IOException"></exception>
public override void VisitEntry(TreeEntry m, GitIndex.Entry i, FilePath file)
{
    if (m == null)
    {
        // Entry exists only in the index: if it is still on disk,
        // schedule it for removal and clear any recorded conflict.
        if (file.Exists())
        {
            this._enclosing.removed.AddItem(i.GetName());
            this._enclosing.conflicts.Remove(i.GetName());
        }
        return;
    }
    // Entry exists in the tree: anything on disk that is not a plain
    // file needs a conflict check.
    if (!file.IsFile())
    {
        this._enclosing.CheckConflictsWithFile(file);
    }
}
/// <summary>Loads the script at the given URL.</summary>
/// <remarks>
/// Loads the script at the given URL. A source URL without a scheme is first
/// tried as a local file (with "~/" expanded to the user's home directory);
/// when no such file exists the URL is retried over HTTP. Returns null and
/// prints to stderr when loading fails with an IOException.
/// </remarks>
private string LoadSource(string sourceUrl)
{
    string source = null;
    // Strip any fragment ("#...") before resolving the URL.
    int hash = sourceUrl.IndexOf('#');
    if (hash >= 0)
    {
        sourceUrl = Sharpen.Runtime.Substring(sourceUrl, 0, hash);
    }
    try
    {
        Stream @is;
        if (sourceUrl.IndexOf(':') < 0)
        {
            // Can be a file name
            try
            {
                if (sourceUrl.StartsWith("~/"))
                {
                    string home = SecurityUtilities.GetSystemProperty("user.home");
                    if (home != null)
                    {
                        string pathFromHome = Sharpen.Runtime.Substring(sourceUrl, 2);
                        FilePath f = new FilePath(new FilePath(home), pathFromHome);
                        if (f.Exists())
                        {
                            @is = new FileInputStream(f);
                            // Jump past the URL-opening fallback below.
                            goto openStream_break;
                        }
                    }
                }
                FilePath f_1 = new FilePath(sourceUrl);
                if (f_1.Exists())
                {
                    @is = new FileInputStream(f_1);
                    goto openStream_break;
                }
            }
            catch (SecurityException)
            {
            }
            // No existing file, assume missed http://
            if (sourceUrl.StartsWith("//"))
            {
                sourceUrl = "http:" + sourceUrl;
            }
            else
            {
                if (sourceUrl.StartsWith("/"))
                {
                    sourceUrl = "http://127.0.0.1" + sourceUrl;
                }
                else
                {
                    sourceUrl = "http://" + sourceUrl;
                }
            }
        }
        @is = (new Uri(sourceUrl)).OpenStream();
openStream_break: ;
        try
        {
            source = Kit.ReadReader(new StreamReader(@is));
        }
        finally
        {
            // Always close the stream, whether it came from a file or a URL.
            @is.Close();
        }
    }
    catch (IOException ex)
    {
        System.Console.Error.WriteLine("Failed to load source from " + sourceUrl + ": " + ex);
    }
    return source;
}
/// <summary>Reads the application configuration from the config file.</summary>
/// <returns>the stored configuration, or a fresh default when no config file exists</returns>
public ConfigEntity OpenConfig()
{
    FilePath file = new FilePath(MainWindowModel.ConfigFile);
    if (file.Exists())
    {
        // 'using' ensures the reader is closed even if reading or
        // deserialization throws; the original leaked the stream on failure
        // (and contained a stray empty statement).
        using (StreamReader sr = new StreamReader(MainWindowModel.ConfigFile, Encoding.UTF8))
        {
            string json = sr.ReadToEnd();
            return JsonConvert.DeserializeObject<ConfigEntity>(json);
        }
    }
    return new ConfigEntity();
}
/// <summary>
/// Asserts the working directory contains exactly the files in <paramref name="i"/>
/// and that each file's content equals the expected string.
/// </summary>
/// <param name="i">map of repository-relative path to expected file content</param>
/// <exception cref="NGit.Errors.CorruptObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public virtual void AssertWorkDir(Dictionary<string, string> i)
{
    TreeWalk walk = new TreeWalk(db);
    walk.Recursive = true;
    walk.AddTree(new FileTreeIterator(db));
    string expectedValue;
    string path;
    int nrFiles = 0;
    FileTreeIterator ft;
    while (walk.Next())
    {
        ft = walk.GetTree<FileTreeIterator>(0);
        path = ft.EntryPathString;
        expectedValue = i.Get(path);
        NUnit.Framework.Assert.IsNotNull(expectedValue, "found unexpected file for path " + path + " in workdir");
        FilePath file = new FilePath(db.WorkTree, path);
        NUnit.Framework.Assert.IsTrue(file.Exists());
        if (file.IsFile())
        {
            FileInputStream @is = new FileInputStream(file);
            byte[] buffer = new byte[(int)file.Length()];
            try
            {
                // Read the whole file; Read may return fewer bytes than asked for.
                int offset = 0;
                int numRead = 0;
                while (offset < buffer.Length && (numRead = @is.Read(buffer, offset, buffer.Length - offset)) >= 0)
                {
                    offset += numRead;
                }
            }
            finally
            {
                // Close even when reading throws; the original leaked the
                // stream on the exception path.
                @is.Close();
            }
            CollectionAssert.AreEqual(buffer, Sharpen.Runtime.GetBytesForString(i.Get(path)), "unexpected content for path " + path + " in workDir. ");
            nrFiles++;
        }
    }
    NUnit.Framework.Assert.AreEqual(i.Count, nrFiles, "WorkDir has not the right size.");
}
/// <summary>
/// Renames databases that still use the old filename suffix to the new suffix,
/// skipping any whose target name already exists.
/// </summary>
/// <param name="directory">directory to scan for old-suffix database files</param>
private void UpgradeOldDatabaseFiles(FilePath directory)
{
    // ListFiles(filter) can return null when 'directory' is not a directory
    // or an I/O error occurs; guard to avoid a NullReferenceException.
    FilePath[] files = directory.ListFiles(new _FilenameFilter_330());
    if (files == null)
    {
        return;
    }
    foreach (FilePath file in files)
    {
        string oldFilename = file.GetName();
        string newFilename = FilenameWithNewExtension(oldFilename, DatabaseSuffixOld, DatabaseSuffix);
        FilePath newFile = new FilePath(directory, newFilename);
        if (newFile.Exists())
        {
            // NOTE(review): assumes Log.W performs printf-style "%s"
            // substitution itself — confirm against its implementation.
            Log.W(Database.Tag, "Cannot rename %s to %s, %s already exists", oldFilename, newFilename, newFilename);
            continue;
        }
        bool ok = file.RenameTo(newFile);
        if (!ok)
        {
            // System.String.Format uses composite "{0}" placeholders; the
            // original "%s" was emitted literally into the exception message.
            string msg = string.Format("Unable to rename {0} to {1}", oldFilename, newFilename);
            throw new InvalidOperationException(msg);
        }
    }
}
/// <summary>
/// Maps a fully-qualified class name to its ".class" output file beneath
/// <paramref name="parentDir"/>, creating any missing intermediate directories.
/// </summary>
/// <param name="parentDir">root output directory</param>
/// <param name="className">dotted class name, e.g. "a.b.C"</param>
/// <returns>the output file location</returns>
private FilePath GetOutputFile(FilePath parentDir, string className)
{
    // "a.b.C" -> "a/b/C.class" (using the platform separator).
    string relativePath = className.Replace('.', FilePath.separatorChar) + ".class";
    FilePath outputFile = new FilePath(parentDir, relativePath);
    string parentPath = outputFile.GetParent();
    if (parentPath != null)
    {
        FilePath outputDir = new FilePath(parentPath);
        if (!outputDir.Exists())
        {
            outputDir.Mkdirs();
        }
    }
    return outputFile;
}
/// <summary>
/// Deletes the <see cref="Couchbase.Lite.Database" />.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException">
/// Thrown if an issue occurs while deleting the <see cref="Couchbase.Lite.Database" /></exception>
public void Delete()
{
    if (_isOpen && !Close())
    {
        throw new CouchbaseLiteException("The database was open, and could not be closed", StatusCode.InternalServerError);
    }
    Manager.ForgetDatabase(this);
    if (!Exists())
    {
        return;
    }
    // Remove the main database file plus any SQLite side files that exist.
    var databaseFile = new FilePath(Path);
    var journalFile = new FilePath(Path + "-journal");
    var walFile = new FilePath(Path + "-wal");
    var shmFile = new FilePath(Path + "-shm");
    var deleteStatus = databaseFile.Delete();
    if (journalFile.Exists())
    {
        deleteStatus &= journalFile.Delete();
    }
    if (walFile.Exists())
    {
        deleteStatus &= walFile.Delete();
    }
    if (shmFile.Exists())
    {
        deleteStatus &= shmFile.Delete();
    }
    // Recursively delete the attachments directory.
    var attachmentsFolder = new FilePath(AttachmentStorePath);
    var deleteAttachmentStatus = FileDirUtils.DeleteRecursive(attachmentsFolder);
    if (!deleteStatus)
    {
        Log.W(TAG, "Error deleting the SQLite database file at {0}", databaseFile.GetAbsolutePath());
        throw new CouchbaseLiteException("Was not able to delete the database file", StatusCode.InternalServerError);
    }
    if (!deleteAttachmentStatus)
    {
        Log.W(TAG, "Error deleting the attachment files file at {0}", attachmentsFolder.GetAbsolutePath());
        throw new CouchbaseLiteException("Was not able to delete the attachments files", StatusCode.InternalServerError);
    }
}
/// <summary>Creating a file succeeds once; creating the same path again must fail.</summary>
/// <exception cref="System.IO.IOException"></exception>
public virtual void TestCreateNewFile()
{
    FilePath newFile = new FilePath(trash, "x");
    FileUtils.CreateNewFile(newFile);
    NUnit.Framework.Assert.IsTrue(newFile.Exists());
    try
    {
        FileUtils.CreateNewFile(newFile);
        NUnit.Framework.Assert.Fail("creation of already existing file must fail");
    }
    catch (IOException)
    {
        // expected
    }
    FileUtils.Delete(newFile);
}