/// <summary>
/// Walks the <paramref name="children"/> path segments starting at <paramref name="directory"/>,
/// descending through sub-directories and transparently entering ".zip" files as directories.
/// </summary>
/// <param name="directory">The directory to resolve from.</param>
/// <param name="children">Remaining path segments, in order; empty means "here".</param>
/// <returns>The resolved <see cref="Directory"/>.</returns>
/// <exception cref="InvalidOperationException">
/// A segment matches neither a sub-directory nor a zip file in the current directory.
/// </exception>
Directory ResolveDirectory(Directory directory, IEnumerable<string> children)
{
    if (!children.Any())
        return directory;

    string childName = children.First();

    // Ordinal case-insensitive match. The original string.Compare(..., ignoreCase: true)
    // was culture-sensitive (CA1309) and misbehaves under e.g. the Turkish locale.
    Directory info = directory.GetDirectories()
        .Where(x => string.Equals(x.Name.GetName(), childName, StringComparison.OrdinalIgnoreCase))
        .SingleOrDefault();
    if (info != null)
    {
        return ResolveDirectory(info, children.Skip(1));
    }

    File file = directory.GetFiles()
        .Where(x => string.Equals(x.Name.GetName(), childName, StringComparison.OrdinalIgnoreCase))
        .SingleOrDefault();
    if (file == null)
        throw new InvalidOperationException("Could not get directory: " + childName);

    // A zip archive can stand in for a directory: open it and keep resolving inside.
    // Extension check is now case-insensitive so "FOO.ZIP" is handled too.
    if (string.Equals(Path.GetExtension(file.Name.GetName()), ".zip", StringComparison.OrdinalIgnoreCase))
    {
        var zipFileDirectory = new ZipFileDirectory(file.Name.Name);
        return ResolveDirectory(zipFileDirectory, children.Skip(1));
    }

    throw new InvalidOperationException("Could not resolve the rest of the path: " + childName);
}
/// <summary>
/// Create an AzureDirectory
/// </summary>
/// <param name="storageAccount">storage account to use</param>
/// <param name="containerName">name of container (folder in blob storage); defaults to "lucene"</param>
/// <param name="cacheDirectory">local Directory object to use for local cache</param>
/// <param name="compressBlobs">whether blobs should be compressed</param>
/// <param name="rootFolder">path of the root folder inside the container</param>
public AzureDirectory(
    CloudStorageAccount storageAccount,
    string containerName = null,
    Directory cacheDirectory = null,
    bool compressBlobs = false,
    string rootFolder = null)
{
    if (storageAccount == null)
        throw new ArgumentNullException(nameof(storageAccount));

    // Azure container names must be lower-case; use the invariant culture so the
    // result does not depend on the current thread's locale (e.g. Turkish 'I').
    if (string.IsNullOrEmpty(containerName))
        _containerName = "lucene";
    else
        _containerName = containerName.ToLowerInvariant();

    // Normalize the root folder to "segment/" form (empty string means container root).
    if (string.IsNullOrEmpty(rootFolder))
        _rootFolder = string.Empty;
    else
    {
        rootFolder = rootFolder.Trim('/');
        _rootFolder = rootFolder + "/";
    }

    _blobClient = storageAccount.CreateCloudBlobClient();
    _initCacheDirectory(cacheDirectory);
    this.CompressBlobs = compressBlobs;
}
/// <summary>
/// Recursively builds the in-memory tree under <paramref name="rootDirectory"/>:
/// wraps each file (attaching TagLib metadata where available) and descends into
/// every sub-directory.
/// </summary>
/// <param name="rootDirectory">The directory whose contents are scanned.</param>
private void ReadDirectoryStructure(Directory rootDirectory)
{
    List<File> files = new List<File>();
    foreach (var f in rootDirectory.infos.EnumerateFiles())
    {
        File file = new File(f);
        try
        {
            file.file = TagLib.File.Create(file.infos.FullName);
            // Image files carry no media tags of interest here; drop the TagLib handle.
            if (file.file is TagLib.Image.File)
                file.file = null;
        }
        catch (TagLib.UnsupportedFormatException)
        {
            // Not a format TagLib understands — keep the file, without metadata.
            file.file = null;
        }
        files.Add(file);
    }
    List<Directory> directories = new List<Directory>();
    foreach (var f in rootDirectory.infos.EnumerateDirectories())
        directories.Add(new Directory(f));
    rootDirectory.files = files;
    rootDirectory.directories = directories;
    // Depth-first recursion into every child directory.
    foreach (var directory in directories)
    {
        ReadDirectoryStructure(directory);
    }
}
/// <summary>
/// One-time fixture teardown: releases the shared directory and clears the
/// static state created during fixture setup.
/// </summary>
public static void AfterClass()
{
    Dir.Dispose();
    Dir = null;
    TestDoc = null;
    FieldInfos = null;
}
/// <summary>
/// Concurrently populates two taxonomy indexes with up to <paramref name="ncats"/>
/// random categories (4 writer threads each), then merges the second into the
/// first via <c>AddTaxonomy</c> and validates the merged result.
/// </summary>
/// <param name="ncats">Total number of categories the threads may add (shared countdown).</param>
/// <param name="range">Range of random category values used by the writer threads.</param>
private void Dotest(int ncats, int range)
{
    // Shared countdown across all writer threads of both directories.
    AtomicInteger numCats = new AtomicInteger(ncats);
    Directory[] dirs = new Directory[2];
    for (int i = 0; i < dirs.Length; i++)
    {
        dirs[i] = NewDirectory();
        var tw = new DirectoryTaxonomyWriter(dirs[i]);
        ThreadClass[] addThreads = new ThreadClass[4];
        for (int j = 0; j < addThreads.Length; j++)
        {
            addThreads[j] = new ThreadAnonymousInnerClassHelper(this, range, numCats, tw);
        }
        foreach (ThreadClass t in addThreads)
        {
            t.Start();
        }
        // Wait for all writer threads before disposing the taxonomy writer.
        foreach (ThreadClass t in addThreads)
        {
            t.Join();
        }
        tw.Dispose();
    }
    var tw1 = new DirectoryTaxonomyWriter(dirs[0]);
    IOrdinalMap map = randomOrdinalMap();
    // Merge dirs[1] into dirs[0]; 'map' records how ordinals were remapped.
    tw1.AddTaxonomy(dirs[1], map);
    tw1.Dispose();
    validate(dirs[0], dirs[1], map);
    IOUtils.Close(dirs);
}
/// <summary>
/// Checks whether a certain sub-directory exists.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <returns>
/// <see langword="true" /> if the directory exists, <see langword="false" /> otherwise.
/// </returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" />
/// in Visual Basic).
/// </exception>
public bool Exists(string path)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    return FSDir.Exists(validPath);
}
/// <summary>
/// Gets a specific directory's last write time, in UTC.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <returns>
/// A <see cref="DateTime" /> in UTC.
/// </returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" />
/// in Visual Basic).
/// </exception>
public DateTime GetLastWriteTime(string path)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    return FSDir.GetLastWriteTimeUtc(validPath);
}
/// <summary>
/// Lists all the file-system entries contained at a certain directory.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <returns>
/// An array of file-system entry paths.
/// </returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" />
/// in Visual Basic).
/// </exception>
public string[] GetFileSystemEntries(string path)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    return FSDir.GetFileSystemEntries(validPath);
}
/// <summary>
/// Loads the children of the given Dropbox directory through the v1 metadata API.
/// </summary>
/// <param name="directory">The directory whose children to load.</param>
/// <param name="filter">Predicate applied to file paths; directories are always included.</param>
/// <param name="getDirectoryIcons">Unused here; icons come from the metadata response.</param>
/// <returns>
/// Directories first, then files, each sorted by file name; <see langword="null"/> when the
/// request fails.
/// </returns>
public List<File> LoadFiles(Directory directory, Func<string, bool> filter, bool getDirectoryIcons = true)
{
    RequestResult result = OAuthUtility.Get("https://api.dropbox.com/1/metadata/auto/", new HttpParameterCollection { { "path", directory.Path }, { "access_token", AccessToken } });

    // BUG FIX: the original returned null when the request *succeeded* and tried to
    // parse contents on failure. Bail out only when the request failed.
    if (!IsSuccess(result))
    {
        return null;
    }

    // Directories first, then by name within each group. (Replaces the original
    // OrderBy(...).OrderByDescending(...) chain, which only worked because LINQ's
    // OrderBy is a stable sort.)
    return result["contents"]
        .Where(i => Convert.ToBoolean(i["is_dir"]) || filter(i["path"].ToString()))
        .OrderByDescending(i => Convert.ToBoolean(i["is_dir"]))
        .ThenBy(i => Path.GetFileName(i["path"].ToString()))
        .Select(i => Convert.ToBoolean(i["is_dir"])
            ? new Directory(directory, i["path"].ToString(), Path.GetFileName(i["path"].ToString()), GetIcon(i))
            : new File(directory, i["path"].ToString(), Path.GetFileName(i["path"].ToString()), GetIcon(i)))
        .ToList();
}
/// <summary>
/// Creates a new directory.
/// </summary>
/// <param name="path">The path of the new directory.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" />
/// in Visual Basic).
/// </exception>
public void CreateDirectory(string path)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    _ = FSDir.CreateDirectory(validPath);
}
/// <summary>
/// Loads the files and sub-directories of a local file-system directory.
/// An empty path yields the list of fixed drives instead.
/// </summary>
/// <param name="directory">The parent directory node being expanded.</param>
/// <param name="filter">Predicate applied to file paths; directories are always included.</param>
/// <param name="getDirectoryIcons">Whether directory icons should be resolved.</param>
/// <returns>The child entries, in enumeration order.</returns>
public List<File> LoadFiles(Directory directory, Func<string, bool> filter, bool getDirectoryIcons = true)
{
    if (string.IsNullOrEmpty(directory.Path))
    {
        // No path: present the machine's fixed drives as top-level directories.
        return FileSystemHelper.GetFixedDrives()
            .Select(d => new Directory(directory, d.Name, d.Name, FileSystemHelper.GetImage(d.Name, IconSizeType.Small, imgSize)))
            .ToList<File>();
    }

    FileSystemEntryCollection collection = GetFileSystemEntries(directory.Path, IconSizeType.Small, imgSize, getDirectoryIcons);
    collection.ShowExtensions = true;

    var children = new List<File>();
    foreach (var entry in collection)
    {
        if (entry is DirectoryEntry)
        {
            children.Add(new Directory(directory, entry.Path, entry.Name, entry.Image));
        }
        else if (filter(entry.Path))
        {
            children.Add(new File(directory, entry.Path, entry.Name, entry.Image));
        }
    }

    return children;
}
/// <summary>
/// Assigns the content folder of a workshop item being updated.
/// </summary>
/// <param name="updateHandle">Handle of the in-progress item update.</param>
/// <param name="contentFolder">Folder containing the item's content; must exist.</param>
/// <returns>Whatever <c>Steam.SetItemContent</c> reports.</returns>
/// <exception cref="ArgumentException">The content folder does not exist.</exception>
public bool SetWorkshopItemContentFolder(ulong updateHandle, string contentFolder)
{
    if (!Directory.Exists(contentFolder))
    {
        // Include the parameter name so callers can tell which argument was bad.
        throw new ArgumentException("content folder doesn't exist", nameof(contentFolder));
    }

    return Steam.SetItemContent(updateHandle, Path.GetFullPath(contentFolder));
}
// NOTE(review): a large commented-out draft of an IORename helper was removed here;
// it duplicated logic now handled over FTP and was dead code.

/// <summary>
/// Creates a directory on the FTP server, relative to the current directory.
/// </summary>
/// <param name="current_directory">The directory the request is made against.</param>
/// <param name="dirname">Name of the directory to create.</param>
public static void CreateDirectory(Directory current_directory, string dirname)
{
    FtpWebRequest reqFTP = _MakeFtpWebRequest(current_directory, dirname);

    // Specify the command to be executed.
    reqFTP.Method = WebRequestMethods.Ftp.MakeDirectory;

    // Execute the command; 'using' guarantees the response is released even when
    // GetResponse() returns an error response object (the original leaked it on throw).
    using (FtpWebResponse response = (FtpWebResponse)reqFTP.GetResponse())
    {
        response.Close();
    }
}
/// <summary>
/// Recursively writes the files of <paramref name="CurrentDirectory"/> and all of its
/// sub-directories into the given zip output stream.
/// </summary>
/// <param name="RootDirectory">The root of the archive being built.</param>
/// <param name="CurrentDirectory">The directory currently being written.</param>
/// <param name="zStream">Stream receiving the zip entries.</param>
/// <remarks>
/// Do not call this method directly; it is invoked recursively from CompressDirectory.
/// </remarks>
private void ZipDirectory(Directory RootDirectory, Directory CurrentDirectory, ZipOutputStream zStream)
{
    foreach (var currentFile in CurrentDirectory.Files)
    {
        var entry = new ZipEntry(currentFile.FilePath);
        zStream.PutNextEntry(entry);
        zStream.Write(currentFile.FileContents, 0, currentFile.FileContents.Length);
    }

    foreach (var child in CurrentDirectory.Directories)
    {
        ZipDirectory(RootDirectory, child, zStream);
    }
}
/// <summary>
/// Sets the directory's last write time.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="lastWriteTime">A <see cref="DateTime" /> with the directory attribute to set.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" />
/// in Visual Basic).
/// </exception>
public void SetLastWriteTime(
    string path,
    DateTime lastWriteTime)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    FSDir.SetLastWriteTime(validPath, lastWriteTime);
}
/// <summary>
/// Sets the directory's creation time.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="creationTime">A <see cref="DateTime" /> with the directory attribute to set.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" />
/// in Visual Basic).
/// </exception>
public void SetCreationTime(
    string path,
    DateTime creationTime)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    FSDir.SetCreationTime(validPath, creationTime);
}
/// <summary>
/// Lists all the file-system entries contained at a certain directory with a specific search pattern.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="searchPattern">The search pattern to use.</param>
/// <returns>
/// An array of file-system entries paths.
/// </returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> or <paramref name="searchPattern" /> is
/// <see langword="null" /> (<see langword="Nothing" /> in Visual Basic).
/// </exception>
public string[] GetFileSystemEntries(
    string path,
    string searchPattern)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));
    var validPattern = Requires.NotNullOrWhiteSpace(searchPattern, nameof(searchPattern));

    return FSDir.GetFileSystemEntries(validPath, validPattern);
}
/// <summary>
/// Moves a directory to another location.
/// </summary>
/// <param name="sourceDirName">The source directory name.</param>
/// <param name="destDirName">The destination directory name.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="destDirName" /> or <paramref name="sourceDirName" /> is <see langword="null" /> (
/// <see langword="Nothing" /> in Visual Basic).
/// </exception>
public void Move(
    string sourceDirName,
    string destDirName)
{
    var validSource = Requires.NotNullOrWhiteSpace(sourceDirName, nameof(sourceDirName));
    var validDestination = Requires.NotNullOrWhiteSpace(destDirName, nameof(destDirName));

    FSDir.Move(validSource, validDestination);
}
/// <summary>
/// Asynchronously sets the directory's last write time.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="lastWriteTime">A <see cref="DateTime" /> with the directory attribute to set.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A task.</returns>
public Task SetLastWriteTimeAsync(
    string path,
    DateTime lastWriteTime,
    CancellationToken cancellationToken = default)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    // Offload the blocking file-system call to the thread pool; the tuple avoids
    // a capturing closure.
    return Work.OnThreadPoolAsync(
        state => FSDir.SetLastWriteTime(state.Path, state.LastWriteTime),
        (Path: validPath, LastWriteTime: lastWriteTime),
        cancellationToken);
}
/// <summary>
/// Compresses a directory using Zip compression into a specified archive file.
/// </summary>
/// <param name="ArchiveOutputLocation">The output path, including the archive file name.</param>
/// <param name="ArchiveDirectory">The Directory object to be compressed.</param>
/// <remarks>
/// Recurses into sub-directories via ZipDirectory.
/// </remarks>
public void CompressDirectory(string ArchiveOutputLocation, Directory ArchiveDirectory)
{
    using (ZipOutputStream zStream = new ZipOutputStream(System.IO.File.Create(ArchiveOutputLocation)))
    {
        // Top-level files go in first...
        foreach (var topLevelFile in ArchiveDirectory.Files)
        {
            var entry = new ZipEntry(topLevelFile.FullPath);
            zStream.PutNextEntry(entry);
            zStream.Write(topLevelFile.FileContents, 0, topLevelFile.FileContents.Length);
        }

        // ...then each sub-tree, recursively.
        foreach (var subDirectory in ArchiveDirectory.Directories)
        {
            ZipDirectory(ArchiveDirectory, subDirectory, zStream);
        }

        zStream.Finish();
        zStream.Close();
    }
}
/// <summary>
/// Lazily enumerates the sub-directories of <paramref name="path"/> matching
/// <paramref name="searchPattern"/>, optionally descending into sub-directories.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="searchPattern">The search pattern to use.</param>
/// <param name="recursively">Whether to search all sub-directories as well.</param>
/// <returns>A deferred enumeration of directory paths.</returns>
private static IEnumerable<string> EnumerateDirectoriesInternal(
    string path,
    string searchPattern,
    bool recursively)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));
    var validPattern = Requires.NotNullOrWhiteSpace(searchPattern, nameof(searchPattern));

    SearchOption option = recursively
        ? SearchOption.AllDirectories
        : SearchOption.TopDirectoryOnly;

    return FSDir.EnumerateDirectories(validPath, validPattern, option);
}
// This method decides what action to take based on the type of
// file we are looking at.
/// <summary>
/// Recursively walks a directory tree, parsing every ".java" file it finds.
/// </summary>
/// <param name="f">File or directory to process.</param>
public static void doFile(FileInfo f)
{
    // If this is a directory, walk each file/dir in that directory
    if (Directory.Exists(f.FullName))
    {
        // BUG FIX: Directory.GetFileSystemEntries already returns entries prefixed
        // with the queried path, so prepending f.FullName again produced invalid
        // paths. Use the returned entry directly.
        string[] files = Directory.GetFileSystemEntries(f.FullName);
        for (int i = 0; i < files.Length; i++)
        {
            doFile(new FileInfo(files[i]));
        }
    }
    else if ((f.Name.Length > 5) && f.Name.EndsWith(".java", StringComparison.Ordinal))
    {
        Console.Error.WriteLine(" " + f.FullName);
        // Dispose the stream once parsing finishes (the original leaked it).
        using (var stream = new FileStream(f.FullName, FileMode.Open, FileAccess.Read))
        {
            parseFile(f.Name, stream);
        }
    }
}
/// <summary>
/// Verifies that the content of a readable file node is materialized on disk
/// when the directory tree is created.
/// </summary>
public void CreateDirectory_Content_from_Readable_files_gets_written_to_disk()
{
    // Arrange
    var rootName = Path.GetRandomFileName();
    const string fileContent = "Hello World!";
    var expectedFilePath = Path.Combine(Path.GetTempPath(), rootName, s_File1);
    var directory = new Directory(rootName)
    {
        root => new TestReadableFile(root, s_File1, fileContent)
    };

    // Act
    m_Instance.CreateDirectory(directory, Path.GetTempPath());

    // Assert
    Assert.True(NativeFile.Exists(expectedFilePath));
    Assert.Equal(fileContent, NativeFile.ReadAllText(expectedFilePath));
}
/// <summary>
/// Verifies that a temporary directory exists after creation and is removed
/// again when its handle is disposed.
/// </summary>
public void CreateTemporaryDirectory_created_directory_gets_deleted_on_dispose()
{
    // Arrange
    var dirName = Path.GetRandomFileName();
    var expectedPath = Path.Combine(Path.GetTempPath(), dirName);
    var directory = new Directory(dirName);

    // Act: create the temporary directory.
    var createdDir = m_Instance.CreateTemporaryDirectory(directory);

    // Assert: it really exists on disk...
    Assert.True(NativeDirectory.Exists(expectedPath));

    // ...and disposing the handle removes it again.
    createdDir.Dispose();
    Assert.False(NativeDirectory.Exists(expectedPath));
}
/// <summary>
/// Creates a fresh index in <paramref name="dir"/> containing <paramref name="ndocs"/>
/// copies of a single-field document, then force-merges it to one segment.
/// </summary>
/// <param name="random">Randomness source forwarded to the analyzer helper.</param>
/// <param name="dir">Directory to write the index into (CREATE mode wipes it).</param>
/// <param name="ndocs">Number of documents to add.</param>
/// <param name="field">Name of the indexed field.</param>
/// <param name="val">Value stored in the field of every document.</param>
/// <param name="maxTF">Forwarded to the analyzer helper.</param>
/// <param name="percentDocs">Forwarded to the analyzer helper.</param>
internal virtual void AddDocs(Random random, Directory dir, int ndocs, string field, string val, int maxTF, float percentDocs)
{
    Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this, random, val, maxTF, percentDocs);
    Document doc = new Document();
    doc.Add(NewStringField(field, val, Field.Store.NO));
    // 'using' guarantees the writer is disposed even if AddDocument/ForceMerge
    // throws (the original leaked the writer on exception).
    using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode_e.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(100))))
    {
        for (int i = 0; i < ndocs; i++)
        {
            writer.AddDocument(doc);
        }
        writer.ForceMerge(1);
    }
}
/// <summary>
/// One-time fixture setup: builds the shared test document and its field infos,
/// then writes a single-document index into <c>Dir</c>.
/// </summary>
public static void BeforeClass()
{
    TestDoc = new Document();
    FieldInfos = new FieldInfos.Builder();
    DocHelper.SetupDoc(TestDoc);
    // Register every field of the test document with the FieldInfos builder.
    foreach (IndexableField field in TestDoc)
    {
        FieldInfos.AddOrUpdate(field.Name(), field.FieldType());
    }
    Dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy());
    // Ratio 0.0 => never use compound files, so individual index files remain visible.
    conf.MergePolicy.NoCFSRatio = 0.0;
    IndexWriter writer = new IndexWriter(Dir, conf);
    writer.AddDocument(TestDoc);
    writer.Dispose();
    // Start with fault injection switched off; individual tests turn it on.
    FaultyIndexInput.DoFail = false;
}
/// <summary>
/// Create an AzureDirectory
/// </summary>
/// <param name="storageAccount">storage account to use</param>
/// <param name="catalog">name of catalog (folder in blob storage); defaults to "lucene"</param>
/// <param name="cacheDirectory">local Directory object to use for local cache</param>
/// <param name="compressBlobs">whether blobs should be compressed</param>
public AzureDirectory(
    CloudStorageAccount storageAccount,
    string catalog = null,
    Directory cacheDirectory = null,
    bool compressBlobs = false)
{
    if (storageAccount == null)
        throw new ArgumentNullException(nameof(storageAccount));

    // Catalog names are lower-cased; use the invariant culture so the result does
    // not depend on the current thread's locale (e.g. Turkish 'I').
    if (string.IsNullOrEmpty(catalog))
        _catalog = "lucene";
    else
        _catalog = catalog.ToLowerInvariant();

    _blobClient = storageAccount.CreateCloudBlobClient();
    _initCacheDirectory(cacheDirectory);
    this.CompressBlobs = compressBlobs;
}
/// <summary>
/// Deletes a directory, optionally also doing a recursive delete.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="recursive">
/// If set to <see langword="true" />, does a recursive delete. This is <see langword="false" /> by
/// default.
/// </param>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" />
/// in Visual Basic).
/// </exception>
public void Delete(
    string path,
    bool recursive = false)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    if (recursive)
    {
        FSDir.Delete(validPath, true);
        return;
    }

    FSDir.Delete(validPath);
}
/// <summary>
/// Per-test setup: creates an index of <c>NumDocs</c> documents whose single
/// "field" is stored untokenized with term vectors, holding the English
/// spelling of the document number.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    Directory = NewDirectory();
    IndexWriter writer = new IndexWriter(Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
    //writer.setNoCFSRatio(0.0);
    //writer.infoStream = System.out;
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    // Keep each value as a single token and record term vectors for it.
    customType.Tokenized = false;
    customType.StoreTermVectors = true;
    for (int i = 0; i < NumDocs; i++)
    {
        Documents.Document doc = new Documents.Document();
        Field fld = NewField("field", English.IntToEnglish(i), customType);
        doc.Add(fld);
        writer.AddDocument(doc);
    }
    writer.Dispose();
}
/// <summary>
/// Asynchronously moves a directory to another location.
/// </summary>
/// <param name="sourceDirectoryName">The source directory name.</param>
/// <param name="destinationDirectoryName">The destination directory name.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A task.</returns>
public Task MoveAsync(
    string sourceDirectoryName,
    string destinationDirectoryName,
    CancellationToken cancellationToken = default)
{
    var validSource = Requires.NotNullOrWhiteSpace(sourceDirectoryName, nameof(sourceDirectoryName));
    var validDestination = Requires.NotNullOrWhiteSpace(destinationDirectoryName, nameof(destinationDirectoryName));

    // Offload the blocking move to the thread pool; the tuple avoids a capturing closure.
    return Work.OnThreadPoolAsync(
        state => FSDir.Move(state.Source, state.Destination),
        (Source: validSource, Destination: validDestination),
        cancellationToken);
}
/// <summary>
/// Scans the plugins directory for DLLs, instantiates every monster and level
/// generator each assembly exports, and lists the assemblies in the UI.
/// </summary>
private void PopulatePluginAssemblies()
{
    var list = this.GetNode(@"Panel/ItemList") as Godot.ItemList;
    if (!Directory.Exists(PluginsDirectoryName))
    {
        list.AddItem("Plugins directory doesn't exist.");
        return;
    }

    var allFiles = Directory.GetFiles(PluginsDirectoryName, "*.dll");
    foreach (var fileName in allFiles)
    {
        var assembly = Assembly.LoadFile(fileName);
        var data = new PluginData(fileName);

        // Monsters: accept any concrete subclass of AbstractMonster (the original
        // BaseType check missed deeper inheritance and crashed on abstract types).
        data.Monsters = new List<AbstractMonster>();
        var monsterTypes = assembly.GetTypes()
            .Where(t => t.IsSubclassOf(typeof(AbstractMonster)) && !t.IsAbstract);
        foreach (var monsterType in monsterTypes)
        {
            data.Monsters.Add((AbstractMonster)Activator.CreateInstance(monsterType));
        }

        // Level generators: match by the actual interface type rather than the
        // fragile Name.Contains("ILevelGenerator") string test.
        data.LevelGenerators = new List<ILevelGenerator>();
        var generatorTypes = assembly.GetTypes()
            .Where(t => typeof(ILevelGenerator).IsAssignableFrom(t) && !t.IsInterface && !t.IsAbstract);
        foreach (var generatorType in generatorTypes)
        {
            data.LevelGenerators.Add((ILevelGenerator)Activator.CreateInstance(generatorType));
        }

        list.AddItem(fileName);
        pluginData.Add(data);
    }
}
/// <summary>
/// Verifies that a nested directory tree (root with two children, one containing
/// an empty file) is fully materialized on disk.
/// </summary>
public void CreateDirectory()
{
    // Arrange
    var rootName = Path.GetRandomFileName();
    var directory = new Directory(rootName)
    {
        root => new Directory(root, s_Dir1)
        {
            dir1 => new EmptyFile(dir1, s_File1)
        },
        root => new Directory(root, s_Dir2)
    };

    // Act
    m_Instance.CreateDirectory(directory, Path.GetTempPath());

    // Assert
    var rootPath = Path.Combine(Path.GetTempPath(), rootName);
    Assert.True(NativeDirectory.Exists(rootPath));
    Assert.True(NativeDirectory.Exists(Path.Combine(rootPath, s_Dir1)));
    Assert.True(NativeFile.Exists(Path.Combine(rootPath, s_Dir1, s_File1)));
    Assert.True(NativeDirectory.Exists(Path.Combine(rootPath, s_Dir2)));
}
/// <summary>
/// Asynchronously enumerates the file-system entries of <paramref name="path"/>
/// matching <paramref name="searchPattern"/>, optionally recursing.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="searchPattern">The search pattern to use.</param>
/// <param name="recursively">Whether to search all sub-directories as well.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A task producing the enumeration.</returns>
private static Task<IEnumerable<string>> EnumerateFileSystemEntriesInternalAsync(
    string path,
    string searchPattern,
    bool recursively,
    CancellationToken cancellationToken)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));
    var validPattern = Requires.NotNullOrWhiteSpace(searchPattern, nameof(searchPattern));

    SearchOption recursionOption = recursively
        ? SearchOption.AllDirectories
        : SearchOption.TopDirectoryOnly;

    // Offload the blocking enumeration to the thread pool; the tuple avoids a
    // capturing closure.
    return Work.OnThreadPoolAsync(
        state => FSDir.EnumerateFileSystemEntries(state.Path, state.Pattern, state.Recursively),
        (Path: validPath, Pattern: validPattern, Recursively: recursionOption),
        cancellationToken);
}
/// <summary>
/// Initializes a transfer between a local file and a remote FTP file.
/// </summary>
/// <param name="transferStarter">Directory that initiated the transfer; supplies the FTP session.</param>
/// <param name="localFile">Path of the local file.</param>
/// <param name="remoteFile">Name of the remote file.</param>
/// <param name="totalBytes">Total number of bytes to transfer.</param>
/// <param name="dir">Whether this transfer is an upload or a download.</param>
internal FileTransferer(Directory transferStarter, string localFile, string remoteFile, long totalBytes, TransferDirection dir)
{
    m_transferStarter = transferStarter;
    m_transferDirection = dir;
    m_session = transferStarter.Session;
    m_localFile = localFile;
    m_remoteFile = remoteFile;
    m_totalBytes = totalBytes;
    if (dir == TransferDirection.Upload)
    {
        // Upload: read the existing local file and STOR it on the server.
        m_streamCopyRoutine = LocalToRemote;
        m_ftpFileCommandRoutine = m_session.ControlChannel.STOR;
        m_localFileOpenMode = FileMode.Open;
    }
    else
    {
        // Download: RETR from the server into a (re)created local file.
        m_streamCopyRoutine = RemoteToLocal;
        m_ftpFileCommandRoutine = m_session.ControlChannel.RETR;
        m_localFileOpenMode = FileMode.Create;
    }
}
/// <summary>
/// Loads directory and connection-string settings from an XML configuration file
/// into the static <c>Directories</c> and <c>ConnectionStrings</c> collections.
/// </summary>
/// <param name="filename">Path of the XML settings file to read.</param>
public static void Load(string filename)
{
    var nbiFile = new XmlDocument();
    nbiFile.Load(filename);
    // Indexing an empty XmlNodeList yields null, so a missing <directories> element is tolerated.
    var dirsNode = nbiFile.GetElementsByTagName("directories")[0];
    if (dirsNode != null)
    {
        if (dirsNode.Attributes["root"] != null)
            Directories.Root = dirsNode.Attributes["root"].Value;
        var dirNodes = nbiFile.GetElementsByTagName("directory");
        foreach (XmlNode dirNode in dirNodes)
        {
            // NOTE(review): assumes every <directory> element has a "key" attribute —
            // a missing one throws NullReferenceException here. Confirm against the schema.
            var key = BuildDirectoryKey(dirNode.Attributes["key"].Value);
            Directories[key] = new Directory(Directories);
            if (dirNode.Attributes["path"] != null)
                Directories[key].Path = dirNode.Attributes["path"].Value;
            if (dirNode.Attributes["file"] != null)
                Directories[key].File = dirNode.Attributes["file"].Value;
        }
    }
    var cssNode = nbiFile.GetElementsByTagName("connectionStrings")[0];
    if (cssNode != null)
    {
        foreach (XmlNode csNode in cssNode.ChildNodes)
        {
            // NOTE(review): same assumption — each child must carry a "key" attribute.
            var key = BuildConnectionStringKey(csNode.Name, csNode.Attributes["key"].Value);
            ConnectionStrings[key] = new ConnectionString();
            if (csNode.InnerXml != null)
                ConnectionStrings[key].Value = csNode.InnerXml;
        }
    }
}
/// <summary>
/// Asynchronously deletes a directory.
/// </summary>
/// <param name="path">The path of the directory.</param>
/// <param name="recursive"><see langword="true" /> if deletion should be recursive to all subdirectories.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A task.</returns>
public Task DeleteAsync(
    string path,
    bool recursive = false,
    CancellationToken cancellationToken = default)
{
    var validPath = Requires.NotNullOrWhiteSpace(path, nameof(path));

    if (!recursive)
    {
        // Non-recursive delete goes through the cached method-info shortcut.
        return ReferenceMethodInfoDelete.OnThreadPoolAsync(validPath, cancellationToken);
    }

    return Work.OnThreadPoolAsync(
        state => FSDir.Delete(state.Path, state.Recursive),
        (Path: validPath, Recursive: true),
        cancellationToken);
}
/// <summary>
/// Gets the current directory.
/// </summary>
/// <returns>The current directory.</returns>
public string GetCurrentDirectory()
{
    return FSDir.GetCurrentDirectory();
}
/// <summary>
/// Moves this directory (and its contents) to <paramref name="destDirName"/>,
/// demanding the required file-IO permissions on both source and destination
/// before calling the Win32 MoveFile API.
/// </summary>
/// <param name="destDirName">The destination directory path.</param>
/// <exception cref="ArgumentNullException"><paramref name="destDirName"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="destDirName"/> is empty.</exception>
/// <exception cref="IOException">Source equals destination, roots differ, or the Win32 move fails.</exception>
public void MoveTo(String destDirName)
{
    if (destDirName==null)
        throw new ArgumentNullException(nameof(destDirName));
    if (destDirName.Length==0)
        throw new ArgumentException(Environment.GetResourceString("Argument_EmptyFileName"), nameof(destDirName));
    Contract.EndContractBlock();

    // Demand read+write on the source directory tree.
#if FEATURE_CORECLR
    FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.Write | FileSecurityStateAccess.Read, DisplayPath, Directory.GetDemandDir(FullPath, true));
    sourceState.EnsureState();
#else
    new FileIOPermission(FileIOPermissionAccess.Write | FileIOPermissionAccess.Read, demandDir, false, false).Demand();
#endif
    String fullDestDirName = Path.GetFullPath(destDirName);
    String demandPath;
    // Normalize the destination to a trailing separator; demand on "dest\." to
    // cover the directory itself.
    if (!fullDestDirName.EndsWith(Path.DirectorySeparatorChar))
        fullDestDirName = fullDestDirName + Path.DirectorySeparatorChar;
    demandPath = fullDestDirName + '.';

    // Demand read & write permission to destination.  The reason is
    // we hand back a DirectoryInfo to the destination that would allow
    // you to read a directory listing from that directory.  Sure, you
    // had the ability to read the file contents in the old location,
    // but you technically also need read permissions to the new
    // location as well, and write is not a true superset of read.
#if FEATURE_CORECLR
    FileSecurityState destState = new FileSecurityState(FileSecurityStateAccess.Write, destDirName, demandPath);
    destState.EnsureState();
#else
    new FileIOPermission(FileIOPermissionAccess.Write | FileIOPermissionAccess.Read, demandPath).Demand();
#endif
    String fullSourcePath;
    if (FullPath.EndsWith(Path.DirectorySeparatorChar))
        fullSourcePath = FullPath;
    else
        fullSourcePath = FullPath + Path.DirectorySeparatorChar;

    // Source and destination must differ and live on the same root volume.
    if (String.Compare(fullSourcePath, fullDestDirName, StringComparison.OrdinalIgnoreCase) == 0)
        throw new IOException(Environment.GetResourceString("IO.IO_SourceDestMustBeDifferent"));

    String sourceRoot = Path.GetPathRoot(fullSourcePath);
    String destinationRoot = Path.GetPathRoot(fullDestDirName);
    if (String.Compare(sourceRoot, destinationRoot, StringComparison.OrdinalIgnoreCase) != 0)
        throw new IOException(Environment.GetResourceString("IO.IO_SourceDestMustHaveSameRoot"));

    if (!Win32Native.MoveFile(FullPath, destDirName))
    {
        int hr = Marshal.GetLastWin32Error();
        if (hr == Win32Native.ERROR_FILE_NOT_FOUND) // A dubious error code
        {
            hr = Win32Native.ERROR_PATH_NOT_FOUND;
            __Error.WinIOError(hr, DisplayPath);
        }

        if (hr == Win32Native.ERROR_ACCESS_DENIED) // We did this for Win9x. We can't change it for backcomp.
            throw new IOException(Environment.GetResourceString("UnauthorizedAccess_IODenied_Path", DisplayPath));

        __Error.WinIOError(hr,String.Empty);
    }
    // Point this instance at the new location.
    FullPath = fullDestDirName;
    OriginalPath = destDirName;
    DisplayPath = GetDisplayName(OriginalPath, FullPath);
    demandDir = new String[] { Directory.GetDemandDir(FullPath, true) };

    // Flush any cached information about the directory.
    _dataInitialised = -1;
}
/// <summary>
/// Opens an existing back-compat index in APPEND mode, adds 10 more documents,
/// verifies doc counts and search hits, then force-merges to one segment and
/// verifies again.
/// </summary>
/// <param name="random">Randomness source for the analyzer.</param>
/// <param name="dir">Directory holding the pre-built index.</param>
/// <param name="origOldName">Name of the original index version (drives the expected count).</param>
public virtual void ChangeIndexWithAdds(Random random, Directory dir, string origOldName)
{
    // open writer
    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode_e.APPEND).SetMergePolicy(NewLogMergePolicy()));
    // add 10 docs
    for (int i = 0; i < 10; i++)
    {
        AddDoc(writer, 35 + i);
    }
    // make sure writer sees right total -- writer seems not to know about deletes in .del?
    int expected;
    if (Compare(origOldName, "24") < 0)
    {
        expected = 44;
    }
    else
    {
        expected = 45;
    }
    Assert.AreEqual(expected, writer.NumDocs(), "wrong doc count");
    writer.Dispose();
    // make sure searching sees right # hits
    IndexReader reader = DirectoryReader.Open(dir);
    IndexSearcher searcher = NewSearcher(reader);
    ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Document d = searcher.IndexReader.Document(hits[0].Doc);
    assertEquals("wrong first document", "0", d.Get("id"));
    DoTestHits(hits, 44, searcher.IndexReader);
    reader.Dispose();
    // fully merge
    writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode_e.APPEND).SetMergePolicy(NewLogMergePolicy()));
    writer.ForceMerge(1);
    writer.Dispose();
    // re-verify the same hits after the merge
    reader = DirectoryReader.Open(dir);
    searcher = NewSearcher(reader);
    hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(44, hits.Length, "wrong number of hits");
    d = searcher.Doc(hits[0].Doc);
    DoTestHits(hits, 44, searcher.IndexReader);
    assertEquals("wrong first document", "0", d.Get("id"));
    reader.Dispose();
}
/// <summary>
/// Randomizes the use of some of the constructor variations of IndexUpgrader.
/// </summary>
/// <param name="dir">Directory containing the index to upgrade.</param>
/// <returns>An IndexUpgrader built via a randomly chosen constructor overload.</returns>
private IndexUpgrader NewIndexUpgrader(Directory dir)
{
    bool streamType = Random().NextBoolean();
    int choice = TestUtil.NextInt(Random(), 0, 2);

    if (choice == 0)
    {
        return new IndexUpgrader(dir, TEST_VERSION_CURRENT);
    }

    if (choice == 1)
    {
        return new IndexUpgrader(dir, TEST_VERSION_CURRENT, streamType ? null : Console.Error, false);
    }

    if (choice == 2)
    {
        return new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false);
    }

    Assert.Fail("case statement didn't get updated when random bounds changed");
    return null; // never get here
}
/// <summary>
/// Adds a directory entry for <paramref name="file"/> under <paramref name="name"/>,
/// generating DOS 8.3 short-name aliases when short names are enabled.
/// </summary>
/// <param name="file">The file to register.</param>
/// <param name="dir">The directory receiving the entry.</param>
/// <param name="name">The long name of the entry.</param>
/// <param name="options">Optional per-file override for short-name creation.</param>
/// <returns>The primary entry created for the file.</returns>
private DirectoryEntry AddFileToDirectory(File file, Directory dir, string name, NewFileOptions options)
{
    // Per-file option wins; otherwise fall back to the file-system default.
    bool createShortNames = (options != null && options.CreateShortNames.HasValue)
        ? options.CreateShortNames.Value
        : CreateShortNames;

    if (!createShortNames)
    {
        return dir.AddEntry(file, name, FileNameNamespace.Posix);
    }

    // A name that is already 8.3 serves as both the Win32 and DOS name.
    if (Utilities.Is8Dot3(name.ToUpperInvariant()))
    {
        return dir.AddEntry(file, name, FileNameNamespace.Win32AndDos);
    }

    // Otherwise add the long name plus a generated DOS alias.
    DirectoryEntry entry = dir.AddEntry(file, name, FileNameNamespace.Win32);
    dir.AddEntry(file, dir.CreateShortName(name), FileNameNamespace.Dos);
    return entry;
}
/// <summary>
/// Resolves a backslash-separated path to a directory entry, starting at <paramref name="dir"/>.
/// </summary>
/// <param name="dir">The directory to start from.</param>
/// <param name="path">The path, with '\' separators; empty segments are ignored.</param>
/// <returns>The matching entry, or whatever the recursive overload yields.</returns>
private DirectoryEntry GetDirectoryEntry(Directory dir, string path)
{
    var segments = path.Split(new[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
    return GetDirectoryEntry(dir, segments, 0);
}
/// <summary>
/// Creates this directory with the supplied access control.
/// </summary>
/// <param name="directorySecurity">The security rules to apply to the new directory.</param>
public void Create(DirectorySecurity directorySecurity)
    => Directory.InternalCreateDirectory(FullPath, OriginalPath, directorySecurity, true);
/// <summary>
/// Gets the access control (access rules, owner and group) of this directory.
/// </summary>
/// <returns>The directory's security descriptor.</returns>
public DirectorySecurity GetAccessControl()
    => Directory.GetAccessControl(FullPath, AccessControlSections.Access | AccessControlSections.Owner | AccessControlSections.Group);
/// <summary>
/// Gets the requested sections of this directory's access control.
/// </summary>
/// <param name="includeSections">Which sections of the security descriptor to retrieve.</param>
/// <returns>The directory's security descriptor.</returns>
public DirectorySecurity GetAccessControl(AccessControlSections includeSections)
    => Directory.GetAccessControl(FullPath, includeSections);
/// <summary>
/// Applies the supplied access control to this directory.
/// </summary>
/// <param name="directorySecurity">The security rules to apply.</param>
public void SetAccessControl(DirectorySecurity directorySecurity)
    => Directory.SetAccessControl(FullPath, directorySecurity);
/// <summary>
/// Deletes this directory, optionally removing all contained files and subdirectories.
/// </summary>
/// <param name="recursive">True to delete contents recursively; false presumably fails on a non-empty directory — confirm against Directory.Delete.</param>
public void Delete(bool recursive) { Directory.Delete(FullPath, OriginalPath, recursive, true); }
/// <summary>
/// Asserts that every segment in <paramref name="dir"/> reports the current Lucene main
/// version, then returns the number of segments.
/// </summary>
private int CheckAllSegmentsUpgraded(Directory dir)
{
    var segmentInfos = new SegmentInfos();
    segmentInfos.Read(dir);
    if (VERBOSE)
    {
        Console.WriteLine("checkAllSegmentsUpgraded: " + segmentInfos);
    }
    foreach (SegmentCommitInfo segment in segmentInfos.Segments)
    {
        Assert.AreEqual(Constants.LUCENE_MAIN_VERSION, segment.Info.Version);
    }
    return segmentInfos.Size();
}
/// <summary>
/// Removes the entry named <paramref name="name"/> for <paramref name="file"/> from
/// <paramref name="dir"/>. When the entry is a Win32 or DOS name, every Win32/DOS alias
/// of the file within the same parent directory is removed together, since those name
/// records refer to the same file.
/// </summary>
private static void RemoveFileFromDirectory(Directory dir, File file, string name)
{
    var namesToRemove = new List<string>();
    DirectoryEntry primary = dir.GetEntryByName(name);

    bool winOrDos = primary.Details.FileNameNamespace == FileNameNamespace.Dos
                    || primary.Details.FileNameNamespace == FileNameNamespace.Win32;
    if (!winOrDos)
    {
        namesToRemove.Add(name);
    }
    else
    {
        // Gather every Win32/DOS alias of this file that lives under the same parent directory.
        foreach (var stream in file.GetStreams(AttributeType.FileName, null))
        {
            var record = stream.GetContent<FileNameRecord>();
            bool aliasNamespace = record.FileNameNamespace == FileNameNamespace.Win32
                                  || record.FileNameNamespace == FileNameNamespace.Dos;
            if (aliasNamespace && record.ParentDirectory.Value == dir.MftReference.Value)
            {
                namesToRemove.Add(record.FileName);
            }
        }
    }

    foreach (var alias in namesToRemove)
    {
        dir.RemoveEntry(dir.GetEntryByName(alias));
    }
}
/// <summary>
/// Creates the directory represented by this object with default (null) security.
/// </summary>
public void Create() { Directory.InternalCreateDirectory(FullPath, OriginalPath, null, true); }
/// <summary>
/// Walks <paramref name="pathEntries"/> from <paramref name="pathOffset"/> to locate an
/// entry beneath <paramref name="dir"/>. Returns the directory's own entry for an empty
/// path, null when a component is missing, and throws when a non-terminal component is a
/// file rather than a directory.
/// </summary>
private DirectoryEntry GetDirectoryEntry(Directory dir, string[] pathEntries, int pathOffset)
{
    if (pathEntries.Length == 0)
    {
        return dir.DirectoryEntry;
    }

    DirectoryEntry current = dir.GetEntryByName(pathEntries[pathOffset]);
    if (current == null)
    {
        return null;
    }

    // Terminal component: return whatever we found (file or directory).
    if (pathOffset == pathEntries.Length - 1)
    {
        return current;
    }

    if ((current.Details.FileAttributes & FileAttributes.Directory) == 0)
    {
        throw new IOException(string.Format(CultureInfo.InvariantCulture, "{0} is a file, not a directory", pathEntries[pathOffset]));
    }

    return GetDirectoryEntry(GetDirectory(current.Reference), pathEntries, pathOffset + 1);
}
/// <summary>
/// Determines whether the path computed for <paramref name="key"/> exists as a directory on disk.
/// </summary>
public bool IsDirectory(string key) => IODirectory.Exists(ComputePath(key));
/// <summary>
/// Writes an ASCII tree view of <paramref name="dir"/> to <paramref name="writer"/>,
/// recursing into subdirectories. Entries at MFT index 5 are skipped to avoid infinite
/// recursion through the root directory's self-reference.
/// </summary>
private void DumpDirectory(Directory dir, TextWriter writer, string indent)
{
    foreach (DirectoryEntry entry in dir.GetAllEntries(true))
    {
        File file = GetFile(entry.Reference);
        Directory subDir = file as Directory;
        writer.WriteLine(indent + "+-" + file.ToString() + " (" + file.IndexInMft + ")");

        // Recurse - but avoid infinite recursion via the root dir...
        if (subDir != null && file.IndexInMft != 5)
        {
            DumpDirectory(subDir, writer, indent + "| ");
        }
    }
}
/// <summary>
/// Deletes this directory non-recursively (recursive flag is passed as false).
/// </summary>
public override void Delete() { Directory.Delete(FullPath, OriginalPath, false, true); }
/// <summary>
/// Exercises searching over a back-compat (old-format) index: runs CheckIndex, verifies
/// stored fields and term vectors for each live doc, checks that only doc id 7 is deleted,
/// validates doc-values fields (for 4.0+ indexes; sorted-set values for 4.2+), and asserts
/// hit counts for several term queries.
/// </summary>
/// <param name="dir">Directory holding the old-format index.</param>
/// <param name="oldName">Name of the old index, used only in assertion messages.</param>
/// <remarks>NOTE(review): Debug.Assert(is40Index) means this currently only supports 4.0+ (trunk) indexes.</remarks>
public virtual void SearchIndex(Directory dir, string oldName) { //QueryParser parser = new QueryParser("contents", new MockAnalyzer(random)); //Query query = parser.parse("handle:1"); IndexReader reader = DirectoryReader.Open(dir); IndexSearcher searcher = NewSearcher(reader); TestUtil.CheckIndex(dir); // true if this is a 4.0+ index bool is40Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("content5") != null; // true if this is a 4.2+ index bool is42Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("dvSortedSet") != null; Debug.Assert(is40Index); // NOTE: currently we can only do this on trunk! Bits liveDocs = MultiFields.GetLiveDocs(reader); for (int i = 0; i < 35; i++) { if (liveDocs.Get(i)) { Document d = reader.Document(i); IList<IndexableField> fields = d.Fields; bool isProxDoc = d.GetField("content3") == null; if (isProxDoc) { int numFields = is40Index ? 7 : 5; Assert.AreEqual(numFields, fields.Count); IndexableField f = d.GetField("id"); Assert.AreEqual("" + i, f.StringValue); f = d.GetField("utf8"); Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue); f = d.GetField("autf8"); Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue); f = d.GetField("content2"); Assert.AreEqual("here is more content with aaa aaa aaa", f.StringValue); f = d.GetField("fie\u2C77ld"); Assert.AreEqual("field with non-ascii name", f.StringValue); } Fields tfvFields = reader.GetTermVectors(i); Assert.IsNotNull(tfvFields, "i=" + i); Terms tfv = tfvFields.Terms("utf8"); Assert.IsNotNull(tfv, "docID=" + i + " index=" + oldName); } else { // Only ID 7 is deleted Assert.AreEqual(7, i); } } if (is40Index) { // check docvalues fields NumericDocValues dvByte = MultiDocValues.GetNumericValues(reader, "dvByte"); BinaryDocValues dvBytesDerefFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesDerefFixed"); BinaryDocValues dvBytesDerefVar = MultiDocValues.GetBinaryValues(reader, 
"dvBytesDerefVar"); SortedDocValues dvBytesSortedFixed = MultiDocValues.GetSortedValues(reader, "dvBytesSortedFixed"); SortedDocValues dvBytesSortedVar = MultiDocValues.GetSortedValues(reader, "dvBytesSortedVar"); BinaryDocValues dvBytesStraightFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightFixed"); BinaryDocValues dvBytesStraightVar = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightVar"); NumericDocValues dvDouble = MultiDocValues.GetNumericValues(reader, "dvDouble"); NumericDocValues dvFloat = MultiDocValues.GetNumericValues(reader, "dvFloat"); NumericDocValues dvInt = MultiDocValues.GetNumericValues(reader, "dvInt"); NumericDocValues dvLong = MultiDocValues.GetNumericValues(reader, "dvLong"); NumericDocValues dvPacked = MultiDocValues.GetNumericValues(reader, "dvPacked"); NumericDocValues dvShort = MultiDocValues.GetNumericValues(reader, "dvShort"); SortedSetDocValues dvSortedSet = null; if (is42Index) { dvSortedSet = MultiDocValues.GetSortedSetValues(reader, "dvSortedSet"); } for (int i = 0; i < 35; i++) { int id = Convert.ToInt32(reader.Document(i).Get("id")); Assert.AreEqual(id, dvByte.Get(i)); sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id }; BytesRef expectedRef = new BytesRef((byte[])(Array)bytes); BytesRef scratch = new BytesRef(); dvBytesDerefFixed.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesDerefVar.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesSortedFixed.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesSortedVar.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesStraightFixed.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesStraightVar.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); Assert.AreEqual((double)id, BitConverter.Int64BitsToDouble(dvDouble.Get(i)), 0D); Assert.AreEqual((float)id, Number.IntBitsToFloat((int)dvFloat.Get(i)), 0F); Assert.AreEqual(id, 
dvInt.Get(i)); Assert.AreEqual(id, dvLong.Get(i)); Assert.AreEqual(id, dvPacked.Get(i)); Assert.AreEqual(id, dvShort.Get(i)); if (is42Index) { dvSortedSet.Document = i; long ord = dvSortedSet.NextOrd(); Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dvSortedSet.NextOrd()); dvSortedSet.LookupOrd(ord, scratch); Assert.AreEqual(expectedRef, scratch); } } } ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs; // First document should be #0 Document doc = searcher.IndexReader.Document(hits[0].Doc); assertEquals("didn't get the right document first", "0", doc.Get("id")); DoTestHits(hits, 34, searcher.IndexReader); if (is40Index) { hits = searcher.Search(new TermQuery(new Term("content5", "aaa")), null, 1000).ScoreDocs; DoTestHits(hits, 34, searcher.IndexReader); hits = searcher.Search(new TermQuery(new Term("content6", "aaa")), null, 1000).ScoreDocs; DoTestHits(hits, 34, searcher.IndexReader); } hits = searcher.Search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).ScoreDocs; Assert.AreEqual(34, hits.Length); hits = searcher.Search(new TermQuery(new Term("utf8", "lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).ScoreDocs; Assert.AreEqual(34, hits.Length); hits = searcher.Search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).ScoreDocs; Assert.AreEqual(34, hits.Length); reader.Dispose(); }
/// <summary>
/// Collects Rider executable paths beneath a JetBrains Toolbox root directory.
/// For each channel it prefers the active build recorded in .history.json, then
/// .channel.settings.json, and finally falls back to scanning every build directory.
/// </summary>
private static string[] CollectPathsFromToolbox(string toolboxRiderRootPath, string dirName, string searchPattern, bool isMac)
{
    if (!Directory.Exists(toolboxRiderRootPath))
    {
        return new string[0];
    }

    return Directory.GetDirectories(toolboxRiderRootPath)
        .SelectMany(channelDir =>
        {
            try
            {
                // use history.json - last entry stands for the active build https://jetbrains.slack.com/archives/C07KNP99D/p1547807024066500?thread_ts=1547731708.057700&cid=C07KNP99D
                var historyFile = Path.Combine(channelDir, ".history.json");
                if (File.Exists(historyFile))
                {
                    var build = ToolboxHistory.GetLatestBuildFromJson(File.ReadAllText(historyFile));
                    if (build != null)
                    {
                        var candidates = GetExecutablePaths(dirName, searchPattern, isMac, Path.Combine(channelDir, build));
                        if (candidates.Any())
                        {
                            return candidates;
                        }
                    }
                }

                var channelFile = Path.Combine(channelDir, ".channel.settings.json");
                if (File.Exists(channelFile))
                {
                    // Normalize the key so the JSON deserializer's member name matches.
                    var json = File.ReadAllText(channelFile).Replace("active-application", "active_application");
                    var build = ToolboxInstallData.GetLatestBuildFromJson(json);
                    if (build != null)
                    {
                        var candidates = GetExecutablePaths(dirName, searchPattern, isMac, Path.Combine(channelDir, build));
                        if (candidates.Any())
                        {
                            return candidates;
                        }
                    }
                }

                // Changes in the Toolbox json file formats may break the logic above,
                // so fall back to returning every Rider installation found in this channel.
                return Directory.GetDirectories(channelDir)
                    .SelectMany(buildDir => GetExecutablePaths(dirName, searchPattern, isMac, buildDir));
            }
            catch (Exception e)
            {
                // do not write to Debug.Log, just log it.
                Logger.Warn($"Failed to get RiderPath from {channelDir}", e);
            }

            return new string[0];
        })
        .Where(path => !string.IsNullOrEmpty(path))
        .ToArray();
}
/// <summary>
/// Verifies that an old-format index (no new documents added) searches correctly both
/// before and after a full <c>ForceMerge(1)</c>: 34 hits for "content:aaa" with doc "0" first.
/// </summary>
public virtual void ChangeIndexNoAdds(Random random, Directory dir)
{
    // make sure searching sees right # hits
    DirectoryReader reader = DirectoryReader.Open(dir);
    IndexSearcher searcher = NewSearcher(reader);
    ScoreDoc[] scoreDocs = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(34, scoreDocs.Length, "wrong number of hits");
    Document firstDoc = searcher.Doc(scoreDocs[0].Doc);
    assertEquals("wrong first document", "0", firstDoc.Get("id"));
    reader.Dispose();

    // fully merge
    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode_e.APPEND));
    writer.ForceMerge(1);
    writer.Dispose();

    // same query must still return the same hits after the merge
    reader = DirectoryReader.Open(dir);
    searcher = NewSearcher(reader);
    scoreDocs = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(34, scoreDocs.Length, "wrong number of hits");
    DoTestHits(scoreDocs, 34, searcher.IndexReader);
    reader.Dispose();
}
/// <summary>
/// Update to the current version.
/// </summary>
/// <param name="detectDataChanges">Boolean indicating whether the update should also look for changes in data.</param>
/// <param name="progress">Optional object to which update progress is reported.</param>
/// <returns>Returns the <see cref="UpdateReport" /> containing all changes.</returns>
/// <exception cref="InvalidOperationException">Definition is up-to-date.</exception>
public UpdateReport Update(bool detectDataChanges, IProgress<UpdateProgress> progress = null)
{
    if (DefinitionVersion == GameVersion)
    {
        // Fix: previously threw a message-less exception; give callers a diagnosable reason.
        throw new InvalidOperationException("Definition is already up-to-date.");
    }

    var previousVersion = DefinitionVersion;

    var exdPackId = new PackIdentifier("exd", PackIdentifier.DefaultExpansion, 0);
    var exdPack = Packs.GetPack(exdPackId);
    var exdOldKeepInMemory = exdPack.KeepInMemory;
    // Keep EXD data resident during the update; the original setting is restored in finally.
    exdPack.KeepInMemory = true;

    string tempPath = null;
    UpdateReport report;
    try
    {
        using (var zip = new ZipFile(StateFile.FullName, ZipEncoding))
        {
            tempPath = ExtractPacks(zip, previousVersion);
            var previousPack = new PackCollection(Path.Combine(tempPath, previousVersion));
            previousPack.GetPack(exdPackId).KeepInMemory = true;

            var previousDefinition = ReadDefinition(zip);
            // Override previous definition when current definition version matches.
            // Definitions may have changed since this was recorded and we want to compare that.
            if (previousDefinition.Version == _GameData.Definition.Version)
            {
                previousDefinition = _GameData.Definition;
            }

            var updater = new RelationUpdater(previousPack, previousDefinition, Packs, GameVersion, progress);

            var changes = updater.Update(detectDataChanges);
            report = new UpdateReport(previousVersion, GameVersion, changes);

            var definition = updater.Updated;

            StorePacks(zip);
            StoreDefinition(zip, definition, DefinitionFile);
            // Also store a version-qualified copy so prior definitions remain retrievable.
            StoreDefinition(zip, definition, string.Format("{0}/{1}", definition.Version, DefinitionFile));
            StoreReport(zip, report);
            zip.Save();

            GameData.Definition = definition;
            GameData.Definition.Compile();
        }
    }
    finally
    {
        if (exdPack != null)
        {
            exdPack.KeepInMemory = exdOldKeepInMemory;
        }

        if (tempPath != null)
        {
            try
            {
                Directory.Delete(tempPath, true);
            }
            catch
            {
                // Best-effort cleanup; log and continue rather than mask the real outcome.
                Console.Error.WriteLine("Failed to delete temporary directory {0}.", tempPath);
            }
        }
    }
    return report;
}
/// <summary>
/// Reads the segment metadata from <paramref name="dir"/> and returns the segment count.
/// </summary>
private int GetNumberOfSegments(Directory dir)
{
    var segmentInfos = new SegmentInfos();
    segmentInfos.Read(dir);
    return segmentInfos.Size();
}
// Static initializer: prepares the local app-data and logs folders, initializes the current
// log file path (rotating any existing log to a timestamped copy), prunes old log files down
// to Consts.MaxOldLogs, and wires a FileSystemWatcher whose Changed/Created/Deleted events
// are merged into the AppLocalEvents observable (observed on the task-pool scheduler).
// NOTE(review): the watcher is intentionally never disposed — it lives for the process lifetime.
static Utils() { if (!Directory.Exists(Consts.LocalAppDataPath)) { Directory.CreateDirectory(Consts.LocalAppDataPath); } if (!Directory.Exists(Consts.LogsFolder)) { Directory.CreateDirectory(Consts.LogsFolder); } var programName = Assembly.GetEntryAssembly()?.Location ?? "Wabbajack"; LogFile = Path.Combine(Consts.LogsFolder, Path.GetFileNameWithoutExtension(programName) + ".current.log"); _startTime = DateTime.Now; if (LogFile.FileExists()) { var newPath = Path.Combine(Consts.LogsFolder, Path.GetFileNameWithoutExtension(programName) + new FileInfo(LogFile).LastWriteTime.ToString(" yyyy-MM-dd HH_mm_ss") + ".log"); File.Move(LogFile, newPath, MoveOptions.ReplaceExisting); } var logFiles = Directory.GetFiles(Consts.LogsFolder); if (logFiles.Length >= Consts.MaxOldLogs) { Log($"Maximum amount of old logs reached ({logFiles.Length} >= {Consts.MaxOldLogs})"); var filesToDelete = logFiles .Where(File.Exists) .OrderBy(f => { var fi = new FileInfo(f); return(fi.LastWriteTime); }).Take(logFiles.Length - Consts.MaxOldLogs).ToList(); Log($"Found {filesToDelete.Count} old log files to delete"); var success = 0; var failed = 0; filesToDelete.Do(f => { try { File.Delete(f); success++; } catch (Exception e) { failed++; Log($"Could not delete log at {f}!\n{e}"); } }); Log($"Deleted {success} log files, failed to delete {failed} logs"); } var watcher = new FileSystemWatcher(Consts.LocalAppDataPath); AppLocalEvents = Observable.Merge(Observable.FromEventPattern <FileSystemEventHandler, FileSystemEventArgs>(h => watcher.Changed += h, h => watcher.Changed -= h).Select(e => (FileEventType.Changed, e.EventArgs)), Observable.FromEventPattern <FileSystemEventHandler, FileSystemEventArgs>(h => watcher.Created += h, h => watcher.Created -= h).Select(e => (FileEventType.Created, e.EventArgs)), Observable.FromEventPattern <FileSystemEventHandler, FileSystemEventArgs>(h => watcher.Deleted += h, h => watcher.Deleted -= h).Select(e => (FileEventType.Deleted, e.EventArgs))) 
.ObserveOn(RxApp.TaskpoolScheduler); watcher.EnableRaisingEvents = true; }