/// <summary>
/// Determines whether the given folder contains only the given files, and no others.
/// </summary>
/// <param name="relFolderPath">Folder path, relative to <c>TargetDir</c>.</param>
/// <param name="filenames">The exact set of file names expected in the folder.</param>
/// <param name="anomalies">Optional collector that receives the missing and unexpected file names.</param>
/// <returns>True when the folder exists and contains exactly the expected files; otherwise false.</returns>
protected bool FolderContainsExclusive(string relFolderPath, IEnumerable<string> filenames, Anomalies anomalies = null)
{
    var dirPath = Path.Combine(this.TargetDir, relFolderPath);
    if (!Directory.Exists(dirPath))
    {
        return false;
    }

    // Materialize both sequences once. The original enumerated 'filenames' (a
    // possibly lazy IEnumerable) twice and projected the directory listing twice.
    var expectedNames = filenames.ToList();
    var actualNames = new DirectoryInfo(dirPath).GetFiles().Select(f => f.Name).ToList();

    var unexpectedFiles = actualNames.Except(expectedNames).ToList(); // present on disk but not expected
    var missingFiles = expectedNames.Except(actualNames).ToList();    // expected but absent from disk

    if (anomalies != null)
    {
        anomalies.ExpectedFiles.AddRange(missingFiles);
        anomalies.UnexpectedFiles.AddRange(unexpectedFiles);
    }

    return !unexpectedFiles.Any() && !missingFiles.Any();
}
/// <summary>
/// Compares the non-empty association file lists exported from Leman against the
/// archive file lists in the current directory, reports association ids present on
/// only one side, and for each Leman association writes the archived files missing
/// from Leman ({id}missing.txt) plus a batch file (Copy{id}Files.bat) to copy them.
/// </summary>
private static void CompareFiles()
{
    Console.Out.WriteLine("Comparing non-empty files between source repositories.");

    // Materialize both listings once; the original deferred IEnumerables re-scanned
    // the directory on every enumeration (four enumerations in total).
    var lemanFiles = new DirectoryInfo(Directory.GetCurrentDirectory())
        .GetFiles("ATIRS_TO_ASSOCIATION_???.txt").Where(file => file.Length > 0).ToList();
    var archiveFiles = new DirectoryInfo(Directory.GetCurrentDirectory())
        .GetFiles("???archived.txt").Where(file => file.Length > 0).ToList();

    // Association ids: characters 21-23 of the Leman file name, 0-2 of the archive name.
    var lemanIds = lemanFiles.Select(file => file.Name.Substring(21, 3)).ToList();
    var archiveIds = archiveFiles.Select(file => file.Name.Substring(0, 3)).ToList();

    foreach (var associationId in lemanIds.Except(archiveIds))
    {
        Console.Out.WriteLine(String.Format("Association {0} found in Leman but not in archives.", associationId));
    }
    foreach (var associationId in archiveIds.Except(lemanIds))
    {
        Console.Out.WriteLine(String.Format("Association {0} found in archives but not in Leman.", associationId));
    }

    foreach (var lemanFile in lemanFiles)
    {
        string associationId = lemanFile.Name.Substring(21, 3);

        // NOTE: the ":000" numeric format below is a no-op on a string argument
        // (string is not IFormattable); associationId is already the 3-char id.
        Console.Out.WriteLine("Reading files from association {0}.", associationId);
        List<string> associationFiles = ReadNonEmptyLines(String.Format("ATIRS_TO_ASSOCIATION_{0:000}.txt", associationId));

        Console.Out.WriteLine("Reading archived files.");
        List<string> archivedFiles = ReadNonEmptyLines(String.Format("{0:000}archived.txt", associationId));

        Console.Out.WriteLine("Comparing.");
        // ToList: the original deferred Except query was evaluated three times
        // (once for Count() and once per output loop).
        var missingFiles = archivedFiles.Except(associationFiles).ToList();
        if (missingFiles.Count > 0)
        {
            using (var writer = File.CreateText(String.Format("{0:000}missing.txt", associationId)))
            {
                foreach (var file in missingFiles)
                {
                    writer.WriteLine(file);
                    Console.Out.WriteLine(file);
                }
            }
            using (var writer = File.CreateText(String.Format("Copy{0:000}Files.bat", associationId)))
            {
                foreach (var file in missingFiles)
                {
                    writer.WriteLine(@"copy cxf\{0} .", file);
                }
            }
        }
    }
}

/// <summary>Reads the leading run of non-empty lines from the given text file.</summary>
private static List<string> ReadNonEmptyLines(string path)
{
    var lines = new List<string>();
    using (var reader = File.OpenText(path))
    {
        string line;
        // Stops at the first blank (or whitespace-only) line, matching the original loops.
        while (((line = reader.ReadLine()) != null) && (line.Trim().Length > 0))
        {
            lines.Add(line);
        }
    }
    return lines;
}
/// <summary>
/// Renders a table of the current directory's files to the console and to an
/// HTML file at the same time, then opens the HTML file in the default browser.
/// </summary>
private static void Demo2_DemonstrateSimultaneousRenderToConsoleAndHtmlFile()
{
    using (StreamWriter demo2Out = new StreamWriter("demo2Out.html"))
    {
        // One anonymous-typed row per file; the Name, Extension and CreationTime
        // properties become the table's columns.
        var currentDirectory = new DirectoryInfo(Directory.GetCurrentDirectory());
        var objectsToRender =
            from file in currentDirectory.GetFiles()
            select new { file.Name, file.Extension, file.CreationTime };

        // Destination 1: the console, drawn with basic ASCII borders.
        var consoleTableWriter = new ConsoleTableWriter();
        consoleTableWriter.SetBorderCharacterSet(TextTableWriter.BasicAsciiBorderCharacterSet);

        // Destination 2: the HTML output file.
        var htmlTableWriter = new HtmlTableWriter(demo2Out);

        // A composite writer fans the single render out to both destinations.
        var multipleTableRenderer = new MultipleTargetTableWriter(consoleTableWriter, htmlTableWriter);

        TableRenderer.Render(objectsToRender, multipleTableRenderer);
    }

    // Show the generated HTML in the user's browser.
    Process.Start("demo2Out.html");
}
/// <summary>
/// Eagerly loads every .dll found under the given path (recursively) into the
/// current AppDomain, skipping assemblies that are already loaded.
/// Credit: Andras Zoltan, http://stackoverflow.com/users/157701/andras-zoltan
/// (from http://stackoverflow.com/questions/3021613/how-to-pre-load-all-deployed-assemblies-for-an-appdomain)
/// </summary>
/// <param name="p">Root directory to scan for assemblies.</param>
private static void PreLoadAssembliesFromPath(string p)
{
    // Recursive scan — handy especially for localised satellite assemblies in subfolders.
    var dllFiles = new DirectoryInfo(p).GetFiles("*.dll", SearchOption.AllDirectories);
    var loadedAssemblies = AppDomain.CurrentDomain.GetAssemblies();

    foreach (var fullName in dllFiles.Select(fi => fi.FullName))
    {
        // Read the assembly's identity from disk without loading it (.NET 2+).
        var candidate = AssemblyName.GetAssemblyName(fullName);

        // Sanity check: skip if an already-loaded assembly would satisfy this
        // name anyway when passed to the loader.
        var alreadyLoaded = loadedAssemblies.Any(
            assembly => AssemblyName.ReferenceMatchesDefinition(candidate, assembly.GetName()));
        if (alreadyLoaded)
        {
            continue;
        }

        // Crucial: load by ASSEMBLY NAME (not path). In a web app this binds from
        // the ASP.NET temporary folder the site actually runs from.
        Assembly.Load(candidate);
    }
}
/// <summary>
/// Builds one <see cref="NotebookDto"/> per directory, pairing each directory
/// with its resolved display name.
/// </summary>
private ImmutableArray<NotebookDto> CreateNotebooksFromDirectories(DirectoryInfo[] directories)
{
    var notebooks =
        from dir in directories
        select new NotebookDto
        {
            Name = GetNotebookName(dir),
            Directory = dir
        };
    return notebooks.ToImmutableArray();
}
/// <summary>
/// Loads all mods (.zip / .disabled files) from the given directory in parallel.
/// NOTE(review): despite the "Async" suffix this method is synchronous; the name
/// is kept so existing callers keep compiling.
/// </summary>
/// <param name="pathToDirectory">Directory to scan for mod archives.</param>
/// <returns>The mods that loaded successfully; load failures are logged and skipped.</returns>
public List<ModData> LoadModsFromDirectoryAsync(string pathToDirectory)
{
    List<ModData> result = new List<ModData>();
    var gate = new object(); // guards 'result' during the parallel loop
    try
    {
        var paths = new DirectoryInfo(pathToDirectory).GetFiles()
            .Where(x => x.Extension.ToLowerInvariant() == ".zip" || x.Extension.ToLowerInvariant() == ".disabled");

        Stopwatch watch = Stopwatch.StartNew();
        Parallel.ForEach<string>(paths.Select(x => x.FullName), (path) =>
        {
            try
            {
                var mod = LoadModFromZip(path);
                // FIX: List<T> is not thread-safe; the original called result.Add
                // from multiple threads concurrently, which can corrupt the list
                // or drop entries. Serialize the adds.
                lock (gate)
                {
                    result.Add(mod);
                }
            }
            catch (Exception ex)
            {
                log.Error("Error loading '{0}'".Args(path), ex);
            }
        });
        watch.Stop();

        log.Info("Mods loaded in {0} sec", watch.ElapsedMilliseconds / 1000.0);
    }
    catch (Exception ex)
    {
        log.Error("Error while loading mods.", ex);
    }
    return result;
}
/// <summary>
/// Renders a simple table of the current directory's files to the console.
/// </summary>
private static void Demo1_DemonstrateSimpleRenderToConsole()
{
    // One anonymous-typed row per file; the Name, Extension and CreationTime
    // properties become the table's columns.
    var currentDirectory = new DirectoryInfo(Directory.GetCurrentDirectory());
    var objectsToRender =
        from file in currentDirectory.GetFiles()
        select new { file.Name, file.Extension, file.CreationTime };

    // Render straight to the console.
    TableRenderer.RenderToConsole(objectsToRender);
}
/// <summary>
/// Recursively collects stock codes from the data directory: each file name,
/// lower-cased with any ".day" fragment removed, is treated as one code.
/// </summary>
/// <param name="stocks">Accumulator the discovered codes are appended to.</param>
/// <param name="dataPath">Directory to scan; subdirectories are descended into first.</param>
private void GetPathCode(List<string> stocks, string dataPath)
{
    // Recurse into every subdirectory. (Directory.GetDirectories never returns
    // null, so the original null/length guards were redundant.)
    foreach (var subDirectory in Directory.GetDirectories(dataPath))
    {
        GetPathCode(stocks, subDirectory);
    }

    // Then harvest the codes from the files at this level.
    var files = new DirectoryInfo(dataPath).GetFiles();
    stocks.AddRange(files.Select(file => file.Name.ToLower().Replace(".day", string.Empty)));
}
/// <summary>
/// Loads the classified test-image set and builds one <see cref="ImageTest"/> per
/// file. File names are expected in the form "first_second_indice[...]".
/// </summary>
public void SetUp()
{
    // NOTE(review): hard-coded machine-specific path — consider moving to test configuration.
    var pathToImagesTestSet = Path.Combine(@"D:\ReStudio\test1", @"raw\classified");
    var testsImagePathList = new DirectoryInfo(pathToImagesTestSet).GetFiles();
    _testImagesList = testsImagePathList.Select(fileInfo =>
    {
        var filenameElements = fileInfo.Name.Split('_');
        return new ImageTest
        {
            FirstDigit = filenameElements[0],
            SecondDigit = filenameElements[1],
            Indice = filenameElements[2],
            Path = fileInfo.FullName,
            Image = new Bitmap(fileInfo.FullName)
        };
    }).ToList();
    // FIX: ToList materializes the query once. The original stored the deferred
    // query, so every later enumeration re-read the directory and re-created a
    // fresh (undisposed) Bitmap for every file.
}
/// <summary>
/// Renders a table of the current directory's files to a text file, then opens it.
/// </summary>
private static void Demo3_DemonstrateRenderToTextFile()
{
    // One anonymous-typed row per file; Name, Extension and CreationTime become columns.
    var currentDirectory = new DirectoryInfo(Directory.GetCurrentDirectory());
    var objectsToRender =
        from file in currentDirectory.GetFiles()
        select new { file.Name, file.Extension, file.CreationTime };

    using (StreamWriter demo3TextOut = new StreamWriter("demo3Out.txt"))
    {
        // Render the table into the text file.
        TableRenderer.Render(objectsToRender, new TextTableWriter(demo3TextOut));
    }

    // Display the generated file with its default handler.
    Process.Start("demo3Out.txt");
}
/// <summary>
/// Lists the flicker_*.mp4 videos under ~/videos as an XML document of the form
/// &lt;videos&gt;&lt;video&gt;/videos/name&lt;/video&gt;...&lt;/videos&gt;.
/// </summary>
public ActionResult Flicker()
{
    var videoDirectory = new DirectoryInfo(System.Web.Hosting.HostingEnvironment.MapPath("~/videos"));
    var files = videoDirectory.GetFiles("flicker_*.mp4");

    // One <video> element per file, holding its public URL.
    var videoElements = files.Select(f => new XElement("video", "/videos/" + f.Name)).ToArray();
    var xml = new XDocument(new XElement("videos", videoElements));

    var output = new StringBuilder();
    using (var writer = XmlWriter.Create(output))
    {
        xml.WriteTo(writer);
        writer.Flush();
    }

    return new ContentResult()
    {
        Content = output.ToString(),
        ContentEncoding = Encoding.UTF8,
        ContentType = "text/xml"
    };
}
/// <summary>
/// Lists the v*.mp4 videos from the configured content folder as an XML document
/// of the form &lt;videos&gt;&lt;video&gt;/videos/get/name&lt;/video&gt;...&lt;/videos&gt;.
/// </summary>
public ActionResult Content()
{
    var contentDirectory = new DirectoryInfo(ConfigurationManager.AppSettings["ContentVideosPath"]);
    var files = contentDirectory.GetFiles("v*.mp4");

    // One <video> element per file, holding its download URL.
    var videoElements = files.Select(f => new XElement("video", "/videos/get/" + f.Name)).ToArray();
    var xml = new XDocument(new XElement("videos", videoElements));

    var output = new StringBuilder();
    using (var writer = XmlWriter.Create(output))
    {
        xml.WriteTo(writer);
        writer.Flush();
    }

    return new ContentResult()
    {
        Content = output.ToString(),
        ContentEncoding = Encoding.UTF8,
        ContentType = "text/xml"
    };
}
/// <summary>
/// Builds the update packages: a full (compatible-mode) package, and — when
/// incremental update is enabled — an "always install" package plus one
/// incremental package per version-compared / skip-if-exists file.
/// </summary>
/// <param name="e">Worker event args used to report progress to the UI.</param>
/// <param name="ui">Update manifest that receives package names, sizes and hashes.</param>
public void BuildPackages(Wrapper.RunworkEventArgs e, UpdateInfo ui)
{
    var targetDir = AuProject.ParseFullPath(AuProject.DestinationDirectory);
    var appDir = AuProject.ParseFullPath(AuProject.ApplicationDirectory);

    if (!Directory.Exists(appDir))
        throw new ApplicationException("无效的应用程序目录"); // "invalid application directory"
    if (!Directory.Exists(targetDir))
    {
        try
        {
            Directory.CreateDirectory(targetDir);
        }
        catch (Exception ex)
        {
            throw new ApplicationException("无法创建目标目录", ex); // "cannot create target directory"
        }
    }

    // Scan the application directory for every file to consider packaging.
    e.ReportProgress(0, 0, "正在扫描文件列表...");
    FileInfo[] allfiles;
    try
    {
        allfiles = new DirectoryInfo(appDir).GetFiles("*.*", SearchOption.AllDirectories);
    }
    catch (Exception ex)
    {
        throw new ApplicationException("无法扫描来源目录", ex); // "cannot scan source directory"
    }

    // Build the relative-path -> FileInfo pairs, excluding files marked Ignore
    // (either explicitly per-file or via the project default).
    e.ReportProgress(0, 0, "正在准备文件列表...");
    var projectItems = AuProject.Files.ToDictionary(s => s.Path, StringComparer.OrdinalIgnoreCase);
    var targetfiles = allfiles.Select(s => new KeyValuePair<string, FileInfo>(s.FullName.Remove(0, appDir.Length).Trim(Path.DirectorySeparatorChar), s))
        .Where(s =>
            (!projectItems.ContainsKey(s.Key) && AuProject.DefaultUpdateMethod != UpdateMethod.Ignore)
            || (projectItems.ContainsKey(s.Key) && projectItems[s.Key].UpdateMethod != UpdateMethod.Ignore))
        .ToArray();

    // Classic-style single package (also emitted when incremental update is off).
    if (!AuProject.EnableIncreaseUpdate || AuProject.CreateCompatiblePackage)
    {
        var mainPkgId = GetPackageName("main") + "." + AuProject.PackageExtension;
        var file = System.IO.Path.Combine(targetDir, mainPkgId);
        Result.Add(mainPkgId, "兼容升级模式(或未开启增量更新时)的升级包文件");
        e.Progress.TaskCount = targetfiles.Length;
        CreateZip("正在生成兼容版升级包,正在压缩 {0}", file, ui.PackagePassword, e, targetfiles);
        var fileInfo = new System.IO.FileInfo(file);
        ui.PackageSize = fileInfo.Length;
        e.ReportProgress(0, 0, "正在计算包文件Hash...");
        ui.MD5 = Wrapper.ExtensionMethod.GetFileHash(file);
        ui.Package = mainPkgId;
    }
    if (!AuProject.EnableIncreaseUpdate)
        return;

    // Build the main ("always install") package from files marked Always.
    e.ReportProgress(targetfiles.Length, 0, "");
    ui.Packages = new List<PackageInfo>();
    var mainFiles = targetfiles
        .Where(s =>
            (!projectItems.ContainsKey(s.Key) && AuProject.DefaultUpdateMethod == UpdateMethod.Always)
            || (projectItems.ContainsKey(s.Key) && projectItems[s.Key].UpdateMethod == UpdateMethod.Always))
        .ToArray();
    if (mainFiles.Length > 0)
    {
        // NOTE(review): "alwaysintall" looks like a typo for "alwaysinstall", but the
        // string feeds the emitted package id — do not change without a migration plan.
        var mainPkgId = GetPackageName("alwaysintall") + "." + AuProject.PackageExtension;
        var pkgName = Path.Combine(targetDir, mainPkgId);
        e.Progress.TaskCount = mainFiles.Length;
        CreateZip("正在生成全局升级包,正在压缩 {0}", pkgName, ui.PackagePassword, e, mainFiles);
        Result.Add(mainPkgId, "全局升级包,包含必须更新的文件");
        var fileInfo = new System.IO.FileInfo(pkgName);
        ui.Packages.Add(new PackageInfo()
        {
            Version = "0.0.0.0",
            VerificationLevel = FileVerificationLevel.None,
            FilePath = "",
            FileSize = 0L,
            FileHash = "",
            PackageHash = Wrapper.ExtensionMethod.GetFileHash(pkgName),
            PackageName = mainPkgId,
            PackageSize = fileInfo.Length,
            Method = UpdateMethod.Always,
            Files = mainFiles.Select(s => s.Key).ToArray()
        });
    }

    // Build one incremental package per remaining file.
    e.Progress.TaskCount = targetfiles.Length;
    e.Progress.TaskProgress = 0;
    foreach (var file in targetfiles)
    {
        ProjectItem config;
        if (!projectItems.ContainsKey(file.Key))
        {
            // File not listed in the project: fall back to the project defaults.
            if (AuProject.DefaultUpdateMethod == UpdateMethod.Always || AuProject.DefaultUpdateMethod == UpdateMethod.Ignore)
                continue;
            config = new ProjectItem()
            {
                UpdateMethod = AuProject.DefaultUpdateMethod,
                FileVerificationLevel = AuProject.DefaultFileVerificationLevel
            };
        }
        else
        {
            config = projectItems[file.Key];
            // fix always-pack issue: Always/Ignore files are already handled above,
            // so only VersionCompare and SkipIfExists files get individual packages.
            if (config.UpdateMethod != UpdateMethod.VersionCompare && config.UpdateMethod != UpdateMethod.SkipIfExists)
                continue;
        }

        // The file's version resource (if any) stamps the package's version.
        var fdi = System.Diagnostics.FileVersionInfo.GetVersionInfo(file.Value.FullName);
        var pkgFileName = GetPackageName(file.Key) + "." + AuProject.PackageExtension;
        var pkg = Path.Combine(targetDir, pkgFileName);
        e.ReportProgress(e.Progress.TaskCount, ++e.Progress.TaskProgress, "正在生成增量包 " + file.Key + ", 正在压缩....");
        CreateZip(null, pkg, ui.PackagePassword, null, new[] { file });
        Result.Add(pkgFileName, "文件【" + file.Key + "】的增量升级包");
        var pkgInfo = new System.IO.FileInfo(pkg);
        ui.Packages.Add(new PackageInfo()
        {
            Version = string.IsNullOrEmpty(fdi.FileVersion) ? "0.0.0.0" : Wrapper.ExtensionMethod.ConvertVersionInfo(fdi).ToString(),
            VerificationLevel = config.FileVerificationLevel,
            FilePath = file.Key,
            FileSize = new FileInfo(file.Value.FullName).Length,
            FileHash = Wrapper.ExtensionMethod.GetFileHash(file.Value.FullName),
            PackageHash = Wrapper.ExtensionMethod.GetFileHash(pkg),
            PackageName = pkgFileName,
            PackageSize = pkgInfo.Length,
            Method = config.UpdateMethod,
            Files = new[] { file.Key }
        });
    }
}
/// <summary>
/// Resolves an app's icon resource tag to the path of its largest (by pixel area)
/// logo image on disk. Somewhat hit-and-miss: it picks the highest-resolution
/// icon with the matching file name inside the matching package folder.
/// TODO: improve this...
/// </summary>
/// <param name="iconTag">Resource tag of the form @{package_...?ms-resource://.../Files/image}.</param>
/// <returns>The full path of the best-matching image, or the empty string when none is found.</returns>
private static string GetIconPath(string iconTag)
{
    // Group 1 = package name prefix; group 2 = image path relative to /Files/.
    var iconPathRegex = Regex.Match(iconTag, @"@{(.*?\..*?)_.*\?ms-resource://.*/Files/(.*)}");
    var iconPath = string.Empty;
    if (!iconPathRegex.Success) return iconPath;

    // Searches one app root for the largest matching logo image.
    Func<string, string> getLogoPath = p =>
    {
        var expandedPath = Environment.ExpandEnvironmentVariables($@"{p}");
        if (!Directory.Exists(expandedPath))
        {
            return string.Empty;
        }
        // Any versioned folder starting with the package name matches.
        var folderPaths = new DirectoryInfo(expandedPath).GetDirectories($@"{iconPathRegex.Groups[1].Value}*");
        // Drop the extension so every scale/size variant of the image matches.
        var imageNameWithoutExtension = Path.GetFileNameWithoutExtension(iconPathRegex.Groups[2].Value);
        if (folderPaths.Length == 0) return string.Empty;
        FileInfo largestLogoPath = null;
        Image largestLogoImage = null;
        foreach (var matchingLogoFiles in folderPaths.Select(folderPath => folderPath?.GetFiles($@"{imageNameWithoutExtension}*", SearchOption.AllDirectories)))
        {
            if (matchingLogoFiles == null) return string.Empty;
            foreach (var logoFile in matchingLogoFiles)
            {
                try
                {
                    var image = ImageUtils.LoadFileToBitmap(logoFile.FullName);
                    if (largestLogoPath == null)
                    {
                        // First candidate: keep a clone so 'image' can be disposed below.
                        largestLogoPath = logoFile;
                        largestLogoImage = (Image) image.Clone();
                    }
                    else
                    {
                        // Keep whichever candidate has the larger pixel area.
                        if (image.Width*image.Height > largestLogoImage?.Width*largestLogoImage?.Height)
                        {
                            largestLogoPath = logoFile;
                            largestLogoImage.Dispose();
                            largestLogoImage = (Image) image.Clone();
                        }
                    }
                    image.Dispose();
                }
                catch
                {
                    // ignore files that cannot be loaded as images
                }
            }
        }
        largestLogoImage?.Dispose();
        return largestLogoPath?.FullName ?? string.Empty;
    };

    // Try the Store-app location first, then fall back to the built-in system apps.
    var logoPath = getLogoPath(@"%PROGRAMFILES%\WindowsApps\");
    return string.IsNullOrEmpty(logoPath) ? getLogoPath(@"%SYSTEMROOT%\SystemApps\") : logoPath;
}
/// <summary>
/// Demonstrates grid output: renders the current directory's files as two tables
/// (files above a size threshold, then the rest) to the console and a grid writer.
/// </summary>
private static void Demo6_Grids()
{
    // One anonymous-typed row per file; Name, Length, CreationTime and
    // LastWriteTime become the table's columns.
    var currentDirectory = new DirectoryInfo(Directory.GetCurrentDirectory());
    var objectsToRender =
        from file in currentDirectory.GetFiles()
        select new { file.Name, file.Length, file.CreationTime, file.LastWriteTime };

    // Explicit four-column structure shared by both renders.
    var tableStructure = TableStructure.Build()
        .Column("Name")
        .Column("Length")
        .Column("CreationTime")
        .Column("LastWriteTime")
        .Finish();

    // Fan each render out to both the console and the grid writer.
    ITableWriter tw = new MultipleTargetTableWriter(new ConsoleTableWriter(), new GridTableWriter());

    int threshold = 4096;
    TableRenderer.Render(objectsToRender.Where(x => x.Length > threshold), tw, tableStructure);
    TableRenderer.Render(objectsToRender.Where(x => x.Length <= threshold), tw, tableStructure);
}
/// <summary>
/// Compiles one project's .adam sources to C#, generates assembly info and (when a
/// main function exists) an entry point, then compiles the generated C# into the
/// project's targets/debug output and registers it as a buildable dependency.
/// </summary>
/// <param name="projectDirPath">Root directory of the project.</param>
/// <param name="projectConfig">The project's parsed configuration.</param>
/// <param name="builtDependencies">Already-built dependencies by name; this project is added on success.</param>
/// <returns>The target directory the assembly was written to.</returns>
private static string BuildProject(string projectDirPath, ProjectConfig projectConfig, IDictionary<string, Dependency> builtDependencies)
{
    Console.WriteLine($"Building {projectConfig.Name} ...");

    // Fresh intermediate directory for the generated C#.
    var compileDirPath = Path.Combine(projectDirPath, ".bootstrapCompile");
    DeleteDirectoryIfExists(compileDirPath);

    // Compile every .adam source, mirroring the src/ layout under the intermediate
    // directory, and collect any main functions found along the way.
    var srcFiles = new DirectoryInfo(Path.Combine(projectDirPath, "src")).GetFiles("*.adam", SearchOption.AllDirectories);
    var mainFunctions = srcFiles.Select(srcFile =>
    {
        var relPath = Path.GetFullPath(srcFile.FullName).Substring(projectDirPath.Length + 1);
        var csPath = Path.ChangeExtension(relPath, "cs");
        return Compile(srcFile.FullName, Path.Combine(compileDirPath, csPath));
    }).Combine();

    GenerateAssemblyInfo(projectConfig, compileDirPath);

    // At most one main function is allowed; exactly one makes this an executable.
    if(mainFunctions.Count > 1) throw new Exception("Multiple main functions");
    if(mainFunctions.Count == 1) GenerateEntryPoint(compileDirPath, mainFunctions.Single());

    var isApp = projectConfig.Template == "app";
    var targetDirPath = Path.Combine(projectDirPath, "targets/debug");
    DeleteDirectoryIfExists(targetDirPath);

    // Gather the generated C# sources and compile them against the dependency assemblies.
    var csSrc = new DirectoryInfo(compileDirPath).GetDirectories("src")
        .Single()
        .GetFiles("*.cs", SearchOption.AllDirectories);
    var assemblyName = projectConfig.Name;
    var assemblyPath = Path.Combine(targetDirPath, assemblyName) + (isApp ? ".exe" : ".dll");
    var dependencies = projectConfig.Dependencies.Keys.Concat(CoreDependencies).ToList();
    var dependencyPaths = dependencies.Select(d => builtDependencies[d].OutputPath);
    CompileCSharp(csSrc, dependencyPaths, assemblyPath, isApp);

    // Register this project so downstream projects can resolve it as a dependency.
    builtDependencies.Add(projectConfig.Name, new Dependency(assemblyPath, dependencies));
    return targetDirPath;
}
/// <summary>
/// Returns the full paths of all *.jpg files directly inside the given directory
/// (subdirectories are not searched).
/// </summary>
/// <param name="path">Directory to scan for image files.</param>
public static IEnumerable<string> Bilddateien_ermitteln(string path)
{
    var jpgFiles = new DirectoryInfo(path).GetFiles("*.jpg", SearchOption.TopDirectoryOnly);
    return from file in jpgFiles select file.FullName;
}
/// <summary>
/// Demonstrates column groups: renders the current directory's files with two
/// column groups ("Core" and "Time") to the console and an HTML file, then opens
/// the HTML file.
/// </summary>
private static void Demo5_DemonstrateColumnGroups()
{
    // One anonymous-typed row per file; Name, Length, CreationTime and
    // LastWriteTime become the table's columns.
    var filesInCurrentDirectory = new DirectoryInfo(Directory.GetCurrentDirectory()).GetFiles();
    var objectsToRender = filesInCurrentDirectory
        .Select(x => new { x.Name, x.Length, x.CreationTime, x.LastWriteTime });

    // Two groups of columns, each containing two columns.
    var tableStructure = TableStructure.Build()
        .ColumnGroup("Core")
            .Column("Name")
            .Column("Length")
        .ColumnGroup("Time")
            .Column("CreationTime")
            .Column("LastWriteTime")
        .Finish();

    ITableWriter tw = new MultipleTargetTableWriter(new ConsoleTableWriter(), new HtmlTableWriter("demo5Out.html"));

    TableRenderer.Render(objectsToRender, tw, tableStructure);

    // FIX: open the same file that was written ("demo5Out.html"); the original
    // opened "demo5out.html", which fails on case-sensitive file systems.
    Process.Start("demo5Out.html");
}
/// <summary>
/// Entry point: parses key=value command-line arguments, loads and parses all
/// markdown posts/pages, renders them through the Nancy test browser, copies the
/// theme's static directories/files to the output, and optionally starts a server.
/// </summary>
private static void Main(string[] args)
{
    StaticConfiguration.DisableErrorTraces = false;
    Console.WriteLine("Sandra.Snow : " + DateTime.Now.ToString("HH:mm:ss") + " : Begin processing");

    try
    {
        // Command-line arguments arrive as key=value pairs.
        var commands = args.Select(x => x.Split('=')).ToDictionary(x => x[0], x => x[1]);

        if (commands.ContainsKey("debug"))
        {
            DebugHelperExtensions.EnableDebugging();
        }
        if (commands.ContainsKey("vsdebug"))
        {
            DebugHelperExtensions.WaitForContinue();
        }

        // Working directory: next to the given config file, or next to the executable.
        string currentDir;
        if (commands.ContainsKey("config"))
        {
            currentDir = new FileInfo(commands["config"]).DirectoryName;
        }
        else
        {
            currentDir = Path.GetDirectoryName(typeof(Program).Assembly.Location);
        }
        currentDir.OutputIfDebug(prefixWith: " - Current directory: ");

        var settings = CreateSettings(currentDir);

        // Only markdown files are treated as posts/pages.
        var extensions = new HashSet<string>(new[] { ".md", ".markdown" }, StringComparer.OrdinalIgnoreCase);
        var files = new DirectoryInfo(settings.Posts).EnumerateFiles("*", SearchOption.AllDirectories)
            .Where(x => extensions.Contains(x.Extension));

        SetupOutput(settings);
        StaticPathProvider.Path = settings.CurrentDir;
        SnowViewLocationConventions.Settings = settings;

        // Parse posts, newest first, dropping private and unparseable entries.
        var posts = files.Select(x => PostParser.GetFileData(x, settings))
            .OrderByDescending(x => x.Date)
            .Where(x => x.Published != Published.Private && !(x is Post.MissingPost))
            .ToList();

        // Pages are optional; when a pages folder is configured, parse them the same way.
        var pages = new List<Page>();
        if (!string.IsNullOrWhiteSpace(settings.Pages))
        {
            pages = new DirectoryInfo(settings.Pages).EnumerateFiles("*", SearchOption.AllDirectories)
                .Select(x => PagesParser.GetFileData(x, settings))
                .OrderByDescending(x => x.Date)
                .Where(x => x.Published != Published.Private && !(x is Post.MissingPost))
                .ToList();
            pages.SetPostUrl(settings);
            TestModule.Pages = pages;
        }

        posts.SetPostUrl(settings);
        posts.UpdatePartsToLatestInSeries();

        // Publish the parsed content to the static module the Nancy browser renders from.
        TestModule.Posts = posts;
        TestModule.Drafts = posts.Where(x => x.Published == Published.Draft).ToList();
        TestModule.Categories = CategoriesPage.Create(posts);
        TestModule.PostsGroupedByYearThenMonth = ArchivePage.Create(posts);
        TestModule.MonthYear = ArchiveMenu.Create(posts);
        TestModule.Settings = settings;

        var browserComposer = new Browser(with =>
        {
            with.Module<TestModule>();
            with.RootPathProvider<StaticPathProvider>();
            with.ViewEngines(typeof(SuperSimpleViewEngineWrapper), typeof(RazorViewEngine));
        });

        // Compile all Posts and pages
        posts.ForEach(x => ComposeParsedFiles(x, settings.PostsOutput, browserComposer));
        pages.ForEach(x => ComposeParsedFiles(x, settings.PagesOutput, browserComposer));

        // Compile all Drafts
        var drafts = posts.Where(x => x.Published == Published.Draft).ToList();
        drafts.ForEach(x => ComposeDrafts(x, settings.PostsOutput, browserComposer));

        // Compile all static files; stop at the first failure.
        foreach (var processFile in settings.ProcessFiles)
        {
            var success = ProcessFile(processFile, settings, posts, browserComposer);
            if (!success)
            {
                break;
            }
        }

        // Copy the theme's directories to the output, honoring "source => destination" mappings.
        foreach (var copyDirectory in settings.CopyDirectories)
        {
            var sourceDir = (settings.ThemesDir + Path.DirectorySeparatorChar + settings.Theme + Path.DirectorySeparatorChar + copyDirectory);
            var destinationDir = copyDirectory;
            if (copyDirectory.Contains(" => "))
            {
                var directorySplit = copyDirectory.Split(new[] { " => " }, StringSplitOptions.RemoveEmptyEntries);
                sourceDir = directorySplit[0];
                destinationDir = directorySplit[1];
            }
            var source = Path.Combine(settings.CurrentDir, sourceDir);
            if (!Directory.Exists(source))
            {
                // Fall back to the raw path when the themed path doesn't exist.
                source = Path.Combine(settings.CurrentDir, copyDirectory);
                if (!Directory.Exists(source))
                {
                    copyDirectory.OutputIfDebug("Unable to find the directory, so we're skipping it: ");
                    continue;
                }
            }
            // If the destination directory is "." copy the folder files to the output folder root
            var destination = destinationDir == "." ? settings.PostsOutput : Path.Combine(settings.PostsOutput, destinationDir);
            new DirectoryInfo(source).Copy(destination, true);
        }

        // Copy individual theme files to the output, with the same raw-path fallback.
        foreach (var copyFile in settings.CopyFiles)
        {
            var sourceFile = (settings.ThemesDir + Path.DirectorySeparatorChar + settings.Theme + Path.DirectorySeparatorChar + copyFile);
            var source = Path.Combine(settings.CurrentDir, sourceFile);
            var destinationFile = copyFile;
            if (!File.Exists(source))
            {
                source = Path.Combine(settings.CurrentDir, copyFile);
                if (!File.Exists(source))
                {
                    copyFile.OutputIfDebug("Unable to find the directory, so we're skipping it: ");
                    continue;
                }
            }
            var destination = Path.Combine(settings.PostsOutput, destinationFile);
            File.Copy(source, destination, true);
        }

        Console.WriteLine("Sandra.Snow : " + DateTime.Now.ToString("HH:mm:ss") + " : Finish processing");

        if (commands.ContainsKey("server"))
        {
            SnowServer.Start(settings);
        }
        if (commands.ContainsKey("debug"))
        {
            DebugHelperExtensions.WaitForContinue();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        Console.WriteLine(ex.ToString());
        DebugHelperExtensions.WaitForContinue();
    }
}
/// <summary>
/// Parses every .adam source under src/ into a package; if parsing produced
/// diagnostics they are printed and compilation is skipped, otherwise the package
/// is compiled against the already-compiled projects and registered.
/// </summary>
private void CompileProject(string projectDirPath, ProjectConfig projectConfig, CompiledProjects projects)
{
    Console.WriteLine($"Compiling {projectConfig.Name} ...");

    var srcDirectory = new DirectoryInfo(Path.Combine(projectDirPath, "src"));
    var sourceFiles = srcDirectory.GetFiles("*.adam", SearchOption.AllDirectories);

    var isApp = projectConfig.Template == "app";
    // TODO read trusted from config
    var references = projectConfig.Dependencies.Select(d => new PackageReferenceSyntax(d.Key, null, true));
    var package = new PackageSyntax(projectConfig.Name, isApp, references);

    // Parse each source file into the package.
    package = package.With(sourceFiles.Select(fileInfo => compiler.Parse(package, new SourceFile(fileInfo))));

    // Bail out on parse diagnostics before attempting compilation.
    if (package.Diagnostics.Count > 0)
    {
        PrintDiagnostics(package);
        return;
    }

    var compiledPackage = compiler.Compile(package, projects.Select(p => p.Package));
    var compiledProject = new CompiledProject(projectDirPath, compiledPackage);
    projects.Add(compiledProject);
    OnProjectCompiled(compiledProject, projects);
}
/// <summary>
/// Populates the replays listbox with the .osr files from the replay directory,
/// newest first.
/// </summary>
private void PopulateReplays()
{
    // FIX: the extension check is now case-insensitive (".OSR" files were
    // previously skipped), and OrderByDescending replaces the original
    // OrderBy(...).Reverse() — same order, one pass.
    FileInfo[] files = new DirectoryInfo(oRAData.ReplayDirectory).GetFiles()
        .Where(f => f.Extension.Equals(".osr", StringComparison.OrdinalIgnoreCase))
        .OrderByDescending(f => f.CreationTime)
        .ToArray();

    // Add the replays on the UI thread; node text is the file name without its extension.
    ReplaysList.BeginInvoke((Action)(() => ReplaysList.Nodes.AddRange(files.Select(f => new TreeNode
    {
        Text = Path.GetFileNameWithoutExtension(f.Name),
        Name = f.FullName
    }).ToArray())));
}
/// <summary>
/// Attempts to list the files in a folder matching a search pattern (top level only).
/// </summary>
/// <param name="folderPath">Folder to search.</param>
/// <param name="searchPattern">Wildcard pattern, e.g. "*.txt".</param>
/// <param name="fileList">Receives the matching files on success; null on failure.</param>
/// <param name="errorMsg">Receives the failure details on error; null on success.</param>
/// <returns>True when the directory listing succeeded.</returns>
public bool TryGetDirFiles(string folderPath, string searchPattern, out List<FileShim> fileList, out string errorMsg)
{
    fileList = null;
    errorMsg = null;

    FileInfo[] matches;
    try
    {
        matches = new DirectoryInfo(folderPath).GetFiles(searchPattern, SearchOption.TopDirectoryOnly);
    }
    catch (Exception ex)
    {
        // Surface the failure (bad path, access denied, ...) as a message instead of throwing.
        errorMsg = ex.Details(false, false);
        return false;
    }

    fileList = matches.Select(m => this.File(m.FullName)).ToList();
    return true;
}
/// <summary>
/// Entry point of the Sandra.Snow static-site generator. Parses "name=value"
/// command-line arguments, loads settings, parses all markdown posts through a
/// Nancy test browser, composes them to HTML, then copies static assets to the
/// output directory. Any exception is reported to the console.
/// </summary>
/// <param name="args">Arguments in "name=value" form, e.g. "config=path" or "debug=1".</param>
private static void Main(string[] args)
{
    Console.WriteLine("Sandra.Snow : " + DateTime.Now.ToString("HH:mm:ss") + " : Begin processing");

    try
    {
        // Each argument must contain '=' - "name=value" pairs become the command dictionary.
        var commands = args.Select(x => x.Split('=')).ToDictionary(x => x[0], x => x[1]);

        if (commands.ContainsKey("debug"))
        {
            DebugHelperExtensions.EnableDebugging();
        }

        // Working directory: the config file's folder when "config" is given,
        // otherwise the folder containing this assembly.
        string currentDir;

        if (commands.ContainsKey("config"))
        {
            currentDir = new FileInfo(commands["config"]).DirectoryName;
        }
        else
        {
            currentDir = Path.GetDirectoryName(typeof(Program).Assembly.Location);
        }

        currentDir.OutputIfDebug(prefixWith: "current directory: ");

        var settings = CreateSettings(currentDir);

        // Only markdown files are treated as post sources (case-insensitive extensions).
        var extensions = new HashSet<string>(new[] { ".md", ".markdown" }, StringComparer.OrdinalIgnoreCase);
        var files = new DirectoryInfo(settings.Posts).EnumerateFiles()
                                                     .Where(x => extensions.Contains(x.Extension));

        SetupOutput(settings);

        StaticPathProvider.Path = settings.CurrentDir;
        SnowViewLocationConventions.Settings = settings;

        // Browser wired with the markdown view engine - used only to parse posts.
        var browserParser = new Browser(with =>
        {
            with.Module<TestModule>();
            with.RootPathProvider<StaticPathProvider>();
            with.ViewEngine<CustomMarkDownViewEngine>();
        });

        // Parse every post file and order newest-first.
        var posts = files.Select(x => PostParser.GetFileData(x, browserParser, settings))
                         .OrderByDescending(x => x.Date)
                         .ToList();

        posts.SetPostUrl(settings);
        posts.UpdatePartsToLatestInSeries();

        // Build the category list with per-category post counts, sorted by name.
        var categories = (from c in posts.SelectMany(x => x.Categories)
                          group c by c
                          into g
                          select new Category { Name = g.Key, Count = g.Count() }).OrderBy(cat => cat.Name).ToList();

        // TestModule statics act as shared state consumed by the composing browser below.
        TestModule.Posts = posts;
        TestModule.Categories = categories;
        TestModule.PostsGroupedByYearThenMonth = GroupStuff(posts);
        TestModule.MonthYear = GroupMonthYearArchive(posts);
        TestModule.Settings = settings;

        // Browser wired with the output view engines - used to render final HTML.
        var browserComposer = new Browser(with =>
        {
            with.Module<TestModule>();
            with.RootPathProvider<StaticPathProvider>();
            with.ViewEngines(typeof(SuperSimpleViewEngineWrapper), typeof(RazorViewEngine));
        });

        // Compile all Posts
        posts.ForEach(x => ComposeParsedFiles(x, settings.Output, browserComposer));

        // Compile all static files
        settings.ProcessFiles.ForEach(x => ProcessFiles(x, settings, posts, browserComposer));

        // Mirror each configured directory into the output, overwriting existing files.
        foreach (var copyDirectory in settings.CopyDirectories)
        {
            var source = Path.Combine(settings.CurrentDir, copyDirectory);
            var destination = Path.Combine(settings.Output, copyDirectory);
            new DirectoryInfo(source).Copy(destination, true);
        }

        if (commands.ContainsKey("debug"))
        {
            DebugHelperExtensions.WaitForContinue();
        }

        Console.WriteLine("Sandra.Snow : " + DateTime.Now.ToString("HH:mm:ss") + " : Finish processing");
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        DebugHelperExtensions.WaitForContinue();
    }
}
/// <summary>
/// Synchronizes the local notes database and note files with a second local
/// location (two-way when the destination database already exists, one-way
/// push otherwise). Works on temp copies of both databases, exchanges rows
/// and note files based on last-write times, then copies the merged results
/// back. Always raises <c>SyncComplete</c> with the outcome.
/// </summary>
/// <param name="localPaths">Expected to be a string[2]: [0] = destination notes
/// directory, [1] = destination database directory ("" falls back to [0]).</param>
internal void SyncLocal(object localPaths)
{
    var result = LocalSyncResult.Reload;
    var filesSrc = new List<_NoteFile>();
    var filesDest = new List<_NoteFile>();
    var tempDir = "";
    try
    {
        if (PNStatic.Settings.Network.SaveBeforeSync)
        {
            PNStatic.FormMain.ApplyAction(MainDialogAction.SaveAll, null);
        }
        var paths = localPaths as string[];
        if (paths == null || paths.Length != 2)
        {
            throw new ArgumentException("Invalid local paths");
        }
        var destDir = paths[0].Trim();
        // Empty db path means the db lives next to the notes.
        var destDB = paths[1].Trim() != "" ? paths[1].Trim() : destDir;
        destDB = Path.Combine(destDB, PNStrings.DB_FILE);
        // create temp directory - all db work happens on copies, never in place
        tempDir = createTempDir();
        var tempDBSrc = Path.Combine(tempDir, PNStrings.DB_FILE);
        var tempDBDest = Path.Combine(tempDir, PNStrings.TEMP_DB_FILE);
        // copy source db
        File.Copy(PNPaths.Instance.DBPath, tempDBSrc, true);
        // build source connection string
        var srcConnectionString = "data source=\"" + tempDBSrc + "\"";
        using (var eSrc = new SQLiteDataObject(srcConnectionString))
        {
            // drop triggers so bulk row exchange does not fire them
            eSrc.Execute(PNStrings.DROP_TRIGGERS);
            // list all source note files
            var srcNotes = new DirectoryInfo(PNPaths.Instance.DataDir).GetFiles("*" + PNStrings.NOTE_EXTENSION);
            filesSrc.AddRange(srcNotes.Select(fi => new _NoteFile { Path = fi.FullName, Name = fi.Name, Copy = false }));
            // exclude notes sitting in the recycle bin unless configured otherwise
            if (!PNStatic.Settings.Network.IncludeBinInSync)
            {
                var deletedSrc = deletedIDs(eSrc);
                filesSrc.RemoveAll(nf => deletedSrc.Contains(Path.GetFileNameWithoutExtension(nf.Name)));
            }
            // list all destination note files
            var destNotes = new DirectoryInfo(destDir).GetFiles("*" + PNStrings.NOTE_EXTENSION);
            filesDest.AddRange(destNotes.Select(fir => new _NoteFile { Path = fir.FullName, Name = fir.Name, Copy = false }));
            if (File.Exists(destDB))
            {
                // copy destination db to temp directory
                File.Copy(destDB, tempDBDest, true);
                // build connection string
                var destConnectionString = "data source=\"" + tempDBDest + "\"";
                using (var eDest = new SQLiteDataObject(destConnectionString))
                {
                    // drop triggers
                    eDest.Execute(PNStrings.DROP_TRIGGERS);
                    // incompatible schemas (different app versions) - abort the sync
                    if (areTablesDifferent(eSrc, eDest))
                    {
                        if (SyncComplete != null)
                        {
                            SyncComplete(this, new LocalSyncCompleteEventArgs(LocalSyncResult.AbortVersion));
                        }
                        return;
                    }
                    // collect column metadata (name/type/notnull) per table via pragma table_info
                    var tablesData = new Dictionary<string, List<_FieldData>>();
                    foreach (var tn in _Tables)
                    {
                        var td = new List<_FieldData>();
                        var sb = new StringBuilder("pragma table_info('");
                        sb.Append(tn);
                        sb.Append("')");
                        using (var t = eSrc.FillDataTable(sb.ToString()))
                        {
                            td.AddRange(from DataRow r in t.Rows
                                        select new _FieldData
                                        {
                                            Name = Convert.ToString(r["name"]),
                                            Type = Convert.ToString(r["type"]),
                                            NotNull = Convert.ToBoolean(r["notnull"])
                                        });
                        }
                        tablesData.Add(tn, td);
                    }
                    // exclude destination notes that are in the destination recycle bin
                    if (!PNStatic.Settings.Network.IncludeBinInSync)
                    {
                        var deletedDest = deletedIDs(eDest);
                        filesDest.RemoveAll(nf => deletedDest.Contains(Path.GetFileNameWithoutExtension(nf.Name)));
                    }
                    foreach (var sf in filesSrc)
                    {
                        var id = Path.GetFileNameWithoutExtension(sf.Name);
                        // find destination file with same name
                        var df = filesDest.FirstOrDefault(f => f.Name == sf.Name);
                        if (df == null)
                        {
                            // note exists only locally: push file and insert its rows remotely
                            sf.Copy = true;
                            if (!insertToAllTables(eSrc, eDest, id, tablesData))
                            {
                                if (SyncComplete != null)
                                {
                                    SyncComplete(this, new LocalSyncCompleteEventArgs(LocalSyncResult.Error));
                                }
                                return;
                            }
                        }
                        else
                        {
                            // note exists on both sides: keep whichever file is newer
                            var dSrc = File.GetLastWriteTime(sf.Path);
                            var dDest = File.GetLastWriteTime(df.Path);
                            if (dSrc > dDest)
                            {
                                // only copy when contents actually differ
                                if (areFilesDifferent(sf.Path, df.Path))
                                {
                                    // local file is younger than remote - copy it to remote client
                                    sf.Copy = true;
                                }
                            }
                            else if (dSrc < dDest)
                            {
                                // only copy when contents actually differ
                                if (areFilesDifferent(sf.Path, df.Path))
                                {
                                    // remote file is younger than local - copy it to local directory
                                    df.Copy = true;
                                }
                            }
                            // merge the note's db rows in both directions
                            if (!exchangeData(eSrc, eDest, id, tablesData))
                            {
                                if (SyncComplete != null)
                                {
                                    SyncComplete(this, new LocalSyncCompleteEventArgs(LocalSyncResult.Error));
                                }
                                return;
                            }
                        }
                    }
                    // destination-only notes: pull them locally and merge their rows
                    var remDest = filesDest.Where(df => !df.Copy);
                    foreach (var df in remDest)
                    {
                        if (filesSrc.All(sf => sf.Name != df.Name))
                        {
                            df.Copy = true;
                            var id = Path.GetFileNameWithoutExtension(df.Name);
                            if (!exchangeData(eSrc, eDest, id, tablesData))
                            {
                                if (SyncComplete != null)
                                {
                                    SyncComplete(this, new LocalSyncCompleteEventArgs(LocalSyncResult.Error));
                                }
                                return;
                            }
                        }
                    }
                    // synchronize groups
                    if (!exchangeGroups(eSrc, eDest, tablesData.FirstOrDefault(td => td.Key == "GROUPS")))
                    {
                        if (SyncComplete != null)
                        {
                            SyncComplete(this, new LocalSyncCompleteEventArgs(LocalSyncResult.Error));
                        }
                        return;
                    }
                    // restore triggers before the merged dbs are copied back
                    eSrc.Execute(PNStrings.CREATE_TRIGGERS);
                    eDest.Execute(PNStrings.CREATE_TRIGGERS);
                }
                // copy the note files flagged for transfer, in both directions
                var filesToCopy = filesSrc.Where(sf => sf.Copy);
                foreach (var sf in filesToCopy)
                {
                    var newPath = Path.Combine(destDir, sf.Name);
                    File.Copy(sf.Path, newPath, true);
                }
                filesToCopy = filesDest.Where(df => df.Copy);
                foreach (var df in filesToCopy)
                {
                    var newPath = Path.Combine(PNPaths.Instance.DataDir, df.Name);
                    File.Copy(df.Path, newPath, true);
                }
                // nothing arrived locally, so no reload of local notes is needed
                if (filesDest.Count(df => df.Copy) == 0) result = LocalSyncResult.None;
                // copy synchronized db files back over the originals
                File.Copy(tempDBSrc, PNPaths.Instance.DBPath, true);
                File.Copy(tempDBDest, destDB, true);
            }
            else
            {
                // no destination db yet: restore triggers on the source copy ...
                eSrc.Execute(PNStrings.CREATE_TRIGGERS);
                // ... and just push all note files and the db file to the remote client
                File.Copy(PNPaths.Instance.DBPath, destDB, true);
                foreach (var sf in filesSrc)
                {
                    var newPath = Path.Combine(destDir, sf.Name);
                    File.Copy(sf.Path, newPath, true);
                }
                result = LocalSyncResult.None;
            }
        }
        if (SyncComplete != null)
        {
            SyncComplete(this, new LocalSyncCompleteEventArgs(result));
        }
    }
    catch (Exception ex)
    {
        PNStatic.LogException(ex);
        if (SyncComplete != null)
        {
            SyncComplete(this, new LocalSyncCompleteEventArgs(LocalSyncResult.Error));
        }
    }
    finally
    {
        // always remove the temp working directory, success or failure
        if (tempDir != "" && Directory.Exists(tempDir))
        {
            Directory.Delete(tempDir, true);
        }
    }
}
/// <summary>
/// Find the dataset in one of the directories.
/// </summary>
/// <param name="dirs">Candidate parent directories to probe, in priority order.</param>
/// <param name="dsname">Dataset name; sanitized before being used as a folder name.</param>
/// <returns>The DirectoryInfo object of the dataset directory if found. Null otherwise.</returns>
private static DirectoryInfo FindDataset(DirectoryInfo[] dirs, string dsname)
{
    var folderName = dsname.SantizeDSName();

    // Return the first parent directory that actually contains the dataset folder.
    foreach (var parent in dirs)
    {
        var candidate = new DirectoryInfo(Path.Combine(parent.FullName, folderName));
        if (candidate.Exists)
        {
            return candidate;
        }
    }
    return null;
}
/// <summary>
/// Rescans the active directory looking for valid log files
/// </summary>
/// <returns>
/// <see langword="true" /> if we were able to open a log file;
/// otherwise, <see langword="false" />.
/// </returns>
protected bool ScanFiles()
{
    Contract.Requires<InvalidOperationException>(this.IsRunning);
    Contract.Ensures((this.Status == IntelStatus.Waiting) || (this.Status == IntelStatus.Active) || (this.Status == IntelStatus.InvalidPath));
    try
    {
        var logFiles = new DirectoryInfo(this.Path)
            .GetFiles(this.Name + "_*.txt", SearchOption.TopDirectoryOnly);
        Contract.Assert(logFiles != null);

        var cutoff = IntelExtensions.LastDowntime;

        // Pair each file with the UTC timestamp encoded in its name, keep only
        // files newer than the last downtime, newest first. OrderByDescending is
        // stable, so ties retain their enumeration order.
        var candidates = logFiles
            .Select(f => new { File = f, Match = FilenameParser.Match(f.Name) })
            .Where(e => e.Match.Success)
            .Select(e => new
            {
                e.File,
                Timestamp = new DateTime(
                    e.Match.Groups[1].ToInt32(),
                    e.Match.Groups[2].ToInt32(),
                    e.Match.Groups[3].ToInt32(),
                    e.Match.Groups[4].ToInt32(),
                    e.Match.Groups[5].ToInt32(),
                    e.Match.Groups[6].ToInt32(),
                    DateTimeKind.Utc)
            })
            .Where(e => e.Timestamp > cutoff)
            .OrderByDescending(e => e.Timestamp);

        // Open the newest file that will actually open; stop at the first success.
        foreach (var candidate in candidates)
        {
            if (this.OpenFile(candidate.File))
            {
                return true;
            }
        }

        // Nothing usable - make sure no stale file stays open.
        this.CloseFile();
        return false;
    }
    catch (IOException)
    {
        return false;
    }
}