// Archive entries arrive as streams: persist the entry to a temp file and
// delegate to the file-based overload of Search.
public List<GrepSearchResult> Search(Stream input, string fileName, string searchPattern, SearchType searchType, GrepSearchOption searchOptions, Encoding encoding)
{
    // fileName may carry the entry's relative folder structure from inside the
    // archive, so build the full destination path under a dedicated temp folder.
    string extractRoot = Path.Combine(Utils.GetTempFolder(), "dnGREP-PDF");
    string extractedFile = Path.Combine(extractRoot, fileName);

    // Make sure any folders embedded in the archive path exist on disk.
    string parentDir = Path.GetDirectoryName(extractedFile);
    if (!Directory.Exists(parentDir))
    {
        Directory.CreateDirectory(parentDir);
    }

    // Copy the entire stream, from the start, into the temp file.
    using (var destination = File.Create(extractedFile))
    {
        input.Seek(0, SeekOrigin.Begin);
        input.CopyTo(destination);
    }

    // Run the regular file-based search on the extracted copy.
    return Search(extractedFile, searchPattern, searchType, searchOptions, encoding);
}
// Persists the CSL currently in the editor to disk, reports any validation
// problems, and kicks off citation generation for the preview pane.
private void RunItJoe()
{
    string cslSource = ObjCSLEditor.Text;
    string prepared_citation_javascript = ObjJavaScriptEditor.Text;

    // The CSL processor works from a file, so write the editor contents first.
    string style_file_filename = Path.GetFullPath(Path.Combine(TempFile.TempDirectoryForQiqqa, @"CSLEditor_CSL.csl"));
    File.WriteAllText(style_file_filename, cslSource);

    // Surface any CSL validation messages in the log window (none for a clean style).
    foreach (string line in CSLVerifier.Verify(style_file_filename))
    {
        LogMessage(line);
    }

    CSLProcessor.GenerateCSLEditorCitations(style_file_filename, prepared_citation_javascript, OnBibliographyReady);
}
// Asserts that a mod file exists both in the source MO2 mods folder and in
// the install output, and that the two copies are byte-for-byte identical.
public void VerifyInstalledFile(string mod, string file)
{
    var sourcePath = Path.Combine(MO2Folder, "mods", mod, file);
    Assert.IsTrue(File.Exists(sourcePath), sourcePath);

    var installedPath = Path.Combine(InstallFolder, "mods", mod, file);
    Assert.IsTrue(File.Exists(installedPath), installedPath);

    var expected = File.ReadAllBytes(sourcePath);
    var actual = File.ReadAllBytes(installedPath);
    Assert.AreEqual(expected.Length, actual.Length);

    // Compare byte-by-byte so a mismatch reports the exact offset.
    for (int i = 0; i < expected.Length; i++)
    {
        if (expected[i] != actual[i])
        {
            Assert.Fail($"Index {i} of {mod}\\{file} are not the same");
        }
    }
}
// Detects Dropbox/cloud-sync conflicts by writing a per-machine marker file
// into the Qiqqa base directory and checking for markers from other machines.
internal static void DoCheck()
{
    try
    {
        Logging.Info("Checking for Dropbox conflicts in {0} for machine {1}", ConfigurationManager.Instance.BaseDirectoryForQiqqa, Environment.MachineName);

        // Drop a marker file stamped with this machine's name.
        string marker_path = Path.GetFullPath(Path.Combine(ConfigurationManager.Instance.BaseDirectoryForQiqqa, PREAMBLE_FILENAME + @"." + Environment.MachineName + @".txt"));
        File.WriteAllText(marker_path, WARNING);

        // Any additional marker means another machine shares this directory.
        string[] markers = Directory.GetFiles(ConfigurationManager.Instance.BaseDirectoryForQiqqa, PREAMBLE_FILENAME + @"*", SearchOption.TopDirectoryOnly);
        if (markers.Length > 1)
        {
            // We have a problem, Houston... record the event for analytics.
            FeatureTrackingManager.Instance.UseFeature(Features.Diagnostics_DropBox);

            // And warn the user about syncing the library with 3rd-party cloud tools.
            NotificationManager.Instance.AddPendingNotification(
                new NotificationManager.Notification(
                    WARNING,
                    "Danger using 3rd party cloud tools!",
                    NotificationManager.NotificationType.Warning,
                    Icons.No,
                    "I understand!",
                    IUnderstand
                )
            );
        }
    }
    catch (Exception ex)
    {
        Logging.Error(ex, "There was a problem checking for Dropbox.");
    }
}
// Verifies that a modlist containing a cleaned game ESM installs correctly,
// and that installation fails with InvalidGameESMError once the game file no
// longer matches the hash recorded at compile time.
public async Task CleanedESMTest()
{
    var profile = utils.AddProfile();
    var mod = utils.AddMod("Cleaned ESMs");
    var update_esm = utils.AddModFile(mod, @"Update.esm", 10);

    utils.Configure();

    var game_file = Path.Combine(utils.GameFolder, "Data", "Update.esm");
    utils.GenerateRandomFileData(game_file, 20);

    var modlist = await CompileAndInstall(profile);
    utils.VerifyInstalledFile(mod, @"Update.esm");

    var compiler = await ConfigureAndRunCompiler(profile);

    // Changing the game file after compiling must make the install fail.
    utils.GenerateRandomFileData(game_file, 20);

    var exception = await Assert.ThrowsExceptionAsync<InvalidGameESMError>(async () => await Install(compiler));
    Assert.IsInstanceOfType(exception, typeof(InvalidGameESMError));
}
// Regression test for Issue #227: replacing text in a UTF-8 file that has no
// byte order mark must not introduce a BOM into the rewritten file.
public void TestReplaceOnFileWithout_UTF8_BOM(SearchType type, GrepSearchOption option, string searchFor, string replaceWith)
{
    Utils.CopyFiles(sourceFolder + "\\TestCase15", destinationFolder + "\\TestCase15", null, null);

    GrepCore core = new GrepCore();
    List<GrepSearchResult> results = core.Search(Directory.GetFiles(destinationFolder + "\\TestCase15", "books.xml"), type, searchFor, option, -1);
    Assert.Equal(1, results.Count);

    string testFile = Path.Combine(destinationFolder, @"TestCase15\books.xml");
    Dictionary<string, string> files = new Dictionary<string, string>
    {
        { testFile, Guid.NewGuid().ToString() + ".xml" }
    };
    core.Replace(files, type, searchFor, replaceWith, option, -1);

    using (FileStream stream = File.Open(testFile, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (StreamReader reader = new StreamReader(stream, true))
    {
        Assert.Equal(Encoding.UTF8, reader.CurrentEncoding);

        // The first bytes must not be the UTF-8 BOM (EF BB BF); the file
        // should start directly with the XML declaration.
        int firstByte = reader.BaseStream.ReadByte();
        Assert.NotEqual(0xEF, firstByte);
        Assert.Equal('<', firstByte);
        int nextByte = reader.BaseStream.ReadByte();
        Assert.NotEqual(0xBB, nextByte);
        nextByte = reader.BaseStream.ReadByte();
        Assert.NotEqual(0xBF, nextByte);
    }

    // Spot-check the replaced file still reads back line-for-line as expected.
    var fileContent = File.ReadAllLines(destinationFolder + "\\TestCase15\\books.xml", Encoding.UTF8);
    Assert.Equal(38, fileContent.Length);
    Assert.Equal("<?xml version=\"1.0\" encoding=\"UTF-8\"?>", fileContent[0]);
    Assert.Equal(" <book category=\"general\">", fileContent[8]);
}
// Verifies that a modlist containing a cleaned game ESM installs correctly,
// and that installation fails once the game file no longer matches the hash
// recorded at compile time.
public void CleanedESMTest()
{
    var profile = utils.AddProfile();
    var mod = utils.AddMod("Cleaned ESMs");
    var update_esm = utils.AddModFile(mod, @"Update.esm", 10);

    utils.Configure();

    var game_file = Path.Combine(utils.GameFolder, "Data", "Update.esm");
    utils.GenerateRandomFileData(game_file, 20);

    var modlist = CompileAndInstall(profile);
    utils.VerifyInstalledFile(mod, @"Update.esm");

    var compiler = ConfigureAndRunCompiler(profile);

    // Update the file and verify that installing now throws.
    utils.GenerateRandomFileData(game_file, 20);

    var exception = Assert.ThrowsException<Exception>(() => Install(compiler));

    // BUG FIX: MSTest's Assert.AreEqual signature is (expected, actual); the
    // original call had the arguments reversed, which yields a misleading
    // "Expected:<actual>. Actual:<expected>" message on failure.
    Assert.AreEqual("Game ESM hash doesn't match, is the ESM already cleaned? Please verify your local game files.", exception.Message);
}
// Persists the current mod list into LauncherData.xml, backing up the
// previous file first. Returns false (with errorMessage set) when saving is
// not permitted or the write fails.
public bool Save(out string errorMessage)
{
    errorMessage = default;
    if (!this._client.CanSave())
    {
        return false;
    }

    var launcherDataFile = Path.Combine(this._basePath, "LauncherData.xml");

    // Start from the existing launcher data, when present, so unrelated
    // settings survive the save.
    var launcherData = UserData.Load(this, launcherDataFile) ?? new UserData();
    if (launcherData.SingleplayerData == null)
    {
        launcherData.SingleplayerData = new UserGameTypeData();
    }

    // Rebuild the singleplayer mod list from scratch.
    launcherData.SingleplayerData.ModDatas.Clear();
    foreach (var mod in this.Mods)
    {
        launcherData.SingleplayerData.ModDatas.Add(mod.UserModData);
    }

    this.BackupFile(launcherDataFile);
    try
    {
        launcherData.Save(launcherDataFile);
    }
    catch (Exception e)
    {
        errorMessage = "Exception when trying to save the mod list. See the log for details";
        this.Log().Error(e);
        return false;
    }

    this.AnalyzeAssemblies();
    return true;
}
// Recursively adds the snapshot copy of vmPath (a file or directory) to the
// compression list. Files are opened read-only and tracked in both 'files'
// (archive-relative name -> stream) and 'streams' (for later disposal by the
// caller). Known transient Hyper-V artifacts missing from the snapshot are
// silently skipped; any other missing entry indicates a broken snapshot.
private static void AddPathToCompressionList(IDictionary<string, Stream> files, ICollection<Stream> streams, string snapshotPath, int volumePathLength, string vmPath)
{
    // Map the original volume-relative path onto the snapshot location.
    var srcPath = Path.Combine(snapshotPath, vmPath.Substring(volumePathLength));

    if (Directory.Exists(srcPath))
    {
        // Recurse into directory entries, rebuilding each child's original path.
        foreach (var srcChildPath in Directory.GetFileSystemEntries(srcPath))
        {
            // BUG FIX: the original tested Path.PathSeparator (';', the PATH
            // environment-variable separator) with a culture-sensitive compare.
            // The directory separator with an ordinal compare is what's needed
            // to decide whether snapshotPath already ends with a separator.
            var srcChildPathRel = srcChildPath.Substring(
                snapshotPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal)
                    ? snapshotPath.Length
                    : snapshotPath.Length + 1);
            var childPath = Path.Combine(vmPath.Substring(0, volumePathLength), srcChildPathRel);
            AddPathToCompressionList(files, streams, snapshotPath, volumePathLength, childPath);
        }
    }
    else if (File.Exists(srcPath))
    {
        // Keep the stream open for the compressor; caller owns disposal via 'streams'.
        var s = File.OpenRead(srcPath);
        files.Add(vmPath.Substring(volumePathLength), s);
        streams.Add(s);
    }
    else
    {
        // Snapshot-transient Hyper-V files may legitimately be absent.
        var lowerPath = srcPath.ToLowerInvariant();
        var isIgnorable = lowerPath.EndsWith(".avhdx", StringComparison.Ordinal)
            || lowerPath.EndsWith(".vmrs", StringComparison.Ordinal)
            || lowerPath.EndsWith(".bin", StringComparison.Ordinal)
            || lowerPath.EndsWith(".vsv", StringComparison.Ordinal);
        if (!isIgnorable)
        {
            // Bare Exception kept for caller compatibility.
            throw new Exception($"Entry \"{srcPath}\" not found in snapshot");
        }
    }
}
// Exports the given documents as a tab-separated table: one row per document
// with fingerprint, exported filename, BibTeX key/type, and one column for
// every BibTeX field seen anywhere in the collection. Output is written to
// <base_path>/Qiqqa.BibTeX.tab.
internal static void Export(WebLibraryDetail web_library_detail, List<PDFDocument> pdf_documents, string base_path, Dictionary<string, PDFDocumentExportItem> pdf_document_export_items)
{
    Logging.Info("Exporting entries to BibTeXTAB separated");

    // First work out what fields are available across ALL documents, so every
    // row can share a single, sorted column layout.
    List<string> field_names = null;
    {
        HashSet<string> field_names_set = new HashSet<string>();
        for (int i = 0; i < pdf_documents.Count; ++i)
        {
            PDFDocument pdf_document = pdf_documents[i];
            if (!String.IsNullOrEmpty(pdf_document.BibTex))
            {
                BibTexItem item = BibTexParser.ParseOne(pdf_document.BibTex, true);
                if (null != item)
                {
                    foreach (var field in item.Fields)
                    {
                        // Field names are unified case-insensitively via lowercasing.
                        field_names_set.Add(field.Key.ToLower());
                    }
                }
            }
        }
        field_names = new List<string>(field_names_set);
        field_names.Sort();
    }

    // Write out the header comment block.
    DateTime now = DateTime.Now;
    StringBuilder sb = new StringBuilder();
    sb.AppendLine("% -------------------------------------------------------------------------");
    sb.AppendLine(String.Format("% This tab separated file was generated by Qiqqa ({0}?ref=EXPTAB)", Common.Configuration.WebsiteAccess.Url_Documentation4Qiqqa));
    sb.AppendLine(String.Format("% {0} {1}", now.ToLongDateString(), now.ToLongTimeString()));
    sb.AppendLine("% Version 1");
    sb.AppendLine("% -------------------------------------------------------------------------");
    sb.AppendLine();

    // Headers
    sb.AppendFormat("{0}\t", "Fingerprint");
    sb.AppendFormat("{0}\t", "Filename");
    sb.AppendFormat("{0}\t", "BibTexKey");
    sb.AppendFormat("{0}\t", "BibTexType");
    foreach (string field_name in field_names)
    {
        sb.AppendFormat("{0}\t", FormatFreeText(field_name));
    }
    sb.AppendLine();

    // Write out the entries, one row per document.
    for (int i = 0; i < pdf_documents.Count; ++i)
    {
        StatusManager.Instance.UpdateStatus("TabExport", String.Format("Exporting entry {0} of {1}", i, pdf_documents.Count), i, pdf_documents.Count);

        PDFDocument pdf_document = pdf_documents[i];
        sb.AppendFormat("{0}\t", pdf_document.Fingerprint);
        // Filename column is blank when the document was not part of the export set.
        sb.AppendFormat("{0}\t", pdf_document_export_items.ContainsKey(pdf_document.Fingerprint) ? pdf_document_export_items[pdf_document.Fingerprint].filename : "");
        try
        {
            if (!String.IsNullOrEmpty(pdf_document.BibTex))
            {
                BibTexItem item = BibTexParser.ParseOne(pdf_document.BibTex, true);
                if (null != item)
                {
                    sb.AppendFormat("{0}\t", item.Key);
                    sb.AppendFormat("{0}\t", item.Type);
                    foreach (string field_name in field_names)
                    {
                        sb.AppendFormat("{0}\t", item.ContainsField(field_name) ? FormatFreeText(item[field_name]) : "");
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // A malformed BibTeX entry must not abort the whole export.
            Logging.Error(ex, "There was a problem exporting the tab representation for document {0}", pdf_document.Fingerprint);
        }
        sb.AppendLine();
    }

    // Write to disk
    string filename = Path.GetFullPath(Path.Combine(base_path, @"Qiqqa.BibTeX.tab"));
    File.WriteAllText(filename, sb.ToString());

    StatusManager.Instance.UpdateStatus("TabExport", String.Format("Exported your BibTeX tab entries to {0}", filename));
}
// Bundles the Qiqqa log files (plus a machine-diagnostics snapshot) into a
// user-chosen 7z archive via the external 7-zip executable, then reveals the
// archive in Explorer.
internal static void DoBundle()
{
    try
    {
        // Try get this info for us which could be useful...
        // If things are really broken, it may fail, that's ok.
        string environment_details_filename = null;
        try
        {
            string environmentDetails = "Generated at:" + DateTime.UtcNow.ToString("yyyyMMdd HH:mm:ss") + Environment.NewLine;
            environmentDetails += ComputerStatistics.GetCommonStatistics();
            environment_details_filename = TempFile.GenerateTempFilename("txt");
            File.WriteAllText(environment_details_filename, environmentDetails);
        }
        catch (Exception ex)
        {
            Logging.Warn(ex, "Could not get environment details");
        }

        // Get the destination location from the user.
        SaveFileDialog save_file_dialog = new SaveFileDialog();
        save_file_dialog.AddExtension = true;
        save_file_dialog.CheckPathExists = true;
        save_file_dialog.DereferenceLinks = true;
        save_file_dialog.OverwritePrompt = true;
        save_file_dialog.ValidateNames = true;
        save_file_dialog.DefaultExt = "7z";
        save_file_dialog.Filter = "7Z files (*.7z)|*.7z|All files (*.*)|*.*";
        save_file_dialog.FileName = "QiqqaLogs.7z";

        // Generate and save
        if (true == save_file_dialog.ShowDialog())
        {
            string target_filename = save_file_dialog.FileName;

            // Wildcard pattern covering all (possibly rotated) Qiqqa log files.
            string file_list = Path.GetFullPath(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData, Environment.SpecialFolderOption.Create), @"Quantisle/Qiqqa/Logs", @"Qiqqa*.log*"));
            if (environment_details_filename != null)
            {
                // Append the diagnostics file as a second, quoted 7-zip argument.
                file_list += " \"" + environment_details_filename + "\"";
            }

            // Delete the target filename if it exists...
            FileTools.Delete(target_filename);

            // STDOUT/STDERR of the 7-zip child process is captured below.
            string process_parameters = String.Format("a -t7z -mmt=on -mx9 -ssw \"{0}\" \"{1}\"", target_filename, file_list);
            using (Process process = ProcessSpawning.SpawnChildProcess(ConfigurationManager.Instance.Program7ZIP, process_parameters, ProcessPriorityClass.Normal))
            {
                using (ProcessOutputReader process_output_reader = new ProcessOutputReader(process))
                {
                    process.WaitForExit();

                    Logging.Info("7ZIP Log Bundling progress:\n{0}", process_output_reader.GetOutputsDumpString());
                }

                MessageBoxes.Info("The Qiqqa logs with some diagnostic info have been zipped to the location you specified. Please upload it as issue attachment in your issue filed at https://github.com/jimmejardine/qiqqa-open-source/issues if the support team has requested it. Many thanks!");

                FileTools.BrowseToFileInExplorer(target_filename);
            }
        }

        // Clean up the temporary diagnostics file (null-safe in FileTools.Delete).
        FileTools.Delete(environment_details_filename);
    }
    catch (Exception ex)
    {
        Logging.Warn(ex, "Problem zipping logs");
        MessageBoxes.Error("Unfortunately there was a problem creating the log bundle. Please zip them manually, they are found at C:\\Temp\\Qiqqa.log*. There may be more than one. Thanks!");
    }
}
/// <summary>
/// Saves settings to the default location: baseFolder\dnGREP.Settings.dat
/// </summary>
public void Save()
{
    string defaultPath = Path.Combine(Utils.GetDataFolderPath(), storageFileName);
    Save(defaultPath);
}
/// <summary>
/// Loads settings from the default location: baseFolder\dnGREP.Settings.dat
/// </summary>
public void Load()
{
    string defaultPath = Path.Combine(Utils.GetDataFolderPath(), storageFileName);
    Load(defaultPath);
}
// DB-explorer "GET" handler: fetches library items matching the fingerprint /
// extension filters, shows the decoded metadata in the text box, and — for
// multi-result queries — also dumps the output to
// Qiqqa.DBexplorer.QueryDump.txt for diagnostics.
private void ButtonGet_Click(object sender, RoutedEventArgs e)
{
    if (null == library)
    {
        MessageBoxes.Error("You must choose a library...");
        return;
    }

    TxtData.Text = "";

    // A non-numeric max-record input is treated as "no limit" (0).
    int MaxRecordCount;
    if (!int.TryParse(MaxNumberOfRecords.Text, out MaxRecordCount))
    {
        MaxRecordCount = 0;
    }

    var items = library.LibraryDB.GetLibraryItems(TxtFingerprint.Text, TxtExtension.Text, MaxRecordCount);
    if (0 == items.Count)
    {
        MessageBoxes.Warn("No entry was found.");
    }
    else if (1 == items.Count)
    {
        // Single hit: just show the raw JSON metadata.
        byte[] data = items[0].data;
        string json = Encoding.UTF8.GetString(data);
        TxtData.Text = json;
    }
    else
    {
        MessageBoxes.Warn("{0} entries were found; we're showing them all but you'll only be able to PUT/WRITE the first one!", items.Count);

        StringBuilder allstr = new StringBuilder();
        for (int i = 0; i < items.Count; i++)
        {
            // Separator between successive records.
            if (i > 0)
            {
                allstr.Append("\n\n==========================================================\n\n");
            }

            LibraryDB.LibraryItem item = items[i];
            byte[] data = item.data;
            string json = Encoding.UTF8.GetString(data);
            allstr.AppendLine(json);
            allstr.Append("\n--------------(decoded metadata)--------------------------\n");
            allstr.AppendLine(string.Format("fingerprint: {0}", item.fingerprint));
            allstr.AppendLine(string.Format("extension: {0}", item.extension));
            allstr.AppendLine(string.Format("MD5 hash: {0}", item.md5));

            try
            {
                // Decode the stored metadata into a PDFDocument so its BibTeX
                // can be inspected / re-rendered for diagnostics.
                PDFDocument doc = PDFDocument.LoadFromMetaData(library, item.data, null);
                string bibtexStr = doc.BibTex;
                if (null == bibtexStr)
                {
                    bibtexStr = "--(NULL)--";
                }
                else if (String.IsNullOrWhiteSpace(bibtexStr))
                {
                    bibtexStr = "--(EMPTY)--";
                }
                else
                {
                    BibTexItem bibtex = doc.BibTexItem;
                    string bibtexParseErrors;
                    string formattedBibStr;
                    string rawStr;
                    if (bibtex != null)
                    {
                        // Collect parser diagnostics, if any were recorded.
                        if (bibtex.Exceptions.Count > 0 || bibtex.Warnings.Count > 0)
                        {
                            bibtexParseErrors = bibtex.GetExceptionsAndMessagesString();
                        }
                        else
                        {
                            bibtexParseErrors = String.Empty;
                        }

                        formattedBibStr = bibtex.ToBibTex();
                        if (String.IsNullOrEmpty(formattedBibStr))
                        {
                            formattedBibStr = "--(EMPTY)--";
                        }

                        rawStr = bibtex.ToString();
                        if (String.IsNullOrEmpty(rawStr))
                        {
                            rawStr = "--(EMPTY)--";
                        }
                    }
                    else
                    {
                        bibtexParseErrors = "ERROR: This content is utterly INVALID BibTeX as far as the BibTeX parser is concerned!";
                        formattedBibStr = String.Empty;
                        rawStr = String.Empty;
                    }

                    if (!String.IsNullOrEmpty(formattedBibStr))
                    {
                        allstr.AppendLine(string.Format("\nBibTeX Formatted:\n {0}", formattedBibStr.Replace("\n", "\n ")));
                    }
                    if (!String.IsNullOrEmpty(rawStr))
                    {
                        allstr.AppendLine(string.Format("\nBibTeX RAW FMT:\n {0}", rawStr.Replace("\n", "\n ")));
                    }
                    if (!String.IsNullOrEmpty(bibtexParseErrors))
                    {
                        allstr.AppendLine(string.Format("\nBibTeX Parse Diagnostics:\n {0}", bibtexParseErrors.Replace("\n", "\n ")));
                    }
                }

                allstr.AppendLine(string.Format("\nBibTeX RAW INPUT:\n {0}", bibtexStr.Replace("\n", "\n ")));
            }
            catch (Exception ex)
            {
                allstr.AppendLine(string.Format("*** PARSE ERROR ***:\n {0}", ex.ToString().Replace("\n", "\n ")));
            }
        }

        // also dump the output to file (for diagnostics)
        string path = Path.GetFullPath(Path.Combine(library.LIBRARY_BASE_PATH, @"Qiqqa.DBexplorer.QueryDump.txt"));

        // overwrite previous query dump:
        using (StreamWriter sr = new StreamWriter(path, false /* overwrite */))
        {
            sr.WriteLine(allstr);
        }

        TxtData.Text = allstr.ToString();
    }
}
/// <summary>
/// Builds the full path of a document file inside the library's document store.
/// Do not include the initial . in the filetype.
/// </summary>
/// <param name="web_library_detail">Library whose document store is used.</param>
/// <param name="fingerprint">Document fingerprint (also the base filename).</param>
/// <param name="file_type">File extension without the leading dot.</param>
/// <returns>Absolute path of the document file.</returns>
internal static string DocumentPath(WebLibraryDetail web_library_detail, string fingerprint, string file_type)
{
    string directory = DocumentBasePath(web_library_detail, fingerprint);
    string leafname = String.Format(@"{0}.{1}", fingerprint, file_type);
    return Path.GetFullPath(Path.Combine(directory, leafname));
}
// Loads all *.plugin descriptors under <app>/Plugins exactly once (guarded by
// lockObj; 'plugins == null' marks the not-yet-initialized state), skipping
// disabled or framework-incompatible plugins, and builds the
// extension -> pool-key and extension -> engine lookup tables.
private static void LoadPlugins()
{
    lock (lockObj)
    {
        if (plugins == null)
        {
            plugins = new List<GrepPlugin>();
            disabledPlugins.Clear();
            string pluginPath = Path.Combine(Utils.GetCurrentPath(), "Plugins");
            if (Directory.Exists(pluginPath))
            {
                foreach (string pluginFile in Directory.GetFiles(pluginPath, "*.plugin", SearchOption.AllDirectories))
                {
                    try
                    {
                        GrepPlugin plugin = new GrepPlugin(pluginFile);
                        if (plugin.LoadPluginSettings())
                        {
                            if (FrameworkVersionsAreCompatible(plugin.FrameworkVersion, FrameworkVersion))
                            {
                                if (plugin.Enabled)
                                {
                                    plugins.Add(plugin);

                                    // many file extensions will map to the same pool of engines,
                                    // so keep a common key for the set of extensions
                                    foreach (string ext in plugin.Extensions)
                                    {
                                        string fileExtension = ext.TrimStart('.');
                                        if (!poolKeys.ContainsKey(fileExtension))
                                        {
                                            poolKeys.Add(fileExtension, plugin.PluginName);
                                        }
                                    }

                                    logger.Debug(string.Format("Loading plugin: {0} for extensions {1}", plugin.DllFilePath, string.Join(", ", plugin.Extensions.ToArray())));
                                }
                                else
                                {
                                    disabledPlugins.Add(plugin);
                                    logger.Debug(string.Format("Plugin skipped, not enabled: {0}", plugin.DllFilePath));
                                }
                            }
                            else
                            {
                                logger.Error(string.Format("Plugin '{0}' developed under outdated framework. Please update the plugin.", Path.GetFileNameWithoutExtension(pluginFile)));
                            }
                        }
                        else
                        {
                            logger.Error(string.Format("Plugin {0} failed to load", plugin.DllFilePath));
                        }
                    }
                    catch (Exception ex)
                    {
                        // One bad plugin must not prevent the rest from loading.
                        logger.Log<Exception>(LogLevel.Error, "Failed to initialize " + Path.GetFileNameWithoutExtension(pluginFile) + " engine.", ex);
                    }
                }
            }

            // Map each claimed extension to the first enabled plugin that registered it.
            foreach (GrepPlugin plugin in plugins)
            {
                foreach (string extension in plugin.Extensions)
                {
                    if (extension != null)
                    {
                        string fileExtension = extension.TrimStart('.');
                        if (!string.IsNullOrWhiteSpace(fileExtension) && !fileTypeEngines.ContainsKey(fileExtension))
                        {
                            fileTypeEngines.Add(fileExtension, plugin);
                        }
                    }
                }
            }
        }
    }
}
// Post-processes parsed Zotero BibTeX entries: resolves each entry's "file"
// attachment field to a local PDF on disk (first application/pdf attachment
// whose file exists wins), and converts the "annote" field's pseudo-LaTeX
// markup back into plain-text notes. Returns the accumulated parse result.
public override ParseFileResult GetResult()
{
    //string matchFilenameRegex = "([^;:]*) : ([^;:]*) : ([^;:]*)";
    // Attachment fields have the shape "title:relative/path:mime-type".
    string matchFilenameRegex = "(.*?):(.*):(.*)";
    Regex rx = new Regex(matchFilenameRegex);

    foreach (var entry in Entries)
    {
        try
        {
            if (entry.Item.ContainsField("file"))
            {
                #region Do some post processing on the file

                /*
                 * Samples:
                 *
                 * file = {sample.pdf:files/10/sample.pdf:application/pdf}
                 * file = {Google Books Link:undefined:text/html}
                 * file = {chris bishop - Google Scholar:files/21/scholar.html:text/html}
                 *
                 * *NOTE THE MULTIPLE ATTACHMENTS HERE
                 * file = {Analytics_www.qiqqa.com_20100531_(Qiqqa_utilisation).pdf:files/24/Analytics_www.qiqqa.com_20100531_(Qiqqa_utilisation).pdf:application/pdf;Namecheap.com - Checkout : Secure Payment:files/26/payment.html:text/html}
                 */

                // Since there can be multiple attachments, we take the first one with mime type application/pdf, with a valid file.
                entry.FileType = null; // Assume we've not found a workable attachment

                string fileValue = entry.Item["file"];
                foreach (string attachmentValue in fileValue.Split(';'))
                {
                    Match match = rx.Match(attachmentValue);
                    if (match.Success)
                    {
                        string fileType = match.Groups[3].Value.ToLower();
                        if (String.CompareOrdinal(fileType, PDF_MIMETYPE) == 0)
                        {
                            string fn = match.Groups[2].Value;
                            fn = fn.Replace("\\_", "_"); // Zotero escapes underscores. I get the feeling from http://www.citeulike.org/groupforum/1245 that others might not.
                            //fn = fn.Replace("/", "\\"); // Convert to windows slashes for directories.

                            try
                            {
                                // Attachment paths are relative to the import base folder.
                                fn = Path.GetFullPath(Path.Combine(_importBasePath, fn));
                                if (File.Exists(fn))
                                {
                                    entry.FileType = "pdf"; // Normalized with other importers
                                    entry.Filename = fn;
                                    break; // We've found a suitable attachment
                                }
                            }
                            catch (Exception ex)
                            {
                                Logging.Warn(ex, "Ignoring problems with weird filenames like \"undefined\": {0}", fn);
                            }
                        }
                    }
                    else
                    {
                        Logging.Warn("Non-conformant file key in Zotero import: {0}", fileValue);
                    }
                }

                #endregion
            }

            // Parse notes.
            string notes = entry.Item["annote"];
            if (!String.IsNullOrEmpty(notes))
            {
                //annote = {\textless}p{\textgreater}zotnotes1{\textless}/p{\textgreater},

                // Turn back to pseudo html - we may want to support this format later.
                notes = notes.Replace(@"{\textless}", "<").Replace(@"{\textgreater}", ">");
                notes = notes.Replace("<p>", "\r"); //Some basic support

                // Remove tags
                notes = StringTools.StripHtmlTags(notes, " ");

                // Change N to 1 repeated spaces due to missing tags
                notes = Regex.Replace(notes, @"[\s]+", " ", RegexOptions.Singleline | RegexOptions.IgnoreCase);

                entry.Notes = notes.Trim();
            }
        }
        catch (Exception ex)
        {
            // One malformed entry must not abort the whole import.
            Logging.Warn(ex, "Exception while parsing Zotero import");
        }
    }

    return (CreateFinalResult());
}
// Prepares the launcher model: validates the config and game folders, loads
// LauncherData.xml, discovers all modules under <game>/Modules, and merges
// them with the saved mod order before running validation.
public bool Initialize(string config, string game, out string errorMessage)
{
    errorMessage = default;
    if (!this._client.CanInitialize(config, game))
    {
        return (false);
    }

    // Validation stays suspended while the mod list is being (re)built.
    this._runValidation = false;
    this._basePath = config;
    if (!Directory.Exists(this._basePath))
    {
        errorMessage = $"{this._basePath} does not exist";
        this.Log().Error(errorMessage);
        return (false);
    }

    // Best effort: ensure the backup folder exists; failure is non-fatal.
    try
    {
        if (!Directory.Exists(Path.Combine(this._basePath, "BannerLordLauncher Backups")))
        {
            Directory.CreateDirectory(Path.Combine(this._basePath, "BannerLordLauncher Backups"));
        }
    }
    catch (Exception e)
    {
        this.Log().Error(e);
    }

    var launcherData = UserData.Load(this, Path.Combine(this._basePath, "LauncherData.xml")) ?? new UserData();
    this._modulePath = Path.Combine(game, "Modules");
    this.GameExeFolder = Path.Combine(game, "bin", "Win64_Shipping_Client");
    var modulesFolder = Path.Combine(game, "Modules");
    if (!Directory.Exists(modulesFolder))
    {
        errorMessage = $"{modulesFolder} does not exist";
        this.Log().Error(errorMessage);
        return (false);
    }

    // Load every module folder; Module.Load yields null for unreadable ones.
    var modules = Directory.EnumerateDirectories(modulesFolder, "*", SearchOption.TopDirectoryOnly)
        .Select(dir => Module.Load(this, Path.GetFileName(dir), game)).Where(module => module != null).ToList();

    // Phase 1: add modules in the order remembered from the last session.
    if (launcherData.SingleplayerData?.ModDatas != null)
    {
        foreach (var mod in launcherData.SingleplayerData.ModDatas)
        {
            if (this.Mods.Any(x => x.UserModData.Id.Equals(mod.Id, StringComparison.OrdinalIgnoreCase)))
            {
                continue;
            }
            var module = modules.FirstOrDefault(x => x.Id == mod.Id);
            if (module == null)
            {
                // Saved entry refers to a module no longer present on disk.
                this.Log().Warn($"{mod.Id} could not be found in {modulesFolder}");
                continue;
            }
            modules.Remove(module);
            var modEntry = new ModEntry { Module = module, UserModData = mod };
            this.Mods.Add(modEntry);
            if (modEntry.Module.Official)
            {
                modEntry.IsChecked = true;
            }
        }
    }

    // Phase 2: append modules discovered on disk that were not in the saved list.
    foreach (var module in modules)
    {
        if (this.Mods.Any(x => x.Module.Id.Equals(module.Id, StringComparison.OrdinalIgnoreCase)))
        {
            continue;
        }
        var modEntry = new ModEntry { Module = module, UserModData = new UserModData(module.Id, false) };
        this.Mods.Add(modEntry);
        if (modEntry.Module.Official)
        {
            modEntry.IsChecked = true;
        }
    }

    this.AnalyzeAssemblies();
    this.BuildDependencies();
    this._runValidation = true;
    this.Validate();
    return (true);
}
// Builds the full path for a derived file of this document, stored under a
// two-character fan-out subdirectory taken from the start of the fingerprint.
private string MakeFilenameWith2LevelIndirection(string file_type, object token, string extension)
{
    // BUG FIX: use invariant casing for the fan-out folder so the generated
    // path is stable regardless of the user's locale (the culture-sensitive
    // ToUpper() maps 'i' -> 'İ' under e.g. the Turkish culture, producing a
    // different directory than the one used when the file was written).
    string indirection_characters = fingerprint.Substring(0, 2).ToUpperInvariant();
    return Path.GetFullPath(Path.Combine(BasePath, indirection_characters, String.Format("{0}.{1}.{2}.{3}", fingerprint, file_type, token, extension)));
}
/// <summary>
/// Resolves this relative path against the given absolute base path.
/// </summary>
public AbsolutePath RelativeTo(AbsolutePath abs)
{
    var combined = Path.Combine((string)abs, _path);
    return new AbsolutePath(combined);
}
/// <summary>
/// Appends the given raw path segments to this relative path.
/// </summary>
public RelativePath Combine(params string[] paths)
{
    var segments = paths.Cons(_path).ToArray();
    return (RelativePath)Path.Combine(segments);
}
/// <summary>
/// Appends the given relative path segments to this relative path.
/// </summary>
public RelativePath Combine(params RelativePath[] paths)
{
    var segments = paths.Select(p => (string)p).Cons(_path).ToArray();
    return (RelativePath)Path.Combine(segments);
}
// Launches the game executable (default Bannerlord.exe) from the detected
// game bin folder, first stripping the NTFS "Zone.Identifier" alternate data
// streams (mark-of-the-web) from mod assemblies so they can be loaded.
public bool Run(string gameExe, string extraGameArguments, out string errorMessage)
{
    errorMessage = default;
    if (!this._client.CanRun(gameExe, extraGameArguments))
    {
        return (false);
    }

    gameExe ??= "Bannerlord.exe";
    var actualGameExe = Path.Combine(this.GameExeFolder, gameExe);
    if (string.IsNullOrEmpty(actualGameExe))
    {
        errorMessage = "Game executable could not be detected";
        this.Log().Error(errorMessage);
        return (false);
    }
    if (!File.Exists(actualGameExe))
    {
        errorMessage = $"{actualGameExe} could not be found";
        this.Log().Error(errorMessage);
        return (false);
    }

    // Remove the Zone.Identifier stream from every mod assembly; errors are
    // logged (inner) or ignored (outer) — a single bad file must not block launch.
    foreach (var dll in this.GetAssemblies().Distinct())
    {
        try
        {
            var fi = new FileInfo(dll);
            if (!fi.Exists)
            {
                continue;
            }
            try
            {
                if (!fi.AlternateDataStreamExists("Zone.Identifier"))
                {
                    continue;
                }
                var s = fi.GetAlternateDataStream("Zone.Identifier", FileMode.Open);
                s.Delete();
            }
            catch (Exception e)
            {
                this.Log().Error(e);
            }
        }
        catch
        {
            // best effort only
        }
    }

    extraGameArguments ??= "";
    var args = extraGameArguments.Trim() + " " + this.GameArguments().Trim();
    this.Log().Warn($"Trying to execute: {actualGameExe} {args}");
    var info = new ProcessStartInfo
    {
        Arguments = args,
        FileName = actualGameExe,
        WorkingDirectory = Path.GetDirectoryName(actualGameExe) ?? throw new InvalidOperationException(),
        UseShellExecute = false
    };
    try
    {
        Process.Start(info);
    }
    catch (Exception e)
    {
        errorMessage = "Exception when trying to run the game. See the log for details";
        this.Log().Error(e);
        return (false);
    }
    return (true);
}
// Returns the directory holding a document's files: the library documents
// base path plus a one-character fan-out folder (first fingerprint character).
internal static string DocumentBasePath(WebLibraryDetail web_library_detail, string fingerprint)
{
    char folder_id = fingerprint[0];
    string fanout = String.Format(@"{0}", folder_id);
    return Path.GetFullPath(Path.Combine(web_library_detail.LIBRARY_DOCUMENTS_BASE_PATH, fanout));
}
// Returns the absolute path of the library database file inside the given folder.
internal static string GetLibraryDBPath(string base_path)
{
    string db_file = Path.Combine(base_path, @"Qiqqa.library");
    return Path.GetFullPath(db_file);
}
// Exports the given documents as BibTeX to <base_path>/Qiqqa.bib.
internal static void Export(Library library, List<PDFDocument> pdf_documents, string base_path, Dictionary<string, PDFDocumentExportItem> pdf_document_export_items, bool include_additional_fields)
{
    string filename = Path.GetFullPath(Path.Combine(base_path, @"Qiqqa.bib"));
    ExportBibTeX(pdf_documents, filename, pdf_document_export_items, include_additional_fields);
}
// Builds the full path for a derived file of this document, directly under BasePath.
private string MakeFilename(string file_type, object token, string extension)
{
    string leafname = String.Format("{0}.{1}.{2}.{3}", fingerprint, file_type, token, extension);
    return Path.GetFullPath(Path.Combine(BasePath, leafname));
}
// Returns the path of the empty library database template shipped with Qiqqa.
internal static string GetLibraryDBTemplatePath()
{
    string template = Path.Combine(ConfigurationManager.Instance.StartupDirectoryForQiqqa, @"DocumentLibrary/Library.Template.s3db");
    return Path.GetFullPath(template);
}
// Produces a results-file path guaranteed to fit within the filesystem path
// length limit. Several progressively longer shorthand variants of the name
// are constructed and the longest fitting one wins. Each variant's length is
// checked against a worst-case ".APPROVEDXYZ" placeholder (instead of the
// actual typeIdStr) so the approved and received files always select the
// SAME variant.
private string MkLegalSizedPath(string basename, string typeIdStr)
{
    const int PATH_MAX = 240; // must be less than 255 / 260 - see also https://kb.acronis.com/content/39790

    string root = Path.GetDirectoryName(basename);
    string name = Path.GetFileName(basename);
    string dataname = Path.GetFileNameWithoutExtension(DataFile);
    string ext = SubStr(Path.GetExtension(DataFile), 1).Trim(); // produce the extension without leading dot
    // strip a leading "bib" marker off the extension:
    if (ext.StartsWith("bib"))
    {
        ext = SubStr(ext, 3).Trim();
    }
    if (ext.Length > 0)
    {
        ext = "." + ext;
    }

    // UNC long filename/path support by forcing this to be a UNC path:
    string filenamebase = $"{dataname}.{name}{ext}{ExtensionWithDot}";

    // first make the full path without the approved/received, so that that bit doesn't make a difference
    // in the length check and subsequent decision to produce a shorthand filename path or not:
    //
    // It's not always needed, but do the different shorthand conversions anyway and pick the longest fitting one:
    string short_tn = SanitizeFilename(CamelCaseShorthand(name));
    string short_dn = SanitizeFilename(SubStr(dataname, 0, 10) + CamelCaseShorthand(dataname));
    string hash = StreamMD5.FromText(filenamebase).ToUpper();
    // hash fragment shrinks as the shorthand test-name grows (at least 6 chars).
    string short_hash = SubStr(hash, 0, Math.Max(6, 11 - short_tn.Length));

    // this variant will fit in the length criterium, guaranteed:
    string alt_filepath0 = Path.GetFullPath(Path.Combine(root, $"{short_dn}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
    string filepath = alt_filepath0;

    // next, we construct the longer variants to check if they fit.
    //
    // DO NOTE that we create a path without typeIdStr part first, because we want both received and approved files to be based
    // on the *same* alt selection decision!
    string picked_alt_filepath = Path.GetFullPath(Path.Combine(root, $"{short_dn}.{short_hash}_{short_tn}{ext}.APPROVEDXYZ{ExtensionWithDot}"));

    name = SanitizeFilename(name);
    dataname = SanitizeFilename(dataname);

    // first alternative: full test name, shorthand data name:
    string alt_filepath1 = Path.GetFullPath(Path.Combine(root, $"{short_dn}_{short_hash}.{name}{ext}.APPROVEDXYZ{ExtensionWithDot}"));
    if (alt_filepath1.Length < PATH_MAX)
    {
        filepath = Path.GetFullPath(Path.Combine(root, $"{short_dn}_{short_hash}.{name}{ext}{typeIdStr}{ExtensionWithDot}"));
        picked_alt_filepath = alt_filepath1;
    }

    // second alternative: only pick this one if it fits and produces a longer name:
    string alt_filepath2 = Path.GetFullPath(Path.Combine(root, $"{dataname}.{short_hash}_{short_tn}{ext}.APPROVEDXYZ{ExtensionWithDot}"));
    if (alt_filepath2.Length < PATH_MAX && alt_filepath2.Length > picked_alt_filepath.Length)
    {
        filepath = Path.GetFullPath(Path.Combine(root, $"{dataname}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
        picked_alt_filepath = alt_filepath2;
    }
    else
    {
        // third alt: the 'optimally trimmed' test name used as part of the filename:
        int trim_length = PATH_MAX - alt_filepath0.Length + 10 - 1;
        string short_dn2 = SanitizeFilename(SubStr(dataname, 0, trim_length) + CamelCaseShorthand(dataname));
        string alt_filepath3 = Path.GetFullPath(Path.Combine(root, $"{short_dn2}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
        if (alt_filepath3.Length < PATH_MAX && alt_filepath3.Length > picked_alt_filepath.Length)
        {
            filepath = Path.GetFullPath(Path.Combine(root, $"{short_dn2}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
            picked_alt_filepath = alt_filepath3;
        }
    }

    // fourth alt: the full, unadulterated path; if it fits in the length criterium, take it anyway
    string alt_filepath4 = Path.GetFullPath(Path.Combine(root, $"{dataname}.{name}{ext}.APPROVEDXYZ{ExtensionWithDot}"));
    if (alt_filepath4.Length < PATH_MAX)
    {
        // UNC long filename/path support by forcing this to be a UNC path:
        filepath = Path.GetFullPath(Path.Combine(root, $"{dataname}.{name}{ext}{typeIdStr}{ExtensionWithDot}"));
        picked_alt_filepath = alt_filepath4;
    }

    return (filepath);
}
/// <summary>
/// Backs up the files of each VSS component either by direct copy
/// (<c>options.DirectCopy</c>) or by packing them into a single archive per
/// component (.zip via DotNetZip, or .7z via SevenZipSharp).
/// </summary>
/// <param name="components">VSS writer components whose file groups are backed up.</param>
/// <param name="volumeMap">Maps volume path prefixes to volume names; used to resolve which volume a file lives on.</param>
/// <param name="snapshotVolumeMap">Maps volume names to their snapshot device paths; reads are done from the snapshot, not the live volume.</param>
/// <param name="vmNamesMap">Maps component names to display names used when formatting the output path.</param>
/// <param name="options">Backup options (output folder/format, archive format, compression level, include/ignore lists, password, threading).</param>
/// <param name="logger">Destination for progress/diagnostic messages.</param>
/// <remarks>
/// NOTE(review): mutates <c>options.CompressionLevel</c> in place when it is -1
/// (defaults: 6 for zip, 3 for 7z) and relies on the fields <c>_cancel</c> and
/// <c>_currentFile</c> plus the <c>BackupProgress</c> event declared elsewhere
/// in this class. All source streams opened by <c>AddPathToCompressionList</c>
/// are collected in <c>streams</c> and closed in the <c>finally</c> block.
/// </remarks>
private void BackupFiles(IList <IVssWMComponent> components, IDictionary <string, string> volumeMap,
    IDictionary <string, string> snapshotVolumeMap, IDictionary <string, string> vmNamesMap,
    Options options, ILogger logger)
{
    // Every stream handed to the compressor is tracked here so it can be
    // closed in the finally block even if compression throws or is cancelled.
    var streams = new List <Stream>();
    try
    {
        foreach (var component in components)
        {
            string vmBackupPath;
            if (options.DirectCopy)
            {
                // Direct copy: the formatted path is a destination folder, so
                // the extension placeholder in OutputFormat is left empty.
                vmBackupPath = Path.Combine(options.Output,
                    string.Format(options.OutputFormat, vmNamesMap[component.ComponentName],
                        component.ComponentName, DateTime.Now, ""));
            }
            else
            {
                // Archive mode: pick the extension from the chosen format and
                // delete any stale archive at that path before writing.
                vmBackupPath = Path.Combine(options.Output,
                    string.Format(options.OutputFormat, vmNamesMap[component.ComponentName],
                        component.ComponentName, DateTime.Now, options.ZipFormat ? ".zip" : ".7z"));
                File.Delete(vmBackupPath);
            }
            // Archive entry name -> source stream, filled below and consumed
            // by whichever compressor branch runs.
            var files = new Dictionary <string, Stream>();
            foreach (var file in component.Files)
            {
                string path;
                if (file.IsRecursive)
                {
                    path = file.Path;
                }
                else
                {
                    path = Path.Combine(file.Path, file.FileSpecification);
                }
                // Resolve the volume that owns this path by taking the longest
                // volume prefix that matches (longest first so nested mount
                // points win over their parents).
                var volumePath = volumeMap.Keys.OrderBy(o => o.Length).Reverse()
                                 .First(o => path.StartsWith(o, StringComparison.OrdinalIgnoreCase));
                var volumeName = volumeMap[volumePath];
                // Upper-cased bare file name, used for the include/ignore
                // matching below (comparisons are ordinal on the upper form).
                var fileName = Path.GetFileName(path.Substring(volumePath.Length)).ToUpperInvariant();
                // Wildcard directory specs ("...\*") are excluded outright.
                var include = !path.EndsWith("\\*");
                // Exclude anything whose parent directory is named "snapshots"
                // (Hyper-V checkpoint files should not be backed up here).
                var pathItems = path.Split(Path.DirectorySeparatorChar);
                if (pathItems.Length >= 2)
                {
                    if (pathItems[pathItems.Length - 2].ToLowerInvariant() == "snapshots")
                    {
                        include = false;
                    }
                }
                // If an explicit include list is given, the file must be on it.
                if (include && options.VhdInclude != null)
                {
                    if (options.VhdInclude.Count(
                            x => string.CompareOrdinal(x.ToUpperInvariant(), fileName) == 0) == 0)
                    {
                        include = false;
                    }
                }
                // An ignore-list match always drops the file.
                if (include && options.VhdIgnore != null)
                {
                    if (options.VhdIgnore.Count(
                            x => string.CompareOrdinal(x.ToUpperInvariant(), fileName) == 0) != 0)
                    {
                        include = false;
                    }
                }
                if (include)
                {
                    if (options.DirectCopy)
                    {
                        // Copy straight from the snapshot device to the
                        // destination folder.
                        DoDirectCopy(vmBackupPath, snapshotVolumeMap[volumeName], volumePath.Length, path);
                    }
                    else
                    {
                        // Queue the file (as a stream over the snapshot) for
                        // the compression pass below; opened streams are also
                        // appended to `streams` for cleanup.
                        AddPathToCompressionList(files, streams,
                            snapshotVolumeMap[volumeName], volumePath.Length, path);
                    }
                }
                else
                {
                    var errorText = $"Ignoring file {path}";
                    logger.Info(errorText);
                    Console.WriteLine(errorText);
                }
            }
            if (!options.DirectCopy)
            {
                logger.Debug($"Start compression. File: {vmBackupPath}");
                if (options.ZipFormat)
                {
                    // -1 means "not set"; zip defaults to level 6.
                    if (options.CompressionLevel == -1)
                    {
                        options.CompressionLevel = 6;
                    }
                    using (var zf = new ZipFile(vmBackupPath))
                    {
                        // Disable parallel deflate; force Zip64 so very large
                        // VHDs are representable; AES-256 when a password is set.
                        zf.ParallelDeflateThreshold = -1;
                        zf.UseZip64WhenSaving = Zip64Option.Always;
                        zf.Encryption = EncryptionAlgorithm.WinZipAes256;
                        // Map the numeric 0-9 option onto DotNetZip's level enum.
                        switch (options.CompressionLevel)
                        {
                            case 0: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level0; break;
                            case 1: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level1; break;
                            case 2: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level2; break;
                            case 3: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level3; break;
                            case 4: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level4; break;
                            case 5: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level5; break;
                            case 6: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level6; break;
                            case 7: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level7; break;
                            case 8: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level8; break;
                            case 9: zf.CompressionLevel = Ionic.Zlib.CompressionLevel.Level9; break;
                        }
                        // Forward save progress to subscribers, if any.
                        if (BackupProgress != null)
                        {
                            zf.SaveProgress += (sender, e) => ReportZipProgress(component, volumeMap, e);
                        }
                        if (!string.IsNullOrEmpty(options.Password))
                        {
                            zf.Password = options.Password;
                        }
                        foreach (var file in files)
                        {
                            logger.Debug($"Adding file: {file.Key}");
                            zf.AddEntry(file.Key, file.Value);
                        }
                        zf.Save();
                    }
                }
                else
                {
                    // -1 means "not set"; 7z defaults to level 3.
                    if (options.CompressionLevel == -1)
                    {
                        options.CompressionLevel = 3;
                    }
                    // SevenZipSharp needs the native 7z.dll next to the exe.
                    SevenZipBase.SetLibraryPath(
                        Path.Combine(AppDomain.CurrentDomain.SetupInformation.ApplicationBase, "7z.dll"));
                    var sevenZip = new SevenZipCompressor
                    {
                        ArchiveFormat = OutArchiveFormat.SevenZip,
                        CompressionMode = CompressionMode.Create,
                        DirectoryStructure = true,
                        PreserveDirectoryRoot = false
                    };
                    if (options.MultiThreaded)
                    {
                        sevenZip.CustomParameters.Add("mt", "on");
                    }
                    // "d=24": LZMA dictionary size parameter (2^24 = 16 MB).
                    sevenZip.CustomParameters.Add("d", "24");
                    // Collapse the 0-9 scale onto SevenZipSharp's coarser enum.
                    switch (options.CompressionLevel)
                    {
                        case 0: sevenZip.CompressionLevel = CompressionLevel.None; break;
                        case 1:
                        case 2: sevenZip.CompressionLevel = CompressionLevel.Fast; break;
                        case 3:
                        case 4:
                        case 5: sevenZip.CompressionLevel = CompressionLevel.Low; break;
                        case 6: sevenZip.CompressionLevel = CompressionLevel.Normal; break;
                        case 7:
                        case 8: sevenZip.CompressionLevel = CompressionLevel.High; break;
                        case 9: sevenZip.CompressionLevel = CompressionLevel.Ultra; break;
                    }
                    if (BackupProgress != null)
                    {
                        // Start-of-file: remember the current entry and honor a
                        // pending cancellation by aborting this entry.
                        // (NOTE: "Acrhive" is the property's actual spelling in
                        // BackupProgressEventArgs — do not "fix" it here.)
                        sevenZip.FileCompressionStarted += (sender, e) =>
                        {
                            var ebp = new BackupProgressEventArgs
                            {
                                AcrhiveFileName = e.FileName,
                                Action = EventAction.StartingArchive
                            };
                            _currentFile = e.FileName;
                            Report7ZipProgress(component, volumeMap, ebp);
                            if (_cancel)
                            {
                                e.Cancel = true;
                            }
                        };
                        // End-of-file: report completion and clear the tracker.
                        sevenZip.FileCompressionFinished += (sender, e) =>
                        {
                            var ebp = new BackupProgressEventArgs
                            {
                                AcrhiveFileName = _currentFile,
                                Action = EventAction.ArchiveDone
                            };
                            _currentFile = string.Empty;
                            Report7ZipProgress(component, volumeMap, ebp);
                        };
                        // Percentage progress for the entry currently running.
                        sevenZip.Compressing += (sender, e) =>
                        {
                            var ebp = new BackupProgressEventArgs
                            {
                                AcrhiveFileName = _currentFile,
                                Action = EventAction.PercentProgress,
                                CurrentEntry = _currentFile,
                                PercentDone = e.PercentDone
                            };
                            Report7ZipProgress(component, volumeMap, ebp);
                        };
                    }
                    if (string.IsNullOrEmpty(options.Password))
                    {
                        sevenZip.CompressStreamDictionary(files, vmBackupPath);
                    }
                    else
                    {
                        sevenZip.CompressStreamDictionary(files, vmBackupPath, options.Password);
                    }
                }
                logger.Debug("Compression finished");
                // Cooperative cancel: remove the partial archive and unwind.
                if (_cancel)
                {
                    if (File.Exists(vmBackupPath))
                    {
                        File.Delete(vmBackupPath);
                    }
                    throw new BackupCancelledException();
                }
            }
        }
    }
    finally
    {
        // Close every source stream opened for compression, whether or not
        // the backup completed.
        foreach (var s in streams)
        {
            s.Close();
        }
    }
}