/// <summary>
/// Returns a replace-capable engine for the given file: a plugin engine when one is
/// registered for the file's extension and is not search-only, otherwise the plain
/// text engine. Falls back to plain text when the plugin fails to create/initialize.
/// </summary>
/// <param name="fileName">File whose extension selects the engine.</param>
/// <param name="param">Engine initialization parameters (must not be null).</param>
/// <param name="filter">File filter (must not be null).</param>
public static IGrepEngine GetReplaceEngine(string fileName, GrepEngineInitParams param, FileFilter filter)
{
    Debug.Assert(param != null);
    Debug.Assert(filter != null);

    LoadPlugins();

    // Normalized extension: lower-cased, no leading dot (matches fileTypeEngines keys).
    string fileExtension = Path.GetExtension(fileName).ToLower().TrimStart('.');
    if (fileTypeEngines.ContainsKey(fileExtension) && !fileTypeEngines[fileExtension].IsSearchOnly)
    {
        IGrepEngine engine = fileTypeEngines[fileExtension].CreateEngine();
        if (engine != null && engine.Initialize(param, filter))
        {
            loadedEngines.Add(engine);
            return engine;
        }

        // BUG FIX: engine may be null here (CreateEngine() failed), so the original
        // unconditional engine.GetType() call could throw NullReferenceException.
        string engineName = engine != null ? engine.GetType().Name : fileTypeEngines[fileExtension].ToString();
        failedEngines[engineName] = "Failed to initialize the plugin. See error log for details.";
        return GetPlainTextEngine(fileExtension, param, filter);
    }

    return GetPlainTextEngine(fileExtension, param, filter);
}
/// <summary>
/// Returns true when the file's extension identifies a supported raster image
/// format (jpg, jpeg, png, gif, bmp), compared case-insensitively.
/// </summary>
/// <param name="filename">Path or file name; only the extension is inspected.</param>
internal static bool IsSupportedImagePath(string filename)
{
    // BUG FIX: the original lower-cased the whole filename with the *current*
    // culture before extracting the extension; under e.g. the Turkish culture
    // "FILE.JPG" maps 'I' -> 'ı' elsewhere in the name and, more generally,
    // culture-sensitive casing is wrong for file-extension matching.
    // Ordinal, case-insensitive comparison on the extension is the safe form.
    string extension = Path.GetExtension(filename);
    return extension.Equals(".jpg", StringComparison.OrdinalIgnoreCase)
        || extension.Equals(".jpeg", StringComparison.OrdinalIgnoreCase)
        || extension.Equals(".png", StringComparison.OrdinalIgnoreCase)
        || extension.Equals(".gif", StringComparison.OrdinalIgnoreCase)
        || extension.Equals(".bmp", StringComparison.OrdinalIgnoreCase);
}
// Downloads every archive in `missing`. Manual-download archives are processed
// first, sequentially (their Download flow differs from automated states); the
// remaining archives are fetched in parallel via PMap. When `download` is
// false the parallel pass still runs but DownloadArchive receives the flag, so
// no manual-state handling occurs.
public async Task DownloadMissingArchives(List <Archive> missing, bool download = true)
{
    if (download)
    {
        // NOTE(review): result is never awaited — metrics appear to be
        // fire-and-forget; confirm this is intentional.
        var result = SendDownloadMetrics(missing);
        foreach (var a in missing.Where(a => a.State.GetType() == typeof(ManualDownloader.State)))
        {
            var outputPath = DownloadFolder.Combine(a.Name);
            await a.State.Download(a, outputPath);
        }
    }

    await missing.Where(a => a.State.GetType() != typeof(ManualDownloader.State))
    .PMap(Queue, UpdateTracker, async archive =>
    {
        Info($"Downloading {archive.Name}");
        var outputPath = DownloadFolder.Combine(archive.Name);

        if (download)
        {
            if (outputPath.Exists)
            {
                // Name collision with an existing file: derive a unique name
                // from the download state's primary-key hash, then remove any
                // stale file at that unique path before downloading.
                var origName = Path.GetFileNameWithoutExtension(archive.Name);
                var ext = Path.GetExtension(archive.Name);
                var uniqueKey = archive.State.PrimaryKeyString.StringSha256Hex();
                outputPath = DownloadFolder.Combine(origName + "_" + uniqueKey + "_" + ext);
                await outputPath.DeleteAsync();
            }
        }
        return(await DownloadArchive(archive, download, outputPath));
    });
}
// Creates a PDFDocument record for `filename` in the given library. Uses the
// precalculated fingerprint when supplied, otherwise hashes the file. If a
// metadata record for the fingerprint already exists, the document is reloaded
// from that metadata; otherwise (or when the reload fails) the freshly built
// record is queued to storage.
public static PDFDocument CreateFromPDF(WebLibraryDetail web_library_detail, string filename, string precalculated_fingerprint__can_be_null)
{
    string fingerprint = precalculated_fingerprint__can_be_null;
    if (String.IsNullOrEmpty(fingerprint))
    {
        fingerprint = StreamFingerprint.FromFile(filename);
    }

    PDFDocument pdf_document = new PDFDocument(web_library_detail);

    // Store the most important information
    //
    // thread-UNSAFE access is permitted as the PDF has just been created so there's no thread-safety risk yet.
    pdf_document.FileType = Path.GetExtension(filename).TrimStart('.');
    pdf_document.Fingerprint = fingerprint;
    pdf_document.DateAddedToDatabase = DateTime.UtcNow;
    pdf_document.DateLastModified = DateTime.UtcNow;

    Directory.CreateDirectory(pdf_document.DocumentBasePath);

    pdf_document.StoreAssociatedPDFInRepository(filename);

    List <LibraryDB.LibraryItem> library_items = web_library_detail.Xlibrary.LibraryDB.GetLibraryItems(PDFDocumentFileLocations.METADATA, new List <string>() { pdf_document.Fingerprint });
    // At most one metadata record should exist per fingerprint.
    ASSERT.Test(library_items.Count < 2);

    if (0 == library_items.Count)
    {
        pdf_document.QueueToStorage();
    }
    else
    {
        LibraryDB.LibraryItem library_item = null;
        try
        {
            library_item = library_items[0];
            // Prefer the already-stored metadata over the freshly created record.
            pdf_document = LoadFromMetaData(web_library_detail, pdf_document.Fingerprint, library_item.data);
        }
        catch (Exception ex)
        {
            // keep the unrecognized data around so we may fix it later...
            Logging.Error(ex, "There was a problem reloading an existing PDF from existing metadata, so overwriting it! (document fingerprint: {0}, data: {1})", pdf_document.Fingerprint, library_item?.MetadataAsString() ?? "???");

            // TODO: WARNING: overwriting old (possibly corrupted) records like this can loose you old/corrupted/unsupported metadata content!
            pdf_document.QueueToStorage();
            //pdf_document.SaveToMetaData();
        }
    }

    return(pdf_document);
}
/// <summary>
/// Converts <paramref name="filename"/> to a PDF at <paramref name="pdf_filename"/>
/// using the convertor registered for its extension. Returns false when no
/// convertor exists; otherwise returns the convertor's result.
/// </summary>
public static bool Convert(string filename, string pdf_filename)
{
    // Check that we know how to convert this!
    if (!CanConvert(filename))
    {
        return false;
    }

    // Call the appropriate convertor.
    // BUG FIX: ToLowerInvariant instead of culture-sensitive ToLower() — the
    // convertor table is keyed by fixed lower-case extensions, and e.g. the
    // Turkish culture maps 'I' -> 'ı', which would miss the table entry.
    string extension = Path.GetExtension(filename).ToLowerInvariant();
    return convertors[extension](filename, pdf_filename);
}
/// <summary>
/// Returns a search engine for the given file: the hex engine for hex searches,
/// a pooled engine when one is available for the extension, an archive engine for
/// archive extensions, a plugin engine when registered, and otherwise the plain
/// text engine. Falls back to plain text when a plugin fails to create/initialize.
/// </summary>
/// <param name="fileName">File whose extension selects the engine.</param>
/// <param name="param">Engine initialization parameters (must not be null).</param>
/// <param name="filter">File filter (must not be null).</param>
/// <param name="searchType">Requested search type; Hex bypasses extension dispatch.</param>
public static IGrepEngine GetSearchEngine(string fileName, GrepEngineInitParams param, FileFilter filter, SearchType searchType)
{
    Debug.Assert(param != null);
    Debug.Assert(filter != null);

    LoadPlugins();

    // Normalized extension: lower-cased, no leading dot (matches fileTypeEngines keys).
    string fileExtension = Path.GetExtension(fileName).ToLower().TrimStart('.');
    lock (lockObj)
    {
        if (searchType == SearchType.Hex)
        {
            return GetHexEngine(param, filter);
        }

        // Reuse a pooled engine for this extension when one is available.
        IGrepEngine poolEngine = FetchFromPool(fileExtension);
        if (poolEngine != null)
        {
            poolEngine.Initialize(param, filter);
            return poolEngine;
        }

        if (ArchiveDirectory.Extensions.Contains(fileExtension))
        {
            return GetArchiveEngine(fileExtension, param, filter);
        }

        if (fileTypeEngines.ContainsKey(fileExtension))
        {
            IGrepEngine engine = fileTypeEngines[fileExtension].CreateEngine();
            if (engine != null && engine.Initialize(param, filter))
            {
                loadedEngines.Add(engine);
                logger.Debug(string.Format("Using plugin: {0} for extension {1}", engine.ToString(), fileExtension));
                return engine;
            }

            logger.Debug(string.Format("File type engines failed to initialize: {0}, using plainTextEngine", fileExtension));
            // BUG FIX: engine may be null here (CreateEngine() failed), so the
            // original unconditional engine.GetType() call could throw NRE.
            string engineName = engine != null ? engine.GetType().Name : fileTypeEngines[fileExtension].ToString();
            failedEngines[engineName] = "Failed to initialize the plugin. See error log for details.";
            return GetPlainTextEngine(fileExtension, param, filter);
        }

        logger.Debug(string.Format("File type engines has no key for: {0}, using plainTextEngine", fileExtension));
        return GetPlainTextEngine(fileExtension, param, filter);
    }
}
/// <summary>
/// Moves <paramref name="file"/> into the "BannerLordLauncher Backups" folder
/// under the base path, naming it "name.ext.NNN" with the first free 3-digit
/// suffix. Gives up silently after 999 backups or on I/O errors (logged).
/// </summary>
private void BackupFile(string file)
{
    var path = Path.Combine(this._basePath, "BannerLordLauncher Backups");
    try
    {
        if (!Directory.Exists(path))
        {
            Directory.CreateDirectory(path);
        }
    }
    catch (Exception e)
    {
        this.Log().Error(e);
        return;
    }

    if (!File.Exists(file))
    {
        return;
    }

    var ext = Path.GetExtension(file);
    // Probe "name.ext.000", ".001", ... until an unused slot is found.
    // FIX: the original duplicated the candidate-name computation before and
    // inside the loop; one do-while removes the duplication, same behavior.
    var i = -1;
    string newFile;
    do
    {
        i++;
        newFile = Path.ChangeExtension(file, $"{ext}.{i:D3}");
        Debug.Assert(newFile != null, nameof(newFile) + " != null");
        newFile = Path.Combine(path, Path.GetFileName(newFile));
    } while (File.Exists(newFile));

    // Cap at 999 backups, as before: slot found beyond that means we give up.
    if (i > 999)
    {
        return;
    }

    try
    {
        Debug.Assert(file != null, nameof(file) + " != null");
        File.Move(file, newFile);
    }
    catch (Exception e)
    {
        this.Log().Error(e);
    }
}
// Creates a PDFDocument record for `filename` in the given (legacy) Library.
// Uses the precalculated fingerprint when supplied, otherwise hashes the file.
// If a metadata record already exists for the fingerprint, the document is
// reloaded from it; when that reload fails the fresh record is queued instead.
public static PDFDocument CreateFromPDF(Library library, string filename, string precalculated_fingerprint__can_be_null)
{
    string fingerprint = precalculated_fingerprint__can_be_null;
    if (String.IsNullOrEmpty(fingerprint))
    {
        fingerprint = StreamFingerprint.FromFile(filename);
    }

    LockObject _lock = new LockObject();
    PDFDocument pdf_document = new PDFDocument(_lock, library);

    // Store the most important information
    //
    // thread-UNSAFE access is permitted as the PDF has just been created so there's no thread-safety risk yet.
    pdf_document.doc.FileType = Path.GetExtension(filename).TrimStart('.');
    pdf_document.doc.Fingerprint = fingerprint;
    pdf_document.doc.DateAddedToDatabase = DateTime.UtcNow;
    pdf_document.doc.DateLastModified = DateTime.UtcNow;

    Directory.CreateDirectory(pdf_document.DocumentBasePath);

    pdf_document.doc.StoreAssociatedPDFInRepository(filename);

    List <LibraryDB.LibraryItem> library_items = library.LibraryDB.GetLibraryItems(pdf_document.doc.Fingerprint, PDFDocumentFileLocations.METADATA);
    if (0 == library_items.Count)
    {
        // No prior metadata: persist the freshly built record.
        pdf_document.QueueToStorage();
    }
    else
    {
        try
        {
            // Prefer the already-stored metadata over the freshly created record.
            LibraryDB.LibraryItem library_item = library_items[0];
            pdf_document = LoadFromMetaData(library, library_item.data, null);
        }
        catch (Exception ex)
        {
            Logging.Error(ex, "There was a problem reloading an existing PDF from existing metadata, so overwriting it!");

            // TODO: WARNING: overwriting old (possibly corrupted) records like this can loose you old/corrupted/unsupported metadata content!
            pdf_document.QueueToStorage();
            //pdf_document.SaveToMetaData();
        }
    }

    return(pdf_document);
}
/// <summary>
/// Keeps the selection and per-extension icon cache in sync with the results
/// collection: prunes selected nodes whose results were removed, and assigns a
/// (cached) shell icon to each newly added result based on its file extension.
/// </summary>
void ObservableGrepSearchResults_CollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
{
    // Collect stale selections first, then remove, so SelectedNodes is never
    // mutated while being enumerated.
    List<IGrepResult> toRemove = new List<IGrepResult>();
    foreach (var node in SelectedNodes)
    {
        FormattedGrepResult item = node as FormattedGrepResult;
        FormattedGrepLine line = node as FormattedGrepLine;

        if (item != null && !this.Contains(item))
            toRemove.Add(item);

        if (line != null && !this.Contains(line.Parent))
            toRemove.Add(line);
    }
    foreach (var item in toRemove)
        SelectedNodes.Remove(item);

    if (e.NewItems != null)
    {
        foreach (FormattedGrepResult newEntry in e.NewItems.Cast<FormattedGrepResult>())
        {
            string extension = Path.GetExtension(newEntry.GrepResult.FileNameDisplayed);
            // Extensionless (or bare-dot) files share a generic "n/a" icon.
            if (extension.Length <= 1)
                extension = ".na";

            // FIX: TryGetValue avoids the original's double dictionary lookup
            // (ContainsKey followed by indexer reads).
            if (!icons.TryGetValue(extension, out var icon))
            {
                System.Drawing.Bitmap bitmapIcon = IconHandler.IconFromExtensionShell(extension, IconSize.Small);
                if (bitmapIcon == null)
                    bitmapIcon = dnGREP.Common.Properties.Resources.na_icon;
                icon = GetBitmapSource(bitmapIcon);
                icons[extension] = icon;
            }
            newEntry.Icon = icon;
        }
    }
}
/// <summary>
/// Multiline search dispatcher: routes Word-family files (".doc*") to
/// SearchWord and Excel-family files (".xls*") to SearchExcel; any other
/// extension yields an empty result list.
/// </summary>
private List <GrepSearchResult> SearchMultiline(Stream input, string file, string searchPattern, GrepSearchOption searchOptions, SearchDelegates.DoSearch searchMethod)
{
    var results = new List<GrepSearchResult>();
    var extension = Path.GetExtension(file);

    if (extension.StartsWith(".doc", StringComparison.OrdinalIgnoreCase))
    {
        SearchWord(input, file, searchPattern, searchOptions, searchMethod, results);
    }
    else if (extension.StartsWith(".xls", StringComparison.OrdinalIgnoreCase))
    {
        SearchExcel(input, file, searchPattern, searchOptions, searchMethod, results);
    }

    return results;
}
/// <summary>
/// Returns an engine to the pool keyed by the file's extension. Engines whose
/// extension has no registered pool key are simply dropped.
/// </summary>
public static void ReturnToPool(string fileName, IGrepEngine engine)
{
    lock (lockObj)
    {
        // Normalized extension: lower-cased, no leading dot.
        string extension = Path.GetExtension(fileName).ToLower().TrimStart('.');
        if (!poolKeys.TryGetValue(extension, out string poolKey))
        {
            return;
        }

        // Lazily create the queue for this pool key on first return.
        if (!GrepEngineFactory.pooledEngines.TryGetValue(poolKey, out Queue<IGrepEngine> queue))
        {
            queue = new Queue<IGrepEngine>();
            GrepEngineFactory.pooledEngines.Add(poolKey, queue);
        }
        queue.Enqueue(engine);
    }
}
// Associates a PDF file with this vanilla-reference document: Part1 produces
// the document backing the PDF; this document's metadata is then copied onto
// it (fingerprint excluded), this document is marked Deleted, and both
// libraries are signalled to refresh. Returns the PDF-backed document, which
// is `this` when the PDF was already associated with this very document.
internal PDFDocument AssociatePDFWithVanillaReference(string pdf_filename)
{
    PDFDocument new_pdf_document;
    lock (access_lock)
    {
        new_pdf_document = doc.AssociatePDFWithVanillaReference_Part1(pdf_filename);
    }

    // Prevent nasty things when the API is used in unintended ways, where the
    // current document already happens to have that file associated with it:
    if (this != new_pdf_document)
    {
        // Overwrite the new document's metadata with that of the vanilla reference...
        if (null != new_pdf_document)
        {
#if false
            string fingerprint = new_pdf_document.Fingerprint;
            new_pdf_document.dictionary = (DictionaryBasedObject)this.dictionary.Clone();
            new_pdf_document.Fingerprint = fingerprint;
            new_pdf_document.FileType = Path.GetExtension(pdf_filename).TrimStart('.');
#else
            // Copy everything except the fingerprint, which must remain the PDF's own.
            new_pdf_document.CopyMetaData(this, copy_fingerprint: false);
#endif
            new_pdf_document.QueueToStorage();

            // Delete this one
            Deleted = true;
            QueueToStorage();

            // Tell library to refresh
            Library.SignalThatDocumentsHaveChanged(this);
            new_pdf_document.Library.SignalThatDocumentsHaveChanged(new_pdf_document);
        }
        else
        {
            MessageBoxes.Warn("The reference has not been associated with {0}", pdf_filename);
        }
    }

    return(new_pdf_document);
}
// One-shot 037 -> 038 migration: walks every library directory under the Qiqqa
// base directory and, for libraries that still have the old documents/ folder
// layout but no Qiqqa.library database yet, moves each recognized document
// file into the new SQLite blob store inside a single transaction per library.
// Progress is reported via the supplied splash screen.
internal static void RunUpgrade(SplashScreenWindow splashscreen_window)
{
    Logging.Info("Upgrading from 037 to 038");

    string base_directory_path = BaseDirectoryForQiqqa;
    if (Directory.Exists(base_directory_path))
    {
        int info_library_count, info_item_count;

        string[] library_directories = Directory.GetDirectories(base_directory_path);
        info_library_count = 0;
        foreach (string library_directory in library_directories)
        {
            ++info_library_count;
            Logging.Info("Inspecting directory {0}", library_directory);

            string documents_directory = Path.GetFullPath(Path.Combine(library_directory, @"documents"));
            string database_file = Path.GetFullPath(Path.Combine(library_directory, @"Qiqqa.library"));

            // Only migrate when the old layout exists and the new DB does not.
            if (!File.Exists(database_file) && Directory.Exists(documents_directory))
            {
                Logging.Warn("We have to upgrade {0}", library_directory);

                SQLiteUpgrade_LibraryDB library_db = new SQLiteUpgrade_LibraryDB(library_directory);
                using (var connection = library_db.GetConnection())
                {
                    connection.Open();
                    using (var transaction = connection.BeginTransaction())
                    {
                        // Get a list of ALL the files in the documents directory...
                        string[] full_filenames = Directory.GetFiles(documents_directory, "*.*", SearchOption.AllDirectories);
                        info_item_count = 0;
                        foreach (string full_filename in full_filenames)
                        {
                            ++info_item_count;
                            splashscreen_window.UpdateMessage("Upgrading library {0}/{1}: {2:P0}", info_library_count, library_directories.Length, info_item_count / (double)full_filenames.Length);

                            // File layout is "<fingerprint>.<extension>"; only files
                            // with a known extension are migrated into the blob store.
                            string fingerprint = Path.GetFileNameWithoutExtension(full_filename);
                            string extension = Path.GetExtension(full_filename).Trim('.');
                            if (EXTENSIONS.Contains(extension))
                            {
                                Logging.Info("Upgrading {0}--{1}", fingerprint, extension);
                                byte[] data = File.ReadAllBytes(full_filename);
                                library_db.PutBlob(connection, transaction, fingerprint, extension, data);
                            }
                            else
                            {
                                Logging.Info("NOT upgrading {0}--{1}", fingerprint, extension);
                            }
                        }
                        transaction.Commit();
                    }
                }
            }
        }
    }
    splashscreen_window.UpdateMessage("Finished migrating libraries.");
}
/// <summary>
/// Returns true when a PDF convertor is registered for the file's extension.
/// </summary>
public static bool CanConvert(string filename)
{
    // BUG FIX: ToLowerInvariant instead of culture-sensitive ToLower() — the
    // convertor table keys are culture-independent lower-case extensions and
    // e.g. the Turkish culture maps 'I' -> 'ı', which would miss the entry.
    string extension = Path.GetExtension(filename).ToLowerInvariant();
    return convertors.ContainsKey(extension);
}
// Adds a document file to the library:
//  - empty/".vanilla_reference" filenames become vanilla reference records;
//  - non-PDF files are converted to a temp PDF when a convertor exists,
//    otherwise the file is rejected (dialog or status message);
//  - when a document with the same fingerprint already exists, its record is
//    reinstated and merged (download source, BibTeX, tags, comments) instead
//    of creating a duplicate; otherwise a new PDFDocument is created, stored,
//    and queued for OCR.
// Returns the new/updated PDFDocument, or null when the file is unsupported
// or missing.
private PDFDocument AddNewDocumentToLibrary(string filename, string original_filename, string suggested_download_source, string bibtex, HashSet <string> tags, string comments, bool suppressDialogs, bool suppress_signal_that_docs_have_changed)
{
    // Flag that someone is trying to add to the library. This is used by the background processes to hold off while the library is busy being added to...
    //Utilities.LockPerfTimer l1_clk = Utilities.LockPerfChecker.Start();
    lock (last_pdf_add_time_lock)
    {
        //l1_clk.LockPerfTimerStop();
        last_pdf_add_time = DateTime.UtcNow;
    }

    if (String.IsNullOrEmpty(filename) || filename.EndsWith(".vanilla_reference"))
    {
        return(AddVanillaReferenceDocumentToLibrary(bibtex, tags, comments, suppressDialogs, suppress_signal_that_docs_have_changed));
    }

    bool is_a_document_we_can_cope_with = false;

    if (0 == Path.GetExtension(filename).ToLower().CompareTo(".pdf"))
    {
        is_a_document_we_can_cope_with = true;
    }
    else
    {
        // Not a PDF: try converting to a temp PDF and continue with that file.
        if (DocumentConversion.CanConvert(filename))
        {
            string filename_before_conversion = filename;
            string filename_after_conversion = TempFile.GenerateTempFilename("pdf");
            if (DocumentConversion.Convert(filename_before_conversion, filename_after_conversion))
            {
                is_a_document_we_can_cope_with = true;
                filename = filename_after_conversion;
            }
        }
    }

    if (!is_a_document_we_can_cope_with)
    {
        string extension = Path.GetExtension(filename);
        if (!suppressDialogs)
        {
            MessageBoxes.Info("This document library does not support {0} files. Free and Premium libraries only support PDF files. 
Premium+ libraries can automatically convert DOC and DOCX files to PDF.\n\nYou can convert your DOC files to PDFs using the Conversion Tool available on the Start Page Tools menu.\n\nSkipping {1}.", extension, filename);
        }
        else
        {
            StatusManager.Instance.UpdateStatus("LibraryDocument", String.Format("This document library does not support {0} files.", extension));
        }

        return(null);
    }

    // If the PDF does not exist, can not clone
    if (!File.Exists(filename))
    {
        Logging.Info("Can not add non-existent file to library, so skipping: {0}", filename);
        return(null);
    }

    string fingerprint = StreamFingerprint.FromFile(filename);

    PDFDocument pdf_document = GetDocumentByFingerprint(fingerprint);

    // Useful in logging for diagnosing if we're adding the same document again
    Logging.Info("Fingerprint: {0} - add to library: {1}", fingerprint, (null == pdf_document));

    if (null != pdf_document)
    {
        // Pdf reportedly exists in database.

        // Store the pdf in our location
        pdf_document.StoreAssociatedPDFInRepository(filename);

        // If the document was previously deleted in metadata, reinstate it
        if (pdf_document.Deleted)
        {
            Logging.Info("The document {0} was deleted, so reinstating it.", fingerprint);
            pdf_document.Deleted = false;
            pdf_document.Bindable.NotifyPropertyChanged(() => pdf_document.Deleted);
        }

        // Try to add some useful information from the download source if the metadata doesn't already have it
        if (!String.IsNullOrEmpty(suggested_download_source) && (String.IsNullOrEmpty(pdf_document.DownloadLocation)
            // or when the new source is a URL we also *upgrade* our source info by taking up the new URL
            // as we than assume that a new URL is 'better' i.e. more 'fresh'
            // than any existing URL or local source file path:
            || suggested_download_source.StartsWith("http://") || suggested_download_source.StartsWith("https://") || suggested_download_source.StartsWith("ftp://") || suggested_download_source.StartsWith("ftps://"))
            // *and* the old and new source shouldn't be the same:
            && suggested_download_source != pdf_document.DownloadLocation)
        {
            Logging.Info("The document in the library had no download location or an older one, so inferring it from download: {0} --> {1}", pdf_document.DownloadLocation ?? "(NULL)", suggested_download_source);
            pdf_document.DownloadLocation = suggested_download_source;
            pdf_document.Bindable.NotifyPropertyChanged(() => pdf_document.DownloadLocation);
        }

        // TODO: *merge* the BibTeX!
        if (!String.IsNullOrEmpty(bibtex))
        {
            pdf_document.BibTex = bibtex;
            pdf_document.Bindable.NotifyPropertyChanged(() => pdf_document.BibTex);
        }

        // merge = add new tags to existing ones (if any)
        if (tags != null)
        {
            foreach (string tag in tags)
            {
                // Notify changes called internally
                pdf_document.AddTag(tag);
            }
        }

        // TODO: merge comments?
        //
        // If we already have comments, then append them to our existing comments (if they are not identical)
        if (!String.IsNullOrEmpty(comments))
        {
            if (pdf_document.Comments != comments)
            {
                pdf_document.Comments = pdf_document.Comments + "\n\n---\n\n\n" + comments;
                pdf_document.Bindable.NotifyPropertyChanged(() => pdf_document.Comments);
            }
        }
    }
    else
    {
        // Create a new document
        pdf_document = PDFDocument.CreateFromPDF(this, filename, fingerprint);
        //pdf_document.OriginalFileName = original_filename;
        pdf_document.DownloadLocation = suggested_download_source;
        pdf_document.Bindable.NotifyPropertyChanged(() => pdf_document.DownloadLocation);
        pdf_document.BibTex = bibtex;
        pdf_document.Bindable.NotifyPropertyChanged(() => pdf_document.BibTex);

        if (tags != null)
        {
            foreach (string tag in tags)
            {
                pdf_document.AddTag(tag);
            }
        }

        pdf_document.Comments = comments;
        pdf_document.Bindable.NotifyPropertyChanged(() => pdf_document.Comments);

        Utilities.LockPerfTimer l2_clk = Utilities.LockPerfChecker.Start();
        lock (pdf_documents_lock)
        {
            l2_clk.LockPerfTimerStop();
            // Store in our database - note that we have the lock already
            pdf_documents[pdf_document.Fingerprint] = pdf_document;
        }

        // Get OCR queued
        pdf_document.PDFRenderer.CauseAllPDFPagesToBeOCRed();
    }

    if (!suppress_signal_that_docs_have_changed)
    {
        SignalThatDocumentsHaveChanged(pdf_document);
    }

    return(pdf_document);
}
// Begins a streaming upload session: derives a unique hex key from the file
// name plus a fresh GUID, registers a write lock for that key, creates the
// (empty) temp file that subsequent chunks will be written to, and returns
// the key to the client.
public async Task <IActionResult> UploadFileStreaming(string Name)
{
    var guid = Guid.NewGuid();
    // Key encodes "<basename>|<guid>|<extension>" so the original name can be
    // reconstructed from the key later.
    var key = Encoding.UTF8.GetBytes($"{Path.GetFileNameWithoutExtension(Name)}|{guid.ToString()}|{Path.GetExtension(Name)}").ToHex();
    // NOTE(review): the AsyncLock is registered but never acquired here —
    // presumably the chunk endpoint locks per key; confirm against callers.
    _writeLocks.GetOrAdd(key, new AsyncLock());
    // Create the placeholder temp file; the stream is disposed at method exit,
    // leaving an empty file ready for appends.
    await using var fs = _settings.TempPath.Combine(key).Create();
    Utils.Log($"Starting Ingest for {key}");
    return(Ok(key));
}
/// <summary>
/// Begins a streaming upload session: builds a unique hex key from the file
/// name and a fresh GUID, pre-creates an empty placeholder file under
/// public/files, and returns the key to the client.
/// </summary>
public async Task <IActionResult> UploadFileStreaming(string Name)
{
    // Key encodes "<basename>|<guid>|<extension>", hex-encoded.
    var guid = Guid.NewGuid();
    var stem = Path.GetFileNameWithoutExtension(Name);
    var extension = Path.GetExtension(Name);
    var key = Encoding.UTF8.GetBytes($"{stem}|{guid.ToString()}|{extension}").ToHex();

    var placeholder = Path.Combine("public", "files", key);
    System.IO.File.Create(placeholder).Close();

    Utils.Log($"Starting Ingest for {key}");
    return Ok(key);
}
/// <summary>
/// Convenience overload: infers the MIME type from the path's extension and
/// delegates to the three-argument StoreIcon overload.
/// </summary>
public static void StoreIcon(string extension, string path)
{
    var mimeType = getMimeType(Path.GetExtension(path));
    StoreIcon(extension, path, mimeType);
}
// Validates a modlist against Nexus mod permissions and the server download
// whitelist. Per archive/directive it checks: asset- and ESP-modification
// permission for patched files, BSA-extraction permission for files taken out
// of BSAs, cross-game usage permission for archives from another game, and
// whitelisting of all download states. Returns the collected human-readable
// error messages (empty when valid).
public IEnumerable <string> Validate(ModList modlist)
{
    // ConcurrentStack because errors are pushed from parallel PMap workers.
    ConcurrentStack <string> ValidationErrors = new ConcurrentStack <string>();

    // Permission set for every Nexus-sourced archive, keyed by archive hash.
    var nexus_mod_permissions = modlist.Archives
        .Where(a => a.State is NexusDownloader.State)
        .PMap(a => (a.Hash, FilePermissions((NexusDownloader.State)a.State), a))
        .ToDictionary(a => a.Hash, a => new { permissions = a.Item2, archive = a.a });

    // Patched files: the source mod must allow asset and/or ESP modification.
    modlist.Directives
        .OfType <PatchedFromArchive>()
        .PMap(p =>
        {
            if (nexus_mod_permissions.TryGetValue(p.ArchiveHashPath[0], out var archive))
            {
                var ext = Path.GetExtension(p.ArchiveHashPath.Last());
                var url = (archive.archive.State as NexusDownloader.State).NexusURL;
                if (Consts.AssetFileExtensions.Contains(ext) && !(archive.permissions.CanModifyAssets ?? true))
                {
                    ValidationErrors.Push($"{p.To} from {url} is set to disallow asset modification");
                }
                else if (Consts.ESPFileExtensions.Contains(ext) && !(archive.permissions.CanModifyESPs ?? true))
                {
                    ValidationErrors.Push($"{p.To} from {url} is set to disallow asset ESP modification");
                }
            }
        });

    // Files carved out of BSAs inside the archive: extraction must be allowed.
    modlist.Directives
        .OfType <FromArchive>()
        .PMap(p =>
        {
            if (nexus_mod_permissions.TryGetValue(p.ArchiveHashPath[0], out var archive))
            {
                var url = (archive.archive.State as NexusDownloader.State).NexusURL;
                if (!(archive.permissions.CanExtractBSAs ?? true) && p.ArchiveHashPath.Skip(1).ButLast().Any(a => Consts.SupportedBSAs.Contains(Path.GetExtension(a).ToLower())))
                {
                    ValidationErrors.Push($"{p.To} from {url} is set to disallow BSA Extraction");
                }
            }
        });

    // Archives from a different game than the modlist's target game must
    // explicitly allow cross-game conversion.
    var nexus = NexusApi.NexusApiUtils.ConvertGameName(GameRegistry.Games[modlist.GameType].NexusName);
    modlist.Archives
        .Where(a => a.State is NexusDownloader.State)
        .Where(m => NexusApi.NexusApiUtils.ConvertGameName(((NexusDownloader.State)m.State).GameName) != nexus)
        .Do(m =>
        {
            var permissions = FilePermissions((NexusDownloader.State)m.State);
            if (!(permissions.CanUseInOtherGames ?? true))
            {
                ValidationErrors.Push(
                    $"The modlist is for {nexus} but {m.Name} is for game type {((NexusDownloader.State)m.State).GameName} and is not allowed to be converted to other game types");
            }
        });

    // Every download state must be on the server whitelist.
    modlist.Archives
        .Where(m => !m.State.IsWhitelisted(ServerWhitelist))
        .Do(m => { ValidationErrors.Push($"{m.Name} is not a whitelisted download"); });

    return(ValidationErrors.ToList());
}
// One-shot 037 -> 038 migration (status-bar variant): same walk as the splash
// screen version, but reports progress through StatusManager and additionally
// skips S3DB/Intranet sync directories (identified by a sync-ref metadata
// file) so only genuine local Qiqqa libraries are migrated.
internal static void RunUpgrade()
{
    Logging.Info("Upgrading from 037 to 038");

    string base_directory_path = BaseDirectoryForQiqqa;
    if (Directory.Exists(base_directory_path))
    {
        int info_library_count, info_item_count;

        string[] library_directories = Directory.GetDirectories(base_directory_path);
        info_library_count = 0;
        foreach (string library_directory in library_directories)
        {
            ++info_library_count;
            Logging.Info("Inspecting directory {0}", library_directory);

            string documents_directory = Path.GetFullPath(Path.Combine(library_directory, @"documents"));
            string database_file = LibraryDB.GetLibraryDBPath(library_directory);
            string database_syncref_file = IntranetLibraryTools.GetLibraryMetadataPath(library_directory);

            // make sure we skip S3DB internet DB sync directories and only 'go through the upgrade process
            // when this looks like a viable (local) Qiqqa library:
            if (!File.Exists(database_file) && Directory.Exists(documents_directory) && !File.Exists(database_syncref_file))
            {
                Logging.Warn("We have to upgrade {0}", library_directory);

                SQLiteUpgrade_LibraryDB library_db = new SQLiteUpgrade_LibraryDB(library_directory);
                using (var connection = library_db.GetConnection())
                {
                    connection.Open();
                    using (var transaction = connection.BeginTransaction())
                    {
                        // Get a list of ALL the files in the documents directory...
                        string[] full_filenames = Directory.GetFiles(documents_directory, "*.*", SearchOption.AllDirectories);
                        info_item_count = 0;
                        foreach (string full_filename in full_filenames)
                        {
                            ++info_item_count;
                            StatusManager.Instance.UpdateStatus("DBUpgrade", String.Format("Upgrading library {0}/{1}", info_library_count, library_directories.Length), info_item_count, full_filenames.Length);

                            // File layout is "<fingerprint>.<extension>"; only files
                            // with a known extension are migrated into the blob store.
                            string fingerprint = Path.GetFileNameWithoutExtension(full_filename);
                            string extension = Path.GetExtension(full_filename).Trim('.');
                            if (EXTENSIONS.Contains(extension))
                            {
                                Logging.Info("Upgrading {0}--{1}", fingerprint, extension);
                                byte[] data = File.ReadAllBytes(full_filename);
                                library_db.PutBlob(connection, transaction, fingerprint, extension, data);
                            }
                            else
                            {
                                Logging.Info("NOT upgrading {0}--{1}", fingerprint, extension);
                            }
                        }
                        transaction.Commit();
                    }
                }
            }
        }
    }
    StatusManager.Instance.UpdateStatus("DBUpgrade", "Finished migrating libraries.");
}
/// <summary>
/// Computes the BSA hash of a path: separators are normalized to backslashes,
/// then the stem (path without extension) and the extension are hashed by the
/// two-argument overload.
/// </summary>
public static ulong GetBSAHash(this string name)
{
    var normalized = name.Replace('/', '\\');
    var stem = Path.ChangeExtension(normalized, null);
    var extension = Path.GetExtension(normalized);
    return GetBSAHash(stem, extension);
}
// Builds a file path for an approval-test output file that stays below
// PATH_MAX (240) characters. Several progressively longer name variants are
// constructed (camel-case shorthands, MD5-based short hash, trimmed data-file
// name, full name) and the longest variant that still fits is chosen. The
// typeIdStr part (e.g. approved/received marker) is deliberately excluded from
// the fitting check — a fixed ".APPROVEDXYZ" placeholder is used instead — so
// the approved and received files always pick the SAME variant.
private string MkLegalSizedPath(string basename, string typeIdStr)
{
    const int PATH_MAX = 240; // must be less than 255 / 260 - see also https://kb.acronis.com/content/39790

    string root = Path.GetDirectoryName(basename);
    string name = Path.GetFileName(basename);
    string dataname = Path.GetFileNameWithoutExtension(DataFile);
    string ext = SubStr(Path.GetExtension(DataFile), 1).Trim(); // produce the extension without leading dot
    // Strip a leading "bib" from the extension, then re-attach the dot if
    // anything remains.
    if (ext.StartsWith("bib"))
    {
        ext = SubStr(ext, 3).Trim();
    }
    if (ext.Length > 0)
    {
        ext = "." + ext;
    }

    // UNC long filename/path support by forcing this to be a UNC path:
    string filenamebase = $"{dataname}.{name}{ext}{ExtensionWithDot}";

    // first make the full path without the approved/received, so that that bit doesn't make a difference
    // in the length check and subsequent decision to produce a shorthand filename path or not:
    // It's not always needed, but do the different shorthand conversions anyway and pick the longest fitting one:
    string short_tn = SanitizeFilename(CamelCaseShorthand(name));
    string short_dn = SanitizeFilename(SubStr(dataname, 0, 10) + CamelCaseShorthand(dataname));
    string hash = StreamMD5.FromText(filenamebase).ToUpper();
    // Short hash is at least 6 chars, longer when the test-name shorthand is short.
    string short_hash = SubStr(hash, 0, Math.Max(6, 11 - short_tn.Length));

    // this variant will fit in the length criterium, guaranteed:
    string alt_filepath0 = Path.GetFullPath(Path.Combine(root, $"{short_dn}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
    string filepath = alt_filepath0;

    // next, we construct the longer variants to check if they fit.
    //
    // DO NOTE that we create a path without typeIdStr part first, because we want both received and approved files to be based
    // on the *same* alt selection decision!
    string picked_alt_filepath = Path.GetFullPath(Path.Combine(root, $"{short_dn}.{short_hash}_{short_tn}{ext}.APPROVEDXYZ{ExtensionWithDot}"));

    name = SanitizeFilename(name);
    dataname = SanitizeFilename(dataname);

    // first alternative: shorthand data name + full test name.
    string alt_filepath1 = Path.GetFullPath(Path.Combine(root, $"{short_dn}_{short_hash}.{name}{ext}.APPROVEDXYZ{ExtensionWithDot}"));
    if (alt_filepath1.Length < PATH_MAX)
    {
        filepath = Path.GetFullPath(Path.Combine(root, $"{short_dn}_{short_hash}.{name}{ext}{typeIdStr}{ExtensionWithDot}"));
        picked_alt_filepath = alt_filepath1;
    }

    // second alternative: only pick this one if it fits and produces a longer name:
    string alt_filepath2 = Path.GetFullPath(Path.Combine(root, $"{dataname}.{short_hash}_{short_tn}{ext}.APPROVEDXYZ{ExtensionWithDot}"));
    if (alt_filepath2.Length < PATH_MAX && alt_filepath2.Length > picked_alt_filepath.Length)
    {
        filepath = Path.GetFullPath(Path.Combine(root, $"{dataname}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
        picked_alt_filepath = alt_filepath2;
    }
    else
    {
        // third alt: the 'optimally trimmed' test name used as part of the filename:
        int trim_length = PATH_MAX - alt_filepath0.Length + 10 - 1;
        string short_dn2 = SanitizeFilename(SubStr(dataname, 0, trim_length) + CamelCaseShorthand(dataname));
        string alt_filepath3 = Path.GetFullPath(Path.Combine(root, $"{short_dn2}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
        if (alt_filepath3.Length < PATH_MAX && alt_filepath3.Length > picked_alt_filepath.Length)
        {
            filepath = Path.GetFullPath(Path.Combine(root, $"{short_dn2}.{short_hash}_{short_tn}{ext}{typeIdStr}{ExtensionWithDot}"));
            picked_alt_filepath = alt_filepath3;
        }
    }

    // fourth alt: the full, unadulterated path; if it fits in the length criterium, take it anyway
    string alt_filepath4 = Path.GetFullPath(Path.Combine(root, $"{dataname}.{name}{ext}.APPROVEDXYZ{ExtensionWithDot}"));
    if (alt_filepath4.Length < PATH_MAX)
    {
        // UNC long filename/path support by forcing this to be a UNC path:
        filepath = Path.GetFullPath(Path.Combine(root, $"{dataname}.{name}{ext}{typeIdStr}{ExtensionWithDot}"));
        picked_alt_filepath = alt_filepath4;
    }

    return(filepath);
}
// Delegates extension extraction to AfsPath so extension semantics stay
// consistent with the rest of the AFS path handling.
public override string GetExtension(string path) => AfsPath.GetExtension(path);