/// <summary>
/// Creates a new, empty StoreFile attached to either a category or a product
/// and appends it to the end of the current ordering.
/// </summary>
/// <param name="Category">Optional category to attach the file to (takes precedence).</param>
/// <param name="Product">Optional product to attach the file to.</param>
/// <returns>An empty ContentResult.</returns>
public ActionResult Add(int? Category, int? Product)
{
    // New files go to the end of the global order.
    var storeFile = new StoreFile()
    {
        Name = "",
        Link = "",
        Download = false,
        OrderNum = db.StoreFiles.Count() + 1
    };

    bool attached = false;
    if (Category.HasValue)
    {
        storeFile.CategoryID = Category;
        attached = true;
    }
    else if (Product.HasValue)
    {
        storeFile.ProductID = Product;
        attached = true;
    }

    // Only persist files that are attached to a category or a product;
    // an unattached request is silently ignored.
    if (attached)
    {
        db.StoreFiles.InsertOnSubmit(storeFile);
        db.SubmitChanges();
    }

    return new ContentResult();
}
/// <summary>
/// Deduplicates two identical files by replacing <paramref name="file1"/> with a
/// hard link onto <paramref name="file2"/>'s data.
/// </summary>
/// <param name="file1">The file to be replaced by a hard link.</param>
/// <param name="file2">The file whose data the new link will share.</param>
/// <returns>true if a new hard link was created; false if the two files were already hardlinked.</returns>
private bool JoinWithHardlink(StoreFile file1, StoreFile file2)
{
    // Already sharing the same data; nothing to do.
    if (FileUtils.AreHardlinked(file1, file2)) { return(false); }

    // Unseal each implementation directory the first time it is touched;
    // HashSet.Add returns false when the path was already unsealed.
    if (_unsealedImplementations.Add(file1.ImplementationPath)) { FileUtils.DisableWriteProtection(file1.ImplementationPath); }
    if (_unsealedImplementations.Add(file2.ImplementationPath)) { FileUtils.DisableWriteProtection(file2.ImplementationPath); }

    // Create the link under a random temporary name, then swap it in for file1.
    // NOTE(review): presumably FileUtils.Replace is an atomic replace — confirm.
    string tempFile = Path.Combine(_storePath, Path.GetRandomFileName());
    try
    {
        Log.Info("Hard link: " + file1 + " <=> " + file2);
        FileUtils.CreateHardlink(tempFile, file2);
        FileUtils.Replace(tempFile, file1);
    }
    finally
    {
        // Clean up the staging link if Replace did not consume it.
        if (File.Exists(tempFile)) { File.Delete(tempFile); }
    }
    return(true);
}
/// <summary>
/// Moves a StoreFile one position up or down within its category/product group
/// by swapping OrderNum with its adjacent neighbour.
/// </summary>
/// <param name="Type">"up" to move towards lower OrderNum; anything else moves down.</param>
/// <param name="ID">The ID of the StoreFile to move.</param>
/// <returns>An empty ContentResult.</returns>
public ActionResult ChangeOrder(string Type, int ID)
{
    var item = db.StoreFiles.FirstOrDefault(x => x.ID == ID);
    if (item != null)
    {
        // Restrict the candidate set to the same group as the item.
        var list = db.StoreFiles.AsQueryable();
        if (item.CategoryID.HasValue)
        {
            list = list.Where(x => x.CategoryID == item.CategoryID);
        }
        else if (item.ProductID.HasValue)
        {
            list = list.Where(x => x.ProductID == item.ProductID);
        }

        // BUG FIX: FirstOrDefault on an unordered query returns an ARBITRARY
        // element with a smaller/greater OrderNum, which could swap the item
        // several positions at once. Order the query so we swap with the
        // immediately adjacent neighbour.
        StoreFile pair = Type == "up"
            ? list.Where(x => x.OrderNum < item.OrderNum).OrderByDescending(x => x.OrderNum).FirstOrDefault()
            : list.Where(x => x.OrderNum > item.OrderNum).OrderBy(x => x.OrderNum).FirstOrDefault();

        if (pair != null)
        {
            var on = item.OrderNum;
            item.OrderNum = pair.OrderNum;
            pair.OrderNum = on;
            db.SubmitChanges();
        }
    }
    return new ContentResult();
}
/// <summary>
/// Builds (or reuses) a thumbnail glyph for a zip media object by extracting the
/// first image found inside the archive.
/// </summary>
/// <param name="argMediaModel">The zip media object to create a thumbnail for.</param>
/// <returns>
/// The updated glyph, the original glyph when the file is not a .zip, or an empty
/// ItemGlyph when an exception occurs.
/// </returns>
public ItemGlyph GetThumbImageFromZip(MediaModel argMediaModel)
{
    try
    {
        ItemGlyph returnItemGlyph = argMediaModel.ModelItemGlyph;
        // Derive the model that will hold the extracted "~zipimage" .jpg.
        IMediaModel newMediaModel = UtilCreateNewMediaObject(argMediaModel, "~zipimage", ".jpg");

        // TODO Having an issue where Gramps XML content type is not always correct,
        // so trust the file extension rather than the declared MIME type.
        if (argMediaModel.MediaStorageFile.FInfo.Extension != ".zip")
        {
            _commonNotifications.DataLogEntryAdd($"??? {argMediaModel.Id} Inconsistant File Extension ({argMediaModel.MediaStorageFile.FInfo.Extension}) and MIME type ({argMediaModel.FileMimeType}/{argMediaModel.FileMimeSubType})");
            return(argMediaModel.ModelItemGlyph);
        }

        IMediaModel zipimage;

        // Check if the extracted zip image file already exists.
        IMediaModel fileExists = DV.MediaDV.GetModelFromHLinkKey(newMediaModel.HLinkKey);
        if ((!fileExists.Valid) && (argMediaModel.IsMediaStorageFileValid))
        {
            // Extract the first image inside the archive and save it as the glyph.
            zipimage = StoreFile.ExtractZipFileFirstImage(DataStore.Instance.AD.CurrentDataFolder.Value, argMediaModel, newMediaModel);
            returnItemGlyph = UtilSaveNewMediaObject(returnItemGlyph, zipimage, IconFont.FileArchive);
        }
        else
        {
            // NOTE(review): this branch also fires when the thumbnail ALREADY exists
            // (fileExists.Valid), yet it reports "File not found" — and the message
            // says "PDF" in a zip routine. Looks copy-pasted; confirm intent.
            ErrorInfo t = UtilGetPostGlyphErrorInfo("File not found when trying to create image from PDF file", argMediaModel);
            _commonNotifications.NotifyError(t);
        }

        return(returnItemGlyph);
    }
    catch (System.Exception ex)
    {
        // NOTE(review): error text mentions PDF/"pdfimage" although this handles zips;
        // presumably copied from the PDF variant — confirm before relying on the log text.
        ErrorInfo t = new ErrorInfo("Directory not found when trying to create image from PDF file")
        {
            { "Original ID", argMediaModel.Id },
            { "Original File", argMediaModel.MediaStorageFilePath },
            { "Clipped Id", argMediaModel.Id },
            { "New path", "pdfimage" }
        };
        App.Current.Services.GetService<IErrorNotifications>().NotifyException("PDF to Image", ex, t);
        return(new ItemGlyph());
    }
}
/// <summary>
/// Executes the work-step for a single implementation: walks its manifest and
/// hard-links any file whose (size, mtime, format, digest) key was already seen.
/// </summary>
/// <param name="manifestDigest">The digest identifying the implementation to process.</param>
public void Work(ManifestDigest manifestDigest)
{
    string? digestString = manifestDigest.Best;
    if (digestString == null)
    {
        return;
    }

    string implementationPath = Path.Combine(_storePath, digestString);
    var manifest = Manifest.Load(
        Path.Combine(implementationPath, Manifest.ManifestFile),
        ManifestFormat.FromPrefix(digestString));

    // Directory prefix announced by the most recent directory node.
    string currentDirectory = "";
    foreach (var node in manifest)
    {
        switch (node)
        {
            case ManifestDirectory x:
                currentDirectory = FileUtils.UnifySlashes(x.FullPath.TrimStart('/'));
                break;

            case ManifestFileBase x:
                // BUG FIX: this was 'return', which aborted the whole work-step at
                // the FIRST zero-byte file, silently skipping every remaining
                // manifest entry. Empty files should merely be skipped.
                if (x.Size == 0)
                {
                    break;
                }

                var key = new DedupKey(x.Size, x.ModifiedTime, manifest.Format, x.Digest);
                var file = new StoreFile(implementationPath, Path.Combine(currentDirectory, x.Name));
                if (_fileHashes.TryGetValue(key, out var existingFile))
                {
                    // Known content: link the two files unless they already share data.
                    if (!FileUtils.AreHardlinked(file, existingFile))
                    {
                        if (JoinWithHardlink(file, existingFile))
                        {
                            SavedBytes += x.Size;
                        }
                    }
                }
                else
                {
                    // First occurrence of this content: remember it.
                    _fileHashes.Add(key, file);
                }
                break;
        }
    }
}
/// <summary>
/// Executes the work-step for a single implementation: dispatches over the
/// manifest's nodes, remembering file content keys and hard-linking duplicates.
/// </summary>
/// <param name="manifestDigest">The digest identifying the implementation to process.</param>
public void Work(ManifestDigest manifestDigest)
{
    string digestString = manifestDigest.Best;
    if (digestString == null)
    {
        return;
    }

    string implementationPath = Path.Combine(_storePath, digestString);
    var manifest = Manifest.Load(
        Path.Combine(implementationPath, Manifest.ManifestFile),
        ManifestFormat.FromPrefix(digestString));

    // Captured by both handlers: the prefix set by the latest directory node.
    string relativeDir = "";

    new AggregateDispatcher<ManifestNode>
    {
        (ManifestDirectory dirNode) =>
        {
            relativeDir = FileUtils.UnifySlashes(dirNode.FullPath.TrimStart('/'));
        },
        (ManifestFileBase fileNode) =>
        {
            // Zero-byte files are not worth deduplicating; skip them.
            if (fileNode.Size == 0)
            {
                return;
            }

            var key = new DedupKey(fileNode.Size, fileNode.ModifiedTime, manifest.Format, fileNode.Digest);
            var candidate = new StoreFile(implementationPath, Path.Combine(relativeDir, fileNode.Name));

            if (!_fileHashes.TryGetValue(key, out var knownFile))
            {
                // First time we see this content key: remember the file.
                _fileHashes.Add(key, candidate);
            }
            else if (!FileUtils.AreHardlinked(candidate, knownFile) && JoinWithHardlink(candidate, knownFile))
            {
                SavedBytes += fileNode.Size;
            }
        }
    }.Dispatch(manifest);
}
/// <summary>
/// Stores the contents of the data file at <paramref name="dataPath"/> into the file.
/// </summary>
/// <param name="file">The file in which to store the data.</param>
/// <param name="dataPath">The path of the data file to read.</param>
/// <param name="storeMode">How the data should be stored (compression/encryption flags).</param>
/// <returns>
/// false when reading or storing failed (the file's data is reset); otherwise true —
/// including when <paramref name="dataPath"/> does not exist.
/// </returns>
public bool WriteFile(StoreFile file, string dataPath, StoreMode storeMode)
{
    // BUG FIX: previously a null dataPath was also reported as parameter "file";
    // validate and report each argument separately.
    if (file == null)
    {
        throw new ArgumentNullException(nameof(file));
    }
    if (dataPath == null)
    {
        throw new ArgumentNullException(nameof(dataPath));
    }

    if (File.Exists(dataPath))
    {
        try
        {
            byte[] buffer = File.ReadAllBytes(dataPath);
            SetFileData(file, buffer, storeMode);
        }
        catch
        {
            // Deliberate best-effort: on any read/store failure leave the file
            // empty and signal failure to the caller instead of throwing.
            file.ResetData();
            return false;
        }
    }
    // NOTE(review): a missing dataPath silently returns true — confirm callers
    // intend "source file absent" to count as success.
    return true;
}
/// <summary>
/// Stores the contents of the given stream into the file.
/// </summary>
/// <param name="file">The file in which to store the data.</param>
/// <param name="stream">The stream to store; must report Length and support Read.</param>
/// <param name="storeMode">How the data should be stored (compression/encryption flags).</param>
public void WriteFile(StoreFile file, Stream stream, StoreMode storeMode)
{
    // BUG FIX: report the actual offending parameter rather than "file | stream".
    if (file == null)
    {
        throw new ArgumentNullException(nameof(file));
    }
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    if (stream.Length > 0)
    {
        // NOTE(review): the (int) cast truncates streams larger than 2 GB —
        // presumably store files are small; confirm.
        byte[] buffer = new byte[(int)stream.Length];

        // BUG FIX: Stream.Read may legally return fewer bytes than requested
        // (e.g. network/deflate streams), which left the tail of the buffer
        // zero-filled. Loop until the buffer is full or the stream ends.
        int total = 0;
        int read;
        while (total < buffer.Length
               && (read = stream.Read(buffer, total, buffer.Length - total)) > 0)
        {
            total += read;
        }

        if (total > 0)
        {
            SetFileData(file, buffer, storeMode);
        }
    }
    else
    {
        // An empty stream clears any previously stored data.
        file.ResetData();
    }
}
/// <summary>
/// Stores the given data into the file.
/// </summary>
/// <param name="file">The file in which to store the data.</param>
/// <param name="data">The data to store.</param>
/// <param name="storeMode">How the data should be stored (compression/encryption flags).</param>
public void WriteFile(StoreFile file, byte[] data, StoreMode storeMode)
{
    // Use nameof so the reported parameter name survives renames.
    if (file == null)
    {
        throw new ArgumentNullException(nameof(file));
    }

    SetFileData(file, data, storeMode);
}
/// <summary>
/// Gets the contents of the given file, undoing encryption and then compression
/// (the reverse order of how they are applied on write).
/// </summary>
/// <param name="file">The file from which to get the data.</param>
/// <returns>The stored bytes trimmed to RealSize, or null when the file holds no data.</returns>
public byte[] ReadFile(StoreFile file)
{
    if (file == null)
    {
        throw new ArgumentNullException("file");
    }

    byte[] stored = file.Data;
    if (stored == null)
    {
        // Nothing has ever been written to this file.
        return null;
    }

    // Undo the transformations recorded in the file's StoreMode flags.
    if ((file.StoreMode & StoreMode.Encrypted) == StoreMode.Encrypted)
    {
        stored = DecryptData(stored);
    }
    if ((file.StoreMode & StoreMode.Compressed) == StoreMode.Compressed)
    {
        stored = DecompressData(stored);
    }

    // Already the right size (or smaller): hand it back as-is.
    if (stored.Length <= file.RealSize)
    {
        return stored;
    }

    // Trim any padding so exactly RealSize bytes are returned.
    byte[] trimmed = new byte[file.RealSize];
    Buffer.BlockCopy(stored, 0, trimmed, 0, (int)file.RealSize);
    return trimmed;
}
/// <summary>
/// Gets a StoreFile array of the files in the given folder.
/// </summary>
/// <param name="path">The folder path.</param>
/// <returns>The folder's files, or null when the folder does not exist.</returns>
public StoreFile[] GetFolderFilesEx(string path)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }

    StoreFolder folder = GetFolder(path);
    if (folder == null)
    {
        return null;
    }

    // Resolve each file GUID held by the folder into its StoreFile instance.
    int count = folder.Files.Count;
    StoreFile[] result = new StoreFile[count];
    for (int index = 0; index < count; index++)
    {
        result[index] = this.files[folder.Files.Values[index]];
    }
    return result;
}
/// <summary>
/// Executes the work-step for a single implementation: dispatches over the
/// manifest's nodes and hard-links files whose content key has been seen before.
/// </summary>
/// <param name="manifestDigest">The digest identifying the implementation to process.</param>
public void Work(ManifestDigest manifestDigest)
{
    string best = manifestDigest.Best;
    if (best == null)
    {
        return;
    }

    string implementationPath = Path.Combine(_storePath, best);
    var manifest = Manifest.Load(Path.Combine(implementationPath, Manifest.ManifestFile), ManifestFormat.FromPrefix(best));

    // Shared with both handlers below: prefix set by the latest directory node.
    string directoryPrefix = "";
    var dispatcher = new AggregateDispatcher<ManifestNode>
    {
        (ManifestDirectory directory) =>
        {
            directoryPrefix = FileUtils.UnifySlashes(directory.FullPath.TrimStart('/'));
        },
        (ManifestFileBase entry) =>
        {
            if (entry.Size == 0)
            {
                return; // nothing to gain from linking empty files
            }

            var dedupKey = new DedupKey(entry.Size, entry.ModifiedTime, manifest.Format, entry.Digest);
            var storeFile = new StoreFile(implementationPath, Path.Combine(directoryPrefix, entry.Name));
            if (_fileHashes.TryGetValue(dedupKey, out var match))
            {
                // Same content seen before: join the two files unless already linked.
                if (!FileUtils.AreHardlinked(storeFile, match) && JoinWithHardlink(storeFile, match))
                {
                    SavedBytes += entry.Size;
                }
            }
            else
            {
                _fileHashes.Add(dedupKey, storeFile);
            }
        }
    };
    dispatcher.Dispatch(manifest);
}
/// <summary>
/// Loads the embedded "GrampsView Test Basic.gpkg" test resource into the data
/// store, wires up the storage services by hand, runs a full data load, and
/// asserts that the input stream was found.
/// </summary>
public static void LoadTestFile()
{
    // Load Resource: the test .gpkg is embedded in this assembly.
    var assemblyExec = Assembly.GetExecutingAssembly();
    var resourceName = BasePath + ".Test_Data.GrampsView Test Basic.gpkg";
    DataStore.Instance.AD.CurrentInputStream = assemblyExec.GetManifestResourceStream(resourceName);
    DataStore.Instance.AD.CurrentInputStreamPath = "Test Data/Test_Data.GrampsView Test Basic.gpkg";

    // Remove the old dateTime stamps so the files get reloaded even if they
    // have been seen before.  TODO
    CommonLocalSettings.SetReloadDatabase();

    GeneralData.setupMocks();

    // Other setup: construct the storage/serialisation services manually
    // (no DI container in this test path).
    IMessenger iocEventAggregator = GeneralData.mocEventAggregator.Object;
    IStoreXML iocExternalStorage = new StoreXML(GeneralData.iocCommonLogging, GeneralData.iocCommonNotifications);
    IStorePostLoad iocGrampsStorePostLoad = new StorePostLoad(GeneralData.iocCommonLogging, GeneralData.iocCommonNotifications, iocEventAggregator);
    IGrampsStoreSerial iocGrampsStoreSerial = new GrampsStoreSerial(GeneralData.iocCommonLogging);
    IStoreFile iocStoreFile = new StoreFile();
    DataRepositoryManager newManager = new DataRepositoryManager(GeneralData.iocCommonLogging, GeneralData.iocCommonNotifications, iocEventAggregator, iocExternalStorage, iocGrampsStorePostLoad, iocGrampsStoreSerial, iocStoreFile);
    StorePostLoad newPostLoad = new StorePostLoad(GeneralData.iocCommonLogging, GeneralData.iocCommonNotifications, iocEventAggregator);

    //// Clear the repositories in case we had to restart after being interrupted.
    //// TODO have better mock
    DataStore.Instance.AD.LoadDataStore();

    //DataStore.Instance.AD.CurrentDataFolder.Value = new DirectoryInfo(DataStorePath);
    //if (DataStore.Instance.AD.CurrentDataFolder.Value.Exists)
    //{
    //    DataStore.Instance.AD.CurrentDataFolder.Value.Delete(true);
    //}
    //DataStore.Instance.AD.CurrentDataFolder.Value.Create();

    // Time to start loading the data.
    DataStoreSetup();
    DataRepositoryManager.ClearRepositories();
    newManager.StartDataLoad();
    newPostLoad.LoadXMLUIItems(null);

    //// 1) UnTar *.GPKG
    //iocStoreFile.DataStorageInitialiseAsync().ConfigureAwait(false);
    //newManager.TriggerLoadGPKGFileAsync().ConfigureAwait(false);

    //// 2) UnZip new data.GRAMPS file
    //FileInfoEx GrampsFile = StoreFolder.FolderGetFile(Constants.StorageGRAMPSFileName);
    //DataStore.Instance.CN.DataLogEntryAdd("Later version of Gramps data file found. Loading it into the program").ConfigureAwait(false);
    //newManager.TriggerLoadGRAMPSFileAsync(false).ConfigureAwait(false);

    //// 3) Load new data.XML file
    //FileInfoEx dataXML = StoreFolder.FolderGetFile(Constants.StorageXMLFileName);
    //DataStore.Instance.CN.DataLogEntryAdd("Later version of Gramps XML data file found. Loading it into the program").ConfigureAwait(false);

    //// Load the new data
    //newManager.TriggerLoadGrampsUnZippedFolderAsync().ConfigureAwait(false);

    //// Fixup the models and hlinks
    //StorePostLoad myStorePostLoad = new StorePostLoad(iocCommonLogging, iocEventAggregator, iocPlatformSpecific);
    //myStorePostLoad.LoadXMLUIItems(null);

    //DataStore.Instance.CN.DataLogHide();

    // The embedded resource must have been found for the load above to mean anything.
    Assert.True(DataStore.Instance.AD.CurrentInputStream != null);
}
/// <summary>
/// Loads media objects from external storage by walking every "object" element
/// of the GRAMPS XML document and adding a MediaModel for each to the data store.
/// </summary>
/// <returns>
/// true on completion; exceptions during the load are reported via notifications
/// and rethrown.
/// </returns>
public async Task <bool> LoadMediaObjectsAsync()
{
    localGrampsCommonLogging.LogRoutineEntry("loadMediaObjects");
    await DataStore.CN.MajorStatusAdd("Loading Media Objects").ConfigureAwait(false);
    {
        // start file load
        await DataStore.CN.MajorStatusAdd("Loading Media").ConfigureAwait(false);

        //// Get colour
        //Application.Current.Resources.TryGetValue("CardBackGroundMedia", out var varCardColour);
        //Color cardColour = (Color)varCardColour;

        // Query all <object> elements from the loaded GRAMPS XML document.
        var de = from el in localGrampsXMLdoc.Descendants(ns + "object") select el;
        try
        {
            foreach (XElement pname in de)
            {
                // Per the GRAMPS RELAX NG schema, object-content holds a <file>
                // element (src, mime, optional checksum/description attributes)
                // plus optional attribute/noteref/date/citationref children.
                IMediaModel loadObject = new MediaModel();
                loadObject.LoadBasics(GetBasics(pname));

                //IMediaModel loadObject = new MediaModel
                //{
                //    // object details
                //    Id = (string)pname.Attribute("id"),
                //    Handle = (string)pname.Attribute("handle"),
                //    Priv = SetPrivateObject((string)pname.Attribute("priv")),
                //    Change = GetDateTime(GetAttribute(pname, "change")),
                //};

                // Debugging hook: breakpoint target for a specific object id.
                if (loadObject.Id == "O0168")
                {
                }

                // file details
                XElement filedetails = pname.Element(ns + "file");
                if (filedetails != null)
                {
                    loadObject.FileContentType = (string)filedetails.Attribute("mime");
                    string mediaFileName = (string)filedetails.Attribute("src");
                    if (mediaFileName.Length == 0)
                    {
                        DataStore.CN.NotifyError("Error trying to load a media file for object (" + loadObject.Id + ") listed in the GRAMPS file. FileName is null");
                        loadObject.MediaStorageFile = null;
                    }
                    else
                    {
                        try
                        {
                            string temp = StoreFileUtility.CleanFilePath(mediaFileName);
                            await DataStore.CN.MajorStatusAdd("Loading media file: " + temp).ConfigureAwait(false);
                            loadObject.OriginalFilePath = temp;

                            // Load FileInfoEx and image metadata (width/height).
                            loadObject.MediaStorageFile = await StoreFile.GetStorageFileAsync(loadObject.OriginalFilePath).ConfigureAwait(false);
                            var imageSize = DependencyService.Get <IImageResource>().GetSize(loadObject.MediaStorageFilePath);
                            loadObject.MetaDataHeight = imageSize.Height;
                            loadObject.MetaDataWidth = imageSize.Width;
                        }
                        catch (Exception ex)
                        {
                            DataStore.CN.NotifyException("Error trying to load a media file (" + loadObject.OriginalFilePath + ") listed in the GRAMPS file", ex);
                            throw;
                        }
                    }
                }

                // Get description.
                // NOTE(review): filedetails is dereferenced here OUTSIDE the null
                // check above — an <object> without a <file> child would throw a
                // NullReferenceException. Confirm the schema guarantees <file>.
                loadObject.GDescription = (string)filedetails.Attribute("description");

                // date details
                XElement dateval = pname.Element(ns + "dateval");
                if (dateval != null)
                {
                    loadObject.GDateValue = SetDate(pname);
                }

                // Load NoteRefs
                loadObject.GNoteRefCollection = GetNoteCollection(pname);

                // citationref details TODO Event References
                loadObject.GCitationRefCollection = GetCitationCollection(pname);
                loadObject.GTagRefCollection = GetTagCollection(pname);
                loadObject = SetHomeImage(loadObject);

                // save the object
                DataStore.DS.MediaData.Add((MediaModel)loadObject);
                localGrampsCommonLogging.LogVariable("LoadMedia", loadObject.GDescription);
            }
        }
        catch (Exception e)
        {
            // TODO handle this properly rather than notify-and-rethrow.
            DataStore.CN.NotifyException("Loading Media Objects", e);
            throw;
        }
    }
    await DataStore.CN.MajorStatusDelete().ConfigureAwait(false);
    localGrampsCommonLogging.LogRoutineExit(nameof(LoadMediaObjectsAsync));
    return(true);
}
/// <summary>
/// Applies the requested StoreMode transformations (compress, then encrypt) to
/// the data and stores the result in the file, recording the original length.
/// </summary>
/// <param name="file">The file to receive the data.</param>
/// <param name="data">The raw data; null resets the file's data.</param>
/// <param name="storeMode">Which transformations to apply before storing.</param>
private void SetFileData(StoreFile file, byte[] data, StoreMode storeMode)
{
    if (data == null)
    {
        file.ResetData();
        // BUG FIX: previously execution fell through and dereferenced
        // data.Length below, throwing NullReferenceException after the reset.
        return;
    }

    byte[] buffer = data;

    // compress first ...
    if ((storeMode & StoreMode.Compressed) == StoreMode.Compressed)
    {
        buffer = CompressData(buffer);
    }

    // ... then encrypt, mirroring ReadFile which decrypts before decompressing.
    if ((storeMode & StoreMode.Encrypted) == StoreMode.Encrypted)
    {
        buffer = EncryptData(buffer);
    }

    file.StoreMode = storeMode;
    // Keep the ORIGINAL length so reads can trim padding back to RealSize.
    file.SetData(buffer, data.Length);
}
/// <summary>
/// Loads the thumbnails etc on the UI thread due to limitations with BitMapImage in
/// Background threads.  Organises every repository after a data load and then
/// publishes the data-save and load-complete events.
/// </summary>
/// <param name="notUsed">
/// The not used callback argument.
/// </param>
// NOTE(review): async void means exceptions thrown here cannot be observed by the
// caller; presumably this is intentionally fire-and-forget — confirm.
private async void LoadXMLUIItems(object notUsed)
{
    _CommonLogging.LogRoutineEntry("LoadXMLUIItems");
    await DataStore.CN.ChangeLoadingMessage("Organising data after load").ConfigureAwait(false);
    {
        await DataStore.CN.MajorStatusAdd("This will take a while...").ConfigureAwait(false);
        {
            // Preload image cache
            // NOTE(review): ttt appears unused — looks like a leftover from the
            // commented-out preload loop below; confirm before removing.
            StoreFile ttt = new StoreFile();
            //foreach (MediaModel item in DataStore.DS.MediaData.GetList)
            //{
            //    item.MediaStorageFile = await StoreFile.GetStorageFileAsync(item.OriginalFilePath).ConfigureAwait(false);
            //
            //    //if (item.Id == "O0196")
            //    //{
            //    //}
            //
            //    var imageSize = DependencyService.Get<IImageResource>().GetSize(item.MediaStorageFilePath);
            //
            //    //Debug.WriteLine(imageSize);
            //    item.MetaDataHeight = imageSize.Height;
            //    item.MetaDataWidth = imageSize.Width;
            //}

            // Called in order of media linkages from Media outwards
            await OrganiseMediaRepository().ConfigureAwait(false);
            await OrganiseSourceRepository().ConfigureAwait(false);
            await OrganiseCitationRepository().ConfigureAwait(false);
            await OrganiseEventRepository().ConfigureAwait(false);
            await OrganiseFamilyRepository().ConfigureAwait(false);
            await OrganiseHeaderRepository().ConfigureAwait(false);
            await OrganiseNameMapRepository().ConfigureAwait(false);
            await OrganiseNoteRepository().ConfigureAwait(false);
            await OrganisePlaceRepository().ConfigureAwait(false);
            await OrganiseRepositoryRepository().ConfigureAwait(false);
            await OrganiseTagRepository().ConfigureAwait(false);

            // People last as they pretty much depend on everything else
            await OrganisePersonRepository().ConfigureAwait(false);
        }
        await DataStore.CN.MajorStatusDelete().ConfigureAwait(false);
    }
    await DataStore.CN.ChangeLoadingMessage(null).ConfigureAwait(false);
    await DataStore.CN.MajorStatusAdd("Load XML UI Complete - Data ready for display").ConfigureAwait(false);

    //// save the data in a serial format for next time
    _EventAggregator.GetEvent <DataSaveSerialEvent>().Publish(null);

    // let everybody know we have finished loading data
    _EventAggregator.GetEvent <DataLoadCompleteEvent>().Publish(null);

    _CommonLogging.LogRoutineExit(nameof(LoadXMLUIItems));
}
/// <summary>
/// Synchronize new files from the vault storage provider. The
/// SyncPath specifies the path to download changed files from the
/// vault storage provider to. It may be populated with existing
/// files, and only newer files should be transferred from the vault
/// storage provider.
///
/// This function is called from the transfer synchronization thread.
/// </summary>
/// <param name="AccountName">Supplies the account name.</param>
/// <param name="SyncPath">Supplies the path to synchronize files to
/// from the vault. Existing files should be datestamp compared with
/// the vault before being replaced. File are downloaded from the
/// vault, not uploaded, with this routine.</param>
/// <param name="Context">Supplies a context handle.</param>
/// <returns>TRUE (1) on success, FALSE (0) on failure.</returns>
private static int OnSynchronizeAccountFromVault(string AccountName, string SyncPath, IntPtr Context)
{
    try
    {
        //
        // Pass through to the default implementation if the connection
        // string is not defined in the database.
        //

        if (String.IsNullOrEmpty(StoreConnectionString))
        {
            return(1);
        }

        string OriginalAccountName = AccountName;

        //
        // Canonicalize names to lowercase as the file store may be
        // case sensitive and maintaining a mapping table in the
        // database is problematic since the first save for a new
        // account may be observed before the players record for
        // that player is created (and a player could log in to two
        // servers simultaneously and create orphaned records that
        // way, or similarly during a database outage, etc.).
        //

        AccountName = AccountName.ToLowerInvariant();

        try
        {
            bool DirCreated = false;
            FileStoreDirectory StoreDirectory = Container.GetDirectoryReference(AccountName);
            IEnumerable <string> FsFiles = null;
            Dictionary <string, FileStoreFile> StoreFiles = new Dictionary <string, FileStoreFile>();

            //
            // Build an index of all .bic files in the file store vault,
            // keyed by lowercased character file name.
            //

            foreach (FileStoreFile StoreFile in StoreDirectory.GetFiles())
            {
                string[] Segments = StoreFile.Uri.Segments;

                if (Segments.Length == 0)
                {
                    continue;
                }

                string CharacterFileName = Segments[Segments.Length - 1].ToLowerInvariant();

                if (!CharacterFileName.EndsWith(".bic"))
                {
                    continue;
                }

                StoreFiles.Add(CharacterFileName, StoreFile);
            }

            //
            // Enumerate files currently in the file system directory,
            // transferring each corresponding file from the store
            // directory if the modified date of the store file is
            // after the modified date of the local file.
            //

            if (Directory.Exists(SyncPath))
            {
                FsFiles = Directory.EnumerateFiles(SyncPath);

                foreach (string FsFileName in FsFiles)
                {
                    DateTime FsLastModified = File.GetLastWriteTimeUtc(FsFileName);
                    string CharacterFileName = Path.GetFileName(FsFileName);
                    string Key = CharacterFileName.ToLowerInvariant();
                    FileStoreFile StoreFile;
                    string TempFileName = null;

                    if (!StoreFiles.TryGetValue(Key, out StoreFile))
                    {
                        //
                        // This file exists locally but not in the file
                        // store vault. Keep it (any excess files may be
                        // removed by explicit local vault cleanup).
                        //

                        continue;
                    }

                    //
                    // Transfer the file if the file store vault has a more
                    // recent version. The download goes to a temp file first
                    // so a failed transfer never corrupts the local copy.
                    //

                    try
                    {
                        TempFileName = Path.GetTempFileName();

                        try
                        {
                            using (FileStream FsFile = File.Open(TempFileName, FileMode.Create))
                            {
                                // Conditional download: throws
                                // FileStoreConditionNotMetException when the
                                // vault copy is not newer than FsLastModified.
                                StoreFile.ReadIfModifiedSince(FsFile, new DateTimeOffset(FsLastModified));
                            }

                            try
                            {
                                File.Copy(TempFileName, FsFileName, true);
                                File.SetLastWriteTimeUtc(FsFileName, StoreFile.LastModified.Value.DateTime);
                            }
                            catch
                            {
                                //
                                // Clean up after a failed attempt to
                                // instantiate copy file.
                                //

                                ALFA.SystemInfo.SafeDeleteFile(FsFileName);
                                throw;
                            }

                            if (VerboseLoggingEnabled)
                            {
                                Logger.Log("ServerVaultConnector.OnSynchronizeAccountFromVault: Downloaded vault file '{0}\\{1}' with modified date {2} -> {3}.", AccountName, CharacterFileName, FsLastModified, File.GetLastWriteTimeUtc(FsFileName));
                            }
                        }
                        catch (FileStoreConditionNotMetException)
                        {
                            //
                            // This file was not transferred because it is
                            // already up to date.
                            //

                            Logger.Log("ServerVaultConnector.OnSynchronizeAccountFromVault: Vault file '{0}\\{1}' was already up to date with modified date {2}.", AccountName, CharacterFileName, FsLastModified);
                        }
                    }
                    finally
                    {
                        if (!String.IsNullOrEmpty(TempFileName))
                        {
                            ALFA.SystemInfo.SafeDeleteFile(TempFileName);
                        }
                    }

                    //
                    // Remove this file from the list as it has already
                    // been accounted for (it is up to date or has just
                    // been transferred).
                    //

                    StoreFiles.Remove(Key);
                }
            }

            //
            // Sweep any files that were still not yet processed but
            // existed in the file store vault. These files are
            // present on the canonical vault but have not yet been
            // populated on the local vault, so transfer them now.
            //

            foreach (var StoreFile in StoreFiles.Values)
            {
                string[] Segments = StoreFile.Uri.Segments;
                string CharacterFileName = Segments[Segments.Length - 1].ToLowerInvariant();
                string FsFileName = SyncPath + Path.DirectorySeparatorChar + CharacterFileName;
                string TempFileName = null;

                // Defend against path traversal / reserved names coming from the vault.
                if (!ALFA.SystemInfo.IsSafeFileName(CharacterFileName))
                {
                    throw new ApplicationException("Unsafe filename '" + CharacterFileName + "' on vault for account '" + AccountName + "'.");
                }

                if (!DirCreated)
                {
                    DirCreated = Directory.Exists(SyncPath);

                    //
                    // Create the sync directory if it does not exist.
                    // Attempt to preserve case from the vault store if
                    // possible, but fall back to using the case that
                    // the client specified at login time otherwise.
                    //

                    if (!DirCreated)
                    {
                        try
                        {
                            string OriginalName;

                            StoreFile.FetchAttributes();
                            OriginalName = StoreFile.Metadata["OriginalFileName"];

                            if (OriginalName != null && OriginalName.ToLowerInvariant() == AccountName + "/" + CharacterFileName)
                            {
                                // The metadata preserves the original-cased
                                // "Account/file.bic" path; use its account part.
                                OriginalName = OriginalName.Split('/').FirstOrDefault();
                                DirectoryInfo Parent = Directory.GetParent(SyncPath);
                                Directory.CreateDirectory(Parent.FullName + "\\" + OriginalName);

                                if (VerboseLoggingEnabled)
                                {
                                    Logger.Log("ServerVaultConnector.OnSynchronizeAccountFromVault: Created vault directory for account '{0}'.", OriginalName);
                                }

                                DirCreated = true;
                            }
                        }
                        catch (Exception e)
                        {
                            Logger.Log("ServerVaultConnector.OnSynchronizeAccountFromVault: Exception {0} recovering canonical case for creating vault directory '{1}', using account name from client instead.", e, OriginalAccountName);
                        }

                        if (!DirCreated)
                        {
                            Directory.CreateDirectory(SyncPath);
                            DirCreated = true;
                        }
                    }
                }

                try
                {
                    TempFileName = Path.GetTempFileName();

                    using (FileStream FsFile = File.Open(TempFileName, FileMode.OpenOrCreate))
                    {
                        StoreFile.Read(FsFile);
                    }

                    try
                    {
                        File.Copy(TempFileName, FsFileName);
                        File.SetLastWriteTimeUtc(FsFileName, StoreFile.LastModified.Value.DateTime);
                    }
                    catch
                    {
                        //
                        // Clean up after a failed attempt to
                        // instantiate a new file.
                        //

                        ALFA.SystemInfo.SafeDeleteFile(FsFileName);
                        throw;
                    }

                    if (VerboseLoggingEnabled)
                    {
                        Logger.Log("ServerVaultConnector.OnSynchronizeAccountFromVault: Downloaded new vault file '{0}\\{1}' with modified date {2}.", AccountName, CharacterFileName, File.GetLastWriteTimeUtc(FsFileName));
                    }
                }
                finally
                {
                    ALFA.SystemInfo.SafeDeleteFile(TempFileName);
                }
            }

            return(1);
        }
        catch (Exception e)
        {
            Logger.Log("ServerVaultConnector.OnSynchronizeAccountFromVault('{0}', '{1}'): Exception: {2}", AccountName, SyncPath, e);
            throw;
        }
    }
    catch
    {
        // Any failure is reported to the caller as FALSE; details were logged above.
        return(0);
    }
}
/// <summary>
/// Merges the on-disk storage of two identical files by turning
/// <paramref name="file1"/> into a hard link onto <paramref name="file2"/>'s data.
/// </summary>
/// <param name="file1">The file to be replaced by a hard link.</param>
/// <param name="file2">The file whose data the link will share.</param>
/// <returns>true when a link was created; false when the pair was already linked.</returns>
private bool JoinWithHardlink(StoreFile file1, StoreFile file2)
{
    // Nothing to do when both names already point at the same data.
    bool alreadyLinked = FileUtils.AreHardlinked(file1, file2);
    if (alreadyLinked)
    {
        return false;
    }

    // Lift write protection the first time each implementation is touched
    // (file1's implementation first, then file2's, as before).
    foreach (var implementation in new[] { file1.ImplementationPath, file2.ImplementationPath })
    {
        if (_unsealedImplementations.Add(implementation))
        {
            FileUtils.DisableWriteProtection(implementation);
        }
    }

    // Stage the link under a random name, then swap it in for file1.
    string stagingLink = Path.Combine(_storePath, Path.GetRandomFileName());
    try
    {
        Log.Info("Hard link: " + file1 + " <=> " + file2);
        FileUtils.CreateHardlink(stagingLink, file2);
        FileUtils.Replace(stagingLink, file1);
    }
    finally
    {
        // Remove the staging link if Replace did not consume it.
        if (File.Exists(stagingLink))
        {
            File.Delete(stagingLink);
        }
    }

    return true;
}
/// <summary>
/// Creates a file at the given path, or returns the existing file if the parent
/// folder already contains one with that name.
/// </summary>
/// <param name="path">The file path.</param>
/// <returns>The new or existing StoreFile, or null when the path has no file name.</returns>
public StoreFile CreateFile(string path)
{
    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    string[] components = path.Split(Separators, StringSplitOptions.RemoveEmptyEntries);
    if (components.Length == 0)
    {
        return null;
    }

    // The last component is the file name; everything before it is the folder path.
    string fileName = components[components.Length - 1];
    StoreFolder folder = GetFolder(path.Substring(0, path.Length - fileName.Length));
    // NOTE(review): GetFolder can apparently return null (GetFolderFilesEx checks
    // for it); a null here throws NullReferenceException below — confirm whether
    // the parent folder is guaranteed to exist for every path passed in.

    // Return the existing file if the folder already contains this name.
    // (Single TryGetValue lookup instead of ContainsKey followed by the indexer.)
    if (folder.Files.TryGetValue(fileName, out Guid existingId))
    {
        return files[existingId];
    }

    // Add the file: register it in the folder and in the global file table.
    StoreFile file = new StoreFile(fileName);
    Guid fileId = Guid.NewGuid();
    folder.Files.Add(fileName, fileId);
    files.Add(fileId, file);
    return file;
}