// Compares the database file at its remote location against the hash of the
// currently loaded database and triggers a reload when they differ.
public override void Run()
{
    try
    {
        IOConnectionInfo ioc = _app.GetDb().Ioc;
        IFileStorage fileStorage = _app.GetFileStorage(ioc);

        // A cached database must be synced through the cache layer, not here.
        if (fileStorage is CachingFileStorage)
            throw new Exception("Cannot sync a cached database!");

        StatusLogger.UpdateMessage(UiStringKey.CheckingDatabaseForChanges);

        // Download the remote file, hashing it while it is being copied.
        StatusLogger.UpdateSubMessage(_app.GetResourceString(UiStringKey.DownloadingRemoteFile));
        var remoteData = new MemoryStream();
        using (var hashingRemoteStream = new HashingStreamEx(fileStorage.OpenFileForRead(ioc), false, new SHA256Managed()))
        {
            hashingRemoteStream.CopyTo(remoteData);
            hashingRemoteStream.Close();

            byte[] localHash = _app.GetDb().KpDatabase.HashOfFileOnDisk;
            if (MemUtil.ArraysEqual(localHash, hashingRemoteStream.Hash))
            {
                // Remote matches what we loaded — nothing to do.
                Finish(true, _app.GetResourceString(UiStringKey.RemoteDatabaseUnchanged));
            }
            else
            {
                // Remote changed: ask the app to reload the database.
                _app.TriggerReload(_context);
                Finish(true);
            }
        }
    }
    catch (Exception e)
    {
        // Report any failure (including the cached-database guard above)
        // through the task result instead of letting it propagate.
        Finish(false, e.Message);
    }
}
// Finalizes a successful read: records the hash of the file as read from
// disk, closes both streams and restores default database housekeeping state.
private void CommonCleanUpRead(Stream sSource, HashingStreamEx hashedStream)
{
    // The hash is read only after Close() (this ordering is used consistently
    // for HashingStreamEx throughout this codebase).
    hashedStream.Close();
    m_pbHashOfFileOnDisk = hashedStream.Hash;
    sSource.Close();

    // Reset memory protection settings (to always use reasonable defaults).
    m_pwDatabase.MemoryProtection = new MemoryProtectionConfig();

    // Remove old backups. Required here to apply the default history
    // maintenance settings for people upgrading from KeePass <= 2.14 to
    // >= 2.15, and to keep history integrity when a different application
    // created the KDBX file while ignoring the history maintenance settings.
    // Does not mark the database as modified.
    m_pwDatabase.MaintainBackups();

    // Expand the root group, so that if the user accidentally collapsed it
    // he can simply reopen the database.
    PwGroup pgRoot = m_pwDatabase.RootGroup;
    if (pgRoot == null)
    {
        Debug.Assert(false);
    }
    else
    {
        pgRoot.IsExpanded = true;
    }

    m_pbHashOfHeader = null;
}
// Writes kpDatabase to 'stream' in the legacy KDB (v3) format, then closes
// the stream and records the hash of the written data on the database.
public void Save(PwDatabase kpDatabase, Stream stream)
{
    PwDatabaseV3 db =new PwDatabaseV3();

    // Master key: extract password (empty string if absent) and optional
    // key file contents from the 2.x composite key.
    KcpPassword pwd = kpDatabase.MasterKey.GetUserKey<KcpPassword>();
    string password = pwd != null ? pwd.Password.ReadString() : "";
    KcpKeyFile keyfile = kpDatabase.MasterKey.GetUserKey<KcpKeyFile>();
    Stream keyfileContents = null;
    if (keyfile != null)
    {
        keyfileContents = new MemoryStream(keyfile.RawFileData.ReadData());
    }
    db.SetMasterKey(password, keyfileContents);
    db.NumRounds = (long) kpDatabase.KeyEncryptionRounds;
    db.Name = kpDatabase.Name;

    // Cipher mapping: AES (Rijndael) keeps its algorithm; anything else is
    // written as Twofish ("Rjindal" is the enum's existing spelling).
    if (kpDatabase.DataCipherUuid.Equals(StandardAesEngine.AesUuid))
    {
        db.Algorithm = PwEncryptionAlgorithm.Rjindal;
    }
    else
    {
        db.Algorithm = PwEncryptionAlgorithm.Twofish;
    }

    //create groups
    db.Groups.Clear();
    var fromGroups = kpDatabase.RootGroup.GetGroups(true);
    // Index converted groups by their v3 integer id for parent lookups below.
    Dictionary<int, PwGroupV3> groupV3s = new Dictionary<int, PwGroupV3>(fromGroups.Count());
    foreach (PwGroup g in fromGroups)
    {
        // The 2.x root group is handled separately below, not stored as a
        // regular v3 group.
        if (g == kpDatabase.RootGroup)
            continue;
        PwGroupV3 groupV3 = ConvertGroup(g, db);
        db.Groups.Add(groupV3);
        groupV3s[groupV3.Id.Id] = groupV3;
    }

    //traverse again and assign parents
    db.RootGroup = ConvertGroup(kpDatabase.RootGroup, db);
    // Level -1 marks the synthetic root in the v3 hierarchy.
    db.RootGroup.Level = -1;
    AssignParent(kpDatabase.RootGroup, db, groupV3s);

    // Convert all entries and attach each to its converted parent group
    // (looked up via _groupData, which maps 2.x UUIDs to v3 group data).
    foreach (PwEntry e in kpDatabase.RootGroup.GetEntries(true))
    {
        PwEntryV3 entryV3 = ConvertEntry(e, db);
        entryV3.Parent = groupV3s[_groupData[e.ParentGroup.Uuid].Id];
        entryV3.Parent.ChildEntries.Add(entryV3);
        entryV3.GroupId = entryV3.Parent.Id.Id;
        db.Entries.Add(entryV3);
    }

    //add meta stream entries:
    // Meta streams need some group to live in; the first group is used.
    if (db.Groups.Any())
    {
        foreach (var metaEntry in _metaStreams)
        {
            metaEntry.GroupId = db.Groups.First().Id.Id;
            db.Entries.Add(metaEntry);
        }
    }

    // Write the v3 database through a hashing wrapper so the hash of the
    // written bytes can be recorded afterwards.
    HashingStreamEx hashedStream = new HashingStreamEx(stream, true, null);
    PwDbV3Output output = new PwDbV3Output(db, hashedStream);
    output.Output();
    hashedStream.Close();
    HashOfLastStream = hashedStream.Hash;
    kpDatabase.HashOfLastIO = kpDatabase.HashOfFileOnDisk = HashOfLastStream;
    stream.Close();
}
// Finalizes a successful read: records the hash of the file as read from
// disk, closes both streams and restores default database housekeeping state.
private void CommonCleanUpRead(Stream sSource, HashingStreamEx hashedStream)
{
    // Close before reading Hash (this ordering is used consistently for
    // HashingStreamEx throughout this codebase).
    hashedStream.Close();
    m_pbHashOfFileOnDisk = hashedStream.Hash;
    sSource.Close();

    // Reset memory protection settings (to always use reasonable
    // defaults)
    m_pwDatabase.MemoryProtection = new MemoryProtectionConfig();

    // Remove old backups (this call is required here in order to apply
    // the default history maintenance settings for people upgrading from
    // KeePass <= 2.14 to >= 2.15; also it ensures history integrity in
    // case a different application has created the KDBX file and ignored
    // the history maintenance settings)
    m_pwDatabase.MaintainBackups(); // Don't mark database as modified

    // Clear the cached header hash; it belongs to the read that just ended.
    m_pbHashOfHeader = null;
}
// Finalizes a read: closes the hashing stream, records the resulting hash of
// the file as read from disk, and closes the underlying source stream.
private void CommonCleanUpRead(Stream sSource, HashingStreamEx hashedStream)
{
    // Close before reading Hash (this ordering is used consistently for
    // HashingStreamEx throughout this codebase).
    hashedStream.Close();
    m_pbHashOfFileOnDisk = hashedStream.Hash;
    sSource.Close();
}
// Flushes the buffered contents to the local cache and to the remote file,
// then closes the underlying stream. Safe to call more than once.
public override void Close()
{
    // Idempotency guard: a second Close() is a no-op.
    if (_closed)
        return;

    //write file to cache:
    //(note: this might overwrite local changes. It's assumed that a sync operation or check was performed before
    string hash;
    using (var hashingStream = new HashingStreamEx(File.Create(_cachingFileStorage.CachedFilePath(ioc)), true, new SHA256Managed()))
    {
        Position = 0;
        CopyTo(hashingStream);
        hashingStream.Close();
        hash = MemUtil.ByteArrayToHexString(hashingStream.Hash);
    }
    File.WriteAllText(_cachingFileStorage.VersionFilePath(ioc), hash);

    //update file on remote. This might overwrite changes there as well, see above.
    Position = 0;
    bool alreadyCached = _cachingFileStorage.IsCached(ioc);
    if (alreadyCached)
    {
        //if the file already is in the cache, it's ok if writing to remote fails.
        _cachingFileStorage.TryUpdateRemoteFile(this, ioc, _useFileTransaction, hash);
    }
    else
    {
        //if not, we don't accept a failure (e.g. invalid credentials would always remain a problem)
        _cachingFileStorage.UpdateRemoteFile(this, ioc, _useFileTransaction, hash);
    }

    base.Close();
    _closed = true;
}
/// <summary>
/// Copies the file in ioc to the local cache, updates the cache version files
/// and returns the new file hash.
/// </summary>
protected string UpdateCacheFromRemote(IOConnectionInfo ioc, string cachedFilePath)
{
    // We could consult the stored file version to skip the copy when the
    // cache is already current; unconditionally re-downloading is safer.
    string fileHash;
    using (Stream remoteFile = _cachedStorage.OpenFileForRead(ioc))
    using (HashingStreamEx cachedFile = new HashingStreamEx(File.Create(cachedFilePath), true, new SHA256Managed()))
    {
        remoteFile.CopyTo(cachedFile);
        cachedFile.Close();
        fileHash = MemUtil.ByteArrayToHexString(cachedFile.Hash);
    }

    // Record the hash as both the current and the base version of the cache.
    File.WriteAllText(VersionFilePath(ioc), fileHash);
    File.WriteAllText(BaseVersionFilePath(ioc), fileHash);

    return fileHash;
}
// Downloads the file in ioc into a MemoryStream (positioned at 0) and returns
// it; 'hash' receives the hex-encoded SHA-256 of the downloaded data.
public MemoryStream GetRemoteDataAndHash(IOConnectionInfo ioc, out string hash)
{
    MemoryStream remoteData = new MemoryStream();
    using (var remoteStream = _cachedStorage.OpenFileForRead(ioc))
    {
        //note: directly copying to remoteData and hashing causes NullReferenceExceptions in FTP and with Digest auth
        // -> use the temp data approach:
        MemoryStream tempData = new MemoryStream();
        remoteStream.CopyTo(tempData);
        tempData.Position = 0;
        // Fix: the hashing wrapper was previously only Close()d on the
        // success path; wrap it in 'using' so it is disposed even if
        // CopyTo throws.
        using (HashingStreamEx hashingRemoteStream = new HashingStreamEx(tempData, false, new SHA256Managed()))
        {
            hashingRemoteStream.CopyTo(remoteData);
            // Close before reading Hash (HashingStreamEx is used this way
            // consistently throughout this codebase).
            hashingRemoteStream.Close();
            hash = MemUtil.ByteArrayToHexString(hashingRemoteStream.Hash);
        }
    }
    remoteData.Position = 0;
    return remoteData;
}
// Finalizes a write: closes the hashing stream, records the hash of the data
// just written as the new on-disk hash, closes the target stream, and clears
// per-write state.
private void CommonCleanUpWrite(Stream sSaveTo, HashingStreamEx hashedStream)
{
    // Close before reading Hash (this ordering is used consistently for
    // HashingStreamEx throughout this codebase).
    hashedStream.Close();
    m_pbHashOfFileOnDisk = hashedStream.Hash;
    sSaveTo.Close();

    // Release the writer and the cached header hash used by this write.
    m_xmlWriter = null;
    m_pbHashOfHeader = null;
}