/// <summary>
/// Downloads a single cloud-only file into the local directory by delegating
/// to a per-file <c>FileSync</c>.
/// </summary>
/// <param name="fileName">Local-style file path/name recorded in the cloud metadata;
/// only its file-name component is used to locate the cloud file.</param>
private void SyncFromCloud(string fileName)
{
    var localInfo = new FileInfo(fileName);
    var remoteFile = CloudDirectory.GetFileReference(localInfo.Name);
    new FileSync(CloudDirectory, remoteFile, LocalDirectory, _logger).Sync();
}
/// <summary>
/// Synchronizes this directory pair: queues every local file and every
/// cloud-only file onto the file scheduler, waits for them, then queues
/// recursive syncs for local and cloud-only subdirectories.
/// </summary>
public void Sync()
{
    _fileScheduler.RunAsync = RunAsync;
    var sw = Stopwatch.StartNew();
    LoadDirectoryMetadata();
    _logger.WriteLine("Syncing folder {0}", LocalDirectory.Name);

    var files = LocalDirectory.GetFiles("*.*", SearchOption.TopDirectoryOnly);
    var dirs = LocalDirectory.GetDirectories("*.*", SearchOption.TopDirectoryOnly);

    // Hoist the local names into a HashSet: the original ran List.Contains per
    // cloud name, making the "missing" computation O(n*m). Both name lists are
    // lower-cased in LoadDirectoryMetadata, so default ordinal equality is kept.
    var localFileNames = new HashSet<string>(_localFileNames);
    var missingLocalFiles = _cloudFileNames.Where(name => !localFileNames.Contains(name)).ToList();

    // Queue every local file for sync (upload/compare)...
    foreach (var file in files)
    {
        _fileScheduler.Execute(new Task(() => SyncFile(file)));
    }

    // ...and every cloud file with no local counterpart for download.
    foreach (var missingLocalFile in missingLocalFiles)
    {
        _fileScheduler.Execute(new Task(() => SyncFromCloud(missingLocalFile)));
    }

    _fileScheduler.WaitAll();
    sw.Stop();
    _logger.WriteLine("Finish syncing {0} in {1}", LocalDirectory.Name, sw.Elapsed);

    // Same set-lookup fix for directories: the original re-ran
    // dirs.Select(j => j.Name).Contains(...) for every cloud directory.
    var localDirNames = new HashSet<string>(dirs.Select(d => d.Name));
    var missingLocalDirectories = _cloudDirectories.Where(d => !localDirNames.Contains(d.Name)).ToList();

    // Recurse into each directory that already exists locally.
    foreach (var dir in dirs)
    {
        _directoryScheduler.Execute(new Task(() =>
        {
            var dirSync = new DirectorySync(CloudDirectory.GetDirectoryReference(dir.Name), dir, _logger);
            dirSync.Sync();
        }));
    }

    // Materialize cloud-only directories locally, then recurse into them.
    foreach (var cloudDir in missingLocalDirectories)
    {
        _directoryScheduler.Execute(new Task(() =>
        {
            var dirInfo = new DirectoryInfo(Path.Combine(LocalDirectory.FullName, cloudDir.Name));
            dirInfo.Create();
            var dirSync = new DirectorySync(cloudDir, dirInfo, _logger);
            dirSync.Sync();
        }));
    }
}
/// <summary>
/// Lists the cloud directory, splitting items into <c>_cloudFiles</c> and
/// <c>_cloudDirectories</c>, then builds the lower-cased name lists used by
/// <c>Sync</c> to detect missing files.
/// </summary>
private void LoadDirectoryMetadata()
{
    _logger.WriteLine("Loading directory metadata {0}", CloudDirectory.Name);
    var items = CloudDirectory.ListFilesAndDirectories();
    foreach (var item in items)
    {
        // Declaration pattern replaces the original is-check + explicit cast.
        if (item is CloudFile file)
        {
            file.FetchAttributes(); // populate Metadata before it is read below
            _cloudFiles.Add(file);
        }
        else
        {
            _cloudDirectories.Add((CloudFileDirectory)item);
        }
    }

    // Both sides are lower-cased so the membership checks in Sync() compare
    // case-insensitively via default ordinal equality.
    _cloudFileNames = _cloudFiles
        .Where(f => f.Metadata.ContainsKey(Constants.LocalFileName))
        .Select(f => f.Metadata[Constants.LocalFileName].ToLowerInvariant())
        .ToList();

    _localFileNames = LocalDirectory.GetFiles("*.*", SearchOption.TopDirectoryOnly)
        .Select(f => f.FullName.ToLowerInvariant())
        .ToList();
}
/// <summary>
/// Archives a drop-stream folder: compresses it locally, prunes remote files
/// over a size threshold, uploads the archive via rclone, then cleans up the
/// local temp folder, the remote drop target, and the cloud recycle bin.
/// </summary>
/// <param name="rCloneDirectory">Path to the rclone installation/executable directory.</param>
/// <param name="localDropStream">Local folder whose contents are being archived.</param>
/// <param name="localArchiverBuffer">Local buffer/staging area for the archiver.</param>
/// <param name="remoteDropStreamTarget">Remote folder to delete after the move.</param>
/// <param name="remoteArchive">Remote archive destination.</param>
/// <param name="fileFormatNameRegex">Regex used to build the time-stamped file name.</param>
/// <param name="fileExtenstion">Extension for the produced archive file.</param>
/// <param name="thesholdInGigabytes">Size threshold (GB), parsed invariantly.</param>
public static void archiveFolder(
    String rCloneDirectory,
    String localDropStream,
    String localArchiverBuffer,
    String remoteDropStreamTarget,
    String remoteArchive,
    String fileFormatNameRegex,
    String fileExtenstion,
    String thesholdInGigabytes)
{
    // Timer for diagnosing overall run time.
    Stopwatch watch = Stopwatch.StartNew();
    String localTempFolder = String.Empty;
    String localZipDestination = String.Empty;
    try
    {
        // Pick a temporary staging folder.
        Logger.Info("Getting Temparary Folder... ");
        localTempFolder = Organizer.getTempFolderPath(localDropStream, localArchiverBuffer);
        Logger.Info(String.Format("{0} - {1}", "Temparary Folder Retrieved!", localTempFolder));

        // Where the zip file will be located locally.
        Logger.Info("Creating Time-Stamped folders...");
        localZipDestination = Organizer.createTimestampFolders(localDropStream, localArchiverBuffer, fileFormatNameRegex, fileExtenstion);
        Logger.Info(String.Format("{0}: {1}", "Time-Stamped folders created! Local Zip Destination", localZipDestination));

        // Compress / remove the folder being archived.
        Logger.Info(String.Format("{0}: {1}", "Compress and removing target folder to the following location", localTempFolder));
        Organizer.compressAndRemoveTargetFolder(localZipDestination);
        Logger.Info("Successfully compressed and removed folder!");

        // Rename duplicated file names so the threshold pruning below is unambiguous.
        Logger.Info(String.Format("{0}: {1}", "Renaming any duplicated files for removal", localTempFolder));
        CDirectory.renameDuplicatedFiles(rCloneDirectory, remoteArchive);
        Logger.Info("Duplicates renamed / removed!");

        // FileInfo over the produced zip, used when evaluating the threshold.
        FileInfo info = new FileInfo(localZipDestination);

        // List all existing files in the target archive.
        var existingFiles = CloudDirectory.serializeDirectory(CDirectory.getFilesStatsInDirectory(rCloneDirectory, remoteArchive));

        // Delete any remote files over the threshold.
        Logger.Info(String.Format("Removing any files over: {0} (GB) At remote Location: {1} Utilizing: {2}", thesholdInGigabytes, remoteArchive, info.Name));
        // Parse with the invariant culture so the threshold keeps its meaning in
        // locales that use ',' as the decimal separator (CA1305 fix).
        List<FileCloudInfo> filesToRemove = Containment.getFIlesInDirectoryOverThreshold(
            existingFiles, info, Double.Parse(thesholdInGigabytes, System.Globalization.CultureInfo.InvariantCulture));
        // Count property instead of the LINQ Count() extension on a List<T>.
        Logger.Info("Now removing a total of {0} files from cloud directory: {1}", filesToRemove.Count, remoteArchive);
        // Print out all of the files to remove.
        Logger.Debug("Target Files: {0}", String.Concat(filesToRemove.Select(o => String.Format("\n{0} ", o.FilePath))));

        // Run command to delete *any* target files.
        filesToRemove.ForEach(i => CDelete.deleteDirectory(rCloneDirectory, String.Format(@"{0}/{1}", remoteArchive, i.FilePath)));

        // The original passed a third argument to this two-placeholder format
        // string; the extra (raw byte sum) was silently ignored, so it is dropped.
        Logger.Info("Ran command to removed files over threshold! Files *removed*: {0} | Memory *Free'd up*: {1} (GB) ",
            filesToRemove.Count,
            ByteSizeLib.ByteSize.FromBytes(filesToRemove.Sum(i => i.Length)).GigaBytes);

        // Move the zipped file to cloud storage.
        Logger.Info(String.Format("{0} - Local Temp Folder: {1} RemoteArchive: {2}", "Moving the compressed file to cloud storage!", localTempFolder, remoteArchive));
        CMove.moveFile(rCloneDirectory, localTempFolder, remoteArchive, Config.compressionFormat, Config.connectionAttempts);
        Logger.Info(String.Format("{0}", "Successfully deleted Contents!"));

        // Delete the local temp folder now that the move succeeded.
        Logger.Info(String.Format("{0}: {1}", "Deleting the following local 'Temp Folder' ", localTempFolder));
        System.IO.Directory.Delete(localTempFolder, true);
        Logger.Info("Successfully deleted the local temp folder!");

        // TODO: Move this to a later process...
        Logger.Info(String.Format("{0} - rCloneLocation: {1} gDriveName: {2}", "Deleting requested remote folders", rCloneDirectory, remoteDropStreamTarget));
        CDelete.deleteDirectory(rCloneDirectory, remoteDropStreamTarget);
        Logger.Info(String.Format("{0}", "Deletion of contents command has been ran!"));

        // Due to a bug, the cloud software may not "release" files; restarting it fixes this.
        Logger.Info(String.Format("{0} - cloudProcessName: {1} cloudProcessPath: {2}", "Restarting Process", Config.cloudProcessName, Config.cloudProcessPath));
        Management.restartProcess(Config.cloudProcessName, Config.cloudProcessPath);
        Logger.Info("Process successully restarted!");

        // Empty the cloud recycle bin.
        Logger.Info(String.Format("{0} - rCloneLocation: {1} gDriveName: {2}", "Emptying Cloud Folder", rCloneDirectory, Config.driveProfileName));
        CDelete.emptyTrashFolder(rCloneDirectory, Config.driveProfileName);
        Logger.Info("Successfully emptied cloud recycle bin");

        Logger.Info(String.Format("{0} - Elasped time:{1}", "Archiver has successully been ran!", watch.Elapsed.ToString()));
    }
    catch (OrganizerException e)
    {
        Logger.Error(e, String.Format("{0} - {1} (Elapsed time before error: {2} ", "Error while prepping files before transfer", e.Message, watch.Elapsed.ToString()));
        Logger.Trace(e.StackTrace);
    }
    catch (Rclone_Move_Exception e)
    {
        Logger.Error(e, String.Format("{0} - {1} (Elapsed time before error: {2}", "Error while transfering file to the cloud", e.Message, watch.Elapsed.ToString()));
        Logger.Trace(e.StackTrace);
    }
    catch (Exception e)
    {
        Logger.Error(e, String.Format("{0} - {1} (Elapsed time before error: {2} ", "Error while Archiving", e.Message, watch.Elapsed.ToString()));
        Logger.Trace(e.StackTrace);
    }
    finally
    {
        // If the process fails partway, remove the temporary directory.
        if (Directory.Exists(localTempFolder))
        {
            Directory.Delete(localTempFolder, true);
        }
    }
}
/// <summary>
/// Walks every blob under the current user's prefix, rebuilds the folder
/// hierarchy as a <c>CloudDirectory</c> tree, and returns the tree (with total
/// size) serialized to JSON.
/// </summary>
/// <returns>The JSON-serialized <c>Wrapper</c> for the user's storage tree.</returns>
public async Task<string> Storage2JSON()
{
    long dirSize = 0;

    // Flat-list every blob under "<userName>/" and collect path + attributes.
    foreach (var blobItem in _client.ListBlobs(String.Format("{0}/", _userName), true))
    {
        var reference = blobItem.Container.GetBlockBlobReference(blobItem.Uri.AbsoluteUri);
        await reference.FetchAttributesAsync();
        dirSize += reference.Properties.Length;

        var realPath = reference.Uri.LocalPath;
        var temp = realPath.Substring(1);
        // NOTE(review): the start index is computed on `temp` but applied to
        // `realPath`, so `temp2` appears to keep a leading '/' — a suspected
        // off-by-one later masked by StringSplitOptions.RemoveEmptyEntries.
        // Preserved as-is; confirm against real blob paths before changing.
        var temp2 = realPath.Substring(temp.IndexOf('/') + 1);

        directory.Add(new DirectoryEntry()
        {
            CFile = new CloudFile()
            {
                ContentType = reference.Properties.ContentType,
                FileSize = reference.Properties.Length
            },
            Path = temp2
        });
    }

    // Fold the flat entries into a directory tree rooted at the user's storage.
    CloudDirectory root = new CloudDirectory();
    root.isRoot = true;
    CloudDirectory lastDir = null;
    root.Name = _userStorage.Name;
    string lastPath = "";

    foreach (var fileitem in directory)
    {
        string fileName = "";
        var items = fileitem.Path.Split(new String[] { "/" }, StringSplitOptions.RemoveEmptyEntries);

        if (items.Length == 2)
        {
            // File sits directly under the root level.
            var itemsRoot = items.ToList().GetRange(1, items.Length - 1);
            fileitem.CFile.FileName = itemsRoot[0];
            root.CloudFiles.Add(fileitem.CFile);
            continue;
        }

        // Drop the leading root segment and the trailing file name.
        var itemsWithoutRootAndFile = items.ToList().GetRange(1, items.Length - 2);
        var pathstring = String.Join("/", itemsWithoutRootAndFile);
        fileName = items.Last();

        if (!lastPath.Equals(pathstring))
        {
            // Path changed: descend through each segment, creating nodes as needed.
            lastPath = pathstring;
            bool addedToRoot = false;
            foreach (var dir in itemsWithoutRootAndFile)
            {
                if (!addedToRoot)
                {
                    // First segment hangs directly off the root.
                    lastDir = new CloudDirectory() { Name = dir };
                    var inRootDir = root.SubDirectories.Where(rs => rs.Name.Equals(dir)).FirstOrDefault();
                    if (inRootDir == null)
                    {
                        lastDir.Parent = root.Name;
                        root.SubDirectories.Add(lastDir);
                    }
                    else
                    {
                        // Reuse the lookup result instead of re-running the
                        // identical LINQ query as the original did.
                        lastDir = inRootDir;
                    }
                    addedToRoot = true;
                }
                else
                {
                    // Subsequent segments hang off the previously visited node.
                    var nextDir = new CloudDirectory() { Name = dir };
                    var inSubDir = lastDir.SubDirectories.Where(ls => ls.Name.Equals(dir)).FirstOrDefault();
                    if (inSubDir == null)
                    {
                        nextDir.Parent = lastDir.Name;
                        lastDir.SubDirectories.Add(nextDir);
                    }
                    else
                    {
                        // Same dedup: reuse the already-found node.
                        nextDir = inSubDir;
                    }
                    lastDir = nextDir;
                }
            }
        }

        fileitem.CFile.FileName = fileName;
        lastDir.CloudFiles.Add(fileitem.CFile);
        lastPath = pathstring;
    }

    root.DirectorySizeInBytes = dirSize;
    root.DirectorySizeHumanReadable = FileHelper.StrFormatByteSize(dirSize);

    // The original also built a UserDirectory local (userdir) here that was
    // never referenced afterwards; removed as dead code.
    Wrapper wp = new Wrapper();
    wp.UserDirectory = root;
    wp.Username = _userName;
    var ser = JsonConvert.SerializeObject(wp);

    // Reset the scratch list so subsequent calls start clean.
    this.directory.Clear();
    return ser;
}