/// <summary>
/// Lists the top-level folders under <paramref name="path"/> in the configured S3 bucket.
/// </summary>
/// <param name="path">Folder path; normalized via <c>CleanPath</c> before use.</param>
/// <returns>One <c>AmazonS3StorageFolder</c> per immediate child directory.</returns>
public IEnumerable<IStorageFolder> ListFolders(string path)
{
    var cleanedPath = CleanPath(path);
    var directory = new S3DirectoryInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, cleanedPath);

    var folders = new List<IStorageFolder>();
    foreach (var child in directory.GetDirectories("*", SearchOption.TopDirectoryOnly))
    {
        folders.Add(new AmazonS3StorageFolder(child));
    }

    return folders;
}
/// <summary>
/// Recursively computes the total size, in bytes, of every file under the
/// given S3 directory (its own files plus all sub-directories).
/// </summary>
private static long GetDirectorySize(S3DirectoryInfo directoryInfo)
{
    long total = 0;

    foreach (var file in directoryInfo.GetFiles())
    {
        total += file.Length;
    }

    foreach (var subDirectory in directoryInfo.GetDirectories())
    {
        total += GetDirectorySize(subDirectory);
    }

    return total;
}
/// <summary>
/// Verifies that processing the source directory writes two files and one
/// sub-directory to the S3-backed file system, and that the first file's
/// contents round-trip intact.
/// </summary>
public void WritesFileToS3FileSystem()
{
    // Arrange
    var processor = new Processor(_sourceObject, _targetDirectory, new SyncTaskQueue());

    // Act
    processor.ProcessSourceDirectory();

    // Assert
    var files = _s3DirectoryInfo.GetFiles();
    Assert.AreEqual(2, files.Length);

    var subDirectories = _s3DirectoryInfo.GetDirectories();
    Assert.AreEqual(1, subDirectories.Length);
    Assert.AreEqual(DirectoryHelper.SubDirectoryName, subDirectories[0].Name);

    using (var reader = files[0].OpenText())
    {
        Assert.AreEqual(DirectoryHelper.Contents, reader.ReadToEnd());
    }
}
/// <summary>
/// Implementation of the ZephyrDirectory Delete method in Amazon S3 Storage.
/// </summary>
/// <param name="recurse">Remove all objects in the directory as well. If set to "false", directory must be empty or an exception will be thrown.</param>
/// <param name="stopOnError">Stop deleting objects in the directory if an error is encountered.</param>
/// <param name="verbose">Log each object that is deleted from the directory.</param>
/// <param name="callbackLabel">Optional "label" to be passed into the callback method.</param>
/// <param name="callback">Optional method that is called for logging purposes.</param>
/// <exception cref="InvalidOperationException">Thrown (when <paramref name="stopOnError"/> is true) if the AWS client is unset or the directory is non-empty on a non-recursive delete.</exception>
public override void Delete(bool recurse = true, bool stopOnError = true, bool verbose = true, string callbackLabel = null, Action<string, string> callback = null)
{
    try
    {
        if (_client == null)
        {
            // Was "throw new Exception($"...")": specific standard exception
            // type and no pointless interpolation; still caught by any
            // existing "catch (Exception)" in callers.
            throw new InvalidOperationException("AWSClient Not Set.");
        }

        // S3DirectoryInfo expects a backslash-delimited key with no trailing separator.
        String key = ObjectKey.Replace('/', '\\');
        if (key.EndsWith("\\"))
        {
            key = key.Substring(0, key.Length - 1);
        }

        S3DirectoryInfo dirInfo = new S3DirectoryInfo(_client.Client, BucketName, key);
        if (dirInfo.Exists)
        {
            if (!recurse)
            {
                // A non-recursive delete is only legal on an empty directory.
                int dirs = dirInfo.GetDirectories().Length;
                int files = dirInfo.GetFiles().Length;
                if (dirs > 0 || files > 0)
                {
                    throw new InvalidOperationException($"Directory [{FullName}] is not empty.");
                }
            }

            dirInfo.Delete(recurse);
        }

        // NOTE(review): this logs "Was Deleted" even when the directory did
        // not exist; kept as-is to preserve the original logging behavior.
        if (verbose)
        {
            Logger.Log($"Directory [{FullName}] Was Deleted.", callbackLabel, callback);
        }
    }
    catch (Exception e)
    {
        // Always log; rethrow only when the caller asked to stop on errors.
        Logger.Log(e.Message, callbackLabel, callback);
        if (stopOnError)
        {
            throw;
        }
    }
}
// Recursively copies (or moves, when moveFiles is true) files from <origin>
// to <target>, transferring the files of each directory level in parallel.
//
// NOTE(review): only the top-level call — where origin's parent is the bucket
// itself — transfers the files directly under <origin>. For every sub-folder,
// its files are transferred and then the folder is recursed into with the
// SAME <target> and with <origin> (not <folder>) passed to CopyOrMoveFiles;
// whether the destination preserves the relative sub-folder structure
// depends on CopyOrMoveFiles, which is not visible here — confirm it derives
// the destination key from the file itself, otherwise nested files flatten.
private void ParallelTransferring(IAmazonS3 client, S3DirectoryInfo origin, S3DirectoryInfo target, bool moveFiles = false)
{
    // Root-level files: handled only when origin sits directly under the bucket.
    if (origin.Parent.Name.Equals(origin.Bucket.Name))
    {
        Parallel.ForEach(origin.GetFiles(), file =>
        {
            CopyOrMoveFiles(client, origin, target, moveFiles, file);
        });
    }

    // Each sub-folder: transfer its files, then recurse one level deeper.
    Parallel.ForEach(origin.GetDirectories(), folder =>
    {
        Parallel.ForEach(folder.GetFiles(), file =>
        {
            CopyOrMoveFiles(client, origin, target, moveFiles, file);
        });
        ParallelTransferring(client, folder, target, moveFiles);
    });
}
// Recursively adds every sub-directory and file under <dirInfo> to the zip,
// prefixing each entry name with <archiveDirectory>.
static void addFiles(ZipFile zip, S3DirectoryInfo dirInfo, string archiveDirectory)
{
    foreach (var subDirectory in dirInfo.GetDirectories())
    {
        var directoryEntry = zip.AddDirectoryByName(subDirectory.Name);
        addFiles(zip, subDirectory, archiveDirectory + directoryEntry.FileName);
    }

    foreach (var file in dirInfo.GetFiles())
    {
        using (var sourceStream = file.OpenRead())
        {
            zip.AddEntry(archiveDirectory + file.Name, sourceStream);

            // Save after adding the file to force an immediate read from the
            // S3 stream, since we don't want to keep that stream open.
            zip.Save();
        }
    }
}
/// <summary>
/// Implementation of the ZephyrDirectory GetDirectories method in AmazonS3Storage.
/// </summary>
/// <returns>An enumeration of AmazonS3ZephyrDirectory objects.</returns>
/// <exception cref="InvalidOperationException">Thrown when no AWS client has been set.</exception>
public override IEnumerable<ZephyrDirectory> GetDirectories()
{
    if (_client == null)
    {
        // Was "throw new Exception($"...")": specific standard exception type
        // and no pointless interpolation; same message, so still caught by
        // any existing "catch (Exception)" in callers.
        throw new InvalidOperationException("AWSClient Not Set.");
    }

    List<ZephyrDirectory> dirs = new List<ZephyrDirectory>();

    // S3DirectoryInfo expects a backslash-delimited key.
    S3DirectoryInfo dInfo = new S3DirectoryInfo(this._client.Client, this.BucketName, ObjectKey.Replace('/', '\\'));
    S3DirectoryInfo[] children = dInfo.GetDirectories();
    foreach (S3DirectoryInfo child in children)
    {
        // Child directories are exposed with a trailing "/" in their key.
        dirs.Add(new AwsS3ZephyrDirectory(_client, PathCombine(this.FullName, $"{child.Name}/")));
    }

    return dirs;
}
/// <summary>
/// Recursively prints the directory tree to the console: one tab of
/// indentation per depth level, with files indented one extra tab under
/// their directory.
/// </summary>
static void WriteDirectoryStructure(S3DirectoryInfo directory, int level)
{
    // string(char, count) replaces the hand-rolled StringBuilder loop.
    string indentation = new string('\t', level);

    Console.WriteLine("{0}{1}", indentation, directory.Name);

    foreach (var file in directory.GetFiles())
    {
        Console.WriteLine("\t{0}{1}", indentation, file.Name);
    }

    foreach (var subDirectory in directory.GetDirectories())
    {
        WriteDirectoryStructure(subDirectory, level + 1);
    }
}
/// <summary>
/// List bucket contents.
/// Filter can be used to designate a directory, as long as it contains a forward slash
/// </summary>
/// <param name="filter">If it contains a forward slash, is directory</param>
/// <returns>File names, plus directory names suffixed with "/".</returns>
public string[] List(string filter)
{
    EnsureBucketName();

    // Split "dir/sub/name*" into a directory path ("dir/sub/") and a
    // name filter ("name*"); a filter with no slash is used unchanged.
    const string exp = @"^([^\*]+/)([^/]*)$";
    var path = "";
    if (Regex.IsMatch(filter, exp))
    {
        path = Regex.Replace(filter, exp, "$1");
        filter = Regex.Replace(filter, exp, "$2");
    }

    using (var client = Storage.GetClient())
    {
        // Walk down from the bucket root to the directory named by the path.
        var s3DirectoryInfo = new S3DirectoryInfo(client, BucketName);
        foreach (var segment in path.Split('/'))
        {
            if (segment.Length > 0)
            {
                s3DirectoryInfo = s3DirectoryInfo.GetDirectory(segment);
            }
        }

        var fileNames = s3DirectoryInfo
            .GetFiles(filter)
            .Select(f => f.Name);

        // A trailing slash distinguishes directories from files.
        var directoryNames = s3DirectoryInfo
            .GetDirectories(filter)
            .Select(d => string.Format("{0}{1}", d.Name, "/"));

        return fileNames.Union(directoryNames).ToArray();
    }
}
/// <summary>
/// Enumerates the immediate child directories of the backing directory that
/// match <paramref name="dirName"/>.
/// </summary>
public IEnumerable<S3DirectoryInfo> EnumerateDirectories(string dirName)
    => BackingDirInfo.GetDirectories(dirName, SearchOption.TopDirectoryOnly);
/// <summary>
/// Wraps each immediate child directory in an <c>S3DirectoryObject</c>.
/// </summary>
public IEnumerable<IDirectoryObject> GetDirectories()
{
    foreach (var childDirectory in _s3DirectoryInfo.GetDirectories())
    {
        yield return new S3DirectoryObject(_s3Client, childDirectory);
    }
}
/// <summary>
/// Recursively prints the directory tree to the console: one tab of
/// indentation per depth level, with files indented one extra tab under
/// their directory.
/// </summary>
static void WriteDirectoryStructure(S3DirectoryInfo directory, int level)
{
    // string(char, count) replaces the hand-rolled StringBuilder loop, and
    // all single-statement bodies now carry braces.
    string indentation = new string('\t', level);

    Console.WriteLine("{0}{1}", indentation, directory.Name);

    foreach (var file in directory.GetFiles())
    {
        Console.WriteLine("\t{0}{1}", indentation, file.Name);
    }

    foreach (var subDirectory in directory.GetDirectories())
    {
        WriteDirectoryStructure(subDirectory, level + 1);
    }
}
/// <summary>
/// Returns the top-level folders under <paramref name="path"/> in the
/// configured S3 bucket, each wrapped as an <c>AmazonS3StorageFolder</c>.
/// </summary>
public IEnumerable<IStorageFolder> ListFolders(string path)
{
    var directory = new S3DirectoryInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, CleanPath(path));
    var children = directory.GetDirectories("*", SearchOption.TopDirectoryOnly);
    return children.Select(child => (IStorageFolder)new AmazonS3StorageFolder(child)).ToList();
}