/// <summary>
/// Lists the files stored directly under the given path in the configured
/// S3 bucket, skipping S3 "_$folder$" placeholder objects.
/// </summary>
/// <param name="path">Storage path; normalized via CleanPath before use.</param>
/// <returns>A materialized list of storage-file wrappers.</returns>
public IEnumerable<IStorageFile> ListFiles(string path)
{
    var cleanedPath = CleanPath(path);
    var directory = new S3DirectoryInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, cleanedPath);

    var result = new List<IStorageFile>();
    foreach (var fileInfo in directory.GetFiles())
    {
        // Objects ending in "_$folder$" are directory markers, not real files.
        if (fileInfo.Name.EndsWith("_$folder$"))
        {
            continue;
        }
        result.Add(new AmazonS3StorageFile(fileInfo, this));
    }
    return result;
}
/// <summary>
/// Computes the total size, in bytes, of every file under the given
/// S3 directory, recursing into all subdirectories.
/// </summary>
/// <param name="directoryInfo">Root directory to measure.</param>
/// <returns>Sum of all descendant file lengths in bytes.</returns>
private static long GetDirectorySize(S3DirectoryInfo directoryInfo)
{
    // Bytes of the files directly in this directory.
    long total = directoryInfo.GetFiles().Sum(fileInfo => fileInfo.Length);

    // Add each subdirectory's contribution recursively.
    foreach (var subDirectory in directoryInfo.GetDirectories())
    {
        total += GetDirectorySize(subDirectory);
    }
    return total;
}
/// <summary>
/// Downloads every file matching the input's search pattern from the given
/// S3 directory to the local destination path, optionally deleting sources.
/// </summary>
/// <param name="input">Search pattern and local destination path.</param>
/// <param name="option">Download behavior flags (recursion, overwrite, delete-source, error-on-no-match).</param>
/// <param name="dirInfo">S3 directory to download from.</param>
/// <param name="cToken">Token checked before each file transfer.</param>
/// <returns>Full local paths of the files that were downloaded.</returns>
/// <exception cref="ArgumentException">No files matched and ThrowErrorIfNoMatches is set.</exception>
/// <exception cref="IOException">A file could not be written locally or was not found after transfer.</exception>
private static List <string> DownloadFiles( DownloadInput input, DownloadOptions option, S3DirectoryInfo dirInfo, CancellationToken cToken )
{
    var files = dirInfo.GetFiles(input.SearchPattern, option.DownloadFromCurrentDirectoryOnly ? SearchOption.TopDirectoryOnly : SearchOption.AllDirectories);
    if (option.ThrowErrorIfNoMatches && files.Length < 1) { throw new ArgumentException("Could not find any files matching pattern."); }
    var filelist = new List <string>();
    foreach (var file in files)
    {
        // Skip entries that disappeared between listing and download.
        if (!file.Exists) { continue; }
        cToken.ThrowIfCancellationRequested();
        var path = Path.Combine(input.DestinationPath, file.Name);
        try
        {
            // Apparently MoveToLocal does not have overwrite as signature :(
            // MoveToLocal deletes the S3 source after copying; CopyToLocal leaves it.
            var localFile = option.DeleteSourceFile ? file.MoveToLocal(path, option.Overwrite) : file.CopyToLocal(path, option.Overwrite);
            if (!localFile.Exists) { throw new IOException($"Could not find {localFile.FullName} from local filesystem."); }
            filelist.Add(localFile.FullName);
        }
        catch (IOException ex)
        {
            // normal exception does not give filename info, which would be nice.
            // NOTE(review): this also re-wraps the "could not find" IOException
            // thrown above, replacing its message — confirm that is intended.
            throw new IOException($"{path} already exists or insufficient privileges to write file.", ex);
        }
    }
    return(filelist);
}
/// <summary>
/// Implementation of the ZephyrDirectory Delete method in Amazon S3 Storage.
/// Deletes this directory from the bucket, optionally including its contents.
/// </summary>
/// <param name="recurse">Remove all objects in the directory as well. If set to "false", directory must be empty or an exception will be thrown.</param>
/// <param name="stopOnError">Stop deleting objects in the directory if an error is encountered.</param>
/// <param name="verbose">Log each object that is deleted from the directory.</param>
/// <param name="callbackLabel">Optional "label" to be passed into the callback method.</param>
/// <param name="callback">Optional method that is called for logging purposes.</param>
public override void Delete(bool recurse = true, bool stopOnError = true, bool verbose = true, string callbackLabel = null, Action<string, string> callback = null)
{
    try
    {
        if (_client == null)
        {
            throw new Exception($"AWSClient Not Set.");
        }

        // S3DirectoryInfo expects a backslash-delimited key without a
        // trailing separator.
        String key = ObjectKey.Replace('/', '\\');
        if (key.EndsWith("\\"))
        {
            key = key.Substring(0, key.Length - 1);
        }

        S3DirectoryInfo dirInfo = new S3DirectoryInfo(_client.Client, BucketName, key);
        if (dirInfo.Exists)
        {
            if (!recurse)
            {
                // Non-recursive delete is only legal on an empty directory.
                int subDirCount = dirInfo.GetDirectories().Length;
                int fileCount = dirInfo.GetFiles().Length;
                if (subDirCount > 0 || fileCount > 0)
                {
                    throw new Exception($"Directory [{FullName}] is not empty.");
                }
            }
            dirInfo.Delete(recurse);
        }

        // Logged regardless of whether the directory existed beforehand.
        if (verbose)
        {
            Logger.Log($"Directory [{FullName}] Was Deleted.", callbackLabel, callback);
        }
    }
    catch (Exception e)
    {
        Logger.Log(e.Message, callbackLabel, callback);
        if (stopOnError)
        {
            throw;
        }
    }
}
/// <summary>
/// Determines whether a file exists at the given slash-delimited path
/// inside the configured bucket.
/// </summary>
/// <param name="file_path">Path whose last segment is the file name.</param>
/// <returns>True if at least one matching file is found.</returns>
public bool Exists(string file_path)
{
    EnsureBucketName();
    using (var client = Storage.GetClient())
    {
        var directory = new S3DirectoryInfo(client, BucketName);
        var segments = file_path.Split('/');

        // Walk every segment except the last, which names the file itself.
        foreach (var segment in segments.Take(segments.Length - 1))
        {
            directory = directory.GetDirectory(segment);
        }
        return directory.GetFiles(segments.Last()).Any();
    }
}
/// <summary>
/// Renames this directory by moving its files into a directory keyed with
/// the new name and deleting the old directory.
/// </summary>
/// <param name="newName">New directory name (final path segment).</param>
public void Rename(string newName)
{
    // Bug fix: a plain _key.Replace(Name, newName) substitutes EVERY
    // occurrence of the current name in the key, corrupting keys where an
    // earlier segment happens to match (e.g. "foo/foo/" -> "bar/bar/").
    // Replace only the last occurrence, which is this directory's segment.
    int nameIndex = _key.LastIndexOf(Name, System.StringComparison.Ordinal);
    var newKey = nameIndex < 0
        ? _key
        : _key.Remove(nameIndex, Name.Length).Insert(nameIndex, newName);

    var newDirectory = new S3DirectoryInfo(_s3Client, _bucketName, newKey);
    if (!newDirectory.Exists)
    {
        newDirectory.Create();
    }

    // NOTE(review): only top-level files are moved; subdirectories are not,
    // and Delete() below removes the old directory — confirm intended.
    foreach (var s3FileInfo in _s3DirectoryInfo.GetFiles())
    {
        s3FileInfo.MoveTo(newDirectory);
    }

    _s3DirectoryInfo.Delete();
    _s3DirectoryInfo = newDirectory;
}
/// <summary>
/// Recursively copies (or moves, when <paramref name="moveFiles"/> is true)
/// the contents of <paramref name="origin"/> into <paramref name="target"/>,
/// parallelizing the per-file transfers.
/// </summary>
/// <param name="client">S3 client used for the object operations.</param>
/// <param name="origin">Directory whose contents are transferred.</param>
/// <param name="target">Destination directory.</param>
/// <param name="moveFiles">When true, source files are moved instead of copied.</param>
private void ParallelTransferring(IAmazonS3 client, S3DirectoryInfo origin, S3DirectoryInfo target, bool moveFiles = false)
{
    // Files directly under 'origin' are only transferred when origin's
    // parent name matches the bucket name (i.e. origin sits at the root).
    if (origin.Parent.Name.Equals(origin.Bucket.Name))
    {
        Parallel.ForEach(origin.GetFiles(), file => { CopyOrMoveFiles(client, origin, target, moveFiles, file); });
    }
    // NOTE(review): each first-level folder's files are transferred by the
    // inner loop below AND may be transferred again inside the recursive
    // call if folder.Parent.Name equals the bucket name — confirm files are
    // not processed twice. Also note CopyOrMoveFiles receives 'origin', not
    // 'folder', for nested files — presumably so paths stay relative to the
    // original root; verify against that helper.
    Parallel.ForEach(origin.GetDirectories(), folder =>
    {
        Parallel.ForEach(folder.GetFiles(), file => { CopyOrMoveFiles(client, origin, target, moveFiles, file); });
        ParallelTransferring(client, folder, target, moveFiles);
    });
}
// Recursively adds every file under dirInfo to the zip archive, mirroring
// the S3 directory structure beneath archiveDirectory.
static void addFiles(ZipFile zip, S3DirectoryInfo dirInfo, string archiveDirectory)
{
    // Depth-first: register each subdirectory, then descend into it.
    foreach (var subDirectory in dirInfo.GetDirectories())
    {
        var directoryEntry = zip.AddDirectoryByName(subDirectory.Name);
        addFiles(zip, subDirectory, archiveDirectory + directoryEntry.FileName);
    }

    foreach (var s3File in dirInfo.GetFiles())
    {
        using (var s3Stream = s3File.OpenRead())
        {
            zip.AddEntry(archiveDirectory + s3File.Name, s3Stream);
            // Save after adding the file to force the immediate read from
            // the S3 stream — we don't want to keep that stream open.
            zip.Save();
        }
    }
}
/// <summary>
/// Implementation of the ZephyrDirectory GetFiles method in AmazonS3Storage.
/// Lists the files in this directory as AwsS3ZephyrFile wrappers.
/// </summary>
/// <returns>An enumeration of ZephyrFile objects for each child file.</returns>
public override IEnumerable<ZephyrFile> GetFiles()
{
    if (_client == null)
    {
        throw new Exception($"AWSClient Not Set.");
    }

    // S3DirectoryInfo expects a backslash-delimited key.
    var dirInfo = new S3DirectoryInfo(_client.Client, BucketName, ObjectKey.Replace('/', '\\'));

    var files = new List<ZephyrFile>();
    foreach (var child in dirInfo.GetFiles())
    {
        files.Add(new AwsS3ZephyrFile(_client, PathCombine(FullName, child.Name)));
    }
    return files;
}
// Recursively prints the directory tree to the console, indenting one tab
// per nesting level; files are indented one level deeper than their parent.
static void WriteDirectoryStructure(S3DirectoryInfo directory, int level)
{
    var indent = new string('\t', level);
    Console.WriteLine("{0}{1}", indent, directory.Name);

    foreach (var file in directory.GetFiles())
    {
        Console.WriteLine("\t{0}{1}", indent, file.Name);
    }

    foreach (var subDirectory in directory.GetDirectories())
    {
        WriteDirectoryStructure(subDirectory, level + 1);
    }
}
// Verifies that syncing the source directory writes the expected files and
// subdirectory into the S3-backed target, with the expected file contents.
public void WritesFileToS3FileSystem()
{
    // Arrange / Act: sync the source directory into the target.
    var processor = new Processor(_sourceObject, _targetDirectory, new SyncTaskQueue());
    processor.ProcessSourceDirectory();

    // Assert: two files were written.
    var files = _s3DirectoryInfo.GetFiles();
    Assert.AreEqual(2, files.Length);

    // Assert: exactly one subdirectory with the expected name.
    var directories = _s3DirectoryInfo.GetDirectories();
    Assert.AreEqual(1, directories.Length);
    Assert.AreEqual(DirectoryHelper.SubDirectoryName, directories[0].Name);

    // Assert: the first file round-trips its contents.
    using (var reader = files[0].OpenText())
    {
        Assert.AreEqual(DirectoryHelper.Contents, reader.ReadToEnd());
    }
}
/// <summary>
/// Reconciles DynamoDB annotation-package records with the files present in
/// the S3 bucket: creates records for bucket files that have no DynamoDB
/// entry, and normalizes existing records whose IsAnnotated flag is null,
/// writing everything back in batches.
/// </summary>
public void AddMissingInfos()
{
    // All package files currently in the S3 bucket.
    var dir = new S3DirectoryInfo(this._client, this._bucketName);
    var files = dir.GetFiles();

    var context = new DynamoDBContext(this._dynamoDbClient);

    // Full table scan of tracked packages (null = no scan conditions).
    var allPackages = context.Scan<AnnotationPackageInfo>(null).ToList();

    // Bucket files with no corresponding DynamoDB record become new,
    // not-yet-annotated package records keyed by file name.
    var allFileNames = files.Select(o => o.Name).ToList();
    var missingPackageNames = allFileNames.Where(o => !allPackages.Select(x => x.Id).Contains(o)).ToList();
    var missingPackages = missingPackageNames.Select(o => new AnnotationPackageInfo { Id = o, IsAnnotated = false }).ToList();

    // Existing records whose IsAnnotated is null get an explicit false.
    var existingPackages = context.Scan<AnnotationPackageInfo>(new ScanCondition("IsAnnotated", ScanOperator.IsNull)).ToList();
    foreach (var existingPackage in existingPackages)
    {
        existingPackage.IsAnnotated = false;
    }

    var packagesToPatch = missingPackages.Union(existingPackages).ToList();

    // DynamoDB batch writes are limited to 25 items per request.
    var batchSize = 25;
    for (var i = 0; i < packagesToPatch.Count; i += batchSize)
    {
        var infoBatch = context.CreateBatchWrite<AnnotationPackageInfo>();

        var items = packagesToPatch.GetRange(i, Math.Min(packagesToPatch.Count - i, batchSize));
        infoBatch.AddPutItems(items);
        infoBatch.Execute();

        // Presumably a crude throttle to stay under provisioned write
        // capacity — TODO confirm; it also sleeps after the final batch.
        Thread.Sleep(1000);
    }
}
/// <summary>
/// List bucket contents.
/// A filter containing a forward slash is treated as "directory/pattern":
/// the directory portion is walked first and the remainder is used as the
/// match pattern within it.
/// </summary>
/// <param name="filter">If it contains a forward slash, the leading part designates a directory.</param>
/// <returns>Matching file names, plus directory names suffixed with "/".</returns>
public string[] List(string filter)
{
    EnsureBucketName();

    var exp = @"^([^\*]+/)([^/]*)$";
    var path = "";
    if (Regex.IsMatch(filter, exp))
    {
        // Split "some/dir/pattern" into its directory and pattern parts.
        path = Regex.Replace(filter, exp, "$1");
        filter = Regex.Replace(filter, exp, "$2");
    }

    using (var client = Storage.GetClient())
    {
        // Descend into the requested directory, skipping empty segments.
        var directory = new S3DirectoryInfo(client, BucketName);
        foreach (var segment in path.Split('/'))
        {
            if (segment.Length > 0)
            {
                directory = directory.GetDirectory(segment);
            }
        }

        var fileNames = directory.GetFiles(filter).Select(f => f.Name);

        // The trailing slash distinguishes directories from files.
        var directoryNames = directory
            .GetDirectories(filter)
            .Select(f => string.Format("{0}{1}", f.Name, "/"));

        return fileNames.Union(directoryNames).ToArray();
    }
}
/// <summary>
/// Enumerates the files directly inside this directory (no recursion) that
/// match the given search pattern.
/// </summary>
/// <param name="pattern">Search pattern passed through to GetFiles.</param>
/// <returns>The matching files in the top-level directory only.</returns>
public IEnumerable<S3FileInfo> EnumerateFiles(string pattern) =>
    BackingDirInfo.GetFiles(pattern, SearchOption.TopDirectoryOnly);
/// <summary>
/// Sample program: creates a bucket, writes README/code/license files into
/// it via the S3 filesystem API, prints the resulting directory tree and
/// the code file contents, then deletes everything including the bucket.
/// </summary>
static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        // NOTE(review): Amazon.AWSClientFactory is a deprecated entry point
        // in current AWS SDK for .NET versions — consider new AmazonS3Client.
        using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.USWest2))
        {
            // Creates the bucket.
            S3DirectoryInfo rootDirectory = new S3DirectoryInfo(client, bucketName);
            rootDirectory.Create();

            // Creates a file at the root of the bucket.
            S3FileInfo readme = rootDirectory.GetFile("README.txt");
            using (StreamWriter writer = new StreamWriter(readme.OpenWrite()))
                writer.WriteLine("This is my readme file.");

            // NOTE(review): these two locals are never used again, but they
            // DO create C:\code on the local disk as a side effect — confirm
            // whether that is intentional before removing them.
            DirectoryInfo localRoot = new DirectoryInfo(@"C:\");
            DirectoryInfo localCode = localRoot.CreateSubdirectory("code");

            // Create a directory called code and write a file to it.
            S3DirectoryInfo codeDir = rootDirectory.CreateSubdirectory("code");
            S3FileInfo codeFile = codeDir.GetFile("Program.cs");
            using (StreamWriter writer = new StreamWriter(codeFile.OpenWrite()))
            {
                writer.WriteLine("namespace S3FileSystem_Sample");
                writer.WriteLine("{");
                writer.WriteLine(" class Program");
                writer.WriteLine(" {");
                writer.WriteLine(" static void Main(string[] args)");
                writer.WriteLine(" {");
                writer.WriteLine(" Console.WriteLine(\"Hello World\");");
                writer.WriteLine(" }");
                writer.WriteLine(" }");
                writer.WriteLine("}");
            }

            // Create a directory called license and write a file to it.
            S3DirectoryInfo licensesDir = rootDirectory.CreateSubdirectory("licenses");
            S3FileInfo licenseFile = licensesDir.GetFile("license.txt");
            using (StreamWriter writer = new StreamWriter(licenseFile.OpenWrite()))
                writer.WriteLine("A license to code");

            Console.WriteLine("Write Directory Structure");
            Console.WriteLine("------------------------------------");
            WriteDirectoryStructure(rootDirectory, 0);
            Console.WriteLine("\n\n");

            // Echo the contents of every file under code/.
            foreach (var file in codeDir.GetFiles())
            {
                Console.WriteLine("Content of {0}", file.Name);
                Console.WriteLine("------------------------------------");
                using (StreamReader reader = file.OpenText())
                {
                    Console.WriteLine(reader.ReadToEnd());
                }
            }

            // Deletes all the files and then the bucket.
            rootDirectory.Delete(true);
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
// Prints the S3 directory tree recursively: the directory name at the
// current indent level, its files one tab deeper, then each subdirectory.
static void WriteDirectoryStructure(S3DirectoryInfo directory, int level)
{
    var indent = new string('\t', level);
    Console.WriteLine("{0}{1}", indent, directory.Name);

    foreach (var file in directory.GetFiles())
    {
        Console.WriteLine("\t{0}{1}", indent, file.Name);
    }

    foreach (var subDirectory in directory.GetDirectories())
    {
        WriteDirectoryStructure(subDirectory, level + 1);
    }
}
/// <summary>
/// Returns the files stored under the given path in the configured bucket,
/// excluding S3 "_$folder$" placeholder objects.
/// </summary>
/// <param name="path">Storage path; normalized via CleanPath before use.</param>
/// <returns>A materialized list of storage-file wrappers.</returns>
public IEnumerable<IStorageFile> ListFiles(string path)
{
    var directory = new S3DirectoryInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, CleanPath(path));

    // "_$folder$" objects are directory markers, not downloadable files.
    var storageFiles = from fileInfo in directory.GetFiles()
                       where !fileInfo.Name.EndsWith("_$folder$")
                       select new AmazonS3StorageFile(fileInfo, this);

    return storageFiles.ToList();
}
/// <summary>
/// Sample program: creates a bucket, writes README/wiki/license files into
/// it via the S3 filesystem API, prints the resulting directory tree and
/// the wiki file contents, then optionally deletes the bucket.
/// </summary>
private static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        // Retry up to twice and back off automatically when throttled.
        using (client = new AmazonS3Client(new AmazonS3Config() { MaxErrorRetry = 2, ThrottleRetries = true }))
        {
            // Creates the bucket.
            S3DirectoryInfo rootDirectory = new S3DirectoryInfo(client, bucketName);
            rootDirectory.Create();

            // Creates a file at the root of the bucket.
            S3FileInfo readme = rootDirectory.GetFile("README.txt");
            using (StreamWriter writer = new StreamWriter(readme.OpenWrite()))
                writer.WriteLine("This is my readme file.");

            // Create a directory called code and write a file to it.
            S3DirectoryInfo codeDir = rootDirectory.CreateSubdirectory("wiki");
            S3FileInfo codeFile = codeDir.GetFile("Phantasmagoria.txt");
            using (StreamWriter writer = new StreamWriter(codeFile.OpenWrite()))
            {
                writer.WriteLine("Phantasmagoria (About this sound American pronunciation (help·info), also fantasmagorie, fantasmagoria) was a form of horror theatre that ");
                writer.WriteLine("(among other techniques) used one or more magic lanterns to project frightening images such as skeletons, demons, and ");
                writer.WriteLine("ghosts onto walls, smoke, or semi-transparent screens, typically using rear projection to keep the lantern out of sight. Mobile or ");
                writer.WriteLine("portable projectors were used, allowing the projected image to move and change size on the screen, and multiple projecting ");
                writer.WriteLine("devices allowed for quick switching of different images. In many shows the use of spooky decoration, total darkness, sound ");
                writer.WriteLine("effects, (auto-)suggestive verbal presentation and sound effects were also key elements. Some shows added all kinds of ");
                writer.WriteLine("sensory stimulation, including smells and electric shocks. Even required fasting, fatigue (late shows) and drugs have been ");
                // NOTE(review): the string literal below contains a raw line
                // break — this appears to be source mangling (a regular C#
                // string literal cannot span lines) and will not compile as-is;
                // confirm against the original file and rejoin if so.
                writer.WriteLine("mentioned as methods of making sure spectators would be more convinced of what they saw. 
The shows started under the ");
                writer.WriteLine("guise of actual séances in Germany in the late 18th century, and gained popularity through most of Europe (including Britain) ");
                writer.WriteLine("throughout the 19th century.");
            }

            // Create a directory called license and write a file to it.
            S3DirectoryInfo licensesDir = rootDirectory.CreateSubdirectory("licenses");
            S3FileInfo licenseFile = licensesDir.GetFile("license.txt");
            using (StreamWriter writer = new StreamWriter(licenseFile.OpenWrite()))
                writer.WriteLine("A license to code");

            Console.WriteLine("Write Directory Structure");
            Console.WriteLine("------------------------------------");
            WriteDirectoryStructure(rootDirectory, 0);
            Console.WriteLine("\n\n");

            // Echo the contents of every file under wiki/.
            foreach (var file in codeDir.GetFiles())
            {
                Console.WriteLine("Content of {0}", file.Name);
                Console.WriteLine("------------------------------------");
                using (StreamReader reader = file.OpenText())
                {
                    Console.WriteLine(reader.ReadToEnd());
                }
            }

            // Deletes all the files and then the bucket.
            if (deleteAtEnd)
            {
                rootDirectory.Delete(true);
            }
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}