/// <summary>
/// Lists all buckets for the account, or the keys in a bucket that match an
/// optional prefix (a trailing "*" on the prefix is treated as a wildcard
/// marker and stripped). Writes one line per item plus a count summary.
/// </summary>
public override void Execute()
{
    AWSAuthConnection svc = new AWSAuthConnection();

    if (listBuckets)
    {
        ListAllMyBucketsResponse allResp = svc.listAllMyBuckets(null);
        allResp.Connection.Close();

        foreach (Bucket b in allResp.Buckets)
            Console.WriteLine(b.Name);

        // BUG FIX: the summary previously said "files listed" even though
        // buckets were listed.
        Console.WriteLine(string.Format("{0} buckets listed", allResp.Buckets.Count));
    }
    else
    {
        // A trailing "*" is only a wildcard marker; the prefix match is implicit.
        if (prefix.EndsWith("*"))
            prefix = prefix.Substring(0, prefix.Length - 1);

        int fileCount = 0;
        foreach (ListEntry e in new IterativeList(bucket, prefix))
        {
            // First character of the storage class, when requested and available.
            string storageDescription = (showStorageClass && e.StorageClass.Length > 0) ?
                e.StorageClass[0] + " " : string.Empty;

            // BUG FIX: the size was computed with integer division, so the
            // "{1:0.0}" format always printed ".0"; divide as double to show
            // fractional megabytes as the format string intends.
            Console.WriteLine(string.Format("{2}\t{1:0.0}M\t{3}{0}",
                e.Key, e.Size / (1024.0 * 1024.0), e.LastModified, storageDescription));
            fileCount++;
        }
        Console.WriteLine(string.Format("{0} files listed", fileCount));
    }
}
/// <summary>
/// Receives an XML document in the request body and stores it to S3 under
/// <c>documentKey</c> as a public-read "application/xml" object, echoing the
/// saved document back to the client.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    AWSAuthConnection conn = new AWSAuthConnection(accessKey, secretKey);

    // Read the posted XML out of the request body.
    // BUG FIX: the XmlTextReader was never closed; dispose it deterministically.
    string xml;
    using (XmlTextReader r = new XmlTextReader(Request.InputStream))
    {
        r.MoveToContent();
        xml = r.ReadOuterXml();
    }

    // Round-trip through XmlDocument so malformed input fails here with an
    // XmlException instead of storing garbage.
    XmlDocument documentToSave = new XmlDocument();
    documentToSave.LoadXml(xml);

    SortedList metadata = new SortedList();
    metadata.Add("title", bucket);
    metadata.Add("Content-Type", "application/xml");
    S3Object titledObject = new S3Object(documentToSave.OuterXml, metadata);

    SortedList headers = new SortedList();
    headers.Add("Content-Type", "application/xml");
    // NOTE(review): the stored object is world-readable — confirm intended.
    headers.Add("x-amz-acl", "public-read");
    conn.put(bucket, documentKey, titledObject, headers);

    Response.Write("saved: " + documentToSave.OuterXml);
}
/// <summary>
/// Command-line entry point: reads credentials and a command from the
/// positional arguments, then dispatches to the matching handler.
/// Usage: accessKey secretKey command [bucketSuffix path [localfile]]
/// </summary>
static void Main(string[] args)
{
    // Every invocation needs at least credentials plus a command.
    if (args.Length < 3)
    {
        PrintUsage();
        return;
    }

    accessKey = args[0];
    secretKey = args[1];
    command = args[2];

    // All commands except "listbuckets" also need a bucket suffix and a path.
    if (command != "listbuckets")
    {
        if (args.Length < 5)
        {
            PrintUsage();
            return;
        }
        // Buckets are namespaced per account by prefixing an MD5 of the access key.
        bucket = Md5Hash(accessKey) + "-" + args[3];
        path = args[4];
    }

    // File-transfer commands additionally need a local filename.
    // NOTE(review): "putfile" is parsed here but has no dispatch branch below,
    // so it currently falls through to PrintUsage — confirm whether a PutFile
    // handler is missing.
    if (command == "getfile" || command == "putfile")
    {
        if (args.Length < 6)
        {
            PrintUsage();
            return;
        }
        localfile = args[5];
    }

    try
    {
        conn = new AWSAuthConnection(accessKey, secretKey);

        if (command == "listbuckets")
            ListBuckets();
        else if (command == "dir")
            GetDirectoryListing();
        else if (command == "getfile")
            GetFile();
        else if (command == "listkeys")
            GetKeyListing();
        else
            PrintUsage();
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error executing command: {0}\n{1}", ex.Message, ex.ToString());
    }
}
/// <summary>
/// Fetches the document stored under <c>documentKey</c> from S3 and streams it
/// back to the client with the stored content type.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    AWSAuthConnection conn = new AWSAuthConnection(accessKey, secretKey);

    GetResponse resp = conn.get(bucket, documentKey, null);
    string value = resp.Object.Data;
    SortedList metadata = resp.Object.Metadata;

    // BUG FIX: the HTTP connection was never closed here (other call sites
    // close it), leaking the underlying connection.
    resp.Connection.Close();

    // BUG FIX: a missing "content-type" entry previously threw a
    // NullReferenceException; fall back to XML, which is what this
    // application stores.
    object contentType = metadata["content-type"];
    Response.ContentType = contentType != null ? contentType.ToString() : "application/xml";

    Response.Write(value);
}
/// <summary>
/// Deletes keys under a prefix (optionally filtered by a regex), prompting for
/// confirmation on each key and printing a per-error line plus a final summary
/// to stderr.
/// </summary>
public override void Execute()
{
    AWSAuthConnection svc = new AWSAuthConnection();

    // A trailing "*" is only a wildcard marker; the prefix match is implicit.
    if (prefix.EndsWith("*"))
        prefix = prefix.Substring(0, prefix.Length - 1);

    int deletedCount = 0;
    int failedCount = 0;
    long deletedBytes = 0;

    foreach (ListEntry entry in new IterativeList(bucket, prefix, regex))
    {
        string prompt = string.Format("{0}\t{1,14:##,#}\t{2}", entry.LastModified, entry.Size, entry.Key);
        if (!Yes.Confirm(prompt))
            continue; // user declined this key

        Response deleteResponse = svc.delete(bucket, entry.Key, null);
        deleteResponse.Connection.Close();

        if (deleteResponse.Status == System.Net.HttpStatusCode.NoContent)
        {
            // S3 answers 204 No Content on a successful delete.
            deletedCount++;
            deletedBytes += entry.Size;
        }
        else
        {
            Console.Error.WriteLine(" error: {0}", deleteResponse.Status);
            failedCount++;
        }
    }

    string summary = (failedCount == 0)
        ? string.Format("{0} files, {1:##,#} bytes", deletedCount, deletedBytes)
        : string.Format("{0} files, {1:##,#} bytes; {2} errors", deletedCount, deletedBytes, failedCount);
    Console.Error.WriteLine(summary);
}
/// <summary>
/// Uploads local files matching the file argument to the bucket. Supports
/// recursive upload (/sub), skip-if-unchanged (/sync), archive-bit filtering
/// (/backup) and splitting oversized files into numbered chunks (/big).
/// </summary>
public override void Execute()
{
    AWSAuthConnection svc = new AWSAuthConnection();

    // Resolve the file argument into an absolute source directory plus a
    // filename pattern.
    string directory, filename;
    if (fileArgument == "") // nothing specified; assume current directory
    {
        directory = Path.GetFullPath(".");
        filename = "*";
    }
    else if (Directory.Exists(fileArgument)) // only a directory specified
    {
        directory = Path.GetFullPath(fileArgument);
        filename = "*";
    }
    else if (fileArgument.IndexOf(Path.DirectorySeparatorChar) != -1) // directory and filename specified
    {
        directory = Path.GetFullPath(Path.GetDirectoryName(fileArgument));
        filename = Path.GetFileName(fileArgument);
    }
    else // only a filename specified
    {
        directory = Path.GetFullPath(".");
        filename = fileArgument;
    }

    // Normalize to a trailing separator so Substring(directory.Length) below
    // yields a clean relative path for the S3 key.
    if (!directory.EndsWith(Path.DirectorySeparatorChar.ToString()))
        directory = string.Concat(directory, Path.DirectorySeparatorChar.ToString());

    bool foundAnything = false;

    // For /sync on Mono, fetch the whole remote listing up front and compare
    // against it (see the getLastModified note below).
    IterativeList existingItems = null;
    if (sync && Utils.IsMono)
        existingItems = new IterativeList(bucket, "");

    foreach (string file in Sub.GetFiles(directory, filename, sub))
    {
        foundAnything = true;

        // /backup: only upload files whose archive bit is set.
        if (backup && (File.GetAttributes(file) & FileAttributes.Archive) != FileAttributes.Archive)
            continue;

        // Derive the target key: relative path with forward slashes when
        // recursing, otherwise just the filename, appended to baseKey.
        string key;
        if (sub)
            key = baseKey + file.Substring(directory.Length).Replace("\\", "/");
        else
            key = baseKey + Path.GetFileName(file);

        // 5GB: S3's maximum size for a single PUT (see the error message below).
        const long maxFileBytes = 5L * 1024L * 1024L * 1024L;

        if (sub && Directory.Exists(file))
        {
            // Unlike S3Fox, we don't create dummy folder keys ending with _$folder$.
            // That's a bit of a hack, and would require special handling with the get
            // and list commands resulting in us attempting to simulate a filesystem,
            // which is not KISS enough for this project. And the only downside appears
            // to be that there is no way to represent empty folders.
        }
        else
        {
            if (sync)
            {
                if (Utils.IsMono)
                {
                    // the getLastModified method does not work on linux, so we
                    // iterate a list of received items instead
                    bool changed = true;
                    foreach (ListEntry e in existingItems)
                    {
                        if (e.Key == key)
                        {
                            if (e.LastModified > File.GetLastWriteTimeUtc(file))
                            {
                                changed = false;
                                break;
                            }
                        }
                    }
                    if (!changed)
                        continue;
                }
                else
                {
                    // Skip the upload when the remote copy is newer than the local file.
                    DateTime? lastModified = svc.getLastModified(bucket, key);
                    if (lastModified.HasValue && lastModified.Value > File.GetLastWriteTimeUtc(file))
                    {
                        Progress.reportProgress(key, 0, 0);
                        continue;
                    }
                }
            }

            SortedList headers = AWSAuthConnection.GetHeaders(acl, file, storageClass, encrypt);
            using (FileStream fs = new FileStream(file, FileMode.Open, FileAccess.Read))
            {
                if (!big)
                {
                    if (fs.Length > maxFileBytes)
                        throw new ArgumentOutOfRangeException(string.Format("{0} is too big; maximum file size on S3 is {1}GB. Type s3 help and see the /big option.", Path.GetFileName(file), maxFileBytes / 1024 / 1024 / 1024));
                    else
                    {
                        Console.WriteLine(key);
                        svc.put(bucket, key, fs, headers).Connection.Close();
                    }
                }
                else
                {
                    // /big: upload the file as numbered chunks "<key>.000", "<key>.001", ...
                    const string formatString = "{0}.{1:000}";
                    int sequence = 0;
                    while (fs.Position < fs.Length)
                    {
                        long putBytes = Math.Min(perChunkBytes, fs.Length - fs.Position);
                        string thisKey = string.Format(formatString, key, sequence++);

                        // If this chunk already exists remotely with a matching MD5,
                        // skip re-uploading it.
                        string remoteMD5 = svc.getChecksum(bucket, thisKey);
                        if (remoteMD5 != null)
                        {
                            long positionBeforeChecksum = fs.Position;
                            string localMD5 = Utils.BytesToHex(AWSAuthConnection.calculateMD5(fs, fs.Position, putBytes));
                            if (string.Equals(localMD5, remoteMD5, StringComparison.InvariantCultureIgnoreCase))
                            {
                                Progress.reportProgress(thisKey, 0, 0);
                                continue; // file position has already been advanced by calculating the checksum
                            }
                            else
                                fs.Position = positionBeforeChecksum;
                        }
                        Console.WriteLine(thisKey);
                        svc.put(bucket, thisKey, fs, headers, fs.Position, putBytes).Connection.Close();
                    }
                    // ensure that there isn't a key on S3 corresponding to the next
                    // chunk number, perhaps from a previous upload of the same file
                    // when it was smaller than it is now
                    svc.delete(bucket, string.Format(formatString, key, sequence), null).Connection.Close();
                }
            }
        }

        // /backup: clear the archive bit now that the file has been processed.
        if (backup)
            File.SetAttributes(file, File.GetAttributes(file) & ~FileAttributes.Archive);
    }

    if (!foundAnything)
        throw new FileNotFoundException(string.Format("No files found at {0}{1}", directory, filename));

    // NOTE(review): presumably removes remote keys with no matching local file
    // when /sub with delete is given — confirm against Sub.deleteKeys.
    if (sub && subWithDelete)
        Sub.deleteKeys(directory, bucket, baseKey);
}
/// <summary>
/// Downloads one or more keys to local files. Supports wildcard ("*") and
/// recursive (/sub) downloads; with /big, fetches the numbered chunks
/// "&lt;key&gt;.000", "&lt;key&gt;.001", ... and concatenates them into one local file.
/// </summary>
public override void Execute()
{
    AWSAuthConnection svc = new AWSAuthConnection();

    // Build the list of entries to fetch.
    IEnumerable<ListEntry> keys;
    if (!big)
    {
        if (key.EndsWith("*") || sub)
        {
            // Wildcard or recursive: list everything under the prefix.
            while (key.EndsWith("*"))
                key = key.Substring(0, key.Length - 1);
            IterativeList list = new IterativeList(bucket, key);
            if (list.Count == IterativeList.EntryCount.some && explicitFilename)
                throw new SyntaxException("You specified a destination filename but there is more than one key; can't copy multiple keys to one file");
            keys = list;
        }
        else
        {
            // Exact key: fabricate a single entry, avoiding a listing round-trip.
            List<ListEntry> singleton = new List<ListEntry>();
            singleton.Add(new ListEntry(key, DateTime.UtcNow, null, 0, null, null));
            keys = singleton;
        }
    }
    else
    {
        if (key.EndsWith("*"))
            throw new SyntaxException("Can't use wildcard (*) with the /big option");
        else
        {
            // /big: collect the numbered chunk keys and sort them by numeric
            // suffix so they are concatenated in order.
            List<ListEntry> sorted = new List<ListEntry>();
            foreach (ListEntry e in new IterativeList(bucket, key + ".", new Regex("^" + Regex.Escape(key) + @"\.\d{3,5}$")))
                sorted.Add(e);
            if (sorted.Count == 0)
                throw new FileNotFoundException("Not found: " + key + ".000");
            sorted.Sort(NumericSuffixCompare);
            keys = sorted;
        }
    }

    if (keys is IterativeList && (keys as IterativeList).Count == IterativeList.EntryCount.zero)
        throw new FileNotFoundException("No keys found: " + key);
    else
    {
        FileStream fs = null;

        // On Ctrl+C (and on any failure, via the catch below) delete the
        // half-written destination file.
        ConsoleCancelEventHandler deletePartialFileHandler = delegate
        {
            if (fs != null)
            {
                try { fs.Close(); }
                catch { }
                File.Delete(fs.Name);
                Console.WriteLine("Deleted partial file: " + fs.Name);
            }
        };
        Console.CancelKeyPress += deletePartialFileHandler;

        try
        {
            // With /big, all chunks are appended to a single destination stream.
            if (big)
                fs = new FileStream(filename, FileMode.Create, FileAccess.ReadWrite);

            int sequence = 0; // expected next chunk number (/big only)
            foreach (ListEntry entry in keys)
            {
                GetResponse getResp = svc.get(bucket, entry.Key, null, true);
                if (!big)
                {
                    // Work out the local filename for this key.
                    string thisFilename;
                    if (sub)
                    {
                        thisFilename = Path.Combine(filename, KeyToFilename(entry.Key.Substring(key.Length)));
                        string directoryName = Path.GetDirectoryName(thisFilename);
                        if (!Directory.Exists(directoryName))
                            Directory.CreateDirectory(directoryName);
                    }
                    else if (explicitFilename)
                        thisFilename = filename;
                    else
                        thisFilename = entry.Key.Substring(entry.Key.LastIndexOf("/") + 1);
                    fs = new FileStream(thisFilename, FileMode.Create, FileAccess.ReadWrite);
                }
                else
                {
                    // Stop at a gap in the chunk sequence; higher-numbered chunks
                    // are leftovers from an earlier, larger upload (or, possibly,
                    // a genuinely missing chunk).
                    if (!entry.Key.EndsWith(string.Format(".{0:000}", sequence)))
                    {
                        Console.Error.WriteLine(string.Format("Warning: The download has completed because there is no chunk number {0}, but there are chunks on S3 with higher numbers. These chunks were probably uploaded to S3 when the file was larger than it is now, but it could indicate a missing chunk. To surpress this message, delete the later chunks.", sequence));
                        break;
                    }
                }
                Console.WriteLine(string.Format("{0}/{1}", bucket, entry.Key));
                // Copy the response body to disk; the ETag header is passed along,
                // presumably for integrity checking inside StreamToStream — confirm.
                StreamToStream(getResp.Object.Stream, fs, getResp.Connection.Headers["ETag"], entry.Key, entry.Size);
                getResp.Object.Stream.Close();
                if (!big)
                    fs.Close();
                getResp.Connection.Close();
                sequence++;
            }
            if (big)
                fs.Close();
        }
        catch
        {
            // Remove the partial file on any failure, then rethrow.
            deletePartialFileHandler(null, null);
            throw;
        }
        finally
        {
            Console.CancelKeyPress -= deletePartialFileHandler;
        }
    }
}
/// <summary>
/// Downloads keys to local files, skipping files that are already current via
/// a conditional (If-Modified-Since) GET. Supports wildcard/recursive (/sub)
/// downloads, optional ETag verification (/md5), chunk reassembly (/big) and
/// an optional post-download install step.
/// </summary>
public override void Execute()
{
    AWSAuthConnection svc = new AWSAuthConnection();

    // Build the list of entries to fetch.
    IEnumerable<ListEntry> keys;
    if (!big)
    {
        if (key.EndsWith("*") || sub)
        {
            // Wildcard or recursive: list everything under the prefix,
            // optionally filtered by a regex.
            while (key.EndsWith("*"))
                key = key.Substring(0, key.Length - 1);
            IterativeList list = new IterativeList(bucket, key, regex);
            if (list.Count == IterativeList.EntryCount.some && explicitFilename)
                throw new SyntaxException("You specified a destination filename but there is more than one key; can't copy multiple keys to one file");
            keys = list;
        }
        else
        {
            // Exact key: fabricate a single entry, avoiding a listing round-trip.
            List<ListEntry> singleton = new List<ListEntry>();
            singleton.Add(new ListEntry(key, DateTime.UtcNow, null, 0, null, null));
            keys = singleton;
        }
    }
    else
    {
        if (key.EndsWith("*"))
            throw new SyntaxException("Can't use wildcard (*) with the /big option");
        else
        {
            // /big: collect the numbered chunk keys ("<key>.000", ...) and sort
            // them by numeric suffix so they are concatenated in order.
            List<ListEntry> sorted = new List<ListEntry>();
            foreach (ListEntry e in new IterativeList(bucket, key + ".", new Regex("^" + Regex.Escape(key) + @"\.\d{3,5}$")))
                sorted.Add(e);
            if (sorted.Count == 0)
                throw new FileNotFoundException("Not found: " + key + ".000");
            sorted.Sort(NumericSuffixCompare);
            keys = sorted;
        }
    }

    if (keys is IterativeList && (keys as IterativeList).Count == IterativeList.EntryCount.zero)
        throw new FileNotFoundException("No keys found: " + key);
    else
    {
        FileStream fs = null;

        // On Ctrl+C (and on any failure, via the catch below) delete the
        // half-written destination file.
        ConsoleCancelEventHandler deletePartialFileHandler = delegate
        {
            if (fs != null)
            {
                try { fs.Close(); }
                catch { }
                File.Delete(fs.Name);
                Console.Error.WriteLine("Deleted partial file: " + fs.Name);
            }
        };
        Console.CancelKeyPress += deletePartialFileHandler;

        try
        {
            // With /big, all chunks are appended to a single destination stream.
            if (big)
                fs = new FileStream(filename, FileMode.Create, FileAccess.ReadWrite);

            int sequence = 0; // expected next chunk number (/big only)
            foreach (ListEntry entry in keys)
            {
                string thisFilename = null;
                // MinValue means "always fetch" for the conditional GET (/big path).
                DateTime thisLastModified = DateTime.MinValue;
                if (!big)
                {
                    if (sub)
                    {
                        if ("*" == Path.GetFileNameWithoutExtension(filename))
                        {
                            // replace star with key/prefix:
                            // bucket/path/prefix/[files] c:\local\* /sub ==> c:\local\path\prefix\[files]
                            thisFilename = Path.Combine(Path.GetDirectoryName(filename), KeyToFilename(entry.Key));
                        }
                        else
                        {
                            // strip key/prefix, leaving only filename:
                            // bucket/path/prefix/[files] c:\local\ /sub ==> c:\local\[files]
                            thisFilename = Path.Combine(filename, KeyToFilename(entry.Key.Substring(key.Length)));
                        }
                        string directoryName = Path.GetDirectoryName(thisFilename);
                        if (!Directory.Exists(directoryName))
                        {
                            Directory.CreateDirectory(directoryName);
                        }
                    }
                    else if (explicitFilename)
                    {
                        thisFilename = filename;
                    }
                    else
                    {
                        thisFilename = entry.Key.Substring(entry.Key.LastIndexOf("/") + 1);
                    }

                    // Skip entries whose derived filename is empty — presumably
                    // prefix/"folder" keys ending in "/"; confirm.
                    if (Path.GetFileName(thisFilename).Trim().Length == 0)
                    {
                        continue;
                    }

                    // Local timestamp drives the conditional GET below.
                    thisLastModified = File.GetLastWriteTimeUtc(thisFilename);
                    // Open the file lazily, only once the server confirms the
                    // object actually changed.
                    fs = null;
                }
                else
                {
                    // Stop at a gap in the chunk sequence; higher-numbered chunks
                    // are leftovers from an earlier, larger upload (or, possibly,
                    // a genuinely missing chunk).
                    if (!entry.Key.EndsWith(string.Format(".{0:000}", sequence)))
                    {
                        Console.Error.WriteLine(string.Format("Warning: The download has completed because there is no chunk number {0}, but there are chunks on S3 with higher numbers. These chunks were probably uploaded to S3 when the file was larger than it is now, but it could indicate a missing chunk. To surpress this message, delete the later chunks.", sequence));
                        break;
                    }
                }

                Console.Write(string.Format("{0}/{1} {2} ", bucket, entry.Key, s3.Utils.FormatFileSize(entry.Size)));

                // NOTE(review): install.SetFile semantics assumed — appears to
                // record whether the file is new before the download; confirm
                // against the install class.
                if (null != install)
                {
                    install.SetFile(thisFilename, !File.Exists(thisFilename));
                }

                try
                {
                    GetResponse getResp = svc.getIfModifiedSince(bucket, entry.Key, thisLastModified, true); // may throw 304
                    if (fs == null)
                        fs = new FileStream(thisFilename, FileMode.Create, FileAccess.ReadWrite);
                    // Copy the body to disk; the ETag is only supplied when /md5 was given.
                    StreamToStream(getResp.Object.Stream, fs, md5 ? getResp.Connection.Headers["ETag"] : null, entry.Key, entry.Size);
                    getResp.Object.Stream.Close();
                    if (!big)
                        fs.Close();
                    getResp.Connection.Close();
                    sequence++;
                    // Mirror the remote timestamp so the next run's conditional
                    // GET can skip an unchanged file.
                    File.SetLastWriteTimeUtc(thisFilename, entry.LastModified);
                    Console.WriteLine();
                    if (null != install)
                    {
                        // newer file downloaded
                        install.SetFile(thisFilename, true);
                    }
                }
                catch (WebException x)
                {
                    // A 304 Not Modified surfaces as a WebException; the local
                    // copy is already current, so move on to the next key.
                    if (x.Message.Contains("(304)"))
                    {
                        Console.WriteLine(" Not modified");
                        continue;
                    }
                    throw;
                }
            }
            if (big)
                fs.Close();

            if (null != install)
            {
                install.InstallProducts(true);
            }
        }
        catch
        {
            // Remove the partial file on any failure, then rethrow.
            deletePartialFileHandler(null, null);
            throw;
        }
        finally
        {
            Console.CancelKeyPress -= deletePartialFileHandler;
        }
    }
}