/// <summary>
/// Uploads a local file to the configured S3 bucket as a public-read object.
/// </summary>
/// <param name="localPath">Full path of the local file to upload.</param>
/// <param name="remotePath">Object key to store the file under in S3.</param>
public static void Transfer2S3(string localPath, string remotePath)
{
    EnsureS3(); // make sure the shared s3 client is initialized

    var uploadRequest = new TransferUtilityUploadRequest
    {
        CannedACL = Amazon.S3.Model.S3CannedACL.PublicRead, // object is world-readable
        FilePath = localPath,
        BucketName = AwsConfig.S3_BUCKET_NAME,
        Key = remotePath
    };
    new TransferUtility(s3).Upload(uploadRequest);
}
/// <summary>
/// Uploads a file to S3 with a public-read ACL and a "Title" metadata entry,
/// then prints the public URL and waits for a key press.
/// </summary>
static void Main(string[] args)
{
    try
    {
        TransferUtility fT = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.USEast1));
        string fileKey = genKey();
        TransferUtilityUploadRequest uR = new TransferUtilityUploadRequest
        {
            BucketName = bucketName,
            FilePath = filepath,
            CannedACL = S3CannedACL.PublicRead,
            Key = fileKey
        };
        uR.Metadata.Add("Title", "Tiger");
        fT.Upload(uR);
        Console.WriteLine("File Uploaded. Access \"S3.amazonaws.com/sheltdev/" + fileKey);
        Console.ReadKey(false);
    }
    catch (AmazonS3Exception e)
    {
        // BUG FIX: the original called Console.WriteLine(e.Message, e.InnerException),
        // which treats e.Message as a composite *format string* — it throws
        // FormatException when the message contains '{' or '}' and never printed
        // the inner exception at all.
        Console.WriteLine("{0} {1}", e.Message, e.InnerException);
        Console.ReadKey(false);
    }
}
/// <summary>
/// Uploads SourceFile to BucketName under DestinationFile; when PublicRead is
/// set, the object is tagged with a public-read canned ACL header.
/// </summary>
/// <exception cref="BuildException">Thrown when SourceFile does not exist.</exception>
protected override void ExecuteS3Task()
{
    if (!File.Exists(this.SourceFile))
    {
        throw new BuildException("source-file does not exist: " + this.SourceFile);
    }

    using (var utility = new Amazon.S3.Transfer.TransferUtility(this.AccessKey, this.SecretAccessKey))
    {
        var request = new TransferUtilityUploadRequest
        {
            BucketName = this.BucketName,
            FilePath = this.SourceFile,
            Key = this.DestinationFile
        };

        // Apply the ACL via a raw header rather than the CannedACL property.
        if (PublicRead)
        {
            request.AddHeader("x-amz-acl", "public-read");
        }

        utility.Upload(request);
    }
}
/// <summary>
/// Streams content to the given bucket/key over HTTP, reporting progress to an
/// optional callback. Failures are logged and rethrown.
/// </summary>
/// <param name="itemKey">Object key to write.</param>
/// <param name="bucketName">Target bucket.</param>
/// <param name="uploadContent">Stream whose content is uploaded.</param>
/// <param name="progressHandler">Optional progress callback.</param>
public static void PutBucketItem(string itemKey, string bucketName, Stream uploadContent, Action<ProgressResponse> progressHandler = null)
{
    // Build the request up front; the stream and progress hook are attached below.
    var request = new TransferUtilityUploadRequest()
        .WithBucketName(bucketName)
        .WithKey(itemKey);
    try
    {
        var s3Client = AWSClientFactory.CreateAmazonS3Client(
            Properties.Resources.AmazonAccessKeyId,
            Properties.Resources.SecretAccessKeyId,
            new AmazonS3Config().WithCommunicationProtocol(Protocol.HTTP));
        var transfer = new TransferUtility(s3Client);

        // Forward SDK progress events to the caller-supplied handler, if any.
        request.UploadProgressEvent += (sender, args) =>
        {
            var progress = new ProgressResponse
            {
                BytesSent = args.TransferredBytes,
                ProgressPercentage = args.PercentDone,
                TotalBytesToSend = args.TotalBytes
            };
            if (progressHandler != null)
            {
                progressHandler(progress);
            }
        };

        request.WithInputStream(uploadContent);
        transfer.Upload(request);
    }
    catch (Exception ex)
    {
        Log.ErrorFormat("Exception occur writing to amazon S3 server\nException: {0}\nStacktrace: {1}", ex.Message, ex.StackTrace);
        throw;
    }
}
/// <summary>
/// Demonstrates the four TransferUtility upload variants: implicit key from the
/// file name, explicit key, stream source, and a fully-configured request with
/// storage class, part size, ACL and metadata.
/// </summary>
static void Main(string[] args)
{
    try
    {
        TransferUtility fileTransferUtility = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.USEast1));

        // 1. Upload a file, file name is used as the object key name.
        fileTransferUtility.Upload(filePath, existingBucketName);
        Console.WriteLine("Upload 1 completed");

        // 2. Specify object key name explicitly.
        fileTransferUtility.Upload(filePath, existingBucketName, keyName);
        Console.WriteLine("Upload 2 completed");

        // 3. Upload data from a type of System.IO.Stream.
        using (FileStream fileToUpload = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        {
            fileTransferUtility.Upload(fileToUpload, existingBucketName, keyName);
        }
        Console.WriteLine("Upload 3 completed");

        // 4. Specify advanced settings/options.
        TransferUtilityUploadRequest fileTransferUtilityRequest = new TransferUtilityUploadRequest
        {
            BucketName = existingBucketName,
            FilePath = filePath,
            StorageClass = S3StorageClass.ReducedRedundancy,
            PartSize = 5242880, // 5 MB.
            Key = keyName,
            CannedACL = S3CannedACL.PublicRead
        };
        fileTransferUtilityRequest.Metadata.Add("param1", "Value1");
        fileTransferUtilityRequest.Metadata.Add("param2", "Value2");
        fileTransferUtility.Upload(fileTransferUtilityRequest);
        Console.WriteLine("Upload 4 completed");
    }
    catch (AmazonS3Exception s3Exception)
    {
        // BUG FIX: the original passed s3Exception.Message as a composite format
        // string (Console.WriteLine(string, object)), which throws FormatException
        // when the message contains braces and never printed the inner exception.
        Console.WriteLine("{0} {1}", s3Exception.Message, s3Exception.InnerException);
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
//Pushes file to Amazon S3 with public read permissions
/// <summary>
/// Uploads a local file to the configured bucket under _Prefix + fileName.
/// </summary>
/// <returns>true on success; false if any exception occurred.</returns>
public bool UploadFile(string localFile, string fileName, string contentType)
{
    IAmazonS3 client = GetS3Client();
    try
    {
        var uploadRequest = new TransferUtilityUploadRequest
        {
            BucketName = _BucketName,
            Key = _Prefix + fileName,
            FilePath = localFile,
            StorageClass = S3StorageClass.Standard,
            CannedACL = S3CannedACL.PublicRead,
            ContentType = contentType
        };
        new TransferUtility(client).Upload(uploadRequest);
        //PutObjectResponse response2 = client.PutObject(request);
        return true;
    }
    catch
    {
        // NOTE(review): every failure is silently swallowed; the caller only
        // sees 'false' with no diagnostics — confirm this is intentional.
        return false;
    }
}
/// <summary>
/// Uploads a local file into the given S3 folder with a public-read ACL and
/// returns the resulting public URL.
/// </summary>
/// <exception cref="ApplicationException">Wraps any failure during upload.</exception>
public string UploadFile(string localFilePath, string s3Folder)
{
    string uploadedFileUrl = string.Empty;
    try
    {
        // Normalize the folder (e.g. ensure trailing slash) before building the key.
        s3Folder = CorrectFolderPath(s3Folder);
        string objectKey = s3Folder + Path.GetFileName(localFilePath);

        using (var transferUtility = new Amazon.S3.Transfer.TransferUtility(_awsS3Client))
        using (var sourceStream = new FileStream(localFilePath, FileMode.Open))
        {
            var uploadRequest = new TransferUtilityUploadRequest
            {
                BucketName = _bucketName,
                Key = objectKey,
                InputStream = sourceStream,
                CannedACL = S3CannedACL.PublicRead,
            };
            transferUtility.Upload(uploadRequest);
            uploadedFileUrl = bucketUrl + objectKey;
        }
    }
    catch (Exception ex)
    {
        throw new ApplicationException("S3 UploadfileToS3 - unexpected exception.", ex);
    }
    return uploadedFileUrl;
}
/// <summary>
/// Uploads a stream to the given bucket/path and maps the outcome to an
/// S3Response (OK, the S3 error status, or InternalServerError).
/// </summary>
/// <param name="stream">Content to upload.</param>
/// <param name="path">Object key in the bucket.</param>
/// <param name="bucket">Target bucket name.</param>
public async Task<S3Response> UploadFile(MemoryStream stream, string path, string bucket)
{
    try
    {
        using (Amazon.S3.Transfer.TransferUtility transferUti = new Amazon.S3.Transfer.TransferUtility(_client))
        {
            // BUG FIX: the original called the synchronous Upload() inside an
            // async method, blocking a thread-pool thread; await the async API.
            await transferUti.UploadAsync(stream, bucket, path);
            return new S3Response { Status = HttpStatusCode.OK, Message = "Enviado com sucesso." };
        }
    }
    catch (AmazonS3Exception e)
    {
        // Surface the S3 status code to the caller.
        return new S3Response { Message = e.Message, Status = e.StatusCode };
    }
    catch (Exception e)
    {
        return new S3Response { Status = HttpStatusCode.InternalServerError, Message = e.Message };
    }
}
/// <summary>
/// Round-trip test: uploads a file, verifies its metadata, downloads it twice
/// (the second time over a truncated local copy) and validates the content.
/// </summary>
public void SimpleUpload()
{
    var s3Client = Client;
    using (var transferUtility = new Amazon.S3.Transfer.TransferUtility(s3Client))
    {
        // Upload; the file name becomes the object key.
        transferUtility.Upload(fullPath, bucketName);

        var metadata = s3Client.GetObjectMetadata(new GetObjectMetadataRequest
        {
            BucketName = bucketName,
            Key = testFile
        });
        Assert.IsTrue(metadata.ETag.Length > 0);

        // Download and verify the object round-trips.
        var downloadPath = fullPath + ".download";
        var downloadRequest = new Amazon.S3.Transfer.TransferUtilityDownloadRequest
        {
            BucketName = bucketName,
            Key = testFile,
            FilePath = downloadPath
        };
        transferUtility.Download(downloadRequest);
        TestDownloadedFile(downloadPath);

        // empty out file, except for 1 byte
        File.WriteAllText(downloadPath, testContent.Substring(0, 1));
        Assert.IsTrue(File.Exists(downloadPath));

        // Re-download over the truncated file and verify again.
        transferUtility.Download(downloadRequest);
        TestDownloadedFile(downloadPath);
    }
}
/// <summary>
/// Uploads a local file to S3 with AES256 server-side encryption and progress
/// reporting. Tuple fields: Item1 = local file path, Item2 = key prefix,
/// Item3 = timestamp appended to the object key.
/// </summary>
/// <param name="file">Local path, key prefix, and timestamp for the object key.</param>
/// <param name="existingBucketName">Target bucket (must already exist).</param>
public static void UploadFile(System.Tuple<string, string, DateTime> file, string existingBucketName)
{
    // Credentials come from app settings rather than the default credential chain.
    NameValueCollection appConfig = ConfigurationManager.AppSettings;
    string accessKeyID = appConfig["AWSAccessKey"];
    string secretAccessKey = appConfig["AWSSecretKey"];
    try
    {
        TransferUtility fileTransferUtility = new TransferUtility(accessKeyID, secretAccessKey);
        // Use TransferUtilityUploadRequest to configure options.
        // In this example we subscribe to an event.
        // NOTE(review): the WithXxx fluent setters are the legacy AWS SDK v1 API.
        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest()
            .WithBucketName(existingBucketName)
            .WithFilePath(file.Item1)
            .WithServerSideEncryptionMethod(ServerSideEncryptionMethod.AES256)
            // NOTE(review): in "ddmmyyyymmmmhhss", 'mm' means MINUTES, not month
            // ('MM'), and 'hh' is the 12-hour clock without an am/pm designator —
            // this key format is almost certainly not what was intended, but
            // changing it would change the keys of new uploads; confirm first.
            .WithKey(file.Item2 + file.Item3.ToString("ddmmyyyymmmmhhss"));
        uploadRequest.UploadProgressEvent += new EventHandler<UploadProgressArgs>(uploadRequest_UploadPartProgressEvent);
        fileTransferUtility.Upload(uploadRequest);
    }
    catch (ArgumentException e)
    {
        Console.WriteLine(e.Message);
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine(e.Message + e.InnerException);
    }
}
/// <summary>
/// Uploads a document to {bucket}/{destinationPath}/{fileName}, creating the
/// bucket if it does not exist, optionally making the object public-read, and
/// reporting progress via uploadFileProgressCallback.
/// </summary>
/// <exception cref="Exception">Wraps any failure, preserving it as InnerException.</exception>
public void SendDocument(string filePath, string bucket, string destinationPath, string fileNamOnDestinationWithExtension = "index.html", bool isPublic = false)
{
    try
    {
        // BUG FIX: 'using' guarantees disposal even when the upload throws
        // (the original only called Dispose() on the success path).
        using (var transferUtility = new TransferUtility(amazonS3Client))
        {
            if (!transferUtility.S3Client.DoesS3BucketExist(bucket))
                transferUtility.S3Client.PutBucket(new PutBucketRequest { BucketName = bucket });

            var request = new TransferUtilityUploadRequest
            {
                BucketName = bucket,
                Key = string.Format("{0}/{1}", destinationPath, fileNamOnDestinationWithExtension),
                FilePath = filePath
            };

            if (isPublic)
                request.Headers["x-amz-acl"] = "public-read";

            request.UploadProgressEvent += uploadFileProgressCallback;
            transferUtility.Upload(request);
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: keep the original failure as InnerException instead of
        // discarding its stack trace and type.
        throw new Exception("Error send file to S3. " + ex.Message, ex);
    }
}
/// <summary>
/// Accepts multipart file uploads from the current HTTP request, streams the
/// first posted file to S3 under a GUID-prefixed key, stores the resulting
/// path on the current user's profile, and returns 201 with the remote path.
/// </summary>
/// <returns>201 Created with the S3 key, or 400 when no file was posted.</returns>
public HttpResponseMessage ExternalPost()
{
    HttpResponseMessage result = null;
    HttpRequest httpRequest = HttpContext.Current.Request;
    TransferUtility fileTransferUtility = new TransferUtility(new AmazonS3Client(ConfigService.AwsAccessKeyId, ConfigService.AwsSecretAccessKey, Amazon.RegionEndpoint.USWest2));
    if (httpRequest.Files.Count > 0)
    {
        foreach (string file in httpRequest.Files)
        {
            HttpPostedFile postedFile = httpRequest.Files[file];
            // GUID prefix keeps keys unique even when file names collide.
            string guid = Guid.NewGuid().ToString();
            string remoteFilePath = ConfigService.RemoteFilePath + guid + "_" + postedFile.FileName;
            TransferUtilityUploadRequest fileTransferUtilityRequest = new TransferUtilityUploadRequest
            {
                BucketName = ConfigService.BucketName,
                //FilePath = filePath,
                InputStream = postedFile.InputStream,
                //StorageClass = S3StorageClass.ReducedRedundancy,
                //PartSize = 6291456, // 6 MB.
                Key = remoteFilePath,
                //CannedACL = S3CannedACL.PublicRead
            };
            fileTransferUtility.Upload(fileTransferUtilityRequest);
            string paraRemoteFilePath = "/" + remoteFilePath;
            ItemResponse<string> response = new ItemResponse<string>();
            string userId = UserService.GetCurrentUserId();
            ProfileService.UpdatePhotoPath(userId, paraRemoteFilePath);
            response.Item = remoteFilePath;
            // NOTE: returns after the FIRST file; any additional posted files are ignored.
            return Request.CreateResponse(HttpStatusCode.Created, response.Item);
        }
    }
    else
    {
        result = Request.CreateResponse(HttpStatusCode.BadRequest);
    }
    return result;
}
/// <summary>
/// Streams a local file to the configured bucket/key; S3 errors are logged
/// rather than rethrown.
/// </summary>
public void Upload(string filePath)
{
    try
    {
        var transfer = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.USEast1));
        // Open the source read-only and let the utility stream it up.
        using (var sourceStream = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        {
            transfer.Upload(sourceStream, bucketName, keyName);
        }
    }
    catch (AmazonS3Exception ex)
    {
        Log.Error(ex.Message, ex.InnerException);
    }
}
/// <summary>
/// Uploads a local file to an existing bucket using temporary session
/// credentials; the file name becomes the object key.
/// </summary>
/// <param name="filePath">Local file to upload.</param>
/// <param name="existingBucketName">Target bucket (must already exist).</param>
/// <param name="AccessKey">Temporary access key id.</param>
/// <param name="SecretKey">Temporary secret key.</param>
/// <param name="sessionToken">STS session token.</param>
public static void uploadFile(string filePath, string existingBucketName, string AccessKey, string SecretKey, string sessionToken)
{
    Console.WriteLine("filepath: " + filePath);
    Console.WriteLine("bucketname: " + existingBucketName);
    // SECURITY FIX: the original wrote the access key, secret key and session
    // token to the console in clear text; credentials must never be logged.
    try
    {
        AmazonS3Client sclient = new AmazonS3Client(AccessKey, SecretKey, sessionToken, Amazon.RegionEndpoint.USWest2);
        TransferUtility fileTransferUtility = new TransferUtility(sclient);

        /* Find way to increase time out timer because of large file size
        TransferUtilityConfig config = new TransferUtilityConfig();
        config.DefaultTimeout = 11111;
        TransferUtility utility = new TransferUtility(config);
        */

        // 1. Upload a file, file name is used as the object key name.
        fileTransferUtility.Upload(filePath, existingBucketName);
        Console.WriteLine("Upload 1 completed");
    }
    catch (AmazonS3Exception s3Exception)
    {
        // Also fixes the composite-format-string misuse: Message could contain
        // braces and the inner exception was never actually printed.
        Console.WriteLine("{0} {1}", s3Exception.Message, s3Exception.InnerException);
    }
}
/// <summary>
/// Uploads a photo stream to the photo bucket with a public-read ACL and
/// returns an S3Photo describing its public URL and title.
/// </summary>
/// <param name="stream">Photo content.</param>
/// <param name="filename">Object key in the photo bucket.</param>
/// <param name="title">Display title for the photo.</param>
/// <param name="descriptioSn">Unused. NOTE(review): confirm whether it should be stored.</param>
/// <param name="tags">Unused. NOTE(review): confirm whether it should be stored.</param>
public IPhoto UploadPhoto(Stream stream, string filename, string title, string descriptioSn, string tags)
{
    TransferUtilityUploadRequest request = new TransferUtilityUploadRequest();
    request.InputStream = stream;
    request.BucketName = photoBucket;
    request.Key = filename;
    request.CannedACL = Amazon.S3.Model.S3CannedACL.PublicRead;

    // Dispose the utility (and its internally-created client) after the upload.
    using (TransferUtility transferUtility = new TransferUtility(ConfigurationManager.AppSettings["AWSAccessKey"], ConfigurationManager.AppSettings["AWSSecretKey"]))
    {
        transferUtility.Upload(request);
    }

    S3Photo photo = new S3Photo();
    photo.WebUrl = string.Format("http://s3.amazonaws.com/{0}/{1}", photoBucket, filename);
    // BUG FIX: the original assigned 'filename' to Title, leaving the 'title'
    // parameter entirely unused.
    photo.Title = title;
    return photo;
}
/// <summary>
/// Packages the current directory into a gzipped tar archive, uploads it to S3
/// using temporary credentials while showing a progress bar, then triggers an
/// AppHarbor build. The temporary archive is always deleted, even on failure.
/// </summary>
protected override void InnerExecute(string[] arguments)
{
    _writer.WriteLine("Getting upload credentials... ");
    _writer.WriteLine();
    var uploadCredentials = GetCredentials();
    var temporaryFileName = Path.GetTempFileName();
    try
    {
        // Tar + gzip the working directory into the temp file, skipping excluded dirs.
        using (var packageStream = new FileStream(temporaryFileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite))
        using (var gzipStream = new GZipStream(packageStream, CompressionMode.Compress, true))
        {
            var sourceDirectory = new DirectoryInfo(Directory.GetCurrentDirectory());
            sourceDirectory.ToTar(gzipStream, excludedDirectoryNames: _excludedDirectories.ToArray());
        }
        using (var s3Client = new AmazonS3Client(uploadCredentials.GetSessionCredentials()))
        using (var transferUtility = new TransferUtility(s3Client))
        {
            var request = new TransferUtilityUploadRequest
            {
                FilePath = temporaryFileName,
                BucketName = uploadCredentials.Bucket,
                Key = uploadCredentials.ObjectKey,
                // Large archives can exceed the default timeout; allow two hours.
                Timeout = (int)TimeSpan.FromHours(2).TotalMilliseconds,
            };
            var progressBar = new MegaByteProgressBar();
            request.UploadProgressEvent += (object x, UploadProgressArgs y) => progressBar
                .Update("Uploading package", y.TransferredBytes, y.TotalBytes);
            transferUtility.Upload(request);
            // Move past the progress-bar line before writing further output.
            Console.CursorTop++;
            _writer.WriteLine();
        }
    }
    finally
    {
        // Always remove the temp archive, even when packaging/upload fails.
        File.Delete(temporaryFileName);
    }
    TriggerAppHarborBuild(uploadCredentials);
}
/// <summary>
/// Uploads 'stream' to S3 under key 'fileName' with a public-read ACL,
/// optionally targeting a sub-directory of the bucket.
/// </summary>
/// <param name="localFilePath">Unused. NOTE(review): the original configured a
/// second, never-uploaded request with this path — confirm whether a file-path
/// upload variant is still needed.</param>
/// <param name="bucketName">Target bucket (must already exist).</param>
/// <param name="subDirectoryInBucket">Optional sub-directory; empty/null means bucket root.</param>
/// <param name="stream">Content to upload.</param>
/// <param name="fileName">Object key in S3.</param>
/// <returns>true when the transfer call returned without throwing.</returns>
public bool sendMyFileToS3(string localFilePath, string bucketName, string subDirectoryInBucket, System.IO.Stream stream, string fileName)
{
    // Region choice affects latency only, not where callers can read the object.
    IAmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(Amazon.RegionEndpoint.USEast1);
    TransferUtility utility = new TransferUtility(client);

    var uploadRequest = new TransferUtilityUploadRequest
    {
        InputStream = stream,
        CannedACL = S3CannedACL.PublicRead,
        Key = fileName
    };

    // BUG FIX: the original built a SECOND request holding the bucket name,
    // sub-directory and file path, but uploaded this one — so the
    // sub-directory was silently ignored. Apply it to the request we upload.
    if (string.IsNullOrEmpty(subDirectoryInBucket))
    {
        uploadRequest.BucketName = bucketName; //no subdirectory just bucket name
    }
    else
    {
        // subdirectory and bucket name
        uploadRequest.BucketName = bucketName + @"/" + subDirectoryInBucket;
    }

    utility.Upload(uploadRequest); //commensing the transfer
    return true; //indicate that the file was sent
}
/// <summary>
/// Uploads the configured filePath to existingBucketName under keyName.
/// </summary>
public void uploadfile()
{
    try
    {
        TransferUtility fileTransferUtility = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.EUWest1));
        // 2. Specify object key name explicitly.
        fileTransferUtility.Upload(filePath, existingBucketName, keyName);
        Console.WriteLine("Upload 2 completed");
    }
    catch (AmazonS3Exception s3Exception)
    {
        // BUG FIX: Message was used as a composite format string — it throws
        // FormatException on braces and never printed the inner exception.
        Console.WriteLine("{0} {1}", s3Exception.Message, s3Exception.InnerException);
    }
}
/// <summary>
/// Uploads SourceFile to BucketName as DestinationFile. When PublicRead is
/// set, the upload carries a public-read canned ACL header.
/// </summary>
/// <exception cref="BuildException">Thrown when SourceFile is missing.</exception>
protected override void ExecuteS3Task()
{
    // Fail fast with a build error if the input file is absent.
    if (!File.Exists(this.SourceFile))
    {
        throw new BuildException("source-file does not exist: " + this.SourceFile);
    }

    using (var transfer = new Amazon.S3.Transfer.TransferUtility(this.AccessKey, this.SecretAccessKey))
    {
        var upload = new TransferUtilityUploadRequest
        {
            BucketName = this.BucketName,
            FilePath = this.SourceFile,
            Key = this.DestinationFile
        };
        if (PublicRead)
        {
            upload.AddHeader("x-amz-acl", "public-read");
        }
        transfer.Upload(upload);
    }
}
/// <summary>
/// Uploads a database file from the fixed local folder to the configured
/// bucket, using the same name as the S3 key.
/// </summary>
/// <returns>true once the upload call completes.</returns>
public bool SendFileToS3(string fileNameInS3)
{
    // Local database folder that holds the file to push.
    var sourcePath = string.Concat(@"C:\source\MyBookLibrary\MyBookLibrary.Data\Database\", fileNameInS3);
    using (_client = new Amazon.S3.AmazonS3Client(Amazon.RegionEndpoint.USEast1))
    {
        var uploader = new TransferUtility(_client);
        var uploadRequest = new TransferUtilityUploadRequest
        {
            BucketName = BucketName,
            Key = fileNameInS3,
            FilePath = sourcePath
        };
        uploader.Upload(uploadRequest);
        Console.Write($"File {fileNameInS3} uploaded.");
        return true;
    }
}
/// <summary>
/// Uploads a stream to the bucket named after the user, under 'fileName',
/// with a public-read ACL.
/// </summary>
/// <returns>true when the upload completed; exceptions propagate to the caller.</returns>
/// <exception cref="AmazonS3Exception">Rethrown unchanged on S3 failure.</exception>
public bool Upload(string userID, string fileName, Stream file)
{
    try
    {
        TransferUtility fileTransferUtility = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.APSoutheast1));
        TransferUtilityUploadRequest request = new TransferUtilityUploadRequest
        {
            InputStream = file,
            Key = fileName,
            BucketName = userID,
            CannedACL = S3CannedACL.PublicRead
        };
        fileTransferUtility.Upload(request);
        // BUG FIX: the original declared 'retval = false' and never set it to
        // true, so callers always saw failure even after a successful upload.
        return true;
    }
    catch (AmazonS3Exception)
    {
        // BUG FIX: 'throw;' preserves the original stack trace; the original
        // 'throw amazonS3Exception;' reset it.
        throw;
    }
}
/// <summary>
/// S3 demo: puts a test object, gets it back to disk, opens a one-hour
/// pre-signed URL, then re-uploads via TransferUtility.
/// </summary>
static void Main(string[] args)
{
    try
    {
        var client = new AmazonS3Client();

        // Put the test file, then read it back and save the body locally.
        PutObjectResponse putResponse = client.PutObject(new PutObjectRequest { BucketName = BUCKET_NAME, FilePath = TEST_FILE });
        GetObjectResponse getResponse = client.GetObject(new GetObjectRequest { BucketName = BUCKET_NAME, Key = TEST_FILE });
        getResponse.WriteResponseStreamToFile(@"c:\talk\" + TEST_FILE);

        // Generate a one-hour pre-signed URL and open it.
        var presignedUrl = client.GetPreSignedURL(new GetPreSignedUrlRequest { BucketName = BUCKET_NAME, Key = TEST_FILE, Expires = DateTime.Now.AddHours(1) });
        OpenURL(presignedUrl);

        // Finally upload the same file via the high-level TransferUtility.
        var transfer = new TransferUtility(client);
        transfer.Upload(TEST_FILE, BUCKET_NAME);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
/// <summary>
/// Round-trip test: uploads a fixture file, waits for its metadata to become
/// visible, downloads it twice (the second time over a truncated local copy)
/// and validates the content each time.
/// </summary>
public void SimpleUpload()
{
    var s3Client = Client;
    using (var transferUtility = new Amazon.S3.Transfer.TransferUtility(s3Client))
    {
        // Upload; the file name becomes the object key.
        transferUtility.Upload(testFilePath, testBucketName);

        // Poll until the object's metadata is readable (eventual consistency).
        var metadataResponse = WaitUtils.WaitForComplete(() =>
        {
            return s3Client.GetObjectMetadataAsync(new GetObjectMetadataRequest
            {
                BucketName = testBucketName,
                Key = TEST_FILENAME
            }).Result;
        });
        Assert.True(metadataResponse.ETag.Length > 0);

        // Download and verify the object round-trips.
        var downloadPath = testFilePath + ".download";
        var downloadRequest = new Amazon.S3.Transfer.TransferUtilityDownloadRequest
        {
            BucketName = testBucketName,
            Key = TEST_FILENAME,
            FilePath = downloadPath
        };
        transferUtility.Download(downloadRequest);
        TestDownloadedFile(downloadPath);

        // empty out file, except for 1 byte
        File.WriteAllText(downloadPath, TEST_CONTENT.Substring(0, 1));
        Assert.True(File.Exists(downloadPath));

        // Re-download over the truncated file and verify again.
        transferUtility.Download(downloadRequest);
        TestDownloadedFile(downloadPath);
    }
}
/// <summary>
/// Interactive release driver: lists/edits version numbers, lets the operator
/// choose zip and NuGet packages to create/upload, acquires S3 and NuGet
/// credentials, embeds the git commit hash, builds, packages, uploads zips to
/// S3 (with retry) and pushes NuGet packages.
/// NOTE(review): left byte-identical — the flow depends heavily on exact
/// statement order and console interaction; only this header was added.
/// </summary>
public void Run() { MakeConsoleNicer(); //PrepareForPackaging(); //using (RestorePoint rp = new RestorePoint(q.files(new Pattern("^/Samples/*/*.(cs|vb)proj$")))) { // //Replace all project references temporarily // foreach (string pf in q.files(new Pattern("^/Samples/[^/]+/*.(cs|vb)proj$"))) { // new ProjectFileEditor(pf).ReplaceAllProjectReferencesWithDllReferences("..\\..\\dlls\\release"); // } //} say("Project root: " + f.ParentPath); nl(); //The base name for creating zip packags. string packageBase = v.get("PackageName"); // // [assembly: PackageName("Resizer")] //List the file version number [assembly: AssemblyFileVersion("3.0.5.*")] string fileVer = list("FileVersion", v.get("AssemblyFileVersion").TrimEnd('.', '*')); //List the assembly version number. AssemblyVersion("3.0.5.*")] string assemblyVer = list("AssemblyVersion", v.get("AssemblyVersion").TrimEnd('.', '*')); //List the information version number. (used in zip package names) [assembly: AssemblyInformationalVersion("3-alpha-5")] string infoVer = list("InfoVersion", v.get("AssemblyInformationalVersion").TrimEnd('.', '*')); //List the Nuget package version number. New builds need to have a 4th number specified. string nugetVer = list("NugetVersion", v.get("NugetVersion").TrimEnd('.', '*')); //a. Ask if version numbers need to be modified if (ask("Change version numbers?")) { //b. Ask for file version number [assembly: AssemblyFileVersion("3.0.5.*")] fileVer = change("FileVersion", v.get("AssemblyFileVersion").TrimEnd('.', '*')); //c. Ask for assembly version number. AssemblyVersion("3.0.5.*")] assemblyVer = change("AssemblyVersion", v.get("AssemblyVersion").TrimEnd('.', '*')); //d: Ask for information version number. (used in zip package names) [assembly: AssemblyInformationalVersion("3-alpha-5")] infoVer = change("InfoVersion", v.get("AssemblyInformationalVersion").TrimEnd('.', '*')); //e. Ask for Nuget package version number. New builds need to have a 4th number specified. 
nugetVer = change("NugetVersion", v.get("NugetVersion").TrimEnd('.', '*')); } //b. Ask about hotfix - for hotfixes, we embed warnings and stuff so they don't get used in production. bool isHotfix = ask("Is this a hotfix? Press Y to tag the assembiles and packages as such."); //Build the hotfix name string packageHotfix = isHotfix ? ("-hotfix-" + DateTime.Now.ToString("htt").ToLower()) : ""; //Get the download server from SharedAssemblyInfo.cs if specified string downloadServer = v.get("DownloadServer"); if (downloadServer == null) downloadServer = "http://downloads.imageresizing.net/"; //f. For each package, specify options: choose 'c' (create and/or overwrite), 'u' (upload), 'p' (make private). //Should inform if the file already exists. nl(); say("For each zip package, specify all operations to perform, then press enter."); say("'c' - Create package (overwrite if exists), 'u' (upload to S3), 's' (skip), 'p' (make private)"); bool isBuilding = false; StringBuilder downloadPaths = new StringBuilder(); foreach (PackageDescriptor desc in packages) { desc.Path = getReleasePath(packageBase, infoVer, desc.Kind, packageHotfix); if (desc.Exists) say("\n" + Path.GetFileName(desc.Path) + " already exists"); string opts = ""; Console.Write(desc.Kind + " (" + opts + "):"); opts = Console.ReadLine().Trim(); desc.Options = opts; if (desc.Build) isBuilding = true; if (desc.Upload) { downloadPaths.AppendLine(downloadServer + Path.GetFileName(desc.Path)); } } if (downloadPaths.Length > 0){ say("Once complete, your files will be available at"); say(downloadPaths.ToString()); if (ask("Copy these to the clipboard?")) System.Windows.Clipboard.SetText(downloadPaths.ToString()); } //Get all the .nuspec packages on in the /nuget directory. 
IList<NPackageDescriptor> npackages =NPackageDescriptor.GetPackagesIn(Path.Combine(f.ParentPath,"nuget")); bool isMakingNugetPackage = false; if (ask("Create or upload NuGet packages?")) { foreach (NPackageDescriptor desc in npackages) { desc.VariableSubstitutions = GetNugetVariables(); desc.VariableSubstitutions["version"] = nugetVer; desc.Version = nugetVer; desc.OutputDirectory = Path.Combine(Path.Combine(f.ParentPath, "Releases", "nuget-packages")); if (!Directory.Exists(desc.OutputDirectory)) Directory.CreateDirectory(desc.OutputDirectory); say(Path.GetFileName(desc.PackagePath) + (desc.PackageExists ? " exists" : " not found"), desc.PackageExists ? ConsoleColor.Green : ConsoleColor.Gray); say(Path.GetFileName(desc.SymbolPackagePath) + (desc.SymbolPackageExists ? " exists" : " not found"), desc.SymbolPackageExists ? ConsoleColor.Green : (desc.PackageExists ? ConsoleColor.Red : ConsoleColor.Gray)); } say("What should we do with these packages? Enter multiple options like 'ou' "); say("r (create missing packages), c (overwrite all packages), u (upload all packages to nuget.org), i (enter interactive mode - choose per package), s (skip)"); string selection = Console.ReadLine().Trim().ToLowerInvariant(); bool interactive = selection.IndexOf('i') > -1; if (interactive) selection = selection.Replace("i",""); //Set the default for every package foreach (NPackageDescriptor desc in npackages) desc.Options = selection; //Let the user pick per package if (interactive) { foreach (NPackageDescriptor desc in npackages) { Console.Write(desc.BaseName + " (" + desc.Options + "):"); desc.Options = Console.ReadLine().Trim().ToLowerInvariant(); } } isMakingNugetPackage = npackages.Any(desc => desc.Build); } var cs = new CredentialStore(); if (downloadPaths.Length > 0) { cs.Need("S3ID", "Amazon S3 AccessKey ID"); cs.Need("S3KEY", "Amazon S3 SecretAccessKey"); } if (isMakingNugetPackage) cs.Need("NugetKey", "NuGet API Key"); cs.AcquireCredentials(); nuget.apiKey = 
cs.Get("NugetKey",null); string s3ID = cs.Get("S3ID",null); string s3Key = cs.Get("S3KEY", null); s3 = new TransferUtility(s3ID, s3Key, Amazon.RegionEndpoint.USEast1); if (!isBuilding && isMakingNugetPackage) { isBuilding = ask("You're creating 1 or more NuGet packages. Rebuild software?"); } if (isBuilding) { //1 (moved execution to 8a) bool cleanAll = ask("Clean All?"); //2 - Set version numbers (with *, if missing) string originalContents = v.Contents; //Save for checking changes. v.set("AssemblyFileVersion", v.join(fileVer, "*")); v.set("AssemblyVersion", v.join(assemblyVer, "*")); v.set("AssemblyInformationalVersion", infoVer); v.set("NugetVersion", nugetVer); v.set("Commit", "git-commit-guid-here"); v.Save(); //Save contents for reverting later string fileContents = v.Contents; //Generate hard revision number for building (so all dlls use the same number) short revision = (short)(DateTime.UtcNow.TimeOfDay.Milliseconds % short.MaxValue); //the part under 32767. Can actually go up to, 65534, but what's the point. string exactVersion = v.join(fileVer, revision.ToString()); string fullInfoVer = infoVer + (isHotfix ? ("-temp-hotfix-" + DateTime.Now.ToString("MMM-d-yyyy-htt").ToLower()) : ""); string tag = "resizer" + v.join(infoVer, revision.ToString()) + (isHotfix ? "-hotfix": ""); //3 - Prompt to commit and tag bool versionsChanged = !fileContents.Equals(originalContents); string question = versionsChanged ? "SharedAssemblyInfo.cs was modified. Commit it (and any other changes) to the repository, then hit 'y'." : "Are all changes commited? Hit 'y' to continue. The SHA-1 of HEAD will be embedded in the DLLs."; while (!ask(question)) { } if (ask("Tag HEAD with '" + tag + "'?")) g.Tag(tag); //[assembly: Commit("git-commit-guid-here")] //4 - Embed git commit value string gitCommit = g.CanExecute ? 
g.GetHeadHash() : "git-could-not-run-during-build"; v.set("Commit", gitCommit); //4b - change to hard version number for building v.set("AssemblyFileVersion", exactVersion); v.set("AssemblyVersion", exactVersion); //Add hotfix suffix for hotfixes v.set("AssemblyInformationalVersion", fullInfoVer); v.Save(); //Prepare searchersq PrepareForPackaging(); bool success = false; //Allows use to temporarily edit all the sample project files using (RestorePoint rp = new RestorePoint(q.files(new Pattern("^/Plugins/*/*.(cs|vb)proj$"), new Pattern("^/Contrib/*/*.(cs|vb)proj$")))) { //Replace all project references temporarily foreach (string pf in rp.Paths) { new ProjectFileEditor(pf).RemoveStrongNameRefs(); } //8a Clean projects if specified if (cleanAll) { CleanAll(); } //6 - if (c) was specified for any package, build all. success = BuildAll(true); //isMakingNugetPackage); } //7 - Revert file to state at commit (remove 'full' version numbers and 'commit' value) v.Contents = fileContents; v.Save(); if (!success) return; //If the build didn't go ok, pause and exit //8b - run cleanup routine RemoveUselessFiles(); //Allows use to temporarily edit all the sample project files using (RestorePoint rp = new RestorePoint(q.files(new Pattern("^/Samples/*/*.(cs|vb)proj$")))) { //Replace all project references temporarily foreach (string pf in q.files(new Pattern("^/Samples/[^/]+/*.(cs|vb)proj$"))) { new ProjectFileEditor(pf).ReplaceAllProjectReferencesWithDllReferences("..\\..\\dlls\\release").RemoveStrongNameRefs(); } //9 - Pacakge all selected zip configurations foreach (PackageDescriptor pd in packages) { if (pd.Skip || !pd.Build) continue; if (pd.Exists && pd.Build) { File.Delete(pd.Path); say("Deleted " + pd.Path); } pd.Builder(pd); //Copy to a 'tozip' version for e-mailing //File.Copy(pd.Path, pd.Path.Replace(".zip", ".tozip"), true); } } } //10 - Pacakge all nuget configurations foreach (NPackageDescriptor pd in npackages) { if (pd.Skip) continue; if (pd.Build) nuget.Pack(pd); 
} //11 - Upload all selected zip configurations foreach (PackageDescriptor pd in packages) { if (pd.Skip) continue; if (pd.Upload) { if (!pd.Exists) { say("Can't upload, file missing: " + pd.Path); continue; } var request = new TransferUtilityUploadRequest(); request.CannedACL = pd.Private ? Amazon.S3.S3CannedACL.Private : Amazon.S3.S3CannedACL.PublicRead; request.BucketName = bucketName; request.Timeout = null; request.ContentType = "application/zip"; request.Key = Path.GetFileName(pd.Path); request.FilePath = pd.Path; say("Uploading " + Path.GetFileName(pd.Path) + " to " + bucketName + " with CannedAcl:" + request.CannedACL.ToString()); bool retry = false; do { //Upload try { s3.Upload(request); } catch (Exception ex) { say("Upload failed: " + ex.Message); retry = ask("Retry upload?"); } } while (retry); say("Finished uploading " + Path.GetFileName(pd.Path)); } } //2 - Upload all nuget configurations foreach (NPackageDescriptor pd in npackages) { if (pd.Skip || !pd.Upload) continue; nuget.Push(pd); } //12 - Generate template for release notes article say("Everything is done."); }
/// <summary>
/// Uploads a stream to S3 without quota checks: resolves the MIME type from
/// the path when none is supplied, buffers the stream so an MD5 can be
/// computed, sets 5-day cache headers, uploads, invalidates CloudFront for
/// the key, and returns the public URI.
/// </summary>
public override Uri UploadWithoutQuota(string domain, string path, Stream stream, string contentType, string contentDisposition)
{
    var acl = ACL.Auto;
    using (var client = GetClient())
    {
        var util = new TransferUtility(client);
        // Fall back to a MIME type derived from the file extension.
        var mime = string.IsNullOrEmpty(contentType)
            ? MimeMapping.GetMimeMapping(Path.GetFileName(path))
            : contentType;
        // Buffer the stream so it can be both hashed (MD5) and uploaded.
        var buffered = stream.GetBuffered();
        var request = new TransferUtilityUploadRequest
        {
            BucketName = _bucket,
            Key = MakePath(domain, path),
            // ACL.Auto resolves to the domain's configured ACL.
            CannedACL = acl == ACL.Auto ? GetDomainACL(domain) : GetS3Acl(acl),
            ContentType = mime,
            InputStream = buffered,
            // Caller keeps ownership of the buffered stream.
            AutoCloseStream = false,
            Headers =
            {
                // NOTE(review): "maxage" is not a valid Cache-Control directive
                // (the standard spells it "max-age") — confirm before changing,
                // since this string is emitted to clients as-is.
                CacheControl = string.Format("public, maxage={0}", (int)TimeSpan.FromDays(5).TotalSeconds),
                ContentMD5 = Hasher.Base64Hash(buffered.GetCorrectBuffer(), HashAlg.MD5),
                Expires = DateTime.UtcNow.Add(TimeSpan.FromDays(5))
            }
        };
        if (!string.IsNullOrEmpty(contentDisposition))
        {
            request.Headers.ContentDisposition = Uri.EscapeDataString(contentDisposition);
        }
        else if (mime == "application/octet-stream")
        {
            // Force download for generic binary content.
            request.Headers.ContentDisposition = "attachment";
        }
        util.Upload(request);
        // Purge any cached copy so the new content is served immediately.
        InvalidateCloudFront(MakePath(domain, path));
        return GetUri(domain, path);
    }
}
/// <summary>
/// Uploads a stream to the "sabio-training" bucket under the given key with a
/// public-read ACL.
/// </summary>
/// <param name="file">Content to upload.</param>
/// <param name="keyName">Object key in the bucket.</param>
public void UploadToAWS(System.IO.Stream file, string keyName)
{
    string existingBucketName = "sabio-training";
    // FIX: dispose both the client and the transfer utility — the original
    // leaked both for every call.
    using (var s3Client = new AmazonS3Client(_awsAccess, _awsSecretAccess, Amazon.RegionEndpoint.USWest2))
    using (var fileTransferUtility = new TransferUtility(s3Client))
    {
        var fileTransferUtilityRequest = new TransferUtilityUploadRequest
        {
            BucketName = existingBucketName,
            Key = keyName,
            CannedACL = S3CannedACL.PublicRead,
            InputStream = file
        };
        fileTransferUtility.Upload(fileTransferUtilityRequest);
    }
}
/// <summary>
/// Allows a file to be put into the root of the bucket.
/// Could be enhanced to specify a sub folder path, but I don't need that for the current use.
/// </summary>
/// <returns>url to the uploaded file</returns>
public string UploadSingleFile(string pathToFile, IProgress progress)
{
    using (var transferUtility = new TransferUtility(GetAmazonS3(_bucketName)))
    {
        var uploadRequest = new TransferUtilityUploadRequest
        {
            BucketName = _bucketName,
            FilePath = pathToFile,
            Key = Path.GetFileName(pathToFile),
            CannedACL = S3CannedACL.PublicRead // Allows any browser to download it.
        };
        progress.WriteStatus("Uploading book to Bloom Support...");
        transferUtility.Upload(uploadRequest);
        return "https://s3.amazonaws.com/" + _bucketName + "/" + HttpUtility.UrlEncode(uploadRequest.Key);
    }
}
/// <summary>
/// Accepts a multipart form upload, pushes the first posted file to S3 (us-west-2)
/// under a GUID-prefixed key, records a photo row for the current user, and returns
/// 201 Created with the remote path. With no files posted, returns 400 Bad Request.
/// </summary>
public HttpResponseMessage UploadGalleryPhoto()
{
    HttpRequest httpRequest = HttpContext.Current.Request;
    TransferUtility fileTransferUtility = new TransferUtility(
        new AmazonS3Client(ConfigService.AwsAccessKeyId, ConfigService.AwsSecretAccessKey, Amazon.RegionEndpoint.USWest2));
    if (httpRequest.Files.Count == 0)
    {
        return Request.CreateResponse(HttpStatusCode.BadRequest);
    }
    foreach (string fileField in httpRequest.Files)
    {
        HttpPostedFile postedFile = httpRequest.Files[fileField];
        // GUID prefix keeps keys unique even when users upload identically named files.
        string remoteFilePath = ConfigService.RemoteFilePath + Guid.NewGuid().ToString() + "_" + postedFile.FileName;
        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest
        {
            BucketName = ConfigService.BucketName,
            InputStream = postedFile.InputStream,
            Key = remoteFilePath,
        };
        fileTransferUtility.Upload(uploadRequest);
        // Model binding by hand: create the photo record for the current user.
        PhotosAdd model = new PhotosAdd();
        model.userId = UserService.GetCurrentUserId();
        model.URL = "/" + remoteFilePath;
        PhotosService.PhotosInsert(model);
        ItemResponse<string> response = new ItemResponse<string>();
        response.Item = remoteFilePath;
        // NOTE: only the first posted file is processed; we return inside the loop.
        return Request.CreateResponse(HttpStatusCode.Created, response.Item);
    }
    // Mirrors the original flow: Files.Count > 0 with an empty enumeration yields null.
    return null;
}
/// <summary>
/// Uploads the backup set to Aws, firing TransferStart/TransferProgress/TransferComplete
/// events with snapshots of the transfer state.
/// </summary>
/// <param name="path">The path of the compressed backup to upload.</param>
public void UploadBackup(string path)
{
    string fileName = Path.GetFileName(path);
    // Prepend the configured prefix (pseudo-folder), adding a "/" only when needed.
    if (!String.IsNullOrEmpty(this.Target.AwsPrefix))
    {
        fileName = this.Target.AwsPrefix.EndsWith("/", StringComparison.Ordinal)
            ? this.Target.AwsPrefix + fileName
            : this.Target.AwsPrefix + "/" + fileName;
    }
    TransferInfo info = new TransferInfo()
    {
        BytesTransferred = 0,
        FileName = fileName,
        FileSize = new FileInfo(path).Length
    };
    using (TransferUtility transfer = new TransferUtility(S3Client))
    {
        // Object-initializer style for consistency with the rest of this file
        // (replaces the deprecated WithXxx fluent setters).
        TransferUtilityUploadRequest request = new TransferUtilityUploadRequest
        {
            CannedACL = S3CannedACL.Private,
            BucketName = AwsConfig.BucketName,
            FilePath = path,
            Key = fileName,
            Timeout = -1 // no timeout: backups can be very large
        };
        request.UploadProgressEvent += (sender, e) =>
        {
            info.BytesTransferred = e.TransferredBytes;
            // Fire with a copy so subscribers cannot mutate our tracking instance.
            this.Fire(this.TransferProgress, new TransferInfo(info));
        };
        this.Fire(this.TransferStart, new TransferInfo(info));
        transfer.Upload(request);
    }
    this.Fire(this.TransferComplete, new TransferInfo(info));
}
/// <summary>
/// Serializes the feed to UTF-8 XML in memory and uploads it to the configured
/// S3 bucket as a public-read object.
/// </summary>
/// <param name="feedFormatter">Formatter that writes the syndication feed.</param>
public void Publish(SyndicationFeedFormatter feedFormatter)
{
    var config = new AmazonS3Config
    {
        Timeout = TimeSpan.FromMinutes(5),
        ReadWriteTimeout = TimeSpan.FromMinutes(5),
        RegionEndpoint = RegionEndpoint.GetBySystemName(_s3PublisherSettings.Region)
    };
    IAmazonS3 s3Client = new AmazonS3Client(_s3PublisherSettings.AccessKey, _s3PublisherSettings.SecretKey, config);
    var memStream = new MemoryStream();
    var settings = new XmlWriterSettings() { Encoding = Encoding.UTF8 };
    using (var writer = XmlWriter.Create(memStream, settings))
    {
        feedFormatter.WriteTo(writer);
    } // disposing the writer flushes all buffered XML into memStream
    // BUG FIX: rewind before uploading. After writing, Position sits at the end of
    // the stream, so the transfer would otherwise read nothing and upload 0 bytes.
    memStream.Position = 0;
    using (var transferUtility = new TransferUtility(s3Client))
    {
        var uploadRequest = new TransferUtilityUploadRequest()
        {
            AutoCloseStream = true,
            BucketName = _s3PublisherSettings.BucketName,
            // Adding datetime for debugging purposes only.
            // In order for this to take effect change the config file to something like this
            // <add key="S3Publisher.FileName" value="rareburg.articles.{0}.rss" />
            Key = string.Format(_s3PublisherSettings.FileName, DateTime.Now.ToString("yyyyMMddHHmmss")),
            ContentType = string.Format("application/{0}+xml", _feedSettings.FeedFormat),
            CannedACL = S3CannedACL.PublicRead,
            InputStream = memStream
        };
        transferUtility.Upload(uploadRequest);
    }
}
// Uploads every file in this.files to the user's contracted S3 bucket (sa-east-1),
// logs each successful upload with the back end via HttpComm, then moves the local
// file into the configured local "done" folder. Runs off the UI thread, so all
// WinForms control updates go through Invoke. UI strings are in Portuguese.
private void sendFilesToBucket() {
    string bucket = this.userData.contract.bucketName;
    string targetFolder = this.configs.targetCloudFolder;
    // A single-space target folder means "bucket root"; otherwise it becomes a key prefix.
    string key = targetFolder.Equals(" ") ? "" : targetFolder+"/";
    decimal progress = 0;
    decimal filesSent = 0;
    decimal totalFiles = (decimal)files.Count;
    int currentProgress = 0;
    foreach(String file in this.files) {
        // "Sending file X of Y" status label.
        if (sendFileLabel.InvokeRequired) {
            sendFileLabel.Invoke(new MethodInvoker(delegate { sendFileLabel.Text = "Enviando arquivo "+(filesSent+1)+" de "+totalFiles; }));
        }
        // Last path segment becomes the S3 object name.
        String[] fileData = file.Split('\\');
        String fileName = fileData[(fileData.Length - 1)];
        // NOTE(review): a fresh client/TransferUtility is created per file; credentials
        // presumably come from the SDK's default credential chain — confirm.
        TransferUtility fileTransferUtility = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.SAEast1));
        try {
            updateSendStatus("Enviando arquivo...");
            TransferUtilityUploadRequest fileTransferUtilityRequest = new TransferUtilityUploadRequest {
                BucketName = bucket,
                FilePath = file,
                StorageClass = S3StorageClass.Standard,
                Key = key+fileName,
                CannedACL = S3CannedACL.Private,
            };
            // Contract flag > 0 enables AES-256 server-side encryption.
            if(this.userData.contract.encryption > 0) {
                fileTransferUtilityRequest.ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256;
            }
            fileTransferUtility.Upload(fileTransferUtilityRequest);
            // Record the upload with the gateway; any status other than "OK" is shown verbatim.
            updateSendStatus("Gerando log de envio");
            string status = HttpComm.newFileUploaded(this.userData.token, key, file, this.configs.consultGateway);
            if(status.Equals("OK")) {
                updateSendStatus("Arquivo enviado com sucesso");
            } else {
                updateSendStatus(status);
            }
            // Advance the progress bar (percentage of files completed, not bytes).
            filesSent++;
            progress = filesSent / totalFiles * 100;
            currentProgress = Convert.ToInt32(progress);
            if(sendFileProgress.InvokeRequired) {
                sendFileProgress.Invoke(new MethodInvoker(delegate{sendFileProgress.Value = currentProgress;}));
            }
        } catch (Exception exception) {
            // Upload failures are reported to the user but do not stop the batch.
            this.updateDialogLabel("Erro ao enviar o arquivo para nuvem");
        }
        try {
            // Move the local file from the source folder to the local target folder.
            // NOTE(review): this runs even when the upload above failed — confirm intended.
            string[] filePath = file.Split('\\');
            int last = filePath.Length - 1;
            string from = this.configs.sourceFolder + "\\" + filePath[last];
            string to = this.configs.targetLocalFolder + "\\" + filePath[last];
            File.Move(from, to);
        } catch(Exception e) {
            this.updateDialogLabel("Impossível mover o arquivo para a pasta destino");
        }
        // Cooperative cancellation flag, checked between files.
        if (!keepAlive) break;
    }
    if(sendFileLabel.InvokeRequired) {
        sendFileLabel.Invoke(new MethodInvoker(delegate { sendFileLabel.Text = "Envio realizado com sucesso"; }));
    }
    if(button3.InvokeRequired) {
        button3.Invoke(new MethodInvoker(delegate { button3.Enabled = true; }));
    }
}
/// <summary>
/// Uploads a file to the cloud service and associates it with the meta data provided. To use upload
/// feature make sure configuration provider has AmazonBucket specified.
/// </summary>
/// <param name="file">Stream to the content that is to be uploaded.</param>
/// <param name="newFileName">Name of the file on cloud.</param>
/// <param name="metaData">Any meta data you want to be associated with the file.</param>
/// <exception cref="CloudServiceException">Thrown for invalid arguments or any upload failure.</exception>
public void UploadFile(Stream file, string newFileName, NameValueCollection metaData)
{
    try
    {
        if (file == null || file.Length == 0)
        {
            throw new CloudServiceException("Error occured when uploading file. The stream provided is invalid.");
        }
        else
        {
            // Rewind so the whole stream is uploaded regardless of prior reads.
            if (file.CanSeek) file.Seek(0, SeekOrigin.Begin);
        }
        if (string.IsNullOrEmpty(newFileName))
        {
            throw new CloudServiceException("Error occured when uploading file. New file key must be provided.");
        }
        using (AmazonS3 s3 = AWSClientFactory.CreateAmazonS3Client(this.cloudServiceConfigProvider.AWSAccessKeyId, this.cloudServiceConfigProvider.AWSSecretKey))
        {
            TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest()
                .WithBucketName(this.cloudServiceConfigProvider.AmazonBucket)
                .WithAutoCloseStream(true)
                .WithMetadata(metaData)
                .WithKey(newFileName);
            uploadRequest.InputStream = file;
            using (TransferUtility transferUtility = new TransferUtility(s3))
            {
                transferUtility.Upload(uploadRequest);
            }
        }
    }
    catch (AmazonS3Exception s3Exception)
    {
        throw new CloudServiceException(s3Exception, "Error occured when uploading file.");
    }
    catch (CloudServiceException)
    {
        // BUG FIX: rethrow with `throw;` to preserve the original stack trace
        // (the previous `throw apex;` reset it).
        throw;
    }
    catch (Exception ex)
    {
        throw new CloudServiceException(ex, "Error occured when uploading file.");
    }
}
/// <summary>
/// The weird thing here is that S3 doesn't really have folders, but you can give it a key like
/// "collection/book2/file3.htm" and it will name it that, and gui client apps then treat that like
/// a folder structure, so you feel like there are folders.
/// </summary>
/// <param name="prefix">Key prefix (pseudo-folder path) the directory's files are uploaded under.</param>
/// <param name="directoryPath">Local directory to upload; recursed into for subdirectories.</param>
/// <param name="progress">Status sink; also polled for cancellation between files.</param>
private void UploadDirectory(string prefix, string directoryPath, IProgress progress)
{
    if (!Directory.Exists(directoryPath))
    {
        throw new DirectoryNotFoundException(
            "Source directory does not exist or could not be found: " + directoryPath);
    }
    prefix = prefix + Path.GetFileName(directoryPath) + kDirectoryDelimeterForS3;
    var filesToUpload = Directory.GetFiles(directoryPath);
    // Remember the url that can be used to download files like thumbnails and preview.pdf.
    // This method gets called for the root directory (ending in guid), the main directory
    // (ending in book name), and subdirectories. We want to keep the one that ends in the
    // book name...the main root directory. This should be the first non-empty directory we
    // are passed (the root only has a folder in it).
    // (FIX: removed a stray second semicolon that ended the original statement.)
    if (BaseUrl == null && filesToUpload.Length > 0)
        BaseUrl = "https://s3.amazonaws.com/" + _bucketName + "/" + HttpUtility.UrlEncode(prefix);
    using (var transferUtility = new TransferUtility(_amazonS3))
    {
        foreach (string file in filesToUpload)
        {
            var fileName = Path.GetFileName(file);
            if (excludedFileExtensionsLowerCase.Contains(Path.GetExtension(fileName.ToLowerInvariant())))
                continue; // BL-2246: skip uploading this one
            var request = new TransferUtilityUploadRequest()
            {
                BucketName = _bucketName,
                FilePath = file,
                Key = prefix + fileName
            };
            // Navigating to the file's URL is treated as a download for everything but PDFs,
            // which we want browsable (the Preview button of BloomLibrary navigates to them).
            // Setting an explicit filename in the disposition would be nice (AWS key names are
            // not useful), but AWSSDK can't cope with non-ascii filenames in that header — the
            // header becomes a web-request header, and those allow only ascii. If filename
            // support is added back, test with a non-ascii book title.
            if (Path.GetExtension(file).ToLowerInvariant() != ".pdf")
                request.Headers.ContentDisposition = "attachment";
            request.CannedACL = S3CannedACL.PublicRead; // Allows any browser to download it.
            progress.WriteStatus(LocalizationManager.GetString("PublishTab.Upload.UploadingStatus", "Uploading {0}"), fileName);
            if (progress.CancelRequested)
                return;
            // FIX: removed a pointless try { ... } catch (Exception e) { throw; } wrapper
            // (the unused variable and bare rethrow added nothing).
            transferUtility.Upload(request);
            if (fileName.EndsWith(BookTransfer.BookOrderExtension))
            {
                // Remember the url that can be used to download the book. This seems to work but
                // I wish I could find a way to get a definitive URL from the response to
                // UploadPart or some similar way.
                BookOrderUrlOfRecentUpload = BloomLinkArgs.kBloomUrlPrefix + BloomLinkArgs.kOrderFile + "=" +
                    _bucketName + "/" + HttpUtility.UrlEncode(prefix + fileName);
            }
        }
        foreach (string subdir in Directory.GetDirectories(directoryPath))
        {
            UploadDirectory(prefix, subdir, progress);
            if (progress.CancelRequested)
                return;
        }
    }
}
/// <summary>
/// Uploads a local file to S3 under <paramref name="key"/> with public-read access.
/// HTML/XML files go to the web-app bucket; everything else goes to the image bucket.
/// </summary>
/// <param name="State">Unused legacy state bag (kept for caller compatibility).</param>
/// <param name="file_name">File name used only to choose the destination bucket.</param>
/// <param name="local_file_path">Path of the local file to upload.</param>
/// <param name="key">Destination S3 object key.</param>
/// <returns>The object's public URL on success, or the exception message + stack trace on S3 failure.</returns>
public string UploadFileWithKey(Hashtable State, string file_name, string local_file_path, string key)
{
    string AWSAccessKey = ConfigurationManager.AppSettings["AWSAccessKey"];
    string AWSSecretKey = ConfigurationManager.AppSettings["AWSSecretKey"];
    string Bucket = null;
    if (file_name.EndsWith(".html") || file_name.EndsWith(".xml"))
        Bucket = ConfigurationManager.AppSettings["WebAppBucket"];
    else
        Bucket = ConfigurationManager.AppSettings["ImageBucket"];
    string url = "https://s3.amazonaws.com/" + Bucket + "/" + key;
    // FIX: TransferUtility is IDisposable; dispose it so the underlying client
    // and its connections are released (previously leaked).
    using (TransferUtility transferUtility = new TransferUtility(AWSAccessKey, AWSSecretKey))
    {
        try
        {
            // Make sure the bucket exists.
            transferUtility.S3Client.PutBucket(new PutBucketRequest().WithBucketName(Bucket));
            // Replace any existing object at this key.
            if (S3ObjectExists(Bucket, key))
            {
                DeleteS3Object(Bucket, key);
            }
            TransferUtilityUploadRequest request = new TransferUtilityUploadRequest()
                .WithBucketName(Bucket)
                .WithFilePath(local_file_path)
                .WithTimeout(ONE_MINUTE)
                .WithKey(key)
                .WithCannedACL(S3CannedACL.PublicRead);
            transferUtility.Upload(request);
        }
        catch (AmazonS3Exception ex)
        {
            // Preserves the legacy contract: S3 failures are reported via the return value.
            return ex.Message + ": " + ex.StackTrace;
        }
    }
    return url;
}
/// <summary>
/// Command-line S3 uploader: parses options, expands "__unix_timestamp__" in the key,
/// and uploads the file, optionally public and with console progress reporting.
/// Exits with code 1 on bad arguments or upload failure.
/// </summary>
public static void Main(string[] args)
{
    var options = new CommandLineOptions();
    var parser = new CommandLineParser();
    if (!parser.ParseArguments(args, options))
    {
        Console.WriteLine(options.GetUsage());
        Environment.Exit(1);
        return;
    }
    // Whole seconds since the Unix epoch, substituted into the key template.
    var epoch = new DateTime(1970, 1, 1, 0, 0, 0);
    var unixSeconds = Math.Round((DateTime.UtcNow - epoch).TotalSeconds);
    var objectKey = options.Key.Replace("__unix_timestamp__", unixSeconds.ToString());
    Console.WriteLine("Using key <{0}>", objectKey);
    try
    {
        var s3Client = new AmazonS3Client(options.AccessKey, options.SecretKey);
        var transferUtility = new TransferUtility(s3Client);
        var transferRequest = new TransferUtilityUploadRequest()
            .WithBucketName(options.Bucket)
            .WithFilePath(options.File)
            .WithKey(objectKey);
        if (options.IsPublic)
        {
            transferRequest.WithCannedACL(S3CannedACL.PublicRead);
        }
        if (options.ShouldShowProgress)
        {
            Console.WriteLine();
            // Overwrite the same console line with each progress update.
            transferRequest.WithSubscriber(new EventHandler<UploadProgressArgs>((obj, progress) =>
            {
                Console.Write("\r{0}% complete. Uploaded {1} / {2} bytes. ", progress.PercentDone, progress.TransferredBytes, progress.TotalBytes);
            }));
        }
        transferUtility.Upload(transferRequest);
        Console.WriteLine();
        Console.WriteLine("Done!");
    }
    catch (Exception e)
    {
        Console.WriteLine("Error uploading: {0}", e.Message);
        Environment.Exit(1);
    }
}