/// <summary>
/// Uploads the given stream to S3 as a publicly readable object.
/// </summary>
/// <param name="bucketName">Destination bucket.</param>
/// <param name="uploadFileStream">Content to upload.</param>
/// <param name="remoteFileName">Key under which the object is stored.</param>
public void UploadFile(string bucketName, Stream uploadFileStream, string remoteFileName)
{
    // NOTE(review): this assigns the shared 'client' field inside the using, so the
    // field references a disposed client after this method returns — confirm no
    // other code reuses the field afterwards.
    using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKeyID, secretAccessKeyID, config))
    {
        try
        {
            PutObjectRequest request = new PutObjectRequest();
            request.WithBucketName(bucketName)
                .WithCannedACL(S3CannedACL.PublicRead)
                .WithKey(remoteFileName)
                .WithInputStream(uploadFileStream);
            using (S3Response response = client.PutObject(request))
            {
                WebHeaderCollection headers = response.Headers;
                foreach (string key in headers.Keys)
                {
                    //log headers ("Response Header: {0}, Value: {1}", key, headers.Get(key));
                }
            }
        }
        catch (AmazonS3Exception amazonS3Exception)
        {
            // These two S3 error codes indicate bad credentials rather than a
            // problem with the object itself.
            if (amazonS3Exception.ErrorCode != null &&
                (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") ||
                 amazonS3Exception.ErrorCode.Equals("InvalidSecurity")))
            {
                //log exception - ("Please check the provided AWS Credentials.");
            }
            else
            {
                //log exception -("An error occurred with the message '{0}' when writing an object", amazonS3Exception.Message);
            }
        }
    }
}
/// <summary>
/// Demo driver: when the required configuration is present, runs each of the
/// basic S3 operations in order, announcing every step on the console.
/// </summary>
public static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.USWest2))
        {
            // Each demo step paired with the message printed before it runs.
            var steps = new[]
            {
                new { Label = "Listing buckets", Run = (Action)ListingBuckets },
                new { Label = "Creating a bucket", Run = (Action)CreateABucket },
                new { Label = "Writing an object", Run = (Action)WritingAnObject },
                new { Label = "Reading an object", Run = (Action)ReadingAnObject },
                new { Label = "Deleting an object", Run = (Action)DeletingAnObject },
                new { Label = "Listing objects", Run = (Action)ListingObjects },
            };
            foreach (var step in steps)
            {
                Console.WriteLine(step.Label);
                step.Run();
            }
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
/// <summary>
/// Enumerates storage items from whichever backend is configured:
/// an Azure blob container, an Amazon S3 bucket, or the local file system.
/// </summary>
/// <param name="container">Azure container; used when non-null.</param>
/// <param name="client">S3 client; used when container is null and this is non-null.</param>
/// <param name="path">S3 bucket name, or local root path for the file-system fallback.</param>
/// <returns>Storage Items</returns>
private IEnumerable <IStorageItem> GetItems(CloudBlobContainer container, AmazonS3 client, string path)
{
    if (null != container)
    {
        var options = new BlobRequestOptions()
        {
            UseFlatBlobListing = true,
        };
        // NOTE(review): this query is deferred — Exists() issues a call per item on
        // every enumeration; consider materializing if callers enumerate it twice.
        return(container.ListBlobs(options).Select(b => new Azure(container, b.Uri.ToString())).Where(c => c.Exists()));
    }
    else if (null != client)
    {
        var request = new ListObjectsRequest()
        {
            BucketName = path,
        };
        using (var response = client.ListObjects(request))
        {
            // NOTE(review): the projection is deferred and will be enumerated after
            // the response has been disposed — confirm S3Objects is a fully
            // materialized collection in this SDK version.
            return(response.S3Objects.Select(s3 => new S3(client, path, s3.Key, s3.ETag)));
        }
    }
    else
    {
        return(this.GetFiles(path, path, new List <IStorageItem>()));
    }
}
/// <summary>
/// Downloads an existing photo from S3, then hands its stream to
/// ResizeAndUpload to resize and store it under a new key.
/// </summary>
public static void ResizeImageAndUpload(AmazonS3 anAmazonS3Client, string aBucketName, string aCurrentPhotoName, string aNewImageName, int aSize)
{
    GetObjectRequest myGetRequest = new GetObjectRequest().WithBucketName(aBucketName).WithKey(aCurrentPhotoName);
    // NOTE(review): myResponse is disposable but never disposed here — presumably
    // because disposing it would close ResponseStream before ResizeAndUpload reads
    // it. Confirm the stream/response are cleaned up downstream.
    GetObjectResponse myResponse = anAmazonS3Client.GetObject(myGetRequest);
    Stream myStream = myResponse.ResponseStream;
    ResizeAndUpload(myStream, anAmazonS3Client, aBucketName, aNewImageName, aSize);
}
/// <summary>
/// Lists all S3 versions for the given object key in the configured bucket.
/// </summary>
/// <param name="sObjectKey">Object key used as the key marker for the listing.</param>
/// <returns>The raw ListVersions response from S3.</returns>
public ListVersionsResponse MssListFileVersions(string sObjectKey)
{
    string BUCKET_NAME = ConfigurationManager.AppSettings["AWSBUCKET"];
    // Fix: dispose the client when done; the original leaked it.
    using (AmazonS3 client = AWSClientFactory.CreateAmazonS3Client(S3ACCESSKEY, S3SECRETKEY))
    {
        return client.ListVersions(new ListVersionsRequest().WithBucketName(BUCKET_NAME).WithKeyMarker(sObjectKey));
    }
}
/// <summary>
/// Checks whether exactly one object with the given file name exists at the
/// top level of the configured bucket (the folder argument is ignored).
/// </summary>
public bool FileExists(string folderName, string fileName)
{
    //folder ignored - packages stored on top level of S3 bucket
    if (String.IsNullOrWhiteSpace(folderName))
    {
        throw new ArgumentNullException("folderName");
    }
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }

    var request = new ListObjectsRequest();
    request.WithBucketName(clientContext.BucketName);
    request.WithPrefix(fileName);

    using (AmazonS3 client = clientContext.CreateInstance())
    {
        ListObjectsResponse response = WrapRequestInErrorHandler(() => client.ListObjects(request));
        // Prefix listing: exactly one hit means the file is present.
        return response.S3Objects.Count == 1;
    }
}
// Test-only constructor: lets callers inject a fake S3 client and file loader.
public S3FileSystem(AmazonS3 s3Client, Func<string, IFileStreamWrap> fileLoader)
{
    FileLoader = fileLoader;
    S3Client = s3Client;
    TransferUtility = new TransferUtility(s3Client);
    Logger = new TraceLogger();
}
// Production constructor: builds a real S3 client from the supplied credentials.
public S3FileSystem(IPsCmdletLogger logger, string accessKey, string secret, AmazonS3Config config)
{
    // Fall back to trace logging when no logger is supplied.
    Logger = logger ?? new TraceLogger();
    S3Client = new AmazonS3Client(accessKey, secret, config);
    TransferUtility = new TransferUtility(S3Client);
    // Files are opened read/write for upload.
    FileLoader = fullName => new FileWrap().Open(fullName, FileMode.Open, FileAccess.ReadWrite);
}
/// <summary>
/// Loads a teacher by id and school, including the S3 endpoint for the
/// teacher's image. Returns an empty Teacher when no row matches.
/// </summary>
/// <param name="teacherID">Teacher primary key.</param>
/// <param name="schoolID">School the teacher belongs to.</param>
public Teacher GetByTeacherID(int teacherID, int schoolID)
{
    Teacher objUser = new Teacher();
    // Fixes vs. original: connection/command/reader are disposed on every path
    // (the success path leaked them), and exceptions propagate with their
    // original stack trace instead of being rethrown via "throw ex".
    using (SqlConnection conn = new SqlConnection(Functions.GetConnectionString()))
    using (SqlCommand cmdUser = new SqlCommand("******", conn))
    {
        cmdUser.CommandType = CommandType.StoredProcedure;

        SqlParameter prmteacherID = new SqlParameter();
        prmteacherID.ParameterName = "@teacherID";
        prmteacherID.SqlDbType = SqlDbType.Int;
        prmteacherID.Value = teacherID;
        cmdUser.Parameters.Add(prmteacherID);

        SqlParameter prmSchoolID = new SqlParameter();
        prmSchoolID.ParameterName = "@schoolID";
        prmSchoolID.SqlDbType = SqlDbType.Int;
        prmSchoolID.Value = schoolID;
        cmdUser.Parameters.Add(prmSchoolID);

        conn.Open();
        using (SqlDataReader drUsers = cmdUser.ExecuteReader())
        {
            // Factory-provided S3 wrapper used to resolve the image endpoint.
            AmazonS3 s3 = AmazonS3Factory.setAmazonS3();
            if (drUsers.Read())
            {
                objUser.teacherID = drUsers.GetInt32(drUsers.GetOrdinal("TeacherID"));
                objUser.name = drUsers.GetString(drUsers.GetOrdinal("Name"));
                objUser.shortName = drUsers.GetString(drUsers.GetOrdinal("shortName"));
                objUser.roleID = drUsers.GetInt32(drUsers.GetOrdinal("RoleID"));
                objUser.schoolID = drUsers.GetInt32(drUsers.GetOrdinal("SchoolID"));
                objUser.Dni = drUsers.GetString(drUsers.GetOrdinal("Dni"));
                objUser.ImageKey = drUsers.GetString(drUsers.GetOrdinal("ImageKey"));
                objUser.endPoint = s3.getTeacherEndPoint(objUser.schoolID, objUser.ImageKey);
            }
        }
    }
    return objUser;
}
/// <summary>
/// A Wrapper for the AWS.net SDK
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when either credential row is missing from the Key store (the
/// original crashed with a NullReferenceException in that case).
/// </exception>
public S3Storage()
{
    var accessKeyRecord = _dbContext.Key.SingleOrDefault(k => k.Name == "AccessKeyId");
    var secretKeyRecord = _dbContext.Key.SingleOrDefault(k => k.Name == "SecretAccessKey");
    if (accessKeyRecord == null || secretKeyRecord == null)
    {
        throw new InvalidOperationException("AWS credentials (AccessKeyId/SecretAccessKey) are missing from the Key store.");
    }
    _client = AWSClientFactory.CreateAmazonS3Client(accessKeyRecord.Data, secretKeyRecord.Data, RegionEndpoint.USEast1);
}
/// <summary>
/// Counts the objects under a prefix in the bucket named cmap + "0",
/// excluding the folder placeholder object itself.
/// </summary>
/// <param name="s3Client">S3 client.</param>
/// <param name="cmap">Bucket name stem; "0" is appended.</param>
/// <param name="clipping">Key prefix to count under.</param>
/// <returns>The object count minus one, as a string; "0" when nothing matched.</returns>
public static string SliceNumbers(AmazonS3 s3Client, string cmap, string clipping)
{
    ListObjectsRequest Lor = new ListObjectsRequest()
    {
        BucketName = cmap + "0",
        Prefix = clipping
        //with Delimiter is '/', it will not get folder. {we need just count files in a bucket which are not forlsers!
        //Delimiter= "/"
    };
    // Fix: dispose the listing response; the original leaked it.
    using (ListObjectsResponse response1 = s3Client.ListObjects(Lor))
    {
        int sliceCount = response1.S3Objects.Count - 1;
        return sliceCount < 0 ? "0" : sliceCount.ToString();
    }
}
// Creates the bucket this instance is configured for.
private void CreateBucket(AmazonS3 client)
{
    var request = new PutBucketRequest { BucketName = this.BucketName };
    client.PutBucket(request);
}
/// <summary>
/// Sums the sizes of all objects under a folder prefix, following the
/// truncated-listing marker across pages.
/// </summary>
/// <param name="client">S3 client.</param>
/// <param name="folderPath">Key prefix to measure.</param>
/// <param name="bucketName">Bucket to list.</param>
/// <returns>Total size in bytes of all matching objects.</returns>
protected virtual long GetFolderSize(AmazonS3 client, string folderPath, string bucketName)
{
    ListObjectsRequest request = new ListObjectsRequest();
    request.WithBucketName(bucketName);
    request.WithPrefix(folderPath);
    long total = 0;
    do
    {
        ListObjectsResponse response = client.ListObjects(request);
        // Fix: the original null-checked 'response' only around the Sum and then
        // dereferenced response.IsTruncated unconditionally, which would throw
        // NullReferenceException. Treat a null response as end of listing.
        if (response == null)
        {
            break;
        }
        if (response.S3Objects != null)
        {
            total += response.S3Objects.Sum(s => s.Size);
        }
        if (response.IsTruncated)
        {
            // Continue from where this page left off.
            request.Marker = response.NextMarker;
        }
        else
        {
            request = null;
        }
    } while (request != null);
    return total;
}
// Wraps an S3 client scoped to a single bucket and optional root key prefix.
public S3StorageProvider(AmazonS3 s3, string bucket, string rootKey = null)
{
    _rootKey = SanitizeKey(rootKey ?? string.Empty);
    _bucket = bucket;
    _s3 = s3;
    // Default concurrency for multi-stream transfers.
    MaxParallelStreams = 4;
}
/// <summary>
/// Saves the bitmap as PNG into memory, uploads it to S3, and returns a
/// pre-signed GET URL (valid 24 months) for the uploaded image.
/// </summary>
/// <param name="addr">Cell address used to derive the image name.</param>
/// <param name="b">Bitmap to upload.</param>
/// <param name="secret">AWS secret key paired with the stored access key id.</param>
/// <returns>Pre-signed URL for the uploaded object.</returns>
public string UploadImageToS3(AST.Address addr, Bitmap b, string secret)
{
    // convert Bitmap to MemoryStream
    MemoryStream stream = new MemoryStream();
    b.Save(stream, System.Drawing.Imaging.ImageFormat.Png);
    // Fix: Bitmap.Save leaves the stream positioned at the end; rewind so the
    // upload sends the PNG bytes rather than an empty payload.
    stream.Position = 0;
    // the image name is the md
    var imagename = GetImageName(addr);
    // the url to the bitmap
    string url;
    // upload MemoryStream to S3
    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(_id, secret))
    {
        // generate url
        GetPreSignedUrlRequest request = new GetPreSignedUrlRequest()
        {
            BucketName = _s3bucket,
            Key = imagename,
            Verb = HttpVerb.GET,
            Expires = DateTime.Now.AddMonths(24)
        };
        url = client.GetPreSignedURL(request);
        // upload image
        var tu = new Amazon.S3.Transfer.TransferUtility(client);
        tu.Upload(stream, _s3bucket, imagename);
    }
    return(url);
}
/// <summary>
/// Checks whether exactly one object exists whose key is the (normalized)
/// folder name plus the file name.
/// </summary>
public bool file_exists(string folderName, string fileName)
{
    // It's allowed to have an empty folder name.
    // if (String.IsNullOrWhiteSpace(folderName)) throw new ArgumentNullException("folderName");
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }

    // Normalize the folder to either empty or a prefix ending in exactly one '/'.
    if (string.IsNullOrEmpty(folderName))
    {
        folderName = String.Empty;
    }
    else if (folderName[folderName.Length - 1] != '/')
    {
        folderName = folderName + "/";
    }
    fileName = folderName + fileName;

    var request = new ListObjectsRequest();
    request.WithBucketName(clientContext.BucketName);
    request.WithPrefix(fileName);

    using (AmazonS3 client = clientContext.create_instance())
    {
        ListObjectsResponse response = wrap_request_in_error_handler(() => client.ListObjects(request));
        return response.S3Objects.Count == 1;
    }
}
/// <summary>
/// Demo driver: reads AWS credentials from app settings, then exercises the
/// basic S3 operations (list/create bucket, write/read/delete/list objects).
/// </summary>
public static void Main(string[] args)
{
    NameValueCollection appConfig = ConfigurationManager.AppSettings;
    accessKeyID = appConfig["AWSAccessKey"];
    secretAccessKeyID = appConfig["AWSSecretKey"];
    // The shared 'client' field is created here and disposed when the demo ends.
    using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKeyID, secretAccessKeyID))
    {
        Console.WriteLine("Listing buckets");
        ListingBuckets();
        Console.WriteLine("Creating a bucket");
        CreateABucket();
        Console.WriteLine("Writing an object");
        WritingAnObject();
        Console.WriteLine("Reading an object");
        ReadingAnObject();
        Console.WriteLine("Deleting an object");
        DeletingAnObject();
        Console.WriteLine("Listing objects");
        ListingObjects();
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
// Verifies that listing a bucket returns exactly the files uploaded to it.
public async Task S3Service_Should_List_Files_In_A_Bucket()
{
    // Arrange: a fresh bucket with a known set of uploaded files.
    var bucketName = Guid.NewGuid().ToString();
    await CreateTestBucket(bucketName);
    const int uploadCount = 4;
    var fileNames = new string[uploadCount];
    for (var index = 0; index < uploadCount; index++)
    {
        fileNames[index] = $"SampleData{index}.txt";
        await UploadTestFile(fileNames[index], bucketName);
    }

    // Act
    ListObjectsResponse listing = await AmazonS3.ListObjectsAsync(bucketName);

    // Assert: the listing has the right count and contains every uploaded key.
    List <S3Object> objects = listing.S3Objects;
    Assert.Equal(uploadCount, objects.Count);
    Assert.All(fileNames, name => Assert.NotNull(objects.FirstOrDefault(o => o.Key == name)));
}
/// <summary>
/// Lists files for an Amazon S3 platform whose credentials are carried in the
/// plan's JSON payload.
/// </summary>
/// <param name="planId">Plan identifier — unused in this method; TODO confirm it is needed.</param>
/// <param name="directory">Directory to list.</param>
/// <param name="planData">Platform record whose JsonData holds apiAccessKey/apiSecretKey/region.</param>
public List <FileListModel> GetAmazonS3List(int planId, string directory, PlatformInsertModel planData)
{
    dynamic platformDetail = JObject.Parse(planData.JsonData);
    AmazonS3 s3 = new AmazonS3();
    // NOTE(review): access key, secret key, and region are packed into a single
    // "-"-separated string; AWS secrets can contain characters that make this
    // ambiguous — confirm GetDirectoryList parses it safely.
    return(s3.GetDirectoryList(directory, platformDetail.apiAccessKey.ToString() + "-" + platformDetail.apiSecretKey.ToString() + "-" + platformDetail.region.ToString()));
}
/// <summary>
/// Downloads an object (optionally a specific version) from the configured
/// bucket and returns its full contents as a byte array.
/// </summary>
/// <param name="sObjectKey">Key of the object to fetch.</param>
/// <param name="sVersionId">Specific version id, or "" for the latest version.</param>
public byte[] FetchFile(string sObjectKey, string sVersionId)
{
    string BUCKET_NAME = ConfigurationManager.AppSettings["AWSBUCKET"];
    // Fix: dispose the client and response; the original leaked both.
    using (AmazonS3 client = AWSClientFactory.CreateAmazonS3Client(S3ACCESSKEY, S3SECRETKEY))
    {
        GetObjectRequest request = new GetObjectRequest();
        request.WithKey(sObjectKey);
        request.WithBucketName(BUCKET_NAME);
        if (sVersionId != "")
        {
            request.WithVersionId(sVersionId);
        }
        using (GetObjectResponse response = client.GetObject(request))
        {
            byte[] buffer = new byte[response.ContentLength];
            MemoryStream ms = new MemoryStream();
            int read;
            while ((read = response.ResponseStream.Read(buffer, 0, buffer.Length)) > 0)
            {
                ms.Write(buffer, 0, read);
            }
            return ms.ToArray();
        }
    }
}
/// <summary>
/// Fetches the object at the sanitized virtual path, returning null when the
/// path is empty or the object does not exist in the bucket.
/// </summary>
public override IVirtualFile GetFile(string virtualPath)
{
    if (string.IsNullOrEmpty(virtualPath))
    {
        return(null);
    }
    var filePath = SanitizePath(virtualPath);
    try
    {
        // The response is handed to S3VirtualFile.Init, which takes ownership of it.
        var response = AmazonS3.GetObject(new GetObjectRequest
        {
            Key = filePath,
            BucketName = BucketName,
        });
        var dirPath = GetDirPath(filePath);
        return(new S3VirtualFile(this, new S3VirtualDirectory(this, dirPath, GetParentDirectory(dirPath))).Init(response));
    }
    catch (AmazonS3Exception ex)
    {
        // A missing key surfaces as 404; treat it as "no file". Anything else
        // is a genuine error and is rethrown.
        if (ex.StatusCode == HttpStatusCode.NotFound)
        {
            return(null);
        }
        throw;
    }
}
/// <summary>
/// Copies map tiles for a range of zoom levels, bounded by the configured
/// web-mercator extent, using a semaphore-throttled pool of worker threads.
/// </summary>
public static void Main(string[] args)
{
    // The semaphore caps how many CopyImage threads run concurrently.
    _threadPool = new Semaphore(MaxThreads, MaxThreads);
    using (_s3Client = AWSClientFactory.CreateAmazonS3Client(ConfigurationManager.AppSettings["AWSAccessKey"], ConfigurationManager.AppSettings["AWSSecretKey"]))
    {
        for (int level = StartLevel; level <= EndLevel; ++level)
        {
            // Tile size halves at each successive zoom level.
            double tileSize = WebMercatorDelta * Math.Pow(2, 1 - level);
            // Convert the padded extent into inclusive row/column index ranges.
            int startRow = Convert.ToInt32(Math.Truncate((WebMercatorDelta - ExtentMaxY) / tileSize)) - TilePaddingY;
            int endRow = Convert.ToInt32(Math.Truncate((WebMercatorDelta - ExtentMinY) / tileSize)) + 1 + TilePaddingY;
            int startColumn = Convert.ToInt32(Math.Truncate((ExtentMinX + WebMercatorDelta) / tileSize)) - TilePaddingX;
            int endColumn = Convert.ToInt32(Math.Truncate((ExtentMaxX + WebMercatorDelta) / tileSize)) + 1 + TilePaddingX;
            for (int r = startRow; r <= endRow; ++r)
            {
                for (int c = startColumn; c <= endColumn; ++c)
                {
                    // Blocks until a worker slot frees up. NOTE(review): assumes
                    // CopyImage releases the semaphore when it finishes — confirm.
                    _threadPool.WaitOne();
                    Thread t = new Thread(new ParameterizedThreadStart(CopyImage));
                    t.Start(new UserData(level, r, c));
                    // The '\b' run rewrites the progress line in place on the console.
                    Console.Write(String.Format("{0}Level {1} Row {2} Column {3}", new String('\b', 40), level, r, c).PadRight(80));
                }
            }
        }
    }
    Console.WriteLine((new String('\b', 40) + "Done").PadRight(80));
    Console.Read();
}
/// <summary>
/// Resolves a virtual path to a directory, returning null for unknown paths
/// and the root directory for an empty sanitized path.
/// </summary>
public override IVirtualDirectory GetDirectory(string virtualPath)
{
    if (virtualPath == null)
    {
        return(null);
    }

    var dirPath = SanitizePath(virtualPath);
    if (string.IsNullOrEmpty(dirPath))
    {
        return(RootDirectory);
    }

    // Probe with a trailing separator so only keys inside the directory match.
    var seekPath = dirPath;
    if (seekPath[seekPath.Length - 1] != DirSep)
    {
        seekPath += DirSep;
    }

    var response = AmazonS3.ListObjects(new ListObjectsRequest
    {
        BucketName = BucketName,
        Prefix = seekPath,
        MaxKeys = 1,
    });

    // One matching key is enough to prove the directory exists.
    if (response.S3Objects.Count == 0)
    {
        return(null);
    }
    return(new S3VirtualDirectory(this, dirPath, GetParentDirectory(dirPath)));
}
/// <summary>
/// Initializes a new instance of the <see cref="MultipartUploadCommand"/> class.
/// </summary>
/// <param name="s3Client">The s3 client.</param>
/// <param name="config">The config object that has the number of threads to use.</param>
/// <param name="fileTransporterRequest">The file transporter request.</param>
internal MultipartUploadCommand(AmazonS3 s3Client, TransferUtilityConfig config, TransferUtilityUploadRequest fileTransporterRequest)
{
    this._config = config;
    if (fileTransporterRequest.IsSetFilePath())
    {
        this._logger.DebugFormat("Beginning upload of file {0}.", fileTransporterRequest.FilePath);
    }
    else
    {
        this._logger.DebugFormat("Beginning upload of stream.");
    }
    this._s3Client = s3Client;
    this._fileTransporterRequest = fileTransporterRequest;
    this._contentLength = this._fileTransporterRequest.ContentLength;
    // Honor an explicitly requested part size; otherwise derive one from the
    // total content length.
    if (fileTransporterRequest.IsSetPartSize())
    {
        this._partSize = fileTransporterRequest.PartSize;
    }
    else
    {
        this._partSize = calculatePartSize(this._contentLength);
    }
    this._logger.DebugFormat("Upload part size {0}.", this._partSize);
}
// Controller starts with no AWS service clients; they are assigned later.
public ZAwsEc2Controller()
{
    myTaskQueue = new ZAwsTaskHandler(this);
    ec2 = null;
    route53 = null;
    s3 = null;
}
// Reads AWS credentials and the target bucket from app settings, then builds
// the S3 client used by this file system.
public S3FileSystem()
{
    var settings = ConfigurationManager.AppSettings;
    this.accessKeyId = settings["AWSAccessKeyID"];
    this.secretAccessKey = settings["AWSSecretAccessKey"];
    this.bucketName = settings["AWSBucketName"];
    this.s3 = AWSClientFactory.CreateAmazonS3Client(this.accessKeyId, this.secretAccessKey);
}
/// <summary>
/// Lazily creates the shared S3 client from site configuration on first use.
/// </summary>
private static void SetInstance()
{
    // NOTE(review): not thread-safe — two threads can both observe null and
    // each create a client (last writer wins). Confirm callers are effectively
    // single-threaded or add a lock.
    if(theAmazonClient == null)
    {
        theAmazonClient = Amazon.AWSClientFactory.CreateAmazonS3Client(Configuration.SiteConfiguration.AWSAccessKey(), Configuration.SiteConfiguration.AWSSecretKey());
    }
}
// Permanently deletes the named photo object from the bucket and returns the
// raw delete response.
public static S3Response PhysicallyDeletePhoto(AmazonS3 anS3Client, string aBucketName, string aFileName)
{
    var request = new DeleteObjectRequest();
    request.WithBucketName(aBucketName).WithKey(aFileName);
    return anS3Client.DeleteObject(request);
}
/// <summary>
/// Ensures the "Drone" bucket exists, creating it when missing.
/// </summary>
/// <returns>true when the bucket already existed; false when it had to be created.</returns>
public bool CreateBucket()
{
    config.ServiceURL = "s3.amazonaws.com";
    // Fix: dispose the client and the listing response; the original leaked both.
    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(am.PublicKey, am.PrivateKey))
    {
        bool found = false;
        using (ListBucketsResponse response = client.ListBuckets())
        {
            foreach (S3Bucket bucket in response.Buckets)
            {
                if (bucket.BucketName == "Drone")
                {
                    found = true;
                    break;
                }
            }
        }
        if (found == false)
        {
            client.PutBucket(new PutBucketRequest().WithBucketName("Drone"));
        }
        return found;
    }
}
/// <summary>
/// Downloads the serialized Job blob ("state_&lt;state_id&gt;") from S3 and
/// deserializes it with BinaryFormatter.
/// </summary>
public static Job DeserializeFromS3(string bucket, string state_id, string aws_id, string aws_secret)
{
    Job j;
    // download Job from S3
    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(aws_id, aws_secret))
    {
        GetObjectRequest getObjectRequest = new GetObjectRequest()
        {
            BucketName = bucket,
            Key = "state_" + state_id
        };
        using (S3Response getObjectResponse = client.GetObject(getObjectRequest))
        {
            using (Stream s = getObjectResponse.ResponseStream)
            {
                // SECURITY NOTE(review): BinaryFormatter deserialization of remote
                // data is unsafe unless the bucket contents are fully trusted;
                // consider a safer serialization format.
                // deserialize
                IFormatter formatter = new BinaryFormatter();
                j = (Job)formatter.Deserialize(s);
            }
        }
    }
    return(j);
}
/// <summary>
/// Server-side copies every object under the source domain/dir prefix to the
/// destination prefix in the same bucket, charging quota for each copy.
/// </summary>
public override void CopyDirectory(string srcdomain, string srcdir, string newdomain, string newdir)
{
    string srckey = MakePath(srcdomain, srcdir);
    string dstkey = MakePath(newdomain, newdir);
    //List files from src
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest
        {
            BucketName = _bucket,
            Prefix = srckey
        };
        // NOTE(review): only a single listing page is fetched (no marker loop),
        // so very large directories would be copied partially — confirm sizes.
        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object s3Object in response.S3Objects)
            {
                if (QuotaController != null)
                {
                    QuotaController.QuotaUsedAdd(_modulename, newdomain, _dataList.GetData(newdomain), s3Object.Size);
                }
                // Rewrite the prefix and copy in place with the destination
                // domain's ACL and AES256 server-side encryption.
                // NOTE(review): Replace substitutes every occurrence of srckey in
                // the key, not just the leading prefix — confirm keys can't
                // contain the prefix twice.
                client.CopyObject(new CopyObjectRequest
                {
                    SourceBucket = _bucket,
                    SourceKey = s3Object.Key,
                    DestinationBucket = _bucket,
                    DestinationKey = s3Object.Key.Replace(srckey, dstkey),
                    CannedACL = GetDomainACL(newdomain),
                    ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256
                });
            }
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="MultipartUploadCommand"/> class.
/// </summary>
/// <param name="s3Client">The s3 client.</param>
/// <param name="config">The config object that has the number of threads to use.</param>
/// <param name="fileTransporterRequest">The file transporter request.</param>
internal MultipartUploadCommand(AmazonS3 s3Client, TransferUtilityConfig config, TransferUtilityUploadRequest fileTransporterRequest)
{
    if (fileTransporterRequest.IsSetFilePath())
    {
        this._logger.DebugFormat("Beginning upload of file {0}.", fileTransporterRequest.FilePath);
    }
    else
    {
        this._logger.DebugFormat("Beginning upload of stream.");
    }

    this._config = config;
    this._s3Client = s3Client;
    this._fileTransporterRequest = fileTransporterRequest;
    this._contentLength = fileTransporterRequest.ContentLength;

    // Use the caller-specified part size when set, otherwise derive one from
    // the total content length.
    this._partSize = fileTransporterRequest.IsSetPartSize()
        ? fileTransporterRequest.PartSize
        : calculatePartSize(this._contentLength);
    this._logger.DebugFormat("Upload part size {0}.", this._partSize);
}
/// <summary>
/// Deletes every object under the given domain/path whose file name matches
/// the wildcard pattern, releasing quota per deleted object.
/// </summary>
/// <param name="recursive">Not consulted here — TODO confirm whether GetS3Objects already lists recursively.</param>
public override void DeleteFiles(string domain, string path, string pattern, bool recursive)
{
    var objToDel = GetS3Objects(domain, path)
        .Where(x => Wildcard.IsMatch(pattern, Path.GetFileName(x.Key)));
    using (AmazonS3 client = GetClient())
    {
        foreach (S3Object s3Object in objToDel)
        {
            if (QuotaController != null)
            {
                QuotaController.QuotaUsedDelete(_modulename, domain, _dataList.GetData(domain), Convert.ToInt64(s3Object.Size));
            }
            var deleteRequest = new DeleteObjectRequest
            {
                BucketName = _bucket,
                Key = s3Object.Key
            };
            // Empty using body: disposes the delete response immediately.
            using (client.DeleteObject(deleteRequest))
            {
            }
        }
    }
}
/// <summary>
/// Used to create a new instance of the MessageGears client.
/// </summary>
/// <param name="props">
/// Contains the credentials needed to access MessageGears, Amazon S3, and Amazon SQS.<see cref="MessageGearsProperties"/>
/// </param>
public MessageGearsAwsClient(MessageGearsAwsProperties props)
{
    this.properties = props;
    // Both service clients share the same AWS account credentials.
    this.sqs = new AmazonSQSClient(props.MyAWSAccountKey, props.MyAWSSecretKey);
    this.s3 = new AmazonS3Client(props.MyAWSAccountKey, props.MyAWSSecretKey);
    log.Info("MessageGears AWS client initialized");
}
/// <summary>
/// Moves an object between domains: transfers quota, server-side copies it
/// with the destination ACL and AES256 encryption, then deletes the source.
/// </summary>
/// <returns>The URI of the object at its new location.</returns>
public override Uri Move(string srcdomain, string srcpath, string newdomain, string newpath)
{
    using (AmazonS3 client = GetClient())
    {
        string srcKey = MakePath(srcdomain, srcpath);
        string dstKey = MakePath(newdomain, newpath);
        // QuotaDelete releases the source's quota usage and reports the object size.
        long size = QuotaDelete(srcdomain, client, srcKey);
        if (QuotaController != null)
        {
            QuotaController.QuotaUsedAdd(_modulename, newdomain, _dataList.GetData(newdomain), size);
        }
        var request = new CopyObjectRequest
        {
            SourceBucket = _bucket,
            SourceKey = srcKey,
            DestinationBucket = _bucket,
            DestinationKey = dstKey,
            CannedACL = GetDomainACL(newdomain),
            // REPLACE writes fresh metadata instead of copying the source's.
            Directive = S3MetadataDirective.REPLACE,
            ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256
        };
        client.CopyObject(request);
        Delete(srcdomain, srcpath, false);
        return(GetUri(newdomain, newpath));
    }
}
/// <summary>
/// Uploads a stream to the bucket under (normalized folder)/(file name) as a
/// publicly readable object with a 30-minute timeout.
/// </summary>
public void save_file(string folderName, string fileName, Stream fileStream)
{
    // It's allowed to have an empty folder name.
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }
    if (fileStream == null)
    {
        throw new ArgumentNullException("fileStream");
    }

    // Normalize the folder to either empty or a prefix ending in exactly one '/'.
    if (string.IsNullOrEmpty(folderName))
    {
        folderName = String.Empty;
    }
    else if (folderName[folderName.Length - 1] != '/')
    {
        folderName = folderName + "/";
    }
    fileName = folderName + fileName;

    var request = new PutObjectRequest();
    request.WithBucketName(clientContext.BucketName);
    request.WithKey(fileName);
    request.WithInputStream(fileStream);
    // The SDK closes the caller's stream once the upload completes.
    request.AutoCloseStream = true;
    request.CannedACL = S3CannedACL.PublicRead;
    request.WithTimeout((int)TimeSpan.FromMinutes(30).TotalMilliseconds);

    using (AmazonS3 client = clientContext.create_instance())
    {
        wrap_request_in_error_handler(() => client.PutObject(request));
    }
}
/// <summary>
/// Uploads a package stream to the top level of the S3 bucket as a publicly
/// readable object with a 30-minute timeout.
/// </summary>
public void SaveFile(string folderName, string fileName, Stream fileStream)
{
    //folder ignored - packages stored on top level of S3 bucket
    if (String.IsNullOrWhiteSpace(folderName))
    {
        throw new ArgumentNullException("folderName");
    }
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }
    if (fileStream == null)
    {
        throw new ArgumentNullException("fileStream");
    }
    var request = new PutObjectRequest();
    request.WithBucketName(clientContext.BucketName);
    request.WithKey(fileName);
    request.WithInputStream(fileStream);
    // The SDK closes the caller's stream once the upload completes.
    request.AutoCloseStream = true;
    request.CannedACL = S3CannedACL.PublicRead;
    request.WithTimeout((int)TimeSpan.FromMinutes(30).TotalMilliseconds);
    using (AmazonS3 client = clientContext.CreateInstance())
    {
        S3Response response = WrapRequestInErrorHandler(() => client.PutObject(request));
    }
}
/// <summary>
/// Creates the S3 bucket named cmap unless it already exists, logging each
/// step to the console.
/// </summary>
public static void CreateBucket(AmazonS3 client, string cmap)
{
    Console.Out.WriteLine("Checking S3 bucket with name " + cmap);
    ListBucketsResponse response = client.ListBuckets();
    bool exists = false;
    foreach (S3Bucket bucket in response.Buckets)
    {
        if (bucket.BucketName != cmap)
        {
            continue;
        }
        Console.Out.WriteLine(" Bucket found will not create it.");
        exists = true;
        break;
    }
    if (!exists)
    {
        Console.Out.WriteLine(" Bucket not found will create it.");
        client.PutBucket(new PutBucketRequest().WithBucketName(cmap));
        Console.Out.WriteLine("Created S3 bucket with name " + cmap);
    }
}
/// <summary>
/// Splits a slash-separated file reference into its folder path and bare file
/// name and asks the S3 wrapper to delete it. Best-effort: any failure is
/// reported via the return value.
/// </summary>
/// <param name="fileName">Slash-separated path whose last segment is the file name.</param>
/// <returns>true when the delete call completed; false when it threw.</returns>
public bool DeleteFile(string fileName)
{
    try
    {
        string[] segments = fileName.Split('/');
        // Everything before the last segment becomes "a/b/"-style folder path;
        // empty segments (leading or doubled slashes) are skipped.
        string path = "";
        for (int i = 0; i < segments.Length - 1; i++)
        {
            if (segments[i] != "")
            {
                path += segments[i] + "/";
            }
        }
        AmazonS3.DeleteFile(path, segments[segments.Length - 1]);
        return true;
    }
    catch
    {
        // Fix: the original caught into an unused 'ex' variable; deliberate
        // best-effort behavior (false on failure) is preserved.
        return false;
    }
}
// Aggregates the collaborators used when fetching content from S3.
public S3FetchClient(IHttpRetryService httpRetryService, IFileSystem fileSystem, AmazonS3 s3)
{
    this.s3 = s3;
    this.fileSystem = fileSystem;
    this.httpRetryService = httpRetryService;
}
/// <summary>
/// Sync driver: indexes local files and the remote S3 folder, uploads new or
/// changed files, deletes remote files that vanished locally, then uploads a
/// fresh serialized index ("index.mhash").
/// </summary>
public static void Main(string[] args)
{
    log4net.Config.XmlConfigurator.Configure();
    log.Info("Initializing and connecting to AWS...");
    s3 = AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.USWest1);
    indexer = new FileIndexer("Files");
    indexer.Index();
    s3indexer = new S3Indexer(Settings.Default.BucketName, Settings.Default.FolderName, "S3Tmp", s3);
    s3indexer.Index();
    log.Info("Comparing local index and remote index.");
    // Upload anything missing remotely or whose content hash differs.
    var filesToUpload = (from filePair in indexer.FileIndex
                         where !s3indexer.HashedFiles.ContainsKey(filePair.Key) || !s3indexer.HashedFiles[filePair.Key].SequenceEqual(filePair.Value)
                         select filePair.Key).ToList();
    // Delete any remote file that no longer exists locally.
    var filesToDelete = (from filePair in s3indexer.HashedFiles
                         where !indexer.FileIndex.ContainsKey(filePair.Key)
                         select filePair.Key).ToList();
    foreach (var fileDelete in filesToDelete)
    {
        log.Debug("Deleting file " + fileDelete);
        s3.DeleteObject(new DeleteObjectRequest() { BucketName = Settings.Default.BucketName, Key = Settings.Default.FolderName + "/" + fileDelete });
    }
    foreach (var fileUpload in filesToUpload)
    {
        log.Debug("Uploading file " + fileUpload);
        // AutoCloseStream lets the SDK dispose the FileStream after the upload.
        s3.PutObject(new PutObjectRequest() { BucketName = Settings.Default.BucketName, Key = Settings.Default.FolderName + "/" + fileUpload, AutoCloseStream = true, InputStream = new FileStream("Files/" + fileUpload, FileMode.Open) });
    }
    log.Info("Re-indexing files...");
    using (MemoryStream stream = new MemoryStream())
    {
        Serializer.Serialize(stream, indexer.FileIndex);
        // Rewind so PutObject reads the serialized bytes from the start.
        stream.Position = 0;
        s3.PutObject(new PutObjectRequest() { BucketName = Settings.Default.BucketName, Key = Settings.Default.FolderName + "/" + "index.mhash", InputStream = stream });
    }
    log.Info("Done!");
    Console.Read();
}
/// <summary>
/// Deletes the object with the given key from the configured bucket.
/// </summary>
/// <param name="Client">S3 client to issue the delete with.</param>
/// <param name="filekey">Key of the object to remove.</param>
public static void DeleteFile(AmazonS3 Client, string filekey)
{
    DeleteObjectRequest request = new DeleteObjectRequest()
    {
        BucketName = BUCKET_NAME,
        Key = filekey
    };
    // Fix: dispose the delete response; the original left it undisposed.
    using (S3Response response = Client.DeleteObject(request))
    {
    }
}
/// <summary>
/// Initializes a new instance of the S3
/// </summary>
/// <param name="client">Client</param>
/// <param name="bucket">Bucket</param>
/// <param name="relativePath">Relative Path; backslashes are normalized to '/'.</param>
/// <param name="etag">ETag; when present it is converted into the MD5 property.</param>
public S3(AmazonS3 client, string bucket, string relativePath, string etag = null)
{
    this.client = client;
    this.bucket = bucket;
    // Normalize Windows path separators into S3 key separators.
    this.RelativePath = relativePath.Replace('\\', '/');
    if (!string.IsNullOrWhiteSpace(etag))
    {
        // Strip the quotes S3 wraps around ETags, then re-encode as base-64.
        // NOTE(review): assumes StringToByteArray decodes the hex digest and that
        // the ETag is a plain MD5 (not a multipart "-N" ETag) — confirm.
        this.MD5 = System.Convert.ToBase64String(StringToByteArray(etag.Replace("\"", string.Empty)));
    }
}
// S3 has no real folders: an empty-bodied object whose key is the folder name
// acts as the folder placeholder. Returns the created key.
public static string CreateNewFolder(AmazonS3 client, string foldername)
{
    var request = new PutObjectRequest();
    request.WithBucketName(BUCKET_NAME);
    request.WithKey(foldername);
    request.WithContentBody("");
    client.PutObject(request);
    return foldername;
}
/// <summary>
/// Create an indexer given a bucket and folder; the download folder is
/// recreated from scratch as a clean staging area.
/// </summary>
/// <param name="bucketName">Bucket holding the remote index.</param>
/// <param name="folderName">Folder (key prefix) within the bucket.</param>
/// <param name="downloadFolderName">Local scratch directory for downloads.</param>
/// <param name="s3">S3 client to use.</param>
public S3Indexer(string bucketName, string folderName, string downloadFolderName, AmazonS3 s3)
{
    HashedFiles = new Dictionary<string, byte[]>();

    // Start every run with an empty scratch directory.
    if (Directory.Exists(downloadFolderName))
    {
        Directory.Delete(downloadFolderName, true);
    }
    Directory.CreateDirectory(downloadFolderName);

    downloadPath = downloadFolderName;
    this.bucketName = bucketName;
    this.folderName = folderName;
    this.s3 = s3;
}
// Uploads a local file into the given "folder" (key prefix) of the bucket and
// returns the resulting S3 key.
public static string CreateNewFileInFolder(AmazonS3 client, string foldername, string filepath)
{
    string key = foldername + "/" + System.IO.Path.GetFileName(filepath);
    var request = new PutObjectRequest();
    request.WithBucketName(BUCKET_NAME);
    request.WithKey(key);
    request.WithFilePath(filepath);
    client.PutObject(request);
    return key;
}
// Downloads the object and prints its contents to the console using the
// system default encoding.
private static void GetObject(AmazonS3 s3Client, string bucket, string key)
{
    var request = new GetObjectRequest().WithBucketName(bucket).WithKey(key);
    using (var response = s3Client.GetObject(request))
    using (var buffer = new MemoryStream())
    {
        response.ResponseStream.CopyTo(buffer);
        Console.WriteLine(Encoding.Default.GetString(buffer.ToArray()));
    }
}
// Fixture setup: create the shared client once; if that prerequisite fails,
// mark the whole fixture inconclusive instead of failing every test.
public static void ClassInitialize(TestContext context)
{
    try
    {
        client = ClientTests.CreateClient();
    }
    catch (Exception ex)
    {
        Assert.Inconclusive("prerequisite: unable to create client. Error: {0}", ex.Message);
    }
}
/// <summary>
/// Server-side copies an object from one key to another within the
/// configured bucket.
/// </summary>
/// <param name="s3Client">S3 client to issue the copy with.</param>
/// <param name="sourcekey">Key of the existing object.</param>
/// <param name="targetkey">Key to copy the object to.</param>
public static void CopyFile(AmazonS3 s3Client, string sourcekey, string targetkey)
{
    // Fix: the original declared unused 'destinationPath' and 'response' locals.
    CopyObjectRequest request = new CopyObjectRequest()
    {
        SourceBucket = BUCKET_NAME,
        SourceKey = sourcekey,
        DestinationBucket = BUCKET_NAME,
        DestinationKey = targetkey
    };
    s3Client.CopyObject(request);
}
// Creates a plain-HTTP S3 client against s3.amazonaws.com and delegates the
// actual upload to UploadToAmazon, returning its result.
public string Save(string fileName, Stream fileStream)
{
    var s3Config = new AmazonS3Config()
    {
        ServiceURL = "s3.amazonaws.com",
        CommunicationProtocol = Amazon.S3.Model.Protocol.HTTP,
    };
    using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKeyID, secretAccessKeyID, s3Config))
    {
        return UploadToAmazon(fileName, fileStream);
    }
}
/// <summary>
/// Recursively mirrors an S3 "folder" into an Azure blob container by starting
/// server-side blob copies from public S3 URLs and polling for completion.
/// </summary>
private static void ExportAndImport(string folder, CloudBlobContainer container, AmazonS3 s3)
{
    var listRequest = new ListObjectsRequest
    {
        BucketName = ConfigurationManager.AppSettings["S3Bucket"],
    }.WithPrefix(folder);
    Console.WriteLine("Fetching all S3 object in " + folder);
    var s3response = s3.ListObjects(listRequest);
    //Checking if container exists, and creating it if not
    if (container.CreateIfNotExists())
    {
        Console.WriteLine("Creating the blob container");
    }
    foreach (var s3Item in s3response.S3Objects)
    {
        // Skip the folder placeholder itself; recurse into sub-folders.
        if (s3Item.Key == folder)
        {
            continue;
        }
        if (s3Item.Key.EndsWith("/"))
        {
            ExportAndImport(s3Item.Key, container, s3);
            continue;
        }
        Console.WriteLine("---------------------------------------------------");
        var blockBlob = container.GetBlockBlobReference(s3Item.Key);
        Console.WriteLine("Blob: " + blockBlob.Uri.AbsoluteUri);
        var id = blockBlob.StartCopyFromBlob(new Uri("http://" + awsServiceUrl + "/" + s3Bucket + "/" + HttpUtility.UrlEncode(s3Item.Key)), null, null, null);
        // NOTE(review): the loop runs only while the copy id is the empty string,
        // so for any non-empty id the polling below never executes — confirm
        // whether this condition should be inverted.
        bool continueLoop = true;
        while (continueLoop && id == string.Empty)
        {
            var copyState = blockBlob.CopyState;
            if (copyState != null)
            {
                // NOTE(review): this looks like integer division, which would keep
                // percentComplete at 0 until completion — confirm a floating-point
                // division was intended.
                var percentComplete = copyState.BytesCopied / copyState.TotalBytes;
                Console.WriteLine("Status of blob copy...." + copyState.Status + " " + copyState.TotalBytes + " of " + copyState.BytesCopied + "bytes copied. " + string.Format("{0:0.0%}", percentComplete));
                if (copyState.Status != CopyStatus.Pending)
                {
                    continueLoop = false;
                }
            }
            // Poll once per second.
            System.Threading.Thread.Sleep(1000);
        }
    }
}
/// <summary>
/// Downloads every stored rendition (jpg pages, txt, pdf, docx) of a resume
/// from S3 into a newly created local staging directory for zipping.
/// </summary>
public void getFilefromS3(resume resume, string zipName)
{
    zipDirectoryName = zipName;
    System.IO.Directory.CreateDirectory(zipDirectoryName);
    //string baseFilename = resume.LastName + "_" + resume.FirstName + "_resume";
    string baseFilename = resume.resumeID.ToString() +"_" + resume.LastName + "_" + resume.FirstName + "_resume";
    // SECURITY NOTE(review): AWS credentials are hard-coded in source. They
    // should be revoked and moved to configuration or a secrets store.
    using (client = Amazon.AWSClientFactory.CreateAmazonS3Client("AKIAJ47VSG7WMA62WLCA", "3tqlHujlftpk6j/z5OtDw2eg9N2FJtz1RwL8bEa3"))
    {
        string keyName;
        if (resume.hasIMG)
        {
            // Page images are keyed img/<id>, then img/<id>_1, img/<id>_2, ...
            var pageNo = 0;
            var append = false;
            for (int i = 0; i < (resume.pageCnt.HasValue? resume.pageCnt: 1 ); i++ )
            {
                keyName = "img/" + resume.resumeID.ToString();
                keyName = append ? (keyName + "_" + pageNo.ToString()) : keyName;
                getAndSaveFile(keyName, baseFilename + ((pageNo==0)? "": ("_" + pageNo.ToString())) + ".jpg");
                append = true;
                pageNo++;
            }
        }
        if (resume.hasTXT)
        {
            keyName = "txt/" + resume.resumeID.ToString();
            getAndSaveFile(keyName, baseFilename + ".txt");
        }
        if (resume.hasPDF)
        {
            keyName = "pdf/" + resume.resumeID.ToString();
            getAndSaveFile(keyName, baseFilename + ".pdf");
        }
        if (resume.hasDOCX)
        {
            // NOTE(review): the docx rendition is saved with a ".doc" extension —
            // confirm this is intentional.
            keyName = "docx/" + resume.resumeID.ToString();
            getAndSaveFile(keyName, baseFilename + ".doc");
        }
    }
}
/// <summary>
/// On first (non-postback) load, fills the image combo box with the app image
/// archive URLs for the current session; redirects to the default page when
/// the cached session state is missing or too small to be usable.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    var sessionState = (Hashtable)HttpRuntime.Cache[Session.SessionID];

    // No usable cached state: send the browser back to the start page.
    if (sessionState == null || sessionState.Count <= 2)
    {
        Page.ClientScript.RegisterStartupScript(this.GetType(), Guid.NewGuid().ToString(), "timeOut('../Default.aspx');", true);
        return;
    }

    // Only rebuild the list on the initial GET, not on postbacks.
    if (IsPostBack)
    {
        return;
    }

    var s3 = new AmazonS3();
    ArrayList urls = s3.GetAppImageArchiveUrls(sessionState);
    urls.Sort();

    ImageList.Items.Clear();
    foreach (string url in urls)
    {
        // Show just the file name; keep the full URL as the item value.
        var fileName = url.Substring(url.LastIndexOf("/") + 1);
        ImageList.Items.Add(new RadComboBoxItem(fileName, url));
    }
}
/// <summary>
/// Ensures the per-user S3 bucket for <paramref name="itemKey"/> exists,
/// creating it when the authenticated user does not already own a bucket with
/// the configured name. Does nothing for unauthenticated requests.
/// </summary>
/// <param name="itemKey">Item key substituted into Settings.Default.BucketNameFormat.</param>
/// <param name="s3Client">S3 client used for both the lookup and the creation.</param>
public static void CheckForBucket(string itemKey, AmazonS3 s3Client)
{
    if (!HttpContext.Current.User.Identity.IsAuthenticated)
    {
        return;
    }

    string userBucketName = String.Format(Settings.Default.BucketNameFormat, HttpContext.Current.User.Identity.Name, itemKey);

    using (ListBucketsResponse listBucketsResponse = s3Client.ListBuckets())
    {
        S3Bucket bucket = listBucketsResponse.Buckets.FirstOrDefault(b => b.BucketName == userBucketName);
        if (bucket == null)
        {
            PutBucketRequest putBucketRequest = new PutBucketRequest().WithBucketName(userBucketName);

            // FIX: dispose the response via 'using' instead of a manual
            // Dispose() call, so it is released even on an early exception
            // and the intent is explicit.
            using (PutBucketResponse putBucketResponse = s3Client.PutBucket(putBucketRequest))
            {
            }
        }
    }
}
/// <summary>
/// Builds a data set of archived account image URLs — excluding camera,
/// storyboard and QR-code images — and binds it to the parameter repeater.
/// Redirects to the default page when the cached session state is missing.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    Hashtable sessionState = (Hashtable)HttpRuntime.Cache[Session.SessionID];
    if (sessionState == null || sessionState.Count <= 2)
    {
        Page.ClientScript.RegisterStartupScript(this.GetType(), Guid.NewGuid().ToString(), "timeOut('../Default.aspx');", true);
        return;
    }

    AmazonS3 s3 = new AmazonS3();
    ArrayList archiveUrls = s3.GetAccountImageArchiveUrls(sessionState);

    DataSet resultSet = new DataSet("ParameterDataSet");
    DataTable resultTable = resultSet.Tables.Add("ParamTable");
    resultTable.Columns.Add("image_url", typeof(String));
    resultTable.Columns.Add("id", typeof(String));

    int rowIndex = 0;
    foreach (string url in archiveUrls)
    {
        string fileName = url.Substring(url.LastIndexOf("/") + 1);

        // Camera images have dashes at fixed positions (GUID-like names).
        bool isCameraImage = fileName.Length > 30 && fileName[13] == '-' && fileName[18] == '-' && fileName[23] == '-' && fileName[28] == '-';
        if (isCameraImage)
        {
            continue;
        }

        // Storyboard images contain exactly two '.' characters.
        if (fileName.Split('.').Length - 1 == 2)
        {
            continue;
        }

        // QR-code images end in "qrcode.png".
        if (fileName.EndsWith("qrcode.png"))
        {
            continue;
        }

        //string prefix = file.Substring(0, file.IndexOf("."));
        //if (apps.Contains(prefix)) //storyboard prefix
        //    continue;
        DataRow row = resultTable.NewRow();
        row.ItemArray = new string[] { url, "image" + rowIndex.ToString() };
        resultTable.Rows.Add(row);
        rowIndex++;
    }

    ParamRepeater.DataSource = resultSet;
    ParamRepeater.DataBind();
}
/// <summary>
/// Initializes the shared S3 client and resolves the configured bucket.
/// Reads the bucket name from the "s3_bucketName" entry of Utils.CFG and
/// verifies that a bucket with that name exists on the account.
/// </summary>
/// <returns>true when the client was created and the bucket was found; false otherwise.</returns>
public static bool Initialize_S3_stuff()
{
    Console.WriteLine("starting Initialize_S3_stuff()");
    s3_client = null;
    bucketName = null;
    try
    {
        if (!Utils.CFG.ContainsKey("s3_bucketName"))
        {
            Console.WriteLine("param s3_bucketName is not found in ez3d.config");
            return false;
        }

        s3_client = AWSClientFactory.CreateAmazonS3Client();

        ListBucketsResponse response = s3_client.ListBuckets();
        // FIX: hoist the loop-invariant config lookup out of the loop.
        string wantedName = (String)Utils.CFG["s3_bucketName"];
        foreach (S3Bucket bucket in response.Buckets)
        {
            if (bucket.BucketName == wantedName)
            {
                bucketName = bucket.BucketName;
                Console.WriteLine("bucketName =" + bucketName);
                break; // FIX: stop scanning once the bucket is found
            }
        }

        if (bucketName == null)
        {
            Console.WriteLine("(bucketName == null)");
            return false;
        }

        // FIX: corrected the "fininshed succefully" typos in the log message.
        Console.WriteLine("Initialize_S3_stuff finished successfully");
        return true;
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine("AmazonS3Exception caught !!!");
        Console.WriteLine(e.Message);
        return false;
    }
}
/// <summary>
/// Deletes the S3 object whose key is "{keyName}-{fileName}" from the
/// configured bucket.
/// </summary>
/// <param name="fileName">File name appended to the key prefix to form the object key.</param>
/// <exception cref="InvalidOperationException">
/// Wraps any AmazonS3Exception raised by the delete call (the original S3
/// exception is preserved as the inner exception).
/// </exception>
public void Delete(string fileName)
{
    string uniqueKeyItemName = string.Format("{0}-{1}", keyName, fileName);

    DeleteObjectRequest deleteObjectRequest = new DeleteObjectRequest()
        .WithBucketName(bucketName)
        .WithKey(uniqueKeyItemName);

    using (client = new AmazonS3Client(accessKeyID, secretAccessKeyID))
    {
        try
        {
            client.DeleteObject(deleteObjectRequest);
        }
        catch (AmazonS3Exception s3Exception)
        {
            // FIX: throw a specific exception type instead of the base
            // Exception; callers catching Exception still catch this.
            throw new InvalidOperationException(
                String.Format("Error Occurred in Delete operation for ObjectKeyID: {0}", uniqueKeyItemName), s3Exception);
        }
    }
}