public Stream GetFileIfChangedSince(string s3Url, DateTime lastModified, out DateTime newLastModified)
{
    S3PathParts path = _pathParser.Parse(s3Url);
    var request = new GetObjectRequest
    {
        BucketName = path.Bucket,
        Key = path.Key,
        ModifiedSinceDate = lastModified
    };

    try
    {
        // Deliberately NOT disposing the response: its Dispose does nothing except
        // dispose the response stream, which is exactly what we are handing back to
        // the caller. It's a little gross, but the alternative — a custom Stream that
        // disposes the response when the Stream itself is disposed — is grosser.
        GetObjectResponse response = _s3Client.GetObject(request);
        newLastModified = response.LastModified;
        return response.ResponseStream;
    }
    catch (AmazonS3Exception e)
    {
        if (e.StatusCode != HttpStatusCode.NotModified)
        {
            throw;
        }

        // S3 reports "not modified since the given date" as an exception; surface it
        // to the caller as a null stream instead.
        newLastModified = default(DateTime);
        return null;
    }
}
private void SynchronizedLoadAllHistory(Guid profileId)
{
    // Loads the complete detailed-billing history for a profile from S3, wiping any
    // previously imported data first so the ledger is rebuilt from scratch.
    IAwsClient client;
    AwsProfile profile;
    if (!TryInitialize(profileId, out client, out profile))
    {
        return;
    }

    if (string.IsNullOrEmpty(profile.DetailedBillingS3Bucket))
    {
        // detailed billing not configured for this profile
        return;
    }

    if (profile.IsBillingHistoryLoading)
    {
        // Another process is already loading; don't double up.
        return;
    }

    // Prevent other processes from trying to load at the same time.
    // This still has a race condition that would be eliminated using optimistic concurrency.
    profile.IsBillingHistoryLoading = true;
    _awsProfileRepository.Update(profile);

    try
    {
        // Example object:
        // http://s3.amazonaws.com/509438855493-aws-billing-detailed-line-items-with-resources-and-tags-2014-08.csv.zip
        // Dots in ".csv.zip" are escaped so the pattern can't accidentally match other
        // separators; group 1 captures the billing period (e.g. "2014-08").
        var pattern = new Regex(@"\A\d+-aws-billing-detailed-line-items-with-resources-and-tags-(\d+-\d+)\.csv\.zip\z");
        var rootPath = new S3PathParts(profile.DetailedBillingS3Bucket, "");
        List<string> availablePeriods = client.StorageService
            .ListFiles(rootPath.ToString())
            .Select(path => _pathParser.Parse(path))
            .Select(parsedPath => pattern.Match(parsedPath.Key))
            .Where(match => match.Success)
            .Select(match => match.Groups[1].Captures[0].Value)
            .OrderBy(period => period, StringComparer.InvariantCulture)
            .ToList();

        // Forget that we have pulled CSVs before...
        profile.BillingMetadata.Clear();
        // ... or loaded them into the ledger.
        _billingManager.WipeAllData();

        DateTime utcNow = _clock.UtcNow;
        foreach (string period in availablePeriods)
        {
            RefreshDataForPeriod(profile, period, client, utcNow);
        }

        profile.IsBillingHistoryLoaded = true;
    }
    finally
    {
        // Always release the loading flag, even when a refresh throws; otherwise the
        // profile would be stuck with IsBillingHistoryLoading == true and no later
        // attempt could ever load billing history again.
        profile.IsBillingHistoryLoading = false;
        _awsProfileRepository.Update(profile);
    }
}
public IList<string> ListFiles(string path)
{
    // Lists the immediate children of an S3 "directory" (delimiter "/") under the
    // given s3:// path and returns them as fully-qualified s3:// URLs.
    S3PathParts parts = _pathParser.Parse(path);

    var results = new List<string>();
    var request = new ListObjectsRequest
    {
        BucketName = parts.Bucket,
        Delimiter = "/",
        Prefix = parts.Key
    };

    // S3 returns at most 1000 keys per ListObjects call. Follow the continuation
    // markers so callers see the complete listing instead of a silently truncated
    // first page. NextMarker is populated because Delimiter is set.
    ListObjectsResponse response;
    do
    {
        response = _s3Client.ListObjects(request);
        results.AddRange(
            response.S3Objects.Select(o => new S3PathParts(parts.Bucket, o.Key).ToString()));
        request.Marker = response.NextMarker;
    } while (response.IsTruncated);

    return results;
}
public void ToString_S3Url()
{
    // Arrange: a bucket/key pair.
    var sut = new S3PathParts("mybucket", "mykey");

    // Act: render it as a URL.
    string url = sut.ToString();

    // Assert: the canonical s3:// form.
    url.Should().Be("s3://mybucket/mykey");
}
public string GetFile(string path)
{
    // Convenience overload: split an s3:// URL into bucket and key, then delegate
    // to the (bucket, key) overload.
    S3PathParts parts = _pathParser.Parse(path);
    string bucket = parts.Bucket;
    string key = parts.Key;
    return GetFile(bucket, key);
}