/// <summary>
/// Mirrors one S3 object to a local file: downloads it when the local copy is
/// missing or older than the object's LastModified, then re-stamps the local
/// file's LastWriteTime to match S3. In count mode (OnlyCount == true) nothing
/// is downloaded or re-stamped, but the file is still added to CopiedFiles so
/// it can be counted.
/// </summary>
/// <param name="o">The S3 object listing entry to mirror.</param>
/// <param name="filename">Destination path for the local copy.</param>
private void retrieveFile(S3Object o, string filename)
{
    DateTime lastModified = o.LastModified;
    FileInfo fi = null;
    if (File.Exists(filename))
    {
        fi = new FileInfo(filename);
        // NOTE(review): LastWriteTime is local time while S3 LastModified is
        // typically UTC — confirm both sides are in the same zone here.
        if (fi.LastWriteTime < lastModified)
        {
            // Local copy is stale: fetch the newer object (skipped in count mode).
            if ( !OnlyCount )
                transferUtility.Download(filename, Bucket, o.Key);
            Console.WriteLine("Update.. {0}", filename);
            CopiedFiles.Add(filename);
        }
        else
        {
            // Local copy is up to date — nothing to do.
            // Console.WriteLine("Skip.... {0}", filename);
        }
    }
    else
    {
        // No local copy yet: create it (skipped in count mode).
        if (!OnlyCount)
        {
            transferUtility.Download(filename, Bucket, o.Key);
        }
        Console.WriteLine("Create.. {0}", filename);
        CopiedFiles.Add(filename);
    }
    /// If not in count mode, reset the local timestamp to the S3 LastModified value.
    fi = new FileInfo(filename);
    if ( lastModified != DateTime.MinValue && !OnlyCount )
        fi.LastWriteTime = lastModified;
}
/// <summary>
/// Parses one Wikipedia hourly page-stats file from S3 and loads its rows
/// into the data table of the catalog bucket whose BucketMod matches this
/// file's modulus.
/// </summary>
/// <param name="file">S3 object whose key embeds the stats date right after
/// DATE_EXTRACTION_PREFIX (11 characters, parsed with FORMAT/PROVIDER).</param>
private void ProcessFile(S3Object file)
{
    Trace.WriteLine(string.Format("Processing item {0}", file.Key));
    var lines = ReadS3File(file);
    // Extract the 11-character date token from the key.
    var trimmedDate = file.Key.Substring(DATE_EXTRACTION_PREFIX.Length, 11);
    var date = DateTime.ParseExact(trimmedDate, FORMAT, PROVIDER);
    // NOTE(review): file.GetHashCode() is the default reference hash of the
    // S3Object instance, so the bucket chosen is not stable across runs or
    // processes. Confirm whether a stable hash of file.Key was intended.
    var fileMod = file.GetHashCode() % ComputeNode.GlobalBucketCount;
    // Find the target catalog by name, then the bucket whose modulus matches.
    var buckets = ComputeNode.Catalogs.Values.Cast<ICatalog>().Where(c => c.CatalogName == CATALOG).First().Buckets;
    var bucketMod = buckets.First(b => b.Value.BucketMod == fileMod).Value;
    Trace.WriteLine(string.Format("Adding data items from {0}", file.Key));
    lines.AsParallel().ForAll(line =>
    {
        // Each line is "<project> <page> <views> <sizeKB>" with URL-encoded fields.
        var items = line.Split(' ');
        Debug.Assert(items.Length == 4);
        var projectCode = HttpUtility.UrlDecode(items[0]);
        var pageName = HttpUtility.UrlDecode(items[1]);
        var pageViews = int.Parse(items[2]);
        var pageSizeKB = long.Parse(items[3]);
        var wikiStat = new WikipediaHourlyPageStats(date, projectCode, pageName, pageViews, pageSizeKB);
        // NOTE(review): AddItem is invoked concurrently via ForAll — confirm
        // BucketDataTables[TABLE].AddItem is thread-safe.
        bucketMod.BucketDataTables[TABLE].AddItem(wikiStat);
    });
    Trace.WriteLine(string.Format("Added data items from {0}", file.Key));
}
/// <summary>
/// Sets the storage class for the S3 Object to the value specified, by
/// delegating to the bucket/key overload.
/// </summary>
/// <param name="s3Object">The S3 Object whose storage class needs changing</param>
/// <param name="sClass">The new Storage Class for the object</param>
/// <param name="s3Client">The Amazon S3 Client to use for S3 specific operations.</param>
/// <seealso cref="T:Amazon.S3.Model.S3StorageClass"/>
public static void SetObjectStorageClass(S3Object s3Object, S3StorageClass sClass, AmazonS3 s3Client)
{
    var bucketName = s3Object.BucketName;
    var key = s3Object.Key;
    SetObjectStorageClass(bucketName, key, sClass, s3Client);
}
/// <summary>
/// Builds a TransferUtility download request for the given S3 object,
/// mapping the key (minus the common prefix) to a path under the
/// requested local directory and wiring up the progress callback.
/// </summary>
/// <param name="s3Object">Object to download.</param>
/// <param name="prefixLength">Number of leading key characters to strip.</param>
private TransferUtilityDownloadRequest ConstructTransferUtilityDownloadRequest(S3Object s3Object, int prefixLength)
{
    // Strip the shared prefix and convert the key into a Windows-style
    // relative path.
    var relativeFile = s3Object.Key.Substring(prefixLength).Replace('/', '\\');
    var downloadRequest = new TransferUtilityDownloadRequest
    {
        BucketName = this._request.BucketName,
        Key = s3Object.Key,
        FilePath = Path.Combine(this._request.LocalDirectory, relativeFile)
    };
    downloadRequest.WriteObjectProgressEvent += downloadedProgressEventCallback;
    return downloadRequest;
}
/// <summary>
/// Initializes a new S3StorageFolder wrapping the given S3 entry together
/// with its pre-computed total size.
/// </summary>
/// <param name="entry">The S3 object representing this folder.</param>
/// <param name="folderSize">Total size of the folder's contents, in bytes.</param>
public S3StorageFolder(S3Object entry, long folderSize)
{
    _entry = entry;
    _folderSize = folderSize;
}
/// <summary>
/// Initializes a new instance of the FileObject class from an Amazon S3
/// object: copies the key, normalizes the last-modified string to UTC, and
/// parses the size, all with the invariant culture.
/// </summary>
/// <param name="s3Object">The Amazon S3 object to create this instance from.</param>
public FileObject(S3Object s3Object)
{
    this.Key = s3Object.Key;
    // The S3 timestamp string carries no offset; treat it as UTC.
    var parsed = DateTime.Parse(s3Object.LastModified, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
    this.LastModified = parsed.ToUniversalTime();
    // Sortable ISO-8601 representation with an explicit Z suffix.
    this.LastModifiedGMTString = String.Format(CultureInfo.InvariantCulture, "{0:s}Z", this.LastModified);
    this.Size = Int64.Parse(s3Object.Size, CultureInfo.InvariantCulture);
}
/// <summary>
/// Sets the redirect location used when the S3 Object is accessed through the
/// S3 website endpoint, by delegating to the bucket/key overload.
/// </summary>
/// <param name="s3Object">The S3 Object</param>
/// <param name="websiteRedirectLocation">The redirect location</param>
/// <param name="s3Client">The Amazon S3 Client to use for S3 specific operations.</param>
public static void SetWebsiteRedirectLocation(S3Object s3Object, string websiteRedirectLocation, AmazonS3 s3Client)
{
    var bucketName = s3Object.BucketName;
    var key = s3Object.Key;
    SetWebsiteRedirectLocation(bucketName, key, websiteRedirectLocation, s3Client);
}
/// <summary>
/// Sets the server side encryption method for the S3 Object to the value
/// specified, by delegating to the bucket/key overload.
/// </summary>
/// <param name="s3Object">The S3 Object</param>
/// <param name="method">The server side encryption method</param>
/// <param name="s3Client">The Amazon S3 Client to use for S3 specific operations.</param>
public static void SetServerSideEncryption(S3Object s3Object, ServerSideEncryptionMethod method, AmazonS3 s3Client)
{
    var bucketName = s3Object.BucketName;
    var key = s3Object.Key;
    SetServerSideEncryption(bucketName, key, method, s3Client);
}
/// <summary>
/// Initializes a new S3StorageFile wrapping the given S3 entry and its
/// MIME content type.
/// </summary>
/// <param name="entry">The S3 object representing this file.</param>
/// <param name="contentType">The file's MIME content type.</param>
public S3StorageFile(S3Object entry, string contentType)
{
    _contentType = contentType;
    _entry = entry;
}
/// <summary>
/// Wraps an existing S3 object as a remote file, using its key as the
/// base-class path.
/// </summary>
/// <param name="s3Object">The backing S3 object.</param>
public RemoteFile(S3Object s3Object)
    : base(s3Object.Key)
{
    s3Obj = s3Object;
}
/// <summary>
/// Creates a remote file for the given absolute path with a fresh, empty
/// backing S3 object.
/// </summary>
/// <param name="absolutePath">Absolute path passed to the base class.</param>
public RemoteFile(string absolutePath)
    : base(absolutePath)
{
    s3Obj = new S3Object();
}
/// <summary>
/// Initializes a new S3StorageFolder wrapping the given S3 entry
/// (size unknown).
/// </summary>
/// <param name="entry">The S3 object representing this folder.</param>
public S3StorageFolder(S3Object entry)
{
    _entry = entry;
}
/// <summary>
/// Downloads a gzip-compressed text file from S3, decompresses it, decodes it
/// as ASCII and splits it into lines using the class-level delimiters.
/// </summary>
/// <param name="fileName">S3 object whose Key identifies the file in BUCKET.</param>
/// <returns>The file's non-empty lines.</returns>
private String[] ReadS3File(S3Object fileName)
{
    var gor = new GetObjectRequest().WithBucketName(BUCKET).WithKey(fileName.Key);
    string text;
    // Dispose the response so the underlying HTTP stream is released
    // (the original leaked it).
    using (var file = s3.GetObject(gor))
    using (var ms = new MemoryStream())
    {
        Trace.WriteLine("Started reading file");
        file.ResponseStream.CopyTo(ms); //actually fetches the file from S3
        Trace.WriteLine("Finished reading file");
        // Rewind and decompress the same buffer in place; the original copied
        // it into a second MemoryStream via ms.ToArray().
        ms.Position = 0;
        using (var gzipStream = new GZipStream(ms, CompressionMode.Decompress))
        using (var memory = new MemoryStream())
        {
            Trace.WriteLine("Decompressing file");
            // Stream.CopyTo replaces the original manual 4 KB read loop.
            gzipStream.CopyTo(memory);
            text = Encoding.ASCII.GetString(memory.ToArray());
            Trace.WriteLine("Finished decompressing file");
        }
    }
    var lines = text.Split(delimiters, StringSplitOptions.RemoveEmptyEntries);
    Trace.WriteLine("Finished reading file");
    return lines;
}
/// <summary>
/// Maps an S3 object to an Item, making its key relative to the given root
/// and stripping the surrounding quotes S3 puts on ETag values.
/// </summary>
/// <param name="root">Key prefix to remove when computing the relative path.</param>
/// <param name="src">The source S3 object.</param>
private static Item Map(string root, S3Object src)
{
    var relativeKey = src.Key.Substring(root.Length);
    var hash = src.ETag.Replace("\"", string.Empty);
    return new Item
    {
        Type = "S3Object",
        Path = src.Key,
        Length = src.Size,
        S3Object = src,
        Relative = relativeKey,
        Hash = hash
    };
}
/// <summary>
/// Downloads an S3 object, decrypts it with the shared AES decryptor, writes
/// it under the local folder, and restores the original last-write time from
/// the "x-amz-meta-LWT" metadata entry.
/// </summary>
/// <param name="s3Object">The S3 object to download.</param>
private static void DownloadFile(S3Object s3Object)
{
    System.Console.WriteLine("Downloading " + s3Object.Key);
    // Dispose the response so the underlying HTTP stream is released
    // (the original leaked it).
    using (GetObjectResponse getObjectResponse = _amazonS3Client.GetObject(new GetObjectRequest { BucketName = BucketName, Key = s3Object.Key }))
    {
        string filePath = Path.Combine(_folder, s3Object.Key);
        // Keys may contain '/' segments; make sure the target directory exists
        // before FileMode.Create tries to open the file.
        Directory.CreateDirectory(Path.GetDirectoryName(filePath));
        using (BufferedStream inputBufferedStream = new BufferedStream(getObjectResponse.ResponseStream))
        using (CryptoStream cryptoStream = new CryptoStream(inputBufferedStream, _aesDecryptor, CryptoStreamMode.Read))
        using (FileStream outputFileStream = new FileStream(filePath, FileMode.Create, FileAccess.ReadWrite))
        {
            // Stream-copy instead of the original byte-at-a-time loop.
            cryptoStream.CopyTo(outputFileStream);
        }
        // NOTE(review): DateTime.Parse uses the current culture — confirm the
        // metadata value round-trips; an invariant round-trip format ("o")
        // would be safer on both the write and read side.
        new FileInfo(filePath).LastWriteTime = DateTime.Parse(getObjectResponse.Metadata["x-amz-meta-LWT"]);
    }
}
/// <summary>
/// Builds status information for an S3 file: content type (from the file
/// extension), image flag, image dimensions read from custom metadata
/// headers, and size.
/// </summary>
/// <param name="s3File">The S3 object to describe.</param>
/// <param name="bucket">The bucket the object lives in.</param>
/// <param name="prefix">Key prefix stripped off to obtain the display filename.</param>
public AmazonFilesStatus(S3Object s3File,string bucket,string prefix)
{
    var filename = s3File.Key.Replace(prefix,"");
    // Guard against keys without an extension: the original
    // filename.Remove(0, filename.LastIndexOf('.')) threw
    // ArgumentOutOfRangeException when LastIndexOf returned -1.
    var dotIndex = filename.LastIndexOf('.');
    var fileExt = dotIndex >= 0 ? filename.Substring(dotIndex) : string.Empty;
    type = getContentTypeByExtension(fileExt);
    isimage = Regex.Match(filename.ToLower(),AmazonHelper.ImgExtensions()).Success;
    var client = AmazonHelper.GetS3Client();
    var metareq = new GetObjectMetadataRequest().WithBucketName(bucket).WithKey(s3File.Key);
    var meta = client.GetObjectMetadata(metareq);
    var height = 0;
    var width = 0;
    if (isimage)
    {
        // Dimensions are stored as custom metadata by the uploader; default to
        // 0 when absent.
        height = String.IsNullOrWhiteSpace(meta.Headers["x-amz-meta-height"]) ? 0 : Int32.Parse(meta.Headers["x-amz-meta-height"]);
        width = String.IsNullOrWhiteSpace(meta.Headers["x-amz-meta-width"]) ? 0 : Int32.Parse(meta.Headers["x-amz-meta-width"]);
    }
    var size = Convert.ToInt32(s3File.Size);
    SetValues(filename,bucket,prefix,size,height,width);
}