/// <summary>
/// Downloads an S3 object (optionally a specific version) from the configured bucket
/// and returns its contents as a byte array.
/// </summary>
/// <param name="sObjectKey">Key of the object to fetch.</param>
/// <param name="sVersionId">Version id to fetch, or null/empty for the latest version.</param>
/// <returns>The full object contents.</returns>
public byte[] FetchFile(string sObjectKey, string sVersionId)
{
    string BUCKET_NAME = ConfigurationManager.AppSettings["AWSBUCKET"];
    // Dispose the client, response, and stream: the original leaked the HTTP
    // connection (response never disposed) on every call.
    using (AmazonS3 client = AWSClientFactory.CreateAmazonS3Client(S3ACCESSKEY, S3SECRETKEY))
    {
        GetObjectRequest request = new GetObjectRequest();
        request.WithKey(sObjectKey);
        request.WithBucketName(BUCKET_NAME);
        // IsNullOrEmpty also guards against a null version id (original only checked "").
        if (!string.IsNullOrEmpty(sVersionId))
        {
            request.WithVersionId(sVersionId);
        }
        using (GetObjectResponse response = client.GetObject(request))
        using (MemoryStream ms = new MemoryStream())
        {
            // Fixed-size copy buffer: allocating ContentLength bytes up front (as the
            // original did) is wasteful for large objects and redundant with the
            // MemoryStream accumulator.
            byte[] buffer = new byte[32768];
            int read;
            while ((read = response.ResponseStream.Read(buffer, 0, buffer.Length)) > 0)
            {
                ms.Write(buffer, 0, read);
            }
            return ms.ToArray();
        }
    }
}
/// <summary>
/// Fetches the chunk-metadata object associated with an S3 object and returns its
/// chunk list together with a SHA-1 hash of the raw metadata JSON.
/// Both tuple items are null when no metadata object exists.
/// </summary>
/// <param name="s3objectName">Base object name; the metadata key is ChunkMetadataObjectPrefix + name.</param>
public System.Tuple<List<ChunkInfo>, byte[]> GetObjectMetadata(string s3objectName)
{
    List<ChunkInfo> retVal = null;
    byte[] hash = null;
    string metadataObjectName = ChunkMetadataObjectPrefix + s3objectName;
    if (S3ObjectExists(metadataObjectName))
    {
        GetObjectRequest request = new GetObjectRequest();
        request.WithBucketName(bucketName);
        request.WithKey(metadataObjectName);
        // Dispose response/reader so the underlying HTTP stream is released
        // (the original leaked both on every call).
        using (GetObjectResponse response = amazonS3Client.GetObject(request))
        using (StreamReader reader = new StreamReader(response.ResponseStream))
        {
            string chunkMD_JSON = reader.ReadToEnd();
            FileMD fileMD = JsonConvert.DeserializeObject<FileMD>(chunkMD_JSON);
            // Hash the raw JSON so callers can cheaply detect metadata changes.
            using (SHA1 sha1 = new SHA1CryptoServiceProvider())
            {
                hash = sha1.ComputeHash(Encoding.ASCII.GetBytes(chunkMD_JSON));
            }
            retVal = fileMD.ChunkList;
        }
    }
    return new System.Tuple<List<ChunkInfo>, byte[]>(retVal, hash);
}
/// <summary>
/// Downloads an S3 object into memory and returns its contents.
/// Returns null (after logging) on any failure.
/// </summary>
/// <param name="s3ObjectName">Key of the object to download.</param>
public byte[] DownloadS3ObjectToBytes(string s3ObjectName)
{
    try
    {
        GetObjectRequest request = new GetObjectRequest();
        request.WithBucketName(bucketName);
        request.WithKey(s3ObjectName);
        // Dispose the response: the original leaked the HTTP connection on every call.
        using (GetObjectResponse response = amazonS3Client.GetObject(request))
        using (MemoryStream ms = new MemoryStream())
        {
            byte[] buffer = new byte[1024];
            int read;
            while ((read = response.ResponseStream.Read(buffer, 0, buffer.Length)) > 0)
            {
                ms.Write(buffer, 0, read);
            }
            return ms.ToArray();
        }
    }
    catch (Exception e)
    {
        structuredLog("E", "Exception in DownloadS3ObjectToBytes: " + e);
        return null;
    }
}
/// <summary>
/// Downloads an S3 object to a local file, overwriting any existing file.
/// Returns false when the object's chunk-metadata marker does not exist or on error.
/// </summary>
/// <param name="s3ObjectName">Key of the object to download.</param>
/// <param name="filePath">Destination path on the local filesystem.</param>
public bool DownloadS3ObjectToFile(string s3ObjectName, string filePath)
{
    try
    {
        // Existence is checked via the metadata marker, not the object key itself.
        if (!S3ObjectExists(ChunkMetadataObjectPrefix + s3ObjectName))
        {
            return false;
        }
        GetObjectRequest request = new GetObjectRequest();
        request.WithBucketName(bucketName);
        request.WithKey(s3ObjectName);
        // Dispose both the response and the file stream: the original leaked the
        // response always, and the file handle whenever CopyTo threw.
        using (GetObjectResponse response = amazonS3Client.GetObject(request))
        {
            if (File.Exists(filePath))
            {
                File.Delete(filePath);
            }
            using (FileStream localFileStream = File.Create(filePath))
            {
                response.ResponseStream.CopyTo(localFileStream);
            }
        }
        return true;
    }
    catch (Exception e)
    {
        structuredLog("E", "Exception in DownloadS3ObjectToFile: " + e);
        return false;
    }
}
/// <summary>
/// Downloads a ProcessItem's S3 object to the local path recorded in its
/// "x-amz-meta-localpath" metadata, creating intermediate directories as needed.
/// file.Action is set to DownloadingException up front and only flipped to
/// DownloadingDone after the file is fully written, so any early return or
/// exception leaves the failure state in place.
/// </summary>
private void Download(ProcessItem file) {
    using (var client = CreateAmazonS3Client()) {
        // Pessimistic sentinel: overwritten with DownloadingDone only on success.
        file.Action = ProcessAction.DownloadingException;
        if (client == null) { return; }
        try {
            var request = new GetObjectRequest();
            request.WithBucketName(_bucketName).WithKey(file.S3Path);
            using (var response = client.GetObject(request)) {
                var fileName = "";
                if (response.Metadata != null && response.Metadata.Count > 0) {
                    // The uploader stored the original (URL-encoded) local path in object metadata.
                    var localPath = response.Metadata.Get("x-amz-meta-localpath");
                    if (String.IsNullOrWhiteSpace(localPath)) { return; }
                    localPath = HttpUtility.UrlDecode(localPath);
                    if (localPath != null) {
                        // Rebuild the directory tree under KnownFolders.Root, skipping the
                        // first path segment (index 0) and the last (the file name itself).
                        var localPathArr = localPath.Split('\\');
                        var rootDirectory = KnownFolders.Root.FolderName;
                        for (var i = 1; i < localPathArr.Length - 1; i++) {
                            rootDirectory = Path.Combine(rootDirectory, localPathArr[i]);
                            if (!Directory.Exists(rootDirectory)) {
                                Directory.CreateDirectory(rootDirectory);
                            }
                        }
                        fileName = Path.Combine(rootDirectory, localPathArr[localPathArr.Length - 1]);
                    }
                }
                // No usable local path in metadata -> leave the item in the exception state.
                if (String.IsNullOrWhiteSpace(fileName)) { return; }
                using (var fileStream = new FileStream(fileName, FileMode.Create)) {
                    using (var stream = response.ResponseStream) {
                        // Drain the response stream in 32 KB chunks; the final iteration
                        // writes 0 bytes, which is a harmless no-op.
                        var data = new byte[32768];
                        int bytesRead;
                        do {
                            bytesRead = stream.Read(data, 0, data.Length);
                            fileStream.Write(data, 0, bytesRead);
                        } while (bytesRead > 0);
                        fileStream.Flush();
                        file.Action = ProcessAction.DownloadingDone;
                    }
                }
            }
        } catch (Exception) {
            // Deliberate best-effort: failures are reported via the
            // DownloadingException sentinel set at the top, not rethrown.
        }
    }
}
/// <summary>
/// Issues a GetObject request and returns Amazon's extended request id (AmazonId2),
/// useful for support/diagnostics.
/// </summary>
/// <param name="bucketName">Bucket containing the object.</param>
/// <param name="objectKey">Key of the object.</param>
public string GetObjectInformation(string bucketName, string objectKey)
{
    GetObjectRequest request = new GetObjectRequest();
    request.WithBucketName(bucketName).WithKey(objectKey);
    // Dispose the response: GetObject opens the object's content stream even though
    // only the request id header is read here; the original leaked that stream.
    using (GetObjectResponse response = _client.GetObject(request))
    {
        return response.AmazonId2;
    }
}
/// <summary>
/// Returns a readable stream for a package file, either from the public packages URL
/// (when useCache is set and a PackagesUrl is configured) or directly from S3.
/// The caller owns and must dispose the returned stream. Returns null when the S3
/// request fails. Folder name is validated but otherwise ignored.
/// </summary>
public Stream GetFile(string folderName, string fileName, bool useCache) {
    // Folder ignored - packages are stored at the top level of the S3 bucket.
    if (String.IsNullOrWhiteSpace(folderName)) { throw new ArgumentNullException("folderName"); }
    if (String.IsNullOrWhiteSpace(fileName)) { throw new ArgumentNullException("fileName"); }
    if (useCache && !string.IsNullOrWhiteSpace(clientContext.PackagesUrl)) {
        // Cached path: plain HTTP fetch from the public packages URL.
        var url = new Uri(string.Format("{0}/{1}", clientContext.PackagesUrl, fileName));
        WebRequest request = WebRequest.Create(url);
        WebResponse response = request.GetResponse();
        return(response.GetResponseStream());
    } else {
        var request = new GetObjectRequest();
        request.WithBucketName(clientContext.BucketName);
        request.WithKey(fileName);
        // Generous 30-minute timeout for large package downloads.
        request.WithTimeout((int)TimeSpan.FromMinutes(30).TotalMilliseconds);
        using (AmazonS3 client = clientContext.CreateInstance()) {
            try {
                S3Response response = WrapRequestInErrorHandler(() => client.GetObject(request));
                if (response != null) {
                    // NOTE(review): the client is disposed when this using block exits while
                    // the caller is still reading ResponseStream — confirm the stream
                    // survives client disposal in this SDK version.
                    return(response.ResponseStream);
                }
            } catch (Exception) {
                // hate swallowing an error — falls through to return null
            }
            return(null);
        }
    }
}
/// <summary>
/// Returns a readable stream for an image file, either from the public images URL
/// (when useCache is set and an ImagesUrl is configured) or directly from S3.
/// An empty folder name is allowed; a non-empty one is used as a "/"-terminated key
/// prefix. The caller owns and must dispose the returned stream. Returns null when
/// the S3 request fails.
/// </summary>
public Stream get_file(string folderName, string fileName, bool useCache) {
    // It's allowed to have an empty folder name.
    // if (String.IsNullOrWhiteSpace(folderName)) throw new ArgumentNullException("folderName");
    if (String.IsNullOrWhiteSpace(fileName)) { throw new ArgumentNullException("fileName"); }
    // Normalize the folder to "" or a trailing-slash prefix, then build the full key.
    folderName = (string.IsNullOrEmpty(folderName) ? String.Empty : folderName.Substring(folderName.Length - 1, 1) == "/" ? folderName : folderName + "/");
    fileName = string.Format("{0}{1}", folderName, fileName);
    if (useCache && !string.IsNullOrWhiteSpace(clientContext.ImagesUrl)) {
        // Cached path: plain HTTP fetch from the public images URL.
        var url = new Uri(string.Format("{0}/{1}", clientContext.ImagesUrl, fileName));
        WebRequest request = WebRequest.Create(url);
        WebResponse response = request.GetResponse();
        return(response.GetResponseStream());
    } else {
        var request = new GetObjectRequest();
        request.WithBucketName(clientContext.BucketName);
        request.WithKey(fileName);
        // Generous 30-minute timeout for large downloads.
        request.WithTimeout((int)TimeSpan.FromMinutes(30).TotalMilliseconds);
        using (AmazonS3 client = clientContext.create_instance()) {
            try {
                S3Response response = wrap_request_in_error_handler(() => client.GetObject(request));
                if (response != null) {
                    // NOTE(review): the client is disposed when this using block exits while
                    // the caller is still reading ResponseStream — confirm the stream
                    // survives client disposal in this SDK version.
                    return(response.ResponseStream);
                }
            } catch (Exception) {
                // hate swallowing an error — falls through to return null
            }
            return(null);
        }
    }
}
/// <summary>
/// Downloads an S3 object to a local file using credentials from app settings.
/// Does nothing when no credentials are configured; S3 errors are reported on the console.
/// </summary>
/// <param name="filePathS3">Key of the object within the configured bucket.</param>
/// <param name="destFilePath">Local destination path for the downloaded object.</param>
public static void GetObjectFromS3(string filePathS3, string destFilePath)
{
    // Guard clause: bail out early when credentials are not configured.
    if (!CheckS3Credentials())
    {
        return;
    }
    NameValueCollection appConfig = ConfigurationManager.AppSettings;
    using (AmazonS3 s3 = Amazon.AWSClientFactory.CreateAmazonS3Client(
               appConfig["AWSAccessKey"],
               appConfig["AWSSecretKey"],
               RegionEndpoint.USWest1))
    {
        try
        {
            GetObjectRequest fetch = new GetObjectRequest()
                .WithBucketName(Constants.AmazonS3BucketName)
                .WithKey(filePathS3);
            using (GetObjectResponse result = s3.GetObject(fetch))
            {
                result.WriteResponseStreamToFile(destFilePath);
            }
        }
        catch (AmazonS3Exception ex)
        {
            bool credentialProblem = ex.ErrorCode != null
                && (ex.ErrorCode.Equals("InvalidAccessKeyId")
                    || ex.ErrorCode.Equals("InvalidSecurity"));
            if (credentialProblem)
            {
                Console.WriteLine("Check the provided AWS Credentials.");
                Console.WriteLine(
                    "For service sign up go to http://aws.amazon.com/s3");
            }
            else
            {
                Console.WriteLine(
                    "Error occurred. Message:'{0}' when writing an object"
                    , ex.Message);
            }
        }
    }
}
/// <summary>
/// Probes whether an S3 log file can be fetched with the credentials stored at
/// credentialFilePath. Returns true when the GetObject call succeeds; false when
/// credentials are missing or the request fails (both outcomes are logged).
/// The object contents are not read.
/// </summary>
public static bool ReadLogFile(string s3FileName, string credentialFilePath)
{
    Type t = System.Reflection.MethodBase.GetCurrentMethod().DeclaringType;
    try
    {
        if (ReadS3Credentials(credentialFilePath) == false)
        {
            LogEvents.S3NoCredentials(t);
            return false;
        }
        // Dispose both client and response: the original leaked the response's
        // open content stream (and the client) on every call.
        using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(_accessKeyId, _secretAccessKey))
        {
            GetObjectRequest request = new GetObjectRequest();
            request.WithBucketName(_bucketName).WithKey(s3FileName);
            using (S3Response responseWithMetadata = client.GetObject(request))
            {
                // Reaching here without an exception means the object is readable.
            }
        }
        return true;
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        LogEvents.S3Error(t, amazonS3Exception);
        return false;
    }
}
/// <summary>
/// Downloads an S3 object from the configured bucket and returns its contents
/// as a byte array.
/// </summary>
/// <param name="path">Key of the object to fetch.</param>
public byte[] GetData(string path)
{
    using (var client = CreateAmazonS3Client())
    {
        var request = new GetObjectRequest();
        request.WithBucketName(_bucket.BucketName).WithKey(path);
        using (var response = client.GetObject(request))
        using (var source = response.ResponseStream)
        using (var accumulator = new MemoryStream())
        {
            // Drain the response into memory in 32 KB chunks.
            var chunk = new byte[32768];
            for (int count = source.Read(chunk, 0, chunk.Length);
                 count > 0;
                 count = source.Read(chunk, 0, chunk.Length))
            {
                accumulator.Write(chunk, 0, count);
            }
            accumulator.Flush();
            return accumulator.ToArray();
        }
    }
}
/// <summary>
/// Uploads filePath to S3 as fixed-size chunks, skipping chunks already present:
/// downloads the remote chunk metadata (if any), chunks the local file, uploads
/// only the differing chunks, then re-uploads the metadata JSON when anything
/// changed. Returns the SHA-1 of the new metadata JSON, or null on any failure.
/// Side effects: updates StaticChunkSize, chunkCompressionType, and
/// chunkEncryptionType from the remote metadata when it exists.
/// </summary>
public byte[] UploadFileAsChunks(string filePath) {
    string s3objectName;
    List <ChunkInfo> chunkList_cloud = new List <ChunkInfo>();; // chunks already in the cloud, indexed by chunk-index (e.g. 0, 1, 2, ...)
    List <ChunkInfo> chunkList_local; // chunks of the local file, indexed by chunk-index (e.g. 0, 1, 2, ...)
    try {
        if (logger != null) { logger.Log("Start Synchronizer Check Blob Exists"); }
        s3objectName = Path.GetFileName(filePath);
        bool s3ObjectExists = S3ObjectExists(ChunkMetadataObjectPrefix + s3objectName);
        if (logger != null) { logger.Log("End Synchronizer Check Blob Exists"); }
        if (s3ObjectExists) {
            if (logger != null) { logger.Log("Start Synchronizer Fill Remote ChunkList"); }
            GetObjectRequest request = new GetObjectRequest();
            request.WithBucketName(bucketName);
            request.WithKey(ChunkMetadataObjectPrefix + s3objectName);
            // NOTE(review): response/reader are never disposed; the HTTP stream leaks here.
            GetObjectResponse response = amazonS3Client.GetObject(request);
            StreamReader reader = new StreamReader(response.ResponseStream);
            string chunkMD_JSON = reader.ReadToEnd();
            FileMD fileMD = JsonConvert.DeserializeObject <FileMD>(chunkMD_JSON);
            // Adopt the chunk size and codec settings recorded in the remote metadata
            // so local chunking lines up with what is already uploaded.
            StaticChunkSize = fileMD.StaticChunkSize;
            chunkList_cloud = fileMD.ChunkList;
            if (logger != null) { logger.Log("End Synchronizer Fill Remote ChunkList"); }
            chunkCompressionType = SyncFactory.GetCompressionType(fileMD.compressionType);
            chunkEncryptionType = SyncFactory.GetEncryptionType(fileMD.encryptionType);
        }
        if (logger != null) { logger.Log("Start Synchronizer Fill Local ChunkList"); }
        StaticChunk staticChunker = new StaticChunk(StaticChunkSize);
        chunkList_local = staticChunker.GetCurrentChunkList(filePath); // if doing other class that implements the IChunk interface
        // structuredLog("I", "Number of chunks locally: " + chunkList_local.Count);
        if (logger != null) { logger.Log("End Synchronizer Fill Local ChunkList"); }
        if (logger != null) { logger.Log("Start Synchronizer ChunkList Compare"); }
        // Diff local vs. cloud chunk lists to find what actually needs uploading.
        List <ChunkInfo> chunkList_toUpload = staticChunker.GetUploadChunkList(chunkList_local, chunkList_cloud);
        // structuredLog("I", "Number of chunks on cloud blob: " + chunkList_cloud.Count);
        // structuredLog("I", "Number of chunks to be uploaded: " + chunkList_toUpload.Count);
        if (logger != null) { logger.Log("End Synchronizer ChunkList Compare"); }
        if (logger != null) { logger.Log("Start Synchronizer Upload Multiple Chunks"); }
        UploadChunkList(ref chunkList_toUpload, filePath, s3objectName);
        if (logger != null) { logger.Log("End Synchronizer Upload Multiple Chunks"); }
        // structuredLog("I", "Number of chunks uploaded: " + chunkList_toUpload.Count);
        if (logger != null) { logger.Log("Start Synchronizer ChunkList Upload"); }
        // The metadata JSON always describes the full local chunk list plus codec settings.
        string json = JsonConvert.SerializeObject(new FileMD(StaticChunkSize, chunkList_local, SyncFactory.GetCompressionTypeAsString(this.chunkCompressionType), SyncFactory.GetEncryptionTypeAsString(this.chunkEncryptionType)), new KeyValuePairConverter());
        if (chunkList_toUpload.Count > 0) // upload new chunk list only if we uploaded some new chunks
        {
            UploadStringToS3Object(ChunkMetadataObjectPrefix + s3objectName, json);
        }
        // Return the hash of the metadata JSON so callers can detect changes cheaply.
        SHA1 sha1 = new SHA1CryptoServiceProvider();
        byte[] ret = sha1.ComputeHash(Encoding.ASCII.GetBytes(json));
        if (logger != null) { logger.Log("End Synchronizer ChunkList Upload"); }
        return(ret);
    } catch (Exception e) {
        structuredLog("E", " . UploadFileAsChunks: " + e);
        return(null);
    }
}
/*
 * Sample call for upload:-
 * byte[] array = new byte[1024*1024*1024];
 * Random random = new Random();
 * random.NextBytes(array);
 * double timeTaken_Upload = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Upload, "fooContainer", "fooBlob");
 * double timeTaken_Download = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Download, "fooContainer", "fooBlob");
 */
/// <summary>
/// Micro-benchmark: uploads or downloads a blob on Azure or S3 and returns the
/// elapsed wall-clock milliseconds of the timed region. Credentials come from
/// AccountName/AccountSharedKey app settings; container/blob names default to
/// "testingraw" and a fresh GUID. Throws InvalidDataException for any other
/// synchronizer type. exp_directory is currently unused in this body.
/// </summary>
public static double doRawCloudPerf(byte[] input, SynchronizerType synchronizerType, SynchronizeDirection syncDirection, string exp_directory, Logger logger, string containerName = null, string blobName = null) {
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    // NOTE(review): initialized with local DateTime.Now but reassigned to UtcNow in
    // the timed regions; if a path sets only one of begin/end the result mixes the
    // two clocks — confirm all reachable paths set both.
    DateTime begin = DateTime.Now, end = DateTime.Now;
    if (synchronizerType == SynchronizerType.Azure) {
        #region azure download/upload
        if (containerName == null) { containerName = "testingraw"; }
        if (blobName == null) { blobName = Guid.NewGuid().ToString(); }
        CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(accountName, accountKey), true);
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference(containerName);
        if (syncDirection == SynchronizeDirection.Upload) {
            logger.Log("Start Stream Append");
            // Container creation is deliberately outside the timed region.
            container.CreateIfNotExist();
            begin = DateTime.UtcNow; // ---- timed region start ----
            try {
                using (MemoryStream memoryStream = new System.IO.MemoryStream(input)) {
                    CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                    blockBlob.UploadFromStream(memoryStream);
                }
            } catch (Exception e) {
                Console.WriteLine("exception: " + e);
            }
            end = DateTime.UtcNow; // ---- timed region end ----
            logger.Log("End Stream Append");
        }
        if (syncDirection == SynchronizeDirection.Download) {
            logger.Log("Start Stream Get");
            logger.Log("Start Stream GetAll");
            try {
                CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                // NOTE(review): the blob is downloaded twice — once here (untimed warm-up?)
                // and once inside the timed region below. Confirm this is intentional.
                byte[] blobContents = blockBlob.DownloadByteArray();
                //if (File.Exists(blobName))
                //    File.Delete(blobName);
                begin = DateTime.UtcNow; // ---- timed region start ----
                // using (FileStream fs = new FileStream(blobName, FileMode.OpenOrCreate))
                // {
                byte[] contents = blockBlob.DownloadByteArray();
                //     fs.Write(contents, 0, contents.Length);
                // }
            } catch (Exception e) {
                Console.WriteLine("exception: " + e);
            }
            end = DateTime.UtcNow; // ---- timed region end ----
            logger.Log("End Stream Get");
            logger.Log("End Stream GetAll");
        }
        #endregion
    } else if (synchronizerType == SynchronizerType.AmazonS3) {
        #region amazon s3 stuff
        if (containerName == null) { containerName = "testingraw"; }
        if (blobName == null) { blobName = Guid.NewGuid().ToString(); }
        AmazonS3Client amazonS3Client = new AmazonS3Client(accountName, accountKey);
        if (syncDirection == SynchronizeDirection.Upload) {
            // NOTE(review): this loop only breaks when the bucket is found and has no
            // other effect; PutBucket is called unconditionally afterwards. Looks like
            // leftover/dead logic — confirm.
            ListBucketsResponse response = amazonS3Client.ListBuckets();
            foreach (S3Bucket bucket in response.Buckets) {
                if (bucket.BucketName == containerName) {
                    break;
                }
            }
            amazonS3Client.PutBucket(new PutBucketRequest().WithBucketName(containerName));
            begin = DateTime.UtcNow; // ---- timed region start ----
            // NOTE(review): ms is not rewound after Write and never disposed — confirm
            // the SDK reads the stream from position 0.
            MemoryStream ms = new MemoryStream();
            ms.Write(input, 0, input.Length);
            PutObjectRequest request = new PutObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            request.InputStream = ms;
            amazonS3Client.PutObject(request);
            end = DateTime.UtcNow; // ---- timed region end ----
        }
        if (syncDirection == SynchronizeDirection.Download) {
            if (File.Exists(blobName)) {
                File.Delete(blobName);
            }
            begin = DateTime.UtcNow; // ---- timed region start ----
            GetObjectRequest request = new GetObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            // NOTE(review): the response is never disposed; the HTTP stream leaks here.
            GetObjectResponse response = amazonS3Client.GetObject(request);
            var localFileStream = File.Create(blobName);
            response.ResponseStream.CopyTo(localFileStream);
            localFileStream.Close();
            end = DateTime.UtcNow; // ---- timed region end ----
        }
        #endregion
    } else {
        throw new InvalidDataException("syncronizer type is not valid");
    }
    return((end - begin).TotalMilliseconds); // total time for the timed region, in milliseconds
}